From e396dd49d58ddc3d11c760e9e75ec7f1ac8c53b2 Mon Sep 17 00:00:00 2001 From: Nikita Malinin Date: Mon, 18 Sep 2023 17:45:00 +0200 Subject: [PATCH] Release v2.6.0 of NNCF to master --- .file-header | 10 + .git-blame-ignore-revs | 5 + .github/action_configs/labeler.yml | 50 +- .github/workflows/api_changes_check.yml | 4 +- .github/workflows/build_and_publish_doc.yml | 2 +- .github/workflows/build_html_doc.yml | 4 +- .github/workflows/build_schema_page.yml | 4 +- .github/workflows/labeler.yml | 2 +- .github/workflows/post_pr_merge.yml | 40 + .github/workflows/pre-commit-linters.yml | 21 + .github/workflows/precommit.yml | 33 + .github/workflows/python-publish.yml | 2 +- .gitignore | 9 + .isort.cfg | 6 + .markdownlint.yaml | 9 + .pre-commit-config.yaml | 22 + .pylintrc | 5 +- CODEOWNERS | 8 +- CONTRIBUTING.md | 40 +- Makefile | 121 +- README.md | 250 +- ReleaseNotes.md | 137 +- Security.md | 5 +- codecov.yml | 27 + docker/README.md | 9 +- docs/Algorithms.md | 4 +- docs/ConfigFile.md | 9 +- docs/FAQ.md | 50 +- docs/Installation.md | 40 +- docs/ModelZoo.md | 943 ++ docs/NNCFArchitecture.md | 60 +- docs/Usage.md | 66 +- .../AdaptiveCompressionLevelTraining.md | 22 +- .../EarlyExitTraining.md | 6 +- docs/api/source/conf.py | 46 +- .../BatchnormAdaptation.md | 14 +- docs/compression_algorithms/Binarization.md | 34 +- .../compression_algorithms/CompressWeights.md | 22 + .../KnowledgeDistillation.md | 53 +- docs/compression_algorithms/Pruning.md | 258 +- docs/compression_algorithms/Quantization.md | 72 +- docs/compression_algorithms/Sparsity.md | 16 +- .../post_training/ONNX.md | 9 +- .../post_training/Quantization.md | 41 +- docs/styleguide/PyGuide.md | 191 +- examples/common/README.md | 2 +- .../torch/classification/Quickstart.md | 53 +- .../torch/classification/bootstrap_nas.py | 10 +- .../classification/bootstrap_nas_search.py | 10 +- .../onnx/mobilenet_v2/README.md | 21 +- .../onnx/mobilenet_v2/main.py | 4 +- .../onnx/mobilenet_v2/requirements.txt | 2 +- .../README.md | 23 +- .../main.py | 50 +- .../requirements.txt | 2 + .../openvino/mobilenet_v2/README.md | 23 +- .../openvino/mobilenet_v2/main.py | 50 +- .../openvino/mobilenet_v2/requirements.txt | 2 +- .../requirements.txt | 2 - .../openvino/yolov8/README.md | 21 +- .../openvino/yolov8/main.py | 37 +- .../openvino/yolov8/requirements.txt | 6 +- .../README.md | 26 +- .../main.py | 44 +- .../requirements.txt | 6 +- .../tensorflow/mobilenet_v2/README.md | 23 +- .../tensorflow/mobilenet_v2/main.py | 43 +- .../tensorflow/mobilenet_v2/requirements.txt | 4 +- .../torch/mobilenet_v2/README.md | 23 +- .../torch/mobilenet_v2/main.py | 76 +- .../torch/mobilenet_v2/requirements.txt | 5 +- .../torch/ssd300_vgg16/README.md | 23 +- .../torch/ssd300_vgg16/main.py | 122 +- .../torch/ssd300_vgg16/requirements.txt | 10 +- examples/tensorflow/classification/README.md | 68 +- examples/tensorflow/classification/main.py | 6 + .../object_detection/architecture/darknet.py | 2 + .../object_detection/utils/yolo_v4_utils.py | 6 +- examples/tensorflow/common/utils.py | 3 - .../tensorflow/object_detection/README.md | 62 +- examples/tensorflow/object_detection/main.py | 15 +- examples/tensorflow/requirements.txt | 4 +- examples/tensorflow/segmentation/README.md | 72 +- examples/torch/README.md | 18 +- examples/torch/classification/README.md | 105 +- examples/torch/classification/main.py | 7 +- examples/torch/common/example_logger.py | 11 + examples/torch/common/export.py | 8 +- examples/torch/common/models/__init__.py | 11 + .../models/classification/rmnet_cifar.py | 11 + 
.../common/models/segmentation/__init__.py | 11 + .../torch/common/models/segmentation/icnet.py | 4 +- .../torch/common/models/segmentation/unet.py | 4 +- examples/torch/object_detection/README.md | 39 +- examples/torch/object_detection/eval.py | 2 +- .../torch/object_detection/layers/__init__.py | 11 + .../layers/extensions/__init__.py | 11 + .../layers/functions/__init__.py | 11 + .../layers/modules/__init__.py | 11 + examples/torch/object_detection/main.py | 20 +- .../object_detection/utils/augmentations.py | 3 +- examples/torch/requirements.txt | 4 +- .../torch/semantic_segmentation/README.md | 41 +- .../datasets/__init__.py | 11 + examples/torch/semantic_segmentation/main.py | 7 +- .../semantic_segmentation/metric/__init__.py | 11 + nncf/__init__.py | 45 +- nncf/api/compression.py | 3 +- nncf/common/accuracy_aware_training/runner.py | 26 +- .../accuracy_aware_training/training_loop.py | 18 +- nncf/common/compression.py | 4 +- nncf/common/deprecation.py | 29 +- nncf/common/factory.py | 45 +- nncf/common/graph/graph.py | 175 +- nncf/common/graph/graph_matching.py | 226 +- nncf/common/graph/layer_attributes.py | 92 +- nncf/common/graph/operator_metatypes.py | 1 + nncf/common/graph/patterns/patterns.py | 26 +- nncf/common/hardware/config.py | 3 +- nncf/common/hardware/configs/cpu.json | 13 +- nncf/common/hardware/configs/gpu.json | 6 + nncf/common/insertion_point_graph.py | 29 +- nncf/common/plotting.py | 22 + nncf/common/pruning/mask_propagation.py | 6 +- nncf/common/pruning/operations.py | 34 +- .../common/pruning/shape_pruning_processor.py | 4 +- nncf/common/pruning/statistics.py | 8 +- nncf/common/pruning/utils.py | 10 +- nncf/common/quantization/config_assignment.py | 20 +- .../quantizer_propagation/graph.py | 171 +- .../quantizer_propagation/solver.py | 251 +- nncf/common/quantization/quantizer_removal.py | 6 +- nncf/common/quantization/structs.py | 10 +- nncf/common/scopes.py | 8 +- nncf/common/sparsity/statistics.py | 2 +- nncf/common/statistics.py | 2 +- nncf/common/strip.py | 39 + nncf/common/tensor_statistics/aggregator.py | 28 +- nncf/common/tensor_statistics/collectors.py | 7 +- .../tensor_statistics/statistic_point.py | 19 +- nncf/common/tensor_statistics/statistics.py | 10 +- nncf/common/utils/backend.py | 122 +- nncf/common/utils/dot_file_rw.py | 11 + nncf/common/utils/os.py | 27 + nncf/common/utils/timer.py | 7 +- nncf/config/config.py | 4 +- nncf/config/extractors.py | 4 +- nncf/config/schemata/algo/filter_pruning.py | 1 + nncf/config/schemata/common/targeting.py | 11 + nncf/config/schemata/defaults.py | 2 + nncf/experimental/common/graph/netron.py | 33 +- .../common/pruning/nodes_grouping.py | 6 +- .../experimental/common/pruning/operations.py | 51 +- .../common/pruning/propagation_data.py | 2 +- .../common/tensor_statistics/collectors.py | 81 +- .../openvino/quantization/quantize_model.py | 157 - nncf/experimental/tensor/README.md | 174 + nncf/experimental/tensor/__init__.py | 16 + nncf/experimental/tensor/enums.py | 43 + nncf/experimental/tensor/functions.py | 361 + nncf/experimental/tensor/numpy_functions.py | 164 + nncf/experimental/tensor/tensor.py | 181 + nncf/experimental/tensor/torch_functions.py | 148 + .../torch/nas/bootstrapNAS/BootstrapNAS.md | 110 +- .../bootstrapNAS/elasticity/elastic_kernel.py | 31 +- .../bootstrapNAS/elasticity/elastic_width.py | 18 +- .../elasticity/multi_elasticity_handler.py | 13 + .../bootstrapNAS/elasticity/visualization.py | 6 +- .../bootstrapNAS/search/evaluator_handler.py | 11 + .../torch/nas/bootstrapNAS/search/search.py | 112 
+- .../torch/nas/bootstrapNAS/search/supernet.py | 158 + .../training/progressive_shrinking_builder.py | 15 + .../progressive_shrinking_controller.py | 6 +- nncf/experimental/torch/pruning/operations.py | 29 +- .../torch/quantization/quantize_model.py | 15 +- .../replace_custom_modules}/__init__.py | 0 .../timm_custom_modules.py | 171 + .../search_building_blocks/search_graph.py | 7 +- .../sparsity/movement/MovementSparsity.md | 16 +- .../torch/sparsity/movement/layers.py | 2 +- .../movement/structured_mask_handler.py | 2 +- nncf/onnx/graph/metatypes/onnx_metatypes.py | 295 +- nncf/onnx/graph/model_transformer.py | 2 +- nncf/onnx/graph/model_utils.py | 53 + nncf/onnx/graph/nncf_graph_builder.py | 223 +- nncf/onnx/graph/node_utils.py | 196 +- nncf/onnx/graph/onnx_graph.py | 197 +- .../graph/transformations/command_creation.py | 2 +- nncf/onnx/graph/transformations/commands.py | 18 +- nncf/onnx/hardware/fused_patterns.py | 135 +- nncf/onnx/hardware/pattern_operations.py | 6 +- .../onnx/quantization/default_quantization.py | 55 +- nncf/onnx/quantization/ignored_patterns.py | 86 +- nncf/onnx/quantization/quantize_model.py | 4 +- .../onnx/quantization/quantizer_parameters.py | 29 +- nncf/onnx/statistics/aggregator.py | 11 +- nncf/onnx/statistics/collectors.py | 10 +- nncf/onnx/statistics/statistics.py | 4 +- nncf/openvino/engine.py | 46 +- nncf/openvino/graph/layer_attributes.py | 129 + nncf/openvino/graph/metatypes/common.py | 14 +- .../graph/metatypes/openvino_metatypes.py | 146 +- nncf/openvino/graph/model_transformer.py | 119 +- nncf/openvino/graph/model_utils.py | 80 + nncf/openvino/graph/nncf_graph_builder.py | 109 +- nncf/openvino/graph/node_utils.py | 57 +- .../graph/transformations/command_creation.py | 21 + .../graph/transformations/commands.py | 46 +- nncf/openvino/hardware/fused_patterns.py | 262 +- nncf/openvino/hardware/pattern_operations.py | 1 + nncf/openvino/pot/engine.py | 17 +- .../pot/quantization/quantize_model.py | 15 +- .../quantization/default_quantization.py | 20 +- .../openvino/quantization/ignored_patterns.py | 96 +- nncf/openvino/quantization/quantize_model.py | 141 +- .../quantization/weights_compression.py | 99 + nncf/openvino/statistics/aggregator.py | 15 +- nncf/openvino/statistics/collectors.py | 13 +- nncf/openvino/statistics/statistics.py | 4 +- nncf/quantization/__init__.py | 1 + nncf/quantization/advanced_parameters.py | 73 +- .../algorithms/accuracy_control/algorithm.py | 295 +- .../algorithms/accuracy_control/backend.py | 38 +- .../algorithms/accuracy_control/evaluator.py | 340 + .../accuracy_control/openvino_backend.py | 46 +- .../accuracy_control/rank_functions.py | 35 +- .../algorithms/accuracy_control/ranker.py | 301 +- .../accuracy_control/subset_selection.py | 70 + nncf/quantization/algorithms/algorithm.py | 47 +- .../algorithms/bias_correction/algorithm.py | 401 +- .../algorithms/bias_correction/backend.py | 58 +- .../bias_correction/onnx_backend.py | 46 +- .../bias_correction/openvino_backend.py | 51 +- .../algorithms/channel_alignment}/__init__.py | 0 .../algorithms/channel_alignment/algorithm.py | 481 + .../algorithms/channel_alignment/backend.py | 144 + .../channel_alignment/openvino_backend.py | 148 + .../fast_bias_correction/algorithm.py | 141 +- .../fast_bias_correction/backend.py | 71 +- .../fast_bias_correction/onnx_backend.py | 44 +- .../fast_bias_correction/openvino_backend.py | 32 +- .../fast_bias_correction/torch_backend.py | 132 + .../hyperparameter_tuner/__init__.py | 10 + .../hyperparameter_tuner/algorithm.py | 371 + 
.../hyperparameter_tuner/param_grid.py | 74 + .../algorithms/min_max/algorithm.py | 306 +- .../algorithms/min_max/backend.py | 65 +- .../algorithms/min_max/onnx_backend.py | 169 +- .../algorithms/min_max/openvino_backend.py | 143 +- .../algorithms/min_max/torch_backend.py | 71 +- .../algorithms/post_training/algorithm.py | 124 +- .../algorithms/smooth_quant/__init__.py | 10 + .../algorithms/smooth_quant/algorithm.py | 361 + .../algorithms/smooth_quant/backend.py | 238 + .../smooth_quant/openvino_backend.py | 182 + nncf/quantization/fake_quantize.py | 24 + nncf/quantization/passes.py | 22 +- nncf/quantization/quantize_model.py | 103 +- nncf/scopes.py | 8 +- nncf/telemetry/extractors.py | 1 - nncf/telemetry/wrapper.py | 28 +- nncf/tensorflow/__init__.py | 10 +- .../keras_model_utils.py | 2 +- .../accuracy_aware_training/runner.py | 2 +- nncf/tensorflow/exporter.py | 2 +- nncf/tensorflow/graph/converter.py | 35 +- nncf/tensorflow/graph/metatypes/common.py | 26 +- nncf/tensorflow/graph/model_transformer.py | 2 + nncf/tensorflow/graph/pattern_operations.py | 2 +- .../graph/transformations/commands.py | 7 +- nncf/tensorflow/pruning/base_algorithm.py | 6 +- .../pruning/filter_pruning/algorithm.py | 28 +- nncf/tensorflow/pruning/utils.py | 7 +- nncf/tensorflow/quantization/algorithm.py | 37 +- .../quantization/default_quantization.py | 21 +- .../tensorflow/quantization/quantize_model.py | 18 +- .../sparsity/magnitude/algorithm.py | 2 +- nncf/tensorflow/sparsity/rb/algorithm.py | 2 +- nncf/tensorflow/tf_internals.py | 10 +- nncf/torch/__init__.py | 11 +- nncf/torch/accuracy_aware_training/runner.py | 2 +- nncf/torch/automl/agent/ddpg/ddpg.py | 2 +- nncf/torch/binarization/binarize_functions.py | 12 +- nncf/torch/binarization/extensions.py | 5 + nncf/torch/binarization/reference.py | 25 +- nncf/torch/composite_compression.py | 2 +- nncf/torch/compression_method_api.py | 31 +- nncf/torch/dynamic_graph/graph.py | 35 +- nncf/torch/dynamic_graph/io_handling.py | 11 + .../layer_attributes_handlers.py | 20 +- nncf/torch/dynamic_graph/patch_pytorch.py | 76 +- nncf/torch/dynamic_graph/scope.py | 7 +- nncf/torch/dynamic_graph/scope_access.py | 11 + nncf/torch/dynamic_graph/wrappers.py | 2 +- nncf/torch/exporter.py | 2 +- nncf/torch/extensions/__init__.py | 47 +- .../extensions/include/common_cpu_funcs.h | 1 + .../extensions/include/common_cuda_defs.cuh | 5 +- .../extensions/include/common_cuda_funcs.cuh | 42 +- nncf/torch/extensions/include/common_defs.h | 1 + nncf/torch/extensions/include/dispatch.h | 12 + .../src/binarization/cpu/functions_cpu.cpp | 6 +- .../binarization/cuda/functions_cuda_impl.cu | 12 +- .../src/quantization/cpu/functions_cpu.cpp | 5 +- .../quantization/cuda/functions_cuda_impl.cu | 8 +- nncf/torch/graph/graph.py | 5 +- nncf/torch/graph/graph_builder.py | 1 + nncf/torch/graph/operator_metatypes.py | 102 +- .../graph/transformations/command_creation.py | 29 + nncf/torch/graph/transformations/commands.py | 101 +- nncf/torch/graph/transformations/layout.py | 11 + nncf/torch/hardware/fused_patterns.py | 40 +- nncf/torch/initialization.py | 11 + nncf/torch/layer_utils.py | 15 + nncf/torch/layers.py | 9 +- nncf/torch/model_analyzer.py | 87 + nncf/torch/model_creation.py | 39 +- nncf/torch/model_transformer.py | 169 + nncf/torch/nncf_module_replacement.py | 23 +- nncf/torch/nncf_network.py | 137 +- nncf/torch/pruning/base_algo.py | 2 +- nncf/torch/pruning/filter_pruning/algo.py | 2 +- nncf/torch/pruning/operations.py | 46 +- nncf/torch/pruning/utils.py | 4 +- nncf/torch/quantization/algo.py | 
18 +- .../quantization/default_quantization.py | 41 +- nncf/torch/quantization/extensions.py | 5 + nncf/torch/quantization/ignored_patterns.py | 81 +- nncf/torch/quantization/init_precision.py | 11 + nncf/torch/quantization/layers.py | 23 +- nncf/torch/quantization/metrics.py | 13 +- .../precision_init/adjacent_quantizers.py | 2 +- .../quantization/precision_init/base_init.py | 22 +- .../precision_init/bitwidth_graph.py | 29 +- .../quantization/precision_init/hawq_debug.py | 32 +- nncf/torch/quantization/quantize_functions.py | 14 +- nncf/torch/quantization/quantize_model.py | 33 +- nncf/torch/quantization/reference.py | 15 +- nncf/torch/quantization/strip.py | 34 +- .../torch/quantization/weights_compression.py | 103 + nncf/torch/statistics/aggregator.py | 7 +- nncf/torch/strip.py | 25 + nncf/torch/tensor_statistics/collectors.py | 31 +- nncf/torch/tensor_statistics/statistics.py | 7 + nncf/torch/utils.py | 2 +- nncf/version.py | 19 +- setup.py | 37 +- .../accuracy_control/test_calculate_drop.py | 98 + .../common/accuracy_control/test_evaluator.py | 100 + tests/common/accuracy_control/test_ranking.py | 44 +- tests/common/conftest.py | 12 + tests/common/graph/test_graph_matching.py | 27 +- tests/common/graph/test_nncf_graph.py | 50 + tests/common/graph/test_utils.py | 11 + tests/common/hyperparameter_tuner/__init__.py | 10 + .../test_hyperparameter_tuner.py | 432 + tests/common/pruning/dummy_types.py | 11 + .../common/pruning/test_pruning_operations.py | 71 +- .../pruning/test_symbolic_mask_processor.py | 11 + tests/common/pruning/test_utils.py | 8 +- tests/common/quantization/data_generators.py | 6 +- tests/common/quantization/metatypes.py | 19 +- tests/common/quantization/mock_graphs.py | 38 +- .../test_filter_constant_nodes.py | 4 +- .../test_ignore_post_processing.py | 213 +- .../test_quantizer_propagation_graph.py | 142 +- .../test_quantizer_propagation_solver.py | 241 +- tests/common/requirements.txt | 3 + tests/common/test_framework_detection.py | 74 + tests/common/test_hardware_config.py | 5 - tests/common/test_logging.py | 1 + tests/common/test_scopes.py | 5 +- tests/common/test_statistics_aggregator.py | 181 +- tests/common/test_telemetry.py | 21 + tests/common/test_tensor.py | 20 + tests/cross_fw/examples/conftest.py | 40 + tests/cross_fw/examples/example_scope.json | 147 + tests/cross_fw/examples/requirements.txt | 2 + tests/cross_fw/examples/run_example.py | 151 + tests/cross_fw/examples/test_examples.py | 76 + .../cross_fw/install/install_checks_torch.py | 44 +- tests/cross_fw/install/requirements.txt | 1 + tests/cross_fw/install/test_install.py | 19 +- .../common/test_statistic_collector.py | 51 +- tests/onnx/README.md | 14 +- tests/onnx/benchmarking/README.md | 52 +- tests/onnx/benchmarking/requirements.txt | 2 +- tests/onnx/conftest.py | 24 +- tests/onnx/data/models/bertsquad-12.onnx | 3 + tests/onnx/data/models/gpt2-10.onnx | 3 + .../synthetic/activation_matmul_model.dot | 11 + .../synthetic/embedding_model.dot | 13 + .../synthetic/gemm_weight_transpose_model.dot | 11 + .../synthetic/unified_embedding_model.dot | 17 + .../synthetic/weight_matmul_model.dot | 9 + .../weight_propagation_conv_model.dot | 25 + .../weight_propagation_matmul_model.dot | 15 + .../synthetic/weight_sharing_model.dot | 6 +- .../quantization/MaskRCNN-12.dot | 11572 ++++++++------- .../quantization/bertsquad-12.dot | 3278 ++++ .../quantization/googlenet.dot | 884 +- .../reference_graphs/quantization/gpt2-10.dot | 6180 ++++++++ .../quantization/resnet50_cpu_spr.dot | 713 + 
.../synthetic/activation_matmul_model.dot | 19 + .../synthetic/embedding_model.dot | 21 + .../synthetic/gemm_weight_transpose_model.dot | 19 + .../synthetic/unified_embedding_model.dot | 33 + .../synthetic/weight_matmul_model.dot | 17 + .../weight_propagation_conv_model.dot | 49 + .../weight_propagation_matmul_model.dot | 31 + .../synthetic/weight_sharing_model.dot | 20 +- .../activation_matmul_model_mixed.json | 10 + .../activation_matmul_model_performance.json | 10 + .../embedding_model_mixed.json | 58 + .../embedding_model_performance.json | 32 + .../gemm_weight_transpose_model_mixed.json | 22 + ...mm_weight_transpose_model_performance.json | 22 + .../reference_scales/linear_model_mixed.json | 110 + .../linear_model_overflow_fix_disable.json | 6 +- .../linear_model_overflow_fix_enable.json | 6 +- ...r_model_overflow_fix_first_layer_only.json | 6 +- .../linear_model_performance.json | 110 + ...e_depthwise_convolutional_model_mixed.json | 74 + ...hwise_convolutional_model_performance.json | 74 + .../reshape_weight_model_mixed.json | 22 + .../reshape_weight_model_performance.json | 22 + .../weight_matmul_model_mixed.json | 22 + .../weight_matmul_model_performance.json | 22 + .../weight_sharing_model_mixed.json | 30 + .../weight_sharing_model_performance.json | 30 + tests/onnx/models.py | 574 +- tests/onnx/quantization/common.py | 10 +- .../onnx/quantization/test_bias_correction.py | 199 + .../test_calculation_quantizer_params.py | 4 +- .../test_classification_models_graph.py | 34 +- .../test_detection_models_graph.py | 2 + .../quantization/test_fast_bias_correction.py | 71 + tests/onnx/quantization/test_min_max.py | 224 + tests/onnx/quantization/test_ptq_params.py | 57 +- .../test_qdq_params_calculation.py | 62 +- .../quantization/test_quantizer_config.py | 35 +- .../test_segmentation_models_graph.py | 2 + tests/onnx/quantization/test_transform_fn.py | 54 + .../test_transformer_models_graph.py | 34 + tests/onnx/requirements.txt | 3 +- tests/onnx/test_e2e_ptq.py | 27 +- tests/onnx/test_engine.py | 22 +- tests/onnx/test_layer_attributes.py | 15 +- tests/onnx/test_metatypes.py | 4 +- tests/onnx/test_model_transformer.py | 8 +- tests/onnx/test_node_utils.py | 19 + tests/onnx/test_pattern_manager.py | 19 +- tests/onnx/test_statistics_aggregator.py | 2 +- .../onnx/tools/save_model_without_tensors.py | 28 + .../ac_configs/mobilefacedet-v1-mxnet.yml | 47 - .../openvino/data/ov_dataset_definitions.yml | 4 +- tests/openvino/datasets_helpers.py | 42 +- .../UnifiedEmbeddingModel.dot | 28 + .../original_nncf_graph/WeightsModel.dot | 34 +- .../exctracted_ConvModel.dot | 9 + .../exctracted_QuantizedModel.dot | 81 + .../mobilenet-v3-small-1.0-224-tf.dot | 1234 +- .../original_nncf_graph/yolo-v4-tiny-tf.dot | 644 +- ...GRUSequenceModel_linear_before_reset_F.dot | 81 + ...GRUSequenceModel_linear_before_reset_T.dot | 41 + .../quantized/IntegerModel.dot | 198 +- .../quantized/UnifiedEmbeddingModel.dot | 78 + .../quantized/WeightsModel.dot | 182 +- ...t => mobilenet-v2-pytorch_performance.dot} | 1872 +-- .../mobilenet-v3-small-1.0-224-tf.dot | 1968 --- ...ilenet-v3-small-1.0-224-tf_performance.dot | 1958 +++ ....dot => resnet-18-pytorch_performance.dot} | 816 +- .../resnet-50-pytorch_performance_CPU_SPR.dot | 1691 +++ ...4-window7-224_performance_transformer.dot} | 0 .../swin-tiny-patch4-window7-224_sq.dot | 2750 ++++ .../quantized/yolo-v4-tiny-tf.dot | 749 - .../quantized/yolo-v4-tiny-tf_performance.dot | 749 + .../IntegerModel_compressed_weights.json | 242 + .../reference_scales/IntegerModel_mixed.json | 
3884 +---- .../IntegerModel_performance.json | 3876 +---- .../LSTMSequenceModel_mixed.json | 12344 +++++++++++++++- .../LSTMSequenceModel_performance.json | 12344 +++++++++++++++- .../UnifiedEmbeddingModel_mixed.json | 218 + .../UnifiedEmbeddingModel_performance.json | 160 + .../yolo-v4-tiny-tf_mixed.json | 234 +- .../yolo-v4-tiny-tf_performance.json | 228 +- tests/openvino/native/models.py | 86 +- .../quantization/test_channel_alignment.py | 107 + .../quantization/test_fq_configurations.py | 2 +- .../test_fq_params_calculation.py | 18 +- .../native/quantization/test_graphs.py | 72 +- .../native/quantization/test_ptq_params.py | 29 +- .../quantization/test_quantizer_config.py | 18 +- .../native/quantization/test_sanity.py | 33 +- .../quantization/test_weights_compression.py | 88 + tests/openvino/native/test_bias_correction.py | 200 + .../native/test_fast_bias_correction.py | 70 + .../openvino/native/test_layer_attributes.py | 191 +- tests/openvino/native/test_metatypes.py | 2 +- .../openvino/native/test_model_transformer.py | 130 +- tests/openvino/native/test_node_utils.py | 55 + tests/openvino/native/test_smooth_quant.py | 100 + .../native/test_statistic_collector.py | 47 +- tests/openvino/omz_helpers.py | 4 +- .../pot/quantization/test_parameters.py | 2 +- .../openvino/pot/quantization/test_sanity.py | 4 - tests/openvino/pot/test_engine.py | 6 +- tests/openvino/requirements.txt | 3 +- tests/openvino/test_transform_fn.py | 76 + tests/openvino/tools/README.md | 17 +- tests/openvino/tools/calibrate.py | 291 +- tests/post_training/README.md | 23 +- tests/post_training/conftest.py | 233 +- tests/post_training/model_scope.json | 232 - tests/post_training/model_scope.py | 288 +- tests/post_training/models.py | 97 - tests/post_training/pipelines/base.py | 302 + .../pipelines/causal_language_model.py | 57 + .../pipelines/image_classification_timm.py | 163 + .../pipelines/masked_language_modeling.py | 86 + tests/post_training/reference_data.yaml | 356 + tests/post_training/requirements.txt | 24 +- .../test_quantize_conformance.py | 781 +- tests/post_training/test_templates/helpers.py | 311 + tests/post_training/test_templates/models.py | 220 + .../test_templates/test_bias_correction.py | 181 + .../test_calculate_quantizer_parameters.py | 0 .../test_templates/test_channel_alignment.py | 502 + .../test_fast_bias_correction.py | 121 + .../{ => test_templates}/test_ptq_params.py | 98 +- .../test_quantizer_config.py | 90 +- .../test_templates/test_smooth_quant.py | 247 + tests/shared/case_collection.py | 7 +- tests/shared/helpers.py | 17 +- tests/shared/isolation_runner.py | 11 + tests/shared/metric_thresholds.py | 11 + tests/shared/paths.py | 11 + tests/shared/patterns.py | 2 + tests/shared/test_templates/__init__.py | 10 + .../template_test_nncf_tensor.py | 548 + tests/tensorflow/README.md | 62 +- .../accuracy_aware_training/test_keras_api.py | 2 + .../2.12/functional_insert_after.dot | 1 + .../2.12/functional_insert_before.dot | 1 + .../2.12/sequential_block_insert_after.dot | 1 + .../2.12/sequential_block_insert_before.dot | 1 + .../2.11/quantized/hw/CPU/inception_v3.dot | 338 +- .../2.11/quantized/hw/GPU/inception_v3.dot | 338 +- .../2.11/quantized/hw/VPU/inception_v3.dot | 338 +- .../w_sym_ch_a_asym_t/inception_v3.dot | 338 +- .../w_sym_t_a_sym_t/inception_v3.dot | 314 +- .../pruning/filter_pruning/densenet121.pb | 1 + .../pruning/filter_pruning/inception_v3.dot | 1 + .../pruning/filter_pruning/mobilenet_v1.pb | 1 + .../pruning/filter_pruning/mobilenet_v2.pb | 1 + 
.../filter_pruning/mobilenet_v3_large.pb | 1 + .../filter_pruning/mobilenet_v3_small.pb | 1 + .../2.12/pruning/filter_pruning/resnet50.pb | 1 + .../2.12/pruning/filter_pruning/retinanet.pb | 1 + .../filter_pruning/sequential_model.pb | 1 + .../sequential_no_input_model.pb | 1 + .../filter_pruning/shared_layers_model.pb | 1 + .../2.12/pruning/filter_pruning/vgg16.pb | 1 + .../2.12/pruning/filter_pruning/yolo_v4.pb | 1 + .../2.12/quantized/hw/CPU/inception_v3.dot | 1 + .../2.12/quantized/hw/CPU/mobilenet_v2.pb | 1 + .../2.12/quantized/hw/CPU/resnet50.pb | 1 + .../2.12/quantized/hw/GPU/inception_v3.dot | 1 + .../2.12/quantized/hw/GPU/mobilenet_v2.pb | 1 + .../2.12/quantized/hw/GPU/resnet50.pb | 1 + .../2.12/quantized/hw/VPU/inception_v3.dot | 1 + .../2.12/quantized/hw/VPU/mobilenet_v2.pb | 1 + .../2.12/quantized/hw/VPU/resnet50.pb | 1 + .../w_sym_ch_a_asym_t/densenet121.pb | 1 + .../w_sym_ch_a_asym_t/inception_v3.dot | 1 + .../quantized/w_sym_ch_a_asym_t/mask_rcnn.dot | 1 + .../w_sym_ch_a_asym_t/mobilenet_v1.pb | 1 + .../w_sym_ch_a_asym_t/mobilenet_v2.pb | 1 + .../mobilenet_v2_quantize_outputs.pb | 1 + .../w_sym_ch_a_asym_t/mobilenet_v3_large.pb | 1 + .../w_sym_ch_a_asym_t/mobilenet_v3_small.pb | 1 + .../quantized/w_sym_ch_a_asym_t/resnet50.pb | 1 + .../quantized/w_sym_ch_a_asym_t/resnet50v2.pb | 1 + .../quantized/w_sym_ch_a_asym_t/retinanet.pb | 1 + .../retinanet_quantize_outputs.pb | 1 + .../w_sym_ch_a_asym_t/sequential_model.pb | 1 + .../sequential_model_quantize_outputs.pb | 1 + .../sequential_no_input_model.pb | 1 + .../w_sym_ch_a_asym_t/shared_layers_model.pb | 1 + .../shared_layers_model_quantize_outputs.pb | 1 + .../2.12/quantized/w_sym_ch_a_asym_t/vgg16.pb | 1 + .../quantized/w_sym_ch_a_asym_t/yolo_v4.pb | 1 + .../quantized/w_sym_t_a_sym_t/densenet121.pb | 1 + .../w_sym_t_a_sym_t/inception_v3.dot | 1 + .../quantized/w_sym_t_a_sym_t/mask_rcnn.dot | 1 + .../quantized/w_sym_t_a_sym_t/mobilenet_v1.pb | 1 + .../quantized/w_sym_t_a_sym_t/mobilenet_v2.pb | 1 + .../mobilenet_v2_quantize_outputs.pb | 1 + .../w_sym_t_a_sym_t/mobilenet_v3_large.pb | 1 + .../w_sym_t_a_sym_t/mobilenet_v3_small.pb | 1 + .../quantized/w_sym_t_a_sym_t/resnet50.pb | 1 + .../quantized/w_sym_t_a_sym_t/resnet50v2.pb | 1 + .../quantized/w_sym_t_a_sym_t/retinanet.pb | 1 + .../retinanet_quantize_outputs.pb | 1 + .../w_sym_t_a_sym_t/sequential_model.pb | 1 + .../sequential_model_quantize_outputs.pb | 1 + .../sequential_no_input_model.pb | 1 + .../w_sym_t_a_sym_t/shared_layers_model.pb | 1 + .../shared_layers_model_quantize_outputs.pb | 1 + .../2.12/quantized/w_sym_t_a_sym_t/vgg16.pb | 1 + .../2.12/quantized/w_sym_t_a_sym_t/yolo_v4.pb | 1 + .../magnitude_sparsity/densenet121.pb | 1 + .../magnitude_sparsity/inception_v3.dot | 1 + .../sparsity/magnitude_sparsity/mask_rcnn.dot | 1 + .../magnitude_sparsity/mobilenet_v1.pb | 1 + .../magnitude_sparsity/mobilenet_v2.pb | 1 + .../magnitude_sparsity/mobilenet_v2_slim.dot | 1 + .../magnitude_sparsity/mobilenet_v3_large.pb | 1 + .../magnitude_sparsity/mobilenet_v3_small.pb | 1 + .../sparsity/magnitude_sparsity/resnet50.pb | 1 + .../sparsity/magnitude_sparsity/resnet50v2.pb | 1 + .../sparsity/magnitude_sparsity/retinanet.pb | 1 + .../magnitude_sparsity/sequential_model.pb | 1 + .../sequential_no_input_model.pb | 1 + .../magnitude_sparsity/shared_layers_model.pb | 1 + .../2.12/sparsity/magnitude_sparsity/vgg16.pb | 1 + .../sparsity/magnitude_sparsity/yolo_v4.pb | 1 + .../2.12/sparsity/rb_sparsity/densenet121.dot | 1 + .../rb_sparsity/inception_resnet_v2.dot | 8447 +++++++++++ 
.../sparsity/rb_sparsity/inception_v3.dot | 1 + .../2.12/sparsity/rb_sparsity/mask_rcnn.dot | 1 + .../sparsity/rb_sparsity/mobilenet_v1.dot | 1 + .../sparsity/rb_sparsity/mobilenet_v2.dot | 1 + .../rb_sparsity/mobilenet_v3_large.dot | 1 + .../rb_sparsity/mobilenet_v3_small.dot | 1 + .../sparsity/rb_sparsity/nasnet_mobile.dot | 1 + .../2.12/sparsity/rb_sparsity/resnet50.dot | 1 + .../2.12/sparsity/rb_sparsity/resnet50v2.dot | 1 + .../2.12/sparsity/rb_sparsity/retinanet.dot | 1 + .../sparsity/rb_sparsity/sequential_model.dot | 1 + .../rb_sparsity/sequential_no_input_model.dot | 1 + .../rb_sparsity/shared_layers_model.dot | 1 + .../2.12/sparsity/rb_sparsity/vgg16.dot | 1 + .../2.12/sparsity/rb_sparsity/xception.dot | 1 + .../2.12/sparsity/rb_sparsity/yolo_v4.dot | 1 + .../2.4/quantized/hw/CPU/inception_v3.dot | 338 +- .../2.4/quantized/hw/GPU/inception_v3.dot | 338 +- .../2.4/quantized/hw/VPU/inception_v3.dot | 338 +- .../w_sym_ch_a_asym_t/densenet121.pb | 4 +- .../w_sym_ch_a_asym_t/inception_v3.dot | 338 +- .../quantized/w_sym_ch_a_asym_t/mask_rcnn.dot | 1916 +-- .../quantized/w_sym_ch_a_asym_t/retinanet.pb | 4 +- .../retinanet_quantize_outputs.pb | 4 +- .../quantized/w_sym_t_a_sym_t/densenet121.pb | 4 +- .../w_sym_t_a_sym_t/inception_v3.dot | 314 +- .../quantized/w_sym_t_a_sym_t/mask_rcnn.dot | 1740 +-- .../quantized/w_sym_t_a_sym_t/retinanet.pb | 4 +- .../retinanet_quantize_outputs.pb | 4 +- .../2.5/quantized/hw/CPU/inception_v3.dot | 338 +- .../2.5/quantized/hw/GPU/inception_v3.dot | 338 +- .../2.5/quantized/hw/VPU/inception_v3.dot | 338 +- .../w_sym_ch_a_asym_t/densenet121.pb | 4 +- .../w_sym_ch_a_asym_t/inception_v3.dot | 338 +- .../quantized/w_sym_ch_a_asym_t/mask_rcnn.dot | 1916 +-- .../quantized/w_sym_ch_a_asym_t/retinanet.pb | 4 +- .../retinanet_quantize_outputs.pb | 4 +- .../quantized/w_sym_t_a_sym_t/densenet121.pb | 4 +- .../w_sym_t_a_sym_t/inception_v3.dot | 314 +- .../quantized/w_sym_t_a_sym_t/mask_rcnn.dot | 1740 +-- .../quantized/w_sym_t_a_sym_t/retinanet.pb | 4 +- .../retinanet_quantize_outputs.pb | 4 +- .../quantized/w_sym_ch_a_asym_t/mask_rcnn.dot | 3430 ++--- .../quantized/w_sym_t_a_sym_t/mask_rcnn.dot | 3130 ++-- .../tensorflow/pruning/test_flops_pruning.py | 2 +- .../pruning/test_tensor_processor.py | 11 + .../quantization/test_ptq_params.py | 102 + .../quantization/test_transform_fn.py | 62 + .../quantization/test_unified_scales.py | 6 +- tests/tensorflow/requirements.txt | 3 +- tests/tensorflow/sota_checkpoints_eval.json | 6 +- tests/tensorflow/sparsity/rb/utils.py | 11 + tests/tensorflow/test_compressed_graph.py | 4 +- tests/tensorflow/test_ignored_scopes.py | 18 +- .../test_models/inception_resnet_v2.py | 22 +- tests/tensorflow/test_transformations.py | 4 +- .../test_accuracy_aware_config.py | 4 +- .../test_training_loop.py | 8 +- .../test_timeout_extension_loader.py | 45 + .../composite/test_sparsity_quantization.py | 11 + .../nas/efficient_net_b0_depth.dot | 414 +- .../nas/efficient_net_b0_kernel.dot | 1174 +- .../nas/efficient_net_b0_width.dot | 896 +- .../nas/squeezenet1_0_depth.dot | 221 +- .../nas/squeezenet1_0_kernel.dot | 290 +- .../nas/squeezenet1_0_width.dot | 257 +- .../quantized/asymmetric/lstm_cell.dot | 2 +- .../ptq/symmetric/embedding_model.dot | 15 + .../quantized/ptq/symmetric/resnet50.dot | 621 - .../ptq/symmetric/resnet50_cpu_spr.dot | 589 + .../quantized/symmetric/lstm_cell.dot | 2 +- .../quantized/synthetic_model/MHA.dot | 106 +- .../synthetic_model/MHA_single_input.dot | 128 +- .../MultiOutputSameTensorModel.dot | 23 +- 
.../OrdinaryModelWithRecurrentInName.dot | 13 + .../ShiftScale__multi_input_branch.dot | 23 + ...ftScale__normalize__multi_input_branch.dot | 27 + ...tScale__normalize__single_input_branch.dot | 19 + .../ShiftScale__single_input_branch.dot | 15 + .../quantized_rb_sparsity/lstm_cell.dot | 2 +- .../search_building_block/wave2vec_2.0.json | 48 +- .../test_replace_timm_custom_modules.py | 96 + .../test_search_building_blocks.py | 23 +- .../test_transformer_blocks.py | 23 +- tests/torch/extensions_build_checks.py | 11 + tests/torch/helpers.py | 4 +- tests/torch/modules/seq2seq/attention.py | 11 + tests/torch/modules/seq2seq/decoder.py | 11 + tests/torch/modules/seq2seq/encoder.py | 11 + tests/torch/modules/seq2seq/gnmt.py | 11 + tests/torch/modules/seq2seq/seq2seq_base.py | 11 + tests/torch/modules/test_rnn.py | 72 +- tests/torch/nas/creators.py | 8 +- tests/torch/nas/models/synthetic.py | 13 +- tests/torch/nas/test_elastic_depth.py | 8 +- tests/torch/nas/test_elastic_kernel.py | 16 + tests/torch/nas/test_flops.py | 2 +- tests/torch/nas/test_ps_controller.py | 81 +- tests/torch/nas/test_scheduler.py | 2 + tests/torch/nas/test_search.py | 2 +- tests/torch/nas/test_search_space.py | 4 +- tests/torch/nas/test_state.py | 2 +- .../experimental/test_nodes_grouping.py | 10 +- .../torch/pruning/filter_pruning/test_algo.py | 8 +- .../pruning/filter_pruning/test_layers.py | 2 +- tests/torch/pruning/helpers.py | 15 + .../pruning/test_model_pruning_analysis.py | 55 +- tests/torch/pruning/test_tensor_processor.py | 11 + tests/torch/ptq/helpers.py | 30 +- .../ptq/test_calculation_quantizer_params.py | 29 +- tests/torch/ptq/test_fast_bias_correction.py | 61 + tests/torch/ptq/test_fq_params_calculation.py | 5 +- tests/torch/ptq/test_graphs.py | 80 +- tests/torch/ptq/test_ptq_params.py | 121 +- tests/torch/ptq/test_quantizer_config.py | 9 +- tests/torch/ptq/test_strip.py | 86 + tests/torch/ptq/test_weights_compression.py | 72 + tests/torch/pytorch_patch_isolated.py | 49 +- .../quantization/extensions/isolated_cases.py | 23 +- .../extensions/test_extension_unavailable.py | 23 +- .../test_timeout_extension_loader.py | 45 + .../torch/quantization/test_adjust_padding.py | 23 +- .../quantization/test_algo_quantization.py | 61 +- .../quantization/test_hawq_precision_init.py | 10 +- .../quantization/test_logarithm_scale.py | 9 +- .../test_overflow_issue_export.py | 11 + .../torch/quantization/test_sanity_sample.py | 11 + .../quantization/test_serialize_to_json.py | 11 + .../test_solver_quantization_traits.py | 11 + tests/torch/quantization/test_strip.py | 38 +- tests/torch/quantization/test_tracing.py | 95 + .../torch/quantization/test_unified_scales.py | 22 +- tests/torch/requirements.txt | 13 +- tests/torch/run_examples_for_test_sota.py | 11 + .../torch/sparsity/movement/helpers/utils.py | 7 +- .../sparsity/movement/test_model_saving.py | 8 +- .../sparsity/movement/test_structured_mask.py | 4 +- .../torch/sparsity/movement/test_training.py | 4 +- .../test_training_with_third_party.py | 4 +- .../movement/training_scripts/run_glue.py | 7 +- tests/torch/test_algo_common.py | 29 +- tests/torch/test_api_behavior.py | 10 + tests/torch/test_backward_compat.py | 64 +- tests/torch/test_compressed_graph.py | 29 +- tests/torch/test_compression_training.py | 26 +- tests/torch/test_config_schema.py | 11 + tests/torch/test_context_independence.py | 11 + tests/torch/test_custom_modules.py | 11 + .../test_distributed_data_parallel_mode.py | 11 + tests/torch/test_extensions_build.py | 11 + tests/torch/test_frozen_layers.py | 11 + 
tests/torch/test_graph_analysis.py | 1 + tests/torch/test_graph_building.py | 25 +- tests/torch/test_input_management.py | 11 + tests/torch/test_layer_attributes.py | 20 +- tests/torch/test_model_transformer.py | 507 + tests/torch/test_models/__init__.py | 11 + tests/torch/test_models/swin.py | 8 +- tests/torch/test_models/synthetic.py | 41 + tests/torch/test_nncf_network.py | 498 +- tests/torch/test_onnx_export.py | 11 + tests/torch/test_pattern_manager.py | 16 +- tests/torch/test_pytorch_patch.py | 17 + tests/torch/test_sanity_sample.py | 6 +- tests/torch/test_sanity_third_party.py | 8 +- tests/torch/test_sota_checkpoints.py | 11 + tests/torch/test_statistics_aggregator.py | 15 +- tests/torch/test_tensor.py | 34 + tests/torch/test_tracing_context.py | 4 +- tests/torch/test_transform_fn.py | 66 + tests/torch/test_utils.py | 11 + .../huggingface_transformers/README.md | 20 +- tools/benchmark.py | 41 +- tools/benchmark_quantize_layers.py | 305 +- .../collect_pylint_input_files_for_backend.py | 59 + tools/update_eval_results.py | 2 +- 805 files changed, 97748 insertions(+), 39533 deletions(-) create mode 100644 .file-header create mode 100644 .git-blame-ignore-revs create mode 100644 .github/workflows/post_pr_merge.yml create mode 100644 .github/workflows/pre-commit-linters.yml create mode 100644 .github/workflows/precommit.yml create mode 100644 .isort.cfg create mode 100644 .markdownlint.yaml create mode 100644 .pre-commit-config.yaml create mode 100644 codecov.yml create mode 100644 docs/ModelZoo.md create mode 100644 docs/compression_algorithms/CompressWeights.md rename examples/post_training_quantization/openvino/{quantize_with_accuracy_control => anomaly_stfpm_quantize_with_accuracy_control}/README.md (84%) rename examples/post_training_quantization/openvino/{quantize_with_accuracy_control => anomaly_stfpm_quantize_with_accuracy_control}/main.py (79%) create mode 100644 examples/post_training_quantization/openvino/anomaly_stfpm_quantize_with_accuracy_control/requirements.txt delete mode 100644 examples/post_training_quantization/openvino/quantize_with_accuracy_control/requirements.txt create mode 100644 nncf/common/plotting.py create mode 100644 nncf/common/strip.py delete mode 100644 nncf/experimental/openvino/quantization/quantize_model.py create mode 100644 nncf/experimental/tensor/README.md create mode 100644 nncf/experimental/tensor/__init__.py create mode 100644 nncf/experimental/tensor/enums.py create mode 100644 nncf/experimental/tensor/functions.py create mode 100644 nncf/experimental/tensor/numpy_functions.py create mode 100644 nncf/experimental/tensor/tensor.py create mode 100644 nncf/experimental/tensor/torch_functions.py create mode 100644 nncf/experimental/torch/nas/bootstrapNAS/search/supernet.py rename nncf/experimental/{openvino => torch/replace_custom_modules}/__init__.py (100%) create mode 100644 nncf/experimental/torch/replace_custom_modules/timm_custom_modules.py create mode 100644 nncf/onnx/graph/model_utils.py create mode 100644 nncf/openvino/graph/layer_attributes.py create mode 100644 nncf/openvino/graph/model_utils.py create mode 100644 nncf/openvino/quantization/weights_compression.py create mode 100644 nncf/quantization/algorithms/accuracy_control/evaluator.py create mode 100644 nncf/quantization/algorithms/accuracy_control/subset_selection.py rename nncf/{experimental/openvino/quantization => quantization/algorithms/channel_alignment}/__init__.py (100%) create mode 100644 nncf/quantization/algorithms/channel_alignment/algorithm.py create mode 100644 
nncf/quantization/algorithms/channel_alignment/backend.py create mode 100644 nncf/quantization/algorithms/channel_alignment/openvino_backend.py create mode 100644 nncf/quantization/algorithms/fast_bias_correction/torch_backend.py create mode 100644 nncf/quantization/algorithms/hyperparameter_tuner/__init__.py create mode 100644 nncf/quantization/algorithms/hyperparameter_tuner/algorithm.py create mode 100644 nncf/quantization/algorithms/hyperparameter_tuner/param_grid.py create mode 100644 nncf/quantization/algorithms/smooth_quant/__init__.py create mode 100644 nncf/quantization/algorithms/smooth_quant/algorithm.py create mode 100644 nncf/quantization/algorithms/smooth_quant/backend.py create mode 100644 nncf/quantization/algorithms/smooth_quant/openvino_backend.py create mode 100644 nncf/torch/extensions/include/dispatch.h create mode 100644 nncf/torch/graph/transformations/command_creation.py create mode 100644 nncf/torch/model_analyzer.py create mode 100644 nncf/torch/model_transformer.py create mode 100644 nncf/torch/quantization/weights_compression.py create mode 100644 nncf/torch/strip.py create mode 100644 tests/common/accuracy_control/test_calculate_drop.py create mode 100644 tests/common/accuracy_control/test_evaluator.py create mode 100644 tests/common/conftest.py create mode 100644 tests/common/graph/test_nncf_graph.py create mode 100644 tests/common/hyperparameter_tuner/__init__.py create mode 100644 tests/common/hyperparameter_tuner/test_hyperparameter_tuner.py create mode 100644 tests/common/requirements.txt create mode 100644 tests/common/test_framework_detection.py create mode 100644 tests/common/test_tensor.py create mode 100644 tests/cross_fw/examples/conftest.py create mode 100644 tests/cross_fw/examples/example_scope.json create mode 100644 tests/cross_fw/examples/requirements.txt create mode 100644 tests/cross_fw/examples/run_example.py create mode 100644 tests/cross_fw/examples/test_examples.py create mode 100644 tests/onnx/data/models/bertsquad-12.onnx create mode 100644 tests/onnx/data/models/gpt2-10.onnx create mode 100644 tests/onnx/data/reference_graphs/original_nncf_graph/synthetic/activation_matmul_model.dot create mode 100644 tests/onnx/data/reference_graphs/original_nncf_graph/synthetic/embedding_model.dot create mode 100644 tests/onnx/data/reference_graphs/original_nncf_graph/synthetic/gemm_weight_transpose_model.dot create mode 100644 tests/onnx/data/reference_graphs/original_nncf_graph/synthetic/unified_embedding_model.dot create mode 100644 tests/onnx/data/reference_graphs/original_nncf_graph/synthetic/weight_matmul_model.dot create mode 100644 tests/onnx/data/reference_graphs/original_nncf_graph/synthetic/weight_propagation_conv_model.dot create mode 100644 tests/onnx/data/reference_graphs/original_nncf_graph/synthetic/weight_propagation_matmul_model.dot create mode 100644 tests/onnx/data/reference_graphs/quantization/bertsquad-12.dot create mode 100644 tests/onnx/data/reference_graphs/quantization/gpt2-10.dot create mode 100644 tests/onnx/data/reference_graphs/quantization/resnet50_cpu_spr.dot create mode 100644 tests/onnx/data/reference_graphs/quantization/synthetic/activation_matmul_model.dot create mode 100644 tests/onnx/data/reference_graphs/quantization/synthetic/embedding_model.dot create mode 100644 tests/onnx/data/reference_graphs/quantization/synthetic/gemm_weight_transpose_model.dot create mode 100644 tests/onnx/data/reference_graphs/quantization/synthetic/unified_embedding_model.dot create mode 100644 
tests/onnx/data/reference_graphs/quantization/synthetic/weight_matmul_model.dot create mode 100644 tests/onnx/data/reference_graphs/quantization/synthetic/weight_propagation_conv_model.dot create mode 100644 tests/onnx/data/reference_graphs/quantization/synthetic/weight_propagation_matmul_model.dot create mode 100644 tests/onnx/data/reference_scales/activation_matmul_model_mixed.json create mode 100644 tests/onnx/data/reference_scales/activation_matmul_model_performance.json create mode 100644 tests/onnx/data/reference_scales/embedding_model_mixed.json create mode 100644 tests/onnx/data/reference_scales/embedding_model_performance.json create mode 100644 tests/onnx/data/reference_scales/gemm_weight_transpose_model_mixed.json create mode 100644 tests/onnx/data/reference_scales/gemm_weight_transpose_model_performance.json create mode 100644 tests/onnx/data/reference_scales/linear_model_mixed.json create mode 100644 tests/onnx/data/reference_scales/linear_model_performance.json create mode 100644 tests/onnx/data/reference_scales/one_depthwise_convolutional_model_mixed.json create mode 100644 tests/onnx/data/reference_scales/one_depthwise_convolutional_model_performance.json create mode 100644 tests/onnx/data/reference_scales/reshape_weight_model_mixed.json create mode 100644 tests/onnx/data/reference_scales/reshape_weight_model_performance.json create mode 100644 tests/onnx/data/reference_scales/weight_matmul_model_mixed.json create mode 100644 tests/onnx/data/reference_scales/weight_matmul_model_performance.json create mode 100644 tests/onnx/data/reference_scales/weight_sharing_model_mixed.json create mode 100644 tests/onnx/data/reference_scales/weight_sharing_model_performance.json create mode 100644 tests/onnx/quantization/test_bias_correction.py create mode 100644 tests/onnx/quantization/test_fast_bias_correction.py create mode 100644 tests/onnx/quantization/test_min_max.py create mode 100644 tests/onnx/quantization/test_transform_fn.py create mode 100644 tests/onnx/quantization/test_transformer_models_graph.py create mode 100644 tests/onnx/tools/save_model_without_tensors.py delete mode 100644 tests/openvino/data/ac_configs/mobilefacedet-v1-mxnet.yml create mode 100644 tests/openvino/native/data/reference_graphs/original_nncf_graph/UnifiedEmbeddingModel.dot create mode 100644 tests/openvino/native/data/reference_graphs/original_nncf_graph/exctracted_ConvModel.dot create mode 100644 tests/openvino/native/data/reference_graphs/original_nncf_graph/exctracted_QuantizedModel.dot create mode 100644 tests/openvino/native/data/reference_graphs/quantized/GRUSequenceModel_linear_before_reset_F.dot create mode 100644 tests/openvino/native/data/reference_graphs/quantized/GRUSequenceModel_linear_before_reset_T.dot create mode 100644 tests/openvino/native/data/reference_graphs/quantized/UnifiedEmbeddingModel.dot rename tests/openvino/native/data/reference_graphs/quantized/{mobilenet-v2-pytorch.dot => mobilenet-v2-pytorch_performance.dot} (55%) delete mode 100644 tests/openvino/native/data/reference_graphs/quantized/mobilenet-v3-small-1.0-224-tf.dot create mode 100644 tests/openvino/native/data/reference_graphs/quantized/mobilenet-v3-small-1.0-224-tf_performance.dot rename tests/openvino/native/data/reference_graphs/quantized/{resnet-18-pytorch.dot => resnet-18-pytorch_performance.dot} (53%) create mode 100644 tests/openvino/native/data/reference_graphs/quantized/resnet-50-pytorch_performance_CPU_SPR.dot rename tests/openvino/native/data/reference_graphs/quantized/{swin-tiny-patch4-window7-224.dot => 
swin-tiny-patch4-window7-224_performance_transformer.dot} (100%) create mode 100644 tests/openvino/native/data/reference_graphs/quantized/swin-tiny-patch4-window7-224_sq.dot delete mode 100644 tests/openvino/native/data/reference_graphs/quantized/yolo-v4-tiny-tf.dot create mode 100644 tests/openvino/native/data/reference_graphs/quantized/yolo-v4-tiny-tf_performance.dot create mode 100644 tests/openvino/native/data/reference_scales/IntegerModel_compressed_weights.json create mode 100644 tests/openvino/native/data/reference_scales/UnifiedEmbeddingModel_mixed.json create mode 100644 tests/openvino/native/data/reference_scales/UnifiedEmbeddingModel_performance.json create mode 100644 tests/openvino/native/quantization/test_channel_alignment.py create mode 100644 tests/openvino/native/quantization/test_weights_compression.py create mode 100644 tests/openvino/native/test_bias_correction.py create mode 100644 tests/openvino/native/test_fast_bias_correction.py create mode 100644 tests/openvino/native/test_smooth_quant.py create mode 100644 tests/openvino/test_transform_fn.py delete mode 100644 tests/post_training/model_scope.json delete mode 100644 tests/post_training/models.py create mode 100644 tests/post_training/pipelines/base.py create mode 100644 tests/post_training/pipelines/causal_language_model.py create mode 100644 tests/post_training/pipelines/image_classification_timm.py create mode 100644 tests/post_training/pipelines/masked_language_modeling.py create mode 100644 tests/post_training/reference_data.yaml create mode 100644 tests/post_training/test_templates/helpers.py create mode 100644 tests/post_training/test_templates/models.py create mode 100644 tests/post_training/test_templates/test_bias_correction.py rename tests/post_training/{ => test_templates}/test_calculate_quantizer_parameters.py (100%) create mode 100644 tests/post_training/test_templates/test_channel_alignment.py create mode 100644 tests/post_training/test_templates/test_fast_bias_correction.py rename tests/post_training/{ => test_templates}/test_ptq_params.py (73%) rename tests/post_training/{ => test_templates}/test_quantizer_config.py (73%) create mode 100644 tests/post_training/test_templates/test_smooth_quant.py create mode 100644 tests/shared/test_templates/__init__.py create mode 100644 tests/shared/test_templates/template_test_nncf_tensor.py create mode 120000 tests/tensorflow/data/model_transormer/2.12/functional_insert_after.dot create mode 120000 tests/tensorflow/data/model_transormer/2.12/functional_insert_before.dot create mode 120000 tests/tensorflow/data/model_transormer/2.12/sequential_block_insert_after.dot create mode 120000 tests/tensorflow/data/model_transormer/2.12/sequential_block_insert_before.dot create mode 120000 tests/tensorflow/data/reference_graphs/2.12/pruning/filter_pruning/densenet121.pb create mode 120000 tests/tensorflow/data/reference_graphs/2.12/pruning/filter_pruning/inception_v3.dot create mode 120000 tests/tensorflow/data/reference_graphs/2.12/pruning/filter_pruning/mobilenet_v1.pb create mode 120000 tests/tensorflow/data/reference_graphs/2.12/pruning/filter_pruning/mobilenet_v2.pb create mode 120000 tests/tensorflow/data/reference_graphs/2.12/pruning/filter_pruning/mobilenet_v3_large.pb create mode 120000 tests/tensorflow/data/reference_graphs/2.12/pruning/filter_pruning/mobilenet_v3_small.pb create mode 120000 tests/tensorflow/data/reference_graphs/2.12/pruning/filter_pruning/resnet50.pb create mode 120000 
tests/tensorflow/data/reference_graphs/2.12/pruning/filter_pruning/retinanet.pb create mode 120000 tests/tensorflow/data/reference_graphs/2.12/pruning/filter_pruning/sequential_model.pb create mode 120000 tests/tensorflow/data/reference_graphs/2.12/pruning/filter_pruning/sequential_no_input_model.pb create mode 120000 tests/tensorflow/data/reference_graphs/2.12/pruning/filter_pruning/shared_layers_model.pb create mode 120000 tests/tensorflow/data/reference_graphs/2.12/pruning/filter_pruning/vgg16.pb create mode 120000 tests/tensorflow/data/reference_graphs/2.12/pruning/filter_pruning/yolo_v4.pb create mode 120000 tests/tensorflow/data/reference_graphs/2.12/quantized/hw/CPU/inception_v3.dot create mode 120000 tests/tensorflow/data/reference_graphs/2.12/quantized/hw/CPU/mobilenet_v2.pb create mode 120000 tests/tensorflow/data/reference_graphs/2.12/quantized/hw/CPU/resnet50.pb create mode 120000 tests/tensorflow/data/reference_graphs/2.12/quantized/hw/GPU/inception_v3.dot create mode 120000 tests/tensorflow/data/reference_graphs/2.12/quantized/hw/GPU/mobilenet_v2.pb create mode 120000 tests/tensorflow/data/reference_graphs/2.12/quantized/hw/GPU/resnet50.pb create mode 120000 tests/tensorflow/data/reference_graphs/2.12/quantized/hw/VPU/inception_v3.dot create mode 120000 tests/tensorflow/data/reference_graphs/2.12/quantized/hw/VPU/mobilenet_v2.pb create mode 120000 tests/tensorflow/data/reference_graphs/2.12/quantized/hw/VPU/resnet50.pb create mode 120000 tests/tensorflow/data/reference_graphs/2.12/quantized/w_sym_ch_a_asym_t/densenet121.pb create mode 120000 tests/tensorflow/data/reference_graphs/2.12/quantized/w_sym_ch_a_asym_t/inception_v3.dot create mode 120000 tests/tensorflow/data/reference_graphs/2.12/quantized/w_sym_ch_a_asym_t/mask_rcnn.dot create mode 120000 tests/tensorflow/data/reference_graphs/2.12/quantized/w_sym_ch_a_asym_t/mobilenet_v1.pb create mode 120000 tests/tensorflow/data/reference_graphs/2.12/quantized/w_sym_ch_a_asym_t/mobilenet_v2.pb create mode 120000 tests/tensorflow/data/reference_graphs/2.12/quantized/w_sym_ch_a_asym_t/mobilenet_v2_quantize_outputs.pb create mode 120000 tests/tensorflow/data/reference_graphs/2.12/quantized/w_sym_ch_a_asym_t/mobilenet_v3_large.pb create mode 120000 tests/tensorflow/data/reference_graphs/2.12/quantized/w_sym_ch_a_asym_t/mobilenet_v3_small.pb create mode 120000 tests/tensorflow/data/reference_graphs/2.12/quantized/w_sym_ch_a_asym_t/resnet50.pb create mode 120000 tests/tensorflow/data/reference_graphs/2.12/quantized/w_sym_ch_a_asym_t/resnet50v2.pb create mode 120000 tests/tensorflow/data/reference_graphs/2.12/quantized/w_sym_ch_a_asym_t/retinanet.pb create mode 120000 tests/tensorflow/data/reference_graphs/2.12/quantized/w_sym_ch_a_asym_t/retinanet_quantize_outputs.pb create mode 120000 tests/tensorflow/data/reference_graphs/2.12/quantized/w_sym_ch_a_asym_t/sequential_model.pb create mode 120000 tests/tensorflow/data/reference_graphs/2.12/quantized/w_sym_ch_a_asym_t/sequential_model_quantize_outputs.pb create mode 120000 tests/tensorflow/data/reference_graphs/2.12/quantized/w_sym_ch_a_asym_t/sequential_no_input_model.pb create mode 120000 tests/tensorflow/data/reference_graphs/2.12/quantized/w_sym_ch_a_asym_t/shared_layers_model.pb create mode 120000 tests/tensorflow/data/reference_graphs/2.12/quantized/w_sym_ch_a_asym_t/shared_layers_model_quantize_outputs.pb create mode 120000 tests/tensorflow/data/reference_graphs/2.12/quantized/w_sym_ch_a_asym_t/vgg16.pb create mode 120000 
tests/tensorflow/data/reference_graphs/2.12/quantized/w_sym_ch_a_asym_t/yolo_v4.pb create mode 120000 tests/tensorflow/data/reference_graphs/2.12/quantized/w_sym_t_a_sym_t/densenet121.pb create mode 120000 tests/tensorflow/data/reference_graphs/2.12/quantized/w_sym_t_a_sym_t/inception_v3.dot create mode 120000 tests/tensorflow/data/reference_graphs/2.12/quantized/w_sym_t_a_sym_t/mask_rcnn.dot create mode 120000 tests/tensorflow/data/reference_graphs/2.12/quantized/w_sym_t_a_sym_t/mobilenet_v1.pb create mode 120000 tests/tensorflow/data/reference_graphs/2.12/quantized/w_sym_t_a_sym_t/mobilenet_v2.pb create mode 120000 tests/tensorflow/data/reference_graphs/2.12/quantized/w_sym_t_a_sym_t/mobilenet_v2_quantize_outputs.pb create mode 120000 tests/tensorflow/data/reference_graphs/2.12/quantized/w_sym_t_a_sym_t/mobilenet_v3_large.pb create mode 120000 tests/tensorflow/data/reference_graphs/2.12/quantized/w_sym_t_a_sym_t/mobilenet_v3_small.pb create mode 120000 tests/tensorflow/data/reference_graphs/2.12/quantized/w_sym_t_a_sym_t/resnet50.pb create mode 120000 tests/tensorflow/data/reference_graphs/2.12/quantized/w_sym_t_a_sym_t/resnet50v2.pb create mode 120000 tests/tensorflow/data/reference_graphs/2.12/quantized/w_sym_t_a_sym_t/retinanet.pb create mode 120000 tests/tensorflow/data/reference_graphs/2.12/quantized/w_sym_t_a_sym_t/retinanet_quantize_outputs.pb create mode 120000 tests/tensorflow/data/reference_graphs/2.12/quantized/w_sym_t_a_sym_t/sequential_model.pb create mode 120000 tests/tensorflow/data/reference_graphs/2.12/quantized/w_sym_t_a_sym_t/sequential_model_quantize_outputs.pb create mode 120000 tests/tensorflow/data/reference_graphs/2.12/quantized/w_sym_t_a_sym_t/sequential_no_input_model.pb create mode 120000 tests/tensorflow/data/reference_graphs/2.12/quantized/w_sym_t_a_sym_t/shared_layers_model.pb create mode 120000 tests/tensorflow/data/reference_graphs/2.12/quantized/w_sym_t_a_sym_t/shared_layers_model_quantize_outputs.pb create mode 120000 tests/tensorflow/data/reference_graphs/2.12/quantized/w_sym_t_a_sym_t/vgg16.pb create mode 120000 tests/tensorflow/data/reference_graphs/2.12/quantized/w_sym_t_a_sym_t/yolo_v4.pb create mode 120000 tests/tensorflow/data/reference_graphs/2.12/sparsity/magnitude_sparsity/densenet121.pb create mode 120000 tests/tensorflow/data/reference_graphs/2.12/sparsity/magnitude_sparsity/inception_v3.dot create mode 120000 tests/tensorflow/data/reference_graphs/2.12/sparsity/magnitude_sparsity/mask_rcnn.dot create mode 120000 tests/tensorflow/data/reference_graphs/2.12/sparsity/magnitude_sparsity/mobilenet_v1.pb create mode 120000 tests/tensorflow/data/reference_graphs/2.12/sparsity/magnitude_sparsity/mobilenet_v2.pb create mode 120000 tests/tensorflow/data/reference_graphs/2.12/sparsity/magnitude_sparsity/mobilenet_v2_slim.dot create mode 120000 tests/tensorflow/data/reference_graphs/2.12/sparsity/magnitude_sparsity/mobilenet_v3_large.pb create mode 120000 tests/tensorflow/data/reference_graphs/2.12/sparsity/magnitude_sparsity/mobilenet_v3_small.pb create mode 120000 tests/tensorflow/data/reference_graphs/2.12/sparsity/magnitude_sparsity/resnet50.pb create mode 120000 tests/tensorflow/data/reference_graphs/2.12/sparsity/magnitude_sparsity/resnet50v2.pb create mode 120000 tests/tensorflow/data/reference_graphs/2.12/sparsity/magnitude_sparsity/retinanet.pb create mode 120000 tests/tensorflow/data/reference_graphs/2.12/sparsity/magnitude_sparsity/sequential_model.pb create mode 120000 
tests/tensorflow/data/reference_graphs/2.12/sparsity/magnitude_sparsity/sequential_no_input_model.pb create mode 120000 tests/tensorflow/data/reference_graphs/2.12/sparsity/magnitude_sparsity/shared_layers_model.pb create mode 120000 tests/tensorflow/data/reference_graphs/2.12/sparsity/magnitude_sparsity/vgg16.pb create mode 120000 tests/tensorflow/data/reference_graphs/2.12/sparsity/magnitude_sparsity/yolo_v4.pb create mode 120000 tests/tensorflow/data/reference_graphs/2.12/sparsity/rb_sparsity/densenet121.dot create mode 100644 tests/tensorflow/data/reference_graphs/2.12/sparsity/rb_sparsity/inception_resnet_v2.dot create mode 120000 tests/tensorflow/data/reference_graphs/2.12/sparsity/rb_sparsity/inception_v3.dot create mode 120000 tests/tensorflow/data/reference_graphs/2.12/sparsity/rb_sparsity/mask_rcnn.dot create mode 120000 tests/tensorflow/data/reference_graphs/2.12/sparsity/rb_sparsity/mobilenet_v1.dot create mode 120000 tests/tensorflow/data/reference_graphs/2.12/sparsity/rb_sparsity/mobilenet_v2.dot create mode 120000 tests/tensorflow/data/reference_graphs/2.12/sparsity/rb_sparsity/mobilenet_v3_large.dot create mode 120000 tests/tensorflow/data/reference_graphs/2.12/sparsity/rb_sparsity/mobilenet_v3_small.dot create mode 120000 tests/tensorflow/data/reference_graphs/2.12/sparsity/rb_sparsity/nasnet_mobile.dot create mode 120000 tests/tensorflow/data/reference_graphs/2.12/sparsity/rb_sparsity/resnet50.dot create mode 120000 tests/tensorflow/data/reference_graphs/2.12/sparsity/rb_sparsity/resnet50v2.dot create mode 120000 tests/tensorflow/data/reference_graphs/2.12/sparsity/rb_sparsity/retinanet.dot create mode 120000 tests/tensorflow/data/reference_graphs/2.12/sparsity/rb_sparsity/sequential_model.dot create mode 120000 tests/tensorflow/data/reference_graphs/2.12/sparsity/rb_sparsity/sequential_no_input_model.dot create mode 120000 tests/tensorflow/data/reference_graphs/2.12/sparsity/rb_sparsity/shared_layers_model.dot create mode 120000 tests/tensorflow/data/reference_graphs/2.12/sparsity/rb_sparsity/vgg16.dot create mode 120000 tests/tensorflow/data/reference_graphs/2.12/sparsity/rb_sparsity/xception.dot create mode 120000 tests/tensorflow/data/reference_graphs/2.12/sparsity/rb_sparsity/yolo_v4.dot create mode 100644 tests/tensorflow/quantization/test_ptq_params.py create mode 100644 tests/tensorflow/quantization/test_transform_fn.py create mode 100644 tests/torch/binarization/test_timeout_extension_loader.py create mode 100644 tests/torch/data/reference_graphs/quantized/ptq/symmetric/embedding_model.dot delete mode 100644 tests/torch/data/reference_graphs/quantized/ptq/symmetric/resnet50.dot create mode 100644 tests/torch/data/reference_graphs/quantized/ptq/symmetric/resnet50_cpu_spr.dot create mode 100644 tests/torch/data/reference_graphs/quantized/synthetic_model/OrdinaryModelWithRecurrentInName.dot create mode 100644 tests/torch/data/reference_graphs/quantized/synthetic_model/ShiftScale__multi_input_branch.dot create mode 100644 tests/torch/data/reference_graphs/quantized/synthetic_model/ShiftScale__normalize__multi_input_branch.dot create mode 100644 tests/torch/data/reference_graphs/quantized/synthetic_model/ShiftScale__normalize__single_input_branch.dot create mode 100644 tests/torch/data/reference_graphs/quantized/synthetic_model/ShiftScale__single_input_branch.dot create mode 100644 tests/torch/experimental/replace_custom_modules/test_replace_timm_custom_modules.py create mode 100644 tests/torch/ptq/test_fast_bias_correction.py create mode 100644 
tests/torch/ptq/test_strip.py create mode 100644 tests/torch/ptq/test_weights_compression.py create mode 100644 tests/torch/quantization/extensions/test_timeout_extension_loader.py create mode 100644 tests/torch/quantization/test_tracing.py create mode 100644 tests/torch/test_model_transformer.py create mode 100644 tests/torch/test_tensor.py create mode 100644 tests/torch/test_transform_fn.py create mode 100755 tools/collect_pylint_input_files_for_backend.py diff --git a/.file-header b/.file-header new file mode 100644 index 00000000000..83eef0801ee --- /dev/null +++ b/.file-header @@ -0,0 +1,10 @@ +# Copyright \(c\) (\d{4}|\d{4}-\d{4}) Intel Corporation +# Licensed under the Apache License, Version 2.0 \(the "License"\); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. diff --git a/.git-blame-ignore-revs b/.git-blame-ignore-revs new file mode 100644 index 00000000000..45ea0b28c00 --- /dev/null +++ b/.git-blame-ignore-revs @@ -0,0 +1,5 @@ +# pre-commit autoformatting (isort, black) +de18dbe23246135d3604a8116c3484e98a8ed0cc + +# license formatting +34b6b82a746ddac6668c64f012edfe97de090f06 diff --git a/.github/action_configs/labeler.yml b/.github/action_configs/labeler.yml index 15048bd2bc5..1523e5fa038 100644 --- a/.github/action_configs/labeler.yml +++ b/.github/action_configs/labeler.yml @@ -5,37 +5,45 @@ dependencies: - '**/setup.py' NNCF PT: - - 'examples/torch/**/*' - - 'examples/post_training_quantization/torch/**/*' - - 'nncf/torch/**/*' - - 'tests/torch/**/*' + - 'examples/torch/**/*!(.md)' + - 'examples/post_training_quantization/torch/**/*!(.md)' + - 'nncf/torch/**/*!(.md)' + - 'tests/torch/**/*!(.md)' + - 'nncf/quantization/**/torch_backend.py' NNCF TF: - - 'examples/tensorflow/**/*' - - 'examples/post_training_quantization/tensorflow/**/*' - - 'nncf/tensorflow/**/*' - - 'tests/tensorflow/**/*' + - 'examples/tensorflow/**/*!(.md)' + - 'examples/post_training_quantization/tensorflow/**/*!(.md)' + - 'nncf/tensorflow/**/*!(.md)' + - 'tests/tensorflow/**/*!(.md)' + - 'nncf/quantization/**/tf_backend.py' NNCF ONNX: - - 'examples/onnx/**/*' - - 'examples/post_training_quantization/onnx/**/*' - - 'nncf/onnx/**/*' - - 'tests/onnx/**/*' + - 'examples/onnx/**/*!(.md)' + - 'examples/post_training_quantization/onnx/**/*!(.md)' + - 'nncf/onnx/**/*!(.md)' + - 'tests/onnx/**/*!(.md)' + - 'nncf/quantization/**/onnx_backend.py' NNCF OpenVINO: - - 'examples/openvino/**/*' - - 'examples/post_training_quantization/openvino/**/*' - - 'nncf/openvino/**/*' - - 'tests/openvino/**/*' + - 'examples/openvino/**/*!(.md)' + - 'examples/post_training_quantization/openvino/**/*!(.md)' + - 'nncf/openvino/**/*!(.md)' + - 'tests/openvino/**/*!(.md)' + - 'nncf/quantization/**/openvino_backend.py' + +NNCF PTQ: + - 'nncf/quantization/**/*!(.md)' + - 'tests/post_training/**/*!(.md)' documentation: - - '**/README.md' + - '**/*.md' - 'docs/**/*' experimental: - - 'nncf/experimental/**/*' + - 'nncf/experimental/**/*!(.md)' NNCF Common: - - 'examples/common/**/*' - - 'nncf/common/**/*' - - 'tests/common/**/*' + - 'examples/common/**/*!(.md)' + - 'nncf/common/**/*!(.md)' + - 'tests/common/**/*!(.md)' 
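The `.file-header` file added above is not literal text but a regular-expression template for the expected Apache-2.0 copyright header (hence the escaped parentheses and the `(\d{4}|\d{4}-\d{4})` year group); it is consumed by the `pylintfileheader` plugin enabled later in this patch via `.pylintrc`. As a rough, hypothetical sketch of how such a template could be checked outside of Pylint (the helper name and the checked path are illustrative assumptions, and the plugin's exact matching semantics may differ):

```python
# Hypothetical helper, not part of this patch: verifies that a source file starts
# with a header matching the regex template stored in .file-header.
import re
from pathlib import Path


def matches_file_header(source_path: str, template_path: str = ".file-header") -> bool:
    # The template file is itself a regular expression spanning the whole header.
    header_pattern = re.compile(Path(template_path).read_text())
    return header_pattern.match(Path(source_path).read_text()) is not None


if __name__ == "__main__":
    print(matches_file_header("nncf/version.py"))
```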
diff --git a/.github/workflows/api_changes_check.yml b/.github/workflows/api_changes_check.yml index 6082c2fec3c..973e65270fa 100644 --- a/.github/workflows/api_changes_check.yml +++ b/.github/workflows/api_changes_check.yml @@ -18,7 +18,7 @@ jobs: ref: "refs/pull/${{ github.event.number }}/merge" compare-api-doc-with-develop: needs: call-build-api-doc - runs-on: ubuntu-latest + runs-on: ubuntu-20.04 permissions: issues: write steps: @@ -40,7 +40,7 @@ jobs: CHANGED_FILES=$(echo $CHANGED_FILES | tr '\n' ' ') echo "changed_files=${CHANGED_FILES}" >> $GITHUB_OUTPUT - uses: actions/github-script@v6 - if: ${{ !contains(steps.diff.outputs.changed_files, 'differ') }} + if: ${{ !(contains(steps.diff.outputs.changed_files, 'differ')) && contains(github.event.pull_request.labels.*.name, 'API') }} with: github-token: ${{ secrets.ADD_LABELS_WITH_REST_API }} script: | diff --git a/.github/workflows/build_and_publish_doc.yml b/.github/workflows/build_and_publish_doc.yml index 6bc7f115421..622ecac5e40 100644 --- a/.github/workflows/build_and_publish_doc.yml +++ b/.github/workflows/build_and_publish_doc.yml @@ -22,7 +22,7 @@ jobs: uses: ./.github/workflows/build_schema_page.yml publish: needs: [call-build-html-doc, call-build-schema-page] - runs-on: ubuntu-latest + runs-on: ubuntu-20.04 steps: - name: Checkout main repo # the github-pages-deploy-action seems to require this step uses: actions/checkout@v3 diff --git a/.github/workflows/build_html_doc.yml b/.github/workflows/build_html_doc.yml index b632c583813..33948efd392 100644 --- a/.github/workflows/build_html_doc.yml +++ b/.github/workflows/build_html_doc.yml @@ -7,7 +7,7 @@ on: type: string jobs: build-html: - runs-on: ubuntu-latest + runs-on: ubuntu-20.04 steps: - name: Checkout uses: actions/checkout@v3 @@ -15,7 +15,7 @@ jobs: ref: ${{ inputs.ref }} - name: Install NNCF and doc requirements run: | - pip install -e . + pip install . pip install -r docs/api/requirements.txt - name: Build API docs run: | diff --git a/.github/workflows/build_schema_page.yml b/.github/workflows/build_schema_page.yml index c14b697e642..b6f5f9061dc 100644 --- a/.github/workflows/build_schema_page.yml +++ b/.github/workflows/build_schema_page.yml @@ -3,7 +3,7 @@ on: workflow_call: jobs: build-config-schema-html: - runs-on: ubuntu-latest + runs-on: ubuntu-20.04 steps: - name: Checkout uses: actions/checkout@v3 @@ -11,7 +11,7 @@ jobs: - name: Install and Build run: | pip install json-schema-for-humans - pip install -e . + pip install . python -c 'import jstyleson; from nncf.config import NNCFConfig; jstyleson.dump(NNCFConfig.schema(), open("./schema.json", "w"), indent=2)' mkdir schema generate-schema-doc --deprecated-from-description schema.json schema/index.html diff --git a/.github/workflows/labeler.yml b/.github/workflows/labeler.yml index c3eb8808fbd..d7e77d5e964 100644 --- a/.github/workflows/labeler.yml +++ b/.github/workflows/labeler.yml @@ -6,7 +6,7 @@ jobs: permissions: contents: read pull-requests: write - runs-on: ubuntu-latest + runs-on: ubuntu-20.04 steps: - uses: actions/labeler@v4 with: diff --git a/.github/workflows/post_pr_merge.yml b/.github/workflows/post_pr_merge.yml new file mode 100644 index 00000000000..0f12ef08a0e --- /dev/null +++ b/.github/workflows/post_pr_merge.yml @@ -0,0 +1,40 @@ +# With the squash-and-merge strategy that we are employing, the final commit that ends up in the develop branch +# after the PR merge has a brand-new commit SHA that is not equal to the SHA of the last commit in the PR. 
+# This means that the coverage reports uploaded to Codecov in the course of the (yet unmerged) PR checks will not +# be translated to the ultimate commit in the develop and Codecov will show "missing base report" errors in the UI. +# We don't want to re-run the precommits after PR merge via a separate 'on: push' action on the develop branch, so +# instead will grab the latest coverage report artifact from the just-merged PR and upload it as the report for the +# new commit on develop. Note that this will break if the PR is merged before the coverage artifact for the latest +# PR commit is generated. + +name: Post-PR merge actions + +on: + pull_request_target: + branches: + - develop + types: + - closed + +jobs: + upload-coverage: + if: github.event.pull_request.merged == true + runs-on: ubuntu-20.04 + steps: + - uses: actions/checkout@v3 # codecov uploader demands that the scanned files be present when uploading + with: + ref: ${{ github.event.pull_request.merge_commit_sha }} + - uses: dawidd6/action-download-artifact@v2 + with: + workflow: precommit.yml + check_artifacts: true + commit: ${{ github.event.pull_request.head.sha }} # this is the latest commit in the PR + name: coverage_xml + - name: Upload coverage report to Codecov + run: | + curl -Os https://uploader.codecov.io/latest/linux/codecov + chmod +x codecov + + # github.event.pull_request.merge_commit_sha is the fresh commit in the develop, + # provided that github.event.pull_request.merged == true + ./codecov -f ./coverage.xml -t ${{ secrets.CODECOV_TOKEN }} -C ${{ github.event.pull_request.merge_commit_sha }} -B develop -n "codecov-onnx" \ No newline at end of file diff --git a/.github/workflows/pre-commit-linters.yml b/.github/workflows/pre-commit-linters.yml new file mode 100644 index 00000000000..078c5f0fa6e --- /dev/null +++ b/.github/workflows/pre-commit-linters.yml @@ -0,0 +1,21 @@ +name: pre-commit-linters + +on: + pull_request: + types: + - opened + - reopened + - synchronize + +jobs: + pre-commit: + runs-on: ubuntu-20.04 + steps: + - uses: actions/checkout@v3 + - uses: actions/setup-python@v3 + with: + python-version: 3.8.10 + - name: Install pre-commit package + run: make install-pre-commit + - name: Run pre-commit linter suite + run: make pre-commit diff --git a/.github/workflows/precommit.yml b/.github/workflows/precommit.yml new file mode 100644 index 00000000000..76d6dc97b1b --- /dev/null +++ b/.github/workflows/precommit.yml @@ -0,0 +1,33 @@ +name: precommit + +on: + pull_request: + types: + - opened + - reopened + - synchronize + +jobs: + onnx: + runs-on: ubuntu-20.04 + steps: + - uses: actions/checkout@v3 + with: + lfs: true + - uses: actions/setup-python@v3 + with: + python-version: 3.8.10 + - name: Install NNCF and test requirements + run: make install-onnx-test + - name: Run ONNX precommit test scope + run: make test-onnx + - name: Upload coverage report as artifact + uses: actions/upload-artifact@v3 + with: + name: coverage_xml # optional + path: ./coverage.xml + - name: Upload coverage report to codecov + uses: codecov/codecov-action@v3 + with: + token: ${{ secrets.CODECOV_TOKEN }} + name: codecov-onnx # optional diff --git a/.github/workflows/python-publish.yml b/.github/workflows/python-publish.yml index c93772805eb..c5aa6b6594e 100644 --- a/.github/workflows/python-publish.yml +++ b/.github/workflows/python-publish.yml @@ -22,7 +22,7 @@ on: jobs: deploy: - runs-on: ubuntu-latest + runs-on: ubuntu-20.04 steps: - uses: actions/checkout@v2 diff --git a/.gitignore b/.gitignore index 6581fdafff1..4316ef00d3d 
100644 --- a/.gitignore +++ b/.gitignore @@ -120,9 +120,18 @@ examples/post_training_quantization/onnx/mobilenet_v2/mobilenet_v2_* examples/post_training_quantization/openvino/mobilenet_v2/mobilenet_v2_* examples/post_training_quantization/tensorflow/mobilenet_v2/mobilenet_v2_* examples/post_training_quantization/torch/mobilenet_v2/mobilenet_v2_* +examples/post_training_quantization/torch/ssd300_vgg16/ssd300_vgg16_* +examples/post_training_quantization/openvino/anomaly_stfpm_quantize_with_accuracy_control/stfpm_* +examples/post_training_quantization/openvino/yolov8/yolov8n* +examples/post_training_quantization/openvino/yolov8_quantize_with_accuracy_control/yolov8n* examples/**/runs/** examples/**/results/** +compressed_graph.dot +original_graph.dot +datasets/** # Tests tests/**/runs/** +tests/**/tmp*/** open_model_zoo/ +nncf-tests.xml diff --git a/.isort.cfg b/.isort.cfg new file mode 100644 index 00000000000..f9b4e5d5a32 --- /dev/null +++ b/.isort.cfg @@ -0,0 +1,6 @@ +[settings] +line_length = 120 +force_single_line = true +profile = black +single_line_exclusions = typing +skip_glob=examples/post_training_quantization/torch/ssd300_vgg16/main.py diff --git a/.markdownlint.yaml b/.markdownlint.yaml new file mode 100644 index 00000000000..3a792ec1aa7 --- /dev/null +++ b/.markdownlint.yaml @@ -0,0 +1,9 @@ +# Default state for all rules +default: true + +MD013: false # Line length +MD033: false # Inline HTML +MD034: false # Bare URL used +MD036: false # Emphasis used instead of a heading +MD037: false # Spaces inside emphasis markers +MD041: false # First line diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml new file mode 100644 index 00000000000..8b3e4e56f63 --- /dev/null +++ b/.pre-commit-config.yaml @@ -0,0 +1,22 @@ +default_language_version: + python: python3 + +repos: + - repo: https://github.com/psf/black + rev: 23.3.0 + hooks: + - id: black + files: '^.*\.py' + args: ["--line-length", "120"] + + - repo: https://github.com/pycqa/isort + rev: 5.12.0 + hooks: + - id: isort + name: isort (python) + + - repo: https://github.com/igorshubovych/markdownlint-cli + rev: v0.33.0 + hooks: + - id: markdownlint + args: [--config=.markdownlint.yaml] diff --git a/.pylintrc b/.pylintrc index e9f281a2ae5..03c773c92dd 100644 --- a/.pylintrc +++ b/.pylintrc @@ -26,9 +26,12 @@ disable = arguments-differ, max-line-length = 120 ignore-docstrings = yes -ignored-modules = numpy,torch,cv2,openvino,tensorflow +ignored-modules = numpy,torch,cv2,openvino,tensorflow,optimum,memory_profiler extension-pkg-whitelist = torch,cv2 init-hook='import sys; sys.setrecursionlimit(8 * sys.getrecursionlimit())' # to avoid https://stackoverflow.com/questions/36496192/pylint-infinite-recursion-in-astriod-package +load-plugins=pylintfileheader +file-header-path=.file-header +file-header-ignore-empty-files=yes [SIMILARITIES] ignore-imports = yes diff --git a/CODEOWNERS b/CODEOWNERS index af9beddc82a..53bae7e75c6 100644 --- a/CODEOWNERS +++ b/CODEOWNERS @@ -1,7 +1,3 @@ -* @openvinotoolkit/nncf_pytorch-maintainers +* @openvinotoolkit/nncf-maintainers -CODEOWNERS @openvinotoolkit/openvino-admins - -# Control 3d party dependencies -requirements.txt @openvino-configuration-mgmt -**/setup.py @openvino-configuration-mgmt +CODEOWNERS @openvinotoolkit/nncf-admins diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index b37d11a8b4a..9382d0ba0b3 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -1,24 +1,18 @@ # Contributing to NNCF Contributions are accepted in the form of: + * Submitting issues against the current code to report 
bugs or request features * Extending NNCF functionality with important features (e.g. to address community requests, improve usability, implement a recently published compression algorithm, etc.) * Adding example scripts to showcase NNCF usage in real training pipelines and provide the means to reproduce the reported compression results * Providing recipes (specific NNCF configurations and training hyperparameters) to obtain state-of-the-art compression using NNCF for existing models * Adding well-defined patches that integrate NNCF into third-party repositories -* Reducing performance overhead of NNCF compression by writing specialized CUDA kernels for compression operations or improving existing ones. +* Reducing performance overhead of NNCF compression by writing specialized CUDA kernels for compression operations or improving existing ones. The latter forms are accepted as pull requests from your own forks of the NNCF repository. Any contributions must not violate the repository's [LICENSE](./LICENSE) requirements. -## Installation -### (Experimental) ONNXRuntime-OpenVINO -Install the package and its dependencies by running the following in the repository root directory: -```bash -make install-onnx-dev -``` - ## Testing After your pull request is submitted, the maintainer will launch a scope of CI tests against it. @@ -28,42 +22,30 @@ The pre-commit scope may be run locally by executing the `pytest` command (witho Please run the pre-commit testing scope locally before submitting your PR and ensure that it passes to conserve your own time and that of the reviewing maintainer. New feature pull requests should include all the necessary testing code. -Testing is done using the `pytest` framework. +Testing is done using the `pytest` framework. The test files should be located inside the [tests](./tests) directory and start with `test_` so that the `pytest` is able to discover them. Any additional data that is required for tests (configuration files, mock datasets, etc.) must be stored within the [tests/data](./tests/data) folder. The test files themselves may be grouped in arbitrary directories according to their testing purpose and common sense. -Any additional tests in the [tests](./tests) directory will be automatically added into the pre-commit CI scope. +Any additional tests in the [tests](./tests) directory will be automatically added into the pre-commit CI scope. If your testing code is more extensive than unit tests (in terms of test execution time), or would be more suited to be executed on a nightly/weekly basis instead of for each future commit, please inform the maintainers in your PR discussion thread so that our internal testing pipelines could be adjusted accordingly. -### Preset command for testing -You can launch appropriate tests against the framework by running the following command: - -- (Experimental) ONNXRuntime-OpenVINO -```bash -test-onnx -``` - ## Code style + Changes to NNCF Python code should conform to [Python Style Guide](./docs/styleguide/PyGuide.md) -Pylint is used throughout the project to ensure code cleanliness and quality. +Pylint is used throughout the project to ensure code cleanliness and quality. A Pylint run is also done as part of the pre-commit scope - the pre-commit `pytest` scope will not be run if your code fails the Pylint checks. The Pylint rules and exceptions for this repository are described in the standard [.pylintrc](./.pylintrc) format - make sure your local linter uses these. 
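To make the testing conventions from the Testing section above concrete, here is a minimal, hypothetical test module (the file name, location, and checked assertions are illustrative only) that `pytest` would pick up automatically and that would therefore become part of the pre-commit scope:

```python
# tests/common/test_example.py -- illustrative only; not a file added by this patch.
# It sits under tests/ and its name starts with "test_", so pytest discovers it.
import pytest

import nncf


def test_package_exposes_version():
    # Any installed NNCF build should report a version string.
    assert isinstance(nncf.__version__, str)


@pytest.mark.parametrize("value", [0, 1, 2])
def test_parametrized_example(value):
    # A real test would read fixtures from tests/data instead of inlining them.
    assert value >= 0
```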
-### Preset command for linting -You can launch appropriate linting against the framework by running the following command: - -- (Experimental) ONNXRuntime-OpenVINO -```bash -pylint-onnx -``` - ## Binary files -Please refrain from adding huge binary files into the repository. If binary files have to be added, mark these to use Git LFS via the [.gitattributes](./.gitattributes) file. + +Please refrain from adding huge binary files into the repository. If binary files have to be added, mark these to use Git LFS via the [.gitattributes](./.gitattributes) file. ## Model identifiers + When adding model configs and checkpoints to be showcased in NNCF's sample script, follow the format for naming these files: + 1. The base name must be the same for the NNCF config file, AC config file, checkpoint file (PT/ONNX/OV) or checkpoint folder (TF), and other associated artifacts. 2. This name should be composed with the following format: `{model_name}_{dataset_name}` for FP32 models, `{topology_name}_{dataset_name}_{compression_algorithms_applied}`. The format may be extended if there are multiple models with the same topology, dataset and compression algos applied, which only differ in something else such as exact value of achieved sparsity. Align the naming of the new checkpoints with the existing ones. -3. Additional human-readable information on the model such as expected metrics and compression algorithm specifics (e.g. level of pruning/sparsity, per-tensor/per-channel quantizer configuration etc.) should be stored in a registry file (`tests/torch/sota_checkpoints_eval.json` for PT, `tests/tensorflow/sota_checkpoints_eval.json` for TF) \ No newline at end of file +3. Additional human-readable information on the model such as expected metrics and compression algorithm specifics (e.g. level of pruning/sparsity, per-tensor/per-channel quantizer configuration etc.) 
should be stored in a registry file (`tests/torch/sota_checkpoints_eval.json` for PT, `tests/tensorflow/sota_checkpoints_eval.json` for TF) diff --git a/Makefile b/Makefile index d6b4df85403..f2cc30f60c8 100644 --- a/Makefile +++ b/Makefile @@ -1,10 +1,17 @@ -PYLINT_VERSION := 2.13.9 JUNITXML_PATH ?= nncf-tests.xml +COVERAGE ?= --cov=./ --cov-report=xml ifdef DATA DATA_ARG := --data $(DATA) endif +install-pre-commit: + pip install pre-commit==3.2.2 + +install-pylint: + pip install pylint==2.13.9 + pip install pylintfileheader==0.3.2 + ############################################################################### # ONNX backend install-onnx-test: @@ -12,25 +19,26 @@ install-onnx-test: pip install -e .[onnx] pip install -r tests/onnx/requirements.txt pip install -r tests/cross_fw/install/requirements.txt + pip install -r tests/cross_fw/examples/requirements.txt pip install -r tests/onnx/benchmarking/requirements.txt - pip install -r examples/post_training_quantization/onnx/mobilenet_v2/requirements.txt -install-onnx-dev: install-onnx-test - pip install pylint==$(PYLINT_VERSION) +install-onnx-dev: install-onnx-test install-pre-commit install-pylint + pip install -r examples/post_training_quantization/onnx/mobilenet_v2/requirements.txt test-onnx: - pytest tests/onnx $(DATA_ARG) --junitxml ${JUNITXML_PATH} + pytest ${COVERAGE} tests/onnx $(DATA_ARG) --junitxml ${JUNITXML_PATH} -ONNX_PYFILES := $(shell find examples/post_training_quantization/onnx -type f -name "*.py") pylint-onnx: pylint --rcfile .pylintrc \ - nncf/onnx \ - nncf/quantization \ - tests/onnx \ - $(ONNX_PYFILES) + $(shell python3 tools/collect_pylint_input_files_for_backend.py onnx) test-install-onnx: - pytest tests/cross_fw/install/ -s \ + pytest tests/cross_fw/install -s \ + --backend onnx \ + --junitxml ${JUNITXML_PATH} + +test-examples-onnx: + pytest tests/cross_fw/examples -s \ --backend onnx \ --junitxml ${JUNITXML_PATH} @@ -41,28 +49,34 @@ install-openvino-test: pip install -e .[openvino] pip install -r tests/openvino/requirements.txt pip install -r tests/cross_fw/install/requirements.txt + pip install -r tests/cross_fw/examples/requirements.txt + +install-openvino-dev: install-openvino-test install-pre-commit install-pylint pip install -r examples/experimental/openvino/bert/requirements.txt pip install -r examples/experimental/openvino/yolo_v5/requirements.txt - pip install git+https://github.com/openvinotoolkit/open_model_zoo.git@dcbf53280a95dae3c6538689bafe760470f08ec2#subdirectory=tools/model_tools - -install-openvino-dev: install-openvino-test - pip install pylint==$(PYLINT_VERSION) + pip install -r examples/post_training_quantization/openvino/mobilenet_v2/requirements.txt + pip install -r examples/post_training_quantization/openvino/anomaly_stfpm_quantize_with_accuracy_control/requirements.txt + pip install -r examples/post_training_quantization/openvino/yolov8/requirements.txt + pip install -r examples/post_training_quantization/openvino/yolov8_quantize_with_accuracy_control/requirements.txt test-openvino: + # omitting ${COVERAGE} for internal runs since they seem to introduce a major slowdown pytest tests/openvino $(DATA_ARG) --junitxml ${JUNITXML_PATH} pylint-openvino: pylint --rcfile .pylintrc \ - nncf/openvino/ \ - nncf/experimental/openvino/ \ - tests/openvino/ \ - examples/experimental/openvino/ + $(shell python3 tools/collect_pylint_input_files_for_backend.py openvino) test-install-openvino: pytest tests/cross_fw/install -s \ --backend openvino \ --junitxml ${JUNITXML_PATH} +test-examples-openvino: + pytest 
tests/cross_fw/examples -s \ + --backend openvino \ + --junitxml ${JUNITXML_PATH} + ############################################################################### # TensorFlow backend install-tensorflow-test: @@ -70,26 +84,27 @@ install-tensorflow-test: pip install -e .[tf] pip install -r tests/tensorflow/requirements.txt pip install -r tests/cross_fw/install/requirements.txt + pip install -r tests/cross_fw/examples/requirements.txt pip install -r examples/tensorflow/requirements.txt -install-tensorflow-dev: install-tensorflow-test - pip install pylint==$(PYLINT_VERSION) +install-tensorflow-dev: install-tensorflow-test install-pre-commit install-pylint + pip install -r examples/post_training_quantization/tensorflow/mobilenet_v2/requirements.txt test-tensorflow: + # omitting ${COVERAGE} for internal runs since they seem to introduce a major slowdown pytest tests/common tests/tensorflow \ --junitxml ${JUNITXML_PATH} \ $(DATA_ARG) pylint-tensorflow: pylint --rcfile .pylintrc \ - nncf/tensorflow \ - nncf/experimental/tensorflow \ - tests/tensorflow \ - tests/experimental/tensorflow \ - examples/tensorflow + $(shell python3 tools/collect_pylint_input_files_for_backend.py tensorflow) test-install-tensorflow: - pytest tests/cross_fw/install/ -s --backend tf --junitxml ${JUNITXML_PATH} + pytest tests/cross_fw/install -s --backend tf --junitxml ${JUNITXML_PATH} + +test-examples-tensorflow: + pytest tests/cross_fw/examples -s --backend tf --junitxml ${JUNITXML_PATH} ############################################################################### # PyTorch backend @@ -98,28 +113,25 @@ install-torch-test: pip install -e .[torch] pip install -r tests/torch/requirements.txt pip install -r tests/cross_fw/install/requirements.txt + pip install -r tests/cross_fw/examples/requirements.txt pip install -r examples/torch/requirements.txt -install-torch-dev: install-torch-test - pip install pylint==$(PYLINT_VERSION) +install-torch-dev: install-torch-test install-pre-commit install-pylint + pip install -r examples/post_training_quantization/torch/mobilenet_v2/requirements.txt + pip install -r examples/post_training_quantization/torch/ssd300_vgg16/requirements.txt test-torch: + # omitting ${COVERAGE} for internal runs since they seem to introduce a major slowdown pytest tests/common tests/torch --junitxml ${JUNITXML_PATH} $(DATA_ARG) +COMMON_PYFILES := $(shell python3 tools/collect_pylint_input_files_for_backend.py common) pylint-torch: - pylint --rcfile .pylintrc \ - nncf/common \ - nncf/config \ - nncf/api \ - nncf/torch \ - nncf/experimental/torch \ - tests/common \ - tests/torch \ - examples/torch \ - examples/experimental/torch + pylint --rcfile .pylintrc \ + $(COMMON_PYFILES) \ + $(shell python3 tools/collect_pylint_input_files_for_backend.py torch) test-install-torch-cpu: - pytest tests/cross_fw/install/ -s \ + pytest tests/cross_fw/install -s \ --backend torch \ --host-configuration cpu \ --junitxml ${JUNITXML_PATH} @@ -128,3 +140,32 @@ test-install-torch-gpu: pytest tests/cross_fw/install -s \ --backend torch \ --junitxml ${JUNITXML_PATH} + +test-examples-torch: + pytest tests/cross_fw/examples -s \ + --backend torch \ + --junitxml ${JUNITXML_PATH} + +############################################################################### +# Common part +install-common-test: + pip install -U pip + pip install -e . 
+ pip install -r tests/common/requirements.txt + pip install -r tests/cross_fw/install/requirements.txt + pip install -r tests/cross_fw/examples/requirements.txt + +pylint-common: + pylint --rcfile .pylintrc \ + $(COMMON_PYFILES) + +test-common: + pytest ${COVERAGE} tests/common $(DATA_ARG) --junitxml ${JUNITXML_PATH} + +test-examples: + pytest tests/cross_fw/examples -s --junitxml ${JUNITXML_PATH} + +############################################################################### +# Pre commit check +pre-commit: + pre-commit run -a diff --git a/README.md b/README.md index 0434aeccd04..f2d2b6c8923 100644 --- a/README.md +++ b/README.md @@ -3,38 +3,38 @@ # Neural Network Compression Framework (NNCF) [Key Features](#key-features) • -[Installation](#Installation-guide) • +[Installation](#installation-guide) • [Documentation](#documentation) • [Usage](#usage) • -[Tutorials and Samples](#Model-compression-tutorials-and-samples) • -[Third-party integration](#Third-party-repository-integration) • -[Model Zoo](#NNCF-Compressed-Model-Zoo) - +[Tutorials and Samples](#model-compression-tutorials-and-samples) • +[Third-party integration](#third-party-repository-integration) • +[Model Zoo](./docs/ModelZoo.md) + [![GitHub Release](https://img.shields.io/github/v/release/openvinotoolkit/nncf?color=green)](https://github.com/openvinotoolkit/nncf/releases) [![Website](https://img.shields.io/website?up_color=blue&up_message=docs&url=https%3A%2F%2Fdocs.openvino.ai%2Flatest%2Fopenvino_docs_model_optimization_guide.html)](https://docs.openvino.ai/latest/openvino_docs_model_optimization_guide.html) [![Apache License Version 2.0](https://img.shields.io/badge/license-Apache_2.0-green.svg)](LICENSE) [![PyPI Downloads](https://static.pepy.tech/badge/nncf)](https://pypi.org/project/nncf/) - + Neural Network Compression Framework (NNCF) provides a suite of post-training and training-time algorithms for neural networks inference optimization in [OpenVINO™](https://docs.openvino.ai) with minimal accuracy drop. NNCF is designed to work with models from [PyTorch](https://pytorch.org/), [TensorFlow](https://www.tensorflow.org/), [ONNX](https://onnx.ai/) and [OpenVINO™](https://docs.openvino.ai/latest/home.html). -NNCF provides [samples](#Model-Compression-Samples) that demonstrate the usage of compression algorithms for different use cases and models. -[Compression results](#nncf-compressed-model-zoo) achievable with the NNCF-powered samples can be found in a table at -the end of this document. +NNCF provides [samples](#model-compression-tutorials-and-samples) that demonstrate the usage of compression algorithms for different use cases and models. See compression results achievable with the NNCF-powered samples at [Model Zoo page](./docs/ModelZoo.md). -The framework is organized as a Python\* package that can be built and used in a standalone mode. The framework -architecture is unified to make it easy to add different compression algorithms for both PyTorch and TensorFlow deep +The framework is organized as a Python\* package that can be built and used in a standalone mode. The framework +architecture is unified to make it easy to add different compression algorithms for both PyTorch and TensorFlow deep learning frameworks. 
## Key Features + ### Post-Training Compression Algorithms | Compression algorithm |OpenVINO|PyTorch| TensorFlow | ONNX | |:----------------------------------------------------------------------------| :---: | :---: |:--------:|:------------------:| | [Post-Training Quantization](./docs/compression_algorithms/post_training/Quantization.md) | Supported | Supported |Supported| Supported | +| [Weights Compression](./docs/compression_algorithms/CompressWeights.md) | Supported | Supported |Not supported| Not supported | ### Training-Time Compression Algorithms @@ -53,14 +53,14 @@ learning frameworks. - GPU-accelerated layers for faster compressed model fine-tuning. - Distributed training support. - Git patch for prominent third-party repository ([huggingface-transformers](https://github.com/huggingface/transformers)) demonstrating the process of integrating NNCF into custom training pipelines -- Seamless combination of pruning, sparsity and quantization algorithms. Please refer to [optimum-intel](https://github.com/huggingface/optimum-intel/tree/main/examples/openvino) for examples of +- Seamless combination of pruning, sparsity and quantization algorithms. Please refer to [optimum-intel](https://github.com/huggingface/optimum-intel/tree/main/examples/openvino) for examples of joint (movement) pruning, quantization and distillation (JPQD), end-to-end from NNCF optimization to compressed OpenVINO IR. - Exporting PyTorch compressed models to ONNX\* checkpoints and TensorFlow compressed models to SavedModel or Frozen Graph format, ready to use with [OpenVINO™ toolkit](https://docs.openvino.ai). - Support for [Accuracy-Aware model training](./docs/Usage.md#accuracy-aware-model-training) pipelines via the [Adaptive Compression Level Training](./docs/accuracy_aware_model_training/AdaptiveCompressionLevelTraining.md) and [Early Exit Training](./docs/accuracy_aware_model_training/EarlyExitTraining.md). ## Documentation -This documentation covers detailed information about NNCF algorithms and functions needed for the contribution to NNCF. +This documentation covers detailed information about NNCF algorithms and functions needed for the contribution to NNCF. The latest user documentation for NNCF is available [here](https://docs.openvino.ai/latest/openvino_docs_model_optimization_guide.html). @@ -80,12 +80,13 @@ The NNCF PTQ is the simplest way to apply 8-bit quantization. 
To run the algorit import nncf import openvino.runtime as ov import torch -from torchvision import datasets +from torchvision import datasets, transforms # Instantiate your uncompressed model model = ov.Core().read_model("/model_path") + # Provide validation part of the dataset to collect statistics needed for the compression algorithm -val_dataset = datasets.ImageFolder("/path") +val_dataset = datasets.ImageFolder("/path", transform=transforms.Compose([transforms.ToTensor()])) dataset_loader = torch.utils.data.DataLoader(val_dataset, batch_size=1) # Step 1: Initialize transformation function @@ -109,9 +110,10 @@ import torch from torchvision import datasets, models # Instantiate your uncompressed model -model = models.mobilenet_v2() +model = models.mobilenet_v2() + # Provide validation part of the dataset to collect statistics needed for the compression algorithm -val_dataset = datasets.ImageFolder("/path") +val_dataset = datasets.ImageFolder("/path", transform=transforms.Compose([transforms.ToTensor()])) dataset_loader = torch.utils.data.DataLoader(val_dataset) # Step 1: Initialize the transformation function @@ -137,8 +139,9 @@ import tensorflow_datasets as tfds # Instantiate your uncompressed model model = tf.keras.applications.MobileNetV2() + # Provide validation part of the dataset to collect statistics needed for the compression algorithm -val_dataset = tfds.load("/path", split="validation", +val_dataset = tfds.load("/path", split="validation", shuffle_files=False, as_supervised=True) # Step 1: Initialize transformation function @@ -164,8 +167,9 @@ from torchvision import datasets # Instantiate your uncompressed model onnx_model = onnx.load_model("/model_path") + # Provide validation part of the dataset to collect statistics needed for the compression algorithm -val_dataset = datasets.ImageFolder("/path") +val_dataset = datasets.ImageFolder("/path", transform=transforms.Compose([transforms.ToTensor()])) dataset_loader = torch.utils.data.DataLoader(val_dataset, batch_size=1) # Step 1: Initialize transformation function @@ -182,7 +186,6 @@ quantized_model = nncf.quantize(onnx_model, calibration_dataset) - [//]: # (NNCF provides full [samples](#post-training-quantization-samples), which demonstrate Post-Training Quantization usage for PyTorch, TensorFlow, ONNX, OpenVINO.) ### Training-Time Compression @@ -207,14 +210,14 @@ nncf_config = NNCFConfig.from_json("resnet50_int8.json") # Provide data loaders for compression algorithm initialization, if necessary import torchvision.datasets as datasets -representative_dataset = datasets.ImageFolder("/path") +representative_dataset = datasets.ImageFolder("/path", transform=transforms.Compose([transforms.ToTensor()])) init_loader = torch.utils.data.DataLoader(representative_dataset) nncf_config = register_default_init_args(nncf_config, init_loader) # Apply the specified compression algorithms to the model compression_ctrl, compressed_model = create_compressed_model(model, nncf_config) -# Now use compressed_model as a usual torch.nn.Module +# Now use compressed_model as a usual torch.nn.Module # to fine-tune compression parameters along with the model weights # ... the rest of the usual PyTorch-powered training pipeline @@ -255,7 +258,7 @@ compression_ctrl, compressed_model = create_compressed_model(model, nncf_config) # ... 
the rest of the usual TensorFlow-powered training pipeline -# Export to Frozen Graph, TensorFlow SavedModel or .h5 when done fine-tuning +# Export to Frozen Graph, TensorFlow SavedModel or .h5 when done fine-tuning compression_ctrl.export_model("compressed_model.pb", save_format="frozen_graph") ``` @@ -267,13 +270,17 @@ For a more detailed description of NNCF usage in your training code, see [this t For a quicker start with NNCF-powered compression, try sample notebooks and scripts presented below. -### Model Compression Tutorials +### Model Compression Tutorials A collection of ready-to-run Jupyter* notebooks are available to demonstrate how to use NNCF compression algorithms to optimize models for inference with the OpenVINO Toolkit: -- [Accelerate Inference of NLP models with Post-Training Qunatization API of NNCF](https://github.com/openvinotoolkit/openvino_notebooks/blob/main/notebooks/105-language-quantize-bert) + +- [Accelerate Inference of NLP models with Post-Training Quantization API of NNCF](https://github.com/openvinotoolkit/openvino_notebooks/blob/main/notebooks/105-language-quantize-bert) - [Convert and Optimize YOLOv8 with OpenVINO](https://github.com/openvinotoolkit/openvino_notebooks/blob/main/notebooks/230-yolov8-optimization) - [Convert and Optimize YOLOv7 with OpenVINO](https://github.com/openvinotoolkit/openvino_notebooks/tree/main/notebooks/226-yolov7-optimization) - [NNCF Post-Training Optimization of Segment Anything Model](https://github.com/openvinotoolkit/openvino_notebooks/tree/main/notebooks/237-segment-anything) +- [NNCF Post-Training Optimization of CLIP Model](https://github.com/openvinotoolkit/openvino_notebooks/tree/main/notebooks/228-clip-zero-shot-image-classification) +- [NNCF Post-Training Optimization of ImageBind Model](https://github.com/openvinotoolkit/openvino_notebooks/tree/main/notebooks/239-image-bind) +- [NNCF Post-Training Optimization of Whisper Model](https://github.com/openvinotoolkit/openvino_notebooks/tree/main/notebooks/227-whisper-subtitles-generation) - [Quantize a Segmentation Model and Show Live Inference](https://github.com/openvinotoolkit/openvino_notebooks/blob/main/notebooks/110-ct-segmentation-quantize) - [Training to Deployment with TensorFlow and OpenVINO](https://github.com/openvinotoolkit/openvino_notebooks/blob/main/notebooks/301-tensorflow-training-openvino) - [Migrate quantization from POT API to NNCF API](https://github.com/openvinotoolkit/openvino_notebooks/blob/main/notebooks/111-yolov5-quantization-migration) @@ -283,7 +290,9 @@ A collection of ready-to-run Jupyter* notebooks are available to demonstrate how - [Accelerate Inference of Sparse Transformer Models with OpenVINO and 4th Gen Intel Xeon Scalable Processors](https://github.com/openvinotoolkit/openvino_notebooks/blob/main/notebooks/116-sparsity-optimization) ### Post-Training Quantization Samples -Compact scripts demonstrating quantization and corresponding inference speed boost: + +Compact scripts demonstrating quantization and corresponding inference speed boost: + - [Post-Training Quantization of MobileNet v2 OpenVINO Model](examples/post_training_quantization/openvino/mobilenet_v2/README.md) - [Post-Training Quantization of YOLOv8 OpenVINO Model](examples/post_training_quantization/openvino/yolov8/README.md) - [Post-Training Quantization of Anomaly Classification OpenVINO model with control of accuracy metric](examples/post_training_quantization/openvino/quantize_with_accuracy_control/README.md) @@ -294,7 +303,9 @@ Compact scripts demonstrating 
quantization and corresponding inference speed boo - [Post-Training Quantization of MobileNet v2 TensorFlow Model](examples/post_training_quantization/tensorflow/mobilenet_v2/README.md) ### Training-Time Compression Samples + These examples provide full pipelines including compression, training and inference for classification, object detection and segmentation tasks. + - PyTorch samples: - [Image Classification sample](examples/torch/classification/README.md) - [Object Detection sample](examples/torch/object_detection/README.md) @@ -305,207 +316,70 @@ These examples provide full pipelines including compression, training and infere - [Instance Segmentation sample](examples/tensorflow/segmentation/README.md) ## Third-party repository integration + NNCF may be straightforwardly integrated into training/evaluation pipelines of third-party repositories. ### Used by - [OpenVINO Training Extensions](https://github.com/openvinotoolkit/training_extensions) - + NNCF is integrated into OpenVINO Training Extensions as model optimization backend. So you can train, optimize and export new models based on the available model templates as well as run exported models with OpenVINO. -- [HuggingFace Optimum Intel](https://huggingface.co/docs/optimum/intel/optimization_ov) +- [HuggingFace Optimum Intel](https://huggingface.co/docs/optimum/intel/optimization_ov) NNCF is used as a compression backend within the renowned `transformers` repository in HuggingFace Optimum Intel. ### Git patches for third-party repository + See [third_party_integration](./third_party_integration) for examples of code modifications (Git patches and base commit IDs are provided) that are necessary to integrate NNCF into the following repositories: - - [huggingface-transformers](third_party_integration/huggingface_transformers/README.md) + +- [huggingface-transformers](third_party_integration/huggingface_transformers/README.md) ## Installation Guide + For detailed installation instructions please refer to the [Installation](./docs/Installation.md) page. NNCF can be installed as a regular PyPI package via pip: -``` + +```bash pip install nncf ``` + If you want to install both NNCF and the supported PyTorch version in one line, you can do this by simply running: -``` + +```bash pip install nncf[torch] ``` + Other viable options besides `[torch]` are `[tf]`, `[onnx]` and `[openvino]`. NNCF is also available via [conda](https://anaconda.org/conda-forge/nncf): -``` + +```bash conda install -c conda-forge nncf ``` -You may also use one of the Dockerfiles in the [docker](./docker) directory to build an image with an environment already set up and ready for running NNCF [sample scripts](#model-compression-samples). +You may also use one of the Dockerfiles in the [docker](./docker) directory to build an image with an environment already set up and ready for running NNCF [sample scripts](#model-compression-tutorials-and-samples). ### System requirements + - Ubuntu\* 18.04 or later (64-bit) - Python\* 3.7 or later - Supported frameworks: - - PyTorch\* >=1.9.1, <1.14 - - TensorFlow\* >=2.4.0, <=2.11.1 + - PyTorch\* >=1.13.0, <2.1 + - TensorFlow\* >=2.4.0, <=2.12.0 - ONNX\* ~=1.13.1 - OpenVINO\* >=2022.3.0 -This repository is tested on Python* 3.8.10, PyTorch* 1.13.1 (NVidia CUDA\* Toolkit 11.6) and TensorFlow* 2.11.1 (NVidia CUDA\* Toolkit 11.2). +This repository is tested on Python* 3.8.10, PyTorch* 2.0.1 (NVidia CUDA\* Toolkit 11.7) and TensorFlow* 2.12.0 (NVidia CUDA\* Toolkit 11.8). 
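As an optional, unofficial sanity check (not part of the README itself), the installed versions can be printed and compared against the ranges listed above; this assumes the PyTorch extra was installed, e.g. with `pip install nncf[torch]`:

```python
# Quick version check after installation -- an illustrative snippet, not official docs.
import nncf
import torch

print("NNCF version:", nncf.__version__)      # 2.6.0 for this release
print("PyTorch version:", torch.__version__)  # expected to satisfy >=1.13.0, <2.1
```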
## NNCF Compressed Model Zoo -Results achieved using sample scripts, example patches to third-party repositories and NNCF configuration files provided -with this repository. See README.md files for [sample scripts](#model-compression-samples) and [example patches](#third-party-repository-integration) -to find instruction and links to exact configuration files and final checkpoints. -- [PyTorch models](#pytorch-models) - * [Classification](#pytorch_classification) - * [Object detection](#pytorch_object_detection) - * [Semantic segmentation](#pytorch_semantic_segmentation) - * [Natural language processing (3rd-party training pipelines)](#pytorch_nlp) -- [TensorFlow models](#tensorflow-models) - * [Classification](#tensorflow_classification) - * [Object detection](#tensorflow_object_detection) - * [Instance segmentation](#tensorflow_instance_segmentation) - -### PyTorch models - - -#### Classification - -|Model|Compression algorithm|Dataset|Accuracy (_drop_) %| -| :---: | :---: | :---: | :---: | -|ResNet-50|INT8|ImageNet|76.46 (-0.31)| -|ResNet-50|INT8 (per-tensor only)|ImageNet|76.39 (-0.24)| -|ResNet-50|Mixed, 43.12% INT8 / 56.88% INT4|ImageNet|76.05 (0.10)| -|ResNet-50|INT8 + Sparsity 61% (RB)|ImageNet|75.42 (0.73)| -|ResNet-50|INT8 + Sparsity 50% (RB)|ImageNet|75.50 (0.65)| -|ResNet-50|Filter pruning, 40%, geometric median criterion|ImageNet|75.57 (0.58)| -|Inception V3|INT8|ImageNet|77.45 (-0.12)| -|Inception V3|INT8 + Sparsity 61% (RB)|ImageNet|76.36 (0.97)| -|MobileNet V2|INT8|ImageNet|71.07 (0.80)| -|MobileNet V2|INT8 (per-tensor only)|ImageNet|71.24 (0.63)| -|MobileNet V2|Mixed, 58.88% INT8 / 41.12% INT4|ImageNet|70.95 (0.92)| -|MobileNet V2|INT8 + Sparsity 52% (RB)|ImageNet|71.09 (0.78)| -|MobileNet V3 small|INT8|ImageNet|66.98 (0.68)| -|SqueezeNet V1.1|INT8|ImageNet|58.22 (-0.03)| -|SqueezeNet V1.1|INT8 (per-tensor only)|ImageNet|58.11 (0.08)| -|SqueezeNet V1.1|Mixed, 52.83% INT8 / 47.17% INT4|ImageNet|57.57 (0.62)| -|ResNet-18|XNOR (weights), scale/threshold (activations)|ImageNet|61.67 (8.09)| -|ResNet-18|DoReFa (weights), scale/threshold (activations)|ImageNet|61.63 (8.13)| -|ResNet-18|Filter pruning, 40%, magnitude criterion|ImageNet|69.27 (0.49)| -|ResNet-18|Filter pruning, 40%, geometric median criterion|ImageNet|69.31 (0.45)| -|ResNet-34|Filter pruning, 50%, geometric median criterion + KD|ImageNet|73.11 (0.19)| -|GoogLeNet|Filter pruning, 40%, geometric median criterion|ImageNet|69.47 (0.30)| - - -#### Object detection - -|Model|Compression algorithm|Dataset|mAP (_drop_) %| -| :---: | :---: | :---: | :---: | -|SSD300-MobileNet|INT8 + Sparsity 70% (Magnitude)|VOC12+07 train, VOC07 eval|62.95 (-0.72)| -|SSD300-VGG-BN|INT8|VOC12+07 train, VOC07 eval|77.81 (0.47)| -|SSD300-VGG-BN|INT8 + Sparsity 70% (Magnitude)|VOC12+07 train, VOC07 eval|77.66 (0.62)| -|SSD300-VGG-BN|Filter pruning, 40%, geometric median criterion|VOC12+07 train, VOC07 eval|78.35 (-0.07)| -|SSD512-VGG-BN|INT8|VOC12+07 train, VOC07 eval|80.04 (0.22)| -|SSD512-VGG-BN|INT8 + Sparsity 70% (Magnitude)|VOC12+07 train, VOC07 eval|79.68 (0.58)| - - -#### Semantic segmentation - -|Model|Compression algorithm|Dataset|mIoU (_drop_) %| -| :---: | :---: | :---: | :---: | -|UNet|INT8|CamVid|71.89 (0.06)| -|UNet|INT8 + Sparsity 60% (Magnitude)|CamVid|72.46 (-0.51)| -|ICNet|INT8|CamVid|67.89 (0.00)| -|ICNet|INT8 + Sparsity 60% (Magnitude)|CamVid|67.16 (0.73)| -|UNet|INT8|Mapillary|56.09 (0.15)| -|UNet|INT8 + Sparsity 60% (Magnitude)|Mapillary|55.69 (0.55)| -|UNet|Filter pruning, 25%, geometric median criterion|Mapillary|55.64 
(0.60)| - - -#### NLP (HuggingFace Transformers-powered models) - -|PyTorch Model|Compression algorithm|Dataset|Accuracy (Drop) %| -| :---: | :---: | :---: | :---: | -|BERT-base-chinese|INT8|XNLI|77.22 (0.46)| -|BERT-base-cased|INT8|CoNLL2003|99.18 (-0.01)| -|BERT-base-cased|INT8|MRPC|84.8 (-0.24)| -|BERT-large (Whole Word Masking)|INT8|SQuAD v1.1|F1: 92.68 (0.53)| -|RoBERTa-large|INT8|MNLI|matched: 89.25 (1.35)| -|DistilBERT-base|INT8|SST-2|90.3 (0.8)| -|MobileBERT|INT8|SQuAD v1.1|F1: 89.4 (0.58)| -|GPT-2|INT8|WikiText-2 (raw)|perplexity: 20.9 (-1.17)| - -### TensorFlow models - - -#### Classification - -|Model|Compression algorithm|Dataset|Accuracy (_drop_) %| -| :---: | :---: | :---: | :---: | -|Inception V3|INT8 (per-tensor symmetric for weights, per-tensor asymmetric half-range for activations)|ImageNet|78.39 (-0.48)| -|Inception V3|INT8 (per-tensor symmetric for weights, per-tensor asymmetric half-range for activations), Sparsity 61% (RB)|ImageNet|77.52 (0.39)| -|Inception V3|Sparsity 54% (Magnitude)|ImageNet|77.86 (0.05)| -|MobileNet V2|INT8 (per-tensor symmetric for weights, per-tensor asymmetric half-range for activations)|ImageNet|71.63 (0.22)| -|MobileNet V2|INT8 (per-tensor symmetric for weights, per-tensor asymmetric half-range for activations), Sparsity 52% (RB)|ImageNet|70.94 (0.91)| -|MobileNet V2| Sparsity 50% (RB)|ImageNet|71.34 (0.51)| -|MobileNet V2 (TensorFlow Hub MobileNet V2)|Sparsity 35% (Magnitude)|ImageNet|71.87 (-0.02)| -|MobileNet V3 (Small)|INT8 (per-channel symmetric for weights, per-tensor asymmetric half-range for activations)|ImageNet|67.79 (0.59)| -|MobileNet V3 (Small)|INT8 (per-channel symmetric for weights, per-tensor asymmetric half-range for activations) + Sparsity 42% (Magnitude)|ImageNet|67.44 (0.94)| -|MobileNet V3 (Large)|INT8 (per-channel symmetric for weights, per-tensor asymmetric half-range for activations)|ImageNet|75.04 (0.76)| -|MobileNet V3 (Large)|INT8 (per-channel symmetric for weights, per-tensor asymmetric half-range for activations) + Sparsity 42% (RB)|ImageNet|75.24 (0.56)| -|ResNet-50|INT8|ImageNet|74.99 (0.06)| -|ResNet-50|INT8 (per-tensor symmetric for weights, per-tensor asymmetric half-range for activations) + Sparsity 65% (RB)|ImageNet|74.36 (0.69)| -|ResNet-50|Sparsity 80% (RB)|ImageNet|74.38 (0.67)| -|ResNet-50|Filter pruning, 40%, geometric median criterion|ImageNet|74.96 (0.09)| -|ResNet-50|INT8 (per-tensor symmetric for weights, per-tensor asymmetric half-range for activations) + Filter pruning, 40%, geometric median criterion|ImageNet|75.09 (-0.04)| - - -#### Object detection - -|Model|Compression algorithm|Dataset|mAP (_drop_) %| -| :---: | :---: | :---: | :---: | -|RetinaNet|INT8 (per-tensor symmetric for weights, per-tensor asymmetric half-range for activations)|COCO 2017|33.12 (0.31)| -|RetinaNet|Magnitude sparsity (50%)|COCO 2017|33.10 (0.33)| -|RetinaNet|Filter pruning, 40%|COCO 2017|32.72 (0.71)| -|RetinaNet|INT8 (per-tensor symmetric for weights, per-tensor asymmetric half-range for activations) + filter pruning 40%|COCO 2017|32.67 (0.76)| -|YOLO v4|INT8 (per-channel symmetric for weights, per-tensor asymmetric half-range for activations)|COCO 2017|46.20 (0.87)| -|YOLO v4|Magnitude sparsity, 50%|COCO 2017|46.49 (0.58)| - - -#### Instance segmentation - -|Model|Compression algorithm|Dataset|mAP (_drop_) %| -| :---: | :---: | :---: | :---: | -|Mask-R-CNN|INT8 (per-tensor symmetric for weights, per-tensor asymmetric half-range for activations)|COCO 2017|37.19 (0.14)| -|Mask-R-CNN|Magnitude sparsity, 50%|COCO 
2017|36.94 (0.39)| -### ONNX models - -#### Classification - -| ONNX Model | Compression algorithm |Dataset|Accuracy (Drop) %| -| :---: |:---------------------:| :---: | :---: | -|ResNet-50| INT8 (Post-Training) |ImageNet|74.63 (0.21)| -|ShuffleNet| INT8 (Post-Training) |ImageNet|47.25 (0.18)| -|GoogleNet| INT8 (Post-Training) |ImageNet|66.36 (0.3)| -|SqueezeNet V1.0| INT8 (Post-Training) |ImageNet|54.3 (0.54)| -|MobileNet V2| INT8 (Post-Training) |ImageNet|71.38 (0.49)| -|DenseNet-121| INT8 (Post-Training) |ImageNet|60.16 (0.8)| -|VGG-16| INT8 (Post-Training) |ImageNet|72.02 (0.0)| - -#### Object Detection - -|ONNX Model| Compression algorithm | Dataset |mAP (drop) %| -| :---: |:---------------------:| :---: | :---: | -|SSD1200| INT8 (Post-Training) |COCO2017|20.17 (0.17)| -|Tiny-YOLOv2| INT8 (Post-Training) |VOC12|29.03 (0.23)| +List of models and compression results for them can be found at our [Model Zoo page](./docs/ModelZoo.md). ## Citing -``` +```bibtex @article{kozlov2020neural, title = {Neural network compression framework for fast model inference}, author = {Kozlov, Alexander and Lazarevich, Ivan and Shamporov, Vasily and Lyalyushkin, Nikolay and Gorbachev, Yury}, @@ -515,13 +389,15 @@ to find instruction and links to exact configuration files and final checkpoints ``` ## Contributing Guide + Refer to the [CONTRIBUTING.md](./CONTRIBUTING.md) file for guidelines on contributions to the NNCF repository. ## Useful links + - [Documentation](./docs) - Example scripts (model objects available through links in respective README.md files): - - [PyTorch](./examples/torch) - - [TensorFlow](./examples/tensorflow) + - [PyTorch](./examples/torch) + - [TensorFlow](./examples/tensorflow) - [FAQ](./docs/FAQ.md) - [Notebooks](https://github.com/openvinotoolkit/openvino_notebooks#-model-training) - [HuggingFace Optimum Intel](https://huggingface.co/docs/optimum/intel/optimization_ov) diff --git a/ReleaseNotes.md b/ReleaseNotes.md index 4a2cffe75b1..4d9f3d83b6a 100644 --- a/ReleaseNotes.md +++ b/ReleaseNotes.md @@ -1,12 +1,73 @@ # Release Notes +## New in Release 2.6.0 + +Post-training Quantization: + +- Features: + - Added `CPU_SPR` device type support. + - Added quantizer scales unification. + - Added quantization scheme for ReduceSum operation. + - Added new types (ReduceL2, ReduceSum, Maximum) to the ignored scope for `ModelType.Transformer`. + - (OpenVINO) Added SmoothQuant algorithm. + - (OpenVINO) Added ChannelAlignment algorithm. + - (OpenVINO) Added HyperparameterTuner algorithm. + - (PyTorch) Added FastBiasCorrection algorithm support. + - (OpenVINO, ONNX) Added embedding weights quantization. + - (OpenVINO, PyTorch) Added new `compress_weights` method that provides data-free [INT8 weights compression](docs/compression_algorithms/CompressWeights.md). +- Fixes: + - Fixed detection of decomposed post-processing in models. + - Multiple fixes (new patterns, bugfixes, etc.) to solve issue [#1936](https://github.com/openvinotoolkit/nncf/issues/1936). + - Fixed model reshaping during quantization to keep the original model shape. + - (OpenVINO) Added support for sequential models quantization. + - (OpenVINO) Fixed in-place statistics cast to support empty dimensions. + - (OpenVINO, ONNX) Fixed quantization of the MatMul operation with weights rank > 2. + - (OpenVINO, ONNX) Fixed BiasCorrection algorithm to enable [CLIP model quantization](https://github.com/openvinotoolkit/openvino_notebooks/tree/main/notebooks/228-clip-zero-shot-image-classification).
+- Improvements: + - Optimized `quantize(…)` pipeline (up to 4.3x speed up in total). + - Optimized `quantize_with_accuracy_control(…)` pipeline (up to 8x speed up for [122-quantizing-model-with-accuracy-control](https://github.com/openvinotoolkit/openvino_notebooks/tree/main/notebooks/122-quantizing-model-with-accuracy-control) notebook). + - Optimized general statistics collection (up to 1.2x speed up for ONNX backend). + - Ignored patterns separated from Fused patterns scheme (with multiple patterns addition). +- Tutorials: + - [Post-Training Optimization of Segment Anything Model](https://github.com/openvinotoolkit/openvino_notebooks/tree/main/notebooks/237-segment-anything). + - [Post-Training Optimization of CLIP Model](https://github.com/openvinotoolkit/openvino_notebooks/tree/main/notebooks/228-clip-zero-shot-image-classification). + - [Post-Training Optimization of ImageBind Model](https://github.com/openvinotoolkit/openvino_notebooks/tree/main/notebooks/239-image-bind). + - [Post-Training Optimization of Whisper Model](https://github.com/openvinotoolkit/openvino_notebooks/tree/main/notebooks/227-whisper-subtitles-generation). + - [Post-Training Optimization with accuracy control](https://github.com/openvinotoolkit/openvino_notebooks/tree/main/notebooks/122-quantizing-model-with-accuracy-control). + +Compression-aware training: + +- Features: + - Added shape pruning processor for BootstrapNAS algorithm. + - Added KD loss for BootstrapNAS algorithm. + - Added `validate_scopes` parameter for NNCF configuration. + - (PyTorch) Added PyTorch 2.0 support. + - (PyTorch) Added `.strip()` option to API. + - (PyTorch) Enabled bfloat data type for quantization kernels. + - (PyTorch) Quantized models can now be `torch.jit.trace`d without calling `.strip()`. + - (PyTorch) Added support for overridden `forward` instance attribute on model objects passed into `create_compressed_model`. + - (Tensorflow) Added Tensorflow 2.12 support. +- Fixes: + - (PyTorch) Fixed padding adjustment issue in the elastic kernel to work with the different active kernel sizes. + - (PyTorch) Fixed the torch graph tracing in the case where the tensors belonging to parallel edges are interleaved in the order of the tensor argument. + - (PyTorch) Fixed recurrent nodes matching (LSTM, GRU cells) condition with the strict rule to avoid adding unnecessary nodes to the ignored scope. + - (PyTorch) Fixed `torch.jit.script` wrapper so that user-side exception handling during `torch.jit.script` invocation does not cause NNCF to be permanently disabled. + - (PyTorch, Tensorflow) Adjusted quantizer propagation algorithm to check if quantizer propagation will result in output quantization. + - (PyTorch) Added redefined `__class__` method for ProxyModule that avoids causing an error while calling `.super()` in the forward method. +- Deprecations/Removals: + - (PyTorch) Removed deprecated `NNCFNetwork.__getattr__`, `NNCFNetwork.get_nncf_wrapped_model` methods. +- Requirements: + - Updated PyTorch version (2.0.1). + - Updated Tensorflow version (2.12.0). + ## New in Release 2.5.0 + Post-training Quantization: - Features: - Official release of OpenVINO framework support. - Ported NNCF OpenVINO backend to use the [nGraph](https://docs.openvino.ai/2021.3/openvino_docs_nGraph_DG_Introduction.html) representation of OpenVINO models. - - Changed dependecies of NNCF OpenVINO backend. It now depends on `openvino` package and not on the `openvino-dev` package. + - Changed dependencies of NNCF OpenVINO backend.
It now depends on `openvino` package and not on the `openvino-dev` package. - Added GRU/LSTM quantization support. - Added quantizer scales unification. - Added support for models with 3D and 5D Depthwise convolution. @@ -61,15 +122,18 @@ Compression-aware training: - Added Windows support for NNCF. ## New in Release 2.4.0 + Target version updates: + - Bump target framework versions to PyTorch 1.13.1, TensorFlow 2.8.x, ONNX 1.12, ONNXRuntime 1.13.1 - Increased target HuggingFace transformers version for the integration patch to 4.23.1 Features: + - Official release of the ONNX framework support. NNCF may now be used for post-training quantization (PTQ) on ONNX models. Added an [example script](examples/post_training_quantization/onnx/mobilenet_v2) demonstrating the ONNX post-training quantization on MobileNetV2. -- Preview release of OpenVINO framework support. +- Preview release of OpenVINO framework support. NNCF may now be used for post-training quantization on OpenVINO models. Added an example script demonstrating the OpenVINO post-training quantization on MobileNetV2. `pip install nncf[openvino]` will install NNCF with the required OV framework dependencies. - Common post-training quantization API across the supported framework model formats (PyTorch, TensorFlow, ONNX, OpenVINO IR) via the `nncf.quantize(...)` function. @@ -80,13 +144,14 @@ The parameter set of the function is the same for all frameworks - actual framew See [description](nncf/experimental/torch/sparsity/movement/MovementSparsity.md) of the movement pruning involved in the JPQD for details. Bugfixes: + - Fixed a division by zero if every operation is added to ignored scope - Improved logging output, cutting down on the number of messages being output to the standard `logging.INFO` log level. - Fixed FLOPS calculation for linear filters - this impacts existing models that were pruned with a FLOPS target. - "chunk" and "split" ops are correctly handled during pruning. - Linear layers may now be pruned by input and output independently. - Matmul-like operations and subsequent arithmetic operations are now treated as a fused pattern. -- (PyTorch) Fixed a rare condition with accumulator overflow in CUDA quantization kernels, which led to CUDA runtime errors and NaN values appearing in quantized tensors and +- (PyTorch) Fixed a rare condition with accumulator overflow in CUDA quantization kernels, which led to CUDA runtime errors and NaN values appearing in quantized tensors and - (PyTorch) `transformers` integration patch now allows to export to ONNX during training, and not only at the end of it. - (PyTorch) `torch.nn.utils.weight_norm` weights are now detected correctly. - (PyTorch) Exporting a model with sparsity or pruning no longer leads to weights in the original model object in-memory to be hard-set to 0. @@ -98,6 +163,7 @@ Bugfixes: - (ONNX) Improved the working time of PTQ by optimizing the calls to ONNX shape inferencing. Breaking changes: + - Fused patterns will be excluded from quantization via `ignored_scopes` only if the top-most node in data flow order matches against `ignored_scopes` - NNCF config's `"ignored_scopes"` and `"target_scopes"` are now strictly checked to be matching against at least one node in the model graph instead of silently ignoring the unmatched entries. - Calling `setup.py` directly to install NNCF is deprecated and no longer guaranteed to work. @@ -106,18 +172,21 @@ Breaking changes: - (ONNX) Removed CompressionBuilder. 
Excluded examples of NNCF for ONNX with CompressionBuilder API ## New in Release 2.3.0 + - (ONNX) PTQ API support for ONNX. - (ONNX) Added PTQ examples for ONNX in image classification, object detection, and semantic segmentation. - (PyTorch) Added `BootstrapNAS` to find high-performing sub-networks from the super-network optimization. Bugfixes: + - (PyTorch) Returned the initial quantized model when the retraining failed to find out the best checkpoint. - (Experimental) Fixed weight initialization for `ONNXGraph` and `MinMaxQuantization` ## New in Release 2.2.0 + - (TensorFlow) Added TensorFlow 2.5.x support. - (TensorFlow) The `SubclassedConverter` class was added to create `NNCFGraph` for the `tf.Graph` Keras model. -- (TensorFlow) Added `TFOpLambda ` layer support with `TFModelConverter`, `TFModelTransformer`, and `TFOpLambdaMetatype`. +- (TensorFlow) Added `TFOpLambda` layer support with `TFModelConverter`, `TFModelTransformer`, and `TFOpLambdaMetatype`. - (TensorFlow) Patterns from `MatMul` and `Conv2D` to `BiasAdd` and `Metatypes` of TensorFlow operations with weights `TFOpWithWeightsMetatype` are added. - (PyTorch, TensorFlow) Added prunings for `Reshape` and `Linear` as `ReshapePruningOp` and `LinearPruningOp`. - (PyTorch) Added mixed precision quantization config with HAWQ for `Resnet50` and `Mobilenet_v2` for the latest VPU. @@ -128,6 +197,7 @@ Bugfixes: - (Experimental) Added `ONNXPostTrainingQuantization` and `MinMaxQuantization` supports for ONNX. Bugfixes: + - (PyTorch, TensorFlow) Added exception handling of BN adaptation for zero sample values. - (PyTorch, TensorFlow) Fixed learning rate after validation step for `EarlyExitCompressionTrainingLoop`. - (PyTorch) Fixed `FakeQuantizer` to make exact zeros. @@ -136,6 +206,7 @@ Bugfixes: - (PyTorch) Fixed the statistics collection from the pruned model. ## New in Release 2.1.0 + - (PyTorch) All PyTorch operations are now NNCF-wrapped automatically. - (TensorFlow) Scales for concat-affecting quantizers are now unified - (PyTorch) The pruned filters are now set to 0 in the exported ONNX file instead of removing them from the ONNX definition. @@ -153,21 +224,27 @@ Bugfixes: - (PyTorch - Experimental) Added an algorithm to search the model's architecture for basic building blocks. Bugfixes: + - (TensorFlow) Fixed a bug where an operation with int32 inputs (following a Cast op) was attempted to be quantized. - (PyTorch, TensorFlow) LeakyReLU now properly handled during pruning - (PyTorch) Fixed errors with custom modules failing at the `determine_subtype` stage of metatype assignment. 
- (PyTorch) Fix handling modules with `torch.nn.utils.weight_norm.WeightNorm` applied ## New in Release 2.0.2 + Target version updates: + - Relax TensorFlow version requirements to 2.4.x ## New in Release 2.0.1 + Target version updates: + - Bump target framework versions to PyTorch 1.9.1 and TensorFlow 2.4.3 - Increased target HuggingFace transformers version for the integration patch to 4.9.1 Bugfixes: + - (PyTorch, TensorFlow) Fixed statistic collection for the algo mixing scenario - (PyTorch, TensorFlow) Increased pruning algorithm robustness in cases of a disconnected NNCF graph - (PyTorch, TensorFlow) Fixed the fatality of NNCF graph PNG rendering failures @@ -175,7 +252,7 @@ Bugfixes: - (PyTorch) Fixed a bug with quantizing shared weights multiple times - (PyTorch) Fixed knowledge distillation failures in CPU-only and DataParallel scenarios - (PyTorch) Fixed sparsity application for torch.nn.Embedding and EmbeddingBag modules -- (PyTorch) Added GroupNorm + ReLU as a fusable pattern +- (PyTorch) Added GroupNorm + ReLU as a fusible pattern - (TensorFlow) Fixed gamma fusion handling for pruning TF BatchNorm - (PyTorch) Fixed pruning for models where operations have multiple convolution predecessors - (PyTorch) Fixed NNCFNetwork wrapper so that `self` in the calls to the wrapped model refers to the wrapper NNCFNetwork object and not to the wrapped model @@ -185,7 +262,8 @@ Bugfixes: - (PyTorch, TensorFlow) Fixed FLOPS calculation for grouped convolutions - (PyTorch) Fixed knowledge distillation failures for tensors of unsupported shapes - will now ignore output tensors with unsupported shapes instead of crashing. -## New in Release 2.0: +## New in Release 2.0 + - Added TensorFlow 2.4.2 support - NNCF can now be used to apply the compression algorithms to models originally trained in TensorFlow. NNCF with TensorFlow backend supports the following features: - Compression algorithms: @@ -217,15 +295,16 @@ NNCF with TensorFlow backend supports the following features: - Compression results are claimed for MaskRCNN - Accuracy-aware training available for filter pruning and sparsity in order to achieve best compression results within a given accuracy drop threshold in a fully automated fashion. -- Framework-specific checkpoints produced with NNCF now have NNCF-specific compression state information included, so that the exact compressed model state can be restored/loaded without having to provide the same NNCF config file that was used during the creation of the NNCF-compressed checkpoint +- Framework-specific checkpoints produced with NNCF now have NNCF-specific compression state information included, so that the exact compressed model state can be restored/loaded without having to provide the same NNCF config file that was used during the creation of the NNCF-compressed checkpoint - Common interface for compression methods for both PyTorch and TensorFlow backends (https://github.com/openvinotoolkit/nncf/tree/develop/nncf/api). - (PyTorch) Added an option to specify an effective learning rate multiplier for the trainable parameters of the compression algorithms via NNCF config, for finer control over which should tune faster - the underlying FP32 model weights or the compression parameters. - (PyTorch) Unified scales for concat operations - the per-tensor quantizers that affect the concat operations will now have identical scales so that the resulting concatenated tensor can be represented without loss of accuracy w.r.t. the concatenated subcomponents. 
-- (TensorFlow) Algo-mixing: Added configuration files and reference checkpoints for filter-pruned + qunatized models: ResNet50@ImageNet2012(40% of filters pruned + INT8), RetinaNet@COCO2017(40% of filters pruned + INT8). -- (Experimental, PyTorch) [Learned Global Ranking]((https://arxiv.org/abs/1904.12368)) filter pruning mechanism for better pruning ratios with less accuracy drop for a broad range of models has been implemented. +- (TensorFlow) Algo-mixing: Added configuration files and reference checkpoints for filter-pruned + quantized models: ResNet50@ImageNet2012(40% of filters pruned + INT8), RetinaNet@COCO2017(40% of filters pruned + INT8). +- (Experimental, PyTorch) [Learned Global Ranking](https://arxiv.org/abs/1904.12368) filter pruning mechanism for better pruning ratios with less accuracy drop for a broad range of models has been implemented. - (Experimental, PyTorch) Knowledge distillation supported, ready to be used with any compression algorithm to produce an additional loss source of the compressed model against the uncompressed version Breaking changes: + - `CompressionLevel` has been renamed to `CompressionStage` - `"ignored_scopes"` and "target_scopes" no longer allow prefix matching - use full-fledged regular expression approach via {re} if anything more than an exact match is desired. - (PyTorch) Removed version-agnostic name mapping for ReLU operations, i.e. the NNCF configs that referenced "RELU" (all caps) as an operation name will now have to reference an exact ReLU PyTorch function name such as "relu" or "relu_" @@ -235,15 +314,19 @@ Breaking changes: - `"quantizable_subgraph_patterns"` option removed from the NNCF config Bugfixes: + - (PyTorch) Fixed a hang with batchnorm adaptation being applied in DDP mode - (PyTorch) Fixed tracing of the operations that return NotImplemented -## New in Release 1.7.1: +## New in Release 1.7.1 + Bugfixes: + - Fixed a bug with where compressed models that were supposed to return named tuples actually returned regular tuples - Fixed an issue with batch norm adaptation-enabled compression runs hanging in the DDP scenario -## New in Release 1.7: +## New in Release 1.7 + - Adjust Padding feature to support accurate execution of U4 on VPU - when setting "target_device" to "VPU", the training-time padding values for quantized convolutions will be adjusted to better reflect VPU inference process. - Weighted layers that are "frozen" (i.e. have requires_grad set to False at compressed model creation time) are no longer considered for compression, to better handle transfer learning cases. - Quantization algorithm now sets up quantizers without giving an option for requantization, which guarantees best performance, although at some cost to quantizer configuration flexibility. @@ -254,8 +337,9 @@ Bugfixes: - Bumped target PyTorch version to 1.8.1 and relaxed package requirements constraints to allow installation into environments with PyTorch >=1.5.0 Notable bugfixes: + - Fixed bias pruning in depthwise convolution -- Made per-tensor quantization available for all operations that support per-channel quantization +- Made per-tensor quantization available for all operations that support per-channel quantization - Fixed progressive training performance degradation when an output tensor of an NNCF-compressed model is reused as its input. - `pip install .` path of installing NNCF from a checked-out repository is now supported. - Nested `with no_nncf_trace()` blocks now function as expected. 
@@ -263,15 +347,16 @@ Notable bugfixes: - Now possible to load AutoQ and HAWQ-produced checkpoints to evaluate them or export to ONNX Removed features: + - Pattern-based quantizer setup mode for quantization algorithm - due to its logic, it did not guarantee that all required operation inputs are ultimately quantized. +## New in Release 1.6 -## New in Release 1.6: - Added AutoQ - an AutoML-based mixed-precision initialization mode for quantization, which utilizes the power of reinforcement learning to select the best quantizer configuration for any model in terms of quality metric for a given HW architecture type. - NNCF now supports inserting compression operations as pre-hooks to PyTorch operations, instead of abusing the post-hooking; the flexibility of quantization setups has been improved as a result of this change. - Improved the pruning algorithm to group together dependent filters from different layers in the network and prune these together - Extended the ONNX compressed model exporting interface with an option to explicitly name input and output tensors -- Changed the compression scheduler so that the correspondingepoch_step and step methods should now be called in the beginning of the epoch and before the optimizer step (previously these were called in the end of the epoch and after the optimizer step respectively) +- Changed the compression scheduler so that the corresponding epoch_step and step methods should now be called in the beginning of the epoch and before the optimizer step (previously these were called in the end of the epoch and after the optimizer step respectively) - Data-dependent compression algorithm initialization is now specified in terms of dataset samples instead of training batches, e.g. `"num_init_samples"` should be used in place of "num_init_steps" in NNCF config files. - Custom user modules to be registered for compression can now be specified to be ignored for certain compression algorithms - Batch norm adaptation now being applied by default for all compression algorithms @@ -281,12 +366,12 @@ Removed features: - Added an option to optimize logarithms of quantizer scales instead of scales themselves directly, a technique which improves convergence in certain cases - Added reference checkpoints for filter-pruned models: UNet@Mapillary (25% of filters pruned), SSD300@VOC (40% of filters pruned) +## New in Release 1.5 -## New in Release 1.5: - Switched to using the propagation-based mode for quantizer setup by default. Compared to the previous default, pattern-based mode, the propagation-based mode better ensures that all the inputs to operations that can be quantized on a given type of hardware are quantized in accordance with what this hardware allows. Default target hardware is CPU - adjustable via `"target_device"` option in the NNCF config. More details can be found in [Quantization.md](./docs/compression_algorithms/Quantization.md#quantizer-setup-and-hardware-config-files). -- HAWQ mixed-precision initialization now supports a compression ratio parameter setting - set to 1 for a fully INT8 model, > 1 to increasingly allow lower bitwidth. The level of compression for each layer is defined by a product of the layer FLOPS and the quantization bitwidth. -- HAWQ mixed-precision initialization allows specifying a more generic `criterion_fn` callable to calculate the related loss in case of complex output's post-processing or multiple losses. -- Improved algorithm of assigning bitwidth for activation quantizers in HAWQ mixed-precision initialization. 
If after taking into account the corresponding rules of hardware config there're +- HAWQ mixed-precision initialization now supports a compression ratio parameter setting - set to 1 for a fully INT8 model, > 1 to increasingly allow lower bitwidth. The level of compression for each layer is defined by a product of the layer FLOPS and the quantization bitwidth. +- HAWQ mixed-precision initialization allows specifying a more generic `criterion_fn` callable to calculate the related loss in case of complex output's post-processing or multiple losses. +- Improved algorithm of assigning bitwidth for activation quantizers in HAWQ mixed-precision initialization. If after taking into account the corresponding rules of hardware config there're multiple options for choosing bitwidth, it chooses a common bitwidth for all adjacent weight quantizers. Adjacent quantizers refer to all quantizers between inputs-quantizable layers. - Custom user modules can be registered to have their `weight` attribute considered for compression using the @nncf.register_module - Possible to perform quantizer linking in various points in graph - such quantizers will share the quantization parameters, trainable and non-trainable @@ -300,7 +385,8 @@ Removed features: - GPT2 compression enabled, configuration file added to the `transformers` integration patch - Added GoogLeNet as a filter-pruned sample model (with final checkpoints) -## New in Release 1.4: +## New in Release 1.4 + - Models with filter pruning applied are now exportable to ONNX - BatchNorm adaptation now available as a common compression algorithm initialization step - currently disabled by default, see `"batchnorm_adaptation"` config parameters in compression algorithm documentation (e.g. [Quantizer.md](docs/compression_algorithms/Quantization.md)) for instructions on how to enable it in NNCF config - Major performance improvements for per-channel quantization training - now performs almost as fast as the per-tensor quantization training @@ -313,11 +399,13 @@ Removed features: - Added an example config and model checkpoint for the ResNet50 INT8 + 50% sparsity (RB) ## New in Release 1.3.1 + - Now using PyTorch 1.5 and CUDA 10.2 by default - Support for exporting quantized models to ONNX checkpoints with standard ONNX v10 QuantizeLinear/DequantizeLinear pairs (8-bit quantization only) - Compression algorithm initialization moved to the compressed model creation stage -## New in Release 1.3: +## New in Release 1.3 + - Filter pruning algorithm added - Mixed-precision quantization with manual and automatic (HAWQ-powered) precision setup - Support for DistilBERT @@ -329,7 +417,8 @@ Removed features: - Docker images supplied for easier setup in container-based environments - Usability improvements (NNCF config .JSON file validation by schema, less boilerplate code, separate logging and others) -## New in Release 1.2: +## New in Release 1.2 + - Support for transformer-based networks quantization (tested on BERT and RoBERTa) - Added instructions and Git patches for integrating NNCF into third-party repositories ([mmdetection](https://github.com/open-mmlab/mmdetection), [transformers](https://github.com/huggingface/transformers)) - Support for GNMT quantization @@ -350,9 +439,9 @@ Removed features: - Support of symmetric quantization and two sparsity algorithms with fine-tuning - Automatic model graph transformation. The model is wrapped by the custom class and additional layers are inserted in the graph. The transformations are configurable. 
- Three training samples which demonstrate usage of compression methods from the NNCF: - - Image Classification: torchvision models for classification and custom models on ImageNet and CIFAR10/100 datasets. - - Object Detection: SSD300, SSD512, MobileNet SSD on Pascal VOC2007, Pascal VOC2012, and COCO datasets. - - Semantic Segmentation: UNet, ICNet on CamVid and Mapillary Vistas datasets. + - Image Classification: torchvision models for classification and custom models on ImageNet and CIFAR10/100 datasets. + - Object Detection: SSD300, SSD512, MobileNet SSD on Pascal VOC2007, Pascal VOC2012, and COCO datasets. + - Semantic Segmentation: UNet, ICNet on CamVid and Mapillary Vistas datasets. - Unified interface for compression methods. - GPU-accelerated *Quantization* layer for fast model fine-tuning. - Distributed training support in all samples. diff --git a/Security.md b/Security.md index c3e8bdada43..6143e2263ee 100644 --- a/Security.md +++ b/Security.md @@ -1,6 +1,7 @@ # Security Policy -Intel is committed to rapidly addressing security vulnerabilities affecting our customers and providing clear guidance on the solution, impact, severity and mitigation. + +Intel is committed to rapidly addressing security vulnerabilities affecting our customers and providing clear guidance on the solution, impact, severity and mitigation. ## Reporting a Vulnerability -Please report any security vulnerabilities in this project [utilizing the guidelines here](https://www.intel.com/content/www/us/en/security-center/vulnerability-handling-guidelines.html). +Please report any security vulnerabilities in this project [utilizing the guidelines here](https://www.intel.com/content/www/us/en/security-center/vulnerability-handling-guidelines.html). diff --git a/codecov.yml b/codecov.yml new file mode 100644 index 00000000000..708ea9a7d55 --- /dev/null +++ b/codecov.yml @@ -0,0 +1,27 @@ +ignore: + - "examples" + - "tests" + - "tools" + +#codecov: +# notify: +# after_n_builds: 4 +# wait_for_ci: yes + +coverage: + status: + project: + default: + branches: + - develop + informational: true + only_pulls: true + paths: + - "nncf" +comment: + layout: "diff, flags, files" + require_changes: false # remove me + # require_changes: true + + require_head: false + require_base: false diff --git a/docker/README.md b/docker/README.md index 7d8ba1843c2..7a1f4d37465 100644 --- a/docker/README.md +++ b/docker/README.md @@ -1,3 +1,5 @@ +# Using docker + ## Step 1. Install docker Review the instructions for installation docker [here](https://docs.docker.com/engine/install/ubuntu/) and configure Docker @@ -12,15 +14,18 @@ Review the instructions for installation docker [here](https://github.com/NVIDIA ## Step 3. Build image In the project folder run in terminal: -``` + +```bash sudo docker image build --network=host ``` Use `--network` to duplicate the network settings of your localhost into context build. ## Step 4. Run container + Run in terminal: -``` + +```bash sudo docker run \ -it \ --name= \ diff --git a/docs/Algorithms.md b/docs/Algorithms.md index 2c08e6826d8..94e5ea80fbd 100644 --- a/docs/Algorithms.md +++ b/docs/Algorithms.md @@ -1,4 +1,4 @@ -## Implemented Compression Methods +# Implemented Compression Methods Each compression method receives its own hyperparameters that are organized as a dictionary and basically stored in a JSON file that is deserialized when the training starts. Compression methods can be applied separately or together producing sparse, quantized, or both sparse and quantized models. 
For more information about the configuration, refer to the samples. @@ -18,4 +18,4 @@ Each compression method receives its own hyperparameters that are organized as a - [Sparsity](./compression_algorithms/Sparsity.md) - Magnitude sparsity - Regularization-based (RB) sparsity -- [Filter pruning](./compression_algorithms/Pruning.md) \ No newline at end of file +- [Filter pruning](./compression_algorithms/Pruning.md) diff --git a/docs/ConfigFile.md b/docs/ConfigFile.md index 86cc2a3f39b..ed131bf495b 100644 --- a/docs/ConfigFile.md +++ b/docs/ConfigFile.md @@ -1,13 +1,13 @@ # NNCF Configuration File Description -The Neural Network Compression Framework (NNCF) is designed to work with the configuration file where the parameters of compression that should be applied to the model are specified. -These parameters are organized as a dictionary and stored in a JSON file that is deserialized when the training starts. +The Neural Network Compression Framework (NNCF) is designed to work with the configuration file where the parameters of compression that should be applied to the model are specified. +These parameters are organized as a dictionary and stored in a JSON file that is deserialized when the training starts. The JSON file allows using comments that are supported by the [jstyleson](https://github.com/linjackson78/jstyleson) Python package. The NNCF config .json file is validated against a JSON schema - you can review the latest version of the schema at https://openvinotoolkit.github.io/nncf/. Below is an example of the NNCF configuration file: -``` +```json5 { "input_info": [ // Required - describe the specifics of your model inputs here. This information is used to build the internal graph representation that is leveraged for proper compression functioning, and for exporting the compressed model to ONNX. Inputs in the array without a "keyword" attribute are described in the order of the model's "forward" function argument order. { @@ -63,7 +63,6 @@ Below is an example of the NNCF configuration file: } ``` - The "compression" section is the core of the configuration file. It defines the specific compression algorithms that are to be applied to the model. You can specify either a single compression algorithm to be applied to the model, or multiple compression algorithms to be applied at once. @@ -74,8 +73,6 @@ To specify multiple compression algorithm at once, the "compression" section sho **IMPORTANT:** The `"ignored_scopes"` and `"target_scopes"` sections use a special string format (see "Operation addressing and scope" in [NNCFArchitecture.md](./NNCFArchitecture.md)) to specify the parts of the model that the compression should be applied to. For all such section, regular expression matching can be enabled by prefixing the string with `{re}`, which helps to specify the same compression pattern concisely for networks with multiple same-structured blocks such as ResNet or BERT. - - The [example scripts](../examples) use the same configuration file structure to specify compression, but extend it at the root level to specify the training pipeline hyperparameters as well. These extensions are training pipeline-specific rather than NNCF-specific and their format differs across the example scripts. diff --git a/docs/FAQ.md b/docs/FAQ.md index 24efda7cef8..39d8a6e0bc3 100644 --- a/docs/FAQ.md +++ b/docs/FAQ.md @@ -1,15 +1,16 @@ # Frequently Asked Questions Links to sections: + - [Common](#common) - [PyTorch](#pytorch) - [TensorFlow](#tensorflow) - [ONNX](#onnx) - ## Common ### What is NNCF for? 
+ NNCF takes a deep learning network model object and modifies it for faster inference. Within NNCF, the process of modification is colloquially known as compression. @@ -17,44 +18,51 @@ Sometimes this is not possible to do without the loss of accuracy for the networ NNCF provides algorithms that strive for minimal or zero loss of accuracy, which can be applied, depending on the algorithm, during training, fine-tuning or post-training. ### Does the Neural Network *Compression* Framework provide *lossless compression*? -Not in the way the term "lossless compression" usually appears in literature. -Under "compression" we mean the preparation of the model for *future* efficient execution of this model in the OpenVINO Inference Engine. + +Not in the way the term "lossless compression" usually appears in literature. +Under "compression" we mean the preparation of the model for *future* efficient execution of this model in the OpenVINO Inference Engine. Under "future" we mean that the process of compression is usually an offline, one-time step before the model is being used in production, which provides a new model object that could then be used instead of the original to run faster and take up lower memory without significantly losing accuracy. No *compression* in the sense of archiving or entropy coding is being done during NNCF compression. ### How does your compression make inference faster? + General, well-known, literature-backed techniques of neural network inference acceleration (such as quantization, filter pruning and knowledge distillation) are applied, with Intel HW/runtime specifics in mind. An overview of some of those can be found in the [following paper](https://arxiv.org/abs/2002.08679). - ### Can I use NNCF-compressed models with runtimes other than OpenVINO Inference Engine? + While this is certainly possible in some cases, with a beneficial outcome even, we recommend NNCF as a way to get the most out of your setup based on OpenVINO Inference Engine inference. We aim for best results on OpenVINO runtime with Intel hardware, and development-wise this is not always easy to generalize to other platforms or runtimes. Some backends such as onnxruntime also support using OpenVINO Inference Engine as the actual executor for the inference, so NNCF-compressed models will also work there. ### Do I need OpenVINO or an Intel CPU to run NNCF? + Currently, this is not required in general. Most NNCF backends can run compression and produce a compressed model object without OpenVINO or an Intel CPU on board of the machine. You only need OpenVINO and Intel hardware when you actually need to run inference on the compressed model, e.g. in a production scenario. ### Do I need a GPU to run NNCF? + Currently all NNCF-supported backends allow running in a CPU-only mode, and NNCF does not disturb this. Note, however, that training-aware compression will naturally work much slower on most CPUs when compared with GPU-powered execution. Check out the [notebooks](https://github.com/openvinotoolkit/openvino_notebooks#-model-training) for examples of NNCF being applied on smaller datasets which work in a reasonable amount of time on a CPU-only setup. ### NNCF supports both training and post-training compression approaches, how do I know which I need? + The rule of thumb is - start with post-training compression, and use training compression if you are not satisfied with the results and if training compression is possible for your use case. 
Post-training is faster, but can degrade accuracy more than the training-enabled approach. ### I don't see any improvements after applying the `*_sparsity` algorithms + The sparsity algorithms introduce unstructured sparsity which can only be taken advantage of in terms of performance by using specialized hardware and/or software runtimes. Within the scope of these algorithms, NNCF provides functionally correct models with non-salient weights simply zeroed out, which does not lead to the reduction of the model checkpoint size. The models can, however, be used for benchmarking experimental/future hardware or runtimes, and for SOTA claims of applying unstructured sparsity on a given model architecture. For an opportunity to observably increase performance by omitting unnecessary computations in the model, consider using the [filter pruning](./compression_algorithms/Pruning.md) algorithm. Models compressed with this algorithm can be executed more efficiently within OpenVINO Inference Engine runtime when compared to the uncompressed counterparts. ### What is a "saturation issue" and how to avoid it? -On older generations of Intel CPUs (those not supporting [AVX-VNNI](https://en.wikipedia.org/wiki/Advanced_Vector_Extensions#AVX-VNNI)) convolutions and linear layer INT8 execution is implemented in OpenVINO's Inference Engine in such a way that mathematical overflow manifests itself _if more than 128 levels are used in the quantized domain_ (out of possible 2^8 = 256) for the weights of the corresponding operations. + +On older generations of Intel CPUs (those not supporting [AVX-VNNI](https://en.wikipedia.org/wiki/Advanced_Vector_Extensions#AVX-VNNI)) convolutions and linear layer INT8 execution is implemented in OpenVINO's Inference Engine in such a way that mathematical overflow manifests itself *if more than 128 levels are used in the quantized domain* (out of possible 2^8 = 256) for the weights of the corresponding operations. This is referred to as "saturation issue" within NNCF. On newer AVX-VNNI enabled Intel CPUs the Inference Engine uses a better set of instructions that do not exhibit this flaw. @@ -65,6 +73,7 @@ You can influence this behaviour by setting the `"overflow_fix"` parameter in th See documentation for this parameter in the [NNCF configuration file JSON schema reference](https://openvinotoolkit.github.io/nncf/#compression_oneOf_i0_oneOf_i0_overflow_fix). ### How can I exclude certain layers from compression? + Utilize the "ignored_scopes" parameter, either using an [NNCF config file](./ConfigFile.md) or by passing these as a function parameter if you are using NNCF purely by its Python API. Within this parameter you can set up one or multiple identifiers of layers in your model (regex is possible) and these will be correspondingly ignored while applying the algorithms. This can be done either globally or on a per-algorithm basis. @@ -74,56 +83,64 @@ For better understanding of how NNCF sees the layers in your network so that you These files are dumped in the NNCF's log directory at each invocation of model compression. ### Why do I need to pass a dataloader to certain NNCF algorithms? + These algorithms have to run a forward pass on the model to be compressed in order to properly initialize the compressed state of the model and/or to gather activation statistics that are indispensable for this algorithm. It is recommended, although by no means mandatory, to pass a dataloader with the same dataset that the initial model was trained on.
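As a minimal sketch tying the last two answers together, the snippet below feeds a calibration dataloader to post-training quantization and excludes some layers from it via an ignored scope. The MobileNetV2 model, the synthetic dataloader, the transform function and the `.*classifier.*` pattern are illustrative placeholders only, not something prescribed by the documentation above:

```python
import nncf
import torch
import torchvision

# stand-ins for your own model and validation dataloader
model = torchvision.models.mobilenet_v2(weights=None)
val_dataloader = [(torch.randn(1, 3, 224, 224), 0) for _ in range(10)]  # any iterable works

def transform_fn(data_item):
    images, _ = data_item  # keep only the inputs; labels are not needed for calibration
    return images

calibration_dataset = nncf.Dataset(val_dataloader, transform_fn)

quantized_model = nncf.quantize(
    model,
    calibration_dataset,
    # analogous to "ignored_scopes" in the JSON config: skip layers by exact name or regex
    ignored_scope=nncf.IgnoredScope(patterns=[".*classifier.*"]),
)
```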
- ### The compression process takes too long, how can I make it faster? -For training approaches the majority of time is taken by the training loop, so any regular methods that improve model convergence should work here. + +For training approaches the majority of time is taken by the training loop, so any regular methods that improve model convergence should work here. Try the built-in [knowledge distillation](./compression_algorithms/KnowledgeDistillation.md) to potentially obtain target accuracy faster. Alternatively you may want to reduce the number of initialization samples taken from the initialization dataloader by the algorithms that require it. +### I get a "CUDA out of memory" error when running NNCF in the compression-aware training approach, although the original model to be compressed runs and trains fine without NNCF -### I get a "CUDA out of memory" error when running NNCF in the compression-aware training approach, although the original model to be compressed runs and trains fine without NNCF. As some of the compression algorithm parameters are also trainable, NNCF-compressed model objects ready for training will have a larger GPU memory footprint than the uncompressed counterparts. Try reducing batch size for the NNCF training runs if it makes sense to do so in your situation. - - ## PyTorch + ### Importing anything from `nncf.torch` hangs + NNCF utilizes the [torch C++ extensions](https://pytorch.org/tutorials/advanced/cpp_extension.html) mechanism to accelerate the quantization-aware training process. This is done by just-in-time compiling a set of C++/CUDA files using the system-local compilers and toolsets. The compilation happens at the first import of `nncf.torch` or anything under that namespace on the machine, or within the current Python environment. The result is a set of the `.so` files containing the compiled extensions binary code stored in a system-specific location (commonly `~/.cache/torch_extensions`, or alternatively wherever `TORCH_EXTENSIONS_DIR` environment variable points to). -To avoid race conditions, PyTorch uses `.lock` files in this folder during compilation. -Sometimes, when the compilation process is abnormally aborted, these `.lock` files remain in the filesystem, which leads to a hang the next time you import anything from `nncf.torch`, because the just-in-time compilation process will wait indefinitely until the `.lock` files have been cleared. +To avoid race conditions, PyTorch uses `lock` files in this folder during compilation. +Sometimes, when the compilation process is abnormally aborted, these `lock` files remain in the filesystem, which leads to a hang the next time you import anything from `nncf.torch`, because the just-in-time compilation process will wait indefinitely until the `lock` files have been cleared. To resolve these, delete the `torch_extensions` directory (at `~/.cache`, or pointed to by `TORCH_EXTENSIONS_DIR`, or at your specific location), and re-run the script that imports from `nncf.torch`. The compilation takes some time and happens upon import, so do not interrupt the launch of your Python script until the import has been completed. ### Importing anything from `nncf.torch` leads to an error mentioning `gcc`, `nvcc`, `ninja`, or `cl.exe` + See the answer above for the general description of the reasons why these are involved in NNCF PyTorch operation. To resolve, make sure that your CUDA installation contains the development tools (e.g. 
the `nvcc` compiler), and that the environmental variables are set properly so that these tools are available in `PATH` or `PYTHONPATH`. ### My model trains and runs slower in PyTorch when compressed by NNCF + NNCF does not in general accelerate training or inference when the compressed model is run in PyTorch. It only prepares the model for further inference with OpenVINO's Inference Engine, where the runtime has capabilities of processing the NNCF-compressed models so that they run faster than their uncompressed counterparts. -The process of compressing in PyTorch relies on hooking regular PyTorch functions and calling extra code for purposes of compression algorithm logic, so the NNCF-processed models will inevitably run slower in PyTorch. Export your model after processing with NNCF to an OpenVINO-ingestable format (e.g. ONNX) and run it with the OpenVINO Inference Engine, to enjoy speedups when compared to the uncompressed model inference with Inference Engine. +The process of compressing in PyTorch relies on hooking regular PyTorch functions and calling extra code for purposes of compression algorithm logic, so the NNCF-processed models will inevitably run slower in PyTorch. Export your model after processing with NNCF to an OpenVINO-ingestible format (e.g. ONNX) and run it with the OpenVINO Inference Engine, to enjoy speedups when compared to the uncompressed model inference with Inference Engine. ### The .pth checkpoints for the compressed model have larger size and parameter count when compared to the uncompressed model + See the answer to the above question. Additional parameters are part of the compression algorithm internal state being saved along with the regular model weights, and any model size footprint reduction is deferred until exporting and/or running the model with OpenVINO Inference Engine. ### My RNN model is not compressed completely or fails at the compression stage + Currently NNCF PyTorch can only properly handle models with acyclic execution graphs. RNNs, which inherently have cycles, can behave oddly when processed with NNCF PyTorch, which includes loss of quality, unreproducible results and failure to compress. -### I get a `Could not deduce the forward arguments from the initializing dataloader output.` runtime error when executing `create_compressed_model`. + +### I get a `Could not deduce the forward arguments from the initializing dataloader output.` runtime error when executing `create_compressed_model` + Dataloaders can return anything, and this output may be preprocessed in the rest of the training pipeline before actually ending up in model's `forward` method. NNCF needs a dataloader already at the compressed model creation stage, e.g. before training, and doesn't in general know about the further preprocessing (turning the output of `v8_dataloader` into actual `forward` args and kwargs. 
You have to give NNCF this information by wrapping your dataloader object in an own subclass of a `nncf.torch.initialization.PTInitializingDataLoader` object that properly defines the `get_inputs` and `get_target` abstract methods: + ```python from nncf.torch.initialization import PTInitializingDataLoader @@ -131,13 +148,13 @@ class MyInitializingDataLoader(PTInitializingDataLoader): def get_inputs(self, dataloader_output: Any) -> Tuple[Tuple, Dict]: # your implementation - `dataloader_output` is what is returned by your dataloader, # and you have to turn it into a (args, kwargs) tuple that is required by your model - # in this function, for instance, if your dataloader returns dictionaries where + # in this function, for instance, if your dataloader returns dictionaries where # the input image is under key `"img"`, and your YOLOv8 model accepts the input # images as 0-th `forward` positional arg, you would do: return dataloader_output["img"], {} def get_target(self, dataloader_output: Any) -> Any: - # and in this function you should extract the "ground truth" value from your + # and in this function you should extract the "ground truth" value from your # dataloader, so, for instance, if your dataloader output is a dictionary where # ground truth images are under a "gt" key, then here you would write: return dataloader_output["gt"] @@ -155,4 +172,3 @@ nncf_config = register_default_init_args(nncf_config, init_dataloader) ## ONNX *To be filled* - diff --git a/docs/Installation.md b/docs/Installation.md index 5c4c2bb619a..53dc91e80ca 100644 --- a/docs/Installation.md +++ b/docs/Installation.md @@ -1,53 +1,63 @@ -## Installation +# Installation + We suggest to install or use the package in the [Python virtual environment](https://docs.python.org/3/tutorial/venv.html). If you want to optimize a model from PyTorch, install PyTorch by following [PyTorch installation guide](https://pytorch.org/get-started/locally/#start-locally). For other backend follow: [TensorFlow installation guide](https://www.tensorflow.org/install/), [ONNX installation guide](https://onnxruntime.ai/docs/install/), [OpenVINO installation guide](https://docs.openvino.ai/latest/openvino_docs_install_guides_overview.html). -#### As a PyPI package: +## As a PyPI package NNCF can be installed as a regular PyPI package via pip: -``` + +```bash pip install nncf ``` + If you want to install both NNCF and the supported PyTorch version in one line, you can do this by simply running: -``` + +```bash pip install nncf[torch] ``` -Other viable options besides `[torch]` are `[tf]`, `[onnx]` and `[openvino]`. +Other viable options besides `[torch]` are `[tf]`, `[onnx]` and `[openvino]`. -#### As a package built from a checked-out repository: +## As a package built from a checked-out repository Install the package and its dependencies by running the following command in the repository root directory: -``` + +```bash pip install . ``` Use the same `pip install` syntax as above to install NNCF along with the backend package version in one go: -``` + +```bash pip install .[] ``` + List of supported backends: `torch`, `tf`, `onnx` and `openvino`. For development purposes install extra packages by -``` + +```bash pip install .[dev,tests] ``` - _NB_: For launching example scripts in this repository, we recommend setting the `PYTHONPATH` variable to the root of the checked-out repository once the installation is completed. 
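On a Linux shell, setting `PYTHONPATH` as recommended could look like the following (illustrative; adapt the command to your own shell):

```bash
# run from the root of the checked-out NNCF repository
export PYTHONPATH="$(pwd):${PYTHONPATH}"
```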
- NNCF is also available via [conda](https://anaconda.org/conda-forge/nncf): -``` + +```bash conda install -c conda-forge nncf ``` -#### From a specific commit hash using pip: +## From a specific commit hash using pip + ```bash pip install git+https://github.com/openvinotoolkit/nncf@bd189e2#egg=nncf ``` + Note that in order for this to work for pip versions >= 21.3, your Git version must be at least 2.22. -#### As a Docker image -Use one of the Dockerfiles in the [docker](./docker) directory to build an image with an environment already set up and ready for running NNCF [sample scripts](#model-compression-samples). +## As a Docker image + +Use one of the Dockerfiles in the [docker](../docker) directory to build an image with an environment already set up and ready for running NNCF [sample scripts](../README.md#model-compression-samples). diff --git a/docs/ModelZoo.md b/docs/ModelZoo.md new file mode 100644 index 00000000000..b17cabb54cc --- /dev/null +++ b/docs/ModelZoo.md @@ -0,0 +1,943 @@ +# NNCF Compressed Model Zoo + +Here we present the results achieved using our sample scripts, example patches to third-party repositories and NNCF configuration files. + +The applied quantization compression algorithms are divided into two broad categories: Quantization-Aware Training ([QAT](../README.md#training-time-compression)) and Post-Training Quantization ([PTQ](../README.md#post-training-quantization)). Here we mainly report the QAT results, while the PTQ results can be found on the OpenVINO Performance Benchmarks [page](https://docs.openvino.ai/latest/openvino_docs_performance_benchmarks.html). + +- [PyTorch](#pytorch) + - [Classification](#pytorch-classification) + - [Object Detection](#pytorch-object-detection) + - [Semantic Segmentation](#pytorch-semantic-segmentation) + - [Natural Language Processing (3rd-party training pipelines)](#pytorch-nlp-huggingface-transformers-powered-models) +- [TensorFlow](#tensorflow) + - [Classification](#tensorflow-classification) + - [Object Detection](#tensorflow-object-detection) + - [Instance Segmentation](#tensorflow-instance-segmentation) +- [ONNX](#onnx) + +## PyTorch + +### PyTorch Classification +
+| Model | Compression algorithm | Dataset | Accuracy (drop) % | Configuration | Checkpoint |
+|---|---|---|---|---|---|
+| GoogLeNet | - | ImageNet | 69.77 | Config | - |
+| GoogLeNet | • Filter pruning: 40%, geometric median criterion | ImageNet | 69.47 (0.30) | Config | Download |
+| Inception V3 | - | ImageNet | 77.33 | Config | - |
+| Inception V3 | • QAT: INT8 | ImageNet | 77.45 (-0.12) | Config | Download |
+| Inception V3 | • QAT: INT8<br>• Sparsity: 61% (RB) | ImageNet | 76.36 (0.97) | Config | Download |
+| MobileNet V2 | - | ImageNet | 71.87 | Config | - |
+| MobileNet V2 | • QAT: INT8 | ImageNet | 71.07 (0.80) | Config | Download |
+| MobileNet V2 | • QAT: INT8 (per-tensor only) | ImageNet | 71.24 (0.63) | Config | Download |
+| MobileNet V2 | • QAT: Mixed, 58.88% INT8 / 41.12% INT4 | ImageNet | 70.95 (0.92) | Config | Download |
+| MobileNet V2 | • QAT: INT8<br>• Sparsity: 52% (RB) | ImageNet | 71.09 (0.78) | Config | Download |
+| MobileNet V3 (Small) | - | ImageNet | 67.66 | Config | - |
+| MobileNet V3 (Small) | • QAT: INT8 | ImageNet | 66.98 (0.68) | Config | Download |
+| ResNet-18 | - | ImageNet | 69.76 | Config | - |
+| ResNet-18 | • Binarization: XNOR (weights), scale/threshold (activations) | ImageNet | 61.67 (8.09) | Config | Download |
+| ResNet-18 | • Binarization: DoReFa (weights), scale/threshold (activations) | ImageNet | 61.63 (8.13) | Config | Download |
+| ResNet-18 | • Filter pruning: 40%, magnitude criterion | ImageNet | 69.27 (0.49) | Config | Download |
+| ResNet-18 | • Filter pruning: 40%, geometric median criterion | ImageNet | 69.31 (0.45) | Config | Download |
+| ResNet-18 | • Accuracy-aware compressed training<br>• Filter pruning: 60%, geometric median criterion | ImageNet | 69.2 (-0.6) | Config | - |
+| ResNet-34 | - | ImageNet | 73.30 | Config | - |
+| ResNet-34 | • Filter pruning: 50%, geometric median criterion<br>• Knowledge distillation | ImageNet | 73.11 (0.19) | Config | Download |
+| ResNet-50 | - | ImageNet | 76.15 | Config | - |
+| ResNet-50 | • QAT: INT8 | ImageNet | 76.46 (-0.31) | Config | Download |
+| ResNet-50 | • QAT: INT8 (per-tensor only) | ImageNet | 76.39 (-0.24) | Config | Download |
+| ResNet-50 | • QAT: Mixed, 43.12% INT8 / 56.88% INT4 | ImageNet | 76.05 (0.10) | Config | Download |
+| ResNet-50 | • QAT: INT8<br>• Sparsity: 61% (RB) | ImageNet | 75.42 (0.73) | Config | Download |
+| ResNet-50 | • QAT: INT8<br>• Sparsity: 50% (RB) | ImageNet | 75.50 (0.65) | Config | Download |
+| ResNet-50 | • Filter pruning: 40%, geometric median criterion | ImageNet | 75.57 (0.58) | Config | Download |
+| ResNet-50 | • Accuracy-aware compressed training<br>• Filter pruning: 52.5%, geometric median criterion | ImageNet | 75.23 (0.93) | Config | - |
+| SqueezeNet V1.1 | - | ImageNet | 58.19 | Config | - |
+| SqueezeNet V1.1 | • QAT: INT8 | ImageNet | 58.22 (-0.03) | Config | Download |
+| SqueezeNet V1.1 | • QAT: INT8 (per-tensor only) | ImageNet | 58.11 (0.08) | Config | Download |
+| SqueezeNet V1.1 | • QAT: Mixed, 52.83% INT8 / 47.17% INT4 | ImageNet | 57.57 (0.62) | Config | Download |
+
+### PyTorch Object Detection
+
+| Model | Compression algorithm | Dataset | mAP (drop) % | Configuration | Checkpoint |
+|---|---|---|---|---|---|
+| SSD300‑MobileNet | - | VOC12+07 train, VOC07 eval | 62.23 | Config | Download |
+| SSD300‑MobileNet | • QAT: INT8<br>• Sparsity: 70% (Magnitude) | VOC12+07 train, VOC07 eval | 62.95 (-0.72) | Config | Download |
+| SSD300‑VGG‑BN | - | VOC12+07 train, VOC07 eval | 78.28 | Config | Download |
+| SSD300‑VGG‑BN | • QAT: INT8 | VOC12+07 train, VOC07 eval | 77.81 (0.47) | Config | Download |
+| SSD300‑VGG‑BN | • QAT: INT8<br>• Sparsity: 70% (Magnitude) | VOC12+07 train, VOC07 eval | 77.66 (0.62) | Config | Download |
+| SSD300‑VGG‑BN | • Filter pruning: 40%, geometric median criterion | VOC12+07 train, VOC07 eval | 78.35 (-0.07) | Config | Download |
+| SSD512-VGG‑BN | - | VOC12+07 train, VOC07 eval | 80.26 | Config | Download |
+| SSD512-VGG‑BN | • QAT: INT8 | VOC12+07 train, VOC07 eval | 80.04 (0.22) | Config | Download |
+| SSD512-VGG‑BN | • QAT: INT8<br>• Sparsity: 70% (Magnitude) | VOC12+07 train, VOC07 eval | 79.68 (0.58) | Config | Download |
+
+### PyTorch Semantic Segmentation
+
+| Model | Compression algorithm | Dataset | mIoU (drop) % | Configuration | Checkpoint |
+|---|---|---|---|---|---|
+| ICNet | - | CamVid | 67.89 | Config | Download |
+| ICNet | • QAT: INT8 | CamVid | 67.89 (0.00) | Config | Download |
+| ICNet | • QAT: INT8<br>• Sparsity: 60% (Magnitude) | CamVid | 67.16 (0.73) | Config | Download |
+| UNet | - | CamVid | 71.95 | Config | Download |
+| UNet | • QAT: INT8 | CamVid | 71.89 (0.06) | Config | Download |
+| UNet | • QAT: INT8<br>• Sparsity: 60% (Magnitude) | CamVid | 72.46 (-0.51) | Config | Download |
+| UNet | - | Mapillary | 56.24 | Config | Download |
+| UNet | • QAT: INT8 | Mapillary | 56.09 (0.15) | Config | Download |
+| UNet | • QAT: INT8<br>• Sparsity: 60% (Magnitude) | Mapillary | 55.69 (0.55) | Config | Download |
+| UNet | • Filter pruning: 25%, geometric median criterion | Mapillary | 55.64 (0.60) | Config | Download |
+
+### PyTorch NLP (HuggingFace Transformers-powered models)
+
+| PyTorch Model | Compression algorithm | Dataset | Accuracy (drop) % |
+|---|---|---|---|
+| BERT-base-cased | • QAT: INT8 | CoNLL2003 | 99.18 (-0.01) |
+| BERT-base-cased | • QAT: INT8 | MRPC | 84.8 (-0.24) |
+| BERT-base-chinese | • QAT: INT8 | XNLI | 77.22 (0.46) |
+| BERT-large (Whole Word Masking) | • QAT: INT8 | SQuAD v1.1 | F1: 92.68 (0.53) |
+| DistilBERT-base | • QAT: INT8 | SST-2 | 90.3 (0.8) |
+| GPT-2 | • QAT: INT8 | WikiText-2 (raw) | perplexity: 20.9 (-1.17) |
+| MobileBERT | • QAT: INT8 | SQuAD v1.1 | F1: 89.4 (0.58) |
+| RoBERTa-large | • QAT: INT8 | MNLI | matched: 89.25 (1.35) |
+
+## TensorFlow
+
+### TensorFlow Classification
+
+| Model | Compression algorithm | Dataset | Accuracy (drop) % | Configuration | Checkpoint |
+|---|---|---|---|---|---|
+| Inception V3 | - | ImageNet | 77.91 | Config | - |
+| Inception V3 | • QAT: INT8 (per-tensor symmetric for weights, per-tensor asymmetric half-range for activations) | ImageNet | 78.39 (-0.48) | Config | Download |
+| Inception V3 | • QAT: INT8 (per-tensor symmetric for weights, per-tensor asymmetric half-range for activations)<br>• Sparsity: 61% (RB) | ImageNet | 77.52 (0.39) | Config | Download |
+| Inception V3 | • Sparsity: 54% (Magnitude) | ImageNet | 77.86 (0.05) | Config | Download |
+| MobileNet V2 | - | ImageNet | 71.85 | Config | - |
+| MobileNet V2 | • QAT: INT8 (per-tensor symmetric for weights, per-tensor asymmetric half-range for activations) | ImageNet | 71.63 (0.22) | Config | Download |
+| MobileNet V2 | • QAT: INT8 (per-tensor symmetric for weights, per-tensor asymmetric half-range for activations)<br>• Sparsity: 52% (RB) | ImageNet | 70.94 (0.91) | Config | Download |
+| MobileNet V2 | • Sparsity: 50% (RB) | ImageNet | 71.34 (0.51) | Config | Download |
+| MobileNet V2 (TensorFlow Hub MobileNet V2) | • Sparsity: 35% (Magnitude) | ImageNet | 71.87 (-0.02) | Config | Download |
+| MobileNet V3 (Large) | - | ImageNet | 75.80 | Config | - |
+| MobileNet V3 (Large) | • QAT: INT8 (per-channel symmetric for weights, per-tensor asymmetric half-range for activations) | ImageNet | 75.04 (0.76) | Config | Download |
+| MobileNet V3 (Large) | • QAT: INT8 (per-channel symmetric for weights, per-tensor asymmetric half-range for activations)<br>• Sparsity: 42% (RB) | ImageNet | 75.24 (0.56) | Config | Download |
+| MobileNet V3 (Small) | - | ImageNet | 68.38 | Config | - |
+| MobileNet V3 (Small) | • QAT: INT8 (per-channel symmetric for weights, per-tensor asymmetric half-range for activations) | ImageNet | 67.79 (0.59) | Config | Download |
+| MobileNet V3 (Small) | • QAT: INT8 (per-channel symmetric for weights, per-tensor asymmetric half-range for activations)<br>• Sparsity: 42% (Magnitude) | ImageNet | 67.44 (0.94) | Config | Download |
+| ResNet-50 | - | ImageNet | 75.05 | Config | - |
+| ResNet-50 | • QAT: INT8 | ImageNet | 74.99 (0.06) | Config | Download |
+| ResNet-50 | • QAT: INT8 (per-tensor symmetric for weights, per-tensor asymmetric half-range for activations)<br>• Sparsity: 65% (RB) | ImageNet | 74.36 (0.69) | Config | Download |
+| ResNet-50 | • Sparsity: 80% (RB) | ImageNet | 74.38 (0.67) | Config | Download |
+| ResNet-50 | • Filter pruning: 40%, geometric median criterion | ImageNet | 74.96 (0.09) | Config | Download |
+| ResNet-50 | • QAT: INT8 (per-tensor symmetric for weights, per-tensor asymmetric half-range for activations)<br>• Filter pruning: 40%, geometric median criterion | ImageNet | 75.09 (-0.04) | Config | Download |
+| ResNet50 | • Accuracy-aware compressed training<br>• Sparsity: 65% (Magnitude) | ImageNet | 74.37 (0.67) | Config | - |
+
+### TensorFlow Object Detection
+
+| Model | Compression algorithm | Dataset | mAP (drop) % | Configuration | Checkpoint |
+|---|---|---|---|---|---|
+| RetinaNet | - | COCO 2017 | 33.43 | Config | Download |
+| RetinaNet | • QAT: INT8 (per-tensor symmetric for weights, per-tensor asymmetric half-range for activations) | COCO 2017 | 33.12 (0.31) | Config | Download |
+| RetinaNet | • Sparsity: 50% (Magnitude) | COCO 2017 | 33.10 (0.33) | Config | Download |
+| RetinaNet | • Filter pruning: 40% | COCO 2017 | 32.72 (0.71) | Config | Download |
+| RetinaNet | • QAT: INT8 (per-tensor symmetric for weights, per-tensor asymmetric half-range for activations)<br>• Filter pruning: 40% | COCO 2017 | 32.67 (0.76) | Config | Download |
+| YOLO v4 | - | COCO 2017 | 47.07 | Config | Download |
+| YOLO v4 | • QAT: INT8 (per-channel symmetric for weights, per-tensor asymmetric half-range for activations) | COCO 2017 | 46.20 (0.87) | Config | Download |
+| YOLO v4 | • Sparsity: 50% (Magnitude) | COCO 2017 | 46.49 (0.58) | Config | Download |
+
+### TensorFlow Instance Segmentation
+
+| Model | Compression algorithm | Dataset | mAP (drop) % | Configuration | Checkpoint |
+|---|---|---|---|---|---|
+| Mask‑R‑CNN | - | COCO 2017 | bbox: 37.33<br>segm: 33.56 | Config | Download |
+| Mask‑R‑CNN | • QAT: INT8 (per-tensor symmetric for weights, per-tensor asymmetric half-range for activations) | COCO 2017 | bbox: 37.19 (0.14)<br>segm: 33.54 (0.02) | Config | Download |
+| Mask‑R‑CNN | • Sparsity: 50% (Magnitude) | COCO 2017 | bbox: 36.94 (0.39)<br>segm: 33.23 (0.33) | Config | Download |
+
+## ONNX
+
+### ONNX Classification
+
+| ONNX Model | Compression algorithm | Dataset | Accuracy (drop) % |
+|---|---|---|---|
+| DenseNet-121 | PTQ | ImageNet | 60.16 (0.8) |
+| GoogleNet | PTQ | ImageNet | 66.36 (0.3) |
+| MobileNet V2 | PTQ | ImageNet | 71.38 (0.49) |
+| ResNet-50 | PTQ | ImageNet | 74.63 (0.21) |
+| ShuffleNet | PTQ | ImageNet | 47.25 (0.18) |
+| SqueezeNet V1.0 | PTQ | ImageNet | 54.3 (0.54) |
+| VGG‑16 | PTQ | ImageNet | 72.02 (0.0) |
+
+### ONNX Object Detection
+
+| ONNX Model | Compression algorithm | Dataset | mAP (drop) % |
+|---|---|---|---|
+| SSD1200 | PTQ | COCO2017 | 20.17 (0.17) |
+| Tiny-YOLOv2 | PTQ | VOC12 | 29.03 (0.23) |
diff --git a/docs/NNCFArchitecture.md b/docs/NNCFArchitecture.md index e091d0a6a9a..820f630dff0 100644 --- a/docs/NNCFArchitecture.md +++ b/docs/NNCFArchitecture.md @@ -1,19 +1,22 @@ # NNCF Architectural Overview -### Introduction +## Introduction + Neural Networks Compression Framework is a set of compression algorithms and tools to implement compression algorithms that is designed to work atop PyTorch. In essence, all of the compression algorithms present in NNCF do certain manipulations with the data inside the control flow graph of a DNN - be it the process of quantizing the values of an input tensor for a fully connected layer, or setting certain values of a convolutional layer to zero, etc. A general way to express these manipulations is by using hooks inserted in specific points of the DNN control flow graph. -### NNCFGraph +## NNCFGraph + To abstract away the compression logic from specifics of the backend, NNCF builds an `NNCFGraph` object for each incoming model object to be compressed. -`NNCFGraph` is a wrapper over a regular directed acyclic graph that represents a control flow/execution graph of a DNN. +`NNCFGraph` is a wrapper over a regular directed acyclic graph that represents a control flow/execution graph of a DNN. Each node corresponds to a call of a backend-specific function ("operator"). It is built both for the original, unmodified model, and for the model with compression algorithms applied (which, in general, may have additional operations when compared to the original model). -### PyTorch-specific +## PyTorch-specific + +### NNCFNetwork -#### NNCFNetwork During NNCF compression, the incoming original model object is dynamically extended with NNCF-enabling functionality. This is done by replacing the model object's _class object_ with another class object that lists not only the original class object as its base, but also the `NNCFNetwork` object. The compressed model object can then be identified as passing the `isinstance(obj, NNCFNetwork)` checks, but also the `isinstance(obj, original_class)` checks. @@ -25,19 +28,21 @@ In the model object processed in such a way, the following applies: 3. additional trainable modules and parameters specific to the applied compression algorithms are invisibly stored along with the regular model parameters, so that when saving an instance of `NNCFNetwork` via the usual `torch.save` calls, the trainable parameters of the compression algorithm are saved into the same state dict as the rest of the model parameters. The additional attributes and methods that appear in the original model object are separated from the original attribute/method names - the accesses to the NNCF-specific attributes and methods are done via an intermediate `nncf` property: -```python3 + +```python assert isinstance(model, NNCFNetwork) model.original_method_call() model.nncf.nncf_specific_method() ``` -This allows to avoid name collisions between NNCF-specific attributes and original model attributes. + +This allows to avoid name collisions between NNCF-specific attributes and original model attributes. `model.nncf` returns a `nncf.torch.nncf_network.NNCFNetworkInterface` object - the class contains all of the methods and attributes that could be called on the compressed model object to invoke NNCF-specific functionality. During compression algorithm application, the `NNCFNetwork` serves internally as a receptacle for compression algorithm-related adjustments to the control flow graph of the model. 
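To make the class-replacement behaviour described above concrete, here is a small sketch of what it looks like from user code; the torchvision ResNet-18 and the config path are illustrative placeholders, not something prescribed by this document:

```python
import torch
import torchvision.models as models
from nncf import NNCFConfig, create_compressed_model
from nncf.torch.nncf_network import NNCFNetwork

model = models.resnet18()
nncf_config = NNCFConfig.from_json("nncf_config.json")  # placeholder path
compression_ctrl, compressed_model = create_compressed_model(model, nncf_config)

assert isinstance(compressed_model, NNCFNetwork)    # NNCF-specific functionality is present
assert isinstance(compressed_model, models.ResNet)  # the original class checks still pass
nncf_interface = compressed_model.nncf              # NNCF-specific attributes/methods live here

# compression parameters are stored alongside the regular model weights
torch.save(compressed_model.state_dict(), "checkpoint.pth")
```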
+### Model control flow graph tracing -#### Model control flow graph tracing Unlike other frameworks such as TensorFlow, PyTorch does not have an easily accessible graph representation of a model, and thus no way to identify specific points in the control flow graph. For this reason NNCF performs tracing of the PyTorch operators, implemented via wrapping the corresponding function and module calls. Through this process of tracing, NNCF builds an internal representation of the model graph, which is then supplied as the point of reference for specification and insertion of hooks at proper places in the network. @@ -54,17 +59,15 @@ c) the shape of the input tensors to the current operator, and d) the IDs of the nodes that produced each current operator's input as their output. - This information is stored as an `OperationExecutionContext` of the operator. If an operator call does not match to the nodes already present in the internal graph representation based on its `OperationExecutionContext`, a new node is added to the graph. -This process occurs dynamically during each `forward` call of an `NNCFNetwork`. -If the control flow is data-dependent, a whole new subgraph of the model will be built for each branching in the model definition. -The graph building mechanism can cope with some branching, but it is advisable to disable NNCF tracing for the parts of the model that exhibit branching (such as the "detection output" layers of object detection networks) by using a `no_nncf_trace()` context. -It is possible to wrap third party functionality with `no_nncf_trace()` context so that this source code does not need to be changed. +This process occurs dynamically during each `forward` call of an `NNCFNetwork`. +If the control flow is data-dependent, a whole new subgraph of the model will be built for each branching in the model definition. +The graph building mechanism can cope with some branching, but it is advisable to disable NNCF tracing for the parts of the model that exhibit branching (such as the "detection output" layers of object detection networks) by using a `no_nncf_trace()` context. +It is possible to wrap third party functionality with `no_nncf_trace()` context so that this source code does not need to be changed. This can be done by patching, please refer to this [example](../examples/post_training_quantization/torch/ssd300_vgg16/README.md). +### Operation scope and addressing - -#### Operation scope and addressing A unique identifier of a node in the `NNCFGraph` - i.e. an operation in the DNN control flow graph - is the `OperationExecutionContext`. However, in most cases the input-agnostic part of `OperationExecutionContext` is enough to identify an operation in the model control flow graph for purposes of inserting compression-related hooks into the model. This `InputAgnosticOperationExecutionContext` is built using a) and b) from the information list gathered to build a regular `OperationExecutionContext`. Its string representation is a concatenation of a `Scope` string representation, the name of the operator (Python function), underscore `_`, and the order of the operator call in the same `Scope`. In turn, the string representation of a `Scope` is a sequence of "__module_class_name__[__module_field_name__]/" substrings, where each such substring corresponds to a __module_class_name__ type of `torch.nn.Module` being called as a __module_field_name__ member field of its parent module, and slashes `/` separate the adjacent levels of the module call hierarchy. 
@@ -73,17 +76,17 @@ As an example, consider a simple PyTorch module: ```python class SimpleModule(torch.nn.Module): - def __init__(): - super().__init__() - self.submodule1 = torch.nn.Conv2d(...) # params omitted - self.submodule2 = torch.nn.Sequential([torch.nn.BatchNorm2d(...), torch.nn.ReLU(...)]) - def forward(x_in): - x = self.submodule1(x_in) - x = self.submodule2(x) - x += torch.ones_like(x) - x += torch.ones_like(x) - x = torch.nn.functional.relu(x) - return x + def __init__(self): + super().__init__() + self.submodule1 = torch.nn.Conv2d(...) # params omitted + self.submodule2 = torch.nn.Sequential(torch.nn.BatchNorm2d(...), torch.nn.ReLU(...)) + def forward(self, x_in): + x = self.submodule1(x_in) + x = self.submodule2(x) + x += torch.ones_like(x) + x += torch.ones_like(x) + x = torch.nn.functional.relu(x) + return x ``` Each `torch.nn.Conv2d` module call internally calls a `conv2d` operator, which will then be added to an `NNCFGraph` during tracing. Therefore, the two convolution operations in the model's control flow graph will have the following `InputAgnosticOperationExecutionContext` string representations: `SimpleModule/Conv2d[submodule1]/conv2d_0` and `SimpleModule/Conv2d[submodule2]/conv2d_0`. @@ -94,9 +97,8 @@ The two consecutive addition operations will be represented by `SimpleModule/__i These string definitions are referred to as "scopes" in the NNCF configuration files (as in `"ignored_scopes"` or `"target_scopes"`), and help specify exact operations for inclusion into or exclusion from compression or for separate compression parameter specification. +### Compression algorithm API and interaction with NNCFNetwork - -#### Compression algorithm API and interaction with NNCFNetwork A compression algorithm is a modification of a regular model control flow according to some trainable or non-trainable parameters. Modification of the control flow is done via a hook, and the trainable parameters are stored inside special NNCF modules. Each compression algorithm therefore consists of taking an unmodified model, analyzing it and then determining a set of modifications necessary for modifying the model's execution so that it now takes specific compression into account. `NNCFNetwork` defines a common interface for compression algorithms to specify the location for hook insertion (based on an `InputAgnosticOperationExecutionContext` of an operation) and the hook itself. It also allows algorithms to register external modules within itself so that the trainable parameters of the compression algorithm could be saved as a checkpoint along with the model while also being indistinguishable from any other trainable parameter of the original model from the training pipeline optimizer's standpoint. @@ -114,4 +116,4 @@ Once all algorithms are applied to the model, the compression changes are commit A `CompressionAlgorithmController` is then used to control or modify aspects of compression during training, to gather statistics related to compression, or to provide additional loss for proper training of the trainable compression parameters. For this purpose, it contains `CompressionScheduler` and `CompressionLoss` instances, which can then be used as desired during the training pipeline. For instance, a `CompressionScheduler` may be implemented so that it enables quantization for activations only upon a certain training epoch, and a `CompressionLoss` may be implemented so that it facilitates soft filter pruning.
-> **NOTE**: In general, the compression method may not have its own scheduler and loss, and the default implementations are used instead. +> __NOTE__: In general, the compression method may not have its own scheduler and loss, and the default implementations are used instead. diff --git a/docs/Usage.md b/docs/Usage.md index 46cebbbea5b..12e4689fe73 100644 --- a/docs/Usage.md +++ b/docs/Usage.md @@ -6,35 +6,41 @@ The task is to prepare this model for accelerated inference by simulating the co The instructions below use certain "helper" functions of the NNCF which abstract away most of the framework specifics and make the integration easier in most cases. As an alternative, you can always use the NNCF internal objects and methods as described in the [architectural overview](./NNCFArchitecture.md). - ## Basic usage -#### Step 1: Create an NNCF configuration file +### Step 1: Create an NNCF configuration file A JSON configuration file is used for easier setup of the parameters of compression to be applied to your model. See [configuration file description](./ConfigFile.md) or the sample configuration files packaged with the [example scripts](../examples) for reference. -#### Step 2: Modify the training pipeline +### Step 2: Modify the training pipeline + NNCF enables compression-aware training by being integrated into the regular training pipelines. The framework is designed so that the modifications to your original training code are minor. 1. **Add** the imports required for NNCF: + ```python import torch import nncf.torch # Important - must be imported before any other external package that depends on torch from nncf import NNCFConfig, create_compressed_model, load_state ``` + **NOTE (PyTorch)**: Due to the way NNCF works within the PyTorch backend, `import nncf` must be done before any other import of `torch` in your package _or_ in third-party packages that your code utilizes, otherwise the compression may be applied incompletely. 2. Load the NNCF JSON configuration file that you prepared during Step 1: + ```python nncf_config = NNCFConfig.from_json("nncf_config.json") # Specify a path to your own NNCF configuration file in place of "nncf_config.json" ``` + 3. (Optional) For certain algorithms such as quantization it is highly recommended to **initialize the algorithm** by passing training data via `nncf_config` prior to starting the compression fine-tuning properly: + ```python from nncf import register_default_init_args nncf_config = register_default_init_args(nncf_config, train_loader, criterion=criterion) ``` + Training data loaders should be attached to the NNCFConfig object as part of a library-defined structure. `register_default_init_args` is a helper method that registers the necessary structures for all available initializations (currently quantizer range and precision initialization) by taking data loader, criterion and criterion function (for sophisticated calculation of loss different from direct call of the @@ -45,47 +51,57 @@ The framework is designed so that the modifications to your original training co `nncf.common.initialization.dataloader.NNCFDataLoader` interface to return a tuple of (_single model input_ , _the rest of the model inputs as a kwargs dict_). 4. 
Right after you create an instance of the original model and load its weights, **wrap the model** by making the following call + ```python compression_ctrl, compressed_model = create_compressed_model(model, nncf_config) ``` + The `create_compressed_model` function parses the loaded configuration file and returns two objects. `compression_ctrl` is a "controller" object that can be used during compressed model training to adjust certain parameters of the compression algorithm (according to a scheduler, for instance), or to gather statistics related to your compression algorithm (such as the current level of sparsity in your model). 5. (Optional) Wrap your model with `DataParallel` or `DistributedDataParallel` classes for multi-GPU training. If you use `DistributedDataParallel`, add the following call afterwards: - ```python - compression_ctrl.distributed() - ``` - in case the compression algorithms that you use need special adjustments to function in the distributed mode. + ```python + compression_ctrl.distributed() + ``` + in case the compression algorithms that you use need special adjustments to function in the distributed mode. + + 6. In the **training loop**, make the following changes: -6. In the **training loop**, make the following changes: - After inferring the model, take a compression loss and add it (using the `+` operator) to the common loss, for example cross-entropy loss: + ```python compression_loss = compression_ctrl.loss() loss = cross_entropy_loss + compression_loss ``` + - Call the scheduler `step()` before each training iteration: + ```python compression_ctrl.scheduler.step() ``` + - Call the scheduler `epoch_step()` before each training epoch: + ```python compression_ctrl.scheduler.epoch_step() ``` > **NOTE**: For a real-world example of how these changes should be introduced, take a look at the [examples](../examples) published in the NNCF repository. -#### Step 3: Run the training pipeline +### Step 3: Run the training pipeline + At this point, the NNCF is fully integrated into your training pipeline. You can run it as usual and monitor your original model's metrics and/or compression algorithm metrics and balance model metrics quality vs. level of compression. - Important points you should consider when training your networks with compression algorithms: - - Turn off the `Dropout` layers (and similar ones like `DropConnect`) when training a network with quantization or sparsity - - It is better to turn off additional regularization in the loss function (for example, L2 regularization via `weight_decay`) when training the network with RB sparsity, since it already imposes an L0 regularization term. -#### Step 4: Export the compressed model +- Turn off the `Dropout` layers (and similar ones like `DropConnect`) when training a network with quantization or sparsity +- It is better to turn off additional regularization in the loss function (for example, L2 regularization via `weight_decay`) when training the network with RB sparsity, since it already imposes an L0 regularization term. + +### Step 4: Export the compressed model + After the compressed model has been fine-tuned to acceptable accuracy and compression stages, you can export it. There are two ways to export a model: 1. Call the compression controller's `export_model` method to properly export the model with compression specifics into ONNX. 
@@ -93,6 +109,7 @@ After the compressed model has been fine-tuned to acceptable accuracy and compre ```python compression_ctrl.export_model("./compressed_model.onnx") ``` + The exported ONNX file may contain special, non-ONNX-standard operations and layers to leverage full compressed/low-precision potential of the OpenVINO toolkit. In some cases it is possible to export a compressed model with ONNX standard operations only (so that it can be run using `onnxruntime`, for example) - this is the case for the 8-bit symmetric quantization and sparsity/filter pruning algorithms. Refer to [compression algorithm documentation](./compression_algorithms) for details. @@ -114,6 +131,7 @@ After the compressed model has been fine-tuned to acceptable accuracy and compre ``` ## Saving and loading compressed models + The complete information about compression is defined by a compressed model and a compression state. The model characterizes the weights and topology of the network. The compression state - how to restore the setting of compression layers in the model and how to restore the compression schedule and the compression loss. @@ -136,6 +154,7 @@ sparsity algorithm has learnt masking of 30% weights out of 51% of target rate. algorithm, for example when rb-sparsity method sets final target sparsity rate for the loss. ### Saving and loading compressed models in TensorFlow + ```python # save part compression_ctrl, compress_model = create_compressed_model(model, nncf_config) @@ -172,6 +191,7 @@ string within `tf.train.Checkpoint`. There are 2 helper classes: `TFCompressionS ### Saving and loading compressed models in PyTorch Deprecated API + ```python # save part compression_ctrl, compressed_model = create_compressed_model(model, nncf_config) @@ -191,6 +211,7 @@ compression_ctrl.scheduler.load_state(resuming_checkpoint['scheduler_state']) ``` New API + ```python # save part compression_ctrl, compressed_model = create_compressed_model(model, nncf_config) @@ -237,8 +258,8 @@ have the same structure with regard to PyTorch module and parameters as it was w In practice this means that you should use the same compression algorithms (i.e. the same NNCF configuration file) when loading a compressed model checkpoint. - ## Exploring the compressed model + After a `create_compressed_model` call, the NNCF log directory will contain visualizations of internal representations for the original, uncompressed model (`original_graph.dot`) and for the model with the compression algorithms applied (`compressed_graph.dot`). These graphs form the basis for NNCF analyses of your model. Below is the example of a LeNet network's `original_graph.dot` visualization: @@ -259,10 +280,10 @@ For instance, below is the same LeNet INT8 model as above, but with `"ignored_sc Notice that all RELU operation outputs and the second convolution's weights are no longer quantized. 
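For reference, an equivalent exclusion can also be assembled directly in Python via `NNCFConfig.from_dict`. The sketch below is illustrative only: the scope string and input shape are hypothetical placeholders that would have to be replaced with the actual scope names found in your model's `original_graph.dot`.

```python
from nncf import NNCFConfig

# A hypothetical quantization config that skips one activation scope.
# "LeNet/relu_0" and the sample_size are placeholders for illustration only.
nncf_config = NNCFConfig.from_dict(
    {
        "input_info": {"sample_size": [1, 1, 32, 32]},
        "compression": {
            "algorithm": "quantization",
            "ignored_scopes": ["LeNet/relu_0"],
        },
    }
)
```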
- ## Advanced usage ### Compression of custom modules + With no target model code modifications, NNCF only supports native PyTorch modules with respect to trainable parameter (weight) compression, such as `torch.nn.Conv2d`. If your model contains a custom, non-PyTorch standard module with trainable weights that should be compressed, you can register it using the `@nncf.register_module` decorator: @@ -281,9 +302,11 @@ If registered module should be ignored by specific algorithms use `ignored_algor In the example above, the NNCF-compressed models that contain instances of `MyModule` will have the corresponding modules extended with functionality that will allow NNCF to quantize, sparsify or prune the `weight` parameter of `MyModule` before it takes part in `MyModule`'s `forward` calculation. ### Accuracy-Aware model training + NNCF has the capability to apply the model compression algorithms while satisfying the user-defined accuracy constraints. This is done by executing an internal custom accuracy-aware training loop, which also helps to automate away some of the manual hyperparameter search related to model training such as setting the total number of epochs, the target compression rate for the model, etc. There are two supported training loops. The first one is called [Early Exit Training](./accuracy_aware_model_training/EarlyExitTraining.md), which aims to finish fine-tuning when the accuracy drop criterion is reached. The second one is more sophisticated. It is targeted for the automated discovery of the compression rate for the model given that it satisfies the user-specified maximal tolerable accuracy drop due to compression. Its name is [Adaptive Compression Level Training](./accuracy_aware_model_training/AdaptiveCompressionTraining.md). Both training loops can be run with either the PyTorch or TensorFlow backend with the same user interface (except for the TF case where the Keras API is used for training). The following function is required to create the accuracy-aware training loop. One has to pass the `NNCFConfig` object and the compression controller (that is returned upon compressed model creation, see above). + ```python from nncf.common.accuracy_aware_training import create_accuracy_aware_training_loop training_loop = create_accuracy_aware_training_loop(nncf_config, compression_ctrl, uncompressed_model_accuracy) @@ -344,14 +367,17 @@ def dump_checkpoint_fn(model, compression_controller, accuracy_aware_runner, sav ``` Once the above functions are defined, you can pass them to the `run` method of the earlier created training loop: + ```python -model = training_loop.run(model, - train_epoch_fn=train_epoch_fn, - validate_fn=validate_fn, - configure_optimizers_fn=configure_optimizers_fn, - dump_checkpoint_fn=dump_checkpoint_fn) +model = training_loop.run( + model, + train_epoch_fn=train_epoch_fn, + validate_fn=validate_fn, + configure_optimizers_fn=configure_optimizers_fn, + dump_checkpoint_fn=dump_checkpoint_fn) ``` + The above call executes the accuracy-aware training loop and returns the compressed model. For more details on how to use the accuracy-aware training loop functionality of NNCF, please refer to its [documentation](./accuracy_aware_model_training/AdaptiveCompressionTraining.md). See a PyTorch [example](../../examples/torch/classification/main.py) for **Quantization** + **Filter Pruning** Adaptive Compression scenario on CIFAR10 and ResNet18 [config](../../examples/torch/classification/configs/pruning/resnet18_cifar10_accuracy_aware.json).
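The `train_epoch_fn`, `validate_fn` and `configure_optimizers_fn` callbacks passed to `run` above are supplied by the user. A minimal sketch of what they might look like for a PyTorch classification pipeline is given below; the exact signatures are an assumption here (check the accuracy-aware training documentation for your NNCF version), `model` refers to the compressed model returned by `create_compressed_model`, and `train_loader`/`val_loader` stand for your own data loaders.

```python
import torch
import torch.nn.functional as F

def configure_optimizers_fn():
    # Assumed to return the optimizer and LR scheduler used for fine-tuning.
    optimizer = torch.optim.SGD(model.parameters(), lr=1e-3, momentum=0.9)
    lr_scheduler = torch.optim.lr_scheduler.StepLR(optimizer, step_size=10)
    return optimizer, lr_scheduler

def train_epoch_fn(compression_ctrl, model, optimizer=None, **kwargs):
    # One epoch of ordinary fine-tuning with the compression loss added to the task loss.
    model.train()
    for images, targets in train_loader:
        optimizer.zero_grad()
        loss = F.cross_entropy(model(images), targets) + compression_ctrl.loss()
        loss.backward()
        optimizer.step()

def validate_fn(model, **kwargs):
    # Assumed to return a single scalar quality metric (top-1 accuracy here).
    model.eval()
    correct = total = 0
    with torch.no_grad():
        for images, targets in val_loader:
            correct += (model(images).argmax(dim=1) == targets).sum().item()
            total += targets.numel()
    return correct / total
```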
diff --git a/docs/accuracy_aware_model_training/AdaptiveCompressionLevelTraining.md b/docs/accuracy_aware_model_training/AdaptiveCompressionLevelTraining.md index cd0ee66fb6b..4ec0a2d4b07 100644 --- a/docs/accuracy_aware_model_training/AdaptiveCompressionLevelTraining.md +++ b/docs/accuracy_aware_model_training/AdaptiveCompressionLevelTraining.md @@ -5,21 +5,22 @@ The compression pipeline can consist of several compression algorithms (Algorith See a PyTorch [example](../../examples/torch/classification/main.py) for **Quantization** + **Filter Pruning** scenario on CIFAR10 and ResNet18 [config](../../examples/torch/classification/configs/pruning/resnet18_cifar10_accuracy_aware.json). -The exact compression algorithm for which the compression level search will be applied is determined in "compression" config section. The parameters to be set by the user in this config section are: -1) `maximal_relative_accuracy_degradation` or `maximal_absolute_accuracy_degradation` (Optional; default `maximal_relative_accuracy_degradation=1.0`) - the maximal allowed accuracy metric drop relative to the original model metrics (in percent) or the maximal allowed absolute accuracy metric drop (in original metrics value), -2) `initial_training_phase_epochs` (Optional; default=5) - number of epochs to train the model with the compression schedule specified in the `"params"` section of `"compression"` algorithm. -3) `patience_epochs` (Optional; default=3) - number of epochs to train the model for a compression rate level set by the search algorithm before switching to another compression rate value. -4) `minimal_compression_rate_step` (Optional; default=0.025) - the minimal compression rate change step value after which the training loop is terminated. -5) `initial_compression_rate_step` (Optional; default=0.1) - initial value for the compression rate increase/decrease training phase of the compression training loop. -6) `compression_rate_step_reduction_factor` (Optional; default=0.5) - factor used to reduce the compression rate change step in the adaptive compression training loop. -7) `lr_reduction_factor` (Optional; default=0.5) - factor used to reduce the base value of the learning rate scheduler after compression rate step is reduced. -8) `maximal_total_epochs` (Optional; default=10000) - number of training epochs, if the fine-tuning epoch reaches this number, the loop finishes the fine-tuning and return the model with thi highest compression rate and the least accuracy drop. +The exact compression algorithm for which the compression level search will be applied is determined in "compression" config section. The parameters to be set by the user in this config section are: +1. `maximal_relative_accuracy_degradation` or `maximal_absolute_accuracy_degradation` (Optional; default `maximal_relative_accuracy_degradation=1.0`) - the maximal allowed accuracy metric drop relative to the original model metrics (in percent) or the maximal allowed absolute accuracy metric drop (in original metrics value), +2. `initial_training_phase_epochs` (Optional; default=5) - number of epochs to train the model with the compression schedule specified in the `"params"` section of `"compression"` algorithm. +3. `patience_epochs` (Optional; default=3) - number of epochs to train the model for a compression rate level set by the search algorithm before switching to another compression rate value. +4. 
`minimal_compression_rate_step` (Optional; default=0.025) - the minimal compression rate change step value after which the training loop is terminated. +5. `initial_compression_rate_step` (Optional; default=0.1) - initial value for the compression rate increase/decrease training phase of the compression training loop. +6. `compression_rate_step_reduction_factor` (Optional; default=0.5) - factor used to reduce the compression rate change step in the adaptive compression training loop. +7. `lr_reduction_factor` (Optional; default=0.5) - factor used to reduce the base value of the learning rate scheduler after compression rate step is reduced. +8. `maximal_total_epochs` (Optional; default=10000) - the maximum number of training epochs; if fine-tuning reaches this number of epochs, the loop finishes the fine-tuning and returns the model with the highest compression rate and the least accuracy drop. To launch the adaptive compression training loop, the user should define several functions related to model training, validation and optimizer creation (see [the usage documentation](../Usage.md#accuracy-aware-model-training) for more details) and pass them to the run method of an `AdaptiveCompressionTrainingLoop` instance. The training loop logic inside of the `AdaptiveCompressionTrainingLoop` is framework-agnostic, while all of the framework specifics are encapsulated inside of corresponding `Runner` objects, which are created and called inside the training loop. The adaptive compression training loop is generally aimed at automatically searching for the optimal compression rate in the model, with the parameters of the search algorithm specified in the configuration file. Below is an example of a filter pruning configuration with added `"accuracy_aware_training"` parameters. + ```json5 { "input_infos": {"sample_size": [1, 2, 224, 224]}, @@ -68,5 +69,6 @@ That is, if a too big of an increase in compression rate resulted in the accurac This sequential search is limited by the minimal granularity of the steps given by `"minimal_compression_rate_step"`. ## Example + An example of how a model is compressed using the Adaptive Compression Training Loop is given in the figure below. -![Example](actl_progress_plot.png) \ No newline at end of file +![Example](actl_progress_plot.png) diff --git a/docs/accuracy_aware_model_training/EarlyExitTraining.md b/docs/accuracy_aware_model_training/EarlyExitTraining.md index 58313136a32..217f630cc3f 100644 --- a/docs/accuracy_aware_model_training/EarlyExitTraining.md +++ b/docs/accuracy_aware_model_training/EarlyExitTraining.md @@ -1,7 +1,7 @@ # Early Exit training loop in NNCF The Early Exit training loop aims to get the compressed model with the desired accuracy criteria as early as possible. This is done by checking the compressed model accuracy after each training epoch, as well as after the initialization step, and exiting the fine-tuning process once the accuracy reaches the user-defined criterion. -This pipeline is simple but effective. It reduces a fine-tuning time for many models till just an initialization step. +This pipeline is simple but effective. For many models, it reduces the fine-tuning time to just the initialization step.
Note: since the EarlyExit training does not control any compression parameter the specified accuracy criterium cannot be satisfied in some cases @@ -18,7 +18,7 @@ Example of config file needed to be provided to create_accuracy_aware_training_l "mode": "early_exit", "params": { "maximal_relative_accuracy_degradation": 1.0, - "maximal_total_expochs": 100 + "maximal_total_epochs": 100 } }, "compression": [ @@ -36,5 +36,3 @@ Example of config file needed to be provided to create_accuracy_aware_training_l } ``` - - \ No newline at end of file diff --git a/docs/api/source/conf.py b/docs/api/source/conf.py index bdc3f81e583..3c88fa2cafa 100644 --- a/docs/api/source/conf.py +++ b/docs/api/source/conf.py @@ -1,11 +1,20 @@ +# Copyright (c) 2023 Intel Corporation +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + import importlib import inspect import os import pkgutil import sys -from dataclasses import dataclass -from dataclasses import field -from typing import Any, Dict, List +from typing import Any, Dict from sphinx.ext.autodoc import mock @@ -13,7 +22,7 @@ project = "NNCF" html_title = "NNCF" -copyright = "2023, Intel Corporation" +copyright_ = "2023, Intel Corporation" author = "Intel Corporation" extensions = ["autoapi.extension", "sphinx.ext.autodoc", "sphinx.ext.linkcode"] @@ -47,6 +56,7 @@ def __init__(self): self.canonical_name_vs_fqn: Dict[str, str] = {} +# pylint: disable=too-many-branches def collect_api_entities() -> APIInfo: """ Collects the fully qualified names of symbols in NNCF package that contain a special attribute (set via @@ -60,17 +70,16 @@ def collect_api_entities() -> APIInfo: skipped_modules = {} # type: Dict[str, str] import nncf - for importer, modname, ispkg in pkgutil.walk_packages( - path=nncf.__path__, prefix=nncf.__name__ + ".", onerror=lambda x: None - ): + for _, modname, _ in pkgutil.walk_packages(path=nncf.__path__, prefix=nncf.__name__ + ".", onerror=lambda x: None): try: modules[modname] = importlib.import_module(modname) - except Exception as e: + except Exception as e: # pylint: disable=broad-except skipped_modules[modname] = str(e) from nncf.common.utils.api_marker import api canonical_imports_seen = set() + # pylint: disable=too-many-nested-blocks for modname, module in modules.items(): print(f"{modname}") for obj_name, obj in inspect.getmembers(module): @@ -78,7 +87,7 @@ def collect_api_entities() -> APIInfo: if objects_module == modname: if inspect.isclass(obj) or inspect.isfunction(obj): if hasattr(obj, api.API_MARKER_ATTR): - marked_object_name = obj._nncf_api_marker + marked_object_name = obj._nncf_api_marker # pylint: disable=protected-access # Check the actual name of the originally marked object # so that the classes derived from base API classes don't # all automatically end up in API @@ -119,13 +128,26 @@ def collect_api_entities() -> APIInfo: return retval -with mock(["torch", "torchvision", "onnx", "onnxruntime", "openvino", "tensorflow", "tensorflow_addons"]): +mock_modules = [ + "torch", + "torchvision", + "onnx", + "onnxruntime", + "openvino", + 
"tensorflow", + "tensorflow_addons", + # Need add backend implementation functions to avoid endless loops on registered functions by mock module, + "nncf.experimental.tensor.torch_functions", + "nncf.experimental.tensor.numpy_functions", +] + +with mock(mock_modules): api_info = collect_api_entities() module_fqns = set() -for fqn in api_info.api_names_vs_obj_dict: - path_elements = fqn.split(".") +for fqn_ in api_info.api_names_vs_obj_dict: + path_elements = fqn_.split(".") for i in range(1, len(path_elements)): intermediate_module_path = ".".join(path_elements[:i]) module_fqns.add(intermediate_module_path) diff --git a/docs/compression_algorithms/BatchnormAdaptation.md b/docs/compression_algorithms/BatchnormAdaptation.md index 38219fbf0d8..47df400808c 100644 --- a/docs/compression_algorithms/BatchnormAdaptation.md +++ b/docs/compression_algorithms/BatchnormAdaptation.md @@ -1,17 +1,18 @@ -### Batch-norm statistics adaptation +# Batch-norm statistics adaptation -After the compression-related changes in the model have been committed, the statistics of the batchnorm layers (per-channel rolling means and variances of activation tensors) can be updated by passing several batches of data through the model before the fine-tuning starts. -This allows to correct the compression-induced bias in the model and reduce the corresponding accuracy drop even before model training. -This option is common for quantization, magnitude sparsity and filter pruning algorithms. +After the compression-related changes in the model have been committed, the statistics of the batchnorm layers (per-channel rolling means and variances of activation tensors) can be updated by passing several batches of data through the model before the fine-tuning starts. +This allows to correct the compression-induced bias in the model and reduce the corresponding accuracy drop even before model training. +This option is common for quantization, magnitude sparsity and filter pruning algorithms. It can be enabled by setting a non-zero value of `num_bn_adaptation_samples` in the `batchnorm_adaptation` section of the `initializer` configuration - see [NNCF config schema](https://openvinotoolkit.github.io/nncf/) for reference. Note that in order to use batchnorm adaptation for your model, you must supply to NNCF a data loader using a `register_default_init_args` helper function or by registering a `nncf.config.structures.BNAdaptationInitArgs` structure within the `NNCFConfig` object in your integration code. -### Example configuration files +## Example configuration files >_For the full list of the algorithm configuration parameters via config file, see the corresponding section in the [NNCF config schema](https://openvinotoolkit.github.io/nncf/)_. 
- Apply batchnorm adaptation for 2048 samples (rounded to nearest batch size multiple) during model quantization: + ```json5 { "input_info": {"sample_size" : [1, 3, 224, 224]}, // the input shape of your model may vary @@ -27,6 +28,7 @@ Note that in order to use batchnorm adaptation for your model, you must supply t ``` - Apply batchnorm adaptation for 32 samples (rounded to nearest batch size multiple) during model magnitude-based sparsification: + ```json5 { "input_info": {"sample_size" : [1, 3, 224, 224]}, // the input shape of your model may vary @@ -43,4 +45,4 @@ Note that in order to use batchnorm adaptation for your model, you must supply t } } } -``` \ No newline at end of file +``` diff --git a/docs/compression_algorithms/Binarization.md b/docs/compression_algorithms/Binarization.md index 33fe08ac93e..d2836b87ff3 100644 --- a/docs/compression_algorithms/Binarization.md +++ b/docs/compression_algorithms/Binarization.md @@ -1,6 +1,8 @@ +# Binarization + >_Scroll down for the examples of the JSON configuration files that can be used to apply this algorithm_. -### Binarization -NNCF supports binarizing weights and activations for 2D convolutional PyTorch\* layers (Conv2D) *only*. + +NNCF supports binarizing weights and activations for 2D convolutional PyTorch\* layers (Conv2D) _only_. Weight binarization may be done in two ways, depending on the configuration file parameters - either via [XNOR binarization](https://arxiv.org/abs/1603.05279) or via [DoReFa binarization](https://arxiv.org/abs/1606.06160). For DoReFa binarization, the scale of binarized weights for each convolution operation is calculated as the mean of absolute values of non-binarized convolutional filter weights, while for XNOR binarization, each convolutional operation has scales that are calculated in the same manner, but _per input channel_ of the convolutional filter. Refer to the original papers for details. @@ -8,20 +10,21 @@ Binarization of activations is implemented via binarizing inputs to the convolut $\text{out} = s * H(\text{in} - s*t)$ -In the formula above, - - $\text{in}$ - non-binarized activation values - - $\text{out}$ - binarized activation values - - $H(x)$ is the Heaviside step function - - $s$ and $t$ are trainable parameters corresponding to binarization scale and threshold respectively +In the formula above: -Training binarized networks requires special scheduling of the training process. For instance, binarizing a pretrained ResNet18 model on ImageNet is a four-stage process, with each stage taking a certain number of epochs. During the stage 1, the network is trained without any binarization. During the stage 2, the training continues with binarization enabled for activations only. During the stage 3, binarization is enabled both for activations and weights. Finally, during the stage 4 the optimizer learning rate, which was kept constant at previous stages, is decreased according to a polynomial law, while weight decay parameter of the optimizer is set to 0. The configuration files for the NNCF binarization algorithm allow to control certain parameters of this training schedule. +- $\text{in}$ - non-binarized activation values +- $\text{out}$ - binarized activation values +- $H(x)$ is the Heaviside step function +- $s$ and $t$ are trainable parameters corresponding to binarization scale and threshold respectively +Training binarized networks requires special scheduling of the training process. 
For instance, binarizing a pretrained ResNet18 model on ImageNet is a four-stage process, with each stage taking a certain number of epochs. During the stage 1, the network is trained without any binarization. During the stage 2, the training continues with binarization enabled for activations only. During the stage 3, binarization is enabled both for activations and weights. Finally, during the stage 4 the optimizer learning rate, which was kept constant at previous stages, is decreased according to a polynomial law, while weight decay parameter of the optimizer is set to 0. The configuration files for the NNCF binarization algorithm allow to control certain parameters of this training schedule. -### Example configuration files: +## Example configuration files >_For the full list of the algorithm configuration parameters via config file, see the corresponding section in the [NNCF config schema](https://openvinotoolkit.github.io/nncf/)_. - Binarize a ResNet using XNOR algorithm, ignoring several portions of the model, with finetuning on the scope of 60 epochs and staged binarization schedule (activations first, then weights) + ```json5 { "input_info": { "sample_size": [1, 3, 224, 224] }, @@ -36,12 +39,13 @@ Training binarized networks requires special scheduling of the training process. "lr_poly_drop_duration_epochs": 30, // Duration, in epochs, of the learning rate dropping process. "disable_wd_start_epoch": 60 // Epoch to disable weight decay in the optimizer }, - - "ignored_scopes": ["ResNet/NNCFLinear[fc]/linear_0", - "ResNet/NNCFConv2d[conv1]/conv2d_0", - "ResNet/Sequential[layer2]/BasicBlock[0]/Sequential[downsample]/NNCFConv2d[0]/conv2d_0", - "ResNet/Sequential[layer3]/BasicBlock[0]/Sequential[downsample]/NNCFConv2d[0]/conv2d_0", - "ResNet/Sequential[layer4]/BasicBlock[0]/Sequential[downsample]/NNCFConv2d[0]/conv2d_0"] + "ignored_scopes": [ + "ResNet/NNCFLinear[fc]/linear_0", + "ResNet/NNCFConv2d[conv1]/conv2d_0", + "ResNet/Sequential[layer2]/BasicBlock[0]/Sequential[downsample]/NNCFConv2d[0]/conv2d_0", + "ResNet/Sequential[layer3]/BasicBlock[0]/Sequential[downsample]/NNCFConv2d[0]/conv2d_0", + "ResNet/Sequential[layer4]/BasicBlock[0]/Sequential[downsample]/NNCFConv2d[0]/conv2d_0" + ] } } ``` diff --git a/docs/compression_algorithms/CompressWeights.md b/docs/compression_algorithms/CompressWeights.md new file mode 100644 index 00000000000..8f2335ac4f8 --- /dev/null +++ b/docs/compression_algorithms/CompressWeights.md @@ -0,0 +1,22 @@ +### Weights Compression + +[OpenVINO](https://github.com/openvinotoolkit/openvino) is the preferred backend to run Weights Compression with, and PyTorch is also supported. + +#### The algorithm description + +The Weights Compression algorithm is aimed at compressing the weights of the models and can be used to optimize the model footprint and performance of large models where the size of weights is relatively larger than the size of activations, for example, Large Language Models (LLM). The algorithm compresses weights only for Linear and Embedding layers. + +#### User guide + +- Compress weights of linear layers and embeddings to int8 + +```python +from nncf import compress_weights +compressed_model = compress_weights(model) +``` + +##### Limitations + +- The algorithm is supported for OpenVINO and PyTorch models. +- The compression applies in-place. +- The compressed model is not trainable. 
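For the OpenVINO backend, an end-to-end flow could look roughly like the sketch below. The file paths are placeholders, and `openvino.runtime.serialize` is assumed to be available in the OpenVINO version you use.

```python
import openvino.runtime as ov
from nncf import compress_weights

core = ov.Core()
model = core.read_model("model.xml")  # placeholder path to an OpenVINO IR

# Weights of Linear and Embedding layers are compressed to int8.
compressed_model = compress_weights(model)

ov.serialize(compressed_model, "compressed_model.xml")
```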
diff --git a/docs/compression_algorithms/KnowledgeDistillation.md b/docs/compression_algorithms/KnowledgeDistillation.md index d1c2a75cdea..ceef7f41b18 100644 --- a/docs/compression_algorithms/KnowledgeDistillation.md +++ b/docs/compression_algorithms/KnowledgeDistillation.md @@ -1,43 +1,45 @@ -### Knowledge Distillation (experimental feature) +# Knowledge Distillation (experimental feature) + +## The algorithm description -#### The algorithm description The Knowledge Distillation [Hinton et al., 2015](https://arxiv.org/pdf/1503.02531.pdf) -implies that a small model (student) is trained to mimic a pre-trained large model (teacher) through knowledge -transfer. The goal is to improve the accuracy of the student network. +implies that a small model (student) is trained to mimic a pre-trained large model (teacher) through knowledge +transfer. The goal is to improve the accuracy of the student network. -The NNCF for PyTorch supports Knowledge Distillation out of the box along with all supported compression algorithm -(quantization, sparsity, filter pruning), when a student is a model being compressed and teacher - original -non-compressed one. +The NNCF for PyTorch supports Knowledge Distillation out of the box along with all supported compression algorithm +(quantization, sparsity, filter pruning), when a student is a model being compressed and teacher - original +non-compressed one. -Knowledge is transferred from the teacher model to the student one by minimizing loss function, which is calculated -based on predictions of the models. At the moment, two types of loss functions are available. +Knowledge is transferred from the teacher model to the student one by minimizing loss function, which is calculated +based on predictions of the models. At the moment, two types of loss functions are available. One of them should be explicitly specified in the config. - + MSE distillation loss: - + ${L}_{MSE}(z^{s}, z^{t}) = || z^s - z^t ||_2^2$ - + Cross-Entropy distillation loss: - + ${p}_{i} = \frac{\exp({z}\_{i})}{\sum\_{j}(\exp({z}\_{j}))}$ - + ${L}\_{CE}({p}^{s}, {p}^{t}) = -\sum_{i}{p}^{t}\_{i}*\log({p}^{s}\_{i})$ - + The Knowledge Distillation loss function is combined with a regular loss function, so overall loss function will be computed as: - + $L = {L}\_{reg}({z}^{s}, y) + {L}\_{distill}({z}^{s}, {z}^{t})$ - + ![kd_pic](../pics/knowledge_distillation.png) - - Note: the Cross-Entropy distillation loss was proposed in [Hinton et al., 2015](https://arxiv.org/pdf/1503.02531.pdf) + + Note: the Cross-Entropy distillation loss was proposed in [Hinton et al., 2015](https://arxiv.org/pdf/1503.02531.pdf) with temperature parameter, but we don't use it or assume that T=1. - -#### User guide + +## User guide To turn on the Knowledge Distillation with some compression algorithm (e.g. filter_pruning) it's necessary to specify `knowledge_distillation` algorithm and its type in the config: -``` + +```json { ... "compression": [ @@ -52,14 +54,13 @@ specify `knowledge_distillation` algorithm and its type in the config: ] } ``` + See this [config file](../../examples/torch/classification/configs/pruning/resnet34_imagenet_pruning_geometric_median_kd.json) for an example, and [NNCF config schema](https://openvinotoolkit.github.io/nncf/) for reference to the available configuration parameters for the algorithm. -##### Limitations +## Limitations - The algorithm is supported for PyTorch only. -- Training the same configuration with Knowledge Distillation requires more time and GPU memory than without it. 
+- Training the same configuration with Knowledge Distillation requires more time and GPU memory than without it. On average, the memory (for all GPU execution modes) and time overheads are below 20% each. - Outputs of the model that shouldn't be differentiated must have `requires_grad=False`. - The model should output predictions, not calculate the losses. - - diff --git a/docs/compression_algorithms/Pruning.md b/docs/compression_algorithms/Pruning.md index 3651256542f..abe84354caf 100644 --- a/docs/compression_algorithms/Pruning.md +++ b/docs/compression_algorithms/Pruning.md @@ -1,11 +1,12 @@ +# Filter pruning + >_Scroll down for the examples of the JSON configuration files that can be used to apply this algorithm_. -### Filter pruning Filter pruning algorithm zeros output filters in Convolutional layers based on some filter importance criterion (filters with smaller importance are pruned). The framework contains three filter importance criteria: `L1`, `L2` norm, and `Geometric Median`. Also, different schemes of pruning application are presented by different schedulers. Not all Convolution layers in the model can be pruned. Such layers are determined by the model architecture automatically as well as cross-layer dependencies that impose constraints on pruning filters. -#### Filter importance criteria **L1, L2** +## Filter importance criteria **L1, L2** `L1`, `L2` filter importance criteria are based on the following assumption: > Convolutional filters with small $l_p$ norms do not significantly contribute to output activation values, and thus have a small impact on the final predictions of CNN models. @@ -26,17 +27,18 @@ Where $L_j$ is j-th convolutional layer in model. $\{F_1, \dots F_m\} \in L_j$ - Then during pruning filters with smaller $G(F_i)$ importance function will be pruned first. -#### Schedulers +## Schedulers **Baseline Scheduler** - Firstly, during `num_init_steps` epochs the model is trained without pruning. Secondly, the pruning algorithm calculates filter importances and prunes a `pruning_target` part of the filters with the smallest importance in each prunable convolution. + +Firstly, during `num_init_steps` epochs the model is trained without pruning. Secondly, the pruning algorithm calculates filter importances and prunes a `pruning_target` part of the filters with the smallest importance in each prunable convolution. The zeroed filters are frozen afterwards and the remaining model parameters are fine-tuned. **Parameters of the scheduler:** + - `num_init_steps` - number of epochs for model pretraining **before** pruning. - `pruning_target` - pruning level target. For example, the value `0.5` means that right after pretraining, convolutions that can be pruned will have 50% of their filters set to zero. - **Exponential scheduler** Similar to the Baseline scheduler, during `num_init_steps` epochs the model is pretrained without pruning. @@ -47,19 +49,21 @@ $P_i = a * e^{- k * i}$ Where $a, k$ are parameters. **Parameters of scheduler:** + - `num_init_steps` - number of epochs for model pretraining before pruning. - `pruning_steps` - the number of epochs during which the pruning level target is increased from `pruning_init` to `pruning_target` value. - `pruning_init` - initial pruning level target. For example, value `0.1` means that at the beginning of training, convolutions that can be pruned will have 10% of their filters set to zero. - `pruning_target` - pruning level target at the end of the schedule.
For example, the value `0.5` means that at the epoch with the number of `num_init_steps + pruning_steps`, convolutions that can be pruned will have 50% of their filters set to zero. **Exponential with bias scheduler** + Similar to the `Exponential scheduler`, but the current pruning level $P_{i}$ (on the i-th epoch) during training is calculated by the equation: $P_i = a * e^{- k * i} + b$ Where $a, k, b$ are parameters. > **NOTE**: Baseline scheduler prunes filters only ONCE and after it just fine-tunes remaining parameters while exponential (and exponential with bias) schedulers choose and prune different filters subsets at each pruning epoch. -#### Batch-norm statistics adaptation +## Batch-norm statistics adaptation After the compression-related changes in the model have been committed, the statistics of the batchnorm layers (per-channel rolling means and variances of activation tensors) can be updated by passing several batches of data @@ -68,24 +72,26 @@ and reduce the corresponding accuracy drop even before model training. This opti sparsity and filter pruning algorithms. It can be enabled by setting a non-zero value of `num_bn_adaptation_samples` in the `batchnorm_adaptation` section of the `initializer` configuration (see example below). -#### Interlayer ranking types +## Interlayer ranking types Interlayer ranking type can be one of `unweighted_ranking` or `learned_ranking`. + - In the case of `unweighted_ranking` and with `all_weights=True`, all filter norms will be collected together and sorted to choose the least important ones. But this approach may not be optimal because filter norms are a good measure of filter importance inside a layer, but not across layers. - In the case of `learned_ranking` that uses a re-implementation of the [Learned Global Ranking method](https://arxiv.org/abs/1904.12368) (LeGR), a set of ranking coefficients will be learned for comparing filters across different layers. The $(a_i, b_i)$ pair of scalars will be learned for each $i$-th layer and used to transform norms of $i$-th layer filters before sorting all filter norms together as $a_i * N_i + b_i$ , where $N_i$ is the vector of filter norms of the $i$-th layer, and $(a_i, b_i)$ are the ranking coefficients for the $i$-th layer. This approach allows pruning the model taking into account layer-specific sensitivity to weight perturbations and getting pruned models with higher accuracy. - > **NOTE:** In all our pruning experiments we used the SGD optimizer. -#### Filter pruning statistics -A model compression can be measured by two main metrics: filter pruning level and FLOPs pruning level. While -filter pruning level shows the ratio of removed filters to the total number of filters in the model, FLOPs pruning level -indicates how the removed filters affect the number of floating point operations required to run a model. +## Filter pruning statistics + +A model compression can be measured by two main metrics: filter pruning level and FLOPs pruning level. While +filter pruning level shows the ratio of removed filters to the total number of filters in the model, FLOPs pruning level +indicates how the removed filters affect the number of floating point operations required to run a model. During the algorithm execution, several compression statistics are available. See the example below.
-``` + +```text Statistics by pruned layers: +----------------------+------------------+--------------+---------------------+ | Layer's name | Weight's shape | Mask's shape | Filter pruning | @@ -118,155 +124,163 @@ Statistics of the filter pruning algorithm: +---------------------------------------+-------+ ``` -##### Layer statistics -`Statistics by pruned layers` section lists names of all layers that will be pruned, shapes of their weight tensors, -shapes of pruning masks applied to respective weights and percentage of zeros in those masks. +### Layer statistics + +`Statistics by pruned layers` section lists names of all layers that will be pruned, shapes of their weight tensors, +shapes of pruning masks applied to respective weights and percentage of zeros in those masks. -##### Model statistics -The columns `Full` and `Current` represent the values of the corresponding statistics in the original model and compressed one in the current state, respectively. +### Model statistics + +The columns `Full` and `Current` represent the values of the corresponding statistics in the original model and compressed one in the current state, respectively. The `Pruning level` column indicates the ratio between the values of the full and current statistics in the corresponding rows, defined by the formula: $Statistic\\:pruning\\:level = 1 - statistic\\:current / statistic\\:full$ - -`Filter pruning level` - percentage of filters removed from the model. -`GFLOPs pruning level` - an estimated reduction in the number of floating point operations of the model. +`Filter pruning level` - percentage of filters removed from the model. + +`GFLOPs pruning level` - an estimated reduction in the number of floating point operations of the model. The number of FLOPs for a single convolutional layer can be calculated as: $FLOPs = 2 * input\\:channels * kernel\\:size ^2 * W * H * filters$ > **NOTE**: One GFLOP is one billion (1e9) FLOPs. -Each removed filter contributes to FLOPs reduction in two convolutional layers as it affects the number -of filters in one and the number of input channels of the next layer. Thus, it is expected that this number may differ +Each removed filter contributes to FLOPs reduction in two convolutional layers as it affects the number +of filters in one and the number of input channels of the next layer. Thus, it is expected that this number may differ significantly from the filter pruning level. -In addition, the decrease in GFLOPs is estimated by calculating the number of FLOPs of convolutional and fully connected layers. +In addition, the decrease in GFLOPs is estimated by calculating the number of FLOPs of convolutional and fully connected layers. As a result, these estimates may differ slightly from the actual number of FLOPs in the compressed model. `MParams pruning level` - calculated reduction in the number of parameters in the model in millions. Typically convolutional layer weights have the shape of $(kernel\\:size,\\:kernel\\:size,\\:input\\:channels,\\:filter\\:num)$. -Thus, each removed filter affects the number of parameters in two convolutional layers as it affects the number -of filters in one and the number of input channels of the next layer. It is expected that this number may differ +Thus, each removed filter affects the number of parameters in two convolutional layers as it affects the number +of filters in one and the number of input channels of the next layer. It is expected that this number may differ significantly from the filter pruning level. 
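To make the GFLOPs estimate above concrete, the per-layer count from the formula can be reproduced in a few lines. The sketch below covers a single convolutional layer only, treats `W` and `H` as the output feature-map size, and ignores bias terms; the concrete shapes are made up for illustration.

```python
def conv2d_flops(input_channels: int, kernel_size: int, out_w: int, out_h: int, filters: int) -> int:
    # FLOPs = 2 * input_channels * kernel_size^2 * W * H * filters
    return 2 * input_channels * kernel_size ** 2 * out_w * out_h * filters

# Pruning 16 of 64 filters of a 3x3 convolution on a 56x56 output map with 64 input channels:
full = conv2d_flops(64, 3, 56, 56, 64)
pruned = conv2d_flops(64, 3, 56, 56, 48)
print(f"{(full - pruned) / 1e9:.3f} GFLOPs saved in this layer alone")
# The next layer would also shrink, since its number of input channels drops from 64 to 48.
```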
-##### Algorithm statistics +### Algorithm statistics -`Filter (or FLOPs) pruning level in current epoch` - a pruning level calculated by the algorithm scheduler to be applied in the current training epoch. -> **NOTE**: In case of `Filter pruning level in current epoch` this metric does not indicate the whole model filter pruning level, as +`Filter (or FLOPs) pruning level in current epoch` - a pruning level calculated by the algorithm scheduler to be applied in the current training epoch. +> **NOTE**: In case of `Filter pruning level in current epoch` this metric does not indicate the whole model filter pruning level, as it does not take into account the number of filters in layers that cannot be pruned. `Target filter (or FLOPs) pruning level` - a pruning level that is expected to be achieved at the end of the algorithm execution. > **NOTE**: In case of `Target filter pruning level` this number indicates what percentage of filters will be removed from only those layers that can be pruned. It is important to note that pruning levels mentioned in the `statistics of the filter pruning algorithm` are the goals the algorithm aims to achieve. -It is not always possible to achieve these levels of pruning due to cross-layer and inference constraints. +It is not always possible to achieve these levels of pruning due to cross-layer and inference constraints. Therefore, it is expected that these numbers may differ from the calculated statistics in the `statistics of the pruned model` section. -### Example configuration files +## Example configuration files >_For the full list of the algorithm configuration parameters via config file, see the corresponding section in the [NNCF config schema](https://openvinotoolkit.github.io/nncf/)_. - Prune a model with default parameters (from 0 to 0.5 filter pruning level across 100 epochs with exponential schedule) -```json5 -{ - "input_info": { "sample_size": [1, 3, 224, 224] }, - "compression": - { - "algorithm": "filter_pruning" - } -} -``` + + ```json5 + { + "input_info": { "sample_size": [1, 3, 224, 224] }, + "compression": + { + "algorithm": "filter_pruning" + } + } + ``` - Same as above, but filter importance is considered globally across all eligible weighted operations: -```json5 -{ - "input_info": { "sample_size": [1, 3, 224, 224] }, - "compression": - { - "algorithm": "filter_pruning", - "all_weights": true - } -} -``` + + ```json5 + { + "input_info": { "sample_size": [1, 3, 224, 224] }, + "compression": + { + "algorithm": "filter_pruning", + "all_weights": true + } + } + ``` - Prune a model, immediately setting filter pruning level to 10%, applying [batchnorm adaptation](./BatchnormAdaptation.md) and reaching 60% within 20 epochs using exponential schedule, enabling pruning of first convolutional layers and downsampling convolutional layers: -```json5 -{ - "input_info": { "sample_size": [1, 3, 224, 224] }, - "compression": - { - "algorithm": "filter_pruning", - "pruning_init": 0.1, - "params": { - "pruning_target": 0.6, - "pruning_steps": 20, - "schedule": "exponential", - "prune_first_conv": true, - "prune_downsample_convs": true - } - } -} -``` + + ```json5 + { + "input_info": { "sample_size": [1, 3, 224, 224] }, + "compression": + { + "algorithm": "filter_pruning", + "pruning_init": 0.1, + "params": { + "pruning_target": 0.6, + "pruning_steps": 20, + "schedule": "exponential", + "prune_first_conv": true, + "prune_downsample_convs": true + } + } + } + ``` - Prune a model using geometric median filter importance and reaching 30% filter pruning level 
within 10 epochs using exponential schedule, postponing application of pruning for 10 epochs: -```json5 -{ - "input_info": { "sample_size": [1, 3, 224, 224] }, - "compression": - { - "algorithm": "filter_pruning", - "params": { - "filter_importance": "geometric_median", - "pruning_target": 0.3, - "pruning_steps": 10, - "schedule": "exponential", - "num_init_steps": 10 - } - } -} -``` + + ```json5 + { + "input_info": { "sample_size": [1, 3, 224, 224] }, + "compression": + { + "algorithm": "filter_pruning", + "params": { + "filter_importance": "geometric_median", + "pruning_target": 0.3, + "pruning_steps": 10, + "schedule": "exponential", + "num_init_steps": 10 + } + } + } + ``` - Prune and quantize a model at the same time using a FLOPS target for pruning and defaults for the rest of parameters: -```json5 -{ - "input_info": { "sample_size": [1, 3, 224, 224] }, - "compression": - [ - { - "algorithm": "filter_pruning", - "params": { - "pruning_flops_target": 0.6 - } - }, - { - "algorithm": "quantization" - } - ] -} -``` -- Prune a model with default parameters, estimate filter ranking by Learned Global Ranking method before finetuning. + ```json5 + { + "input_info": { "sample_size": [1, 3, 224, 224] }, + "compression": + [ + { + "algorithm": "filter_pruning", + "params": { + "pruning_flops_target": 0.6 + } + }, + { + "algorithm": "quantization" + } + ] + } + ``` + +- Prune a model with default parameters, estimate filter ranking by Learned Global Ranking method before finetuning. LEGR algorithm will be using 200 generations for the evolution algorithm, 20 train steps to estimate pruned model accuracy on each generation and target maximal filter pruning level equal to 50%: -```json5 -{ - "input_info": { "sample_size": [1, 3, 224, 224] }, - "compression": - [ - { - "algorithm": "filter_pruning", - "params": - { - "interlayer_ranking_type": "learned_ranking", - "legr_params": - { - "generations": 200, - "train_steps": 20, - "max_pruning": 0.5 - } - } - } - ] -} -``` \ No newline at end of file + + ```json5 + { + "input_info": { "sample_size": [1, 3, 224, 224] }, + "compression": + [ + { + "algorithm": "filter_pruning", + "params": + { + "interlayer_ranking_type": "learned_ranking", + "legr_params": + { + "generations": 200, + "train_steps": 20, + "max_pruning": 0.5 + } + } + } + ] + } + ``` diff --git a/docs/compression_algorithms/Quantization.md b/docs/compression_algorithms/Quantization.md index 37e3f186bb7..977d77483eb 100644 --- a/docs/compression_algorithms/Quantization.md +++ b/docs/compression_algorithms/Quantization.md @@ -1,10 +1,11 @@ +# Uniform Quantization with Fine-Tuning + >_Scroll down for the examples of the JSON configuration files that can be used to apply this algorithm_. -### Uniform Quantization with Fine-Tuning A uniform "fake" quantization method supports an arbitrary number of bits (>=2) which is used to represent weights and activations. The method performs differentiable sampling of the continuous signal (for example, activations or weights) during forward pass, simulating inference with integer arithmetic. -#### Common Quantization Formula +## Common Quantization Formula Quantization is parametrized by clamping range and number of quantization levels. The sampling formula is the following: @@ -16,11 +17,9 @@ $clamp(input; input\\_low, input\\_high)$ $s = \frac{levels - 1}{input\\_high - input\\_low}$ - $input\\_low$ and $input\\_high$ represent the quantization range and $\left\lfloor \cdot \right\rceil$ denotes rounding to the nearest integer. 
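As a quick illustration of the sampling behaviour described by the formulas above, here is a NumPy sketch of the forward pass of a "fake" quantizer. Gradient handling is out of scope, and the exact offset/zero-point treatment used by NNCF is not reproduced here, so treat this as an approximation of the formula rather than the actual implementation.

```python
import numpy as np

def fake_quantize(x: np.ndarray, input_low: float, input_high: float, levels: int = 256) -> np.ndarray:
    # s maps the clamping range [input_low, input_high] onto (levels - 1) integer steps.
    s = (levels - 1) / (input_high - input_low)
    clamped = np.clip(x, input_low, input_high)
    # Round to the nearest quantization level, then map back to the original value range.
    return np.round((clamped - input_low) * s) / s + input_low

x = np.linspace(-1.5, 1.5, 7)
print(fake_quantize(x, input_low=-1.0, input_high=1.0))
```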
- -#### Symmetric Quantization +## Symmetric Quantization During the training, we optimize the **scale** parameter that represents the range `[input_low, input_range]` of the original signal using gradient descent: @@ -28,17 +27,17 @@ $input\\_low=scale*\frac{level\\_low}{level\\_high}$ $input\\_high=scale$ - In the formula above, $level\\_low$ and $level\\_high$ represent the range of the discrete signal. - - For weights: - + +- For weights: + $level\\_low=-2^{bits-1}+1$ - + $level\\_high=2^{bits-1}-1$ $levels=255$ - - For unsigned activations: +- For unsigned activations: $level\\_low=0$ @@ -46,7 +45,7 @@ In the formula above, $level\\_low$ and $level\\_high$ represent the range of th $levels=256$ - - For signed activations: +- For signed activations: $level\\_low=-2^{bits-1}$ @@ -60,7 +59,7 @@ $output = \left\lfloor clamp(input * \frac{level\\_high}{scale}, level\\_low, le Use the `num_init_samples` parameter from the `initializer` group to initialize the values of `scale` and determine which activation should be signed or unsigned from the collected statistics using given number of samples. -#### Asymmetric Quantization +## Asymmetric Quantization During the training we optimize the `input_low` and `input_range` parameters using gradient descent: @@ -93,19 +92,20 @@ $$ &\end{flalign} $$ - You can use the `num_init_samples` parameter from the `initializer` group to initialize the values of `input_low` and `input_range` from the collected statistics using given number of samples. -#### Quantizer setup and hardware config files +## Quantizer setup and hardware config files + NNCF allows to quantize models for best results on a given Intel hardware type when executed using OpenVINO runtime. To achieve this, the quantizer setup should be performed with following considerations in mind: -1) every operation that can accept quantized inputs on a given HW (i.e. can be executed using quantized input values) should have its inputs quantized in NNCF -2) the quantized inputs should be quantized with a configuration that is supported on a given HW for a given operation (e.g. per-tensor vs per-channel quantization, or 8 bits vs. 4 bits) -3) for operations that are agnostic to quantization, the execution should handle quantized tensors rather than full-precision tensors. -4) certain operation sequences will be runtime-optimized to execute in a single kernel call ("fused"), and additional quantizer insertion/quantization simulation within such operation sequences will be detrimental to overall performance + +1. every operation that can accept quantized inputs on a given HW (i.e. can be executed using quantized input values) should have its inputs quantized in NNCF +2. the quantized inputs should be quantized with a configuration that is supported on a given HW for a given operation (e.g. per-tensor vs per-channel quantization, or 8 bits vs. 4 bits) +3. for operations that are agnostic to quantization, the execution should handle quantized tensors rather than full-precision tensors. +4. certain operation sequences will be runtime-optimized to execute in a single kernel call ("fused"), and additional quantizer insertion/quantization simulation within such operation sequences will be detrimental to overall performance These requirements are fulfilled by the quantizer propagation algorithm. -The algorithm first searches the internal NNCF representation of the model's control flow graph for predefined patterns that are "fusable", and apply the fusing to the internal graph representation as well. 
+The algorithm first searches the internal NNCF representation of the model's control flow graph for predefined patterns that are "fusible", and apply the fusing to the internal graph representation as well. Next, the operations in the graph that can be associated to input-quantizable operations on a given target hardware are assigned a single quantizer for each its quantizable activation input, with a number of possible quantizer configurations attached to it (that are feasible on target HW). The quantizers are then "propagated" against the data flow in the model's control flow graph as far as possible, potentially merging with other quantizers. Once all quantizers have reached a standstill in their propagation process, each will have a final (possibly reduced) set of possible quantizer configurations, from which a single one is either chosen manually, or using a precision initialization algorithm (which accepts the potential quantizer locations and associated potential quantizer configuration sets). @@ -122,10 +122,9 @@ The quantization configuration in the `"target_device": "TRIAL"` case may be ove For all target HW types, parts of the model graph can be marked as non-quantizable by using the `"ignored_scopes"` field - inputs and weights of matching nodes in the NNCF internal graph representation will not be quantized, and the downstream quantizers will not propagate upwards through such nodes. +## Quantization Implementation -#### Quantization Implementation - -In our implementation, we use a slightly transformed formula. It is equivalent by order of floating-point operations to simplified symmetric formula and the assymetric one. The small difference is addition of small positive number `eps` to prevent division by zero and taking absolute value of range, since it might become negative on backward: +In our implementation, we use a slightly transformed formula. It is equivalent by order of floating-point operations to simplified symmetric formula and the asymmetric one. The small difference is addition of small positive number `eps` to prevent division by zero and taking absolute value of range, since it might become negative on backward: $output = \frac{clamp(\left\lfloor (input-input\\_low^{*}) *s - ZP \right \rceil, level\\_low, level\\_high)}{s}$ @@ -145,10 +144,11 @@ $input\\_low^{*} = 0$ $input\\_range^{*} = scale$ -The most common case of applying quantization is 8-bit uniform quantization. +The most common case of applying quantization is 8-bit uniform quantization. NNCF example scripts provide a plethora of configuration files that implement this case ([PyTorch](../../examples/torch/classification/configs/quantization/inception_v3_imagenet_int8.json), [TensorFlow](../../examples/tensorflow/classification/configs/quantization/inception_v3_imagenet_int8.json)) --- + **NOTE** There is a known issue with AVX2 and AVX512 CPU devices. The issue appears with 8-bit matrix calculations with tensors which elements are close to the maximum or saturated. @@ -160,18 +160,18 @@ This regime is used when `"target_device": "CPU"` or `"target_device": "ANY"` se To control the application of overflow fix, `"overflow_fix"` config option is introduced. The default value is `"overflow_fix": "enable"`. To apply the overflow issue fix only to the first layer, use `"overflow_fix": "first_layer_only"`. To disable the overflow issue fix for all layers, use `"overflow_fix": "disable"`. - - --- -#### Mixed-Precision Quantization + +## Mixed-Precision Quantization Quantization to lower precisions (e.g. 
6, 4, 2 bits) is an efficient way to accelerate inference of neural networks. Although NNCF supports quantization with an arbitrary number of bits to represent weights and activations values, choosing ultra-low bitwidth could noticeably affect the model's accuracy. A good trade-off between accuracy and performance is achieved by assigning different precisions to different layers. NNCF provides two automatic precision assignment algorithms, namely **HAWQ** and **AutoQ**. -#### HAWQ +### HAWQ + NNCF utilizes the [HAWQ-v2](https://arxiv.org/pdf/1911.03852.pdf) method to automatically choose optimal mixed-precision configuration by taking into account the sensitivity of each layer, i.e. how much lower-bit quantization of each layer decreases the accuracy of model. The most sensitive layers are kept at higher precision. The sensitivity of the i-th layer is @@ -227,7 +227,8 @@ is chosen. By default, liberal mode is used as it does not reject a large number The `bitwidth_assignment_mode` parameter can override it to the strict one. For automatic mixed-precision selection it's recommended to use the following template of configuration file: -``` + +```json "optimizer": { "base_lr": 3.1e-4, "schedule_type": "plateau", @@ -271,7 +272,8 @@ file. --- -#### AutoQ +### AutoQ + NNCF provides an alternate mode, namely AutoQ, for mixed-precision automation. It is an AutoML-based technique that automatically learns the layer-wise bitwidth with explored experiences. Based on [HAQ](https://openaccess.thecvf.com/content_CVPR_2019/papers/Wang_HAQ_Hardware-Aware_Automated_Quantization_With_Mixed_Precision_CVPR_2019_paper.pdf), AutoQ utilizes an actor-critic algorithm, Deep Deterministic Policy Gradient (DDPG) for efficient search over the bitwidth space. DDPG is trained in an episodic fashion, converging to a deterministic mixed-precision policy after a number of episodes. An episode is constituted by stepping, the DDPG transitions from quantizer to quantizer sequentially to predict a precision of a layer. Each quantizer essentially denotes a state in RL framework and it is represented by attributes of the associated layers. For example, a quantizer for 2D Convolution is represented by its quantizer Id (integer), input and output channel size, feature map dimension, stride size, if it is depthwise, number of parameters etc. It is recommended to check out ```_get_layer_attr``` in [```quantization_env.py```](https://github.com/openvinotoolkit/nncf/blob/develop/nncf/automl/environment/quantization_env.py#L333) for the featurization of different network layer types. When the agent enters a state/quantizer, it receives the state features and forward passes them through its network. The output of the forward pass is a scalar continuous action output which is subsequently mapped to the bitwidth options of the particular quantizer. The episode terminates after the prediction of the last quantizer and a complete layer-wise mixed-precision policy is obtained. To ensure a policy fits in the user-specified compression ratio, the policy is post processed by reducing the precision sequentially from the last quantizer until the compression ratio is met. @@ -315,7 +317,7 @@ As briefly mentioned earlier, user is required to register a callback function f Following is an example of wrapping ImageNet validation loop as a callback. Top5 accuracy is chosen as the scalar objective metric. ```autoq_eval_fn``` and ```val_loader``` are registered in the call of ```register_default_init_args```. 
-``` +```python def autoq_eval_fn(model, eval_loader): _, top5 = validate(eval_loader, model, criterion, config) return top5 @@ -327,11 +329,12 @@ Following is an example of wrapping ImageNet validation loop as a callback. Top5 The complete config [example](../../examples/torch/classification/configs/mixed_precision/mobilenet_v2_imagenet_mixed_int_autoq_staged.json) that applies AutoQ to MobileNetV2 is provided within the [classification sample](../../examples/torch/classification) for PyTorch. -### Example configuration files: +## Example configuration files >_For the full list of the algorithm configuration parameters via config file, see the corresponding section in the [NNCF config schema](https://openvinotoolkit.github.io/nncf/)_. - Quantize a model using default algorithm settings (8-bit, quantizers configuration chosen to be compatible with all Intel target HW types): + ```json5 { "input_info": { "sample_size": [1, 3, 224, 224] }, // the input shape of your model may vary @@ -342,6 +345,7 @@ The complete config [example](../../examples/torch/classification/configs/mixed_ ``` - Quantize a model to 8-bit precision targeted for Intel CPUs, with additional constraints of symmetric weight quantization and asymmetric activation quantization: + ```json5 { "input_info": { "sample_size": [1, 3, 32, 32] }, // the input shape of your model may vary @@ -355,6 +359,7 @@ The complete config [example](../../examples/torch/classification/configs/mixed_ ``` - Quantize a model with fully symmetric INT8 quantization and increased number of quantizer range initialization samples (make sure to supply a corresponding data loader in code via `nncf.config.structures.QuantizationRangeInitArgs` or the `register_default_init_args` helper function): + ```json5 { "input_info": { "sample_size": [1, 3, 224, 224] }, // the input shape of your model may vary @@ -369,6 +374,7 @@ The complete config [example](../../examples/torch/classification/configs/mixed_ ``` - Quantize a model using 4-bit per-channel quantization for experimentation/trial purposes (end-to-end performance and/or compatibility with OpenVINO Inference Engine not guaranteed) + ```json5 { "input_info": { "sample_size": [1, 3, 32, 32] }, // the input shape of your model may vary @@ -382,6 +388,7 @@ The complete config [example](../../examples/torch/classification/configs/mixed_ ``` - Quantize a multi-input model to 8-bit precision targeted for Intel CPUs, with a range initialization performed using percentile statistics (empirically known to be better for NLP models, for example) and excluding some parts of the model from quantization: + ```json5 { "input_info": [ @@ -418,7 +425,9 @@ The complete config [example](../../examples/torch/classification/configs/mixed_ "target_device": "TRIAL" } ``` + - Quantize a model to variable bit width using 300 iterations of the AutoQ algorithm, with a target model size (w.r.t the effective parameter storage size) set to 15% of the FP32 model and possible quantizer bitwidths limited to INT2, INT4 or INT8. 
+ ```json5 { "input_info": { "sample_size": [1, 3, 224, 224] }, // the input shape of your model may vary @@ -436,4 +445,3 @@ The complete config [example](../../examples/torch/classification/configs/mixed_ "target_device": "TRIAL" } ``` - diff --git a/docs/compression_algorithms/Sparsity.md b/docs/compression_algorithms/Sparsity.md index 6922113dc4a..b0916550eec 100644 --- a/docs/compression_algorithms/Sparsity.md +++ b/docs/compression_algorithms/Sparsity.md @@ -1,11 +1,11 @@ +# Non-Structured Sparsity >_Scroll down for the examples of the JSON configuration files that can be used to apply this algorithm_. -### Non-Structured Sparsity Sparsity algorithm zeros weights in Convolutional and Fully-Connected layers in a non-structured way, so that zero values are randomly distributed inside the tensor. Most of the sparsity algorithms set the less important weights to zero but the criteria of how they do it is different. The framework contains several implementations of sparsity methods. -#### RB-Sparsity +## RB-Sparsity This section describes the Regularization-Based Sparsity (RB-Sparsity) algorithm implemented in this framework. The method is based on $L_0$-regularization, with which parameters of the model tend to zero: @@ -33,7 +33,7 @@ The method requires a long schedule of the training process in order to minimize > **NOTE**: The known limitation of the method is that the sparsified CNN must include Batch Normalization layers which make the training process more stable. -#### Batch-norm statistics adaptation +## Batch-norm statistics adaptation After the compression-related changes in the model have been committed, the statistics of the batchnorm layers (per-channel rolling means and variances of activation tensors) can be updated by passing several batches of data @@ -44,14 +44,15 @@ sparsity and filter pruning algorithms. It can be enabled by setting a non-zero > **NOTE**: In all our sparsity experiments, we used the Adam optimizer and initial learning rate `0.001` for model weights and sparsity mask. -#### Magnitude Sparsity +## Magnitude Sparsity The magnitude sparsity method implements a naive approach that is based on the assumption that the contribution of lower weights is lower so that they can be pruned. After each training epoch the method calculates a threshold based on the current sparsity ratio and uses it to zero weights which are lower than this threshold. And here there are two options: + - Weights are used as is during the threshold calculation procedure. - Weights are normalized before the threshold calculation. +## Constant Sparsity -#### Constant Sparsity This special algorithm takes no additional parameters and is used when you want to load a checkpoint already trained with another sparsity algorithm and do other compression without changing the sparsity mask. 
### Example configuration files @@ -70,6 +71,7 @@ This special algorithm takes no additional parameters and is used when you want ``` - Apply magnitude sparsity, increasing sparsity level step-wise from 0 to 70% in 3 steps at given training epoch indices: + ```json5 { "input_info": { "sample_size": [1, 3, 224, 224] }, // the input shape of your model may vary @@ -87,6 +89,7 @@ This special algorithm takes no additional parameters and is used when you want ``` - Apply magnitude sparsity, immediately setting sparsity level to 10%, performing [batch-norm adaptation](./BatchnormAdaptation.md) to potentially recover accuracy, and exponentially increasing sparsity to 50% over 30 epochs of training: + ```json5 { "input_info": { "sample_size": [1, 3, 224, 224] }, // the input shape of your model may vary @@ -108,6 +111,7 @@ This special algorithm takes no additional parameters and is used when you want ``` - Apply RB-sparsity to UNet, increasing sparsity level exponentially from 1% to 60% over 100 epochs, keeping the sparsity mask trainable until epoch 110 (after which the mask is frozen and the model is allowed to fine-tune with a fixed sparsity level), and excluding parts of the model from sparsification: + ```json5 { "input_info": { "sample_size": [1, 3, 224, 224] }, // the input shape of your model may vary @@ -126,4 +130,4 @@ This special algorithm takes no additional parameters and is used when you want ] } } -``` \ No newline at end of file +``` diff --git a/docs/compression_algorithms/post_training/ONNX.md b/docs/compression_algorithms/post_training/ONNX.md index 54f610cc186..58c6f6def97 100644 --- a/docs/compression_algorithms/post_training/ONNX.md +++ b/docs/compression_algorithms/post_training/ONNX.md @@ -1,9 +1,9 @@ -## Post-Training Quantization for ONNX +# Post-Training Quantization for ONNX NNCF supports [ONNX](https://onnx.ai/) backend for the Post-Training Quantization algorithm. This guide contains some notes that you should consider before working with NNCF for ONNX. -### Model Preparation +## Model Preparation The majority of the ONNX models are exported from different frameworks, such as PyTorch or TensorFlow. @@ -22,9 +22,10 @@ from onnx.version_converter import convert_version model = onnx.load_model('/path_to_model') converted_model = convert_version(model, target_version=13) ``` -# ONNX Results -Below are some results obtained using [benchmarking section](../../../tests/onnx/benchmarking/README.md) for the models from [ONNX Model Zoo](https://github.com/onnx/models). +## ONNX Results + +Below are some results obtained using [benchmarking section](../../../tests/onnx/benchmarking/README.md) for the models from [ONNX Model Zoo](https://github.com/onnx/models). ### Classification diff --git a/docs/compression_algorithms/post_training/Quantization.md b/docs/compression_algorithms/post_training/Quantization.md index fe39416a592..2f8a1b20104 100644 --- a/docs/compression_algorithms/post_training/Quantization.md +++ b/docs/compression_algorithms/post_training/Quantization.md @@ -1,4 +1,4 @@ -## Post-Training Quantization +# Post-Training Quantization Post-Training Quantization is a quantization algorithm that doesn't demand retraining of a quantized model. It utilizes a small subset of the initial dataset to calibrate quantization constants. @@ -9,7 +9,7 @@ NNCF provides an advanced Post-Training Quantization algorithm, which consists o 2) FastBiasCorrection or BiasCorrection - Reduces the bias errors between quantized layers and the corresponding original layers. 
-### Usage +## Usage To start the algorithm, provide the following entities: @@ -19,29 +19,30 @@ To start the algorithm, provide the following entities: The basic workflow steps: -1) Create the [data transformation function](#data-transformation-function). +1. Create the [data transformation function](#data-transformation-function). -```python -def transform_fn(data_item): - images, _ = data_item - return images -``` + ```python + def transform_fn(data_item): + images, _ = data_item + return images + ``` -2) Create an instance of `nncf.Dataset` class by passing two parameters: -* `data_source` - Iterable python object that contains data items for model calibration. -* `transform_fn` - Data transformation function from the Step 1. +2. Create an instance of `nncf.Dataset` class by passing two parameters: -```python -calibration_dataset = nncf.Dataset(val_dataset, transform_fn) -``` + * `data_source` - Iterable python object that contains data items for model calibration. + * `transform_fn` - Data transformation function from the Step 1. -3) Run the quantization pipeline. + ```python + calibration_dataset = nncf.Dataset(val_dataset, transform_fn) + ``` -```python -quantized_model = nncf.quantize(model, calibration_dataset) -``` +3. Run the quantization pipeline. + + ```python + quantized_model = nncf.quantize(model, calibration_dataset) + ``` -### Data Transformation Function +## Data Transformation Function Model input structure differs from one pipeline to another. Thus NNCF introduces the interface to adapt the user dataset format to the NNCF format. This interface is called the data transformation function. @@ -87,4 +88,4 @@ for data_item in val_loader: NNCF provides the examples of Post-Training Quantization where you can find the implementation of data transformation -function: [PyTorch](../../../examples/post_training_quantization/torch/mobilenet_v2/README.md), [TensorFlow](../../../examples/post_training_quantization/tensorflow/mobilenet_v2/README.md), [ONNX](../../../examples/post_training_quantization/onnx/mobilenet_v2/README.md), and [OpenVINO](../../../examples/post_training_quantization/openvino/mobilenet_v2/README.md) \ No newline at end of file +function: [PyTorch](../../../examples/post_training_quantization/torch/mobilenet_v2/README.md), [TensorFlow](../../../examples/post_training_quantization/tensorflow/mobilenet_v2/README.md), [ONNX](../../../examples/post_training_quantization/onnx/mobilenet_v2/README.md), and [OpenVINO](../../../examples/post_training_quantization/openvino/mobilenet_v2/README.md) diff --git a/docs/styleguide/PyGuide.md b/docs/styleguide/PyGuide.md index 3e7cb969a2c..0c7f9169766 100644 --- a/docs/styleguide/PyGuide.md +++ b/docs/styleguide/PyGuide.md @@ -3,40 +3,42 @@
Table of Contents -- [1 Introduction](#s1-introduction) -- [2 Automating Code Formatting](#s2-auto-code-formatting) -- [3 Python Language Rules](#s3-python-language-rules) - * [3.1 PyLint](#s3.1-pylint) - * [3.2 3rd party packages](#s3.2-3rd-party-packages) - * [3.3 Global variables](#s3.3-global-variables) - * [3.4 Nested/Local/Inner Classes and Functions](#s3.4-nested) - * [3.5 Default Iterators and Operators](#s3.5-default-iterators-and-operators) - * [3.6 Type Annotated Code](#s3.6-type-annotated-code) - * [3.7 Files and Sockets](#s3.7-files-and-sockets) - * [3.8 Abstract Classes](#s3.8-abstract-classes) -- [4 Python Style Rules](#s4-python-style-rules) - * [4.1 Line length](#s4.1-line-length) - * [4.2 Comments and Docstrings](#s4.2-comments-and-docstrings) - + [4.2.1 Modules](#s4.2.1-modules) - + [4.2.2 Functions and Methods](#s4.2.2-functions-and-methods) - + [4.2.3 Classes](#s4.2.3-classes) - + [4.2.4 Block and Inline Comments](#s4.2.4-block-and-inline-comments) - * [4.3 Strings](#s4.3-strings) - * [4.4 Logging](#s4.4-logging) - * [4.5 Error Messages](#s4.5-error-messages) - * [4.6 TODO Comments](#s4.6-todo-comments) - * [4.7 Naming](#s4.7-naming) - + [4.7.1 Names to Avoid](#s4.7.1-names-to-avoid) - + [4.7.2 Naming Conventions](#s4.7.2-naming-conventions) - + [4.7.3 Framework specific class naming](#s4.7.3-framework-specific-class-naming) - + [4.7.4 File Naming](#s4.7.4-file-naming) - * [4.8 Main](#s4.8-main) -- [5 API documentation rules](#s5-api-doc-rules) +- [1 Introduction](#s1-introduction) +- [2 Automating Code Formatting](#s2-auto-code-formatting) +- [3 Python Language Rules](#s3-python-language-rules) + - [3.1 PyLint](#s3.1-pylint) + - [3.2 3rd party packages](#s3.2-3rd-party-packages) + - [3.3 Global variables](#s3.3-global-variables) + - [3.4 Nested/Local/Inner Classes and Functions](#s3.4-nested) + - [3.5 Default Iterators and Operators](#s3.5-default-iterators-and-operators) + - [3.6 Type Annotated Code](#s3.6-type-annotated-code) + - [3.7 Files and Sockets](#s3.7-files-and-sockets) + - [3.8 Abstract Classes](#s3.8-abstract-classes) +- [4 Python Style Rules](#s4-python-style-rules) + - [4.1 Line length](#s4.1-line-length) + - [4.2 Comments and Docstrings](#s4.2-comments-and-docstrings) + - [4.2.1 Modules](#s4.2.1-modules) + - [4.2.2 Functions and Methods](#s4.2.2-functions-and-methods) + - [4.2.3 Classes](#s4.2.3-classes) + - [4.2.4 Block and Inline Comments](#s4.2.4-block-and-inline-comments) + - [4.3 Strings](#s4.3-strings) + - [4.4 Logging](#s4.4-logging) + - [4.5 Error Messages](#s4.5-error-messages) + - [4.6 TODO Comments](#s4.6-todo-comments) + - [4.7 Naming](#s4.7-naming) + - [4.7.1 Names to Avoid](#s4.7.1-names-to-avoid) + - [4.7.2 Naming Conventions](#s4.7.2-naming-conventions) + - [4.7.3 Framework specific class naming](#s4.7.3-framework-specific-class-naming) + - [4.7.4 File Naming](#s4.7.4-file-naming) + - [4.8 Main](#s4.8-main) +- [5 API documentation rules](#s5-api-doc-rules) +
+ ## 1 Introduction This document gives coding conventions for the Python code comprising [Neural Network Compression Framework (NNCF)](../../README.md). @@ -48,6 +50,7 @@ the [PEP 8 -- Style Guide for Python Code](https://www.python.org/dev/peps/pep-0 + ## 2 Automating Code Formatting To maintain consistency and readability throughout the codebase, we use the [black](https://github.com/psf/black) @@ -62,10 +65,11 @@ make pre-commit Also recommend configuring your IDE to run Black and isort tools automatically when saving files. Automatic code formatting is mandatory for all Python files, but you can disable it for specific cases if required: - - if you need a specialized order of importing modules; - - for large data structures for which autoformatting unnecessarily breaks into lines, - e.g. reference data in tests, class lists or arguments for subprocess; - - for structures for which formatting helps understanding, such as matrix. + +- if you need a specialized order of importing modules; +- for large data structures for which autoformatting unnecessarily breaks into lines, + e.g. reference data in tests, class lists or arguments for subprocess; +- for structures for which formatting helps understanding, such as matrix. Example for 'isort': @@ -95,11 +99,13 @@ arr2 = [ + ## 3 Python Language Rules + ### 3.1 PyLint Run [pylint](https://github.com/PyCQA/pylint) over your code using this [pylintrc](../../.pylintrc). @@ -108,16 +114,18 @@ Run [pylint](https://github.com/PyCQA/pylint) over your code using this [pylintr - *Preferred solution*: Change the code to fix the warning. - *Exception*: Suppress the warning if they are inappropriate so that other issues are not hidden. To suppress warnings you can set a line-level comment + ```python dict = "something awful" # Bad Idea... pylint: disable=redefined-builtin ``` + or update [pylintrc](../../.pylintrc) if applicable for the whole project. If the reason for the suppression is not clear from the symbolic name, add an explanation. - + ### 3.2 3rd party packages Do not add new third-party dependencies unless absolutely necessary. All things being equal, give preference to built-in packages. @@ -125,6 +133,7 @@ Do not add new third-party dependencies unless absolutely necessary. All things + ### 3.3 Global variables Avoid global variables. @@ -137,6 +146,7 @@ Avoid global variables. + ### 3.4 Nested/Local/Inner Classes and Functions No need to overuse nested local functions or classes and inner classes. @@ -144,6 +154,7 @@ No need to overuse nested local functions or classes and inner classes. - Nested local functions or classes are fine if it satisfy the following conditions: - The code becomes more readable and simpler. - Closing over a local variables. + ```python # Correct: def make_scaling_fn(scale): @@ -156,6 +167,7 @@ No need to overuse nested local functions or classes and inner classes. - Do not nest a function just to hide it from users of a module. Instead, prefix its name with an `_` at the module level so that it can still be accessed by tests. + ```Python # Wrong: def avg(a, b, c): @@ -167,6 +179,7 @@ No need to overuse nested local functions or classes and inner classes. m = sum(m,c) return m/3 ``` + ```Python # Correct: def _sum(x, y): @@ -181,6 +194,7 @@ No need to overuse nested local functions or classes and inner classes. + ### 3.5 Default Iterators and Operators Use default iterators and operators for types that support them, like lists, @@ -196,6 +210,7 @@ if obj in alist: ... for line in afile: ... 
for k, v in adict.items(): ... ``` + ```python # Wrong: for key in adict.keys(): ... @@ -207,6 +222,7 @@ for k, v in dict.iteritems(): ... + ### 3.6 Type Annotated Code Code should be annotated with type hints according to @@ -220,6 +236,7 @@ def func(a: int) -> List[int]: + ### 3.7 Files and Sockets Explicitly close files and sockets when done with them. @@ -230,10 +247,10 @@ with open("hello.txt") as hello_file: print(line) ``` - + ### 3.8 Abstract Classes When defining abstract classes, the following template should be used: @@ -275,31 +292,32 @@ class C(ABC): pass ``` - + ## 4 Python Style Rules + ### 4.1 Line length Maximum line length is *120 characters*. Explicit exceptions to the 120 character limit: -- Long import statements. -- URLs, pathnames, or long flags in comments. -- Long string module level constants not containing whitespace that would be - inconvenient to split across lines such as URLs or pathnames. - - Pylint disable comments. (e.g.: `# pylint: disable=invalid-name`) - +- Long import statements. +- URLs, pathnames, or long flags in comments. +- Long string module level constants not containing whitespace that would be + inconvenient to split across lines such as URLs or pathnames. + - Pylint disable comments. (e.g.: `# pylint: disable=invalid-name`) + ### 4.2 Comments and Docstrings Be sure to use the right style for module, function, method docstrings and @@ -308,6 +326,7 @@ inline comments. + #### 4.2.1 Modules Every file should contain a license boilerplate. @@ -328,14 +347,16 @@ Every file should contain a license boilerplate. + #### 4.2.2 Functions and Methods In this section, "function" means a method, function, or generator. A function must have a docstring, unless it meets all of the following criteria: -- not externally visible -- very short -- obvious + +- not externally visible +- very short +- obvious ```python def load_state(model: torch.nn.Module, state_dict_to_load: dict, is_resume: bool = False) -> int: @@ -364,6 +385,7 @@ def load_state(model: torch.nn.Module, state_dict_to_load: dict, is_resume: bool + #### 4.2.3 Classes Classes should have a docstring below the class definition describing the class. If your class @@ -403,6 +425,7 @@ if there is nothing special about this exact implementation of the magic method hashing all fields as a tuple in `__hash__` or concatenating string-like objects in `__add__` etc.) For instance, this simple `__init__` method may omit the method description in the docstring (the parameter description is, however, still required): + ```python class Klass: # ... @@ -414,11 +437,13 @@ class Klass: self.param1 = param1 self.param2 = param2 ``` + while this `__init__` requires a description of external dependencies and potential side effects of creating objects of the class: + ```python class ComplexKlass(BaseClass): # ... - def __init__(self, param1: ParamType, param2: AnotherParamType): + def __init__(self, param1: ParamType, param2: AnotherParamType): """ *Add a brief explanation of what happens during this particular __init__, such as :* The construction of this object is dependent on the value of GLOBAL_VARIABLE... @@ -448,6 +473,7 @@ class ComplexKlass(BaseClass): + #### 4.2.4 Block and Inline Comments The final place to have comments is in tricky parts of the code. If you're going to have to explain it @@ -478,8 +504,8 @@ knows Python (though not what you're trying to do) better than you do. 
-### 4.3 Strings +### 4.3 Strings ```python # Correct: @@ -506,7 +532,9 @@ long_string = textwrap.dedent( + ### 4.4 Logging + Use the logger object built into NNCF for all purposes of logging within the NNCF package code. Do not use `print(...)` or other ways of output. @@ -519,6 +547,7 @@ nncf_logger.info("This is an info-level log message") ``` Wrong: + ```python print("This is an info-level log message") ``` @@ -568,10 +597,12 @@ This ensures that the deprecation warning is seen to the user at all NNCF log le + ### 4.5 Error Messages Error messages (such as: message strings on exceptions like `ValueError`, or messages shown to the user) should follow guidelines: + - The message needs to precisely match the actual error condition. - Interpolated pieces need to always be clearly identifiable as such. - The message should start with a capital letter. @@ -579,6 +610,7 @@ messages shown to the user) should follow guidelines: + ### 4.6 TODO Comments Use `TODO` comments for code that is temporary, a short-term solution, or @@ -607,13 +639,13 @@ event ("Remove this code when all clients can handle XML responses."). + ### 4.7 Naming `module_name`, `package_name`, `ClassName`, `method_name`, `ExceptionName`, `function_name`, `GLOBAL_CONSTANT_NAME`, `global_var_name`, `instance_var_name`, `function_parameter_name`, `local_var_name`. - Function names, variable names, and filenames should be descriptive; eschew abbreviation. In particular, do not use abbreviations that are ambiguous or unfamiliar to readers outside your project, and do not abbreviate by deleting @@ -621,7 +653,6 @@ letters within a word. Always use a `.py` filename extension. Never use dashes. - @@ -702,44 +733,47 @@ Always use a `.py` filename extension. Never use dashes. + #### 4.7.1 Names to Avoid -- single character names, except for specifically allowed cases: - - counters or iterators (e.g. `i`, `j`, `k`, `v`, et al.) - - `e` as an exception identifier in `try/except` statements. - - `f` as a file handle in `with` statements - Please be mindful not to abuse single-character naming. Generally speaking, - descriptiveness should be proportional to the name's scope of visibility. - For example, `i` might be a fine name for 5-line code block but within - multiple nested scopes, it is likely too vague. -- dashes (`-`) in any package/module name -- `__double_leading_and_trailing_underscore__` names (reserved by Python) -- offensive terms -- names that needlessly include the type of the variable (for example: +- single character names, except for specifically allowed cases: + - counters or iterators (e.g. `i`, `j`, `k`, `v`, et al.) + - `e` as an exception identifier in `try/except` statements. + - `f` as a file handle in `with` statements + Please be mindful not to abuse single-character naming. Generally speaking, + descriptiveness should be proportional to the name's scope of visibility. + For example, `i` might be a fine name for 5-line code block but within + multiple nested scopes, it is likely too vague. +- dashes (`-`) in any package/module name +- `__double_leading_and_trailing_underscore__` names (reserved by Python) +- offensive terms +- names that needlessly include the type of the variable (for example: `id_to_name_dict`) + #### 4.7.2 Naming Conventions -- "Internal" means internal to a module, or protected or private within a - class. -- Prepending a single underscore (`_`) has some support for protecting module - variables and functions (linters will flag protected member access). 
While - prepending a double underscore (`__` aka "dunder") to an instance variable - or method effectively makes the variable or method private to its class - (using name mangling); we discourage its use as it impacts readability and - testability, and isn't *really* private. -- Place related classes and top-level functions together in a - module. -- Use CapWords for class names, but lower\_with\_under.py for module names. -- Use the word "layer" (instead of "module") in the `nncf.common` module to - refer to the building block of neural networks. +- "Internal" means internal to a module, or protected or private within a + class. +- Prepending a single underscore (`_`) has some support for protecting module + variables and functions (linters will flag protected member access). While + prepending a double underscore (`__` aka "dunder") to an instance variable + or method effectively makes the variable or method private to its class + (using name mangling); we discourage its use as it impacts readability and + testability, and isn't *really* private. +- Place related classes and top-level functions together in a + module. +- Use CapWords for class names, but lower\_with\_under.py for module names. +- Use the word "layer" (instead of "module") in the `nncf.common` module to + refer to the building block of neural networks. + #### 4.7.3 Framework specific class naming - `PTClassName` for Torch @@ -748,6 +782,7 @@ Always use a `.py` filename extension. Never use dashes. + #### 4.7.4 File Naming Python filenames must have a `.py` extension and must not contain dashes (`-`). @@ -756,6 +791,7 @@ This allows them to be imported and unit tested. + ### 4.8 Main ```python @@ -766,16 +802,17 @@ if __name__ == "__main__": main() ``` - + ## 5 API documentation rules -All functions and classes that belong to NNCF API should be documented. + +All functions and classes that belong to NNCF API should be documented. The documentation should utilize the reStructuredText (.rst) format for specifying parameters, return types and otherwise formatting the docstring, since the docstring is used as a source for generating the HTML API documentation with Sphinx. Argument descriptions for `__init__(...)` methods of API classes should be located in the docstring of the class itself, not the docstring of the `__init__(...)` method. This is required so that the autogenerated API documentation is rendered properly. -If the autogenerated API documentation does not show type hints for certain arguments despite the fact that the type hints are present in the object's implementation code, +If the autogenerated API documentation does not show type hints for certain arguments despite the fact that the type hints are present in the object's implementation code, or if the type hints do not refer to the API symbol's canonical alias, then the type hint should be explicitly declared in the docstring using the `:type *param_name*:` directive (or `:rtype:` for return types). diff --git a/examples/common/README.md b/examples/common/README.md index 8d472e3fb16..a41e21da71d 100644 --- a/examples/common/README.md +++ b/examples/common/README.md @@ -1,2 +1,2 @@ This directory contains common code for example scripts. -See [other directories at the same level](./..) for actual example scripts that you can launch to evaluate NNCF for various backend frameworks and use cases. \ No newline at end of file +See [other directories at the same level](./..) 
for actual example scripts that you can launch to evaluate NNCF for various backend frameworks and use cases. diff --git a/examples/experimental/torch/classification/Quickstart.md b/examples/experimental/torch/classification/Quickstart.md index 9dcb3a8d32d..823abaa0a4d 100644 --- a/examples/experimental/torch/classification/Quickstart.md +++ b/examples/experimental/torch/classification/Quickstart.md @@ -1,47 +1,54 @@ # Setup -### PyTorch +## PyTorch + Install PyTorch and Torchvision using the [PyTorch installation guide](https://pytorch.org/get-started/locally/#start-locally). NNCF currently supports PyTorch 1.12.1. For this quickstart, PyTorch 1.12.1 and Torchvision 0.13.1 with CUDA 11.3 was installed using: + ```bash pip install torch==1.12.1+cu113 torchvision==0.13.1+cu113 --extra-index-url https://download.pytorch.org/whl/cu113 ``` +## NNCF -### NNCF There are two options for installing [***NNCF***](https://github.com/openvinotoolkit/nncf#installation): -- Package built from NNCF repository or -- PyPI package. + +- Package built from NNCF repository or +- PyPI package. To install NNCF and dependencies from the NNCF repository, install by running the following in the repository root directory and also set `PYTHONPATH` variable to include the root directory: + ```bash python setup.py develop export PYTHONPATH="${PYTHONPATH}:/nncf" ``` To install NNCF and dependencies as a PyPI package, use the following: + ```bash pip install nncf ``` The ```examples``` folder from the NNCF repository ***is not*** included when you install NNCF using a package manager. To run the BootstrapNAS examples, you will need to obtain this folder from the repository and add it to your path. +## Additional Dependencies -### Additional Dependencies -The examples in the NNCF repo have additional requirements, such as EfficientNet, MLFlow, Tensorboard, etc., which are not installed with NNCF. You will need to install them using: -``` +The examples in the NNCF repo have additional requirements, such as EfficientNet, MLFlow, Tensorboard, etc., which are not installed with NNCF. You will need to install them using: + +```bash pip install efficientnet_pytorch tensorboard mlflow returns ``` +## Example -# Example -To run an example of super-network generation and sub-network search, use the ```bootstrap_nas.py``` script located [here](https://github.com/openvinotoolkit/nncf/blob/develop/examples/experimental/torch/classification/bootstrap_nas.py) and the sample ```config.json``` from [here](https://github.com/jpablomch/bootstrapnas/blob/main/bootstrapnas_examples/config.json). +To run an example of super-network generation and sub-network search, use the ```bootstrap_nas.py``` script located [here](https://github.com/openvinotoolkit/nncf/blob/develop/examples/experimental/torch/classification/bootstrap_nas.py) and the sample ```config.json``` from [here](https://github.com/jpablomch/bootstrapnas/blob/main/bootstrapnas_examples/config.json). -The file ```config.json``` contains a sample configuration for generating a super-network from a trained model. The sample file is configured to generate a super-network from ResNet-50 trained with CIFAR-10. The file should be modified depending on the model to be used as input for BootstrapNAS. +The file ```config.json``` contains a sample configuration for generating a super-network from a trained model. The sample file is configured to generate a super-network from ResNet-50 trained with CIFAR-10. 
The file should be modified depending on the model to be used as input for BootstrapNAS. -Weights for CIFAR10-based models can be found at: https://github.com/huyvnphan/PyTorch_CIFAR10 +Weights for CIFAR10-based models can be found at: https://github.com/huyvnphan/PyTorch_CIFAR10 -Use the following to test training a super-network: -``` +Use the following to test training a super-network: + +```bash cd /examples/experimental/torch/classification python bootstrap_nas.py -m train \ -c /bootstrapnas_examples/config.json \ @@ -49,22 +56,22 @@ python bootstrap_nas.py -m train \ --weights ``` - ### Expected Output Files after executing BootstrapNAS -The output of running ```bootstrap_nas.py``` will be a sub-network configuration that has an accuracy similar to the input model (by default a $\pm$1% absolute difference in accuracy is allowed), but with improvements in MACs. Format: ([MACs_subnet, ACC_subnet]). -Several files are saved to your `log_dir` after the training has ended: +The output of running ```bootstrap_nas.py``` will be a sub-network configuration that has an accuracy similar to the input model (by default a $\pm$1% absolute difference in accuracy is allowed), but with improvements in MACs. Format: ([MACs_subnet, ACC_subnet]). + +Several files are saved to your `log_dir` after the training has ended: -- `compressed_graph.{dot, png}`- Dot and PNG files that describe the wrapped NNCF model. -- `original_graph.dot` - Dot file that describes the original model. -- `config.json`- A copy of your original config file. +- `compressed_graph.{dot, png}`- Dot and PNG files that describe the wrapped NNCF model. +- `original_graph.dot` - Dot file that describes the original model. +- `config.json`- A copy of your original config file. - `events.*`- Tensorboard logs. - `last_elasticity.pth`- Super-network's elasticity information. This file can be used when loading super-networks for searching or inspection. -- `last_model_weights.pth`- Super-network's weights after training. -- `snapshot.tar.gz` - Copy of the code used for this run. +- `last_model_weights.pth`- Super-network's weights after training. +- `snapshot.tar.gz` - Copy of the code used for this run. - `subnetwork_best.pth` - Dictionary with the configuration of the best sub-network. Best defined as a sub-network that performs in the Pareto front, and that deviates a maximum `acc_delta` from original model. -- `supernet_{best, last}.pth` - Super-network weights at its best and last state. +- `supernet_{best, last}.pth` - Super-network weights at its best and last state. If the user wants to have a CSV output file of the search progression, ```search_algo.search_progression_to_csv()``` can be called after running the search step. -For a visualization of the search progression please use ```search_algo.visualize_search_progression()``` after the search has concluded. A PNG file will be generated. +For a visualization of the search progression please use ```search_algo.visualize_search_progression()``` after the search has concluded. A PNG file will be generated. 
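For reference, the two calls mentioned above could be placed at the end of the search step roughly as follows. This is a sketch rather than a complete script: `search_algo` stands for the search algorithm object created by the example's search step.

```python
# `search_algo` is assumed to be the BootstrapNAS search algorithm object from the example's search step.
search_algo.search_progression_to_csv()      # writes the search progression to a CSV file
search_algo.visualize_search_progression()   # renders the search progression as a PNG file
```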
diff --git a/examples/experimental/torch/classification/bootstrap_nas.py b/examples/experimental/torch/classification/bootstrap_nas.py index be609f65d08..5ae16e5c25d 100644 --- a/examples/experimental/torch/classification/bootstrap_nas.py +++ b/examples/experimental/torch/classification/bootstrap_nas.py @@ -213,8 +213,8 @@ def validate_model_fn_top1(model_, loader_): # Maximal subnet elasticity_ctrl.multi_elasticity_handler.activate_maximum_subnet() - search_algo.bn_adaptation.run(model) - top1_acc = validate_model_fn_top1(model, val_loader) + search_algo.bn_adaptation.run(nncf_network) + top1_acc = validate_model_fn_top1(nncf_network, val_loader) logger.info( "Maximal subnet Top1 acc: {top1_acc}, Macs: {macs}".format( top1_acc=top1_acc, @@ -224,8 +224,8 @@ def validate_model_fn_top1(model_, loader_): # Best found subnet elasticity_ctrl.multi_elasticity_handler.activate_subnet_for_config(best_config) - search_algo.bn_adaptation.run(model) - top1_acc = validate_model_fn_top1(model, val_loader) + search_algo.bn_adaptation.run(nncf_network) + top1_acc = validate_model_fn_top1(nncf_network, val_loader) logger.info( "Best found subnet Top1 acc: {top1_acc}, Macs: {macs}".format( top1_acc=top1_acc, @@ -235,7 +235,7 @@ def validate_model_fn_top1(model_, loader_): elasticity_ctrl.export_model(osp.join(config.log_dir, "best_subnet.onnx")) if "test" in config.mode: - validate(val_loader, model, criterion, config) + validate(val_loader, nncf_network, criterion, config) if __name__ == "__main__": diff --git a/examples/experimental/torch/classification/bootstrap_nas_search.py b/examples/experimental/torch/classification/bootstrap_nas_search.py index 3118c12715c..228641ff0bf 100644 --- a/examples/experimental/torch/classification/bootstrap_nas_search.py +++ b/examples/experimental/torch/classification/bootstrap_nas_search.py @@ -164,8 +164,8 @@ def validate_model_fn_top1(model_, loader_): # Maximal subnet elasticity_ctrl.multi_elasticity_handler.activate_maximum_subnet() - search_algo.bn_adaptation.run(model) - top1_acc = validate_model_fn_top1(model, val_loader) + search_algo.bn_adaptation.run(nncf_network) + top1_acc = validate_model_fn_top1(nncf_network, val_loader) logger.info( "Maximal subnet Top1 acc: {top1_acc}, Macs: {macs}".format( top1_acc=top1_acc, @@ -175,8 +175,8 @@ def validate_model_fn_top1(model_, loader_): # Best found subnet elasticity_ctrl.multi_elasticity_handler.activate_subnet_for_config(best_config) - search_algo.bn_adaptation.run(model) - top1_acc = validate_model_fn_top1(model, val_loader) + search_algo.bn_adaptation.run(nncf_network) + top1_acc = validate_model_fn_top1(nncf_network, val_loader) logger.info( "Best found subnet Top1 acc: {top1_acc}, Macs: {macs}".format( top1_acc=top1_acc, @@ -191,7 +191,7 @@ def validate_model_fn_top1(model_, loader_): assert best_config == elasticity_ctrl.multi_elasticity_handler.get_active_config() if "test" in config.mode: - validate(val_loader, model, criterion, config) + validate(val_loader, nncf_network, criterion, config) if __name__ == "__main__": diff --git a/examples/post_training_quantization/onnx/mobilenet_v2/README.md b/examples/post_training_quantization/onnx/mobilenet_v2/README.md index e9d528ab35f..8aaaad8bf46 100644 --- a/examples/post_training_quantization/onnx/mobilenet_v2/README.md +++ b/examples/post_training_quantization/onnx/mobilenet_v2/README.md @@ -1,24 +1,29 @@ # Post-Training Quantization of MobileNet v2 ONNX Model -This example demonstrates how to use Post-Training Quantization API from Neural Network Compression 
Framework (NNCF) to quantize ONNX models on the example of [MobileNet v2](https://huggingface.co/alexsu52/mobilenet_v2_imagenette) quantization, pretrained on [Imagenette](https://github.com/fastai/imagenette) dataset. +This example demonstrates how to use Post-Training Quantization API from Neural Network Compression Framework (NNCF) to quantize ONNX models on the example of [MobileNet v2](https://huggingface.co/alexsu52/mobilenet_v2_imagenette) quantization, pretrained on [Imagenette](https://github.com/fastai/imagenette) dataset. The example includes the following steps: + - Loading the [Imagenette](https://github.com/fastai/imagenette) dataset (~340 Mb) and the [MobileNet v2 ONNX model](https://huggingface.co/alexsu52/mobilenet_v2_imagenette) pretrained on this dataset. - Quantizing the model using NNCF Post-Training Quantization algorithm. - Output of the following characteristics of the quantized model: - - Accuracy drop of the quantized model (INT8) over the pre-trained model (FP32) - - Performance speed up of the quantized model (INT8) + - Accuracy drop of the quantized model (INT8) over the pre-trained model (FP32) + - Performance speed up of the quantized model (INT8) + +## Install requirements -# Install requirements At this point it is assumed that you have already installed NNCF. You can find information on installation NNCF [here](https://github.com/openvinotoolkit/nncf#user-content-installation). To work with the example you should install the corresponding Python package dependencies: -``` + +```bash pip install -r requirements.txt ``` -# Run Example +## Run Example + It's pretty simple. The example does not require additional preparation. It will do the preparation itself, such as loading the dataset and model, etc. -``` + +```bash python main.py -``` \ No newline at end of file +``` diff --git a/examples/post_training_quantization/onnx/mobilenet_v2/main.py b/examples/post_training_quantization/onnx/mobilenet_v2/main.py index 3e4687b8081..007b537ceb6 100755 --- a/examples/post_training_quantization/onnx/mobilenet_v2/main.py +++ b/examples/post_training_quantization/onnx/mobilenet_v2/main.py @@ -122,7 +122,7 @@ def transform_fn(data_item): # item and prepare model input data. The quantize method uses a small subset # (default: 300 samples) of the calibration dataset. 
calibration_dataset = nncf.Dataset(val_loader, transform_fn) -quantized_model = nncf.quantize(model, calibration_dataset) +onnx_quantized_model = nncf.quantize(model, calibration_dataset) ############################################################################### # Benchmark performance and validate accuracy @@ -132,7 +132,7 @@ def transform_fn(data_item): print(f"[1/7] Save FP32 model: {fp32_model_path}") int8_model_path = f"{ROOT}/mobilenet_v2_int8.onnx" -onnx.save(quantized_model, int8_model_path) +onnx.save(onnx_quantized_model, int8_model_path) print(f"[2/7] Save INT8 model: {int8_model_path}") print("[3/7] Benchmark FP32 model:") diff --git a/examples/post_training_quantization/onnx/mobilenet_v2/requirements.txt b/examples/post_training_quantization/onnx/mobilenet_v2/requirements.txt index 07d361f6bda..90e7a225c68 100644 --- a/examples/post_training_quantization/onnx/mobilenet_v2/requirements.txt +++ b/examples/post_training_quantization/onnx/mobilenet_v2/requirements.txt @@ -4,4 +4,4 @@ scikit-learn fastdownload onnx~=1.13.1 onnxruntime~=1.14.1 -openvino-dev +openvino-dev==2023.0.1 diff --git a/examples/post_training_quantization/openvino/quantize_with_accuracy_control/README.md b/examples/post_training_quantization/openvino/anomaly_stfpm_quantize_with_accuracy_control/README.md similarity index 84% rename from examples/post_training_quantization/openvino/quantize_with_accuracy_control/README.md rename to examples/post_training_quantization/openvino/anomaly_stfpm_quantize_with_accuracy_control/README.md index e2ff0691c41..c425ecbdef0 100644 --- a/examples/post_training_quantization/openvino/quantize_with_accuracy_control/README.md +++ b/examples/post_training_quantization/openvino/anomaly_stfpm_quantize_with_accuracy_control/README.md @@ -7,27 +7,30 @@ This example demonstrates how to quantize [Student-Teacher Feature Pyramid Match The `nncf.quantize_with_accuracy_control()` method quantizes a model with a specified accuracy drop and the `max_drop` parameter is passed to specify the maximum absolute difference between the quantized and pre-trained model. The example includes the following steps: -- + - Loading the [MVTec (capsule category)](https://www.mvtec.com/company/research/datasets/mvtec-ad) dataset (~385 Mb) and the [STFPM OpenVINO model](https://huggingface.co/alexsu52/stfpm_mvtec_capsule) pretrained on this dataset. - Quantizing the model using NNCF Post-Training Quantization algorithm with accuracy control. - Output of the following characteristics of the quantized model: - - Accuracy drop between the quantized model (INT8) and the pre-trained model (FP32) - - Compression rate of the quantized model file size relative to the pre-trained model file size - - Performance speed up of the quantized model (INT8) + - Accuracy drop between the quantized model (INT8) and the pre-trained model (FP32) + - Compression rate of the quantized model file size relative to the pre-trained model file size + - Performance speed up of the quantized model (INT8) + +## Install requirements -# Install requirements At this point it is assumed that you have already installed NNCF. You can find information on installation NNCF [here](https://github.com/openvinotoolkit/nncf#user-content-installation). To work with the example you should install the corresponding Python package dependencies: -``` + +```bash pip install -r requirements.txt ``` -# Run Example +## Run Example + It's pretty simple. The example does not require additional preparation. 
It will do the preparation itself, such as loading the dataset and model, etc. -The maximum accuracy drop you can pass as a command line argument. F1 score is calculted in range [0,1] for STFPM. Thus if you want to specify the maximum accuracy drop between the quantized and pre-trained model of 0.5% you must specify 0.005 as a command line argument: +The maximum accuracy drop you can pass as a command line argument. F1 score is calculted in range [0,1] for STFPM. Thus if you want to specify the maximum accuracy drop between the quantized and pre-trained model of 0.1% you must specify 0.001 as a command line argument: +```bash +python main.py 0.001 ``` -python main.py 0.005 -``` \ No newline at end of file diff --git a/examples/post_training_quantization/openvino/quantize_with_accuracy_control/main.py b/examples/post_training_quantization/openvino/anomaly_stfpm_quantize_with_accuracy_control/main.py similarity index 79% rename from examples/post_training_quantization/openvino/quantize_with_accuracy_control/main.py rename to examples/post_training_quantization/openvino/anomaly_stfpm_quantize_with_accuracy_control/main.py index a86b1f6baf2..daaa491e4ba 100644 --- a/examples/post_training_quantization/openvino/quantize_with_accuracy_control/main.py +++ b/examples/post_training_quantization/openvino/anomaly_stfpm_quantize_with_accuracy_control/main.py @@ -16,7 +16,7 @@ import sys from functools import partial from pathlib import Path -from typing import Any, Dict, Iterable, List, Optional +from typing import Any, Dict, Iterable, List, Optional, Tuple import numpy as np import openvino.runtime as ov @@ -33,18 +33,19 @@ MODEL_INFO = download.DownloadInfo( name="stfpm_mvtec_capsule", url="https://huggingface.co/alexsu52/stfpm_mvtec_capsule/resolve/main/openvino_model.tar", - hash="0d15817bc56af80793de38c8a0b3fd9e", + hash="2005ef44eb701ad35e51417d196d8632", ) MODEL_PATH = HOME_PATH / ".cache/nncf/models/stfpm_mvtec_capsule" DATASET_INFO = download.DownloadInfo( name="mvtec_capsule", - url="https://www.mydrive.ch/shares/38536/3830184030e49fe74747669442f0f282/download/420937454-1629951595/capsule.tar.xz", + url="https://www.mydrive.ch/shares/38536/3830184030e49fe74747669442f0f282/" + "download/420937454-1629951595/capsule.tar.xz", hash="380afc46701c99cb7b9a928edbe16eb5", ) DATASET_PATH = HOME_PATH / ".cache/nncf/datasets/mvtec_capsule" -max_accuracy_drop = 0.005 if len(sys.argv) < 2 else float(sys.argv[1]) +max_accuracy_drop = 0.001 if len(sys.argv) < 2 else float(sys.argv[1]) def download_and_extract(root: Path, info: download.DownloadInfo) -> None: @@ -53,16 +54,19 @@ def download_and_extract(root: Path, info: download.DownloadInfo) -> None: def get_anomaly_images(data_loader: Iterable[Any]) -> List[Dict[str, torch.Tensor]]: - anomaly_images = [] + anomaly_images_ = [] for data_item in data_loader: if data_item["label"].int() == 1: - anomaly_images.append({"image": data_item["image"]}) - return anomaly_images + anomaly_images_.append({"image": data_item["image"]}) + return anomaly_images_ -def validate(model: ov.CompiledModel, val_loader: Iterable[Any], val_params: Dict[str, float]) -> float: +def validate( + model: ov.CompiledModel, val_loader: Iterable[Any], val_params: Dict[str, float] +) -> Tuple[float, List[float]]: metric = create_metric_collection(["F1Score"], prefix="image_")["F1Score"] metric.threshold = 0.5 + per_sample_metric_values = [] output = model.outputs[0] @@ -71,12 +75,16 @@ def validate(model: ov.CompiledModel, val_loader: Iterable[Any], val_params: Dic anomaly_maps = 
model(batch["image"])[output] pred_scores = np.max(anomaly_maps, axis=(1, 2, 3)) pred_scores = normalize(pred_scores, val_params["image_threshold"], val_params["min"], val_params["max"]) - metric.update(torch.from_numpy(pred_scores), batch["label"].int()) + pred_label = 1 if pred_scores > metric.threshold else 0 + groundtruth_label = batch["label"].int() + per_sample_metric = 1.0 if pred_label == groundtruth_label else 0.0 + per_sample_metric_values.append(per_sample_metric) + metric.update(torch.from_numpy(pred_scores), groundtruth_label) counter += 1 metric_value = metric.compute() - print(f"Validate: dataset lenght = {counter}, " f"metric value = {metric_value:.3f}") - return metric_value + print(f"Validate: dataset length = {counter}, metric value = {metric_value:.3f}") + return metric_value, per_sample_metric_values def run_benchmark(model_path: str, shape: Optional[List[int]] = None, verbose: bool = True) -> float: @@ -118,9 +126,9 @@ def get_model_size(ir_path: str, m_type: str = "Mb", verbose: bool = True) -> fl test_loader = datamodule.test_dataloader() download_and_extract(MODEL_PATH, MODEL_INFO) -model = ov.Core().read_model(MODEL_PATH / "stfpm_capsule.xml") +ov_model = ov.Core().read_model(MODEL_PATH / "stfpm_capsule.xml") -with open(MODEL_PATH / "meta_data_stfpm_capsule.json", "r") as f: +with open(MODEL_PATH / "meta_data_stfpm_capsule.json", "r", encoding="utf-8") as f: validation_params = json.load(f) ############################################################################### @@ -145,8 +153,8 @@ def transform_fn(data_item): validation_fn = partial(validate, val_params=validation_params) validation_dataset = nncf.Dataset(test_loader, transform_fn) -quantized_model = nncf.quantize_with_accuracy_control( - model=model, +ov_quantized_model = nncf.quantize_with_accuracy_control( + model=ov_model, calibration_dataset=calibration_dataset, validation_dataset=validation_dataset, validation_fn=validation_fn, @@ -157,12 +165,12 @@ def transform_fn(data_item): # Benchmark performance, calculate compression rate and validate accuracy fp32_ir_path = f"{ROOT}/stfpm_fp32.xml" -ov.serialize(model, fp32_ir_path) +ov.serialize(ov_model, fp32_ir_path) print(f"[1/7] Save FP32 model: {fp32_ir_path}") fp32_size = get_model_size(fp32_ir_path, verbose=True) int8_ir_path = f"{ROOT}/stfpm_int8.xml" -ov.serialize(quantized_model, int8_ir_path) +ov.serialize(ov_quantized_model, int8_ir_path) print(f"[2/7] Save INT8 model: {int8_ir_path}") int8_size = get_model_size(int8_ir_path, verbose=True) @@ -172,13 +180,13 @@ def transform_fn(data_item): int8_fps = run_benchmark(int8_ir_path, shape=[1, 3, 256, 256], verbose=True) print("[5/7] Validate OpenVINO FP32 model:") -compiled_model = ov.compile_model(model) -fp32_top1 = validate(compiled_model, test_loader, validation_params) +compiled_model = ov.compile_model(ov_model) +fp32_top1, _ = validate(compiled_model, test_loader, validation_params) print(f"Accuracy @ top1: {fp32_top1:.3f}") print("[6/7] Validate OpenVINO INT8 model:") -quantized_compiled_model = ov.compile_model(quantized_model) -int8_top1 = validate(quantized_compiled_model, test_loader, validation_params) +quantized_compiled_model = ov.compile_model(ov_quantized_model) +int8_top1, _ = validate(quantized_compiled_model, test_loader, validation_params) print(f"Accuracy @ top1: {int8_top1:.3f}") print("[7/7] Report:") diff --git a/examples/post_training_quantization/openvino/anomaly_stfpm_quantize_with_accuracy_control/requirements.txt 
b/examples/post_training_quantization/openvino/anomaly_stfpm_quantize_with_accuracy_control/requirements.txt new file mode 100644 index 00000000000..ca02eb77f09 --- /dev/null +++ b/examples/post_training_quantization/openvino/anomaly_stfpm_quantize_with_accuracy_control/requirements.txt @@ -0,0 +1,2 @@ +anomalib==0.6.0 +openvino-dev==2023.0.1 diff --git a/examples/post_training_quantization/openvino/mobilenet_v2/README.md b/examples/post_training_quantization/openvino/mobilenet_v2/README.md index 180012993de..65b6797d156 100644 --- a/examples/post_training_quantization/openvino/mobilenet_v2/README.md +++ b/examples/post_training_quantization/openvino/mobilenet_v2/README.md @@ -1,25 +1,30 @@ # Post-Training Quantization of MobileNet v2 OpenVINO Model -This example demonstrates how to use Post-Training Quantization API from Neural Network Compression Framework (NNCF) to quantize OpenVINO models on the example of [MobileNet v2](https://huggingface.co/alexsu52/mobilenet_v2_imagenette) quantization, pretrained on [Imagenette](https://github.com/fastai/imagenette) dataset. +This example demonstrates how to use Post-Training Quantization API from Neural Network Compression Framework (NNCF) to quantize OpenVINO models on the example of [MobileNet v2](https://huggingface.co/alexsu52/mobilenet_v2_imagenette) quantization, pretrained on [Imagenette](https://github.com/fastai/imagenette) dataset. The example includes the following steps: + - Loading the [Imagenette](https://github.com/fastai/imagenette) dataset (~340 Mb) and the [MobileNet v2 OpenVINO model](https://huggingface.co/alexsu52/mobilenet_v2_imagenette) pretrained on this dataset. - Quantizing the model using NNCF Post-Training Quantization algorithm. - Output of the following characteristics of the quantized model: - - Accuracy drop of the quantized model (INT8) over the pre-trained model (FP32) - - Compression rate of the quantized model file size relative to the pre-trained model file size - - Performance speed up of the quantized model (INT8) + - Accuracy drop of the quantized model (INT8) over the pre-trained model (FP32) + - Compression rate of the quantized model file size relative to the pre-trained model file size + - Performance speed up of the quantized model (INT8) + +## Install requirements -# Install requirements At this point it is assumed that you have already installed NNCF. You can find information on installation NNCF [here](https://github.com/openvinotoolkit/nncf#user-content-installation). To work with the example you should install the corresponding Python package dependencies: -``` + +```bash pip install -r requirements.txt ``` -# Run Example +## Run Example + It's pretty simple. The example does not require additional preparation. It will do the preparation itself, such as loading the dataset and model, etc. 
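If a condensed view helps before reading `main.py` in the diff below, the example essentially performs the following steps. Note that `val_data_loader` (an Imagenette validation `DataLoader`) and the IR file names are assumptions made for this sketch, not the exact objects the script builds:

```python
import openvino.runtime as ov

import nncf

# FP32 IR downloaded by the example; the path here is illustrative.
ov_model = ov.Core().read_model("mobilenet_v2_fp32.xml")


def transform_fn(data_item):
    images, _ = data_item  # drop the labels, keep the image batch
    return images


# val_data_loader is assumed to be the Imagenette validation DataLoader.
calibration_dataset = nncf.Dataset(val_data_loader, transform_fn)
ov_quantized_model = nncf.quantize(ov_model, calibration_dataset)

# Save the INT8 IR next to the FP32 one for benchmarking and validation.
ov.serialize(ov_quantized_model, "mobilenet_v2_int8.xml")
```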
-``` + +```bash python main.py -``` \ No newline at end of file +``` diff --git a/examples/post_training_quantization/openvino/mobilenet_v2/main.py b/examples/post_training_quantization/openvino/mobilenet_v2/main.py index c7890322f3b..2cc6ab0329f 100644 --- a/examples/post_training_quantization/openvino/mobilenet_v2/main.py +++ b/examples/post_training_quantization/openvino/mobilenet_v2/main.py @@ -100,20 +100,19 @@ def get_model_size(ir_path: str, m_type: str = "Mb", verbose: bool = True) -> fl ] ), ) -val_loader = torch.utils.data.DataLoader(val_dataset, batch_size=1, shuffle=False) +val_data_loader = torch.utils.data.DataLoader(val_dataset, batch_size=1, shuffle=False) -model_path = download(MODEL_URL, MODEL_PATH) -model = ov.Core().read_model(model_path / "mobilenet_v2_fp32.xml") +path_to_model = download(MODEL_URL, MODEL_PATH) +ov_model = ov.Core().read_model(path_to_model / "mobilenet_v2_fp32.xml") ############################################################################### # Quantize an OpenVINO model -""" -The transformation function transforms a data item into model input data. - -To validate the transform function use the following code: ->> for data_item in val_loader: ->> model(transform_fn(data_item)) -""" +# +# The transformation function transforms a data item into model input data. +# +# To validate the transform function use the following code: +# >> for data_item in val_loader: +# >> model(transform_fn(data_item)) def transform_fn(data_item): @@ -121,30 +120,29 @@ def transform_fn(data_item): return images -""" -The calibration dataset is a small, no label, representative dataset -(~100-500 samples) that is used to estimate the range, i.e. (min, max) of all -floating point activation tensors in the model, to initialize the quantization -parameters. +# The calibration dataset is a small, no label, representative dataset +# (~100-500 samples) that is used to estimate the range, i.e. (min, max) of all +# floating point activation tensors in the model, to initialize the quantization +# parameters. +# +# The easiest way to define a calibration dataset is to use a training or +# validation dataset and a transformation function to remove labels from the data +# item and prepare model input data. The quantize method uses a small subset +# (default: 300 samples) of the calibration dataset. -The easiest way to define a calibration dataset is to use a training or -validation dataset and a transformation function to remove labels from the data -item and prepare model input data. The quantize method uses a small subset -(default: 300 samples) of the calibration dataset. 
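As an aside, the check suggested above can be wrapped in a short loop, and the number of calibration samples drawn by `nncf.quantize()` can be raised or lowered from its default of 300 through the `subset_size` argument. A minimal sketch, reusing the `ov_model`, `val_data_loader`, and `transform_fn` names from this script:

```python
import openvino.runtime as ov

import nncf

# Sanity check: a transformed data item should run through the model
# without shape or dtype errors before quantization starts.
compiled = ov.compile_model(ov_model)
for data_item in val_data_loader:
    compiled(transform_fn(data_item))
    break  # one batch is enough to confirm the transform works

calibration_dataset = nncf.Dataset(val_data_loader, transform_fn)
# subset_size bounds how many calibration samples are actually used.
ov_quantized_model = nncf.quantize(ov_model, calibration_dataset, subset_size=300)
```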
-""" -calibration_dataset = nncf.Dataset(val_loader, transform_fn) -quantized_model = nncf.quantize(model, calibration_dataset) +calibration_dataset = nncf.Dataset(val_data_loader, transform_fn) +ov_quantized_model = nncf.quantize(ov_model, calibration_dataset) ############################################################################### # Benchmark performance, calculate compression rate and validate accuracy fp32_ir_path = f"{ROOT}/mobilenet_v2_fp32.xml" -ov.serialize(model, fp32_ir_path) +ov.serialize(ov_model, fp32_ir_path) print(f"[1/7] Save FP32 model: {fp32_ir_path}") fp32_model_size = get_model_size(fp32_ir_path, verbose=True) int8_ir_path = f"{ROOT}/mobilenet_v2_int8.xml" -ov.serialize(quantized_model, int8_ir_path) +ov.serialize(ov_quantized_model, int8_ir_path) print(f"[2/7] Save INT8 model: {int8_ir_path}") int8_model_size = get_model_size(int8_ir_path, verbose=True) @@ -154,11 +152,11 @@ def transform_fn(data_item): int8_fps = run_benchmark(int8_ir_path, shape=[1, 3, 224, 224], verbose=True) print("[5/7] Validate OpenVINO FP32 model:") -fp32_top1 = validate(model, val_loader) +fp32_top1 = validate(ov_model, val_data_loader) print(f"Accuracy @ top1: {fp32_top1:.3f}") print("[6/7] Validate OpenVINO INT8 model:") -int8_top1 = validate(quantized_model, val_loader) +int8_top1 = validate(ov_quantized_model, val_data_loader) print(f"Accuracy @ top1: {int8_top1:.3f}") print("[7/7] Report:") diff --git a/examples/post_training_quantization/openvino/mobilenet_v2/requirements.txt b/examples/post_training_quantization/openvino/mobilenet_v2/requirements.txt index 1f5b24e12d1..76ff07f90a8 100644 --- a/examples/post_training_quantization/openvino/mobilenet_v2/requirements.txt +++ b/examples/post_training_quantization/openvino/mobilenet_v2/requirements.txt @@ -2,4 +2,4 @@ torchvision tqdm scikit-learn fastdownload -openvino-dev~=2022.3.0.dev \ No newline at end of file +openvino-dev==2023.0.1 \ No newline at end of file diff --git a/examples/post_training_quantization/openvino/quantize_with_accuracy_control/requirements.txt b/examples/post_training_quantization/openvino/quantize_with_accuracy_control/requirements.txt deleted file mode 100644 index 15c58531b66..00000000000 --- a/examples/post_training_quantization/openvino/quantize_with_accuracy_control/requirements.txt +++ /dev/null @@ -1,2 +0,0 @@ -anomalib@ git+https://github.com/openvinotoolkit/anomalib@d0b8ce8ea100bbdfb058f00a3b246f257a08da93#egg=anomalib -openvino-dev diff --git a/examples/post_training_quantization/openvino/yolov8/README.md b/examples/post_training_quantization/openvino/yolov8/README.md index 4b21899c46f..f861b3c5b15 100644 --- a/examples/post_training_quantization/openvino/yolov8/README.md +++ b/examples/post_training_quantization/openvino/yolov8/README.md @@ -1,28 +1,37 @@ # Post-Training Quantization of YOLOv8 OpenVINO Model -This example demonstrates how to use Post-Training Quantization API from Neural Network Compression Framework (NNCF) to quantize YOLOv8n model. +This example demonstrates how to use Post-Training Quantization API from Neural Network Compression Framework (NNCF) to quantize YOLOv8n model. The example includes the following steps: + - Download and prepare COCO-128 dataset. - Quantize the model with NNCF Post-Training Quantization algorithm. - Measure accuracy and performance of the floating-point and quantized models. 
-# Install requirements +## Install requirements + To run the example you should install the corresponding Python dependencies: + - Install NNCF from source: -``` + +```bash pip install ../../../../ ``` + - Install 3rd party dependencies: -``` + +```bash pip install -r requirements.txt ``` -# Run Example +## Run Example + The example is fully automated. Just run the following comman in the prepared Python environment: -``` + +```bash python main.py ``` ## See also + - [YOLOv8 Jupyter notebook](https://github.com/openvinotoolkit/openvino_notebooks/tree/main/notebooks/230-yolov8-optimization) diff --git a/examples/post_training_quantization/openvino/yolov8/main.py b/examples/post_training_quantization/openvino/yolov8/main.py index dbc4355ca15..f20730970f6 100644 --- a/examples/post_training_quantization/openvino/yolov8/main.py +++ b/examples/post_training_quantization/openvino/yolov8/main.py @@ -17,16 +17,19 @@ import openvino.runtime as ov import torch from tqdm import tqdm -from ultralytics import YOLO -from ultralytics.yolo.configs import get_config -from ultralytics.yolo.data.utils import check_dataset_yaml -from ultralytics.yolo.engine.validator import BaseValidator as Validator -from ultralytics.yolo.utils import DEFAULT_CONFIG -from ultralytics.yolo.utils import ops -from ultralytics.yolo.utils.metrics import ConfusionMatrix +from ultralytics.cfg import get_cfg +from ultralytics.data.converter import coco80_to_coco91_class +from ultralytics.data.utils import check_det_dataset +from ultralytics.engine.validator import BaseValidator as Validator +from ultralytics.models.yolo import YOLO +from ultralytics.utils import DATASETS_DIR +from ultralytics.utils import DEFAULT_CFG +from ultralytics.utils.metrics import ConfusionMatrix import nncf +ROOT = Path(__file__).parent.resolve() + def validate( model: ov.Model, data_loader: torch.utils.data.DataLoader, validator: Validator, num_samples: int = None @@ -63,17 +66,17 @@ def print_statistics(stats: np.ndarray, total_images: int, total_objects: int) - def prepare_validation(model: YOLO, args: Any) -> Tuple[Validator, torch.utils.data.DataLoader]: - data = check_dataset_yaml(args.data) - dataset = data["val"] + validator = model.smart_load("validator")(args) + validator.data = check_det_dataset(args.data) + dataset = validator.data["val"] print(f"{dataset}") - validator = model.ValidatorClass(args) - data_loader = validator.get_dataloader("../datasets/coco128", 1) + data_loader = validator.get_dataloader(f"{DATASETS_DIR}/coco128", 1) - validator = model.ValidatorClass(args) + validator = model.smart_load("validator")(args) validator.is_coco = True - validator.class_map = ops.coco80_to_coco91_class() + validator.class_map = coco80_to_coco91_class() validator.names = model.model.names validator.metrics.names = validator.names validator.nc = model.model.model[-1].nc @@ -91,7 +94,7 @@ def benchmark_performance(model_path, config) -> float: def prepare_openvino_model(model: YOLO, model_name: str) -> Tuple[ov.Model, Path]: - model_path = Path(f"{model_name}_openvino_model/{model_name}.xml") + model_path = Path(f"{ROOT}/{model_name}_openvino_model/{model_name}.xml") if not model_path.exists(): model.export(format="openvino", dynamic=True, half=False) @@ -142,8 +145,8 @@ def transform_fn(data_item: Dict): def main(): MODEL_NAME = "yolov8n" - model = YOLO(f"{MODEL_NAME}.pt") - args = get_config(config=DEFAULT_CONFIG) + model = YOLO(f"{ROOT}/{MODEL_NAME}.pt") + args = get_cfg(cfg=DEFAULT_CFG) args.data = "coco128.yaml" # Prepare validation dataset and 
helper @@ -154,7 +157,7 @@ def main(): # Quantize mode in OpenVINO representation quantized_model = quantize(ov_model, data_loader, validator) - quantized_model_path = Path(f"{MODEL_NAME}_openvino_model/{MODEL_NAME}_quantized.xml") + quantized_model_path = Path(f"{ROOT}/{MODEL_NAME}_openvino_model/{MODEL_NAME}_quantized.xml") ov.serialize(quantized_model, str(quantized_model_path)) # Validate FP32 model diff --git a/examples/post_training_quantization/openvino/yolov8/requirements.txt b/examples/post_training_quantization/openvino/yolov8/requirements.txt index eeed0ae5507..bcbac83cbd1 100644 --- a/examples/post_training_quantization/openvino/yolov8/requirements.txt +++ b/examples/post_training_quantization/openvino/yolov8/requirements.txt @@ -1,3 +1,3 @@ -ultralytics==8.0.5 -onnx -openvino-dev \ No newline at end of file +ultralytics==8.0.170 +onnx>=1.12.0 +openvino-dev==2023.0.1 \ No newline at end of file diff --git a/examples/post_training_quantization/openvino/yolov8_quantize_with_accuracy_control/README.md b/examples/post_training_quantization/openvino/yolov8_quantize_with_accuracy_control/README.md index 2458f52f52d..f5649efbdf8 100644 --- a/examples/post_training_quantization/openvino/yolov8_quantize_with_accuracy_control/README.md +++ b/examples/post_training_quantization/openvino/yolov8_quantize_with_accuracy_control/README.md @@ -8,25 +8,29 @@ The example includes the following steps: - Quantize the model with "AccuracyAwareQuantization" algorithm instead of "DefaultQuantization". - Measure accuracy and performance of the floating-point and quantized models. -# Install requirements +## Install requirements To run the example you should install the corresponding Python dependencies: + - Install NNCF from source: -``` -git clone https://github.com/openvinotoolkit/nncf.git -cd nncf -pip install . -``` + + ```bash + git clone https://github.com/openvinotoolkit/nncf.git + cd nncf + pip install . + ``` + - Install 3rd party dependencies of this example: -``` -pip install -r requirements.txt -``` -# Run Example + ```bash + pip install -r requirements.txt + ``` + +## Run Example The example is fully automated. 
Just run the following command in the prepared Python environment: -``` +```bash python main.py ``` diff --git a/examples/post_training_quantization/openvino/yolov8_quantize_with_accuracy_control/main.py b/examples/post_training_quantization/openvino/yolov8_quantize_with_accuracy_control/main.py index 345c432bc6a..a6e17830289 100644 --- a/examples/post_training_quantization/openvino/yolov8_quantize_with_accuracy_control/main.py +++ b/examples/post_training_quantization/openvino/yolov8_quantize_with_accuracy_control/main.py @@ -18,16 +18,20 @@ import openvino.runtime as ov import torch from tqdm import tqdm -from ultralytics import YOLO -from ultralytics.yolo.configs import get_config -from ultralytics.yolo.data.utils import check_dataset_yaml -from ultralytics.yolo.engine.validator import BaseValidator as Validator -from ultralytics.yolo.utils import DEFAULT_CONFIG -from ultralytics.yolo.utils import ops -from ultralytics.yolo.utils.metrics import ConfusionMatrix +from ultralytics.cfg import get_cfg +from ultralytics.data.converter import coco80_to_coco91_class +from ultralytics.data.utils import check_det_dataset +from ultralytics.engine.validator import BaseValidator as Validator +from ultralytics.models.yolo import YOLO +from ultralytics.utils import DATASETS_DIR +from ultralytics.utils import DEFAULT_CFG +from ultralytics.utils import ops +from ultralytics.utils.metrics import ConfusionMatrix import nncf +ROOT = Path(__file__).parent.resolve() + def validate( model: ov.Model, data_loader: torch.utils.data.DataLoader, validator: Validator, num_samples: int = None @@ -50,7 +54,7 @@ def validate( else: preds = [ torch.from_numpy(results[compiled_model.output(0)]), - [torch.from_numpy(results[compiled_model.output(1)])], + torch.from_numpy(results[compiled_model.output(1)]), ] preds = validator.postprocess(preds) validator.update_metrics(preds, batch) @@ -88,17 +92,17 @@ def print_statistics(stats: np.ndarray, total_images: int, total_objects: int) - def prepare_validation(model: YOLO, args: Any) -> Tuple[Validator, torch.utils.data.DataLoader]: - data = check_dataset_yaml(args.data) - dataset = data["val"] + validator = model.smart_load("validator")(args) + validator.data = check_det_dataset(args.data) + dataset = validator.data["val"] print(f"{dataset}") - validator = model.ValidatorClass(args) - data_loader = validator.get_dataloader("../datasets/coco128-seg", 1) + data_loader = validator.get_dataloader(f"{DATASETS_DIR}/coco128-seg", 1) - validator = model.ValidatorClass(args) + validator = model.smart_load("validator")(args) validator.is_coco = True - validator.class_map = ops.coco80_to_coco91_class() + validator.class_map = coco80_to_coco91_class() validator.names = model.model.names validator.metrics.names = validator.names validator.nc = model.model.model[-1].nc @@ -119,7 +123,7 @@ def benchmark_performance(model_path, config) -> float: def prepare_openvino_model(model: YOLO, model_name: str) -> Tuple[ov.Model, Path]: - model_path = Path(f"{model_name}_openvino_model/{model_name}.xml") + model_path = Path(f"{ROOT}/{model_name}_openvino_model/{model_name}.xml") if not model_path.exists(): model.export(format="openvino", dynamic=True, half=False) @@ -156,7 +160,7 @@ def validation_ac( else: preds = [ torch.from_numpy(results[compiled_model.output(0)]), - [torch.from_numpy(results[compiled_model.output(1)])], + torch.from_numpy(results[compiled_model.output(1)]), ] preds = validator.postprocess(preds) validator.update_metrics(preds, batch) @@ -166,7 +170,7 @@ def validation_ac( 
stats_metrics = stats["metrics/mAP50-95(B)"] else: stats_metrics = stats["metrics/mAP50-95(M)"] - print(f"Validate: dataset lenght = {counter}, " f"metric value = {stats_metrics:.3f}") + print(f"Validate: dataset length = {counter}, metric value = {stats_metrics:.3f}") return stats_metrics quantization_dataset = nncf.Dataset(data_loader, transform_fn) @@ -204,8 +208,8 @@ def validation_ac( def main(): MODEL_NAME = "yolov8n-seg" - model = YOLO(f"{MODEL_NAME}.pt") - args = get_config(config=DEFAULT_CONFIG) + model = YOLO(f"{ROOT}/{MODEL_NAME}.pt") + args = get_cfg(cfg=DEFAULT_CFG) args.data = "coco128-seg.yaml" # Prepare validation dataset and helper @@ -217,7 +221,7 @@ def main(): # Quantize mode in OpenVINO representation quantized_model = quantize_ac(ov_model, data_loader, validator) - quantized_model_path = Path(f"{MODEL_NAME}_openvino_model/{MODEL_NAME}_quantized.xml") + quantized_model_path = Path(f"{ROOT}/{MODEL_NAME}_openvino_model/{MODEL_NAME}_quantized.xml") ov.serialize(quantized_model, str(quantized_model_path)) # Validate FP32 model diff --git a/examples/post_training_quantization/openvino/yolov8_quantize_with_accuracy_control/requirements.txt b/examples/post_training_quantization/openvino/yolov8_quantize_with_accuracy_control/requirements.txt index eeed0ae5507..bcbac83cbd1 100644 --- a/examples/post_training_quantization/openvino/yolov8_quantize_with_accuracy_control/requirements.txt +++ b/examples/post_training_quantization/openvino/yolov8_quantize_with_accuracy_control/requirements.txt @@ -1,3 +1,3 @@ -ultralytics==8.0.5 -onnx -openvino-dev \ No newline at end of file +ultralytics==8.0.170 +onnx>=1.12.0 +openvino-dev==2023.0.1 \ No newline at end of file diff --git a/examples/post_training_quantization/tensorflow/mobilenet_v2/README.md b/examples/post_training_quantization/tensorflow/mobilenet_v2/README.md index 6a599e5d8e1..567b1503c8a 100644 --- a/examples/post_training_quantization/tensorflow/mobilenet_v2/README.md +++ b/examples/post_training_quantization/tensorflow/mobilenet_v2/README.md @@ -1,25 +1,30 @@ # Post-Training Quantization of MobileNet v2 TensorFlow Model -This example demonstrates how to use Post-Training Quantization API from Neural Network Compression Framework (NNCF) to quantize TensorFlow models on the example of [MobileNet v2](https://huggingface.co/alexsu52/mobilenet_v2_imagenette) quantization, pretrained on [Imagenette](https://github.com/fastai/imagenette) dataset. +This example demonstrates how to use Post-Training Quantization API from Neural Network Compression Framework (NNCF) to quantize TensorFlow models on the example of [MobileNet v2](https://huggingface.co/alexsu52/mobilenet_v2_imagenette) quantization, pretrained on [Imagenette](https://github.com/fastai/imagenette) dataset. The example includes the following steps: + - Loading the [Imagenette](https://github.com/fastai/imagenette) dataset (~340 Mb) and the [MobileNet v2 TensorFlow model](https://huggingface.co/alexsu52/mobilenet_v2_imagenette) pretrained on this dataset. - Quantizing the model using NNCF Post-Training Quantization algorithm. 
- Output of the following characteristics of the quantized model: - - Accuracy drop of the quantized model (INT8) over the pre-trained model (FP32) - - Compression rate of the quantized model file size relative to the pre-trained model file size - - Performance speed up of the quantized model (INT8) + - Accuracy drop of the quantized model (INT8) over the pre-trained model (FP32) + - Compression rate of the quantized model file size relative to the pre-trained model file size + - Performance speed up of the quantized model (INT8) + +## Install requirements -# Install requirements At this point it is assumed that you have already installed NNCF. You can find information on installation NNCF [here](https://github.com/openvinotoolkit/nncf#user-content-installation). To work with the example you should install the corresponding Python package dependencies: -``` + +```bash pip install -r requirements.txt ``` -# Run Example +## Run Example + It's pretty simple. The example does not require additional preparation. It will do the preparation itself, such as loading the dataset and model, etc. -``` + +```bash python main.py -``` \ No newline at end of file +``` diff --git a/examples/post_training_quantization/tensorflow/mobilenet_v2/main.py b/examples/post_training_quantization/tensorflow/mobilenet_v2/main.py index 29b7f92a636..03f9c3b8e7c 100644 --- a/examples/post_training_quantization/tensorflow/mobilenet_v2/main.py +++ b/examples/post_training_quantization/tensorflow/mobilenet_v2/main.py @@ -35,7 +35,7 @@ def validate(model: ov.Model, val_loader: tf.data.Dataset) -> tf.Tensor: metric = tf.keras.metrics.CategoricalAccuracy(name="acc@1") for images, labels in tqdm(val_loader): - pred = compiled_model(images)[output] + pred = compiled_model(images.numpy())[output] metric.update_state(labels, pred) return metric.result() @@ -114,18 +114,16 @@ def preprocess_for_eval(image, label): val_dataset = val_dataset.map(preprocess_for_eval).batch(128) weights_path = data_utils.get_file("mobilenet_v2_imagenette_weights.h5", WEIGHTS_URL, cache_subdir="models") -model = tf.keras.applications.MobileNetV2(weights=weights_path, classes=DATASET_CLASSES) +tf_model = tf.keras.applications.MobileNetV2(weights=weights_path, classes=DATASET_CLASSES) ############################################################################### # Quantize a Tensorflow model - -""" -The transformation function transforms a data item into model input data. - -To validate the transform function use the following code: ->> for data_item in val_loader: ->> model(transform_fn(data_item)) -""" +# +# The transformation function transforms a data item into model input data. +# +# To validate the transform function use the following code: +# >> for data_item in val_loader: +# >> model(transform_fn(data_item)) def transform_fn(data_item): @@ -133,25 +131,24 @@ def transform_fn(data_item): return images -""" -The calibration dataset is a small, no label, representative dataset -(~100-500 samples) that is used to estimate the range, i.e. (min, max) of all -floating point activation tensors in the model, to initialize the quantization -parameters. +# The calibration dataset is a small, no label, representative dataset +# (~100-500 samples) that is used to estimate the range, i.e. (min, max) of all +# floating point activation tensors in the model, to initialize the quantization +# parameters. 
+# +# The easiest way to define a calibration dataset is to use a training or +# validation dataset and a transformation function to remove labels from the data +# item and prepare model input data. The quantize method uses a small subset +# (default: 300 samples) of the calibration dataset. -The easiest way to define a calibration dataset is to use a training or -validation dataset and a transformation function to remove labels from the data -item and prepare model input data. The quantize method uses a small subset -(default: 300 samples) of the calibration dataset. -""" calibration_dataset = nncf.Dataset(val_dataset, transform_fn) -quantized_model = nncf.quantize(model, calibration_dataset) +tf_quantized_model = nncf.quantize(tf_model, calibration_dataset) ############################################################################### # Benchmark performance, calculate compression rate and validate accuracy -ov_model = mo.convert_model(model) -ov_quantized_model = mo.convert_model(quantized_model) +ov_model = mo.convert_model(tf_model) +ov_quantized_model = mo.convert_model(tf_quantized_model) fp32_ir_path = f"{ROOT}/mobilenet_v2_fp32.xml" ov.serialize(ov_model, fp32_ir_path) diff --git a/examples/post_training_quantization/tensorflow/mobilenet_v2/requirements.txt b/examples/post_training_quantization/tensorflow/mobilenet_v2/requirements.txt index 39300c5f1bb..1aa43b1667f 100644 --- a/examples/post_training_quantization/tensorflow/mobilenet_v2/requirements.txt +++ b/examples/post_training_quantization/tensorflow/mobilenet_v2/requirements.txt @@ -1,4 +1,4 @@ -tensorflow~=2.11.1 +tensorflow>=2.7.0,<2.12 tensorflow-datasets tqdm -openvino-dev~=2022.3.0.dev +openvino-dev==2023.0.1 diff --git a/examples/post_training_quantization/torch/mobilenet_v2/README.md b/examples/post_training_quantization/torch/mobilenet_v2/README.md index 7e9ecf43de1..fbf2ecbe8ea 100644 --- a/examples/post_training_quantization/torch/mobilenet_v2/README.md +++ b/examples/post_training_quantization/torch/mobilenet_v2/README.md @@ -1,25 +1,30 @@ # Post-Training Quantization of MobileNet v2 PyTorch Model -This example demonstrates how to use Post-Training Quantization API from Neural Network Compression Framework (NNCF) to quantize PyTorch models on the example of [MobileNet v2](https://huggingface.co/alexsu52/mobilenet_v2_imagenette) quantization, pretrained on [Imagenette](https://github.com/fastai/imagenette) dataset. +This example demonstrates how to use Post-Training Quantization API from Neural Network Compression Framework (NNCF) to quantize PyTorch models on the example of [MobileNet v2](https://huggingface.co/alexsu52/mobilenet_v2_imagenette) quantization, pretrained on [Imagenette](https://github.com/fastai/imagenette) dataset. The example includes the following steps: + - Loading the [Imagenette](https://github.com/fastai/imagenette) dataset (~340 Mb) and the [MobileNet v2 PyTorch model](https://huggingface.co/alexsu52/mobilenet_v2_imagenette) pretrained on this dataset. - Quantizing the model using NNCF Post-Training Quantization algorithm. 
- Output of the following characteristics of the quantized model: - - Accuracy drop of the quantized model (INT8) over the pre-trained model (FP32) - - Compression rate of the quantized model file size relative to the pre-trained model file size - - Performance speed up of the quantized model (INT8) + - Accuracy drop of the quantized model (INT8) over the pre-trained model (FP32) + - Compression rate of the quantized model file size relative to the pre-trained model file size + - Performance speed up of the quantized model (INT8) + +## Install requirements -# Install requirements At this point it is assumed that you have already installed NNCF. You can find information on installation NNCF [here](https://github.com/openvinotoolkit/nncf#user-content-installation). To work with the example you should install the corresponding Python package dependencies: -``` + +```bash pip install -r requirements.txt ``` -# Run Example +## Run Example + It's pretty simple. The example does not require additional preparation. It will do the preparation itself, such as loading the dataset and model, etc. -``` + +```bash python main.py -``` \ No newline at end of file +``` diff --git a/examples/post_training_quantization/torch/mobilenet_v2/main.py b/examples/post_training_quantization/torch/mobilenet_v2/main.py index acb7c9844ce..9297d5cf94f 100644 --- a/examples/post_training_quantization/torch/mobilenet_v2/main.py +++ b/examples/post_training_quantization/torch/mobilenet_v2/main.py @@ -107,22 +107,20 @@ def get_model_size(ir_path: str, m_type: str = "Mb", verbose: bool = True) -> fl ] ), ) -val_loader = torch.utils.data.DataLoader(val_dataset, batch_size=128, num_workers=4, shuffle=False) +val_data_loader = torch.utils.data.DataLoader(val_dataset, batch_size=128, num_workers=4, shuffle=False) -model = models.mobilenet_v2(num_classes=DATASET_CLASSES) -model.eval() -model = load_checkpoint(model) +torch_model = models.mobilenet_v2(num_classes=DATASET_CLASSES) +torch_model.eval() +torch_model = load_checkpoint(torch_model) ############################################################################### # Quantize a PyTorch model -""" -The transformation function transforms a data item into model input data. - -To validate the transform function use the following code: ->> for data_item in val_loader: ->> model(transform_fn(data_item)) -""" +# The transformation function transforms a data item into model input data. +# +# To validate the transform function use the following code: +# >> for data_item in val_loader: +# >> model(transform_fn(data_item)) def transform_fn(data_item): @@ -130,25 +128,49 @@ def transform_fn(data_item): return images -""" -The calibration dataset is a small, no label, representative dataset -(~100-500 samples) that is used to estimate the range, i.e. (min, max) of all -floating point activation tensors in the model, to initialize the quantization -parameters. +# The calibration dataset is a small, no label, representative dataset +# (~100-500 samples) that is used to estimate the range, i.e. (min, max) of all +# floating point activation tensors in the model, to initialize the quantization +# parameters. + +# The easiest way to define a calibration dataset is to use a training or +# validation dataset and a transformation function to remove labels from the data +# item and prepare model input data. The quantize method uses a small subset +# (default: 300 samples) of the calibration dataset. 
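One extra note for the PyTorch backend, sketched below under the assumption that the model could sit on a GPU: the transform function must return tensors on the same device the model runs on (this script stays on CPU, so it simply returns the image batch unchanged):

```python
import nncf

# torch_model and val_data_loader are the objects defined earlier in this script.
device = next(torch_model.parameters()).device


def transform_fn(data_item):
    images, _ = data_item       # drop the labels
    return images.to(device)    # calibration inputs must sit on the model's device


calibration_dataset = nncf.Dataset(val_data_loader, transform_fn)
```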
-The easiest way to define a calibration dataset is to use a training or -validation dataset and a transformation function to remove labels from the data -item and prepare model input data. The quantize method uses a small subset -(default: 300 samples) of the calibration dataset. -""" -calibration_dataset = nncf.Dataset(val_loader, transform_fn) -quantized_model = nncf.quantize(model, calibration_dataset) +calibration_dataset = nncf.Dataset(val_data_loader, transform_fn) +torch_quantized_model = nncf.quantize( + torch_model, + calibration_dataset, + advanced_parameters=nncf.AdvancedQuantizationParameters(disable_bias_correction=True), +) ############################################################################### # Benchmark performance, calculate compression rate and validate accuracy -ov_model = mo.convert_model(model.cpu(), input_shape=[-1, 3, 224, 224]) -ov_quantized_model = mo.convert_model(quantized_model.cpu(), input_shape=[-1, 3, 224, 224]) +dummy_input = torch.randn(1, 3, 224, 224) + +fp32_onnx_path = f"{ROOT}/mobilenet_v2_fp32.onnx" +torch.onnx.export( + torch_model.cpu(), + dummy_input, + fp32_onnx_path, + input_names=["input"], + output_names=["output"], + dynamic_axes={"input": {0: "-1"}}, +) +ov_model = mo.convert_model(fp32_onnx_path) + +int8_onnx_path = f"{ROOT}/mobilenet_v2_int8.onnx" +torch.onnx.export( + torch_quantized_model.cpu(), + dummy_input, + int8_onnx_path, + input_names=["input"], + output_names=["output"], + dynamic_axes={"input": {0: "-1"}}, +) +ov_quantized_model = mo.convert_model(int8_onnx_path) fp32_ir_path = f"{ROOT}/mobilenet_v2_fp32.xml" ov.serialize(ov_model, fp32_ir_path) @@ -166,11 +188,11 @@ def transform_fn(data_item): int8_fps = run_benchmark(int8_ir_path, shape=[1, 3, 224, 224], verbose=True) print("[5/7] Validate OpenVINO FP32 model:") -fp32_top1 = validate(ov_model, val_loader) +fp32_top1 = validate(ov_model, val_data_loader) print(f"Accuracy @ top1: {fp32_top1:.3f}") print("[6/7] Validate OpenVINO INT8 model:") -int8_top1 = validate(ov_quantized_model, val_loader) +int8_top1 = validate(ov_quantized_model, val_data_loader) print(f"Accuracy @ top1: {int8_top1:.3f}") print("[7/7] Report:") diff --git a/examples/post_training_quantization/torch/mobilenet_v2/requirements.txt b/examples/post_training_quantization/torch/mobilenet_v2/requirements.txt index 2efd33ec900..aabb54f6d66 100644 --- a/examples/post_training_quantization/torch/mobilenet_v2/requirements.txt +++ b/examples/post_training_quantization/torch/mobilenet_v2/requirements.txt @@ -1,5 +1,6 @@ -torchvision~=0.14 +torchvision>=0.10.1,<0.16 tqdm scikit-learn fastdownload -openvino-dev~=2022.3.0.dev +openvino-dev==2023.0.1 +onnx diff --git a/examples/post_training_quantization/torch/ssd300_vgg16/README.md b/examples/post_training_quantization/torch/ssd300_vgg16/README.md index d50cd265d21..a4e57c3c469 100644 --- a/examples/post_training_quantization/torch/ssd300_vgg16/README.md +++ b/examples/post_training_quantization/torch/ssd300_vgg16/README.md @@ -1,27 +1,32 @@ # Post-Training Quantization of SSD PyTorch Model -This example demonstrates how to use Post-Training Quantization API from Neural Network Compression Framework (NNCF) to quantize PyTorch models on the example of [SSD300_VGG16](https://pytorch.org/vision/main/models/generated/torchvision.models.detection.ssd300_vgg16.html) from torchvision library. 
+This example demonstrates how to use Post-Training Quantization API from Neural Network Compression Framework (NNCF) to quantize PyTorch models on the example of [SSD300_VGG16](https://pytorch.org/vision/main/models/generated/torchvision.models.detection.ssd300_vgg16.html) from torchvision library. The example includes the following steps: + - Loading the [COCO128](https://www.kaggle.com/datasets/ultralytics/coco128) dataset (~7 Mb). - Loading [SSD300_VGG16](https://pytorch.org/vision/main/models/generated/torchvision.models.detection.ssd300_vgg16.html) from torchvision pretrained on the full COCO dataset. - Patching some internal methods with `no_nncf_trace` context so that the model graph is traced properly by NNCF. - Quantizing the model using NNCF Post-Training Quantization algorithm. - Output of the following characteristics of the quantized model: - - Accuracy drop of the quantized model (INT8) over the pre-trained model (FP32). - - Compression rate of the quantized model file size relative to the pre-trained model file size. - - Performance speed up of the quantized model (INT8). + - Accuracy drop of the quantized model (INT8) over the pre-trained model (FP32). + - Compression rate of the quantized model file size relative to the pre-trained model file size. + - Performance speed up of the quantized model (INT8). + +## Install requirements -# Install requirements At this point it is assumed that you have already installed NNCF. You can find information on installation NNCF [here](https://github.com/openvinotoolkit/nncf#user-content-installation). To work with the example you should install the corresponding Python package dependencies: -``` + +```bash pip install -r requirements.txt ``` -# Run Example +## Run Example + The example does not require any additional preparation, just run: -``` + +```bash python main.py -``` \ No newline at end of file +``` diff --git a/examples/post_training_quantization/torch/ssd300_vgg16/main.py b/examples/post_training_quantization/torch/ssd300_vgg16/main.py index 97835fe40f7..c90ee304e2c 100644 --- a/examples/post_training_quantization/torch/ssd300_vgg16/main.py +++ b/examples/post_training_quantization/torch/ssd300_vgg16/main.py @@ -14,6 +14,10 @@ import subprocess from pathlib import Path +# nncf.torch must be imported before torchvision +import nncf +from nncf.torch import disable_tracing + import openvino.runtime as ov import torch import torchvision @@ -25,9 +29,6 @@ from torchvision.models.detection.ssd import GeneralizedRCNNTransform from tqdm import tqdm -import nncf -from nncf.torch import disable_tracing - ROOT = Path(__file__).parent.resolve() DATASET_URL = "https://ultralytics.com/assets/coco128.zip" DATASET_PATH = "~/.cache/nncf/datasets" @@ -67,87 +68,10 @@ def run_benchmark(model_path: str, shape=None, verbose: bool = True) -> float: class COCO128Dataset(torch.utils.data.Dataset): category_mapping = [ - 1, - 2, - 3, - 4, - 5, - 6, - 7, - 8, - 9, - 10, - 11, - 13, - 14, - 15, - 16, - 17, - 18, - 19, - 20, - 21, - 22, - 23, - 24, - 25, - 27, - 28, - 31, - 32, - 33, - 34, - 35, - 36, - 37, - 38, - 39, - 40, - 41, - 42, - 43, - 44, - 46, - 47, - 48, - 49, - 50, - 51, - 52, - 53, - 54, - 55, - 56, - 57, - 58, - 59, - 60, - 61, - 62, - 63, - 64, - 65, - 67, - 70, - 72, - 73, - 74, - 75, - 76, - 77, - 78, - 79, - 80, - 81, - 82, - 84, - 85, - 86, - 87, - 88, - 89, - 90, - ] + 1,2,3,4,5,6,7,8,9,10,11,13,14,15,16,17,18,19,20,21,22,23,24,25,27,28,31,32,33, + 34,35,36,37,38,39,40,41,42,43,44,46,47,48,49,50,51,52,53,54,55,56,57,58,59,60, + 
61,62,63,64,65,67,70,72,73,74,75,76,77,78,79,80,81,82,84,85,86,87,88,89,90 + ] # fmt: skip def __init__(self, data_path, transform): super().__init__() @@ -165,7 +89,7 @@ def __getitem__(self, item): target = dict(image_id=[image_id], boxes=[], labels=[]) label_filepath = self.labels_path / f"{image_id:012d}.txt" if label_filepath.exists(): - with open(label_filepath, "r") as f: + with open(label_filepath, "r", encoding="utf-8") as f: for box_descr in f.readlines(): category_id, rel_x, rel_y, rel_w, rel_h = tuple(map(float, box_descr.split(" "))) box_x1, box_y1 = img_w * (rel_x - rel_w / 2), img_h * (rel_y - rel_h / 2) @@ -173,8 +97,12 @@ def __getitem__(self, item): target["boxes"].append((box_x1, box_y1, box_x2, box_y2)) target["labels"].append(self.category_mapping[int(category_id)]) - for k in target.keys(): - target[k] = torch.as_tensor(target[k], dtype=torch.float32 if k == "boxes" else torch.int64) + target_copy = {} + target_keys = target.keys() + for k in target_keys: + target_copy[k] = torch.as_tensor(target[k], dtype=torch.float32 if k == "boxes" else torch.int64) + target = target_copy + img, target = self.transform(img, target) return img, target @@ -211,7 +139,7 @@ def main(): # Get the pretrained ssd300_vgg16 model from torchvision.models model = torchvision.models.get_model("ssd300_vgg16", weights=weights_name) - device = torch.device("cuda") if torch.cuda.is_available() else torch.cpu + device = torch.device("cuda") if torch.cuda.is_available() else torch.device("cpu") model.to(device) model.eval() @@ -224,9 +152,15 @@ def main(): quantized_model = nncf.quantize(model, calibration_dataset) # Convert to OpenVINO - input_shape = [3, 480, 480] - ov_model = mo.convert_model(model.cpu(), input_shape=[-1] + input_shape) - ov_quantized_model = mo.convert_model(quantized_model.cpu(), input_shape=[-1] + input_shape) + dummy_input = torch.randn(1, 3, 480, 480) + + fp32_onnx_path = f"{ROOT}/ssd300_vgg16_fp32.onnx" + torch.onnx.export(model.cpu(), dummy_input, fp32_onnx_path) + ov_model = mo.convert_model(fp32_onnx_path) + + int8_onnx_path = f"{ROOT}/ssd300_vgg16_int8.onnx" + torch.onnx.export(quantized_model.cpu(), dummy_input, int8_onnx_path) + ov_quantized_model = mo.convert_model(int8_onnx_path) fp32_ir_path = f"{ROOT}/ssd300_vgg16_fp32.xml" ov.serialize(ov_model, fp32_ir_path) @@ -239,9 +173,9 @@ def main(): int8_model_size = get_model_size(int8_ir_path, verbose=True) print("[3/7] Benchmark FP32 model:") - fp32_fps = run_benchmark(fp32_ir_path, shape=[1] + input_shape, verbose=True) + fp32_fps = run_benchmark(fp32_ir_path, verbose=True) print("[4/7] Benchmark INT8 model:") - int8_fps = run_benchmark(int8_ir_path, shape=[1] + input_shape, verbose=True) + int8_fps = run_benchmark(int8_ir_path, verbose=True) print("[5/7] Validate FP32 model:") torch.backends.cudnn.deterministic = True @@ -258,6 +192,8 @@ def main(): # https://docs.openvino.ai/latest/openvino_docs_optimization_guide_dldt_optimization_guide.html print(f"Performance speed up (throughput mode): {int8_fps / fp32_fps:.3f}") + return fp32_map, int8_map, fp32_fps, int8_fps, fp32_model_size, int8_model_size + if __name__ == "__main__": main() diff --git a/examples/post_training_quantization/torch/ssd300_vgg16/requirements.txt b/examples/post_training_quantization/torch/ssd300_vgg16/requirements.txt index ee37539a575..27d416bc987 100644 --- a/examples/post_training_quantization/torch/ssd300_vgg16/requirements.txt +++ b/examples/post_training_quantization/torch/ssd300_vgg16/requirements.txt @@ -1,5 +1,7 @@ fastdownload 
-openvino-dev -torchmetrics -torchvision>=0.14.1 -tqdm \ No newline at end of file +openvino-dev==2023.0.1 +torchmetrics==1.0.1 +pycocotools +torchvision~=0.15.1 +tqdm +onnx diff --git a/examples/tensorflow/classification/README.md b/examples/tensorflow/classification/README.md index 9e1eaabe4fc..64f5c30cf88 100644 --- a/examples/tensorflow/classification/README.md +++ b/examples/tensorflow/classification/README.md @@ -15,7 +15,7 @@ At this point it is assumed that you have already installed nncf. You can find i To work with the sample you should install the corresponding Python package dependencies: -``` +```bash pip install -r examples/tensorflow/requirements.txt ``` @@ -34,6 +34,7 @@ Please read the following [guide](https://www.tensorflow.org/datasets/overview) For the [ImageNet](http://www.image-net.org/challenges/LSVRC/2012/) dataset, TFDS requires a manual download. Please refer to the [TFDS ImageNet Readme](https://www.tensorflow.org/datasets/catalog/imagenet2012) for download instructions. The TFDS ImageNet dataset should be specified in the configuration file as follows: + ```json "dataset": "imagenet2012", "dataset_type": "tfds" @@ -43,6 +44,7 @@ The TFDS ImageNet dataset should be specified in the configuration file as follo To download the [ImageNet](http://www.image-net.org/challenges/LSVRC/2012/) dataset and convert it to [TFRecord](https://www.tensorflow.org/tutorials/load_data/tfrecord) format, refer to the following [tutorial](https://github.com/tensorflow/models/tree/master/research/slim#Data). The ImageNet dataset in TFRecords format should be specified in the configuration file as follows: + ```json "dataset": "imagenet2012", "dataset_type": "tfrecords" @@ -58,6 +60,7 @@ The ImageNet dataset in TFRecords format should be specified in the configuratio Before compressing a model, it is highly recommended checking the accuracy of the pretrained model. All models which are supported in the sample has pretrained weights for ImageNet. To load pretrained weights into a model and then evaluate the accuracy of that model, make sure that the pretrained=True option is set in the configuration file and use the following command: + ```bash python main.py \ --mode=test \ @@ -69,13 +72,15 @@ python main.py \ #### Compress Pretrained Model Run the following command to start compression with fine-tuning on all available GPUs on the machine: - ```bash - python main.py \ - --mode=train \ - --config=configs/quantization/mobilenet_v2_imagenet_int8.json \ - --data= \ - --log-dir=../../results/quantization/mobilenet_v2_int8 - ``` + +```bash +python main.py \ +--mode=train \ +--config=configs/quantization/mobilenet_v2_imagenet_int8.json \ +--data= \ +--log-dir=../../results/quantization/mobilenet_v2_int8 +``` + It may take a few epochs to get the baseline accuracy results. Use the `--resume` flag with the path to the checkpoint to resume training from the defined checkpoint or folder with checkpoints to resume training from the last checkpoint. 
@@ -83,6 +88,7 @@ Use the `--resume` flag with the path to the checkpoint to resume training from ### Validate Your Model Checkpoint To estimate the test scores of your trained model checkpoint, use the following command: + ```bash python main.py \ --mode=test \ @@ -94,6 +100,7 @@ python main.py \ ### Export Compressed Model To export trained model to the **Frozen Graph**, use the following command: + ```bash python main.py \ --mode=export \ @@ -103,6 +110,7 @@ python main.py \ ``` To export trained model to the **SavedModel**, use the following command: + ```bash python main.py \ --mode=export \ @@ -112,6 +120,7 @@ python main.py \ ``` To export trained model to the **Keras H5**, use the following command: + ```bash python main.py \ --mode=export \ @@ -124,43 +133,6 @@ python main.py \ To export a model to the OpenVINO IR and run it using the Intel® Deep Learning Deployment Toolkit, refer to this [tutorial](https://software.intel.com/en-us/openvino-toolkit). -### Results - - -|Model|Compression algorithm|Dataset|Accuracy (_drop_) %|NNCF config file|Checkpoint| -| :---: | :---: | :---: | :---: | :---: | :---: | -|Inception V3|None|ImageNet|77.91|[inception_v3_imagenet.json](configs/inception_v3_imagenet.json)|-| -|Inception V3|INT8 (per-tensor symmetric for weights, per-tensor asymmetric half-range for activations)|ImageNet|78.39 (-0.48)|[inception_v3_imagenet_int8.json](configs/quantization/inception_v3_imagenet_int8.json)|[Link](https://storage.openvinotoolkit.org/repositories/nncf/models/v2.5.0/tensorflow/inception_v3_imagenet_int8.tar.gz)| -|Inception V3|INT8 (per-tensor symmetric for weights, per-tensor asymmetric half-range for activations), Sparsity 61% (RB)|ImageNet|77.52 (0.39)|[inception_v3_imagenet_rb_sparsity_int8.json](configs/sparsity_quantization/inception_v3_imagenet_rb_sparsity_int8.json)|[Link](https://storage.openvinotoolkit.org/repositories/nncf/models/v2.5.0/tensorflow/inception_v3_imagenet_rb_sparsity_int8.tar.gz)| -|Inception V3|Sparsity 54% (Magnitude)|ImageNet|77.86 (0.05)|[inception_v3_imagenet_magnitude_sparsity.json](configs/sparsity/inception_v3_imagenet_magnitude_sparsity.json)|[Link](https://storage.openvinotoolkit.org/repositories/nncf/models/v2.5.0/tensorflow/inception_v3_imagenet_magnitude_sparsity.tar.gz)| -|MobileNet V2|None|ImageNet|71.85|[mobilenet_v2_imagenet.json](configs/mobilenet_v2_imagenet.json)|-| -|MobileNet V2|INT8 (per-tensor symmetric for weights, per-tensor asymmetric half-range for activations)|ImageNet|71.63 (0.22)|[mobilenet_v2_imagenet_int8.json](configs/quantization/mobilenet_v2_imagenet_int8.json)|[Link](https://storage.openvinotoolkit.org/repositories/nncf/models/v2.5.0/tensorflow/mobilenet_v2_imagenet_int8.tar.gz)| -|MobileNet V2|INT8 (per-tensor symmetric for weights, per-tensor asymmetric half-range for activations), Sparsity 52% (RB)|ImageNet|70.94 (0.91)|[mobilenet_v2_imagenet_rb_sparsity_int8.json](configs/sparsity_quantization/mobilenet_v2_imagenet_rb_sparsity_int8.json)|[Link](https://storage.openvinotoolkit.org/repositories/nncf/models/v2.5.0/tensorflow/mobilenet_v2_imagenet_rb_sparsity_int8.tar.gz)| -|MobileNet V2| Sparsity 50% (RB)|ImageNet|71.34 (0.51)|[mobilenet_v2_imagenet_rb_sparsity.json](configs/sparsity/mobilenet_v2_imagenet_rb_sparsity.json)|[Link](https://storage.openvinotoolkit.org/repositories/nncf/models/v2.5.0/tensorflow/mobilenet_v2_imagenet_rb_sparsity.tar.gz)| -|MobileNet V2 (TensorFlow Hub MobileNet V2)|Sparsity 35% (Magnitude)|ImageNet|71.87 
(-0.02)|[mobilenet_v2_hub_imagenet_magnitude_sparsity.json](configs/sparsity/mobilenet_v2_hub_imagenet_magnitude_sparsity.json)|[Link](https://storage.openvinotoolkit.org/repositories/nncf/models/v2.5.0/tensorflow/mobilenet_v2_hub_imagenet_magnitude_sparsity.tar.gz)| -|MobileNet V3 (Small)|None|ImageNet|68.38|[mobilenet_v3_small_imagenet.json](configs/mobilenet_v3_small_imagenet.json)|-| -|MobileNet V3 (Small)|INT8 (per-channel symmetric for weights, per-tensor asymmetric half-range for activations)|ImageNet|67.79 (0.59)|[mobilenet_v3_small_imagenet_int8.json](configs/quantization/mobilenet_v3_small_imagenet_int8.json)|[Link](https://storage.openvinotoolkit.org/repositories/nncf/models/v2.5.0/tensorflow/mobilenet_v3_small_imagenet_int8.tar.gz)| -|MobileNet V3 (Small)|INT8 (per-channel symmetric for weights, per-tensor asymmetric half-range for activations) + Sparsity 42% (Magnitude)|ImageNet|67.44 (0.94)|[mobilenet_v3_small_imagenet_rb_sparsity_int8.json](configs/sparsity_quantization/mobilenet_v3_small_imagenet_rb_sparsity_int8.json)|[Link](https://storage.openvinotoolkit.org/repositories/nncf/models/v2.5.0/tensorflow/mobilenet_v3_small_imagenet_rb_sparsity_int8.tar.gz)| -|MobileNet V3 (Large)|None|ImageNet|75.80|[mobilenet_v3_large_imagenet.json](configs/mobilenet_v3_large_imagenet.json)|-| -|MobileNet V3 (Large)|INT8 (per-channel symmetric for weights, per-tensor asymmetric half-range for activations)|ImageNet|75.04 (0.76)|[mobilenet_v3_large_imagenet_int8.json](configs/quantization/mobilenet_v3_large_imagenet_int8.json)|[Link](https://storage.openvinotoolkit.org/repositories/nncf/models/v2.5.0/tensorflow/mobilenet_v3_large_imagenet_int8.tar.gz)| -|MobileNet V3 (Large)|INT8 (per-channel symmetric for weights, per-tensor asymmetric half-range for activations) + Sparsity 42% (RB)|ImageNet|75.24 (0.56)|[mobilenet_v3_large_imagenet_rb_sparsity_int8.json](configs/sparsity_quantization/mobilenet_v3_large_imagenet_rb_sparsity_int8.json)|[Link](https://storage.openvinotoolkit.org/repositories/nncf/models/v2.5.0/tensorflow/mobilenet_v3_large_imagenet_rb_sparsity_int8.tar.gz)| -|ResNet-50|None|ImageNet|75.05|[resnet50_imagenet.json](configs/resnet50_imagenet.json)|-| -|ResNet-50|INT8|ImageNet|74.99 (0.06)|[resnet50_imagenet_int8.json](configs/quantization/resnet50_imagenet_int8.json)|[Link](https://storage.openvinotoolkit.org/repositories/nncf/models/v2.5.0/tensorflow/resnet50_imagenet_int8.tar.gz)| -|ResNet-50|INT8 (per-tensor symmetric for weights, per-tensor asymmetric half-range for activations) + Sparsity 65% (RB)|ImageNet|74.36 (0.69)|[resnet50_imagenet_rb_sparsity_int8.json](configs/sparsity_quantization/resnet50_imagenet_rb_sparsity_int8.json)|[Link](https://storage.openvinotoolkit.org/repositories/nncf/models/v2.5.0/tensorflow/resnet50_imagenet_rb_sparsity_int8.tar.gz)| -|ResNet-50|Sparsity 80% (RB)|ImageNet|74.38 (0.67)|[resnet50_imagenet_rb_sparsity.json](configs/sparsity/resnet50_imagenet_rb_sparsity.json)|[Link](https://storage.openvinotoolkit.org/repositories/nncf/models/v2.5.0/tensorflow/resnet50_imagenet_rb_sparsity.tar.gz)| - -#### Results for filter pruning - - -|Model|Compression algorithm|Dataset|Accuracy (_drop_) %|NNCF config file|Checkpoint| -| :---: | :---: | :---: | :---: | :---: | :---: | -|ResNet-50|None|ImageNet|75.05|[resnet50_imagenet.json](configs/resnet50_imagenet.json)|-| -|ResNet-50|Filter pruning, 40%, geometric median criterion|ImageNet|74.96 
(0.09)|[resnet50_imagenet_pruning_geometric_median.json](configs/pruning/resnet50_imagenet_pruning_geometric_median.json)|[Link](https://storage.openvinotoolkit.org/repositories/nncf/models/v2.5.0/tensorflow/resnet50_imagenet_pruning_geometric_median.tar.gz)| -|ResNet-50|INT8 (per-tensor symmetric for weights, per-tensor asymmetric half-range for activations) + Filter pruning, 40%, geometric median criterion|ImageNet|75.09 (-0.04)|[resnet50_imagenet_pruning_geometric_median_int8.json](configs/pruning_quantization/resnet50_imagenet_pruning_geometric_median_int8.json)|[Link](https://storage.openvinotoolkit.org/repositories/nncf/models/v2.5.0/tensorflow/resnet50_imagenet_pruning_geometric_median_int8.tar.gz)| - -#### Results for accuracy-aware compressed training - - -|**Model**|**Compression algorithm**|**Dataset**|**Accuracy (Drop) %**|**NNCF config file**| -| :---: | :---: | :---: | :---: | :---: | -|ResNet50|Sparsity 65% (magnitude)|ImageNet|74.37 (0.67)|[resnet50_imagenet_magnitude_sparsity_accuracy_aware.json](configs/sparsity/resnet50_imagenet_magnitude_sparsity_accuracy_aware.json)| +## Results + +Please see compression results for Tensorflow classification at our [Model Zoo page](../../../docs/ModelZoo.md#tensorflow-classification). diff --git a/examples/tensorflow/classification/main.py b/examples/tensorflow/classification/main.py index 492fba034c9..b6e63e81fee 100644 --- a/examples/tensorflow/classification/main.py +++ b/examples/tensorflow/classification/main.py @@ -178,6 +178,11 @@ def run(config): if resume_training: compression_state = load_compression_state(config.ckpt_path) + if "train" in config.mode and is_accuracy_aware_training(config): + uncompressed_model_accuracy = get_model_accuracy( + model_fn, model_params, nncf_config, validation_dataset, validation_steps + ) + with TFModelManager(model_fn, nncf_config, **model_params) as model: with strategy.scope(): compression_ctrl, compress_model = create_compressed_model(model, nncf_config, compression_state) @@ -239,6 +244,7 @@ def run(config): statistics = compress_model.accuracy_aware_fit( train_dataset, compression_ctrl, + uncompressed_model_accuracy=uncompressed_model_accuracy, nncf_config=config.nncf_config, callbacks=callbacks, initial_epoch=initial_epoch, diff --git a/examples/tensorflow/common/object_detection/architecture/darknet.py b/examples/tensorflow/common/object_detection/architecture/darknet.py index f9c728e18dc..72a4083657d 100644 --- a/examples/tensorflow/common/object_detection/architecture/darknet.py +++ b/examples/tensorflow/common/object_detection/architecture/darknet.py @@ -27,7 +27,9 @@ def DarknetConv2D_BN_Mish(self, *args, **kwargs): no_bias_kwargs.update(kwargs) return nn_ops.compose( nn_ops.DarknetConv2D(*args, **no_bias_kwargs), + # TODO(nsavelyev) replace by BatchNormalization(synchronized=True) once support for TF < 2.12 is dropped tf.keras.layers.experimental.SyncBatchNormalization(), + # TODO(nsavelyev) change to tf.keras.activations.mish after upgrade to TF 2.13 tf.keras.layers.Activation(self.mish), ) diff --git a/examples/tensorflow/common/object_detection/utils/yolo_v4_utils.py b/examples/tensorflow/common/object_detection/utils/yolo_v4_utils.py index 570dd5bad4c..b527130a850 100644 --- a/examples/tensorflow/common/object_detection/utils/yolo_v4_utils.py +++ b/examples/tensorflow/common/object_detection/utils/yolo_v4_utils.py @@ -49,7 +49,7 @@ def letterbox_resize(image, target_size): offset = (dx, dy) # create letterbox resized image - image = image.resize(padding_size, Image.BICUBIC) + 
image = image.resize(padding_size, Image.Resampling.BICUBIC) new_image = Image.new("RGB", target_size, (128, 128, 128)) new_image.paste(image, offset) @@ -91,7 +91,7 @@ def random_resize_crop_pad(image, target_size, aspect_ratio_jitter=0.1, scale_ji padding_w = int(rand_scale * target_w) padding_h = int(padding_w / rand_aspect_ratio) padding_size = (padding_w, padding_h) - image = image.resize(padding_size, Image.BICUBIC) + image = image.resize(padding_size, Image.Resampling.BICUBIC) # get random offset in padding image dx = int(rand(0, target_w - padding_w)) @@ -170,7 +170,7 @@ def random_horizontal_flip(image, prob=0.5): """ flip = rand() < prob if flip: - image = image.transpose(Image.FLIP_LEFT_RIGHT) + image = image.transpose(Image.Transpose.FLIP_LEFT_RIGHT) return image, flip diff --git a/examples/tensorflow/common/utils.py b/examples/tensorflow/common/utils.py index 11bb127114a..7e93d579d49 100644 --- a/examples/tensorflow/common/utils.py +++ b/examples/tensorflow/common/utils.py @@ -9,8 +9,6 @@ # See the License for the specific language governing permissions and # limitations under the License. -import atexit -import datetime import json import os import random @@ -21,7 +19,6 @@ import numpy as np import tensorflow as tf -from tensorflow.python.distribute.mirrored_strategy import MirroredStrategy from examples.common.sample_config import CustomArgumentParser from examples.common.sample_config import SampleConfig diff --git a/examples/tensorflow/object_detection/README.md b/examples/tensorflow/object_detection/README.md index 5a0cb4ca0a6..dd1adc4a608 100644 --- a/examples/tensorflow/object_detection/README.md +++ b/examples/tensorflow/object_detection/README.md @@ -6,7 +6,7 @@ The sample receives a configuration file where the training schedule, hyper-para ## Features -- RetinaNet from the official [TF repository](https://github.com/tensorflow/models/tree/master/official/vision/detection) with minor modifications (custom implementation of upsamling is replaced with equivalent tf.keras.layers.UpSampling2D). YOLOv4 from the [keras-YOLOv3-model-set](https://github.com/david8862/keras-YOLOv3-model-set) repository. +- RetinaNet from the official [TF repository](https://github.com/tensorflow/models/tree/master/official/vision/detection) with minor modifications (custom implementation of upsampling is replaced with equivalent tf.keras.layers.UpSampling2D). YOLOv4 from the [keras-YOLOv3-model-set](https://github.com/david8862/keras-YOLOv3-model-set) repository. - Support [TensorFlow Datasets (TFDS)](https://www.tensorflow.org/datasets) and TFRecords for COCO2017 dataset. - Configuration file examples for sparsity, quantization, filter pruning and quantization with sparsity. - Export to Frozen Graph or TensorFlow SavedModel that is supported by the OpenVINO™ toolkit. @@ -18,7 +18,7 @@ At this point it is assumed that you have already installed nncf. You can find i To work with the sample you should install the corresponding Python package dependencies: -``` +```bash pip install -r examples/tensorflow/requirements.txt ``` @@ -66,33 +66,35 @@ The [COCO2017](https://cocodataset.org/) dataset in TFRecords format should be s - If you did not install the package, add the repository root folder to the `PYTHONPATH` environment variable. - Go to the `examples/tensorflow/object_detection` folder. -- Download the pre-trained weights in H5 format and provide the path to them using `--weights` flag. 
The link to the -archive with pre-trained weights can be found in the `TensorFlow checkpoint` column of the [results](#results) table. -Select the checkpoint corresponding to the `None` compression algorithm, which includes the pre-trained weights for the -FP32 model, without applying any compression algorithms. +- Download the pre-trained weights in H5 format for either [RetinaNet](https://storage.openvinotoolkit.org/repositories/nncf/models/v2.6.0/tensorflow/retinanet_coco.tar.gz) or [YOLOv4](https://storage.openvinotoolkit.org/repositories/nncf/models/v2.6.0/tensorflow/yolo_v4_coco.tar.gz) and provide the path to them using `--weights` flag. - (Optional) Before compressing a model, it is highly recommended checking the accuracy of the pretrained model, use the following command: - ```bash - python main.py \ - --mode=test \ - --config=configs/quantization/retinanet_coco_int8.json \ - --weights= - --data= \ - --disable-compression - ``` + + ```bash + python main.py \ + --mode=test \ + --config=configs/quantization/retinanet_coco_int8.json \ + --weights= \ + --data= \ + --disable-compression + ``` + - Run the following command to start compression with fine-tuning on all available GPUs on the machine: + ```bash python main.py \ --mode=train \ --config=configs/quantization/retinanet_coco_int8.json \ - --weights= + --weights= \ --data= \ --log-dir=../../results/quantization/retinanet_coco_int8 ``` + - Use the `--resume` flag with the path to the checkpoint to resume training from the defined checkpoint or folder with checkpoints to resume training from the last checkpoint. ### Validate Your Model Checkpoint To estimate the test scores of your trained model checkpoint, use the following command: + ```bash python main.py \ --mode=test \ @@ -104,6 +106,7 @@ python main.py \ ### Export Compressed Model To export trained model to the **Frozen Graph**, use the following command: + ```bash python main.py \ --mode=export \ @@ -113,6 +116,7 @@ python main.py \ ``` To export trained model to the **SavedModel**, use the following command: + ```bash python main.py \ --mode=export \ @@ -122,6 +126,7 @@ python main.py \ ``` To export trained model to the **Keras H5**, use the following command: + ```bash python main.py \ --mode=export \ @@ -131,7 +136,9 @@ python main.py \ ``` ### Save Checkpoint without Optimizer + To reduce memory footprint (if no further training is scheduled) it is useful to save the checkpoint without optimizer. Use the following command: + ```bash python ../common/prepare_checkpoint.py \ --config=configs/quantization/retinanet_coco_int8.json \ @@ -144,10 +151,12 @@ python ../common/prepare_checkpoint.py \ To export a model to the OpenVINO IR and run it using the Intel® Deep Learning Deployment Toolkit, refer to this [tutorial](https://software.intel.com/en-us/openvino-toolkit). ## Train RetinaNet from scratch + - Download pre-trained ResNet-50 checkpoint from [here](https://storage.cloud.google.com/cloud-tpu-checkpoints/model-garden-vision/detection/resnet50-2018-02-07.tar.gz). - If you did not install the package, add the repository root folder to the `PYTHONPATH` environment variable. - Go to the `examples/tensorflow/object_detection` folder. 
- Run the following command to start training RetinaNet from scratch on all available GPUs on the machine: + ```bash python main.py \ --mode=train \ @@ -155,25 +164,10 @@ To export a model to the OpenVINO IR and run it using the Intel® Deep Learning --data= \ --log-dir=../../results/quantization/retinanet_coco_baseline \ --backbone-checkpoint= + ``` + - Export trained model to the Keras H5 format. ## Results - - -|Model|Compression algorithm|Dataset|mAP (_drop_) %|NNCF config file|Checkpoint| -| :---: | :---: | :---: | :---: | :---: | :---: | -|RetinaNet|None|COCO 2017|33.43|[retinanet_coco.json](configs/retinanet_coco.json)|[Link](https://storage.openvinotoolkit.org/repositories/nncf/models/v2.5.0/tensorflow/retinanet_coco.tar.gz)| -|RetinaNet|INT8 (per-tensor symmetric for weights, per-tensor asymmetric half-range for activations)|COCO 2017|33.12 (0.31)|[retinanet_coco_int8.json](configs/quantization/retinanet_coco_int8.json)|[Link](https://storage.openvinotoolkit.org/repositories/nncf/models/v2.5.0/tensorflow/retinanet_coco_int8.tar.gz)| -|RetinaNet|Magnitude sparsity (50%)|COCO 2017|33.10 (0.33)|[retinanet_coco_magnitude_sparsity.json](configs/sparsity/retinanet_coco_magnitude_sparsity.json)|[Link](https://storage.openvinotoolkit.org/repositories/nncf/models/v2.5.0/tensorflow/retinanet_coco_magnitude_sparsity.tar.gz)| -|YOLO v4|None|COCO 2017|47.07|[yolo_v4_coco.json](configs/yolo_v4_coco.json)|[Link](https://storage.openvinotoolkit.org/repositories/nncf/models/v2.5.0/tensorflow/yolo_v4_coco.tar.gz)| -|YOLO v4|INT8 (per-channel symmetric for weights, per-tensor asymmetric half-range for activations)|COCO 2017|46.20 (0.87)|[yolo_v4_coco_int8.json](configs/quantization/yolo_v4_coco_int8.json)|[Link](https://storage.openvinotoolkit.org/repositories/nncf/models/v2.5.0/tensorflow/yolo_v4_coco_int8.tar.gz)| -|YOLO v4|Magnitude sparsity, 50%|COCO 2017|46.49 (0.58)|[yolo_v4_coco_magnitude_sparsity.json](configs/sparsity/yolo_v4_coco_magnitude_sparsity.json)|[Link](https://storage.openvinotoolkit.org/repositories/nncf/models/v2.5.0/tensorflow/yolo_v4_coco_magnitude_sparsity.tar.gz)| - -#### Results for filter pruning - - -|Model|Compression algorithm|Dataset|mAP (_drop_) %|NNCF config file|Checkpoint| -| :---: | :---: | :---: | :---: | :---: | :---: | -|RetinaNet|None|COCO 2017|33.43|[retinanet_coco.json](configs/retinanet_coco.json)|[Link](https://storage.openvinotoolkit.org/repositories/nncf/models/v2.5.0/tensorflow/retinanet_coco.tar.gz)| -|RetinaNet|Filter pruning, 40%|COCO 2017|32.72 (0.71)|[retinanet_coco_pruning_geometric_median.json](configs/pruning/retinanet_coco_pruning_geometric_median.json)|[Link](https://storage.openvinotoolkit.org/repositories/nncf/models/v2.5.0/tensorflow/retinanet_coco_pruning_geometric_median.tar.gz)| -|RetinaNet|INT8 (per-tensor symmetric for weights, per-tensor asymmetric half-range for activations) + filter pruning 40%|COCO 2017|32.67 (0.76)|[retinanet_coco_pruning_geometric_median_int8.json](configs/pruning_quantization/retinanet_coco_pruning_geometric_median_int8.json)|[Link](https://storage.openvinotoolkit.org/repositories/nncf/models/v2.5.0/tensorflow/retinanet_coco_pruning_geometric_median_int8.tar.gz)| + +Please see compression results for Tensorflow object detection at our [Model Zoo page](../../../docs/ModelZoo.md#tensorflow-object-detection). 
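> Editor's note: the `main.py` hunk below ties accuracy-aware training to an explicitly measured FP32 baseline: the uncompressed model is evaluated through the `ModelEvaluationArgs` struct registered on the `NNCFConfig` before `create_compressed_model()` is called, and the result is passed to `create_accuracy_aware_training_loop()`. The following is only a condensed sketch of that calling pattern, assuming the sample has already registered an `eval_fn` on the config; the import paths follow the PyTorch hunks in this patch, and the `nncf.tensorflow` import path for `create_compressed_model` is an assumption for the TensorFlow case.

```python
from nncf.common.accuracy_aware_training import create_accuracy_aware_training_loop
from nncf.config.structures import ModelEvaluationArgs
from nncf.config.utils import is_accuracy_aware_training
from nncf.tensorflow import create_compressed_model  # assumed import path for the TF samples


def build_accuracy_aware_loop(model, config, compression_state=None):
    # Measure the FP32 baseline *before* the model is wrapped for compression.
    uncompressed_model_accuracy = None
    if "train" in config.mode and is_accuracy_aware_training(config):
        eval_fn = config.nncf_config.get_extra_struct(ModelEvaluationArgs).eval_fn
        uncompressed_model_accuracy = eval_fn(model)

    compression_ctrl, compressed_model = create_compressed_model(model, config.nncf_config, compression_state)
    # The measured baseline is now an explicit argument of the accuracy-aware training loop.
    loop = create_accuracy_aware_training_loop(config.nncf_config, compression_ctrl, uncompressed_model_accuracy)
    return compressed_model, compression_ctrl, loop
```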
diff --git a/examples/tensorflow/object_detection/main.py b/examples/tensorflow/object_detection/main.py index 5c815f6f9aa..6ac73a7f0b2 100644 --- a/examples/tensorflow/object_detection/main.py +++ b/examples/tensorflow/object_detection/main.py @@ -331,9 +331,8 @@ def run(config): # Register additional parameters in the NNCFConfig for initialization # the compressed model during building - nncf_config = config.nncf_config - nncf_config = register_default_init_args( - nncf_config=nncf_config, data_loader=train_dataset, batch_size=train_builder.global_batch_size + config.nncf_config = register_default_init_args( + nncf_config=config.nncf_config, data_loader=train_dataset, batch_size=train_builder.global_batch_size ) resume_training = config.ckpt_path is not None @@ -358,7 +357,11 @@ def run(config): ) ] ) - compression_ctrl, compress_model = create_compressed_model(model, nncf_config, compression_state) + + if "train" in config.mode and is_accuracy_aware_training(config): + uncompressed_model_accuracy = config.nncf_config.get_extra_struct(ModelEvaluationArgs).eval_fn(model) + + compression_ctrl, compress_model = create_compressed_model(model, config.nncf_config, compression_state) scheduler = build_scheduler(config=config, steps_per_epoch=steps_per_epoch) optimizer = build_optimizer(config=config, scheduler=scheduler) @@ -408,7 +411,9 @@ def validate_fn(model, **kwargs): metric_result = evaluate(test_step, eval_metric, test_dist_dataset, num_test_batches, config.print_freq) return metric_result["AP"] - acc_aware_training_loop = create_accuracy_aware_training_loop(nncf_config, compression_ctrl) + acc_aware_training_loop = create_accuracy_aware_training_loop( + config.nncf_config, compression_ctrl, uncompressed_model_accuracy + ) compress_model = acc_aware_training_loop.run( compress_model, train_epoch_fn=train_epoch_fn, diff --git a/examples/tensorflow/requirements.txt b/examples/tensorflow/requirements.txt index ca9a61c5d2d..75cb4b8de74 100644 --- a/examples/tensorflow/requirements.txt +++ b/examples/tensorflow/requirements.txt @@ -2,6 +2,6 @@ addict absl-py==1.0.0 tensorflow_datasets==4.2.0 tensorflow_hub -tensorflow_addons~=0.19.0 +tensorflow_addons~=0.20.0 opencv-python -pycocotools==2.0.4 +pycocotools==2.0.6 diff --git a/examples/tensorflow/segmentation/README.md b/examples/tensorflow/segmentation/README.md index a47aef77d3f..4ef9353d0cd 100644 --- a/examples/tensorflow/segmentation/README.md +++ b/examples/tensorflow/segmentation/README.md @@ -6,7 +6,7 @@ The sample receives a configuration file where the training schedule, hyper-para ## Features -- Mask R-CNN from the official [TF repository](https://github.com/tensorflow/models/tree/master/official/vision/detection) with minor modifications (custom implementation of upsamling is replaced with equivalent tf.keras.layers.UpSampling2D). +- Mask R-CNN from the official [TF repository](https://github.com/tensorflow/models/tree/master/official/vision/detection) with minor modifications (custom implementation of upsampling is replaced with equivalent tf.keras.layers.UpSampling2D). - Support TFRecords for COCO2017 dataset. - Configuration file examples for sparsity, quantization, and quantization with sparsity. - Export to Frozen Graph or TensorFlow SavedModel that is supported by the OpenVINO™ toolkit. @@ -18,7 +18,7 @@ At this point it is assumed that you have already installed nncf. 
You can find i To work with the sample you should install the corresponding Python package dependencies: -``` +```bash pip install -r examples/tensorflow/requirements.txt ``` @@ -49,14 +49,13 @@ The [COCO2017](https://cocodataset.org/) dataset should be specified in the conf ### Run Instance Segmentation Sample We can run the sample after data preparation. For this follow these steps: + - If you did not install the package, add the repository root folder to the `PYTHONPATH` environment variable. - Go to the `examples/tensorflow/segmentation` folder. -- Download the pre-trained weights in checkpoint format and provide the path to them using `--weights` flag. The link to the -archive with pre-trained weights can be found in the `TensorFlow checkpoint` column of the [results](#results) table. -Select the checkpoint corresponding to the `None` compression algorithm, which includes the pre-trained weights for the -FP32 model, without applying any compression algorithms. +- Download the pre-trained Mask-R-CNN [weights](https://storage.openvinotoolkit.org/repositories/nncf/models/v2.6.0/tensorflow/mask_rcnn_coco.tar.gz) in checkpoint format and provide the path to them using `--weights` flag. - Specify the GPUs to be used for training by setting the environment variable [`CUDA_VISIBLE_DEVICES`](https://developer.nvidia.com/blog/cuda-pro-tip-control-gpu-visibility-cuda_visible_devices/). This is necessary because training and validation during training must be performed on different GPU devices. Please note that usually only one GPU is required for validation during training. - (Optional) Before compressing a model, it is highly recommended checking the accuracy of the pretrained model, use the following command: + ```bash python evaluation.py \ --mode=test \ @@ -66,33 +65,39 @@ FP32 model, without applying any compression algorithms. --batch-size=1 \ --disable-compression ``` + - Run the following command to start compression with fine-tuning on all available GPUs on the machine: - ```bash - python train.py \ - --config=configs/quantization/mask_rcnn_coco_int8.json \ - --weights= \ - --data= \ - --log-dir=../../results/quantization/maskrcnn_coco_int8 - ``` + + ```bash + python train.py \ + --config=configs/quantization/mask_rcnn_coco_int8.json \ + --weights= \ + --data= \ + --log-dir=../../results/quantization/maskrcnn_coco_int8 + ``` + - Use the `--resume` flag with the path to the checkpoint to resume training from the defined checkpoint or folder with checkpoints to resume training from the last checkpoint. To start checkpoints validation during training follow these steps: + - If you did not install the package, add the repository root folder to the `PYTHONPATH` environment variable. - Go to the `examples/tensorflow/segmentation` folder. - Specify the GPUs to be used for validation during training by setting the environment variable [`CUDA_VISIBLE_DEVICES`](https://developer.nvidia.com/blog/cuda-pro-tip-control-gpu-visibility-cuda_visible_devices/). 
- Run the following command to start checkpoints validation during training: - ```bash - python evaluation.py \ - --mode=train \ - --config=configs/quantization/mask_rcnn_coco_int8.json \ - --data= \ - --batch-size=1 \ - --checkpoint-save-dir= - ``` + + ```bash + python evaluation.py \ + --mode=train \ + --config=configs/quantization/mask_rcnn_coco_int8.json \ + --data= \ + --batch-size=1 \ + --checkpoint-save-dir= + ``` ### Validate Your Model Checkpoint -To estimate the test scores of your trained model checkpoint, use the following command +To estimate the test scores of your trained model checkpoint, use the following command: + ```bash python evaluation.py \ --mode=test \ @@ -105,6 +110,7 @@ python evaluation.py \ ### Export Compressed Model To export trained model to the **Frozen Graph**, use the following command: + ```bash python evaluation.py \ --mode=export \ @@ -115,6 +121,7 @@ python evaluation.py \ ``` To export trained model to the **SavedModel**, use the following command: + ```bash python evaluation.py \ --mode=export \ @@ -129,23 +136,20 @@ python evaluation.py \ To export a model to the OpenVINO IR and run it using the Intel® Deep Learning Deployment Toolkit, refer to this [tutorial](https://software.intel.com/en-us/openvino-toolkit). ## Train MaskRCNN from scratch + - Download pre-trained ResNet-50 checkpoint from [here](https://storage.cloud.google.com/cloud-tpu-checkpoints/model-garden-vision/detection/resnet50-2018-02-07.tar.gz). - If you did not install the package, add the repository root folder to the `PYTHONPATH` environment variable. - Go to the `examples/tensorflow/segmentation` folder. - Run the following command to start training MaskRCNN from scratch on all available GPUs on the machine: - ```bash - python train.py \ - --config=configs/mask_rcnn_coco.json \ - --backbone-checkpoint= \ - --data= \ - --log-dir=../../results/quantization/maskrcnn_coco_baseline + ```bash + python train.py \ + --config=configs/mask_rcnn_coco.json \ + --backbone-checkpoint= \ + --data= \ + --log-dir=../../results/quantization/maskrcnn_coco_baseline + ``` ## Results - -|Model|Compression algorithm|Dataset| mAP (_drop_) % |NNCF config file|Checkpoint| -| :---: | :---: | :---: |:-------------------------------------:| :---: | :---: | -|Mask-R-CNN|None|COCO 2017| bbox: 37.33 segm: 33.56 |[mask_rcnn_coco.json](configs/mask_rcnn_coco.json)|[Link](https://storage.openvinotoolkit.org/repositories/nncf/models/v2.5.0/tensorflow/mask_rcnn_coco.tar.gz)| -|Mask-R-CNN|INT8 (per-tensor symmetric for weights, per-tensor asymmetric half-range for activations)|COCO 2017| bbox: 37.19 (0.14) segm: 33.54 (0.02) |[mask_rcnn_coco_int8.json](configs/quantization/mask_rcnn_coco_int8.json)|[Link](https://storage.openvinotoolkit.org/repositories/nncf/models/v2.5.0/tensorflow/mask_rcnn_coco_int8.tar.gz)| -|Mask-R-CNN|Magnitude sparsity, 50%|COCO 2017| bbox: 36.94 (0.39) segm: 33.23 (0.33) |[mask_rcnn_coco_magnitude_sparsity.json](configs/sparsity/mask_rcnn_coco_magnitude_sparsity.json)|[Link](https://storage.openvinotoolkit.org/repositories/nncf/models/v2.5.0/tensorflow/mask_rcnn_coco_magnitude_sparsity.tar.gz)| +Please see compression results for Tensorflow instance segmentation at our [Model Zoo page](../../../docs/ModelZoo.md#tensorflow-instance-segmentation). 
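> Editor's note: the segmentation sample above validates checkpoints in a separate process pinned to a different GPU set via `CUDA_VISIBLE_DEVICES`, so the training and evaluation commands shown earlier in that README are normally started side by side. A purely illustrative launcher, not part of the samples; GPU ids and `<path_to_*>` values are placeholders, and the flags mirror the commands in the README:

```python
import os
import subprocess


def launch(script, visible_gpus, *args):
    # Pin the child process to the given GPUs; everything else is inherited from the current environment.
    env = dict(os.environ, CUDA_VISIBLE_DEVICES=visible_gpus)
    return subprocess.Popen(["python", script, *args], env=env)


# Compression fine-tuning on GPUs 0 and 1 ...
train = launch("train.py", "0,1",
               "--config=configs/quantization/mask_rcnn_coco_int8.json",
               "--weights=<path_to_weights>",
               "--data=<path_to_coco>")
# ... while checkpoints are validated as they appear, on GPU 2.
validate = launch("evaluation.py", "2",
                  "--mode=train",
                  "--config=configs/quantization/mask_rcnn_coco_int8.json",
                  "--data=<path_to_coco>",
                  "--batch-size=1",
                  "--checkpoint-save-dir=<path_to_checkpoints>")
train.wait()
validate.wait()
```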
diff --git a/examples/torch/README.md b/examples/torch/README.md index a53d145405d..be3c1321a12 100644 --- a/examples/torch/README.md +++ b/examples/torch/README.md @@ -1,21 +1,27 @@ +# Installation + Install the packages needed for samples by running the following in the current directory: -``` +```bash pip install -r requirements.txt ``` One of the needed package - torchvision. The version of torchvision should always match the version of installed torch package. Please refer to the [table](https://github.com/pytorch/pytorch/wiki/PyTorch-Versions#domain-version-compatibility-matrix-for-pytorch) to find compatible versions of torchvision and torch. -By default, if there is no torchvision in your Python environment it installs the package that is compatible with -the best known torch version (`BKC_TORCH_VERSION` in the code). In that case if your environment has the torch version, +By default, if there is no torchvision in your Python environment it installs the package that is compatible with +the best known torch version (`BKC_TORCH_VERSION` in the code). In that case if your environment has the torch version, which is different from best known one, you should install the corresponding torchvision package by yourself. -For example, if you need torch 1.9.1 (not best known version) with CUDA11 support, we recommend specifying the -corresponding torchvision version as follows in the root nncf directory: +For example, if you need torch 1.9.1 (not best known version) with CUDA11 support, we recommend specifying the +corresponding torchvision version as follows in the root nncf directory: -``` +```bash pip install torch==1.9.1+cu111 torchvision==0.10.1+cu111 -f https://download.pytorch.org/whl/torch_stable.html pip install .[torch] pip install -r examples/torch/requirements.txt ``` + +## Results + +Please see compression results for PyTorch models at our [Model Zoo page](../../../docs/ModelZoo.md#pytorch). diff --git a/examples/torch/classification/README.md b/examples/torch/classification/README.md index 19c34e24646..36c85eecd49 100644 --- a/examples/torch/classification/README.md +++ b/examples/torch/classification/README.md @@ -17,7 +17,7 @@ At this point it is assumed that you have already installed nncf. You can find i To work with the sample you should install the corresponding Python package dependencies: -``` +```bash pip install -r examples/torch/requirements.txt ``` @@ -25,16 +25,16 @@ pip install -r examples/torch/requirements.txt This scenario demonstrates quantization with fine-tuning of MobileNet v2 on the ImageNet dataset. -#### Dataset Preparation +### Dataset Preparation To prepare the ImageNet dataset, refer to the following [tutorial](https://github.com/pytorch/examples/tree/master/imagenet). -#### Run Classification Sample +### Run Classification Sample - If you did not install the package, add the repository root folder to the `PYTHONPATH` environment variable. - Go to the `examples/torch/classification` folder. -#### Test Pretrained Model +### Test Pretrained Model Before compressing a model, it is highly recommended checking the accuracy of the pretrained model. All models which are supported in the sample has pretrained weights for ImageNet. 
@@ -48,11 +48,15 @@ python main.py \ --disable-compression ``` -#### Compress Pretrained Model +### Compress Pretrained Model - Run the following command to start compression with fine-tuning on GPUs: - ``` - python main.py -m train --config configs/quantization/mobilenet_v2_imagenet_int8.json --data /data/imagenet/ --log-dir=../../results/quantization/mobilenet_v2_int8/ + + ```bash + python main.py -m train \ + --config configs/quantization/mobilenet_v2_imagenet_int8.json \ + --data /data/imagenet/ \ + --log-dir=../../results/quantization/mobilenet_v2_int8/ ``` It may take a few epochs to get the baseline accuracy results. @@ -62,90 +66,33 @@ python main.py \ - Use the `--weights` flag with the path to a compatible PyTorch checkpoint in order to load all matching weights from the checkpoint into the model - useful if you need to start compression-aware training from a previously trained uncompressed (FP32) checkpoint instead of performing compression-aware training from scratch. - Use the `--no_strip_on_export` to export not stripped model. -#### Validate Your Model Checkpoint +### Validate Your Model Checkpoint To estimate the test scores of your trained model checkpoint, use the following command: -``` -python main.py -m test --config=configs/quantization/mobilenet_v2_imagenet_int8.json --resume +```bash +python main.py -m test \ +--config=configs/quantization/mobilenet_v2_imagenet_int8.json \ +--resume ``` **WARNING**: The samples use `torch.load` functionality for checkpoint loading which, in turn, uses pickle facilities by default which are known to be vulnerable to arbitrary code execution attacks. **Only load the data you trust** -#### Export Compressed Model +### Export Compressed Model To export trained model to the ONNX format, use the following command: -``` -python main.py -m export --config=configs/quantization/mobilenet_v2_imagenet_int8.json --resume=../../results/quantization/mobilenet_v2_int8/6/checkpoints/epoch_1.pth --to-onnx=../../results/mobilenet_v2_int8.onnx +```bash +python main.py -m export \ +--config=configs/quantization/mobilenet_v2_imagenet_int8.json \ +--resume=../../results/quantization/mobilenet_v2_int8/6/checkpoints/epoch_1.pth \ +--to-onnx=../../results/mobilenet_v2_int8.onnx ``` -#### Export to OpenVINO™ Intermediate Representation (IR) +### Export to OpenVINO™ Intermediate Representation (IR) To export a model to the OpenVINO IR and run it using the Intel® Deep Learning Deployment Toolkit, refer to this [tutorial](https://software.intel.com/en-us/openvino-toolkit). 
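> Editor's note: under the hood, the export step in these samples follows the logic of `examples/torch/common/export.py` as updated later in this patch: strip auxiliary compression operations (unless `--no_strip_on_export` is given), rebuild dummy inputs from the shapes recorded on the NNCF-wrapped model, and trace to ONNX. A condensed sketch of that flow; the real helper contains additional handling, and the final `torch.onnx.export` call here is an assumption about the tracing step rather than a copy of the sample code.

```python
import torch

from nncf.api.compression import CompressionAlgorithmController
from nncf.torch.exporter import generate_input_names_list


def export_to_onnx(ctrl: CompressionAlgorithmController, save_path: str, no_strip_on_export: bool = False) -> None:
    # Remove auxiliary compression operations before tracing, unless explicitly disabled.
    model = ctrl.model if no_strip_on_export else ctrl.strip()
    model = model.eval().cpu()

    # Dummy inputs are rebuilt from the input shapes NNCF recorded on the wrapped model.
    input_infos = model.nncf.input_infos
    input_names = generate_input_names_list(len(input_infos))
    dummy_inputs = tuple(torch.rand([1] + list(info.shape)[1:]) for info in input_infos)

    torch.onnx.export(model, dummy_inputs, save_path, input_names=input_names)
```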
- -### Results for quantization - -|Model|Compression algorithm|Dataset|Accuracy (_drop_) %|NNCF config file|Checkpoint| -| :---: | :---: | :---: | :---: | :---: | :---: | -|ResNet-50|None|ImageNet|76.15|[resnet50_imagenet.json](configs/quantization/resnet50_imagenet.json)|-| -|ResNet-50|INT8|ImageNet|76.46 (-0.31)|[resnet50_imagenet_int8.json](configs/quantization/resnet50_imagenet_int8.json)|[Link](https://storage.openvinotoolkit.org/repositories/nncf/models/v2.5.0/torch/resnet50_imagenet_int8.pth)| -|ResNet-50|INT8 (per-tensor only)|ImageNet|76.39 (-0.24)|[resnet50_imagenet_int8_per_tensor.json](configs/quantization/resnet50_imagenet_int8_per_tensor.json)|[Link](https://storage.openvinotoolkit.org/repositories/nncf/models/v2.5.0/torch/resnet50_imagenet_int8_per_tensor.pth)| -|ResNet-50|Mixed, 43.12% INT8 / 56.88% INT4|ImageNet|76.05 (0.10)|[resnet50_imagenet_mixed_int_hawq.json](configs/mixed_precision/resnet50_imagenet_mixed_int_hawq.json)|[Link](https://storage.openvinotoolkit.org/repositories/nncf/models/v2.5.0/torch/resnet50_imagenet_int4_int8.pth)| -|ResNet-50|INT8 + Sparsity 61% (RB)|ImageNet|75.42 (0.73)|[resnet50_imagenet_rb_sparsity_int8.json](configs/sparsity_quantization/resnet50_imagenet_rb_sparsity_int8.json)|[Link](https://storage.openvinotoolkit.org/repositories/nncf/models/v2.5.0/torch/resnet50_imagenet_rb_sparsity_int8.pth)| -|ResNet-50|INT8 + Sparsity 50% (RB)|ImageNet|75.50 (0.65)|[resnet50_imagenet_rb_sparsity50_int8.json](configs/sparsity_quantization/resnet50_imagenet_rb_sparsity50_int8.json)|[Link](https://storage.openvinotoolkit.org/repositories/nncf/models/v2.5.0/torch/resnet50_imagenet_rb_sparsity50_int8.pth)| -|Inception V3|None|ImageNet|77.33|[inception_v3_imagenet.json](configs/quantization/inception_v3_imagenet.json)|-| -|Inception V3|INT8|ImageNet|77.45 (-0.12)|[inception_v3_imagenet_int8.json](configs/quantization/inception_v3_imagenet_int8.json)|[Link](https://storage.openvinotoolkit.org/repositories/nncf/models/v2.5.0/torch/inception_v3_imagenet_int8.pth)| -|Inception V3|INT8 + Sparsity 61% (RB)|ImageNet|76.36 (0.97)|[inception_v3_imagenet_rb_sparsity_int8.json](configs/sparsity_quantization/inception_v3_imagenet_rb_sparsity_int8.json)|[Link](https://storage.openvinotoolkit.org/repositories/nncf/models/v2.5.0/torch/inception_v3_imagenet_rb_sparsity_int8.pth)| -|MobileNet V2|None|ImageNet|71.87|[mobilenet_v2_imagenet.json](configs/quantization/mobilenet_v2_imagenet.json)|-| -|MobileNet V2|INT8|ImageNet|71.07 (0.80)|[mobilenet_v2_imagenet_int8.json](configs/quantization/mobilenet_v2_imagenet_int8.json)|[Link](https://storage.openvinotoolkit.org/repositories/nncf/models/v2.5.0/torch/mobilenet_v2_imagenet_int8.pth)| -|MobileNet V2|INT8 (per-tensor only)|ImageNet|71.24 (0.63)|[mobilenet_v2_imagenet_int8_per_tensor.json](configs/quantization/mobilenet_v2_imagenet_int8_per_tensor.json)|[Link](https://storage.openvinotoolkit.org/repositories/nncf/models/v2.5.0/torch/mobilenet_v2_imagenet_int8_per_tensor.pth)| -|MobileNet V2|Mixed, 58.88% INT8 / 41.12% INT4|ImageNet|70.95 (0.92)|[mobilenet_v2_imagenet_mixed_int_hawq.json](configs/mixed_precision/mobilenet_v2_imagenet_mixed_int_hawq.json)|[Link](https://storage.openvinotoolkit.org/repositories/nncf/models/v2.5.0/torch/mobilenet_v2_imagenet_int4_int8.pth)| -|MobileNet V2|INT8 + Sparsity 52% (RB)|ImageNet|71.09 
(0.78)|[mobilenet_v2_imagenet_rb_sparsity_int8.json](configs/sparsity_quantization/mobilenet_v2_imagenet_rb_sparsity_int8.json)|[Link](https://storage.openvinotoolkit.org/repositories/nncf/models/v2.5.0/torch/mobilenet_v2_imagenet_rb_sparsity_int8.pth)| -|MobileNet V3 small|None|ImageNet|67.66|[mobilenet_v3_small_imagenet.json](configs/quantization/mobilenet_v3_small_imagenet.json)|-| -|MobileNet V3 small|INT8|ImageNet|66.98 (0.68)|[mobilenet_v3_small_imagenet_int8.json](configs/quantization/mobilenet_v3_small_imagenet_int8.json)|[Link](https://storage.openvinotoolkit.org/repositories/nncf/models/v2.5.0/torch/mobilenet_v3_small_imagenet_int8.pth)| -|SqueezeNet V1.1|None|ImageNet|58.19|[squeezenet1_1_imagenet.json](configs/quantization/squeezenet1_1_imagenet.json)|-| -|SqueezeNet V1.1|INT8|ImageNet|58.22 (-0.03)|[squeezenet1_1_imagenet_int8.json](configs/quantization/squeezenet1_1_imagenet_int8.json)|[Link](https://storage.openvinotoolkit.org/repositories/nncf/models/v2.5.0/torch/squeezenet1_1_imagenet_int8.pth)| -|SqueezeNet V1.1|INT8 (per-tensor only)|ImageNet|58.11 (0.08)|[squeezenet1_1_imagenet_int8_per_tensor.json](configs/quantization/squeezenet1_1_imagenet_int8_per_tensor.json)|[Link](https://storage.openvinotoolkit.org/repositories/nncf/models/v2.5.0/torch/squeezenet1_1_imagenet_int8_per_tensor.pth)| -|SqueezeNet V1.1|Mixed, 52.83% INT8 / 47.17% INT4|ImageNet|57.57 (0.62)|[squeezenet1_1_imagenet_mixed_int_hawq_old_eval.json](configs/mixed_precision/squeezenet1_1_imagenet_mixed_int_hawq_old_eval.json)|[Link](https://storage.openvinotoolkit.org/repositories/nncf/models/v2.5.0/torch/squeezenet1_1_imagenet_int4_int8.pth)| -|ResNet-18|None|ImageNet|69.76|[resnet18_imagenet.json](configs/binarization/resnet18_imagenet.json)|-| -|ResNet-34|None|ImageNet|73.30|[resnet34_imagenet.json](configs/pruning/resnet34_imagenet.json)|-| -|GoogLeNet|None|ImageNet|69.77|[googlenet_imagenet.json](configs/pruning/googlenet_imagenet.json)|-| - - -#### Binarization - -As an example of NNCF convolution binarization capabilities, you may use the configs in `examples/torch/classification/configs/binarization` to binarize ResNet18. Use the same steps/command line parameters as for quantization (for best results, specify `--pretrained`), except for the actual binarization config path. 
- - -### Results for binarization - -|Model|Compression algorithm|Dataset|Accuracy (_drop_) %|NNCF config file|Checkpoint| -| :---: | :---: | :---: | :---: | :---: | :---: | -|ResNet-18|None|ImageNet|69.76|[resnet18_imagenet.json](configs/binarization/resnet18_imagenet.json)|-| -|ResNet-18|XNOR (weights), scale/threshold (activations)|ImageNet|61.67 (8.09)|[resnet18_imagenet_binarization_xnor.json](configs/binarization/resnet18_imagenet_binarization_xnor.json)|[Link](https://storage.openvinotoolkit.org/repositories/nncf/models/v2.5.0/torch/resnet18_imagenet_binarization_xnor.pth)| -|ResNet-18|DoReFa (weights), scale/threshold (activations)|ImageNet|61.63 (8.13)|[resnet18_imagenet_binarization_dorefa.json](configs/binarization/resnet18_imagenet_binarization_dorefa.json)|[Link](https://storage.openvinotoolkit.org/repositories/nncf/models/v2.5.0/torch/resnet18_imagenet_binarization_dorefa.pth)| - - -### Results for filter pruning -|Model|Compression algorithm|Dataset|Accuracy (_drop_) %|NNCF config file|Checkpoint| -| :---: | :---: | :---: | :---: | :---: | :---: | -|ResNet-50|None|ImageNet|76.15|[resnet50_imagenet.json](configs/quantization/resnet50_imagenet.json)|-| -|ResNet-50|Filter pruning, 40%, geometric median criterion|ImageNet|75.57 (0.58)|[resnet50_imagenet_pruning_geometric_median.json](configs/pruning/resnet50_imagenet_pruning_geometric_median.json)|[Link](https://storage.openvinotoolkit.org/repositories/nncf/models/v2.5.0/torch/resnet50_imagenet_pruning_geometric_median.pth)| -|ResNet-18|None|ImageNet|69.76|[resnet18_imagenet.json](configs/binarization/resnet18_imagenet.json)|-| -|ResNet-18|Filter pruning, 40%, magnitude criterion|ImageNet|69.27 (0.49)|[resnet18_imagenet_pruning_magnitude.json](configs/pruning/resnet18_imagenet_pruning_magnitude.json)|[Link](https://storage.openvinotoolkit.org/repositories/nncf/models/v2.5.0/torch/resnet18_imagenet_pruning_magnitude.pth)| -|ResNet-18|Filter pruning, 40%, geometric median criterion|ImageNet|69.31 (0.45)|[resnet18_imagenet_pruning_geometric_median.json](configs/pruning/resnet18_imagenet_pruning_geometric_median.json)|[Link](https://storage.openvinotoolkit.org/repositories/nncf/models/v2.5.0/torch/resnet18_imagenet_pruning_geometric_median.pth)| -|ResNet-34|None|ImageNet|73.30|[resnet34_imagenet.json](configs/pruning/resnet34_imagenet.json)|-| -|ResNet-34|Filter pruning, 50%, geometric median criterion + KD|ImageNet|73.11 (0.19)|[resnet34_imagenet_pruning_geometric_median_kd.json](configs/pruning/resnet34_imagenet_pruning_geometric_median_kd.json)|[Link](https://storage.openvinotoolkit.org/repositories/nncf/models/v2.5.0/torch/resnet34_imagenet_pruning_geometric_median_kd.pth)| -|GoogLeNet|None|ImageNet|69.77|[googlenet_imagenet.json](configs/pruning/googlenet_imagenet.json)|-| -|GoogLeNet|Filter pruning, 40%, geometric median criterion|ImageNet|69.47 (0.30)|[googlenet_imagenet_pruning_geometric_median.json](configs/pruning/googlenet_imagenet_pruning_geometric_median.json)|[Link](https://storage.openvinotoolkit.org/repositories/nncf/models/v2.5.0/torch/googlenet_imagenet_pruning_geometric_median.pth)| - - -### Results for accuracy-aware compressed training -|Model|Compression algorithm|Dataset|Accuracy (Drop) %|NNCF config file| -| :---: | :---: | :---: | :---: | :---: | -|ResNet-50|None|ImageNet|76.16|[resnet50_imagenet.json](configs/quantization/resnet50_imagenet.json)| -|ResNet-50|Filter pruning, 52.5%, geometric median criterion|ImageNet|75.23 
(0.93)|[resnet50_imagenet_accuracy_aware.json](configs/pruning/resnet50_imagenet_pruning_accuracy_aware.json)| -|ResNet-18|None|ImageNet|69.8|[resnet18_imagenet.json](configs/binarization/resnet18_imagenet.json)| -|ResNet-18|Filter pruning, 60%, geometric median criterion|ImageNet|69.2 (-0.6)|[resnet18_imagenet_accuracy_aware.json](configs/pruning/resnet18_imagenet_pruning_accuracy_aware.json)| +## Results + +Please see compression results for PyTorch classification at our [Model Zoo page](../../../docs/ModelZoo.md#pytorch-classification). diff --git a/examples/torch/classification/main.py b/examples/torch/classification/main.py index 2fc49d6723a..2854c894f0a 100644 --- a/examples/torch/classification/main.py +++ b/examples/torch/classification/main.py @@ -226,6 +226,9 @@ def model_eval_fn(model): model.to(config.device) + if "train" in config.mode and is_accuracy_aware_training(config): + uncompressed_model_accuracy = model_eval_fn(model) + resuming_checkpoint = None if resuming_checkpoint_path is not None: resuming_checkpoint = load_resuming_checkpoint(resuming_checkpoint_path) @@ -293,7 +296,9 @@ def configure_optimizers_fn(): optimizer, lr_scheduler = make_optimizer(params_to_optimize, config) return optimizer, lr_scheduler - acc_aware_training_loop = create_accuracy_aware_training_loop(nncf_config, compression_ctrl) + acc_aware_training_loop = create_accuracy_aware_training_loop( + nncf_config, compression_ctrl, uncompressed_model_accuracy + ) model = acc_aware_training_loop.run( model, train_epoch_fn=train_epoch_fn, diff --git a/examples/torch/common/example_logger.py b/examples/torch/common/example_logger.py index 48289c39a16..fb07f54ca5d 100644 --- a/examples/torch/common/example_logger.py +++ b/examples/torch/common/example_logger.py @@ -1,3 +1,14 @@ +# Copyright (c) 2023 Intel Corporation +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + import logging import sys diff --git a/examples/torch/common/export.py b/examples/torch/common/export.py index 12314df8071..8d3a4c266e8 100644 --- a/examples/torch/common/export.py +++ b/examples/torch/common/export.py @@ -9,7 +9,6 @@ # See the License for the specific language governing permissions and # limitations under the License. import torch -from pkg_resources import parse_version from nncf.api.compression import CompressionAlgorithmController from nncf.torch.exporter import generate_input_names_list @@ -24,15 +23,12 @@ def export_model(ctrl: CompressionAlgorithmController, save_path: str, no_strip_ :param no_strip_on_export: Set to skip strip model before export. 
""" - if parse_version(torch.__version__) < parse_version("1.10"): - no_strip_on_export = True - model = ctrl.model if no_strip_on_export else ctrl.strip() model = model.eval().cpu() - input_names = generate_input_names_list(len(model.input_infos)) + input_names = generate_input_names_list(len(model.nncf.input_infos)) input_tensor_list = [] - for info in model.input_infos: + for info in model.nncf.input_infos: input_shape = tuple([1] + list(info.shape)[1:]) input_tensor_list.append(torch.rand(input_shape)) diff --git a/examples/torch/common/models/__init__.py b/examples/torch/common/models/__init__.py index cc6b689afd6..74a27ab5f05 100644 --- a/examples/torch/common/models/__init__.py +++ b/examples/torch/common/models/__init__.py @@ -1,2 +1,13 @@ +# Copyright (c) 2023 Intel Corporation +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + from examples.torch.common.models.classification import * from examples.torch.common.models.segmentation import * diff --git a/examples/torch/common/models/classification/rmnet_cifar.py b/examples/torch/common/models/classification/rmnet_cifar.py index c8759629732..b977b02ce42 100644 --- a/examples/torch/common/models/classification/rmnet_cifar.py +++ b/examples/torch/common/models/classification/rmnet_cifar.py @@ -1,3 +1,14 @@ +# Copyright (c) 2023 Intel Corporation +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + from collections import OrderedDict import torch.nn.functional as F diff --git a/examples/torch/common/models/segmentation/__init__.py b/examples/torch/common/models/segmentation/__init__.py index a15fc9b57b5..43a20be3b34 100644 --- a/examples/torch/common/models/segmentation/__init__.py +++ b/examples/torch/common/models/segmentation/__init__.py @@ -1,3 +1,14 @@ +# Copyright (c) 2023 Intel Corporation +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ from .enet import * from .icnet import * from .unet import * diff --git a/examples/torch/common/models/segmentation/icnet.py b/examples/torch/common/models/segmentation/icnet.py index d9e088860d9..6fafce1b532 100644 --- a/examples/torch/common/models/segmentation/icnet.py +++ b/examples/torch/common/models/segmentation/icnet.py @@ -21,7 +21,7 @@ import torch import torch.nn.functional as F from numpy import lcm -from pkg_resources import parse_version +from packaging import version from torch import nn from examples.torch.common.example_logger import logger @@ -393,7 +393,7 @@ def forward(self, inputs): fused_features_ds4 = F.interpolate(fused_features_sub421, self._input_size_hw_ds4, **self.sampling_params) label_scores_ds4 = self.conv6_cls(fused_features_ds4) label_scores = F.interpolate(label_scores_ds4, self._input_size_hw, **self.sampling_params) - if is_tracing_state() and parse_version(torch.__version__) >= parse_version("1.1.0"): + if is_tracing_state() and version.parse(torch.__version__) >= version.parse("1.1.0"): # While exporting, add extra post-processing layers into the graph # so that the model outputs class probabilities instead of class scores softmaxed = F.softmax(label_scores, dim=1) diff --git a/examples/torch/common/models/segmentation/unet.py b/examples/torch/common/models/segmentation/unet.py index 1d45afc6308..6148294b9b0 100644 --- a/examples/torch/common/models/segmentation/unet.py +++ b/examples/torch/common/models/segmentation/unet.py @@ -14,7 +14,7 @@ import torch import torch.nn.functional as F -from pkg_resources import parse_version +from packaging import version from torch import nn from examples.torch.common.example_logger import logger @@ -85,7 +85,7 @@ def forward(self, x): x = up(x, blocks[-i - 1]) x = self.last(x) - if is_tracing_state() and parse_version(torch.__version__) >= parse_version("1.1.0"): + if is_tracing_state() and version.parse(torch.__version__) >= version.parse("1.1.0"): # While exporting, add extra post-processing layers into the graph # so that the model outputs class probabilities instead of class scores softmaxed = F.softmax(x, dim=1) diff --git a/examples/torch/object_detection/README.md b/examples/torch/object_detection/README.md index 18c9657262f..88eb11380fc 100644 --- a/examples/torch/object_detection/README.md +++ b/examples/torch/object_detection/README.md @@ -2,7 +2,7 @@ This sample demonstrates DL model compression capabilities for object detection task. -## Features: +## Features - Vanilla SSD300 / SSD512 (+ Batch Normalization), MobileNetSSD-300 - VOC2007 / VOC2012, COCO datasets @@ -17,7 +17,7 @@ At this point it is assumed that you have already installed nncf. You can find i To work with the sample you should install the corresponding Python package dependencies: -``` +```bash pip install -r examples/torch/requirements.txt ``` @@ -25,16 +25,17 @@ pip install -r examples/torch/requirements.txt This scenario demonstrates quantization with fine-tuning of SSD300 on VOC dataset. -#### Dataset preparation +### Dataset preparation - Download and extract in one folder train/val+test VOC2007 and train/val VOC2012 data from [here](https://pjreddie.com/projects/pascal-voc-dataset-mirror/) - In the future, `` means the path to this folder. 
-#### Run object detection sample +### Run object detection sample - If you did not install the package then add the repository root folder to the `PYTHONPATH` environment variable - Navigate to the `examples/torch/object_detection` folder - (Optional) Before compressing a model, it is highly recommended checking the accuracy of the pretrained model, use the following command: + ```bash python main.py \ --mode=test \ @@ -42,6 +43,7 @@ This scenario demonstrates quantization with fine-tuning of SSD300 on VOC datase --data= \ --disable-compression ``` + - Run the following command to start compression with fine-tuning on GPUs: `python main.py -m train --config configs/ssd300_vgg_voc_int8.json --data --log-dir=../../results/quantization/ssd300_int8 --weights=`It may take a few epochs to get the baseline accuracy results. - Use `--weights` flag with the path to a compatible PyTorch checkpoint in order to load all matching weights from the checkpoint into the model - useful if you need to start compression-aware training from a previously trained uncompressed (FP32) checkpoint instead of performing compression-aware training from scratch. This flag is optional, but highly recommended to use. @@ -49,7 +51,7 @@ This scenario demonstrates quantization with fine-tuning of SSD300 on VOC datase - Use `--resume` flag with the path to a previously saved model to resume training. - Use the `--no_strip_on_export` to export not stripped model. -#### Validate your model checkpoint +### Validate your model checkpoint To estimate the test scores of your trained model checkpoint use the following command: `python main.py -m test --config=configs/ssd300_vgg_voc_int8.json --data --resume ` @@ -57,32 +59,15 @@ If you want to validate an FP32 model checkpoint, make sure the compression algo **WARNING**: The samples use `torch.load` functionality for checkpoint loading which, in turn, uses pickle facilities by default which are known to be vulnerable to arbitrary code execution attacks. **Only load the data you trust** -#### Export compressed model +### Export compressed model To export trained model to ONNX format use the following command: `python main.py -m export --config configs/ssd300_vgg_voc_int8.json --data --resume --to-onnx=../../results/ssd300_int8.onnx` -#### Export to OpenVINO Intermediate Representation (IR) +### Export to OpenVINO Intermediate Representation (IR) To export a model to OpenVINO IR and run it using Intel Deep Learning Deployment Toolkit please refer to this [tutorial](https://software.intel.com/en-us/openvino-toolkit). 
- -### Results - -|Model|Compression algorithm|Dataset|mAP (_drop_) %|NNCF config file|Checkpoint| -| :---: | :---: | :---: | :---: | :---: | :---: | -|SSD300-MobileNet|None|VOC12+07 train, VOC07 eval|62.23|[ssd300_mobilenet_voc.json](configs/ssd300_mobilenet_voc.json)|[Link](https://storage.openvinotoolkit.org/repositories/nncf/models/v2.5.0/torch/ssd300_mobilenet_voc.pth)| -|SSD300-MobileNet|INT8 + Sparsity 70% (Magnitude)|VOC12+07 train, VOC07 eval|62.95 (-0.72)|[ssd300_mobilenet_voc_magnitude_int8.json](configs/ssd300_mobilenet_voc_magnitude_int8.json)|[Link](https://storage.openvinotoolkit.org/repositories/nncf/models/v2.5.0/torch/ssd300_mobilenet_voc_magnitude_sparsity_int8.pth)| -|SSD300-VGG-BN|None|VOC12+07 train, VOC07 eval|78.28|[ssd300_vgg_voc.json](configs/ssd300_vgg_voc.json)|[Link](https://storage.openvinotoolkit.org/repositories/nncf/models/v2.5.0/torch/ssd300_vgg_voc.pth)| -|SSD300-VGG-BN|INT8|VOC12+07 train, VOC07 eval|77.81 (0.47)|[ssd300_vgg_voc_int8.json](configs/ssd300_vgg_voc_int8.json)|[Link](https://storage.openvinotoolkit.org/repositories/nncf/models/v2.5.0/torch/ssd300_vgg_voc_int8.pth)| -|SSD300-VGG-BN|INT8 + Sparsity 70% (Magnitude)|VOC12+07 train, VOC07 eval|77.66 (0.62)|[ssd300_vgg_voc_magnitude_sparsity_int8.json](configs/ssd300_vgg_voc_magnitude_sparsity_int8.json)|[Link](https://storage.openvinotoolkit.org/repositories/nncf/models/v2.5.0/torch/ssd300_vgg_voc_magnitude_sparsity_int8.pth)| -|SSD512-VGG-BN|None|VOC12+07 train, VOC07 eval|80.26|[ssd512_vgg_voc.json](configs/ssd512_vgg_voc.json)|[Link](https://storage.openvinotoolkit.org/repositories/nncf/models/v2.5.0/torch/ssd512_vgg_voc.pth)| -|SSD512-VGG-BN|INT8|VOC12+07 train, VOC07 eval|80.04 (0.22)|[ssd512_vgg_voc_int8.json](configs/ssd512_vgg_voc_int8.json)|[Link](https://storage.openvinotoolkit.org/repositories/nncf/models/v2.5.0/torch/ssd512_vgg_voc_int8.pth)| -|SSD512-VGG-BN|INT8 + Sparsity 70% (Magnitude)|VOC12+07 train, VOC07 eval|79.68 (0.58)|[ssd512_vgg_voc_magnitude_sparsity_int8.json](configs/ssd512_vgg_voc_magnitude_sparsity_int8.json)|[Link](https://storage.openvinotoolkit.org/repositories/nncf/models/v2.5.0/torch/ssd512_vgg_voc_magnitude_sparsity_int8.pth)| - - -#### Results for filter pruning -|Model|Compression algorithm|Dataset|mAP (_drop_) %|NNCF config file|Checkpoint| -| :---: | :---: | :---: | :---: | :---: | :---: | -|SSD300-VGG-BN|None|VOC12+07 train, VOC07 eval|78.28|[ssd300_vgg_voc.json](configs/ssd300_vgg_voc.json)|[Link](https://storage.openvinotoolkit.org/repositories/nncf/models/v2.5.0/torch/ssd300_vgg_voc.pth)| -|SSD300-VGG-BN|Filter pruning, 40%, geometric median criterion|VOC12+07 train, VOC07 eval|78.35 (-0.07)|[ssd300_vgg_voc_pruning_geometric_median.json](configs/ssd300_vgg_voc_pruning_geometric_median.json)|[Link](https://storage.openvinotoolkit.org/repositories/nncf/models/v2.5.0/torch/ssd300_vgg_voc_pruning_geometric_median.pth)| +## Results + +Please see compression results for PyTorch object detection at our [Model Zoo page](../../../docs/ModelZoo.md#pytorch-object-detection). 
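> Editor's note: the `eval.py` change right below replaces `np.bool` with the built-in `bool`. NumPy deprecated the `np.bool` alias in 1.20 and removed it in 1.24, so boolean masks should be built from plain `bool`. A minimal, version-safe illustration of the equivalent form (the flag values are placeholders):

```python
import numpy as np

difficult_flags = [False, True, False]

# np.array(...).astype(np.bool) fails on NumPy >= 1.24; the built-in bool is equivalent here.
difficult = np.array(difficult_flags).astype(bool)

# Count the non-difficult ground-truth objects, similar to what eval.py does.
npos = int(np.sum(~difficult))
```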
diff --git a/examples/torch/object_detection/eval.py b/examples/torch/object_detection/eval.py index ffa221b410c..1bd078578aa 100644 --- a/examples/torch/object_detection/eval.py +++ b/examples/torch/object_detection/eval.py @@ -187,7 +187,7 @@ def extract_gt_bboxes(classname, dataset, gt, imagenames): difficult.append(x["difficult"]) else: difficult.append(False) - difficult = np.array(difficult).astype(np.bool) + difficult = np.array(difficult).astype(bool) det = [False] * len(img_gt_objects_for_class) npos = npos + sum(~difficult) class_gt[imagename] = {"bbox": bbox, "difficult": difficult, "det": det} diff --git a/examples/torch/object_detection/layers/__init__.py b/examples/torch/object_detection/layers/__init__.py index 53a3f4b5160..7430d153244 100644 --- a/examples/torch/object_detection/layers/__init__.py +++ b/examples/torch/object_detection/layers/__init__.py @@ -1,2 +1,13 @@ +# Copyright (c) 2023 Intel Corporation +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + from .functions import * from .modules import * diff --git a/examples/torch/object_detection/layers/extensions/__init__.py b/examples/torch/object_detection/layers/extensions/__init__.py index 18ec4e7ba9c..e78d45d9a95 100644 --- a/examples/torch/object_detection/layers/extensions/__init__.py +++ b/examples/torch/object_detection/layers/extensions/__init__.py @@ -1,3 +1,14 @@ +# Copyright (c) 2023 Intel Corporation +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + import os.path import torch diff --git a/examples/torch/object_detection/layers/functions/__init__.py b/examples/torch/object_detection/layers/functions/__init__.py index e772e17b4a4..996ef5af0ba 100644 --- a/examples/torch/object_detection/layers/functions/__init__.py +++ b/examples/torch/object_detection/layers/functions/__init__.py @@ -1,3 +1,14 @@ +# Copyright (c) 2023 Intel Corporation +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ from .detection import DetectionOutput from .prior_box import PriorBox diff --git a/examples/torch/object_detection/layers/modules/__init__.py b/examples/torch/object_detection/layers/modules/__init__.py index 55892c487ab..0052de9b73e 100644 --- a/examples/torch/object_detection/layers/modules/__init__.py +++ b/examples/torch/object_detection/layers/modules/__init__.py @@ -1,3 +1,14 @@ +# Copyright (c) 2023 Intel Corporation +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + from .l2norm import L2Norm from .multibox_loss import MultiBoxLoss diff --git a/examples/torch/object_detection/main.py b/examples/torch/object_detection/main.py index ea75d8efc7a..c44d58ad98f 100644 --- a/examples/torch/object_detection/main.py +++ b/examples/torch/object_detection/main.py @@ -56,6 +56,7 @@ from examples.torch.object_detection.model import build_ssd from nncf.api.compression import CompressionStage from nncf.common.accuracy_aware_training import create_accuracy_aware_training_loop +from nncf.config.structures import ModelEvaluationArgs from nncf.config.utils import is_accuracy_aware_training from nncf.torch import create_compressed_model from nncf.torch import load_state @@ -197,7 +198,11 @@ def model_eval_fn(model): resuming_checkpoint = None if resuming_checkpoint_path is not None: resuming_checkpoint = load_resuming_checkpoint(resuming_checkpoint_path) - compression_ctrl, net = create_model(config, resuming_checkpoint) + net = create_model(config) + if "train" in config.mode and is_accuracy_aware_training(config): + with torch.no_grad(): + uncompressed_model_accuracy = config.nncf_config.get_extra_struct(ModelEvaluationArgs).eval_fn(net) + compression_ctrl, net = compress_model(net, config, resuming_checkpoint) if config.distributed: config.batch_size //= config.ngpus_per_node config.workers //= config.ngpus_per_node @@ -264,7 +269,9 @@ def configure_optimizers_fn(): optimizer, lr_scheduler = make_optimizer(params_to_optimize, config) return optimizer, lr_scheduler - acc_aware_training_loop = create_accuracy_aware_training_loop(nncf_config, compression_ctrl) + acc_aware_training_loop = create_accuracy_aware_training_loop( + nncf_config, compression_ctrl, uncompressed_model_accuracy + ) net = acc_aware_training_loop.run( net, train_epoch_fn=train_epoch_fn, @@ -356,7 +363,7 @@ def create_train_data_loader(batch_size): return test_data_loader, train_data_loader, init_data_loader -def create_model(config: SampleConfig, resuming_checkpoint: dict = None): +def create_model(config: SampleConfig): input_info_list = create_input_infos(config.nncf_config) image_size = input_info_list[0].shape[-1] ssd_net = build_ssd(config.model, config.ssd_params, image_size, config.num_classes, config) @@ -367,8 +374,13 @@ def create_model(config: SampleConfig, resuming_checkpoint: dict = None): load_state(ssd_net, sd) ssd_net.to(config.device) + + return ssd_net + + +def compress_model(model: torch.nn.Module, config: SampleConfig, resuming_checkpoint: dict = None): model_state_dict, compression_state = 
extract_model_and_compression_states(resuming_checkpoint) - compression_ctrl, compressed_model = create_compressed_model(ssd_net, config.nncf_config, compression_state) + compression_ctrl, compressed_model = create_compressed_model(model, config.nncf_config, compression_state) if model_state_dict is not None: load_state(compressed_model, model_state_dict, is_resume=True) compressed_model, _ = prepare_model_for_execution(compressed_model, config) diff --git a/examples/torch/object_detection/utils/augmentations.py b/examples/torch/object_detection/utils/augmentations.py index 7fb39c03a72..d37fc3d5e9e 100644 --- a/examples/torch/object_detection/utils/augmentations.py +++ b/examples/torch/object_detection/utils/augmentations.py @@ -254,7 +254,8 @@ def __call__(self, image: np.ndarray, target: List[Dict]) -> Tuple[np.ndarray, L boxes = np.asarray([x["bbox"] for x in target]) labels = np.asarray([x["label_idx"] for x in target]) # randomly choose a mode - mode = random.choice(self.sample_options) + r_ind = int(random.randint(len(self.sample_options))) + mode = self.sample_options[r_ind] if mode is None: return image, target diff --git a/examples/torch/requirements.txt b/examples/torch/requirements.txt index fbce1467f0f..cf4ff6631de 100644 --- a/examples/torch/requirements.txt +++ b/examples/torch/requirements.txt @@ -3,8 +3,8 @@ pillow>=8.0.1 tensorboard>=2.1 matplotlib>=3.3.3 defusedxml>=0.7.0rc1 -mlflow>=1.12.1,<2.4.0 +mlflow>=2.5.0,<2.7.0 returns>0.14 opencv-python>=4.4.0.46 -torchvision>=0.10.0,<0.15 # the minor version should always match the torch minor version that is installed via NNCF's `pip install nncf[torch]`; TV minor version is torch minor version +1 +torchvision>=0.10.0,<0.16 # the minor version should always match the torch minor version that is installed via NNCF's `pip install nncf[torch]`; TV minor version is torch minor version +1 efficientnet_pytorch diff --git a/examples/torch/semantic_segmentation/README.md b/examples/torch/semantic_segmentation/README.md index d3aff8755eb..17f54beb7e6 100644 --- a/examples/torch/semantic_segmentation/README.md +++ b/examples/torch/semantic_segmentation/README.md @@ -2,7 +2,7 @@ This sample demonstrates DL model compression capabilities for semantic segmentation problem -## Features: +## Features - UNet and ICNet with implementations as close as possible to the original papers - Loaders for CamVid, Cityscapes (20-class), Mapillary Vistas(20-class), Pascal VOC (reuses the loader integrated into torchvision) @@ -17,7 +17,7 @@ At this point it is assumed that you have already installed nncf. You can find i To work with the sample you should install the corresponding Python package dependencies: -``` +```bash pip install -r examples/torch/requirements.txt ``` @@ -25,15 +25,16 @@ pip install -r examples/torch/requirements.txt This scenario demonstrates quantization with fine-tuning of UNet on Mapillary Vistas dataset. 
-#### Dataset preparation +### Dataset preparation - Obtain a copy of Mapillary Vistas train/val data [here](https://www.mapillary.com/dataset/vistas/) -#### Run semantic segmentation sample +### Run semantic segmentation sample - If you did not install the package then add the repository root folder to the `PYTHONPATH` environment variable - Navigate to the `examples/torch/segmentation` folder - (Optional) Before compressing a model, it is highly recommended checking the accuracy of the pretrained model, use the following command: + ```bash python main.py \ --mode=test \ @@ -43,6 +44,7 @@ This scenario demonstrates quantization with fine-tuning of UNet on Mapillary Vi --batch-size=1 \ --disable-compression ``` + - Run the following command to start compression with fine-tuning on GPUs: `python main.py -m train --config configs/unet_mapillary_int8.json --data --weights ` @@ -56,7 +58,7 @@ It may take a few epochs to get the baseline accuracy results. om scratch. - Use the `--no_strip_on_export` to export not stripped model. -#### Validate your model checkpoint +### Validate your model checkpoint To estimate the test scores of your trained model checkpoint use the following command: `python main.py -m test --config=configs/unet_mapillary_int8.json --resume ` @@ -64,34 +66,15 @@ If you want to validate an FP32 model checkpoint, make sure the compression algo **WARNING**: The samples use `torch.load` functionality for checkpoint loading which, in turn, uses pickle facilities by default which are known to be vulnerable to arbitrary code execution attacks. **Only load the data you trust** -#### Export compressed model +### Export compressed model To export trained model to ONNX format use the following command: `python main.py --mode export --config configs/unet_mapillary_int8.json --data --resume --to-onnx unet_int8.onnx` -#### Export to OpenVINO Intermediate Representation (IR) +### Export to OpenVINO Intermediate Representation (IR) To export a model to OpenVINO IR and run it using Intel Deep Learning Deployment Toolkit please refer to this [tutorial](https://software.intel.com/en-us/openvino-toolkit). 
- -### Results - -|Model|Compression algorithm|Dataset|mIoU (_drop_) %|NNCF config file|Checkpoint| -| :---: | :---: | :---: | :---: | :---: | :---: | -|UNet|None|CamVid|71.95|[unet_camvid.json](configs/unet_camvid.json)|[Link](https://storage.openvinotoolkit.org/repositories/nncf/models/v2.5.0/torch/unet_camvid.pth)| -|UNet|INT8|CamVid|71.89 (0.06)|[unet_camvid_int8.json](configs/unet_camvid_int8.json)|[Link](https://storage.openvinotoolkit.org/repositories/nncf/models/v2.5.0/torch/unet_camvid_int8.pth)| -|UNet|INT8 + Sparsity 60% (Magnitude)|CamVid|72.46 (-0.51)|[unet_camvid_magnitude_sparsity_int8.json](configs/unet_camvid_magnitude_sparsity_int8.json)|[Link](https://storage.openvinotoolkit.org/repositories/nncf/models/v2.5.0/torch/unet_camvid_magnitude_sparsity_int8.pth)| -|ICNet|None|CamVid|67.89|[icnet_camvid.json](configs/icnet_camvid.json)|[Link](https://storage.openvinotoolkit.org/repositories/nncf/models/v2.5.0/torch/icnet_camvid.pth)| -|ICNet|INT8|CamVid|67.89 (0.00)|[icnet_camvid_int8.json](configs/icnet_camvid_int8.json)|[Link](https://storage.openvinotoolkit.org/repositories/nncf/models/v2.5.0/torch/icnet_camvid_int8.pth)| -|ICNet|INT8 + Sparsity 60% (Magnitude)|CamVid|67.16 (0.73)|[icnet_camvid_magnitude_sparsity_int8.json](configs/icnet_camvid_magnitude_sparsity_int8.json)|[Link](https://storage.openvinotoolkit.org/repositories/nncf/models/v2.5.0/torch/icnet_camvid_magnitude_sparsity_int8.pth)| -|UNet|None|Mapillary|56.24|[unet_mapillary.json](configs/unet_mapillary.json)|[Link](https://storage.openvinotoolkit.org/repositories/nncf/models/v2.5.0/torch/unet_mapillary.pth)| -|UNet|INT8|Mapillary|56.09 (0.15)|[unet_mapillary_int8.json](configs/unet_mapillary_int8.json)|[Link](https://storage.openvinotoolkit.org/repositories/nncf/models/v2.5.0/torch/unet_mapillary_int8.pth)| -|UNet|INT8 + Sparsity 60% (Magnitude)|Mapillary|55.69 (0.55)|[unet_mapillary_magnitude_sparsity_int8.json](configs/unet_mapillary_magnitude_sparsity_int8.json)|[Link](https://storage.openvinotoolkit.org/repositories/nncf/models/v2.5.0/torch/unet_mapillary_magnitude_sparsity_int8.pth)| - - - -#### Results for filter pruning -|Model|Compression algorithm|Dataset|mIoU (_drop_) %|NNCF config file|Checkpoint| -| :---: | :---: | :---: | :---: | :---: | :---: | -|UNet|None|Mapillary|56.24|[unet_mapillary.json](configs/unet_mapillary.json)|[Link](https://storage.openvinotoolkit.org/repositories/nncf/models/v2.5.0/torch/unet_mapillary.pth)| -|UNet|Filter pruning, 25%, geometric median criterion|Mapillary|55.64 (0.60)|[unet_mapillary_pruning_geometric_median.json](configs/unet_mapillary_pruning_geometric_median.json)|[Link](https://storage.openvinotoolkit.org/repositories/nncf/models/v2.5.0/torch/unet_mapillary_pruning_geometric_median.pth)| +## Results + +Please see compression results for PyTorch semantic segmentation at our [Model Zoo page](../../../docs/ModelZoo.md#pytorch-semantic-segmentation). diff --git a/examples/torch/semantic_segmentation/datasets/__init__.py b/examples/torch/semantic_segmentation/datasets/__init__.py index 60aba6f12dd..e6a42186e03 100644 --- a/examples/torch/semantic_segmentation/datasets/__init__.py +++ b/examples/torch/semantic_segmentation/datasets/__init__.py @@ -1,3 +1,14 @@ +# Copyright (c) 2023 Intel Corporation +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + from .camvid import CamVid from .cityscapes import Cityscapes from .mapillary import Mapillary diff --git a/examples/torch/semantic_segmentation/main.py b/examples/torch/semantic_segmentation/main.py index c97d5b645f4..15f328d630c 100644 --- a/examples/torch/semantic_segmentation/main.py +++ b/examples/torch/semantic_segmentation/main.py @@ -532,6 +532,9 @@ def autoq_test_fn(model, eval_loader): model.to(config.device) + if is_accuracy_aware_training(config) and "train" in config.mode: + uncompressed_model_accuracy = model_eval_fn(model) + resuming_checkpoint = None if resuming_checkpoint_path is not None: resuming_checkpoint = load_resuming_checkpoint(resuming_checkpoint_path) @@ -583,7 +586,9 @@ def configure_optimizers_fn(): optimizer, lr_scheduler = make_optimizer(params_to_optimize, config) return optimizer, lr_scheduler - acc_aware_training_loop = create_accuracy_aware_training_loop(config, compression_ctrl) + acc_aware_training_loop = create_accuracy_aware_training_loop( + config, compression_ctrl, uncompressed_model_accuracy + ) model = acc_aware_training_loop.run( model, train_epoch_fn=train_epoch_fn, diff --git a/examples/torch/semantic_segmentation/metric/__init__.py b/examples/torch/semantic_segmentation/metric/__init__.py index 84b5e836748..a906e4bff4f 100644 --- a/examples/torch/semantic_segmentation/metric/__init__.py +++ b/examples/torch/semantic_segmentation/metric/__init__.py @@ -1,3 +1,14 @@ +# Copyright (c) 2023 Intel Corporation +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + from .confusionmatrix import ConfusionMatrix from .iou import IoU from .metric import Metric diff --git a/nncf/__init__.py b/nncf/__init__.py index 65a47864066..07fac90b908 100644 --- a/nncf/__init__.py +++ b/nncf/__init__.py @@ -11,52 +11,53 @@ """ Neural Network Compression Framework (NNCF) for enhanced OpenVINO™ inference. 
""" + +from nncf.common.logging import nncf_logger from nncf.common.logging.logger import disable_logging from nncf.common.logging.logger import set_log_level +from nncf.common.strip import strip from nncf.config import NNCFConfig from nncf.data import Dataset from nncf.parameters import DropType from nncf.parameters import ModelType from nncf.parameters import TargetDevice from nncf.quantization import QuantizationPreset +from nncf.quantization import compress_weights from nncf.quantization import quantize from nncf.quantization import quantize_with_accuracy_control +from nncf.quantization.advanced_parameters import AdvancedQuantizationParameters from nncf.scopes import IgnoredScope from nncf.version import __version__ -_LOADED_FRAMEWORKS = {"torch": True, "tensorflow": True, "onnx": True, "openvino": True} # fmt: off +_SUPPORTED_FRAMEWORKS = ["torch", "tensorflow", "onnx", "openvino"] -try: - import torch -except ImportError: - _LOADED_FRAMEWORKS["torch"] = False -try: - import tensorflow as tf -except ImportError: - _LOADED_FRAMEWORKS["tensorflow"] = False +from importlib.util import find_spec as _find_spec # pylint:disable=wrong-import-position +from pathlib import Path as _Path # pylint:disable=wrong-import-position -try: - import onnx -except ImportError: - _LOADED_FRAMEWORKS["onnx"] = False +_AVAILABLE_FRAMEWORKS = {} -try: - import openvino.runtime as ov_runtime -except ImportError: - _LOADED_FRAMEWORKS["openvino"] = False - -from nncf.common.logging import nncf_logger +for fw_name in _SUPPORTED_FRAMEWORKS: + spec = _find_spec(fw_name) + framework_present = False + if spec is not None and spec.origin is not None: + origin_path = _Path(spec.origin) + here = _Path(__file__) + if origin_path not in here.parents: + # if the framework is not present, spec may still be not None because + # it found our nncf.*backend_name* subpackage, and spec.origin will point to a folder in NNCF code + framework_present = True + _AVAILABLE_FRAMEWORKS[fw_name] = framework_present -if not any(_LOADED_FRAMEWORKS.values()): +if not any(_AVAILABLE_FRAMEWORKS.values()): nncf_logger.error( "Neither PyTorch, TensorFlow, ONNX or OpenVINO Python packages have been found in your Python " "environment.\n" "Please install one of the supported frameworks above in order to use NNCF on top of it.\n" - "See the installation guide at https://github.com/openvinotoolkit/nncf#installation for help." + "See the installation guide at https://github.com/openvinotoolkit/nncf#installation-guide for help." ) else: nncf_logger.info( f"NNCF initialized successfully. Supported frameworks detected: " - f"{', '.join([name for name, loaded in _LOADED_FRAMEWORKS.items() if loaded])}" + f"{', '.join([name for name, loaded in _AVAILABLE_FRAMEWORKS.items() if loaded])}" ) diff --git a/nncf/api/compression.py b/nncf/api/compression.py index 6814fb60820..028af95bb0f 100644 --- a/nncf/api/compression.py +++ b/nncf/api/compression.py @@ -260,7 +260,8 @@ def strip(self, do_copy: bool = True) -> TModel: while still preserving the functioning of the model object as a compressed model. :param do_copy: If True (default), will return a copy of the currently associated model object. If False, - will return the currently associated model object "stripped" in-place. + will return the currently associated model object "stripped" in-place. + :return: The stripped model. 
""" return self.strip_model(self.model, do_copy) diff --git a/nncf/common/accuracy_aware_training/runner.py b/nncf/common/accuracy_aware_training/runner.py index 1e544890c66..884ca5bb92a 100644 --- a/nncf/common/accuracy_aware_training/runner.py +++ b/nncf/common/accuracy_aware_training/runner.py @@ -20,6 +20,7 @@ from nncf.api.compression import CompressionAlgorithmController from nncf.api.compression import CompressionStage from nncf.common.logging import nncf_logger +from nncf.common.plotting import noninteractive_plotting from nncf.common.utils.helpers import configure_accuracy_aware_paths from nncf.common.utils.tensorboard import prepare_for_tensorboard from nncf.config.schemata.defaults import AA_COMPRESSION_RATE_STEP_REDUCTION_FACTOR @@ -486,20 +487,17 @@ def update_training_history(self, compression_rate, metric_value): self._compressed_training_history.append((compression_rate, accuracy_budget)) if IMG_PACKAGES_AVAILABLE: - backend = plt.get_backend() - plt.switch_backend("agg") - plt.ioff() - fig = plt.figure() - plt.plot(self.compressed_training_history.keys(), self.compressed_training_history.values()) - buf = io.BytesIO() - plt.savefig(buf, format="jpeg") - buf.seek(0) - image = PIL.Image.open(buf) - self.add_tensorboard_image( - "compression/accuracy_aware/acc_budget_vs_comp_rate", image, len(self.compressed_training_history) - ) - plt.close(fig) - plt.switch_backend(backend) + with noninteractive_plotting(): + fig = plt.figure() + plt.plot(self.compressed_training_history.keys(), self.compressed_training_history.values()) + buf = io.BytesIO() + plt.savefig(buf, format="jpeg") + buf.seek(0) + image = PIL.Image.open(buf) + self.add_tensorboard_image( + "compression/accuracy_aware/acc_budget_vs_comp_rate", image, len(self.compressed_training_history) + ) + plt.close(fig) @property def compressed_training_history(self): diff --git a/nncf/common/accuracy_aware_training/training_loop.py b/nncf/common/accuracy_aware_training/training_loop.py index 8c456c95062..c9c06f7cb48 100644 --- a/nncf/common/accuracy_aware_training/training_loop.py +++ b/nncf/common/accuracy_aware_training/training_loop.py @@ -30,7 +30,6 @@ from nncf.common.utils.registry import Registry from nncf.config.config import NNCFConfig from nncf.config.extractors import extract_accuracy_aware_training_params -from nncf.config.structures import ModelEvaluationArgs TModel = TypeVar("TModel") TensorboardWriterType = TypeVar("TensorboardWriterType") @@ -392,7 +391,7 @@ def run( ) if prev_compression_rate_step == self.runner.compression_rate_step: nncf_logger.info( - f"Compression rate step value is kept unchanged: " f"{self.runner.compression_rate_step:.3f}" + f"Compression rate step value is kept unchanged: {self.runner.compression_rate_step:.3f}" ) else: nncf_logger.info( @@ -405,7 +404,7 @@ def run( if self.runner.compression_rate_target > self.runner.maximal_compression_rate: self.runner.compression_rate_target = self.runner.maximal_compression_rate nncf_logger.info( - f"Reached maximal possible compression rate: " f"{self.runner.maximal_compression_rate}" + f"Reached maximal possible compression rate: {self.runner.maximal_compression_rate}" ) break @@ -474,7 +473,7 @@ def _run_initial_training_phase(self, model): def _update_target_compression_rate(self, runner, force_update=False): best_accuracy_budget = runner.best_val_metric_value - runner.minimal_tolerable_accuracy nncf_logger.info( - f"Training epoch count: {runner.training_epoch_count}, " f"patience epochs: {runner.patience_epochs}" + f"Training epoch count: 
{runner.training_epoch_count}, patience epochs: {runner.patience_epochs}" ) if runner.training_epoch_count >= runner.patience_epochs or best_accuracy_budget >= 0.0 or force_update: runner.compression_rate_target += self._determine_compression_rate_step_value(runner) @@ -549,24 +548,19 @@ class AccuracyAwareTrainingMode: def create_accuracy_aware_training_loop( nncf_config: NNCFConfig, compression_ctrl: CompressionAlgorithmController, - uncompressed_model_accuracy: float = None, + uncompressed_model_accuracy: float, **additional_runner_args, ) -> BaseEarlyExitCompressionTrainingLoop: """ Creates an accuracy aware training loop corresponding to NNCFConfig and CompressionAlgorithmController. :param: nncf_config: An instance of the NNCFConfig. :param: compression_ctrl: An instance of CompressionAlgorithmController. - :param: uncompressed_model_accuracy: If provided, will take this as the value of the target accuracy metric for the - original (uncompressed) model for purposes of matching the compressed model metric to this baseline. If not - provided, the uncompressed model accuracy will be measured during this function using `ModelEvaluationArgs.eval_fn` - callable as provided by the nncf_config. + :param: uncompressed_model_accuracy: The value of the target accuracy metric for the original (uncompressed) model + for purposes of matching the compressed model metric to this baseline :return: Accuracy aware training loop. """ accuracy_aware_training_params = extract_accuracy_aware_training_params(nncf_config) accuracy_aware_training_mode = accuracy_aware_training_params.get("mode") - if uncompressed_model_accuracy is None: - eval_fn = nncf_config.get_extra_struct(ModelEvaluationArgs).eval_fn - uncompressed_model_accuracy = eval_fn(compression_ctrl.model) if accuracy_aware_training_mode == AccuracyAwareTrainingMode.EARLY_EXIT: return EarlyExitCompressionTrainingLoop( nncf_config, compression_ctrl, uncompressed_model_accuracy, **additional_runner_args diff --git a/nncf/common/compression.py b/nncf/common/compression.py index 39be2d75d7a..9a11803ead8 100644 --- a/nncf/common/compression.py +++ b/nncf/common/compression.py @@ -25,6 +25,7 @@ from nncf.config.extractors import extract_algo_specific_config from nncf.config.extractors import extract_bn_adaptation_init_params from nncf.config.extractors import has_bn_section +from nncf.config.schemata.defaults import VALIDATE_SCOPES TModel = TypeVar("TModel") @@ -201,8 +202,9 @@ def __init__(self, config: NNCFConfig, should_init: bool = True): self.should_init = should_init self._algo_config = self._get_algo_specific_config_section() - self.ignored_scopes = self.config.get("ignored_scopes") + self.validate_scopes = self._algo_config.get("validate_scopes", VALIDATE_SCOPES) + self.ignored_scopes = self.config.get("ignored_scopes") if "ignored_scopes" in self._algo_config: algo_ignored_scopes = self._algo_config["ignored_scopes"] if self.ignored_scopes is not None: diff --git a/nncf/common/deprecation.py b/nncf/common/deprecation.py index 71328d09f28..8684c2ca3b0 100644 --- a/nncf/common/deprecation.py +++ b/nncf/common/deprecation.py @@ -1,21 +1,20 @@ -"""" - Copyright (c) 2022 Intel Corporation - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. 
- You may obtain a copy of the License at - http://www.apache.org/licenses/LICENSE-2.0 - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. -""" +# Copyright (c) 2023 Intel Corporation +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + import functools import inspect import warnings from typing import Callable, Type, TypeVar -from pkg_resources import parse_version +from packaging import version def warning_deprecated(msg): @@ -38,8 +37,8 @@ def __init__(self, msg: str = None, start_version: str = None, end_version: str :param msg: Custom message to be added after the boilerplate deprecation text. """ self.msg = msg - self.start_version = parse_version(start_version) if start_version is not None else None - self.end_version = parse_version(end_version) if end_version is not None else None + self.start_version = version.parse(start_version) if start_version is not None else None + self.end_version = version.parse(end_version) if end_version is not None else None def __call__(self, fn_or_class: ClassOrFn) -> ClassOrFn: name = fn_or_class.__module__ + "." + fn_or_class.__name__ diff --git a/nncf/common/factory.py b/nncf/common/factory.py index 063b9734f3a..3089a781d17 100644 --- a/nncf/common/factory.py +++ b/nncf/common/factory.py @@ -15,8 +15,12 @@ from nncf.common.graph.graph import NNCFGraph from nncf.common.graph.model_transformer import ModelTransformer from nncf.common.graph.transformations.command_creation import CommandCreator +from nncf.common.tensor_statistics import aggregator from nncf.common.utils.backend import BackendType +from nncf.common.utils.backend import get_available_backends from nncf.common.utils.backend import get_backend +from nncf.common.utils.backend import is_openvino_compiled_model +from nncf.data.dataset import Dataset TModel = TypeVar("TModel") @@ -40,7 +44,7 @@ def create(model: TModel) -> NNCFGraph: return GraphConverter.create_nncf_graph(model) if model_backend == BackendType.TORCH: - return model.nncf.get_original_graph() + return model.nncf.get_graph() raise RuntimeError("Cannot create backend-specific graph because {} is not supported!".format(model_backend)) @@ -63,7 +67,7 @@ def create(model: TModel) -> ModelTransformer: return OVModelTransformer(model) if model_backend == BackendType.TORCH: - from nncf.torch.nncf_network import PTModelTransformer + from nncf.torch.model_transformer import PTModelTransformer return PTModelTransformer(model) raise RuntimeError( @@ -77,9 +81,15 @@ def create(model: TModel) -> Engine: """ Factory method to create backend-specific Engine instance based on the input model. - :param model: backend-specific model instance - :return: backend-specific Engine instance + :param model: backend-specific model instance. + :return: backend-specific Engine instance. 
""" + available_backends = get_available_backends() + if BackendType.OPENVINO in available_backends and is_openvino_compiled_model(model): + from nncf.openvino.engine import OVCompiledModelEngine + + return OVCompiledModelEngine(model) + model_backend = get_backend(model) if model_backend == BackendType.ONNX: from nncf.onnx.engine import ONNXEngine @@ -113,3 +123,30 @@ def create(model: TModel) -> CommandCreator: raise RuntimeError( "Cannot create backend-specific command creator because {} is not supported!".format(model_backend) ) + + +class StatisticsAggregatorFactory: + @staticmethod + def create(model: TModel, dataset: Dataset) -> aggregator.StatisticsAggregator: + """ + Factory method to create backend-specific `StatisticsAggregator` instance based on the input model. + + :param model: backend-specific model instance + :return: backend-specific `StatisticsAggregator` instance + """ + model_backend = get_backend(model) + if model_backend == BackendType.ONNX: + from nncf.onnx.statistics.aggregator import ONNXStatisticsAggregator + + return ONNXStatisticsAggregator(dataset) + if model_backend == BackendType.OPENVINO: + from nncf.openvino.statistics.aggregator import OVStatisticsAggregator + + return OVStatisticsAggregator(dataset) + if model_backend == BackendType.TORCH: + from nncf.torch.statistics.aggregator import PTStatisticsAggregator + + return PTStatisticsAggregator(dataset) + raise RuntimeError( + "Cannot create backend-specific statistics aggregator because {} is not supported!".format(model_backend) + ) diff --git a/nncf/common/graph/graph.py b/nncf/common/graph/graph.py index aa289b553b7..2d696c9ee2f 100644 --- a/nncf/common/graph/graph.py +++ b/nncf/common/graph/graph.py @@ -10,7 +10,7 @@ # limitations under the License. from collections import defaultdict from copy import deepcopy -from typing import Any, Callable, Dict, Generator, KeysView, List, Tuple, Type, ValuesView +from typing import Any, Callable, Dict, Generator, KeysView, List, Optional, Tuple, Type, ValuesView import networkx as nx import networkx.algorithms.isomorphism as iso @@ -33,51 +33,73 @@ class NNCFNode: Class describing nodes used in NNCFGraph. 
""" - def __init__(self, node_id: int, node_name: NNCFNodeName, data: dict = None): - self.node_id = node_id - self.data = data if data else {} - self.data[NNCFGraph.NODE_NAME_ATTR] = node_name + ID_NODE_ATTR = "id" + NODE_NAME_ATTR = "node_name" + KEY_NODE_ATTR = "key" + NODE_TYPE_ATTR = "type" + METATYPE_ATTR = "metatype" + LAYER_NAME_ATTR = "layer_name" + LAYER_ATTRIBUTES = "layer_attributes" + IGNORED_ALGOS_ATTR = "ignored_algos" + IS_IN_ITERATION_SCOPE_NODE_ATTR = "is_in_iteration_scope" + IS_INTEGER_INPUT_NODE_ATTR = "is_integer_input" + IS_SHARED_ATTR = "is_shared" + + def __init__(self, attributes: Dict[str, Any]): + self._attributes = attributes + + @property + def attributes(self) -> Dict[str, Any]: + return self._attributes + + @property + def node_id(self) -> int: + return self._attributes[NNCFNode.ID_NODE_ATTR] + + @property + def node_key(self) -> str: + return self._attributes[NNCFNode.KEY_NODE_ATTR] @property def node_name(self) -> NNCFNodeName: - return self.data.get(NNCFGraph.NODE_NAME_ATTR) + return self._attributes[NNCFNode.NODE_NAME_ATTR] @property def metatype(self) -> Type[OperatorMetatype]: - return self.data.get(NNCFGraph.METATYPE_ATTR) + return self._attributes[NNCFNode.METATYPE_ATTR] @property def node_type(self) -> str: - return self.data.get(NNCFGraph.NODE_TYPE_ATTR) + return self._attributes[NNCFNode.NODE_TYPE_ATTR] @property - def layer_name(self) -> LayerName: - return self.data.get(NNCFGraph.LAYER_NAME_ATTR) + def layer_name(self) -> Optional[LayerName]: + return self._attributes.get(NNCFNode.LAYER_NAME_ATTR) @layer_name.setter - def layer_name(self, data: Any) -> None: - self.data[NNCFGraph.LAYER_NAME_ATTR] = data + def layer_name(self, value: str) -> None: + self._attributes[NNCFNode.LAYER_NAME_ATTR] = value @property - def layer_attributes(self) -> BaseLayerAttributes: - return self.data.get(NNCFGraph.LAYER_ATTRIBUTES) + def layer_attributes(self) -> Optional[BaseLayerAttributes]: + return self._attributes.get(NNCFNode.LAYER_ATTRIBUTES) @layer_attributes.setter - def layer_attributes(self, data: Any) -> None: - self.data[NNCFGraph.LAYER_ATTRIBUTES] = data + def layer_attributes(self, value: BaseLayerAttributes) -> None: + self._attributes[NNCFNode.LAYER_ATTRIBUTES] = value @property def ignored_algorithms(self) -> List[str]: - return self.data.get(NNCFGraph.IGNORED_ALGOS_ATTR, []) + return self._attributes[NNCFNode.IGNORED_ALGOS_ATTR] def is_in_iteration_scope(self) -> bool: - return self.data.get(NNCFGraph.IS_IN_ITERATION_SCOPE_NODE_ATTR, False) + return self._attributes[NNCFNode.IS_IN_ITERATION_SCOPE_NODE_ATTR] def is_integer_input(self) -> bool: - return self.data.get(NNCFGraph.IS_INTEGER_INPUT_NODE_ATTR, False) + return self._attributes[NNCFNode.IS_INTEGER_INPUT_NODE_ATTR] def is_shared(self) -> bool: - return self.data.get(NNCFGraph.IS_SHARED_ATTR, False) + return self._attributes[NNCFNode.IS_SHARED_ATTR] def __repr__(self): return str(self) @@ -89,13 +111,7 @@ def __hash__(self): return hash(str(self)) def __eq__(self, other): - return ( - isinstance(other, NNCFNode) - and self.node_id == other.node_id - and self.data == other.data - and self.node_type == other.node_type - and self.layer_attributes == other.layer_attributes - ) + return isinstance(other, NNCFNode) and self.attributes == other.attributes class NNCFGraphEdge: @@ -113,6 +129,7 @@ def __init__( output_port_id: int, tensor_shape: List[int], dtype: Dtype, + parallel_input_port_ids: List[int], ): """ :param from_node: An NNCFNode that sources the directed edge. 
@@ -128,6 +145,7 @@ def __init__( self.output_port_id = output_port_id self.tensor_shape = tensor_shape self.dtype = dtype + self.parallel_input_port_ids = parallel_input_port_ids def __str__(self): return str(self.from_node) + " -> " + str(self.tensor_shape) + " -> " + str(self.to_node) @@ -160,31 +178,26 @@ class NNCFGraph: providing some useful methods for graph traversal. """ - ID_NODE_ATTR = "id" - KEY_NODE_ATTR = "key" - NODE_NAME_ATTR = "node_name" - NODE_TYPE_ATTR = "type" - METATYPE_ATTR = "metatype" - LAYER_NAME_ATTR = "layer_name" - LAYER_ATTRIBUTES = "layer_attributes" ACTIVATION_SHAPE_EDGE_ATTR = "activation_shape" INPUT_PORT_ID_EDGE_ATTR = "input_port_id" OUTPUT_PORT_ID_EDGE_ATTR = "output_port_id" - IGNORED_ALGOS_ATTR = "ignored_algos" - IS_IN_ITERATION_SCOPE_NODE_ATTR = "is_in_iteration_scope" - IS_INTEGER_INPUT_NODE_ATTR = "is_integer_input" DTYPE_EDGE_ATTR = "dtype" - IS_SHARED_ATTR = "is_shared" + PARALLEL_INPUT_PORT_IDS_ATTR = "parallel_input_ports" def __init__(self): self._nx_graph = nx.DiGraph() self._node_id_to_key_dict = {} + self._nodes: Dict[str, NNCFNode] = {} self._input_nncf_nodes = {} # type: Dict[int, NNCFNode] self._output_nncf_nodes = {} # type: Dict[int, NNCFNode] self._node_ids_vs_layer_names = {} # type: Dict[int, LayerName] self._layer_name_vs_shared_nodes = defaultdict(list) # type: Dict[LayerName, List[NNCFNode]] + @property + def nodes(self) -> Dict[str, NNCFNode]: + return self._nodes + def get_node_by_id(self, node_id: int) -> NNCFNode: """ :param node_id: Id of the node. @@ -197,7 +210,7 @@ def get_node_by_key(self, key: str) -> NNCFNode: :param key: key (node_name) of the node. :return: NNCFNode in a graph with such key. """ - return self._nx_node_to_nncf_node(self._nx_graph.nodes[key]) + return self._nodes[key] def get_input_nodes(self) -> List[NNCFNode]: """ @@ -251,12 +264,7 @@ def get_all_nodes(self) -> List[NNCFNode]: """ Returns list of all graph nodes. """ - all_nodes = [] - for node_key in self.get_all_node_keys(): - nx_node = self._nx_graph.nodes[node_key] - nncf_node = self._nx_node_to_nncf_node(nx_node) - all_nodes.append(nncf_node) - return all_nodes + return list(self._nodes.values()) def get_all_simple_paths( self, start_node_name: NNCFNodeName, end_node_name: NNCFNodeName @@ -276,12 +284,6 @@ def get_all_simple_paths( end_node_key = self.get_node_key_by_id(end_node.node_id) return nx.all_simple_paths(self._nx_graph, start_node_key, end_node_key) - @staticmethod - def _nx_node_to_nncf_node(nx_node: dict) -> NNCFNode: - return NNCFNode( - node_id=nx_node[NNCFGraph.ID_NODE_ATTR], node_name=nx_node[NNCFGraph.NODE_NAME_ATTR], data=nx_node - ) - @staticmethod def _get_edge_boundaries( match: List[str], graph: nx.DiGraph @@ -308,7 +310,7 @@ def get_next_nodes(self, node: NNCFNode) -> List[NNCFNode]: :return: List of consumer nodes of provided node. 
""" nx_node_keys = self._nx_graph.succ[self._node_id_to_key_dict[node.node_id]] - return [self._nx_node_to_nncf_node(self._nx_graph.nodes[key]) for key in nx_node_keys] + return [self._nodes[key] for key in nx_node_keys] def get_previous_nodes(self, node: NNCFNode) -> List[NNCFNode]: """ @@ -319,7 +321,7 @@ def get_previous_nodes(self, node: NNCFNode) -> List[NNCFNode]: """ nx_node_keys = self._nx_graph.pred[self._node_id_to_key_dict[node.node_id]] - return [self._nx_node_to_nncf_node(self._nx_graph.nodes[key]) for key in nx_node_keys] + return [self._nodes[key] for key in nx_node_keys] def get_input_edges(self, node: NNCFNode) -> List[NNCFGraphEdge]: """ @@ -380,10 +382,10 @@ def add_nncf_node( node_name: str, node_type: str, node_metatype: Type[OperatorMetatype], - layer_attributes: BaseLayerAttributes = None, - node_id_override: int = None, - layer_name: LayerName = None, - ignored_algorithms: List[str] = None, + layer_attributes: Optional[BaseLayerAttributes] = None, + node_id_override: Optional[int] = None, + layer_name: Optional[LayerName] = None, + ignored_algorithms: Optional[List[str]] = None, is_in_iteration_scope: bool = False, is_integer_input: bool = False, is_shared: bool = False, @@ -430,25 +432,26 @@ def add_nncf_node( self._node_id_to_key_dict[node_id] = node_key attrs = { - NNCFGraph.ID_NODE_ATTR: node_id, - NNCFGraph.NODE_NAME_ATTR: node_name, - NNCFGraph.KEY_NODE_ATTR: node_key, - NNCFGraph.NODE_TYPE_ATTR: node_type, - NNCFGraph.LAYER_NAME_ATTR: layer_name, - NNCFGraph.METATYPE_ATTR: node_metatype, - NNCFGraph.IS_SHARED_ATTR: is_shared, - NNCFGraph.IS_IN_ITERATION_SCOPE_NODE_ATTR: is_in_iteration_scope, - NNCFGraph.IS_INTEGER_INPUT_NODE_ATTR: is_integer_input, + NNCFNode.ID_NODE_ATTR: node_id, + NNCFNode.NODE_NAME_ATTR: node_name, + NNCFNode.KEY_NODE_ATTR: node_key, + NNCFNode.NODE_TYPE_ATTR: node_type, + NNCFNode.LAYER_NAME_ATTR: layer_name, + NNCFNode.METATYPE_ATTR: node_metatype, + NNCFNode.IS_SHARED_ATTR: is_shared, + NNCFNode.IS_IN_ITERATION_SCOPE_NODE_ATTR: is_in_iteration_scope, + NNCFNode.IS_INTEGER_INPUT_NODE_ATTR: is_integer_input, } if layer_attributes is not None: - attrs[NNCFGraph.LAYER_ATTRIBUTES] = layer_attributes + attrs[NNCFNode.LAYER_ATTRIBUTES] = layer_attributes if ignored_algorithms is None: ignored_algorithms = [] - attrs[NNCFGraph.IGNORED_ALGOS_ATTR] = ignored_algorithms + attrs[NNCFNode.IGNORED_ALGOS_ATTR] = ignored_algorithms self._nx_graph.add_node(node_key, **attrs) - node = NNCFNode(node_id, node_name, data=self._nx_graph.nodes[node_key]) + node = NNCFNode(self._nx_graph.nodes[node_key]) + self._nodes[node_key] = node if node.metatype in INPUT_NOOP_METATYPES: self._input_nncf_nodes[node_id] = node @@ -470,6 +473,7 @@ def add_edge_between_nncf_nodes( input_port_id: int, output_port_id: int, dtype: Dtype, + parallel_input_port_ids: Optional[List[int]] = None, ): """ Adds a directed edge between two `NNCFNode`s that are already present in the graph. @@ -482,6 +486,7 @@ def add_edge_between_nncf_nodes( :param output_port_id: Specifies the index among the possible outputs of the `from_node_id` node' that this tensor should correspond to. :param dtype: The data type of the tensor. + :param parallel_input_port_ids: Input ports for parallel edges, if any should be present for this edge. 
""" from_node_key = self._node_id_to_key_dict[from_node_id] to_node_key = self._node_id_to_key_dict[to_node_id] @@ -505,6 +510,7 @@ def add_edge_between_nncf_nodes( NNCFGraph.INPUT_PORT_ID_EDGE_ATTR: input_port_id, NNCFGraph.OUTPUT_PORT_ID_EDGE_ATTR: output_port_id, NNCFGraph.DTYPE_EDGE_ATTR: dtype, + NNCFGraph.PARALLEL_INPUT_PORT_IDS_ATTR: [] if parallel_input_port_ids is None else parallel_input_port_ids, } self._nx_graph.add_edge(from_node_key, to_node_key, **attrs) @@ -513,9 +519,9 @@ def topological_sort(self) -> List[NNCFNode]: Returns nodes in topologically sorted order, additionally sorted in ascending node ID order. """ return [ - self._nx_node_to_nncf_node(self._nx_graph.nodes[node_name]) + self._nodes[node_name] for node_name in nx.lexicographical_topological_sort( - self._nx_graph, key=lambda x: self._nx_graph.nodes[x][NNCFGraph.ID_NODE_ATTR] + self._nx_graph, key=lambda x: self._nx_graph.nodes[x][NNCFNode.ID_NODE_ATTR] ) ] @@ -542,7 +548,7 @@ def get_graph_for_structure_analysis(self, extended: bool = False) -> nx.DiGraph out_graph = nx.DiGraph() for node_name, node in self._nx_graph.nodes.items(): visualization_node_name = node_name.replace(__RESERVED_DOT_CHARACTER, __CHARACTER_REPLACE_TO) - attrs_node = {"id": node[NNCFGraph.ID_NODE_ATTR], "type": node[NNCFGraph.NODE_TYPE_ATTR]} + attrs_node = {"id": node[NNCFNode.ID_NODE_ATTR], "type": node[NNCFNode.NODE_TYPE_ATTR]} for attr in ["color", "label", "style"]: if attr in node: attrs_node[attr] = node[attr] @@ -612,7 +618,7 @@ def get_node_by_name(self, name: NNCFNodeName) -> NNCFNode: def __eq__(self, other: "NNCFGraph"): nm = iso.categorical_node_match( - [NNCFGraph.ID_NODE_ATTR, NNCFGraph.KEY_NODE_ATTR, NNCFGraph.LAYER_ATTRIBUTES], [None, None, None] + [NNCFNode.ID_NODE_ATTR, NNCFNode.KEY_NODE_ATTR, NNCFNode.LAYER_ATTRIBUTES], [None, None, None] ) em = iso.categorical_edge_match( [NNCFGraph.ACTIVATION_SHAPE_EDGE_ATTR, NNCFGraph.INPUT_PORT_ID_EDGE_ATTR], [None, None] @@ -643,12 +649,13 @@ def get_nncf_graph_pattern_io(self, match: List[str]) -> NNCFGraphPatternIO: to_node_key = nx_edge[1] data = nx_edge[2] nncf_edge = NNCFGraphEdge( - self._nx_node_to_nncf_node(self._nx_graph.nodes[from_node_key]), - self._nx_node_to_nncf_node(self._nx_graph.nodes[to_node_key]), + self._nodes[from_node_key], + self._nodes[to_node_key], input_port_id=data[NNCFGraph.INPUT_PORT_ID_EDGE_ATTR], output_port_id=data[NNCFGraph.OUTPUT_PORT_ID_EDGE_ATTR], tensor_shape=data[NNCFGraph.ACTIVATION_SHAPE_EDGE_ATTR], dtype=data[NNCFGraph.DTYPE_EDGE_ATTR], + parallel_input_port_ids=data[NNCFGraph.PARALLEL_INPUT_PORT_IDS_ATTR], ) if from_node_key in match: output_nncf_edges.append(nncf_edge) @@ -683,6 +690,7 @@ def get_edge(self, from_node: NNCFNode, to_node: NNCFNode) -> NNCFGraphEdge: data[NNCFGraph.OUTPUT_PORT_ID_EDGE_ATTR], data[NNCFGraph.ACTIVATION_SHAPE_EDGE_ATTR], data[NNCFGraph.DTYPE_EDGE_ATTR], + data[NNCFGraph.PARALLEL_INPUT_PORT_IDS_ATTR], ) def get_all_edges(self) -> Generator[NNCFGraphEdge, None, None]: @@ -697,23 +705,28 @@ def remove_nodes_from(self, nodes: List[NNCFNode]) -> None: :param nodes: List of NNCFNodes to remove. 
""" for node in nodes: - self._nx_graph.remove_node(node.data["key"]) + self._nx_graph.remove_node(node.node_key) + del self._nodes[node.node_key] self._node_id_to_key_dict = {} for node_key, node in self._nx_graph.nodes.items(): self._node_id_to_key_dict[node["id"]] = node_key - def find_matching_nodes(self, patterns: GraphPattern) -> List[NNCFNode]: + def find_matching_subgraphs(self, patterns: GraphPattern, strict: bool = True) -> List[List[NNCFNode]]: """ - Returns nodes of matched pattern in patterns. + Returns subgraphs of matched pattern in patterns. :param patterns: Instance of GraphPattern containing all patterns. - :return: Nodes that are matched patterns. + :param strict: If True returns only strict matched subgraphs, if False - all matched subgraphs. + :return: List of subgraphs that are matching by pattern matching. + Subgraph is a ordered list of nodes of matched subgraph The returned nodes order relies on DiGraphMatcher isomorphic subgraphs matching logic from networkX package. DiGraphMatcher does not guarantee a specific order for returning isomorphic subgraphs. """ output = [] - for matched_subgraph in find_subgraphs_matching_pattern(self._nx_graph, patterns): + for matched_subgraph in find_subgraphs_matching_pattern(self._nx_graph, patterns, strict): + subgraph_list = [] for node_key in matched_subgraph: - output.append(self.get_node_by_key(node_key)) + subgraph_list.append(self.get_node_by_key(node_key)) + output.append(subgraph_list) return output diff --git a/nncf/common/graph/graph_matching.py b/nncf/common/graph/graph_matching.py index c1940926b1b..1e8991fed99 100644 --- a/nncf/common/graph/graph_matching.py +++ b/nncf/common/graph/graph_matching.py @@ -8,8 +8,7 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. - -from typing import List, Set +from typing import Dict, List import networkx as nx import networkx.algorithms.isomorphism as ism @@ -17,13 +16,48 @@ from nncf.common.graph.patterns import GraphPattern -def is_subgraph_has_inner_outgoing_edges( - graph: nx.DiGraph, full_subgraph_with_non_pattern_nodes: List[str], pattern_subgraph: List[str] -) -> bool: +def _are_nodes_matched(node_1, node_2) -> bool: + for attr in node_2: + if attr == GraphPattern.LABEL_ATTR: + continue + if attr == GraphPattern.METATYPE_ATTR: + # GraphPattern.ANY_PATTERN_NODE_TYPE and GraphPattern.NON_PATTERN_NODE_TYPE + # are matched to any node type. + if GraphPattern.ANY_PATTERN_NODE_TYPE in node_2[attr] or GraphPattern.NON_PATTERN_NODE_TYPE in node_2[attr]: + continue + # Torch and TF pattern mapping based on 'type' section, + # While ONNX mapping based on metatypes - + # to support all of them, we need to check the existane of the attributes + if GraphPattern.NODE_TYPE_ATTR in node_1: + if node_1[GraphPattern.NODE_TYPE_ATTR] in node_2[attr]: + continue + if node_1[attr] not in node_2[attr]: + return False + return True + + +def _sort_patterns_by_len(pattern: nx.DiGraph) -> int: """ - Checks out whether the 'pattern_subgraph' has outgoing edges, - that aren't connected with nodes from full_subgraph_with_non_pattern_nodes. + Sort patterns by their length. GraphPattern.NON_PATTERN_NODE_TYPE is not counted as a pattern node. 
+ """ + non_pattern_nodes = [ + node_id + for node_id, node_data in pattern.nodes(data=True) + if GraphPattern.NON_PATTERN_NODE_TYPE in node_data.get(GraphPattern.METATYPE_ATTR, []) + ] + return len(pattern) - len(non_pattern_nodes) + + +def _is_subgraph_matching_strict(graph: nx.DiGraph, pattern: nx.DiGraph, subgraph: Dict[str, str]) -> bool: + """ + Checks out whether the matched subgraph has: + 1) External predecessors of starting nodes. + 2) External successors of the last nodes. + 3) External successors or predecessors of the nodes which are not starting and last. + If any of these conditions is True, than returns False, otherwise - True. + The checks are skipped for NON_PATTERN_NODE_TYPE. Example: + This subgraph matching is not strict. (conv2d + BN + ReLU pattern): ... | @@ -37,119 +71,83 @@ def is_subgraph_has_inner_outgoing_edges( | ... :param graph: The model graph. - :param full_subgraph_with_non_pattern_nodes: A subgraph of the model graph including the nodes outside the pattern. - :param pattern_subgraph: A subgraph of the model. - :return: True if the subgraph contains outgoing edges starting not from the last node, - False - otherwise. - """ - first_node = pattern_subgraph[0] - last_node = pattern_subgraph[-1] - for node_key in pattern_subgraph: - if node_key == last_node: - predecessors = list(graph.pred[node_key].keys()) - if any(predecessor not in full_subgraph_with_non_pattern_nodes for predecessor in predecessors): - return True - elif node_key == first_node: - successors = list(graph.succ[node_key].keys()) - if any(successor not in full_subgraph_with_non_pattern_nodes for successor in successors): - return True - else: - successors = list(graph.succ[node_key].keys()) - predecessors = list(graph.pred[node_key].keys()) - if any(successors_key not in full_subgraph_with_non_pattern_nodes for successors_key in successors): - return True - if any(predecessor not in full_subgraph_with_non_pattern_nodes for predecessor in predecessors): - return True - return False - - -def find_subgraphs_matching_pattern(graph: nx.DiGraph, pattern_graph: GraphPattern) -> List[List[str]]: + :param pattern: The matched pattern. + :param subgraph: A subgraph of the model graph including the nodes outside the pattern. + :return: If any of three conditions is True than returns False, otherwise - True. """ - Find a list of subgraphs for the particular graph that match the pattern expression. - :param graph: The model graph. - :param pattern_graph: A graph consists of patterns for layer fusing logic. - :return: A list of subgraphs, matching the pattern expression. - Each subgraph is defined as a list of node keys. 
+ starting_nodes = [] + last_nodes = [] + for node in pattern.nodes: + if not pattern.pred[node] and pattern.succ[node]: + starting_nodes.append(node) + if pattern.pred[node] and not pattern.succ[node]: + last_nodes.append(node) + + for node_from_graph, node_from_pattern in subgraph.items(): + if GraphPattern.NON_PATTERN_NODE_TYPE in pattern.nodes[node_from_pattern].get(GraphPattern.METATYPE_ATTR, []): + continue + predecessors_keys = graph.pred[node_from_graph].keys() + successor_keys = graph.succ[node_from_graph].keys() + has_external_successors = any(successor_key not in subgraph for successor_key in successor_keys) + has_external_predcessors = any(predecessor_key not in subgraph for predecessor_key in predecessors_keys) + if node_from_pattern in starting_nodes and has_external_successors: + return False + if node_from_pattern in last_nodes and has_external_predcessors: + return False + if (node_from_pattern not in last_nodes and node_from_pattern not in starting_nodes) and ( + has_external_successors or has_external_predcessors + ): + return False + return True + + +def _copy_subgraph_excluding_non_pattern_node(subgraph: Dict[str, str], pattern_graph: GraphPattern) -> Dict[str, str]: """ + Copies a matching subgraph excluding the nodes having GraphPattern.NON_PATTERN_NODE_TYPE. - def are_nodes_matching(node_1, node_2): - for attr in node_2: - if attr == GraphPattern.LABEL_ATTR: - continue - if attr == GraphPattern.METATYPE_ATTR: - # GraphPattern.ANY_PATTERN_NODE_TYPE and GraphPattern.NON_PATTERN_NODE_TYPE - # are matched to any node type. - - if ( - GraphPattern.ANY_PATTERN_NODE_TYPE in node_2[attr] - or GraphPattern.NON_PATTERN_NODE_TYPE in node_2[attr] - ): - continue - # Torch and TF pattern mapping based on 'type' section, - # While ONNX mapping based on metatypes - - # to support all of them, we need to check the existane of the attributes - if GraphPattern.NODE_TYPE_ATTR in node_1: - if node_1[GraphPattern.NODE_TYPE_ATTR] in node_2[attr]: - continue - if node_1[attr] not in node_2[attr]: - return False - return True - - def are_edges_matching(edge_1, edge_2): - for attr in edge_2: - if edge_1[attr] not in edge_2[attr]: - return False - return True - - subgraphs = [] # type: List[List[str]] - visited_nodes = set() # type: Set[str] - patterns = [] # type: List[nx.DiGraph] - for c in nx.weakly_connected_components(pattern_graph.graph): - patterns.append(pattern_graph.graph.subgraph(c)) - - def sort_patterns(pattern: nx.DiGraph): - """ - Sort patterns by their length, - keeping in mind that if node type is GraphPattern.NON_PATTERN_NODE_TYPE it shouldn't count. - """ - pattern_len = len(pattern) - for node in pattern.nodes: - if GraphPattern.NON_PATTERN_NODE_TYPE in pattern_graph.graph.nodes.get(node)[GraphPattern.METATYPE_ATTR]: - pattern_len -= 1 - return pattern_len - - # Get all patterns sorted by their lengths - # as we want match the longest patterns first - - patterns = sorted(patterns, key=sort_patterns, reverse=True) + :param subgraph: Subgraph + :param pattern_graph: A graph consists of patterns to match. + :return: New subgraph without excluded nodes. 
+ """ + output = {} + for node_from_graph, node_from_pattern in subgraph.items(): + pattern_node = pattern_graph.graph.nodes[node_from_pattern] + pattern_node_types = pattern_node.get(GraphPattern.METATYPE_ATTR, []) + if GraphPattern.NON_PATTERN_NODE_TYPE not in pattern_node_types: + output[node_from_graph] = node_from_pattern + return output + + +def find_subgraphs_matching_pattern( + graph: nx.DiGraph, pattern_graph: GraphPattern, strict: bool = True +) -> List[List[str]]: + """ + Finds a list of nodes which define a subgraph matched a pattern in pattern_graph. + Nodes in each subgraph is stored in lexicographical_topological_sort. + :param graph: The model graph. + :param pattern_graph: A graph consists of patterns to match. + :param strict: If True returns only strict matched subgraphs, if False - all matched subgraphs. + :return: A list of subgraphs are matched to the patterns. Each subgraph is defined as a list of node keys. + """ + subgraphs = [] + matched_nodes = set() + patterns = pattern_graph.get_weakly_connected_subgraphs() + patterns = sorted(patterns, key=_sort_patterns_by_len, reverse=True) for pattern in patterns: - matcher = ism.DiGraphMatcher(graph, pattern, node_match=are_nodes_matching, edge_match=are_edges_matching) + matcher = ism.DiGraphMatcher(graph, pattern, node_match=_are_nodes_matched) for subgraph in matcher.subgraph_isomorphisms_iter(): - # Bottleneck that need to sort by id for result consistency - pattern_subgraph = list( - nx.lexicographical_topological_sort(graph.subgraph(subgraph), key=lambda x: int(x.split()[0])) - ) - - full_subgraph_with_non_pattern_nodes = pattern_subgraph[:] - outside_pattern_nodes = [] - - # If some nodes are outside the pattern - remove them from pattern_subgraph - - for node, pattern_node_id in matcher.mapping.items(): - pattern_node = pattern_graph.graph.nodes[pattern_node_id] - pattern_node_types = pattern_node.get(GraphPattern.METATYPE_ATTR) - if GraphPattern.NON_PATTERN_NODE_TYPE in pattern_node_types: - outside_pattern_nodes.append(node) - for node in outside_pattern_nodes: - pattern_subgraph.remove(node) - - is_visited_node = any(node in visited_nodes for node in pattern_subgraph) - if is_visited_node: + if strict and not _is_subgraph_matching_strict(graph, pattern, subgraph): continue - if is_subgraph_has_inner_outgoing_edges(graph, full_subgraph_with_non_pattern_nodes, pattern_subgraph): + + subgraph = _copy_subgraph_excluding_non_pattern_node(subgraph, pattern_graph) + is_any_node_matched = any(node in matched_nodes for node in subgraph) + + if is_any_node_matched: continue - visited_nodes.update(pattern_subgraph) - subgraphs.append(pattern_subgraph) - return subgraphs if subgraphs else [] + matched_nodes.update(subgraph) + sorted_nodes_subgraph = list(nx.lexicographical_topological_sort(graph.subgraph(subgraph))) + subgraphs.append(sorted_nodes_subgraph) + + return subgraphs diff --git a/nncf/common/graph/layer_attributes.py b/nncf/common/graph/layer_attributes.py index 9f1ba0ece2d..ce934c23b8c 100644 --- a/nncf/common/graph/layer_attributes.py +++ b/nncf/common/graph/layer_attributes.py @@ -8,6 +8,7 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. + from abc import ABC from abc import abstractmethod from dataclasses import dataclass @@ -26,6 +27,9 @@ class BaseLayerAttributes(ABC): of modules/layers. 
""" + def __eq__(self, __o: object) -> bool: + return isinstance(__o, self.__class__) and self.__dict__ == __o.__dict__ + class MultipleInputLayerAttributes(BaseLayerAttributes): def __init__(self, axis: int): @@ -35,9 +39,6 @@ def __init__(self, axis: int): """ self.axis = axis - def __eq__(self, other: Any): - return isinstance(other, MultipleInputLayerAttributes) and self.axis == other.axis - class MultipleOutputLayerAttributes(BaseLayerAttributes): def __init__(self, chunks: Union[int, List], axis: int): @@ -49,25 +50,19 @@ def __init__(self, chunks: Union[int, List], axis: int): self.chunks = chunks self.axis = axis - def __eq__(self, other: Any): - return ( - isinstance(other, MultipleOutputLayerAttributes) and self.chunks == other.chunks and self.axis == other.axis - ) - class WeightedLayerAttributes(BaseLayerAttributes): - def __init__(self, weight_requires_grad: bool, dtype: Dtype = Dtype.FLOAT): + def __init__(self, weight_requires_grad: bool, dtype: Dtype = Dtype.FLOAT, with_bias: bool = False): """ :param weight_requires_grad: Is True if gradients need to be computed for the corresponding Tensor, False otherwise. :param dtype: is an object that represents the type of data. + :param with_bias: Operation include bias. """ self.weight_requires_grad = weight_requires_grad self.dtype = dtype - - def __eq__(self, other: Any): - return isinstance(other, WeightedLayerAttributes) and self.weight_requires_grad == other.weight_requires_grad + self.with_bias = with_bias @abstractmethod def get_weight_shape(self) -> List[int]: @@ -88,7 +83,13 @@ class GenericWeightedLayerAttributes(WeightedLayerAttributes): of the exact meaning of the weight indices. """ - def __init__(self, weight_requires_grad: bool, weight_shape: List[int], filter_dimension_idx: int = 0): + def __init__( + self, + weight_requires_grad: bool, + weight_shape: List[int], + filter_dimension_idx: int = 0, + with_bias: bool = False, + ): """ :param weight_requires_grad: Is True if gradients need to be computed for the corresponding Tensor, @@ -96,7 +97,7 @@ def __init__(self, weight_requires_grad: bool, weight_shape: List[int], filter_d :param weight_shape: shape of weight tensor. :param filter_dimension_idx: the axis along which the filters are stored. """ - super().__init__(weight_requires_grad) + super().__init__(weight_requires_grad=weight_requires_grad, with_bias=with_bias) self.weight_shape = weight_shape self.filter_dimension_idx = filter_dimension_idx @@ -108,25 +109,23 @@ def get_target_dim_for_compression(self) -> int: class LinearLayerAttributes(WeightedLayerAttributes): - def __init__(self, weight_requires_grad: bool, in_features: int, out_features: int, bias: bool = True): + def __init__(self, weight_requires_grad: bool, in_features: int, out_features: int, with_bias: bool = True): """ :param weight_requires_grad: Is True if gradients need to be computed for the corresponding Tensor, False otherwise. :param in_features: number of input channels in the layer's input. :param out_features: number of channels produced by the layer. - :param bias: If set to ``False``, the layer doesn't learn an additive bias. 
""" - super().__init__(weight_requires_grad) + super().__init__(weight_requires_grad, with_bias=with_bias) self.in_features = in_features self.out_features = out_features - self.bias = bias def get_weight_shape(self) -> List[int]: return [self.out_features, self.in_features] def get_bias_shape(self) -> int: - return self.out_features if self.bias is True else 0 + return self.out_features if self.with_bias is True else 0 def get_target_dim_for_compression(self) -> int: return 0 @@ -140,9 +139,11 @@ def __init__( out_channels: int, kernel_size: Tuple[int, ...], stride: Tuple[int, ...], + dilations: Tuple[int, ...], groups: int, transpose: bool, padding_values: Tuple[int, ...], + with_bias: bool = False, ): """ @@ -155,28 +156,18 @@ def __init__( :param groups: number of blocked connections from input channels to output channels. :param transpose: If set to `True`, the layer is an ordinary convolution, otherwise - transpose one. :param padding_values: defines the amount of padding applied to the layer's input. + :param with_bias: Operation include bias. """ - super().__init__(weight_requires_grad) + super().__init__(weight_requires_grad=weight_requires_grad, with_bias=with_bias) self.in_channels = in_channels self.out_channels = out_channels self.kernel_size = kernel_size self.stride = stride + self.dilations = dilations self.groups = groups self.transpose = transpose self.padding_values = padding_values - def __eq__(self, other: Any): - return ( - isinstance(other, ConvolutionLayerAttributes) - and super().__eq__(other) - and self.in_channels == other.in_channels - and self.out_channels == other.out_channels - and self.kernel_size == other.kernel_size - and self.stride == other.stride - and self.groups == other.groups - and self.transpose == other.transpose - ) - def get_weight_shape(self) -> List[int]: if not self.transpose: return [self.out_channels, self.in_channels // self.groups, *self.kernel_size] @@ -202,14 +193,6 @@ def __init__(self, weight_requires_grad: bool, num_channels: int, num_groups: in self.num_channels = num_channels self.num_groups = num_groups - def __eq__(self, other: Any): - return ( - isinstance(other, GroupNormLayerAttributes) - and super().__eq__(other) - and self.num_channels == other.num_channels - and self.num_groups == other.num_groups - ) - def get_weight_shape(self) -> List[int]: return [self.num_channels] @@ -238,14 +221,6 @@ class TransposeLayerAttributes(BaseLayerAttributes): dim0: int dim1: int - def __eq__(self, other: Any) -> bool: - return ( - isinstance(other, TransposeLayerAttributes) - and super().__eq__(other) - and self.dim0 == other.dim0 - and self.dim1 == other.dim1 - ) - @dataclass class PermuteLayerAttributes(BaseLayerAttributes): @@ -253,15 +228,7 @@ class PermuteLayerAttributes(BaseLayerAttributes): :param permutation: the desired ordering of dimensions. """ - permutation: List[int] - - def __eq__(self, other: Any) -> bool: - return ( - isinstance(other, PermuteLayerAttributes) - and super().__eq__(other) - and len(self.permutation) == len(other.permutation) - and (l == r for l, r in zip(self.permutation, other.permutation)) - ) + permutation: Tuple[int, ...] @dataclass @@ -282,3 +249,14 @@ class PadLayerAttributes(BaseLayerAttributes): mode: str = "constant" value: float = 0 + + +@dataclass +class ConvertDtypeLayerAttributes(BaseLayerAttributes): + """ + :param src_dtype: node input data type. + :param dst_dtype: node output data type. 
+ """ + + src_dtype: Any + dst_dtype: Any diff --git a/nncf/common/graph/operator_metatypes.py b/nncf/common/graph/operator_metatypes.py index 6a8f1d7b64f..305a3b66668 100644 --- a/nncf/common/graph/operator_metatypes.py +++ b/nncf/common/graph/operator_metatypes.py @@ -22,6 +22,7 @@ class OperatorMetatype: :param name: The name of the operator. :param hw_config_names: The names of the hardware configurations. :param output_channel_axis: The axis along which the output channels of the operator are arranged. + :param ignored_input_ports: Input ports of the operations that should not be considered for purposes of compression. """ name: str = "" diff --git a/nncf/common/graph/patterns/patterns.py b/nncf/common/graph/patterns/patterns.py index 1e3a71565e7..ae39925ad8d 100644 --- a/nncf/common/graph/patterns/patterns.py +++ b/nncf/common/graph/patterns/patterns.py @@ -291,12 +291,11 @@ class HWFusedPatternNames(Enum): # BLOCK PATTERNS ADD_SCALE_SHIFT_OUTPUT = PatternDesc("add_scale_shift_output") BATCH_INDEX = PatternDesc("batch_index") - EQUAL_LOGICALNOT = PatternDesc("equal_logicalnot") - FC_BN_HSWISH_ACTIVATION = PatternDesc("fc_bn_hswish_activation") LINEAR_WITH_BIAS = PatternDesc("linear_with_bias") MVN_SCALE_SHIFT = PatternDesc("mvn_scale_shift") NORMALIZE_L2_MULTIPLY = PatternDesc("normalize_l2_multiply") SCALE_SHIFT = PatternDesc("scale_shift") + SHIFT_SCALE = PatternDesc("shift_scale") SE_BLOCK = PatternDesc("se_block") SOFTMAX_DIV = PatternDesc("softmax_div") @@ -340,12 +339,15 @@ class HWFusedPatternNames(Enum): LINEAR_ACTIVATIONS_SCALE_SHIFT = PatternDesc("linear_activations_scale_shift") LINEAR_ARITHMETIC = PatternDesc("linear_arithmetic") LINEAR_ARITHMETIC_ACTIVATIONS = PatternDesc("linear_arithmetic_activations") + # Found in PicoDet models + LINEAR_ARITHMETIC_ACTIVATIONS_ARITHMETIC = PatternDesc("linear_arithmetic_activations_arithmetic") LINEAR_BATCH_NORM = PatternDesc("linear_batch_norm") LINEAR_BATCH_NORM_ACTIVATIONS = PatternDesc("linear_batch_norm_activations") LINEAR_BATCH_NORM_SCALE_SHIFT_ACTIVATIONS = PatternDesc("linear_batch_norm_scale_shift_activations") LINEAR_SCALE_SHIFT_ACTIVATIONS = PatternDesc("linear_scale_shift_activations") LINEAR_CONST_MULTIPLY = PatternDesc("linear_const_multiply") LINEAR_SQUEEZE_ACTIVATIONS = PatternDesc("linear_squeeze_activations") + LINEAR_ACTIVATIONS_UNSQUEEZE_BN_SQUEEZE = PatternDesc("linear_activations_unsqueeze_bn_squeeze") SCALE_SHIFT_ACTIVATIONS = PatternDesc("scale_shift_activations") MVN_SCALE_SHIFT_ACTIVATIONS = PatternDesc("mvn_scale_shift_activations") @@ -379,22 +381,16 @@ class HWFusedPatternNames(Enum): "linear_biased_activation_elementwise", devices=[TargetDevice.ANY, TargetDevice.CPU, TargetDevice.GPU] ) - # TRANSFORMERS - MATMUL_SOFTMAX_MATMUL = PatternDesc("matmul_softmax_matmul", model_types=[ModelType.TRANSFORMER]) - SOFTMAX_RESHAPE_MATMUL = PatternDesc("softmax_reshape_matmul", model_types=[ModelType.TRANSFORMER]) - SOFTMAX_RESHAPE_TRANSPOSE_GATHER_MATMUL = PatternDesc( - "softmax_reshape_transpose_gather_matmul", model_types=[ModelType.TRANSFORMER] - ) - SOFTMAX_RESHAPE_TRANSPOSE_MATMUL = PatternDesc( - "softmax_reshape_transpose_matmul", model_types=[ModelType.TRANSFORMER] - ) - STABLE_DIFFUSION = PatternDesc("stable_diffusion", model_types=[ModelType.TRANSFORMER]) - class IgnoredPatternNames(Enum): """ Describes the patterns, which nodes should be ignored during FakeQuantize placement. 
""" - SOFTMAX_MATMUL = PatternDesc("softmax_matmul", model_types=[ModelType.TRANSFORMER]) - SOFTMAX_RESHAPE_MATMUL = PatternDesc("softmax_reshape_matmul", model_types=[ModelType.TRANSFORMER]) + MULTIHEAD_ATTENTION_OUTPUT = PatternDesc( + "multihead_attention_output", + model_types=[ModelType.TRANSFORMER], + devices=[TargetDevice.ANY, TargetDevice.CPU, TargetDevice.GPU, TargetDevice.VPU], + ) + FC_BN_HSWISH_ACTIVATION = PatternDesc("fc_bn_hswish_activation") + EQUAL_LOGICALNOT = PatternDesc("equal_logicalnot") diff --git a/nncf/common/hardware/config.py b/nncf/common/hardware/config.py index b235ec7ccd2..691c3db469e 100644 --- a/nncf/common/hardware/config.py +++ b/nncf/common/hardware/config.py @@ -39,6 +39,7 @@ class HWConfigType(Enum): "CPU": HWConfigType.CPU.value, "VPU": HWConfigType.VPU.value, "GPU": HWConfigType.GPU.value, + "CPU_SPR": HWConfigType.CPU.value, } @@ -55,8 +56,6 @@ def get_hw_config_type(target_device: str) -> Optional[HWConfigType]: """ if target_device == "TRIAL": return None - if target_device == "CPU_SPR": - raise ValueError(f"{target_device} target device is not supported yet") return HWConfigType(HW_CONFIG_TYPE_TARGET_DEVICE_MAP[target_device]) diff --git a/nncf/common/hardware/configs/cpu.json b/nncf/common/hardware/configs/cpu.json index e19df2657f5..db001c8ad07 100644 --- a/nncf/common/hardware/configs/cpu.json +++ b/nncf/common/hardware/configs/cpu.json @@ -252,6 +252,12 @@ "weights": "q8_w_sym" } }, + { + "type": "ReduceSum", + "quantization": { + "activations": "q8_a" + } + }, {"type": "Flatten"}, {"type": "Squeeze"}, {"type": "Unsqueeze"}, @@ -266,7 +272,12 @@ {"type": "Pad"}, {"type": "ConvertLike"}, // NNCF-specific extensions are below: - {"type": "Embedding"}, // relying on the rule to use default config for weighted ops with unspecified quantization + { + "type": "Embedding", + "quantization": { + "weights": ["q8_w_sym", "q8_w_asym"] + } + }, {"type": "EmbeddingBag"} ] } diff --git a/nncf/common/hardware/configs/gpu.json b/nncf/common/hardware/configs/gpu.json index 1b2308f4f5b..9e6d0e3378b 100644 --- a/nncf/common/hardware/configs/gpu.json +++ b/nncf/common/hardware/configs/gpu.json @@ -234,6 +234,12 @@ "scales": "unified" } }, + { + "type": "ReduceSum", + "quantization": { + "activations": "q8_a" + } + }, {"type": "Flatten"}, {"type": "Squeeze"}, {"type": "Unsqueeze"}, diff --git a/nncf/common/insertion_point_graph.py b/nncf/common/insertion_point_graph.py index 56c58c7b1d5..4ae0027fff7 100644 --- a/nncf/common/insertion_point_graph.py +++ b/nncf/common/insertion_point_graph.py @@ -19,6 +19,7 @@ from nncf.common.graph import Dtype from nncf.common.graph import NNCFGraph from nncf.common.graph import NNCFNodeName +from nncf.common.graph.graph import NNCFNode from nncf.common.graph.graph_matching import find_subgraphs_matching_pattern from nncf.common.graph.operator_metatypes import INPUT_NOOP_METATYPES from nncf.common.graph.patterns import GraphPattern @@ -128,8 +129,14 @@ def __init__( for edge in self._base_nx_graph.edges: input_port_id = self._base_nx_graph.edges[edge][NNCFGraph.INPUT_PORT_ID_EDGE_ATTR] dtype = self._base_nx_graph.edges[edge][NNCFGraph.DTYPE_EDGE_ATTR] + parallel_input_port_ids = self._base_nx_graph.edges[edge][NNCFGraph.PARALLEL_INPUT_PORT_IDS_ATTR] from_node, to_node = edge - attrs = {INPUT_PORT_ID: input_port_id, self.IS_INTEGER_PATH_EDGE_ATTR: dtype is Dtype.INTEGER} + + attrs = { + INPUT_PORT_ID: input_port_id, + self.IS_INTEGER_PATH_EDGE_ATTR: dtype is Dtype.INTEGER, + NNCFGraph.PARALLEL_INPUT_PORT_IDS_ATTR: 
parallel_input_port_ids, + } self.add_edge(from_node, to_node, **attrs) node_keys_working_set = [deepcopy(node_key) for node_key in nx.lexicographical_topological_sort(self)] @@ -148,7 +155,14 @@ def __init__( pre_hook_ips = list(target_node_name_vs_pre_hook_ips[original_node.node_name]) pre_hook_ips = sorted(pre_hook_ips, key=lambda x: x.input_port_id) in_edges = list(self.in_edges(operator_node_key)) - input_port_id_vs_edge = {self.edges[edge][INPUT_PORT_ID]: edge for edge in in_edges} + input_port_id_vs_edge = {} + for edge in in_edges: + input_port_id = self.edges[edge][INPUT_PORT_ID] + input_port_id_vs_edge[input_port_id] = edge + for parallel_input_port_id in self.edges[edge][NNCFGraph.PARALLEL_INPUT_PORT_IDS_ATTR]: + input_port_id_vs_edge[parallel_input_port_id] = edge + + encountered_input_edges = set() for pre_hook_point in pre_hook_ips: edge = input_port_id_vs_edge[pre_hook_point.input_port_id] original_edge_attrs = self.edges[edge] @@ -162,11 +176,14 @@ def __init__( self.add_node(ip_node_key, **pre_hook_ip_attrs) - self.remove_edge(from_node_key, to_node_key) + encountered_input_edges.add(edge) self.add_edge(from_node_key, ip_node_key, **original_edge_attrs) self.add_edge(ip_node_key, operator_node_key, **original_edge_attrs) operator_node[InsertionPointGraph.ASSOCIATED_IP_NODE_KEYS_NODE_ATTR].add(ip_node_key) + for edge in encountered_input_edges: + self.remove_edge(*edge) + if original_node.node_name in target_node_name_vs_post_hook_ips: post_hook_ips = target_node_name_vs_post_hook_ips[original_node.node_name] assert len(post_hook_ips) == 1, "Multiple post-hooks for a single NNCFGraph node are not supported!" @@ -278,8 +295,7 @@ def get_input_nodes(self) -> List[str]: continue if data[InsertionPointGraph.IS_MERGED_NODE_ATTR]: for nncf_node in data[InsertionPointGraph.MERGED_NNCF_NODE_LIST_NODE_ATTR]: - node_k = nncf_node.data[NNCFGraph.KEY_NODE_ATTR] - if self._base_nx_graph.nodes[node_k][NNCFGraph.METATYPE_ATTR] in INPUT_NOOP_METATYPES: + if self._base_nx_graph.nodes[nncf_node.node_key][NNCFNode.METATYPE_ATTR] in INPUT_NOOP_METATYPES: output.append(node) break elif data[InsertionPointGraph.REGULAR_NODE_REF_NODE_ATTR].metatype in INPUT_NOOP_METATYPES: @@ -300,8 +316,7 @@ def get_merged_node_from_single_node_key(self, node_key: str) -> str: continue if data[InsertionPointGraph.IS_MERGED_NODE_ATTR]: for nncf_node in data[InsertionPointGraph.MERGED_NNCF_NODE_LIST_NODE_ATTR]: - node_k = nncf_node.data[NNCFGraph.KEY_NODE_ATTR] - if node_key == node_k: + if node_key == nncf_node.node_key: return node return node_key diff --git a/nncf/common/plotting.py b/nncf/common/plotting.py new file mode 100644 index 00000000000..bcb189561e3 --- /dev/null +++ b/nncf/common/plotting.py @@ -0,0 +1,22 @@ +# Copyright (c) 2023 Intel Corporation +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+from contextlib import contextmanager + +from matplotlib import pyplot as plt + + +@contextmanager +def noninteractive_plotting(): + backend = plt.get_backend() + plt.switch_backend("agg") + plt.ioff() + yield + plt.switch_backend(backend) diff --git a/nncf/common/pruning/mask_propagation.py b/nncf/common/pruning/mask_propagation.py index 0dcea245395..9fa3b7a3d32 100644 --- a/nncf/common/pruning/mask_propagation.py +++ b/nncf/common/pruning/mask_propagation.py @@ -29,7 +29,7 @@ class MaskPropagationAlgorithm: """ Algorithm responsible for propagation masks across all nodes in the graph. - Before call mask_propagation() you need set node.data['output_masks'] + Before call mask_propagation() you need set node.attributes['output_masks'] for nodes that have masks already defined. """ @@ -100,7 +100,7 @@ def symbolic_mask_propagation( for node in self._graph.topological_sort(): if node.node_id in can_be_closing_convs and can_prune_after_analysis[node.node_id]: # Set output mask - node.data["output_mask"] = SymbolicMask(get_output_channels(node), node.node_id) + node.attributes["output_mask"] = SymbolicMask(get_output_channels(node), node.node_id) # Propagate masks cls = self.get_meta_operation_by_type_name(node.node_type) cls.mask_propagation(node, self._graph, SymbolicMaskProcessor) @@ -140,7 +140,7 @@ def symbolic_mask_propagation( # Clean nodes masks for node in self._graph.get_all_nodes(): - node.data["output_mask"] = None + node.attributes["output_mask"] = None return can_prune_by_dim diff --git a/nncf/common/pruning/operations.py b/nncf/common/pruning/operations.py index f5e31cee8f6..4caadd69a81 100644 --- a/nncf/common/pruning/operations.py +++ b/nncf/common/pruning/operations.py @@ -74,7 +74,7 @@ def accept_pruned_input(cls, node: NNCFNode) -> bool: def mask_propagation( cls, node: NNCFNode, graph: NNCFGraph, tensor_processor: Type[NNCFPruningBaseTensorProcessor] ) -> None: - node.data["output_mask"] = None + node.attributes["output_mask"] = None class OutputPruningOp(BasePruningOp): @@ -86,7 +86,7 @@ def accept_pruned_input(cls, node: NNCFNode) -> bool: def mask_propagation( cls, node: NNCFNode, graph: NNCFGraph, tensor_processor: Type[NNCFPruningBaseTensorProcessor] ) -> None: - node.data["output_mask"] = None + node.attributes["output_mask"] = None class IdentityMaskForwardPruningOp(BasePruningOp): @@ -113,14 +113,14 @@ def mask_propagation( cls, node: NNCFNode, graph: NNCFGraph, tensor_processor: Type[NNCFPruningBaseTensorProcessor] ) -> None: input_masks = get_input_masks(node, graph) - output_mask = node.data.get("output_mask", None) + output_mask = node.attributes.get("output_mask", None) if is_grouped_conv(node): output_mask = None if is_prunable_depthwise_conv(node): output_mask = input_masks[0] - node.data["output_mask"] = output_mask + node.attributes["output_mask"] = output_mask class TransposeConvolutionPruningOp(BasePruningOp): @@ -135,7 +135,7 @@ def mask_propagation( cls, node: NNCFNode, graph: NNCFGraph, tensor_processor: Type[NNCFPruningBaseTensorProcessor] ) -> None: input_masks = get_input_masks(node, graph) - output_mask = node.data.get("output_mask", None) + output_mask = node.attributes.get("output_mask", None) # In case of group convs we can't prune by output filters if is_grouped_conv(node): @@ -143,7 +143,7 @@ def mask_propagation( if is_prunable_depthwise_conv(node): output_mask = input_masks[0] - node.data["output_mask"] = output_mask + node.attributes["output_mask"] = output_mask class LinearPruningOp(BasePruningOp): @@ -155,8 +155,8 @@ def 
accept_pruned_input(cls, node: NNCFNode) -> bool: def mask_propagation( cls, node: NNCFNode, graph: NNCFGraph, tensor_processor: Type[NNCFPruningBaseTensorProcessor] ) -> None: - output_mask = node.data.get("output_mask", None) - node.data["output_mask"] = output_mask + output_mask = node.attributes.get("output_mask", None) + node.attributes["output_mask"] = output_mask class BatchNormPruningOp(BasePruningOp): @@ -187,7 +187,7 @@ def mask_propagation( if cls.accept_pruned_input(node): identity_mask_propagation(node, graph) else: - node.data["output_mask"] = None + node.attributes["output_mask"] = None class LayerNormPruningOp(BasePruningOp): @@ -222,7 +222,7 @@ def generate_output_mask( """ input_edges = graph.get_input_edges(node) previous_nodes = [edge.from_node for edge in input_edges] - input_masks = [input_node.data["output_mask"] for input_node in previous_nodes] + input_masks = [input_node.attributes["output_mask"] for input_node in previous_nodes] input_masks = [ input_mask[node.node_name] if isinstance(input_mask, dict) else input_mask for input_mask in input_masks ] @@ -249,7 +249,7 @@ def mask_propagation( cls, node: NNCFNode, graph: NNCFGraph, tensor_processor: Type[NNCFPruningBaseTensorProcessor] ) -> None: result_mask = cls.generate_output_mask(node, graph, tensor_processor) - node.data["output_mask"] = result_mask + node.attributes["output_mask"] = result_mask class SplitPruningOp(BasePruningOp): @@ -328,7 +328,7 @@ def mask_propagation( cls, node: NNCFNode, graph: NNCFGraph, tensor_processor: Type[NNCFPruningBaseTensorProcessor] ) -> None: result_masks = cls.generate_output_masks(node, graph, tensor_processor) - node.data["output_mask"] = result_masks + node.attributes["output_mask"] = result_masks class PadPruningOp(IdentityMaskForwardPruningOp): @@ -354,7 +354,7 @@ def mask_propagation( if output_mask is not None: output_mask = tensor_processor.elementwise_mask_propagation(input_masks) - node.data["output_mask"] = output_mask + node.attributes["output_mask"] = output_mask class ReshapePruningOp(BasePruningOp): @@ -386,9 +386,9 @@ def mask_propagation( elif cls._is_not_mixing_dim(node): identity_mask_propagation(node, graph) else: - node.data["output_mask"] = None + node.attributes["output_mask"] = None else: - node.data["output_mask"] = None + node.attributes["output_mask"] = None class FlattenPruningOp(BasePruningOp): @@ -415,7 +415,7 @@ def mask_propagation(cls, node: NNCFNode, graph: NNCFGraph, tensor_processor: Ty assert flatten_channels % mask_len == 0 output_mask = tensor_processor.repeat(input_mask, repeats=flatten_channels // mask_len) - node.data["output_mask"] = output_mask + node.attributes["output_mask"] = output_mask class StopMaskForwardPruningOp(BasePruningOp): @@ -427,4 +427,4 @@ def accept_pruned_input(cls, node: NNCFNode) -> bool: def mask_propagation( cls, node: NNCFNode, graph: NNCFGraph, tensor_processor: Type[NNCFPruningBaseTensorProcessor] ) -> None: - node.data["output_mask"] = None + node.attributes["output_mask"] = None diff --git a/nncf/common/pruning/shape_pruning_processor.py b/nncf/common/pruning/shape_pruning_processor.py index bc473ab1a30..9bb099cb17f 100644 --- a/nncf/common/pruning/shape_pruning_processor.py +++ b/nncf/common/pruning/shape_pruning_processor.py @@ -184,7 +184,7 @@ def get_next_nodes( # 1. 
Propagate symbolic masks throught the net for pruned_layer_info in pruning_groups.get_all_nodes(): node = graph.get_node_by_id(pruned_layer_info.nncf_node_id) - node.data["output_mask"] = SymbolicMask(get_output_channels(node), node.node_id) + node.attributes["output_mask"] = SymbolicMask(get_output_channels(node), node.node_id) MaskPropagationAlgorithm(graph, self._pruning_operations_metatype, SymbolicMaskProcessor).mask_propagation() @@ -209,6 +209,6 @@ def get_next_nodes( # 3. Clean graph output shapes for node in graph.get_all_nodes(): - node.data["output_shape"] = None + node.attributes["output_shape"] = None return next_nodes diff --git a/nncf/common/pruning/statistics.py b/nncf/common/pruning/statistics.py index 7ffab8fe99d..006fd4b5b0b 100644 --- a/nncf/common/pruning/statistics.py +++ b/nncf/common/pruning/statistics.py @@ -146,7 +146,7 @@ def to_str(self) -> str: ) pretty_string = ( - f"{self.model_statistics.to_str()}\n" f"Statistics of the filter pruning algorithm:\n{algorithm_string}" + f"{self.model_statistics.to_str()}\nStatistics of the filter pruning algorithm:\n{algorithm_string}" ) return pretty_string @@ -190,15 +190,15 @@ def to_str(self) -> str: rows=[ [ "Pruned layers count / prunable layers count", - f"{self.pruned_layers_num} /" f" {self.prunable_layers_num}", + f"{self.pruned_layers_num} / {self.prunable_layers_num}", ], [ "GFLOPs minimum possible after pruning / total", - f"{self.min_possible_flops / self._giga:.3f} /" f" {self.total_flops / self._giga:.3f}", + f"{self.min_possible_flops / self._giga:.3f} / {self.total_flops / self._giga:.3f}", ], [ "MParams minimum possible after pruning / total", - f"{self.min_possible_params / self._mega:.3f} /" f" {self.total_params / self._mega:.3f}", + f"{self.min_possible_params / self._mega:.3f} / {self.total_params / self._mega:.3f}", ], ], ) diff --git a/nncf/common/pruning/utils.py b/nncf/common/pruning/utils.py index 9205b37b358..fcded91c95d 100644 --- a/nncf/common/pruning/utils.py +++ b/nncf/common/pruning/utils.py @@ -40,7 +40,7 @@ def is_batched_linear(node: NNCFNode, graph: NNCFGraph) -> bool: Returns `True` if a feeded linear node output tensor has no more than two dimensions. A linear layer has more than two output dimensions means, that this linear layer multiplies several input matrices feeded by batch dimensions - from the left/right or both inputs. Batch input dimentions are elements of [:-2] slice. + from the left/right or both inputs. Batch input dimensions are elements of [:-2] slice. :param node: NNCFNode to check. :param graph: NNCFGraph which feeded node is belonged to. @@ -133,7 +133,7 @@ def get_rounded_pruned_element_number(total: int, sparsity_rate: float, multiple Always rounds number of remaining elements up. :param total: Total elements number. - :param sparsity_rate: Prorortion of zero elements in total. + :param sparsity_rate: Proportion of zero elements in total. :param multiple_of: Number of remaining elements must be a multiple of `multiple_of`. :return: Number of elements to be zeroed. 
""" @@ -255,7 +255,7 @@ class PruningAnalysisReason(Enum): DIMENSION_MISMATCH = "of dimension mismatch" CLOSING_CONV_MISSING = "closing convolution missing" IN_GROUP_OF_UNPRUNABLE = "is in the group with non prunable layers" - BATCHED_LINEAR = "linear node has bathced dimension(s)" + BATCHED_LINEAR = "linear node has batched dimension(s)" INCOMPATIBLE_DIMS_IN_CLUSTER = "channels in cluster nodes have different values" @classmethod @@ -367,7 +367,7 @@ def get_input_masks(node: NNCFNode, graph: NNCFGraph) -> List[Optional[NNCFTenso :return: Input masks. """ retval = [] - input_masks = [input_edge.from_node.data["output_mask"] for input_edge in graph.get_input_edges(node)] + input_masks = [input_edge.from_node.attributes["output_mask"] for input_edge in graph.get_input_edges(node)] for input_mask in input_masks: retval.append(input_mask[node.node_name] if isinstance(input_mask, dict) else input_mask) return retval @@ -416,4 +416,4 @@ def identity_mask_propagation(node: NNCFNode, graph: NNCFGraph) -> None: input_masks = [None] assert len(input_masks) == 1 - node.data["output_mask"] = input_masks[0] + node.attributes["output_mask"] = input_masks[0] diff --git a/nncf/common/quantization/config_assignment.py b/nncf/common/quantization/config_assignment.py index a2c78b95236..abaddf95427 100644 --- a/nncf/common/quantization/config_assignment.py +++ b/nncf/common/quantization/config_assignment.py @@ -94,19 +94,15 @@ def assign_qconfig_lists_to_modules( qconfig_list = [default_qconfig] elif HWConfig.is_qconf_list_corresponding_to_unspecified_op(qconfig_list): continue # The module will not have its weights quantized - try: - local_constraints = global_weight_constraints - for overridden_scope, scoped_override_dict in scope_overrides_dict.items(): - if matches_any(node.node_name, overridden_scope): - scope_constraints = QuantizationConstraints.from_config_dict(scoped_override_dict) - local_constraints = local_constraints.get_updated_constraints(scope_constraints) - qconfig_list = local_constraints.constrain_qconfig_list(qconfig_list) - except RuntimeError as e: - err_msg = "Quantization parameter constraints specified in NNCF config are incompatible with HW " - err_msg += "capabilities as specified in HW config type '{}'. 
".format(hw_config.target_device) - err_msg += "First conflicting quantizer location: {}".format(str(node.node_name)) - raise RuntimeError(err_msg) from e + local_constraints = global_weight_constraints + for overridden_scope, scoped_override_dict in scope_overrides_dict.items(): + if matches_any(node.node_name, overridden_scope): + scope_constraints = QuantizationConstraints.from_config_dict(scoped_override_dict) + local_constraints = local_constraints.get_updated_constraints(scope_constraints) + qconfig_list = local_constraints.constrain_qconfig_list( + node.node_name, hw_config.target_device, qconfig_list + ) retval[node] = qconfig_list return retval diff --git a/nncf/common/quantization/quantizer_propagation/graph.py b/nncf/common/quantization/quantizer_propagation/graph.py index 88e183b5477..3baba6c1e44 100644 --- a/nncf/common/quantization/quantizer_propagation/graph.py +++ b/nncf/common/quantization/quantizer_propagation/graph.py @@ -12,11 +12,14 @@ from collections import deque from copy import copy from copy import deepcopy +from dataclasses import dataclass from typing import Any, Callable, Dict, List, Optional, Set, Tuple, Type, Union import networkx as nx +from nncf import nncf_logger from nncf.common.graph import INPUT_NOOP_METATYPES +from nncf.common.graph import OUTPUT_NOOP_METATYPES from nncf.common.graph import NNCFNode from nncf.common.graph import NNCFNodeName from nncf.common.graph import OperatorMetatype @@ -26,7 +29,6 @@ from nncf.common.insertion_point_graph import InsertionPointGraphNodeType from nncf.common.insertion_point_graph import PostHookInsertionPoint from nncf.common.insertion_point_graph import PreHookInsertionPoint -from nncf.common.logging import nncf_logger from nncf.common.quantization.quantizer_propagation.grouping import UnifiedScalePropagatingQuantizerGroupManager from nncf.common.quantization.quantizer_propagation.structs import IgnoreReason from nncf.common.quantization.quantizer_propagation.structs import PropagatingQuantizer @@ -72,18 +74,22 @@ class QuantizerPropagationStateGraph(nx.DiGraph): BARRIER_NODE_KEY_POSTFIX = "BARRIER" def __init__( - self, ip_graph: InsertionPointGraph, ignored_scopes: List[str] = None, target_scopes: List[str] = None + self, + ip_graph: InsertionPointGraph, + ignored_scopes: Dict[str, IgnoreReason] = None, + target_scopes: List[str] = None, ): super().__init__() ip_graph = deepcopy(ip_graph) self._created_prop_quantizer_counter = 0 - self._ignored_scopes = deepcopy(ignored_scopes) + self._ignored_scopes = list(ignored_scopes.keys()) if ignored_scopes is not None else None self._target_scopes = deepcopy(target_scopes) self.ignored_node_keys = {} # type: Dict[str, IgnoreReason] self._unified_scale_group_manager = UnifiedScalePropagatingQuantizerGroupManager() self._input_node_keys_vs_nncf_nodes = {} # type: Dict[str, NNCFNode] + self._output_node_keys_vs_nncf_nodes = {} # type: Dict[str, NNCFNode] self._pqs_after_weight_dependent_output_quantized_nodes = {} # type: Dict[PropagatingQuantizer, str] self.op_node_keys_to_underlying_nodes_mapping = {} # type: Dict[str, List[NNCFNode]] @@ -131,7 +137,9 @@ def __init__( if ignored: qpg_node[self.IS_IN_IGNORED_SCOPES] = True - self.ignored_node_keys[node_key] = IgnoreReason.USER_REQUESTED + self.ignored_node_keys[node_key] = ignored_scopes.get( + primary_node.node_name, IgnoreReason.USER_REQUESTED + ) # TODO (vshampor): do we need here NoopMetatype qpg_node[self.OPERATOR_METATYPE_NODE_ATTR] = NoopMetatype else: @@ -139,6 +147,8 @@ def __init__( if nncf_node_ref.metatype in 
INPUT_NOOP_METATYPES: self._input_node_keys_vs_nncf_nodes[node_key] = nncf_node_ref + if nncf_node_ref.metatype in OUTPUT_NOOP_METATYPES: + self._output_node_keys_vs_nncf_nodes[node_key] = nncf_node_ref if nncf_node_ref.is_in_iteration_scope(): iteration_scope_node_keys.append(node_key) @@ -153,6 +163,15 @@ def __init__( for barred_node_key in list(self.ignored_node_keys.keys()) + iteration_scope_node_keys: self._add_barrier_after_node(barred_node_key) + self._branch_nodes_directly_dominating_outputs = None + + def get_input_node_keys(self) -> List[str]: + """ + Returns graph input node keys. + + :return: List of the input node keys. + """ + return self._input_node_keys_vs_nncf_nodes.keys() def get_node_keys_by_metatype(self, metatype: Type[OperatorMetatype]) -> List[str]: """ @@ -167,8 +186,9 @@ def get_node_keys_by_metatype(self, metatype: Type[OperatorMetatype]) -> List[st output.append(node) return output + @staticmethod def _insertion_point_to_quant_insertion_point( - self, ip: Union[PreHookInsertionPoint, PostHookInsertionPoint] + ip: Union[PreHookInsertionPoint, PostHookInsertionPoint] ) -> QuantizationInsertionPointBase: if isinstance(ip, PreHookInsertionPoint): return ActivationQuantizationInsertionPoint(ip.target_node_name, input_port_id=ip.input_port_id) @@ -209,7 +229,7 @@ def get_barrier_node_key(node_key: str) -> str: def mark_act_quantizer_as_dependent_on_weights(self, pq: PropagatingQuantizer, operator_node_key: str): """ Marks a given propagating quantizer corresponding to input activation quantization - of some downstream op as depenedent on weights of an operation that gives its weights directly + of some downstream op as dependent on weights of an operation that gives its weights directly as outputs (such as Embedding). The quantizer marked in this manner will be later considered for removal if the weights of the weight-as-outputs operation are quantized in a compatible way (i.e. with the same quantizer configuration) as is required by the propagating activation @@ -233,8 +253,8 @@ def mark_act_quantizer_as_dependent_on_weights(self, pq: PropagatingQuantizer, o and self._pqs_after_weight_dependent_output_quantized_nodes[pq] != operator_node_key ): raise RuntimeError( - "Propagating quantizer {} is already marked as depending on node {} weight " - "quantization!".format(pq.id, operator_node_key) + f"Propagating quantizer {pq.id} is already marked as depending on node " + f"{operator_node_key} weight quantization!" 
) self._pqs_after_weight_dependent_output_quantized_nodes[pq] = operator_node_key @@ -754,17 +774,40 @@ def get_paths_to_immediately_dominating_insertion_points( self, insertion_point_node_key: str ) -> List[PropagationPath]: group_dict = self.get_paths_to_immediately_dominating_insertion_points_grouped_by_unified_scales( - insertion_point_node_key, set() + insertion_point_node_key, set(), {} ) return group_dict[None] def get_paths_to_immediately_dominating_insertion_points_grouped_by_unified_scales( - self, insertion_point_node_key: str, unified_scale_op_metatypes: Set[Type[OperatorMetatype]] + self, + insertion_point_node_key: str, + unified_scale_op_metatypes: Set[Type[OperatorMetatype]], + scales_unification_map: Dict[OperatorMetatype, OperatorMetatype], ) -> Dict[Optional[int], List[PropagationPath]]: """Paths are lists of edges.""" next_group_idx = 0 paths = {} + def followed_by_weighted_types(curr_node_key, curr_node_metatype) -> bool: + nodes_queue = deque(self.successors(curr_node_key)) + while nodes_queue: + next_node_key = nodes_queue.popleft() + next_node = self.nodes[next_node_key] + next_node_type = next_node[QuantizerPropagationStateGraph.NODE_TYPE_NODE_ATTR] + if next_node_type != QuantizerPropagationStateGraphNodeType.OPERATOR: + nodes_queue.extend(self.successors(next_node_key)) + else: + next_node_metatype = next_node[QuantizerPropagationStateGraph.OPERATOR_METATYPE_NODE_ATTR] + next_node_trait = next_node[QuantizerPropagationStateGraph.QUANTIZATION_TRAIT_NODE_ATTR] + if ( + next_node_trait == QuantizationTrait.QUANTIZATION_AGNOSTIC + or next_node_metatype in unified_scale_op_metatypes + ): + nodes_queue.extend(self.successors(next_node_key)) + if next_node_metatype in scales_unification_map[curr_node_metatype]: + return True + return False + def recursive_helper(curr_edge, curr_path, all_paths, curr_group): nonlocal next_group_idx curr_path.append(curr_edge) @@ -780,11 +823,14 @@ def recursive_helper(curr_edge, curr_path, all_paths, curr_group): if curr_node_type == QuantizerPropagationStateGraphNodeType.OPERATOR: metatype = curr_node[QuantizerPropagationStateGraph.OPERATOR_METATYPE_NODE_ATTR] - if ( - metatype in unified_scale_op_metatypes - and curr_group is None - and len(self.in_edges(curr_node_key)) > 1 - ): + unify_conditions = [ + metatype in unified_scale_op_metatypes, + curr_group is None, + len(self.in_edges(curr_node_key)) > 1, + ] + if scales_unification_map is not None and metatype in scales_unification_map: + unify_conditions.append(followed_by_weighted_types(curr_node_key, metatype)) + if all(unify_conditions): curr_group = next_group_idx next_group_idx += 1 @@ -821,6 +867,55 @@ def traverse_fn( self.traverse_graph(node_key, traverse_fn, retval) return retval + def _build_branch_direct_output_dominators_info(self) -> Set[str]: + """ + Traverses the graph backwards starting from outputs. If there is a path from an output to a branching node + that only passes through quantization-agnostic ops, then this branching node is directly dominating an output. + :return: The set of node names that directly dominate at least one output. 
+ """ + + @dataclass + class LocalState: + global_result_ref: Set[str] + encountered_quantizer_aware_ops: bool = False + + def traverse_fn(curr_node_key: str, local_state: LocalState) -> Tuple[bool, LocalState]: + curr_node = self.nodes[curr_node_key] + if len(list(self.successors(curr_node_key))) > 1: + if not local_state.encountered_quantizer_aware_ops: + local_state.global_result_ref.add(curr_node_key) + return True, local_state + + curr_node_type = curr_node[QuantizerPropagationStateGraph.NODE_TYPE_NODE_ATTR] + if curr_node_type == QuantizerPropagationStateGraphNodeType.OPERATOR: + node_trait = curr_node[QuantizerPropagationStateGraph.QUANTIZATION_TRAIT_NODE_ATTR] + op_meta = curr_node[QuantizerPropagationStateGraph.OPERATOR_METATYPE_NODE_ATTR] + if op_meta not in OUTPUT_NOOP_METATYPES and node_trait in [ + QuantizationTrait.INPUTS_QUANTIZABLE, + QuantizationTrait.OUTPUT_QUANTIZATION_AS_WEIGHTS, + QuantizationTrait.NON_QUANTIZABLE, + ]: + local_state.encountered_quantizer_aware_ops = True + return False, local_state + + visited_node_keys = set() + result = set() + for output_node_key in self._output_node_keys_vs_nncf_nodes: + output_state = LocalState(result) + self._traverse_graph_recursive_helper( + output_node_key, visited_node_keys, traverse_fn, output_state, traverse_backward=True, visit_once=False + ) + return result + + def is_branching_node_dominating_outputs(self, from_node_key: str) -> bool: + """ + Checks that all branches outgoing from the branching node can be quantized + (They do not contain an output that should not be quantized). + """ + if self._branch_nodes_directly_dominating_outputs is None: + self._branch_nodes_directly_dominating_outputs = self._build_branch_direct_output_dominators_info() + return from_node_key in self._branch_nodes_directly_dominating_outputs + def get_visualized_graph(self): out_graph = nx.DiGraph() unified_scale_group_vs_pq_node_id_dict = {} # type: Dict[int, List[str]] @@ -941,18 +1036,21 @@ def _traverse_graph_recursive_helper( visited_node_keys: Set[str], traverse_function: Callable[[str, Any], Tuple[bool, Any]], output: Any, - traverse_forward: bool, + traverse_backward: bool = False, + visit_once: bool = True, ): """This is DFS, and may fail with 'maximum recursion depth exceeded' for complex graphs.""" is_finished, output = traverse_function(curr_node_key, output) - visited_node_keys.add(curr_node_key) - next_node_keys_indexer = self.succ if traverse_forward else self.pred + if visit_once: + visited_node_keys.add(curr_node_key) + next_node_keys_indexer = self.pred if traverse_backward else self.succ if not is_finished: for node_key in next_node_keys_indexer[curr_node_key]: - if node_key not in visited_node_keys: - self._traverse_graph_recursive_helper( - node_key, visited_node_keys, traverse_function, output, traverse_forward - ) + if visit_once and node_key in visited_node_keys: + continue + self._traverse_graph_recursive_helper( + node_key, visited_node_keys, traverse_function, output, traverse_backward, visit_once + ) return output def _get_next_prop_quantizer_id(self): @@ -1293,9 +1391,9 @@ def _handle_output_quantizers_for_weights_as_outputs_ops( all_qp_ids_in_unified_scale_group = {qp_id_for_current_pq} for act_qp_id in all_qp_ids_in_unified_scale_group: curr_act_qconfigs = setup.quantization_points[act_qp_id].possible_qconfigs - curr_intersection_of_qconfigs = [ - qconf for qconf in curr_intersection_of_qconfigs if qconf in curr_act_qconfigs - ] + curr_intersection_of_qconfigs = 
self._get_weight_and_activation_qconfig_list_intersection( + curr_intersection_of_qconfigs, curr_act_qconfigs + ) # Do further filtering for per-tensor quantizations only. # TODO: relax the requirement to allow the scale shape of the weight-as-output quantizer @@ -1321,7 +1419,7 @@ def _handle_output_quantizers_for_weights_as_outputs_ops( [str(setup.quantization_points[qp_id]) for qp_id in all_qp_ids_in_unified_scale_group] ) nncf_logger.debug( - f"Unifying weight quantizer ranges of {wao_op_node_key} " f"with {unified_scale_qp_printable_str}" + f"Unifying weight quantizer ranges of {wao_op_node_key} with {unified_scale_qp_printable_str}" ) # The activation quantizer is now unnecessary since we could find a matching weight quantization @@ -1332,6 +1430,27 @@ def _handle_output_quantizers_for_weights_as_outputs_ops( setup.discard(qp_id_for_current_pq, keep_shared_input_qps=True) return setup + @staticmethod + def _get_weight_and_activation_qconfig_list_intersection( + weight_qconfig_options: List[QuantizerConfig], activation_qconfig_options: List[QuantizerConfig] + ) -> List[QuantizerConfig]: + """ + Returns special intersection between weight and activation quantization configurations. + + :param weight_qconfig_options: List of QuantizerConfig associated with weights. + :param activation_qconfig_options: List of QuantizerConfig associated with activations. + :return: Special intersection between configurations. + """ + act_qconfig_extend_list = [] + for act_qconfig in activation_qconfig_options: + if act_qconfig.signedness_to_force is None: + for signedness_to_force_position in [True, False]: + act_qconfig_updated = deepcopy(act_qconfig) + act_qconfig_updated.signedness_to_force = signedness_to_force_position + act_qconfig_extend_list.append(act_qconfig_updated) + act_qconfig_extend_list += activation_qconfig_options + return [qconf for qconf in weight_qconfig_options if qconf in act_qconfig_extend_list] + def run_consistency_check(self) -> bool: all_pqs = self.collect_all_propagating_quantizers() diff --git a/nncf/common/quantization/quantizer_propagation/solver.py b/nncf/common/quantization/quantizer_propagation/solver.py index b98b7e0b3d9..2746af06a55 100644 --- a/nncf/common/quantization/quantizer_propagation/solver.py +++ b/nncf/common/quantization/quantizer_propagation/solver.py @@ -15,7 +15,6 @@ from collections import deque from copy import deepcopy from enum import Enum -from functools import partial from typing import Deque, Dict, List, Optional, Set, Tuple import networkx as nx @@ -24,8 +23,6 @@ from nncf.common.graph import OUTPUT_NOOP_METATYPES from nncf.common.graph import NNCFNodeName from nncf.common.graph import OperatorMetatype -from nncf.common.graph.graph import NNCFGraph -from nncf.common.graph.operator_metatypes import UnknownMetatype from nncf.common.graph.transformations.commands import TargetPoint from nncf.common.hardware.config import HWConfig from nncf.common.insertion_point_graph import InsertionPointGraph @@ -84,7 +81,7 @@ class FinalizedQuantizationProposal: """ Describes a version of QuantizationProposal in which a single quantizer configuration has been chosen (using one or the other way of disambiguation) for each quantization point in the setup that was made available - in the original QuantizationProposel + in the original QuantizationProposal """ def __init__( @@ -120,7 +117,7 @@ def __init__( ): """ :param quantizer_setup: The MultiConfigQuantizerSetup object obtained from a quantizer propagation solver. 
- :param quant_prop_graph: The QuantizerPropagationStateGraph whose state correspoinds to the `quantizer_setup`, + :param quant_prop_graph: The QuantizerPropagationStateGraph whose state corresponds to the `quantizer_setup`, also obtained from the solver :param quantization_point_id_vs_prop_quantizer: A mapping of the quantization point IDs in `quantizer_setup` to propagating quantizers registered in `quant_prop_graph`. @@ -220,22 +217,18 @@ def __init__( ): self._quant_prop_graph = quant_prop_graph self._post_processing_marker_metatypes = post_processing_marker_metatypes - self._quantizable_layer_node_keys = [ - q_nodes.node.data[NNCFGraph.KEY_NODE_ATTR] for q_nodes in quantizable_layer_nodes - ] + self._quantizable_layer_node_keys = [q_nodes.node.node_key for q_nodes in quantizable_layer_nodes] self._post_processing_marker_encountered = False def _is_node_has_underlying_weights(self, node_key: str) -> bool: + if not self._is_node_operator(node_key): + return False underlying_nncf_nodes = self._quant_prop_graph.op_node_keys_to_underlying_nodes_mapping[node_key] for node in underlying_nncf_nodes: - if node.data[NNCFGraph.KEY_NODE_ATTR] in self._quantizable_layer_node_keys: + if node.node_key in self._quantizable_layer_node_keys: return True return False - def _check_if_postprocessing(self, node_metatype: OperatorMetatype) -> None: - if node_metatype in self._post_processing_marker_metatypes: - self._post_processing_marker_encountered = True - def _get_node_metatype(self, node_key: str) -> OperatorMetatype: node = self._quant_prop_graph.nodes[node_key] return node.get(self._quant_prop_graph.OPERATOR_METATYPE_NODE_ATTR) @@ -244,101 +237,68 @@ def _is_node_operator(self, node_key: str) -> bool: node = self._quant_prop_graph.nodes[node_key] return node.get(self._quant_prop_graph.NODE_TYPE_NODE_ATTR) == QuantizerPropagationStateGraphNodeType.OPERATOR - def _get_ignored_node_keys(self, node_keys: List[str]) -> List[str]: - output = [] - for node_key, node_metatype in zip(node_keys, map(self._get_node_metatype, node_keys)): - if node_metatype not in self._post_processing_marker_metatypes: - output.append(node_key) - return output - def get_post_processing_node_keys(self) -> Set[str]: """ Finds out the nodes of the QuantizerPropagationStateGraph, which are in post-processing part of the model. - Starting from the output nodes all the nodes are added, until the quantizable nodes with weights are faced. + Starting from the output nodes all the nodes are added to path, + until the quantizable nodes with weights are faced. If the path with the nodes has the post-processing marker node, - all the nodes in this path will be added into ignored. + all the nodes in this path (except outputs and nodes with weights) will be added into ignored. + :return: Set of the node keys to be ignored. """ - - visited_nodes = set() - - def backward_traverse_function( - node_key: str, output: List[str], visited_nodes: Set[str] - ) -> Tuple[bool, List[str]]: - """ - Realizes the search of the quantization ignored nodes in graph. - Only QuantizerPropagationStateGraphNodeType.OPERATOR nodes are processed during the traversing. - If the current node is in the list of the quantizable nodes with weights, - the traversing is being stopped. - The new forward traversing from the current node starts. - If the quantizable nodes with weights is faced in forward traversing faced, - the original backward traversing is being stopped. 
- - :param node_key: node key to check, whether the traversing has to be stopped or not - and whether the node should be added to the traversed path. - :param output: Path contains the list of the visited nodes. - :param visited_nodes: Set stores whether the particular node was visited before or not. - :return: The first value shows whether the traversing finished, - the second one is traversing path containing the visited nodes. - """ - - def forward_traverse_function( - node_key: str, output: List[str], visited_nodes: Set[str] - ) -> Tuple[bool, List[bool]]: - # If the node is not operator - if not self._is_node_operator(node_key): - return False, output - if node_key in visited_nodes: - return True, output - output.append(node_key) - if self._is_node_has_underlying_weights(node_key): - return True, output - return False, output - - # If the node is not operator - if not self._is_node_operator(node_key): - return False, output - if node_key in visited_nodes: - return True, output - - node_metatype = self._get_node_metatype(node_key) - # If the node weight quantizable - if self._is_node_has_underlying_weights(node_key): - visited_nodes.add(node_key) - return True, output - if node_metatype in list(OUTPUT_NOOP_METATYPES.values()) + list(INPUT_NOOP_METATYPES.values()): - visited_nodes.add(node_key) - return False, output - self._check_if_postprocessing(node_metatype) - partial_forward_traverse_function = partial(forward_traverse_function, visited_nodes=visited_nodes) - forward_visited_node_keys = self._quant_prop_graph.traverse_graph( - node_key, partial_forward_traverse_function, output=[], traverse_forward=True - ) - # If in the path there are nodes with weights should stop the main backward traversing - for forward_visited_node_key in forward_visited_node_keys: - if self._is_node_has_underlying_weights(forward_visited_node_key): - visited_nodes.add(node_key) - return True, output - output.append(node_key) - return False, output - - partial_backward_traverse_function = partial(backward_traverse_function, visited_nodes=visited_nodes) - output = set() - output_nodes = [] for output_metatype in OUTPUT_NOOP_METATYPES.values(): output_nodes.extend(self._quant_prop_graph.get_node_keys_by_metatype(output_metatype)) - for start_node_key in output_nodes: - self._post_processing_marker_encountered = False - node_keys = self._quant_prop_graph.traverse_graph( - start_node_key, partial_backward_traverse_function, output=[], traverse_forward=False - ) - if self._post_processing_marker_encountered: - ignored_node_keys = self._get_ignored_node_keys(node_keys) - output.update(ignored_node_keys) + def get_ignored_operations(output_nodes: List[str]) -> Tuple[Set[str], Set[str]]: + stack = [([start_node_key], False) for start_node_key in output_nodes] + ignored_operations = set() + + def _extend_ignored_operations(path: List[str]): + for node in path: + if ( + self._is_node_operator(node) + and not self._is_node_has_underlying_weights(node) + and node not in output_nodes + ): + ignored_operations.add(node) + + visited = set() + while stack: + path, post_proc_encountered = stack.pop() + node_key = path[-1] + visited.add(node_key) + if ( + self._is_node_operator(node_key) + and self._get_node_metatype(node_key) in self._post_processing_marker_metatypes + ): + post_proc_encountered = True - return output + if ( + self._is_node_has_underlying_weights(node_key) + or node_key in self._quant_prop_graph.get_input_node_keys() + ): + if post_proc_encountered: + _extend_ignored_operations(path) + else: + for input_key 
in self._quant_prop_graph.predecessors(node_key): + if input_key in visited and post_proc_encountered and input_key in ignored_operations: + # We have already visited input node, encountered post_processing node in current path, + # and marked input node as ignored, then we can add entire path to ignored_operations + _extend_ignored_operations(path) + elif input_key in visited and not post_proc_encountered and input_key not in ignored_operations: + # We have already visited input node + # but did not add it to ignored_operations (no post_processing node above) + # and did not encounter post_processing node in current path, + # then we can stop traversal + pass + else: + stack.append((path + [input_key], post_proc_encountered)) + return ignored_operations + + ignored_ops = get_ignored_operations(output_nodes) + return ignored_ops class QuantizerPropagationSolver: @@ -346,7 +306,7 @@ class QuantizerPropagationSolver: Analyzes a fresh QuantizerPropagationStateGraph object according to HW configuration supplied in the initializer and produces the list of insertion commands that correspond to the final state of the quantizer propagation graph - when the model has the most contol flow graph edges quantized according to HW + when the model has the most control flow graph edges quantized according to HW capabilities. """ @@ -358,7 +318,7 @@ class QuantizerPropagationSolver: def __init__( self, - activation_ignored_scopes: List[str] = None, + activation_ignored_scopes: Dict[str, IgnoreReason] = None, weight_ignored_scopes: List[str] = None, activation_target_scopes: List[str] = None, weight_target_scopes: List[str] = None, @@ -373,12 +333,15 @@ def __init__( run_consistency_checks: bool = False, quantize_outputs: bool = False, post_processing_marker_metatypes: List[OperatorMetatype] = None, + metatypes_to_ignore: List[OperatorMetatype] = None, + scales_unification_map: Dict[OperatorMetatype, OperatorMetatype] = None, ): """ Initializes the solver with parameters affecting the resulting quantizer setup. - :param activation_ignored_scopes: A list of strings to match against NNCFGraph node names - and ignore matching nodes. Ignored nodes will not have quantizers applied to their activation inputs + :param activation_ignored_scopes: A dict with key as node name and value as ignore reason + to match against NNCFGraph node names and ignore matching nodes. + Ignored nodes will not have quantizers applied to their activation inputs (even if required by node's metatype and HW config), and the downstream quantizers will not propagate upwards through the corresponding node. :param weight_ignored_scopes: A list of strings to match against NNCFGraph node names @@ -404,7 +367,7 @@ def __init__( quantizable weights, along with the corresponding allowed quantizer configurations. Required to build a complete quantizer setup and impacts the activation quantizer propagation in certain cases. - :param scope_overrides: A dictionary of quantization configuration overides for inputs to matching + :param scope_overrides: A dictionary of quantization configuration overrides for inputs to matching operation nodes. :param global_constraints: Global quantizer configuration constraints that will be applied to what is specified in the HW config to limit the initial set of possible quantizer configurations @@ -412,15 +375,19 @@ def __init__( :param additional_unified_scale_op_scopes: A list of strings to match against NNCFGraph node names, inputs of which must be always quantized with a single scale, i.e. 
with a single set of trainable quantizer parameters. - :param run_consistency_checks: Whether to run internal consistency checks at each propagataion step. + :param run_consistency_checks: Whether to run internal consistency checks at each propagation step. :param quantize_outputs: Whether to insert additional quantizers right before each of the model outputs. - :param post_processing_marker_metatypes: The framework specific NNCF Metatypes, which are markers for - the model post-processing part. They are used for automatic ignoring post-processing nodes. - The seeking post-processing nodes algorithm uses traversing through the model graph from the output nodes. - During traversing all the visited nodes are added, until the quantizable nodes with weights are faced. - If the path with the nodes has the post-processing marker node, - all the nodes in this path will be added into ignored. - If None automatic ignoring will be skipped. + :param post_processing_marker_metatypes: The framework-specific NNCF metatypes that mark + the post-processing part of the model. They are used to automatically ignore post-processing nodes. + The post-processing node search traverses the model graph from the output nodes. + During the traversal all visited nodes are collected until quantizable nodes with weights are encountered. + If such a path contains a post-processing marker node, + all the nodes in this path will be added to the ignored scope. + If None, automatic ignoring is skipped. + :param metatypes_to_ignore: The framework-specific NNCF metatypes + that should be automatically ignored. + :param scales_unification_map: The framework-specific map of NNCF metatypes whose generated quantizers + can be unified, if required, based on the metatype.
""" if default_trait_to_metatype_map is None: self._default_trait_to_metatype_map = {} @@ -463,7 +430,7 @@ def __init__( # Will handle the "wildcard" quantization situation for the time being if default_qconfig_list is not None: for op_meta, qconf_list in self._operator_allowed_qconfigs_map.items(): - trait = self._operator_quantization_trait_map.get(op_meta, QuantizationTrait.QUANTIZATION_AGNOSTIC) + trait = self._operator_quantization_trait_map.get(op_meta, QuantizationTrait.NON_QUANTIZABLE) if trait == QuantizationTrait.INPUTS_QUANTIZABLE: if HWConfig.is_qconf_list_corresponding_to_unspecified_op(qconf_list): self._operator_allowed_qconfigs_map[op_meta] = default_qconfig_list @@ -475,12 +442,14 @@ def __init__( self._num_potential_quantized_activations = 0 self._quantizable_layer_nodes = quantizable_layer_nodes self._post_processing_marker_metatypes = post_processing_marker_metatypes + self._metatypes_to_ignore = metatypes_to_ignore + self._scales_unification_map = scales_unification_map def _filter_by_weight_ignored_target_scopes( self, quantizable_layer_nodes: List[QuantizableWeightedLayerNode], - weight_ignored_scopes: Dict[QuantizerGroup, List[str]], - weight_target_scopes: Dict[QuantizerGroup, List[str]], + weight_ignored_scopes: List[str], + weight_target_scopes: List[str], ) -> Dict[NNCFNodeName, List[QuantizerConfig]]: if quantizable_layer_nodes is None: return {} @@ -496,6 +465,7 @@ def _filter_by_weight_ignored_target_scopes( nncf_logger.debug(f"Ignored adding weight quantizer for: {node_name}") return weight_quantizable_node_names_vs_qconfigs + # pylint:disable=too-many-branches def run_on_ip_graph(self, ip_graph: InsertionPointGraph) -> QuantizationProposal: """ The main function to be used on an InsertionPointGraph to produce @@ -513,6 +483,10 @@ def run_on_ip_graph(self, ip_graph: InsertionPointGraph) -> QuantizationProposal """ self._num_potential_quantized_activations = 0 quant_prop_graph = QuantizerPropagationStateGraph(ip_graph, self._ignored_scopes, self._target_scopes) + if self._metatypes_to_ignore is not None: + for metatype in self._metatypes_to_ignore: + for node_key in quant_prop_graph.get_node_keys_by_metatype(metatype): + self._add_node_to_ignored(node_key, quant_prop_graph) if self._post_processing_marker_metatypes is not None: post_processing_node_locator = PostprocessingNodeLocator( quant_prop_graph, self._quantizable_layer_nodes, self._post_processing_marker_metatypes @@ -567,6 +541,11 @@ def run_on_ip_graph(self, ip_graph: InsertionPointGraph) -> QuantizationProposal def _add_node_to_ignored(self, node_key: str, quant_prop_graph: QuantizerPropagationStateGraph) -> None: quant_prop_graph.ignored_node_keys[node_key] = IgnoreReason.AUTOGENERATED quant_prop_graph.nodes[node_key][quant_prop_graph.IS_IN_IGNORED_SCOPES] = True + # If node has weights, also remove the weight quantizers + underlying_nncf_nodes = quant_prop_graph.op_node_keys_to_underlying_nodes_mapping[node_key] + for node in underlying_nncf_nodes: + if node.node_name in self._weight_quantizable_node_names_vs_qconfigs: + self._weight_quantizable_node_names_vs_qconfigs.pop(node.node_name) def _map_quantization_points_to_prop_quantizers( self, @@ -703,7 +682,7 @@ def propagation_step( # pylint:disable=too-many-branches # pylint:disable=too-many-statements curr_node_key = curr_prop_quantizer.current_location_node_key - curr_node = quant_prop_graph.nodes[curr_prop_quantizer.current_location_node_key] + curr_node = quant_prop_graph.nodes[curr_node_key] curr_node_type = 
curr_node[QuantizerPropagationStateGraph.NODE_TYPE_NODE_ATTR] assert QuantizerPropagationStateGraph.is_insertion_point(curr_node_type) @@ -751,7 +730,7 @@ def propagation_step( # only concat unified scale groups appear here unified_scale_grouped_paths = ( quant_prop_graph.get_paths_to_immediately_dominating_insertion_points_grouped_by_unified_scales( - curr_node_key, self._unified_scales_operation_set + curr_node_key, self._unified_scales_operation_set, self._scales_unification_map ) ) @@ -850,13 +829,7 @@ def set_allowed_quantization_types_for_operator_nodes( quant_det_id = node[QuantizerPropagationStateGraph.OPERATOR_METATYPE_NODE_ATTR] if quant_det_id is None: nncf_logger.debug(f"Unknown metatype for operator node: {node_key}") - trait = QuantizationTrait.QUANTIZATION_AGNOSTIC - elif quant_det_id is UnknownMetatype: - trait = QuantizationTrait.NON_QUANTIZABLE - else: - trait = self._operator_quantization_trait_map.get( - quant_det_id, QuantizationTrait.QUANTIZATION_AGNOSTIC - ) + trait = self._operator_quantization_trait_map.get(quant_det_id, QuantizationTrait.NON_QUANTIZABLE) node[QuantizerPropagationStateGraph.QUANTIZATION_TRAIT_NODE_ATTR] = trait if trait == QuantizationTrait.INPUTS_QUANTIZABLE: node[ @@ -886,7 +859,7 @@ def get_operator_quantization_traits_map(self) -> Dict[OperatorMetatype, Quantiz trait = default_trait break else: - trait = QuantizationTrait.QUANTIZATION_AGNOSTIC + trait = QuantizationTrait.NON_QUANTIZABLE else: trait = QuantizationTrait.INPUTS_QUANTIZABLE retval[op_meta] = trait @@ -901,11 +874,11 @@ def _get_trait_for_op_meta_not_specified_in_hw_config(self, op_meta: OperatorMet trait = default_trait break else: - trait = QuantizationTrait.QUANTIZATION_AGNOSTIC + trait = QuantizationTrait.NON_QUANTIZABLE nncf_logger.debug( f"Operation metatype {op_meta} encountered, but it has no default " f"quantization trait and the HW config entry is not given for it - " - f"assuming quantization-agnostic." + f"assuming non-quantizable." ) else: # There IS a valid HW config name for the metatype, but it is deliberately not specified @@ -1106,14 +1079,9 @@ def _filter_qconfigs_according_to_scope( local_constraints = local_constraints.get_updated_constraints(scope_constraints) if self._hw_config is not None: - try: - constrained_config_list = local_constraints.constrain_qconfig_list(qconf_list) - except RuntimeError as e: - err_msg = "Quantization parameter constraints specified in NNCF config are incompatible with HW " - err_msg += "capabilities as specified in HW config type '{}'. 
".format(self._hw_config.target_device) - err_msg += "First conflicting quantizer location: " - err_msg += nncf_node_name - raise RuntimeError(err_msg) from e + constrained_config_list = local_constraints.constrain_qconfig_list( + nncf_node_name, self._hw_config.target_device, qconf_list + ) else: constrained_config_list = [local_constraints.apply_constraints_to(qconfig) for qconfig in qconf_list] @@ -1141,9 +1109,8 @@ def _setup_initial_quantizers_for_operator_node( and metatype not in OUTPUT_NOOP_METATYPES ): return - quant_det_id = node[QuantizerPropagationStateGraph.OPERATOR_METATYPE_NODE_ATTR] - qconf_list = self.get_allowed_quantizer_configs_for_operator(quant_det_id) - if quant_det_id in OUTPUT_NOOP_METATYPES: + qconf_list = self.get_allowed_quantizer_configs_for_operator(metatype) + if metatype in OUTPUT_NOOP_METATYPES: qconf_list = deepcopy(self.default_global_qconfig_list) assert qconf_list is not None @@ -1153,7 +1120,7 @@ def _setup_initial_quantizers_for_operator_node( else: qconf_list = [deepcopy(DEFAULT_QUANTIZER_CONFIG)] - is_unified_scale = quant_det_id in self._unified_scales_operation_set + is_unified_scale = metatype in self._unified_scales_operation_set if is_unified_scale: # Filtering out the per-channel cases in the unified scale scenario. # In order to support unified per-channel scales, we will need to handle a situation @@ -1166,7 +1133,7 @@ def _setup_initial_quantizers_for_operator_node( # 2. transpose input tensors to the quantization modules on the fly to accommodate scale, # or vice versa, transpose scale to accommodate shape; need to handle exporting as well per_tensor_qconf_list = list(filter(lambda x: x.per_channel is False, qconf_list)) - op_meta_name = quant_det_id.__class__.__name__ + op_meta_name = metatype.__class__.__name__ if len(per_tensor_qconf_list) != len(qconf_list): if not per_tensor_qconf_list: raise RuntimeError( @@ -1198,9 +1165,7 @@ def _setup_initial_quantizers_for_operator_node( if not edge[QuantizerPropagationStateGraph.IS_INTEGER_PATH_EDGE_ATTR]: pred_ip_key_vs_qconf_dict[pred_ip_key] = qconf_list else: - nncf_logger.debug( - f"Detected integer input {pred_ip_key} - won't set up " f"a propagating quantizer for it" - ) + nncf_logger.debug(f"Detected integer input {pred_ip_key} - won't set up a propagating quantizer for it") if not pred_ip_key_vs_qconf_dict: # All inputs to the operator were integer @@ -1258,6 +1223,10 @@ def check_branching_transition( that branches downwards. :return: The TransitionStatus indicating in which fashion the transition should occur. 
""" + is_dominating_outputs = quant_prop_graph.is_branching_node_dominating_outputs(branching_node_key) + if is_dominating_outputs and not self._quantize_outputs: + return TransitionStatus.SHOULD_NOT_TRANSITION + dom_op_node_keys = quant_prop_graph.get_non_quant_agnostic_op_nodes_immediately_dominated_by_node( branching_node_key ) diff --git a/nncf/common/quantization/quantizer_removal.py b/nncf/common/quantization/quantizer_removal.py index fad84deddd4..5a698e49e4d 100644 --- a/nncf/common/quantization/quantizer_removal.py +++ b/nncf/common/quantization/quantizer_removal.py @@ -110,16 +110,16 @@ def revert_operations_to_floating_point_precision( transformation_layout.register(command_creator.create_command_to_remove_quantizer(node)) for node in operations: - original_bias = node.data.get("original_bias", None) + original_bias = node.attributes.get("original_bias", None) if original_bias is not None: transformation_layout.register( command_creator.create_command_to_update_bias(node, original_bias, quantized_model_graph) ) - if node.layer_attributes is not None: + if node.layer_attributes and node.layer_attributes.constant_attributes is not None: weight_port_ids = node.layer_attributes.get_const_port_ids() for port_id in weight_port_ids: - original_weight = node.data.get(f"original_weight.{port_id}", None) + original_weight = node.attributes.get(f"original_weight.{port_id}", None) if original_weight is not None: transformation_layout.register( command_creator.create_command_to_update_weight(node, original_weight, port_id) diff --git a/nncf/common/quantization/structs.py b/nncf/common/quantization/structs.py index 95e5ea3e9d8..f0eda4e8f6a 100644 --- a/nncf/common/quantization/structs.py +++ b/nncf/common/quantization/structs.py @@ -18,6 +18,7 @@ from nncf.common.utils.api_marker import api from nncf.config.schemata.defaults import QUANTIZATION_BITS from nncf.config.schemata.defaults import QUANTIZATION_PER_CHANNEL +from nncf.parameters import TargetDevice @api() @@ -224,7 +225,9 @@ def from_config_dict(cls, config_dict: Dict) -> "QuantizationConstraints": signedness_to_force=config_dict.get("signed"), ) - def constrain_qconfig_list(self, quantizer_config_list: List[QuantizerConfig]) -> List[QuantizerConfig]: + def constrain_qconfig_list( + self, node_name: NNCFNodeName, target_device: TargetDevice, quantizer_config_list: List[QuantizerConfig] + ) -> List[QuantizerConfig]: assert quantizer_config_list is not None constrained_quantizer_config_list = list(filter(self.is_config_compatible, quantizer_config_list)) @@ -233,7 +236,10 @@ def constrain_qconfig_list(self, quantizer_config_list: List[QuantizerConfig]) - # It means that the qconfig from overrides must be selected as final config # even if it is not valid in hw-config. if not constrained_quantizer_config_list: - raise RuntimeError() + err_msg = f"Quantization parameter constraints specified in NNCF config are incompatible \ + with HW capabilities as specified in HW config type '{target_device}'. 
\ + First conflicting quantizer location: {node_name}" + raise ValueError(err_msg) return constrained_quantizer_config_list diff --git a/nncf/common/scopes.py b/nncf/common/scopes.py index 2359b0d2c06..736ff07ab02 100644 --- a/nncf/common/scopes.py +++ b/nncf/common/scopes.py @@ -15,6 +15,7 @@ from nncf.common.graph import NNCFGraph from nncf.common.graph import NNCFNode from nncf.common.graph import NNCFNodeName +from nncf.common.logging import nncf_logger from nncf.common.quantization.structs import QuantizerId from nncf.scopes import IgnoredScope from nncf.scopes import convert_ignored_scope_to_list @@ -103,6 +104,7 @@ def check_scopes_in_graph( graph: NNCFGraph, ignored_scopes: Union[IgnoredScope, List[str]], target_scopes: Optional[List[str]] = None, + validate_scopes: bool = True, ) -> None: """ Raise RuntimeError in case if ignored/target scope names do not match model graph. @@ -111,6 +113,8 @@ def check_scopes_in_graph( :param ignored_scopes: The instance of IgnoredScope or a list of strings specifying a denylist for the serializable_id. :param target_scopes: A list of strings specifying an allowlist for the serializable_id. + :param validate_scopes: If set to True, then a RuntimeError will be raised if the names of the + ignored/target scopes do not match the names of the scopes in the model graph. """ node_list = graph.get_all_nodes() not_matched_ignored_scopes = get_not_matched_scopes(ignored_scopes, node_list) @@ -132,4 +136,6 @@ def check_scopes_in_graph( "scopes in terms of the names there." ) - raise RuntimeError(err_message) + if validate_scopes: + raise RuntimeError(err_message) + nncf_logger.info(err_message) diff --git a/nncf/common/sparsity/statistics.py b/nncf/common/sparsity/statistics.py index f1442dd539d..6d1887dc04c 100644 --- a/nncf/common/sparsity/statistics.py +++ b/nncf/common/sparsity/statistics.py @@ -166,7 +166,7 @@ def to_str(self) -> str: ) pretty_string = ( - f"{self.model_statistics.to_str()}\n\n" f"Statistics of the RB-sparsity algorithm:\n{algorithm_string}" + f"{self.model_statistics.to_str()}\n\n Statistics of the RB-sparsity algorithm:\n{algorithm_string}" ) return pretty_string diff --git a/nncf/common/statistics.py b/nncf/common/statistics.py index 4883a75f5a6..de9be01125e 100644 --- a/nncf/common/statistics.py +++ b/nncf/common/statistics.py @@ -124,7 +124,7 @@ def register(self, algorithm_name: str, stats: Statistics): ] if algorithm_name not in available_algorithms: raise ValueError( - "Can not register statistics for the algorithm. " f"Unknown name of the algorithm: {algorithm_name}." + f"Can not register statistics for the algorithm. Unknown name of the algorithm: {algorithm_name}." ) self._storage[algorithm_name] = stats diff --git a/nncf/common/strip.py b/nncf/common/strip.py new file mode 100644 index 00000000000..6de10c6b98d --- /dev/null +++ b/nncf/common/strip.py @@ -0,0 +1,39 @@ +# Copyright (c) 2023 Intel Corporation +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ + +from typing import TypeVar + +from nncf.common.utils.api_marker import api +from nncf.common.utils.backend import BackendType +from nncf.common.utils.backend import get_backend + +TModel = TypeVar("TModel") + + +@api(canonical_alias="nncf.strip") +def strip(model: TModel, do_copy: bool = True) -> TModel: + """ + Returns the model object with as much custom NNCF additions as possible removed + while still preserving the functioning of the model object as a compressed model. + + :param model: The compressed model. + :param do_copy: If True (default), will return a copy of the currently associated model object. If False, + will return the currently associated model object "stripped" in-place. + :return: The stripped model. + """ + model_backend = get_backend(model) + if model_backend == BackendType.TORCH: + from nncf.torch import strip as strip_pt + + return strip_pt(model, do_copy) + + raise RuntimeError(f"Method `strip` does not support for {model_backend.value} backend.") diff --git a/nncf/common/tensor_statistics/aggregator.py b/nncf/common/tensor_statistics/aggregator.py index 7840bb3ffeb..7066fccf8b0 100644 --- a/nncf/common/tensor_statistics/aggregator.py +++ b/nncf/common/tensor_statistics/aggregator.py @@ -15,8 +15,8 @@ from tqdm import tqdm -from nncf.common.factory import EngineFactory -from nncf.common.factory import ModelTransformerFactory +from nncf.common import factory +from nncf.common.graph.graph import NNCFGraph from nncf.common.graph.transformations.layout import TransformationLayout from nncf.common.tensor import NNCFTensor from nncf.common.tensor_statistics.statistic_point import StatisticPointsContainer @@ -33,22 +33,26 @@ class StatisticsAggregator(ABC): def __init__(self, dataset: Dataset): self.dataset = dataset - self.stat_subset_size = 0 + self.stat_subset_size = None self.statistic_points = StatisticPointsContainer() - def collect_statistics(self, model: TModel) -> None: + def collect_statistics(self, model: TModel, graph: NNCFGraph) -> None: """ Collects statistics for registered StatisticPoints. The statistics are stored in self.statistic_points. - :param model: backend-specific model instance + :param model: Backend-specific model instance. + :param graph: Model graph. 
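+
+        A minimal illustrative call sequence (the `aggregator` name here is only a placeholder):
+
+            aggregator.register_statistic_points(statistic_points)
+            aggregator.collect_statistics(model, graph)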
""" - model_transformer = ModelTransformerFactory.create(model) + if not self.statistic_points: + return - merged_statistics = self._get_merged_statistic_points(self.statistic_points, model) + model_transformer = factory.ModelTransformerFactory.create(model) + + merged_statistics = self._get_merged_statistic_points(self.statistic_points, model, graph) transformation_layout = self._get_transformation_layout_extra_outputs(merged_statistics) model_with_outputs = model_transformer.transform(transformation_layout) - engine = EngineFactory.create(model_with_outputs) + engine = factory.EngineFactory.create(model_with_outputs) for input_data in tqdm( islice(self.dataset.get_inference_data(), self.stat_subset_size), @@ -74,7 +78,10 @@ def register_statistic_points(self, statistic_points: StatisticPointsContainer) for _statistic_point in _statistic_points: for _, tensor_collectors in _statistic_point.algorithm_to_tensor_collectors.items(): for tensor_collector in tensor_collectors: - self.stat_subset_size = max(self.stat_subset_size, tensor_collector.num_samples) + if self.stat_subset_size is None: + self.stat_subset_size = tensor_collector.num_samples + elif tensor_collector.num_samples is not None: + self.stat_subset_size = max(self.stat_subset_size, tensor_collector.num_samples) @abstractmethod def _register_statistics(self, outputs: Dict[str, NNCFTensor], statistic_points: StatisticPointsContainer) -> None: @@ -99,7 +106,7 @@ def _get_transformation_layout_extra_outputs( @staticmethod @abstractmethod def _get_merged_statistic_points( - statistic_points: StatisticPointsContainer, model: TModel + statistic_points: StatisticPointsContainer, model: TModel, graph: NNCFGraph ) -> StatisticPointsContainer: """ Creates a new StatisticPointContainer that has no duplicated tensor collectors for one @@ -109,6 +116,7 @@ def _get_merged_statistic_points( :param statistic_points: Registered statistic points with possible tensor collectors duplicates. :param model: Backend-specific target model. + :param graph: Model graph. :return: Merged statistic points container bounded with given statistic point container. """ diff --git a/nncf/common/tensor_statistics/collectors.py b/nncf/common/tensor_statistics/collectors.py index a91d1d27307..907ae30fec8 100644 --- a/nncf/common/tensor_statistics/collectors.py +++ b/nncf/common/tensor_statistics/collectors.py @@ -486,9 +486,9 @@ def _shape(self): return self._all_shapes[0] -class BatchStatisticCollector(OfflineTensorStatisticCollector): +class RawStatisticCollector(OfflineTensorStatisticCollector): """ - Collects tensor samples, where each tensor is averaged along the batch axis (and only that axis). + Collects tensor samples, where each tensor represented in raw format. Each sample stays available for usage in further stages of the algorithm. """ @@ -498,7 +498,6 @@ def __init__(self, num_samples: Optional[int] = None) -> None: the number of samples that will be processed. 
""" super().__init__(num_samples=num_samples) - self._tensor_processor = self._get_processor() self._all_values = [] @staticmethod @@ -507,7 +506,7 @@ def _get_processor(): pass def _register_input_common(self, x: NNCFTensor): - self._all_values.append(self._tensor_processor.batch_mean(x).tensor) + self._all_values.append(x.tensor) def _reset(self): self._all_values.clear() diff --git a/nncf/common/tensor_statistics/statistic_point.py b/nncf/common/tensor_statistics/statistic_point.py index 5b01d19f950..3cf533ac7b6 100644 --- a/nncf/common/tensor_statistics/statistic_point.py +++ b/nncf/common/tensor_statistics/statistic_point.py @@ -25,9 +25,7 @@ class StatisticPoint: algorithm implies on what algorithm nedeed this statistics. """ - def __init__( - self, target_point: TargetPoint, tensor_collector: TensorStatisticCollectorBase, algorithm: "Algorithm" - ): + def __init__(self, target_point: TargetPoint, tensor_collector: TensorStatisticCollectorBase, algorithm: str): self.target_point = target_point self.algorithm_to_tensor_collectors = {algorithm: [tensor_collector]} @@ -65,11 +63,12 @@ def add_statistic_point(self, statistic_point: StatisticPoint) -> None: _statistic_point.algorithm_to_tensor_collectors[algorithm].extend( statistic_point.algorithm_to_tensor_collectors[algorithm] ) - return - _statistic_point.algorithm_to_tensor_collectors[ - algorithm - ] = statistic_point.algorithm_to_tensor_collectors[algorithm] - return + else: + _statistic_point.algorithm_to_tensor_collectors[ + algorithm + ] = statistic_point.algorithm_to_tensor_collectors[algorithm] + return + self.data[target_node_name].append(statistic_point) def iter_through_statistic_points_in_target_node( @@ -88,7 +87,7 @@ def iter_through_statistic_points_in_target_node( def get_tensor_collectors( self, filter_fn: Optional[Callable[[StatisticPoint], bool]] = None - ) -> Generator[Tuple["Algorithm", StatisticPoint, TensorStatisticCollectorBase], None, None]: + ) -> Generator[Tuple[str, StatisticPoint, TensorStatisticCollectorBase], None, None]: """ Returns iterable through all tensor collectors. @@ -114,7 +113,7 @@ def get_algo_statistics_for_node( self, target_node_name: str, filter_fn: Callable[[StatisticPoint], bool], - algorithm: "Algorithm", + algorithm: str, ) -> Generator[TensorStatisticCollectorBase, None, None]: """ Returns iterable through all statistic collectors in node with target_node_name. diff --git a/nncf/common/tensor_statistics/statistics.py b/nncf/common/tensor_statistics/statistics.py index 194c69ba56e..0f6d0d1aad3 100644 --- a/nncf/common/tensor_statistics/statistics.py +++ b/nncf/common/tensor_statistics/statistics.py @@ -52,7 +52,7 @@ class MeanTensorStatistic(TensorStatistic): def __init__(self, mean_values, shape): """ - :param mean_values: Сollected mean per-axis values. + :param mean_values: Collected mean per-axis values. :param shape: The shape of the collected statistics. """ self.mean_values = mean_values @@ -86,18 +86,18 @@ def __eq__(self, other: "PercentileTensorStatistic", rtol=1e-9) -> bool: return True -class BatchTensorStatistic(TensorStatistic): +class RawTensorStatistic(TensorStatistic): VALUES_STATS = "values" """ - Base class for the statistics that collects as mean per-batch + Base class for the raw statistics, without any aggregation. """ def __init__(self, values): """ - :param values: Сollected per-batch values. + :param values: Collected raw values. 
""" self.values = values - def __eq__(self, other: "BatchTensorStatistic") -> bool: + def __eq__(self, other: "RawTensorStatistic") -> bool: return self.tensor_eq(self.values, other.values) diff --git a/nncf/common/utils/backend.py b/nncf/common/utils/backend.py index fcd315dee22..b996772df38 100644 --- a/nncf/common/utils/backend.py +++ b/nncf/common/utils/backend.py @@ -8,9 +8,10 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. +import importlib from copy import deepcopy from enum import Enum -from typing import TypeVar +from typing import List, TypeVar TModel = TypeVar("TModel") @@ -22,58 +23,115 @@ class BackendType(Enum): OPENVINO = "OpenVINO" -def get_backend(model) -> BackendType: +def get_available_backends() -> List[BackendType]: """ - Returns the NNCF backend name string inferred from the type of the model object passed into this function. + Returns a list of available backends. - :param model: The framework-specific object representing the trainable model. - :return: A BackendType representing the correct NNCF backend to be used when working with the framework. + :return: A list of available backends. + """ + frameworks = [ + ("torch", BackendType.TORCH), + ("tensorflow", BackendType.TENSORFLOW), + ("onnx", BackendType.ONNX), + ("openvino.runtime", BackendType.OPENVINO), + ] + + available_backends = [] + for module_name, backend in frameworks: + try: + importlib.import_module(module_name) + available_backends.append(backend) + except ImportError: + pass + + return available_backends + + +def is_torch_model(model: TModel) -> bool: + """ + Returns True if the model is an instance of torch.nn.Module, otherwise False. + + :param model: A target model. + :return: True if the model is an instance of torch.nn.Module, otherwise False. + """ + import torch + + return isinstance(model, torch.nn.Module) + + +def is_tensorflow_model(model: TModel) -> bool: + """ + Returns True if the model is an instance of tensorflow.Module, otherwise False. + + :param model: A target model. + :return: True if the model is an instance of tensorflow.Module, otherwise False. + """ + import tensorflow + + return isinstance(model, tensorflow.Module) + + +def is_onnx_model(model: TModel) -> bool: """ - available_frameworks = [] - try: - import torch + Returns True if the model is an instance of onnx.ModelProto, otherwise False. - available_frameworks.append("PyTorch") - except ImportError: - torch = None + :param model: A target model. + :return: True if the model is an instance of onnx.ModelProto, otherwise False. + """ + import onnx - try: - import tensorflow + return isinstance(model, onnx.ModelProto) - available_frameworks.append("Tensorflow") - except ImportError: - tensorflow = None - try: - import onnx +def is_openvino_model(model: TModel) -> bool: + """ + Returns True if the model is an instance of openvino.runtime.Model, otherwise False. - available_frameworks.append("ONNX") - except ImportError: - onnx = None + :param model: A target model. + :return: True if the model is an instance of openvino.runtime.Model, otherwise False. 
+ """ + import openvino.runtime as ov - try: - import openvino.runtime as ov + return isinstance(model, ov.Model) - available_frameworks.append("OpenVINO") - except ImportError: - ov = None - if torch is not None and isinstance(model, torch.nn.Module): +def is_openvino_compiled_model(model: TModel) -> bool: + """ + Returns True if the model is an instance of openvino.runtime.CompiledModel, otherwise False. + + :param model: A target model. + :return: True if the model is an instance of openvino.runtime.CompiledModel, otherwise False. + """ + import openvino.runtime as ov + + return isinstance(model, ov.CompiledModel) + + +def get_backend(model: TModel) -> BackendType: + """ + Returns the NNCF backend name string inferred from the type of the model object passed into this function. + + :param model: The framework-specific model. + :return: A BackendType representing the correct NNCF backend to be used when working with the framework. + """ + available_backends = get_available_backends() + + if BackendType.TORCH in available_backends and is_torch_model(model): return BackendType.TORCH - if tensorflow is not None and isinstance(model, tensorflow.Module): + if BackendType.TENSORFLOW in available_backends and is_tensorflow_model(model): return BackendType.TENSORFLOW - if onnx is not None and isinstance(model, onnx.ModelProto): + if BackendType.ONNX in available_backends and is_onnx_model(model): return BackendType.ONNX - if ov is not None and isinstance(model, ov.Model): + if BackendType.OPENVINO in available_backends and is_openvino_model(model): return BackendType.OPENVINO raise RuntimeError( "Could not infer the backend framework from the model type because " "the framework is not available or the model type is unsupported. " - "The available frameworks found: {}.".format(", ".join(available_frameworks)) + "The available frameworks found: {}.".format(", ".join([b.value for b in available_backends])) ) @@ -82,7 +140,7 @@ def copy_model(model: TModel) -> TModel: Function to create copy of the backend-specific model. :param model: the backend-specific model instance - :return: Copy of the backend-specific model instance + :return: Copy of the backend-specific model instance. """ model_backend = get_backend(model) if model_backend == BackendType.OPENVINO: diff --git a/nncf/common/utils/dot_file_rw.py b/nncf/common/utils/dot_file_rw.py index ff896e824f1..a67dd9d5df8 100644 --- a/nncf/common/utils/dot_file_rw.py +++ b/nncf/common/utils/dot_file_rw.py @@ -1,3 +1,14 @@ +# Copyright (c) 2023 Intel Corporation +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ import pathlib import networkx as nx diff --git a/nncf/common/utils/os.py b/nncf/common/utils/os.py index 6e7775026b1..fb6f146304b 100644 --- a/nncf/common/utils/os.py +++ b/nncf/common/utils/os.py @@ -12,6 +12,8 @@ from contextlib import contextmanager from pathlib import Path +import psutil + # pylint: disable=W1514 @contextmanager @@ -37,3 +39,28 @@ def is_windows(): def is_linux(): return "linux" in sys.platform + + +def get_available_cpu_count(logical: bool = True) -> int: + """ + Return the number of CPUs in the system. + + :param logical: If False return the number of physical cores only (e.g. hyper thread CPUs are excluded), + otherwise number of logical cores. Defaults, True. + :return: Number of CPU. + """ + try: + num_cpu = psutil.cpu_count(logical=logical) + return num_cpu if num_cpu is not None else 1 + except Exception: # pylint: disable=broad-except + return 1 + + +def get_available_memory_amount() -> int: + """ + :return: Available memory amount (bytes) + """ + try: + return psutil.virtual_memory()[1] + except Exception: # pylint: disable=broad-except + return 0 diff --git a/nncf/common/utils/timer.py b/nncf/common/utils/timer.py index 2d273e67d50..6e87676ce07 100644 --- a/nncf/common/utils/timer.py +++ b/nncf/common/utils/timer.py @@ -20,8 +20,9 @@ def timer(): """ Context manager to measure execution time. """ - start_time = time.perf_counter() - yield - elapsed_time = time.perf_counter() - start_time + start_time = end_time = time.perf_counter() + yield lambda: end_time - start_time + end_time = time.perf_counter() + elapsed_time = end_time - start_time time_string = time.strftime("%H:%M:%S", time.gmtime(elapsed_time)) nncf_logger.info(f"Elapsed Time: {time_string}") diff --git a/nncf/config/config.py b/nncf/config/config.py index 8ed3e8422d2..72cfdb28997 100644 --- a/nncf/config/config.py +++ b/nncf/config/config.py @@ -83,7 +83,7 @@ def has_extra_struct(self, struct_cls: Type[NNCFExtraConfigStruct]) -> NNCFExtra def get_all_extra_structs_for_copy(self) -> List[NNCFExtraConfigStruct]: return list(self.__nncf_extra_structs.values()) - def get_redefinable_global_param_value_for_algo(self, param_name: str, algo_name: str) -> Optional: + def get_redefinable_global_param_value_for_algo(self, param_name: str, algo_name: str) -> Optional[str]: """ Some parameters can be specified both on the global NNCF config .json level (so that they apply to all algos), and at the same time overridden in the algorithm-specific section of the .json. @@ -130,7 +130,7 @@ def validate(loaded_json): nncf_logger.error("Invalid NNCF config supplied!") absolute_path_parts = [str(x) for x in e.absolute_path] if not NNCFConfig._is_path_to_algorithm_name(absolute_path_parts): - e.message += f"\nRefer to the NNCF config schema documentation at " f"{SCHEMA_VISUALIZATION_URL}" + e.message += f"\nRefer to the NNCF config schema documentation at {SCHEMA_VISUALIZATION_URL}" e.schema = "*schema too long for stdout display*" raise e diff --git a/nncf/config/extractors.py b/nncf/config/extractors.py index 5f6f2327661..da7ac722ef8 100644 --- a/nncf/config/extractors.py +++ b/nncf/config/extractors.py @@ -70,9 +70,7 @@ def extract_algo_specific_config(config: NNCFConfig, algo_name_to_match: str) -> f"algo {algo_name_to_match} in the NNCF config!" ) if not matches: - raise RuntimeError( - f"Did not find an algorithm configuration for " f"algo {algo_name_to_match} in the NNCF config!" 
- ) + raise RuntimeError(f"Did not find an algorithm configuration for algo {algo_name_to_match} in the NNCF config!") return next(iter(matches)) diff --git a/nncf/config/schemata/algo/filter_pruning.py b/nncf/config/schemata/algo/filter_pruning.py index 155909056b6..3bf94069835 100644 --- a/nncf/config/schemata/algo/filter_pruning.py +++ b/nncf/config/schemata/algo/filter_pruning.py @@ -8,6 +8,7 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. + from nncf.config.definitions import FILTER_PRUNING_ALGO_NAME_IN_CONFIG from nncf.config.definitions import ONLINE_DOCS_ROOT from nncf.config.schemata.basic import BOOLEAN diff --git a/nncf/config/schemata/common/targeting.py b/nncf/config/schemata/common/targeting.py index 8a76f6d2ebd..e9962ce5711 100644 --- a/nncf/config/schemata/common/targeting.py +++ b/nncf/config/schemata/common/targeting.py @@ -8,9 +8,11 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. +from nncf.config.schemata.basic import BOOLEAN from nncf.config.schemata.basic import make_string_or_array_of_strings_schema from nncf.config.schemata.basic import with_attributes from nncf.config.schemata.common.initialization import BATCHNORM_ADAPTATION_SCHEMA +from nncf.config.schemata.defaults import VALIDATE_SCOPES IGNORED_SCOPES_DESCRIPTION = ( "A list of model control flow graph node scopes to be ignored for this " @@ -20,6 +22,10 @@ "A list of model control flow graph node scopes to be considered for this operation" " - functions as a 'denylist'. Optional." ) +VALIDATE_SCOPES_DESCRIPTION = ( + "If set to True, then a RuntimeError will be raised if the names of the " + "ignored/target scopes do not match the names of the scopes in the model graph." +) SCOPING_PROPERTIES = { "ignored_scopes": with_attributes( make_string_or_array_of_strings_schema(), @@ -39,6 +45,11 @@ "UNet/ModuleList\\[up_path\\].*", ], ), + "validate_scopes": with_attributes( + BOOLEAN, + description=VALIDATE_SCOPES_DESCRIPTION, + default=VALIDATE_SCOPES, + ), } GENERIC_INITIALIZER_SCHEMA = { "type": "object", diff --git a/nncf/config/schemata/defaults.py b/nncf/config/schemata/defaults.py index 5f32464b2f9..a55d098e7cf 100644 --- a/nncf/config/schemata/defaults.py +++ b/nncf/config/schemata/defaults.py @@ -87,3 +87,5 @@ AA_LR_REDUCTION_FACTOR = 0.5 AA_MINIMAL_COMPRESSION_RATE_STEP = 0.025 AA_MAXIMAL_TOTAL_EPOCHS = 10000 + +VALIDATE_SCOPES = True diff --git a/nncf/experimental/common/graph/netron.py b/nncf/experimental/common/graph/netron.py index c98b0420caf..95c84c112ac 100644 --- a/nncf/experimental/common/graph/netron.py +++ b/nncf/experimental/common/graph/netron.py @@ -1,15 +1,14 @@ -""" - Copyright (c) 2022 Intel Corporation - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - http://www.apache.org/licenses/LICENSE-2.0 - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. 
-""" +# Copyright (c) 2023 Intel Corporation +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + # Since we are not reading XML, but creating it, the package security message is irrelevant import xml.etree.ElementTree as ET # nosec from typing import Callable, Dict, List, Optional, Tuple @@ -55,14 +54,14 @@ def __init__( self, node_id: str, name: str, - type: str, + node_type: str, attrs: Optional[Dict[str, str]] = None, inputs: Optional[List[PortDesc]] = None, outputs: Optional[List[PortDesc]] = None, ): self.node_id = node_id self.name = name - self.type = type + self.type = node_type if attrs is None: attrs = {} self.attrs = attrs @@ -74,9 +73,9 @@ def as_xml_element(self) -> ET.Element: ET.SubElement(node, Tags.DATA, self.attrs) if self.inputs: - input = ET.SubElement(node, Tags.INPUT) + input_ = ET.SubElement(node, Tags.INPUT) for port in self.inputs: - input.append(port.as_xml_element()) + input_.append(port.as_xml_element()) if self.outputs: output = ET.SubElement(node, Tags.OUTPUT) @@ -156,7 +155,7 @@ def get_graph_desc( NodeDesc( node_id=str(node.node_id), name=node.node_name, - type=node.node_type.title(), + node_type=node.node_type.title(), attrs=get_attributes_fn(node), inputs=inputs, outputs=outputs, diff --git a/nncf/experimental/common/pruning/nodes_grouping.py b/nncf/experimental/common/pruning/nodes_grouping.py index 4a297923810..e0fe392b4d7 100644 --- a/nncf/experimental/common/pruning/nodes_grouping.py +++ b/nncf/experimental/common/pruning/nodes_grouping.py @@ -85,14 +85,14 @@ def get_pruning_groups( root_group = PropagationGroup(block=PruningBlock(), producers={ProducerInfo(node.node_id, pruning_dim)}) mask = PropagationMask(dim_groups_map={target_output_dim_for_compression: [root_group]}) roots[node.node_id] = root_group - node.data["output_mask"] = mask + node.attributes["output_mask"] = mask def get_attributes_fn(node: NNCFNode) -> Dict[str, Any]: result = {"metatype": str(node.metatype.name), "node_id": str(node.node_id)} if node.layer_attributes: result.update(map(lambda pair: (pair[0], str(pair[1])), node.layer_attributes.__dict__.items())) - if "output_mask" in node.data: - output_mask = node.data["output_mask"] + if "output_mask" in node.attributes: + output_mask = node.attributes["output_mask"] if output_mask: result["output_mask"] = str(output_mask) return result diff --git a/nncf/experimental/common/pruning/operations.py b/nncf/experimental/common/pruning/operations.py index f25d4b8ac90..b2617ed0acf 100644 --- a/nncf/experimental/common/pruning/operations.py +++ b/nncf/experimental/common/pruning/operations.py @@ -86,7 +86,7 @@ class InputPruningOp(BasePruningOp): def mask_propagation( cls, node: NNCFNode, graph: NNCFGraph, tensor_processor: Type[NNCFPruningBaseTensorProcessor] ) -> None: - node.data["output_mask"] = None + node.attributes["output_mask"] = None class OutputPruningOp(BasePruningOp): @@ -94,7 +94,7 @@ class OutputPruningOp(BasePruningOp): def mask_propagation( cls, node: NNCFNode, graph: NNCFGraph, tensor_processor: Type[NNCFPruningBaseTensorProcessor] ) -> 
None: - node.data["output_mask"] = None + node.attributes["output_mask"] = None input_masks = get_input_masks(node, graph) cls.invalidate_masks(input_masks) @@ -115,7 +115,7 @@ def mask_propagation( input_masks = get_input_masks(node, graph) assert len(input_masks) in [1, 2] is_input_mask_empty_map = map(not_, input_masks) - output_mask = node.data.get("output_mask", None) + output_mask = node.attributes.get("output_mask", None) input_tensors_shapes = [x.tensor_shape for x in graph.get_input_edges(node)] node_id = node.node_id if all(is_input_mask_empty_map): @@ -133,7 +133,7 @@ def mask_propagation( elif len(input_masks) == 2: output_mask = cls._handle_two_inputs(input_masks, input_tensors_shapes, node_id) - node.data["output_mask"] = output_mask + node.attributes["output_mask"] = output_mask @staticmethod def _handle_single_input( @@ -221,7 +221,7 @@ def mask_propagation( if cls.accept_pruned_input(node): identity_mask_propagation(node, graph) else: - node.data["output_mask"] = None + node.attributes["output_mask"] = None class ElementwisePruningOp(BasePruningOp): @@ -230,7 +230,7 @@ def mask_propagation( cls, node: NNCFNode, graph: NNCFGraph, tensor_processor: Type[NNCFPruningBaseTensorProcessor] ) -> None: input_masks = get_input_masks(node, graph) - node.data["output_mask"] = cls._get_output_mask(input_masks) + node.attributes["output_mask"] = cls._get_output_mask(input_masks) @classmethod def _get_output_mask(cls, input_masks: List[Optional[PropagationMask]]) -> Optional[PropagationMask]: @@ -239,7 +239,7 @@ def _get_output_mask(cls, input_masks: List[Optional[PropagationMask]]) -> Optio output_mask = None if len(input_masks) == 1: nncf_logger.warning( - f"ElementWise with a single input is not properly supported. " + "ElementWise with a single input is not properly supported. " "The second input might be a constant without node in the graph. " "The constant should be in the graph or in the node attributes. " "It's also should be pruned in accordance with an input mask. 
" @@ -284,7 +284,7 @@ def mask_propagation( input_masks = get_input_masks(node, graph) assert len(input_masks) == 1 input_mask = input_masks[0] - node.data["output_mask"] = cls._get_output_mask(input_mask, node, graph) + node.attributes["output_mask"] = cls._get_output_mask(input_mask, node, graph) @classmethod def _get_output_mask( @@ -371,7 +371,8 @@ def mask_propagation( # not affected by split groups are propagated further output_mask.dim_groups_map[dim] = groups else: - # invalidate groups, that assigned to the removed dimension or to the dimension that have 1 channel + # invalidate groups, that assigned to the removed dimension or + # to the dimension that have 1 channel for group in groups: group.invalidate() else: @@ -379,7 +380,7 @@ def mask_propagation( "symbolic mask propagation for split by prune dimension is not implemented, " "just propagate further for now" ) - node.data["output_mask"] = output_mask + node.attributes["output_mask"] = output_mask class ReshapeMode(Enum): @@ -453,7 +454,7 @@ def mask_propagation( grouping[in_map[in_idx][0]].extend(groups) output_mask.dim_groups_map = dict(grouping) - node.data["output_mask"] = output_mask + node.attributes["output_mask"] = output_mask @staticmethod def _can_reach_number_by_multiply(number_to_reach: int, array: List[int], start_idx: int) -> Tuple[bool, int]: @@ -677,7 +678,7 @@ def mask_propagation( assert len(input_masks) == 1 input_mask = input_masks[0] if not input_mask: - node.data["output_mask"] = None + node.attributes["output_mask"] = None return if isinstance(node.layer_attributes, TransposeLayerAttributes): @@ -697,7 +698,7 @@ def mask_propagation( dim_groups_map={new_idx: input_mask.dim_groups_map[old_idx] for old_idx, new_idx in idx_map} ) - node.data["output_mask"] = output_mask + node.attributes["output_mask"] = output_mask class StopMaskForwardPruningOp(BasePruningOp): @@ -707,7 +708,7 @@ def mask_propagation( ) -> None: input_masks = get_input_masks(node, graph) cls.invalidate_masks(input_masks) - node.data["output_mask"] = None + node.attributes["output_mask"] = None class ExpandAsPruningOp(BasePruningOp): @@ -718,32 +719,32 @@ def mask_propagation( input_edges = graph.get_input_edges(node) assert len(input_edges) == 2, "expand should always have 2 inputs" input_to_expand = input_edges[0].from_node - mask = input_to_expand.data.get("output_mask") + mask = input_to_expand.attributes.get("output_mask") propagated_mask = None if mask: nncf_logger.warning( - f"expand_as is applied to the node with propagation mask. Currently, it's not supported " - "and mask is invalidated. node_name={node.node_name}" + "expand_as is applied to the node with propagation mask. Currently, it's not supported " + f"and mask is invalidated. node_name={node.node_name}" ) mask.invalidate_groups() input_to_get_shape = input_edges[1].from_node - mask: PropagationMask = input_to_get_shape.data.get("output_mask") + mask: PropagationMask = input_to_get_shape.attributes.get("output_mask") if mask: target_shape = input_edges[1].tensor_shape source_shape = input_edges[0].tensor_shape for dim, groups in mask.dim_groups_map.items(): if target_shape[dim] == source_shape[dim]: nncf_logger.warning( - f"expand_as takes the shape from the node with propagation mask and pruning " + "expand_as takes the shape from the node with propagation mask and pruning " "dimension in the mask matches the dimension in the expanded input. Currently, " - "it's not supported and mask is invalidated. 
node_name={node.node_name}" + f"it's not supported and mask is invalidated. node_name={node.node_name}" ) for group in groups: group.invalidate() # TODO: (nlyalyus) assume that expand_as is on constant path that does not affect pruning, otherwise pruning # of self attention block would be not possible in the general case. propagated_mask = mask - node.data["output_mask"] = propagated_mask + node.attributes["output_mask"] = propagated_mask class ScatterPruningOp(BasePruningOp): @@ -751,18 +752,18 @@ class ScatterPruningOp(BasePruningOp): def mask_propagation( cls, node: NNCFNode, graph: NNCFGraph, tensor_processor: Type[NNCFPruningBaseTensorProcessor] ) -> None: - input_masks = [input_edge.from_node.data.get("output_mask") for input_edge in graph.get_input_edges(node)] + input_masks = [input_edge.from_node.attributes.get("output_mask") for input_edge in graph.get_input_edges(node)] assert len(input_masks) == 2, "expect that masked_fill should always have 2 inputs" i1, i2 = input_masks propagated_mask = None if i1 != i2: nncf_logger.warning( - f"expand_as takes the shape from the node with propagation mask and pruning " + "expand_as takes the shape from the node with propagation mask and pruning " "dimension in the mask matches the dimension in the expanded input. Currently, " - "it's not supported and mask is invalidated. node_name={node.node_name}" + f"it's not supported and mask is invalidated. node_name={node.node_name}" ) if i1: i1.invalidate_groups() else: propagated_mask = i1 - node.data["output_mask"] = propagated_mask + node.attributes["output_mask"] = propagated_mask diff --git a/nncf/experimental/common/pruning/propagation_data.py b/nncf/experimental/common/pruning/propagation_data.py index 7a0a8ed074f..6157d3d26dd 100644 --- a/nncf/experimental/common/pruning/propagation_data.py +++ b/nncf/experimental/common/pruning/propagation_data.py @@ -161,7 +161,7 @@ def is_invalid(self): def __str__(self) -> str: producers = ",".join(map(str, sorted(self._producers))) consumers = ",".join(map(str, sorted(self._consumers))) - return f"Block: {self.block}\n" f"Producers: {producers}\n" f"Consumers: {consumers}" + return f"Block: {self.block}\nProducers: {producers}\nConsumers: {consumers}" def __repr__(self) -> str: producers = ",".join(map(str, self.get_producers())) diff --git a/nncf/experimental/common/tensor_statistics/collectors.py b/nncf/experimental/common/tensor_statistics/collectors.py index 52e61e8c6b2..3655fffe5d6 100644 --- a/nncf/experimental/common/tensor_statistics/collectors.py +++ b/nncf/experimental/common/tensor_statistics/collectors.py @@ -35,7 +35,7 @@ def __init__(self, reduction_shape: Optional[ReductionShape] = None, inplace: bo """ :param reduction_shape: Reduction shape for reduction calculation. Equal to list(range(len(input.shape))) if empty. - :param: Wheather should be calculated inplace or out of place. + :param inplace: Whether should be calculated inplace or out of place. """ self._reduction_shape = reduction_shape @@ -110,7 +110,7 @@ def _get_reduction_shape(self, tensor: NNCFTensor) -> Union[int, Tuple[int, ...] class TensorAggregatorBase: """ - Tensor aggregator is designed to recieve (register) calculated statistics and + Tensor aggregator is designed to receive (register) calculated statistics and aggregate them in terms of NNCFCollectorTensorProcessor operations. """ @@ -145,12 +145,23 @@ def _register_reduced_input_impl(self, x: TensorType) -> None: :param x: Tensor to register. 
""" - @abstractmethod def aggregate(self) -> Any: """ Aggregates collected tensors and returns aggregated result. + In case no tensors were collected returns None. - :retunr: Aggregated result. + :return: Aggregated result. + """ + if self._collected_samples: + return self._aggregate_impl() + return None + + @abstractmethod + def _aggregate_impl(self) -> Any: + """ + Aggregates collected tensors and returns aggregated result. + + :return: Aggregated result. """ def reset(self): @@ -170,7 +181,7 @@ class TensorCollector: Statistic branch consists of one reducer and one aggregator instance. TensorCollector applies a reducer on a correspondent inputs and then passes the one of the reduced tensors chosen by output port id to a correspondent aggregator for each registered statistic branch. - Receives tesnors by `register_input` method. Aggregated values as a TensorStatistic instance or + Receives tensors by `register_input` method. Aggregated values as a TensorStatistic instance or a dict could be collected by `get_statistics` call. """ @@ -182,8 +193,14 @@ def __init__(self, statistic_container: Optional[TensorStatistic] = None) -> Non self._enabled = True @property - def num_samples(self) -> int: - return max(aggregator.num_samples for aggregator in self._aggregators.values()) + def num_samples(self) -> Optional[int]: + output = None + for aggregator in self._aggregators.values(): + if aggregator.num_samples and output: + output = max(output, aggregator.num_samples) + else: + output = aggregator.num_samples + return output @property def enabled(self) -> bool: @@ -218,15 +235,15 @@ def register_statistic_branch( :param container_key: Container key to pass aggregated statistic to. :param reducer: TensorReducer instance for the statistic collection branch. - :param aggregator: TensorAggergator instance for the statistic collection branch. + :param aggregator: TensorAggregator instance for the statistic collection branch. :reducer_output_port_id: Reducer target output port id. """ if container_key in self._stat_container_kwargs_map: raise RuntimeError( - f"Two differend statistic branches for one" f" container key {container_key} are encountered" + f"Two different statistic branches for one container key {container_key} are encountered" ) if any(aggr is aggregator for aggr in self._aggregators.values()): - raise RuntimeError(f"One aggregator instance {aggregator} " f" for different branches is encountered") + raise RuntimeError(f"One aggregator instance {aggregator} for different branches is encountered") self._reducers.add(reducer) key = (hash(reducer), reducer_output_port_id, hash(aggregator)) @@ -262,7 +279,7 @@ def register_inputs(self, inputs: Dict[int, List[NNCFTensor]]) -> None: for reducer in self._reducers: reducer_hash = hash(reducer) input_ = inputs[reducer_hash] - if any([tensor.is_empty() for tensor in input_]): + if any(tensor.is_empty() for tensor in input_): continue reduced_inputs[reducer_hash] = reducer(input_) @@ -302,9 +319,9 @@ def get_statistics(self) -> Union[TensorStatistic, Dict[str, Any]]: def get_inplace_fn_info(self) -> List[Tuple[Any, int]]: """ - Returns necessery information to insert inplace operation into graph. + Returns necessary information to insert inplace operation into graph. - :returns: nesessery information to insert inplace operation into graph + :returns: necessary information to insert inplace operation into graph in format of pair of reducer builder and correspondent reducer output port id. 
""" retval = [] @@ -324,7 +341,7 @@ def any_stat_out_of_place(self) -> bool: def replace_aggregator(self, key: Tuple[int, int, int], aggregator: TensorAggregatorBase) -> None: """ Friend method that replaces aggregator instance on equivalent one. - Key shoud be valid for for given aggregator and a statistic branch + Key should be valid for for given aggregator and a statistic branch with key should be present in TensorCollector. :param key: Statistic branch key. @@ -436,11 +453,11 @@ class QuantileReducerBase(TensorReducerBase): def __init__( self, reduction_shape: Optional[ReductionShape] = None, - quantile: Union[float, List[float]] = [0.01, 0.99], + quantile: Optional[Union[float, Tuple[float]]] = None, inplace: bool = False, ): super().__init__(reduction_shape, False) - self._quantile = quantile + self._quantile = (0.01, 0.99) if quantile is None else quantile def __eq__(self, __o: object) -> bool: return super().__eq__(__o) and self._quantile == __o._quantile @@ -497,7 +514,7 @@ def __init__(self, num_samples: Optional[int]): def _register_reduced_input_impl(self, x: TensorType) -> None: self._container.append(x.tensor) - def aggregate(self): + def _aggregate_impl(self): return self._container @@ -508,7 +525,7 @@ def __init__(self): def _register_reduced_input_impl(self, x: TensorType) -> None: self._container = x - def aggregate(self): + def _aggregate_impl(self): return self._container.shape @@ -519,7 +536,7 @@ def _register_reduced_input_impl(self, x: TensorType) -> None: else: self._container = self._tensor_processor.min(x, self._container) - def aggregate(self): + def _aggregate_impl(self): return self._container.tensor @@ -530,11 +547,11 @@ def _register_reduced_input_impl(self, x: TensorType) -> None: else: self._container = self._tensor_processor.max(x, self._container) - def aggregate(self): + def _aggregate_impl(self): return self._container.tensor -class OfflineAggregatorBase(TensorAggregatorBase): +class OfflineAggregatorBase(TensorAggregatorBase, ABC): def __init__( self, tensor_processor, use_per_sample_stats: bool = False, num_samples: Optional[int] = None, window_size=None ): @@ -549,22 +566,22 @@ def _register_reduced_input_impl(self, x: TensorType) -> None: else: self._container.append(x) - def _aggregate(self, fn): + def _offline_aggregation_impl(self, fn): stacked_val = self._tensor_processor.stack(self._container) return fn(stacked_val, axis=0, keepdims=False).tensor class MeanAggregator(OfflineAggregatorBase): - def aggregate(self): - return self._aggregate(self._tensor_processor.mean) + def _aggregate_impl(self): + return self._offline_aggregation_impl(self._tensor_processor.mean) class MedianAggregator(OfflineAggregatorBase): - def aggregate(self): - return self._aggregate(self._tensor_processor.median) + def _aggregate_impl(self): + return self._offline_aggregation_impl(self._tensor_processor.median) -class NoOutliersAggregatorBase(OfflineAggregatorBase): +class NoOutliersAggregatorBase(OfflineAggregatorBase, ABC): def __init__( self, tensor_processor, @@ -576,7 +593,7 @@ def __init__( super().__init__(tensor_processor, use_per_sample_stats, num_samples, window_size) self._quantile = quantile - def _aggregate(self, fn) -> List[NNCFTensor]: + def _offline_aggregation_impl(self, fn) -> List[NNCFTensor]: stacked_val = self._tensor_processor.stack(self._container) result = self._tensor_processor.no_outliers_map(stacked_val, fn, axis=0, alpha=self._quantile) return result.tensor @@ -589,13 +606,13 @@ def __hash__(self) -> int: class 
MeanNoOutliersAggregator(NoOutliersAggregatorBase): - def aggregate(self) -> Any: - return self._aggregate(self._tensor_processor.masked_mean) + def _aggregate_impl(self) -> Any: + return self._offline_aggregation_impl(self._tensor_processor.masked_mean) class MedianNoOutliersAggregator(NoOutliersAggregatorBase): - def aggregate(self) -> Any: - return self._aggregate(self._tensor_processor.masked_median) + def _aggregate_impl(self) -> Any: + return self._offline_aggregation_impl(self._tensor_processor.masked_median) AGGREGATORS_MAP = { diff --git a/nncf/experimental/openvino/quantization/quantize_model.py b/nncf/experimental/openvino/quantization/quantize_model.py deleted file mode 100644 index a1a571b0b28..00000000000 --- a/nncf/experimental/openvino/quantization/quantize_model.py +++ /dev/null @@ -1,157 +0,0 @@ -# Copyright (c) 2023 Intel Corporation -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# http://www.apache.org/licenses/LICENSE-2.0 -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from copy import deepcopy -from typing import Any, Callable, Iterable, Optional - -import openvino.runtime as ov -from openvino._offline_transformations import compress_quantize_weights_transformation - -from nncf.common.logging import nncf_logger -from nncf.common.quantization.structs import QuantizationPreset -from nncf.common.utils.backend import get_backend -from nncf.common.utils.timer import timer -from nncf.data.dataset import Dataset -from nncf.openvino.quantization.backend_parameters import BackendParameters -from nncf.openvino.quantization.backend_parameters import is_weight_compression_needed -from nncf.openvino.quantization.quantize_model import quantize_impl -from nncf.parameters import DropType -from nncf.parameters import ModelType -from nncf.parameters import TargetDevice -from nncf.quantization.advanced_parameters import AdvancedAccuracyRestorerParameters -from nncf.quantization.advanced_parameters import AdvancedQuantizationParameters -from nncf.quantization.algorithms.accuracy_control.algorithm import QuantizationAccuracyRestorer -from nncf.quantization.algorithms.accuracy_control.algorithm import get_algo_backend -from nncf.scopes import IgnoredScope - - -def _match_const_nodes_names(initial_model: ov.Model, quantized_model: ov.Model) -> None: - """ - Replaces the name of the constant node in the `quantized_model` - with the name of the corresponding constant node in the `initial_model`. - - :param initial_model: Initial model. - :param quantized_model_graph: Quantized model. 
- """ - initial_name_to_const_map = { - op.get_friendly_name(): op for op in initial_model.get_ops() if op.get_type_name() == "Constant" - } - modified_name_to_const_map = { - op.get_friendly_name(): op for op in quantized_model.get_ops() if op.get_type_name() == "Constant" - } - - for initial_name in initial_name_to_const_map: - num_matches = 0 - - name_to_search = initial_name - if "compressed" in name_to_search: - name_to_search = name_to_search[: name_to_search.rfind("compressed") - 1] - - for modified_name, const_op in modified_name_to_const_map.items(): - if modified_name.startswith(name_to_search): - num_matches += 1 - const_op.set_friendly_name(initial_name) - - if num_matches != 1: - raise RuntimeError( - "Unexpected Behavior: number of matches greater than 1\n" - f"num_matches: {num_matches}, name: {initial_name}" - ) - - -def quantize_with_accuracy_control( - model: ov.Model, - calibration_dataset: Dataset, - validation_dataset: Dataset, - validation_fn: Callable[[Any, Iterable[Any]], float], - max_drop: float = 0.01, - drop_type: DropType = DropType.ABSOLUTE, - preset: QuantizationPreset = QuantizationPreset.PERFORMANCE, - target_device: TargetDevice = TargetDevice.ANY, - subset_size: int = 300, - fast_bias_correction: bool = True, - model_type: Optional[ModelType] = None, - ignored_scope: Optional[IgnoredScope] = None, - advanced_quantization_parameters: Optional[AdvancedQuantizationParameters] = None, - advanced_accuracy_restorer_parameters: Optional[AdvancedAccuracyRestorerParameters] = None, -) -> ov.Model: - """ - Implementation of the `quantize_with_accuracy_control()` method for the OpenVINO backend via POT. - """ - if advanced_accuracy_restorer_parameters is None: - advanced_accuracy_restorer_parameters = AdvancedAccuracyRestorerParameters() - - if advanced_accuracy_restorer_parameters.tune_hyperparams: - raise RuntimeError( - "Quantization algorithm with accuracy control from the " - "OpenVINO backend does not support tuning hyperparams yet" - ) - - compress_weights = is_weight_compression_needed(advanced_quantization_parameters) - - if advanced_quantization_parameters is None: - copied_parameters = AdvancedQuantizationParameters() - else: - copied_parameters = deepcopy(advanced_quantization_parameters) - copied_parameters.backend_params[BackendParameters.COMPRESS_WEIGHTS] = False - - quantized_model = quantize_impl( - model, - calibration_dataset, - preset, - target_device, - subset_size, - fast_bias_correction, - model_type, - ignored_scope, - copied_parameters, - ) - - # We need to match constant names when the - # quantized model was got using POT. For example, we have the - # `Constant_63974886249` constant name in the quantized model, - # but `Constant_6397` in the initial model. - # The `_collect_original_biases_and_weights()`` method throws - # the error otherwise. 
- _match_const_nodes_names(model, quantized_model)
-
- backend = get_backend(model)
- algo_backend = get_algo_backend(backend)
-
- nncf_logger.info("Validation of initial model was started")
- with timer():
- initial_metric = validation_fn(algo_backend.prepare_for_inference(model), validation_dataset.get_data())
- nncf_logger.info(f"Metric of initial model: {initial_metric}")
-
- nncf_logger.info("Validation of quantized model was started")
- with timer():
- quantized_metric = validation_fn(
- algo_backend.prepare_for_inference(quantized_model), validation_dataset.get_data()
- )
- nncf_logger.info(f"Metric of quantized model: {quantized_metric}")
-
- ranking_subset_size = subset_size
- if advanced_accuracy_restorer_parameters.ranking_subset_size is not None:
- ranking_subset_size = advanced_accuracy_restorer_parameters.ranking_subset_size
-
- accuracy_aware_loop = QuantizationAccuracyRestorer(
- ranking_subset_size=ranking_subset_size,
- max_num_iterations=advanced_accuracy_restorer_parameters.max_num_iterations,
- max_drop=max_drop,
- drop_type=drop_type,
- )
- quantized_model = accuracy_aware_loop.restore_accuracy(
- model, initial_metric, quantized_model, quantized_metric, validation_dataset, validation_fn
- )
- if compress_weights:
- compress_quantize_weights_transformation(quantized_model)
-
- return quantized_model
diff --git a/nncf/experimental/tensor/README.md b/nncf/experimental/tensor/README.md
new file mode 100644
index 00000000000..09e3dc6a1e0
--- /dev/null
+++ b/nncf/experimental/tensor/README.md
@@ -0,0 +1,174 @@
+# Tensors wrapper
+
+The `Tensor` class is a wrapper class that provides a common interface for different types of tensors,
+such as NumPy and PyTorch. This allows algorithms to be written that are abstracted from the underlying model type,
+making them more portable and reusable.
+
+## Usage
+
+The main idea is that common algorithms should operate on wrapped tensors and pass unwrapped tensors to backend-specific functions.
+
+### Tensor initialization
+
+```python
+from nncf.experimental.tensor import Tensor
+
+import numpy as np
+numpy_array = np.array([1,2])
+nncf_tensor = Tensor(numpy_array)
+
+import torch
+torch_tensor = torch.tensor([1,2])
+nncf_tensor = Tensor(torch_tensor)
+```
+
+### Math operations
+
+All math operations are overridden to operate on the wrapped object and return a `Tensor`.
+
+```python
+tensor_a = Tensor(np.array([1,2]))
+tensor_b = Tensor(np.array([1,2]))
+tensor_a + tensor_b # Tensor(array([2, 4]))
+```
+
+### Comparison operators
+
+All comparison operators are overridden to operate on the wrapped object and return a `Tensor`.
+
+```python
+tensor_a = Tensor(np.array([1,2]))
+tensor_b = Tensor(np.array([1,2]))
+tensor_a < tensor_b # Tensor(array([False, False]))
+```
+
+### Methods of the Tensor class
+
+Some common tensor methods, such as `max` and `flatten`, are available directly on the wrapped `Tensor` class.
+
+```python
+nncf_tensor.max() # Tensor(2)
+```
+
+### Functions over Tensor
+
+All available functions can be found in [functions.py](functions.py).
+
+```python
+from nncf.experimental.tensor import functions
+functions.max(nncf_tensor) # Tensor(2)
+```
+
+**NOTE** A function requires at least one positional argument, which is used to dispatch the function
+to the appropriate implementation depending on the type of argument.
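+Calling a function with the tensor passed only as a keyword argument therefore fails to dispatch, as shown in the snippet below: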
+
+```python
+functions.max(nncf_tensor) # Correct
+functions.max(a=nncf_tensor) # TypeError: wrapper requires at least 1 positional argument
+```
+
+### Loop over Tensor
+
+Iterating over a `Tensor` is supported via `TensorIterator`, which yields `Tensor` elements.
+
+```python
+tensor_a = Tensor(np.array([1,2]))
+for x in tensor_a:
+    print(x)
+
+# Tensor(1)
+# Tensor(2)
+```
+
+### Get Tensor element by index
+
+```python
+tensor_a = Tensor(np.array([[1],[2]]))
+tensor_a[0] # Tensor(array([1]))
+tensor_a[0:2] # Tensor(array([[1],[2]]))
+```
+
+## Class feature enhancement
+
+**NOTE** Use names and descriptions in numpy style.
+
+### Add new method or function
+
+1. Add the method to the [Tensor class](tensor.py)
+
+    ```python
+    class Tensor:
+        ...
+        def foo(self, arg1: Type) -> "Tensor":
+            return functions.foo(self, arg1)
+    ```
+
+2. Add the function to [functions.py](functions.py)
+
+    ```python
+    @functools.singledispatch
+    def foo(a: TTensor, arg1: Type) -> TTensor:
+        """
+        __description__
+
+        :param a: The input tensor.
+        :param arg1: __description__
+        :return: __description__
+        """
+        if isinstance(a, tensor.Tensor):
+            return tensor.Tensor(foo(a.data, arg1))
+        raise NotImplementedError(f"Function `foo` is not implemented for {type(a)}")
+    ```
+
+3. Add the function name to `__all__` in [functions.py](functions.py)
+
+4. Add a backend-specific implementation of the method to:
+
+    - [numpy_functions.py](numpy_functions.py)
+
+        ```python
+        @functions.foo.register(np.ndarray)
+        @functions.foo.register(np.number)
+        def _(a: TType, arg1: Type) -> np.ndarray:
+            return np.foo(a, arg1)
+        ```
+
+    - [torch_functions.py](torch_functions.py)
+
+        ```python
+        @functions.foo.register(torch.Tensor)
+        def _(a: torch.Tensor, arg1: Type) -> torch.Tensor:
+            return torch.foo(a, arg1)
+        ```
+
+5. Add a test for the method to the [test template](tests/shared/test_templates/template_test_nncf_tensor.py) for the Tensor class
+
+### Add new backend
+
+1. Add a backend-specific implementation of all functions from [functions.py](functions.py) in the `_functions.py` file.
+
+2. Add `test_tensor.py` to the backend-specific test directory, with tests inherited from the `TemplateTestNNCFTensorOperators` class
+
+    ```python
+    class TestNPNNCFTensorOperators(TemplateTestNNCFTensorOperators):
+        @staticmethod
+        def to_tensor(x):
+            return np.array(x)  # Function to initialize tensor from list
+    ```
+
+3. Add the new backend type to the `mock_modules` list in [docs/api/source/conf.py](https://github.com/openvinotoolkit/nncf/blob/develop/docs/api/source/conf.py#L131)
+
+    ```python
+    mock_modules = [
+        "torch",
+        "torchvision",
+        "onnx",
+        "onnxruntime",
+        "openvino",
+        "tensorflow",
+        "tensorflow_addons",
+        "nncf.experimental.tensor.torch_functions",
+        "nncf.experimental.tensor.numpy_functions",
+        "nncf.experimental.tensor._functions",
+    ]
+    ```
diff --git a/nncf/experimental/tensor/__init__.py b/nncf/experimental/tensor/__init__.py
new file mode 100644
index 00000000000..96c8cb31f82
--- /dev/null
+++ b/nncf/experimental/tensor/__init__.py
@@ -0,0 +1,16 @@
+# Copyright (c) 2023 Intel Corporation
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+# http://www.apache.org/licenses/LICENSE-2.0
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and +# limitations under the License. + +from nncf.experimental.tensor.enums import TensorBackendType +from nncf.experimental.tensor.enums import TensorDataType +from nncf.experimental.tensor.enums import TensorDeviceType +from nncf.experimental.tensor.tensor import Tensor +from nncf.experimental.tensor.tensor import unwrap_tensor_data diff --git a/nncf/experimental/tensor/enums.py b/nncf/experimental/tensor/enums.py new file mode 100644 index 00000000000..1402597164f --- /dev/null +++ b/nncf/experimental/tensor/enums.py @@ -0,0 +1,43 @@ +# Copyright (c) 2023 Intel Corporation +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from enum import Enum +from enum import auto + + +class TensorBackendType(Enum): + """ + Enum representing the different tensor backends. + """ + + NUMPY = auto() + TORCH = auto() + + +class TensorDataType(Enum): + """ + Enum representing the different tensor data types. + """ + + float16 = auto() + float32 = auto() + float64 = auto() + int8 = auto() + uint8 = auto() + + +class TensorDeviceType(Enum): + """ + Enum representing the different tensor device types. + """ + + CPU = auto() + GPU = auto() diff --git a/nncf/experimental/tensor/functions.py b/nncf/experimental/tensor/functions.py new file mode 100644 index 00000000000..30f27a65cce --- /dev/null +++ b/nncf/experimental/tensor/functions.py @@ -0,0 +1,361 @@ +# Copyright (c) 2023 Intel Corporation +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import functools +from typing import List, Optional, Tuple, TypeVar, Union + +from nncf.experimental.tensor import Tensor +from nncf.experimental.tensor import unwrap_tensor_data +from nncf.experimental.tensor.enums import TensorDataType +from nncf.experimental.tensor.enums import TensorDeviceType + +TTensor = TypeVar("TTensor") + + +def _tensor_guard(func: callable): + """ + A decorator that ensures that the first argument to the decorated function is a Tensor. + """ + + @functools.wraps(func) + def wrapper(*args, **kwargs): + if isinstance(args[0], Tensor): + return func(*args, **kwargs) + raise NotImplementedError(f"Function `{func.__name__}` is not implemented for {type(args[0])}") + + return wrapper + + +@functools.singledispatch +@_tensor_guard +def device(a: TTensor) -> TensorDeviceType: + """ + Return the device of the tensor. + + :param a: The input tensor. + :return: The device of the tensor. 
+ """ + return device(a.data) + + +@functools.singledispatch +@_tensor_guard +def squeeze(a: TTensor, axis: Optional[Union[int, Tuple[int]]] = None) -> TTensor: + """ + Remove axes of length one from a. + + :param a: The input tensor. + :param axis: Selects a subset of the entries of length one in the shape. + :return: The input array, but with all or a subset of the dimensions of length 1 removed. + This is always a itself or a view into a. Note that if all axes are squeezed, + the result is a 0d array and not a scalar. + """ + return Tensor(squeeze(a.data, axis=axis)) + + +@functools.singledispatch +@_tensor_guard +def flatten(a: TTensor) -> TTensor: + """ + Return a copy of the tensor collapsed into one dimension. + + :param a: The input tensor. + :return: A copy of the input tensor, flattened to one dimension. + """ + return Tensor(flatten(a.data)) + + +@functools.singledispatch +@_tensor_guard +def max(a: TTensor, axis: Optional[Union[int, Tuple[int]]] = None) -> TTensor: # pylint: disable=redefined-builtin + """ + Return the maximum of an array or maximum along an axis. + + :param a: The input tensor. + :param axis: Axis or axes along which to operate. By default, flattened input is used. + :return: Maximum of a. + """ + return Tensor(max(a.data, axis)) + + +@functools.singledispatch +@_tensor_guard +def min(a: TTensor, axis: Optional[Union[int, Tuple[int]]] = None) -> TTensor: # pylint: disable=redefined-builtin + """ + Return the minimum of an array or minimum along an axis. + + :param a: The input tensor. + :param axis: Axis or axes along which to operate. By default, flattened input is used. + :return: Minimum of a. + """ + return Tensor(min(a.data, axis)) + + +@functools.singledispatch +@_tensor_guard +def abs(a: TTensor) -> TTensor: # pylint: disable=redefined-builtin + """ + Calculate the absolute value element-wise. + + :param a: The input tensor. + :return: A tensor containing the absolute value of each element in x. + """ + return Tensor(abs(a.data)) + + +@functools.singledispatch +@_tensor_guard +def astype(a: TTensor, data_type: TensorDataType) -> TTensor: + """ + Copy of the tensor, cast to a specified type. + + :param a: The input tensor. + :param dtype: Type code or data type to which the tensor is cast. + + :return: Copy of the tensor in specified type. + """ + return Tensor(astype(a.data, data_type)) + + +@functools.singledispatch +@_tensor_guard +def dtype(a: TTensor) -> TensorDataType: + """ + Return data type of the tensor. + + :param a: The input tensor. + :return: The data type of the tensor. + """ + return dtype(a.data) + + +@functools.singledispatch +@_tensor_guard +def reshape(a: TTensor, shape: List[int]) -> TTensor: + """ + Gives a new shape to a tensor without changing its data. + + :param a: Tensor to be reshaped. + :param shape: The new shape should be compatible with the original shape. + :return: Reshaped tensor. + """ + return Tensor(reshape(a.data, shape)) + + +@functools.singledispatch +@_tensor_guard +def all(a: TTensor, axis: Optional[Union[int, Tuple[int]]] = None) -> TTensor: # pylint: disable=redefined-builtin + """ + Test whether all tensor elements along a given axis evaluate to True. + + :param a: The input tensor. + :param axis: Axis or axes along which a logical AND reduction is performed. + :return: A new boolean or tensor. 
+ """ + return Tensor(all(a.data, axis=axis)) + + +@functools.singledispatch +@_tensor_guard +def allclose(a: TTensor, b: TTensor, rtol: float = 1e-05, atol: float = 1e-08, equal_nan: bool = False) -> TTensor: + """ + Returns True if two arrays are element-wise equal within a tolerance. + + :param a: The first input tensor. + :param b: The second input tensor. + :param rtol: The relative tolerance parameter, defaults to 1e-05. + :param atol: The absolute tolerance parameter, defaults to 1e-08. + :param equal_nan: Whether to compare NaN`s as equal. If True, + NaN`s in a will be considered equal to NaN`s in b in the output array. + Defaults to False. + :return: True if the two arrays are equal within the given tolerance, otherwise False. + """ + return Tensor( + allclose( + a.data, + unwrap_tensor_data(b), + rtol=rtol, + atol=atol, + equal_nan=equal_nan, + ) + ) + + +@functools.singledispatch +@_tensor_guard +def any(a: TTensor, axis: Optional[Union[int, Tuple[int]]] = None) -> TTensor: # pylint: disable=redefined-builtin + """ + Test whether any tensor elements along a given axis evaluate to True. + + :param a: The input tensor. + :param axis: Axis or axes along which a logical OR reduction is performed. + :return: A new boolean or tensor. + """ + return Tensor(any(a.data, axis)) + + +@functools.singledispatch +@_tensor_guard +def count_nonzero(a: TTensor, axis: Optional[Union[int, Tuple[int]]] = None) -> TTensor: + """ + Counts the number of non-zero values in the tensor input. + + :param a: The tensor for which to count non-zeros. + :param axis: Axis or tuple of axes along which to count non-zeros. + :return: Number of non-zero values in the tensor along a given axis. + Otherwise, the total number of non-zero values in the tensor is returned. + """ + return Tensor(count_nonzero(a.data, axis)) + + +@functools.singledispatch +@_tensor_guard +def isempty(a: TTensor) -> TTensor: + """ + Return True if input tensor is empty. + + :param a: The input tensor. + :return: True if tensor is empty, otherwise False. + """ + return Tensor(isempty(a.data)) + + +@functools.singledispatch +@_tensor_guard +def isclose(a: TTensor, b: TTensor, rtol: float = 1e-05, atol: float = 1e-08, equal_nan: bool = False) -> TTensor: + """ + Returns a boolean array where two arrays are element-wise equal within a tolerance. + + :param a: The first input tensor. + :param b: The second input tensor. + :param rtol: The relative tolerance parameter, defaults to 1e-05. + :param atol: The absolute tolerance parameter, defaults to 1e-08. + :param equal_nan: Whether to compare NaN`s as equal. If True, + NaN`s in a will be considered equal to NaN`s in b in the output array. + Defaults to False. + :return: Returns a boolean tensor of where a and b are equal within the given tolerance. + """ + return Tensor( + isclose( + a.data, + unwrap_tensor_data(b), + rtol=rtol, + atol=atol, + equal_nan=equal_nan, + ) + ) + + +@functools.singledispatch +@_tensor_guard +def maximum(x1: TTensor, x2: TTensor) -> TTensor: + """ + Element-wise maximum of tensor elements. + + :param x1: The first input tensor. + :param x2: The second input tensor. + :return: Output tensor. + """ + return Tensor(maximum(x1.data, unwrap_tensor_data(x2))) + + +@functools.singledispatch +@_tensor_guard +def minimum(x1: TTensor, x2: TTensor) -> TTensor: + """ + Element-wise minimum of tensor elements. + + :param x1: The first input tensor. + :param x2: The second input tensor. + :return: Output tensor. 
+ """ + return Tensor(minimum(x1.data, unwrap_tensor_data(x2))) + + +@functools.singledispatch +@_tensor_guard +def ones_like(a: TTensor) -> TTensor: + """ + Return a tensor of ones with the same shape and type as a given tensor. + + :param a: The shape and data-type of a define these same attributes of the returned tensor. + :return: Tensor of ones with the same shape and type as a. + """ + return Tensor(ones_like(a.data)) + + +@functools.singledispatch +@_tensor_guard +def where(condition: TTensor, x: TTensor, y: TTensor) -> TTensor: + """ + Return elements chosen from x or y depending on condition. + + :param condition: Where True, yield x, otherwise yield y. + :param x: Value at indices where condition is True. + :param y: Value at indices where condition is False. + :return: A tensor with elements from x where condition is True, and elements from y elsewhere. + """ + return Tensor( + where( + condition.data, + unwrap_tensor_data(x), + unwrap_tensor_data(y), + ) + ) + + +@functools.singledispatch +@_tensor_guard +def zeros_like(a: TTensor) -> TTensor: + """ + Return an tensor of zeros with the same shape and type as a given tensor. + + :param input: The shape and data-type of a define these same attributes of the returned tensor. + :return: tensor of zeros with the same shape and type as a. + """ + return Tensor(zeros_like(a.data)) + + +__all__ = [ + "device", + "squeeze", + "flatten", + "max", + "min", + "abs", + "astype", + "reshape", + "all", + "allclose", + "any", + "count_nonzero", + "isempty", + "isclose", + "maximum", + "minimum", + "ones_like", + "minimum", + "where", + "zeros_like", +] + + +def _initialize_backends(): + # pylint: disable=unused-import + import nncf.experimental.tensor.numpy_functions + + try: + import nncf.experimental.tensor.torch_functions + except ImportError: + pass + + +_initialize_backends() diff --git a/nncf/experimental/tensor/numpy_functions.py b/nncf/experimental/tensor/numpy_functions.py new file mode 100644 index 00000000000..be070db4bdb --- /dev/null +++ b/nncf/experimental/tensor/numpy_functions.py @@ -0,0 +1,164 @@ +# Copyright (c) 2023 Intel Corporation +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
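Before the NumPy-specific registrations that follow, it may help to see how the single-dispatch plumbing in `functions.py` resolves a call. This is a short sketch, not part of the module, and it assumes only NumPy is installed:

```python
import numpy as np

from nncf.experimental.tensor import Tensor
from nncf.experimental.tensor import functions

t = Tensor(np.array([1, 2]))

functions.max(t)       # guarded base impl unwraps to np.ndarray, re-dispatches -> Tensor(2)
functions.max(t.data)  # dispatches directly to the np.ndarray registration -> plain NumPy scalar
# functions.max([1, 2])  # would raise NotImplementedError: nothing is registered for list
```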
+ +from typing import Optional, Tuple, Union + +import numpy as np + +from nncf.experimental.tensor import functions +from nncf.experimental.tensor.enums import TensorDataType +from nncf.experimental.tensor.enums import TensorDeviceType + +DTYPE_MAP = { + TensorDataType.float16: np.dtype(np.float16), + TensorDataType.float32: np.dtype(np.float32), + TensorDataType.float64: np.dtype(np.float64), + TensorDataType.int8: np.dtype(np.int8), + TensorDataType.uint8: np.dtype(np.uint8), +} + +DTYPE_MAP_REV = {v: k for k, v in DTYPE_MAP.items()} + + +@functions.device.register(np.ndarray) +@functions.device.register(np.number) +def _(a: Union[np.ndarray, np.number]) -> TensorDeviceType: + return TensorDeviceType.CPU + + +@functions.squeeze.register(np.ndarray) +@functions.squeeze.register(np.number) +def _(a: Union[np.ndarray, np.number], axis: Optional[Union[int, Tuple[int]]] = None) -> np.ndarray: + return np.squeeze(a, axis=axis) + + +@functions.flatten.register(np.ndarray) +@functions.flatten.register(np.number) +def _(a: Union[np.ndarray, np.number]) -> np.ndarray: + return a.flatten() + + +@functions.max.register(np.ndarray) +@functions.max.register(np.number) +def _(a: Union[np.ndarray, np.number], axis: Optional[Union[int, Tuple[int]]] = None) -> np.ndarray: + return np.max(a, axis=axis) + + +@functions.min.register(np.ndarray) +@functions.min.register(np.number) +def _(a: Union[np.ndarray, np.number], axis: Optional[Union[int, Tuple[int]]] = None) -> np.ndarray: + return np.min(a, axis=axis) + + +@functions.abs.register(np.ndarray) +@functions.abs.register(np.number) +def _(a: Union[np.ndarray, np.number]) -> np.ndarray: + return np.absolute(a) + + +@functions.astype.register(np.ndarray) +@functions.astype.register(np.number) +def _(a: Union[np.ndarray, np.number], dtype: TensorDataType) -> np.ndarray: + return a.astype(DTYPE_MAP[dtype]) + + +@functions.dtype.register(np.ndarray) +@functions.dtype.register(np.number) +def _(a: Union[np.ndarray, np.number]) -> TensorDataType: + return DTYPE_MAP_REV[np.dtype(a.dtype)] + + +@functions.reshape.register(np.ndarray) +@functions.reshape.register(np.number) +def _(a: Union[np.ndarray, np.number], shape: Union[int, Tuple[int]]) -> np.ndarray: + return a.reshape(shape) + + +@functions.all.register(np.ndarray) +@functions.all.register(np.number) +def _(a: Union[np.ndarray, np.number], axis: Optional[Union[int, Tuple[int]]] = None) -> Union[np.ndarray, bool]: + return np.all(a, axis=axis) + + +@functions.allclose.register(np.ndarray) +@functions.allclose.register(np.number) +def _( + a: Union[np.ndarray, np.number], + b: Union[np.ndarray, np.number], + rtol: float = 1e-05, + atol: float = 1e-08, + equal_nan: bool = False, +) -> bool: + return np.allclose(a, b, rtol=rtol, atol=atol, equal_nan=equal_nan) + + +@functions.any.register(np.ndarray) +@functions.any.register(np.number) +def _(a: Union[np.ndarray, np.number], axis: Optional[Union[int, Tuple[int]]] = None) -> Union[np.ndarray, bool]: + return np.any(a, axis=axis) + + +@functions.count_nonzero.register(np.ndarray) +@functions.count_nonzero.register(np.number) +def _(a: Union[np.ndarray, np.number], axis: Optional[Union[int, Tuple[int]]] = None) -> np.ndarray: + return np.count_nonzero(a, axis=axis) + + +@functions.isempty.register(np.ndarray) +@functions.isempty.register(np.number) +def _(a: Union[np.ndarray, np.number]) -> bool: + return a.size == 0 + + +@functions.isclose.register(np.ndarray) +@functions.isclose.register(np.number) +def _( + a: Union[np.ndarray, np.number], + b: np.ndarray, + 
rtol: float = 1e-05, + atol: float = 1e-08, + equal_nan: bool = False, +): + return np.isclose(a, b, rtol=rtol, atol=atol, equal_nan=equal_nan) + + +@functions.maximum.register(np.ndarray) +@functions.maximum.register(np.number) +def _(x1: Union[np.ndarray, np.number], x2: np.ndarray) -> np.ndarray: + return np.maximum(x1, x2) + + +@functions.minimum.register(np.ndarray) +@functions.minimum.register(np.number) +def _(x1: Union[np.ndarray, np.number], x2: np.ndarray) -> np.ndarray: + return np.minimum(x1, x2) + + +@functions.ones_like.register(np.ndarray) +@functions.ones_like.register(np.number) +def _(a: Union[np.ndarray, np.number]) -> np.ndarray: + return np.ones_like(a) + + +@functions.where.register(np.ndarray) +@functions.where.register(np.number) +def _( + condition: Union[np.ndarray, np.number], + x: Union[np.ndarray, np.number, float, bool], + y: Union[np.ndarray, float, bool], +) -> np.ndarray: + return np.where(condition, x, y) + + +@functions.zeros_like.register(np.ndarray) +@functions.zeros_like.register(np.number) +def _(a: Union[np.ndarray, np.number]) -> np.ndarray: + return np.zeros_like(a) diff --git a/nncf/experimental/tensor/tensor.py b/nncf/experimental/tensor/tensor.py new file mode 100644 index 00000000000..daa8e37aff4 --- /dev/null +++ b/nncf/experimental/tensor/tensor.py @@ -0,0 +1,181 @@ +# Copyright (c) 2023 Intel Corporation +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +from typing import Any, List, Optional, Tuple, TypeVar, Union + +from nncf.experimental.tensor.enums import TensorDataType +from nncf.experimental.tensor.enums import TensorDeviceType + +TTensor = TypeVar("TTensor") + + +class Tensor: + """ + An interface to framework specific tensors for common NNCF algorithms. 
+ """ + + def __init__(self, data: Optional[TTensor]): + self._data = data.data if isinstance(data, Tensor) else data + + @property + def data(self) -> TTensor: + return self._data + + @property + def shape(self) -> List[int]: + return list(self.data.shape) + + @property + def device(self) -> TensorDeviceType: + return _call_function("device", self) + + @property + def dtype(self) -> TensorDeviceType: + return _call_function("dtype", self) + + def __bool__(self) -> bool: + return bool(self.data) + + def __iter__(self): + return TensorIterator(self.data) + + def __getitem__(self, index: int) -> "Tensor": + return Tensor(self.data[index]) + + def __str__(self) -> str: + return f"nncf.Tensor({str(self.data)})" + + def __repr__(self) -> str: + return f"nncf.Tensor({repr(self.data)})" + + # built-in operations + + def __add__(self, other: TTensor) -> "Tensor": + return Tensor(self.data + unwrap_tensor_data(other)) + + def __radd__(self, other: TTensor) -> "Tensor": + return Tensor(unwrap_tensor_data(other) + self.data) + + def __sub__(self, other: TTensor) -> "Tensor": + return Tensor(self.data - unwrap_tensor_data(other)) + + def __rsub__(self, other: TTensor) -> "Tensor": + return Tensor(unwrap_tensor_data(other) - self.data) + + def __mul__(self, other: TTensor) -> "Tensor": + return Tensor(self.data * unwrap_tensor_data(other)) + + def __rmul__(self, other: TTensor) -> "Tensor": + return Tensor(unwrap_tensor_data(other) * self.data) + + def __pow__(self, other: TTensor) -> "Tensor": + return Tensor(self.data ** unwrap_tensor_data(other)) + + def __truediv__(self, other: TTensor) -> "Tensor": + return Tensor(self.data / unwrap_tensor_data(other)) + + def __rtruediv__(self, other: TTensor) -> "Tensor": + return Tensor(unwrap_tensor_data(other) / self.data) + + def __floordiv__(self, other: TTensor) -> "Tensor": + return Tensor(self.data // unwrap_tensor_data(other)) + + def __rfloordiv__(self, other: TTensor) -> "Tensor": + return Tensor(unwrap_tensor_data(other) // self.data) + + def __neg__(self) -> "Tensor": + return Tensor(-self.data) + + # Comparison operators + + def __lt__(self, other: TTensor) -> "Tensor": + return Tensor(self.data < unwrap_tensor_data(other)) + + def __le__(self, other: TTensor) -> "Tensor": + return Tensor(self.data <= unwrap_tensor_data(other)) + + def __eq__(self, other: TTensor) -> "Tensor": + return Tensor(self.data == unwrap_tensor_data(other)) + + def __ne__(self, other: TTensor) -> "Tensor": + return Tensor(self.data != unwrap_tensor_data(other)) + + def __gt__(self, other: TTensor) -> "Tensor": + return Tensor(self.data > unwrap_tensor_data(other)) + + def __ge__(self, other: TTensor) -> "Tensor": + return Tensor(self.data >= unwrap_tensor_data(other)) + + # Tensor functions + + def squeeze(self, axis: Optional[Union[int, Tuple[int]]] = None) -> "Tensor": + return _call_function("squeeze", self, axis) + + def flatten(self) -> "Tensor": + return _call_function("flatten", self) + + def max(self, axis: Optional[TTensor] = None) -> "Tensor": + return _call_function("max", self, axis) + + def min(self, axis: Optional[TTensor] = None) -> "Tensor": + return _call_function("min", self, axis) + + def abs(self) -> "Tensor": + return _call_function("abs", self) + + def isempty(self) -> "Tensor": + return _call_function("isempty", self) + + def astype(self, dtype: TensorDataType): + return _call_function("astype", self, dtype) + + def reshape(self, shape: TTensor) -> "Tensor": + return _call_function("reshape", self, shape) + + +def _call_function(func_name: str, 
*args): + """ + Call function from functions.py to avoid circular imports. + + :param func_name: Name of function. + :return: Result of function call. + """ + from nncf.experimental.tensor import functions + + fn = getattr(functions, func_name) + return fn(*args) + + +class TensorIterator: + """Iterator for Tensor class""" + + def __init__(self, tensor): + self._tensor = tensor + self._index = 0 + + def __next__(self) -> Tensor: + if self._index < len(self._tensor): + result = self._tensor[self._index] + self._index += 1 + return Tensor(result) + + raise StopIteration + + +def unwrap_tensor_data(obj: Any) -> TTensor: + """ + Return the data of a Tensor object, or the object itself if it is not a Tensor. + + :param obj: The object to unwrap. + :return: The data of the Tensor object, or the object itself. + """ + return obj.data if isinstance(obj, Tensor) else obj diff --git a/nncf/experimental/tensor/torch_functions.py b/nncf/experimental/tensor/torch_functions.py new file mode 100644 index 00000000000..09ef0f1b886 --- /dev/null +++ b/nncf/experimental/tensor/torch_functions.py @@ -0,0 +1,148 @@ +# Copyright (c) 2023 Intel Corporation +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from typing import List, Optional, Tuple, Union + +import torch + +from nncf.experimental.tensor import TensorDataType +from nncf.experimental.tensor import TensorDeviceType +from nncf.experimental.tensor import functions + +DTYPE_MAP = { + TensorDataType.float16: torch.float16, + TensorDataType.float32: torch.float32, + TensorDataType.float64: torch.float64, + TensorDataType.int8: torch.int8, + TensorDataType.uint8: torch.uint8, +} + +DTYPE_MAP_REV = {v: k for k, v in DTYPE_MAP.items()} + + +@functions.device.register(torch.Tensor) +def _(a: torch.Tensor) -> TensorDeviceType: + DEVICE_MAP = { + "cpu": TensorDeviceType.CPU, + "cuda": TensorDeviceType.GPU, + } + return DEVICE_MAP[a.device.type] + + +@functions.squeeze.register(torch.Tensor) +def _(a: torch.Tensor, axis: Optional[Union[int, Tuple[int]]] = None) -> torch.Tensor: + if axis is None: + return a.squeeze() + return a.squeeze(axis) + + +@functions.flatten.register(torch.Tensor) +def _(a: torch.Tensor) -> torch.Tensor: + return a.flatten() + + +@functions.max.register(torch.Tensor) +def _(a: torch.Tensor, axis: Optional[Union[int, Tuple[int]]] = None) -> torch.Tensor: + if axis is None: + return torch.max(a) + return torch.max(a, dim=axis).values + + +@functions.min.register(torch.Tensor) +def _(a: torch.Tensor, axis: Optional[Union[int, Tuple[int]]] = None) -> torch.Tensor: + if axis is None: + return torch.min(a) + return torch.min(a, dim=axis).values + + +@functions.abs.register(torch.Tensor) +def _(a: torch.Tensor) -> torch.Tensor: + return torch.absolute(a) + + +@functions.astype.register(torch.Tensor) +def _(a: torch.Tensor, dtype: TensorDataType) -> torch.Tensor: + return a.type(DTYPE_MAP[dtype]) + + +@functions.dtype.register(torch.Tensor) +def _(a: torch.Tensor) -> TensorDataType: + return DTYPE_MAP_REV[a.dtype] + + 
+@functions.reshape.register(torch.Tensor) +def _(a: torch.Tensor, shape: List[int]) -> torch.Tensor: + return a.reshape(shape) + + +@functions.all.register(torch.Tensor) +def _(a: torch.Tensor, axis: Optional[Union[int, Tuple[int]]] = None) -> Union[torch.Tensor, bool]: + if axis is None: + return torch.all(a) + return torch.all(a, dim=axis) + + +@functions.allclose.register(torch.Tensor) +def _(a: torch.Tensor, b: torch.Tensor, rtol: float = 1e-05, atol: float = 1e-08, equal_nan: bool = False) -> bool: + return torch.allclose(a, b, rtol=rtol, atol=atol, equal_nan=equal_nan) + + +@functions.any.register(torch.Tensor) +def _(a: torch.Tensor, axis: Optional[Union[int, Tuple[int]]] = None) -> Union[torch.Tensor, bool]: + if axis is None: + return torch.any(a) + return torch.any(a, dim=axis) + + +@functions.count_nonzero.register(torch.Tensor) +def _(a: torch.Tensor, axis: Optional[Union[int, Tuple[int]]] = None) -> torch.Tensor: + return torch.count_nonzero(a, dim=axis) + + +@functions.isempty.register(torch.Tensor) +def _(a: torch.Tensor) -> bool: + return a.numel() == 0 + + +@functions.isclose.register(torch.Tensor) +def _(a: torch.Tensor, b: torch.Tensor, rtol: float = 1e-05, atol: float = 1e-08, equal_nan: bool = False): + return torch.isclose(a, b, atol=atol, rtol=rtol, equal_nan=equal_nan) + + +@functions.maximum.register(torch.Tensor) +def _(x1: torch.Tensor, x2: torch.Tensor) -> torch.Tensor: + if not isinstance(x2, torch.Tensor): + x2 = torch.tensor(x2, device=x1.data.device) + return torch.maximum(x1, x2) + + +@functions.minimum.register(torch.Tensor) +def _(x1: torch.Tensor, x2: torch.Tensor) -> torch.Tensor: + if not isinstance(x2, torch.Tensor): + x2 = torch.tensor(x2, device=x1.data.device) + return torch.minimum(x1, x2) + + +@functions.ones_like.register(torch.Tensor) +def _(a: torch.Tensor) -> torch.Tensor: + return torch.ones_like(a) + + +@functions.where.register(torch.Tensor) +def _( + condition: torch.Tensor, x: Union[torch.Tensor, float, bool], y: Union[torch.Tensor, float, bool] +) -> torch.Tensor: + return torch.where(condition, x, y) + + +@functions.zeros_like.register(torch.Tensor) +def _(a: torch.Tensor) -> torch.Tensor: + return torch.zeros_like(a) diff --git a/nncf/experimental/torch/nas/bootstrapNAS/BootstrapNAS.md b/nncf/experimental/torch/nas/bootstrapNAS/BootstrapNAS.md index ecc6be129bf..f60322dbda8 100644 --- a/nncf/experimental/torch/nas/bootstrapNAS/BootstrapNAS.md +++ b/nncf/experimental/torch/nas/bootstrapNAS/BootstrapNAS.md @@ -1,96 +1,98 @@ -### BootstrapNAS +# BootstrapNAS -Automated generation of weight-sharing super-networks (Cai, Gan, et al., 2020) for Neural Architecture Search (NAS) (Elsken et al., 2019). A weight-sharing super-network is a data structure from which smaller and more efficient sub-networks can be extracted. +Automated generation of weight-sharing super-networks (Cai, Gan, et al., 2020) for Neural Architecture Search (NAS) (Elsken et al., 2019). A weight-sharing super-network is a data structure from which smaller and more efficient sub-networks can be extracted.

BootstrapNAS Architecture

-BootstrapNAS (1) takes as input a pre-trained model. (2) It uses this model to generate a weight-sharing super-network. (3) BootstrapNAS then applies a training strategy, and once the super-network has been trained, (4) it searches for efficient subnetworks that satisfy the user's requirements. (5) The configuration of the discovered sub-network(s) is returned to the user. +BootstrapNAS (1) takes as input a pre-trained model. (2) It uses this model to generate a weight-sharing super-network. (3) BootstrapNAS then applies a training strategy, and once the super-network has been trained, (4) it searches for efficient subnetworks that satisfy the user's requirements. (5) The configuration of the discovered sub-network(s) is returned to the user. -The parameters for generating, training and searching on the super-network are defined in a configuration file within two exclusive subsets of parameters for training and search: -```json - "bootstrapNAS": { - "training": { - ... - }, - "search": { - ... - } +The parameters for generating, training and searching on the super-network are defined in a configuration file within two exclusive subsets of parameters for training and search: + +```json5 +"bootstrapNAS": { + "training": { + ... + }, + "search": { + ... } +} ``` -In the `training` section, you specify the training algorithm, e.g., `progressive_shrinking`, schedule and elasticity parameters: +In the `training` section, you specify the training algorithm, e.g., `progressive_shrinking`, schedule and elasticity parameters: ```json "training": { - "algorithm": "progressive_shrinking", - "progressivity_of_elasticity": ["depth", "width"], + "algorithm": "progressive_shrinking", + "progressivity_of_elasticity": ["depth", "width"], "batchnorm_adaptation": { "num_bn_adaptation_samples": 1500 }, - "schedule": { + "schedule": { "list_stage_descriptions": [ - {"train_dims": ["depth"], "epochs": 25, "depth_indicator": 1, "init_lr": 2.5e-6, "epochs_lr": 25}, - {"train_dims": ["depth"], "epochs": 40, "depth_indicator": 2, "init_lr": 2.5e-6, "epochs_lr": 40}, - {"train_dims": ["depth", "width"], "epochs": 50, "depth_indicator": 2, "reorg_weights": true, "width_indicator": 2, "bn_adapt": true, "init_lr": 2.5e-6, "epochs_lr": 50}, - {"train_dims": ["depth", "width"], "epochs": 50, "depth_indicator": 2, "reorg_weights": true, "width_indicator": 3, "bn_adapt": true, "init_lr": 2.5e-6, "epochs_lr": 50} - ] - }, + {"train_dims": ["depth"], "epochs": 25, "depth_indicator": 1, "init_lr": 2.5e-6, "epochs_lr": 25}, + {"train_dims": ["depth"], "epochs": 40, "depth_indicator": 2, "init_lr": 2.5e-6, "epochs_lr": 40}, + {"train_dims": ["depth", "width"], "epochs": 50, "depth_indicator": 2, "reorg_weights": true, "width_indicator": 2, "bn_adapt": true, "init_lr": 2.5e-6, "epochs_lr": 50}, + {"train_dims": ["depth", "width"], "epochs": 50, "depth_indicator": 2, "reorg_weights": true, "width_indicator": 3, "bn_adapt": true, "init_lr": 2.5e-6, "epochs_lr": 50} + ] + }, "elasticity": { "available_elasticity_dims": ["width", "depth"], "width": { "max_num_widths": 3, "min_width": 32, - "width_step": 32, + "width_step": 32, "width_multipliers": [1, 0.80, 0.60] }, - ... + ... } - ``` -In the search section, you specify the search algorithm, e.g., `NSGA-II` and its parameters. For example: + +In the search section, you specify the search algorithm, e.g., `NSGA-II` and its parameters. 
For example:
+
```json
 "search": {
     "algorithm": "NSGA2",
-    "num_evals": 3000,
-    "population": 50,
-    "ref_acc": 93.65
+    "num_evals": 3000,
+    "population": 50,
+    "ref_acc": 93.65
 }
 ```

-By default, BootstrapNAS uses `NSGA-II` (Dev et al., 2002), an genetic algorithm that constructs a pareto front of efficient sub-networks.
+By default, BootstrapNAS uses `NSGA-II` (Deb et al., 2002), a genetic algorithm that constructs a Pareto front of efficient sub-networks.

-List of parameters that can be used in the configuration file:
+List of parameters that can be used in the configuration file:

 **Training:**

 `algorithm`: Defines training strategy for tuning supernet. By default, `progressive_shrinking`.

-`progressivity_of_elasticity`: Defines the order of adding a new elasticity dimension from stage to stage.
+`progressivity_of_elasticity`: Defines the order of adding a new elasticity dimension from stage to stage.
 examples=["width", "depth", "kernel"].

-`batchnorm_adaptation`: Specifies the number of samples from the training dataset to use for model inference during the
+`batchnorm_adaptation`: Specifies the number of samples from the training dataset to use for model inference during the
 BatchNorm statistics adaptation procedure for the compressed model.

-`schedule`: The schedule section includes a list of stage descriptors (`list_stage_descriptions`) that specify the
-elasticity dimensions enabled for a particular stage (`train_dims`), the number of `epochs` for the stage, the
-`depth_indicator` which in the case of elastic depth, restricts the maximum number of blocks in each independent group
-that can be skipped, the `width_indicator`, which restricts the maximum number of width values in each elastic layer.
-The user can also specify whether weights should be reorganized (`reorg_weights`), whether batch norm adaptation should
-be triggered at the beginning of the stage (`bn_adapt`), the initial learning rate for the stage (`init_lr`), and
-the epochs to use for adjusting the learning rate (`epochs_lr`).
+`schedule`: The schedule section includes a list of stage descriptors (`list_stage_descriptions`) that specify the
+elasticity dimensions enabled for a particular stage (`train_dims`), the number of `epochs` for the stage, the
+`depth_indicator`, which, in the case of elastic depth, restricts the maximum number of blocks in each independent group
+that can be skipped, and the `width_indicator`, which restricts the maximum number of width values in each elastic layer.
+The user can also specify whether weights should be reorganized (`reorg_weights`), whether batch norm adaptation should
+be triggered at the beginning of the stage (`bn_adapt`), the initial learning rate for the stage (`init_lr`), and
+the epochs to use for adjusting the learning rate (`epochs_lr`).

-`elasticity`: Currently, BootstrapNAS supports three elastic dimensions (`kernel`, `width` and `depth`).
-Elastic depth automatically finds blocks to skip, by default. The user can specify the `min_block_size`, i.e., minimal
+`elasticity`: Currently, BootstrapNAS supports three elastic dimensions (`kernel`, `width` and `depth`).
+Elastic depth automatically finds blocks to skip, by default. The user can specify the `min_block_size`, i.e., minimal
 number of operations in the skipping block, and the `max_block_size`, i.e., maximal number of operations in the block.
-Alternatively, one can specify list of blocks to skip manually via `skipped_blocks`.
-In the case of elastic width, the user can specify the `min_width`, i.e., the minimal number of output channels that -can be activated for each layers with elastic width. Default value is 32, the `max_num_widths`, which restricts total -number of different elastic width values for each layer, a `width_step`, which defines a step size for a generation of -the elastic width search space, or a `width_multiplier` to define the elastic width search space via a list of multipliers. -Finally, the user can determine the type of filter importance metric: L1, L2 or geometric mean. L2 is selected by default. -For elastic kernel, the user can specify the `max_num_kernels`, which restricts the total number of different elastic +Alternatively, one can specify list of blocks to skip manually via `skipped_blocks`. +In the case of elastic width, the user can specify the `min_width`, i.e., the minimal number of output channels that +can be activated for each layers with elastic width. Default value is 32, the `max_num_widths`, which restricts total +number of different elastic width values for each layer, a `width_step`, which defines a step size for a generation of +the elastic width search space, or a `width_multiplier` to define the elastic width search space via a list of multipliers. +Finally, the user can determine the type of filter importance metric: L1, L2 or geometric mean. L2 is selected by default. +For elastic kernel, the user can specify the `max_num_kernels`, which restricts the total number of different elastic kernel values for each layer. `train_steps`: Defines the number of samples used for each training epoch. @@ -107,8 +109,7 @@ kernel values for each layer. `ref_acc`: Defines the reference accuracy from the pre-trained model used to generate the super-network. -For more information about BootstrapNAS and to cite this work, please refer to the following publications: - +For more information about BootstrapNAS and to cite this work, please refer to the following publications: [Automated Super-Network Generation for Scalable Neural Architecture Search](https://openreview.net/attachment?id=HK-zmbTB8gq&name=main_paper_and_supplementary_material). @@ -122,9 +123,10 @@ For more information about BootstrapNAS and to cite this work, please refer to t url={https://openreview.net/forum?id=HK-zmbTB8gq} } ``` + [Enabling NAS with Automated Super-Network Generation](https://arxiv.org/abs/2112.10878) -```BibTex +```bibtex @article{ bootstrapNAS, author = {Mu{\~{n}}oz, J. Pablo and Lyalyushkin, Nikolay and Akhauri, Yash and Senina, Anastasia and Kozlov, Alexander and Jain, Nilesh}, @@ -140,10 +142,10 @@ For more information about BootstrapNAS and to cite this work, please refer to t } ``` -#### References +## References - Cai, H., C. Gan, et al. (2020). “Once for All: Train One Network and Specialize it for Efficient Deployment”. In: International Conference on Learning Representations. - Deb, K., A. Pratap, et al. (2002). “A fast and elitist multiobjective genetic algorithm: NSGA-II”. In: 303 IEEE Transactions on Evolutionary Computation 6.2, pp. 182–197. -- Elsken, T., J. H. Metzen, and F. Hutter (2019). “Neural Architecture Search: A Survey”. In: Journal of Machine Learning Research 20.55, pp. 1–21. \ No newline at end of file +- Elsken, T., J. H. Metzen, and F. Hutter (2019). “Neural Architecture Search: A Survey”. In: Journal of Machine Learning Research 20.55, pp. 1–21. 
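Once a super-network has been trained with the configuration above, the search stage does not have to go through the built-in NSGA-II loop: the `TrainedSuperNet` helper added in this patch (`nncf/experimental/torch/nas/bootstrapNAS/search/supernet.py`) exposes a trained super-network to third-party search code. A minimal sketch follows; the ResNet-50 model, the JSON config, the checkpoint paths and the `validate` callable are illustrative placeholders, not part of this patch.

```python
from torchvision.models import resnet50

from nncf import NNCFConfig
from nncf.experimental.torch.nas.bootstrapNAS.search.supernet import TrainedSuperNet


def validate(model) -> float:
    # User-defined: run the active subnetwork on a validation set and return its metric.
    ...


# Placeholders: use the model, config and checkpoints from your own BootstrapNAS run.
model = resnet50()
nncf_config = NNCFConfig.from_json("bootstrap_nas_config.json")
nncf_config.device = "cpu"  # from_checkpoint reads the target device from the config

supernet = TrainedSuperNet.from_checkpoint(
    model=model,
    nncf_config=nncf_config,
    supernet_elasticity_path="elasticity_state.pth",
    supernet_weights_path="supernet_weights.pth",
)

print(supernet.get_search_space())            # elastic configurations available for search
supernet.activate_minimal_subnet()            # or supernet.activate_config(...) for a specific subnet
top1 = supernet.eval_active_subnet(validate)  # calls validate(...) on the active subnetwork
macs = supernet.get_macs_for_active_config()
supernet.export_active_subnet_to_onnx("subnet_min")
```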
diff --git a/nncf/experimental/torch/nas/bootstrapNAS/elasticity/elastic_kernel.py b/nncf/experimental/torch/nas/bootstrapNAS/elasticity/elastic_kernel.py index b8cf833ec6c..56a3f3dd211 100644 --- a/nncf/experimental/torch/nas/bootstrapNAS/elasticity/elastic_kernel.py +++ b/nncf/experimental/torch/nas/bootstrapNAS/elasticity/elastic_kernel.py @@ -168,18 +168,26 @@ class ElasticKernelConv2DOp(ElasticKernelOp, nn.Module): and modifies in the way that kernel size is changing to a given value. """ - def __init__(self, max_kernel_size: KernelSizeType, node_name: NNCFNodeName, params: ElasticKernelParams): + def __init__( + self, + max_kernel_size: KernelSizeType, + node_name: NNCFNodeName, + params: ElasticKernelParams, + original_padding_value: Optional[int] = 0, + ): """ Constructor. :param max_kernel_size: maximum kernel size value in the original operation. :param node_name: string representation of operation address. It's used for more informative messages only. :param params: parameters to configure elastic kernel for the operation. + :param original_padding_value: the padding value used in the original model. """ super().__init__(max_kernel_size=max_kernel_size, node_name=node_name) self._max_num_params = params.max_num_kernels + self._original_padding_value = original_padding_value # Create kernel_size_list based on max module kernel size - self._kernel_size_list = self.generate_kernel_size_list(max_kernel_size) + self._kernel_size_list = self.generate_kernel_size_list(max_kernel_size, original_padding_value) self._ks_set = list(set(self.kernel_size_list)) self._ks_set.sort() @@ -193,11 +201,14 @@ def __init__(self, max_kernel_size: KernelSizeType, node_name: NNCFNodeName, par for name, param in scale_params.items(): self.register_parameter(name, param) - def generate_kernel_size_list(self, max_kernel_size: KernelSizeType) -> List[KernelSizeType]: + def generate_kernel_size_list( + self, max_kernel_size: KernelSizeType, original_padding_value: int + ) -> List[KernelSizeType]: """ Generates list of available kernel size values. :param max_kernel_size: maximum value of kernel size, it's supposed to be odd + :param original_padding_value: the padding value used in the original model. :return: list of kernel size values. 
""" DEFAULT_KERNEL_SIZE_STEP = 2 @@ -206,7 +217,7 @@ def generate_kernel_size_list(self, max_kernel_size: KernelSizeType) -> List[Ker return [1] kernel = max_kernel_size ks_list = [] - while kernel > 1: + while kernel >= max(max_kernel_size - 2 * original_padding_value, 3): ks_list.append(kernel) kernel -= DEFAULT_KERNEL_SIZE_STEP if self._max_num_params == len(ks_list): @@ -281,6 +292,10 @@ def _get_active_filter(self, kernel_size, weight): filters = start_filter return filters + @property + def original_padding_value(self) -> int: + return self._original_padding_value + class ElasticKernelPaddingAdjustment: """ @@ -293,7 +308,10 @@ def __init__(self, elastic_k_w_op: ElasticKernelConv2DOp): self._elastic_k_w_op = elastic_k_w_op def __call__(self, _) -> int: - return self._elastic_k_w_op.get_active_kernel_size() // 2 + shift_padding_value = ( + self._elastic_k_w_op.max_kernel_size - self._elastic_k_w_op.get_active_kernel_size() + ) // 2 + return self._elastic_k_w_op.original_padding_value - shift_padding_value class ElasticKernelInputForExternalPadding: @@ -464,7 +482,8 @@ def build(self, target_model: NNCFNetwork) -> ElasticKernelHandler: layer_attrs = node.layer_attributes assert isinstance(layer_attrs, ConvolutionLayerAttributes), "Conv2D can have elastic kernel only" max_kernel_size = layer_attrs.kernel_size[0] - elastic_kernel_op = ElasticKernelConv2DOp(max_kernel_size, node_name, self._params) + original_padding_values = layer_attrs.padding_values[0] + elastic_kernel_op = ElasticKernelConv2DOp(max_kernel_size, node_name, self._params, original_padding_values) elastic_kernel_op.to(device) update_conv_params_op = UpdateWeight(elastic_kernel_op) transformation_commands.append( diff --git a/nncf/experimental/torch/nas/bootstrapNAS/elasticity/elastic_width.py b/nncf/experimental/torch/nas/bootstrapNAS/elasticity/elastic_width.py index 1311ddc80b3..dfb9f49e430 100644 --- a/nncf/experimental/torch/nas/bootstrapNAS/elasticity/elastic_width.py +++ b/nncf/experimental/torch/nas/bootstrapNAS/elasticity/elastic_width.py @@ -260,7 +260,7 @@ def __init__( fixed_width_list.sort(reverse=True) if fixed_width_list[0] > max_width: raise RuntimeError( - f"Width list for {node_name} " f"contains invalid values: {fixed_width_list}, {max_width}" + f"Width list for {node_name} contains invalid values: {fixed_width_list}, {max_width}" ) if fixed_width_list[0] != max_width: raise RuntimeError(f"Max width for {node_name} is not aligned with pre-trained model") @@ -638,7 +638,7 @@ def activate_subnet_for_config(self, config: ElasticWidthConfig) -> None: :param config: map of pruning group id to width value """ for node in self._propagation_graph.get_all_nodes(): - node.data.pop("output_mask", None) + node.attributes.pop("output_mask", None) names_of_processed_nodes = set() for cluster_id, width in config.items(): @@ -649,7 +649,7 @@ def activate_subnet_for_config(self, config: ElasticWidthConfig) -> None: max_width = elastic_width_info.elastic_op.max_width device = get_model_device(self._target_model) mask = self._width_to_mask(width, max_width, device) - node.data["output_mask"] = mask + node.attributes["output_mask"] = mask elastic_width_info.elastic_op.set_active_width(width) names_of_processed_nodes.add(node_id) @@ -686,8 +686,8 @@ def activate_subnet_for_config(self, config: ElasticWidthConfig) -> None: break for previous in previous_nodes: if "output_mask" in previous.data: - if previous.data["output_mask"] is not None: - input_masks.append(previous.data["output_mask"]) + if 
previous.attributes["output_mask"] is not None: + input_masks.append(previous.attributes["output_mask"]) input_masks = [i for i in input_masks if i] else: nodes_to_check.append(previous) @@ -764,7 +764,7 @@ def reorganize_weights(self) -> None: Reorder output filters in descending order of their importance. """ for node in self._propagation_graph.get_all_nodes(): - node.data.pop("output_mask", None) + node.attributes.pop("output_mask", None) # 1. Calculate filter importance for all groups of prunable layers for group in self._pruned_module_groups_info.get_all_clusters(): @@ -787,7 +787,7 @@ def reorganize_weights(self) -> None: # 1.2 Setup reorder indexes as output mask to reorganize filters for minfo in group.elements: node = self._propagation_graph.get_node_by_id(minfo.nncf_node_id) - node.data["output_mask"] = PTNNCFTensor(reorder_indexes) + node.attributes["output_mask"] = PTNNCFTensor(reorder_indexes) # 2. Propagating masks across the graph reorder_algo = FilterReorderingAlgorithm( @@ -809,9 +809,9 @@ def find_pairs_of_nodes_with_different_width(self, pairs_of_nodes: List[Tuple[st pair_indexes = [] for idx, (start_node_name, end_node_name) in enumerate(pairs_of_nodes): start_node = self._propagation_graph.get_node_by_name(start_node_name) - start_mask = start_node.data["output_mask"] + start_mask = start_node.attributes["output_mask"] end_node = self._propagation_graph.get_node_by_name(end_node_name) - end_mask = end_node.data["output_mask"] + end_mask = end_node.attributes["output_mask"] all_start_output_shapes = self._propagation_graph.get_output_shapes_for_node(start_node_name) start_output_shape = list(OrderedDict.fromkeys(all_start_output_shapes)) diff --git a/nncf/experimental/torch/nas/bootstrapNAS/elasticity/multi_elasticity_handler.py b/nncf/experimental/torch/nas/bootstrapNAS/elasticity/multi_elasticity_handler.py index 78f8c0ee4d1..4beca169579 100644 --- a/nncf/experimental/torch/nas/bootstrapNAS/elasticity/multi_elasticity_handler.py +++ b/nncf/experimental/torch/nas/bootstrapNAS/elasticity/multi_elasticity_handler.py @@ -198,6 +198,19 @@ def get_state(self) -> Dict[str, Any]: self._state_names.IS_HANDLER_ENABLED_MAP: is_handler_enabled_map, } + def get_search_space(self) -> Dict[str, Any]: + """ + Returns a dictionary with Python data structures (dict, list, tuple, str, int, float, True, False, None) that + represents the search space of the super-network. + + :return: search space + """ + active_handlers = {dim: self._handlers[dim] for dim in self._handlers if self._is_handler_enabled_map[dim]} + space = {} + for handler_id, handler in active_handlers.items(): + space[handler_id.value] = handler.get_search_space() + return space + def enable_all(self) -> None: """ Enables all elasticities for being selected on sampling subnets. 
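The `get_search_space()` entry point added above lets external tooling inspect the sub-network design space without activating any subnet. A small sketch is shown below; the `elasticity_ctrl` object is assumed to come from a resumed BootstrapNAS checkpoint, and the exact structure of the returned dictionary depends on which elasticity dimensions are enabled.

```python
# Sketch: inspect the search space exposed by a trained super-network.
# `elasticity_ctrl` is assumed to be an ElasticityController restored from a checkpoint.
handler = elasticity_ctrl.multi_elasticity_handler

for dim_name, dim_space in handler.get_search_space().items():
    # Keys are the enabled elasticity dimensions, e.g. "width" or "depth";
    # values are plain Python structures (dicts/lists) describing the selectable options.
    print(dim_name, dim_space)
```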
diff --git a/nncf/experimental/torch/nas/bootstrapNAS/elasticity/visualization.py b/nncf/experimental/torch/nas/bootstrapNAS/elasticity/visualization.py index 5b634d30f4f..97e6e11bc36 100644 --- a/nncf/experimental/torch/nas/bootstrapNAS/elasticity/visualization.py +++ b/nncf/experimental/torch/nas/bootstrapNAS/elasticity/visualization.py @@ -30,9 +30,7 @@ class SubnetGraph: def __init__(self, compression_graph: PTNNCFGraph, multi_elasticity_handler: MultiElasticityHandler): # TODO: visualize other elastic dimension: depth, kernel (ticket 76870) self._width_graph = compression_graph.get_graph_for_structure_analysis(extended=True) - for node_key in compression_graph.get_all_node_keys(): - compression_node = compression_graph.get_node_by_key(node_key) - + for node_key, compression_node in compression_graph.nodes.items(): operator_name = self._get_operator_name(compression_node, multi_elasticity_handler.width_handler) metatype = compression_node.metatype @@ -64,7 +62,7 @@ def _get_operator_name(compressed_node: NNCFNode, width_handler: ElasticWidthHan input_widths = None if input_masks: input_widths = [ElasticWidthHandler.mask_to_width(input_mask) for input_mask in input_masks] - output_width = ElasticWidthHandler.mask_to_width(node.data["output_mask"]) + output_width = ElasticWidthHandler.mask_to_width(node.attributes["output_mask"]) if input_widths: IW = None diff --git a/nncf/experimental/torch/nas/bootstrapNAS/search/evaluator_handler.py b/nncf/experimental/torch/nas/bootstrapNAS/search/evaluator_handler.py index 3eb5cb2111a..7ebfbb3f71c 100644 --- a/nncf/experimental/torch/nas/bootstrapNAS/search/evaluator_handler.py +++ b/nncf/experimental/torch/nas/bootstrapNAS/search/evaluator_handler.py @@ -1,3 +1,14 @@ +# Copyright (c) 2023 Intel Corporation +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ from typing import NoReturn, Optional, Tuple, TypeVar from nncf.common.logging import nncf_logger diff --git a/nncf/experimental/torch/nas/bootstrapNAS/search/search.py b/nncf/experimental/torch/nas/bootstrapNAS/search/search.py index e43d2d36ebe..a20faf6eb43 100644 --- a/nncf/experimental/torch/nas/bootstrapNAS/search/search.py +++ b/nncf/experimental/torch/nas/bootstrapNAS/search/search.py @@ -18,9 +18,10 @@ import torch from pymoo.algorithms.moo.nsga2 import NSGA2 from pymoo.core.problem import Problem -from pymoo.factory import get_crossover -from pymoo.factory import get_mutation -from pymoo.factory import get_sampling +from pymoo.operators.crossover.sbx import SBX +from pymoo.operators.mutation.pm import PM +from pymoo.operators.repair.rounding import RoundingRepair +from pymoo.operators.sampling.rnd import IntegerRandomSampling from pymoo.optimize import minimize from torch.utils.data.dataloader import DataLoader from torch.utils.tensorboard import SummaryWriter @@ -28,6 +29,7 @@ from nncf import NNCFConfig from nncf.common.initialization.batchnorm_adaptation import BatchnormAdaptationAlgorithm from nncf.common.logging import nncf_logger +from nncf.common.plotting import noninteractive_plotting from nncf.common.utils.decorators import skip_if_dependency_unavailable from nncf.common.utils.os import safe_open from nncf.config.extractors import get_bn_adapt_algo_kwargs @@ -46,6 +48,16 @@ ValFnType = Callable[[TModel, DataLoaderType], float] +class FixIntegerRandomSampling(IntegerRandomSampling): + """ + Wrapper for the IntegerRandomSampling with the fix for https://github.com/anyoptimization/pymoo/issues/388. + """ + + def _do(self, problem, n_samples, **kwargs): + n, (xl, xu) = problem.n_var, problem.bounds() + return np.column_stack([np.random.randint(xl[k], xu[k] + 1, size=(n_samples)) for k in range(n)]) + + class EvolutionaryAlgorithms(Enum): NSGA2 = "NSGA2" @@ -205,15 +217,21 @@ def __init__( if evo_algo == EvolutionaryAlgorithms.NSGA2.value: self._algorithm = NSGA2( pop_size=self.search_params.population, - sampling=get_sampling("int_lhs"), - crossover=get_crossover( - "int_sbx", prob=self.search_params.crossover_prob, eta=self.search_params.crossover_eta + sampling=FixIntegerRandomSampling(), + crossover=SBX( + prob=self.search_params.crossover_prob, + eta=self.search_params.crossover_eta, + vtype=float, + repair=RoundingRepair(), ), - mutation=get_mutation( - "int_pm", prob=self.search_params.mutation_prob, eta=self.search_params.mutation_eta + mutation=PM( + prob=self.search_params.mutation_prob, + eta=self.search_params.mutation_eta, + vtype=float, + repair=RoundingRepair(), ), eliminate_duplicates=True, - save_history=True, + save_history=False, ) else: raise NotImplementedError(f"Evolutionary Search Algorithm {evo_algo} not implemented") @@ -232,7 +250,7 @@ def __init__( self._problem = None self.checkpoint_save_dir = None - self.type_var = np.int + self.type_var = int @property def evaluator_handlers(self) -> List[BaseEvaluatorHandler]: @@ -280,7 +298,7 @@ def vars_upper(self) -> List[float]: return self._vars_upper @property - def num_vars(self) -> float: + def num_vars(self) -> int: """ Number of design variables used by the search algorithm. 
:return: @@ -362,6 +380,7 @@ def get_macs_for_active_subnet() -> float: ("n_gen", int(self.search_params.num_evals / self.search_params.population)), seed=self.search_params.seed, # save_history=True, + copy_algorithm=False, verbose=self._verbose, ) @@ -391,45 +410,46 @@ def visualize_search_progression(self, filename="search_progression") -> NoRetur """ import matplotlib.pyplot as plt - plt.figure() - colormap = plt.cm.get_cmap("viridis") - col = range(int(self.search_params.num_evals / self.search_params.population)) - for i in range(0, len(self.search_records), self.search_params.population): - c = [col[int(i / self.search_params.population)]] * len( - self.search_records[i : i + self.search_params.population] - ) - plt.scatter( - [abs(row[2]) for row in self.search_records][i : i + self.search_params.population], - [abs(row[4]) for row in self.search_records][i : i + self.search_params.population], - s=9, - c=c, - alpha=0.5, - marker="D", - cmap=colormap, - ) - plt.scatter( - *tuple(abs(ev.input_model_value) for ev in self.evaluator_handlers), - marker="s", - s=120, - color="blue", - label="Input Model", - edgecolors="black", - ) - if None not in self.best_vals: + with noninteractive_plotting(): + plt.figure() + colormap = plt.cm.get_cmap("viridis") + col = range(int(self.search_params.num_evals / self.search_params.population)) + for i in range(0, len(self.search_records), self.search_params.population): + c = [col[int(i / self.search_params.population)]] * len( + self.search_records[i : i + self.search_params.population] + ) + plt.scatter( + [abs(row[2]) for row in self.search_records][i : i + self.search_params.population], + [abs(row[4]) for row in self.search_records][i : i + self.search_params.population], + s=9, + c=c, + alpha=0.5, + marker="D", + cmap=colormap, + ) plt.scatter( - *tuple(abs(val) for val in self.best_vals), - marker="o", + *tuple(abs(ev.input_model_value) for ev in self.evaluator_handlers), + marker="s", s=120, - color="yellow", - label="BootstrapNAS A", + color="blue", + label="Input Model", edgecolors="black", - linewidth=2.5, ) - plt.legend() - plt.title("Search Progression") - plt.xlabel(self.efficiency_evaluator_handler.name) - plt.ylabel(self.accuracy_evaluator_handler.name) - plt.savefig(f"{self._log_dir}/{filename}.png") + if None not in self.best_vals: + plt.scatter( + *tuple(abs(val) for val in self.best_vals), + marker="o", + s=120, + color="yellow", + label="BootstrapNAS A", + edgecolors="black", + linewidth=2.5, + ) + plt.legend() + plt.title("Search Progression") + plt.xlabel(self.efficiency_evaluator_handler.name) + plt.ylabel(self.accuracy_evaluator_handler.name) + plt.savefig(f"{self._log_dir}/{filename}.png") def save_evaluators_state(self) -> NoReturn: """ diff --git a/nncf/experimental/torch/nas/bootstrapNAS/search/supernet.py b/nncf/experimental/torch/nas/bootstrapNAS/search/supernet.py new file mode 100644 index 00000000000..d64a3269aea --- /dev/null +++ b/nncf/experimental/torch/nas/bootstrapNAS/search/supernet.py @@ -0,0 +1,158 @@ +# Copyright (c) 2023 Intel Corporation +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +from typing import Any, Callable, Dict, List, Tuple, TypeVar + +import torch + +from nncf import NNCFConfig +from nncf.experimental.torch.nas.bootstrapNAS.elasticity.elasticity_controller import ElasticityController +from nncf.experimental.torch.nas.bootstrapNAS.elasticity.multi_elasticity_handler import SubnetConfig +from nncf.experimental.torch.nas.bootstrapNAS.training.model_creator_helpers import resume_compression_from_state +from nncf.torch.checkpoint_loading import load_state +from nncf.torch.model_creation import create_nncf_network +from nncf.torch.nncf_network import NNCFNetwork + +TModel = TypeVar("TModel") +ValFnType = Callable[[NNCFNetwork, Any], Any] + + +class TrainedSuperNet: + """ + An interface for handling pre-trained super-networks. This class can be used to quickly implement + third party solutions for subnetwork search on existing super-networks. + """ + + def __init__(self, elastic_ctrl: ElasticityController, nncf_network: NNCFNetwork): + """ + Initializes the super-network interface. + + :param elastic_ctrl: Elasticity controller to activate subnetworks + :param nncf_network: NNCFNetwork that wraps the original PyTorch model. + """ + self._m_handler = elastic_ctrl.multi_elasticity_handler + self._elasticity_ctrl = elastic_ctrl + self._model = nncf_network + + @classmethod + def from_checkpoint( + cls, + model: TModel, + nncf_config: NNCFConfig, + supernet_elasticity_path: str, + supernet_weights_path: str, + ) -> "TrainedSuperNet": + """ + Loads existing super-network weights and elasticity information, and creates the SuperNetwork interface. + + :param model: base model that was used to create the super-network. + :param nncf_config: configuration used to create the super-network. + :param supernet_elasticity_path: path to file containing state information about the super-network. + :param supernet_weights_path: trained weights to resume the super-network. + :return: SuperNetwork with wrapped functionality. + """ + nncf_network = create_nncf_network(model, nncf_config) + compression_state = torch.load(supernet_elasticity_path, map_location=torch.device(nncf_config.device)) + model, elasticity_ctrl = resume_compression_from_state(nncf_network, compression_state) + model_weights = torch.load(supernet_weights_path, map_location=torch.device(nncf_config.device)) + load_state(model, model_weights, is_resume=True) + elasticity_ctrl.multi_elasticity_handler.activate_maximum_subnet() + return TrainedSuperNet(elasticity_ctrl, model) + + def get_search_space(self) -> Dict: + """ + :return: dictionary with possible values for elastic configurations. + """ + return self._m_handler.get_search_space() + + def get_design_vars_info(self) -> Tuple[int, List[int]]: + """ + :return: number of possible values in subnet configurations and + the number of possible values for each elastic property. + """ + self._m_handler.get_design_vars_info() + + def eval_subnet_with_design_vars(self, design_config: List, eval_fn: ValFnType, **kwargs) -> Any: + """ + + :return: the value produced by the user's function to evaluate the subnetwork. + """ + self._m_handler.activate_subnet_for_config(self._m_handler.get_config_from_pymoo(design_config)) + return eval_fn(self._model, **kwargs) + + def eval_active_subnet(self, eval_fn: ValFnType, **kwargs) -> Any: + """ + :param eval_fn: user's function to evaluate the active subnetwork. 
+ :return: value of the user's function used to evaluate the subnetwork. + """ + return eval_fn(self._model, **kwargs) + + def eval_subnet(self, config: SubnetConfig, eval_fn: ValFnType, **kwargs) -> Any: + """ + :param config: subnetwork configuration. + :param eval_fn: user's function to evaluate the active subnetwork. + :return: value of the user's function used to evaluate the subnetwork. + """ + self.activate_config(config) + return self.eval_active_subnet(eval_fn, **kwargs) + + def activate_config(self, config: SubnetConfig) -> None: + """ + :param config: subnetwork configuration to activate. + """ + self._m_handler.activate_subnet_for_config(config) + + def activate_maximal_subnet(self) -> None: + """ + Activates the maximal subnetwork in the super-network. + """ + self._m_handler.activate_maximum_subnet() + + def activate_minimal_subnet(self) -> None: + """ + Activates the minimal subnetwork in the super-network. + """ + self._m_handler.activate_minimum_subnet() + + def get_active_config(self) -> SubnetConfig: + """ + :return: the active configuration. + """ + return self._m_handler.get_active_config() + + def get_macs_for_active_config(self) -> float: + """ + :return: MACs of active subnet. + """ + return self._m_handler.count_flops_and_weights_for_active_subnet()[0] / 2e6 + + def export_active_subnet_to_onnx(self, filename: str = "subnet") -> None: + """ + Exports the active subnetwork to ONNX format. + + :param filename: name of the output file. + """ + self._elasticity_ctrl.export_model(f"{filename}.onnx") + + def get_config_from_pymoo(self, pymoo_config: List) -> SubnetConfig: + """ + Converts a Pymoo subnetwork configuration into a SubnetConfig. + + :param pymoo_config: subnetwork configuration in Pymoo format. + :return: subnetwork configuration in SubnetConfig format. + """ + return self._m_handler.get_config_from_pymoo(pymoo_config) + + def get_active_subnet(self) -> NNCFNetwork: + """ + :return: the nncf network with the current active configuration. 
+ """ + return self._model diff --git a/nncf/experimental/torch/nas/bootstrapNAS/training/progressive_shrinking_builder.py b/nncf/experimental/torch/nas/bootstrapNAS/training/progressive_shrinking_builder.py index 56c61225c9a..9191bce742a 100644 --- a/nncf/experimental/torch/nas/bootstrapNAS/training/progressive_shrinking_builder.py +++ b/nncf/experimental/torch/nas/bootstrapNAS/training/progressive_shrinking_builder.py @@ -20,8 +20,11 @@ ) from nncf.experimental.torch.nas.bootstrapNAS.training.scheduler import NASSchedulerParams from nncf.torch.algo_selector import PT_COMPRESSION_ALGORITHMS +from nncf.torch.algo_selector import ZeroCompressionLoss from nncf.torch.compression_method_api import PTCompressionAlgorithmBuilder from nncf.torch.graph.transformations.layout import PTTransformationLayout +from nncf.torch.knowledge_distillation.knowledge_distillation_loss import KnowledgeDistillationLoss +from nncf.torch.model_creation import create_compression_algorithm_builder from nncf.torch.nncf_network import NNCFNetwork @@ -88,6 +91,7 @@ def _get_algo_specific_config_section(self) -> Dict: def _build_controller(self, model: NNCFNetwork) -> "ProgressiveShrinkingController": elasticity_ctrl = self._elasticity_builder.build_controller(model) schedule_params = NASSchedulerParams.from_config(self._algo_config.get("schedule", {})) + compression_loss_func = self._build_compression_loss_function(model) return ProgressiveShrinkingController( model, elasticity_ctrl, @@ -95,8 +99,19 @@ def _build_controller(self, model: NNCFNetwork) -> "ProgressiveShrinkingControll self._progressivity_of_elasticity, schedule_params, self._lr_schedule_config, + compression_loss_func, ) + def _build_compression_loss_function(self, model: NNCFNetwork) -> "PTCompressionLoss": + compression_builder = create_compression_algorithm_builder(self._algo_config) + compressed_model = compression_builder.apply_to(model) + compression_ctrl = compression_builder.build_controller(compressed_model) + assert type(compression_ctrl.loss) in [ + ZeroCompressionLoss, + KnowledgeDistillationLoss, + ], "Currently only knowledge distillation loss is supported." + return compression_ctrl.loss + def _get_transformation_layout(self, target_model: NNCFNetwork) -> PTTransformationLayout: available_elasticity_dims = self._elasticity_builder.get_available_elasticity_dims() self.check_elasticity_dims_consistency(available_elasticity_dims, self._progressivity_of_elasticity) diff --git a/nncf/experimental/torch/nas/bootstrapNAS/training/progressive_shrinking_controller.py b/nncf/experimental/torch/nas/bootstrapNAS/training/progressive_shrinking_controller.py index 88e3a77f389..a447ac57187 100644 --- a/nncf/experimental/torch/nas/bootstrapNAS/training/progressive_shrinking_controller.py +++ b/nncf/experimental/torch/nas/bootstrapNAS/training/progressive_shrinking_controller.py @@ -8,7 +8,7 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
-from typing import Any, Dict, List, NoReturn +from typing import Any, Callable, Dict, List, NoReturn from nncf.api.compression import CompressionLoss from nncf.api.compression import CompressionScheduler @@ -25,7 +25,6 @@ from nncf.experimental.torch.nas.bootstrapNAS.training.scheduler import BootstrapNASScheduler from nncf.experimental.torch.nas.bootstrapNAS.training.scheduler import NASSchedulerParams from nncf.experimental.torch.nas.bootstrapNAS.training.stage_descriptor import StageDescriptor -from nncf.torch.algo_selector import ZeroCompressionLoss from nncf.torch.nncf_network import NNCFNetwork @@ -53,13 +52,14 @@ def __init__( progressivity_of_elasticity: List[ElasticityDim], schedule_params: NASSchedulerParams, lr_schedule_config: Dict[str, Any], + compression_loss_func: Callable, ): super().__init__(target_model) self._elasticity_ctrl = elasticity_ctrl self._bn_adaptation = bn_adaptation self._progressivity_of_elasticity = progressivity_of_elasticity self._target_model = target_model - self._loss = ZeroCompressionLoss(next(target_model.parameters()).device) + self._loss = compression_loss_func self._available_elasticity_dims = self.multi_elasticity_handler.get_available_elasticity_dims() self._lr_schedule_config = lr_schedule_config self._scheduler = BootstrapNASScheduler( diff --git a/nncf/experimental/torch/pruning/operations.py b/nncf/experimental/torch/pruning/operations.py index ce77dc53616..3c12de85cf6 100644 --- a/nncf/experimental/torch/pruning/operations.py +++ b/nncf/experimental/torch/pruning/operations.py @@ -1,16 +1,13 @@ -""" - Copyright (c) 2022 Intel Corporation - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - http://www.apache.org/licenses/LICENSE-2.0 - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. -""" - +# Copyright (c) 2023 Intel Corporation +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
from nncf.common.graph.operator_metatypes import UnknownMetatype from nncf.common.pruning.utils import PruningOperationsMetatypeRegistry @@ -62,6 +59,7 @@ from nncf.torch.graph.operator_metatypes import PTSILUMetatype from nncf.torch.graph.operator_metatypes import PTSoftmaxMetatype from nncf.torch.graph.operator_metatypes import PTSplitMetatype +from nncf.torch.graph.operator_metatypes import PTSqueezeMetatype from nncf.torch.graph.operator_metatypes import PTSubMetatype from nncf.torch.graph.operator_metatypes import PTSumMetatype from nncf.torch.graph.operator_metatypes import PTTanhMetatype @@ -95,6 +93,7 @@ class PTIdentityMaskForwardPruningOp(IdentityMaskForwardPruningOp): PTSoftmaxMetatype, PTAvgPool2dMetatype, PTMaxPool2dMetatype, + PTMeanMetatype, PTDropoutMetatype, PTSILUMetatype, PTPowerMetatype, @@ -127,7 +126,7 @@ class PTElementwisePruningOp(ElementwisePruningOp): @PT_EXPERIMENTAL_PRUNING_OPERATOR_METATYPES.register("stop_propagation_ops") class PTStopMaskForwardPruningOp(StopMaskForwardPruningOp): - subtypes = [PTMeanMetatype, PTMaxMetatype, PTMinMetatype, PTSumMetatype, UnknownMetatype] + subtypes = [PTMaxMetatype, PTMinMetatype, PTSumMetatype, UnknownMetatype] @PT_EXPERIMENTAL_PRUNING_OPERATOR_METATYPES.register("transpose") @@ -137,7 +136,7 @@ class PTTransposePruningOp(TransposePruningOp): @PT_EXPERIMENTAL_PRUNING_OPERATOR_METATYPES.register("reshape") class PTReshape(ReshapePruningOp): - subtypes = [PTReshapeMetatype] + subtypes = [PTReshapeMetatype, PTSqueezeMetatype] @PT_EXPERIMENTAL_PRUNING_OPERATOR_METATYPES.register("split") diff --git a/nncf/experimental/torch/quantization/quantize_model.py b/nncf/experimental/torch/quantization/quantize_model.py index 650de40c217..c97ab9c9675 100644 --- a/nncf/experimental/torch/quantization/quantize_model.py +++ b/nncf/experimental/torch/quantization/quantize_model.py @@ -13,7 +13,6 @@ import torch -from nncf.common.logging import nncf_logger from nncf.common.quantization.structs import QuantizationPreset from nncf.data import Dataset from nncf.parameters import ModelType @@ -104,14 +103,6 @@ def quantize_impl( if target_device == TargetDevice.CPU_SPR: raise RuntimeError("target_device == CPU_SPR is not supported") - if advanced_parameters is None: - advanced_parameters = AdvancedQuantizationParameters() - if not advanced_parameters.disable_bias_correction: - nncf_logger.warning( - "Bias correction and fast bias correction algorithms are not supported by Torch backend yet." 
- ) - advanced_parameters.disable_bias_correction = True - nncf_network = create_nncf_network(model.eval(), calibration_dataset) quantization_algorithm = PostTrainingQuantization( @@ -124,9 +115,9 @@ def quantize_impl( advanced_parameters=advanced_parameters, ) - quantized_model = quantization_algorithm.apply(nncf_network, dataset=calibration_dataset) - - # TODO (asuslov): quantized_model = quantized_model.strip() + quantized_model = quantization_algorithm.apply( + nncf_network, nncf_network.nncf.get_graph(), dataset=calibration_dataset + ) quantized_model.nncf.disable_dynamic_graph_building() diff --git a/nncf/experimental/openvino/__init__.py b/nncf/experimental/torch/replace_custom_modules/__init__.py similarity index 100% rename from nncf/experimental/openvino/__init__.py rename to nncf/experimental/torch/replace_custom_modules/__init__.py diff --git a/nncf/experimental/torch/replace_custom_modules/timm_custom_modules.py b/nncf/experimental/torch/replace_custom_modules/timm_custom_modules.py new file mode 100644 index 00000000000..9628466fb49 --- /dev/null +++ b/nncf/experimental/torch/replace_custom_modules/timm_custom_modules.py @@ -0,0 +1,171 @@ +# Copyright (c) 2023 Intel Corporation +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from copy import deepcopy +from typing import Optional + +from timm.layers import Linear +from timm.layers.norm_act import BatchNormAct2d +from timm.layers.norm_act import GroupNormAct +from timm.layers.norm_act import LayerNormAct +from torch import nn + +from nncf.torch.nncf_module_replacement import replace_modules_by_nncf_modules + + +def _copy_parameters(src_module: nn.Module, trg_module: nn.Module): + """ + Copies parameters of a source module to a target module. + :param src_module: The source module to copy parameters from. + :param trg_module: The target module to copy parameters to. + """ + for name, param in src_module.named_parameters(): + setattr(trg_module, name, deepcopy(param)) + + +def _convert_linear(module: Linear) -> nn.Linear: + """ + Convert Linear module to torch.nn.Linear. + + param module: The module to convert. + :return nn.Linear: Converted module. + """ + with_bias = module.bias is not None + new_ln = nn.Linear( + in_features=module.in_features, + out_features=module.out_features, + bias=with_bias, + device=module.weight.device, + dtype=module.weight.dtype, + ) + _copy_parameters(module, new_ln) + return new_ln + + +def _convert_batch_norm_act_2d(module: BatchNormAct2d) -> nn.Sequential: + """ + Converts a BatchNormAct2d module to an nn.Sequential module that contains nn.BatchNorm2d, + followed by dropout and activation functions. + + :param module: The module to convert. + :return nn.Sequential: A new nn.Sequential module containing nn.BatchNorm2d, dropout, and activation functions. 
+ """ + new_bn = nn.BatchNorm2d( + num_features=module.num_features, + eps=module.eps, + momentum=module.momentum, + affine=module.affine, + track_running_stats=module.track_running_stats, + device=module.weight.device, + dtype=module.weight.dtype, + ) + _copy_parameters(module, new_bn) + new_bn.running_mean = deepcopy(module.running_mean) + new_bn.running_var = deepcopy(module.running_var) + + new_drop = deepcopy(module.drop) + new_act = deepcopy(module.act) + return nn.Sequential(new_bn, new_drop, new_act) + + +def _convert_group_norm_act(module: GroupNormAct) -> nn.Sequential: + """ + Converts a GroupNormAct module to an nn.Sequential module that contains nn.GroupNorm, + followed by dropout and activation functions. + + :param module: The module to convert. + :return nn.Sequential: A new nn.Sequential module containing nn.GroupNorm, dropout, and activation functions. + """ + new_gn = nn.GroupNorm( + num_groups=module.num_groups, + num_channels=module.num_channels, + eps=module.eps, + affine=module.eps, + device=module.weight.device, + dtype=module.weight.dtype, + ) + _copy_parameters(module, new_gn) + new_drop = deepcopy(module.drop) + new_act = deepcopy(module.act) + return nn.Sequential(new_gn, new_drop, new_act) + + +def _convert_layer_norm_act(module: LayerNormAct) -> nn.Sequential: + """ + Converts a LayerNormAct module to an nn.Sequential module that contains nn.LayerNorm, + followed by dropout and activation functions. + + :param module: The module to convert. + :return nn.Sequential: A new nn.Sequential module containing nn.LayerNorm, dropout, and activation functions. + """ + new_norm = nn.LayerNorm( + normalized_shape=module.normalized_shape, + eps=module.eps, + elementwise_affine=module.elementwise_affine, + device=module.weight.device, + dtype=module.weight.dtype, + ) + _copy_parameters(module, new_norm) + new_drop = deepcopy(module.drop) + new_act = deepcopy(module.act) + return nn.Sequential(new_norm, new_drop, new_act) + + +CONVERT_FN_MAP = { + BatchNormAct2d: _convert_batch_norm_act_2d, + GroupNormAct: _convert_group_norm_act, + LayerNormAct: _convert_layer_norm_act, + Linear: _convert_linear, +} + + +def is_timm_custom_module(module: nn.Module): + """ + Check that module is timm custom module and can be converted. + + :param module: The module. + :return: `True` if module is custom module, otherwise `False` + """ + return type(module) in CONVERT_FN_MAP + + +def convert_timm_custom_modules(module: nn.Module) -> Optional[nn.Module]: + """ + Replaces the given module with a PyTorch native module if possible. + + :param module: The module to replace. + :return: The replaced module if replacement is possible, None otherwise. + """ + module_type = type(module) + convert_fn = CONVERT_FN_MAP.get(module_type) + if convert_fn is None: + raise TypeError( + f"The type of module {module_type} should be one of the following: {list(CONVERT_FN_MAP.keys())}" + ) + return convert_fn(module) + + +def replace_timm_custom_modules_with_torch_native(model: nn.Module) -> nn.Module: + """ + Replace custom module that can not be operated by NNCF to torch native modules. + + :param model: The target model. + :return nn.Module: Transformed model. 
+ """ + model_copy = deepcopy(model) + + model_copy, _ = replace_modules_by_nncf_modules( + model=model_copy, + custom_replacer=convert_timm_custom_modules, + predicate_fn=is_timm_custom_module, + ) + + return model_copy diff --git a/nncf/experimental/torch/search_building_blocks/search_graph.py b/nncf/experimental/torch/search_building_blocks/search_graph.py index a25c4230efe..a2820130f87 100644 --- a/nncf/experimental/torch/search_building_blocks/search_graph.py +++ b/nncf/experimental/torch/search_building_blocks/search_graph.py @@ -15,6 +15,7 @@ import networkx as nx from nncf.common.graph.graph import NNCFGraph +from nncf.common.graph.graph import NNCFNode from nncf.common.graph.graph import NNCFNodeName from nncf.common.graph.graph_matching import find_subgraphs_matching_pattern from nncf.common.graph.patterns.manager import PatternsManager @@ -50,21 +51,21 @@ def is_dummy(self) -> bool: @property def node_name(self) -> NNCFNodeName: - return self.data.get(NNCFGraph.NODE_NAME_ATTR) + return self.data.get(NNCFNode.NODE_NAME_ATTR) @property def node_type(self) -> str: """ Returns type of node. """ - return self.data.get(NNCFGraph.NODE_TYPE_ATTR) + return self.data.get(NNCFNode.NODE_TYPE_ATTR) @property def layer_name(self) -> str: """ Returns the name of the layer to which the node corresponds. """ - return self.data.get(NNCFGraph.LAYER_NAME_ATTR) + return self.data.get(NNCFNode.LAYER_NAME_ATTR) @property def main_id(self) -> int: diff --git a/nncf/experimental/torch/sparsity/movement/MovementSparsity.md b/nncf/experimental/torch/sparsity/movement/MovementSparsity.md index 986922c4219..39dae627c3f 100644 --- a/nncf/experimental/torch/sparsity/movement/MovementSparsity.md +++ b/nncf/experimental/torch/sparsity/movement/MovementSparsity.md @@ -1,4 +1,4 @@ -### Movement Sparsity +# Movement Sparsity [Movement Pruning (Sanh et al., 2020)](https://arxiv.org/pdf/2005.07683.pdf) is an effective learning-based unstructured sparsification algorithm, especially for Transformer-based models in transfer learning setup. [Lagunas et al., 2021](https://arxiv.org/pdf/2109.04838.pdf) extends the algorithm to sparsify by block grain size, enabling structured sparsity which can achieve device-agnostic inference acceleration. @@ -6,7 +6,7 @@ NNCF implements both unstructured and structured movement sparsification. The im For usage explanation of the algorithm, let's start with an example configuration below which is targeted for BERT models. -**Example configuration of Movement Sparsity for BERT models** +## Example configuration of Movement Sparsity for BERT models ```json { @@ -39,20 +39,20 @@ This diagram is the sparsity level of BERT-base model over the optimization life 2. **Structured masking and fine-tuning**: At the end of first stage, i.e. `warmup_end_epoch`, the sparsified model cannot be accelerated without tailored HW/SW but some sparse structures can be totally discarded from the model to save compute and memory footprint. NNCF provides mechanism to achieve structured masking by `"enable_structured_masking": true`, where it automatically resolves the structured masking between dependent layers and rewinds the sparsified parameters that does not participate in acceleration for task modeling. In the example above, the sparsity level has dropped after `warmup_end_epoch` due to structured masking and the model will continue to fine-tune thereafter. 
Currently, the automatic structured masking feature was tested on **_BERT, DistilBERT, RoBERTa, MobileBERT, Wav2Vec2, Swin, ViT, CLIPVisual_** architectures defined by [Hugging Face's transformers](https://huggingface.co/docs/transformers/index). Support for other architectures is not guaranteed. Users can disable this feature by setting `"enable_structured_masking": false`, where the sparse structures at the end of first stage will be frozen and training/fine-tuning will continue on unmasked parameters. Please refer next section to realize model inference acceleration with [OpenVINO](https://docs.openvino.ai/latest/index.html) toolchain. -#### Inference Acceleration via [OpenVINO](https://docs.openvino.ai/latest/index.html) +## Inference Acceleration via [OpenVINO](https://docs.openvino.ai/latest/index.html) Optimized models are compatible with OpenVINO toolchain. Use `compression_controller.export_model("movement_sparsified_model.onnx")` to export model in onnx format. Sparsified parameters in the onnx are in value of zero. Structured sparse structures can be discarded during ONNX translation to OpenVINO IR using [Model Optimizer](https://docs.openvino.ai/latest/openvino_docs_MO_DG_Deep_Learning_Model_Optimizer_DevGuide.html) with additional option `--transform=Pruning`. Corresponding IR is compressed and deployable with [OpenVINO Runtime](https://docs.openvino.ai/latest/openvino_docs_OV_UG_OV_Runtime_User_Guide.html). To quantify inference performance improvement, both ONNX and IR can be profiled using [Benchmark Tool](https://docs.openvino.ai/latest/openvino_inference_engine_tools_benchmark_tool_README.html). -#### Getting Started +## Getting Started Please refer [optimum-intel](https://github.com/huggingface/optimum-intel/tree/main/examples/openvino) for example pipelines on image classification, question answering, etc. The repository also provides examples of joint pruning, quantization and distillation, end-to-end from NNCF optimization to compressed OpenVINO IR. -#### Known Limitation +## Known Limitation 1. Movement sparsification only supports `torch.nn.Linear` layers. 2. Automatic structured masking feature supports **BERT, DistilBERT, RoBERTa, MobileBERT, Wav2Vec2, Swin, ViT, CLIPVisual** architectures defined by [Hugging Face's transformers](https://huggingface.co/docs/transformers/index). Other similar architectures may work, but support is not guaranteed. -#### Detailed description of Movement Sparsity configuration +## Detailed description of Movement Sparsity configuration - `algorithm`: The algorithm name is "movement_sparsity". - `warmup_start_epoch` & `warmup_end_epoch`: The algorithm will conduct model weight sparsification gradually from epoch >= `warmup_start_epoch` to epoch < `warmup_end_epoch`, with epoch is zero-indexed. This span is known as sparsification warm-up (stage 1). @@ -68,7 +68,7 @@ Please refer [optimum-intel](https://github.com/huggingface/optimum-intel/tree/m - `ignored_scopes`: A string or a list of strings representing the layers to be ignored by Movement Sparsity algorithm. -#### Extra configuration in `params` section +## Extra configuration in `params` section Following arguments have been defaulted to work well out of the box. However, you can specify them for a more controlled sparsification strategy. @@ -76,7 +76,7 @@ Following arguments have been defaulted to work well out of the box. However, yo - `power`: Optional. 
The importance threshold and regularization factor follow a concave polynomial warm-up schedule where its decay factor is parameterized by `power`. Default is 3. - `steps_per_epoch`: Optional. Number of steps per epoch is needed for threshold and regularization factor scheduling. It varies by dataset size and training hyperparameters. By default, this can be automatically derived during the first epoch without any side effect, as long as `warmup_start_epoch` >= 1. Specification of `steps_per_epoch` is only required when warm-up sparsification is intended to start at the first epoch. -#### References +## References 1. Victor Sanh, Thomas Wolf, and Alexander M. Rush. 2020. [Movement Pruning: Adaptive Sparsity by Fine-Tuning]((https://arxiv.org/pdf/2005.07683.pdf)). In Advances in Neural Information Processing Systems, 33, pp. 20378-20389. 2. François Lagunas, Ella Charlaix, Victor Sanh, and Alexander M. Rush. 2021. [Block Pruning For Faster Transformers]((https://arxiv.org/pdf/2109.04838.pdf)). In Proceedings of the 2021 Conference on Empirical Methods in Natural Language Processing, pp. 10619–10629. diff --git a/nncf/experimental/torch/sparsity/movement/layers.py b/nncf/experimental/torch/sparsity/movement/layers.py index e645c834545..e1d2e52813b 100644 --- a/nncf/experimental/torch/sparsity/movement/layers.py +++ b/nncf/experimental/torch/sparsity/movement/layers.py @@ -160,7 +160,7 @@ def __init__( """ super().__init__() self.target_module_node = target_module_node - self.prune_bias = bool(target_module_node.layer_attributes.bias) + self.prune_bias = bool(target_module_node.layer_attributes.with_bias) self.frozen = frozen self.layerwise_loss_lambda = layerwise_loss_lambda self._importance_threshold = -math.inf diff --git a/nncf/experimental/torch/sparsity/movement/structured_mask_handler.py b/nncf/experimental/torch/sparsity/movement/structured_mask_handler.py index a4506be15bc..c7626dac4b4 100644 --- a/nncf/experimental/torch/sparsity/movement/structured_mask_handler.py +++ b/nncf/experimental/torch/sparsity/movement/structured_mask_handler.py @@ -391,7 +391,7 @@ def _create_structured_mask_context_groups( module_vs_sparse_module_info_map = {minfo.module: minfo for minfo in sparsified_module_info_list} pruning_producing_types = ["linear"] - nncf_graph = nncf_network.get_original_graph() + nncf_graph = nncf_network.nncf.get_original_graph() pruning_groups = get_pruning_groups( nncf_graph, PT_EXPERIMENTAL_PRUNING_OPERATOR_METATYPES, pruning_producing_types ) diff --git a/nncf/onnx/graph/metatypes/onnx_metatypes.py b/nncf/onnx/graph/metatypes/onnx_metatypes.py index c3cb5aa8e0d..f3c44a0b9ad 100644 --- a/nncf/onnx/graph/metatypes/onnx_metatypes.py +++ b/nncf/onnx/graph/metatypes/onnx_metatypes.py @@ -9,7 +9,6 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from dataclasses import dataclass from typing import List, Optional, Type import onnx @@ -17,6 +16,7 @@ from nncf.common.graph.operator_metatypes import OperatorMetatype from nncf.common.graph.operator_metatypes import OperatorMetatypeRegistry from nncf.common.hardware.opset import HWConfigOpName +from nncf.onnx.graph.onnx_graph import ONNXGraph ONNX_OPERATION_METATYPES = OperatorMetatypeRegistry("onnx_operator_metatypes") @@ -34,7 +34,7 @@ def get_subtypes(cls) -> List[Type[OperatorMetatype]]: return cls.subtypes @classmethod - def matches(cls, model: onnx.ModelProto, node: onnx.NodeProto) -> Optional[bool]: + def matches(cls, model: onnx.ModelProto, node: onnx.NodeProto) -> bool: return node.op_type in cls.op_names @classmethod @@ -50,33 +50,30 @@ def determine_subtype(cls, model: onnx.ModelProto, node: onnx.NodeProto) -> Opti return matches[0] -@dataclass -class OpWeightDef: +class ONNXOpWithWeightsMetatype(ONNXOpMetatype): """ - Contains the information about the weight and bias of the operation. + Metatype which could have weights. :param weight_channel_axis: Axis for weight per-channel quantization, meaning the number of output filters. - :param weight_port_id: Input port of the node's weight. + :param weight_port_ids: Input ports of the node's weight. If the value is None the weight_port_id should be determined dynamically. :param bias_port_id: Input port of the node's bias. If the value is None it means that the Metatype does not have bias. """ weight_channel_axis: int - weight_port_id: Optional[int] = None + weight_port_ids: Optional[List[int]] = None bias_port_id: Optional[int] = None -class ONNXOpWithWeightsMetatype(ONNXOpMetatype): - weight_definitions = None # type: OpWeightDef - - @ONNX_OPERATION_METATYPES.register() class ONNXDepthwiseConvolutionMetatype(ONNXOpWithWeightsMetatype): name = "DepthwiseConvOp" op_names = ["Conv"] hw_config_names = [HWConfigOpName.DEPTHWISECONVOLUTION] - weight_definitions = OpWeightDef(weight_channel_axis=0, weight_port_id=1, bias_port_id=2) + weight_channel_axis = 0 + weight_port_ids = [1] + bias_port_id = 2 output_channel_axis = 1 @classmethod @@ -89,7 +86,9 @@ class ONNXConvolutionMetatype(ONNXOpWithWeightsMetatype): name = "ConvOp" op_names = ["Conv"] hw_config_names = [HWConfigOpName.CONVOLUTION] - weight_definitions = OpWeightDef(weight_channel_axis=0, weight_port_id=1, bias_port_id=2) + weight_channel_axis = 0 + weight_port_ids = [1] + bias_port_id = 2 output_channel_axis = 1 subtypes = [ONNXDepthwiseConvolutionMetatype] @@ -99,17 +98,33 @@ class ONNXConvolutionTransposeMetatype(ONNXOpWithWeightsMetatype): name = "ConvTransposeOp" op_names = ["ConvTranspose"] hw_config_names = [HWConfigOpName.CONVOLUTION] - weight_definitions = OpWeightDef(weight_channel_axis=1, weight_port_id=1, bias_port_id=2) + weight_channel_axis = 1 + weight_port_ids = [1] + bias_port_id = 2 output_channel_axis = 1 @ONNX_OPERATION_METATYPES.register() -class ONNXLinearMetatype(ONNXOpWithWeightsMetatype): - name = "LinearOp" +class ONNXGemmMetatype(ONNXOpWithWeightsMetatype): + name = "GemmOp" op_names = ["Gemm"] hw_config_names = [HWConfigOpName.MATMUL] - # TODO(kshpv): ticket:95156 - weight_definitions = OpWeightDef(weight_channel_axis=0, weight_port_id=1, bias_port_id=2) + weight_channel_axis = -1 + weight_port_ids = None + bias_port_id = 2 + possible_weight_ports = [0, 1] + output_channel_axis = -1 + + +@ONNX_OPERATION_METATYPES.register() +class ONNXMatMulMetatype(ONNXOpMetatype): + name = "MatMulOp" + op_names = ["MatMul"] + hw_config_names = 
[HWConfigOpName.MATMUL] + weight_channel_axis = -1 + weight_port_ids = None + bias_port_id = 2 + possible_weight_ports = [0, 1] output_channel_axis = -1 @@ -217,7 +232,7 @@ class ONNXDivLayerMetatype(ONNXOpMetatype): @ONNX_OPERATION_METATYPES.register() -class ONNXConcatLayerMetatype(ONNXOpMetatype): +class ONNXConcatMetatype(ONNXOpMetatype): name = "ConcatOp" op_names = ["Concat"] hw_config_names = [HWConfigOpName.CONCAT] @@ -236,6 +251,13 @@ class ONNXResizeMetatype(ONNXOpMetatype): hw_config_names = [HWConfigOpName.INTERPOLATE] +@ONNX_OPERATION_METATYPES.register() +class ONNXCenterCropPadMetatype(ONNXOpMetatype): + name = "CenterCropPadOp" + op_names = ["CenterCropPad"] + hw_config_names = [HWConfigOpName.CROP] + + @ONNX_OPERATION_METATYPES.register() class ONNXReshapeMetatype(ONNXOpMetatype): name = "ReshapeOp" @@ -243,6 +265,12 @@ class ONNXReshapeMetatype(ONNXOpMetatype): hw_config_names = [HWConfigOpName.RESHAPE] +@ONNX_OPERATION_METATYPES.register() +class ONNXTileMetatype(ONNXOpMetatype): + name = "TileOp" + op_names = ["Tile"] + + @ONNX_OPERATION_METATYPES.register() class ONNXUpsampleMetatype(ONNXOpMetatype): name = "UpsampleOp" @@ -322,6 +350,20 @@ class ONNXOrMetatype(ONNXOpMetatype): hw_config_names = [HWConfigOpName.LOGICALOR] +@ONNX_OPERATION_METATYPES.register() +class ONNXMaximumMetatype(ONNXOpMetatype): + name = "MaxOp" + op_names = ["Max"] + hw_config_names = [HWConfigOpName.MAXIMUM] + + +@ONNX_OPERATION_METATYPES.register() +class ONNXMinimumMetatype(ONNXOpMetatype): + name = "MinOp" + op_names = ["Min"] + hw_config_names = [HWConfigOpName.MINIMUM] + + @ONNX_OPERATION_METATYPES.register() class ONNXFloorMetatype(ONNXOpMetatype): name = "FloorOp" @@ -350,6 +392,18 @@ class ONNXReciprocalMetatype(ONNXOpMetatype): hw_config_names = [HWConfigOpName.POWER] +@ONNX_OPERATION_METATYPES.register() +class ONNXEmbeddingMetatype(ONNXOpMetatype): + name = "EmbeddingOp" + hw_config_names = [HWConfigOpName.EMBEDDING] + weight_port_ids = [0] + weight_channel_axis = 0 + + @classmethod + def matches(cls, model: onnx.ModelProto, node: onnx.NodeProto) -> bool: + return _is_embedding(model, node) + + @ONNX_OPERATION_METATYPES.register() class ONNXLogMetatype(ONNXOpMetatype): name = "LogOp" @@ -363,28 +417,46 @@ class ONNXAbsMetatype(ONNXOpMetatype): @ONNX_OPERATION_METATYPES.register() -class ONNXScatterElementslMetatype(ONNXOpMetatype): +class ONNXScatterElementsMetatype(ONNXOpMetatype): name = "ScatterElementsOp" op_names = ["ScatterElements"] @ONNX_OPERATION_METATYPES.register() -class ONNXRoiAlignMetatype(ONNXOpMetatype): - name = "RoiAlignOp" - op_names = ["RoiAlign"] +class ONNXScatterMetatype(ONNXOpMetatype): + name = "ScatterOp" + op_names = ["Scatter"] @ONNX_OPERATION_METATYPES.register() -class ONNXMatMulMetatype(ONNXOpMetatype): - name = "MatMulOp" - op_names = ["MatMul"] - hw_config_names = [HWConfigOpName.MATMUL] +class ONNXScatterNDMetatype(ONNXOpMetatype): + name = "ScatterNDOp" + op_names = ["ScatterND"] + + +@ONNX_OPERATION_METATYPES.register() +class ONNXRoiAlignMetatype(ONNXOpMetatype): + name = "RoiAlignOp" + op_names = ["RoiAlign"] @ONNX_OPERATION_METATYPES.register() class ONNXGatherMetatype(ONNXOpMetatype): name = "GatherOp" op_names = ["Gather"] + subtypes = [ONNXEmbeddingMetatype] + + +@ONNX_OPERATION_METATYPES.register() +class ONNXGatherNDMetatype(ONNXOpMetatype): + name = "GatherNDOp" + op_names = ["GatherND"] + + +@ONNX_OPERATION_METATYPES.register() +class ONNXGatherElementsMetatype(ONNXOpMetatype): + name = "GatherElementsOp" + op_names = ["GatherElements"] 
@ONNX_OPERATION_METATYPES.register() @@ -411,7 +483,12 @@ class ONNXNonMaxSuppressionMetatype(ONNXOpMetatype): class ONNXCastMetatype(ONNXOpMetatype): name = "CastOp" op_names = ["Cast"] - hw_config_names = [HWConfigOpName.SQUEEZE] + + +@ONNX_OPERATION_METATYPES.register() +class ONNXCastLikeMetatype(ONNXOpMetatype): + name = "CastLikeOp" + op_names = ["CastLike"] @ONNX_OPERATION_METATYPES.register() @@ -420,6 +497,38 @@ class ONNXReduceMinMetatype(ONNXOpMetatype): op_names = ["ReduceMin"] +@ONNX_OPERATION_METATYPES.register() +class ONNXReduceMaxMetatype(ONNXOpMetatype): + name = "ReduceMaxOp" + op_names = ["ReduceMax"] + hw_config_names = [HWConfigOpName.REDUCEMAX] + + +@ONNX_OPERATION_METATYPES.register() +class ONNXReduceSumMetatype(ONNXOpMetatype): + name = "ReduceSumOp" + op_names = ["ReduceSum"] + hw_config_names = [HWConfigOpName.REDUCESUM] + + +class ONNXReduceL2Metatype(ONNXOpMetatype): + name = "ReduceL2Op" + op_names = ["ReduceL2"] + hw_config_names = [HWConfigOpName.REDUCEL2] + + +@ONNX_OPERATION_METATYPES.register() +class ONNXDepthToSpaceMetatype(ONNXOpMetatype): + name = "DepthToSpaceOp" + op_names = ["DepthToSpace"] + + +@ONNX_OPERATION_METATYPES.register() +class ONNXSpaceToDepthMetatype(ONNXOpMetatype): + name = "SpaceToDepthOp" + op_names = ["SpaceToDepth"] + + @ONNX_OPERATION_METATYPES.register() class ONNXReduceMeanMetatype(ONNXOpMetatype): name = "ReduceMeanOp" @@ -452,6 +561,12 @@ class ONNXTransposeMetatype(ONNXOpMetatype): hw_config_names = [HWConfigOpName.TRANSPOSE] +@ONNX_OPERATION_METATYPES.register() +class ONNXDropoutMetatype(ONNXOpMetatype): + name = "DropoutOp" + op_names = ["Dropout"] + + @ONNX_OPERATION_METATYPES.register() class ONNXFlattenMetatype(ONNXOpMetatype): name = "FlattenOp" @@ -495,13 +610,17 @@ class ONNXDeformableConvolutionMetatype(ONNXOpMetatype): op_names = ["DeformConv"] -WEIGHT_LAYER_METATYPES = [ +CONSTANT_WEIGHT_LAYER_METATYPES = [ ONNXConvolutionMetatype, ONNXDepthwiseConvolutionMetatype, ONNXConvolutionTransposeMetatype, - ONNXLinearMetatype, + ONNXEmbeddingMetatype, ] +MATMUL_METATYPES = [ONNXGemmMetatype, ONNXMatMulMetatype] + +GENERAL_WEIGHT_LAYER_METATYPES = CONSTANT_WEIGHT_LAYER_METATYPES + MATMUL_METATYPES + # Contains the operation metatypes for which bias can be applied. OPERATIONS_WITH_BIAS_METATYPES = [ ONNXConvolutionMetatype, @@ -513,11 +632,105 @@ def get_operator_metatypes() -> List[Type[OperatorMetatype]]: """ Returns a list of the operator metatypes. - :return: List of operator metatypes . + :return: List of operator metatypes. """ return list(ONNX_OPERATION_METATYPES.registry_dict.values()) +def get_metatype(model: onnx.ModelProto, node: onnx.NodeProto) -> ONNXOpMetatype: + """ + Returns matched ONNXOpMetatype metatype to a ONNX node. + + :param model: ONNX model. + :param node: Node from ONNX model. + :return: Matched metatype. + """ + metatype = ONNX_OPERATION_METATYPES.get_operator_metatype_by_op_name(node.op_type) + if metatype.get_subtypes(): + subtype = metatype.determine_subtype(model, node) + if subtype is not None: + metatype = subtype + return metatype + + +def get_constant_weight_port_ids(metatype: ONNXOpMetatype) -> List[int]: + """ + Returns port ids on which metatype must have a weight based on Operation definition. + + :param metatype: Metatype. + :return: Port ids. 
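+
+    Example (illustrative): for ``ONNXConvolutionMetatype`` the constant weight is expected on input port 1,
+    so ``[1]`` is returned; for metatypes outside ``CONSTANT_WEIGHT_LAYER_METATYPES`` an empty list is returned.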
+ """ + if metatype in CONSTANT_WEIGHT_LAYER_METATYPES: + return metatype.weight_port_ids + return [] + + +def get_possible_weight_port_ids(metatype: ONNXOpMetatype) -> List[int]: + """ + Returns weight port ids on which metatype could have a weight. + Example: ONNXMatMulMetatype could have activations or weights on input port ids: 0, 1 + + :param metatype: Metatype. + :return: Port ids. + """ + if metatype in MATMUL_METATYPES: + return metatype.possible_weight_ports + return [] + + +def get_bias_tensor_port_id(metatype: ONNXOpWithWeightsMetatype) -> Optional[int]: + """ + Returns input port id, where a bias tensor should output. + + :param node: Node, for which input port id is returned, + :return: Input port id, where a weight bias should output or None if node can not have bias. + """ + if metatype in OPERATIONS_WITH_BIAS_METATYPES: + return metatype.bias_port_id + return None + + +def get_tensor_edge_name(onnx_graph: ONNXGraph, node: onnx.NodeProto, port_id: int) -> Optional[str]: + """ + Returns an edge name associated with a weight of a node laying on an input port_id. + + Checks whether a node has a tensor on input port_id. + If does then it is a weight and returns corresponding edge name. + If not - take a parent node into this port id and does the same check for it. + + If an edge with a weight was not found then returns None. + + METATYPES THAT COULD CONSUME A WEIGHT TENSOR: + ONNXConstantMetatype + ONNXIdentityMetatype + ONNXReshapeMetatype + ONNXTransposeMetatype + ONNXQuantizeLinearMetatype + + :param onnx_graph: ONNXGraph. + :param node: Node. + :param port_id: Port id on which a weight edge is seeking. + :return: Edge name associated with a weight. + """ + PROPAGATING_NODES = ( + ONNXIdentityMetatype.get_all_aliases() + + ONNXTransposeMetatype.get_all_aliases() + + ONNXQuantizeLinearMetatype.get_all_aliases() + + ONNXReshapeMetatype.get_all_aliases() + + ONNXDequantizeLinearMetatype.get_all_aliases() + ) + END_NODES = ONNXConstantMetatype.get_all_aliases() + parent = onnx_graph.get_parent(node, port_id) + if not parent: + if onnx_graph.has_tensor(node.input[port_id]): + return node.input[port_id] + elif parent.op_type in END_NODES: + return node.input[port_id] + elif parent.op_type in PROPAGATING_NODES: + return get_tensor_edge_name(onnx_graph, parent, 0) + return None + + def _is_depthwise_conv(model: onnx.ModelProto, node: onnx.NodeProto) -> bool: """ Returns True if the convolution is depthwise, False - otherwise. @@ -552,3 +765,23 @@ def _is_depthwise_conv(model: onnx.ModelProto, node: onnx.NodeProto) -> bool: ): return True return False + + +def _is_embedding(model: onnx.ModelProto, node: onnx.NodeProto) -> bool: + """ + Returns True if the layer can be represented as embedding, False - otherwise. + + :param model: ONNX model to get the node's weight. + :param node: Layer to check whether it is embedding. + :return: True if the layer is embedding, False - otherwise. 
+ """ + tensor_port_id = ONNXEmbeddingMetatype.weight_port_ids[0] + onnx_graph = ONNXGraph(model) + allowed_types_list = ["TensorProto.FLOAT"] + weight_edge_name = get_tensor_edge_name(onnx_graph, node, tensor_port_id) + + if weight_edge_name is not None: + tensor_data_type = onnx_graph.get_tensor(weight_edge_name).data_type + if onnx.helper.tensor_dtype_to_string(tensor_data_type) in allowed_types_list: + return True + return False diff --git a/nncf/onnx/graph/model_transformer.py b/nncf/onnx/graph/model_transformer.py index 1c281fdab16..a8f5355babf 100644 --- a/nncf/onnx/graph/model_transformer.py +++ b/nncf/onnx/graph/model_transformer.py @@ -357,7 +357,7 @@ def _apply_bias_correction_transformations( node_name = transformation.target_point.target_node_name onnx_node = onnx_graph.get_node_by_name(node_name) bias_initializer_name = onnx_node.input[bias_tensor_position] - bias_initializer = onnx_graph.get_initializer(bias_initializer_name) + bias_initializer = onnx_graph.get_tensor(bias_initializer_name) new_bias_tensor = onnx.numpy_helper.from_array(transformation.bias_value, bias_initializer_name) bias_initializer.CopyFrom(new_bias_tensor) diff --git a/nncf/onnx/graph/model_utils.py b/nncf/onnx/graph/model_utils.py new file mode 100644 index 00000000000..41c9afcfe54 --- /dev/null +++ b/nncf/onnx/graph/model_utils.py @@ -0,0 +1,53 @@ +# Copyright (c) 2023 Intel Corporation +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +from collections import deque + +import onnx + +from nncf.common.factory import ModelTransformerFactory +from nncf.common.graph.graph import NNCFGraph +from nncf.common.graph.transformations.commands import TargetType +from nncf.common.graph.transformations.layout import TransformationLayout +from nncf.onnx.graph.metatypes.onnx_metatypes import ONNXDequantizeLinearMetatype +from nncf.onnx.graph.metatypes.onnx_metatypes import ONNXQuantizeLinearMetatype +from nncf.onnx.graph.transformations.commands import ONNXQDQNodeRemovingCommand +from nncf.onnx.graph.transformations.commands import ONNXTargetPoint + + +def remove_fq_from_inputs(model: onnx.ModelProto, nncf_graph: NNCFGraph) -> onnx.ModelProto: + """ + This method removes the activation Quantizer nodes from the model. + It's needed for the further bias shift calculation that relates on quantized weights. + + :param model: onnx.ModelProto instance. + :param nncf_graph: NNCFGraph instance. + :return: onnx.ModelProto instance without activation Quantizer nodes. 
+ """ + transformation_layout = TransformationLayout() + model_transformer = ModelTransformerFactory.create(model) + + seen_nodes = [] + nodes_queue = deque(nncf_graph.get_input_nodes()) + while nodes_queue: + current_node = nodes_queue.popleft() + current_node_name = current_node.node_name + + if current_node_name in seen_nodes: + continue + + seen_nodes.append(current_node_name) + if current_node.metatype in [ONNXQuantizeLinearMetatype, ONNXDequantizeLinearMetatype]: + target_point = ONNXTargetPoint(TargetType.LAYER, current_node_name, 0) + command = ONNXQDQNodeRemovingCommand(target_point) + transformation_layout.register(command) + nodes_queue.extend(nncf_graph.get_next_nodes(current_node)) + + return model_transformer.transform(transformation_layout) diff --git a/nncf/onnx/graph/nncf_graph_builder.py b/nncf/onnx/graph/nncf_graph_builder.py index cb95adddac1..ebf438bdcbc 100644 --- a/nncf/onnx/graph/nncf_graph_builder.py +++ b/nncf/onnx/graph/nncf_graph_builder.py @@ -9,7 +9,7 @@ # See the License for the specific language governing permissions and # limitations under the License. from collections import Counter -from typing import List, Optional, Tuple +from typing import Any, Dict, Optional, Set import onnx @@ -21,11 +21,148 @@ from nncf.common.graph.layer_attributes import Dtype from nncf.common.graph.operator_metatypes import InputNoopMetatype from nncf.common.graph.operator_metatypes import OutputNoopMetatype -from nncf.onnx.graph.metatypes.onnx_metatypes import ONNX_OPERATION_METATYPES -from nncf.onnx.graph.metatypes.onnx_metatypes import WEIGHT_LAYER_METATYPES +from nncf.onnx.graph.metatypes.onnx_metatypes import ONNXGemmMetatype +from nncf.onnx.graph.metatypes.onnx_metatypes import get_bias_tensor_port_id +from nncf.onnx.graph.metatypes.onnx_metatypes import get_constant_weight_port_ids +from nncf.onnx.graph.metatypes.onnx_metatypes import get_metatype +from nncf.onnx.graph.metatypes.onnx_metatypes import get_possible_weight_port_ids +from nncf.onnx.graph.metatypes.onnx_metatypes import get_tensor_edge_name from nncf.onnx.graph.onnx_graph import ONNXGraph +class ONNXLayerAttributes(BaseLayerAttributes): + """ + Every NNCFNode for ONNX backend has a ONNXLayerAttributes. + If node has weight tensor(-s), information for algorithms about weight is stored in weight_attrs. + If node has bias tensor, information for algorithms about bias is stored in bias_attrs. + If node has attibutes needed for algorithms, they are stored in node_attrs. + E.g. 'transA' attirbute of Gemm node for Quantization. + """ + + def __init__( + self, + weight_attrs: Optional[Dict[int, Dict]] = None, + bias_attrs: Optional[Dict[str, Any]] = None, + node_attrs: Optional[Dict[str, Any]] = None, + ): + """ + :param weight_attrs: Maps input port id asocciated with weight to a weight description. + :param bias_attrs: Maps bias tensor name asocciated with weight to a weight description. + :param node_attrs: Maps attribute name to an attribute value. + """ + self.weight_attrs = weight_attrs if weight_attrs is not None else {} + self.bias_attrs = bias_attrs if bias_attrs is not None else {} + self.node_attrs = node_attrs if node_attrs is not None else {} + + def has_weight(self) -> bool: + return bool(self.weight_attrs) + + def has_bias(self) -> bool: + return bool(self.bias_attrs) + + def has_node_attrs(self) -> bool: + return bool(self.node_attrs) + + +def _get_weight_port_ids(node: onnx.NodeProto, onnx_graph: ONNXGraph) -> Set[int]: + """ + Returns all weight input ports. 
+ First, add constant weight port ids from metatype. + Second, add weight port ids determined dynamically if metatype could have them. + + :param node: ONNX node. + :param onnx_graph: ONNXGraph. + :return: Port ids with weights. + """ + port_ids = set() + metatype = get_metatype(onnx_graph.onnx_model, node) + constant_port_ids = get_constant_weight_port_ids(metatype) + port_ids.update(constant_port_ids) + possible_port_ids = get_possible_weight_port_ids(metatype) + for port_id in possible_port_ids: + if get_tensor_edge_name(onnx_graph, node, port_id): + port_ids.add(port_id) + return port_ids + + +def _is_node_with_bias(node: onnx.NodeProto, model: onnx.ModelProto) -> bool: + """ + Returns True if node has bias tensor, otherwise - False. + + :param node: ONNX node. + :param onnx_graph: ONNXGraph. + :return: True if node has bias tensor, otherwise - False. + """ + metatype = get_metatype(model, node) + bias_tensor_port_id = get_bias_tensor_port_id(metatype) + if bias_tensor_port_id is not None and len(node.input) > bias_tensor_port_id: + return True + return False + + +def _get_weight_attr(node: onnx.NodeProto, onnx_graph: ONNXGraph, weight_port_id: int) -> Dict[int, Dict]: + """ + Returns weight attributes. + + :param node: ONNX node. + :param onnx_graph: ONNXGraph. + :param weight_port_ids: Port ids with weights location. + :return: Weight attributes. + """ + weight_attrs = {} + weight_edge_name = node.input[weight_port_id] + edge = onnx_graph.get_edge(weight_edge_name) + weight_shape = ONNXGraph.get_edge_shape(edge) + weight_attrs[weight_port_id] = {"name": weight_edge_name, "shape": weight_shape} + return weight_attrs + + +def _get_gemm_attrs(node: onnx.NodeProto) -> Dict[str, int]: + """ + Returns transpose attrbiutes of GEMM node. + + :param node: GEMM node. + :return: Trnaspose attributes. + """ + gemm_attrs = {"transA": 0, "transB": 0} + attribute_names = ["transA", "transB"] + for attr in node.attribute: + if attr.name in attribute_names: + gemm_attrs[attr.name] = onnx.helper.get_attribute_value(attr) + return gemm_attrs + + +def _get_node_attrs(node: onnx.NodeProto, model: onnx.ModelProto) -> Dict[str, Any]: + """ + Returns node attributes. + + :param node: Node. + :param onnx_graph: ONNXGraph. + :return : Node attributes. + """ + metatype = get_metatype(model, node) + if metatype == ONNXGemmMetatype: + return _get_gemm_attrs(node) + return {} + + +def _get_bias_attr(node: onnx.NodeProto, onnx_graph: ONNXGraph) -> Dict[str, str]: + """ + Returns bias tensor attributes. + + :param node: ONNX node. + :param onnx_graph: ONNXGraph. + :return: Bias tensor attributes. + """ + bias_attrs = {} + metatype = get_metatype(onnx_graph.onnx_model, node) + if _is_node_with_bias(node, onnx_graph.onnx_model): + bias_tensor_port_id = get_bias_tensor_port_id(metatype) + bias_edge_name = get_tensor_edge_name(onnx_graph, node, bias_tensor_port_id) + bias_attrs["name"] = bias_edge_name + return bias_attrs + + class GraphConverter: """ Builds the NNCFGraph from an ONNX model. 
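
A minimal sketch of how the ``ONNXLayerAttributes`` container introduced above is meant to be filled and queried by the algorithms; the node name, port id, and shape are hypothetical:

```python
from nncf.onnx.graph.nncf_graph_builder import ONNXLayerAttributes

# Hypothetical Conv node: weight on input port 1, bias tensor present, no extra node attributes.
attrs = ONNXLayerAttributes(
    weight_attrs={1: {"name": "conv1.weight", "shape": [64, 3, 3, 3]}},
    bias_attrs={"name": "conv1.bias"},
)
assert attrs.has_weight() and attrs.has_bias() and not attrs.has_node_attrs()
```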
@@ -66,10 +203,12 @@ def _add_nncf_input_nodes(onnx_graph: ONNXGraph, nncf_graph: NNCFGraph) -> None: """ for i, _input in enumerate(onnx_graph.get_model_inputs()): input_name = _input.name + layer_attributes = ONNXLayerAttributes() input_node = nncf_graph.add_nncf_node( node_name=MODEL_INPUT_OP_NAME + "_" + str(i), node_type=NNCFGraphNodeType.INPUT_NODE, node_metatype=InputNoopMetatype, + layer_attributes=layer_attributes, ) to_nodes = onnx_graph.get_nodes_by_input(input_name) @@ -79,6 +218,7 @@ def _add_nncf_input_nodes(onnx_graph: ONNXGraph, nncf_graph: NNCFGraph) -> None: onnx_dtype = ONNXGraph.get_edge_dtype(edge) nncf_dtype = GraphConverter.convert_onnx_dtype_to_nncf_dtype(onnx_dtype) output_port_id = 0 + for node in to_nodes: to_node_id = nncf_graph.get_node_by_name(node.name).node_id input_port_id = ONNXGraph.get_input_port_id_for_node_after_input(input_name, node) @@ -103,12 +243,14 @@ def _add_nncf_output_nodes(onnx_graph: ONNXGraph, nncf_graph: NNCFGraph) -> None """ for i, _output in enumerate(onnx_graph.get_model_outputs()): output_name = _output.name + layer_attributes = ONNXLayerAttributes() output_node = nncf_graph.add_nncf_node( node_name=MODEL_OUTPUT_OP_NAME + "_" + str(i), node_type=NNCFGraphNodeType.OUTPUT_NODE, node_metatype=OutputNoopMetatype, + layer_attributes=layer_attributes, ) - from_nodes = onnx_graph.get_nodes_by_output(output_name) + from_node = onnx_graph.get_node_by_output(output_name) output_node_node_id = output_node.node_id edge = onnx_graph.get_edge(output_name) @@ -116,18 +258,17 @@ def _add_nncf_output_nodes(onnx_graph: ONNXGraph, nncf_graph: NNCFGraph) -> None onnx_dtype = ONNXGraph.get_edge_dtype(edge) nncf_dtype = GraphConverter.convert_onnx_dtype_to_nncf_dtype(onnx_dtype) input_port_id = 0 - for node in from_nodes: - from_node_id = nncf_graph.get_node_by_name(node.name).node_id - output_port_id = ONNXGraph.get_output_port_id_for_node_before_output(output_name, node) - nncf_graph.add_edge_between_nncf_nodes( - from_node_id=from_node_id, - to_node_id=output_node_node_id, - tensor_shape=output_shape, - input_port_id=input_port_id, - output_port_id=output_port_id, - dtype=nncf_dtype, - ) - input_port_id += 1 + from_node_id = nncf_graph.get_node_by_name(from_node.name).node_id + output_port_id = ONNXGraph.get_output_port_id_for_node_before_output(output_name, from_node) + nncf_graph.add_edge_between_nncf_nodes( + from_node_id=from_node_id, + to_node_id=output_node_node_id, + tensor_shape=output_shape, + input_port_id=input_port_id, + output_port_id=output_port_id, + dtype=nncf_dtype, + ) + input_port_id += 1 @staticmethod def convert_onnx_dtype_to_nncf_dtype(onnx_dtype: int) -> Dtype: @@ -153,26 +294,28 @@ def create_nncf_graph(onnx_model: onnx.ModelProto) -> NNCFGraph: nncf_graph = NNCFGraph() onnx_graph = ONNXGraph(onnx_model) for node in onnx_graph.get_all_nodes(): - metatype = ONNX_OPERATION_METATYPES.get_operator_metatype_by_op_name(node.op_type) - if metatype.get_subtypes(): - subtype = metatype.determine_subtype(onnx_model, node) - if subtype is not None: - metatype = subtype - - if metatype in WEIGHT_LAYER_METATYPES: - is_shared = onnx_graph.is_node_shared(node) - weight_edge_name = onnx_graph.get_weight_tensor_edge(node) - edge = onnx_graph.get_edge(weight_edge_name) - weight_shape = ONNXGraph.get_edge_shape(edge) - layer_attributes = ONNXExtendedLayerAttributes(node.input, node.output, weight_shape) - else: - is_shared, weight_edge_name, layer_attributes = None, None, None + metatype = get_metatype(onnx_model, node) + weight_port_ids = 
_get_weight_port_ids(node, onnx_graph) + is_shared = None + weight_attrs = {} + node_attrs = _get_node_attrs(node, onnx_model) + bias_attrs = _get_bias_attr(node, onnx_graph) + if weight_port_ids: # If node has weight + weight_edge_names = [] + for weight_port_id in weight_port_ids: + weight_edge_names.append(node.input[weight_port_id]) + weight_attrs.update(_get_weight_attr(node, onnx_graph, weight_port_id)) + if not is_shared and onnx_graph.is_node_has_shared_weight(node, weight_port_id): + is_shared = True + + layer_attributes = ONNXLayerAttributes( + weight_attrs=weight_attrs, bias_attrs=bias_attrs, node_attrs=node_attrs + ) nncf_graph.add_nncf_node( node_name=node.name, node_type=node.op_type, node_metatype=metatype, layer_attributes=layer_attributes, - layer_name=weight_edge_name, is_shared=is_shared, ) for output_node in onnx_graph.get_all_nodes(): @@ -205,21 +348,3 @@ def create_nncf_graph(onnx_model: onnx.ModelProto) -> NNCFGraph: GraphConverter._add_nncf_input_nodes(onnx_graph, nncf_graph) GraphConverter._add_nncf_output_nodes(onnx_graph, nncf_graph) return nncf_graph - - -class ONNXExtendedLayerAttributes(BaseLayerAttributes): - """ - This class stores extended attributes of modules/layers for the algorithms. - """ - - def __init__( - self, input_tensor_names: List[str], output_tensor_names: List[str], weight_shape: Optional[Tuple[int]] = None - ): - """ - :param input_tensor_names: List of the input tensor/edge names of the module/layer. - :param output_tensor_names: List of the output tensor/edge names of the module/layer. - :param weight_shape: Shape of a weight shape of the module/layer. - """ - self.input_tensor_names = input_tensor_names - self.output_tensor_names = output_tensor_names - self.weight_shape = weight_shape diff --git a/nncf/onnx/graph/node_utils.py b/nncf/onnx/graph/node_utils.py index 4f5edead550..5312f2fdded 100644 --- a/nncf/onnx/graph/node_utils.py +++ b/nncf/onnx/graph/node_utils.py @@ -9,18 +9,20 @@ # See the License for the specific language governing permissions and # limitations under the License. -from typing import Dict, Tuple +from typing import Dict, List, Optional, Tuple import numpy as np import onnx from nncf.common.graph.graph import NNCFGraph from nncf.common.graph.graph import NNCFNode -from nncf.onnx.graph.metatypes.onnx_metatypes import ONNX_OPERATION_METATYPES -from nncf.onnx.graph.metatypes.onnx_metatypes import OPERATIONS_WITH_BIAS_METATYPES -from nncf.onnx.graph.metatypes.onnx_metatypes import ONNXIdentityMetatype -from nncf.onnx.graph.nncf_graph_builder import ONNXExtendedLayerAttributes +from nncf.common.graph.transformations.commands import TargetType +from nncf.common.logging.logger import nncf_logger +from nncf.common.tensor_statistics.collectors import ReductionShape +from nncf.onnx.graph.metatypes import onnx_metatypes as om +from nncf.onnx.graph.metatypes.onnx_metatypes import ONNXDequantizeLinearMetatype from nncf.onnx.graph.onnx_graph import ONNXGraph +from nncf.onnx.graph.transformations.commands import ONNXTargetPoint def is_node_with_bias(node: NNCFNode) -> bool: @@ -32,37 +34,26 @@ def is_node_with_bias(node: NNCFNode) -> bool: with bias (bias is added to the output tensor of that operation), `False` otherwise. 
""" - if node.metatype in OPERATIONS_WITH_BIAS_METATYPES and isinstance( - node.layer_attributes, ONNXExtendedLayerAttributes - ): - return len(node.layer_attributes.input_tensor_names) > 2 - return False + return node.layer_attributes.has_bias() def get_bias_value(node_with_bias: NNCFNode, model: onnx.ModelProto) -> np.ndarray: """ Returns the bias tensor for the biased node. - :param node_with_bias : The node that corresponds to the operation with bias. + :param node_with_bias: The node that corresponds to the operation with bias. :param model: The model that contains this operation. :return: The bias value that is applied to the output tensor of the node's operation. """ onnx_graph = ONNXGraph(model) - onnx_node = onnx_graph.get_node_by_name(node_with_bias.node_name) - bias_port_id = onnx_graph.get_bias_tensor_port_id(onnx_node) - bias_input_name = onnx_node.input[bias_port_id] - if onnx_graph.has_initializer(bias_input_name): - return onnx_graph.get_initializers_value(bias_input_name) - node = onnx_graph.get_nodes_by_output(bias_input_name)[0] - metatype = ONNX_OPERATION_METATYPES.get_operator_metatype_by_op_name(node.op_type) - if metatype == ONNXIdentityMetatype: - return onnx_graph.get_initializers_value(node.input[0]) - raise RuntimeError("Could not find the bias value of the node") + assert node_with_bias.layer_attributes.has_bias() + bias_name = node_with_bias.layer_attributes.bias_attrs["name"] + return onnx_graph.get_tensor_value(bias_name) def get_input_edges_mapping(nncf_graph: NNCFGraph) -> Dict[str, Tuple[str, int]]: """ - Returns mapping between NNNCFGraph input nodes and following by ONNX nodes with corresponding input port ids. + Returns mapping between NNCFGraph input nodes and following by ONNX nodes with corresponding input port ids. :param nncf_graph: instance of NNCFGraph :return: A mapping of NNCF input node names and a tuple with the consumed node names and their input port ids. @@ -93,3 +84,164 @@ def get_input_edge(input_node_name: str, input_edges_mapping: Dict[str, Tuple[st input_edges.add(onnx_graph.get_node_edge_names(name)["input"][port_id]) assert len(input_edges) == 1 return input_edges.pop() + + +def is_any_weight_quantized(node: NNCFNode, nncf_graph: NNCFGraph) -> bool: + """ + Returns True if any weight port id of node is quantized, + False - if all weights are not quantized or the node can not have weight. + + :param node: NNCFNode. + :param nncf_graph: NNCGraph. + :return: True if any weight port id of node is quantized, + False - if all weights are not quantized or the node can not have weight. + """ + is_quantized_weight = False + if node.layer_attributes.has_weight(): + for port_id in node.layer_attributes.weight_attrs.keys(): + is_quantized_weight = is_quantized_weight or is_port_quantized(node, nncf_graph, port_id) + return is_quantized_weight + + +def is_port_quantized(node: NNCFNode, nncf_graph: NNCFGraph, port_id: int) -> bool: + """ + Returns True if a port_id is quantized - have ONNXDequantizeLinearMetatype as a parent node. + + :param node: NNCFNode. + :param nncf_graph: NNCFGraph. + :param port_id: Input port id of a node. + :return: True if a port_id is quantized - have ONNXDequantizeLinearMetatype as a parent node. + """ + input_nodes = [edge.from_node for edge in nncf_graph.get_input_edges(node)] + if len(input_nodes) > port_id: + weight_node = input_nodes[port_id] + return weight_node.metatype == ONNXDequantizeLinearMetatype + return False + + +def transpose_axis(shape: List[int], axis: int) -> int: + """ + Returns transpose axis. 
+
+    :param shape: Tensor shape.
+    :param axis: Axis before transpose.
+    :return: Axis after transpose.
+    """
+    axis %= len(shape)  # Make axis positive
+    return range(len(shape) - 1, -1, -1)[axis]  # Iterate backward through the axes
+
+
+def get_reduction_shape(shape: List[int], axis: int) -> ReductionShape:
+    """
+    Returns the reduction shape for the given tensor shape and axis.
+
+    :param shape: Shape.
+    :param axis: Axis.
+    :return: Reduction shape.
+    """
+    reduction_shape = list(range(len(shape)))
+    if len(reduction_shape) == 1:  # If there is only one channel
+        return tuple(reduction_shape)
+    reduction_shape.pop(axis)
+    return tuple(reduction_shape)
+
+
+def _get_weight_quantization_axis(node: NNCFNode, port_id: int) -> int:
+    """
+    Returns the weight tensor axis along which the quantizer parameters are calculated.
+
+    :param node: NNCFNode that has a weight on the input port_id.
+    :param port_id: Input port id on which the weight of the node is located.
+    :return: Axis along which the quantizer parameters are calculated.
+    """
+    weight_channel_axis = node.metatype.weight_channel_axis
+    if node.layer_attributes.has_node_attrs():
+        if node.metatype == om.ONNXGemmMetatype:
+            weight_shape = node.layer_attributes.weight_attrs[port_id]["shape"]
+            if (
+                port_id == 0
+                and node.layer_attributes.node_attrs["transA"] == 1
+                or port_id == 1
+                and node.layer_attributes.node_attrs["transB"] == 1
+            ):
+                weight_channel_axis = transpose_axis(weight_shape, weight_channel_axis)
+    return weight_channel_axis
+
+
+def _get_activation_quantization_axis() -> int:
+    """
+    Returns the activation tensor axis along which the quantizer parameters are calculated.
+
+    :return: Axis along which the quantizer parameters are calculated.
+    """
+    return 1  # Activations have channel-first layout: [N, C, Z, Y, X]
+
+
+def _get_activation_tensor_shape(
+    nncf_graph: NNCFGraph, node: NNCFNode, target_point: ONNXTargetPoint
+) -> Optional[List[int]]:
+    """
+    Returns the shape of the activation tensor that corresponds to the target point and node.
+    An ONNX model may have no shape information for an edge, even after shape inference.
+    Therefore, if there is no shape information, None is returned.
+
+    :param nncf_graph: NNCFGraph.
+    :param node: NNCFNode.
+    :param target_point: Determines whether the shape is taken from the input or the output of the node.
+    :return: None if there is no shape information, otherwise the tensor shape.
+    """
+    if target_point.type == TargetType.PRE_LAYER_OPERATION:
+        shape = nncf_graph.get_input_edges(node)[target_point.port_id].tensor_shape
+    elif target_point.type == TargetType.POST_LAYER_OPERATION:
+        shape = nncf_graph.get_output_edges(node)[target_point.port_id].tensor_shape
+    else:
+        raise NotImplementedError(f"Unsupported target point type {target_point.type}.")
+    if not shape:  # An ONNX model may have no shape for an edge, even after shape inference.
+        if target_point.type == TargetType.PRE_LAYER_OPERATION:
+            nncf_logger.info(
+                f"The shape of the input edge of node {node.node_name} is unknown. \
+                Therefore per-tensor quantization is applied."
+            )
+        elif target_point.type == TargetType.POST_LAYER_OPERATION:
+            nncf_logger.info(
+                f"The shape of the output edge of node {node.node_name} is unknown. \
+                Therefore per-tensor quantization is applied."
+            )
+        nncf_logger.info("Please consider running pre-processing before quantization.")
+        # TODO: add a preprocessing tool for ONNX models.
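+        # Note: running `onnx.shape_inference.infer_shapes(model)` beforehand usually restores most
+        # missing edge shapes and re-enables per-channel quantization for those edges.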
+        return None
+    return shape
+
+
+def get_quantized_tensor_shape(
+    nncf_graph: NNCFGraph, node: NNCFNode, target_point: ONNXTargetPoint
+) -> Optional[List[int]]:
+    """
+    Returns the shape of the quantized tensor that corresponds to the target point and node,
+    if the shape information exists. If there is no shape information, returns None.
+
+    :param nncf_graph: NNCFGraph.
+    :param node: NNCFNode.
+    :param target_point: Target point indicates the quantizer place in the model graph.
+    :return: Shape of the quantized tensor if it is known, otherwise None.
+    """
+    if target_point.is_weight_target_point():
+        return node.layer_attributes.weight_attrs[target_point.port_id]["shape"]
+    return _get_activation_tensor_shape(nncf_graph, node, target_point)
+
+
+def get_quantization_axis(is_per_channel: bool, node: NNCFNode, target_point: ONNXTargetPoint) -> Optional[int]:
+    """
+    Returns the axis along which the quantizer parameters are calculated.
+    If quantization is per-tensor, returns None.
+
+    :param is_per_channel: True if the quantizer is per-channel.
+    :param node: NNCFNode.
+    :param target_point: Target point indicates the quantizer place in the model graph.
+    :return: None if per-tensor, otherwise the quantization axis.
+    """
+    if not is_per_channel:
+        return None
+    if target_point.is_weight_target_point():
+        return _get_weight_quantization_axis(node, target_point.port_id)
+    return _get_activation_quantization_axis()
diff --git a/nncf/onnx/graph/onnx_graph.py b/nncf/onnx/graph/onnx_graph.py
index 5fee23edeba..df754263c99 100644
--- a/nncf/onnx/graph/onnx_graph.py
+++ b/nncf/onnx/graph/onnx_graph.py
@@ -9,18 +9,13 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-from typing import Callable, Dict, List, Optional, Tuple, Union
+from typing import Dict, Iterator, List, Optional, Union
 
 import numpy as np
 import onnx
 from onnx import numpy_helper
 
-from nncf.onnx.graph.metatypes.onnx_metatypes import ONNX_OPERATION_METATYPES
-from nncf.onnx.graph.metatypes.onnx_metatypes import WEIGHT_LAYER_METATYPES
-from nncf.onnx.graph.metatypes.onnx_metatypes import OpWeightDef
-
-# pylint: disable=too-many-public-methods
 class ONNXGraph:
     """
     The class provides the interface to get the necessary information from ONNX model.
@@ -44,6 +39,20 @@ def _update_edges(self) -> None:
     def _update_node_names(self) -> None:
         self._node_name_to_node = {n.name: n for n in self.onnx_model.graph.node}
 
+    def _get_all_tensors(self) -> Iterator[onnx.TensorProto]:
+        """
+        Iterates over all tensors of the ONNX model.
+
+        :yield: Tensors of the ONNX model.
+        """
+        for initializer in self.onnx_model.graph.initializer:
+            yield initializer
+        for node in self.onnx_model.graph.node:
+            for attribute in node.attribute:
+                if attribute.HasField("t"):
+                    yield attribute.t
+                yield from attribute.tensors
+
     def get_all_nodes(self) -> List[onnx.NodeProto]:
         """
         Returns model nodes in the original order.
@@ -100,14 +109,17 @@ def get_model_outputs(self) -> List[onnx.ValueInfoProto]:
         """
         return list(self.onnx_model.graph.output)
 
-    def get_nodes_by_output(self, output_name: str) -> List[onnx.NodeProto]:
+    def get_node_by_output(self, output_name: str) -> Optional[onnx.NodeProto]:
         """
-        Returns all nodes that have output edge with the name 'output_name'.
+        Returns the node that has an output edge with the name 'output_name'.
 
         :param output_name: The name of output edge.
-        :return: Nodes with corresponding output.
+        :return: The node with the corresponding output, or None if there is no such node.
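+        In a valid ONNX graph each edge name is produced by at most one node, so a single match suffices.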
""" - return self._get_nodes_by_lambda(output_name, lambda node: node.output) + for node in self.get_all_nodes(): + if output_name in node.output: + return node + return None def get_nodes_by_input(self, input_name: str) -> List[onnx.NodeProto]: """ @@ -116,14 +128,9 @@ def get_nodes_by_input(self, input_name: str) -> List[onnx.NodeProto]: :param input_name: The name of input edge. :return: Nodes with corresponding input. """ - return self._get_nodes_by_lambda(input_name, lambda node: node.input) - - def _get_nodes_by_lambda( - self, name: str, func: Callable[[onnx.NodeProto], List[onnx.NodeProto]] - ) -> List[onnx.NodeProto]: output = [] for node in self.get_all_nodes(): - if name in func(node): + if input_name in node.input: output.append(node) return output @@ -192,92 +199,6 @@ def get_port_ids_between_nodes(from_node: onnx.NodeProto, to_node: onnx.NodeProt raise RuntimeError(f"The nodes {from_node.name} and {to_node.name} do not have edges between.") return output - def get_nodes_by_type(self, node_type: str) -> List[onnx.NodeProto]: - """ - Returns all nodes in the model that have type equal to 'node_type'. - - :param node_type: Type of the nodes. - :return: All nodes with the corresponding type. - """ - output = [] - for node in self.get_all_nodes(): - if str(node.op_type) == node_type: - output.append(node) - return output - - @staticmethod - def _get_weight_definitions(node: onnx.NodeProto) -> OpWeightDef: - """ - Returns the weight_definitions of the node's metatype. - - :param node: Node from which weight definition is obtained. - :return: weight definition of the node. - """ - metatype = ONNX_OPERATION_METATYPES.get_operator_metatype_by_op_name(node.op_type) - if metatype in WEIGHT_LAYER_METATYPES: - return metatype.weight_definitions - raise RuntimeError(f"The metatype {metatype} does not belong to a list of metatypes with a weight tensor.") - - def get_weight_port_id(self, node: onnx.NodeProto) -> int: - """ - Returns input port id, where a weight tensor should output. - - :param node: Node, for which input port id is returned, - :return: input port id, where a weight tensor should output. - """ - weight_definitions = self._get_weight_definitions(node) - if weight_definitions.weight_port_id is not None: - return weight_definitions.weight_port_id - raise RuntimeError(f"The metatype {node} does not have weight_port_id attribute") - - def get_weight_channel_axis(self, node: onnx.NodeProto) -> int: - """ - Returns a channel axis for weight per-channel quantization. - - :param node: Node, for which weight per-channel axis id is returned, - :return: Channel axis for per-channel quantization. - """ - weight_definitions = self._get_weight_definitions(node) - if weight_definitions.weight_channel_axis is not None: - return weight_definitions.weight_channel_axis - raise RuntimeError(f"The node {node} does not have weight_channel_axis attribute") - - def get_bias_tensor_port_id(self, node: onnx.NodeProto) -> int: - """ - Returns input port id, where a bias tensor should output. - - :param node: Node, for which input port id is returned, - :return: input port id, where a weight bias should output. 
- """ - weight_definitions = self._get_weight_definitions(node) - if weight_definitions.bias_port_id is not None: - return weight_definitions.bias_port_id - raise RuntimeError(f"The node {node} does not have bias_port_id attribute") - - def _get_weight_tensor_with_reshape(self, node: onnx.NodeProto) -> Tuple[str, np.ndarray]: - """ - Returns node's weight tensor name and its value in the case when reshape node is placed after the weight. - The returned weight tensor will be reshaped according to a shape attribute of the reshape node. - - :param node: Reshape node, whose input is weight tensor. - :return: The weight tensor name and its value with applied the reshape operation. - """ - tensor_name = node.output[0] - shape = self.get_initializers_value(node.input[1]) - tensor_value = self.get_initializers_value(node.input[0]) - reshaped_tensor_value = tensor_value.reshape(shape) - return tensor_name, reshaped_tensor_value - - def _get_tensor_from_zero_input(self, node: onnx.NodeProto) -> Tuple[str, np.ndarray]: - """ - Returns the weight tensor name and its value, which is located on the 0-index input port of the node. - - :param node: Node, which takes on the 0-index input port id the weight tensor. - :return: The weight tensor name and its value. - """ - tensor_name = self.get_initializer(node.input[0]).name - return tensor_name, self.get_initializers_value(tensor_name) - def get_node_index(self, node_name: str) -> int: """ Returns the node index in the model. @@ -290,42 +211,39 @@ def get_node_index(self, node_name: str) -> int: return i return -1 - def get_initializers_value(self, initializer_name: str) -> np.ndarray: + def has_tensor(self, tensor_name: str) -> bool: """ - Returns tensor value of model's Initializer with the name equals to 'initializer_name'. + Returns True whether the model has the tensor with the name equals to tensor_name. - :param initializer_name: Name of the tensor. - :return: The value of the tensor. + :param tensor_name: Name of the tensor. + :return: True if the model has such tensor, False - otherwise. """ - for init in self.onnx_model.graph.initializer: - if init.name == initializer_name: - tensor = numpy_helper.to_array(init) - return tensor - raise RuntimeError("There is no initializer with the name {}".format(initializer_name)) + for tensor in self._get_all_tensors(): + if tensor.name == tensor_name: + return True + return False - def has_initializer(self, initializer_name: str) -> bool: + def get_tensor_value(self, tensor_name: str) -> np.ndarray: """ - Returns True whether the model has the initializer with the name equals to initializer_name. + Returns tensor value of a tensor with the name 'tensor_name'. - :param initializer_name: Name of the initializer. - :return: True if the model has such initializer, False - otherwise. + :param tensor_name: Name of the tensor. + :return: The value of the tensor. """ - for init in self.onnx_model.graph.initializer: - if init.name == initializer_name: - return True - return False + tensor = self.get_tensor(tensor_name) + return numpy_helper.to_array(tensor) - def get_initializer(self, initializer_name: str) -> onnx.TensorProto: + def get_tensor(self, tensor_name: str) -> onnx.TensorProto: """ - Returns model's Initializer with the name equals to 'initializer_name'. + Returns a tensor with the name 'tensor_name'. :param initializer_name: Name of the Initializer. :return: The Initializer. 
""" - for init in self.onnx_model.graph.initializer: - if init.name == initializer_name: - return init - raise RuntimeError("There is no initializer with the name {}".format(initializer_name)) + for tensor in self._get_all_tensors(): + if tensor.name == tensor_name: + return tensor + raise RuntimeError("There is no tensor with the name {}".format(tensor_name)) @staticmethod def get_edge_shape(edge: Union[onnx.ValueInfoProto, onnx.TensorProto]) -> List[int]: @@ -366,17 +284,17 @@ def get_edge_dtype(edge: Union[onnx.ValueInfoProto, onnx.TensorProto]) -> int: return edge.type.tensor_type.elem_type return edge.data_type - def get_parents(self, node: onnx.NodeProto) -> List[onnx.NodeProto]: + def get_parent(self, node: onnx.NodeProto, port_id: int) -> Optional[onnx.NodeProto]: """ - Returns parents of the node. + Returns parents of the node. If there is no parent node, returns None. :param node: The child node. - :return: All children nodes. + :param port_id: Input port id on which the parent is seeked. + :return: Parent node. """ - output = [] - for inp in node.input: - output.extend(self.get_nodes_by_output(inp)) - return output + if port_id < len(node.input): + return self.get_node_by_output(node.input[port_id]) + return None def get_children(self, node: onnx.NodeProto) -> List[onnx.NodeProto]: """ @@ -391,24 +309,13 @@ def get_children(self, node: onnx.NodeProto) -> List[onnx.NodeProto]: output.extend(self.get_nodes_by_input(node_edge)) return output - def get_weight_tensor_edge(self, node: onnx.NodeProto) -> str: - """ - Returns weight edge name. - - :param node: Node with weight tensor. - :return: Weight edge name. - """ - weight_port_id = self.get_weight_port_id(node) - weight_tensor_edge = self.get_node_edge_names(node.name)["input"][weight_port_id] - return weight_tensor_edge - - def is_node_shared(self, node: onnx.NodeProto) -> bool: + def is_node_has_shared_weight(self, node: onnx.NodeProto, weight_port_id: int) -> bool: """ Returns whether the node share a weight. :param node: Node. :return: True whether node shares a weight - otherwise False. """ - weight_tensor_edge = self.get_weight_tensor_edge(node) + weight_tensor_edge = self.get_node_edge_names(node.name)["input"][weight_port_id] nodes = self.get_nodes_by_input(weight_tensor_edge) return len(nodes) > 1 diff --git a/nncf/onnx/graph/transformations/command_creation.py b/nncf/onnx/graph/transformations/command_creation.py index 3b886094ee2..fa418c5d50e 100644 --- a/nncf/onnx/graph/transformations/command_creation.py +++ b/nncf/onnx/graph/transformations/command_creation.py @@ -25,6 +25,6 @@ def create_bias_correction_command(node: NNCFNode, bias_value: np.ndarray) -> ON :param bias_value: The new bias value that will be set. :return: The `ONNXBiasCorrectionCommand` command to update bias. """ - bias_port_id = node.metatype.weight_definitions.bias_port_id + bias_port_id = node.metatype.bias_port_id target_point = ONNXTargetPoint(TargetType.LAYER, node.node_name, bias_port_id) return ONNXBiasCorrectionCommand(target_point, bias_value) diff --git a/nncf/onnx/graph/transformations/commands.py b/nncf/onnx/graph/transformations/commands.py index d91fa1633e1..92952cec09c 100644 --- a/nncf/onnx/graph/transformations/commands.py +++ b/nncf/onnx/graph/transformations/commands.py @@ -107,7 +107,7 @@ class ONNXModelExtractionCommand(Command): def __init__(self, inputs: List[str], outputs: List[str]): """ - :param inputs: List of the input names that denote the sub-graph beggining. 
+ :param inputs: List of the input names that denote the sub-graph beginning. :param outputs: List of the output names that denote the sub-graph ending. """ super().__init__(TransformationType.EXTRACT) @@ -133,3 +133,19 @@ def __init__(self, target_point: ONNXTargetPoint): def union(self, other: "TransformationCommand") -> "TransformationCommand": # Have a look at nncf/torch/graph/transformations/commands/PTInsertionCommand raise NotImplementedError() + + +class ONNXNullBiasInsertionCommand(TransformationCommand): + """ + Inserts null bias for the corresponding node. + """ + + def __init__(self, target_point: ONNXTargetPoint): + """ + :param target_point: The TargetPoint instance for the insertion that contains layer's information. + """ + super().__init__(TransformationType.INSERT, target_point) + + def union(self, other: "TransformationCommand") -> "TransformationCommand": + # Have a look at nncf/torch/graph/transformations/commands/PTInsertionCommand + raise NotImplementedError() diff --git a/nncf/onnx/hardware/fused_patterns.py b/nncf/onnx/hardware/fused_patterns.py index f13a8a09c17..9572fd42970 100644 --- a/nncf/onnx/hardware/fused_patterns.py +++ b/nncf/onnx/hardware/fused_patterns.py @@ -44,6 +44,26 @@ def create_scale_shift() -> GraphPattern: return pattern +@ONNX_HW_FUSED_PATTERNS.register(HWFusedPatternNames.SHIFT_SCALE) +def create_shift_scale() -> GraphPattern: + pattern = GraphPattern() + add_node = pattern.add_node( + **{ + GraphPattern.LABEL_ATTR: "ADD, SUBTRACT", + GraphPattern.METATYPE_ATTR: [om.ONNXAddLayerMetatype, om.ONNXSubMetatype], + } + ) + mul_node = pattern.add_node( + **{ + GraphPattern.LABEL_ATTR: "MULTIPLY, DIV", + GraphPattern.METATYPE_ATTR: [om.ONNXMulLayerMetatype, om.ONNXDivLayerMetatype], + } + ) + + pattern.add_edge(add_node, mul_node) + return pattern + + @ONNX_HW_FUSED_PATTERNS.register(HWFusedPatternNames.SWISH_WITH_SIGMOID) def create_swish_with_sigmoid() -> GraphPattern: pattern = GraphPattern() @@ -82,71 +102,54 @@ def create_swish_with_hard_sigmoid() -> GraphPattern: return pattern -@ONNX_HW_FUSED_PATTERNS.register(HWFusedPatternNames.MATMUL_SOFTMAX_MATMUL) -def create_matmul_softmax_matmul() -> GraphPattern: +@ONNX_HW_FUSED_PATTERNS.register(HWFusedPatternNames.HSWISH_ACTIVATION_WITHOUT_DENOMINATOR) +def create_hswish_without_denominator() -> GraphPattern: pattern = GraphPattern() - softmax_1 = pattern.add_node( - **{GraphPattern.LABEL_ATTR: "SOFTMAX", GraphPattern.METATYPE_ATTR: om.ONNXSoftmaxMetatype} + any_node = pattern.add_node( + **{GraphPattern.LABEL_ATTR: "ANY", GraphPattern.METATYPE_ATTR: GraphPattern.ANY_PATTERN_NODE_TYPE} ) - mat_mul_1_1 = pattern.add_node( - **{GraphPattern.LABEL_ATTR: "MATMUL_1", GraphPattern.METATYPE_ATTR: om.ONNXLinearMetatype} - ) - mat_mul_2_1 = pattern.add_node( - **{GraphPattern.LABEL_ATTR: "MATMUL_2", GraphPattern.METATYPE_ATTR: om.ONNXLinearMetatype} + add_node = pattern.add_node(**{GraphPattern.LABEL_ATTR: "ADD", GraphPattern.METATYPE_ATTR: om.ONNXAddLayerMetatype}) + relu_node = pattern.add_node(**{GraphPattern.LABEL_ATTR: "RELU", GraphPattern.METATYPE_ATTR: om.ONNXReluMetatype}) + multiply_node = pattern.add_node( + **{GraphPattern.LABEL_ATTR: "MULTIPLY", GraphPattern.METATYPE_ATTR: om.ONNXMulLayerMetatype} ) - any_1 = pattern.add_node( - **{GraphPattern.LABEL_ATTR: "ANY", GraphPattern.METATYPE_ATTR: GraphPattern.NON_PATTERN_NODE_TYPE} - ) + pattern.add_edge(any_node, add_node) + pattern.add_edge(add_node, relu_node) + pattern.add_edge(relu_node, multiply_node) + pattern.add_edge(any_node, multiply_node) + 
return pattern - pattern.add_edge(mat_mul_1_1, softmax_1) - pattern.add_edge(softmax_1, mat_mul_2_1) - pattern.add_edge(any_1, mat_mul_2_1) - softmax_2 = pattern.add_node( - **{GraphPattern.LABEL_ATTR: "SOFTMAX", GraphPattern.METATYPE_ATTR: om.ONNXSoftmaxMetatype} - ) - add_2 = pattern.add_node(**{GraphPattern.LABEL_ATTR: "ADD", GraphPattern.METATYPE_ATTR: om.ONNXAddLayerMetatype}) - mat_mul_1_2 = pattern.add_node( - **{GraphPattern.LABEL_ATTR: "MATMUL_1", GraphPattern.METATYPE_ATTR: om.ONNXLinearMetatype} - ) - mat_mul_2_2 = pattern.add_node( - **{GraphPattern.LABEL_ATTR: "MATMUL_2", GraphPattern.METATYPE_ATTR: om.ONNXLinearMetatype} - ) +@ONNX_HW_FUSED_PATTERNS.register(HWFusedPatternNames.HSWISH_ACTIVATION) +def create_hswish() -> GraphPattern: + div_pattern = GraphPattern() + hswish = create_hswish_without_denominator() + div_pattern.add_node(**{GraphPattern.LABEL_ATTR: "DIV", GraphPattern.METATYPE_ATTR: om.ONNXDivLayerMetatype}) + hswish.join_patterns(div_pattern) + return hswish - any_2 = pattern.add_node( - **{GraphPattern.LABEL_ATTR: "ANY", GraphPattern.METATYPE_ATTR: GraphPattern.NON_PATTERN_NODE_TYPE} - ) - pattern.add_edge(mat_mul_1_2, add_2) - pattern.add_edge(add_2, softmax_2) - pattern.add_edge(softmax_2, mat_mul_2_2) - pattern.add_edge(any_2, mat_mul_2_2) +# INPUT PROCESSING - return pattern +@ONNX_HW_FUSED_PATTERNS.register(HWFusedPatternNames.INPUT_SCALE_SHIFT) +def create_input_scale_shift() -> GraphPattern: + pattern = GraphPattern() + pattern.add_node(**{GraphPattern.LABEL_ATTR: "MODEL_INPUT", GraphPattern.METATYPE_ATTR: InputNoopMetatype}) + scale_shift = create_scale_shift() -# INPUT PROCESSING + pattern.join_patterns(scale_shift) + return pattern @ONNX_HW_FUSED_PATTERNS.register(HWFusedPatternNames.INPUT_SHIFT_SCALE) def create_input_shift_scale() -> GraphPattern: pattern = GraphPattern() - input_node = pattern.add_node( - **{GraphPattern.LABEL_ATTR: "MODEL_INPUT", GraphPattern.METATYPE_ATTR: InputNoopMetatype} - ) - add_node = pattern.add_node( - **{ - GraphPattern.LABEL_ATTR: "ADD, SUBTRACT", - GraphPattern.METATYPE_ATTR: [om.ONNXAddLayerMetatype, om.ONNXSubMetatype], - } - ) - multiply_node = pattern.add_node( - **{GraphPattern.LABEL_ATTR: "MULTIPLY", GraphPattern.METATYPE_ATTR: om.ONNXMulLayerMetatype} - ) + pattern.add_node(**{GraphPattern.LABEL_ATTR: "MODEL_INPUT", GraphPattern.METATYPE_ATTR: InputNoopMetatype}) + shift_scale = create_shift_scale() - pattern.add_edge(input_node, add_node) - pattern.add_edge(add_node, multiply_node) + pattern.join_patterns(shift_scale) return pattern @@ -167,16 +170,6 @@ def create_input_add() -> GraphPattern: return pattern -@ONNX_HW_FUSED_PATTERNS.register(HWFusedPatternNames.INPUT_SCALE_SHIFT) -def create_input_scale_shift() -> GraphPattern: - pattern = GraphPattern() - pattern.add_node(**{GraphPattern.LABEL_ATTR: "MODEL_INPUT", GraphPattern.METATYPE_ATTR: InputNoopMetatype}) - scale_shift = create_scale_shift() - - pattern.join_patterns(scale_shift) - return pattern - - # COMBINATIONS @@ -357,6 +350,26 @@ def create_bn_scale_shift_activation() -> GraphPattern: return batch_norm +@ONNX_HW_FUSED_PATTERNS.register(HWFusedPatternNames.LINEAR_ARITHMETIC_ACTIVATIONS) +def create_linear_arithmetic_activations() -> GraphPattern: + linear = linear_operations() + arithmetic = arithmetic_operations() + activations = atomic_activations_operations() + + linear.join_patterns(arithmetic) + linear.join_patterns(activations) + return linear + + +@ONNX_HW_FUSED_PATTERNS.register(HWFusedPatternNames.LINEAR_ARITHMETIC_ACTIVATIONS_ARITHMETIC) 
+def create_linear_arithmetic_activations_arithmetic() -> GraphPattern: + linear_arithmetic_activations = create_linear_arithmetic_activations() + arithmetic = arithmetic_operations() + + linear_arithmetic_activations.join_patterns(arithmetic) + return linear_arithmetic_activations + + # DEVICE PATTERNS @@ -389,6 +402,12 @@ def atomic_activations_operations() -> GraphPattern: swish_hard_sigmoid = create_swish_with_hard_sigmoid() pattern.add_pattern_alternative(swish_hard_sigmoid) + + hswish = create_hswish() + pattern.add_pattern_alternative(hswish) + + hswish_without_denominator = create_hswish_without_denominator() + pattern.add_pattern_alternative(hswish_without_denominator) return pattern diff --git a/nncf/onnx/hardware/pattern_operations.py b/nncf/onnx/hardware/pattern_operations.py index b624dc5f656..39c871a38b6 100644 --- a/nncf/onnx/hardware/pattern_operations.py +++ b/nncf/onnx/hardware/pattern_operations.py @@ -11,6 +11,7 @@ from nncf.common.graph.patterns import GraphPattern from nncf.common.graph.patterns import merge_two_types_of_operations +from nncf.onnx.graph.metatypes.onnx_metatypes import MATMUL_METATYPES from nncf.onnx.graph.metatypes.onnx_metatypes import ONNXAddLayerMetatype from nncf.onnx.graph.metatypes.onnx_metatypes import ONNXBatchNormMetatype from nncf.onnx.graph.metatypes.onnx_metatypes import ONNXConvolutionMetatype @@ -22,8 +23,6 @@ from nncf.onnx.graph.metatypes.onnx_metatypes import ONNXHardSigmoidMetatype from nncf.onnx.graph.metatypes.onnx_metatypes import ONNXHardSwishMetatype from nncf.onnx.graph.metatypes.onnx_metatypes import ONNXLeakyReluMetatype -from nncf.onnx.graph.metatypes.onnx_metatypes import ONNXLinearMetatype -from nncf.onnx.graph.metatypes.onnx_metatypes import ONNXMatMulMetatype from nncf.onnx.graph.metatypes.onnx_metatypes import ONNXMulLayerMetatype from nncf.onnx.graph.metatypes.onnx_metatypes import ONNXPReluMetatype from nncf.onnx.graph.metatypes.onnx_metatypes import ONNXReluMetatype @@ -37,8 +36,7 @@ ONNXDepthwiseConvolutionMetatype, ONNXConvolutionTransposeMetatype, ONNXDeformableConvolutionMetatype, - ONNXLinearMetatype, - ONNXMatMulMetatype, + *MATMUL_METATYPES, ], GraphPattern.LABEL_ATTR: "LINEAR", } diff --git a/nncf/onnx/quantization/default_quantization.py b/nncf/onnx/quantization/default_quantization.py index fbc019de419..0be2240f46f 100644 --- a/nncf/onnx/quantization/default_quantization.py +++ b/nncf/onnx/quantization/default_quantization.py @@ -9,17 +9,17 @@ # See the License for the specific language governing permissions and # limitations under the License. -from nncf.common.graph.operator_metatypes import UnknownMetatype from nncf.common.quantization.quantizer_propagation.structs import QuantizationTrait from nncf.onnx.graph.metatypes import onnx_metatypes +# If a metatype is not in this list, then it is considered to be QuantizationTrait.NON_QUANTIZABLE. 
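+# For example, under these defaults Convolution is INPUTS_QUANTIZABLE, MaxPool is QUANTIZATION_AGNOSTIC,
+# and Softmax, which is absent from all the lists below, is treated as NON_QUANTIZABLE.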
+ DEFAULT_ONNX_QUANT_TRAIT_TO_OP_DICT = { QuantizationTrait.INPUTS_QUANTIZABLE: [ onnx_metatypes.ONNXConvolutionMetatype, onnx_metatypes.ONNXDepthwiseConvolutionMetatype, onnx_metatypes.ONNXConvolutionTransposeMetatype, - onnx_metatypes.ONNXLinearMetatype, - onnx_metatypes.ONNXMatMulMetatype, + *onnx_metatypes.MATMUL_METATYPES, onnx_metatypes.ONNXAveragePoolMetatype, onnx_metatypes.ONNXGlobalAveragePoolMetatype, onnx_metatypes.ONNXAddLayerMetatype, @@ -30,21 +30,40 @@ onnx_metatypes.ONNXResizeMetatype, onnx_metatypes.ONNXPowMetatype, onnx_metatypes.ONNXReciprocalMetatype, + onnx_metatypes.ONNXMaximumMetatype, + onnx_metatypes.ONNXMinimumMetatype, ], - QuantizationTrait.NON_QUANTIZABLE: [ - onnx_metatypes.ONNXSigmoidMetatype, - onnx_metatypes.ONNXSoftmaxMetatype, - onnx_metatypes.ONNXQuantizeLinearMetatype, - onnx_metatypes.ONNXDequantizeLinearMetatype, - onnx_metatypes.ONNXDeformableConvolutionMetatype, - UnknownMetatype, - # Ticket: 108478 - onnx_metatypes.ONNXReluMetatype, - onnx_metatypes.ONNXExpMetatype, - onnx_metatypes.ONNXLogMetatype, - onnx_metatypes.ONNXAbsMetatype, - onnx_metatypes.ONNXSqrtMetatype, + QuantizationTrait.QUANTIZATION_AGNOSTIC: [ + onnx_metatypes.ONNXMaxPoolMetatype, + onnx_metatypes.ONNXReduceMaxMetatype, + onnx_metatypes.ONNXReshapeMetatype, + onnx_metatypes.ONNXTransposeMetatype, + onnx_metatypes.ONNXSqueezeMetatype, + onnx_metatypes.ONNXUnsqueezeMetatype, + onnx_metatypes.ONNXSplitMetatype, + onnx_metatypes.ONNXTileMetatype, + onnx_metatypes.ONNXCenterCropPadMetatype, + onnx_metatypes.ONNXSliceMetatype, + onnx_metatypes.ONNXPadMetatype, + onnx_metatypes.ONNXGatherMetatype, + onnx_metatypes.ONNXGatherNDMetatype, + onnx_metatypes.ONNXGatherElementsMetatype, + onnx_metatypes.ONNXDepthToSpaceMetatype, + onnx_metatypes.ONNXSpaceToDepthMetatype, + onnx_metatypes.ONNXScatterElementsMetatype, + onnx_metatypes.ONNXScatterNDMetatype, + onnx_metatypes.ONNXScatterMetatype, + onnx_metatypes.ONNXCastLikeMetatype, + onnx_metatypes.ONNXDropoutMetatype, + onnx_metatypes.ONNXFlattenMetatype, + onnx_metatypes.ONNXExpandMetatype, + onnx_metatypes.ONNXIdentityMetatype, + # ONNXReluMetatype is not considered to be QUANTIZATION_AGNOSTIC, because: + # 1. Runtime doesn't provide performance benefits by quantizing the stand-alone RELU's (ticket: 59548) + # 2. 
It's frequently better for the end accuracy to have quantizers set up after the RELU + # so that the input distribution to the quantizer is non-negative + # and we can therefore have better quantization resolution while preserving the original dynamic range ], - QuantizationTrait.CONCAT: [onnx_metatypes.ONNXConcatLayerMetatype], - QuantizationTrait.OUTPUT_QUANTIZATION_AS_WEIGHTS: [], + QuantizationTrait.CONCAT: [onnx_metatypes.ONNXConcatMetatype], + QuantizationTrait.OUTPUT_QUANTIZATION_AS_WEIGHTS: [onnx_metatypes.ONNXEmbeddingMetatype], } diff --git a/nncf/onnx/quantization/ignored_patterns.py b/nncf/onnx/quantization/ignored_patterns.py index e2d7598fa91..7c0226d590e 100644 --- a/nncf/onnx/quantization/ignored_patterns.py +++ b/nncf/onnx/quantization/ignored_patterns.py @@ -11,48 +11,80 @@ from nncf.common.graph.patterns.patterns import GraphPattern from nncf.common.graph.patterns.patterns import IgnoredPatternNames from nncf.common.utils.registry import Registry -from nncf.onnx.graph.metatypes import onnx_metatypes +from nncf.onnx.graph.metatypes import onnx_metatypes as om +from nncf.onnx.graph.metatypes.onnx_metatypes import MATMUL_METATYPES ONNX_IGNORED_PATTERNS = Registry("IGNORED_PATTERNS") -@ONNX_IGNORED_PATTERNS.register(IgnoredPatternNames.SOFTMAX_MATMUL) -def create_softmax_matmul() -> GraphPattern: - pattern = GraphPattern() +def _add_softmax_matmul(pattern: GraphPattern) -> None: + # SOFTMAX RESHAPE||TRANSPOSE||GATHER||SQUEEZE + # \ / + # \ / + # \ / + # \ / + # \ / + # MATMUL + reshape_transpose_gather_squeeze = [ + om.ONNXReshapeMetatype, + om.ONNXTransposeMetatype, + om.ONNXGatherMetatype, + om.ONNXSqueezeMetatype, + ] softmax = pattern.add_node( - **{GraphPattern.LABEL_ATTR: "SOFTMAX", GraphPattern.METATYPE_ATTR: onnx_metatypes.ONNXSoftmaxMetatype} - ) - matmul = pattern.add_node( - **{GraphPattern.LABEL_ATTR: "MATMUL", GraphPattern.METATYPE_ATTR: onnx_metatypes.ONNXLinearMetatype} + **{GraphPattern.LABEL_ATTR: "SOFTMAX", GraphPattern.METATYPE_ATTR: om.ONNXSoftmaxMetatype} ) - non_pattern_node = pattern.add_node( - **{GraphPattern.LABEL_ATTR: "ANY", GraphPattern.METATYPE_ATTR: GraphPattern.NON_PATTERN_NODE_TYPE} + matmul = pattern.add_node(**{GraphPattern.LABEL_ATTR: "MATMUL", GraphPattern.METATYPE_ATTR: MATMUL_METATYPES}) + matmul_branch_nodes = pattern.add_node( + **{ + GraphPattern.LABEL_ATTR: "RESHAPE||TRANSPOSE||GATHER||SQUEEZE", + GraphPattern.METATYPE_ATTR: reshape_transpose_gather_squeeze, + } ) pattern.add_edge(softmax, matmul) - pattern.add_edge(non_pattern_node, matmul) - return pattern + pattern.add_edge(matmul_branch_nodes, matmul) -@ONNX_IGNORED_PATTERNS.register(IgnoredPatternNames.SOFTMAX_RESHAPE_MATMUL) -def create_softmax_reshape_matmul() -> GraphPattern: - pattern = GraphPattern() +def _add_softmax_reshape_matmul(pattern: GraphPattern) -> None: + # SOFTMAX + # \ + # \ + # \ + # RESHAPE RESHAPE||TRANSPOSE||GATHER||SQUEEZE + # \ / + # \ / + # \ / + # \ / + # \ / + # \ / + # MATMUL + reshape_transpose_gather_squeeze = [ + om.ONNXReshapeMetatype, + om.ONNXTransposeMetatype, + om.ONNXGatherMetatype, + om.ONNXSqueezeMetatype, + ] softmax = pattern.add_node( - **{GraphPattern.LABEL_ATTR: "SOFTMAX", GraphPattern.METATYPE_ATTR: onnx_metatypes.ONNXSoftmaxMetatype} + **{GraphPattern.LABEL_ATTR: "SOFTMAX", GraphPattern.METATYPE_ATTR: om.ONNXSoftmaxMetatype} ) reshape = pattern.add_node( - **{GraphPattern.LABEL_ATTR: "RESHAPE", GraphPattern.METATYPE_ATTR: onnx_metatypes.ONNXReshapeMetatype} - ) - matmul = pattern.add_node( - **{GraphPattern.LABEL_ATTR: "MATMUL", 
GraphPattern.METATYPE_ATTR: onnx_metatypes.ONNXLinearMetatype} + **{GraphPattern.LABEL_ATTR: "RESHAPE", GraphPattern.METATYPE_ATTR: om.ONNXReshapeMetatype} ) - non_pattern_node_1 = pattern.add_node( - **{GraphPattern.LABEL_ATTR: "NON_PATTERN_1", GraphPattern.METATYPE_ATTR: GraphPattern.NON_PATTERN_NODE_TYPE} - ) - non_pattern_node_2 = pattern.add_node( - **{GraphPattern.LABEL_ATTR: "NON_PATTERN_2", GraphPattern.METATYPE_ATTR: GraphPattern.NON_PATTERN_NODE_TYPE} + matmul = pattern.add_node(**{GraphPattern.LABEL_ATTR: "MATMUL", GraphPattern.METATYPE_ATTR: MATMUL_METATYPES}) + matmul_branch_nodes = pattern.add_node( + **{ + GraphPattern.LABEL_ATTR: "RESHAPE||TRANSPOSE||GATHER||SQUEEZE", + GraphPattern.METATYPE_ATTR: reshape_transpose_gather_squeeze, + } ) pattern.add_edge(softmax, reshape) - pattern.add_edge(non_pattern_node_1, reshape) pattern.add_edge(reshape, matmul) - pattern.add_edge(non_pattern_node_2, matmul) + pattern.add_edge(matmul_branch_nodes, matmul) + + +@ONNX_IGNORED_PATTERNS.register(IgnoredPatternNames.MULTIHEAD_ATTENTION_OUTPUT) +def create_multihead_attention_output() -> GraphPattern: + pattern = GraphPattern() + _add_softmax_matmul(pattern) + _add_softmax_reshape_matmul(pattern) return pattern diff --git a/nncf/onnx/quantization/quantize_model.py b/nncf/onnx/quantization/quantize_model.py index c3bb568c62c..a88a9213f36 100644 --- a/nncf/onnx/quantization/quantize_model.py +++ b/nncf/onnx/quantization/quantize_model.py @@ -16,6 +16,7 @@ from nncf.common.logging.logger import nncf_logger from nncf.common.quantization.structs import QuantizationPreset from nncf.data import Dataset +from nncf.onnx.graph.nncf_graph_builder import GraphConverter from nncf.parameters import ModelType from nncf.parameters import TargetDevice from nncf.quantization.advanced_parameters import AdvancedQuantizationParameters @@ -65,6 +66,7 @@ def quantize_impl( advanced_parameters=advanced_parameters, ) - quantized_model = quantization_algorithm.apply(model, dataset=calibration_dataset) + graph = GraphConverter.create_nncf_graph(model) + quantized_model = quantization_algorithm.apply(model, graph, dataset=calibration_dataset) return quantized_model diff --git a/nncf/onnx/quantization/quantizer_parameters.py b/nncf/onnx/quantization/quantizer_parameters.py index 60cc95bdf6a..71b3d976b50 100644 --- a/nncf/onnx/quantization/quantizer_parameters.py +++ b/nncf/onnx/quantization/quantizer_parameters.py @@ -15,6 +15,7 @@ import numpy as np from nncf.quantization.fake_quantize import FakeQuantizeParameters +from nncf.quantization.fake_quantize import calculate_scale_zero_point @dataclass @@ -75,31 +76,3 @@ def get_level_low_level_high(tensor_type: np.dtype) -> Tuple[int, int]: :return: Minimum level and maximum level of the quantizer. """ return (0, 255) if tensor_type == np.uint8 else (-128, 127) - - -def calculate_scale_zero_point( - input_low: np.ndarray, input_high: np.ndarray, level_low: int, level_high: int, narrow_range: bool -) -> Tuple[np.ndarray, np.ndarray]: - """ - Calculates Quantizer/Dequantizer layer scale level. - Returns scale and zero_point values for the quantizer. - - :param input_low: The minimum limit for an input value based on collected statistics. - :param input_high: The maximum limit for an input value based on collected statistics. - :param level_low: The minimum level in the integer range to quantize. - The default is "0" for an unsigned range, and "-2^(bit-1)" for a signed one . - :param level_high: The maximum level in the integer range to quantize. 
- The default is "2^bits-1" for an unsigned range, and "2^(bit-1)-1" for a signed one. - :param narrow_range: True if the range of quantized values is narrowed as compared to the - naive case, False otherwise. - :return: Scale and Zero point values. - """ - levels = level_high - level_low if narrow_range else level_high - level_low + 1 - scale = np.array((input_high - input_low) / (levels - 1)) - expected_level_low = level_low + 1 if narrow_range else level_low - zero_point = expected_level_low - np.round(input_low / scale) - zero_point = np.minimum(np.maximum(zero_point.astype(np.int32), level_low), level_high) - scale = np.array(np.squeeze(scale).astype(np.float32)) - zero_point = np.array(np.squeeze(zero_point)) - - return scale, zero_point diff --git a/nncf/onnx/statistics/aggregator.py b/nncf/onnx/statistics/aggregator.py index e48267ac136..e3435382b5d 100644 --- a/nncf/onnx/statistics/aggregator.py +++ b/nncf/onnx/statistics/aggregator.py @@ -14,8 +14,8 @@ import numpy as np import onnx -from nncf.common.factory import NNCFGraphFactory from nncf.common.factory import TModel +from nncf.common.graph.graph import NNCFGraph from nncf.common.graph.transformations.commands import TargetType from nncf.common.graph.transformations.layout import TransformationLayout from nncf.common.tensor_statistics.aggregator import StatisticsAggregator @@ -28,12 +28,11 @@ class ONNXStatisticsAggregator(StatisticsAggregator): - def collect_statistics(self, model: onnx.ModelProto) -> None: - self._nncf_graph = NNCFGraphFactory.create(model) - self.input_edges_mapping = get_input_edges_mapping(self._nncf_graph) + def collect_statistics(self, model: onnx.ModelProto, graph: NNCFGraph) -> None: + self.input_edges_mapping = get_input_edges_mapping(graph) self._onnx_graph = ONNXGraph(model) self._registered_weights = set() - super().collect_statistics(model) + super().collect_statistics(model, graph) def _register_statistics( self, outputs: Dict[str, ONNXNNCFTensor], statistic_points: StatisticPointsContainer @@ -71,7 +70,7 @@ def _get_transformation_layout_extra_outputs( @staticmethod def _get_merged_statistic_points( - statistic_points: StatisticPointsContainer, model: TModel + statistic_points: StatisticPointsContainer, model: TModel, graph: NNCFGraph ) -> StatisticPointsContainer: # TODO: mirgate to experimental statistic collector and use common merging algorithm return statistic_points diff --git a/nncf/onnx/statistics/collectors.py b/nncf/onnx/statistics/collectors.py index 775b1d36c4d..4afb2396a4b 100644 --- a/nncf/onnx/statistics/collectors.py +++ b/nncf/onnx/statistics/collectors.py @@ -15,14 +15,14 @@ from nncf.common.tensor import NNCFTensor from nncf.common.tensor import TensorElementsType -from nncf.common.tensor_statistics.collectors import BatchStatisticCollector from nncf.common.tensor_statistics.collectors import MeanMinMaxStatisticCollector from nncf.common.tensor_statistics.collectors import MeanStatisticCollector from nncf.common.tensor_statistics.collectors import MinMaxStatisticCollector from nncf.common.tensor_statistics.collectors import NNCFCollectorTensorProcessor -from nncf.onnx.statistics.statistics import ONNXBatchTensorStatistic +from nncf.common.tensor_statistics.collectors import RawStatisticCollector from nncf.onnx.statistics.statistics import ONNXMeanTensorStatistic from nncf.onnx.statistics.statistics import ONNXMinMaxTensorStatistic +from nncf.onnx.statistics.statistics import ONNXRawTensorStatistic from nncf.onnx.tensor import ONNXNNCFTensor @@ -166,7 +166,7 @@ def 
_get_statistics(self) -> ONNXMeanTensorStatistic: return ONNXMeanTensorStatistic(self._mean_aggregate().tensor, self._shape()) -class ONNXBatchStatisticCollector(BatchStatisticCollector): +class ONNXRawStatisticCollector(RawStatisticCollector): @staticmethod def _get_processor() -> NNCFCollectorTensorProcessor: return ONNXNNCFCollectorTensorProcessor() @@ -174,5 +174,5 @@ def _get_processor() -> NNCFCollectorTensorProcessor: def _register_input(self, x: ONNXNNCFTensor): self._register_input_common(x) - def _get_statistics(self) -> ONNXBatchTensorStatistic: - return ONNXBatchTensorStatistic(self._all_values) + def _get_statistics(self) -> ONNXRawTensorStatistic: + return ONNXRawTensorStatistic(self._all_values) diff --git a/nncf/onnx/statistics/statistics.py b/nncf/onnx/statistics/statistics.py index 06ad451d14c..f9d5119201f 100644 --- a/nncf/onnx/statistics/statistics.py +++ b/nncf/onnx/statistics/statistics.py @@ -11,9 +11,9 @@ import numpy as np -from nncf.common.tensor_statistics.statistics import BatchTensorStatistic from nncf.common.tensor_statistics.statistics import MeanTensorStatistic from nncf.common.tensor_statistics.statistics import MinMaxTensorStatistic +from nncf.common.tensor_statistics.statistics import RawTensorStatistic class ONNXMinMaxTensorStatistic(MinMaxTensorStatistic): @@ -28,7 +28,7 @@ def tensor_eq(tensor: np.ndarray, rtol=1e-6) -> bool: return bool(np.all(tensor, rtol=rtol)) -class ONNXBatchTensorStatistic(BatchTensorStatistic): +class ONNXRawTensorStatistic(RawTensorStatistic): @staticmethod def tensor_eq(tensor: np.ndarray, rtol=1e-6) -> bool: return bool(np.all(tensor, rtol=rtol)) diff --git a/nncf/openvino/engine.py b/nncf/openvino/engine.py index 697c0cca9a9..decd31a6364 100644 --- a/nncf/openvino/engine.py +++ b/nncf/openvino/engine.py @@ -18,21 +18,17 @@ from nncf.parameters import TargetDevice -class OVNativeEngine(Engine): +class OVCompiledModelEngine(Engine): """ - Implementation of the engine for OpenVINO backend. + Implementation of the engine to infer OpenVINO compiled model. - OVNativeEngine uses + OVCompiledModelEngine uses [OpenVINO Runtime](https://docs.openvino.ai/latest/openvino_docs_OV_UG_OV_Runtime_User_Guide.html) - to infer the model. + to infer the compiled model. """ - def __init__(self, model: ov.Model, target_device: TargetDevice = TargetDevice.CPU): - if target_device == TargetDevice.ANY: - target_device = TargetDevice.CPU - - ie = ov.Core() - self.compiled_model = ie.compile_model(model, target_device.value) + def __init__(self, model: ov.CompiledModel): + self.compiled_model = model self.input_tensor_names = set() self.number_of_inputs = len(model.inputs) for model_input in model.inputs: @@ -74,3 +70,33 @@ def infer( for tensor_name in tensor.get_names(): output_data[tensor_name] = value return output_data + + +class OVNativeEngine(Engine): + """ + Implementation of the engine for OpenVINO backend. + + OVNativeEngine uses + [OpenVINO Runtime](https://docs.openvino.ai/latest/openvino_docs_OV_UG_OV_Runtime_User_Guide.html) + to infer the model. 
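+    Internally it compiles the model and delegates inference to OVCompiledModelEngine.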
+ """ + + def __init__(self, model: ov.Model, target_device: TargetDevice = TargetDevice.CPU): + if target_device == TargetDevice.ANY: + target_device = TargetDevice.CPU + + ie = ov.Core() + compiled_model = ie.compile_model(model, target_device.value) + self.engine = OVCompiledModelEngine(compiled_model) + + def infer( + self, input_data: Union[np.ndarray, List[np.ndarray], Tuple[np.ndarray], Dict[str, np.ndarray]] + ) -> Dict[str, np.ndarray]: + """ + Runs model on the provided input via OpenVINO Runtime. + Returns the dictionary of model outputs by node names. + + :param input_data: Inputs for the model. + :return output_data: Model's output. + """ + return self.engine.infer(input_data) diff --git a/nncf/openvino/graph/layer_attributes.py b/nncf/openvino/graph/layer_attributes.py new file mode 100644 index 00000000000..588ddd4cd0b --- /dev/null +++ b/nncf/openvino/graph/layer_attributes.py @@ -0,0 +1,129 @@ +# Copyright (c) 2023 Intel Corporation +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from typing import Any, Dict, List, Optional + +import openvino.runtime as ov + +from nncf.common.graph.layer_attributes import BaseLayerAttributes +from nncf.common.graph.layer_attributes import ConvolutionLayerAttributes +from nncf.common.graph.layer_attributes import GenericWeightedLayerAttributes +from nncf.common.graph.layer_attributes import WeightedLayerAttributes +from nncf.openvino.graph.metatypes.openvino_metatypes import OVConvolutionBackpropDataMetatype +from nncf.openvino.graph.metatypes.openvino_metatypes import OVConvolutionMetatype +from nncf.openvino.graph.metatypes.openvino_metatypes import OVDepthwiseConvolutionMetatype +from nncf.openvino.graph.metatypes.openvino_metatypes import OVGroupConvolutionBackpropDataMetatype +from nncf.openvino.graph.metatypes.openvino_metatypes import OVGroupConvolutionMetatype +from nncf.openvino.graph.metatypes.openvino_metatypes import OVOpMetatype + + +class OVLayerAttributes(BaseLayerAttributes): + """ + This class stores additional information about nodes that needs to be processed during compression. + """ + + def __init__( + self, + constant_attributes: Dict[int, Any], + layer_attributes: Optional[Dict[int, BaseLayerAttributes]] = None, + inputs_attributes: Optional[Dict[Any, Any]] = None, + ): + """ + :param constant_attributes: Map of weights port ID to corresponding const attributes. + :param layer_attributes: Map of weights port ID to corresponding common layer attributes. + :param inputs_attributes: Activation attributes. 
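+        Constant attributes and layer attributes are keyed by the input port id on which the constant is consumed.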
+ """ + self._constant_attributes = constant_attributes + self._layer_attributes = layer_attributes + self._inputs_attributes = inputs_attributes + + @property + def constant_attributes(self) -> Dict[int, Any]: + return self._constant_attributes + + @property + def layer_attributes(self) -> Optional[Dict[int, BaseLayerAttributes]]: + return self._layer_attributes + + @property + def input_attributes(self) -> Optional[Dict[Any, Any]]: + return self._inputs_attributes + + def get_const_port_ids(self) -> List[int]: + """ + Returns indices of input ports corresponding to the constant nodes. + + :returns: List of input port indices with constants. + """ + if self._constant_attributes is not None: + return list(self._constant_attributes.keys()) + return [] + + +def get_weighted_layer_attributes( + ov_node: ov.Node, ov_metatype: OVOpMetatype, constant_attributes: Dict[str, Any] +) -> WeightedLayerAttributes: + """ + Funciton retrieves common layer attributes from the given node. + + :param ov_node: TargetOpenvino graph node instance. + :param ov_metatype: NNCF Openvino metatype of the given node. + :param constant_attributes: Constant attributes collected for the given node. + :return: Weighted layer attributes for the given node. + """ + retval = {} + for port_id, attrs in constant_attributes.items(): + if ov_metatype in [ + OVConvolutionMetatype, + OVDepthwiseConvolutionMetatype, + OVGroupConvolutionMetatype, + OVConvolutionBackpropDataMetatype, + OVGroupConvolutionBackpropDataMetatype, + ]: + node_attrs = ov_node.get_attributes() + kwargs = { + "weight_requires_grad": False, + "stride": tuple(node_attrs["strides"]), + "dilations": node_attrs["dilations"], + "transpose": ov_metatype in [OVConvolutionBackpropDataMetatype, OVGroupConvolutionBackpropDataMetatype], + # TODO: ticket 114378: unify pad attribute + "padding_values": tuple(node_attrs["pads_begin"] + node_attrs["pads_end"]), + } + + const_shape = attrs["shape"] + if ov_metatype in [OVConvolutionMetatype, OVConvolutionBackpropDataMetatype]: + kwargs.update( + { + "in_channels": const_shape[1], + "out_channels": const_shape[0], + "kernel_size": tuple(const_shape[2:]), + "groups": 1, + } + ) + else: + kwargs.update( + { + "in_channels": const_shape[2], + "out_channels": const_shape[1], + "kernel_size": tuple(const_shape[3:]), + "groups": const_shape[0], + } + ) + if kwargs["transpose"]: + kwargs["in_channels"], kwargs["out_channels"] = kwargs["out_channels"], kwargs["in_channels"] + + common_layer_attr = ConvolutionLayerAttributes(**kwargs) + else: + common_layer_attr = GenericWeightedLayerAttributes( + weight_requires_grad=False, weight_shape=attrs.get("shape", None) + ) + retval[port_id] = common_layer_attr + return retval diff --git a/nncf/openvino/graph/metatypes/common.py b/nncf/openvino/graph/metatypes/common.py index ee92e669813..1269b24708a 100644 --- a/nncf/openvino/graph/metatypes/common.py +++ b/nncf/openvino/graph/metatypes/common.py @@ -15,7 +15,6 @@ ov_metatypes.OVMaxPoolMetatype, ov_metatypes.OVReduceMaxMetatype, ov_metatypes.OVReshapeMetatype, - ov_metatypes.OVConcatMetatype, ov_metatypes.OVSqueezeMetatype, ov_metatypes.OVUnsqueezeMetatype, ov_metatypes.OVSplitMetatype, @@ -30,7 +29,20 @@ ov_metatypes.OVMaximumMetatype, ov_metatypes.OVConvertLikeMetatype, ov_metatypes.OVGatherMetatype, + ov_metatypes.OVGatherNDMetatype, + ov_metatypes.OVGatherElementsMetatype, + ov_metatypes.OVScatterUpdateMetatype, + ov_metatypes.OVScatterNDUpdateMetatype, + ov_metatypes.OVScatterElementsUpdateMetatype, ov_metatypes.OVDepthToSpaceMetatype, 
+ ov_metatypes.OVSpaceToDepthMetatype, + ov_metatypes.OVBatchToSpaceMetatype, + ov_metatypes.OVSpaceToBatchMetatype, + # OVReluMetatype is not considered to be QUANTIZATION_AGNOSTIC, because: + # 1. Runtime doesn't provide performance benefits by quantizing the stand-alone RELU's (ticket: 59548) + # 2. It's frequently better for the end accuracy to have quantizers set up after the RELU + # so that the input distribution to the quantizer is non-negative + # and we can therefore have better quantization resolution while preserving the original dynamic range ] diff --git a/nncf/openvino/graph/metatypes/openvino_metatypes.py b/nncf/openvino/graph/metatypes/openvino_metatypes.py index d7a49e2eab5..f3e64e70fb0 100644 --- a/nncf/openvino/graph/metatypes/openvino_metatypes.py +++ b/nncf/openvino/graph/metatypes/openvino_metatypes.py @@ -9,6 +9,7 @@ # See the License for the specific language governing permissions and # limitations under the License. +from collections import deque from typing import List, Optional, Type import openvino.runtime as ov @@ -17,6 +18,7 @@ from nncf.common.graph.operator_metatypes import OUTPUT_NOOP_METATYPES from nncf.common.graph.operator_metatypes import OperatorMetatype from nncf.common.graph.operator_metatypes import OperatorMetatypeRegistry +from nncf.common.graph.operator_metatypes import UnknownMetatype from nncf.common.hardware.opset import HWConfigOpName OV_OPERATOR_METATYPES = OperatorMetatypeRegistry("openvino_operator_metatypes") @@ -106,7 +108,9 @@ class OVMatMulMetatype(OVOpMetatype): name = "MatMulOp" op_names = ["MatMul"] hw_config_names = [HWConfigOpName.MATMUL] - const_channel_axis = [1] # const layout: [Y, X] + const_channel_axis = [ + -1 + ] # const layout: [B, ..., Y, X], where const is the second operand of matrix multiplication output_channel_axis = -1 @@ -285,18 +289,36 @@ class OVConvertLikeMetatype(OVOpMetatype): op_names = ["ConvertLike"] +@OV_OPERATOR_METATYPES.register() +class OVSpaceToBatchMetatype(OVOpMetatype): + name = "SpaceToBatchOp" + op_names = ["SpaceToBatch"] + + +@OV_OPERATOR_METATYPES.register() +class OVBatchToSpaceMetatype(OVOpMetatype): + name = "BatchToSpaceOp" + op_names = ["BatchToSpace"] + + @OV_OPERATOR_METATYPES.register() class OVDepthToSpaceMetatype(OVOpMetatype): name = "DepthToSpaceOp" op_names = ["DepthToSpace"] +@OV_OPERATOR_METATYPES.register() +class OVSpaceToDepthMetatype(OVOpMetatype): + name = "SpaceToDepthOp" + op_names = ["SpaceToDepth"] + + @OV_OPERATOR_METATYPES.register() class OVLSTMSequenceMetatype(OVOpMetatype): name = "LSTMSequenceOp" op_names = ["LSTMSequence"] hw_config_names = [HWConfigOpName.LSTMSEQUENCE] - const_channel_axis = [0] # const layout: [num_directions, 4 \* hidden_size, input_size] + const_channel_axis = [1] # const layout: [num_directions, 4 \* hidden_size, input_size] @OV_OPERATOR_METATYPES.register() @@ -304,7 +326,7 @@ class OVGRUSequenceMetatype(OVOpMetatype): name = "GRUSequenceOp" op_names = ["GRUSequence"] hw_config_names = [HWConfigOpName.GRUSEQUENCE] - const_channel_axis = [0] # const layout: [num_directions, 3 \* hidden_size, input_size] + const_channel_axis = [1] # const layout: [num_directions, 3 \* hidden_size, input_size] @OV_OPERATOR_METATYPES.register() @@ -383,6 +405,17 @@ class OVLogicalXorMetatype(OVOpMetatype): hw_config_names = [HWConfigOpName.LOGICALXOR] +@OV_OPERATOR_METATYPES.register() +class OVEmbeddingMetatype(OVOpMetatype): + name = "EmbeddingOp" + hw_config_names = [HWConfigOpName.EMBEDDING] + const_channel_axis = [0] + + @classmethod + def matches(cls, 
node: ov.Node) -> bool: + return _is_embedding(node) + + @OV_OPERATOR_METATYPES.register() class OVFloorMetatype(OVOpMetatype): name = "FloorOp" @@ -440,6 +473,19 @@ class OVRoiAlignMetatype(OVOpMetatype): class OVGatherMetatype(OVOpMetatype): name = "GatherOp" op_names = ["Gather"] + subtypes = [OVEmbeddingMetatype] + + +@OV_OPERATOR_METATYPES.register() +class OVGatherNDMetatype(OVOpMetatype): + name = "GatherNDOp" + op_names = ["GatherND"] + + +@OV_OPERATOR_METATYPES.register() +class OVGatherElementsMetatype(OVOpMetatype): + name = "GatherElementsOp" + op_names = ["GatherElements"] @OV_OPERATOR_METATYPES.register() @@ -528,6 +574,24 @@ class OVTileMetatype(OVOpMetatype): hw_config_names = [HWConfigOpName.TILE] +@OV_OPERATOR_METATYPES.register() +class OVScatterElementsUpdateMetatype(OVOpMetatype): + name = "ScatterElementsUpdateOp" + op_names = ["ScatterElementsUpdate"] + + +@OV_OPERATOR_METATYPES.register() +class OVScatterNDUpdateMetatype(OVOpMetatype): + name = "ScatterNDUpdateOp" + op_names = ["ScatterNDUpdate"] + + +@OV_OPERATOR_METATYPES.register() +class OVScatterUpdateMetatype(OVOpMetatype): + name = "ScatterUpdateOp" + op_names = ["ScatterUpdate"] + + @OV_OPERATOR_METATYPES.register() class OVSoftmaxMetatype(OVOpMetatype): name = "SoftmaxOp" @@ -579,6 +643,12 @@ class OVSwishMetatype(OVOpMetatype): op_names = ["Swish"] +@OV_OPERATOR_METATYPES.register() +class OVHSwishMetatype(OVOpMetatype): + name = "HSwishhOp" + op_names = ["HSwish"] + + @OV_OPERATOR_METATYPES.register() class OVClampMetatype(OVOpMetatype): name = "ClampOp" @@ -612,6 +682,7 @@ class OVAbsMetatype(OVOpMetatype): OVMatMulMetatype, OVLSTMSequenceMetatype, OVGRUSequenceMetatype, + OVEmbeddingMetatype, ] METATYPES_WITH_CONST_PORT_ID = GENERAL_WEIGHT_LAYER_METATYPES + [OVAddMetatype] @@ -632,6 +703,40 @@ def get_operator_metatypes() -> List[Type[OperatorMetatype]]: return list(OV_OPERATOR_METATYPES.registry_dict.values()) +def get_operation_const_op(operation: ov.Node, const_port_id: int) -> Optional[ov.Node]: + """ + Returns constant node of given operation placed on given const port id. + + :param operation: Given operation. + :param const_port_id: Given constant port id. + :returns: Constant node of given operation placed on given const port id. + """ + node = operation.input_value(const_port_id).get_node() + + # There are several cases here + # (Constant) -> (Operation) + # (Constant) -> (Convert) -> (Operation) + # (Constant) -> (Convert) -> (FakeQuantize) -> (Operation) + # (Constant) -> (Convert) -> (FakeQuantize) -> (Reshape) -> (Operation) + # and etc. We need properly find the constant node. So we start with + # `node` and traverse up until the constant node is not found. + queue = deque([node]) + constant_node = None + allowed_propagation_types_list = ["Convert", "FakeQuantize", "Reshape"] + + while len(queue) != 0: + curr_node = queue.popleft() + if curr_node.get_type_name() == "Constant": + constant_node = curr_node + break + if len(curr_node.inputs()) == 0: + break + if curr_node.get_type_name() in allowed_propagation_types_list: + queue.append(curr_node.input_value(0).get_node()) + + return constant_node + + def _is_depthwise_conv(node: ov.Node) -> bool: """ Returns True if the group convolution is depthwise, False - otherwise. 
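    A group convolution is treated as depthwise when its group count equals its input channel count
    and that count is greater than one.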
@@ -650,3 +755,38 @@ def _is_depthwise_conv(node: ov.Node) -> bool: inp_channels = inp_channels.get_length() groups = groups.get_length() return groups == inp_channels and inp_channels > 1 + + +def _is_embedding(node: ov.Node) -> bool: + """ + Returns True if the layer can be represented as embedding, False - otherwise. + + :param node: Layer to check whether it is embedding. + :return: True if the layer is embedding, False - otherwise. + """ + allowed_types_list = ["f16", "f32", "f64"] + const_port_id = 0 + input_tensor = node.input_value(const_port_id) + if input_tensor.get_element_type().get_type_name() in allowed_types_list: + const_node = get_operation_const_op(node, const_port_id) + if const_node is not None: + return True + + return False + + +def get_node_metatype(node: ov.Node) -> Type[OperatorMetatype]: + """ + Determine NNCF meta type for OpenVINO node. + + :param node: OpenVINO node. + :return: NNCF meta type which corresponds to OpenVINO node. + """ + node_type = node.get_type_name() + metatype = OV_OPERATOR_METATYPES.get_operator_metatype_by_op_name(node_type) + if metatype is not UnknownMetatype: + if metatype.get_subtypes(): + subtype = metatype.determine_subtype(node) + if subtype is not None: + metatype = subtype + return metatype diff --git a/nncf/openvino/graph/model_transformer.py b/nncf/openvino/graph/model_transformer.py index 653d33952c1..2e0a3c2d3cd 100644 --- a/nncf/openvino/graph/model_transformer.py +++ b/nncf/openvino/graph/model_transformer.py @@ -27,6 +27,8 @@ from nncf.openvino.graph.transformations.commands import OVFQNodeRemovingCommand from nncf.openvino.graph.transformations.commands import OVInplaceFnInsertionCommand from nncf.openvino.graph.transformations.commands import OVModelExtractionCommand +from nncf.openvino.graph.transformations.commands import OVMultiplyInsertionCommand +from nncf.openvino.graph.transformations.commands import OVNullBiasInsertionCommand from nncf.openvino.graph.transformations.commands import OVOutputInsertionCommand from nncf.openvino.graph.transformations.commands import OVQuantizerInsertionCommand from nncf.openvino.graph.transformations.commands import OVWeightUpdateCommand @@ -48,6 +50,8 @@ def __init__(self, model: TModel): (OVModelExtractionCommand, self._apply_model_extraction_transformation), (OVInplaceFnInsertionCommand, self._apply_insert_operation), (OVOutputInsertionCommand, self._apply_output_insertion_transformations), + (OVNullBiasInsertionCommand, self._apply_bias_insertion_transformations), + (OVMultiplyInsertionCommand, self._apply_multiply_insertion_transformations), ] @staticmethod @@ -60,6 +64,25 @@ def _get_name_to_node_mapping(model: ov.Model) -> Dict[str, ov.Node]: """ return {op.get_friendly_name(): op for op in model.get_ops()} + @staticmethod + def _get_activation_node_names(model: ov.Model) -> List[str]: + """ + Returns list of the activation node names. + + :param model: Model to get list. + :return: List with the activation names. 
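+        Only nodes reachable from the model Parameters via data-flow edges are collected,
+        so purely constant subgraphs (weights, scales) are excluded.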
+ """ + activation_nodes = set() + nodes_queue = deque(model.get_parameters()) + while nodes_queue: + node = nodes_queue.popleft() + if node.name in activation_nodes: + continue + activation_nodes.add(node.name) + for node_output in node.outputs(): + nodes_queue.extend([i.get_node() for i in node_output.get_target_inputs()]) + return list(activation_nodes) + @staticmethod def _update_tensor_name(tensors: List[DescriptorTensor], name: str) -> None: """ @@ -362,22 +385,27 @@ def _apply_model_extraction_transformation( """ transformation = transformations[-1] name_to_node_mapping = OVModelTransformer._get_name_to_node_mapping(model) + activation_node_names = OVModelTransformer._get_activation_node_names(model) params, results = [], [] for input_name in transformation.inputs: input_node = name_to_node_mapping[input_name] if input_name in [tensor.node.get_friendly_name() for tensor in model.inputs]: params.append(input_node) continue - input_port = input_node.input(0) - input_node_output = input_port.get_source_output() - parameter_name = f"Parameter_{input_name}" - new_param = opset.parameter( - shape=input_node_output.partial_shape, dtype=input_node_output.get_element_type(), name=parameter_name - ) - input_port.replace_source_output(new_param.output(0)) - new_param_tensors = [o.get_tensor() for o in new_param.outputs()] - OVModelTransformer._update_tensor_name(new_param_tensors, parameter_name) - params.append(new_param) + for input_port in input_node.inputs(): + if input_port.get_source_output().get_node().name not in activation_node_names: + continue + input_node_output = input_port.get_source_output() + parameter_name = f"Parameter_{input_name}" + new_param = opset.parameter( + shape=input_node_output.partial_shape, + dtype=input_node_output.get_element_type(), + name=parameter_name, + ) + input_port.replace_source_output(new_param.output(0)) + new_param_tensors = [o.get_tensor() for o in new_param.outputs()] + OVModelTransformer._update_tensor_name(new_param_tensors, parameter_name) + params.append(new_param) for output_name in transformation.outputs: output_node = name_to_node_mapping[output_name] @@ -431,3 +459,74 @@ def _insert_inplace_operation( new_node = transformation.inplace_op_fn(output.get_node(), output.get_index()) return (new_node.output(fn_output_port_id), fn_output_port_id) raise RuntimeError(f"Transform type {transform_type} is not supported") + + @staticmethod + def _apply_bias_insertion_transformations( + model: ov.Model, transformations: List[OVNullBiasInsertionCommand] + ) -> ov.Model: + """ + Inserts null bias operation after corresponding layer. + + :param transformations: List of the bias insertion transformations. + :returns: Transformed model with null biases. + """ + name_to_node_mapping = OVModelTransformer._get_name_to_node_mapping(model) + for transformation in transformations: + node_name = transformation.target_point.target_node_name + node = name_to_node_mapping[node_name] + # Since layers that may have biases mostly are Convolution or MatMul variations, + # we may use only 0 output port. 
+ node_shape = node.output(0).partial_shape.get_max_shape() + node_output_port = node.output(transformation.target_point.port_id) + node_output_source_ports = node_output_port.get_target_inputs() + + bias_shape = [1] * len(node_shape) + bias_shape[1] = node_shape[1] + const_value = np.zeros(bias_shape, dtype=node.get_element_type().to_dtype()) + bias_const_node = opset.constant(const_value, dtype=node.get_element_type()) + bias_const_output_port = bias_const_node.output(0) + + add_node = opset.add(node_output_port, bias_const_output_port, name=f"{node_name}/nncf_null_bias_") + + for node_output_source_port in node_output_source_ports: + node_output_source_port.replace_source_output(add_node.output(0)) + + return model + + @staticmethod + def _apply_multiply_insertion_transformations( + model: ov.Model, transformations: List[OVMultiplyInsertionCommand] + ) -> ov.Model: + """ + Inserts Multiply with provided value for corresponding layer. + + :param transformations: List of the smooth insertion transformations. + :returns: Transformed model with Multiply nodes. + """ + name_to_node_mapping = OVModelTransformer._get_name_to_node_mapping(model) + + for transformation in transformations: + node_name = transformation.target_point.target_node_name + node = name_to_node_mapping[node_name] + output_port_id = transformation.target_point.port_id + node_output_port = node.output(output_port_id) + + destination_ports = [] + + for target_input_port in node_output_port.get_target_inputs(): + target_node = target_input_port.get_node() + if target_node.get_friendly_name() in transformation.destination_node_names: + destination_ports.append(target_input_port) + + scale_dtype = ov.Type(np.float32) + fp16_dtype = ov.Type(np.float16) + if all(p.get_element_type() == fp16_dtype for p in destination_ports): + scale_dtype = fp16_dtype + + scale_constant = opset.constant(transformation.scale_value, dtype=scale_dtype) + multiply_node = opset.multiply(node_output_port, scale_constant, name=transformation.multiply_node_name) + + for destination_port in destination_ports: + destination_port.replace_source_output(multiply_node.output(0)) + + return model diff --git a/nncf/openvino/graph/model_utils.py b/nncf/openvino/graph/model_utils.py new file mode 100644 index 00000000000..803b72aa811 --- /dev/null +++ b/nncf/openvino/graph/model_utils.py @@ -0,0 +1,80 @@ +# Copyright (c) 2023 Intel Corporation +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+from collections import deque
+
+import openvino.runtime as ov
+
+from nncf.common.factory import ModelTransformerFactory
+from nncf.common.graph.graph import NNCFGraph
+from nncf.common.graph.transformations.layout import TransformationLayout
+from nncf.openvino.graph.metatypes.common import FAKE_QUANTIZE_OPERATIONS
+from nncf.openvino.graph.metatypes.openvino_metatypes import OVConvolutionBackpropDataMetatype
+from nncf.openvino.graph.metatypes.openvino_metatypes import OVConvolutionMetatype
+from nncf.openvino.graph.metatypes.openvino_metatypes import OVDepthwiseConvolutionMetatype
+from nncf.openvino.graph.metatypes.openvino_metatypes import OVGroupConvolutionBackpropDataMetatype
+from nncf.openvino.graph.metatypes.openvino_metatypes import OVGroupConvolutionMetatype
+from nncf.openvino.graph.node_utils import is_node_with_bias
+from nncf.openvino.graph.transformations.command_creation import OVCommandCreator
+
+
+def insert_null_biases(model: ov.Model, graph: NNCFGraph) -> ov.Model:
+    """
+    This method finds and inserts zero biases for the layers that should have them.
+
+    :param model: ov.Model instance.
+    :param graph: Model graph.
+    :return: Updated ov.Model instance with zero biases.
+    """
+    types_to_insert_bias = [
+        OVConvolutionMetatype,
+        OVGroupConvolutionMetatype,
+        OVDepthwiseConvolutionMetatype,
+        OVConvolutionBackpropDataMetatype,
+        OVGroupConvolutionBackpropDataMetatype,
+    ]
+    nodes_without_biases = graph.get_nodes_by_metatypes(types_to_insert_bias)
+    nodes_without_biases = [node for node in nodes_without_biases if not is_node_with_bias(node, graph)]
+    transformation_layout = TransformationLayout()
+    model_transformer = ModelTransformerFactory.create(model)
+    for node_without_bias in nodes_without_biases:
+        bias_insertion_command = OVCommandCreator.create_command_to_insert_bias(node_without_bias)
+        transformation_layout.register(bias_insertion_command)
+    return model_transformer.transform(transformation_layout)
+
+
+def remove_fq_from_inputs(model: ov.Model, graph: NNCFGraph) -> ov.Model:
+    """
+    This method removes the activation Fake Quantize nodes from the model.
+    It is needed for the subsequent bias shift calculation, which relies on quantized weights.
+
+    :param model: ov.Model instance.
+    :param graph: NNCFGraph instance.
+    :return: ov.Model instance without activation Fake Quantize nodes.
+    """
+    transformation_layout = TransformationLayout()
+    model_transformer = ModelTransformerFactory.create(model)
+
+    seen_nodes = []
+    nodes_queue = deque(graph.get_input_nodes())
+    while nodes_queue:
+        current_node = nodes_queue.popleft()
+        current_node_name = current_node.node_name
+
+        if current_node_name in seen_nodes:
+            continue
+
+        seen_nodes.append(current_node_name)
+        if current_node.metatype in FAKE_QUANTIZE_OPERATIONS:
+            command = OVCommandCreator.create_command_to_remove_quantizer(current_node)
+            transformation_layout.register(command)
+        nodes_queue.extend(graph.get_next_nodes(current_node))
+
+    return model_transformer.transform(transformation_layout)
diff --git a/nncf/openvino/graph/nncf_graph_builder.py b/nncf/openvino/graph/nncf_graph_builder.py
index 8cd3489e3c6..d4dda69c4a0 100644
--- a/nncf/openvino/graph/nncf_graph_builder.py
+++ b/nncf/openvino/graph/nncf_graph_builder.py
@@ -9,24 +9,23 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
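The helpers in the new model_utils.py above are meant to work on an ov.Model together with its NNCFGraph: build the graph once, then derive a transformed model from it. A rough usage sketch, assuming GraphConverter from this patch and an illustrative IR path, might be:

    import openvino.runtime as ov

    from nncf.openvino.graph.model_utils import insert_null_biases
    from nncf.openvino.graph.model_utils import remove_fq_from_inputs
    from nncf.openvino.graph.nncf_graph_builder import GraphConverter

    model = ov.Core().read_model("model.xml")  # hypothetical IR path
    graph = GraphConverter.create_nncf_graph(model)

    # Give every convolution-like node without a bias an explicit zero bias.
    model_with_biases = insert_null_biases(model, graph)

    # Strip the activation FakeQuantize nodes; the graph must match the model passed in,
    # so it is rebuilt for the transformed model first.
    graph_with_biases = GraphConverter.create_nncf_graph(model_with_biases)
    fq_free_model = remove_fq_from_inputs(model_with_biases, graph_with_biases)
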
-from collections import deque -from typing import Dict, List, Type +from typing import List, Type import openvino.runtime as ov -from nncf.common.graph import BaseLayerAttributes from nncf.common.graph import NNCFGraph from nncf.common.graph.layer_attributes import Dtype from nncf.common.graph.operator_metatypes import OperatorMetatype -from nncf.common.graph.operator_metatypes import UnknownMetatype +from nncf.openvino.graph.layer_attributes import OVLayerAttributes +from nncf.openvino.graph.layer_attributes import get_weighted_layer_attributes from nncf.openvino.graph.metatypes.openvino_metatypes import METATYPES_WITH_CONST_PORT_ID -from nncf.openvino.graph.metatypes.openvino_metatypes import OV_OPERATOR_METATYPES -from nncf.openvino.graph.metatypes.openvino_metatypes import OVConstantMetatype from nncf.openvino.graph.metatypes.openvino_metatypes import OVConvolutionBackpropDataMetatype from nncf.openvino.graph.metatypes.openvino_metatypes import OVGroupConvolutionBackpropDataMetatype from nncf.openvino.graph.metatypes.openvino_metatypes import OVGRUSequenceMetatype from nncf.openvino.graph.metatypes.openvino_metatypes import OVLSTMSequenceMetatype from nncf.openvino.graph.metatypes.openvino_metatypes import OVMatMulMetatype +from nncf.openvino.graph.metatypes.openvino_metatypes import get_node_metatype +from nncf.openvino.graph.metatypes.openvino_metatypes import get_operation_const_op class GraphConverter: @@ -78,23 +77,6 @@ def _filter_weight_input_ports(inputs: List[ov.Input], metatype: Type[OperatorMe return inputs[:6] return inputs - @staticmethod - def _get_node_metatype(node: ov.Node) -> Type[OperatorMetatype]: - """ - Determine NNCF meta type for OpenVINO node. - - :param node: OpenVINO node. - :return: NNCF meta type which corresponds to OpenVINO node. - """ - node_type = node.get_type_name() - metatype = OV_OPERATOR_METATYPES.get_operator_metatype_by_op_name(node_type) - if metatype is not UnknownMetatype: - if metatype.get_subtypes(): - subtype = metatype.determine_subtype(node) - if subtype is not None: - metatype = subtype - return metatype - @staticmethod def _add_edges_to_nncf_graph(model: ov.Model, graph: NNCFGraph) -> None: """ @@ -130,9 +112,10 @@ def _add_nncf_node(node: ov.Node, graph: NNCFGraph) -> None: :param graph: NNCFGraph. 
""" node_type = node.get_type_name() - metatype = GraphConverter._get_node_metatype(node) + metatype = get_node_metatype(node) graph.add_nncf_node(node_name=node.get_friendly_name(), node_type=node_type, node_metatype=metatype) + # pylint: disable=too-many-branches @staticmethod def create_nncf_graph(model: ov.Model) -> NNCFGraph: """ @@ -159,14 +142,14 @@ def create_nncf_graph(model: ov.Model) -> NNCFGraph: inference_nodes.append(inp.get_node()) for node in model.get_ops(): - metatype = GraphConverter._get_node_metatype(node) + metatype = get_node_metatype(node) # Add nodes from constant subgraphs node_name = node.get_friendly_name() if node_name not in visited: GraphConverter._add_nncf_node(node, nncf_graph) # Set const port id elif metatype in METATYPES_WITH_CONST_PORT_ID: - const_attrs = {} + const_attrs, act_attrs = {}, {} for inp in GraphConverter._filter_weight_input_ports(node.inputs(), metatype): inp_name = inp.get_source_output().get_node().get_friendly_name() if inp_name in visited: @@ -174,6 +157,9 @@ def create_nncf_graph(model: ov.Model) -> NNCFGraph: const_port_id = inp.get_index() const_node = get_operation_const_op(node, const_port_id) + if const_node is None: + continue + ov_dtype = const_node.get_element_type().get_type_name() if GraphConverter.convert_to_nncf_dtype(ov_dtype) == Dtype.INTEGER: continue @@ -184,69 +170,20 @@ def create_nncf_graph(model: ov.Model) -> NNCFGraph: } if metatype == OVMatMulMetatype: + act_port_id = abs(const_port_id - 1) attribute_names = ["transpose_a", "transpose_b"] node_attributes = node.get_attributes() - transpose = node_attributes[attribute_names[const_port_id]] - const_attrs[const_port_id]["transpose"] = transpose + const_transpose_name = attribute_names[const_port_id] + const_attrs[const_port_id]["transpose"] = node_attributes[const_transpose_name] + act_attrs["transpose"] = node_attributes[attribute_names[act_port_id]] + elif metatype == OVGRUSequenceMetatype: + node_attributes = node.get_attributes() + act_attrs["linear_before_reset"] = node_attributes["linear_before_reset"] - if const_attrs: - nncf_node = nncf_graph.get_node_by_name(node_name) - nncf_node.layer_attributes = OVConstantLayerAttributes(const_attrs) + if const_attrs or act_attrs: + nncf_node = nncf_graph.get_node_by_name(node_name) + layer_attributes = get_weighted_layer_attributes(node, metatype, const_attrs) + nncf_node.layer_attributes = OVLayerAttributes(const_attrs, layer_attributes, act_attrs) GraphConverter._add_edges_to_nncf_graph(model, nncf_graph) return nncf_graph - - -class OVConstantLayerAttributes(BaseLayerAttributes): - """ - This class stores mapping weights port indices to constant name and shape. - """ - - def __init__(self, const_attrs: Dict[int, Dict]): - """ - :param const_attrs: Map of weights port ID to corresponding const attributes. - """ - self.const_attrs = const_attrs - - def get_const_port_ids(self) -> List[int]: - """ - Returns indices of input ports corresponding to the constant nodes. - - :returns: List of input port indices with constants. - """ - return list(self.const_attrs.keys()) - - -def get_operation_const_op(operation: ov.Node, const_port_id: int) -> ov.Node: - """ - Returns constant node of given operation placed on given const port id. - - :param operation: Given operation. - :param const_port_id: Given constant port id. - :returns: Constant node of given operation placed on given const port id. 
- """ - node = operation.input_value(const_port_id).get_node() - - # There are several cases here - # (Constant) -> (Operation) - # (Constant) -> (Convert) -> (Operation) - # (Constant) -> (Convert) -> (FakeQuantize) -> (Operation) - # (Constant) -> (Convert) -> (FakeQuantize) -> (Reshape) -> (Operation) - # and etc. We need properly find the constant node. So we start with - # `node` and traverse up until the constant node is not found. - queue = deque([node]) - constant_node = None - - while len(queue) != 0: - curr_node = queue.popleft() - if OV_OPERATOR_METATYPES.get_operator_metatype_by_op_name(curr_node.get_type_name()) == OVConstantMetatype: - constant_node = curr_node - break - if len(curr_node.inputs()) == 0: - break - queue.append(curr_node.input_value(0).get_node()) - - if constant_node is None: - raise RuntimeError("Constant node was expected but could not find it.") - - return constant_node diff --git a/nncf/openvino/graph/node_utils.py b/nncf/openvino/graph/node_utils.py index 751e94d5710..0679d962e39 100644 --- a/nncf/openvino/graph/node_utils.py +++ b/nncf/openvino/graph/node_utils.py @@ -17,13 +17,13 @@ from nncf.common.graph.graph import NNCFGraph from nncf.common.graph.graph import NNCFNode +from nncf.openvino.graph.layer_attributes import OVLayerAttributes from nncf.openvino.graph.metatypes.openvino_metatypes import GENERAL_WEIGHT_LAYER_METATYPES from nncf.openvino.graph.metatypes.openvino_metatypes import OPERATIONS_WITH_BIAS_METATYPES from nncf.openvino.graph.metatypes.openvino_metatypes import OVAddMetatype from nncf.openvino.graph.metatypes.openvino_metatypes import OVConstantMetatype from nncf.openvino.graph.metatypes.openvino_metatypes import OVConvertMetatype from nncf.openvino.graph.metatypes.openvino_metatypes import OVMatMulMetatype -from nncf.openvino.graph.nncf_graph_builder import OVConstantLayerAttributes InplaceInsertionFnType = Callable[[ov.Node, int], ov.Node] @@ -86,7 +86,7 @@ def get_weight_value(node_with_weight: NNCFNode, model: ov.Model, port_id: int) :param port_id: The input port ID to get weight input. :return: The weight value. 
""" - const_op_friendly_name = node_with_weight.layer_attributes.const_attrs[port_id]["name"] + const_op_friendly_name = node_with_weight.layer_attributes.constant_attributes[port_id]["name"] friendly_name_to_op_map = {op.get_friendly_name(): op for op in model.get_ops()} const_op = friendly_name_to_op_map[const_op_friendly_name] weight_tensor = get_const_value(const_op) @@ -171,7 +171,7 @@ def get_reduce_op(node: ov.Node, output_port_id: int) -> ov.Node: return op( op_input.output(output_port_id), - reduction_axes=reduction_axes_, + reduction_axes=np.array(reduction_axes_, dtype=np.int64), keep_dims=True, name=get_ov_model_reduce_node_name(output_name, reduce_node_name, name_output_port_id), ) @@ -321,13 +321,46 @@ def get_weight_channel_axes(node: NNCFNode, weights_port_id: int) -> List[int]: if node.metatype not in GENERAL_WEIGHT_LAYER_METATYPES: raise ValueError("Channel axis cannot be defined for operation without weights.") - channel_axis = node.metatype.const_channel_axis + channel_axes = node.metatype.const_channel_axis if node.metatype == OVMatMulMetatype: - assert isinstance(node.layer_attributes, OVConstantLayerAttributes) - const_attrs = node.layer_attributes.const_attrs[weights_port_id] - if const_attrs["transpose"]: - assert len(channel_axis) == 1 - assert channel_axis[0] in [0, 1] - channel_axis = [1 - channel_axis[0]] - - return channel_axis + assert isinstance(node.layer_attributes, OVLayerAttributes) + assert len(channel_axes) == 1 + const_attrs = node.layer_attributes.constant_attributes[weights_port_id] + transpose = const_attrs["transpose"] + ndims = len(const_attrs["shape"]) + channel_axes = get_matmul_channel_axes(weights_port_id, ndims, transpose) + + return channel_axes + + +def get_matmul_channel_axes(weights_port_id: int, ndims: int, transpose: bool) -> List[int]: + """ + Calculate channel axes for the MatMul operation. + + :param weights_port_id: Weight port id of the target node. + :param ndims: The number of MatMul dimensions. + :param transpose: Whether the transpose is applied to weights. + :return: List of channel axes for the MatMul operation. + """ + matmul_channel_axis = OVMatMulMetatype.const_channel_axis[0] + if (weights_port_id == 1) == transpose: + matmul_channel_axis -= 1 + matmul_channel_axis = max(ndims, 2) + matmul_channel_axis + channel_axes = list(range(ndims - 2)) + if matmul_channel_axis < ndims: + channel_axes.append(matmul_channel_axis) + return channel_axes + + +def get_channel_agnostic_reduction_shape(channel_axes: List[int], shape: List[int]) -> Tuple[int]: + """ + Returns filtered reduction shape without axes that corresponds channels. + + :param channel_axes: List of the channel axes. + :param shape: Shape that need to be filtered. + :return: Reduction shape in tuple format. + """ + reduction_shape = list(range(len(shape))) + for channel_axis in sorted(channel_axes, reverse=True): + del reduction_shape[channel_axis] + return tuple(reduction_shape) diff --git a/nncf/openvino/graph/transformations/command_creation.py b/nncf/openvino/graph/transformations/command_creation.py index 7dc80464873..8195cd11408 100644 --- a/nncf/openvino/graph/transformations/command_creation.py +++ b/nncf/openvino/graph/transformations/command_creation.py @@ -9,6 +9,8 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+from typing import List + import numpy as np from nncf.common.graph.graph import NNCFGraph @@ -17,6 +19,8 @@ from nncf.common.graph.transformations.commands import TargetType from nncf.openvino.graph.transformations.commands import OVBiasCorrectionCommand from nncf.openvino.graph.transformations.commands import OVFQNodeRemovingCommand +from nncf.openvino.graph.transformations.commands import OVMultiplyInsertionCommand +from nncf.openvino.graph.transformations.commands import OVNullBiasInsertionCommand from nncf.openvino.graph.transformations.commands import OVTargetPoint from nncf.openvino.graph.transformations.commands import OVWeightUpdateCommand @@ -48,3 +52,20 @@ def create_command_to_update_weight( ) -> OVWeightUpdateCommand: target_point = OVTargetPoint(TargetType.LAYER, node_with_weight.node_name, weight_port_id) return OVWeightUpdateCommand(target_point, weight_value) + + @staticmethod + def create_command_to_insert_bias(node_without_bias: NNCFNode) -> OVNullBiasInsertionCommand: + target_point = OVTargetPoint(TargetType.POST_LAYER_OPERATION, node_without_bias.node_name, 0) + return OVNullBiasInsertionCommand(target_point) + + @staticmethod + def multiply_insertion_command( + source_node: NNCFNode, + destination_nodes: List[NNCFNode], + source_out_port: int, + scale_value: np.ndarray, + multiply_node_name: str, + ) -> OVMultiplyInsertionCommand: + target_point = OVTargetPoint(TargetType.POST_LAYER_OPERATION, source_node.node_name, source_out_port) + destination_node_names = [d.node_name for d in destination_nodes] + return OVMultiplyInsertionCommand(target_point, scale_value, destination_node_names, multiply_node_name) diff --git a/nncf/openvino/graph/transformations/commands.py b/nncf/openvino/graph/transformations/commands.py index 572f675b596..89ad7f147b8 100644 --- a/nncf/openvino/graph/transformations/commands.py +++ b/nncf/openvino/graph/transformations/commands.py @@ -135,7 +135,7 @@ class OVModelExtractionCommand(Command): def __init__(self, inputs: List[str], outputs: List[str]): """ - :param inputs: List of the input names that denote the sub-graph beggining. + :param inputs: List of the input names that denote the sub-graph beginning. :param outputs: List of the output names that denote the sub-graph ending. """ super().__init__(TransformationType.EXTRACT) @@ -145,3 +145,47 @@ def __init__(self, inputs: List[str], outputs: List[str]): def union(self, other: "Command") -> "Command": # Have a look at nncf/torch/graph/transformations/commands/PTInsertionCommand raise NotImplementedError() + + +class OVNullBiasInsertionCommand(TransformationCommand): + """ + Inserts null bias for the corresponding node. + """ + + def __init__(self, target_point: OVTargetPoint): + """ + :param target_point: The TargetPoint instance for the insertion that contains layer's information. + """ + super().__init__(TransformationType.INSERT, target_point) + + def union(self, other: "TransformationCommand") -> "TransformationCommand": + # Have a look at nncf/torch/graph/transformations/commands/PTInsertionCommand + raise NotImplementedError() + + +class OVMultiplyInsertionCommand(OVInsertionCommand): + """ + Inserts Multiply nodes before the corresponding nodes. + """ + + def __init__( + self, + target_point: OVTargetPoint, + scale_value: np.ndarray, + destination_node_names: List[str], + multiply_node_name: str, + ): + """ + :param target_point: The TargetPoint instance for the insertion that contains layer's information. + :param scale_value: Scale value for Multiply layer. 
+ :param destination_node_names: New layer consumers. + :param multiply_node_name: New layer name. + """ + super().__init__(target_point) + self.scale_value = scale_value + self.destination_node_names = destination_node_names + self.multiply_node_name = multiply_node_name + + def union(self, other: "TransformationCommand") -> "TransformationCommand": + # Have a look at nncf/torch/graph/transformations/commands/PTInsertionCommand + raise NotImplementedError() diff --git a/nncf/openvino/hardware/fused_patterns.py b/nncf/openvino/hardware/fused_patterns.py index 11f3d9d3315..b6c08d719a8 100644 --- a/nncf/openvino/hardware/fused_patterns.py +++ b/nncf/openvino/hardware/fused_patterns.py @@ -128,6 +128,25 @@ def create_scale_shift() -> GraphPattern: return pattern +@OPENVINO_HW_FUSED_PATTERNS.register(HWFusedPatternNames.SHIFT_SCALE) +def create_shift_scale() -> GraphPattern: + pattern = GraphPattern() + add_node = pattern.add_node( + **{ + GraphPattern.LABEL_ATTR: "ADD, SUBTRACT", + GraphPattern.METATYPE_ATTR: [om.OVAddMetatype, om.OVSubtractMetatype], + } + ) + mul_node = pattern.add_node( + **{ + GraphPattern.LABEL_ATTR: "MULTIPLY, DIV", + GraphPattern.METATYPE_ATTR: [om.OVMultiplyMetatype, om.OVDivideMetatype], + } + ) + pattern.add_edge(add_node, mul_node) + return pattern + + @OPENVINO_HW_FUSED_PATTERNS.register(HWFusedPatternNames.SE_BLOCK) def create_se_block() -> GraphPattern: pattern = GraphPattern() @@ -166,32 +185,6 @@ def create_se_block() -> GraphPattern: return pattern -@OPENVINO_HW_FUSED_PATTERNS.register(HWFusedPatternNames.STABLE_DIFFUSION) -def create_stable_diffusion() -> GraphPattern: - pattern = GraphPattern() - softmax_node = pattern.add_node( - **{GraphPattern.LABEL_ATTR: "SOFTMAX", GraphPattern.METATYPE_ATTR: om.OVSoftmaxMetatype} - ) - reshape_node_1 = pattern.add_node( - **{GraphPattern.LABEL_ATTR: "RESHAPE", GraphPattern.METATYPE_ATTR: om.OVReshapeMetatype} - ) - transpose_node = pattern.add_node( - **{GraphPattern.LABEL_ATTR: "TRANSPOSE", GraphPattern.METATYPE_ATTR: om.OVTransposeMetatype} - ) - reshape_node_2 = pattern.add_node( - **{GraphPattern.LABEL_ATTR: "RESHAPE", GraphPattern.METATYPE_ATTR: om.OVReshapeMetatype} - ) - matmul_node = pattern.add_node( - **{GraphPattern.LABEL_ATTR: "MATMUL", GraphPattern.METATYPE_ATTR: om.OVMatMulMetatype} - ) - - pattern.add_edge(reshape_node_1, transpose_node) - pattern.add_edge(transpose_node, reshape_node_2) - pattern.add_edge(reshape_node_2, matmul_node) - pattern.add_edge(softmax_node, matmul_node) - return pattern - - @OPENVINO_HW_FUSED_PATTERNS.register(HWFusedPatternNames.SOFTMAX_DIV) def create_softmax_div() -> GraphPattern: pattern = GraphPattern() @@ -207,162 +200,6 @@ def create_softmax_div() -> GraphPattern: return pattern -@OPENVINO_HW_FUSED_PATTERNS.register(HWFusedPatternNames.SOFTMAX_RESHAPE_MATMUL) -def create_softmax_reshape_matmul() -> GraphPattern: - pattern = GraphPattern() - softmax_node = pattern.add_node( - **{GraphPattern.LABEL_ATTR: "SOFTMAX", GraphPattern.METATYPE_ATTR: om.OVSoftmaxMetatype} - ) - reshape_node_1 = pattern.add_node( - **{GraphPattern.LABEL_ATTR: "RESHAPE", GraphPattern.METATYPE_ATTR: om.OVReshapeMetatype} - ) - add_node = pattern.add_node(**{GraphPattern.LABEL_ATTR: "ADD", GraphPattern.METATYPE_ATTR: om.OVAddMetatype}) - reshape_node_2 = pattern.add_node( - **{GraphPattern.LABEL_ATTR: "RESHAPE", GraphPattern.METATYPE_ATTR: om.OVReshapeMetatype} - ) - transpose_node = pattern.add_node( - **{GraphPattern.LABEL_ATTR: "TRANSPOSE", GraphPattern.METATYPE_ATTR: om.OVTransposeMetatype} - ) - 
matmul_node = pattern.add_node( - **{GraphPattern.LABEL_ATTR: "MATMUL", GraphPattern.METATYPE_ATTR: om.OVMatMulMetatype} - ) - - pattern.add_edge(softmax_node, reshape_node_1) - pattern.add_edge(add_node, reshape_node_2) - pattern.add_edge(reshape_node_2, transpose_node) - pattern.add_edge(transpose_node, matmul_node) - pattern.add_edge(reshape_node_1, matmul_node) - return pattern - - -@OPENVINO_HW_FUSED_PATTERNS.register(HWFusedPatternNames.SOFTMAX_RESHAPE_TRANSPOSE_MATMUL) -def create_softmax_reshape_transpose_matmul() -> GraphPattern: - pattern = GraphPattern() - softmax_node = pattern.add_node( - **{GraphPattern.LABEL_ATTR: "SOFTMAX", GraphPattern.METATYPE_ATTR: om.OVSoftmaxMetatype} - ) - add_node = pattern.add_node(**{GraphPattern.LABEL_ATTR: "ADD", GraphPattern.METATYPE_ATTR: om.OVAddMetatype}) - reshape_node = pattern.add_node( - **{GraphPattern.LABEL_ATTR: "RESHAPE", GraphPattern.METATYPE_ATTR: om.OVReshapeMetatype} - ) - transpose_node = pattern.add_node( - **{GraphPattern.LABEL_ATTR: "TRANSPOSE", GraphPattern.METATYPE_ATTR: om.OVTransposeMetatype} - ) - matmul_node = pattern.add_node( - **{GraphPattern.LABEL_ATTR: "MATMUL", GraphPattern.METATYPE_ATTR: om.OVMatMulMetatype} - ) - - pattern.add_edge(add_node, reshape_node) - pattern.add_edge(reshape_node, transpose_node) - pattern.add_edge(transpose_node, matmul_node) - pattern.add_edge(softmax_node, matmul_node) - return pattern - - -@OPENVINO_HW_FUSED_PATTERNS.register(HWFusedPatternNames.SOFTMAX_RESHAPE_TRANSPOSE_GATHER_MATMUL) -def create_softmax_reshape_transpose_gather_matmul() -> GraphPattern: - pattern = GraphPattern() - softmax_node = pattern.add_node( - **{GraphPattern.LABEL_ATTR: "SOFTMAX", GraphPattern.METATYPE_ATTR: om.OVSoftmaxMetatype} - ) - add_node = pattern.add_node(**{GraphPattern.LABEL_ATTR: "ADD", GraphPattern.METATYPE_ATTR: om.OVAddMetatype}) - reshape_node = pattern.add_node( - **{GraphPattern.LABEL_ATTR: "RESHAPE", GraphPattern.METATYPE_ATTR: om.OVReshapeMetatype} - ) - transpose_node = pattern.add_node( - **{GraphPattern.LABEL_ATTR: "TRANSPOSE", GraphPattern.METATYPE_ATTR: om.OVTransposeMetatype} - ) - gather_node = pattern.add_node( - **{GraphPattern.LABEL_ATTR: "GATHER", GraphPattern.METATYPE_ATTR: om.OVGatherMetatype} - ) - matmul_node = pattern.add_node( - **{GraphPattern.LABEL_ATTR: "MATMUL", GraphPattern.METATYPE_ATTR: om.OVMatMulMetatype} - ) - - pattern.add_edge(add_node, reshape_node) - pattern.add_edge(reshape_node, transpose_node) - pattern.add_edge(transpose_node, gather_node) - pattern.add_edge(softmax_node, matmul_node) - pattern.add_edge(gather_node, matmul_node) - return pattern - - -@OPENVINO_HW_FUSED_PATTERNS.register(HWFusedPatternNames.EQUAL_LOGICALNOT) -def create_equal_logicalnot() -> GraphPattern: - pattern = GraphPattern() - equal_node = pattern.add_node(**{GraphPattern.LABEL_ATTR: "EQUAL", GraphPattern.METATYPE_ATTR: om.OVEqualMetatype}) - logical_not_node = pattern.add_node( - **{GraphPattern.LABEL_ATTR: "LOGICAL_NOT", GraphPattern.METATYPE_ATTR: om.OVLogicalNotMetatype} - ) - - pattern.add_edge(equal_node, logical_not_node) - return pattern - - -@OPENVINO_HW_FUSED_PATTERNS.register(HWFusedPatternNames.FC_BN_HSWISH_ACTIVATION) -def create_fc_bn_hswish() -> GraphPattern: - pattern = GraphPattern() - unsqueeze_node = pattern.add_node( - **{GraphPattern.LABEL_ATTR: "UNSQUEEZE", GraphPattern.METATYPE_ATTR: om.OVUnsqueezeMetatype} - ) - multiply_node = pattern.add_node( - **{GraphPattern.LABEL_ATTR: "MULTIPLY", GraphPattern.METATYPE_ATTR: om.OVMultiplyMetatype} - ) - add_node = 
pattern.add_node(**{GraphPattern.LABEL_ATTR: "ADD", GraphPattern.METATYPE_ATTR: om.OVAddMetatype}) - squeeze_node = pattern.add_node( - **{GraphPattern.LABEL_ATTR: "SQUEEZE", GraphPattern.METATYPE_ATTR: om.OVSqueezeMetatype} - ) - - pattern.add_edge(unsqueeze_node, multiply_node) - pattern.add_edge(multiply_node, add_node) - pattern.add_edge(add_node, squeeze_node) - return pattern - - -@OPENVINO_HW_FUSED_PATTERNS.register(HWFusedPatternNames.MATMUL_SOFTMAX_MATMUL) -def create_matmul_softmax_matmul() -> GraphPattern: - pattern = GraphPattern() - softmax_1 = pattern.add_node( - **{GraphPattern.LABEL_ATTR: "SOFTMAX", GraphPattern.METATYPE_ATTR: om.OVSoftmaxMetatype} - ) - mat_mul_1_1 = pattern.add_node( - **{GraphPattern.LABEL_ATTR: "MATMUL_1", GraphPattern.METATYPE_ATTR: om.OVMatMulMetatype} - ) - mat_mul_2_1 = pattern.add_node( - **{GraphPattern.LABEL_ATTR: "MATMUL_2", GraphPattern.METATYPE_ATTR: om.OVMatMulMetatype} - ) - - any_1 = pattern.add_node( - **{GraphPattern.LABEL_ATTR: "ANY", GraphPattern.METATYPE_ATTR: GraphPattern.NON_PATTERN_NODE_TYPE} - ) - - pattern.add_edge(mat_mul_1_1, softmax_1) - pattern.add_edge(softmax_1, mat_mul_2_1) - pattern.add_edge(any_1, mat_mul_2_1) - - softmax_2 = pattern.add_node( - **{GraphPattern.LABEL_ATTR: "SOFTMAX", GraphPattern.METATYPE_ATTR: om.OVSoftmaxMetatype} - ) - add_2 = pattern.add_node(**{GraphPattern.LABEL_ATTR: "ADD", GraphPattern.METATYPE_ATTR: om.OVAddMetatype}) - mat_mul_1_2 = pattern.add_node( - **{GraphPattern.LABEL_ATTR: "MATMUL_1", GraphPattern.METATYPE_ATTR: om.OVMatMulMetatype} - ) - mat_mul_2_2 = pattern.add_node( - **{GraphPattern.LABEL_ATTR: "MATMUL_2", GraphPattern.METATYPE_ATTR: om.OVMatMulMetatype} - ) - - any_2 = pattern.add_node( - **{GraphPattern.LABEL_ATTR: "ANY", GraphPattern.METATYPE_ATTR: GraphPattern.NON_PATTERN_NODE_TYPE} - ) - - pattern.add_edge(mat_mul_1_2, add_2) - pattern.add_edge(add_2, softmax_2) - pattern.add_edge(softmax_2, mat_mul_2_2) - pattern.add_edge(any_2, mat_mul_2_2) - - return pattern - - # ACTIVATIONS @@ -478,27 +315,6 @@ def create_softmax() -> GraphPattern: # INPUT PROCESSING -@OPENVINO_HW_FUSED_PATTERNS.register(HWFusedPatternNames.INPUT_SHIFT_SCALE) -def create_input_shift_scale() -> GraphPattern: - pattern = GraphPattern() - model_input = pattern.add_node( - **{GraphPattern.LABEL_ATTR: "MODEL_INPUT", GraphPattern.METATYPE_ATTR: om.OVParameterMetatype} - ) - add_node = pattern.add_node( - **{ - GraphPattern.LABEL_ATTR: "ADD, SUBTRACT", - GraphPattern.METATYPE_ATTR: [om.OVAddMetatype, om.OVSubtractMetatype], - } - ) - multiply_node = pattern.add_node( - **{GraphPattern.LABEL_ATTR: "MULTIPLY", GraphPattern.METATYPE_ATTR: om.OVMultiplyMetatype} - ) - - pattern.add_edge(model_input, add_node) - pattern.add_edge(add_node, multiply_node) - return pattern - - @OPENVINO_HW_FUSED_PATTERNS.register(HWFusedPatternNames.INPUT_CONVERT_TRANSPOSE_PROCESSING) def create_input_convert_transpose_processing() -> GraphPattern: input_convert_transpose = create_input_convert_transpose() @@ -631,6 +447,16 @@ def create_input_scale_shift() -> GraphPattern: return pattern +@OPENVINO_HW_FUSED_PATTERNS.register(HWFusedPatternNames.INPUT_SHIFT_SCALE) +def create_input_shift_scale() -> GraphPattern: + pattern = GraphPattern() + pattern.add_node(**{GraphPattern.LABEL_ATTR: "MODEL_INPUT", GraphPattern.METATYPE_ATTR: om.OVParameterMetatype}) + shift_scale = create_shift_scale() + + pattern.join_patterns(shift_scale) + return pattern + + @OPENVINO_HW_FUSED_PATTERNS.register(HWFusedPatternNames.INPUT_TRANSPOSE_PROCESSING) def 
create_input_transpose_processing() -> GraphPattern: pattern = GraphPattern() @@ -745,6 +571,15 @@ def create_linear_arithmetic_activations() -> GraphPattern: return linear +@OPENVINO_HW_FUSED_PATTERNS.register(HWFusedPatternNames.LINEAR_ARITHMETIC_ACTIVATIONS_ARITHMETIC) +def create_linear_arithmetic_activations_arithmetic() -> GraphPattern: + linear_arithmetic_activations = create_linear_arithmetic_activations() + arithmetic = arithmetic_operations() + + linear_arithmetic_activations.join_patterns(arithmetic) + return linear_arithmetic_activations + + @OPENVINO_HW_FUSED_PATTERNS.register(HWFusedPatternNames.LINEAR_SQUEEZE_ACTIVATIONS) def create_linear_squeeze_activation() -> GraphPattern: linear = linear_operations() @@ -768,6 +603,21 @@ def create_mvn_scale_shift_activations() -> GraphPattern: return pattern +@OPENVINO_HW_FUSED_PATTERNS.register(HWFusedPatternNames.LINEAR_ACTIVATIONS_UNSQUEEZE_BN_SQUEEZE) +def create_linear_activations_unsqueeze_bn_squeeze(): + linear_biased = create_biased_op() + activations = atomic_activations_operations() + unsqueeze_op = unsqueeze_operation() + scale_shift = create_scale_shift() + squeeze_op = squeeze_operation() + + linear_biased.join_patterns(activations) + linear_biased.join_patterns(unsqueeze_op) + linear_biased.join_patterns(scale_shift) + linear_biased.join_patterns(squeeze_op) + return linear_biased + + # DEVICE PATTERNS @@ -899,6 +749,12 @@ def squeeze_operation() -> GraphPattern: return pattern +def unsqueeze_operation() -> GraphPattern: + pattern = GraphPattern() + pattern.add_node(**{GraphPattern.LABEL_ATTR: "UNSQUEEZE", GraphPattern.METATYPE_ATTR: om.OVUnsqueezeMetatype}) + return pattern + + def create_input_convert_transpose() -> GraphPattern: pattern = GraphPattern() model_input = pattern.add_node( diff --git a/nncf/openvino/hardware/pattern_operations.py b/nncf/openvino/hardware/pattern_operations.py index 66ac6dea0cc..1e05f577f92 100644 --- a/nncf/openvino/hardware/pattern_operations.py +++ b/nncf/openvino/hardware/pattern_operations.py @@ -39,6 +39,7 @@ ov_metatypes.OVSigmoidMetatype, ov_metatypes.OVHardSigmoidMetatype, ov_metatypes.OVSwishMetatype, + ov_metatypes.OVHSwishMetatype, ], GraphPattern.LABEL_ATTR: "ATOMIC_ACTIVATIONS", } diff --git a/nncf/openvino/pot/engine.py b/nncf/openvino/pot/engine.py index b44323d5e55..60aeae612ab 100644 --- a/nncf/openvino/pot/engine.py +++ b/nncf/openvino/pot/engine.py @@ -100,7 +100,7 @@ def calc_per_sample_metrics( ) -> List[Dict[str, Any]]: per_sample_metrics = [] for inputs in dataset.get_data(subset_indices): - value = val_func(compiled_model, [inputs]) + value, _ = val_func(compiled_model, [inputs]) per_sample_metrics.append( {"sample_id": len(per_sample_metrics), "metric_name": "original_metric", "result": value} ) @@ -146,7 +146,7 @@ def predict( is_full_dataset = subset_indices is None or len(subset_indices) == len(self.data_loader) if self._validation_fn and (is_full_dataset or self.use_original_metric) and not self.statistics_collection: compiled_model = self._ie.compile_model(model=self._model, device_name=self._device) - self._metric.avg_value = self._validation_fn( + self._metric.avg_value, per_sample_metrics = self._validation_fn( compiled_model, self._validation_dataset.get_data(subset_indices) ) if not metric_per_sample and stats_layout is None: @@ -155,7 +155,13 @@ def predict( return metrics, {} if self.use_original_metric and metric_per_sample: - self._per_sample_metrics = self.calculate_per_sample_metrics(subset_indices) + if per_sample_metrics is not None: + 
self._per_sample_metrics = [
+                    {"sample_id": i, "metric_name": "original_metric", "result": value}
+                    for i, value in enumerate(per_sample_metrics)
+                ]
+            else:
+                self._per_sample_metrics = self.calculate_per_sample_metrics(subset_indices)
         if stats_layout is None:
             metrics = self._metric.avg_value
             metrics = (sorted(self._per_sample_metrics, key=lambda i: i["sample_id"]), metrics)
@@ -188,4 +194,7 @@ def _update_metrics(self, output, annotations, need_metrics_per_sample: bool = F
     @staticmethod
     def _process_batch(batch):
         index, input_data = batch
-        return [(index, None)], [input_data], None
+        return [(index, None)], input_data, None
+
+    def _fill_input(self, model, image_batch):
+        return image_batch
diff --git a/nncf/openvino/pot/quantization/quantize_model.py b/nncf/openvino/pot/quantization/quantize_model.py
index f9edc7b297b..5bb985bff9e 100644
--- a/nncf/openvino/pot/quantization/quantize_model.py
+++ b/nncf/openvino/pot/quantization/quantize_model.py
@@ -79,6 +79,7 @@ def _convert_compressed_model_to_openvino_model(model: pot.graph.nx_model.Compre
     xml_path = paths[0]["model"]
     bin_path = paths[0]["weights"]
     ie = ov.Core()
+    ie.set_property({"ENABLE_MMAP": "NO"})
     ov_model = ie.read_model(xml_path, bin_path)
     return ov_model
@@ -335,6 +336,12 @@ def quantize_impl(
     if advanced_parameters is None:
         advanced_parameters = AdvancedQuantizationParameters()
+    if model_type == ModelType.TRANSFORMER and advanced_parameters.smooth_quant_alpha > 0:
+        nncf_logger.warning(
+            'IMPORTANT. The advanced parameter "smooth_quant_alpha > 0" IS NOT SUPPORTED for the POT backend! '
+            'Please use "smooth_quant_alpha = -1".'
+        )
+
     algorithm_parameters = _create_quantization_config(
         preset, target_device, subset_size, fast_bias_correction, model_type, ignored_scope, advanced_parameters
     )
@@ -433,6 +440,12 @@ def quantize_with_accuracy_control_impl(
     if advanced_quantization_parameters is None:
         advanced_quantization_parameters = AdvancedQuantizationParameters()
+    if model_type == ModelType.TRANSFORMER and advanced_quantization_parameters.smooth_quant_alpha > 0:
+        nncf_logger.warning(
+            'IMPORTANT. The advanced parameter "smooth_quant_alpha > 0" IS NOT SUPPORTED for the POT backend! '
+            'Please use "smooth_quant_alpha = -1".'
+        )
+
     if advanced_quantization_parameters.disable_bias_correction:
         raise ValueError(
             "Quantization algorithm with accuracy controll from the OpenVINO backend "
@@ -465,8 +478,8 @@ def quantize_with_accuracy_control_impl(
     algotrithm_parameters.update(
         _create_quantization_config(
-            target_device,
             preset,
+            target_device,
             subset_size,
             fast_bias_correction,
             model_type,
diff --git a/nncf/openvino/quantization/default_quantization.py b/nncf/openvino/quantization/default_quantization.py
index 7da44d8cba1..d7a30a5b2df 100644
--- a/nncf/openvino/quantization/default_quantization.py
+++ b/nncf/openvino/quantization/default_quantization.py
@@ -9,9 +9,11 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
-from nncf.common.graph.operator_metatypes import UnknownMetatype
 from nncf.common.quantization.quantizer_propagation.structs import QuantizationTrait
 from nncf.openvino.graph.metatypes import openvino_metatypes as ov_metatypes
+from nncf.openvino.graph.metatypes.common import QUANTIZE_AGNOSTIC_OPERATIONS
+
+# If a metatype is not in this list, then it is considered to be QuantizationTrait.NON_QUANTIZABLE.
DEFAULT_OV_QUANT_TRAIT_TO_OP_DICT = { QuantizationTrait.INPUTS_QUANTIZABLE: [ @@ -49,19 +51,7 @@ ov_metatypes.OVLSTMSequenceMetatype, ov_metatypes.OVGRUSequenceMetatype, ], - QuantizationTrait.NON_QUANTIZABLE: [ - ov_metatypes.OVSigmoidMetatype, - ov_metatypes.OVSoftmaxMetatype, - ov_metatypes.OVAssignMetatype, - ov_metatypes.OVDeformableConvolutionMetatype, - UnknownMetatype, - # Ticket: 108478 - ov_metatypes.OVReluMetatype, - ov_metatypes.OVLogMetatype, - ov_metatypes.OVExpMetatype, - ov_metatypes.OVSqrtMetatype, - ov_metatypes.OVAbsMetatype, - ], + QuantizationTrait.QUANTIZATION_AGNOSTIC: QUANTIZE_AGNOSTIC_OPERATIONS, QuantizationTrait.CONCAT: [ov_metatypes.OVConcatMetatype], - QuantizationTrait.OUTPUT_QUANTIZATION_AS_WEIGHTS: [], + QuantizationTrait.OUTPUT_QUANTIZATION_AS_WEIGHTS: [ov_metatypes.OVEmbeddingMetatype], } diff --git a/nncf/openvino/quantization/ignored_patterns.py b/nncf/openvino/quantization/ignored_patterns.py index 2cd3e127e7e..b61d11e933e 100644 --- a/nncf/openvino/quantization/ignored_patterns.py +++ b/nncf/openvino/quantization/ignored_patterns.py @@ -16,33 +16,95 @@ OPENVINO_IGNORED_PATTERNS = Registry("IGNORED_PATTERNS") -@OPENVINO_IGNORED_PATTERNS.register(IgnoredPatternNames.SOFTMAX_MATMUL) -def softmax_matmul() -> GraphPattern: - pattern = GraphPattern() +def _add_softmax_matmul(pattern: GraphPattern) -> None: + # SOFTMAX RESHAPE||TRANSPOSE||GATHER||SQUEEZE + # \ / + # \ / + # \ / + # \ / + # \ / + # MATMUL + reshape_transpose_gather_squeeze = [ + om.OVReshapeMetatype, + om.OVTransposeMetatype, + om.OVGatherMetatype, + om.OVSqueezeMetatype, + ] softmax = pattern.add_node(**{GraphPattern.LABEL_ATTR: "SOFTMAX", GraphPattern.METATYPE_ATTR: om.OVSoftmaxMetatype}) matmul = pattern.add_node(**{GraphPattern.LABEL_ATTR: "MATMUL", GraphPattern.METATYPE_ATTR: om.OVMatMulMetatype}) - non_pattern_node = pattern.add_node( - **{GraphPattern.LABEL_ATTR: "ANY", GraphPattern.METATYPE_ATTR: GraphPattern.NON_PATTERN_NODE_TYPE} + matmul_branch_nodes = pattern.add_node( + **{ + GraphPattern.LABEL_ATTR: "RESHAPE||TRANSPOSE||GATHER||SQUEEZE", + GraphPattern.METATYPE_ATTR: reshape_transpose_gather_squeeze, + } ) pattern.add_edge(softmax, matmul) - pattern.add_edge(non_pattern_node, matmul) - return pattern + pattern.add_edge(matmul_branch_nodes, matmul) -@OPENVINO_IGNORED_PATTERNS.register(IgnoredPatternNames.SOFTMAX_RESHAPE_MATMUL) -def softmax_reshape_matmul() -> GraphPattern: - pattern = GraphPattern() +def _add_softmax_reshape_matmul(pattern: GraphPattern) -> None: + # SOFTMAX + # \ + # \ + # \ + # RESHAPE RESHAPE||TRANSPOSE||GATHER||SQUEEZE + # \ / + # \ / + # \ / + # \ / + # \ / + # \ / + # MATMUL + reshape_transpose_gather = [om.OVReshapeMetatype, om.OVTransposeMetatype, om.OVGatherMetatype, om.OVSqueezeMetatype] softmax = pattern.add_node(**{GraphPattern.LABEL_ATTR: "SOFTMAX", GraphPattern.METATYPE_ATTR: om.OVSoftmaxMetatype}) reshape = pattern.add_node(**{GraphPattern.LABEL_ATTR: "RESHAPE", GraphPattern.METATYPE_ATTR: om.OVReshapeMetatype}) matmul = pattern.add_node(**{GraphPattern.LABEL_ATTR: "MATMUL", GraphPattern.METATYPE_ATTR: om.OVMatMulMetatype}) - non_pattern_node_1 = pattern.add_node( - **{GraphPattern.LABEL_ATTR: "NON_PATTERN_1", GraphPattern.METATYPE_ATTR: GraphPattern.NON_PATTERN_NODE_TYPE} - ) - non_pattern_node_2 = pattern.add_node( - **{GraphPattern.LABEL_ATTR: "NON_PATTERN_2", GraphPattern.METATYPE_ATTR: GraphPattern.NON_PATTERN_NODE_TYPE} + matmul_branch_nodes = pattern.add_node( + **{ + GraphPattern.LABEL_ATTR: "RESHAPE||TRANSPOSE||GATHER||SQUEEZE", + 
GraphPattern.METATYPE_ATTR: reshape_transpose_gather, + } ) pattern.add_edge(softmax, reshape) - pattern.add_edge(non_pattern_node_1, reshape) pattern.add_edge(reshape, matmul) - pattern.add_edge(non_pattern_node_2, matmul) + pattern.add_edge(matmul_branch_nodes, matmul) + + +@OPENVINO_IGNORED_PATTERNS.register(IgnoredPatternNames.MULTIHEAD_ATTENTION_OUTPUT) +def create_multihead_attention_output() -> GraphPattern: + pattern = GraphPattern() + _add_softmax_matmul(pattern) + _add_softmax_reshape_matmul(pattern) + return pattern + + +@OPENVINO_IGNORED_PATTERNS.register(IgnoredPatternNames.FC_BN_HSWISH_ACTIVATION) +def create_fc_bn_hswish() -> GraphPattern: + pattern = GraphPattern() + unsqueeze_node = pattern.add_node( + **{GraphPattern.LABEL_ATTR: "UNSQUEEZE", GraphPattern.METATYPE_ATTR: om.OVUnsqueezeMetatype} + ) + multiply_node = pattern.add_node( + **{GraphPattern.LABEL_ATTR: "MULTIPLY", GraphPattern.METATYPE_ATTR: om.OVMultiplyMetatype} + ) + add_node = pattern.add_node(**{GraphPattern.LABEL_ATTR: "ADD", GraphPattern.METATYPE_ATTR: om.OVAddMetatype}) + squeeze_node = pattern.add_node( + **{GraphPattern.LABEL_ATTR: "SQUEEZE", GraphPattern.METATYPE_ATTR: om.OVSqueezeMetatype} + ) + + pattern.add_edge(unsqueeze_node, multiply_node) + pattern.add_edge(multiply_node, add_node) + pattern.add_edge(add_node, squeeze_node) + return pattern + + +@OPENVINO_IGNORED_PATTERNS.register(IgnoredPatternNames.EQUAL_LOGICALNOT) +def create_equal_logicalnot() -> GraphPattern: + pattern = GraphPattern() + equal_node = pattern.add_node(**{GraphPattern.LABEL_ATTR: "EQUAL", GraphPattern.METATYPE_ATTR: om.OVEqualMetatype}) + logical_not_node = pattern.add_node( + **{GraphPattern.LABEL_ATTR: "LOGICAL_NOT", GraphPattern.METATYPE_ATTR: om.OVLogicalNotMetatype} + ) + + pattern.add_edge(equal_node, logical_not_node) return pattern diff --git a/nncf/openvino/quantization/quantize_model.py b/nncf/openvino/quantization/quantize_model.py index 9de27dac04c..4368f1a7578 100644 --- a/nncf/openvino/quantization/quantize_model.py +++ b/nncf/openvino/quantization/quantize_model.py @@ -11,18 +11,18 @@ import importlib from copy import deepcopy -from typing import Any, Callable, Dict, Iterable, List, Optional +from typing import Any, Callable, Dict, Iterable, List, Optional, Tuple, TypeVar, Union import openvino.runtime as ov from openvino._offline_transformations import compress_quantize_weights_transformation from nncf.common.logging import nncf_logger from nncf.common.quantization.structs import QuantizationPreset -from nncf.common.utils.backend import get_backend -from nncf.common.utils.timer import timer from nncf.data import Dataset +from nncf.openvino.graph.nncf_graph_builder import GraphConverter from nncf.openvino.quantization.backend_parameters import BackendParameters from nncf.openvino.quantization.backend_parameters import is_weight_compression_needed +from nncf.openvino.quantization.weights_compression import insert_pre_compression_operations from nncf.parameters import DropType from nncf.parameters import ModelType from nncf.parameters import TargetDevice @@ -30,8 +30,10 @@ from nncf.quantization.advanced_parameters import AdvancedQuantizationParameters from nncf.quantization.advanced_parameters import convert_to_dict_recursively from nncf.quantization.algorithms.accuracy_control.algorithm import QuantizationAccuracyRestorer -from nncf.quantization.algorithms.accuracy_control.algorithm import get_algo_backend +from nncf.quantization.algorithms.accuracy_control.algorithm import calculate_accuracy_drop +from 
nncf.quantization.algorithms.accuracy_control.evaluator import Evaluator from nncf.quantization.algorithms.post_training.algorithm import PostTrainingQuantization +from nncf.quantization.quantize_model import quantize_with_tune_hyperparams from nncf.quantization.telemetry_extractors import CompressionStartedWithQuantizeApi from nncf.scopes import IgnoredScope from nncf.telemetry.decorator import tracked_function @@ -39,6 +41,8 @@ USE_POT_AS_DEFAULT = False +TTensor = TypeVar("TTensor") + def should_use_pot(advanced_parameters: Optional[AdvancedQuantizationParameters]) -> bool: """ @@ -112,7 +116,8 @@ def native_quantize_impl( advanced_parameters=advanced_parameters, ) - quantized_model = quantization_algorithm.apply(model, dataset=calibration_dataset) + graph = GraphConverter.create_nncf_graph(model) + quantized_model = quantization_algorithm.apply(model, graph, dataset=calibration_dataset) if is_weight_compression_needed(advanced_parameters): compress_quantize_weights_transformation(quantized_model) @@ -139,7 +144,7 @@ def native_quantize_with_accuracy_control_impl( model: ov.Model, calibration_dataset: Dataset, validation_dataset: Dataset, - validation_fn: Callable[[Any, Iterable[Any]], float], + validation_fn: Callable[[Any, Iterable[Any]], Tuple[float, Union[None, List[float], List[List[TTensor]]]]], max_drop: float = 0.01, drop_type: DropType = DropType.ABSOLUTE, preset: QuantizationPreset = QuantizationPreset.PERFORMANCE, @@ -158,12 +163,6 @@ def native_quantize_with_accuracy_control_impl( if advanced_accuracy_restorer_parameters is None: advanced_accuracy_restorer_parameters = AdvancedAccuracyRestorerParameters() - if advanced_accuracy_restorer_parameters.tune_hyperparams: - raise RuntimeError( - "Quantization algorithm with accuracy control from the " - "OpenVINO backend does not support tuning hyperparams yet" - ) - compress_weights = is_weight_compression_needed(advanced_quantization_parameters) if advanced_quantization_parameters is None: @@ -184,35 +183,75 @@ def native_quantize_with_accuracy_control_impl( copied_parameters, ) - # Backends - backend = get_backend(model) - algo_backend = get_algo_backend(backend) + evaluator = Evaluator(validation_fn) + evaluator.enable_iteration_count() + initial_metric_results = evaluator.collect_metric_results(model, validation_dataset, model_name="initial") + validation_dataset_size = evaluator.num_passed_iterations + evaluator.disable_iteration_count() - nncf_logger.info("Validation of initial model was started") - with timer(): - initial_metric = validation_fn(algo_backend.prepare_for_inference(model), validation_dataset.get_data()) - nncf_logger.info(f"Metric of initial model: {initial_metric}") + quantized_metric_results = evaluator.collect_metric_results( + quantized_model, validation_dataset, model_name="quantized" + ) + + should_terminate, accuracy_drop = calculate_accuracy_drop( + initial_metric_results.metric_value, quantized_metric_results.metric_value, max_drop, drop_type + ) - nncf_logger.info("Validation of quantized model was started") - with timer(): - quantized_metric = validation_fn( - algo_backend.prepare_for_inference(quantized_model), validation_dataset.get_data() + nncf_logger.info(f"Accuracy drop: {accuracy_drop} ({drop_type})") + + # TODO(andrey-churkin): Collect statistics only once + if advanced_accuracy_restorer_parameters.tune_hyperparams and not should_terminate: + tuned_quantized_model = quantize_with_tune_hyperparams( + model, + calibration_dataset, + validation_dataset, + validation_fn, + 
initial_metric_results, + quantized_metric_results, + subset_size, + preset, + target_device, + subset_size, + fast_bias_correction, + model_type, + ignored_scope, + advanced_quantization_parameters, + ) + tuned_quantized_metric_results = evaluator.collect_metric_results( + tuned_quantized_model, validation_dataset, model_name="tuned" + ) + should_terminate, tuned_accuracy_drop = calculate_accuracy_drop( + initial_metric_results.metric_value, tuned_quantized_metric_results.metric_value, max_drop, drop_type ) - nncf_logger.info(f"Metric of quantized model: {quantized_metric}") - ranking_subset_size = subset_size - if advanced_accuracy_restorer_parameters.ranking_subset_size is not None: - ranking_subset_size = advanced_accuracy_restorer_parameters.ranking_subset_size + nncf_logger.info(f"Accuracy drop (tuned): {tuned_accuracy_drop} ({drop_type})") + + if should_terminate or tuned_accuracy_drop < accuracy_drop: + quantized_model = tuned_quantized_model + quantized_metric_results = tuned_quantized_metric_results + + if not should_terminate: + ranking_subset_size = subset_size + if advanced_accuracy_restorer_parameters.ranking_subset_size is not None: + ranking_subset_size = advanced_accuracy_restorer_parameters.ranking_subset_size + + accuracy_restorer = QuantizationAccuracyRestorer( + ranking_subset_size, + advanced_accuracy_restorer_parameters.max_num_iterations, + max_drop, + drop_type, + advanced_accuracy_restorer_parameters.num_ranking_processes, + ) + quantized_model = accuracy_restorer.apply( + model, + initial_metric_results, + quantized_model, + quantized_metric_results, + validation_dataset, + validation_dataset_size, + evaluator, + ) - accuracy_aware_loop = QuantizationAccuracyRestorer( - ranking_subset_size=ranking_subset_size, - max_num_iterations=advanced_accuracy_restorer_parameters.max_num_iterations, - max_drop=max_drop, - drop_type=drop_type, - ) - quantized_model = accuracy_aware_loop.restore_accuracy( - model, initial_metric, quantized_model, quantized_metric, validation_dataset, validation_fn - ) if compress_weights: compress_quantize_weights_transformation(quantized_model) @@ -268,6 +307,23 @@ def quantize_impl( ) +def wrap_validation_fn(validation_fn): + """ + Wraps validation function to support case when it only returns metric value. + + :param validation_fn: Validation function to wrap. + :return: Wrapped validation function. + """ + + def wrapper(*args, **kwargs): + retval = validation_fn(*args, **kwargs) + if isinstance(retval, tuple): + return retval + return retval, None + + return wrapper + + def quantize_with_accuracy_control_impl( model: ov.Model, calibration_dataset: Dataset, @@ -295,11 +351,14 @@ def quantize_with_accuracy_control_impl( quantize_with_accuracy_control_fn = pot_quantize_with_accuracy_control_impl else: quantize_with_accuracy_control_fn = native_quantize_with_accuracy_control_impl + + val_func = wrap_validation_fn(validation_fn) + return quantize_with_accuracy_control_fn( model, calibration_dataset, validation_dataset, - validation_fn, + val_func, max_drop, drop_type, preset, @@ -311,3 +370,11 @@ def quantize_with_accuracy_control_impl( advanced_quantization_parameters, advanced_accuracy_restorer_parameters, ) + + +def compress_weights_impl(model: ov.Model) -> ov.Model: + """ + Implementation of the `compress_weights()` method for the OpenVINO backend. 
+ """ + insert_pre_compression_operations(model) + return model diff --git a/nncf/openvino/quantization/weights_compression.py b/nncf/openvino/quantization/weights_compression.py new file mode 100644 index 00000000000..8b23834e36d --- /dev/null +++ b/nncf/openvino/quantization/weights_compression.py @@ -0,0 +1,99 @@ +# Copyright (c) 2023 Intel Corporation +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from typing import Tuple, Type, Union + +import numpy as np +import openvino.runtime as ov +from openvino.runtime import opset9 as opset + +from nncf.common.graph.operator_metatypes import OperatorMetatype +from nncf.openvino.graph.metatypes.openvino_metatypes import OVEmbeddingMetatype +from nncf.openvino.graph.metatypes.openvino_metatypes import OVMatMulMetatype +from nncf.openvino.graph.metatypes.openvino_metatypes import get_node_metatype +from nncf.openvino.graph.metatypes.openvino_metatypes import get_operation_const_op +from nncf.openvino.graph.node_utils import get_const_value +from nncf.openvino.graph.node_utils import get_matmul_channel_axes +from nncf.quantization.fake_quantize import calculate_scale_zero_point + + +def insert_pre_compression_operations(model: ov.Model, bits: int = 8) -> None: + """ + Compress weights of Linear and Embedding layers to uint8. + The result of compression is the same as asymmetric weight quantization. + + :param model: The model to be transformed. + :param bits: Number of bits for quantization. 
+ """ + allowed_metatypes_to_const_port = {OVEmbeddingMetatype: [0], OVMatMulMetatype: [0, 1]} + level_low = 0 + level_high = 2**bits - 1 + + for node in model.get_ops(): + metatype = get_node_metatype(node) + if metatype not in allowed_metatypes_to_const_port: + continue + + for const_port_id in allowed_metatypes_to_const_port[metatype]: + weight_node = get_operation_const_op(node, const_port_id) + if weight_node is None: + continue + + weight_output = weight_node.output(0) + weight_name = weight_node.get_friendly_name() + target_inputs = weight_output.get_target_inputs() + + original_weight_dtype = weight_output.get_element_type().to_dtype() + if original_weight_dtype not in [np.float32, np.float16, np.float64]: + continue + + weight = get_const_value(weight_node) + axes = _get_reduction_axes(metatype, node, const_port_id) + min_values = np.min(weight, axis=axes, keepdims=True) + max_values = np.max(weight, axis=axes, keepdims=True) + + scale, zero_point = calculate_scale_zero_point( + min_values, max_values, level_low, level_high, narrow_range=False + ) + + compressed_weights = np.round(weight / scale + zero_point) + compressed_weights = np.clip(compressed_weights, level_low, level_high).astype(np.uint8) + + compressed_const = opset.constant(compressed_weights, dtype=np.uint8, name=weight_name) + convert = opset.convert(compressed_const, original_weight_dtype) + sub = opset.subtract(convert, zero_point.astype(original_weight_dtype)) + fq_name = f"{node.get_friendly_name()}/fq_weights_{const_port_id}" + mul = opset.multiply(sub, scale.astype(original_weight_dtype), name=fq_name) + + for target_input in target_inputs: + target_input.replace_source_output(mul.output(0)) + + +def _get_reduction_axes(metatype: Type[OperatorMetatype], node: ov.Node, weight_port_id: int) -> Union[int, Tuple[int]]: + """ + Determines reduction axes by given metatype and node information. + + :param metatype: The metatype of the operator. + :param node: The OpenVINO node. + :param weight_port_id: The weight port ID. + + :return: The reduction axes as an integer or a tuple of integers. 
+ """ + if metatype is OVMatMulMetatype: + transpose = node.get_attributes()[f"transpose_{'a' if weight_port_id == 0 else 'b'}"] + ndims = node.input(weight_port_id).get_partial_shape().rank.get_max_length() + channel_axes = get_matmul_channel_axes(weight_port_id, ndims, transpose) + axes = tuple(i for i in range(ndims) if i not in channel_axes) + elif metatype is OVEmbeddingMetatype: + axes = (metatype.const_channel_axis[0] + 1) % 2 + else: + RuntimeError("Unsupported metatype to find reduction axes.") + return axes diff --git a/nncf/openvino/statistics/aggregator.py b/nncf/openvino/statistics/aggregator.py index 60c7bab3e80..7fd5e26c72d 100644 --- a/nncf/openvino/statistics/aggregator.py +++ b/nncf/openvino/statistics/aggregator.py @@ -15,6 +15,7 @@ import numpy as np import openvino.runtime as ov +from nncf.common.graph.graph import NNCFGraph from nncf.common.graph.transformations.commands import TargetType from nncf.common.graph.transformations.layout import TransformationLayout from nncf.common.tensor_statistics.aggregator import StatisticsAggregator @@ -22,16 +23,15 @@ from nncf.common.tensor_statistics.statistic_point import StatisticPointsContainer from nncf.experimental.common.tensor_statistics.collectors import MergedTensorCollector from nncf.experimental.common.tensor_statistics.collectors import TensorCollector -from nncf.openvino.graph.nncf_graph_builder import GraphConverter from nncf.openvino.graph.transformations.commands import OVInplaceFnInsertionCommand from nncf.openvino.graph.transformations.commands import OVOutputInsertionCommand from nncf.openvino.tensor import OVNNCFTensor class OVStatisticsAggregator(StatisticsAggregator): - def collect_statistics(self, model: ov.Model) -> None: + def collect_statistics(self, model: ov.Model, graph: NNCFGraph) -> None: self._name_to_node_mapping = {op.get_friendly_name(): op for op in model.get_ops()} - super().collect_statistics(model) + super().collect_statistics(model, graph) def _register_statistics( self, outputs: Dict[str, OVNNCFTensor], statistic_points: StatisticPointsContainer @@ -48,7 +48,7 @@ def _register_statistics( stat_node_name = output.get_node().get_friendly_name() port_id = output.get_index() else: - RuntimeError(f"Unsupported target point type for statistic aggregator:" f" {target_point.type}") + RuntimeError(f"Unsupported target point type for statistic aggregator: {target_point.type}") input_info = tensor_collector.get_output_info(stat_node_name, port_id) target_inputs = TensorCollector.get_tensor_collector_inputs(outputs, input_info) @@ -75,17 +75,16 @@ def _get_transformation_layout_extra_outputs( @staticmethod # TODO(dlyakhov) Move this to common part def _get_merged_statistic_points( - statistic_points: StatisticPointsContainer, model: ov.Model + statistic_points: StatisticPointsContainer, model: ov.Model, graph: NNCFGraph ) -> StatisticPointsContainer: - nncf_graph = GraphConverter.create_nncf_graph(model) merged_statistic_points = StatisticPointsContainer() target_type_to_tensor_collector_map = defaultdict(lambda: defaultdict(list)) for target_node_name, _statistic_points in statistic_points.data.items(): for statistic_point in _statistic_points: target_point = statistic_point.target_point if target_point.type in [TargetType.PRE_LAYER_OPERATION, TargetType.OPERATION_WITH_WEIGHTS]: - node = nncf_graph.get_node_by_name(target_node_name) - target_input_edge = nncf_graph.get_input_edges(node)[target_point.port_id] + node = graph.get_node_by_name(target_node_name) + target_input_edge = 
graph.get_input_edges(node)[target_point.port_id] target_type = TargetType.POST_LAYER_OPERATION _target_node_name = target_input_edge.from_node.node_name diff --git a/nncf/openvino/statistics/collectors.py b/nncf/openvino/statistics/collectors.py index 99c48ea483e..61ef776fab7 100644 --- a/nncf/openvino/statistics/collectors.py +++ b/nncf/openvino/statistics/collectors.py @@ -37,8 +37,8 @@ from nncf.openvino.graph.node_utils import get_inplace_min_op from nncf.openvino.graph.node_utils import get_reducer_output_node_names from nncf.openvino.graph.node_utils import get_result_node_name -from nncf.openvino.statistics.statistics import OVBatchTensorStatistic from nncf.openvino.statistics.statistics import OVMeanTensorStatistic +from nncf.openvino.statistics.statistics import OVRawTensorStatistic from nncf.openvino.tensor import OVNNCFTensor from nncf.quantization.advanced_parameters import StatisticsType @@ -265,15 +265,12 @@ def get_mean_stat_collector(num_samples, channel_axis, window_size=None, inplace return collector -def get_mean_batch_stat_collector(num_samples, inplace=True): - # TODO(dlyakhov): use inplace OVBatchMeanReducer - # after migration on openvino-dev=2023.0 - inplace = False - reducer = OVBatchMeanReducer(inplace=inplace) +def get_raw_stat_collector(num_samples, inplace=False): + reducer = OVNoopReducer() aggregator = NoopAggregator(num_samples) - collector = TensorCollector(OVBatchTensorStatistic) - collector.register_statistic_branch(OVBatchTensorStatistic.VALUES_STATS, reducer, aggregator) + collector = TensorCollector(OVRawTensorStatistic) + collector.register_statistic_branch(OVRawTensorStatistic.VALUES_STATS, reducer, aggregator) return collector diff --git a/nncf/openvino/statistics/statistics.py b/nncf/openvino/statistics/statistics.py index 7261796a664..12a0c82af9c 100644 --- a/nncf/openvino/statistics/statistics.py +++ b/nncf/openvino/statistics/statistics.py @@ -11,9 +11,9 @@ import numpy as np -from nncf.common.tensor_statistics.statistics import BatchTensorStatistic from nncf.common.tensor_statistics.statistics import MeanTensorStatistic from nncf.common.tensor_statistics.statistics import MinMaxTensorStatistic +from nncf.common.tensor_statistics.statistics import RawTensorStatistic class OVMinMaxTensorStatistic(MinMaxTensorStatistic): @@ -28,7 +28,7 @@ def tensor_eq(tensor: np.ndarray, rtol=1e-6) -> bool: return bool(np.all(tensor, rtol=rtol)) -class OVBatchTensorStatistic(BatchTensorStatistic): +class OVRawTensorStatistic(RawTensorStatistic): @staticmethod def tensor_eq(tensor: np.ndarray, rtol=1e-6) -> bool: return bool(np.all(tensor, rtol=rtol)) diff --git a/nncf/quantization/__init__.py b/nncf/quantization/__init__.py index 29ffb93b9e7..92a36bcf31c 100644 --- a/nncf/quantization/__init__.py +++ b/nncf/quantization/__init__.py @@ -10,5 +10,6 @@ # limitations under the License. 
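The hunk just below re-exports `compress_weights` from `nncf.quantization`, the user-facing entry point for the OpenVINO weight compression added earlier in this patch. A minimal usage sketch, assuming an IR file on disk (the path and the serialization step are placeholders, not part of this patch):

    import openvino.runtime as ov

    from nncf.quantization import compress_weights

    # Hypothetical IR path; any FP32/FP16 model with MatMul or Embedding constants
    # is a candidate for uint8 weight compression.
    model = ov.Core().read_model("model.xml")
    compressed_model = compress_weights(model)
    ov.serialize(compressed_model, "model_compressed_weights.xml")

No calibration dataset is required because the scales and zero points are derived from the weight values alone.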
"""Post-training quantization APIs.""" from nncf.common.quantization.structs import QuantizationPreset +from nncf.quantization.quantize_model import compress_weights from nncf.quantization.quantize_model import quantize from nncf.quantization.quantize_model import quantize_with_accuracy_control diff --git a/nncf/quantization/advanced_parameters.py b/nncf/quantization/advanced_parameters.py index 47d09a89b85..a3ec119ddeb 100644 --- a/nncf/quantization/advanced_parameters.py +++ b/nncf/quantization/advanced_parameters.py @@ -115,7 +115,7 @@ class AdvancedBiasCorrectionParameters: @dataclass class AdvancedQuantizationParameters: """ - Contains advanced parameters for fine-tuning qunatization algorithm. + Contains advanced parameters for fine-tuning quantization algorithm. :param overflow_fix: This option controls whether to apply the overflow issue fix for the 8-bit quantization, defaults to OverflowFix.FIRST_LAYER. @@ -126,8 +126,14 @@ class AdvancedQuantizationParameters: :param inplace_statistics: Defines whether to calculate quantizers statistics by backend graph operations or by default Python implementation, defaults to True. :type inplace_statistics: bool + :param disable_channel_alignment: Whether to disable the channel alignment. + :type disable_channel_alignment: bool :param disable_bias_correction: Whether to disable the bias correction. :type disable_bias_correction: bool + :param smooth_quant_alpha: SmoothQuant-related parameter. It regulates the calculation of the smooth scale. + The default value is 0.95. A negative value switches off the algorithm. In case of inaccurate results, + this parameter may be adjusted in the range from 0 to 1 or set -1 to disable SmoothQuant algorithm. + :type smooth_quant_alpha: float :param activations_quantization_params: Quantization parameters for activations. :type activations_quantization_params: nncf.quantization.advanced_parameters.QuantizationParameters :param weights_quantization_params: Quantization parameters for weights. @@ -146,7 +152,9 @@ class AdvancedQuantizationParameters: overflow_fix: OverflowFix = OverflowFix.FIRST_LAYER quantize_outputs: bool = False inplace_statistics: bool = True + disable_channel_alignment: bool = True disable_bias_correction: bool = False + smooth_quant_alpha: float = 0.95 # Advanced Quantization parameters activations_quantization_params: QuantizationParameters = field(default_factory=QuantizationParameters) @@ -159,7 +167,7 @@ class AdvancedQuantizationParameters: # Advanced BiasCorrection algorithm parameters bias_correction_params: AdvancedBiasCorrectionParameters = field(default_factory=AdvancedBiasCorrectionParameters) - # backend specific parameters + # Backend specific parameters backend_params: Dict[str, Any] = field(default_factory=dict) @@ -182,11 +190,15 @@ class AdvancedAccuracyRestorerParameters: :param ranking_subset_size: Size of a subset that is used to rank layers by their contribution to the accuracy drop. :type ranking_subset_size: Optional[int] + :param num_ranking_processes: The number of parallel processes that are used to rank + quantization operations. 
+ :type num_ranking_processes: Optional[int] """ max_num_iterations: int = sys.maxsize tune_hyperparams: bool = False ranking_subset_size: Optional[int] = None + num_ranking_processes: Optional[int] = None def changes_asdict(params: Any) -> Dict[str, Any]: @@ -219,9 +231,10 @@ def convert_to_dict_recursively(params: Any) -> Dict[str, Any]: value = getattr(params, f.name) if is_dataclass(value): result[f.name] = convert_to_dict_recursively(value) - if isinstance(value, Enum): + elif isinstance(value, Enum): result[f.name] = value.value - result[f.name] = value + else: + result[f.name] = value return result @@ -283,46 +296,72 @@ def convert_range_estimator_parameters_to_dict(params: RangeEstimatorParameters) "min_percentile": 1 - params.min.quantile_outlier_prob, "max_percentile": 1 - params.max.quantile_outlier_prob, } + elif ( + params.min.statistics_type is None + and params.min.aggregator_type is None + and params.max.statistics_type is None + and params.max.aggregator_type is None + ): + return {} else: - raise RuntimeError("The following range estimator parameters are not supported: " f"{str(params)}") + raise RuntimeError(f"The following range estimator parameters are not supported: {str(params)}") return result -def convert_advanced_parameters_to_dict(params: AdvancedQuantizationParameters) -> Dict[str, Any]: +def apply_advanced_parameters_to_config( + config: Dict[str, Any], params: AdvancedQuantizationParameters +) -> Dict[str, Any]: """ - Converts advanced parameters to the dict in the legacy format + Apply advanced parameters to the config in the legacy format + :param config: NNCF config in legacy format :param params: Advanced quantization parameters :return: advanced quantization parameters as dict in the legacy format """ - result = { - "overflow_fix": params.overflow_fix.value, - "quantize_outputs": params.quantize_outputs, - } + config["overflow_fix"] = params.overflow_fix.value + config["quantize_outputs"] = params.quantize_outputs if params.disable_bias_correction: - result["batchnorm_adaptation"] = {"num_bn_adaptation_samples": 0} + initializer = config.get("initializer", {}) + initializer["batchnorm_adaptation"] = {"num_bn_adaptation_samples": 0} + config["initializer"] = initializer activations_config = convert_quantization_parameters_to_dict(params.activations_quantization_params) if activations_config: - result["activations"] = activations_config + config["activations"] = activations_config weights_config = convert_quantization_parameters_to_dict(params.weights_quantization_params) if weights_config: - result["weights"] = weights_config + config["weights"] = weights_config activations_init_range_config = convert_range_estimator_parameters_to_dict( params.activations_range_estimator_params ) - weights_init_range_config = convert_range_estimator_parameters_to_dict(params.weigths_range_estimator_params) + weights_init_range_config = convert_range_estimator_parameters_to_dict(params.weights_range_estimator_params) + if activations_init_range_config or weights_init_range_config: + initializer = config.get("initializer", {}) + init_range = initializer.get("range", {}) + global_num_init_samples = init_range.get("num_init_samples", None) + global_range_type = init_range.get("type", None) + activations_init_range_config["target_quantizer_group"] = "activations" activations_init_range_config["target_scopes"] = "{re}.*" + if global_num_init_samples is not None: + activations_init_range_config["num_init_samples"] = global_num_init_samples + if "type" not in 
activations_init_range_config and global_range_type is not None: + activations_init_range_config["type"] = global_range_type + weights_init_range_config["target_quantizer_group"] = "weights" weights_init_range_config["target_scopes"] = "{re}.*" + if global_num_init_samples is not None: + weights_init_range_config["num_init_samples"] = global_num_init_samples + if "type" not in weights_init_range_config and global_range_type is not None: + weights_init_range_config["type"] = global_range_type - result["initializer"]["range"] = [activations_init_range_config, weights_init_range_config] + initializer["range"] = [activations_init_range_config, weights_init_range_config] + config["initializer"] = initializer if params.bias_correction_params.apply_for_all_nodes: raise RuntimeError( @@ -332,4 +371,4 @@ def convert_advanced_parameters_to_dict(params: AdvancedQuantizationParameters) if params.bias_correction_params.threshold is not None: raise RuntimeError("threshold parameter of the BiasCorrection algorithm is not supported in the legacy format") - return result + return config diff --git a/nncf/quantization/algorithms/accuracy_control/algorithm.py b/nncf/quantization/algorithms/accuracy_control/algorithm.py index c9121360ef6..a04575d4fb8 100644 --- a/nncf/quantization/algorithms/accuracy_control/algorithm.py +++ b/nncf/quantization/algorithms/accuracy_control/algorithm.py @@ -9,9 +9,8 @@ # See the License for the specific language governing permissions and # limitations under the License. -import operator import sys -from typing import Any, Callable, Iterable, List, TypeVar +from typing import Iterable, List, Optional, Tuple, TypeVar from nncf.common.factory import NNCFGraphFactory from nncf.common.graph import NNCFGraph @@ -21,15 +20,21 @@ from nncf.common.quantization.quantizer_removal import revert_operations_to_floating_point_precision from nncf.common.utils.backend import BackendType from nncf.common.utils.backend import get_backend +from nncf.common.utils.os import get_available_cpu_count +from nncf.common.utils.os import get_available_memory_amount +from nncf.common.utils.os import is_windows from nncf.data.dataset import Dataset from nncf.parameters import DropType from nncf.quantization.algorithms.accuracy_control.backend import AccuracyControlAlgoBackend -from nncf.quantization.algorithms.accuracy_control.rank_functions import normalized_mse -from nncf.quantization.algorithms.accuracy_control.ranker import LogitsBasedRanker -from nncf.quantization.algorithms.accuracy_control.ranker import MetricBasedRanker +from nncf.quantization.algorithms.accuracy_control.evaluator import Evaluator +from nncf.quantization.algorithms.accuracy_control.evaluator import MetricResults from nncf.quantization.algorithms.accuracy_control.ranker import Ranker TModel = TypeVar("TModel") +TTensor = TypeVar("TTensor") +PREPARATION_MODEL_THRESHOLD = 1 +OVERHEAD_COEFFICIENT = 2 +MEMORY_INCREASE_COEFFICIENT = 4 def get_algo_backend(backend: BackendType) -> AccuracyControlAlgoBackend: @@ -45,7 +50,7 @@ def get_algo_backend(backend: BackendType) -> AccuracyControlAlgoBackend: return OVAccuracyControlAlgoBackend() raise RuntimeError( - "Cannot create the backend for the accuracy control algorithm " f"because {backend} is not supported." + f"Cannot create the backend for the accuracy control algorithm because {backend} is not supported." 
) @@ -54,6 +59,41 @@ def _create_message(nodes: Iterable[NNCFNode]) -> str: return "\n".join(names) +def calculate_accuracy_drop( + initial_metric: float, quantized_metric: float, max_drop: float, drop_type: DropType +) -> Tuple[bool, Optional[float]]: + """ + Calculates accuracy drop and termination boolean flag. + + :param initial_metric: Metric value for initial model. + :param quantized_metric: Metric value for quantized model. + :param max_drop: Maximum accuracy drop that should be achieved. + :param drop_type: Accuracy drop type. + :return: A tuple (should_terminate, accuracy_drop) where: + - should_terminate: Whether the algorithm should terminate or not. + - accuracy_drop: Accuracy drop value. + """ + should_terminate = None + accuracy_drop = None + + if quantized_metric >= initial_metric: + drop_values_by_drop_type = { + DropType.RELATIVE: None, + DropType.ABSOLUTE: initial_metric - quantized_metric, + } + accuracy_drop = drop_values_by_drop_type[drop_type] + should_terminate = True + else: + drop_values_by_drop_type = { + DropType.RELATIVE: abs(1 - quantized_metric / initial_metric), + DropType.ABSOLUTE: initial_metric - quantized_metric, + } + accuracy_drop = drop_values_by_drop_type[drop_type] + should_terminate = accuracy_drop <= max_drop + + return should_terminate, accuracy_drop + + class QuantizationAccuracyRestorerReport: """ Contains execution information about accuracy-aware algorithm. @@ -105,6 +145,7 @@ def __init__( max_num_iterations: int = sys.maxsize, max_drop: float = 0.01, drop_type: DropType = DropType.ABSOLUTE, + num_ranking_processes: Optional[int] = None, ): """ :param ranking_subset_size: The number of data items that will be selected from @@ -114,55 +155,110 @@ def __init__( :param drop_type: The accuracy drop type, which determines how the maximum accuracy drop between the original model and the compressed model is calculated. + :param num_ranking_processes: The number of parallel processes that are used to rank + quantization operations. """ self.ranking_subset_size = ranking_subset_size self.max_num_iterations = max_num_iterations self.max_drop = max_drop self.drop_type = drop_type - def restore_accuracy( + if is_windows(): + self.num_ranking_processes = 1 + if num_ranking_processes is not None and num_ranking_processes > 1: + nncf_logger.info( + "Number of parallel processes to rank quantized operations > 1 is not supported on Windows OS. " + "num_ranking_processes = 1 will be used." + ) + else: + self.num_ranking_processes = num_ranking_processes + + def apply( self, initial_model: TModel, - initial_metric: float, + initial_metric_results: MetricResults, quantized_model: TModel, - quantized_metric: float, + quantized_metric_results: MetricResults, validation_dataset: Dataset, - validation_fn: Callable[[Any, Iterable[Any]], float], + validation_dataset_size: int, + evaluator: Evaluator, ) -> TModel: """ Restores the accuracy of the quantized model by removing the groups of quantizers that contribute the most to the drop in accuracy. :param initial_model: Initial model (not quantized). - :param initial_metric: Metric value for initial model. + :param initial_metric_results: Initial model metrics. :param quantized_model: Quantized model. - :param quantized_metric: Metric value for quantized model. + :param quantized_metric_results: Quantized model metrics. :param validation_dataset: A dataset for the validation process. - :param validation_fn: A validation function to validate the model. It should take - two argumets: - - `model`: model to be validate. 
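A short worked example of the `calculate_accuracy_drop()` helper added above, with made-up metric values:

    from nncf.parameters import DropType

    # FP32 accuracy 0.760, quantized accuracy 0.745, allowed drop 0.01
    calculate_accuracy_drop(0.760, 0.745, max_drop=0.01, drop_type=DropType.ABSOLUTE)
    # -> (False, 0.015): the absolute drop exceeds max_drop, so the algorithm continues
    calculate_accuracy_drop(0.760, 0.745, max_drop=0.01, drop_type=DropType.RELATIVE)
    # -> (False, ~0.0197): computed as |1 - 0.745 / 0.760|
    calculate_accuracy_drop(0.760, 0.765, max_drop=0.01, drop_type=DropType.RELATIVE)
    # -> (True, None): the quantized metric is not worse, so the search terminates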
- - `validation_dataset`: dataset that provides data items to - validate the provided model. - The function should return the value of the metric with the following meaning: - A higher value corresponds to better performance of the model. + :param validation_dataset_size: Validation dataset size. + :param evaluator: The instance of `Evaluator` to validate model and collect values + for each item from dataset. :return: The quantized model whose metric `final_metric` is satisfied the maximum accuracy drop condition. """ - backend = get_backend(initial_model) - algo_backend = get_algo_backend(backend) + algo_backend = get_algo_backend(get_backend(initial_model)) - accuracy_drop = self.calculate_accuracy_drop(initial_metric, quantized_metric) - nncf_logger.info(f"Accuracy drop: {accuracy_drop} ({self.drop_type})") + should_terminate, accuracy_drop = calculate_accuracy_drop( + initial_metric_results.metric_value, quantized_metric_results.metric_value, self.max_drop, self.drop_type + ) - if accuracy_drop <= self.max_drop: + if should_terminate: + QuantizationAccuracyRestorer._print_completion_message(accuracy_drop, self.drop_type) return quantized_model + nncf_logger.info(f"Accuracy drop: {accuracy_drop} ({self.drop_type})") + + # Accuracy drop is greater than the maximum drop so we need to restore accuracy + return self._apply( + initial_model, + initial_metric_results, + quantized_model, + quantized_metric_results, + validation_dataset, + validation_dataset_size, + evaluator, + accuracy_drop, + algo_backend, + ) + + def _apply( + self, + initial_model: TModel, + initial_metric_results: MetricResults, + quantized_model: TModel, + quantized_metric_results: MetricResults, + validation_dataset: Dataset, + validation_dataset_size: int, + evaluator: Evaluator, + accuracy_drop: float, + algo_backend: AccuracyControlAlgoBackend, + ) -> TModel: + """ + An internal function that implements an iterative approach to restoring the accuracy of + the quantized model by removing the groups of quantizers that contribute the most to + the drop in accuracy. + + :param initial_model: Initial model (not quantized). + :param initial_metric_results: Initial model metrics. + :param quantized_model: Quantized model. + :param quantized_metric_results: Quantized model metrics. + :param validation_dataset: A dataset for the validation process. + :param validation_dataset_size: Validation dataset size. + :param evaluator: The instance of `Evaluator` to validate model and collect values + for each item from dataset. + :param accuracy_drop: Accuracy drop between initial and quantized models. + :param algo_backend: The `AccuracyControlAlgoBackend` algo backend. + :return: The quantized model whose metric `final_metric` is satisfied + the maximum accuracy drop condition. + """ initial_model_graph = NNCFGraphFactory.create(initial_model) quantized_model_graph = NNCFGraphFactory.create(quantized_model) # Collect original biases and weights because these values are # required to undo bias correction and weight correction. - # Store this data inside the `node.data` dictionary. + # Store this data inside the `node.attributes` dictionary. # This data will be used in the `revert_operations_to_floating_point_precision()` method. 
QuantizationAccuracyRestorer._collect_original_biases_and_weights( initial_model_graph, quantized_model_graph, initial_model, algo_backend @@ -175,18 +271,34 @@ def restore_accuracy( ) nncf_logger.info(f"Total number of quantized operations in the model: {report.num_quantized_operations}") - nncf_logger.info("Ranking groups of quantizers was started") - ranker = QuantizationAccuracyRestorer._create_ranker( - initial_model, validation_fn, validation_dataset, self.ranking_subset_size, algo_backend - ) + # Calculate number of parallel processes for Ranker + num_ranking_processes = self.num_ranking_processes + if num_ranking_processes is None: + model_size = algo_backend.get_model_size(quantized_model) + num_ranking_processes = self._calculate_number_ranker_parallel_proc( + model_size, + quantized_metric_results.preparation_time, + quantized_metric_results.validation_time, + validation_dataset_size, + ) + + nncf_logger.info(f"Number of parallel processes to rank quantized operations: {num_ranking_processes}") + + ranker = Ranker(self.ranking_subset_size, validation_dataset, algo_backend, evaluator, num_ranking_processes) groups_to_rank = ranker.find_groups_of_quantizers_to_rank(quantized_model_graph) ranked_groups = ranker.rank_groups_of_quantizers( - groups_to_rank, initial_model, quantized_model, quantized_model_graph + groups_to_rank, + quantized_model, + quantized_model_graph, + initial_metric_results.values_for_each_item, + quantized_metric_results.values_for_each_item, ) previous_model = quantized_model + previous_approximate_values_for_each_item = quantized_metric_results.values_for_each_item previous_accuracy_drop = accuracy_drop current_model = None + current_approximate_values_for_each_item = None current_accuracy_drop = None is_step_back = True @@ -212,12 +324,12 @@ def restore_accuracy( ) # Calculate drop for new quantization scope. - current_metric = validation_fn( - algo_backend.prepare_for_inference(current_model), validation_dataset.get_data() + current_metric, current_approximate_values_for_each_item = evaluator.validate( + current_model, validation_dataset ) - current_accuracy_drop = self.calculate_accuracy_drop(initial_metric, current_metric) - nncf_logger.info( - f"Accuracy drop with the new quantization scope is {float(current_accuracy_drop)} ({self.drop_type})" + + should_terminate, current_accuracy_drop = calculate_accuracy_drop( + initial_metric_results.metric_value, current_metric, self.max_drop, self.drop_type ) if not ranked_groups: @@ -229,12 +341,17 @@ def restore_accuracy( break # Accuracy was restored to the acceptable drop. 
- if current_accuracy_drop <= self.max_drop: + if should_terminate: report.reached_required_drop = True + QuantizationAccuracyRestorer._print_completion_message(current_accuracy_drop, self.drop_type) break + nncf_logger.info( + f"Accuracy drop with the new quantization scope is {float(current_accuracy_drop)} ({self.drop_type})" + ) + # Continue greedy quantizer remove - if self.max_drop < current_accuracy_drop <= previous_accuracy_drop or ( + if current_accuracy_drop <= previous_accuracy_drop or ( current_accuracy_drop > previous_accuracy_drop and is_step_back ): is_step_back = False @@ -243,6 +360,7 @@ def restore_accuracy( if current_accuracy_drop > previous_accuracy_drop: current_model = previous_model + current_approximate_values_for_each_item = previous_approximate_values_for_each_item report.removed_groups.pop() ranked_groups.append(current_group) is_step_back = True @@ -250,8 +368,17 @@ def restore_accuracy( previous_accuracy_drop = current_accuracy_drop nncf_logger.info("Re-calculating ranking scores for remaining groups") + if current_approximate_values_for_each_item is None: + current_approximate_values_for_each_item = evaluator.collect_values_for_each_item( + current_model, validation_dataset + ) + ranked_groups = ranker.rank_groups_of_quantizers( - ranked_groups, initial_model, current_model, quantized_model_graph + ranked_groups, + current_model, + quantized_model_graph, + initial_metric_results.values_for_each_item, + current_approximate_values_for_each_item, ) report.num_iterations = iteration @@ -259,22 +386,40 @@ def restore_accuracy( return current_model - def calculate_accuracy_drop(self, initial_metric, quantized_metric): + def _calculate_number_ranker_parallel_proc( + self, + model_size: int, + preparation_time: float, + validation_time: float, + validation_dataset_size: int, + ) -> int: """ - Calculates accuracy drop. + Calculate the number of parallel ranker processes - :param initial_metric: Metric value for initial model. - :param quantized_metric: Metric value for quantized model. - :return: Accuracy drop value. + :param model_size: Target model size. + :param preparation_time: The time it takes to prepare the model. + :param validation_time: The time it takes to validate the model. + :param validation_dataset_size: Validation dataset size. 
+ :return: The number of parallel ranker processes """ - if self.drop_type == DropType.ABSOLUTE: - accuracy_drop = initial_metric - quantized_metric - elif self.drop_type == DropType.RELATIVE: - accuracy_drop = 1 - quantized_metric / initial_metric - else: - raise ValueError(f"{self.drop_type} drop type is not supported.") + if preparation_time < PREPARATION_MODEL_THRESHOLD: + return 1 + + # Calculate the number of parallel processes needed to override model preparation and + # metric calculation on the ranking subset + ranking_time = validation_time * self.ranking_subset_size / validation_dataset_size + n_proc = max(round((preparation_time / ranking_time + 1) * OVERHEAD_COEFFICIENT), 2) - return accuracy_drop + # Apply limitation by number of CPU cores + n_cores = get_available_cpu_count(logical=True) + n_proc = max(min(n_proc, n_cores // 2), 1) + + # Apply limitation by memory + ram = get_available_memory_amount() + n_copies = ram // (model_size * MEMORY_INCREASE_COEFFICIENT) + n_proc = max(min(n_proc, n_copies - 1), 1) + + return n_proc @staticmethod def _collect_original_biases_and_weights( @@ -284,8 +429,8 @@ def _collect_original_biases_and_weights( algo_backend: AccuracyControlAlgoBackend, ) -> None: """ - Collects initial biases and weights and stores them inside the `node.data['original_bias']` and - `node.data['original_weight']` where `node` is a node from `quantized_model_graph`. + Collects initial biases and weights and stores them inside the `node.attributes['original_bias']` and + `node.attributes['original_weight']` where `node` is a node from `quantized_model_graph`. :param initial_model_graph: Graph for initial model. :param quantized_model_graph: Graph for quantized model. @@ -295,48 +440,14 @@ def _collect_original_biases_and_weights( for node in initial_model_graph.get_all_nodes(): if algo_backend.is_node_with_bias(node, initial_model_graph): node_with_bias = quantized_model_graph.get_node_by_name(node.node_name) - node_with_bias.data["original_bias"] = algo_backend.get_bias_value( + node_with_bias.attributes["original_bias"] = algo_backend.get_bias_value( node, initial_model_graph, initial_model ) if algo_backend.is_node_with_weight(node): node_with_weight = quantized_model_graph.get_node_by_name(node.node_name) for port_id in algo_backend.get_weight_tensor_port_ids(node_with_weight): weight = algo_backend.get_weight_value(node, initial_model, port_id) - node_with_weight.data[f"original_weight.{port_id}"] = weight - - @staticmethod - def _create_ranker( - initial_model: TModel, - validation_fn: Callable[[Any, Iterable[Any]], float], - validation_dataset: Dataset, - ranking_subset_size: int, - algo_backend: AccuracyControlAlgoBackend, - ) -> Ranker: - """ - Creates an instance of the `Ranker` class. - - :param initial_model: Initial model. - :param validation_fn: A validation function to validate the model. - :param validation_dataset: A dataset for the validation process. - :param ranking_subset_size: The number of data items that will be selected from - the dataset to rank groups of quantizers. - :param algo_backend: The `AccuracyControlAlgoBackend` algo backend. - :return: An instance of the `Ranker` class. - """ - # Check whether it is possible to calculate the metric for one data item. 
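To illustrate the heuristic above with made-up numbers (a ranking subset of 300 items out of a 3000-item validation set, 10 s model preparation, 100 s full validation, 16 logical cores, 32 GB of free RAM, a 2 GB model):

    ranking_time = 100 * 300 / 3000 = 10 s
    n_proc   = max(round((10 / 10 + 1) * 2), 2) = 4     # OVERHEAD_COEFFICIENT = 2
    n_proc   = max(min(4, 16 // 2), 1) = 4              # limited by half the logical cores
    n_copies = 32 // (2 * 4) = 4                        # MEMORY_INCREASE_COEFFICIENT = 4
    n_proc   = max(min(4, 4 - 1), 1) = 3                # final number of ranking processes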
- # pylint: disable=W0703 - try: - _ = validation_fn(algo_backend.prepare_for_inference(initial_model), validation_dataset.get_data([0])) - ranker = MetricBasedRanker( - ranking_subset_size, operator.sub, validation_dataset, algo_backend, validation_fn - ) - except Exception: - ranker = LogitsBasedRanker(ranking_subset_size, normalized_mse, validation_dataset, algo_backend) - nncf_logger.info( - f'The {"original" if isinstance(ranker, MetricBasedRanker) else "NMSE"} ' - "metric will be used to rank quantizers" - ) - return ranker + node_with_weight.attributes[f"original_weight.{port_id}"] = weight @staticmethod def _print_report(report: QuantizationAccuracyRestorerReport, max_num_iterations: int) -> None: @@ -359,3 +470,11 @@ def _print_report(report: QuantizationAccuracyRestorerReport, max_num_iterations "were reverted back to the floating-point precision:" f"\n{_create_message(report.reverted_operations)}" ) + + @staticmethod + def _print_completion_message(accuracy_drop: float, drop_type: DropType) -> None: + if accuracy_drop is None or accuracy_drop < 0: + reason = "metric of the quantized model is greater than the metric of the initial model" + else: + reason = f"achieved required accuracy drop {float(accuracy_drop)} ({drop_type})" + nncf_logger.info(f"Algorithm completed: {reason}") diff --git a/nncf/quantization/algorithms/accuracy_control/backend.py b/nncf/quantization/algorithms/accuracy_control/backend.py index 5d57d67065d..ef2ab709670 100644 --- a/nncf/quantization/algorithms/accuracy_control/backend.py +++ b/nncf/quantization/algorithms/accuracy_control/backend.py @@ -18,6 +18,20 @@ from nncf.common.graph.operator_metatypes import OperatorMetatype TModel = TypeVar("TModel") +TPModel = TypeVar("TPModel") + + +class AsyncPreparedModel(ABC): + @abstractmethod + def get(self, timeout) -> TPModel: + """ + Returns the prepared model for inference when it arrives. If timeout is not None and + the result does not arrive within timeout seconds then TimeoutError is raised. If + the remote call raised an exception then that exception will be reraised by get(). + + :param timeout: timeout + :return: A prepared model for inference + """ class AccuracyControlAlgoBackend(ABC): @@ -127,14 +141,34 @@ def get_weight_tensor_port_ids(node: NNCFNode) -> List[Optional[int]]: :return: Weights input port indices. """ + @staticmethod + @abstractmethod + def get_model_size(model: TModel) -> int: + """ + Returns model size + + :param model: A model + :return: Model size (in bytes) + """ + # Preparation of model @staticmethod @abstractmethod - def prepare_for_inference(model: TModel) -> Any: + def prepare_for_inference(model: TModel) -> TPModel: """ Prepares model for inference. :param model: A model that should be prepared. - :retunr: Prepared model for inference. + :return: Prepared model for inference. + """ + + @staticmethod + @abstractmethod + def prepare_for_inference_async(model: TModel) -> AsyncPreparedModel: + """ + Prepares model for inference asynchronously. + + :param model: A model that should be prepared. + :return: AsyncPreparedModel opbject. 
""" diff --git a/nncf/quantization/algorithms/accuracy_control/evaluator.py b/nncf/quantization/algorithms/accuracy_control/evaluator.py new file mode 100644 index 00000000000..71f59153b98 --- /dev/null +++ b/nncf/quantization/algorithms/accuracy_control/evaluator.py @@ -0,0 +1,340 @@ +# Copyright (c) 2023 Intel Corporation +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from dataclasses import dataclass +from typing import Any, Callable, Iterable, List, Optional, Tuple, TypeVar, Union + +from nncf.common.factory import EngineFactory +from nncf.common.logging import nncf_logger +from nncf.common.utils.backend import BackendType +from nncf.common.utils.backend import get_backend +from nncf.common.utils.timer import timer +from nncf.data.dataset import Dataset + +TModel = TypeVar("TModel") +TPModel = TypeVar("TPModel") +TTensor = TypeVar("TTensor") + + +class IterationCounter: + """ + A wrapper for counting the passed iterations of iterable objects. + """ + + def __init__(self, iterable): + self._iterable = iterable + self._num_iterations = 0 + + @property + def num_iterations(self) -> int: + return self._num_iterations + + def __iter__(self): + self._num_iterations = 0 + for x in self._iterable: + self._num_iterations += 1 + yield x + + +@dataclass +class MetricResults: + """ + Results of metrics collection. + + :param metric_value: Aggregated metric value. + :param values_for_each_item: Metric values for each data item. + :param preparation_time: Time that it takes to prepare model for validation. + :param validation_time: Time that it takes to validate model. + """ + + metric_value: float + values_for_each_item: Union[List[float], List[List[TTensor]]] + preparation_time: float + validation_time: float + + +class Evaluator: + """ + Evaluator encapsulates a logic to validate model and collect values for each item. + The value is either calculated metric or model output. This is determined by the + `Evaluator.is_metric_mode()` method. + """ + + def __init__( + self, validation_fn: Callable[[Any, Iterable[Any]], Tuple[float, Union[None, List[float], List[List[TTensor]]]]] + ): + """ + :param validation_fn: Validation function to validate model. + """ + self._validation_fn = validation_fn + self._metric_mode = None + self._num_passed_iterations = 0 + self._enable_iteration_count = False + + @property + def num_passed_iterations(self) -> int: + """ + Number of passed iterations during last validation process if the iteration count is enabled. + + :return: Number of passed iterations during last validation process. + """ + + return self._num_passed_iterations + + def enable_iteration_count(self) -> None: + """ + Enable the iteration count. + """ + self._enable_iteration_count = True + + def disable_iteration_count(self) -> None: + """ + Disable the iteration count. + """ + self._enable_iteration_count = False + + def is_metric_mode(self) -> bool: + """ + Returns mode of `Evaluator`. 
+ + :return: A boolean indicator where `True` means that the `Evaluator` collects + metric value for each item and `False` means that the `Evaluator` collects + logits for each item. + """ + return self._metric_mode + + def prepare_model_for_inference(self, model: TModel) -> TPModel: + """ + Prepares model for inference. + + :param model: A model that should be prepared. + :return: Prepared model for inference. + """ + backend = get_backend(model) + + if backend == BackendType.OPENVINO: + import openvino.runtime as ov + + return ov.compile_model(model) + + raise NotImplementedError( + f"The `prepare_model_for_inference()` method is not implemented for the {backend} backend." + ) + + def validate_model_for_inference( + self, model_for_inference: TPModel, dataset: Dataset, indices: Optional[List[int]] = None + ): + """ + Validates prepared model for inference. + + :param model: Prepared model to validate. + :param dataset: Dataset to validate the model. + :param indices: Zero-based indices of data items that should be selected from + the dataset. + :return: A tuple (metric_value, values_for_each_item) where + - metric_values: This is a metric for the model. + - values_for_each_item: If the `Evaluator.is_metric_mode()` condition is true, + then `values_for_each_item` represents the list of metric value for each item. + Otherwise, if the condition is false, it represents list of logits for each + item. + """ + if self._metric_mode is None: + self._metric_mode = Evaluator.determine_mode(model_for_inference, dataset, self._validation_fn) + + if not self.is_metric_mode() and indices is not None: + raise ValueError("The `indices` parameter can be used only if Evaluator.is_metric_mode() = True") + + validation_dataset = dataset.get_data(indices) + if self._enable_iteration_count: + validation_dataset = IterationCounter(validation_dataset) + + metric, values_for_each_item = self._validation_fn(model_for_inference, validation_dataset) + + self._num_passed_iterations = validation_dataset.num_iterations if self._enable_iteration_count else 0 + + if self.is_metric_mode() and values_for_each_item is not None: + # This casting is necessary to cover the following cases: + # - np.array(1.0, dtype=np.float32) + # - np.array([1.0], dtype=np.float32) + # - torch.tensor(1.0, dtype=torch.float32) + # - torch.tensor([1.0], dtype=torch.float32) + # - tf.constant(1.0, dtype=tf.float32 + # - tf.constant([1.0], dtype=tf.float32) + values_for_each_item = [float(x) for x in values_for_each_item] + + return float(metric), values_for_each_item + + def validate( + self, model: TModel, dataset: Dataset, indices: Optional[List[int]] = None + ) -> Tuple[float, Union[None, List[float], List[List[TTensor]]]]: + """ + Validates model. + + :param model: Model to validate. + :param dataset: Dataset to validate the model. + :param indices: Zero-based indices of data items that should be selected from + the dataset. + :return: A tuple (metric_value, values_for_each_item) where + - metric_values: This is a metric for the model. + - values_for_each_item: If the `Evaluator.is_metric_mode()` condition is true, + then `values_for_each_item` represents the list of metric value for each item. + Otherwise, if the condition is false, it represents list of logits for each + item. 
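The metric mode documented above hinges on the shape of the tuple returned by the user's validation function. A hedged sketch of a validation function that keeps the `Evaluator` in metric mode, wired into the accuracy-aware API (`model`, the dataset objects, and the single-output classification layout are placeholders):

    import numpy as np

    import nncf
    from nncf.parameters import DropType
    from nncf.quantization.advanced_parameters import AdvancedAccuracyRestorerParameters

    def validate(compiled_model, validation_items):
        per_item_metrics = []
        for images, label in validation_items:
            # A single-output classification model is assumed here.
            logits = compiled_model(images)[compiled_model.output(0)]
            per_item_metrics.append(float(np.argmax(logits) == label))
        # Returning (aggregated metric, per-item metrics) keeps Evaluator.is_metric_mode() == True.
        return sum(per_item_metrics) / len(per_item_metrics), per_item_metrics

    quantized_model = nncf.quantize_with_accuracy_control(
        model,
        calibration_dataset=calibration_dataset,
        validation_dataset=validation_dataset,
        validation_fn=validate,
        max_drop=0.01,
        drop_type=DropType.ABSOLUTE,
        advanced_accuracy_restorer_parameters=AdvancedAccuracyRestorerParameters(num_ranking_processes=4),
    )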
+ """ + model_for_inference = self.prepare_model_for_inference(model) + return self.validate_model_for_inference(model_for_inference, dataset, indices) + + @staticmethod + def determine_mode( + model_for_inference: TPModel, + dataset: Dataset, + validation_fn: Callable[[Any, Iterable[Any]], Tuple[float, Union[None, List[float], List[List[TTensor]]]]], + ) -> bool: + """ + Determines mode based on the type of returned value from the + validation function. + + :param model_for_inference: Model to validate. + :param dataset: Dataset to validate the model. + :param validation_fn: Validation function to validate model. + :return: A boolean indicator where `True` means that the `Evaluator` collects + metric value for each item and `False` means that the `Evaluator` collects + logits for each item. + """ + metric_mode = None + + data_item = dataset.get_data([0]) + # pylint: disable=W0703 + try: + metric_value, values_for_each_item = validation_fn(model_for_inference, data_item) + except Exception: + metric_mode = False + + if metric_mode is not None: + return metric_mode + + try: + metric_value = metric_value if metric_value is None else float(metric_value) + except Exception as ex: + raise RuntimeError( + f"Metric value of {type(metric_value)} type was returned from the `validation_fn` " + "but the float value is expected." + ) from ex + + convert_to_float_possible = True + if values_for_each_item is not None: + # pylint: disable=W0703 + try: + _ = float(values_for_each_item[0]) + except Exception: + convert_to_float_possible = False + + # Analyze `metric_value` and `values_for_each_item` values: + # +--------------+----------------------+-------------+ + # | metric_value | values_for_each_item | metric_mode | + # +--------------+----------------------+-------------+ + # | float | None | True | + # +--------------+----------------------+-------------+ + # | float | List[float] | True | + # +--------------+----------------------+-------------+ + # | float | List[List[TTensor]] | False | + # +--------------+----------------------+-------------+ + # | None | None | False | + # +--------------+----------------------+-------------+ + # | None | List[float] | UNEXPECTED | + # +--------------+----------------------+-------------+ + # | None | List[List[TTensor]] | False | + # +--------------+----------------------+-------------+ + + metric_mode = False + if isinstance(metric_value, float) and (values_for_each_item is None or convert_to_float_possible): + metric_mode = True + elif values_for_each_item is not None and not isinstance(values_for_each_item[0], list): + raise RuntimeError("Unexpected return value from provided validation function.") + + return metric_mode + + def collect_values_for_each_item_using_model_for_inference( + self, model_for_inference: TPModel, dataset: Dataset, indices: Optional[List[int]] = None + ) -> Union[List[float], List[List[TTensor]]]: + """ + Collects value for each item from the dataset using prepared model for inference. + If `is_metric_mode()` returns `True` then i-th value is a metric for i-th data item. + It is an output of the model for i-th data item otherwise. + + :param model: Model to infer. + :param dataset: Dataset to collect values. + :param indices: The zero-based indices of data items that should be selected from + the dataset. + :return: Collected values. 
+ """ + if self._metric_mode: + # Collect metrics for each item + values_for_each_item = [ + self._validation_fn(model_for_inference, [data_item])[0] for data_item in dataset.get_data(indices) + ] + else: + # Collect outputs for each item + engine = EngineFactory.create(model_for_inference) + + values_for_each_item = [] + for data_item in dataset.get_inference_data(indices): + logits = engine.infer(data_item) + values_for_each_item.append(list(logits.values())) + + self._num_passed_iterations = len(values_for_each_item) if self._enable_iteration_count else 0 + + return values_for_each_item + + def collect_values_for_each_item( + self, model: TModel, dataset: Dataset, indices: Optional[List[int]] = None + ) -> Union[List[float], List[List[TTensor]]]: + """ + Collects value for each item from the dataset. If `is_metric_mode()` + returns `True` then i-th value is a metric for i-th data item. It + is an output of the model for i-th data item otherwise. + + :param model: A target model. + :param dataset: Dataset to collect values. + :param indices: The zero-based indices of data items that should be selected from + the dataset. + :return: Collected values. + """ + model_for_inference = self.prepare_model_for_inference(model) + return self.collect_values_for_each_item_using_model_for_inference(model_for_inference, dataset, indices) + + def collect_metric_results(self, model: TModel, dataset: Dataset, model_name: str = "") -> MetricResults: + """ + Collects metric results. + + :param model: Input model. + :param dataset: Dataset used to collect metrics. + :param model_name: Model name. + :return: Collected metric results. + """ + nncf_logger.info(f"Validation of {model_name} model was started") + + with timer() as preparation_time: + model_for_inference = self.prepare_model_for_inference(model) + + with timer() as validation_time: + metric, values_for_each_item = self.validate_model_for_inference(model_for_inference, dataset) + + nncf_logger.info(f"Metric of {model_name} model: {metric}") + + if values_for_each_item is None: + nncf_logger.info(f"Collecting values for each data item using the {model_name} model") + with timer(): + values_for_each_item = self.collect_values_for_each_item_using_model_for_inference( + model_for_inference, dataset + ) + + return MetricResults(metric, values_for_each_item, preparation_time(), validation_time()) diff --git a/nncf/quantization/algorithms/accuracy_control/openvino_backend.py b/nncf/quantization/algorithms/accuracy_control/openvino_backend.py index b07048aef91..29f99bdf992 100644 --- a/nncf/quantization/algorithms/accuracy_control/openvino_backend.py +++ b/nncf/quantization/algorithms/accuracy_control/openvino_backend.py @@ -9,6 +9,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+import multiprocessing from typing import Any, List, Optional import numpy as np @@ -16,18 +17,39 @@ from nncf.common.graph import NNCFGraph from nncf.common.graph import NNCFNode +from nncf.openvino.graph.layer_attributes import OVLayerAttributes from nncf.openvino.graph.metatypes.common import CONSTANT_OPERATIONS from nncf.openvino.graph.metatypes.common import FAKE_QUANTIZE_OPERATIONS from nncf.openvino.graph.metatypes.common import QUANTIZABLE_OPERATIONS from nncf.openvino.graph.metatypes.common import QUANTIZE_AGNOSTIC_OPERATIONS from nncf.openvino.graph.metatypes.common import SHAPEOF_OPERATIONS from nncf.openvino.graph.metatypes.openvino_metatypes import GENERAL_WEIGHT_LAYER_METATYPES +from nncf.openvino.graph.metatypes.openvino_metatypes import OVConcatMetatype from nncf.openvino.graph.metatypes.openvino_metatypes import OVOpMetatype -from nncf.openvino.graph.nncf_graph_builder import OVConstantLayerAttributes from nncf.openvino.graph.node_utils import get_bias_value from nncf.openvino.graph.node_utils import get_weight_value from nncf.openvino.graph.node_utils import is_node_with_bias from nncf.quantization.algorithms.accuracy_control.backend import AccuracyControlAlgoBackend +from nncf.quantization.algorithms.accuracy_control.backend import AsyncPreparedModel + + +def compile_model(model: ov.Model, done_queue: multiprocessing.Queue) -> None: + compiled_model = ov.Core().compile_model(model, "CPU") + model_stream = compiled_model.export_model() + done_queue.put(model_stream) + + +class OVAsyncPreparedModel(AsyncPreparedModel): + def __init__(self, proc: multiprocessing.Process, done_queue: multiprocessing.Queue): + self.proc = proc + self.done_queue = done_queue + + def get(self, timeout=None) -> ov.CompiledModel: + try: + model_stream = self.done_queue.get(timeout=timeout) + except multiprocessing.TimeoutError as ex: + raise TimeoutError() from ex + return ov.Core().import_model(model_stream, "CPU") class OVAccuracyControlAlgoBackend(AccuracyControlAlgoBackend): @@ -51,7 +73,7 @@ def get_quantizable_metatypes() -> List[OVOpMetatype]: @staticmethod def get_quantize_agnostic_metatypes() -> List[OVOpMetatype]: - return QUANTIZE_AGNOSTIC_OPERATIONS + return QUANTIZE_AGNOSTIC_OPERATIONS + [OVConcatMetatype] @staticmethod def get_shapeof_metatypes() -> List[OVOpMetatype]: @@ -65,9 +87,7 @@ def is_node_with_bias(node: NNCFNode, nncf_graph: NNCFGraph) -> bool: @staticmethod def is_node_with_weight(node: NNCFNode) -> bool: - return node.metatype in GENERAL_WEIGHT_LAYER_METATYPES and isinstance( - node.layer_attributes, OVConstantLayerAttributes - ) + return node.metatype in GENERAL_WEIGHT_LAYER_METATYPES and isinstance(node.layer_attributes, OVLayerAttributes) @staticmethod def get_bias_value(node_with_bias: NNCFNode, nncf_graph: NNCFGraph, model: ov.Model) -> np.ndarray: @@ -81,8 +101,24 @@ def get_weight_value(node_with_weight: NNCFNode, model: ov.Model, port_id: int) def get_weight_tensor_port_ids(node: NNCFNode) -> List[Optional[int]]: return node.layer_attributes.get_const_port_ids() + @staticmethod + def get_model_size(model: ov.Model) -> int: + model_size = 0 + for op in model.get_ops(): + if op.get_type_name() == "Constant": + model_size += op.data.nbytes + + return model_size + # Preparation of model @staticmethod def prepare_for_inference(model: ov.Model) -> Any: return ov.compile_model(model) + + @staticmethod + def prepare_for_inference_async(model: ov.Model) -> Any: + queue = multiprocessing.Queue() + p = multiprocessing.Process(target=compile_model, args=(model, queue)) + 
p.start() + return OVAsyncPreparedModel(p, queue) diff --git a/nncf/quantization/algorithms/accuracy_control/rank_functions.py b/nncf/quantization/algorithms/accuracy_control/rank_functions.py index 9428bd9f91e..d0961236f0d 100644 --- a/nncf/quantization/algorithms/accuracy_control/rank_functions.py +++ b/nncf/quantization/algorithms/accuracy_control/rank_functions.py @@ -9,26 +9,43 @@ # See the License for the specific language governing permissions and # limitations under the License. -from typing import Dict +from typing import Callable, List, TypeVar import numpy as np +from nncf.common.utils.backend import BackendType -def normalized_mse(x_ref: Dict[str, np.ndarray], x_approx: Dict[str, np.ndarray]) -> float: +TTensor = TypeVar("TTensor") + + +def create_normalized_mse_func(backend: BackendType) -> Callable[[List[TTensor], List[TTensor]], float]: + """ + Factory method to create backend-specific implementation of the normalized_nmse. + + :param backend: A backend type. + :return: The backend-specific implementation of the normalized_nmse. + """ + if backend == BackendType.OPENVINO: + return normalized_mse + + raise RuntimeError(f"Could not create backend-specific implementation! {backend} backend is not supported!") + + +def normalized_mse(ref_outputs: List[np.ndarray], approx_outputs: List[np.ndarray]) -> float: """ - Calculates normalized mean square error between `x_ref` and `x_approx`. + Calculates normalized mean square error between `ref_outputs` and `approx_outputs`. The normalized mean square error is defined as NMSE(x_ref, x_approx) = MSE(x_ref, x_approx) / MSE(x_ref, 0) - :param x_ref: Dictionary of arrays. Represents the reference values. - :param x_approx: Dictionary of arrays. Represents the measured values. - :return: The normalized mean square error between `x_ref` and `x_approx`. + :param ref_outputs: Reference outputs. + :param approx_outputs: Approximate outputs. + :return: The normalized mean square error between `ref_outputs` and `approx_outputs`. """ metrics = [] - for output_name in x_ref: - error_flattened = (x_ref[output_name] - x_approx[output_name]).flatten() - x_ref_flattened = x_ref[output_name].flatten() + for x_ref, x_approx in zip(ref_outputs, approx_outputs): + error_flattened = (x_ref - x_approx).flatten() + x_ref_flattened = x_ref.flatten() nmse = np.dot(error_flattened, error_flattened) / np.dot(x_ref_flattened, x_ref_flattened) metrics.append(nmse) nmse = sum(metrics) / len(metrics) diff --git a/nncf/quantization/algorithms/accuracy_control/ranker.py b/nncf/quantization/algorithms/accuracy_control/ranker.py index fcc8e34c526..97ed8a9d91e 100644 --- a/nncf/quantization/algorithms/accuracy_control/ranker.py +++ b/nncf/quantization/algorithms/accuracy_control/ranker.py @@ -10,50 +10,28 @@ # limitations under the License. 
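A quick numeric check of the reworked `normalized_mse()` above, which now takes lists of per-output arrays instead of name-keyed dictionaries:

    import numpy as np

    from nncf.quantization.algorithms.accuracy_control.rank_functions import normalized_mse

    ref = [np.array([1.0, 2.0, 3.0])]
    approx = [np.array([1.1, 1.9, 3.2])]
    # (0.1**2 + 0.1**2 + 0.2**2) / (1**2 + 2**2 + 3**2) = 0.06 / 14 ≈ 0.0043
    print(normalized_mse(ref, approx))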
import operator -from abc import ABC -from abc import abstractmethod from copy import deepcopy from dataclasses import dataclass -from typing import Any, Callable, Iterable, List, Optional, TypeVar +from typing import Any, Callable, List, Optional, TypeVar, Union -import numpy as np - -from nncf.common.factory import EngineFactory from nncf.common.graph import NNCFGraph from nncf.common.graph import NNCFNode from nncf.common.logging import nncf_logger from nncf.common.quantization.quantizer_removal import find_quantizer_nodes_to_cut from nncf.common.quantization.quantizer_removal import revert_operations_to_floating_point_precision +from nncf.common.utils.backend import BackendType +from nncf.common.utils.backend import get_backend from nncf.common.utils.timer import timer from nncf.data.dataset import Dataset from nncf.quantization.algorithms.accuracy_control.backend import AccuracyControlAlgoBackend +from nncf.quantization.algorithms.accuracy_control.evaluator import Evaluator +from nncf.quantization.algorithms.accuracy_control.rank_functions import create_normalized_mse_func +from nncf.quantization.algorithms.accuracy_control.subset_selection import select_subset from nncf.quantization.passes import remove_shapeof_subgraphs TModel = TypeVar("TModel") - - -def get_ranking_subset_indices(errors: List[float], ranking_subset_size: int) -> List[int]: - """ - Returns `ranking_subset_size` indices of elements in the `errors` list - that have the biggest error value. Returned indices are sorted in - ascending order. - - :param errors: A list of errors. - :param ranking_subset_size: A number of returned indices. - :return: Indices of elements in the `errors` list which have the biggest error value. - """ - ordered_indices = [idx for idx, _ in sorted(enumerate(errors), key=operator.itemgetter(1), reverse=True)] - end_index = min(ranking_subset_size, len(ordered_indices)) - return sorted(ordered_indices[:end_index]) - - -def get_ranking_subset_indices_pot_version(errors: List[float], ranking_subset_size: int) -> List[int]: - """ - POT implementation of the `get_ranking_subset_indices()` method. - """ - ordered_indices = np.flip(np.argsort(errors)).tolist() - end_index = min(ranking_subset_size, len(ordered_indices)) - return sorted(ordered_indices[:end_index]) +TPModel = TypeVar("TPModel") +TTensor = TypeVar("TTensor") @dataclass @@ -70,7 +48,7 @@ class GroupToRank: operations: List[NNCFNode] -class Ranker(ABC): +class Ranker: """ Encapsulates logic to rank groups of quantizers. """ @@ -78,28 +56,29 @@ class Ranker(ABC): def __init__( self, ranking_subset_size: int, - ranking_fn: Callable[[Any, Any], float], dataset: Dataset, algo_backend: AccuracyControlAlgoBackend, + evaluator: Evaluator, + num_processes: int = 1, + ranking_fn: Optional[Callable[[Any, Any], float]] = None, ): """ :param ranking_subset_size: The number of data items that will be selected from the dataset to rank groups of quantizers. The `len(dataset)` data items will be selected if `ranking_subset_size` parameter is greater than the number of elements in the dataset. - :param ranking_fn: A function that compares values returned by - `_collect_values_for_each_item()` for initial and quantized models. :param dataset: Dataset for the ranking process. :param algo_backend: The `AccuracyControlAlgoBackend` algo backend. + :param evaluator: Evaluator to validate model. + :param ranking_fn: a function that compares values returned by + `Evaluator.collect_values_for_each_item()` method for initial and quantized model. 
""" self._ranking_subset_size = ranking_subset_size - self._ranking_fn = ranking_fn self._dataset = dataset self._algo_backend = algo_backend - # We don't need to re-calculate values for the initial model - # because they don't change. So use this attribute to store - # them to improve execution time. - self._ref_values = None + self._evaluator = evaluator + self._ranking_fn = ranking_fn + self._num_processes = num_processes def find_groups_of_quantizers_to_rank(self, quantized_model_graph: NNCFGraph) -> List[GroupToRank]: """ @@ -141,9 +120,10 @@ def find_groups_of_quantizers_to_rank(self, quantized_model_graph: NNCFGraph) -> def rank_groups_of_quantizers( self, groups_to_rank: List[GroupToRank], - initial_model: TModel, quantized_model: TModel, quantized_model_graph: NNCFGraph, + reference_values_for_each_item: Union[List[float], List[List[TTensor]]], + approximate_values_for_each_item: Union[List[float], List[List[TTensor]]], ) -> List[GroupToRank]: """ Ranks groups of quantizers by their contribution to accuracy drop. Returns a list of @@ -151,172 +131,145 @@ def rank_groups_of_quantizers( score i.e. its contribution to accuracy drop is the greatest. :param groups_to_rank: Groups of quantizers that should be ranked. - :param initial_model: Initial not quantized model. :param quantized_model: Quantized model. :param quantized_model_graph: NNCF graph for quantized model. + :param reference_values_for_each_item: List of reference values. + :param approximate_values_for_each_item: List of approximate values. :return: List of ranked groups of quantizers. """ - # See `Ranker.__init__()` to understand why we should do this. - if self._ref_values is None: - nncf_logger.info("Collecting metrics for each data item using an initial model") - with timer(): - self._ref_values = self._collect_values_for_each_item(initial_model, self._get_data_items()) - - nncf_logger.info("Collecting metrics for each data item using a quantized model") - with timer(): - approx_values = self._collect_values_for_each_item(quantized_model, self._get_data_items()) - - # Create a subset of data items that will be used to rank groups of quantizers. - scores = [self._ranking_fn(ref_val, approx_val) for ref_val, approx_val in zip(self._ref_values, approx_values)] - ranking_subset_indices = get_ranking_subset_indices_pot_version(scores, self._ranking_subset_size) - # TODO(andrey-churkin): The ranking subset size usually is small. So it is possible - # to save all ranking data items in memory and don't read them again. - ranking_data_items = self._get_data_items(ranking_subset_indices) + if self._ranking_fn is None: + self._ranking_fn = self._create_ranking_fn(get_backend(quantized_model)) + + ranking_subset_indices = select_subset( + self._ranking_subset_size, + reference_values_for_each_item, + approximate_values_for_each_item, + self._ranking_fn, + ) nncf_logger.info("Calculating ranking score for groups of quantizers") with timer(): # Calculate ranking score for groups of quantizers. 
- ranking_scores = [] # ranking_scores[i] is the ranking score for groups_to_rank[i] - for current_group in groups_to_rank: - modified_model = revert_operations_to_floating_point_precision( - current_group.operations, current_group.quantizers, quantized_model, quantized_model_graph + if self._num_processes > 1: + ranking_scores = self._multiprocessing_calculation_ranking_score( + quantized_model, + quantized_model_graph, + groups_to_rank, + ranking_subset_indices, + reference_values_for_each_item, ) - # Calculate the ranking score for the current group of quantizers. - ranking_score = self._calculate_ranking_score( - modified_model, ranking_data_items, ranking_subset_indices + + else: + ranking_scores = self._sequential_calculation_ranking_score( + quantized_model, + quantized_model_graph, + groups_to_rank, + ranking_subset_indices, + reference_values_for_each_item, ) - ranking_scores.append(float(ranking_score)) # Rank groups. ranked_groups = [group for _, group in sorted(zip(ranking_scores, groups_to_rank), key=operator.itemgetter(0))] return ranked_groups - @abstractmethod - def _get_data_items(self, indices: Optional[List[int]] = None) -> Iterable[Any]: - """ - Returns the data items used to validate the model and select the ranking dataset. + def _sequential_calculation_ranking_score( + self, + quantized_model: TModel, + quantized_model_graph: NNCFGraph, + groups_to_rank: List[GroupToRank], + ranking_subset_indices: List[int], + reference_values_for_each_item: Union[List[float], List[List[TTensor]]], + ): + ranking_scores = [] # ranking_scores[i] is the ranking score for groups_to_rank[i] + for current_group in groups_to_rank: + modified_model = revert_operations_to_floating_point_precision( + current_group.operations, current_group.quantizers, quantized_model, quantized_model_graph + ) - :param indices: The zero-based indices of data items that should be selected from - the data source. - :return: Data items. - """ + prepared_model = self._algo_backend.prepare_for_inference(modified_model) + ranking_score = self._calculate_ranking_score( + prepared_model, ranking_subset_indices, reference_values_for_each_item + ) + ranking_scores.append(float(ranking_score)) - @abstractmethod - def _collect_values_for_each_item(self, model: TModel, data_items: Iterable[Any]) -> List[Any]: - """ - Collects value for each item from `data_items`. A `value` is calculated using - model and data item. + return ranking_scores - :param model: Model. - :param data_items: Data items. - :return: Collected values. 
- """ + def _multiprocessing_calculation_ranking_score( + self, + quantized_model: TModel, + quantized_model_graph: NNCFGraph, + groups_to_rank: List[GroupToRank], + ranking_subset_indices: List[int], + reference_values_for_each_item: Union[List[float], List[List[TTensor]]], + ): + ranking_scores = [] # ranking_scores[i] is the ranking score for groups_to_rank[i] + prepared_model_queue = [] + for idx, current_group in enumerate(groups_to_rank): + modified_model = revert_operations_to_floating_point_precision( + current_group.operations, current_group.quantizers, quantized_model, quantized_model_graph + ) + + prepared_model_queue.append(self._algo_backend.prepare_for_inference_async(modified_model)) + + if idx >= (self._num_processes - 1): + prepared_model = prepared_model_queue.pop(0).get() + ranking_score = self._calculate_ranking_score( + prepared_model, ranking_subset_indices, reference_values_for_each_item + ) + ranking_scores.append(float(ranking_score)) + + for _ in range(self._num_processes - 1): + prepared_model = prepared_model_queue.pop(0).get() + ranking_score = self._calculate_ranking_score( + prepared_model, ranking_subset_indices, reference_values_for_each_item + ) + ranking_scores.append(float(ranking_score)) + + return ranking_scores - @abstractmethod def _calculate_ranking_score( - self, modified_model: TModel, ranking_data_items: Iterable[Any], ranking_subset_indices: List[int] + self, + prepared_model: TPModel, + ranking_subset_indices: List[int], + reference_values_for_each_item: Union[List[float], List[List[TTensor]]], ) -> float: """ Calculates the ranking score for the current group of quantizers. :param modified_model: Model from which the current group of quantizers was removed. - :param ranking_data_items: Data items for ranking score calculation. :param ranking_subset_indices: Indices of the `ranking_data_items` in the whole dataset. + :param reference_values_for_each_item: List of reference values. :return: The ranking score for the current group of quantizers. """ + if self._evaluator.is_metric_mode(): + # Calculate ranking score based on metric + ranking_score, _ = self._evaluator.validate_model_for_inference( + prepared_model, self._dataset, ranking_subset_indices + ) + else: + # Calculate ranking score based on differences in logits + approximate_outputs = self._evaluator.collect_values_for_each_item_using_model_for_inference( + prepared_model, self._dataset, ranking_subset_indices + ) + reference_outputs = [reference_values_for_each_item[i] for i in ranking_subset_indices] + errors = [self._ranking_fn(a, b) for a, b in zip(reference_outputs, approximate_outputs)] + ranking_score = sum(errors) / len(errors) - -class LogitsBasedRanker(Ranker): - """ - Encapsulates logic to rank groups of quantizers based on differences in logits. - """ - - def _get_data_items(self, indices: Optional[List[int]] = None) -> Iterable[Any]: - """ - Returns data items from which ranking dat - """ - return self._dataset.get_inference_data(indices) - - def _collect_values_for_each_item(self, model: TModel, data_items: Iterable[Any]) -> List[Any]: - """ - Infers `model` for each item from the `dataset` and returns collected logits. - - :param model: A model to be inferred. - :param data_items: Data items. - :return: A list that contains logits for each item from the dataset. 
- """ - engine = EngineFactory.create(model) - outputs = [engine.infer(data_item) for data_item in data_items] - return outputs - - def _calculate_ranking_score( - self, modified_model: TModel, ranking_data_items: Iterable[Any], ranking_subset_indices: List[int] - ) -> float: - approx_values_subset = self._collect_values_for_each_item(modified_model, ranking_data_items) - ref_values_subset = (self._ref_values[i] for i in ranking_subset_indices) - errors = [self._ranking_fn(a, b) for a, b in zip(ref_values_subset, approx_values_subset)] - ranking_score = sum(errors) / len(errors) return ranking_score - -class MetricBasedRanker(Ranker): - """ - Encapsulates logic to rank groups of quantizers based on differences in metric. - """ - - def __init__( - self, - ranking_subset_size: int, - ranking_fn: Callable[[Any, Any], float], - dataset: Dataset, - algo_backend: AccuracyControlAlgoBackend, - validation_fn: Callable[[Any, Iterable[Any]], float], - ): - """ - :param ranking_subset_size: The number of data items that will be selected from - the dataset to rank groups of quantizers. The `len(dataset)` data items will - be selected if `ranking_subset_size` parameter is greater than the number of - elements in the dataset. - :param ranking_fn: A function that compares values returned by - `_collect_values_for_each_item()` for initial and quantized models. - :param dataset: Dataset for the ranking process. - :param algo_backend: The `AccuracyControlAlgoBackend` algo backend. - :param validation_fn: A validation function to validate the model. - It should take two argumets: - - `model`: model to be validate. - - `validation_dataset`: dataset that provides data items to - validate the provided model. - The function should return the value of the metric with the following - meaning: A higher value corresponds to better performance of the model. + def _create_ranking_fn(self, backend: BackendType) -> Callable[[List[TTensor], List[TTensor]], float]: """ - super().__init__(ranking_subset_size, ranking_fn, dataset, algo_backend) - self._validation_fn = validation_fn + Creates ranking function. - def _get_data_items(self, indices: Optional[List[int]] = None) -> Iterable[Any]: - return self._dataset.get_data(indices) - - def _collect_values_for_each_item(self, model: TModel, data_items: Iterable[Any]) -> List[Any]: - """ - Calls `validation_fn` for each item from the `dataset` and returns collected metrics. - - :param model: The model to be inferred. - :param data_items: Data items. - :return: A list that contains a metric for each item from the dataset. + :return: The ranking function. 
""" - model_for_inference = self._algo_backend.prepare_for_inference(model) - - metrics = [] - for data_item in data_items: - value = self._validation_fn(model_for_inference, [data_item]) - metrics.append(value) - - return metrics - - def _calculate_ranking_score( - self, modified_model: TModel, ranking_data_items: Iterable[Any], ranking_subset_indices: List[int] - ) -> float: - ranking_score = self._validation_fn( - self._algo_backend.prepare_for_inference(modified_model), ranking_data_items - ) - return ranking_score + if self._evaluator.is_metric_mode(): + ranking_fn = operator.sub + metric_name = "ORIGINAL" + else: + ranking_fn = create_normalized_mse_func(backend) + metric_name = "NMSE" + nncf_logger.info(f"{metric_name} metric is used to rank quantizers") + + return ranking_fn diff --git a/nncf/quantization/algorithms/accuracy_control/subset_selection.py b/nncf/quantization/algorithms/accuracy_control/subset_selection.py new file mode 100644 index 00000000000..0715487f248 --- /dev/null +++ b/nncf/quantization/algorithms/accuracy_control/subset_selection.py @@ -0,0 +1,70 @@ +# Copyright (c) 2023 Intel Corporation +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import operator +from typing import Callable, List, TypeVar, Union + +import numpy as np + +TTensor = TypeVar("TTensor") + + +def get_subset_indices(errors: List[float], subset_size: int) -> List[int]: + """ + Returns `subset_size` indices of elements in the `errors` list + that have the biggest error value. Returned indices are sorted in + ascending order. + + :param errors: A list of errors. + :param subset_size: A number of returned indices. + :return: Indices of elements in the `errors` list which have the biggest error value. + """ + ordered_indices = [idx for idx, _ in sorted(enumerate(errors), key=operator.itemgetter(1), reverse=True)] + end_index = min(subset_size, len(ordered_indices)) + return sorted(ordered_indices[:end_index]) + + +def get_subset_indices_pot_version(errors: List[float], subset_size: int) -> List[int]: + """ + POT implementation of the `get_subset_indices()` method. + """ + ordered_indices = np.flip(np.argsort(errors)).tolist() + end_index = min(subset_size, len(ordered_indices)) + return sorted(ordered_indices[:end_index]) + + +def select_subset( + subset_size: int, + reference_values_for_each_item: Union[List[float], List[List[TTensor]]], + approximate_values_for_each_item: Union[List[float], List[List[TTensor]]], + error_fn: Callable[[Union[float, List[TTensor]], Union[float, List[TTensor]]], float], +) -> List[int]: + """ + Selects first `subset_size` indices of data items for which `error_fn` function gives maximal value. + Assumes that `reference_values_for_each_item` and `approximate_values_for_each_item` lists have same + number of items. + + :param subset_size: Number of indices that will be selected. The `len(reference_values_for_each_item)` + indices will be selected if `subset_size` parameter is greater than the number of elements in + the `reference_values_for_each_item`. 
+ :param reference_values_for_each_item: List of reference values. + :param approximate_values_for_each_item: List of approximate values. + :param error_fn: A function used to calculate difference between `reference_values_for_each_item[i]` + and `approximate_values_for_each_item[i]` list. + :return: First `subset_size` indices of data items for which `error_fn` function gives maximal value. + """ + errors = [ + error_fn(ref_val, approx_val) + for ref_val, approx_val in zip(reference_values_for_each_item, approximate_values_for_each_item) + ] + subset_indices = get_subset_indices_pot_version(errors, subset_size) + + return subset_indices diff --git a/nncf/quantization/algorithms/algorithm.py b/nncf/quantization/algorithms/algorithm.py index cd6d511bc52..432f8024491 100644 --- a/nncf/quantization/algorithms/algorithm.py +++ b/nncf/quantization/algorithms/algorithm.py @@ -14,18 +14,13 @@ from typing import Dict, Optional, TypeVar from nncf import Dataset +from nncf.common.graph.graph import NNCFGraph from nncf.common.tensor_statistics.statistic_point import StatisticPointsContainer from nncf.common.utils.backend import BackendType TModel = TypeVar("TModel") -class AlgorithmParameters(ABC): - """ - Base class for Post-Training algorithm parameters. - """ - - class Algorithm(ABC): """ Base class for all Post-Training algorithms. @@ -35,43 +30,35 @@ class Algorithm(ABC): @abstractmethod def available_backends(self) -> Dict[str, BackendType]: """ - Returns dictionary of the avaliable backends for the algorithm + Returns dictionary of the available backends for the algorithm. - :return: Dict of backends supported by the algorithm + :return: Dict of backends supported by the algorithm. """ + @abstractmethod def apply( self, model: TModel, + graph: NNCFGraph, statistic_points: Optional[StatisticPointsContainer] = None, dataset: Optional[Dataset] = None, - ) -> TModel: - """ - Checks that statistic point exists, sets model into transformer - and applies the algorithm to the model. - :param model: model for applying algorithm - :param engine: engine for the model execution - :param statistic_points: StatisticPointsContainer - :return: model after algorithm - """ - if statistic_points is None: - return self._apply(model, statistic_points=None, dataset=dataset) - _statistic_points = self.get_statistic_points(model) - for edge_name in _statistic_points.keys(): - if statistic_points.get(edge_name) is None: - raise RuntimeError(f"No statistics collected for the layer {edge_name}") - return self._apply(model, statistic_points) - - @abstractmethod - def _apply( - self, model: TModel, statistic_points: StatisticPointsContainer, dataset: Optional[Dataset] = None ) -> TModel: """ Applies the algorithm to the model. + + :param model: Model for applying algorithm. + :param graph: Model graph. + :param statistic_points: Statistic points with collected statistics values. + :param dataset: A representative dataset for the calibration process. + :return: A resulting model. """ @abstractmethod - def get_statistic_points(self, model: TModel) -> StatisticPointsContainer: + def get_statistic_points(self, model: TModel, graph: NNCFGraph) -> StatisticPointsContainer: """ - Returns activation layers, for which StatisticsCollector should collect statistics. + Returns statistic points, for which StatisticsCollector should collect statistics. + + :param model: Model for statistics collection. + :param graph: Model graph. + :return: Statistic points, for which StatisticsCollector should collect statistics. 
""" diff --git a/nncf/quantization/algorithms/bias_correction/algorithm.py b/nncf/quantization/algorithms/bias_correction/algorithm.py index 7e988346b5c..221edd992de 100644 --- a/nncf/quantization/algorithms/bias_correction/algorithm.py +++ b/nncf/quantization/algorithms/bias_correction/algorithm.py @@ -9,8 +9,8 @@ # See the License for the specific language governing permissions and # limitations under the License. -from collections import deque -from typing import Any, Dict, List, Optional, TypeVar +from collections import defaultdict +from typing import Any, Dict, List, Optional, Tuple, TypeVar import numpy as np from tqdm import tqdm @@ -22,6 +22,7 @@ from nncf.common.factory import NNCFGraphFactory from nncf.common.graph import NNCFGraph from nncf.common.graph import NNCFNode +from nncf.common.graph.definitions import NNCFGraphNodeType from nncf.common.graph.transformations.commands import TargetType from nncf.common.graph.transformations.commands import TransformationCommand from nncf.common.graph.transformations.layout import TransformationLayout @@ -36,6 +37,7 @@ TModel = TypeVar("TModel") BIAS_CORRECTION_THRESHOLD = 1000 +OUTPUT_PORT_OF_NODE = 0 class BiasCorrection(Algorithm): @@ -94,8 +96,9 @@ def __init__( self.backend_params = backend_params self.nncf_graph = None self._backend_entity = None - self._collected_stat_inputs = set() - self._fp_inputs = {} + self._collected_stat_inputs_map = {} + self._fp_inputs = defaultdict(list) + self._algorithm_key = f"BC_{hash(self)}" if self.apply_for_all_nodes: raise RuntimeError("BiasCorrection algorithm does not support apply_for_all_nodes=True yet") @@ -124,18 +127,21 @@ def _set_backend_entity(self, model: TModel) -> None: "Cannot return backend-specific entity because {} is not supported!".format(model_backend) ) - def _apply( + def apply( self, model: TModel, + graph: NNCFGraph, statistic_points: Optional[StatisticPointsContainer] = None, dataset: Optional[Dataset] = None, ) -> TModel: self._set_backend_entity(model) + model = self._backend_entity.insert_null_biases(model, graph) main_transformations_layout = TransformationLayout() main_model_transformer = ModelTransformerFactory.create(model) model_copy = copy_model(model) - model_copy = self._remove_fq_from_inputs(model_copy) + graph_copy = NNCFGraphFactory.create(model_copy) + model_copy = self._backend_entity.remove_fq_from_inputs(model_copy, graph_copy) nncf_graph = NNCFGraphFactory.create(model_copy) nodes_with_bias = [] @@ -144,10 +150,17 @@ def _apply( node, nncf_graph ): nodes_with_bias.append(node) + + # We pre-collect information about the subgraph we need in order + # to collect statistics for the change in the bias of each layer. + # Also here we collect a list of layers that depend on the current one. + + # The collected information contains lists of input and output layers, + # for which we will create a subgraph for inference and collection of statistics. subgraphs_data = [self._get_subgraph_data_for_node(node, nncf_graph) for node in nodes_with_bias] for position, (node, subgraph_data) in tqdm( - list(enumerate(zip(nodes_with_bias, subgraphs_data))), desc="Biases correction" + list(enumerate(zip(nodes_with_bias, subgraphs_data))), desc="Applying Bias correction" ): node_name = node.node_name @@ -155,11 +168,13 @@ def _apply( # the model transformer (that uses during sub-graph extraction) already does this internally when creating. 
model_copy_subgraph = self._prepare_subgraph(node, model_copy, nncf_graph, subgraph_data) + # Then we create the necessary data lists from the previously collected statistics, + # for the subgraph inference. feed_dicts = self._create_feed_dicts(model_copy_subgraph, subgraph_data, statistic_points) bias_shift = self._compute_bias_shift(node, model_copy_subgraph, feed_dicts, statistic_points) - current_bias = self._backend_entity.get_bias_value(node, model, nncf_graph) + current_bias = self._backend_entity.get_bias_value(node, model_copy, nncf_graph) channel_axis = node.metatype.output_channel_axis if current_bias.ndim > 1: @@ -181,40 +196,15 @@ def _apply( else: nncf_logger.debug(f"{node_name} bias skipped by threshold. Magnitude: {magnitude}") - self._collect_new_stats(nncf_graph, model_copy_subgraph, feed_dicts, subgraph_data) - self._remove_unnecessary_stats(position, subgraphs_data) - return main_model_transformer.transform(main_transformations_layout) - - def _remove_fq_from_inputs(self, model: TModel) -> TModel: - """ - This model removes the activation Fake Quantize nodes (or Quantize-Dequantize pairs) from the model. - It's needed for the further bias shift calculation that relates on quantized weights. - - :param model: Backend-specific model. - :return: Backend-specific model without activation Fake Quantize nodes (or Quantize-Dequantize pairs). - """ - transformation_layout = TransformationLayout() - nncf_graph = NNCFGraphFactory.create(model) - - model_transformer = ModelTransformerFactory.create(model) - - seen_nodes = [] - nodes_queue = deque(nncf_graph.get_input_nodes()) - while nodes_queue: - current_node = nodes_queue.popleft() - current_node_name = current_node.node_name - - if current_node_name in seen_nodes: - continue + # After collecting data to change the bias value, we need to collect statistics for subsequent nodes, + # but already take into account the bias update made earlier. + self._collect_new_stats(model_copy_subgraph, feed_dicts, subgraph_data) - seen_nodes.append(current_node_name) - if current_node.metatype in self._backend_entity.quantizer_types: - target_point = self._backend_entity.target_point(TargetType.LAYER, current_node_name, 0) - command = self._backend_entity.node_removing_command(target_point) - transformation_layout.register(command) - nodes_queue.extend(nncf_graph.get_next_nodes(current_node)) + # Also, we need to remove unnecessary statistics that we don't need anymore, + # to reduce memory usage during the algorithm's pipeline. + self._remove_unnecessary_stats(position, subgraphs_data) - return model_transformer.transform(transformation_layout) + return main_model_transformer.transform(main_transformations_layout) def _get_subgraph_data_for_node(self, node: NNCFNode, nncf_graph: NNCFGraph) -> Dict[str, List[str]]: """ @@ -222,44 +212,74 @@ def _get_subgraph_data_for_node(self, node: NNCFNode, nncf_graph: NNCFGraph) -> This data contains the nodes (NNCFNode) for the subgraph building and statistics collection (for the next step). - :param node: NNCFNode instance. This is the main node that with bias that would be corrected (or not). + :param node: NNCFNode instance. This is the main node with bias that would be corrected (or not). :param nncf_graph: NNCFGraph instance for graph analysis. :return: A dict with the list of the nodes for the subgraph input and statistics collection. 
""" - stats_nodes, input_nodes, output_nodes = [], [], [] + statistic_nodes, subgraph_input_nodes, subgraph_output_nodes, subgraph_output_ids = [], [], [], [] - def traverse_to_layers_with_bias(node, output): - if node in output: - return True, output - if self._backend_entity.is_node_with_bias(node, nncf_graph): - output.append(node) - self._collected_stat_inputs.add(node.node_name) - activation_input = nncf_graph.get_input_edges(node)[0].from_node - - output_nodes.append(activation_input) - return True, output - return False, output - - def traverse_to_input_layers(node, output): - if node in output + input_nodes: - return True, output - if node.node_name in self._collected_stat_inputs and node not in stats_nodes: - output.append(node) - return True, output - return False, output + def fill_statistic_nodes(node): + # A small hack to speed up graph traversal. + if node in statistic_nodes or node in visited_nodes: + return + visited_nodes.append(node) + # If we found a node with bias, we have to collect it as a statistic node, + # and its input for _collected_stat_inputs_map, + # which will be used during the collection of statistics for the next node. + if self._backend_entity.is_node_with_bias(node, nncf_graph) and self._backend_entity.is_quantized_weights( + node, nncf_graph + ): + statistic_nodes.append(node) + activation_node, output_port_id = self._get_activation_node_and_port(node, nncf_graph) + subgraph_output_nodes.append(activation_node) + + output_id = (activation_node.node_name, output_port_id) + subgraph_output_ids.append(output_id) + self._collected_stat_inputs_map[node.node_name] = output_id + return + + for next_node in nncf_graph.get_next_nodes(node): + fill_statistic_nodes(next_node) + + def fill_subgraph_input_nodes(node): + # A small hack to speed up graph traversal. + if node in subgraph_input_nodes or node in visited_nodes: + return + visited_nodes.append(node) + + # Since we need to find the inputs for the subgraph, + # we can take only those layers for which we have already collected statistics. + if node.node_name in self._collected_stat_inputs_map and node not in statistic_nodes: + subgraph_input_nodes.append(node) + return + + for previous_node in nncf_graph.get_previous_nodes(node): + fill_subgraph_input_nodes(previous_node) + + # First, we need to find out the nodes with bias that follow by main node. + # To collect statistics for next nodes. + visited_nodes = [] for next_node in nncf_graph.get_next_nodes(node): - stats_nodes.extend(nncf_graph.traverse_graph(next_node, traverse_to_layers_with_bias)) - - stats_nodes = stats_nodes if stats_nodes else nncf_graph.get_next_nodes(node) - for stat_node in stats_nodes: - input_nodes.extend(nncf_graph.traverse_graph(stat_node, traverse_to_input_layers, traverse_forward=False)) - - output_nodes = output_nodes if output_nodes else stats_nodes + fill_statistic_nodes(next_node) + + # We then need to find nodes for which statistics have already been collected, + # to use them as inputs for the subgraph. + statistic_nodes = statistic_nodes if statistic_nodes else nncf_graph.get_next_nodes(node) + visited_nodes = [] + for stat_node in statistic_nodes: + fill_subgraph_input_nodes(stat_node) + + # In case the outputs were not found during the collection of statistics nodes, + # we use the latter as the outputs of the subgraph. 
+ subgraph_output_nodes = subgraph_output_nodes if subgraph_output_nodes else statistic_nodes + subgraph_output_names = [ + n.node_name for n in subgraph_output_nodes if NNCFGraphNodeType.OUTPUT_NODE not in n.node_name + ] subgraph_data = { - "input_node_names": [input_node.node_name for input_node in input_nodes], - "output_node_names": [n.node_name for n in output_nodes], - "statistic_node_names": [stat_node.node_name for stat_node in stats_nodes], + "subgraph_input_names": set(n.node_name for n in subgraph_input_nodes), + "subgraph_output_names": set(subgraph_output_names), + "subgraph_output_ids": set(subgraph_output_ids), } return subgraph_data @@ -269,19 +289,21 @@ def _prepare_subgraph(self, node: NNCFNode, model: TModel, nncf_graph: NNCFGraph This method prepares the subgraph from the model for the further inference. :param node: NNCFNode instance for the current layer. - :param model: Backend-specifig model instance. + :param model: Backend-specific model instance. :param nncf_graph: Instance of NNCFGraph. :param subgraph_data: A dictionary with the layers for the graph building. :return: Backend-specific subgraph extracted from the model. """ - input_node_names, output_node_names = subgraph_data["input_node_names"], subgraph_data["output_node_names"] - extracted_model = self.extract_model(model, input_node_names, output_node_names) + extracted_model = self.extract_model( + model, subgraph_data["subgraph_input_names"], subgraph_data["subgraph_output_names"] + ) transformation_layout = TransformationLayout() model_transformer = ModelTransformerFactory.create(extracted_model) - _, output_port_id = self._backend_entity.get_activation_port_ids_for_bias_node(node) + + # For layers with weights, there is only one output port - 0. statistic_point = self._backend_entity.target_point( - TargetType.POST_LAYER_OPERATION, node.node_name, output_port_id + TargetType.POST_LAYER_OPERATION, node.node_name, port_id=OUTPUT_PORT_OF_NODE ) output_insertion_command = self._backend_entity.output_insertion_command(nncf_graph, statistic_point) transformation_layout.register(output_insertion_command) @@ -291,7 +313,7 @@ def _create_feed_dicts( self, model: TModel, subgraph_data: Dict, statistic_points: StatisticPointsContainer ) -> List[Dict]: """ - Creates the list of the dictionaries that contains the input data for the model exection. + Creates the list of the dictionaries that contains the input data for the model execution. :param model: TModel instance. :param subgraph_data: A dictionary with the necessary data for current node. 
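An illustrative example (not part of the patch; the node names are hypothetical) of the `subgraph_data` structure assembled above:

# Collected for a single biased node: the nodes to cut the sub-model at,
# and the (node name, output port) pairs whose raw statistics feed the
# sub-models of the following biased nodes.
subgraph_data = {
    "subgraph_input_names": {"Conv_1"},
    "subgraph_output_names": {"Relu_2"},
    "subgraph_output_ids": {("Relu_2", 0)},
}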
@@ -302,19 +324,21 @@ def _create_feed_dicts( statistics_size = self.subset_size statistics_per_input = {} - for input_node_name in subgraph_data["input_node_names"]: + for input_node_name in subgraph_data["subgraph_input_names"]: input_tensor_name = self._backend_entity.get_input_name(model, input_node_name) - input_fp = self._get_fp_inputs(statistic_points, input_node_name) + activation_name, port_id = self._collected_stat_inputs_map[input_node_name] + input_fp = self._get_fp_inputs(statistic_points, node_name=activation_name, port_id=port_id) statistics_per_input[input_tensor_name] = input_fp statistics_size = min(statistics_size, len(input_fp)) for stat_id in range(statistics_size): feed_dict = {} - for input_node_name in subgraph_data["input_node_names"]: + for input_node_name in subgraph_data["subgraph_input_names"]: input_tensor_name = self._backend_entity.get_input_name(model, input_node_name) - feed_dict[input_tensor_name] = np.mean( - statistics_per_input[input_tensor_name][stat_id], axis=0, keepdims=True - ) + # Since we do not use as inputs the layers from which the statistics are gathered, + # but those that follow them, we need to take this into account when creating feed dicts. + activation_name, port_id = self._collected_stat_inputs_map[input_node_name] + feed_dict[input_tensor_name] = statistics_per_input[input_tensor_name][stat_id] feed_dicts.append(feed_dict) return feed_dicts @@ -322,16 +346,16 @@ def _compute_bias_shift( self, node: NNCFNode, model: TModel, feed_dicts: List, statistic_points: StatisticPointsContainer ) -> np.ndarray: """ - Computes bias shift that will be used for the futher bias correction. + Computes bias shift that will be used for the further bias correction. :param node: NNCFNode instance, current layer. :param model: Backend-specific model. - :param feed_dicts: List of dictionaries with the input data for model execition. + :param feed_dicts: List of dictionaries with the input data for model execution. :param statistic_points: StatisticPointsContainer instance. :return: Calculated bias shift value. """ output_fp = self._get_fp_outputs(statistic_points, node.node_name) - output_tensor_name = self._backend_entity.get_output_name(model, node.node_name) + output_tensor_name = self._backend_entity.get_output_name(model, node.node_name, OUTPUT_PORT_OF_NODE) engine = EngineFactory.create(model) channel_axis = node.metatype.output_channel_axis q_outputs = [] @@ -339,6 +363,7 @@ def _compute_bias_shift( q_output = engine.infer(feed_dict) q_output = self._backend_entity.process_model_output(q_output, output_tensor_name) q_outputs.append(self._backend_entity.tensor_processor.mean_per_channel(q_output, channel_axis).tensor) + # Here we get the per-sample average, so the axis is 0. q_output = np.mean(q_outputs, axis=0) return output_fp - q_output @@ -358,7 +383,7 @@ def _get_bias_shift_magnitude(current_bias_value: np.ndarray, updated_bias_value def _correct_bias(self, model: TModel, bias_correction_command: TransformationCommand) -> TModel: """ - Returns the model (which can be represended as subgraph) with the updated bias value for the current layer. + Returns the model (which can be represented as subgraph) with the updated bias value for the current layer. :param model: Backend-specific model. :param bias_correction_command: TransformationCommand instance for the bias correction. 
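The bias shift computed above reduces to the difference between the per-channel mean of the floating-point outputs and the per-channel mean of the quantized sub-model outputs. A minimal sketch (not part of the patch), assuming NCHW NumPy activations with the channel axis at position 1:

import numpy as np

def bias_shift_sketch(fp_outputs, q_outputs, channel_axis=1):
    # Reduce every axis except the channel one, then average over the samples.
    def per_channel_mean(x):
        axes = tuple(i for i in range(x.ndim) if i != channel_axis)
        return x.mean(axis=axes)

    fp_mean = np.mean([per_channel_mean(x) for x in fp_outputs], axis=0)
    q_mean = np.mean([per_channel_mean(x) for x in q_outputs], axis=0)
    return fp_mean - q_mean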
@@ -369,12 +394,11 @@ def _correct_bias(self, model: TModel, bias_correction_command: TransformationCo transformation_layout.register(bias_correction_command) return model_transformer.transform(transformation_layout) - def _collect_new_stats(self, nncf_graph: NNCFGraph, model: TModel, feed_dicts: List, subgraph_data: Dict) -> None: + def _collect_new_stats(self, model: TModel, feed_dicts: List, subgraph_data: Dict) -> None: """ Updates the self._fp_inputs with the new statistics for the next layers after the correction of the bias for the current. - :param nncf_graph: NNCFGraph instance. :param model: Backend-specific subgraph. :param feed_dicts: List of dictionaries with the input data for the subgraph. :param subgraph_data: A dictionary with the needed list of the statistic nodes that will be updated. @@ -382,12 +406,9 @@ def _collect_new_stats(self, nncf_graph: NNCFGraph, model: TModel, feed_dicts: L engine = EngineFactory.create(model) for feed_dict in feed_dicts: new_q_output = engine.infer(feed_dict) - output_data = zip(subgraph_data["statistic_node_names"], subgraph_data["output_node_names"]) - for stat_node_name, output_node_name in output_data: - output_tensor_name = self._backend_entity.get_output_name(model, output_node_name) - if stat_node_name not in self._fp_inputs: - self._fp_inputs[stat_node_name] = [] - self._fp_inputs[stat_node_name].append(new_q_output[output_tensor_name]) + for output_node_name, output_id in subgraph_data["subgraph_output_ids"]: + output_tensor_name = self._backend_entity.get_output_name(model, output_node_name, output_id) + self._fp_inputs[(output_node_name, output_id)].append(new_q_output[output_tensor_name]) def _remove_unnecessary_stats(self, position: int, subgraphs_data: Dict[str, Dict]) -> None: """ @@ -400,39 +421,48 @@ def _remove_unnecessary_stats(self, position: int, subgraphs_data: Dict[str, Dic # Collects list of the statistics that needed for the future layers. needed_stats_list = [] for i in range(position + 1, len(subgraphs_data)): - needed_stats_list.extend(subgraphs_data[i]["input_node_names"]) + input_names = subgraphs_data[i]["subgraph_input_names"] + needed_stats_list.extend([self._collected_stat_inputs_map[name][0] for name in input_names]) - node_inputs_name = subgraphs_data[position]["input_node_names"] + node_inputs_name = subgraphs_data[position]["subgraph_input_names"] for node_input_name in node_inputs_name: - if node_input_name not in needed_stats_list and node_input_name in self._fp_inputs: - nncf_logger.debug(f"Dropped {node_input_name}") - self._fp_inputs[node_input_name] = [] + activation_name, port_id = self._collected_stat_inputs_map[node_input_name] + input_id = (activation_name, port_id) + if activation_name not in needed_stats_list and input_id in self._fp_inputs: + nncf_logger.debug(f"Dropped {activation_name} output statistics.") + self._fp_inputs[input_id] = [] - def _get_fp_inputs(self, statistic_points: StatisticPointsContainer, node_name: str) -> np.ndarray: + def _get_fp_inputs(self, statistic_points: StatisticPointsContainer, node_name: str, port_id: int) -> np.ndarray: """ Makes out pre-layer needed data from the floating-point collected statistics. :param statistic_points: Filled StatisticPointsContainer. :param node_name: Name of the current layer. + :param port_id: Port id for statistics collection. :return: Collected mean tensor data and shape for the further bias calculation. 
""" def input_filter_func(point): + # For the floating-point statistics collected in POST_LAYER style, + # we also need to determine the output port id. + # For the cases when the layer has more than one (0) output port. return ( - BiasCorrection in point.algorithm_to_tensor_collectors - and point.target_point.type == TargetType.PRE_LAYER_OPERATION + self._algorithm_key in point.algorithm_to_tensor_collectors + and point.target_point.type == TargetType.POST_LAYER_OPERATION + and point.target_point.port_id == port_id ) - if node_name in self._fp_inputs: - return self._fp_inputs[node_name] + input_id = (node_name, port_id) + if input_id in self._fp_inputs: + return self._fp_inputs[input_id] input_fp = [] for tensor_collector in statistic_points.get_algo_statistics_for_node( - node_name, input_filter_func, BiasCorrection + node_name, input_filter_func, self._algorithm_key ): input_fp.extend(tensor_collector.get_statistics().values) - self._fp_inputs[node_name] = input_fp - return self._fp_inputs[node_name] + self._fp_inputs[input_id] = input_fp + return self._fp_inputs[input_id] def _get_fp_outputs(self, statistic_points: StatisticPointsContainer, node_name: str) -> np.ndarray: """ @@ -445,98 +475,147 @@ def _get_fp_outputs(self, statistic_points: StatisticPointsContainer, node_name: def output_filter_func(point): return ( - BiasCorrection in point.algorithm_to_tensor_collectors + self._algorithm_key in point.algorithm_to_tensor_collectors and point.target_point.type == TargetType.POST_LAYER_OPERATION ) output_fp = [] for tensor_collector in statistic_points.get_algo_statistics_for_node( - node_name, output_filter_func, BiasCorrection + node_name, output_filter_func, self._algorithm_key ): output_fp.extend(tensor_collector.get_statistics().mean_values) return np.array(output_fp) - def get_statistic_points(self, model: TModel) -> StatisticPointsContainer: + def get_statistic_points(self, model: TModel, graph: NNCFGraph) -> StatisticPointsContainer: self._set_backend_entity(model) - model_copy = self._remove_fq_from_inputs(copy_model(model)) - nncf_graph = NNCFGraphFactory.create(model_copy) if self.nncf_graph is None else self.nncf_graph + model_copy = self._backend_entity.remove_fq_from_inputs(copy_model(model), graph) + graph_copy = NNCFGraphFactory.create(model_copy) + model_copy = self._backend_entity.insert_null_biases(model_copy, graph_copy) + nncf_graph = NNCFGraphFactory.create(model_copy) statistic_container = StatisticPointsContainer() nodes_with_bias = [ node for node in nncf_graph.topological_sort() if self._backend_entity.is_node_with_bias(node, nncf_graph) ] model_inputs = nncf_graph.get_input_nodes() - biased_after_input_nodes = self._get_biased_after_input_nodes(nncf_graph, model_inputs) + # Collection of statistics after layers where biases will be corrected. 
for node in nodes_with_bias: node_name = node.node_name channel_axis = node.metatype.output_channel_axis - input_port_id, output_port_id = self._backend_entity.get_activation_port_ids_for_bias_node(node) - if node_name in biased_after_input_nodes: - self._collected_stat_inputs.add(node_name) - statistic_point = self._backend_entity.target_point( - TargetType.PRE_LAYER_OPERATION, node_name, input_port_id - ) - stat_collector = self._backend_entity.batch_statistic_collector( - num_samples=self.subset_size, inplace=self.inplace_statistics - ) - statistic_container.add_statistic_point( - StatisticPoint( - target_point=statistic_point, tensor_collector=stat_collector, algorithm=BiasCorrection - ) - ) + + # For layers with weights, there is only one output port - 0. statistic_point = self._backend_entity.target_point( - TargetType.POST_LAYER_OPERATION, node_name, output_port_id + TargetType.POST_LAYER_OPERATION, node_name, port_id=OUTPUT_PORT_OF_NODE ) stat_collector = self._backend_entity.mean_statistic_collector( reduction_shape=channel_axis, num_samples=self.subset_size, inplace=self.inplace_statistics ) statistic_container.add_statistic_point( - StatisticPoint(target_point=statistic_point, tensor_collector=stat_collector, algorithm=BiasCorrection) + StatisticPoint( + target_point=statistic_point, tensor_collector=stat_collector, algorithm=self._algorithm_key + ) ) - for input_node in model_inputs: - for next_input_node in nncf_graph.get_next_nodes(input_node): - self._collected_stat_inputs.add(next_input_node.node_name) - statistic_point = self._backend_entity.target_point( - TargetType.PRE_LAYER_OPERATION, next_input_node.node_name, port_id=0 - ) - stat_collector = self._backend_entity.batch_statistic_collector( - num_samples=self.subset_size, inplace=self.inplace_statistics + # We must collect the nodes with biases following the model inputs. + biased_after_input_nodes = self._get_biased_after_nodes(nncf_graph, model_inputs, model_copy) + + for biased_after_input_node in biased_after_input_nodes: + # We need to collect activation input to register it for the biased layer as the layer with statistics. + activation_node, output_port_id = self._get_activation_node_and_port(biased_after_input_node, nncf_graph) + activation_node_name = activation_node.node_name + + self._collected_stat_inputs_map[biased_after_input_node.node_name] = (activation_node_name, output_port_id) + statistic_point = self._backend_entity.target_point( + TargetType.POST_LAYER_OPERATION, activation_node_name, port_id=output_port_id + ) + stat_collector = self._backend_entity.raw_statistic_collector( + num_samples=self.subset_size, inplace=self.inplace_statistics + ) + statistic_container.add_statistic_point( + StatisticPoint( + target_point=statistic_point, tensor_collector=stat_collector, algorithm=self._algorithm_key ) - statistic_container.add_statistic_point( - StatisticPoint( - target_point=statistic_point, tensor_collector=stat_collector, algorithm=BiasCorrection - ) + ) + + # Then we need also to collect model input statistics to prevent cases when nodes with bias have no input data. 
+ for input_node in model_inputs: + # We assume that input node has only one output port + input_name = input_node.node_name + if input_name in statistic_container: + continue + for next_layer in nncf_graph.get_next_nodes(input_node): + self._collected_stat_inputs_map[next_layer.node_name] = (input_node.node_name, OUTPUT_PORT_OF_NODE) + statistic_point = self._backend_entity.target_point( + TargetType.POST_LAYER_OPERATION, input_node.node_name, port_id=OUTPUT_PORT_OF_NODE + ) + stat_collector = self._backend_entity.raw_statistic_collector( + num_samples=self.subset_size, inplace=self.inplace_statistics + ) + statistic_container.add_statistic_point( + StatisticPoint( + target_point=statistic_point, tensor_collector=stat_collector, algorithm=self._algorithm_key ) + ) return statistic_container - def _get_biased_after_input_nodes(self, nncf_graph: NNCFGraph, model_inputs: List[NNCFNode]) -> Dict[str, str]: + def _get_activation_node_and_port(self, node: NNCFNode, nncf_graph: NNCFGraph) -> Tuple[NNCFNode, int]: + """ + This method returns the activation layer and corresponding port id for the node. + + :param node: NNCFGraph node for which the activation is sought. + :param nncf_graph: NNCFGraph instance with the node. + :return: Tuple with the activation node and port id. """ - This method finds and returns the first nodes with the bias in the model that follows after the input nodes. + activation_port = self._backend_entity.get_activation_port_id(node, nncf_graph) + activation_node = nncf_graph.get_input_edges(node)[activation_port].from_node + port_id = nncf_graph.get_edge(activation_node, node).output_port_id + return activation_node, port_id + + def _get_biased_after_nodes(self, nncf_graph: NNCFGraph, nodes: List[NNCFNode], model: TModel) -> List[NNCFNode]: + """ + This method finds and returns nodes with the bias in the model that follows after the input nodes. :param nncf_graph: NNCFGraph instance. - :param model_inputs: List of the model inputs as NNCFNodes. - :return: A dictionary with the names of the nodes with bias as keys and their input node names as values. + :param nodes: List of the model inputs as NNCFNodes. + :param model: TModel instance. + :return: List of the nodes with bias. """ - def traverse_to_biased(node, output): - if node in output: - return True, output + def traverse_to_biased(node, condition_container): + # A small hack to speed up graph traversal. + if node in visited_nodes: + return + visited_nodes.append(node) + + # We need to collect nodes for the next recursion step. + node_children = nncf_graph.get_next_nodes(node) + + # Check that node is with bias. 
if self._backend_entity.is_node_with_bias(node, nncf_graph): - output.append(node) - return True, output - return False, output + condition_container.add(node) + return - biased_after_param_nodes = {} + for node_child in node_children: + traverse_to_biased(node_child, condition_container) - for model_input in model_inputs: - nodes_with_bias = nncf_graph.traverse_graph(model_input, traverse_to_biased) - for node in nodes_with_bias: - activation_input = nncf_graph.get_input_edges(node)[0].from_node - biased_after_param_nodes[node.node_name] = activation_input.node_name - return biased_after_param_nodes + biased_nodes = set() + visited_nodes = [] + for node in nodes: + nncf_logger.debug(f"Looking for biased nodes after {node.node_name} layer.") + traverse_to_biased(node, condition_container=biased_nodes) + + dependant_nodes = set() + # After finding the nodes following the provided layers, we need to make sure + # that the found nodes really only depend on the main layers, and not on each other. + for biased_node in biased_nodes: + visited_nodes = [] + nncf_logger.debug(f"Filtering biased nodes after {biased_node.node_name} layer.") + for next_node in nncf_graph.get_next_nodes(biased_node): + traverse_to_biased(next_node, condition_container=dependant_nodes) + + return list(biased_nodes - dependant_nodes) def extract_model(self, model: TModel, input_node_names: List[str], output_node_names: List[str]) -> TModel: """ @@ -549,8 +628,6 @@ def extract_model(self, model: TModel, input_node_names: List[str], output_node_ """ transformation_layout = TransformationLayout() model_transformer = ModelTransformerFactory.create(model) - model_extraction_command = self._backend_entity.model_extraction_command( - set(input_node_names), set(output_node_names) - ) + model_extraction_command = self._backend_entity.model_extraction_command(input_node_names, output_node_names) transformation_layout.register(model_extraction_command) return model_transformer.transform(transformation_layout) diff --git a/nncf/quantization/algorithms/bias_correction/backend.py b/nncf/quantization/algorithms/bias_correction/backend.py index c189bd2bf20..a85f2fffb0a 100644 --- a/nncf/quantization/algorithms/bias_correction/backend.py +++ b/nncf/quantization/algorithms/bias_correction/backend.py @@ -11,7 +11,7 @@ from abc import ABC from abc import abstractmethod -from typing import List, Optional, Tuple, TypeVar +from typing import List, Optional, TypeVar import numpy as np @@ -39,13 +39,6 @@ def tensor_processor(self): Returns backend-specific instance of the NNCFCollectorTensorProcessor. """ - @property - @abstractmethod - def quantizer_types(self): - """ - Returns backend-specific list of the quantizer metatypes. - """ - @staticmethod @abstractmethod def target_point(target_type: TargetType, target_node_name: str, port_id: int) -> TargetPoint: @@ -75,7 +68,7 @@ def model_extraction_command(inputs: List[str], outputs: List[str]) -> Transform """ Returns backend-specific command to extract sub-model based on input & output names. - :param inputs: List of the input names for sub-model beggining. + :param inputs: List of the input names for sub-model beginning. :param outputs: List of the output names for sub-model end. :return: Backend-specific TransformationCommand for the model extraction. """ @@ -91,16 +84,6 @@ def output_insertion_command(nncf_graph: NNCFGraph, target_point: TargetPoint) - :return: Backend-specific command that inserts output. 
""" - @staticmethod - @abstractmethod - def node_removing_command(target_point: TargetPoint) -> TransformationCommand: - """ - Returns backend-specific command that removes node. - - :param target_point: TargetPoint instance. - :return: Backend-specific command that remove node. - """ - @staticmethod @abstractmethod def mean_statistic_collector( @@ -121,9 +104,10 @@ def mean_statistic_collector( @staticmethod @abstractmethod - def batch_statistic_collector(inplace: bool, num_samples: int = None) -> TensorStatisticCollectorBase: + def raw_statistic_collector(inplace: bool, num_samples: int = None) -> TensorStatisticCollectorBase: """ - Returns backend-specific batch statistic collector. + Returns backend-specific raw statistic collector. + This statistic collector uses for raw data calculation, without aggregating. :param inplace: Whether to calculate statistic inplace or not. :param num_samples: Maximum number of samples to collect. @@ -143,13 +127,15 @@ def process_model_output(raw_data: OutputType, output_name: str) -> NNCFTensor: @staticmethod @abstractmethod - def get_activation_port_ids_for_bias_node(node: NNCFNode) -> Tuple[int, int]: + def get_activation_port_id(node: NNCFNode, nncf_graph: NNCFGraph) -> int: """ - Returns Input Port ID and Output Port ID corresponding to activation input and output edges for + Returns input port id corresponding to activation input edge for the node. Supports only nodes that could have bias value. :param node: Node of NNCFGraph with bias value. + :param nncf_graph: NNCFGraph instance with the node. + :return: boolean port id. """ @staticmethod @@ -177,12 +163,13 @@ def get_input_name(model: TModel, node_name: str) -> str: @staticmethod @abstractmethod - def get_output_name(model: TModel, node_name: str) -> str: + def get_output_name(model: TModel, node_name: str, output_id: int) -> str: """ Returns output tensor name for the specific node. :param model: Backend-specific model. :param node_name: Name of the backend-specific node. + :param output_id: Port Id for output. :return: Output tensor name. """ @@ -207,3 +194,26 @@ def is_node_with_bias(node: NNCFNode, nncf_graph: NNCFGraph) -> bool: :param nncf_graph: NNCFGraph instance with the node. :return: Boolean indicating whether the node has a bias or not. """ + + @staticmethod + @abstractmethod + def remove_fq_from_inputs(model: TModel, nncf_graph: NNCFGraph) -> TModel: + """ + This method removes the activation Fake Quantize nodes (or Quantize-Dequantize pairs) from the model. + It's needed for the further bias shift calculation that relates on quantized weights. + + :param model: TModel instance. + :param nncf_graph: NNCFGraph instance. + :return: TModel without activation Fake Quantize nodes (or Quantize-Dequantize pairs). + """ + + @staticmethod + @abstractmethod + def insert_null_biases(model: TModel, nncf_graph: NNCFGraph) -> TModel: + """ + This method finds and inserts zero biases for the layers that should have it. + + :param model: TModel instance. + :param nncf_graph: NNCFGraph instance. 
+ :return: TModel instance with zero biases + """ diff --git a/nncf/quantization/algorithms/bias_correction/onnx_backend.py b/nncf/quantization/algorithms/bias_correction/onnx_backend.py index e4a92fc43f4..0e9ad720a10 100644 --- a/nncf/quantization/algorithms/bias_correction/onnx_backend.py +++ b/nncf/quantization/algorithms/bias_correction/onnx_backend.py @@ -16,24 +16,23 @@ from nncf.common.graph import NNCFGraph from nncf.common.graph import NNCFNode -from nncf.common.graph.operator_metatypes import OperatorMetatype from nncf.common.graph.transformations.commands import TargetType from nncf.common.tensor_statistics.collectors import ReductionShape from nncf.common.utils.backend import BackendType -from nncf.onnx.graph.metatypes.onnx_metatypes import ONNXDequantizeLinearMetatype -from nncf.onnx.graph.metatypes.onnx_metatypes import ONNXQuantizeLinearMetatype +from nncf.onnx.graph.model_utils import remove_fq_from_inputs from nncf.onnx.graph.node_utils import get_bias_value +from nncf.onnx.graph.node_utils import is_any_weight_quantized from nncf.onnx.graph.node_utils import is_node_with_bias from nncf.onnx.graph.onnx_graph import ONNXGraph from nncf.onnx.graph.transformations.command_creation import create_bias_correction_command from nncf.onnx.graph.transformations.commands import ONNXBiasCorrectionCommand from nncf.onnx.graph.transformations.commands import ONNXModelExtractionCommand +from nncf.onnx.graph.transformations.commands import ONNXNullBiasInsertionCommand from nncf.onnx.graph.transformations.commands import ONNXOutputInsertionCommand -from nncf.onnx.graph.transformations.commands import ONNXQDQNodeRemovingCommand from nncf.onnx.graph.transformations.commands import ONNXTargetPoint -from nncf.onnx.statistics.collectors import ONNXBatchStatisticCollector from nncf.onnx.statistics.collectors import ONNXMeanStatisticCollector from nncf.onnx.statistics.collectors import ONNXNNCFCollectorTensorProcessor +from nncf.onnx.statistics.collectors import ONNXRawStatisticCollector from nncf.onnx.tensor import ONNXNNCFTensor from nncf.quantization.algorithms.bias_correction.backend import ALGO_BACKENDS from nncf.quantization.algorithms.bias_correction.backend import BiasCorrectionAlgoBackend @@ -47,8 +46,8 @@ def tensor_processor(self) -> ONNXNNCFCollectorTensorProcessor: return ONNXNNCFCollectorTensorProcessor() @property - def quantizer_types(self) -> List[OperatorMetatype]: - return [ONNXQuantizeLinearMetatype, ONNXDequantizeLinearMetatype] + def types_to_insert_bias(self): + return [] @staticmethod def target_point(target_type: TargetType, target_node_name: str, port_id: int) -> ONNXTargetPoint: @@ -64,6 +63,10 @@ def create_bias_correction_command( def model_extraction_command(inputs: List[str], outputs: List[str]) -> ONNXModelExtractionCommand: return ONNXModelExtractionCommand(inputs, outputs) + @staticmethod + def create_bias_insertion_command(node: NNCFNode) -> ONNXNullBiasInsertionCommand: + return ONNXNullBiasInsertionCommand(node) + @staticmethod def output_insertion_command(nncf_graph: NNCFGraph, target_point: ONNXTargetPoint) -> ONNXOutputInsertionCommand: nncf_input_node_next_nodes = {} @@ -72,10 +75,6 @@ def output_insertion_command(nncf_graph: NNCFGraph, target_point: ONNXTargetPoin nncf_input_node_next_nodes[input_node.node_name] = [node.node_name for node in next_nodes] return ONNXOutputInsertionCommand(target_point, nncf_input_node_next_nodes) - @staticmethod - def node_removing_command(target_point: ONNXTargetPoint) -> ONNXQDQNodeRemovingCommand: - return 
ONNXQDQNodeRemovingCommand(target_point) - @staticmethod def mean_statistic_collector( reduction_shape: ReductionShape, @@ -86,16 +85,16 @@ def mean_statistic_collector( return ONNXMeanStatisticCollector(reduction_shape, num_samples, window_size) @staticmethod - def batch_statistic_collector(inplace: bool, num_samples: int = None) -> ONNXMeanStatisticCollector: - return ONNXBatchStatisticCollector(num_samples) + def raw_statistic_collector(inplace: bool, num_samples: int = None) -> ONNXMeanStatisticCollector: + return ONNXRawStatisticCollector(num_samples) @staticmethod def process_model_output(raw_data: Dict, output_name: str) -> ONNXNNCFTensor: return ONNXNNCFTensor(raw_data[output_name]) @staticmethod - def get_activation_port_ids_for_bias_node(node: NNCFNode) -> Tuple[int, int]: - return 0, 0 + def get_activation_port_id(node: NNCFNode, nncf_graph: NNCFGraph) -> Tuple[int, int]: + return 0 @staticmethod def get_bias_value(node: NNCFNode, model: onnx.ModelProto, nncf_graph: NNCFGraph) -> np.ndarray: @@ -108,18 +107,23 @@ def get_input_name(model: onnx.ModelProto, node_name: str) -> str: return node.input[0] @staticmethod - def get_output_name(model: onnx.ModelProto, node_name: str) -> List[str]: + def get_output_name(model: onnx.ModelProto, node_name: str, output_id: int) -> List[str]: onnx_graph = ONNXGraph(model) node = onnx_graph.get_node_by_name(node_name) - return node.output[0] + return node.output[output_id] @staticmethod def is_quantized_weights(node: NNCFNode, nncf_graph: NNCFGraph) -> bool: - input_nodes = [edge.from_node for edge in nncf_graph.get_input_edges(node)] - weight_port_id = node.metatype.weight_definitions.weight_port_id - weight_node = input_nodes[weight_port_id] - return weight_node.metatype == ONNXDequantizeLinearMetatype + return is_any_weight_quantized(node, nncf_graph) @staticmethod def is_node_with_bias(node: NNCFNode, nncf_graph: NNCFGraph) -> bool: return is_node_with_bias(node) + + @staticmethod + def remove_fq_from_inputs(model: onnx.ModelProto, nncf_graph: NNCFGraph) -> onnx.ModelProto: + return remove_fq_from_inputs(model, nncf_graph) + + @staticmethod + def insert_null_biases(model: onnx.ModelProto, nncf_graph: NNCFGraph) -> onnx.ModelProto: + return model diff --git a/nncf/quantization/algorithms/bias_correction/openvino_backend.py b/nncf/quantization/algorithms/bias_correction/openvino_backend.py index 1a7d10b887b..6b77d0260ea 100644 --- a/nncf/quantization/algorithms/bias_correction/openvino_backend.py +++ b/nncf/quantization/algorithms/bias_correction/openvino_backend.py @@ -9,7 +9,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
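To illustrate the `get_activation_port_id` contract described above, a simplified sketch (not the backend implementation; the helper and its inputs are hypothetical): among a node's input ports, the constant ports carry weights, and the single remaining port carries the activation:

def activation_port_sketch(input_port_ids, const_port_ids):
    # The activation input is the only port that is not backed by a constant.
    activation_ports = [port for port in input_port_ids if port not in const_port_ids]
    assert len(activation_ports) == 1, "Node must have exactly one activation input."
    return activation_ports[0]

# A convolution with its weights on port 1 receives the activation on port 0.
assert activation_port_sketch([0, 1], {1}) == 0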
-from typing import Dict, List, Optional, Tuple +from typing import Dict, List, Optional import numpy as np import openvino.runtime as ov @@ -21,18 +21,18 @@ from nncf.common.utils.backend import BackendType from nncf.experimental.common.tensor_statistics.collectors import TensorCollector from nncf.openvino.graph.metatypes.common import FAKE_QUANTIZE_OPERATIONS -from nncf.openvino.graph.metatypes.openvino_metatypes import OVOpMetatype +from nncf.openvino.graph.model_utils import insert_null_biases +from nncf.openvino.graph.model_utils import remove_fq_from_inputs from nncf.openvino.graph.node_utils import get_bias_value from nncf.openvino.graph.node_utils import is_node_with_bias from nncf.openvino.graph.transformations.command_creation import OVCommandCreator from nncf.openvino.graph.transformations.commands import OVBiasCorrectionCommand -from nncf.openvino.graph.transformations.commands import OVFQNodeRemovingCommand from nncf.openvino.graph.transformations.commands import OVModelExtractionCommand from nncf.openvino.graph.transformations.commands import OVOutputInsertionCommand from nncf.openvino.graph.transformations.commands import OVTargetPoint from nncf.openvino.statistics.collectors import OVNNCFCollectorTensorProcessor -from nncf.openvino.statistics.collectors import get_mean_batch_stat_collector from nncf.openvino.statistics.collectors import get_mean_stat_collector +from nncf.openvino.statistics.collectors import get_raw_stat_collector from nncf.openvino.tensor import OVNNCFTensor from nncf.quantization.algorithms.bias_correction.backend import ALGO_BACKENDS from nncf.quantization.algorithms.bias_correction.backend import BiasCorrectionAlgoBackend @@ -45,10 +45,6 @@ class OVBiasCorrectionAlgoBackend(BiasCorrectionAlgoBackend): def tensor_processor(self) -> OVNNCFCollectorTensorProcessor: return OVNNCFCollectorTensorProcessor - @property - def quantizer_types(self) -> List[OVOpMetatype]: - return FAKE_QUANTIZE_OPERATIONS - @staticmethod def target_point(target_type: TargetType, target_node_name: str, port_id: int) -> OVTargetPoint: return OVTargetPoint(target_type, target_node_name, port_id) @@ -67,10 +63,6 @@ def model_extraction_command(inputs: List[str], outputs: List[str]) -> OVModelEx def output_insertion_command(nncf_graph: NNCFGraph, target_point: OVTargetPoint) -> OVOutputInsertionCommand: return OVOutputInsertionCommand(target_point) - @staticmethod - def node_removing_command(target_point: OVTargetPoint) -> OVFQNodeRemovingCommand: - return OVFQNodeRemovingCommand(target_point) - @staticmethod def mean_statistic_collector( reduction_shape: ReductionShape, @@ -81,16 +73,21 @@ def mean_statistic_collector( return get_mean_stat_collector(num_samples, reduction_shape, window_size, inplace) @staticmethod - def batch_statistic_collector(inplace: bool, num_samples: int = None) -> TensorCollector: - return get_mean_batch_stat_collector(num_samples, inplace) + def raw_statistic_collector(inplace: bool, num_samples: int = None) -> TensorCollector: + return get_raw_stat_collector(num_samples, inplace) @staticmethod def process_model_output(raw_data: Dict, output_name: str) -> OVNNCFTensor: return OVNNCFTensor(raw_data[output_name]) @staticmethod - def get_activation_port_ids_for_bias_node(node: NNCFNode) -> Tuple[int, int]: - return 0, 0 + def get_activation_port_id(node: NNCFNode, nncf_graph: NNCFGraph) -> int: + constant_ports = node.layer_attributes.get_const_port_ids() + activation_ports = [ + e.input_port_id for e in nncf_graph.get_input_edges(node) if e.input_port_id not in 
constant_ports + ] + assert len(activation_ports) == 1 + return activation_ports[0] @staticmethod def get_bias_value(node: NNCFNode, model: ov.Model, nncf_graph: NNCFGraph) -> np.ndarray: @@ -113,18 +110,20 @@ def get_input_name(model: ov.Model, node_name: str) -> str: raise RuntimeError(f"Input layer not found for {node_name}") @staticmethod - def get_output_name(model: ov.Model, node_name: str) -> str: + def get_output_name(model: ov.Model, node_name: str, output_id: int) -> str: ops_dict = {op.get_friendly_name(): op for op in model.get_ops()} - for output_port in ops_dict[node_name].outputs(): - for output_input_port in output_port.get_target_inputs(): - output_node = output_input_port.get_node() - if output_node.get_type_name() == "Result": - return output_port.get_any_name() + output_port = ops_dict[node_name].output(output_id) + for output_input_port in output_port.get_target_inputs(): + output_node = output_input_port.get_node() + if output_node.get_type_name() == "Result": + return output_port.get_any_name() raise RuntimeError(f"Output layer not found for {node_name}") @staticmethod def is_quantized_weights(node: NNCFNode, nncf_graph: NNCFGraph) -> bool: + if node.layer_attributes is None: + return False const_port_ids = node.layer_attributes.get_const_port_ids() assert len(const_port_ids) == 1 weight_node = nncf_graph.get_input_edges(node)[const_port_ids[0]].from_node @@ -133,3 +132,11 @@ def is_quantized_weights(node: NNCFNode, nncf_graph: NNCFGraph) -> bool: @staticmethod def is_node_with_bias(node: NNCFNode, nncf_graph: NNCFGraph) -> bool: return is_node_with_bias(node, nncf_graph) + + @staticmethod + def remove_fq_from_inputs(model: ov.Model, nncf_graph: NNCFGraph) -> ov.Model: + return remove_fq_from_inputs(model, nncf_graph) + + @staticmethod + def insert_null_biases(model: ov.Model, nncf_graph: NNCFGraph) -> ov.Model: + return insert_null_biases(model, nncf_graph) diff --git a/nncf/experimental/openvino/quantization/__init__.py b/nncf/quantization/algorithms/channel_alignment/__init__.py similarity index 100% rename from nncf/experimental/openvino/quantization/__init__.py rename to nncf/quantization/algorithms/channel_alignment/__init__.py diff --git a/nncf/quantization/algorithms/channel_alignment/algorithm.py b/nncf/quantization/algorithms/channel_alignment/algorithm.py new file mode 100644 index 00000000000..db956997750 --- /dev/null +++ b/nncf/quantization/algorithms/channel_alignment/algorithm.py @@ -0,0 +1,481 @@ +# Copyright (c) 2023 Intel Corporation +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
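The get_activation_port_id helper added above relies on the node's layer attributes to tell constant input ports (weights, biases) apart from the single activation input. A self-contained sketch of the same selection rule, using toy stand-ins for NNCF's graph classes:

from dataclasses import dataclass
from typing import List

@dataclass
class Edge:
    input_port_id: int

def select_activation_port(input_edges: List[Edge], const_port_ids: List[int]) -> int:
    # Exactly one input edge is expected to carry the activation tensor;
    # all remaining input ports are fed by constants.
    activation_ports = [e.input_port_id for e in input_edges if e.input_port_id not in const_port_ids]
    assert len(activation_ports) == 1
    return activation_ports[0]

# E.g. a Convolution with the activation on port 0 and the weight constant on port 1:
# select_activation_port([Edge(0), Edge(1)], const_port_ids=[1]) -> 0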
+ +from typing import Any, Dict, List, Optional, Tuple, TypeVar + +import numpy as np +from tqdm import tqdm + +from nncf import Dataset +from nncf.common.factory import CommandCreatorFactory +from nncf.common.factory import ModelTransformerFactory +from nncf.common.graph.graph import NNCFGraph +from nncf.common.graph.graph import NNCFNode +from nncf.common.graph.patterns import GraphPattern +from nncf.common.graph.transformations.commands import TargetPoint +from nncf.common.graph.transformations.commands import TargetType +from nncf.common.graph.transformations.layout import TransformationLayout +from nncf.common.tensor_statistics.statistic_point import StatisticPoint +from nncf.common.tensor_statistics.statistic_point import StatisticPointsContainer +from nncf.common.utils.backend import BackendType +from nncf.common.utils.backend import get_backend +from nncf.quantization.algorithms.algorithm import Algorithm +from nncf.quantization.algorithms.channel_alignment.backend import ALGO_BACKENDS +from nncf.quantization.algorithms.channel_alignment.backend import ChannelAlignmentAlgoBackend +from nncf.quantization.algorithms.channel_alignment.backend import LayoutDescriptor + +TModel = TypeVar("TModel") + + +class ChannelAlignment(Algorithm): + """ + Post-training ChannelAlignment algorithm implementation. + + The main purpose of this algorithm to reduce quantization error + via correction the parameters of the Convolutions, FullyConnected and their biases. + Algorithm consists of following steps: + - algorithm is searching for convolution -> convolution pairs in the target model. + - minimal and maximal activations quantiles of first convolutions are collected on the target subset. + - algorithm calculates median of collected values, it is used then to adjust + convolution layers biases and weights. + - biases of matched subgraphs convolutions are adjusted, so mean points of first + convolution activations quantile medians are translated to zero. + - weights of matched subgraph convolutions are adjusted, so all first convolutions activations + which were between median of low quantile and median of high quantile are translated to [-1, 1] range. + In case processed network has one or more convolution -> convolution pairs, activations of the first convolution + become more quantization friendly as, in most cases activations mean is equal to zero and + most activations values are in range [-1, 1]. + """ + + def __init__( + self, + subset_size: int = 100, + inplace_statistics: bool = True, + backend_params: Optional[Dict[str, Any]] = None, + ): + """ + :param subset_size: Size of a subset for the statistics collection, + defaults to 100. + :param inplace_statistics: Defines wheather to calculate quantizers statistics + by backend graph operations or by default Python implementation, defaults + to True. + :param backend_params: Backend specific parameters. + """ + super().__init__() + self.subset_size = subset_size + self.inplace_statistics = inplace_statistics + self.backend_params = backend_params + self._backend_entity = None + self._quantile = 1e-4 + self._algorithm_key = f"CA_{hash(self)}" + + @property + def available_backends(self) -> Dict[str, BackendType]: + return ALGO_BACKENDS.registry_dict + + def _set_backend_entity(self, model: TModel) -> None: + """ + Creates a helper class with a backed-specific logic of the algorithm. + + :param model: Backend-specific input model. 
+ """ + model_backend = get_backend(model) + if model_backend == BackendType.OPENVINO: + from nncf.quantization.algorithms.channel_alignment.openvino_backend import OVChannelAlignmentAlgoBackend + + self._backend_entity = OVChannelAlignmentAlgoBackend() + + def apply( + self, + model: TModel, + graph: NNCFGraph, + statistic_points: Optional[StatisticPointsContainer] = None, + dataset: Optional[Dataset] = None, + ) -> TModel: + self._set_backend_entity(model) + model_transformer = ModelTransformerFactory.create(model) + transformation_layout = TransformationLayout() + + def filter_func(point: StatisticPoint) -> bool: + return self._algorithm_key in point.algorithm_to_tensor_collectors and point.target_point == target_point + + for conv_in, add_in, conv_out in tqdm(self._get_node_pairs(graph), desc="Channel alignment"): + target_point, node_in = self._get_target_point_and_node_in(conv_in, add_in) + tensor_collectors = list( + statistic_points.get_algo_statistics_for_node(node_in.node_name, filter_func, self._algorithm_key) + ) + assert len(tensor_collectors) == 1 + stat = tensor_collectors[0].get_statistics() + if stat.min_values is None or stat.max_values is None: + continue + + conv_in_cont = ConvParamsContainer(conv_in, model, graph, self._backend_entity) + conv_out_cont = ConvParamsContainer(conv_out, model, graph, self._backend_entity) + + if conv_in_cont.has_bias() and conv_out_cont.has_bias(): + amean = (stat.max_values + stat.min_values) * 0.5 + conv_in_cont.bias, conv_out_cont.bias = self._align_means( + conv_in_cont.bias, + conv_out_cont.bias, + conv_out_cont.weight, + amean, + conv_out_cont.dims, + ) + + ascale = (stat.max_values - stat.min_values).astype(np.float32) + eps = np.finfo(ascale.dtype).eps + if (ascale > eps).any(): + conv_in_cont.weight, conv_out_cont.weight, conv_in_cont.bias = self._align_scales( + conv_in_cont.weight, + conv_out_cont.weight, + conv_in_cont.bias, + ascale, + conv_in_cont.dims, + conv_out_cont.dims, + eps, + ) + + command_creator = CommandCreatorFactory.create(model) + for container in [conv_in_cont, conv_out_cont]: + if container.stated_weight.is_modified(): + transformation_layout.register( + command_creator.create_command_to_update_weight( + container.op, container.weight, container.weight_port_id + ) + ) + + if container.stated_bias.is_modified(): + transformation_layout.register( + command_creator.create_command_to_update_bias(container.op, container.bias, graph), + ) + + transformed_model = model_transformer.transform(transformation_layout) + return transformed_model + + @staticmethod + def _align_means( + bias_in_value: np.ndarray, + bias_out_value: np.ndarray, + conv_out_value: np.ndarray, + amean: np.ndarray, + conv_out_descr: LayoutDescriptor, + ) -> Tuple[np.ndarray, np.ndarray]: + """ + Function which calculates new add_in_value and add_out_value + in ChannelAlignment pattern, so output activations of the second convolution bias + are the same, but the first convolution bias is shifted with minus by amean value. + + :param bias_in_value: Bias of the first convolution in the ChannelAlignment pattern. + :param bias_out_value: Bias of the second convolution in the ChannelAlignment pattern. + :param amean: Mean value to shift first and second convolutions biases. + :param conv_out_descr: The second convolution weights layout descriptor. 
+ """ + updated_add_in_value = bias_in_value - amean.reshape(bias_in_value.shape) + + weight_dims = conv_out_value.ndim + updated_conv_out_value = conv_out_value + if weight_dims > 2: + axes = list(range(weight_dims)) + axes.remove(conv_out_descr.conv_weight_in_channels_dim) + axes.remove(conv_out_descr.conv_weight_out_channels_dim) + updated_conv_out_value = np.sum(conv_out_value, axis=tuple(axes)) + + out_channel_dim, in_channel_dim = 0, 1 + if conv_out_descr.conv_weight_out_channels_dim > conv_out_descr.conv_weight_in_channels_dim: + out_channel_dim, in_channel_dim = in_channel_dim, out_channel_dim + + updated_conv_out_value = np.transpose( + updated_conv_out_value, + (out_channel_dim, in_channel_dim), + ) + shift = updated_conv_out_value.dot(amean.reshape(updated_conv_out_value.shape[1])) + + updated_add_out_value = bias_out_value + shift.reshape(bias_out_value.shape) + return updated_add_in_value, updated_add_out_value + + @staticmethod + def _align_scales( + conv_in_value: np.ndarray, + conv_out_value: np.ndarray, + bias_in_value: Optional[np.ndarray], + ascale: np.ndarray, + conv_in_descr: LayoutDescriptor, + conv_out_descr: LayoutDescriptor, + eps: float, + ) -> Tuple[np.ndarray, np.ndarray, np.ndarray]: + """ + Function which calculates new conv_in_value, conv_out_value and bias_in_value + in ChannelAlignment pattern, so output activations of conv_out are the same, + but activations of conv_in are scale times smaller. Negative scales are skipped, + too small (<1e-2) and too big (>1e2) scales are clamped. + + :param conv_in_value: Weights of the first convolution in the ChannelAlignment pattern. + :param conv_out_value: Weights of the second convolution in the ChannelAlignment pattern. + :param bias_in_value: Bias of the first convolution in the ChannelAlignment pattern. Could be None. + :param ascale: Scale value to apply to convolutions weights. + :param conv_in_descr: The first convolution weights layout descriptor. + :param conv_out_descr: The second convolution weights layout descriptor. + :param eps: Minimal significant value > 0 for convolution weights and biases precision. 
+ """ + conv_in_shape = conv_in_value.shape + # TODO(dlyakhov) support group convolutions with groups number not in [1, out_channels] + if conv_in_shape[conv_in_descr.conv_weight_out_channels_dim] != ascale.shape[conv_in_descr.bias_channels_dim]: + return conv_in_value, conv_out_value, bias_in_value + + positive_scales_mask = ascale > eps + scale_factor = ascale / np.median(ascale[positive_scales_mask]) + scale_factor[~positive_scales_mask] = 1 + scale_factor = np.clip(scale_factor, 1e-2, 1e2) + + scale_in_shape = np.ones(len(conv_in_shape), dtype=int) + scale_in_shape[conv_in_descr.conv_weight_out_channels_dim] = scale_factor.shape[conv_in_descr.bias_channels_dim] + updated_conv_in_value = conv_in_value / scale_factor.reshape(scale_in_shape) + + if bias_in_value is not None: + updated_bias_in_value = bias_in_value / scale_factor.reshape(bias_in_value.shape) + else: + updated_bias_in_value = None + + scale_out_shape = np.ones(len(conv_out_value.shape), dtype=int) + scale_out_shape[conv_out_descr.conv_weight_in_channels_dim] = scale_factor.shape[ + conv_in_descr.bias_channels_dim + ] + updated_conv_out_value = conv_out_value * scale_factor.reshape(scale_out_shape) + return updated_conv_in_value, updated_conv_out_value, updated_bias_in_value + + def _check_consumer_conv_node(self, conv_node: NNCFNode) -> bool: + attrs = self._backend_entity.get_conv_layer_attributes(conv_node) + if attrs is None: + return False + # Check groups amount == 1 + if attrs.groups != 1: + return False + # Check node has no padding + if any(attrs.padding_values): + return False + # Check node has valid stride + if any(elem != 1 for elem in attrs.stride): + return False + # Check Node has valid dilation + if any(elem != 1 for elem in attrs.dilations): + return False + return True + + def _check_producer_conv_node(self, conv_node: NNCFNode): + return not conv_node.layer_attributes is None + + def _get_target_patterns(self) -> GraphPattern: + input_attrs = { + GraphPattern.LABEL_ATTR: "INPUT", + GraphPattern.METATYPE_ATTR: GraphPattern.NON_PATTERN_NODE_TYPE, + } + producer_attrs = { + GraphPattern.LABEL_ATTR: "CONV_PRODUCER", + GraphPattern.NODE_TYPE_ATTR: self._backend_entity.get_conv_metatypes() + + self._backend_entity.get_linear_metatypes(), + } + bias_attrs = { + GraphPattern.LABEL_ATTR: "BIAS_PRODUCER", + GraphPattern.NODE_TYPE_ATTR: self._backend_entity.get_add_metatypes(), + } + bias_const_attrs = { + GraphPattern.LABEL_ATTR: "BIAS_CONSTANT", + GraphPattern.METATYPE_ATTR: GraphPattern.NON_PATTERN_NODE_TYPE, + } + consumer_attrs = { + GraphPattern.LABEL_ATTR: "CONV_CONSUMER", + GraphPattern.NODE_TYPE_ATTR: self._backend_entity.get_conv_metatypes(), + } + conv_const_attrs = { + GraphPattern.LABEL_ATTR: "CONV_CONSTANT", + GraphPattern.METATYPE_ATTR: GraphPattern.NON_PATTERN_NODE_TYPE, + } + + use_constant = True + + def get_conv_conv_pattern() -> GraphPattern: + conv_conv = GraphPattern() + if use_constant: + input_node = conv_conv.add_node(**input_attrs) + producer_constant = conv_conv.add_node(**conv_const_attrs) + consumer_constant = conv_conv.add_node(**conv_const_attrs) + + pattern_conv_producer = conv_conv.add_node(**producer_attrs) + pattern_conv_consumer = conv_conv.add_node(**consumer_attrs) + + if use_constant: + conv_conv.add_edge(input_node, pattern_conv_producer) + conv_conv.add_edge(producer_constant, pattern_conv_producer) + conv_conv.add_edge(consumer_constant, pattern_conv_consumer) + + conv_conv.add_edge(pattern_conv_producer, pattern_conv_consumer) + return conv_conv + + def 
get_conv_add_conv_pattern() -> GraphPattern: + conv_bias_conv = GraphPattern() + if use_constant: + input_node = conv_bias_conv.add_node(**input_attrs) + producer_constant = conv_bias_conv.add_node(**conv_const_attrs) + bias_producer_const = conv_bias_conv.add_node(**bias_const_attrs) + consumer_constant = conv_bias_conv.add_node(**conv_const_attrs) + + pattern_conv_producer = conv_bias_conv.add_node(**producer_attrs) + pattern_bias_producer = conv_bias_conv.add_node(**bias_attrs) + pattern_conv_consumer = conv_bias_conv.add_node(**consumer_attrs) + + if use_constant: + conv_bias_conv.add_edge(input_node, pattern_conv_producer) + conv_bias_conv.add_edge(producer_constant, pattern_conv_producer) + conv_bias_conv.add_edge(consumer_constant, pattern_conv_consumer) + conv_bias_conv.add_edge(bias_producer_const, pattern_bias_producer) + + conv_bias_conv.add_edge(pattern_conv_producer, pattern_bias_producer) + conv_bias_conv.add_edge(pattern_bias_producer, pattern_conv_consumer) + return conv_bias_conv + + pattern = get_conv_conv_pattern() + pattern.add_pattern_alternative(get_conv_add_conv_pattern()) + return pattern + + def _get_node_pairs(self, nncf_graph: NNCFGraph) -> List[Tuple[NNCFNode, Optional[NNCFNode], NNCFNode]]: + pairs = [] + patterns = self._get_target_patterns() + for subgraph in nncf_graph.find_matching_subgraphs(patterns): + if len(subgraph) == 2: + add_in = None + conv_in, conv_out = subgraph + else: + conv_in, add_in, conv_out = subgraph + + if not self._check_producer_conv_node(conv_in): + continue + + if not self._check_consumer_conv_node(conv_out): + continue + + pairs.append((conv_in, add_in, conv_out)) + return pairs + + def _get_target_point_and_node_in(self, conv_in, add_in) -> Tuple[TargetPoint, NNCFNode]: + node_in = conv_in if add_in is None else add_in + input_port_id, _ = self._backend_entity.get_activation_port_ids_for_node(node_in) + return ( + self._backend_entity.target_point(TargetType.POST_LAYER_OPERATION, node_in.node_name, input_port_id), + node_in, + ) + + def get_statistic_points(self, model: TModel, graph: NNCFGraph) -> StatisticPointsContainer: + self._set_backend_entity(model) + + statistic_container = StatisticPointsContainer() + for conv_in, add_in, _ in self._get_node_pairs(graph): + target_point, node_in = self._get_target_point_and_node_in(conv_in, add_in) + channel_axis = conv_in.metatype.output_channel_axis + reduction_shape = list(range(len(graph.get_output_edges(node_in)[0].tensor_shape))) + reduction_shape.remove(channel_axis) + + statistic_collector = self._backend_entity.get_statistic_collector( + tuple(reduction_shape), self._quantile, self.subset_size, self.inplace_statistics + ) + statistic_container.add_statistic_point( + StatisticPoint( + target_point=target_point, + tensor_collector=statistic_collector, + algorithm=self._algorithm_key, + ) + ) + + return statistic_container + + +class StatedTensor: + """ + Tensor wrapper with additional method is_modified which is true if + given tensor was modified at least once after the initialization. + """ + + def __init__(self, value: np.ndarray): + """ + :param value: Tensor to wrap. + """ + self._value = value + self._mod_times = 0 + + @property + def val(self): + return self._value + + @val.setter + def val(self, value): + if self._value is None and value is None: + return + self._mod_times += 1 + self._value = value + + def is_modified(self) -> bool: + """ + :return: True if wrapped tensor was changed at least once after the + initialization else False. 
+ """ + return self._mod_times > 0 + + +class ConvParamsContainer: + """ + Convolution container class which is incapsulating common convolutional parameters collection. + """ + + def __init__(self, conv_op, model, nncf_graph, backend_entity: ChannelAlignmentAlgoBackend): + """ + :param conv_op: Backend-specific conv node. + :param model: Backend-specific model instance. + :param nncf_graph: NNCFGraph of given backend-specific model. + :param backend_entity: Current backend entity to retrieve parameters from given conv node + """ + _, self._weights_port_id = backend_entity.get_weights_port_ids_for_node(conv_op) + self.stated_weight = StatedTensor(backend_entity.get_weight_value(conv_op, model, self._weights_port_id)) + bias = None + if backend_entity.is_node_with_bias(conv_op, nncf_graph): + bias = backend_entity.get_bias_value(conv_op, model, nncf_graph) + self.stated_bias = StatedTensor(bias) + self._op = conv_op + self._dims = backend_entity.get_dims_descriptor(conv_op) + + @property + def weight(self): + return self.stated_weight.val + + @weight.setter + def weight(self, value): + self.stated_weight.val = value + + @property + def bias(self): + return self.stated_bias.val + + @bias.setter + def bias(self, value): + self.stated_bias.val = value + + @property + def op(self): + return self._op + + @property + def weight_port_id(self): + return self._weights_port_id + + @property + def dims(self) -> LayoutDescriptor: + return self._dims + + def has_bias(self) -> bool: + return self.bias is not None diff --git a/nncf/quantization/algorithms/channel_alignment/backend.py b/nncf/quantization/algorithms/channel_alignment/backend.py new file mode 100644 index 00000000000..cf431604b7b --- /dev/null +++ b/nncf/quantization/algorithms/channel_alignment/backend.py @@ -0,0 +1,144 @@ +# Copyright (c) 2023 Intel Corporation +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from abc import abstractmethod +from dataclasses import dataclass +from typing import Optional, Tuple, TypeVar + +import numpy as np + +from nncf.common.graph import NNCFGraph +from nncf.common.graph import NNCFNode +from nncf.common.graph.layer_attributes import ConvolutionLayerAttributes +from nncf.common.graph.transformations.commands import TargetPoint +from nncf.common.graph.transformations.commands import TargetType +from nncf.common.tensor_statistics.collectors import TensorStatisticCollectorBase +from nncf.common.utils.registry import Registry + +TModel = TypeVar("TModel") +ALGO_BACKENDS = Registry("algo_backends") + + +@dataclass +class LayoutDescriptor: + """ + Container to store convolutional and linear layers layout information. + """ + + conv_weight_out_channels_dim: int + conv_weight_in_channels_dim: int + bias_channels_dim: int + + +class ChannelAlignmentAlgoBackend: + @staticmethod + def target_point(target_type: TargetType, target_node_name: str, port_id: int) -> TargetPoint: + """ + Returns backend-specific target point. + + :param target_type: Type of the location that should be modified. 
+ :param target_node_name: Name of the located node. + :param port_id: id of the port for the statistics distribution. + :return: Backend-specific TargetPoint. + """ + + @staticmethod + @abstractmethod + def get_bias_value(node: NNCFNode, model: TModel, nncf_graph: NNCFGraph) -> np.ndarray: + """ + Returns bias value in the NumPy format of provided node. + + :param node: Node of NNCFGraph with bias value. + :param model: Backend-specific model for the initializer finding. + :param nncf_graph: NNCFGraph instance with the node. + :return: Bias value in the NumPy format. + """ + + @staticmethod + @abstractmethod + def get_weight_value(node: NNCFNode, model: TModel, port_id: int) -> np.ndarray: + """ + Returns bias value in the NumPy format of provided node. + + :param node: Node of NNCFGraph with bias value. + :param model: Backend-specific model for the initializer finding. + :param nncf_graph: NNCFGraph instance with the node. + :return: Bias value in the NumPy format. + """ + + @staticmethod + @abstractmethod + def get_activation_port_ids_for_node(node: NNCFNode) -> Tuple[int, int]: + """ + Returns Input Port ID and Output Port ID corresponding to activation input and output edges for + the node. + Supports only nodes that could have bias value. + + :param node: Node of NNCFGraph with bias value. + """ + + @staticmethod + @abstractmethod + def get_weights_port_ids_for_node(node: NNCFNode) -> Tuple[int, int]: + """ + Returns Input Port ID and Output Port ID corresponding to node weights input port id and + constant output port id the node. + + :param node: Node of NNCFGraph. + """ + + @staticmethod + @abstractmethod + def get_statistic_collector( + reduction_shape, q: float, num_samples: int, inplace: bool + ) -> TensorStatisticCollectorBase: + """ + Get backend-specific tensor collector that collects medians of minimal and maximal quantiles. + + :param reduction_shape: Target reduction shape for the reduction. + :param q: Minimal quantile for the tensor collector. + :param num_samples: Num samples to collect by the tensor collector. + :param inplace: Should statistic be calculated inplace or out of place. + :return: Backend-specific tensor collector that collects medians of minimal and maximal quantiles. + """ + + @staticmethod + @abstractmethod + def is_node_with_bias(node: NNCFNode, nncf_graph: NNCFGraph) -> bool: + """ + Checks if the node has a bias or not. + + :param node: The node to check. + :param nncf_graph: The NNCF graph. + :return: True` if `node` corresponds to the operation with bias + (bias is added to the output tensor of that operation), `False` otherwise. + """ + + @staticmethod + @abstractmethod + def get_dims_descriptor(node: NNCFNode) -> LayoutDescriptor: + """ + Return weights layout descriptor of the given node if it is possible and None otherwise. + Only convolutional and linear nodes are supported. + + :param node: NNCFNode to get layout descriptor from. + :return: Weights layout descriptor of the given node if it is possible and None otherwise. + """ + + @staticmethod + @abstractmethod + def get_conv_layer_attributes(node: NNCFNode) -> Optional[ConvolutionLayerAttributes]: + """ + Returns convolutional layer attributes of given node if they are present and None otherwise. + + :param node: NNCFNode to take convolutional layer attributes from. 
+ :return: Convolutional layer attributes of given node if they are present and None otherwise + """ diff --git a/nncf/quantization/algorithms/channel_alignment/openvino_backend.py b/nncf/quantization/algorithms/channel_alignment/openvino_backend.py new file mode 100644 index 00000000000..9ab94bb8e70 --- /dev/null +++ b/nncf/quantization/algorithms/channel_alignment/openvino_backend.py @@ -0,0 +1,148 @@ +# Copyright (c) 2023 Intel Corporation +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from typing import Optional, Tuple + +import numpy as np +import openvino.runtime as ov + +from nncf.common.graph import NNCFGraph +from nncf.common.graph import NNCFNode +from nncf.common.graph.layer_attributes import ConvolutionLayerAttributes +from nncf.common.graph.transformations.commands import TargetType +from nncf.common.tensor_statistics.collectors import TensorStatisticCollectorBase +from nncf.common.utils.backend import BackendType +from nncf.experimental.common.tensor_statistics.collectors import MedianAggregator +from nncf.experimental.common.tensor_statistics.collectors import TensorCollector +from nncf.openvino.graph.layer_attributes import OVLayerAttributes +from nncf.openvino.graph.metatypes.openvino_metatypes import OVAddMetatype +from nncf.openvino.graph.metatypes.openvino_metatypes import OVConvolutionMetatype +from nncf.openvino.graph.metatypes.openvino_metatypes import OVDepthwiseConvolutionMetatype +from nncf.openvino.graph.metatypes.openvino_metatypes import OVGroupConvolutionMetatype +from nncf.openvino.graph.metatypes.openvino_metatypes import OVMatMulMetatype +from nncf.openvino.graph.metatypes.openvino_metatypes import OVSubtractMetatype +from nncf.openvino.graph.node_utils import get_bias_value +from nncf.openvino.graph.node_utils import get_node_with_bias_value +from nncf.openvino.graph.node_utils import get_weight_value +from nncf.openvino.graph.transformations.commands import OVTargetPoint +from nncf.openvino.statistics.collectors import OVNNCFCollectorTensorProcessor +from nncf.openvino.statistics.collectors import OVQuantileReducer +from nncf.openvino.statistics.statistics import OVMinMaxTensorStatistic +from nncf.quantization.algorithms.channel_alignment.backend import ALGO_BACKENDS +from nncf.quantization.algorithms.channel_alignment.backend import ChannelAlignmentAlgoBackend +from nncf.quantization.algorithms.channel_alignment.backend import LayoutDescriptor + + +@ALGO_BACKENDS.register(BackendType.OPENVINO) +class OVChannelAlignmentAlgoBackend(ChannelAlignmentAlgoBackend): + @staticmethod + def target_point(target_type: TargetType, target_node_name: str, port_id: int) -> OVTargetPoint: + return OVTargetPoint(target_type, target_node_name, port_id) + + @staticmethod + def get_bias_value(node: NNCFNode, model: ov.Model, nncf_graph: NNCFGraph) -> np.ndarray: + return get_bias_value(node, nncf_graph, model) + + @staticmethod + def get_weight_value(node: NNCFNode, model: ov.Model, port_id: int) -> np.ndarray: + return get_weight_value(node, model, port_id) + + @staticmethod + def 
get_activation_port_ids_for_node(node: NNCFNode) -> Tuple[int, int]: + return 0, 0 + + @staticmethod + def get_weights_port_ids_for_node(node: NNCFNode) -> Tuple[int, int]: + return 0, 1 + + @staticmethod + def get_conv_metatypes(): + return [OVConvolutionMetatype, OVGroupConvolutionMetatype, OVDepthwiseConvolutionMetatype] + + @staticmethod + def get_linear_metatypes(): + return [OVMatMulMetatype] + + @staticmethod + def get_add_metatypes(): + return [OVAddMetatype, OVSubtractMetatype] + + @staticmethod + def get_statistic_collector( + reduction_shape, q: float, num_samples: int, inplace: bool + ) -> TensorStatisticCollectorBase: + tensor_collector = TensorCollector(OVMinMaxTensorStatistic) + quantile_reducer = OVQuantileReducer(reduction_shape, (q, 1 - q), inplace) + + for port_id, container_key in enumerate([OVMinMaxTensorStatistic.MIN_STAT, OVMinMaxTensorStatistic.MAX_STAT]): + aggregator = MedianAggregator(OVNNCFCollectorTensorProcessor, num_samples=num_samples) + tensor_collector.register_statistic_branch(container_key, quantile_reducer, aggregator, port_id) + return tensor_collector + + @staticmethod + def is_node_with_bias(node: NNCFNode, nncf_graph: NNCFGraph) -> bool: + next_nodes = nncf_graph.get_next_nodes(node) + if not next_nodes: + return False + + add_node = next_nodes[0] + if add_node.metatype != OVAddMetatype: + return False + + bias_constant = get_node_with_bias_value(add_node, nncf_graph) + return bias_constant is not None + + @staticmethod + def get_dims_descriptor(node: NNCFNode): + if node.metatype == OVConvolutionMetatype: + return LayoutDescriptor( + conv_weight_out_channels_dim=0, + conv_weight_in_channels_dim=1, + bias_channels_dim=node.metatype.output_channel_axis, + ) + if node.metatype in [OVGroupConvolutionMetatype, OVDepthwiseConvolutionMetatype]: + # Using groups dim as output channels dim for ChannelAlignment algorithm + # TODO(dlyakhov) support group convolutions with groups number not in [1, out_channels] + return LayoutDescriptor( + conv_weight_out_channels_dim=0, + conv_weight_in_channels_dim=2, + bias_channels_dim=node.metatype.output_channel_axis, + ) + if node.metatype == OVMatMulMetatype: + if node.layer_attributes is None: + raise RuntimeError(f"Attempt to align matmul node {node.node_name} that have no any constant inputs") + layer_attributes: OVLayerAttributes = node.layer_attributes + key = layer_attributes.get_const_port_ids() + assert len(key) == 1 + key = key[0] + const_attr = layer_attributes.constant_attributes[key] + a, b = list(range(len(const_attr["shape"])))[-2:] + assert key in [a, b] + if key == a: + out_ch_dim = a + in_ch_dim = b + else: + out_ch_dim = b + in_ch_dim = a + if const_attr.get("transpose", False): + out_ch_dim, in_ch_dim = in_ch_dim, out_ch_dim + return LayoutDescriptor( + conv_weight_in_channels_dim=in_ch_dim, + conv_weight_out_channels_dim=out_ch_dim, + bias_channels_dim=node.metatype.output_channel_axis, + ) + raise RuntimeError(f"Could not retrieve dims description for node {node} with metatype {node.metatype}") + + @staticmethod + def get_conv_layer_attributes(node: NNCFNode) -> Optional[ConvolutionLayerAttributes]: + if node.layer_attributes is None: + return None + return node.layer_attributes.layer_attributes[1] diff --git a/nncf/quantization/algorithms/fast_bias_correction/algorithm.py b/nncf/quantization/algorithms/fast_bias_correction/algorithm.py index 6ae0a5e3632..4a294b5a0f7 100644 --- a/nncf/quantization/algorithms/fast_bias_correction/algorithm.py +++ 
b/nncf/quantization/algorithms/fast_bias_correction/algorithm.py @@ -9,21 +9,19 @@ # See the License for the specific language governing permissions and # limitations under the License. -from typing import Any, Dict, List, Optional, Tuple, TypeVar +from typing import Any, Dict, List, Optional, Tuple, TypeVar, Union -import numpy as np from tqdm import tqdm from nncf import Dataset from nncf.common.factory import EngineFactory from nncf.common.factory import ModelTransformerFactory -from nncf.common.factory import NNCFGraphFactory +from nncf.common.graph.graph import NNCFGraph from nncf.common.graph.model_transformer import ModelTransformer from nncf.common.graph.transformations.commands import TargetPoint from nncf.common.graph.transformations.commands import TargetType from nncf.common.graph.transformations.layout import TransformationLayout from nncf.common.logging import nncf_logger -from nncf.common.tensor import NNCFTensor from nncf.common.tensor_statistics.statistic_point import StatisticPoint from nncf.common.tensor_statistics.statistic_point import StatisticPointsContainer from nncf.common.utils.backend import BackendType @@ -32,6 +30,7 @@ from nncf.quantization.algorithms.fast_bias_correction.backend import ALGO_BACKENDS TModel = TypeVar("TModel") +TTensor = TypeVar("TTensor") FAST_BIAS_CORRECTION_THRESHOLD = 2 @@ -83,8 +82,8 @@ def __init__( self.apply_for_all_nodes = apply_for_all_nodes self.inplace_statistics = inplace_statistics self.backend_params = backend_params - self.nncf_graph = None self._backend_entity = None + self._algorithm_key = f"FBC_{hash(self)}" if self.apply_for_all_nodes: raise RuntimeError("FastBiasCorrection algorithm does not support apply_for_all_nodes=True yet") @@ -110,38 +109,47 @@ def _set_backend_entity(self, model: TModel) -> None: ) self._backend_entity = OVFastBiasCorrectionAlgoBackend() + elif model_backend == BackendType.TORCH: + from nncf.quantization.algorithms.fast_bias_correction.torch_backend import PTFastBiasCorrectionAlgoBackend + + self._backend_entity = PTFastBiasCorrectionAlgoBackend() else: raise RuntimeError( "Cannot return backend-specific entity because {} is not supported!".format(model_backend) ) - def _apply( + def apply( self, model: TModel, + graph: NNCFGraph, statistic_points: Optional[StatisticPointsContainer] = None, dataset: Optional[Dataset] = None, ) -> TModel: self._set_backend_entity(model) - nncf_graph = NNCFGraphFactory.create(model) - node_and_bias_value = ( - (node, self._backend_entity.get_bias_value(node, nncf_graph, model)) - for node in nncf_graph.get_all_nodes() - if self._backend_entity.is_node_with_bias(node, nncf_graph) - ) model_transformer = ModelTransformerFactory.create(model) + + node_and_bias_value = [ + (node, self._backend_entity.get_bias_value(node, graph, model)) + for node in graph.get_all_nodes() + if self._backend_entity.is_node_with_bias(node, graph) + ] + # Fill `node_and_new_bias_value` list. It is a correspondence between nodes # for which we should update bias and new bias values. 
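+        # For every such node the correction below boils down to:
+        #   bias_shift = mean_per_channel(fp_outputs) - mean_per_channel(quantized_outputs)
+        #   updated_bias = bias_value + bias_shift
+        # and the new bias is kept only if max(|bias_shift / bias_value|) stays below
+        # FAST_BIAS_CORRECTION_THRESHOLD (see _get_bias_shift and get_bias_shift_magnitude).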
node_and_new_bias_value = [] - for node, bias_value in tqdm(list(node_and_bias_value), desc="Biases correction"): + + for node, bias_value in tqdm(node_and_bias_value, desc="Applying Fast Bias correction"): node_name = node.node_name - if not self._backend_entity.is_quantized_weights(node, nncf_graph): + if not self._backend_entity.is_quantized_weights(node, graph): nncf_logger.debug(f"Skipping node {node_name} because weights were not quantized") continue - input_fp, input_shape = self._get_fp_inputs(statistic_points, node_name) - output_fp = self._get_fp_outputs(statistic_points, node_name) + in_node_name, out_node_name = self._backend_entity.get_node_names_for_input_output_statistics(node, graph) + + input_fp, input_shape = self._get_fp_inputs(statistic_points, in_node_name) + output_fp = self._get_fp_outputs(statistic_points, out_node_name) extracted_model = self._extract_submodel(model_transformer, node_name) @@ -151,7 +159,7 @@ def _apply( if bias_value.ndim > 1: # Make index positive channel_axis = range(bias_value.ndim)[channel_axis] - input_blob = self._create_input_data(input_shape, input_fp, sub_input_name, channel_axis) + input_blob = self._backend_entity.create_input_data(input_shape, input_fp, sub_input_name, channel_axis) bias_shift = self._get_bias_shift( model=extracted_model, input_blob=input_blob, @@ -160,12 +168,9 @@ def _apply( output_name=sub_output_name, ) - if bias_value.ndim > 1: - axes = [i for i in range(bias_value.ndim) if i != channel_axis] - bias_shift = np.expand_dims(bias_shift, axes) - + bias_shift = self.reshape_bias_shift(bias_shift, bias_value, channel_axis) updated_bias = bias_value + bias_shift - magnitude = self._get_bias_shift_magnitude(bias_value, updated_bias) + magnitude = self._backend_entity.get_bias_shift_magnitude(bias_value, updated_bias) if magnitude < self.threshold: nncf_logger.debug(f"{node_name} bias would be changed") @@ -176,13 +181,27 @@ def _apply( # Create commands of bias correction and apply them to the model. transformation_layout = TransformationLayout() for node, bias_value in node_and_new_bias_value: - transformation_layout.register( - self._backend_entity.create_bias_correction_command(node, bias_value, nncf_graph) - ) + transformation_layout.register(self._backend_entity.create_bias_correction_command(node, bias_value, graph)) transformed_model = model_transformer.transform(transformation_layout) return transformed_model + def reshape_bias_shift(self, bias_shift: TTensor, bias_value: TTensor, channel_axis: int) -> TTensor: + """ + Reshape bias_shift tensor in case of dimensions of bias_value is more then 1. + + :param bias_shift: Bias shift tensor. + :param bias_value: Bias value tensor. + :param channel_axis: Axis to update bias. + + :return TTensor: Updated bias_shift. + """ + if bias_value.ndim > 1: + new_shape = [1] * bias_value.ndim + new_shape[channel_axis] = bias_shift.shape[0] + bias_shift = self._backend_entity.reshape_tensor(bias_shift, new_shape) + return bias_shift + def _get_fp_inputs(self, statistic_points: StatisticPointsContainer, node_name: str) -> Tuple[List, List]: """ Makes out per-layer needed data from the floating-point collected statistics. 
@@ -193,22 +212,22 @@ def _get_fp_inputs(self, statistic_points: StatisticPointsContainer, node_name: """ def input_filter_func(point): - return ( - FastBiasCorrection in point.algorithm_to_tensor_collectors - and point.target_point.type == TargetType.PRE_LAYER_OPERATION - ) + return self._algorithm_key in point.algorithm_to_tensor_collectors and point.target_point.type in [ + TargetType.PRE_LAYER_OPERATION, + TargetType.OPERATOR_PRE_HOOK, + ] input_fp = [] input_shape = [] for tensor_collector in statistic_points.get_algo_statistics_for_node( - node_name, input_filter_func, FastBiasCorrection + node_name, input_filter_func, self._algorithm_key ): statistics = tensor_collector.get_statistics() input_fp.extend(statistics.mean_values) input_shape.extend(statistics.shape) return input_fp, input_shape - def _get_fp_outputs(self, statistic_points: StatisticPointsContainer, node_name: str) -> List[np.ndarray]: + def _get_fp_outputs(self, statistic_points: StatisticPointsContainer, node_name: str) -> List[TTensor]: """ Makes out per-layer needed data from the floating-point collected statistics. @@ -218,14 +237,14 @@ def _get_fp_outputs(self, statistic_points: StatisticPointsContainer, node_name: """ def output_filter_func(point): - return ( - FastBiasCorrection in point.algorithm_to_tensor_collectors - and point.target_point.type == TargetType.POST_LAYER_OPERATION - ) + return self._algorithm_key in point.algorithm_to_tensor_collectors and point.target_point.type in [ + TargetType.POST_LAYER_OPERATION, + TargetType.OPERATOR_POST_HOOK, + ] output_fp = [] for tensor_collector in statistic_points.get_algo_statistics_for_node( - node_name, output_filter_func, FastBiasCorrection + node_name, output_filter_func, self._algorithm_key ): output_fp.extend(tensor_collector.get_statistics().mean_values) return output_fp @@ -256,32 +275,17 @@ def _add_statistic_point(self, container: StatisticPointsContainer, point: Targe reduction_shape=axis, num_samples=self.subset_size, inplace=self.inplace_statistics ) container.add_statistic_point( - StatisticPoint(target_point=point, tensor_collector=stat_collector, algorithm=FastBiasCorrection) + StatisticPoint(target_point=point, tensor_collector=stat_collector, algorithm=self._algorithm_key) ) - def _create_input_data( - self, input_shape: Tuple[int], input_fp: List[np.ndarray], input_name: str, channel_axis: int - ) -> Dict[str, NNCFTensor]: - """ - Creates input blob for the bias shift calculation. - :param input_shape: Input shape for the blob. - :param input_fp: Input data for the blob. - :param input_name: Name for the output dictionary. - :param channel_axis: Axis to fill the blob with provided data. - :return: The dictionary of the blob by input name. - """ - input_blob = self._backend_entity.create_blob(input_shape, input_fp, channel_axis) - input_data = {input_name: input_blob} - return input_data - def _get_bias_shift( self, model: TModel, - input_blob: Dict[str, NNCFTensor], + input_blob: Union[TTensor, Dict[str, TTensor]], channel_axis: Tuple[int], - output_fp: List[np.ndarray], + output_fp: List[TTensor], output_name: str, - ) -> np.ndarray: + ) -> TTensor: """ Calculates updated bias. 
@@ -297,38 +301,25 @@ def _get_bias_shift( raw_output = engine.infer(input_blob) q_outputs = self._backend_entity.process_model_output(raw_output, output_name) q_outputs = self._backend_entity.tensor_processor.mean_per_channel(q_outputs, channel_axis).tensor - bias_shift = np.array(output_fp) - q_outputs + bias_shift = self._backend_entity.post_process_output_data(output_fp) - q_outputs return bias_shift - @staticmethod - def _get_bias_shift_magnitude(current_bias_value: np.ndarray, updated_bias_value: np.ndarray) -> float: - """ - Calculates bias shift magnitude based on the current and updated values. - - :param current_bias_value: The original bias value. - :param updated_bias_value: The updated bias value. - :return: Magnitude between original and updated bias values. - """ - bias_shift_magnitude = np.inf - if np.count_nonzero(current_bias_value == 0) == 0: - bias_shift_magnitude = np.max(np.abs((updated_bias_value - current_bias_value) / current_bias_value)) - return bias_shift_magnitude - - def get_statistic_points(self, model: TModel) -> StatisticPointsContainer: + def get_statistic_points(self, model: TModel, graph: NNCFGraph) -> StatisticPointsContainer: self._set_backend_entity(model) - nncf_graph = NNCFGraphFactory.create(model) if self.nncf_graph is None else self.nncf_graph nodes_with_bias = [ - node for node in nncf_graph.get_all_nodes() if self._backend_entity.is_node_with_bias(node, nncf_graph) + node for node in graph.get_all_nodes() if self._backend_entity.is_node_with_bias(node, graph) ] statistic_container = StatisticPointsContainer() for node in nodes_with_bias: input_port_id, output_port_id = self._backend_entity.get_activation_port_ids_for_bias_node(node) + in_node_name, out_node_name = self._backend_entity.get_node_names_for_input_output_statistics(node, graph) + pre_layer_statistic_point = self._backend_entity.target_point( - TargetType.PRE_LAYER_OPERATION, node.node_name, input_port_id + TargetType.PRE_LAYER_OPERATION, in_node_name, input_port_id ) post_layer_statistic_point = self._backend_entity.target_point( - TargetType.POST_LAYER_OPERATION, node.node_name, output_port_id + TargetType.POST_LAYER_OPERATION, out_node_name, output_port_id ) channel_axis = node.metatype.output_channel_axis diff --git a/nncf/quantization/algorithms/fast_bias_correction/backend.py b/nncf/quantization/algorithms/fast_bias_correction/backend.py index 136c6f4f800..ca25adc2fb2 100644 --- a/nncf/quantization/algorithms/fast_bias_correction/backend.py +++ b/nncf/quantization/algorithms/fast_bias_correction/backend.py @@ -11,7 +11,7 @@ from abc import ABC from abc import abstractmethod -from typing import List, Optional, Tuple, TypeVar +from typing import Dict, List, Optional, Tuple, TypeVar, Union import numpy as np @@ -26,18 +26,12 @@ from nncf.common.utils.registry import Registry TModel = TypeVar("TModel") +TTensor = TypeVar("TTensor") OutputType = TypeVar("OutputType") ALGO_BACKENDS = Registry("algo_backends") class FastBiasCorrectionAlgoBackend(ABC): - @property - @abstractmethod - def operation_metatypes(self): - """ - Property for the backend-specific metatypes. 
- """ - @property @abstractmethod def tensor_processor(self): @@ -59,7 +53,9 @@ def target_point(target_type: TargetType, target_node_name: str, port_id: int) - @staticmethod @abstractmethod - def create_bias_correction_command(node: NNCFNode, bias_value: np.ndarray, nncf_graph: NNCFGraph): + def create_bias_correction_command( + node: NNCFNode, bias_value: np.ndarray, nncf_graph: NNCFGraph + ) -> TransformationCommand: """ Creates backend-specific command to update bias value. @@ -75,7 +71,7 @@ def model_extraction_command(inputs: List[str], outputs: List[str]) -> Transform """ Returns backend-specific command to extract sub-model based on input & output names. - :param inputs: List of the input names for sub-model beggining. + :param inputs: List of the input names for sub-model beginning. :param outputs: List of the output names for sub-model end. :return: Backend-specific TransformationCommand for the model extraction. """ @@ -110,14 +106,17 @@ def get_sub_input_output_names(subgraph: TModel) -> Tuple[str, str]: @staticmethod @abstractmethod - def create_blob(shape: Tuple[int], data: List[np.ndarray], channel_axis: int) -> np.ndarray: + def create_input_data( + shape: Tuple[int], data: List[TTensor], input_name: str, channel_axis: int + ) -> Union[Dict[str, TTensor], TTensor]: """ - Creates the backend-specific (because of layout) blob. + Creates input data for the bias shift calculation. :param shape: Shape of the blob. :param data: Data to fill the blob. + :param input_name: Name for the output dictionary. :param channel_axis: Axis to fill the blob with provided data. - :return: np.ndarray blob. + :return: Created data. """ @staticmethod @@ -152,6 +151,7 @@ def is_quantized_weights(node: NNCFNode, nncf_graph: NNCFGraph) -> bool: :param node: NNCFNode to check. :param nncf_graph: NNCFGraph instance. + :return: boolean indicating whether the node has a quantized weights or not """ @@ -176,3 +176,48 @@ def is_node_with_bias(node: NNCFNode, nncf_graph: NNCFGraph) -> bool: :param nncf_graph: NNCFGraph that contains node. :return: Boolean indicating whether the node has a bias or not. """ + + @staticmethod + @abstractmethod + def get_bias_shift_magnitude(current_bias_value: TTensor, updated_bias_value: TTensor) -> float: + """ + Calculates bias shift magnitude based on the current and updated values. + + :param current_bias_value: The original bias value. + :param updated_bias_value: The updated bias value. + :return: Magnitude between original and updated bias values. + """ + + @staticmethod + @abstractmethod + def post_process_output_data(data: List[TTensor]) -> TTensor: + """ + Convert data to backend specific type. + + :param data: List of data. + :return: Converted data. + """ + + @staticmethod + @abstractmethod + def reshape_tensor(data: TTensor, new_shape: List[int]) -> TTensor: + """ + Reshape tensor. + + :param data: Tensor. + :param new_shape: New shape. + """ + + @staticmethod + @abstractmethod + def get_node_names_for_input_output_statistics(node: NNCFNode, nncf_graph: NNCFGraph) -> Tuple[str, str]: + """ + Return name of nodes to collect statistics. + + :param node: NNCFNode to check. + :param nncf_graph: NNCFGraph instance. 
+ + :return: + Name of node to collect input statistics + Name of node to collect output statistics + """ diff --git a/nncf/quantization/algorithms/fast_bias_correction/onnx_backend.py b/nncf/quantization/algorithms/fast_bias_correction/onnx_backend.py index 71ac7468d4f..733018e9bd2 100644 --- a/nncf/quantization/algorithms/fast_bias_correction/onnx_backend.py +++ b/nncf/quantization/algorithms/fast_bias_correction/onnx_backend.py @@ -19,14 +19,13 @@ from nncf.common.graph.transformations.commands import TargetType from nncf.common.tensor_statistics.collectors import ReductionShape from nncf.common.utils.backend import BackendType -from nncf.common.utils.registry import Registry -from nncf.onnx.graph.metatypes.onnx_metatypes import ONNX_OPERATION_METATYPES -from nncf.onnx.graph.metatypes.onnx_metatypes import ONNXDequantizeLinearMetatype from nncf.onnx.graph.node_utils import get_bias_value +from nncf.onnx.graph.node_utils import is_any_weight_quantized from nncf.onnx.graph.node_utils import is_node_with_bias from nncf.onnx.graph.transformations.command_creation import create_bias_correction_command from nncf.onnx.graph.transformations.commands import ONNXBiasCorrectionCommand from nncf.onnx.graph.transformations.commands import ONNXModelExtractionCommand +from nncf.onnx.graph.transformations.commands import ONNXNullBiasInsertionCommand from nncf.onnx.graph.transformations.commands import ONNXTargetPoint from nncf.onnx.statistics.collectors import ONNXMeanStatisticCollector from nncf.onnx.statistics.collectors import ONNXNNCFCollectorTensorProcessor @@ -38,8 +37,8 @@ @ALGO_BACKENDS.register(BackendType.ONNX) class ONNXFastBiasCorrectionAlgoBackend(FastBiasCorrectionAlgoBackend): @property - def operation_metatypes(self) -> Registry: - return ONNX_OPERATION_METATYPES + def types_to_insert_bias(self): + return [] @property def tensor_processor(self) -> ONNXNNCFCollectorTensorProcessor: @@ -49,6 +48,10 @@ def tensor_processor(self) -> ONNXNNCFCollectorTensorProcessor: def target_point(target_type: TargetType, target_node_name: str, port_id: int) -> ONNXTargetPoint: return ONNXTargetPoint(target_type, target_node_name, port_id) + @staticmethod + def create_bias_insertion_command(node: NNCFNode) -> ONNXNullBiasInsertionCommand: + return ONNXNullBiasInsertionCommand(node) + @staticmethod def create_bias_correction_command( node: NNCFNode, bias_value: np.ndarray, nncf_graph: NNCFGraph @@ -73,13 +76,16 @@ def get_sub_input_output_names(subgraph: onnx.ModelProto) -> Tuple[str, str]: return subgraph.graph.input[0].name, subgraph.graph.output[0].name @staticmethod - def create_blob(shape: Tuple[int], data: List[np.ndarray], channel_axis: int) -> np.ndarray: + def create_input_data( + shape: Tuple[int], data: List[np.ndarray], input_name: str, channel_axis: int + ) -> Dict[str, np.array]: blob = np.zeros(shape) for j, idx in enumerate(np.ndindex(blob.shape[channel_axis])): index = tuple(slice(None) if i != channel_axis else idx for i in range(blob.ndim)) blob[index] = data[j] blob = blob.astype(data[0].dtype) - return blob + input_data = {input_name: blob} + return input_data @staticmethod def get_bias_value(node: NNCFNode, nncf_graph: NNCFGraph, model: onnx.ModelProto) -> np.ndarray: @@ -95,11 +101,27 @@ def process_model_output(raw_data: Dict, output_name: str) -> ONNXNNCFTensor: @staticmethod def is_quantized_weights(node: NNCFNode, nncf_graph: NNCFGraph) -> bool: - input_nodes = [edge.from_node for edge in nncf_graph.get_input_edges(node)] - weight_port_id = 
node.metatype.weight_definitions.weight_port_id - weight_node = input_nodes[weight_port_id] - return weight_node.metatype == ONNXDequantizeLinearMetatype + return is_any_weight_quantized(node, nncf_graph) @staticmethod def is_node_with_bias(node: NNCFNode, nncf_graph: NNCFGraph) -> bool: return is_node_with_bias(node) + + @staticmethod + def get_bias_shift_magnitude(current_bias_value: np.ndarray, updated_bias_value: np.ndarray) -> float: + bias_shift_magnitude = np.inf + if np.count_nonzero(current_bias_value == 0) == 0: + bias_shift_magnitude = np.max(np.abs((updated_bias_value - current_bias_value) / current_bias_value)) + return bias_shift_magnitude + + @staticmethod + def post_process_output_data(data: List[np.ndarray]) -> np.ndarray: + return np.array(data) + + @staticmethod + def reshape_tensor(data: np.ndarray, new_shape: List[int]) -> np.ndarray: + return data.reshape(new_shape) + + @staticmethod + def get_node_names_for_input_output_statistics(node: NNCFNode, nncf_graph: NNCFGraph) -> Tuple[str, str]: + return node.node_name, node.node_name diff --git a/nncf/quantization/algorithms/fast_bias_correction/openvino_backend.py b/nncf/quantization/algorithms/fast_bias_correction/openvino_backend.py index 1057b7cbc17..f76ef9a0c72 100644 --- a/nncf/quantization/algorithms/fast_bias_correction/openvino_backend.py +++ b/nncf/quantization/algorithms/fast_bias_correction/openvino_backend.py @@ -19,10 +19,8 @@ from nncf.common.graph.transformations.commands import TargetType from nncf.common.tensor_statistics.collectors import ReductionShape from nncf.common.utils.backend import BackendType -from nncf.common.utils.registry import Registry from nncf.experimental.common.tensor_statistics.collectors import TensorCollector from nncf.openvino.graph.metatypes.common import FAKE_QUANTIZE_OPERATIONS -from nncf.openvino.graph.metatypes.openvino_metatypes import OV_OPERATOR_METATYPES from nncf.openvino.graph.node_utils import get_bias_value from nncf.openvino.graph.node_utils import is_node_with_bias from nncf.openvino.graph.transformations.command_creation import OVCommandCreator @@ -38,10 +36,6 @@ @ALGO_BACKENDS.register(BackendType.OPENVINO) class OVFastBiasCorrectionAlgoBackend(FastBiasCorrectionAlgoBackend): - @property - def operation_metatypes(self) -> Registry: - return OV_OPERATOR_METATYPES - @property def tensor_processor(self) -> OVNNCFCollectorTensorProcessor: return OVNNCFCollectorTensorProcessor @@ -74,13 +68,16 @@ def get_sub_input_output_names(subgraph: ov.Model) -> Tuple[str, str]: return subgraph.inputs[0].get_any_name(), subgraph.outputs[0].get_any_name() @staticmethod - def create_blob(shape: Tuple[int], data: List[np.ndarray], channel_axis: int) -> np.ndarray: + def create_input_data( + shape: Tuple[int], data: List[np.ndarray], input_name: str, channel_axis: int + ) -> Dict[str, np.ndarray]: blob = np.zeros(shape) for j, idx in enumerate(np.ndindex(blob.shape[channel_axis])): index = tuple(slice(None) if i != channel_axis else idx for i in range(blob.ndim)) blob[index] = data[j] blob = blob.astype(data[0].dtype) - return blob + input_data = {input_name: blob} + return input_data @staticmethod def get_bias_value(node: NNCFNode, nncf_graph: NNCFGraph, model: ov.Model) -> np.ndarray: @@ -107,3 +104,22 @@ def process_model_output(raw_data: Dict, output_name: str) -> OVNNCFTensor: @staticmethod def is_node_with_bias(node: NNCFNode, nncf_graph: NNCFGraph) -> bool: return is_node_with_bias(node, nncf_graph) + + @staticmethod + def get_bias_shift_magnitude(current_bias_value: np.ndarray, 
updated_bias_value: np.ndarray) -> float: + bias_shift_magnitude = np.inf + if np.count_nonzero(current_bias_value == 0) == 0: + bias_shift_magnitude = np.max(np.abs((updated_bias_value - current_bias_value) / current_bias_value)) + return bias_shift_magnitude + + @staticmethod + def post_process_output_data(data: List[np.ndarray]) -> np.ndarray: + return np.array(data) + + @staticmethod + def reshape_tensor(data: np.ndarray, new_shape: List[int]) -> np.ndarray: + return data.reshape(new_shape) + + @staticmethod + def get_node_names_for_input_output_statistics(node: NNCFNode, nncf_graph: NNCFGraph) -> Tuple[str, str]: + return node.node_name, node.node_name diff --git a/nncf/quantization/algorithms/fast_bias_correction/torch_backend.py b/nncf/quantization/algorithms/fast_bias_correction/torch_backend.py new file mode 100644 index 00000000000..b7316724db0 --- /dev/null +++ b/nncf/quantization/algorithms/fast_bias_correction/torch_backend.py @@ -0,0 +1,132 @@ +# Copyright (c) 2023 Intel Corporation +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from typing import Dict, List, Optional, Tuple + +import numpy as np +import torch + +from nncf.common.graph import NNCFGraph +from nncf.common.graph import NNCFNode +from nncf.common.graph.definitions import NNCFGraphNodeType +from nncf.common.graph.transformations.commands import TargetType +from nncf.common.tensor_statistics.collectors import ReductionShape +from nncf.common.utils.backend import BackendType +from nncf.quantization.algorithms.fast_bias_correction.backend import ALGO_BACKENDS +from nncf.quantization.algorithms.fast_bias_correction.backend import FastBiasCorrectionAlgoBackend +from nncf.torch.graph.transformations.command_creation import create_bias_correction_command +from nncf.torch.graph.transformations.commands import PTBiasCorrectionCommand +from nncf.torch.graph.transformations.commands import PTModelExtractionWithFusedBiasCommand +from nncf.torch.graph.transformations.commands import PTTargetPoint +from nncf.torch.model_analyzer import get_fused_bias_value +from nncf.torch.model_analyzer import get_potential_fused_node +from nncf.torch.model_analyzer import is_node_with_fused_bias +from nncf.torch.model_analyzer import is_quantized_weights +from nncf.torch.nncf_network import NNCFNetwork +from nncf.torch.tensor import PTNNCFTensor +from nncf.torch.tensor_statistics.collectors import PTMeanStatisticCollector +from nncf.torch.tensor_statistics.collectors import PTNNCFCollectorTensorProcessor + + +@ALGO_BACKENDS.register(BackendType.TORCH) +class PTFastBiasCorrectionAlgoBackend(FastBiasCorrectionAlgoBackend): + TARGET_TYPE_TO_PT_INS_TYPE_MAP = { + TargetType.PRE_LAYER_OPERATION: TargetType.OPERATOR_PRE_HOOK, + TargetType.POST_LAYER_OPERATION: TargetType.OPERATOR_POST_HOOK, + } + + @property + def tensor_processor(self) -> PTNNCFCollectorTensorProcessor: + return PTNNCFCollectorTensorProcessor() + + @staticmethod + def target_point(target_type: TargetType, target_node_name: str, port_id: int) -> PTTargetPoint: + if 
NNCFGraphNodeType.INPUT_NODE in target_node_name or target_type == TargetType.POST_LAYER_OPERATION: + port_id = None + if target_type in PTFastBiasCorrectionAlgoBackend.TARGET_TYPE_TO_PT_INS_TYPE_MAP: + target_type = PTFastBiasCorrectionAlgoBackend.TARGET_TYPE_TO_PT_INS_TYPE_MAP[target_type] + return PTTargetPoint(target_type, target_node_name, input_port_id=port_id) + + @staticmethod + def create_bias_correction_command( + node: NNCFNode, bias_value: np.ndarray, nncf_graph: NNCFGraph + ) -> PTBiasCorrectionCommand: + return create_bias_correction_command(node, bias_value) + + @staticmethod + def model_extraction_command(inputs: List[str], outputs: List[str]) -> PTModelExtractionWithFusedBiasCommand: + return PTModelExtractionWithFusedBiasCommand(inputs[0]) + + @staticmethod + def mean_statistic_collector( + reduction_shape: ReductionShape, + inplace: bool, + num_samples: Optional[int] = None, + window_size: Optional[int] = None, + ) -> PTMeanStatisticCollector: + return PTMeanStatisticCollector(reduction_shape, num_samples, window_size) + + @staticmethod + def get_sub_input_output_names(subgraph: NNCFNetwork) -> Tuple[str, str]: + # Pytorch does not have name for extracted node + return None, None + + @staticmethod + def create_input_data( + shape: Tuple[int], data: List[torch.Tensor], input_name: str, channel_axis: int + ) -> torch.Tensor: + blob = torch.zeros(shape, dtype=data[0].dtype) + for j, idx in enumerate(np.ndindex(blob.shape[channel_axis])): + index = tuple(slice(None) if i != channel_axis else idx for i in range(blob.ndim)) + blob[index] = data[j] + return blob + + @staticmethod + def get_bias_value(node: NNCFNode, nncf_graph: NNCFGraph, model: NNCFNetwork) -> np.ndarray: + return get_fused_bias_value(node, model) + + @staticmethod + def get_activation_port_ids_for_bias_node(node: NNCFNode) -> Tuple[int, int]: + return 0, 0 + + @staticmethod + def process_model_output(raw_data: Dict, output_name: str) -> PTNNCFTensor: + return PTNNCFTensor(raw_data) + + @staticmethod + def is_quantized_weights(node: NNCFNode, nncf_graph: NNCFGraph) -> bool: + return is_quantized_weights(node, nncf_graph) + + @staticmethod + def is_node_with_bias(node: NNCFNode, nncf_graph: NNCFGraph) -> bool: + return is_node_with_fused_bias(node, nncf_graph) + + @staticmethod + def get_bias_shift_magnitude(current_bias_value: torch.Tensor, updated_bias_value: torch.Tensor) -> float: + bias_shift_magnitude = torch.inf + if torch.count_nonzero(current_bias_value == 0) == 0: + bias_shift_magnitude = torch.max(torch.abs((updated_bias_value - current_bias_value) / current_bias_value)) + return bias_shift_magnitude + + @staticmethod + def post_process_output_data(data: List[torch.Tensor]) -> torch.Tensor: + return torch.Tensor(data) + + @staticmethod + def reshape_tensor(data: torch.Tensor, new_shape: List[int]) -> torch.Tensor: + return data.reshape(new_shape) + + @staticmethod + def get_node_names_for_input_output_statistics(node: NNCFNode, nncf_graph: NNCFGraph) -> Tuple[str, str]: + input_node_name = node.node_name + next_norm_node = get_potential_fused_node(input_node_name, nncf_graph) + output_node_name = next_norm_node.node_name if next_norm_node else input_node_name + return input_node_name, output_node_name diff --git a/nncf/quantization/algorithms/hyperparameter_tuner/__init__.py b/nncf/quantization/algorithms/hyperparameter_tuner/__init__.py new file mode 100644 index 00000000000..9b29b47534a --- /dev/null +++ b/nncf/quantization/algorithms/hyperparameter_tuner/__init__.py @@ -0,0 +1,10 @@ +# Copyright 
(c) 2023 Intel Corporation +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. diff --git a/nncf/quantization/algorithms/hyperparameter_tuner/algorithm.py b/nncf/quantization/algorithms/hyperparameter_tuner/algorithm.py new file mode 100644 index 00000000000..fba7b984278 --- /dev/null +++ b/nncf/quantization/algorithms/hyperparameter_tuner/algorithm.py @@ -0,0 +1,371 @@ +# Copyright (c) 2023 Intel Corporation +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import copy +import dataclasses +import functools +import itertools +import operator +from typing import Any, Callable, Dict, Iterable, List, Tuple, Type, TypeVar, Union + +from nncf.common.factory import NNCFGraphFactory +from nncf.common.factory import StatisticsAggregatorFactory +from nncf.common.graph.graph import NNCFGraph +from nncf.common.logging import nncf_logger +from nncf.common.utils.backend import get_backend +from nncf.common.utils.timer import timer +from nncf.data.dataset import Dataset +from nncf.quantization.algorithms.accuracy_control.evaluator import Evaluator +from nncf.quantization.algorithms.accuracy_control.evaluator import MetricResults +from nncf.quantization.algorithms.accuracy_control.rank_functions import create_normalized_mse_func +from nncf.quantization.algorithms.accuracy_control.subset_selection import select_subset +from nncf.quantization.algorithms.algorithm import Algorithm + +TModel = TypeVar("TModel") +TTensor = TypeVar("TTensor") +CombinationKey = Tuple[int, ...] +Combination = Dict[str, Any] + + +def create_combinations(param_grid: Dict[str, List[Any]]) -> Dict[CombinationKey, Combination]: + """ + Creates combinations as follows: + * All keys in `param_grid` are numbered using integers from 0 to N = len(param_grid)-1. The order of keys is used. Let key_j be the key from param_grid.keys() that corresponds to integer j in {0, 1, ..., N}. + * The set of combination keys (CK) is created as the Cartesian product of the following sets + + CK = {None, 0, 1, ..., num_val_0 - 1} x {None, 0, 1, ..., num_val_1 - 1} x ... x {None, 0, 1, ..., num_val_N - 1}, + + where num_val_j is the number of values in param_grid[key_j]. + * Creates a combination for each combination key. If combination_key[i] is None, the parameter named key_i is not changed. Otherwise, the param_grid[key_i][combination_key[i]] value is included in the combination as the new value of the parameter named key_i. + + :param param_grid: Dictionary with parameter names as keys and lists of + parameter settings to try as values. + :return: Created combinations.
+ """ + simple_changes = [] + indices = [] + + for param_name, values in param_grid.items(): + indices.append([None, *range(len(values))]) + simple_changes.append([{param_name: v} for v in values]) + + combinations: Dict[CombinationKey, Combination] = {} + + for combination_key in itertools.product(*indices): + combination: Combination = {} + for param_idx, value_idx in enumerate(combination_key): + if value_idx is None: + continue + combination.update(simple_changes[param_idx][value_idx]) + + combinations[combination_key] = combination + + return combinations + + +def is_dataclass_instance(obj: Any) -> bool: + """ + Returns `True` if object is a dataclass instance, `False` otherwise. + + :param obj: Object to check. + :return: `True` if object is a dataclass instance, `False` otherwise. + """ + return dataclasses.is_dataclass(obj) and not isinstance(obj, type) + + +def apply_combination(init_params: Dict[str, Any], combination: Combination) -> Dict[str, Any]: + """ + Applies combination of parameters to initial parameters. + + :param init_params: Initial set of parameters. + :param combination: Combination of parameters. + :return: Returns `init_params` where some values of parameters were changed according to + provided combination. + """ + DELIMITER = ":" + params = copy.deepcopy(init_params) + for param_key, param_value in combination.items(): + if DELIMITER in param_key: + main_key, *path_to_attr, attr_name = param_key.split(DELIMITER) + obj = params[main_key] + assert is_dataclass_instance(obj) + for name in path_to_attr: + obj = getattr(obj, name) + assert is_dataclass_instance(obj) + setattr(obj, attr_name, param_value) + else: + params[param_key] = param_value + + return params + + +def print_combination_and_score(title: str, combination: Combination, combination_score: float) -> None: + """ + Prints combination and score. + + :param title: Title. + :param combination: Combination to print. + :param combination_score: Score of combination. + """ + if not combination: + message = "Parameters were not changed" + else: + message = ", ".join(f"{name} = {v}" for name, v in combination.items()) + message = f"{title} {message}" + + nncf_logger.info(message) + nncf_logger.info(f"Score: {combination_score}") + + +def find_best_combination( + combinations: Dict[CombinationKey, Combination], + combination_score_func: Callable[[CombinationKey], float], + param_grid: Dict[str, List[Any]], +) -> CombinationKey: + """ + Finds best combination. + + :param combinations: Combinations. + :param combination_score_func: Combination score function. + :param param_grid: Dictionary with parameters names as keys and list of + parameter settings to try as values. + :return: Best combination key. 
+ """ + best_combination_key = tuple(None for _ in param_grid) + best_combination_score = None + + for param_idx, (param_name, values) in enumerate(param_grid.items()): + nncf_logger.info(f"Start search best value for the '{param_name}' parameter") + values_indices = [None, *range(len(values))] + param_best_combination_key = None + param_best_combination_score = None + + for value_idx in values_indices: + combination_key = (*best_combination_key[:param_idx], value_idx, *best_combination_key[param_idx + 1 :]) + combination_score = combination_score_func(combination_key) + + if param_best_combination_score is None or param_best_combination_score < combination_score: + param_best_combination_score = combination_score + param_best_combination_key = combination_key + + print_combination_and_score( + "Current combination of parameters:", combinations[combination_key], combination_score + ) + + if best_combination_score is None or best_combination_score <= param_best_combination_score: + best_combination_score = param_best_combination_score + best_combination_key = param_best_combination_key + + print_combination_and_score( + "Best combination of parameters:", combinations[best_combination_key], best_combination_score + ) + + return best_combination_key + + +class HyperparameterTuner: + """ + This algorithm is used to find a best combination of parameters from `param_grid`. + + The `param_grid` in simple case is a dictionary with parameters names + as keys and list of parameter settings to try as values. + + param_grid = { + "param_name": [0.1, 0.2], + } + + The parameters names should be same as in `algorithm_cls.__init__()` method. + In case when "param_name" parameter is a dataclass object there is a way to specify settings + to try for his fields using marker ":" + + param_grid = { + "param_name:field_a": [10, 20], + "param_name:field_b:x": [0.1, 0.2], + } + + In the example above the `param_name` and "param_name:field_b" parameters are dataclasses. + This rule is applied recursively. + + The algorithm works as follow: let we have the following `param_grid` + + param_grid = { + "param_name_0" : [0.2, 0.4, 0.6], + "param_name_1:x": [-1, -2, -3], + "param_name_2": [True, False], + } + + First of all, algorithm finds the best value for parameter "param_name_0". + Further, taking into account the found value, the best value for the "param_name_1:x" parameter + is sought. After that, taking into account the found values for "param_name_0" and "param_name_1:x" + parameters, the best value for the "param_name_2" is sought. + """ + + def __init__( + self, + algorithm_cls: Type[Algorithm], + init_params: Dict[str, Any], + param_grid: Dict[str, List[Any]], + calibration_dataset: Dataset, + validation_fn: Callable[[Any, Iterable[Any]], Tuple[float, Union[None, List[float], List[List[TTensor]]]]], + subset_size: int, + initial_metric_results: MetricResults, + quantized_metric_results: MetricResults, + ): + """ + :param algorithm_cls: Class of algorithm. + :param init_params: Initial set of parameters used to create algorithm. + :param param_grid: Dictionary with parameters names as keys and list of + parameter settings to try as values. + :param calibration_dataset: Dataset used to collect statistics for algorithm. + :param validation_fn: Validation function used to validated model. + :param subset_size: Number of data items that should be selected + from the dataset and used to validate model. + :param initial_metric_results: Metric results for initial model. 
+ :param quantized_metric_results: Metric results for the model quantized with `init_params`. + """ + self._algorithm_cls = algorithm_cls + self._init_params = init_params + self._param_grid = param_grid + self._calibration_dataset = calibration_dataset + self._evaluator = Evaluator(validation_fn) + self._subset_size = subset_size + self._initial_metric_results = initial_metric_results + self._quantized_metric_results = quantized_metric_results + + self._is_metric_mode = isinstance(self._initial_metric_results.values_for_each_item[0], float) + + # Will be initialized inside `apply()` method + self._error_fn = None + + # Will be initialized inside `_prepare_algorithms()` method + self._algorithms: Dict[CombinationKey, Algorithm] = {} + self._statistic_points = None + + self._calculated_scores: Dict[CombinationKey, float] = {} + + def apply(self, model: TModel, validation_dataset: Dataset) -> TModel: + """ + Applies the algorithm to the input model. + + :param model: Input model. + :param validation_dataset: Dataset used to validate the resulting model. + :return: Resulting model. + """ + if self._is_metric_mode: + self._error_fn = operator.sub + else: + self._error_fn = create_normalized_mse_func(get_backend(model)) + + subset_indices = select_subset( + self._subset_size, + self._initial_metric_results.values_for_each_item, + self._quantized_metric_results.values_for_each_item, + self._error_fn, + ) + + combinations = create_combinations(self._param_grid) + + initial_graph = NNCFGraphFactory.create(model) + + nncf_logger.info("Start initialization of algorithms") + with timer(): + self._prepare_algorithms(model, initial_graph, combinations) + + combination_score_fn = functools.partial( + self._calculate_combination_score, + initial_model=model, + initial_graph=initial_graph, + dataset=validation_dataset, + subset_indices=subset_indices, + ) + + nncf_logger.info("Start search best combination of parameters") + with timer(): + best_combination_key = find_best_combination(combinations, combination_score_fn, self._param_grid) + + algorithm = self._algorithms[best_combination_key] + result_model = algorithm.apply(model, initial_graph, self._statistic_points) + + return result_model + + def _prepare_algorithms( + self, initial_model: TModel, initial_graph: NNCFGraph, combinations: Dict[CombinationKey, Combination] + ) -> None: + """ + Creates an algorithm for each combination of parameters and collects statistics for + the created algorithms. + + :param initial_model: Input model used to collect statistics for algorithms. + :param initial_graph: NNCFGraph of the input model. + :param combinations: Combinations of parameters. + """ + for combination_key, combination in combinations.items(): + kwargs = apply_combination(self._init_params, combination) + self._algorithms[combination_key] = self._algorithm_cls(**kwargs) + + # Collect required statistics for created algorithms + stats_aggregator = StatisticsAggregatorFactory.create(initial_model, self._calibration_dataset) + for algorithm in self._algorithms.values(): + statistic_points = algorithm.get_statistic_points(initial_model, initial_graph) + stats_aggregator.register_statistic_points(statistic_points) + stats_aggregator.collect_statistics(initial_model, initial_graph) + self._statistic_points = stats_aggregator.statistic_points + + def _calculate_combination_score( + self, + combination_key: CombinationKey, + initial_model: TModel, + initial_graph: NNCFGraph, + dataset: Dataset, + subset_indices: List[int], + ) -> float: + """ + Calculates the score for the provided combination. + + :param combination_key: Combination key.
+ :param initial_model: Input model. + :param dataset: Dataset used to select data items for validation. + :param subset_indices: Zero-based indices of data items that should be selected + from the dataset and used to validate model. + :return: Calculated score. + """ + if combination_key in self._calculated_scores: + return self._calculated_scores[combination_key] + + algorithm = self._algorithms[combination_key] + model = algorithm.apply(initial_model, initial_graph, self._statistic_points) + score = self._validate_model(model, dataset, subset_indices) + self._calculated_scores[combination_key] = score + + return score + + def _validate_model(self, model: TModel, dataset: Dataset, subset_indices: List[int]) -> float: + """ + Validates input model on subset. + + :param model: Input model. + :param dataset: Dataset used to select data items for validation. + :param subset_indices: Zero-based indices of data items that should be selected + from the dataset and used to validate model. + :return: Calculated metric. + """ + if self._is_metric_mode: + metric_value, _ = self._evaluator.validate(model, dataset, subset_indices) + else: + approximate_outputs = self._evaluator.collect_values_for_each_item(model, dataset, subset_indices) + reference_outputs = [self._initial_metric_results.values_for_each_item[i] for i in subset_indices] + errors = [self._error_fn(a, b) for a, b in zip(reference_outputs, approximate_outputs)] + metric_value = sum(errors) / len(errors) + + return metric_value diff --git a/nncf/quantization/algorithms/hyperparameter_tuner/param_grid.py b/nncf/quantization/algorithms/hyperparameter_tuner/param_grid.py new file mode 100644 index 00000000000..fe0baa1b833 --- /dev/null +++ b/nncf/quantization/algorithms/hyperparameter_tuner/param_grid.py @@ -0,0 +1,74 @@ +# Copyright (c) 2023 Intel Corporation +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import itertools +from typing import Any, Dict + +from nncf.common.quantization.structs import QuantizationPreset +from nncf.quantization.range_estimator import AggregatorType +from nncf.quantization.range_estimator import RangeEstimatorParameters +from nncf.quantization.range_estimator import StatisticsCollectorParameters +from nncf.quantization.range_estimator import StatisticsType + + +def get_quantization_param_grid() -> Dict[str, Any]: + """ + Returns params grid for post-training quantization algorithm. 
+ """ + min_param_values = [ + StatisticsCollectorParameters( + statistics_type=StatisticsType.MIN, + aggregator_type=AggregatorType.MIN, + ), + StatisticsCollectorParameters( + statistics_type=StatisticsType.QUANTILE, + aggregator_type=AggregatorType.MEAN, + quantile_outlier_prob=10e-4, + ), + StatisticsCollectorParameters( + statistics_type=StatisticsType.QUANTILE, + aggregator_type=AggregatorType.MEAN, + quantile_outlier_prob=10e-5, + ), + ] + max_param_values = [ + StatisticsCollectorParameters( + statistics_type=StatisticsType.MAX, + aggregator_type=AggregatorType.MAX, + ), + StatisticsCollectorParameters( + statistics_type=StatisticsType.QUANTILE, + aggregator_type=AggregatorType.MEAN, + quantile_outlier_prob=10e-4, + ), + StatisticsCollectorParameters( + statistics_type=StatisticsType.QUANTILE, + aggregator_type=AggregatorType.MEAN, + quantile_outlier_prob=10e-5, + ), + ] + + param_grid = { + "preset": [QuantizationPreset.PERFORMANCE, QuantizationPreset.MIXED], + "fast_bias_correction": [True, False], + "advanced_parameters:weights_range_estimator_params": [ + RangeEstimatorParameters( + min=StatisticsCollectorParameters(statistics_type=StatisticsType.MIN), + max=StatisticsCollectorParameters(statistics_type=StatisticsType.MAX), + ) + ], + "advanced_parameters:activations_range_estimator_params": [ + RangeEstimatorParameters(min=min_v, max=max_v) + for min_v, max_v in itertools.product(min_param_values, max_param_values) + ], + } + + return param_grid diff --git a/nncf/quantization/algorithms/min_max/algorithm.py b/nncf/quantization/algorithms/min_max/algorithm.py index e3820e69c33..72718bbeb68 100644 --- a/nncf/quantization/algorithms/min_max/algorithm.py +++ b/nncf/quantization/algorithms/min_max/algorithm.py @@ -14,9 +14,10 @@ from copy import deepcopy from typing import Any, Dict, List, Optional, OrderedDict, Set, TypeVar +import numpy as np + from nncf import Dataset from nncf.common.factory import ModelTransformerFactory -from nncf.common.factory import NNCFGraphFactory from nncf.common.graph.graph import NNCFGraph from nncf.common.graph.graph import NNCFNode from nncf.common.graph.operator_metatypes import OperatorMetatype @@ -30,6 +31,7 @@ from nncf.common.logging import nncf_logger from nncf.common.quantization.config_assignment import assign_qconfig_lists_to_modules from nncf.common.quantization.quantizer_propagation.solver import QuantizerPropagationSolver +from nncf.common.quantization.quantizer_propagation.structs import IgnoreReason from nncf.common.quantization.quantizer_setup import SingleConfigQuantizationPoint from nncf.common.quantization.quantizer_setup import SingleConfigQuantizerSetup from nncf.common.quantization.structs import QuantizableWeightedLayerNode @@ -89,7 +91,7 @@ class MinMaxQuantization(Algorithm): The algorithm modifies the model by inserting additional nodes, which emulates the quantization of the data flow. The algorithm calibrates the parameters of the inserted nodes by collecting the statistics in the insertion points. - The modified model is returned after the work of the algorithm, which can be perfomed via the original framework. + The modified model is returned after the work of the algorithm, which can be performed via the original framework. It is expected that the inference of the obtained model in the int8 mode would be faster than the original model. """ @@ -130,7 +132,7 @@ def __init__( to True. :param activations_quantization_params: Quantization parameters for model activations. 
- :param weights_quantization_params: Quantization parameters for model weigths. + :param weights_quantization_params: Quantization parameters for model weights. :param activations_range_estimator_params: Quantization range estimation parameters for activation. :param weights_range_estimator_params: Quantization range estimation parameters @@ -163,41 +165,41 @@ def __init__( quantizer_group, preset, self._quantization_params[quantizer_group] ) - self.nncf_graph = None # It prevents the duplicate weight quantizers from being added. # It can happen when you have layers that share the identical weight tensor. self._quantization_target_points_to_qconfig = ( collections.OrderedDict() ) # type: OrderedDict[TargetPoint, QuantizerConfig] self._unified_scale_groups = [] + self._algorithm_key = f"MMQ_{hash(self)}" @property def available_backends(self) -> Dict[str, BackendType]: return ALGO_BACKENDS.registry_dict def _get_quantizer_constraints( - self, group: QuantizerGroup, preset: QuantizationPreset, quantizaton_params: Optional[QuantizationParameters] + self, group: QuantizerGroup, preset: QuantizationPreset, quantization_params: Optional[QuantizationParameters] ) -> QuantizationConstraints: """ Returns QuantizationConstraints for the provided quantizer group. :param group: Quantizer group. :param preset: Quantization preset. - :param quantizaton_parameters: Quantization parameters. + :param quantization_params: Quantization parameters. :return: QuantizationConstraints. """ constraints = {"mode": preset.get_params_configured_by_preset(group)["mode"]} - if quantizaton_params is None: + if quantization_params is None: return QuantizationConstraints(**constraints) - if quantizaton_params.mode is not None: - constraints["mode"] = quantizaton_params.mode - if quantizaton_params.num_bits is not None: - constraints["num_bits"] = quantizaton_params.num_bits - if quantizaton_params.per_channel is not None: - constraints["per_channel"] = quantizaton_params.per_channel - if quantizaton_params.signedness_to_force is not None: - constraints["signedness_to_force"] = quantizaton_params.signedness_to_force + if quantization_params.mode is not None: + constraints["mode"] = quantization_params.mode + if quantization_params.num_bits is not None: + constraints["num_bits"] = quantization_params.num_bits + if quantization_params.per_channel is not None: + constraints["per_channel"] = quantization_params.per_channel + if quantization_params.signedness_to_force is not None: + constraints["signedness_to_force"] = quantization_params.signedness_to_force return QuantizationConstraints(**constraints) @@ -259,13 +261,21 @@ def _get_range_estimator_parameters( return RangeEstimatorParameters(min_statistic_collector, max_statistic_collector) def _get_stat_collector( - self, nncf_graph: NNCFGraph, target_point: TargetPoint, quantizer_config: QuantizerConfig + self, + nncf_graph: NNCFGraph, + target_point: TargetPoint, + quantizer_config: QuantizerConfig, + num_samples: int, ) -> TensorStatisticCollectorBase: """ - Creates and returns statistic collector instance based on the quantizer's configuration. + Creates and returns a statistic collector based on the quantizer's configuration. - :param quantizer_config: QuantizerConfig instance for the current layer. - :return: One of the TensorStatisticCollectorBase instances + :param nncf_graph: NNCFGraph instance. + :param target_point: Target point indicates where statistics should be collected. 
+ :param quantizer_config: Configuration of a quantizer layer, + defining the configuration of created statistic collector. + :param num_samples: Number of samples to collect from the 'target_point'. + :return: Statistic Collector. """ range_estimator_params = self._get_range_estimator_parameters(target_point, quantizer_config) @@ -275,7 +285,7 @@ def _get_stat_collector( target_point, quantizer_config, inplace=self._inplace_statistics, - num_samples=self._subset_size, + num_samples=num_samples, ) def _get_default_qconfig(self, constraints: QuantizationConstraints = None) -> QuantizerConfig: @@ -290,7 +300,9 @@ def _get_default_qconfig(self, constraints: QuantizationConstraints = None) -> Q qconfig = constraints.apply_constraints_to(qconfig) return qconfig - def _get_ignored_names(self, nncf_graph: NNCFGraph, ignored_patterns: GraphPattern) -> Set[str]: + def _get_ignored_names( + self, nncf_graph: NNCFGraph, inference_nncf_graph: NNCFGraph, ignored_patterns: GraphPattern + ) -> Dict[str, IgnoreReason]: """ Returns all node names that are ignored for quantization: Firstly, the ignored names are obtained from user-defined ignored the scope. @@ -298,40 +310,50 @@ def _get_ignored_names(self, nncf_graph: NNCFGraph, ignored_patterns: GraphPatte Lastly, the ignored names are updated from ignored_patterns. :param nncf_graph: NNCFGraph instance. + :param inference_nncf_graph: Inference graph without constant flows. :param ignored_patterns: Ignored patterns. - :return: Node names are ignored for quantization. + :return: Ignored node names and ignore reason for quantization. """ - model_type = self._model_type - device = self._target_device - ignored_names = set() + user_ignored_names = get_ignored_node_names_from_ignored_scope( + self._ignored_scope, nncf_graph, strict=self._ignored_scope.validate + ) - ignored_names.update(get_ignored_node_names_from_ignored_scope(self._ignored_scope, nncf_graph)) + ignored_scope = self._get_ignored_scope(inference_nncf_graph, ignored_patterns) + autogenerated_ignored_names = get_ignored_node_names_from_ignored_scope(ignored_scope, nncf_graph, strict=False) - model_type_ignore_scope = self._backend_entity.get_ignored_scope(model_type, device) + ignored_names = {name: IgnoreReason.AUTOGENERATED for name in autogenerated_ignored_names} - ignored_names.update( - get_ignored_node_names_from_ignored_scope(model_type_ignore_scope, nncf_graph, strict=False) + ignored_names_by_layer_attributes = self._backend_entity.get_ignored_names_by_layer_attributes( + inference_nncf_graph ) + ignored_names.update({name: IgnoreReason.AUTOGENERATED for name in ignored_names_by_layer_attributes}) - ignored_scope = self._get_ignored_scope(nncf_graph, ignored_patterns) - - ignored_names.update(get_ignored_node_names_from_ignored_scope(ignored_scope, nncf_graph)) + # User ignored scope has higher priority + ignored_names.update({name: IgnoreReason.USER_REQUESTED for name in user_ignored_names}) return ignored_names - def _get_ignored_scope(self, nncf_graph: NNCFGraph, ignored_patterns: GraphPattern) -> IgnoredScope: + def _get_ignored_scope(self, inference_nncf_graph: NNCFGraph, ignored_patterns: GraphPattern) -> IgnoredScope: """ Returns IgnoredScope with node names matched ignored_patterns. - :param nncf_graph: NNCFGraph instance. + :param nncf_graph: Inference graph without constant flows. :param ignored_patterns: Ignored patterns. - :return: IgnoredScope with all node names mathced ignored_patterns. + :return: IgnoredScope with all node names matched ignored_patterns. 
""" - nncf_node_names = [nncf_node.node_name for nncf_node in nncf_graph.find_matching_nodes(ignored_patterns)] + nncf_node_names = [] + for subgraph in inference_nncf_graph.find_matching_subgraphs(ignored_patterns, strict=False): + for nncf_node in subgraph: + nncf_node_names.append(nncf_node.node_name) + return IgnoredScope(names=nncf_node_names) def _get_quantizer_setup( - self, nncf_graph: NNCFGraph, hw_patterns: GraphPattern, ignored_patterns: GraphPattern + self, + nncf_graph: NNCFGraph, + inference_nncf_graph: NNCFGraph, + hw_patterns: GraphPattern, + ignored_patterns: GraphPattern, ) -> SingleConfigQuantizerSetup: """ Returns SingleConfigQuantizerSetup instance based on the input NNCFGraph. @@ -345,7 +367,7 @@ def _get_quantizer_setup( hw_config_path = self._backend_entity.hw_config.get_path_to_hw_config(hw_config_type) hw_config = self._backend_entity.hw_config.from_json(hw_config_path) - ignored_names = self._get_ignored_names(nncf_graph, ignored_patterns) + ignored_names = self._get_ignored_names(nncf_graph, inference_nncf_graph, ignored_patterns) weight_nodes = self._backend_entity.get_weight_nodes(nncf_graph) default_weight_qconfig = self._get_default_qconfig(self._global_quantizer_constraints[QuantizerGroup.WEIGHTS]) @@ -360,15 +382,13 @@ def _get_quantizer_setup( QuantizableWeightedLayerNode(node, qconf_list) for node, qconf_list in weighted_node_and_qconf_lists.items() ] - inference_nncf_graph = transform_to_inference_graph( - deepcopy(nncf_graph), self._backend_entity.shapeof_metatypes, self._backend_entity.read_variable_metatypes - ) ip_graph = InsertionPointGraph(inference_nncf_graph) ip_graph = ip_graph.get_ip_graph_with_merged_hw_optimized_operations(hw_patterns) post_processing_types = self._backend_entity.post_processing_metatypes + metatypes_to_ignore = self._backend_entity.get_ignored_metatypes(self._model_type, self._target_device) solver = QuantizerPropagationSolver( activation_ignored_scopes=ignored_names, - weight_ignored_scopes=ignored_names, + weight_ignored_scopes=list(ignored_names.keys()), hw_config=hw_config, default_trait_to_metatype_map=self._backend_entity.quant_trait_op_dict, default_qconfig_list=[ @@ -378,6 +398,8 @@ def _get_quantizer_setup( quantize_outputs=self._quantize_outputs, global_constraints=self._global_quantizer_constraints, post_processing_marker_metatypes=post_processing_types, + metatypes_to_ignore=metatypes_to_ignore, + scales_unification_map=self._backend_entity.scales_unification_map, ) quantization_proposal = solver.run_on_ip_graph(ip_graph) @@ -397,13 +419,8 @@ def _add_weight_quantization_target_point( :param model: Model in the original framework. :param nncf_graph: The built NNCFGraph of the model. 
""" - node_name = quantization_point.insertion_point.target_node_name - node = nncf_graph.get_node_by_name(node_name) - weights_port_ids = self._backend_entity.get_weight_tensor_port_ids(node) - for port_id in weights_port_ids: - weight_quantization_target_point = self._backend_entity.target_point( - TargetType.OPERATION_WITH_WEIGHTS, node_name, port_id - ) + weight_quantization_target_points = self._get_weight_quantization_target_points(quantization_point, nncf_graph) + for weight_quantization_target_point in weight_quantization_target_points: self._quantization_target_points_to_qconfig[weight_quantization_target_point] = quantization_point.qconfig def _add_activation_quantization_target_point(self, quantization_point: SingleConfigQuantizationPoint) -> None: @@ -416,13 +433,32 @@ def _add_activation_quantization_target_point(self, quantization_point: SingleCo activation_quantization_target_point = self._get_activation_quantization_target_point(quantization_point) self._quantization_target_points_to_qconfig[activation_quantization_target_point] = quantization_point.qconfig + def _get_weight_quantization_target_points( + self, quantization_point: SingleConfigQuantizationPoint, nncf_graph: NNCFGraph + ) -> List[SingleConfigQuantizationPoint]: + """ + Returns weight quantization target points to the set of existing points. + + :param quantization_point: SingleConfigQuantizationPoint for the needed layer. + :param nncf_graph: NNCFGraph instance for working with the graph and nodes. + :return: List of SingleConfigQuantizationPoints for the needed layer. + """ + weight_quantization_target_points = [] + node_name = quantization_point.insertion_point.target_node_name + node = nncf_graph.get_node_by_name(node_name) + weights_port_ids = self._backend_entity.get_weight_tensor_port_ids(node) + for port_id in weights_port_ids: + weight_quantization_target_points.append( + self._backend_entity.target_point(TargetType.OPERATION_WITH_WEIGHTS, node_name, port_id) + ) + return weight_quantization_target_points + def _get_activation_quantization_target_point( self, quantization_point: SingleConfigQuantizationPoint ) -> SingleConfigQuantizationPoint: """ Returns activation quantization target point to the set of existing points. - :param nncf_graph: NNCFGraph instance for working with the graph and nodes. :param quantization_point: SingleConfigQuantizationPoint for the needed layer. :return: SingleConfigQuantizationPoint for the needed layer. """ @@ -441,7 +477,9 @@ def _get_activation_quantization_target_point( ) return activation_quantization_target_point - def _get_quantization_target_points(self, model: TModel) -> OrderedDict[TargetPoint, QuantizerConfig]: + def _get_quantization_target_points( + self, model: TModel, nncf_graph: NNCFGraph + ) -> OrderedDict[TargetPoint, QuantizerConfig]: """ Returns Quantization Target Points. In the Compression Pipeline logic NNCF assumes that the compression pipeline works only on the single model. @@ -453,8 +491,6 @@ def _get_quantization_target_points(self, model: TModel) -> OrderedDict[TargetPo :param nncf_graph: NNCFGraph instance. :return: Set of Quantization Target Points. 
""" - nncf_graph = NNCFGraphFactory.create(model) if self.nncf_graph is None else self.nncf_graph - if self._quantization_target_points_to_qconfig: return self._quantization_target_points_to_qconfig, self._unified_scale_groups backend = get_backend(model) @@ -464,9 +500,15 @@ def _get_quantization_target_points(self, model: TModel) -> OrderedDict[TargetPo backend=backend, device=device, model_type=model_type ) hw_patterns = PatternsManager.get_full_hw_pattern_graph(backend=backend, device=device, model_type=model_type) - quantizer_setup = self._get_quantizer_setup(nncf_graph, hw_patterns, ignored_patterns) + + inference_nncf_graph = transform_to_inference_graph( + deepcopy(nncf_graph), self._backend_entity.shapeof_metatypes, self._backend_entity.read_variable_metatypes + ) + + quantizer_setup = self._get_quantizer_setup(nncf_graph, inference_nncf_graph, hw_patterns, ignored_patterns) self._apply_model_type_pass(self._model_type, quantizer_setup, nncf_graph) - self._unified_scale_groups = self._collect_unified_groups(quantizer_setup) + self._apply_device_pass(self._target_device, quantizer_setup, inference_nncf_graph) + self._unified_scale_groups = self._collect_unified_groups(quantizer_setup, nncf_graph) quantization_points = list(quantizer_setup.quantization_points.values()) quantization_points = self._topological_sort_quantization_points(quantization_points, nncf_graph) for quantization_point in quantization_points: @@ -478,11 +520,14 @@ def _get_quantization_target_points(self, model: TModel) -> OrderedDict[TargetPo raise RuntimeError("Incorrect quantization point") return self._quantization_target_points_to_qconfig, self._unified_scale_groups - def _collect_unified_groups(self, quantizer_setup: SingleConfigQuantizerSetup) -> List[List[TargetPoint]]: + def _collect_unified_groups( + self, quantizer_setup: SingleConfigQuantizerSetup, nncf_graph: NNCFGraph + ) -> List[List[TargetPoint]]: """ Collects the group of quantizers for unification. :param quantizer_setup: SingleConfigQuantizerSetup instance. + :param nncf_graph: NNCFGraph instance. :return: List with the groups of the TargetPoints. 
""" unified_scale_groups = [] @@ -496,7 +541,9 @@ def _collect_unified_groups(self, quantizer_setup: SingleConfigQuantizerSetup) - activation_target_point = self._get_activation_quantization_target_point(quantization_point) unified_scale_group.append(activation_target_point) else: - raise RuntimeError("Only activation quantizers can be unified.") + weight_target_points = self._get_weight_quantization_target_points(quantization_point, nncf_graph) + for weight_target_point in weight_target_points: + unified_scale_group.append(weight_target_point) unified_scale_groups.append(unified_scale_group) return unified_scale_groups @@ -569,31 +616,31 @@ def _get_quantization_points_overflow_fix( ) if overflow_fix == OverflowFix.FIRST_LAYER: weight_quantization_points = _filter_target_points_by_metatypes( - weight_quantization_points, self._backend_entity.conv_metatype, nncf_graph + weight_quantization_points, self._backend_entity.conv_metatypes, nncf_graph ) for input_node in nncf_graph.get_input_nodes(): nodes = self._get_first_quantized_convolutions(weight_quantization_points, input_node, nncf_graph) output.update(nodes) return output - def _apply( + def apply( self, model: TModel, + graph: NNCFGraph, statistic_points: Optional[StatisticPointsContainer] = None, dataset: Optional[Dataset] = None, ) -> TModel: transformation_layout = TransformationLayout() - nncf_graph = NNCFGraphFactory.create(model) if self.nncf_graph is None else self.nncf_graph model_transformer = ModelTransformerFactory.create(model) - quantization_target_points, unified_scale_groups = self._get_quantization_target_points(model) + quantization_target_points, unified_scale_groups = self._get_quantization_target_points(model, graph) quantization_points_overflow_fix = self._get_quantization_points_overflow_fix( - self._overflow_fix, quantization_target_points, nncf_graph + self._overflow_fix, quantization_target_points, graph ) weight_layer_names = set() def filter_func(point: StatisticPoint) -> bool: return ( - MinMaxQuantization in point.algorithm_to_tensor_collectors + self._algorithm_key in point.algorithm_to_tensor_collectors and point.target_point == quantization_target_point ) @@ -603,9 +650,12 @@ def filter_func(point: StatisticPoint) -> bool: for quantization_target_point in unified_scale_group: target_node_name = quantization_target_point.target_node_name for tensor_collector in statistic_points.get_algo_statistics_for_node( - target_node_name, filter_func, MinMaxQuantization + target_node_name, filter_func, self._algorithm_key ): - group_statistics.append(tensor_collector.get_statistics()) + statistics = tensor_collector.get_statistics() + if statistics.min_values is None or statistics.max_values is None: + raise RuntimeError(f"Statistics were not collected for the node {target_node_name}") + group_statistics.append(statistics) unified_values = self._backend_entity.unify_statistics(group_statistics) for quantization_target_point in unified_scale_group: @@ -613,8 +663,8 @@ def filter_func(point: StatisticPoint) -> bool: q_group = QuantizerGroup.ACTIVATIONS narrow_range = get_quantizer_narrow_range(qconfig, q_group) parameters = calculate_quantizer_parameters(unified_values, qconfig, q_group, narrow_range) - command = self._backend_entity.create_activation_quantizer_insertion_command( - nncf_graph, quantization_target_point, qconfig, parameters + command = self._backend_entity.create_quantizer_insertion_command( + graph, quantization_target_point, qconfig, parameters ) transformation_layout.register(command) 
unified_ops_list.add(quantization_target_point) @@ -624,10 +674,10 @@ def filter_func(point: StatisticPoint) -> bool: continue target_node_name = quantization_target_point.target_node_name for tensor_collector in statistic_points.get_algo_statistics_for_node( - target_node_name, filter_func, MinMaxQuantization + target_node_name, filter_func, self._algorithm_key ): if quantization_target_point.is_weight_target_point(): - weights_name = self._backend_entity.get_weight_name(nncf_graph, quantization_target_point) + weights_name = self._backend_entity.get_weight_name(graph, quantization_target_point) if not self._backend_entity.should_quantize_weight(weights_name, weight_layer_names): continue weight_layer_names.add(weights_name) @@ -638,38 +688,37 @@ def filter_func(point: StatisticPoint) -> bool: half_range = quantization_target_point in quantization_points_overflow_fix narrow_range = get_quantizer_narrow_range(qconfig, quant_group) statistics = tensor_collector.get_statistics() + if statistics.min_values is None or statistics.max_values is None: + raise RuntimeError(f"Statistics were not collected for the node {target_node_name}") parameters = calculate_quantizer_parameters(statistics, qconfig, quant_group, narrow_range, half_range) - if quantization_target_point.is_weight_target_point(): - command = self._backend_entity.create_weight_quantizer_insertion_command( - nncf_graph, quantization_target_point, qconfig, parameters - ) - else: - command = self._backend_entity.create_activation_quantizer_insertion_command( - nncf_graph, quantization_target_point, qconfig, parameters - ) - + command = self._backend_entity.create_quantizer_insertion_command( + graph, quantization_target_point, qconfig, parameters + ) transformation_layout.register(command) quantized_model = model_transformer.transform(transformation_layout) return quantized_model - def get_statistic_points(self, model: TModel) -> StatisticPointsContainer: + def get_statistic_points(self, model: TModel, graph: NNCFGraph) -> StatisticPointsContainer: self._set_backend_entity(model) - nncf_graph = NNCFGraphFactory.create(model) if self.nncf_graph is None else self.nncf_graph - quantization_target_points, _ = self._get_quantization_target_points(model) + quantization_target_points, _ = self._get_quantization_target_points(model, graph) output = StatisticPointsContainer() for quantization_target_point, qconfig in quantization_target_points.items(): nncf_logger.debug( f"Adding target point {quantization_target_point.target_node_name}" f" with type {quantization_target_point.type} for statistics collection" ) - stat_collector = self._get_stat_collector(nncf_graph, quantization_target_point, qconfig) + num_samples = self._subset_size + if quantization_target_point.is_weight_target_point(): + # Weight statistics is constant, so only one collection is enough. 
+ num_samples = 1 + stat_collector = self._get_stat_collector(graph, quantization_target_point, qconfig, num_samples) output.add_statistic_point( StatisticPoint( target_point=quantization_target_point, tensor_collector=stat_collector, - algorithm=MinMaxQuantization, + algorithm=self._algorithm_key, ) ) return output @@ -690,8 +739,7 @@ def _apply_model_type_pass( if quantization_point.is_activation_quantization_point(): for node_name in quantization_point.directly_quantized_operator_node_names: node = nncf_graph.get_node_by_name(node_name) - mat_mul_metatype = self._backend_entity.mat_mul_metatype - if node.metatype != mat_mul_metatype: + if node.metatype not in self._backend_entity.mat_mul_metatypes: continue if ( quantization_point.qconfig.mode != QuantizationMode.SYMMETRIC @@ -702,3 +750,91 @@ def _apply_model_type_pass( f"Update quantization mode for the node {node_name}" f" to the symmetric due to ModelType parameter." ) + + def _apply_device_pass( + self, target_device: TargetDevice, quantizer_setup: SingleConfigQuantizerSetup, nncf_graph: NNCFGraph + ) -> None: + """ + This method applies model post-processing device passes to SingleConfigQuantizerSetup in-place. + + :param target_device: TargetDevice instance. + :param quantizer_setup: SingleConfigQuantizerSetup instance to update. + :param nncf_graph: NNCFGraph. + :return: None. + """ + + passes_map = {TargetDevice.CPU_SPR: self._apply_spr_pass} + + if target_device not in passes_map: + return + + passes_map[target_device](quantizer_setup, nncf_graph) + + def _apply_spr_pass( + self, quantizer_setup: SingleConfigQuantizerSetup, nncf_graph: NNCFGraph + ) -> SingleConfigQuantizerSetup: + """ + Applies CPU_SPR-related pass. + The main action is to remove one of the quantizers before elementwise layer (e.g. Add). + This action allows to get performance boost on SPR devices. + + :param quantizer_setup: SingleConfigQuantizerSetup instance to update. + :param nncf_graph: NNCFGraph instance to update. + :return: Modified SingleConfigQuantizerSetup. + """ + + def _is_node_after_producers(node): + input_node = node + while True: + input_node = nncf_graph.get_previous_nodes(input_node) + if len(input_node) > 1: + return False + input_node = input_node[0] + if input_node.metatype in producer_metatypes: + return True + + producer_metatypes = ( + self._backend_entity.conv_metatypes + + self._backend_entity.mat_mul_metatypes + + self._backend_entity.group_conv_metatypes + ) + + quantizer_setup_map = { + p.insertion_point.target_node_name: q_key for q_key, p in quantizer_setup.quantization_points.items() + } + + # Walking through all Add layers. + for add_node in nncf_graph.get_nodes_by_metatypes(self._backend_entity.add_metatypes): + add_inputs = nncf_graph.get_previous_nodes(add_node) + + # Filtering Add based on it's input. + # Need to find Add layer only with two activations as input. + if len(add_inputs) == 2 and all(n.node_name in quantizer_setup_map for n in add_inputs): + # Sorting of the inputs based on length of input's consumer in descending order. + add_inputs.sort(key=lambda n: len(nncf_graph.get_next_nodes(n)), reverse=True) + fq_1_producer, fq_2_producer = add_inputs + fq_1_q_key = quantizer_setup_map[fq_1_producer.node_name] + fq_2_q_key = quantizer_setup_map[fq_2_producer.node_name] + + # In the case of the two quantizers where one of them produces data into branching, + # it needs to remove the quantizer without branching after it. 
+ if ( + len(nncf_graph.get_next_nodes(fq_1_producer)) > 1 + and len(nncf_graph.get_next_nodes(fq_2_producer)) == 1 + ): + quantizer_setup.discard(fq_2_q_key, True) + continue + + # In the case of the two quantizers without branching after them, + # it needs to be checked that both quantizers follow producer nodes. + if _is_node_after_producers(fq_1_producer) and _is_node_after_producers(fq_2_producer): + fq_1_prod_shape = np.prod(nncf_graph.get_output_edges(fq_1_producer)[0].tensor_shape) + fq_2_prod_shape = np.prod(nncf_graph.get_output_edges(fq_2_producer)[0].tensor_shape) + + # Then it needs to remove quantizer with the smallest shape. + if fq_1_prod_shape >= fq_2_prod_shape: + quantizer_setup.discard(fq_1_q_key, True) + else: + quantizer_setup.discard(fq_2_q_key, True) + + return quantizer_setup diff --git a/nncf/quantization/algorithms/min_max/backend.py b/nncf/quantization/algorithms/min_max/backend.py index 6a87fc327af..254a1c949cf 100644 --- a/nncf/quantization/algorithms/min_max/backend.py +++ b/nncf/quantization/algorithms/min_max/backend.py @@ -28,18 +28,18 @@ from nncf.parameters import TargetDevice from nncf.quantization.fake_quantize import FakeQuantizeParameters from nncf.quantization.range_estimator import RangeEstimatorParameters -from nncf.scopes import IgnoredScope TModel = TypeVar("TModel") ALGO_BACKENDS = Registry("algo_backends") +# pylint:disable=too-many-public-methods class MinMaxAlgoBackend(ABC): @property @abstractmethod - def mat_mul_metatype(self) -> OperatorMetatype: + def mat_mul_metatypes(self) -> List[OperatorMetatype]: """ - Property for the backend-specific MatMul metatype. + Property for the backend-specific MatMul metatypes. """ @property @@ -58,7 +58,7 @@ def shapeof_metatypes(self) -> List[OperatorMetatype]: @property @abstractmethod - def conv_metatype(self) -> List[OperatorMetatype]: + def conv_metatypes(self) -> List[OperatorMetatype]: """ Property for the backend-specific Convolution metatypes. """ @@ -77,6 +77,27 @@ def read_variable_metatypes(self) -> List[OperatorMetatype]: Property for the backend-specific metatypes that also can be interpreted as inputs (ReadValue). """ + @property + @abstractmethod + def add_metatypes(self) -> List[OperatorMetatype]: + """ + Property for the backend-specific metatypes that can also be interpreted as an Add layer. + """ + + @property + @abstractmethod + def group_conv_metatypes(self) -> List[OperatorMetatype]: + """ + Property for the backend-specific Grouped Convolution metatypes. + """ + + @property + @abstractmethod + def scales_unification_map(self) -> Dict[OperatorMetatype, OperatorMetatype]: + """ + Property for the backend-specific metatypes that produce quantizers that might be unified. + """ + @property @abstractmethod def hw_config(self) -> HWConfig: @@ -105,25 +126,7 @@ def target_point(target_type: TargetType, target_node_name: str, port_id: int) - @staticmethod @abstractmethod - def create_activation_quantizer_insertion_command( - nncf_graph: NNCFGraph, - target_point: TargetPoint, - quantizer_config: QuantizerConfig, - parameters: FakeQuantizeParameters, - ) -> TransformationCommand: - """ - Returns backend-specific quantizer insertion command. - - :param nncf_graph: NNCFGraph to get input/output shapes for the target point. - :param target_point: Target location for the correction. - :param quantizer_config: QuantizerConfig instance for the current layer. - :param parameters: FakeQuantizeParameters to calculate activation quantization parameters. 
- :return: Backend-specific TransformationCommand for the quantizer insertion operation. - """ - - @staticmethod - @abstractmethod - def create_weight_quantizer_insertion_command( + def create_quantizer_insertion_command( nncf_graph: NNCFGraph, target_point: TargetPoint, quantizer_config: QuantizerConfig, @@ -203,13 +206,23 @@ def should_quantize_weight(weight_name: str, quantized_weight_names: Set[str]) - @staticmethod @abstractmethod - def get_ignored_scope(model_type: ModelType, device: TargetDevice) -> IgnoredScope: + def get_ignored_metatypes(model_type: ModelType, device: TargetDevice) -> List[OperatorMetatype]: """ - Returns ignores scope based on a model type and device parameters. + Returns ignored metatypes based on a model type and device parameters. :param model_type: Model type parameter. :param device: Target device. - :return: Instance of ignored scope. + :return: List of ignored metatypes. + """ + + @staticmethod + @abstractmethod + def get_ignored_names_by_layer_attributes(nncf_graph: NNCFGraph) -> List[str]: + """ + Returns names of ignored nodes based on layer_attributes. + + :param nncf_graph: NNCFGraph instance. + :return: List of ignored names. """ @staticmethod diff --git a/nncf/quantization/algorithms/min_max/onnx_backend.py b/nncf/quantization/algorithms/min_max/onnx_backend.py index 303b5a77c22..1bdab878188 100644 --- a/nncf/quantization/algorithms/min_max/onnx_backend.py +++ b/nncf/quantization/algorithms/min_max/onnx_backend.py @@ -9,7 +9,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -from typing import Dict, List, Optional, Set, Tuple, Union +from typing import Dict, List, Optional, Set, Union import numpy as np @@ -21,21 +21,11 @@ from nncf.common.quantization.structs import QuantizationMode from nncf.common.quantization.structs import QuantizerConfig from nncf.common.utils.backend import BackendType -from nncf.onnx.graph.metatypes.onnx_metatypes import ONNXAddLayerMetatype -from nncf.onnx.graph.metatypes.onnx_metatypes import ONNXConvolutionMetatype -from nncf.onnx.graph.metatypes.onnx_metatypes import ONNXConvolutionTransposeMetatype -from nncf.onnx.graph.metatypes.onnx_metatypes import ONNXDivLayerMetatype -from nncf.onnx.graph.metatypes.onnx_metatypes import ONNXLinearMetatype -from nncf.onnx.graph.metatypes.onnx_metatypes import ONNXMulLayerMetatype -from nncf.onnx.graph.metatypes.onnx_metatypes import ONNXNonMaxSuppressionMetatype -from nncf.onnx.graph.metatypes.onnx_metatypes import ONNXPowMetatype -from nncf.onnx.graph.metatypes.onnx_metatypes import ONNXReduceMeanMetatype -from nncf.onnx.graph.metatypes.onnx_metatypes import ONNXShapeMetatype -from nncf.onnx.graph.metatypes.onnx_metatypes import ONNXSqueezeMetatype -from nncf.onnx.graph.metatypes.onnx_metatypes import ONNXSubMetatype -from nncf.onnx.graph.metatypes.onnx_metatypes import ONNXTopKMetatype -from nncf.onnx.graph.nncf_graph_builder import ONNXExtendedLayerAttributes +from nncf.onnx.graph.metatypes import onnx_metatypes as om from nncf.onnx.graph.node_utils import get_input_edges_mapping +from nncf.onnx.graph.node_utils import get_quantization_axis +from nncf.onnx.graph.node_utils import get_quantized_tensor_shape +from nncf.onnx.graph.node_utils import get_reduction_shape from nncf.onnx.graph.transformations.commands import ONNXQuantizerInsertionCommand from nncf.onnx.graph.transformations.commands import ONNXTargetPoint from nncf.onnx.hardware.config import ONNXHWConfig @@ -52,35 +42,47 @@ from 
nncf.quantization.algorithms.min_max.backend import MinMaxAlgoBackend from nncf.quantization.fake_quantize import FakeQuantizeParameters from nncf.quantization.range_estimator import RangeEstimatorParameters -from nncf.scopes import IgnoredScope +# pylint:disable=too-many-public-methods @ALGO_BACKENDS.register(BackendType.ONNX) class ONNXMinMaxAlgoBackend(MinMaxAlgoBackend): @property - def mat_mul_metatype(self) -> OperatorMetatype: - return ONNXLinearMetatype + def mat_mul_metatypes(self) -> List[OperatorMetatype]: + return om.MATMUL_METATYPES @property def post_processing_metatypes(self) -> List[OperatorMetatype]: - return [ONNXTopKMetatype, ONNXNonMaxSuppressionMetatype] + return [om.ONNXTopKMetatype, om.ONNXNonMaxSuppressionMetatype] @property def shapeof_metatypes(self) -> List[OperatorMetatype]: - return [ONNXShapeMetatype] + return [om.ONNXShapeMetatype] @property - def conv_metatype(self) -> List[OperatorMetatype]: - return [ONNXConvolutionMetatype] + def conv_metatypes(self) -> List[OperatorMetatype]: + return [om.ONNXConvolutionMetatype] @property def overflow_fix_metatypes(self) -> List[OperatorMetatype]: - return [ONNXConvolutionMetatype, ONNXConvolutionTransposeMetatype, ONNXLinearMetatype] + return [om.ONNXConvolutionMetatype, om.ONNXConvolutionTransposeMetatype, *om.MATMUL_METATYPES] @property def read_variable_metatypes(self) -> List[OperatorMetatype]: return [] + @property + def add_metatypes(self) -> List[OperatorMetatype]: + return [om.ONNXAddLayerMetatype] + + @property + def group_conv_metatypes(self) -> List[OperatorMetatype]: + return self.conv_metatypes + + @property + def scales_unification_map(self) -> Dict[OperatorMetatype, OperatorMetatype]: + return {om.ONNXConcatMetatype: self.overflow_fix_metatypes} + @property def hw_config(self) -> HWConfig: return ONNXHWConfig @@ -94,34 +96,18 @@ def target_point(target_type: TargetType, target_node_name: str, port_id: int) - return ONNXTargetPoint(target_type, target_node_name, port_id) @staticmethod - def create_activation_quantizer_insertion_command( + def create_quantizer_insertion_command( nncf_graph: NNCFGraph, target_point: ONNXTargetPoint, quantizer_config: QuantizerConfig, parameters: FakeQuantizeParameters, - ) -> ONNXQuantizerInsertionCommand: - nncf_input_node_next_nodes = ONNXMinMaxAlgoBackend._get_input_edges_mapping(nncf_graph) - axis = ONNXMinMaxAlgoBackend._get_axis(nncf_graph, target_point, quantizer_config) + ): tensor_type = np.int8 if np.any(parameters.input_low < 0) else np.uint8 - onnx_parameters = convert_fq_params_to_onnx_params(parameters, quantizer_config.num_bits, tensor_type, axis) - return ONNXQuantizerInsertionCommand(target_point, nncf_input_node_next_nodes, onnx_parameters) - - @staticmethod - def create_weight_quantizer_insertion_command( - nncf_graph: NNCFGraph, - target_point: ONNXTargetPoint, - quantizer_config: QuantizerConfig, - parameters: FakeQuantizeParameters, - ) -> ONNXQuantizerInsertionCommand: - if quantizer_config.signedness_to_force is False: - raise ValueError( - "The HW expects to have signed quantization of weights, " - "while the quantizer configuration for weights contains signedness_to_force=False." 
- ) - - tensor_type = np.int8 # The weight is restricted to have only signed range + if target_point.is_weight_target_point(): + tensor_type = np.int8 # The weight is restricted to have only signed range nncf_input_node_next_nodes = ONNXMinMaxAlgoBackend._get_input_edges_mapping(nncf_graph) - axis = ONNXMinMaxAlgoBackend._get_axis(nncf_graph, target_point, quantizer_config) + node = nncf_graph.get_node_by_name(target_point.target_node_name) + axis = get_quantization_axis(quantizer_config.per_channel, node, target_point) onnx_parameters = convert_fq_params_to_onnx_params(parameters, quantizer_config.num_bits, tensor_type, axis) return ONNXQuantizerInsertionCommand(target_point, nncf_input_node_next_nodes, onnx_parameters) @@ -129,8 +115,8 @@ def create_weight_quantizer_insertion_command( def unify_statistics(statistics: List[ONNXMinMaxTensorStatistic]) -> ONNXMinMaxTensorStatistic: max_values, min_values = [], [] for statistic in statistics: - max_values.append(statistic.max_values) - min_values.append(statistic.min_values) + max_values.append(np.array(statistic.max_values).flatten()) + min_values.append(np.array(statistic.min_values).flatten()) max_values = np.max(max_values, axis=0) min_values = np.min(min_values, axis=0) return ONNXMinMaxTensorStatistic(min_values=min_values, max_values=max_values) @@ -139,39 +125,6 @@ def unify_statistics(statistics: List[ONNXMinMaxTensorStatistic]) -> ONNXMinMaxT def _get_input_edges_mapping(nncf_graph: NNCFGraph): return get_input_edges_mapping(nncf_graph) - @staticmethod - def _get_axis( - nncf_graph: NNCFGraph, target_point: ONNXTargetPoint, quantizer_config: QuantizerConfig - ) -> Optional[int]: - if not quantizer_config.per_channel: - return None - if not target_point.is_weight_target_point(): - return 1 - node = nncf_graph.get_node_by_name(target_point.target_node_name) - return node.metatype.weight_definitions.weight_channel_axis - - @staticmethod - def _get_reduction_shape_and_use_abs_max( - nncf_graph: NNCFGraph, target_point: ONNXTargetPoint, quantizer_config: QuantizerConfig - ) -> Tuple[Optional[Tuple[int, ...]], bool]: - use_abs_max = quantizer_config.mode == QuantizationMode.SYMMETRIC - if not quantizer_config.per_channel: - return None, use_abs_max - - if not target_point.is_weight_target_point(): - # TODO: support reduction shapes for 3D-5D conv cases - return (0, 2, 3), use_abs_max - - # Calculate reduction shape for weight statistic collector - node = nncf_graph.get_node_by_name(target_point.target_node_name) - assert isinstance(node.layer_attributes, ONNXExtendedLayerAttributes) - weight_shape = node.layer_attributes.weight_shape - reduction_shape = list(range(len(weight_shape))) - - axis = ONNXMinMaxAlgoBackend._get_axis(nncf_graph, target_point, quantizer_config) - reduction_shape.pop(axis) - return tuple(reduction_shape), use_abs_max - @staticmethod def get_statistic_collector( range_estimator_params: RangeEstimatorParameters, @@ -181,9 +134,14 @@ def get_statistic_collector( inplace: bool, num_samples: int = None, ) -> Union[ONNXMinMaxStatisticCollector, ONNXMeanMinMaxStatisticCollector]: - reduction_shape, use_abs_max = ONNXMinMaxAlgoBackend._get_reduction_shape_and_use_abs_max( - nncf_graph, target_point, quantizer_config - ) + is_per_channel = quantizer_config.per_channel + node = nncf_graph.get_node_by_name(target_point.target_node_name) + use_abs_max = quantizer_config.mode == QuantizationMode.SYMMETRIC + reduction_shape = None # Per-Tensor + quantization_axis = get_quantization_axis(is_per_channel, node, target_point) + 
quantized_tensor_shape = get_quantized_tensor_shape(nncf_graph, node, target_point) + if quantization_axis is not None and quantized_tensor_shape is not None: # Per-Channel + reduction_shape = get_reduction_shape(quantized_tensor_shape, quantization_axis) if ( range_estimator_params.min.statistics_type == StatisticsType.MIN @@ -213,38 +171,41 @@ def get_statistic_collector( @staticmethod def get_weight_tensor_port_ids(node: NNCFNode) -> List[Optional[int]]: - return [node.metatype.weight_definitions.weight_port_id] + return list(node.layer_attributes.weight_attrs.keys()) @staticmethod - def get_ignored_scope(model_type: ModelType, device: TargetDevice) -> IgnoredScope: + def get_ignored_metatypes(model_type: ModelType, device: TargetDevice) -> List[OperatorMetatype]: + types = [] if model_type == ModelType.TRANSFORMER: - types = [] - metatypes_to_add = [ - ONNXAddLayerMetatype, - ONNXPowMetatype, - ONNXSqueezeMetatype, - ONNXSubMetatype, - ONNXReduceMeanMetatype, - ONNXDivLayerMetatype, + types = [ + om.ONNXAddLayerMetatype, + om.ONNXPowMetatype, + om.ONNXSqueezeMetatype, + om.ONNXSubMetatype, + om.ONNXReduceMeanMetatype, + om.ONNXReduceL2Metatype, + om.ONNXReduceSumMetatype, + om.ONNXDivLayerMetatype, + om.ONNXMaximumMetatype, + om.ONNXSqrtMetatype, + om.ONNXReciprocalMetatype, ] if device != TargetDevice.CPU_SPR: - metatypes_to_add.append(ONNXMulLayerMetatype) - for metatype in metatypes_to_add: - types.extend(metatype.get_all_aliases()) - return IgnoredScope(types=types) - return IgnoredScope() + types.append(om.ONNXMulLayerMetatype) + return types + + @staticmethod + def get_ignored_names_by_layer_attributes(nncf_graph: NNCFGraph) -> List[str]: + return [] @staticmethod def get_weight_nodes(nncf_graph: NNCFGraph) -> List[NNCFNode]: - return [ - node - for node in nncf_graph.get_all_nodes() - if isinstance(node.layer_attributes, ONNXExtendedLayerAttributes) - ] + return [node for node in nncf_graph.get_all_nodes() if node.layer_attributes.has_weight()] @staticmethod def get_weight_name(nncf_graph: NNCFGraph, target_point: ONNXTargetPoint) -> str: - return nncf_graph.get_node_by_name(target_point.target_node_name).layer_name + node_name, port_id = target_point.target_node_name, target_point.port_id + return nncf_graph.get_node_by_name(node_name).layer_attributes.weight_attrs[port_id]["name"] @staticmethod def should_quantize_weight(weight_name: str, quantized_weight_names: Set[str]) -> bool: diff --git a/nncf/quantization/algorithms/min_max/openvino_backend.py b/nncf/quantization/algorithms/min_max/openvino_backend.py index 86ee4c86a77..a5e35bdd84a 100644 --- a/nncf/quantization/algorithms/min_max/openvino_backend.py +++ b/nncf/quantization/algorithms/min_max/openvino_backend.py @@ -24,27 +24,10 @@ from nncf.common.utils.backend import BackendType from nncf.experimental.common.tensor_statistics.collectors import AGGREGATORS_MAP from nncf.experimental.common.tensor_statistics.collectors import TensorCollector +from nncf.openvino.graph.layer_attributes import OVLayerAttributes +from nncf.openvino.graph.metatypes import openvino_metatypes as om from nncf.openvino.graph.metatypes.openvino_metatypes import GENERAL_WEIGHT_LAYER_METATYPES -from nncf.openvino.graph.metatypes.openvino_metatypes import OVAddMetatype -from nncf.openvino.graph.metatypes.openvino_metatypes import OVConvolutionBackpropDataMetatype -from nncf.openvino.graph.metatypes.openvino_metatypes import OVConvolutionMetatype -from nncf.openvino.graph.metatypes.openvino_metatypes import OVDivideMetatype -from 
nncf.openvino.graph.metatypes.openvino_metatypes import OVGroupConvolutionBackpropDataMetatype -from nncf.openvino.graph.metatypes.openvino_metatypes import OVGroupConvolutionMetatype -from nncf.openvino.graph.metatypes.openvino_metatypes import OVMatMulMetatype -from nncf.openvino.graph.metatypes.openvino_metatypes import OVMultiplyMetatype -from nncf.openvino.graph.metatypes.openvino_metatypes import OVMVNMetatype -from nncf.openvino.graph.metatypes.openvino_metatypes import OVNonMaxSuppressionMetatype -from nncf.openvino.graph.metatypes.openvino_metatypes import OVPowerMetatype -from nncf.openvino.graph.metatypes.openvino_metatypes import OVReadValueMetatype -from nncf.openvino.graph.metatypes.openvino_metatypes import OVReduceMeanMetatype -from nncf.openvino.graph.metatypes.openvino_metatypes import OVShapeOfMetatype -from nncf.openvino.graph.metatypes.openvino_metatypes import OVSqrtMetatype -from nncf.openvino.graph.metatypes.openvino_metatypes import OVSquaredDifferenceMetatype -from nncf.openvino.graph.metatypes.openvino_metatypes import OVSqueezeMetatype -from nncf.openvino.graph.metatypes.openvino_metatypes import OVSubtractMetatype -from nncf.openvino.graph.metatypes.openvino_metatypes import OVTopKMetatype -from nncf.openvino.graph.nncf_graph_builder import OVConstantLayerAttributes +from nncf.openvino.graph.node_utils import get_channel_agnostic_reduction_shape from nncf.openvino.graph.node_utils import get_weight_channel_axes from nncf.openvino.graph.transformations.commands import OVQuantizerInsertionCommand from nncf.openvino.graph.transformations.commands import OVTargetPoint @@ -60,40 +43,52 @@ from nncf.quantization.algorithms.min_max.backend import ALGO_BACKENDS from nncf.quantization.algorithms.min_max.backend import MinMaxAlgoBackend from nncf.quantization.fake_quantize import FakeQuantizeParameters -from nncf.scopes import IgnoredScope +# pylint:disable=too-many-public-methods @ALGO_BACKENDS.register(BackendType.OPENVINO) class OVMinMaxAlgoBackend(MinMaxAlgoBackend): @property - def mat_mul_metatype(self) -> OperatorMetatype: - return OVMatMulMetatype + def mat_mul_metatypes(self) -> List[OperatorMetatype]: + return [om.OVMatMulMetatype] @property def post_processing_metatypes(self) -> List[OperatorMetatype]: - return [OVTopKMetatype, OVNonMaxSuppressionMetatype] + return [om.OVTopKMetatype, om.OVNonMaxSuppressionMetatype] @property def shapeof_metatypes(self) -> List[OperatorMetatype]: - return [OVShapeOfMetatype] + return [om.OVShapeOfMetatype] @property - def conv_metatype(self) -> List[OperatorMetatype]: - return [OVConvolutionMetatype] + def conv_metatypes(self) -> List[OperatorMetatype]: + return [om.OVConvolutionMetatype] @property def overflow_fix_metatypes(self) -> List[OperatorMetatype]: return [ - OVConvolutionMetatype, - OVGroupConvolutionMetatype, - OVConvolutionBackpropDataMetatype, - OVGroupConvolutionBackpropDataMetatype, - OVMatMulMetatype, + om.OVConvolutionMetatype, + om.OVGroupConvolutionMetatype, + om.OVConvolutionBackpropDataMetatype, + om.OVGroupConvolutionBackpropDataMetatype, + om.OVMatMulMetatype, ] @property def read_variable_metatypes(self) -> List[OperatorMetatype]: - return [OVReadValueMetatype] + return [om.OVReadValueMetatype] + + @property + def add_metatypes(self) -> List[OperatorMetatype]: + return [om.OVAddMetatype] + + @property + def group_conv_metatypes(self) -> List[OperatorMetatype]: + return [om.OVGroupConvolutionMetatype] + + @property + def scales_unification_map(self) -> Dict[OperatorMetatype, OperatorMetatype]: + return 
{om.OVConcatMetatype: self.overflow_fix_metatypes} @property def hw_config(self) -> HWConfig: @@ -108,16 +103,7 @@ def target_point(target_type: TargetType, target_node_name: str, port_id: int) - return OVTargetPoint(target_type, target_node_name, port_id) @staticmethod - def create_activation_quantizer_insertion_command( - nncf_graph: NNCFGraph, - target_point: OVTargetPoint, - quantizer_config: QuantizerConfig, - parameters: FakeQuantizeParameters, - ) -> OVQuantizerInsertionCommand: - return OVQuantizerInsertionCommand(target_point, parameters) - - @staticmethod - def create_weight_quantizer_insertion_command( + def create_quantizer_insertion_command( nncf_graph: NNCFGraph, target_point: OVTargetPoint, quantizer_config: QuantizerConfig, @@ -129,8 +115,8 @@ def create_weight_quantizer_insertion_command( def unify_statistics(statistics: List[OVMinMaxTensorStatistic]) -> OVMinMaxTensorStatistic: max_values, min_values = [], [] for statistic in statistics: - max_values.append(statistic.max_values) - min_values.append(statistic.min_values) + max_values.append(np.array(statistic.max_values).flatten()) + min_values.append(np.array(statistic.min_values).flatten()) max_values = np.max(max_values, axis=0) min_values = np.min(min_values, axis=0) return OVMinMaxTensorStatistic(min_values=min_values, max_values=max_values) @@ -154,26 +140,19 @@ def _get_reduction_shape_and_use_abs_max( # TODO (l-bat): Disable quantizer propogation through layout changing operations channel_axis = 1 # OpenVINO activations have channel first layout: [N, C, Z, Y, X] - axes = tuple(i for i in range(len(shape)) if i != channel_axis) + axes = get_channel_agnostic_reduction_shape([channel_axis], shape) return axes, use_abs_max - assert isinstance(node.layer_attributes, OVConstantLayerAttributes) - const_shape = node.layer_attributes.const_attrs[target_point.port_id]["shape"] + assert isinstance(node.layer_attributes, OVLayerAttributes) + const_shape = node.layer_attributes.constant_attributes[target_point.port_id]["shape"] if quantizer_config.per_channel: channel_axes = get_weight_channel_axes(node, target_point.port_id) - axes = tuple(i for i in range(len(const_shape)) if i not in channel_axes) + axes = get_channel_agnostic_reduction_shape(channel_axes, const_shape) else: axes = tuple(range(len(const_shape))) - return axes, use_abs_max - @staticmethod - def _get_num_samples(num_samples, target_point: OVTargetPoint): - if target_point.is_weight_target_point(): - return 1 - return num_samples - @staticmethod def get_statistic_collector( range_estimator_params: RangeEstimatorParameters, @@ -186,7 +165,6 @@ def get_statistic_collector( reduction_shape, use_abs_max = OVMinMaxAlgoBackend._get_reduction_shape_and_use_abs_max( nncf_graph, target_point, quantizer_config ) - _num_samples = OVMinMaxAlgoBackend._get_num_samples(num_samples, target_point) collector = TensorCollector(OVMinMaxTensorStatistic) for params, container_key in zip( @@ -216,7 +194,7 @@ def get_statistic_collector( statistic_type = StatisticsType.ABS_MAX reducer = OV_REDUCERS_MAP[statistic_type](**kwargs) - kwargs = {"num_samples": _num_samples, "tensor_processor": OVNNCFCollectorTensorProcessor} + kwargs = {"num_samples": num_samples, "tensor_processor": OVNNCFCollectorTensorProcessor} aggregator = AGGREGATORS_MAP[params.aggregator_type](**kwargs) collector.register_statistic_branch(container_key, reducer, aggregator) @@ -227,40 +205,49 @@ def get_weight_tensor_port_ids(node: NNCFNode) -> List[Optional[int]]: return node.layer_attributes.get_const_port_ids() 
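# Editor's illustration (not part of the original patch): both the ONNX and OpenVINO backends above
# now build reduction axes via helpers such as get_channel_agnostic_reduction_shape (imported from
# nncf/openvino/graph/node_utils.py) instead of inline generator expressions like
# tuple(i for i in range(len(shape)) if i != channel_axis). Judging only from the expressions the
# helper replaces in this diff, it presumably behaves like the hypothetical sketch below; the real
# implementation may differ.
#
#     def get_channel_agnostic_reduction_shape(channel_axes, shape):
#         # Reduce over every axis that is not a channel axis.
#         return tuple(i for i in range(len(shape)) if i not in channel_axes)
#
#     # e.g. get_channel_agnostic_reduction_shape([1], (1, 3, 224, 224)) -> (0, 2, 3)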
@staticmethod - def get_ignored_scope(model_type: ModelType, device: TargetDevice) -> IgnoredScope: + def get_ignored_metatypes(model_type: ModelType, device: TargetDevice) -> List[OperatorMetatype]: + types = [] if model_type == ModelType.TRANSFORMER: - types = [] - metatypes_to_add = [ - OVAddMetatype, - OVPowerMetatype, - OVSqueezeMetatype, - OVSubtractMetatype, - OVReduceMeanMetatype, - OVSquaredDifferenceMetatype, - OVMVNMetatype, - OVDivideMetatype, - OVSqrtMetatype, + types = [ + om.OVAddMetatype, + om.OVPowerMetatype, + om.OVSqueezeMetatype, + om.OVSubtractMetatype, + om.OVReduceMeanMetatype, + om.OVReduceL2Metatype, + om.OVSumMetatype, + om.OVSquaredDifferenceMetatype, + om.OVMVNMetatype, + om.OVDivideMetatype, + om.OVSqrtMetatype, + om.OVMaximumMetatype, ] if device != TargetDevice.CPU_SPR: - metatypes_to_add.append(OVMultiplyMetatype) - for metatype in metatypes_to_add: - types.extend(metatype.get_all_aliases()) - return IgnoredScope(types=types) - return IgnoredScope() + types.append(om.OVMultiplyMetatype) + return types + + @staticmethod + def get_ignored_names_by_layer_attributes(nncf_graph: NNCFGraph) -> List[str]: + ignored_names = [] + target_nodes = nncf_graph.get_nodes_by_metatypes([om.OVGRUSequenceMetatype]) + for node in target_nodes: + if isinstance(node.layer_attributes, OVLayerAttributes): + if node.layer_attributes.input_attributes["linear_before_reset"]: + ignored_names.append(node.node_name) + return ignored_names @staticmethod def get_weight_nodes(nncf_graph: NNCFGraph) -> List[NNCFNode]: return [ node for node in nncf_graph.get_all_nodes() - if isinstance(node.layer_attributes, OVConstantLayerAttributes) - and node.metatype in GENERAL_WEIGHT_LAYER_METATYPES + if isinstance(node.layer_attributes, OVLayerAttributes) and node.metatype in GENERAL_WEIGHT_LAYER_METATYPES ] @staticmethod def get_weight_name(nncf_graph: NNCFGraph, target_point: OVTargetPoint) -> str: node = nncf_graph.get_node_by_name(target_point.target_node_name) - return node.layer_attributes.const_attrs[target_point.port_id]["name"] + return node.layer_attributes.constant_attributes[target_point.port_id]["name"] @staticmethod def should_quantize_weight(weight_name: str, quantized_weight_names: Set[str]) -> bool: diff --git a/nncf/quantization/algorithms/min_max/torch_backend.py b/nncf/quantization/algorithms/min_max/torch_backend.py index 408ea4db0b4..620a276eaf1 100644 --- a/nncf/quantization/algorithms/min_max/torch_backend.py +++ b/nncf/quantization/algorithms/min_max/torch_backend.py @@ -36,12 +36,11 @@ from nncf.quantization.algorithms.min_max.backend import MinMaxAlgoBackend from nncf.quantization.fake_quantize import FakeQuantizeParameters from nncf.quantization.range_estimator import RangeEstimatorParameters -from nncf.scopes import IgnoredScope from nncf.torch.graph.graph import PTTargetPoint from nncf.torch.graph.transformations.commands import PTInsertionCommand from nncf.torch.hardware.config import PTHWConfig +from nncf.torch.model_transformer import PTModelTransformer from nncf.torch.nncf_network import NNCFNetwork -from nncf.torch.nncf_network import PTModelTransformer from nncf.torch.quantization.default_quantization import DEFAULT_PT_QUANT_TRAIT_TO_OP_DICT from nncf.torch.quantization.init_range import PTRangeInitCollectorParams from nncf.torch.quantization.init_range import StatCollectorGenerator @@ -58,9 +57,14 @@ # pylint:disable=too-many-public-methods @ALGO_BACKENDS.register(BackendType.TORCH) class PTMinMaxAlgoBackend(MinMaxAlgoBackend): + TARGET_TYPE_TO_PT_INS_TYPE_MAP = { + 
TargetType.PRE_LAYER_OPERATION: TargetType.OPERATOR_PRE_HOOK, + TargetType.POST_LAYER_OPERATION: TargetType.OPERATOR_POST_HOOK, + } + @property - def mat_mul_metatype(self) -> OperatorMetatype: - return om.PTModuleLinearMetatype + def mat_mul_metatypes(self) -> List[OperatorMetatype]: + return [om.PTModuleLinearMetatype] @property def post_processing_metatypes(self) -> List[OperatorMetatype]: @@ -71,7 +75,7 @@ def shapeof_metatypes(self) -> List[OperatorMetatype]: return [] @property - def conv_metatype(self) -> List[OperatorMetatype]: + def conv_metatypes(self) -> List[OperatorMetatype]: return [om.PTModuleConv1dMetatype, om.PTModuleConv2dMetatype, om.PTModuleConv3dMetatype] @property @@ -90,6 +94,18 @@ def overflow_fix_metatypes(self) -> List[OperatorMetatype]: def read_variable_metatypes(self) -> List[OperatorMetatype]: return [] + @property + def add_metatypes(self) -> List[OperatorMetatype]: + return [om.PTAddMetatype] + + @property + def group_conv_metatypes(self) -> List[OperatorMetatype]: + return self.conv_metatypes + + @property + def scales_unification_map(self) -> Dict[OperatorMetatype, OperatorMetatype]: + return {om.PTCatMetatype: self.overflow_fix_metatypes} + @property def hw_config(self) -> HWConfig: return PTHWConfig @@ -102,11 +118,6 @@ def quant_trait_op_dict(self) -> Dict[int, OperatorMetatype]: def model_transformer(model: NNCFNetwork) -> ModelTransformer: return PTModelTransformer(model) - TARGET_TYPE_TO_PT_INS_TYPE_MAP = { - TargetType.PRE_LAYER_OPERATION: TargetType.OPERATOR_PRE_HOOK, - TargetType.POST_LAYER_OPERATION: TargetType.OPERATOR_POST_HOOK, - } - @staticmethod def target_point(target_type: TargetType, target_node_name: str, port_id: int) -> PTTargetPoint: if NNCFGraphNodeType.INPUT_NODE in target_node_name or target_type == TargetType.POST_LAYER_OPERATION: @@ -116,18 +127,7 @@ def target_point(target_type: TargetType, target_node_name: str, port_id: int) - return PTTargetPoint(target_type, target_node_name, input_port_id=port_id) @staticmethod - def create_activation_quantizer_insertion_command( - nncf_graph: NNCFGraph, - target_point: PTTargetPoint, - quantizer_config: QuantizerConfig, - parameters: FakeQuantizeParameters, - ) -> PTInsertionCommand: - return PTMinMaxAlgoBackend._create_quantizer_insertion_command( - nncf_graph, target_point, quantizer_config, parameters - ) - - @staticmethod - def create_weight_quantizer_insertion_command( + def create_quantizer_insertion_command( nncf_graph: NNCFGraph, target_point: PTTargetPoint, quantizer_config: QuantizerConfig, @@ -141,8 +141,8 @@ def create_weight_quantizer_insertion_command( def unify_statistics(statistics: List[PTMinMaxTensorStatistic]) -> PTMinMaxTensorStatistic: max_values, min_values = [], [] for statistic in statistics: - max_values.append(statistic.max_values) - min_values.append(statistic.min_values) + max_values.append(torch.tensor(statistic.max_values).flatten()) + min_values.append(torch.tensor(statistic.min_values).flatten()) max_values = torch.max(torch.tensor(max_values)) min_values = torch.min(torch.tensor(min_values)) return PTMinMaxTensorStatistic(min_values=min_values, max_values=max_values) @@ -309,24 +309,27 @@ def _create_quantizer_insertion_command( return PTInsertionCommand(target_point, quantizer, TransformationPriority.QUANTIZATION_PRIORITY) @staticmethod - def get_ignored_scope(model_type: ModelType, device: TargetDevice) -> IgnoredScope: + def get_ignored_metatypes(model_type: ModelType, device: TargetDevice) -> List[OperatorMetatype]: + types = [] if model_type == 
ModelType.TRANSFORMER: - types = [] - metatypes_to_add = [ + types = [ om.PTAddMetatype, om.PTPowerMetatype, om.PTSubMetatype, om.PTMeanMetatype, + om.PTSumMetatype, + om.PTReduceL2, om.PTDivMetatype, + om.PTMaxMetatype, + om.PTSqueezeMetatype, ] if device != TargetDevice.CPU_SPR: - metatypes_to_add.append(om.PTMulMetatype) - type_name_to_add = ["squeeze"] - for metatype in metatypes_to_add: - types.extend(metatype.get_all_aliases()) - types.extend(type_name_to_add) - return IgnoredScope(types=types) - return IgnoredScope() + types.append(om.PTMulMetatype) + return types + + @staticmethod + def get_ignored_names_by_layer_attributes(nncf_graph: NNCFGraph) -> List[str]: + return [] @staticmethod def get_weight_nodes(nncf_graph: NNCFGraph) -> List[NNCFNode]: diff --git a/nncf/quantization/algorithms/post_training/algorithm.py b/nncf/quantization/algorithms/post_training/algorithm.py index 60ab4b02edf..79630c3c652 100644 --- a/nncf/quantization/algorithms/post_training/algorithm.py +++ b/nncf/quantization/algorithms/post_training/algorithm.py @@ -9,13 +9,16 @@ # See the License for the specific language governing permissions and # limitations under the License. -from typing import Dict, Optional, TypeVar - -import numpy as np +from dataclasses import dataclass +from typing import Callable, Dict, List, Optional, TypeVar from nncf import Dataset +from nncf.common.deprecation import warning_deprecated +from nncf.common.factory import NNCFGraphFactory +from nncf.common.factory import StatisticsAggregatorFactory +from nncf.common.graph.graph import NNCFGraph +from nncf.common.logging import nncf_logger from nncf.common.quantization.structs import QuantizationPreset -from nncf.common.tensor_statistics.aggregator import StatisticsAggregator from nncf.common.tensor_statistics.statistic_point import StatisticPointsContainer from nncf.common.utils.backend import BackendType from nncf.common.utils.backend import copy_model @@ -26,12 +29,16 @@ from nncf.quantization.algorithms.algorithm import Algorithm from nncf.quantization.algorithms.bias_correction.algorithm import BIAS_CORRECTION_THRESHOLD from nncf.quantization.algorithms.bias_correction.algorithm import BiasCorrection +from nncf.quantization.algorithms.channel_alignment.algorithm import ChannelAlignment from nncf.quantization.algorithms.fast_bias_correction.algorithm import FAST_BIAS_CORRECTION_THRESHOLD from nncf.quantization.algorithms.fast_bias_correction.algorithm import FastBiasCorrection from nncf.quantization.algorithms.min_max.algorithm import MinMaxQuantization +from nncf.quantization.algorithms.smooth_quant.algorithm import SmoothQuant +from nncf.quantization.passes import insert_null_biases_pass from nncf.scopes import IgnoredScope TModel = TypeVar("TModel") +TPass = Callable[[TModel], TModel] class PostTrainingQuantization(Algorithm): @@ -40,11 +47,13 @@ class PostTrainingQuantization(Algorithm): 1) ChannelAlignment 2) MinMaxQuantization 3) FastBiasCorrection or BiasCorrection - - Disclaimer: currently, it only supports MinMaxQuantization, FastBiasCorrection & BiasCorrection. - ChannelAlignment will be added soon. 
""" + @dataclass + class FirstStageAlgorithm: + algorithm: "Algorithm" + pre_passes: List[TPass] + def __init__( self, preset: QuantizationPreset = QuantizationPreset.PERFORMANCE, @@ -78,10 +87,30 @@ def __init__( """ super().__init__() self.algorithms = [] + self.first_stage_algorithms: List[self.FirstStageAlgorithm] = [] + + if target_device is TargetDevice.VPU: + warning_deprecated("VPU device is deprecated and will no longer be supported in the future.") if advanced_parameters is None: advanced_parameters = AdvancedQuantizationParameters() + if model_type == ModelType.TRANSFORMER and advanced_parameters.smooth_quant_alpha >= 0: + smooth_quant_algorithm = SmoothQuant( + subset_size=subset_size, + inplace_statistics=advanced_parameters.inplace_statistics, + alpha=advanced_parameters.smooth_quant_alpha, + ) + self.first_stage_algorithms.append(self.FirstStageAlgorithm(smooth_quant_algorithm, [])) + + if not advanced_parameters.disable_channel_alignment: + channel_alignment = ChannelAlignment( + subset_size=subset_size, + inplace_statistics=advanced_parameters.inplace_statistics, + backend_params=advanced_parameters.backend_params, + ) + self.first_stage_algorithms.append(self.FirstStageAlgorithm(channel_alignment, [insert_null_biases_pass])) + min_max_quantization = MinMaxQuantization( preset=preset, target_device=target_device, @@ -119,7 +148,7 @@ def __init__( threshold = BIAS_CORRECTION_THRESHOLD if bias_correction_params.threshold is not None: threshold = bias_correction_params.threshold - bias_correction_subset_size = max(np.int(subset_size * 0.2), 1) + bias_correction_subset_size = max(int(subset_size * 0.2), 1) bias_correction = BiasCorrection( subset_size=bias_correction_subset_size, threshold=threshold, @@ -134,56 +163,67 @@ def __init__( def available_backends(self) -> Dict[str, BackendType]: return - def get_statistic_points(self, model: TModel) -> StatisticPointsContainer: + def get_statistic_points(self, model: TModel, graph: NNCFGraph) -> StatisticPointsContainer: + if self.first_stage_algorithms: + raise NotImplementedError( + "Statistic points are not supported yet for SmoothQuant and ChannelAlignment algorithms." + ) + output = StatisticPointsContainer() for algorithm in self.algorithms: - for statistic_points in algorithm.get_statistic_points(model).values(): + for statistic_points in algorithm.get_statistic_points(model, graph).values(): for statistic_point in statistic_points: output.add_statistic_point(statistic_point) return output - def _create_statistics_aggregator(self, dataset: Dataset, backend: BackendType) -> StatisticsAggregator: - """ - Creates backend-specific StatisticsAggregator. 
- - :param engine: engine for the model execution - :param dataset: dataset for the statistics collection and validation - :param model_transformer: backend-specific ModelTransformerBase instance - :param backend: model backend type for the further differentiations - :return: backnd-specific StatisticsAggregator - """ - if backend == BackendType.ONNX: - from nncf.onnx.statistics.aggregator import ONNXStatisticsAggregator - - return ONNXStatisticsAggregator(dataset) - if backend == BackendType.OPENVINO: - from nncf.openvino.statistics.aggregator import OVStatisticsAggregator - - return OVStatisticsAggregator(dataset) - if backend == BackendType.TORCH: - from nncf.torch.statistics.aggregator import PTStatisticsAggregator - - return PTStatisticsAggregator(dataset) - return None - - def _apply( + def apply( self, model: TModel, + graph: NNCFGraph, statistic_points: Optional[StatisticPointsContainer] = None, dataset: Optional[Dataset] = None, ) -> TModel: modified_model = copy_model(model) - if statistic_points is None: - backend = get_backend(modified_model) + modified_model_graph = graph + backend = get_backend(modified_model) + + for first_stage_algorithm in self.first_stage_algorithms: + algorithm = first_stage_algorithm.algorithm + + if isinstance(algorithm, SmoothQuant) and backend != BackendType.OPENVINO: + nncf_logger.debug(f"{backend.name} does not support SmoothQuant algorithm yet.") + continue + + if isinstance(algorithm, ChannelAlignment) and backend != BackendType.OPENVINO: + nncf_logger.debug(f"{backend.name} does not support ChannelAlignment algorithm yet.") + continue + + for pre_pass in first_stage_algorithm.pre_passes: + modified_model = pre_pass(modified_model, modified_model_graph) + modified_model_graph = NNCFGraphFactory.create(modified_model) + + statistics_aggregator = StatisticsAggregatorFactory.create(modified_model, dataset) + algo_statistic_points = algorithm.get_statistic_points(modified_model, modified_model_graph) + statistics_aggregator.register_statistic_points(algo_statistic_points) + statistics_aggregator.collect_statistics(modified_model, modified_model_graph) + modified_model = algorithm.apply( + modified_model, modified_model_graph, statistics_aggregator.statistic_points + ) + modified_model_graph = NNCFGraphFactory.create(modified_model) - statistics_aggregator = self._create_statistics_aggregator(dataset, backend) + if statistic_points is None: + statistics_aggregator = StatisticsAggregatorFactory.create(modified_model, dataset) for algorithm in self.algorithms: - algo_statistic_points = algorithm.get_statistic_points(modified_model) + algo_statistic_points = algorithm.get_statistic_points(modified_model, modified_model_graph) statistics_aggregator.register_statistic_points(algo_statistic_points) - statistics_aggregator.collect_statistics(modified_model) + statistics_aggregator.collect_statistics(modified_model, modified_model_graph) statistic_points = statistics_aggregator.statistic_points - for algorithm in self.algorithms: - modified_model = algorithm.apply(modified_model, statistic_points) + for algorithm in self.algorithms[:-1]: + modified_model = algorithm.apply(modified_model, modified_model_graph, statistic_points) + modified_model_graph = NNCFGraphFactory.create(modified_model) + # building the model graph is not required after the last algorithm + modified_model = self.algorithms[-1].apply(modified_model, modified_model_graph, statistic_points) + return modified_model diff --git a/nncf/quantization/algorithms/smooth_quant/__init__.py 
b/nncf/quantization/algorithms/smooth_quant/__init__.py new file mode 100644 index 00000000000..9b29b47534a --- /dev/null +++ b/nncf/quantization/algorithms/smooth_quant/__init__.py @@ -0,0 +1,10 @@ +# Copyright (c) 2023 Intel Corporation +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. diff --git a/nncf/quantization/algorithms/smooth_quant/algorithm.py b/nncf/quantization/algorithms/smooth_quant/algorithm.py new file mode 100644 index 00000000000..7a121f31ea7 --- /dev/null +++ b/nncf/quantization/algorithms/smooth_quant/algorithm.py @@ -0,0 +1,361 @@ +# Copyright (c) 2023 Intel Corporation +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# Copyright (c) 2023 Intel Corporation +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from collections import Counter +from collections import defaultdict +from copy import deepcopy +from typing import Dict, List, Optional, Tuple, TypeVar + +from tqdm import tqdm + +from nncf import Dataset +from nncf.common.factory import ModelTransformerFactory +from nncf.common.graph.graph import NNCFGraph +from nncf.common.graph.graph import NNCFNode +from nncf.common.graph.transformations.commands import TargetType +from nncf.common.graph.transformations.layout import TransformationLayout +from nncf.common.logging import nncf_logger +from nncf.common.tensor_statistics.statistic_point import StatisticPoint +from nncf.common.tensor_statistics.statistic_point import StatisticPointsContainer +from nncf.common.utils.backend import BackendType +from nncf.common.utils.backend import get_backend +from nncf.quantization.algorithms.algorithm import Algorithm +from nncf.quantization.algorithms.smooth_quant.backend import ALGO_BACKENDS + +TModel = TypeVar("TModel") +TTensor = TypeVar("TTensor") +STATISTIC_BRANCH_KEY = "abs_max" + + +class SmoothQuant(Algorithm): + """ + Post-training SmoothQuant algorithm implementation. + + The main purpose of this algorithm is to reduce activation quantization error + via the insertion of nodes with smoothing scales for weighted layers. 
+ """ + + def __init__(self, subset_size: int = 300, inplace_statistics: bool = True, alpha: Optional[int] = 0.95): + """ + :param subset_size: Size of a subset for the statistics collection, + default is 300. + :param inplace_statistics: Defines whether to calculate quantizers statistics + by backend graph operations or by default Python implementation, defaults + to True. + :param alpha: The parameter that regulates the calculation of the scale. + The default value is 0.95. Negative value switches off the algorithm. + """ + + if alpha < 0: + raise RuntimeError("Smooth Quant algorithm does not support negative alpha parameter!") + + super().__init__() + self._subset_size = subset_size + self._inplace_statistics = inplace_statistics + self._backend_entity = None + self._alpha = alpha + self._algorithm_key = f"SQ_{hash(self)}" + self._cached_multiply_names = Counter() + + @property + def available_backends(self) -> Dict[str, BackendType]: + return ALGO_BACKENDS.registry_dict + + def _set_backend_entity(self, model: TModel) -> None: + """ + Creates a helper class with a backed-specific logic of the algorithm. + + :param model: Backend-specific input model. + """ + model_backend = get_backend(model) + if model_backend == BackendType.OPENVINO: + from nncf.quantization.algorithms.smooth_quant.openvino_backend import OVSmoothQuantAlgoBackend + + self._backend_entity = OVSmoothQuantAlgoBackend() + else: + raise RuntimeError( + "Cannot return backend-specific entity because {} is not supported!".format(model_backend) + ) + + def apply( + self, + model: TModel, + graph: NNCFGraph, + statistic_points: Optional[StatisticPointsContainer] = None, + dataset: Optional[Dataset] = None, + ) -> TModel: + self._set_backend_entity(model) + + nodes_to_smooth_data = self._get_nodes_to_smooth_data(graph) + model_transformer = ModelTransformerFactory.create(model) + transformation_layout = TransformationLayout() + + node_groups = self._group_nodes_by_source(nodes_to_smooth_data, graph) + + best_scale = None + for group_id, nodes in tqdm(node_groups.items(), desc="Applying Smooth Quant"): + best_ratio = 0.0 + empty_statistic = False + for node_to_smooth in nodes: + source_node, input_port_id, source_output_port_id, _ = group_id + activations_value = self._get_statistics_for_node( + statistic_points, node_to_smooth.node_name, input_port_id + ) + if any(val is None for val in activations_value): + empty_statistic = True + break + activations_value = self._backend_entity.clip_statistics(activations_value) + + weight_port = self._backend_entity.get_weight_tensor_port_id(node_to_smooth) + weight_value = self._backend_entity.get_weight_value(node_to_smooth, model, weight_port) + weight_statistics = self._process_weight_statistics(node_to_smooth, weight_value, weight_port) + weight_statistics = self._backend_entity.clip_statistics(weight_statistics) + + scales, ratio = self._backend_entity.calculate_scale_and_ratio( + activations_value, weight_statistics, self._alpha + ) + + if ratio > best_ratio: + best_ratio = ratio + best_scale = deepcopy(scales) + + if empty_statistic: + nncf_logger.debug( + f"Skipped SmoothQuant for nodes after {source_node.node_name} because of the empty statistics." + ) + continue + + if best_scale is None: + nncf_logger.debug( + f"Skipped SmoothQuant for nodes after {source_node.node_name} because of the empty scale." 
+ ) + continue + + for node_to_smooth in nodes: + weights_scale = self._calculate_weight_scale(best_scale, node_to_smooth) + weight_port = self._backend_entity.get_weight_tensor_port_id(node_to_smooth) + weight_value = self._backend_entity.get_weight_value(node_to_smooth, model, weight_port) + scaled_weight = weight_value * weights_scale + weight_update_command = self._backend_entity.weight_update_command( + node_to_smooth, scaled_weight, weight_port + ) + transformation_layout.register(weight_update_command) + + activations_shape = graph.get_output_edges(source_node)[source_output_port_id].tensor_shape + activation_scale = self._calculate_activation_scale(best_scale, activations_shape, nodes, graph) + + scale_node_name = self._create_scale_node_name(source_node.node_name, source_output_port_id) + scale_insertion_command = self._backend_entity.scale_insertion_command( + source_node, activation_scale, source_output_port_id, nodes, scale_node_name + ) + transformation_layout.register(scale_insertion_command) + + transformed_model = model_transformer.transform(transformation_layout) + return transformed_model + + def _group_nodes_by_source(self, nodes_to_smooth: List[Dict], nncf_graph: NNCFGraph) -> Dict[tuple, List]: + """ + Groups nodes that will be smoothed by source (parent node). + + :param nodes_to_smooth: List of the nodes that will be smoothed. + :param nncf_graph: NNCFGraph instance. + :return: Dictionary with the source info as key and grouped nodes as value. + """ + groups = defaultdict(list) + for node_data in nodes_to_smooth: + node_to_smooth = node_data["node_to_smooth"] + input_act_port = node_data["input_act_port"] + + source_node = nncf_graph.get_input_edges(node_to_smooth)[input_act_port].from_node + edge = nncf_graph.get_edge(source_node, node_to_smooth) + # Such a group_id (with node, ports, and shape as a hash) allows us to be confident + # that all sensitive parameters of the successor nodes are equal. + group_id = (source_node, input_act_port, edge.output_port_id, hash(str(edge.tensor_shape))) + groups[group_id].append(node_to_smooth) + + return groups + + def _get_statistics_for_node( + self, statistic_points: StatisticPointsContainer, node_name: str, act_port: int + ) -> List[TTensor]: + """ + Collects statistics for the node. + + :param statistic_points: StatisticPointsContainer instance. + :param node_name: Name of the node for collection. + :param act_port: Activation port id. + :return: List of the TTensor instances.
+ """ + + def filter_func(point: StatisticPoint) -> bool: + return ( + self._algorithm_key in point.algorithm_to_tensor_collectors + and point.target_point.type == TargetType.PRE_LAYER_OPERATION + and point.target_point.port_id == act_port + ) + + statistics_for_node = [] + for tensor_collector in statistic_points.get_algo_statistics_for_node( + node_name, filter_func, self._algorithm_key + ): + statistics_for_node.append(tensor_collector.get_statistics()[STATISTIC_BRANCH_KEY]) + return statistics_for_node + + def get_statistic_points(self, model: TModel, graph: NNCFGraph) -> StatisticPointsContainer: + statistic_container = StatisticPointsContainer() + + self._set_backend_entity(model) + + nodes_to_smooth_data = self._get_nodes_to_smooth_data(graph) + + for node_data in nodes_to_smooth_data: + node_to_smooth = node_data["node_to_smooth"] + target_point = self._backend_entity.target_point( + TargetType.PRE_LAYER_OPERATION, + target_node_name=node_to_smooth.node_name, + port_id=node_data["input_act_port"], + ) + input_reduction_shape = self._calculate_input_reduction_shape( + graph, node_to_smooth, node_data["input_act_port"] + ) + stat_collector = self._backend_entity.get_abs_max_channel_collector( + self._subset_size, input_reduction_shape, self._inplace_statistics, STATISTIC_BRANCH_KEY + ) + statistic_container.add_statistic_point( + StatisticPoint( + target_point=target_point, + tensor_collector=stat_collector, + algorithm=self._algorithm_key, + ) + ) + return statistic_container + + def _get_nodes_to_smooth_data(self, nncf_graph: NNCFGraph) -> List[Dict]: + """ + Collects layers whose activations will be smoothed. + + :param nncf_graph: NNCFGraph instance. + :return: List with the data for each layer. + """ + nodes_with_weights = nncf_graph.get_nodes_by_metatypes(self._backend_entity.weighted_metatypes) + nodes_to_smooth_data = [] + + for node_with_weight in nodes_with_weights: + if not self._backend_entity.is_node_with_weights(node_with_weight): + continue + + ports_map = self._backend_entity.get_input_ports_map(node_with_weight, nncf_graph) + weight_node = nncf_graph.get_input_edges(node_with_weight)[ports_map["weight"]].from_node + + # Skipping shared weights + if len(nncf_graph.get_next_nodes(weight_node)) > 1: + continue + + nodes_to_smooth_data.append( + { + "node_to_smooth": node_with_weight, + "input_act_port": ports_map["activation"], + } + ) + return nodes_to_smooth_data + + def _calculate_activation_scale( + self, scale_value: TTensor, activations_shape: List[int], nodes: List[NNCFNode], nncf_graph: NNCFGraph + ) -> TTensor: + """ + Calculates activation scales for Smooth node. + + :param scale_value: Base scale value. + :param activations_shape: activation tensor shape. + :param nodes: List of consumers for Smooth node. + :return: Calculated per-channel activation scale. 
+ """ + activation_ports_map = { + node: self._backend_entity.get_input_ports_map(node, nncf_graph)["activation"] for node in nodes + } + channel_axes = [ + self._backend_entity.get_activation_channel_axis(node, port) for node, port in activation_ports_map.items() + ] + channel_axis = channel_axes[0] + + if not all(axis == channel_axis for axis in channel_axes): + raise RuntimeError(f"Channel axes for nodes {[n.node_name for n in nodes]} are not identical") + + activations_size = len(activations_shape) + return self._backend_entity.calculate_activation_scale(scale_value, activations_size, channel_axis) + + def _calculate_weight_scale(self, scale_value: TTensor, node: NNCFNode) -> TTensor: + """ + Calculates scale for weight tensor. + + :param scale_value: Base scale value. + :param node: Consumer for Smooth node. + :return: Calculated scale for weights. + """ + port_id = self._backend_entity.get_weight_tensor_port_id(node) + weights_size = len(node.layer_attributes.constant_attributes[port_id]["shape"]) + if weights_size > 1: + channel_axis = self._backend_entity.get_weight_channel_axis(node, port_id) + return self._backend_entity.calculate_weight_scale(scale_value, weights_size, channel_axis) + return scale_value + + def _calculate_input_reduction_shape(self, nncf_graph: NNCFGraph, node: NNCFNode, input_port: int) -> Tuple[int]: + """ + Returns reduction shape for specified input. + + :param nncf_graph: NNCFGraph instance. + :param node: NNCFNode to check. + :param input_port: Specified input port id. + :return: Calculated reduction shape. + """ + shape = nncf_graph.get_input_edges(node)[input_port].tensor_shape + reduction_shape = tuple([0]) + if len(shape) > 1: + channel_axis = self._backend_entity.get_activation_channel_axis(node, input_port) + reduction_shape = self._backend_entity.get_channel_agnostic_reduction_shape(channel_axis, shape) + return reduction_shape + + def _process_weight_statistics(self, node: NNCFNode, weights: TTensor, port_id: int) -> TTensor: + """ + Returns processed weight statistics for node. + + :param node: NNCFNode to check. + :param weights: Backend-specific weights. + :param port_id: Weight port id. + :return: Weight statistic for node. + """ + channel_axis = 0 + if len(weights.shape) > 1: + channel_axis = self._backend_entity.get_weight_channel_axis(node, port_id) + return self._backend_entity.process_weight_statistics(weights, channel_axis) + + def _create_scale_node_name(self, source_name: str, source_port_id: int) -> str: + """ + Returns uniqie scale node name for new layer. + + :param source_name: Source layer name. + :param source_port_id: Source port id. + :return: Generated uniqie name. + """ + scale_node_name = f"{source_name}_{source_port_id}" + unique_index = self._cached_multiply_names[scale_node_name] + self._cached_multiply_names[scale_node_name] += 1 + return f"{scale_node_name}_{unique_index}/sq_multiply" diff --git a/nncf/quantization/algorithms/smooth_quant/backend.py b/nncf/quantization/algorithms/smooth_quant/backend.py new file mode 100644 index 00000000000..dda7fc44d2d --- /dev/null +++ b/nncf/quantization/algorithms/smooth_quant/backend.py @@ -0,0 +1,238 @@ +# Copyright (c) 2023 Intel Corporation +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from abc import ABC +from abc import abstractmethod +from typing import Dict, List, Optional, Tuple, TypeVar + +from nncf.common.graph import NNCFGraph +from nncf.common.graph import NNCFNode +from nncf.common.graph.operator_metatypes import OperatorMetatype +from nncf.common.graph.transformations.commands import TargetPoint +from nncf.common.graph.transformations.commands import TargetType +from nncf.common.graph.transformations.commands import TransformationCommand +from nncf.common.utils.registry import Registry +from nncf.experimental.common.tensor_statistics.collectors import TensorCollector + +TModel = TypeVar("TModel") +TTensor = TypeVar("TTensor") +ALGO_BACKENDS = Registry("algo_backends") + + +class SmoothQuantAlgoBackend(ABC): + @property + @abstractmethod + def weighted_metatypes(self) -> List[OperatorMetatype]: + """ + Property for the backend-specific metatypes. + """ + + @staticmethod + @abstractmethod + def target_point(target_type: TargetType, target_node_name: str, port_id: int) -> TargetPoint: + """ + Returns backend-specific target point. + + :param target_type: Type of the location that should be modified. + :param target_node_name: Name of the located node. + :param port_id: Port ID of the tensor for the statistics distribution. + :return: Backend-specific TargetPoint. + """ + + @staticmethod + @abstractmethod + def is_node_with_weights(node: NNCFNode) -> bool: + """ + Checks whether the node with weights or not. + + :param node: NNCFNode to check. + :return: boolean indicating whether the node has weights or not. + """ + + @staticmethod + @abstractmethod + def get_input_ports_map(node: NNCFNode, nncf_graph: NNCFGraph) -> Dict[str, int]: + """ + Returns map with activation & weighted ports. + + :param node: NNCFNode to check. + :param nncf_graph: NNCFGraph instance. + :return: Map with the activation & weighted ports. + """ + + @staticmethod + @abstractmethod + def get_channel_agnostic_reduction_shape(channel_axis: int, shape: Tuple[int]) -> Tuple[int]: + """ + Returns filtered reduction shape without axes that corresponds channels. + + :param channel_axes: List of the channel axes. + :param shape: Shape that need to be filtered. + :return: Reduction shape in tuple format. + """ + + @staticmethod + @abstractmethod + def get_abs_max_channel_collector( + num_samples: int, stats_reduction_shape: Tuple[int], inplace: bool, branch_key: str + ) -> TensorCollector: + """ + Returns TensorCollector with MaxAggregator and AbsMaxReducer. + + :param stats_reduction_shape: Calculated reduction shape. + :param inplace: Whether to calculate statistic inplace or not. + :param branch_key: Specific string for branch key. + :return: TensorCollector instance. + """ + + @staticmethod + @abstractmethod + def process_weight_statistics(weights: TTensor, channel_axis: int) -> TTensor: + """ + Returns processed weight statistics for node. + + :param weights: Weights tensor. + :param channel_axis: Channel axis for calculation. + :return: Weight statistics. 
+ """ + + @staticmethod + @abstractmethod + def get_weight_value(node_with_weight: NNCFNode, model: TModel, port_id: int) -> TTensor: + """ + Returns the weight value for the node with weight. + + :param node_with_weight: The node with weight. + :param model: The model that contains this operation. + :param port_id: The input port ID to get weight input. + :return: The weight value. + """ + + @staticmethod + @abstractmethod + def get_weight_tensor_port_id(node: NNCFNode) -> int: + """ + Returns node's input port indices with weights tensors. + + :param node: NNCFNode to find its weights input port indices. + :return: Weights input port indices. + """ + + @staticmethod + @abstractmethod + def clip_statistics(statistics: TTensor) -> TTensor: + """ + Clips statistics for further calculation. + + :param statistics: Input statistics. + :return: Clipped statistics. + """ + + @staticmethod + @abstractmethod + def calculate_scale_and_ratio( + activations: TTensor, weights: TTensor, alpha: float, quantile: Optional[float] + ) -> Tuple[TTensor, TTensor]: + """ + Calculates base scale value and it's ratio. + + :param activations: Activation statistics value. + :param weights: Weights statistics value. + :param alpha: Base value for exponentiation. + :param quantile: Base quantile value. + :return: Calculated base scale value & ratio. + """ + + @staticmethod + @abstractmethod + def calculate_activation_scale(scale_value: TTensor, activations_size: int, channel_axis: int) -> TTensor: + """ + Calculates activation scales for Smooth node. + + :param scale_value: Base scale value. + :param activations_size: Size of the activation shape. + :param channel_axis: Axis for shape calculation. + :return: Calculated activation scale. + """ + + @staticmethod + @abstractmethod + def calculate_weight_scale(scale_value: TTensor, weights_size: int, channel_axis: int) -> TTensor: + """ + Calculates scale for weight tensor. + + :param scale_value: Base scale value. + :param weights_size: Size of the weights shape. + :param channel_axis: Axis for shape calculation. + :return: Calculated scale for weights. + """ + + @staticmethod + @abstractmethod + def weight_update_command( + node_with_weight: NNCFNode, weight_value: TTensor, weight_port_id: int + ) -> TransformationCommand: + """ + Returns command to update weights. + + :param node_with_weight: NNCFNode instance. + :param weight_value: New weight value. + :param weight_port_id: Weight port id. + :return: TransformationCommand instance. + """ + + @staticmethod + @abstractmethod + def scale_insertion_command( + source_node: NNCFNode, scale_value: TTensor, port_id: int, nodes: List[NNCFNode] + ) -> TransformationCommand: + """ + Returns command to insert Smooth Quant node. + + :param source_node: NNCFNode instance. + :param scale_value: Smooth Quant value. + :param port_id: Output port for source node. + :param nodes: List of consumers for Smooth node. + :return: TransformationCommand instance. + """ + + @staticmethod + @abstractmethod + def get_activation_channel_axis(node: NNCFNode, port_id: int) -> int: + """ + Returns axis number of the activation tensor which correspond to it channel. + + :param node: NNCFNode instance. + :param port_id: Specified input port id. + :return: Channel axis number. + """ + + @staticmethod + @abstractmethod + def get_weight_channel_axis(node: NNCFNode, port_id: int) -> int: + """ + Returns axis number of the weight tensor which correspond to it channel. + + :param node: NNCFNode instance. + :param port_id: Specified input port id. 
+ :return: Channel axis number. + """ + + @staticmethod + @abstractmethod + def calculate_port_based_channel_axis(port_id: int, transpose: bool) -> int: + """ + Returns port-based channel axis. + + :param port_id: Specified input port id. + :param transpose: Transpose position. + :return: Channel axis. + """ diff --git a/nncf/quantization/algorithms/smooth_quant/openvino_backend.py b/nncf/quantization/algorithms/smooth_quant/openvino_backend.py new file mode 100644 index 00000000000..415ea5a626a --- /dev/null +++ b/nncf/quantization/algorithms/smooth_quant/openvino_backend.py @@ -0,0 +1,182 @@ +# Copyright (c) 2023 Intel Corporation +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from typing import Dict, List, Optional, Tuple + +import numpy as np +import openvino.runtime as ov + +from nncf.common.graph import NNCFGraph +from nncf.common.graph import NNCFNode +from nncf.common.graph.operator_metatypes import OperatorMetatype +from nncf.common.graph.transformations.commands import TargetType +from nncf.common.utils.backend import BackendType +from nncf.experimental.common.tensor_statistics.collectors import MaxAggregator +from nncf.experimental.common.tensor_statistics.collectors import TensorCollector +from nncf.openvino.graph.metatypes.openvino_metatypes import OVMatMulMetatype +from nncf.openvino.graph.node_utils import get_channel_agnostic_reduction_shape +from nncf.openvino.graph.node_utils import get_weight_value +from nncf.openvino.graph.transformations.command_creation import OVCommandCreator +from nncf.openvino.graph.transformations.commands import OVMultiplyInsertionCommand +from nncf.openvino.graph.transformations.commands import OVTargetPoint +from nncf.openvino.graph.transformations.commands import OVWeightUpdateCommand +from nncf.openvino.statistics.collectors import OVAbsMaxReducer +from nncf.openvino.statistics.collectors import OVNNCFCollectorTensorProcessor +from nncf.quantization.algorithms.smooth_quant.backend import ALGO_BACKENDS +from nncf.quantization.algorithms.smooth_quant.backend import SmoothQuantAlgoBackend + + +@ALGO_BACKENDS.register(BackendType.OPENVINO) +class OVSmoothQuantAlgoBackend(SmoothQuantAlgoBackend): + @property + def weighted_metatypes(self) -> List[OperatorMetatype]: + return [OVMatMulMetatype] + + @staticmethod + def target_point(target_type: TargetType, target_node_name: str, port_id: int) -> OVTargetPoint: + return OVTargetPoint(target_type, target_node_name, port_id) + + @staticmethod + def is_node_with_weights(node: NNCFNode) -> bool: + return node.layer_attributes and node.layer_attributes.constant_attributes + + @staticmethod + def get_input_ports_map(node: NNCFNode, nncf_graph: NNCFGraph) -> Dict[str, int]: + weight_ports = node.layer_attributes.get_const_port_ids() + activation_ports = [ + e.input_port_id for e in nncf_graph.get_input_edges(node) if e.input_port_id not in weight_ports + ] + + if len(weight_ports) != 1 or len(activation_ports) != 1: + raise RuntimeError(f"Too many weight or activation ports for {node.node_name} node") + + return 
{"activation": activation_ports[0], "weight": weight_ports[0]} + + @staticmethod + def get_channel_agnostic_reduction_shape(channel_axis: int, shape: Tuple[int]) -> Tuple[int]: + return get_channel_agnostic_reduction_shape([channel_axis], shape) + + @staticmethod + def get_abs_max_channel_collector( + num_samples: int, stats_reduction_shape: Tuple[int], inplace: bool, branch_key: str + ) -> TensorCollector: + collector = TensorCollector() + reducer = OVAbsMaxReducer(stats_reduction_shape, inplace) + aggregator = MaxAggregator(OVNNCFCollectorTensorProcessor, num_samples) + collector.register_statistic_branch(branch_key, reducer, aggregator) + return collector + + @staticmethod + def process_weight_statistics(weights: np.ndarray, channel_axis: int) -> np.ndarray: + if len(weights.shape) > 1: + base_axes = list(range(weights.ndim - 2)) + transpose_axes = base_axes + [-1, -2] + weights = np.transpose(weights, axes=transpose_axes) + return np.max(np.abs(weights), axis=channel_axis) + + @staticmethod + def get_weight_value(node_with_weight: NNCFNode, model: ov.Model, port_id: int) -> np.ndarray: + return get_weight_value(node_with_weight, model, port_id) + + @staticmethod + def get_weight_tensor_port_id(node: NNCFNode) -> int: + const_ids = node.layer_attributes.get_const_port_ids() + if len(const_ids) != 1: + raise RuntimeError(f"Found more than 1 port for {node.node_name} node") + return const_ids[0] + + @staticmethod + def clip_statistics(statistics: np.ndarray) -> np.ndarray: + a_min = 1e-5 + squeezed = np.squeeze(statistics) + return np.clip(squeezed, a_min=a_min, a_max=None) + + @staticmethod + def calculate_scale_and_ratio( + activations: np.ndarray, weights: np.ndarray, alpha: float, quantile: Optional[float] = 0.1 + ) -> np.ndarray: + scales = np.power(activations, alpha) / (np.power(weights, 1 - alpha) + np.finfo(float).eps) + + a_min = np.quantile(scales, quantile) + a_max = 1e2 + + scales = np.clip(scales, a_min=a_min, a_max=a_max) + ratio = scales.min() / (scales.max() + np.finfo(float).eps) + return scales, ratio + + @staticmethod + def calculate_activation_scale(scale_value: np.ndarray, activations_size: int, channel_axis: int) -> np.ndarray: + activation_scale = scale_value ** (-1) + if activations_size > 1: + reshape_shape = np.ones(activations_size, dtype=np.int64) + reshape_shape[channel_axis] = activation_scale.size + activation_scale = np.reshape(activation_scale, reshape_shape) + return activation_scale + + @staticmethod + def calculate_weight_scale(scale_value: np.ndarray, weights_size: int, channel_axis: int) -> np.ndarray: + weight_scale = scale_value + if weights_size > 1: + reshape_shape = np.ones(weights_size, dtype=np.int64) + reshape_shape[channel_axis] = scale_value.size + weight_scale = np.reshape(scale_value, reshape_shape) + return weight_scale + + @staticmethod + def weight_update_command( + node_with_weight: NNCFNode, weight_value: np.ndarray, weight_port_id: int + ) -> OVWeightUpdateCommand: + return OVCommandCreator.create_command_to_update_weight(node_with_weight, weight_value, weight_port_id) + + @staticmethod + def scale_insertion_command( + source_node: NNCFNode, scale_value: np.ndarray, port_id: int, nodes: List[NNCFNode], scale_node_name: str + ) -> OVMultiplyInsertionCommand: + return OVCommandCreator.multiply_insertion_command(source_node, nodes, port_id, scale_value, scale_node_name) + + @staticmethod + def get_activation_channel_axis(node: NNCFNode, port_id: int) -> int: + channel_axis = 1 + + if node.metatype == OVMatMulMetatype: + if port_id > 1: 
+ raise RuntimeError(f"{OVMatMulMetatype.name} can not take more than 2 input tensors.") + + if ( + node.layer_attributes is not None + and node.layer_attributes.input_attributes is not None + and "transpose" in node.layer_attributes.input_attributes + ): + transpose = node.layer_attributes.input_attributes["transpose"] + channel_axis = OVSmoothQuantAlgoBackend.calculate_port_based_channel_axis(port_id, transpose) + + return channel_axis + + @staticmethod + def get_weight_channel_axis(node: NNCFNode, port_id: int) -> int: + channel_axis = 1 if node.metatype.const_channel_axis is None else node.metatype.const_channel_axis[0] + + if port_id not in node.layer_attributes.constant_attributes: + raise RuntimeError(f"{node.node_name} should contain {port_id} in the attributes map.") + + if node.metatype == OVMatMulMetatype: + if port_id > 1: + raise RuntimeError(f"{OVMatMulMetatype.name} can not take more than 2 input tensors.") + + if "transpose" in node.layer_attributes.constant_attributes[port_id]: + transpose = node.layer_attributes.constant_attributes[port_id]["transpose"] + channel_axis = OVSmoothQuantAlgoBackend.calculate_port_based_channel_axis(port_id, transpose) + + return channel_axis + + @staticmethod + def calculate_port_based_channel_axis(port_id: int, transpose: bool) -> int: + return -2 + port_id if transpose else -1 - port_id diff --git a/nncf/quantization/fake_quantize.py b/nncf/quantization/fake_quantize.py index 8093e7a9fbc..4b68187e68e 100644 --- a/nncf/quantization/fake_quantize.py +++ b/nncf/quantization/fake_quantize.py @@ -287,3 +287,27 @@ def _calculate_scaled_parameters( input_low *= (export_levels - 1) / (levels - 1) return input_low, input_high, export_levels + + +def calculate_scale_zero_point( + input_low: np.ndarray, input_high: np.ndarray, level_low: int, level_high: int, narrow_range: bool +) -> Tuple[np.ndarray, np.ndarray]: + """ + Calculates scale and zero_point values for the quantizer. + + :param input_low: The minimum limit for an input value based on collected statistics. + :param input_high: The maximum limit for an input value based on collected statistics. + :param level_low: The minimum level in the integer range to quantize. + The default is "0" for an unsigned range, and "-2^(bit-1)" for a signed one . + :param level_high: The maximum level in the integer range to quantize. + The default is "2^bits-1" for an unsigned range, and "2^(bit-1)-1" for a signed one. + :param narrow_range: True if the range of quantized values is narrowed as compared to the + naive case, False otherwise. + :return: Scale and Zero point values. + """ + levels = level_high - level_low if narrow_range else level_high - level_low + 1 + scale = np.array((input_high - input_low) / (levels - 1)).astype(np.float32) + expected_level_low = level_low + 1 if narrow_range else level_low + zero_point = expected_level_low - np.round(input_low / scale) + zero_point = np.clip(zero_point.astype(np.int32), level_low, level_high) + return scale, zero_point diff --git a/nncf/quantization/passes.py b/nncf/quantization/passes.py index f422b2e3777..e6af74c8271 100644 --- a/nncf/quantization/passes.py +++ b/nncf/quantization/passes.py @@ -10,10 +10,14 @@ # limitations under the License. 
import collections -from typing import List, Optional +from typing import List, Optional, TypeVar from nncf.common.graph.graph import NNCFGraph from nncf.common.graph.operator_metatypes import OperatorMetatype +from nncf.common.utils.backend import BackendType +from nncf.common.utils.backend import get_backend + +TModel = TypeVar("TModel") def transform_to_inference_graph( @@ -116,3 +120,19 @@ def filter_constant_nodes( constant_nodes = [node for node in nncf_graph.get_all_nodes() if node not in visited_nodes] nncf_graph.remove_nodes_from(constant_nodes) return nncf_graph + + +def insert_null_biases_pass(model: TModel, graph: NNCFGraph) -> TModel: + """ + This pass finds and inserts zero biases to the given model for the layers that should have it. + + :param model: Model instance. + :param graph: NNCFGraph instance. + :return: Updated Model instance with zero biases + """ + model_backend = get_backend(model) + if model_backend == BackendType.OPENVINO: + from nncf.openvino.graph.model_utils import insert_null_biases + + return insert_null_biases(model, graph) + return model diff --git a/nncf/quantization/quantize_model.py b/nncf/quantization/quantize_model.py index 44d8ba84bfe..c52dedd7021 100644 --- a/nncf/quantization/quantize_model.py +++ b/nncf/quantization/quantize_model.py @@ -9,7 +9,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -from typing import Any, Callable, Iterable, Optional +from typing import Any, Callable, Iterable, List, Optional, Tuple, TypeVar, Union from nncf.api.compression import TModel from nncf.common.quantization.structs import QuantizationPreset @@ -22,8 +22,14 @@ from nncf.parameters import TargetDevice from nncf.quantization.advanced_parameters import AdvancedAccuracyRestorerParameters from nncf.quantization.advanced_parameters import AdvancedQuantizationParameters +from nncf.quantization.algorithms.accuracy_control.evaluator import MetricResults +from nncf.quantization.algorithms.hyperparameter_tuner.algorithm import HyperparameterTuner +from nncf.quantization.algorithms.hyperparameter_tuner.param_grid import get_quantization_param_grid +from nncf.quantization.algorithms.post_training.algorithm import PostTrainingQuantization from nncf.scopes import IgnoredScope +TTensor = TypeVar("TTensor") + @api(canonical_alias="nncf.quantize") def quantize( @@ -161,8 +167,7 @@ def quantize_with_accuracy_control( :type calibration_dataset: nncf.Dataset :param validation_dataset: A dataset for the validation process. :type validation_dataset: nncf.Dataset - :param validation_fn: A validation function to validate the model. It should take - two argumets: + :param validation_fn: A validation function to validate the model. It should take two arguments: - `model`: model to be validate. - `validation_dataset`: dataset that provides data items to validate the provided model. @@ -218,3 +223,95 @@ def quantize_with_accuracy_control( ) raise RuntimeError(f"Unsupported type of backend: {backend}") + + +@api(canonical_alias="nncf.compress_weights") +def compress_weights(model: TModel) -> TModel: + """ + Compress model weights. + + :param model: A model to be compressed. + :return: The non-trainable model with compressed weights. 
+ """ + backend = get_backend(model) + if backend == BackendType.TORCH: + from nncf.torch.quantization.quantize_model import compress_weights_impl + + return compress_weights_impl(model) + if backend == BackendType.OPENVINO: + from nncf.openvino.quantization.quantize_model import compress_weights_impl + + return compress_weights_impl(model) + + raise RuntimeError(f"Unsupported type of backend: {backend}") + + +def quantize_with_tune_hyperparams( + model: TModel, + calibration_dataset: Dataset, + validation_dataset: Dataset, + validation_fn: Callable[[Any, Iterable[Any]], Tuple[float, Union[None, List[float], List[List[TTensor]]]]], + initial_metric_results: MetricResults, + quantized_metric_results: MetricResults, + tuner_subset_size: int = 300, + preset: QuantizationPreset = QuantizationPreset.PERFORMANCE, + target_device: TargetDevice = TargetDevice.ANY, + subset_size: int = 300, + fast_bias_correction: bool = True, + model_type: Optional[ModelType] = None, + ignored_scope: Optional[IgnoredScope] = None, + advanced_quantization_parameters: Optional[AdvancedQuantizationParameters] = None, +) -> TModel: + """ + Applies post-training quantization algorithm with tune hyperparameters to provided model. + + :param model: A model to be quantized. + :param calibration_dataset: A representative dataset for the calibration process. + :param validation_dataset: : A dataset for the validation process. + :param validation_fn: A validation function to validate the model. + :param initial_metric_results: Initial metric results. + :param quantized_metric_results: Quantized metric results. + :param tuner_subset_size: Tuner subset size. + :param preset: A preset that controls the quantization mode. + :param target_device: A target device the specificity of which will be taken + into account while compressing in order to obtain the best performance + for this type of device. + :param subset_size: Size of a subset to calculate activations + statistics used for quantization. + :param fast_bias_correction: Setting this option to `False` enables a different + bias correction method which is more accurate, in general, and takes + more time but requires less memory. + :param model_type: Model type is needed to specify additional patterns + in the model. Supported only `transformer` now. + :param ignored_scope: An ignored scope that defined the list of model control + flow graph nodes to be ignored during quantization. + :param advanced_quantization_parameters: Advanced quantization parameters for + fine-tuning the quantization algorithm. + :return: The quantized model. + """ + init_quantization_params = { + "preset": preset, + "target_device": target_device, + "subset_size": subset_size, + "fast_bias_correction": fast_bias_correction, + "model_type": model_type, + "ignored_scope": ignored_scope, + "advanced_parameters": advanced_quantization_parameters, + } + + quantization_param_grid = get_quantization_param_grid() + + hyperparameter_tuner = HyperparameterTuner( + PostTrainingQuantization, + init_quantization_params, + quantization_param_grid, + calibration_dataset, + validation_fn, + tuner_subset_size, + initial_metric_results, + quantized_metric_results, + ) + + quantized_model = hyperparameter_tuner.apply(model, validation_dataset) + + return quantized_model diff --git a/nncf/scopes.py b/nncf/scopes.py index ed64121b5d5..4b6adcd03cf 100644 --- a/nncf/scopes.py +++ b/nncf/scopes.py @@ -60,11 +60,15 @@ class IgnoredScope: :type patterns: List[str] :param types: List of ignored operation types. 
:type types: List[str] + :param validate: If set to True, then a RuntimeError will be raised if any ignored scope does not match + in the model graph. + :type types: bool """ names: List[str] = field(default_factory=list) patterns: List[str] = field(default_factory=list) types: List[str] = field(default_factory=list) + validate: bool = True def convert_ignored_scope_to_list(ignored_scope: Optional[IgnoredScope]) -> List[str]: @@ -96,7 +100,7 @@ def get_ignored_node_names_from_ignored_scope( If strict is False, returns all possible matches. :param ignored_scope: Given ignored scope instance. - :param nncf_grpah: Given NNCFGrpah. + :param nncf_graph: Given NNCFGraph. :param strict: Whether all ignored_scopes must match at least one node or not. :returns: NNCF node names from given NNCFGraph specified in given ignored scope. """ @@ -129,7 +133,7 @@ def get_ignored_node_names_from_ignored_scope( not_matched_patterns.append(str_pattern) matched_by_patterns.extend(matches) if strict and not_matched_patterns: - raise RuntimeError(f"No mathes for ignored patterns {not_matched_patterns} in the NNCFGraph. " + error_msg) + raise RuntimeError(f"No matches for ignored patterns {not_matched_patterns} in the NNCFGraph. " + error_msg) nncf_logger.info(f"{len(matched_by_patterns)} ignored nodes was found by patterns in the NNCFGraph") matched_by_types = [] diff --git a/nncf/telemetry/extractors.py b/nncf/telemetry/extractors.py index 279ac516229..999e0377afa 100644 --- a/nncf/telemetry/extractors.py +++ b/nncf/telemetry/extractors.py @@ -48,7 +48,6 @@ def extract(self, argvalue: Any) -> CollectedEvent: Implement this method to prepare the telemetry event data from the tracked function's argument value passed via `argvalue`. """ - pass class VerbatimTelemetryExtractor(TelemetryExtractor): diff --git a/nncf/telemetry/wrapper.py b/nncf/telemetry/wrapper.py index e677f8409b3..341f3c8c0c3 100644 --- a/nncf/telemetry/wrapper.py +++ b/nncf/telemetry/wrapper.py @@ -13,7 +13,7 @@ import sys from abc import ABC from abc import abstractmethod -from typing import Callable +from typing import Callable, Optional from unittest.mock import MagicMock from nncf import __version__ @@ -40,7 +40,13 @@ def start_session(self, category: str, **kwargs): @abstractmethod def send_event( - self, event_category: str, event_action: str, event_label: str, event_value: int = 1, force_send=False, **kwargs + self, + event_category: str, + event_action: str, + event_label: str, + event_value: Optional[int] = None, + force_send=False, + **kwargs, ): """ Send single event. 
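
To make the arithmetic of the `calculate_scale_zero_point` helper added to `nncf/quantization/fake_quantize.py` earlier in this patch easier to follow, here is a small usage sketch. Only the function itself comes from the patch; the statistics values below are made up for illustration.

```python
import numpy as np

from nncf.quantization.fake_quantize import calculate_scale_zero_point

# Illustrative per-tensor statistics (not taken from a real model).
input_low = np.array(-0.55, dtype=np.float32)
input_high = np.array(2.0, dtype=np.float32)

# Full signed INT8 grid: levels = 127 - (-128) + 1 = 256.
scale, zero_point = calculate_scale_zero_point(
    input_low, input_high, level_low=-128, level_high=127, narrow_range=False
)
print(scale)       # ~0.01 = (2.0 - (-0.55)) / 255
print(zero_point)  # ~-73: the integer level onto which the real value 0.0 is mapped
```

With these numbers, `input_low` lands on level -128 and `input_high` on level 127, which is the sanity check to run whenever the scale/zero-point pair looks suspicious.
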
@@ -74,20 +80,20 @@ def skip_if_raised(func: Callable[..., None]) -> Callable[..., None]: @functools.wraps(func) def wrapped(*args, **kwargs): try: - func() + func(*args, **kwargs) # pylint:disable=broad-except except Exception as e: - nncf_logger.debug(f"Skipped calling {func.__name__} - internally triggered exception {e}") + nncf_logger.debug(f"Skipped calling {func} - internally triggered exception {e}") return wrapped class NNCFTelemetry(ITelemetry): - MEASUREMENT_ID = "UA-17808594-29" + MEASUREMENT_ID = "G-W5E9RNLD4H" def __init__(self): try: - self._impl = Telemetry(app_name="nncf", app_version=__version__, tid=self.MEASUREMENT_ID) + self._impl = Telemetry(app_name="nncf", app_version=__version__, tid=self.MEASUREMENT_ID, backend="ga4") # pylint:disable=broad-except except Exception as e: nncf_logger.debug(f"Failed to instantiate telemetry object: exception {e}") @@ -98,8 +104,16 @@ def start_session(self, category: str, **kwargs): @skip_if_raised def send_event( - self, event_category: str, event_action: str, event_label: str, event_value: int = 1, force_send=False, **kwargs + self, + event_category: str, + event_action: str, + event_label: str, + event_value: Optional[int] = None, + force_send=False, + **kwargs, ): + if event_value is None: + event_value = 1 self._impl.send_event(event_category, event_action, event_label, event_value, force_send, **kwargs) @skip_if_raised diff --git a/nncf/tensorflow/__init__.py b/nncf/tensorflow/__init__.py index 75857c5562f..2ccd2007420 100644 --- a/nncf/tensorflow/__init__.py +++ b/nncf/tensorflow/__init__.py @@ -12,7 +12,7 @@ Base subpackage for NNCF TensorFlow functionality. """ import tensorflow -from pkg_resources import parse_version +from packaging import version from nncf import nncf_logger from nncf.common.logging.logger import warn_bkc_version_mismatch @@ -22,17 +22,17 @@ try: _tf_version = tensorflow.__version__ - tensorflow_version = parse_version(_tf_version).base_version + tensorflow_version = version.parse(_tf_version).base_version except: nncf_logger.debug("Could not parse tensorflow version") _tf_version = "0.0.0" - tensorflow_version = parse_version(_tf_version).base_version + tensorflow_version = version.parse(_tf_version).base_version tensorflow_version_major, tensorflow_version_minor = tuple(map(int, tensorflow_version.split(".")))[:2] if not tensorflow_version.startswith(BKC_TF_VERSION[:-2]): warn_bkc_version_mismatch("tensorflow", BKC_TF_VERSION, _tf_version) -elif not (tensorflow_version_major == 2 and 4 <= tensorflow_version_minor <= 11): +elif not (tensorflow_version_major == 2 and 4 <= tensorflow_version_minor <= 13): raise RuntimeError( - f"NNCF only supports 2.4.0 <= tensorflow <= 2.11.*, " f"while current tensorflow version is {_tf_version}" + f"NNCF only supports 2.4.0 <= tensorflow <= 2.13.*, while current tensorflow version is {_tf_version}" ) diff --git a/nncf/tensorflow/accuracy_aware_training/keras_model_utils.py b/nncf/tensorflow/accuracy_aware_training/keras_model_utils.py index aee34e40c0a..feb37eb200c 100644 --- a/nncf/tensorflow/accuracy_aware_training/keras_model_utils.py +++ b/nncf/tensorflow/accuracy_aware_training/keras_model_utils.py @@ -25,11 +25,11 @@ def accuracy_aware_fit( nncf_config, callbacks, initial_epoch, + uncompressed_model_accuracy, steps_per_epoch=None, batch_size=None, tensorboard_writer=None, log_dir=None, - uncompressed_model_accuracy=None, validation_data=None, validation_steps=None, result_dict_to_val_metric_fn=None, diff --git a/nncf/tensorflow/accuracy_aware_training/runner.py 
b/nncf/tensorflow/accuracy_aware_training/runner.py index 97a2ac636fc..ac3444e058b 100644 --- a/nncf/tensorflow/accuracy_aware_training/runner.py +++ b/nncf/tensorflow/accuracy_aware_training/runner.py @@ -52,7 +52,7 @@ def reset_training(self): scheduler.values = [lr * self.base_lr_reduction_factor_during_search for lr in scheduler.values] else: nncf_logger.warning( - f"Learning rate scheduler {scheduler} is not supported yet. " f"Won't change the learning rate." + f"Learning rate scheduler {scheduler} is not supported yet. Won't change the learning rate." ) self.training_epoch_count = 0 diff --git a/nncf/tensorflow/exporter.py b/nncf/tensorflow/exporter.py index f281af9f78e..6ec1a653c18 100644 --- a/nncf/tensorflow/exporter.py +++ b/nncf/tensorflow/exporter.py @@ -56,7 +56,7 @@ def export_model(self, save_path: str, save_format: str = TFExportFormat.FROZEN_ if export_fn is None: available_formats = list(format_to_export_fn.keys()) - raise ValueError(f"Unsupported saving format: '{save_format}'. " f"Available formats: {available_formats}") + raise ValueError(f"Unsupported saving format: '{save_format}'. Available formats: {available_formats}") export_fn(save_path) diff --git a/nncf/tensorflow/graph/converter.py b/nncf/tensorflow/graph/converter.py index a53be88e632..9c62dcd342a 100644 --- a/nncf/tensorflow/graph/converter.py +++ b/nncf/tensorflow/graph/converter.py @@ -22,7 +22,9 @@ from nncf.common.graph import NNCFNodeName from nncf.common.graph import OperatorMetatype from nncf.common.graph.definitions import NNCFGraphNodeType +from nncf.common.graph.graph import NNCFNode from nncf.common.graph.layer_attributes import BaseLayerAttributes +from nncf.common.graph.layer_attributes import ConvertDtypeLayerAttributes from nncf.common.graph.layer_attributes import ConvolutionLayerAttributes from nncf.common.graph.layer_attributes import Dtype from nncf.common.graph.layer_attributes import LinearLayerAttributes @@ -35,6 +37,7 @@ from nncf.common.graph.utils import get_split_axis from nncf.common.logging import nncf_logger from nncf.tensorflow.graph.metatypes import keras_layers as layer_metatypes +from nncf.tensorflow.graph.metatypes.common import CAST_METATYPES from nncf.tensorflow.graph.metatypes.common import DECONV_LAYER_METATYPES from nncf.tensorflow.graph.metatypes.common import DEPTHWISE_CONV_LAYER_METATYPES from nncf.tensorflow.graph.metatypes.common import DIMENSION_PERMUTATION_METATYPES @@ -680,7 +683,7 @@ def convert(self) -> NNCFGraph: layer_attributes = _get_layer_attributes(layer_metatype, model_layer) if layer_attributes is not None: - attrs.update({NNCFGraph.LAYER_ATTRIBUTES: layer_attributes}) + attrs.update({NNCFNode.LAYER_ATTRIBUTES: layer_attributes}) node_name = layer_name nncf_node = nncf_graph.add_nncf_node( @@ -754,6 +757,8 @@ def _get_layer_attributes( layer_attributes = _get_permutation_layer_attributes(model_layer, layer_metatype) elif layer_metatype in LAYER_METATYPES_AGNOSTIC_TO_DATA_PRECISION_WITH_MULTIPLE_OUTPUTS: layer_attributes = _get_multiple_output_layer_attributes(model_layer) + elif layer_metatype in CAST_METATYPES: + layer_attributes = _get_cast_layer_attributes(model_layer) return layer_attributes @@ -775,8 +780,10 @@ def _get_conv_layer_attributes(layer: tf.keras.layers.Layer, is_depthwise: bool layer_ = unwrap_layer(layer) layer_metatype = get_keras_layer_metatype(layer_, determine_subtype=False) strides = layer_.strides[0] + dilations = layer_.dilation_rate in_channels = layer.get_input_shape_at(0)[channel_axis] out_channels = 
layer.get_output_shape_at(0)[channel_axis] + with_bias = layer_.use_bias # TF does not deign to properly set the groups attribute on a depthwise layer, and for compatibility # with common code the groups attribute of the returned ConvolutionLayerAttribute must be set equal @@ -787,14 +794,16 @@ def _get_conv_layer_attributes(layer: tf.keras.layers.Layer, is_depthwise: bool transpose = layer_metatype in DECONV_LAYER_METATYPES return ConvolutionLayerAttributes( - layer.trainable, - in_channels, - out_channels, - kernel_size, - strides, - groups, + weight_requires_grad=layer.trainable, + in_channels=in_channels, + out_channels=out_channels, + kernel_size=kernel_size, + stride=strides, + dilations=dilations, + groups=groups, transpose=transpose, padding_values=([0, 0, 0, 0]), + with_bias=with_bias, ) @@ -802,8 +811,10 @@ def _get_linear_layer_attributes(layer: tf.keras.layers.Layer) -> LinearLayerAtt channel_axis = get_input_channel_axis(layer) in_features = layer.get_input_shape_at(0)[channel_axis] out_features = layer.get_output_shape_at(0)[channel_axis] - bias = layer.use_bias - return LinearLayerAttributes(layer.trainable, in_features, out_features, bias) + with_bias = layer.use_bias + return LinearLayerAttributes( + weight_requires_grad=layer.trainable, in_features=in_features, out_features=out_features, with_bias=with_bias + ) def _get_reshape_layer_attributes(layer: tf.keras.layers.Layer) -> ReshapeLayerAttributes: @@ -842,3 +853,9 @@ def _get_multiple_output_layer_attributes(layer: tf.keras.layers.Layer) -> Multi input_shape = [input_shape] axis = get_split_axis(input_shape, output_shape) return MultipleOutputLayerAttributes(chunks, axis) + + +def _get_cast_layer_attributes(layer: tf.keras.layers.Layer) -> ConvertDtypeLayerAttributes: + src_dtype = layer.input.dtype + dst_dtype = layer.output.dtype + return ConvertDtypeLayerAttributes(src_dtype, dst_dtype) diff --git a/nncf/tensorflow/graph/metatypes/common.py b/nncf/tensorflow/graph/metatypes/common.py index 72d8d6f0aad..af35b07bb29 100644 --- a/nncf/tensorflow/graph/metatypes/common.py +++ b/nncf/tensorflow/graph/metatypes/common.py @@ -104,20 +104,31 @@ layer_metatypes.TFZeroPadding1DLayerMetatype, layer_metatypes.TFZeroPadding2DLayerMetatype, layer_metatypes.TFZeroPadding3DLayerMetatype, + layer_metatypes.TFDropoutLayerMetatype, + layer_metatypes.TFSlicingOpLambdaMetatype, + layer_metatypes.TFPermuteLayerMetatype, # TF_OP_METATYPES_AGNOSTIC_TO_DATA_PRECISION_WITH_ONE_INPUT op_metatypes.TFIdentityOpMetatype, op_metatypes.TFPackOpMetatype, op_metatypes.TFPadOpMetatype, op_metatypes.TFStridedSliceOpMetatype, + op_metatypes.TFSliceOpMetatype, op_metatypes.TFReshapeOpMetatype, op_metatypes.TFShapeOpMetatype, op_metatypes.TFMaxOpMetatype, op_metatypes.TFMaxPoolOpMetatype, op_metatypes.TFExpandDimsOpMetatype, op_metatypes.TFSqueezeOpMetatype, + op_metatypes.TFGatherOpMetatype, op_metatypes.TFMaxPool3DOpMetatype, op_metatypes.TFTileOpMetatype, op_metatypes.TFSplitOpMetatype, + op_metatypes.TFTransposeOpMetatype, + # TFReluOpMetatype and TFReLULayerMetatype aren't considered to be QUANTIZATION_AGNOSTIC, because: + # 1. Runtime doesn't provide performance benefits by quantizing the stand-alone RELU's (ticket: 59548) + # 2. 
It's frequently better for the end accuracy to have quantizers set up after the RELU + # so that the input distribution to the quantizer is non-negative + # and we can therefore have better quantization resolution while preserving the original dynamic range ] LAYER_METATYPES_AGNOSTIC_TO_DATA_PRECISION_WITH_MULTIPLE_CONCAT_INPUTS = [ @@ -130,13 +141,10 @@ op_metatypes.TFMinimumOpMetatype, ] -LAYER_METATYPES_AGNOSTIC_TO_DATA_PRECISION_WITH_MULTIPLE_OUTPUTS = [op_metatypes.TFSplitOpMetatype] - -LAYER_METATYPES_AGNOSTIC_TO_DATA_PRECISION = ( - LAYER_METATYPES_AGNOSTIC_TO_DATA_PRECISION_WITH_ONE_INPUT - + LAYER_METATYPES_AGNOSTIC_TO_DATA_PRECISION_WITH_MULTIPLE_CONCAT_INPUTS - + LAYER_METATYPES_AGNOSTIC_TO_DATA_PRECISION_WITH_MULTIPLE_INPUTS -) +LAYER_METATYPES_AGNOSTIC_TO_DATA_PRECISION_WITH_MULTIPLE_OUTPUTS = [ + op_metatypes.TFSplitOpMetatype, + op_metatypes.TFUnPackOpMetatype, +] ELEMENTWISE_LAYER_METATYPES = [ layer_metatypes.TFAddLayerMetatype, @@ -165,6 +173,10 @@ layer_metatypes.TFPermuteLayerMetatype, ] +CAST_METATYPES = [ + op_metatypes.TFCastOpMetatype, +] + def get_operator_metatypes() -> List[Type[OperatorMetatype]]: keras_metatypes_list = list(layer_metatypes.KERAS_LAYER_METATYPES.registry_dict.values()) diff --git a/nncf/tensorflow/graph/model_transformer.py b/nncf/tensorflow/graph/model_transformer.py index 2a3fedec52c..e0ef7647d2d 100644 --- a/nncf/tensorflow/graph/model_transformer.py +++ b/nncf/tensorflow/graph/model_transformer.py @@ -346,6 +346,8 @@ def _insert_layers_before(self, layer_name: str, instance_idx: int, input_port_i # Downstream layer config update if downstream_layer_cfg["class_name"] in ["TFOpLambda", "SlicingOpLambda"]: downstream_layer_inbound_nodes[instance_idx][input_port_id][0] = config["name"] + downstream_layer_inbound_nodes[instance_idx][input_port_id][1] = 0 + downstream_layer_inbound_nodes[instance_idx][input_port_id][2] = 0 else: self._model_config["layers"][idx]["inbound_nodes"][instance_idx][input_port_id] = [ config["name"], diff --git a/nncf/tensorflow/graph/pattern_operations.py b/nncf/tensorflow/graph/pattern_operations.py index a1d85418e55..708bdc7275b 100644 --- a/nncf/tensorflow/graph/pattern_operations.py +++ b/nncf/tensorflow/graph/pattern_operations.py @@ -37,7 +37,7 @@ for layer_name in m.get_all_aliases() ) ), - "label": "ELEMENTWISE", + "label": "QUANTIZATION_AGNOSTIC", } BATCH_NORMALIZATION_OPERATIONS = { diff --git a/nncf/tensorflow/graph/transformations/commands.py b/nncf/tensorflow/graph/transformations/commands.py index facd19a6255..2f6c695f400 100644 --- a/nncf/tensorflow/graph/transformations/commands.py +++ b/nncf/tensorflow/graph/transformations/commands.py @@ -91,6 +91,9 @@ def __init__(self, target_points: List[TargetPoint]): def target_points(self) -> List[TargetPoint]: return self._target_points + def __str__(self) -> str: + return f"TFMultiLayerPoint: {[str(t) for t in self._target_points]}" + class TFLayerStateNames: LAYER_NAME = "layer_name" @@ -170,7 +173,7 @@ def __eq__(self, other: Any) -> bool: ) def __str__(self) -> str: - return " ".join([super().__str__(), self.instance_idx, str(self.input_port_id)]) + return " ".join([super().__str__(), str(self.instance_idx), str(self.input_port_id)]) def __hash__(self) -> int: return hash(str(self)) @@ -238,7 +241,7 @@ def __eq__(self, other: Any) -> bool: ) def __str__(self) -> str: - return " ".join([super().__str__(), self.instance_idx, str(self.output_port_id)]) + return " ".join([super().__str__(), str(self.instance_idx), str(self.output_port_id)]) def __hash__(self) -> 
int: return hash(str(self)) diff --git a/nncf/tensorflow/pruning/base_algorithm.py b/nncf/tensorflow/pruning/base_algorithm.py index 046a6f60f1f..12f0cdfc0e4 100644 --- a/nncf/tensorflow/pruning/base_algorithm.py +++ b/nncf/tensorflow/pruning/base_algorithm.py @@ -115,7 +115,7 @@ def get_transformation_layout(self, model: tf.keras.Model) -> TFTransformationLa converter = TFModelConverterFactory.create(model) self._graph = converter.convert() - check_scopes_in_graph(self._graph, self.ignored_scopes, self.target_scopes) + check_scopes_in_graph(self._graph, self.ignored_scopes, self.target_scopes, self.validate_scopes) groups_of_nodes_to_prune = self._pruning_node_selector.create_pruning_groups(self._graph) @@ -136,7 +136,7 @@ def get_transformation_layout(self, model: tf.keras.Model) -> TFTransformationLa # Add output_mask to elements to run mask_propagation # and detect spec_nodes that will be pruned. # It should be done for all elements of shared layer. - node.data["output_mask"] = TFNNCFTensor(tf.ones(get_output_channels(node))) + node.attributes["output_mask"] = TFNNCFTensor(tf.ones(get_output_channels(node))) if layer_name in shared_layers: continue if node.is_shared(): @@ -175,7 +175,7 @@ def get_transformation_layout(self, model: tf.keras.Model) -> TFTransformationLa for spec_node in spec_nodes: layer_name = get_layer_identifier(spec_node) layer = model.get_layer(layer_name) - if spec_node.data["output_mask"] is None: + if spec_node.attributes["output_mask"] is None: # Skip elements that will not be pruned continue if layer_name in shared_layers: diff --git a/nncf/tensorflow/pruning/filter_pruning/algorithm.py b/nncf/tensorflow/pruning/filter_pruning/algorithm.py index c4b7102bb56..2e0d33cefed 100644 --- a/nncf/tensorflow/pruning/filter_pruning/algorithm.py +++ b/nncf/tensorflow/pruning/filter_pruning/algorithm.py @@ -280,7 +280,7 @@ def _set_binary_masks_for_pruned_layers_groupwise(self, pruning_level: float): # 0. Removing masks at the elements of the NNCFGraph for node in self._original_graph.topological_sort(): - node.data.pop("output_mask", None) + node.attributes.pop("output_mask", None) # 1. Calculate masks for group in self._pruned_layer_groups_info.get_all_clusters(): @@ -298,7 +298,7 @@ def _set_binary_masks_for_pruned_layers_groupwise(self, pruning_level: float): filter_mask = calculate_binary_mask(cumulative_filters_importance, threshold) for node in group.elements: nncf_node = self._original_graph.get_node_by_id(node.nncf_node_id) - nncf_node.data["output_mask"] = TFNNCFTensor(filter_mask) + nncf_node.attributes["output_mask"] = TFNNCFTensor(filter_mask) # 2. Propagating masks across the graph mask_propagator = MaskPropagationAlgorithm( @@ -310,8 +310,8 @@ def _set_binary_masks_for_pruned_layers_groupwise(self, pruning_level: float): nncf_sorted_nodes = self._original_graph.topological_sort() for layer in wrapped_layers: nncf_node = [n for n in nncf_sorted_nodes if layer.name == n.layer_name][0] - if nncf_node.data["output_mask"] is not None: - self._set_operation_masks([layer], nncf_node.data["output_mask"].tensor) + if nncf_node.attributes["output_mask"] is not None: + self._set_operation_masks([layer], nncf_node.attributes["output_mask"].tensor) # Calculate actual flops and weights number with new masks self._update_benchmark_statistics() @@ -329,7 +329,7 @@ def _set_binary_masks_for_pruned_layers_globally(self, pruning_level: float): # 0. 
Remove masks at the elements of the NNCFGraph for node in self._original_graph.topological_sort(): - node.data.pop("output_mask", None) + node.attributes.pop("output_mask", None) # 1. Calculate masks # a. Calculate importances for all groups of filters @@ -346,7 +346,7 @@ def _set_binary_masks_for_pruned_layers_globally(self, pruning_level: float): filter_mask = calculate_binary_mask(filter_importances[group.id], threshold) for node in group.elements: nncf_node = self._original_graph.get_node_by_id(node.nncf_node_id) - nncf_node.data["output_mask"] = TFNNCFTensor(filter_mask) + nncf_node.attributes["output_mask"] = TFNNCFTensor(filter_mask) # 2. Propagate masks across the graph mask_propagator = MaskPropagationAlgorithm( @@ -358,8 +358,8 @@ def _set_binary_masks_for_pruned_layers_globally(self, pruning_level: float): nncf_sorted_nodes = self._original_graph.topological_sort() for layer in wrapped_layers: nncf_node = [n for n in nncf_sorted_nodes if layer.name == n.layer_name][0] - if nncf_node.data["output_mask"] is not None: - self._set_operation_masks([layer], nncf_node.data["output_mask"].tensor) + if nncf_node.attributes["output_mask"] is not None: + self._set_operation_masks([layer], nncf_node.attributes["output_mask"].tensor) # Calculate actual flops with new masks self._update_benchmark_statistics() @@ -377,7 +377,7 @@ def _set_binary_masks_for_pruned_modules_globally_by_flops_target(self, target_f nncf_sorted_nodes = self._original_graph.topological_sort() for layer in wrapped_layers: nncf_node = [n for n in nncf_sorted_nodes if layer.name == n.layer_name][0] - nncf_node.data["output_mask"] = TFNNCFTensor(tf.ones(get_filters_num(layer))) + nncf_node.attributes["output_mask"] = TFNNCFTensor(tf.ones(get_filters_num(layer))) # 1. Calculate importances for all groups of filters. Initialize masks. 
filter_importances = [] @@ -421,7 +421,7 @@ def _set_binary_masks_for_pruned_modules_globally_by_flops_target(self, target_f for group in self._pruned_layer_groups_info.get_all_clusters(): for node in group.elements: nncf_node = self._original_graph.get_node_by_id(node.nncf_node_id) - nncf_node.data["output_mask"] = TFNNCFTensor(masks[group.id]) + nncf_node.attributes["output_mask"] = TFNNCFTensor(masks[group.id]) mask_propagator = MaskPropagationAlgorithm( self._original_graph, TF_PRUNING_OPERATOR_METATYPES, TFNNCFPruningTensorProcessor @@ -434,10 +434,10 @@ def _set_binary_masks_for_pruned_modules_globally_by_flops_target(self, target_f nncf_sorted_nodes = self._original_graph.topological_sort() for layer in wrapped_layers: nncf_node = [n for n in nncf_sorted_nodes if layer.name == n.layer_name][0] - if nncf_node.data["output_mask"] is not None: - self._set_operation_masks([layer], nncf_node.data["output_mask"].tensor) + if nncf_node.attributes["output_mask"] is not None: + self._set_operation_masks([layer], nncf_node.attributes["output_mask"].tensor) return - raise RuntimeError(f"Unable to prune model to required flops pruning level:" f" {target_flops_pruning_level}") + raise RuntimeError(f"Unable to prune model to required flops pruning level: {target_flops_pruning_level}") def _set_operation_masks(self, layers: List[NNCFWrapper], filter_mask): for layer in layers: @@ -466,7 +466,7 @@ def _find_uniform_pruning_level_for_target_flops(self, target_flops_pruning_leve self.current_params_num = params_num return right raise RuntimeError( - f"Unable to prune the model to get the required " f"pruning level in flops = {target_flops_pruning_level}" + f"Unable to prune the model to get the required pruning level in flops = {target_flops_pruning_level}" ) def _calculate_flops_and_weights_in_uniformly_pruned_model(self, pruning_level): diff --git a/nncf/tensorflow/pruning/utils.py b/nncf/tensorflow/pruning/utils.py index 74281108390..2248d834022 100644 --- a/nncf/tensorflow/pruning/utils.py +++ b/nncf/tensorflow/pruning/utils.py @@ -15,7 +15,6 @@ import tensorflow as tf from nncf.common.graph import NNCFGraph -from nncf.common.graph import NNCFNode from nncf.common.graph import NNCFNodeName from nncf.common.logging import nncf_logger from nncf.tensorflow.graph.metatypes.common import GENERAL_CONV_LAYER_METATYPES @@ -29,10 +28,6 @@ from nncf.tensorflow.layers.wrapper import NNCFWrapper -def is_shared(node: NNCFNode) -> bool: - return node.data["is_shared"] - - def get_filter_axis(layer: NNCFWrapper, weight_attr: str) -> int: channel_axes = get_weight_channel_axis(layer, weight_attr) filter_axis = channel_axes[0] if isinstance(channel_axes, tuple) else channel_axes @@ -42,7 +37,7 @@ def get_filter_axis(layer: NNCFWrapper, weight_attr: str) -> int: def get_filters_num(layer: NNCFWrapper): layer_metatype = get_keras_layer_metatype(layer) if len(layer_metatype.weight_definitions) != 1: - raise ValueError(f"Could not calculate the number of filters " f"for the layer {layer.layer.name}.") + raise ValueError(f"Could not calculate the number of filters for the layer {layer.layer.name}.") weight_def = layer_metatype.weight_definitions[0] weight_attr = weight_def.weight_attr_name diff --git a/nncf/tensorflow/quantization/algorithm.py b/nncf/tensorflow/quantization/algorithm.py index 96a2e61deaa..c1a11f129e2 100644 --- a/nncf/tensorflow/quantization/algorithm.py +++ b/nncf/tensorflow/quantization/algorithm.py @@ -23,6 +23,8 @@ from nncf.common.graph import NNCFGraph from nncf.common.graph import NNCFNode from 
nncf.common.graph import NNCFNodeName +from nncf.common.graph.layer_attributes import ConvertDtypeLayerAttributes +from nncf.common.graph.operator_metatypes import OperatorMetatype from nncf.common.graph.transformations.commands import TargetPoint from nncf.common.graph.transformations.commands import TransformationPriority from nncf.common.graph.utils import get_first_nodes_of_type @@ -32,6 +34,7 @@ from nncf.common.logging import nncf_logger from nncf.common.quantization.config_assignment import assign_qconfig_lists_to_modules from nncf.common.quantization.quantizer_propagation.solver import QuantizerPropagationSolver +from nncf.common.quantization.quantizer_propagation.structs import IgnoreReason from nncf.common.quantization.quantizer_setup import ActivationQuantizationInsertionPoint from nncf.common.quantization.quantizer_setup import QuantizationPointId from nncf.common.quantization.quantizer_setup import SingleConfigQuantizerSetup @@ -56,11 +59,15 @@ from nncf.tensorflow.api.compression import TFCompressionAlgorithmBuilder from nncf.tensorflow.graph.converter import TFModelConverter from nncf.tensorflow.graph.converter import TFModelConverterFactory +from nncf.tensorflow.graph.metatypes.common import CAST_METATYPES from nncf.tensorflow.graph.metatypes.common import ELEMENTWISE_LAYER_METATYPES from nncf.tensorflow.graph.metatypes.common import GENERAL_CONV_LAYER_METATYPES from nncf.tensorflow.graph.metatypes.common import LINEAR_LAYER_METATYPES +from nncf.tensorflow.graph.metatypes.keras_layers import TFConcatenateLayerMetatype from nncf.tensorflow.graph.metatypes.keras_layers import TFLambdaLayerMetatype from nncf.tensorflow.graph.metatypes.keras_layers import TFLayerWithWeightsMetatype +from nncf.tensorflow.graph.metatypes.tf_ops import TFConcatOpMetatype +from nncf.tensorflow.graph.metatypes.tf_ops import TFIdentityOpMetatype from nncf.tensorflow.graph.metatypes.tf_ops import TFOpWithWeightsMetatype from nncf.tensorflow.graph.transformations.commands import TFAfterLayer from nncf.tensorflow.graph.transformations.commands import TFBeforeLayer @@ -249,6 +256,8 @@ def __init__(self, config: NNCFConfig, should_init: bool = True): algo_config = self._get_algo_specific_config_section() if self._target_device == "VPU" and "preset" in algo_config: raise RuntimeError("The VPU target device does not support presets.") + if self._target_device == "CPU_SPR": + raise RuntimeError("The CPU_SPR target device does not supported.") self.global_quantizer_constraints = {} self.ignored_scopes_per_group = {} @@ -435,8 +444,9 @@ def _run_batchnorm_adaptation(self, model: tf.keras.Model) -> None: def _get_quantizer_setup(self, model: tf.keras.Model) -> TFQuantizationSetup: converter = TFModelConverterFactory.create(model) nncf_graph = converter.convert() + nncf_graph = QuantizationBuilder._preprocess_cast_nodes(nncf_graph, CAST_METATYPES) - check_scopes_in_graph(nncf_graph, self.ignored_scopes, self.target_scopes) + check_scopes_in_graph(nncf_graph, self.ignored_scopes, self.target_scopes, self.validate_scopes) self._raise_not_supported_warning(nncf_graph) @@ -605,11 +615,15 @@ def _get_quantizer_propagation_solution( f"Following custom layers will be ignored during quantization (custom layer quantization not supported " f"by NNCF yet):\n[{custom_layer_node_names_str}]" ) - ignored_scopes_for_solver = ( - self.ignored_scopes_per_group[QuantizerGroup.ACTIVATIONS] - + input_preprocessing_node_names - + custom_layer_node_names - ) + ignored_scopes_for_solver = { + **{name: IgnoreReason.USER_REQUESTED for 
name in self.ignored_scopes_per_group[QuantizerGroup.ACTIVATIONS]}, + **{name: IgnoreReason.AUTOGENERATED for name in input_preprocessing_node_names}, + **{name: IgnoreReason.AUTOGENERATED for name in custom_layer_node_names}, + } + scales_unification_map = { + TFConcatenateLayerMetatype: GENERAL_CONV_LAYER_METATYPES + LINEAR_LAYER_METATYPES, + TFConcatOpMetatype: GENERAL_CONV_LAYER_METATYPES + LINEAR_LAYER_METATYPES, + } solver = QuantizerPropagationSolver( activation_ignored_scopes=ignored_scopes_for_solver, weight_ignored_scopes=self.ignored_scopes_per_group[QuantizerGroup.WEIGHTS], @@ -623,6 +637,7 @@ def _get_quantizer_propagation_solution( quantizable_layer_nodes=quantizable_weighted_layer_nodes, global_constraints=self.global_quantizer_constraints, quantize_outputs=self.quantize_outputs, + scales_unification_map=scales_unification_map, ) quantization_proposal = solver.run_on_ip_graph(ip_graph) @@ -678,6 +693,16 @@ def traverse_fn(node: NNCFNode, preprocessing_nodes: List[NNCFNode]) -> Tuple[bo return retval + @staticmethod + def _preprocess_cast_nodes(nncf_graph: NNCFGraph, cast_metatypes: List[OperatorMetatype]) -> NNCFGraph: + cast_nodes = nncf_graph.get_nodes_by_metatypes(cast_metatypes) + for node in cast_nodes: + if not isinstance(node.layer_attributes, ConvertDtypeLayerAttributes): + continue + if node.layer_attributes.src_dtype == node.layer_attributes.dst_dtype: + node.attributes[NNCFNode.METATYPE_ATTR] = TFIdentityOpMetatype + return nncf_graph + def _get_fake_quantize_name(self, node_name: NNCFNodeName, input_port_id: int = None) -> str: original_node_name, instance_idx = get_original_name_and_instance_idx(node_name) fq_name = "{}/fake_quantize".format(original_node_name) diff --git a/nncf/tensorflow/quantization/default_quantization.py b/nncf/tensorflow/quantization/default_quantization.py index edb81ba5867..d184c06cb71 100644 --- a/nncf/tensorflow/quantization/default_quantization.py +++ b/nncf/tensorflow/quantization/default_quantization.py @@ -8,13 +8,15 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. -from nncf.common.graph.operator_metatypes import UnknownMetatype from nncf.common.quantization.quantizer_propagation.structs import QuantizationTrait from nncf.tensorflow.graph.metatypes import common from nncf.tensorflow.graph.metatypes import keras_layers as layer_metatypes from nncf.tensorflow.graph.metatypes import tf_ops as op_metatypes +from nncf.tensorflow.graph.metatypes.common import LAYER_METATYPES_AGNOSTIC_TO_DATA_PRECISION_WITH_MULTIPLE_INPUTS +from nncf.tensorflow.graph.metatypes.common import LAYER_METATYPES_AGNOSTIC_TO_DATA_PRECISION_WITH_MULTIPLE_OUTPUTS +from nncf.tensorflow.graph.metatypes.common import LAYER_METATYPES_AGNOSTIC_TO_DATA_PRECISION_WITH_ONE_INPUT -# If there are no some metatypes it means that they are considered as QuantizationTrait.QuantizationAgnostic +# If a metatype is not in this list, then it is considered to be QuantizationTrait.NON_QUANTIZABLE. 
DEFAULT_TF_QUANT_TRAIT_TO_OP_DICT = { QuantizationTrait.INPUTS_QUANTIZABLE: [ @@ -50,18 +52,9 @@ op_metatypes.TFRelu6OpMetatype, op_metatypes.TFBatchMatMulV2OpMetatype, ], - QuantizationTrait.NON_QUANTIZABLE: [ - layer_metatypes.TFSoftmaxLayerMetatype, - op_metatypes.TFSigmoidOpMetatype, - op_metatypes.TFSoftmaxOpMetatype, - UnknownMetatype, - # Ticket: 108478 - op_metatypes.TFReluOpMetatype, - op_metatypes.TFAbsOpMetatype, - op_metatypes.TFExpOpMetatype, - op_metatypes.TFLogOpMetatype, - op_metatypes.TFSqrtOpMetatype, - ], + QuantizationTrait.QUANTIZATION_AGNOSTIC: LAYER_METATYPES_AGNOSTIC_TO_DATA_PRECISION_WITH_ONE_INPUT + + LAYER_METATYPES_AGNOSTIC_TO_DATA_PRECISION_WITH_MULTIPLE_INPUTS + + LAYER_METATYPES_AGNOSTIC_TO_DATA_PRECISION_WITH_MULTIPLE_OUTPUTS, QuantizationTrait.CONCAT: [ layer_metatypes.TFConcatenateLayerMetatype, op_metatypes.TFConcatOpMetatype, diff --git a/nncf/tensorflow/quantization/quantize_model.py b/nncf/tensorflow/quantization/quantize_model.py index a6605c9d5e6..f6f15d79d8e 100644 --- a/nncf/tensorflow/quantization/quantize_model.py +++ b/nncf/tensorflow/quantization/quantize_model.py @@ -23,7 +23,7 @@ from nncf.parameters import ModelType from nncf.parameters import TargetDevice from nncf.quantization.advanced_parameters import AdvancedQuantizationParameters -from nncf.quantization.advanced_parameters import convert_advanced_parameters_to_dict +from nncf.quantization.advanced_parameters import apply_advanced_parameters_to_config from nncf.scopes import IgnoredScope from nncf.scopes import convert_ignored_scope_to_list from nncf.tensorflow.helpers.model_creation import create_compressed_model @@ -32,7 +32,7 @@ # TODO(alexsu52): It is a workaround and should be removed. -class CalibrarionDataLoader(NNCFDataLoader): +class CalibrationDataLoader(NNCFDataLoader): """ This class wraps the nncf.Dataset. 
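
Since the TensorFlow `quantize_impl` path touched below is reached through the public `nncf.quantize` entry point, a short end-to-end sketch may help. The model, dataset, and ignored type below are placeholders chosen for illustration; only `nncf.quantize`, `nncf.Dataset`, `nncf.IgnoredScope`, and its new `validate` flag are taken from the NNCF API in this patch.

```python
import nncf
import tensorflow as tf

# Placeholder model and calibration data: any Keras model and tf.data pipeline can be used here.
model = tf.keras.applications.MobileNetV2(weights=None)
val_ds = tf.data.Dataset.from_tensor_slices(tf.random.uniform([8, 224, 224, 3])).batch(1)

def transform_fn(item):
    # nncf.Dataset calls this to turn a data item into model input.
    return item

calibration_dataset = nncf.Dataset(val_ds, transform_fn)
quantized_model = nncf.quantize(
    model,
    calibration_dataset,
    subset_size=8,
    # validate=False keeps quantization going even if the listed scope matches nothing,
    # per the IgnoredScope.validate flag introduced in this patch.
    ignored_scope=nncf.IgnoredScope(types=["Softmax"], validate=False),
)
```
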
@@ -122,18 +122,10 @@ def _create_nncf_config( compression_config["ignored_scopes"].extend(_ignored_scope) else: compression_config["ignored_scopes"] = _ignored_scope + compression_config["validate_scopes"] = ignored_scope.validate if advanced_parameters is not None: - advanced_config = convert_advanced_parameters_to_dict(advanced_parameters) - - ranges = advanced_config.get("initializer", {}).get("range") - if ranges is not None: - for rconfig in ranges: - rconfig["num_init_samples"] = subset_size - if "type" not in rconfig: - rconfig["type"] = DEFAULT_RANGE_TYPE - - compression_config.update(advanced_config) + compression_config = apply_advanced_parameters_to_config(compression_config, advanced_parameters) return NNCFConfig({"target_device": target_device.value, "compression": compression_config}) @@ -167,7 +159,7 @@ def quantize_impl( nncf_config = _create_nncf_config(preset, target_device, subset_size, ignored_scope, advanced_parameters) - calibration_data_loader = CalibrarionDataLoader(calibration_dataset) + calibration_data_loader = CalibrationDataLoader(calibration_dataset) nncf_config.register_extra_structs( [ QuantizationRangeInitArgs(data_loader=calibration_data_loader), diff --git a/nncf/tensorflow/sparsity/magnitude/algorithm.py b/nncf/tensorflow/sparsity/magnitude/algorithm.py index 086e600205d..0f6e7f3d270 100644 --- a/nncf/tensorflow/sparsity/magnitude/algorithm.py +++ b/nncf/tensorflow/sparsity/magnitude/algorithm.py @@ -63,7 +63,7 @@ def get_transformation_layout(self, model: tf.keras.Model) -> TFTransformationLa converter = TFModelConverterFactory.create(model) nncf_graph = converter.convert() - check_scopes_in_graph(nncf_graph, self.ignored_scopes, self.target_scopes) + check_scopes_in_graph(nncf_graph, self.ignored_scopes, self.target_scopes, self.validate_scopes) transformations = TFTransformationLayout() diff --git a/nncf/tensorflow/sparsity/rb/algorithm.py b/nncf/tensorflow/sparsity/rb/algorithm.py index 22c926e262b..44ec4074804 100644 --- a/nncf/tensorflow/sparsity/rb/algorithm.py +++ b/nncf/tensorflow/sparsity/rb/algorithm.py @@ -54,7 +54,7 @@ def get_transformation_layout(self, model: tf.keras.Model) -> TFTransformationLa converter = TFModelConverterFactory.create(model) nncf_graph = converter.convert() - check_scopes_in_graph(nncf_graph, self.ignored_scopes, self.target_scopes) + check_scopes_in_graph(nncf_graph, self.ignored_scopes, self.target_scopes, self.validate_scopes) transformations = TFTransformationLayout() diff --git a/nncf/tensorflow/tf_internals.py b/nncf/tensorflow/tf_internals.py index 778db994b04..4c458e13ff7 100644 --- a/nncf/tensorflow/tf_internals.py +++ b/nncf/tensorflow/tf_internals.py @@ -23,7 +23,7 @@ from tensorflow.python.keras.engine.keras_tensor import KerasTensor from tensorflow.python.keras.layers import Rescaling from tensorflow.python.keras.utils.control_flow_util import smart_cond -else: +elif version.parse(tensorflow_version) < version.parse("2.13"): from keras import backend from keras import engine as keras_engine from keras import layers @@ -31,3 +31,11 @@ from keras.engine.keras_tensor import KerasTensor from keras.layers import Rescaling from keras.utils.control_flow_util import smart_cond +else: + from keras import backend + from keras import layers + from keras.layers import Rescaling + from keras.src import engine as keras_engine # pylint: disable=no-name-in-module,import-error + from keras.src.applications import imagenet_utils # pylint: disable=no-name-in-module,import-error + from keras.src.engine.keras_tensor import 
KerasTensor # pylint: disable=no-name-in-module,import-error + from keras.src.utils.control_flow_util import smart_cond # pylint: disable=no-name-in-module,import-error diff --git a/nncf/torch/__init__.py b/nncf/torch/__init__.py index 7016b43aa75..c1275f4ea7f 100644 --- a/nncf/torch/__init__.py +++ b/nncf/torch/__init__.py @@ -13,23 +13,24 @@ """ Base subpackage for NNCF PyTorch functionality. """ + from nncf import nncf_logger from nncf.common.logging.logger import warn_bkc_version_mismatch from nncf.version import BKC_TORCH_VERSION import torch -from pkg_resources import parse_version +from packaging import version try: _torch_version = torch.__version__ - torch_version = parse_version(_torch_version).base_version + torch_version = version.parse(_torch_version).base_version except: nncf_logger.debug("Could not parse torch version") _torch_version = "0.0.0" - torch_version = parse_version(_torch_version).base_version + torch_version = version.parse(_torch_version).base_version -if parse_version(BKC_TORCH_VERSION).base_version != torch_version: +if version.parse(BKC_TORCH_VERSION).base_version != torch_version: warn_bkc_version_mismatch("torch", BKC_TORCH_VERSION, torch.__version__) @@ -56,6 +57,8 @@ from nncf.torch.dynamic_graph.context import disable_tracing from nncf.torch.dynamic_graph.context import no_nncf_trace from nncf.torch.dynamic_graph.context import forward_nncf_trace +from nncf.torch.strip import strip +from nncf.torch.dynamic_graph.patch_pytorch import disable_patching # NNCF relies on tracing PyTorch operations. Each code that uses NNCF # should be executed with PyTorch operators wrapped via a call to "patch_torch_operators", diff --git a/nncf/torch/accuracy_aware_training/runner.py b/nncf/torch/accuracy_aware_training/runner.py index 7b4cc967736..00dac4e7495 100644 --- a/nncf/torch/accuracy_aware_training/runner.py +++ b/nncf/torch/accuracy_aware_training/runner.py @@ -10,7 +10,7 @@ # limitations under the License. 
import os.path as osp -from typing import Callable, Dict +from typing import Dict import torch from torch.optim.lr_scheduler import ReduceLROnPlateau diff --git a/nncf/torch/automl/agent/ddpg/ddpg.py b/nncf/torch/automl/agent/ddpg/ddpg.py index c5e5715237e..bf7c0027e36 100644 --- a/nncf/torch/automl/agent/ddpg/ddpg.py +++ b/nncf/torch/automl/agent/ddpg/ddpg.py @@ -202,7 +202,7 @@ def update_policy(self): ) target_q_batch = ( - to_tensor(reward_batch) + self.discount * to_tensor(terminal_batch.astype(np.float)) * next_q_values + to_tensor(reward_batch) + self.discount * to_tensor(terminal_batch.astype(float)) * next_q_values ) # Critic update diff --git a/nncf/torch/binarization/binarize_functions.py b/nncf/torch/binarization/binarize_functions.py index d41d18fdb69..2225ffee78e 100644 --- a/nncf/torch/binarization/binarize_functions.py +++ b/nncf/torch/binarization/binarize_functions.py @@ -44,12 +44,12 @@ class XNORBinarizeFn(torch.autograd.Function): @staticmethod def symbolic(g, x): - zero = g.constant(0, [1], "float") + zero = g.op("Constant", value_t=torch.tensor([0])) zero = _unsqueeze_helper(g, zero, [1, 2, 3]) scale = g.op("Abs", x) scale = g.op("ReduceMean", scale, axes_i=[1, 2, 3]) scale_neg = g.op("Neg", scale) - return g.op(add_domain("FakeQuantize"), x, zero, zero, scale_neg, scale, levels_i=2) + return g.op(add_domain("FakeQuantize"), x, zero, zero, scale_neg, scale, levels_i=2).setType(x.type()) @staticmethod def forward(ctx, x): @@ -76,12 +76,12 @@ class DOREFABinarizeFn(torch.autograd.Function): @staticmethod def symbolic(g, x): - zero = g.constant(0, [1], "float") + zero = g.op("Constant", value_t=torch.tensor([0])) zero = _unsqueeze_helper(g, zero, [1, 2, 3]) scale = g.op("Abs", x) scale = g.op("ReduceMean", scale, axes_i=[0, 1, 2, 3]) scale_neg = g.op("Neg", scale) - return g.op(add_domain("FakeQuantize"), x, zero, zero, scale_neg, scale, levels_i=2) + return g.op(add_domain("FakeQuantize"), x, zero, zero, scale_neg, scale, levels_i=2).setType(x.type()) @staticmethod def forward(ctx, x): @@ -105,11 +105,11 @@ def backward(ctx: Any, *grad_outputs: Any) -> Any: class ActivationBinarizationScaleThresholdFn(torch.autograd.Function): @staticmethod def symbolic(g, x, scale, threshold): - zero = g.constant(0, [1], "float") + zero = g.op("Constant", value_t=torch.tensor([0])) zero = _unsqueeze_helper(g, zero, [0, 2, 3]) threshold = g.op("Mul", threshold, scale) scale = _unsqueeze_helper(g, scale, [0, 2, 3]) - return g.op(add_domain("FakeQuantize"), x, threshold, threshold, zero, scale, levels_i=2) + return g.op(add_domain("FakeQuantize"), x, threshold, threshold, zero, scale, levels_i=2).setType(x.type()) @staticmethod def forward(ctx, input_, scale, threshold): diff --git a/nncf/torch/binarization/extensions.py b/nncf/torch/binarization/extensions.py index 26a2ee6f869..be71fd99ec3 100644 --- a/nncf/torch/binarization/extensions.py +++ b/nncf/torch/binarization/extensions.py @@ -20,6 +20,7 @@ from nncf.torch.extensions import EXTENSIONS from nncf.torch.extensions import CudaNotAvailableStub from nncf.torch.extensions import ExtensionLoader +from nncf.torch.extensions import ExtensionLoaderTimeoutException from nncf.torch.extensions import ExtensionNamespace from nncf.torch.extensions import ExtensionsType @@ -60,6 +61,8 @@ def load(cls): build_directory=cls.get_build_dir(), verbose=False, ) + except ExtensionLoaderTimeoutException as e: + raise e except Exception as e: # pylint:disable=broad-except nncf_logger.warning( f"Could not compile CPU binarization extensions. 
" @@ -91,6 +94,8 @@ def load(cls): build_directory=cls.get_build_dir(), verbose=False, ) + except ExtensionLoaderTimeoutException as e: + raise e except (subprocess.CalledProcessError, OSError, RuntimeError) as e: assert torch.cuda.is_available() raise RuntimeError( diff --git a/nncf/torch/binarization/reference.py b/nncf/torch/binarization/reference.py index 5946d74c6de..cbf80d355fa 100644 --- a/nncf/torch/binarization/reference.py +++ b/nncf/torch/binarization/reference.py @@ -32,11 +32,16 @@ def __init__(self, backend_type: ReferenceBackendType): else: raise RuntimeError("Unknown backend for ReferenceQuantize") + def _astype(self, tensor: GeneralizedTensor, dtype) -> GeneralizedTensor: + if self.backend is np: + return tensor.astype(dtype) + return tensor.type(dtype) + class ReferenceXNORBinarize(ReferenceBase): def forward(self, x: GeneralizedTensor) -> GeneralizedTensor: norm = self.backend.abs(x).mean((1, 2, 3), keepdims=True) - sign = (x > 0).astype(x.dtype) * 2 - 1 + sign = self._astype((x > 0), x.dtype) * 2 - 1 output = sign * norm return output @@ -48,7 +53,7 @@ def backward(grad_output: GeneralizedTensor) -> GeneralizedTensor: class ReferenceDOREFABinarize(ReferenceBase): def forward(self, x: GeneralizedTensor) -> GeneralizedTensor: norm = self.backend.abs(x).mean() - sign = (x > 0).astype(x.dtype) * 2 - 1 + sign = self._astype((x > 0), x.dtype) * 2 - 1 return sign * norm @staticmethod @@ -57,19 +62,19 @@ def backward(grad_output: GeneralizedTensor) -> GeneralizedTensor: class ReferenceActivationBinarize(ReferenceBase): - @staticmethod - def forward(x: GeneralizedTensor, scale: GeneralizedTensor, threshold: GeneralizedTensor) -> GeneralizedTensor: + def forward( + self, x: GeneralizedTensor, scale: GeneralizedTensor, threshold: GeneralizedTensor + ) -> GeneralizedTensor: shape = [1 for s in x.shape] shape[1] = x.shape[1] t = threshold * scale - output = (x > t).astype(x.dtype) * scale + output = self._astype((x > t), x.dtype) * scale return output - @staticmethod - def backward(grad_output, x, scale, output): + def backward(self, grad_output, x: GeneralizedTensor, scale: GeneralizedTensor, output: GeneralizedTensor): # calc gradient for input - mask_lower = (x <= scale).astype(x.dtype) - grad_input = grad_output * (x >= 0).astype(x.dtype) * mask_lower + mask_lower = self._astype((x <= scale), x.dtype) + grad_input = grad_output * self._astype((x >= 0), x.dtype) * mask_lower # calc gradient for scale err = (output - x) / scale @@ -77,7 +82,7 @@ def backward(grad_output, x, scale, output): grad_scale = grad_scale.sum() # calc gradient for threshold - grad_threshold = -grad_output * (x > 0).astype(x.dtype) * (x < scale).astype(x.dtype) + grad_threshold = -grad_output * self._astype((x > 0), x.dtype) * self._astype((x < scale), x.dtype) for idx, _ in enumerate(x.shape): if idx != 1: # activation channel dimension diff --git a/nncf/torch/composite_compression.py b/nncf/torch/composite_compression.py index 6f71b4e7b1a..8173d9ed409 100644 --- a/nncf/torch/composite_compression.py +++ b/nncf/torch/composite_compression.py @@ -22,8 +22,8 @@ from nncf.torch.compression_method_api import PTCompressionAlgorithmController from nncf.torch.compression_method_api import PTCompressionLoss from nncf.torch.graph.transformations.layout import PTTransformationLayout +from nncf.torch.model_transformer import PTModelTransformer from nncf.torch.nncf_network import NNCFNetwork -from nncf.torch.nncf_network import PTModelTransformer TModel = TypeVar("TModel") diff --git 
a/nncf/torch/compression_method_api.py b/nncf/torch/compression_method_api.py index e90841eba5c..efbbe8b2408 100644 --- a/nncf/torch/compression_method_api.py +++ b/nncf/torch/compression_method_api.py @@ -1,15 +1,14 @@ -# -# Copyright (c) 2019-2023 Intel Corporation -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# http://www.apache.org/licenses/LICENSE-2.0 -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# +# Copyright (c) 2023 Intel Corporation +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + """ @package docstring @@ -33,8 +32,8 @@ from nncf.torch.graph.transformations.layout import PTTransformationLayout from nncf.torch.layers import NNCF_MODULES_DICT from nncf.torch.layers import NNCF_WRAPPED_USER_MODULES_DICT +from nncf.torch.model_transformer import PTModelTransformer from nncf.torch.nncf_network import NNCFNetwork -from nncf.torch.nncf_network import PTModelTransformer TModel = TypeVar("TModel") @@ -137,7 +136,9 @@ def get_transformation_layout(self, model: NNCFNetwork) -> PTTransformationLayou :param model: An instance of NNCFNetwork for the algorithm to be applied to. 
:return: NNCFNetwork with algorithm-specific modifications applied """ - check_scopes_in_graph(model.nncf.get_original_graph(), self.ignored_scopes, self.target_scopes) + check_scopes_in_graph( + model.nncf.get_original_graph(), self.ignored_scopes, self.target_scopes, self.validate_scopes + ) layout = self._get_transformation_layout(model) self._handle_frozen_layers(model) @@ -202,7 +203,7 @@ def _handle_frozen_layers(self, target_model: NNCFNetwork): is_allowed, reason = self._are_frozen_layers_allowed() if is_allowed: nncf_logger.warning( - f"{reason}, compressing them without tuning weights.\n" f"Frozen layers:\n" f"{scopes_to_print}" + f"{reason}, compressing them without tuning weights.\nFrozen layers:\n{scopes_to_print}" ) else: raise RuntimeError( diff --git a/nncf/torch/dynamic_graph/graph.py b/nncf/torch/dynamic_graph/graph.py index 0e32be5681f..80db3559dc9 100644 --- a/nncf/torch/dynamic_graph/graph.py +++ b/nncf/torch/dynamic_graph/graph.py @@ -15,9 +15,9 @@ import networkx.algorithms.isomorphism as iso from torch import Tensor +from nncf import nncf_logger from nncf.common.graph import Dtype from nncf.common.graph.layer_attributes import BaseLayerAttributes -from nncf.common.logging import nncf_logger from nncf.torch.dynamic_graph.operation_address import OperationAddress from nncf.torch.dynamic_graph.scope import Scope from nncf.torch.dynamic_graph.trace_tensor import TensorMeta @@ -227,13 +227,29 @@ def __init__( input_port_id: int, output_port_id: int, dtype: Dtype, + parallel_input_port_ids: List[int], ): + """ + :param from_node_id - A numeric identifier of the starting node of the edge + :param to_node_id - A numeric identifier of the node to which the edge is pointing + :param activation_shape - The shape of the tensor associated with this edge + :param input_port_id - Port of the operation consuming the tensor that this edge + represents. + :param output_port_id - Port of the operation that produced the tensor represented by this edge. + :param dtype - Data type of the tensor represented by this edge. + :param parallel_input_port_ids - Additional input ports of the consuming operation that receive + the same tensor as `input_port_id`. This is a workaround for the DiGraph's requirement to have + no more than 1 edge between any two nodes: an operation that, for instance, multiplies a tensor + with itself has two inputs fed by one and the same tensor (i.e. edge), so the extra input + port IDs are recorded in this list instead of creating parallel edges. 
+ """ self.from_node_id = from_node_id self.to_node_id = to_node_id self.activation_shape = activation_shape self.input_port_id = input_port_id self.output_port_id = output_port_id self.dtype = dtype + self.parallel_input_port_ids = parallel_input_port_ids @classmethod def build_between_two_nx_nodes( @@ -248,6 +264,7 @@ def build_between_two_nx_nodes( input_port_id=nx_edge[DynamicGraph.INPUT_PORT_ID_EDGE_ATTR], output_port_id=nx_edge[DynamicGraph.OUTPUT_PORT_ID_EDGE_ATTR], dtype=nx_edge[DynamicGraph.ACTIVATION_DTYPE_EDGE_ATTR], + parallel_input_port_ids=nx_edge[DynamicGraph.PARALLEL_INPUT_PORT_IDS], ) @@ -323,15 +340,21 @@ def add_node( has_traced_inputs = False for i, info in enumerate(op_exec_context.tensor_metas): + input_port_id = i if info is None or info.creator_id is None: continue + + has_traced_inputs = True parent = self._node_id_to_key_dict[info.creator_id] + if self._nx_graph.get_edge_data(parent, node_key) is not None: + self._nx_graph.edges[parent, node_key][DynamicGraph.PARALLEL_INPUT_PORT_IDS] += [input_port_id] + continue self._nx_graph.add_edge(parent, node_key) - has_traced_inputs = True self._nx_graph.edges[parent, node_key][DynamicGraph.ACTIVATION_SHAPE_EDGE_ATTR] = info.shape - self._nx_graph.edges[parent, node_key][DynamicGraph.INPUT_PORT_ID_EDGE_ATTR] = i + self._nx_graph.edges[parent, node_key][DynamicGraph.INPUT_PORT_ID_EDGE_ATTR] = input_port_id self._nx_graph.edges[parent, node_key][DynamicGraph.OUTPUT_PORT_ID_EDGE_ATTR] = info.index self._nx_graph.edges[parent, node_key][DynamicGraph.ACTIVATION_DTYPE_EDGE_ATTR] = info.dtype + self._nx_graph.edges[parent, node_key][DynamicGraph.PARALLEL_INPUT_PORT_IDS] = [] nx_node_dict = self._nx_graph.nodes[node_key] node = DynamicGraphNode.build_from_nx_node(nx_node_dict) @@ -500,11 +523,10 @@ def __init__(self, node_id_to_key_dict, nx_graph): # TODO: optimize by matching exact module type @staticmethod def _within_iteration(scope: Scope): - scope_name = str(scope) from nncf.torch.layers import ITERATION_MODULES # pylint: disable=cyclic-import - for iter_scope in ITERATION_MODULES.registry_dict: - if iter_scope in scope_name: + for scope_element in scope.scope_elements: + if scope_element.calling_module_class_name in ITERATION_MODULES.registry_dict: return True return False @@ -577,6 +599,7 @@ class DynamicGraph: IS_CALLED_INSIDE_NNCF_MODULE = "is_called_inside_nncf_module" IS_IN_ITERATION_SCOPE_NODE_ATTR = "is_in_iteration_scope" CALLING_MODULE_ID = "calling_module_id" + PARALLEL_INPUT_PORT_IDS = "parallel_input_port_ids" def __init__(self): self._nx_graph = nx.DiGraph() diff --git a/nncf/torch/dynamic_graph/io_handling.py b/nncf/torch/dynamic_graph/io_handling.py index 3d771125578..04448d04e10 100644 --- a/nncf/torch/dynamic_graph/io_handling.py +++ b/nncf/torch/dynamic_graph/io_handling.py @@ -1,3 +1,14 @@ +# Copyright (c) 2023 Intel Corporation +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ from collections import OrderedDict from inspect import Parameter from inspect import Signature diff --git a/nncf/torch/dynamic_graph/layer_attributes_handlers.py b/nncf/torch/dynamic_graph/layer_attributes_handlers.py index edd5d85bd37..151645181b0 100644 --- a/nncf/torch/dynamic_graph/layer_attributes_handlers.py +++ b/nncf/torch/dynamic_graph/layer_attributes_handlers.py @@ -38,6 +38,7 @@ from nncf.torch.graph.operator_metatypes import PTPadMetatype from nncf.torch.graph.operator_metatypes import PTReshapeMetatype from nncf.torch.graph.operator_metatypes import PTSplitMetatype +from nncf.torch.graph.operator_metatypes import PTSqueezeMetatype from nncf.torch.layers import NNCF_MODULES_DICT OP_NAMES_REQUIRING_MODULE_ATTRS = [v.op_func_name for v in NNCF_MODULES_DICT] + list( @@ -55,10 +56,15 @@ def get_layer_attributes_from_module(module: TorchModule, operator_name: str) -> BaseLayerAttributes: if operator_name == "group_norm": - return GroupNormLayerAttributes(module.weight.requires_grad, module.num_channels, module.num_groups) + return GroupNormLayerAttributes( + weight_requires_grad=module.weight.requires_grad, + num_channels=module.num_channels, + num_groups=module.num_groups, + ) # torch.nn.utils.weight_norm replaces weight with weight_g and weight_v is_weight_norm_applied = hasattr(module, "weight_g") and hasattr(module, "weight_v") weight_attr = "weight_g" if is_weight_norm_applied else "weight" + with_bias = hasattr(module, "bias") and module.bias is not None if isinstance(module, (Conv1d, Conv2d, Conv3d)): return ConvolutionLayerAttributes( weight_requires_grad=getattr(module, weight_attr).requires_grad, @@ -66,9 +72,11 @@ def get_layer_attributes_from_module(module: TorchModule, operator_name: str) -> out_channels=module.out_channels, kernel_size=module.kernel_size, stride=module.stride, + dilations=module.dilation, groups=module.groups, transpose=False, padding_values=module.padding, + with_bias=with_bias, ) if isinstance(module, (ConvTranspose1d, ConvTranspose2d, ConvTranspose3d)): return ConvolutionLayerAttributes( @@ -77,21 +85,25 @@ def get_layer_attributes_from_module(module: TorchModule, operator_name: str) -> out_channels=module.out_channels, kernel_size=module.kernel_size, stride=module.stride, + dilations=module.dilation, groups=module.groups, transpose=True, padding_values=module.padding, + with_bias=with_bias, ) if isinstance(module, Linear): return LinearLayerAttributes( weight_requires_grad=getattr(module, weight_attr).requires_grad, in_features=module.in_features, out_features=module.out_features, - bias=module.bias is not None, + with_bias=with_bias, ) if hasattr(module, "weight"): return GenericWeightedLayerAttributes( - weight_requires_grad=getattr(module, weight_attr).requires_grad, weight_shape=module.weight.shape + weight_requires_grad=getattr(module, weight_attr).requires_grad, + weight_shape=module.weight.shape, + with_bias=with_bias, ) return GenericWeightedLayerAttributes(weight_requires_grad=False, weight_shape=[1, 1]) @@ -126,7 +138,7 @@ def set_nodes_attributes_in_nncf_graph(graph: NNCFGraph) -> None: layer_attributes = MultipleInputLayerAttributes(axis) node.layer_attributes = layer_attributes - if node.metatype is PTReshapeMetatype: + if node.metatype in [PTReshapeMetatype, PTSqueezeMetatype]: input_nodes = graph.get_input_edges(node) output_nodes = graph.get_output_edges(node) # In case ReshapeMetatype op is intermediate node diff --git a/nncf/torch/dynamic_graph/patch_pytorch.py b/nncf/torch/dynamic_graph/patch_pytorch.py index 
2aaf69ee858..899a7e75ee3 100644 --- a/nncf/torch/dynamic_graph/patch_pytorch.py +++ b/nncf/torch/dynamic_graph/patch_pytorch.py @@ -11,6 +11,7 @@ import functools import inspect +from contextlib import contextmanager from typing import List import torch @@ -187,49 +188,38 @@ def torch_jit_script_wrapper(*args, **kwargs): # so at call of torch.jit.script function we need to # un-patch the torch operators - # If already unpatched, don't perform unpatch/patch - apply_unpatch = _OPERATORS_ALREADY_WRAPPED - if apply_unpatch: - unpatch_torch_operators() - - signature = inspect.signature(_ORIG_JIT_SCRIPT) - bound_args = signature.bind(*args, **kwargs).arguments - # Process the case when the object-to-script is a class as in the original jit.script logic - if inspect.isclass(bound_args["obj"]): - # Inserting wrapper alters the call stack, hence we need to change the resolution callback accordingly - if "_rcb" not in bound_args: - frames_up = bound_args.get("_frames_up", 0) - rcb = createResolutionCallbackFromFrame(frames_up + 1) - kwargs["_rcb"] = rcb - retval = _ORIG_JIT_SCRIPT(*args, **kwargs) - else: - # For some reason resolution callback may return patched methods, so we wrap it to avoid this - if "_rcb" in kwargs: - rcb = kwargs["_rcb"] + with disable_patching(): + signature = inspect.signature(_ORIG_JIT_SCRIPT) + bound_args = signature.bind(*args, **kwargs).arguments + # Process the case when the object-to-script is a class as in the original jit.script logic + if inspect.isclass(bound_args["obj"]): + # Inserting wrapper alters the call stack, hence we need to change the resolution callback accordingly + if "_rcb" not in bound_args: + frames_up = bound_args.get("_frames_up", 0) + rcb = createResolutionCallbackFromFrame(frames_up + 1) + kwargs["_rcb"] = rcb + retval = _ORIG_JIT_SCRIPT(*args, **kwargs) + else: + # For some reason resolution callback may return patched methods, so we wrap it to avoid this + if "_rcb" in kwargs: + rcb = kwargs["_rcb"] - def rcb_wrapper(name): - value = rcb(name) - if hasattr(value, "_original_op"): - value = value._original_op # pylint: disable=protected-access - return value + def rcb_wrapper(name): + value = rcb(name) + if hasattr(value, "_original_op"): + value = value._original_op # pylint: disable=protected-access + return value - kwargs["_rcb"] = rcb_wrapper + kwargs["_rcb"] = rcb_wrapper - retval = _ORIG_JIT_SCRIPT(*args, **kwargs) + retval = _ORIG_JIT_SCRIPT(*args, **kwargs) - if apply_unpatch: - patch_torch_operators() - - return retval + return retval def torch_jit_trace_make_module_wrapper(*args, **kwargs): - apply_unpatch = _OPERATORS_ALREADY_WRAPPED - if apply_unpatch: - unpatch_torch_operators() - retval = _ORIG_JIT_TRACE_MAKE_MODULE(*args, **kwargs) - if apply_unpatch: - patch_torch_operators() + with disable_patching(): + retval = _ORIG_JIT_TRACE_MAKE_MODULE(*args, **kwargs) return retval @@ -415,3 +405,17 @@ def unpatch_torch_operators(): for orig_op_info in ORIGINAL_OPERATORS: setattr(orig_op_info.namespace, orig_op_info.name, orig_op_info.op) + + +@contextmanager +def disable_patching(): + was_patched = _OPERATORS_ALREADY_WRAPPED + if was_patched: + unpatch_torch_operators() + try: + yield + finally: + # The code in the with statement may raise an exception, which could be expected to be handled elsewhere. + # Need to restore the previous state of patching in this case before continuing to the exception handling. 
+ if was_patched: + patch_torch_operators() diff --git a/nncf/torch/dynamic_graph/scope.py b/nncf/torch/dynamic_graph/scope.py index f83a703c213..fa9770ad7ac 100644 --- a/nncf/torch/dynamic_graph/scope.py +++ b/nncf/torch/dynamic_graph/scope.py @@ -97,10 +97,9 @@ def from_str(string: str) -> "Scope": def get_iteration_scopes(self) -> List[str]: results = [] - scope_name = str(self) from nncf.torch.layers import ITERATION_MODULES # pylint: disable=cyclic-import - for iter_scope in ITERATION_MODULES.registry_dict: - if iter_scope in scope_name: - results.append(iter_scope) + for scope_element in self.scope_elements: + if scope_element.calling_module_class_name in ITERATION_MODULES.registry_dict: + results.append(scope_element.calling_module_class_name) return results diff --git a/nncf/torch/dynamic_graph/scope_access.py b/nncf/torch/dynamic_graph/scope_access.py index cb0617637fd..9837e1b9444 100644 --- a/nncf/torch/dynamic_graph/scope_access.py +++ b/nncf/torch/dynamic_graph/scope_access.py @@ -1,3 +1,14 @@ +# Copyright (c) 2023 Intel Corporation +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + from typing import Optional import torch.nn diff --git a/nncf/torch/dynamic_graph/wrappers.py b/nncf/torch/dynamic_graph/wrappers.py index 1e1086c50a7..a8eade450dd 100644 --- a/nncf/torch/dynamic_graph/wrappers.py +++ b/nncf/torch/dynamic_graph/wrappers.py @@ -204,7 +204,7 @@ def _collect_module_attrs_and_ignored_algorithms( curr_module = ctx.get_current_module() if curr_module is None: raise RuntimeError( - f"Operation {op_name} requires module attributes, " f"but it was executed outside any module" + f"Operation {op_name} requires module attributes, but it was executed outside any module" ) layer_attrs = get_layer_attributes_from_module(curr_module, op_name) if isinstance(curr_module, _NNCFModuleMixin): diff --git a/nncf/torch/exporter.py b/nncf/torch/exporter.py index 124fa25b4e1..624457aed70 100644 --- a/nncf/torch/exporter.py +++ b/nncf/torch/exporter.py @@ -115,7 +115,7 @@ def export_model(self, save_path: str, save_format: str = PTExportFormat.ONNX) - if export_fn is None: available_formats = list(format_to_export_fn.keys()) - raise ValueError(f"Unsupported saving format: '{save_format}'. " f"Available formats: {available_formats}") + raise ValueError(f"Unsupported saving format: '{save_format}'. Available formats: {available_formats}") export_fn(**fn_args) diff --git a/nncf/torch/extensions/__init__.py b/nncf/torch/extensions/__init__.py index c7e30acac3f..58dd0fe29d3 100644 --- a/nncf/torch/extensions/__init__.py +++ b/nncf/torch/extensions/__init__.py @@ -1,6 +1,21 @@ +# Copyright (c) 2023 Intel Corporation +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + import enum +import os +import textwrap from abc import ABC from abc import abstractmethod +from multiprocessing.context import TimeoutError as MPTimeoutError +from multiprocessing.pool import ThreadPool from pathlib import Path from typing import Callable @@ -14,6 +29,9 @@ EXTENSIONS = Registry("extensions") +EXTENSION_LOAD_TIMEOUT_ENV_VAR = "NNCF_EXTENSION_LOAD_TIMEOUT" +DEFAULT_EXTENSION_LOAD_TIMEOUT = 60 + class ExtensionsType(enum.Enum): CPU = 0 @@ -49,6 +67,10 @@ def get_build_dir(cls) -> str: return str(get_build_directory_for_extension(cls.name())) +class ExtensionLoaderTimeoutException(Exception): + """Raised when the extension takes too long to load""" + + class ExtensionNamespace: """ Provides lazy loading of the underlying extension, i.e. on the first request of a function from the extension. @@ -70,8 +92,31 @@ def get(self, fn_name: str) -> Callable: :return: A callable object corresponding to the requested function. """ if self._loaded_namespace is None: + timeout = int(os.environ.get(EXTENSION_LOAD_TIMEOUT_ENV_VAR, DEFAULT_EXTENSION_LOAD_TIMEOUT)) + timeout = timeout if timeout > 0 else None + with extension_is_loading_info_log(self._loader.name()): - self._loaded_namespace = self._loader.load() + try: + pool = ThreadPool(processes=1) + async_result = pool.apply_async(self._loader.load) + self._loaded_namespace = async_result.get(timeout=timeout) + except MPTimeoutError as error: + # pylint: disable=line-too-long + msg = textwrap.dedent( + f"""\ + The extension load function failed to execute within {timeout} seconds. + This may be due to leftover lock files from the PyTorch C++ extension build process. 
+ If this is the case, running the following command should help: + rm -rf {self._loader.get_build_dir()} + For a machine with poor performance, you may try increasing the time limit by setting the environment variable: + {EXTENSION_LOAD_TIMEOUT_ENV_VAR}=180 + Or disable timeout by set: + {EXTENSION_LOAD_TIMEOUT_ENV_VAR}=0 + For more information, see FAQ entry at: https://github.com/openvinotoolkit/nncf/blob/develop/docs/FAQ.md#importing-anything-from-nncftorch-hangs + """ + ) + raise ExtensionLoaderTimeoutException(msg) from error + return getattr(self._loaded_namespace, fn_name) diff --git a/nncf/torch/extensions/include/common_cpu_funcs.h b/nncf/torch/extensions/include/common_cpu_funcs.h index d6a9d54b722..c044d984514 100644 --- a/nncf/torch/extensions/include/common_cpu_funcs.h +++ b/nncf/torch/extensions/include/common_cpu_funcs.h @@ -2,6 +2,7 @@ #define _COMMON_CPU_FUNCS_H_ #include +#include "dispatch.h" void sum_like(at::Tensor& target_tensor, const at::Tensor& ref_tensor); void sum_to_act_channels(at::Tensor& target_tensor); diff --git a/nncf/torch/extensions/include/common_cuda_defs.cuh b/nncf/torch/extensions/include/common_cuda_defs.cuh index b2853fa0c07..782f7536fb9 100644 --- a/nncf/torch/extensions/include/common_cuda_defs.cuh +++ b/nncf/torch/extensions/include/common_cuda_defs.cuh @@ -23,7 +23,7 @@ inline uint32_t GET_BLOCKS(const uint32_t total_required_threads) { } inline c10::TensorOptions get_accum_options(const c10::TensorOptions options) { - if (options.dtype() == c10::ScalarType::Half) { + if (options.dtype() == c10::ScalarType::Half || options.dtype() == c10::ScalarType::BFloat16) { return options.dtype(c10::ScalarType::Float); } return options; @@ -75,7 +75,8 @@ inline dim3 get_2d_grid_size_for_per_channel(const uint32_t scale_count) #endif #define ACCUM_TYPE_FOR(SOURCE_TYPE) \ -std::conditional_t::value, float, SOURCE_TYPE> +std::conditional_t::value, float, \ + std::conditional_t::value, float, SOURCE_TYPE>> #endif // _COMMON_CUDA_DEFS_CUH_ diff --git a/nncf/torch/extensions/include/common_cuda_funcs.cuh b/nncf/torch/extensions/include/common_cuda_funcs.cuh index 1dd0d1d6da2..336631b8467 100644 --- a/nncf/torch/extensions/include/common_cuda_funcs.cuh +++ b/nncf/torch/extensions/include/common_cuda_funcs.cuh @@ -5,35 +5,43 @@ // to separate translation units will require relocatable device code compilation, // which is rumoured to degrade performance. +#include "dispatch.h" #include "common_cuda_defs.cuh" -#define DISABLE_FP16(TYPE_NAME) std::enable_if_t< \ + + +#define ENABLE_ONLY_FOR_NONREDUCED_FP_TYPES(TYPE_NAME) std::enable_if_t< \ std::is_same::value || \ std::is_same::value, bool> = true -// support only warp size = 32 -template -__device__ void sum_warp(volatile scalar_t* sharr) { +// Volatile c10::Half and c10::BFloat16 arithmetic is not supported, thus the implicit warp-synchronous +// programming via "volatile" (which is deprecated anyway) cannot be used. +// Using modern explicit intra-warp thread synchronization primitives. 
+// For more information, see https://developer.nvidia.com/blog/using-cuda-warp-level-primitives/ and +// https://docs.nvidia.com/cuda/cuda-c-programming-guide/index.html?highlight=__shfl#warp-shuffle-functions + +template +__device__ void sum_warp(scalar_accum_t* sharr) { int tidx = threadIdx.x & 31; - if (tidx < 16) { - sharr[tidx] += sharr[tidx + 16]; - sharr[tidx] += sharr[tidx + 8]; - sharr[tidx] += sharr[tidx + 4]; - sharr[tidx] += sharr[tidx + 2]; - sharr[tidx] += sharr[tidx + 1]; - } + scalar_accum_t v = sharr[tidx]; + v += __shfl_down_sync(-1, v, 16); + v += __shfl_down_sync(-1, v, 8); + v += __shfl_down_sync(-1, v, 4); + v += __shfl_down_sync(-1, v, 2); + v += __shfl_down_sync(-1, v, 1); + sharr[tidx] = v; } -template -__device__ inline void gather_warp_execution_results(scalar_t* sharr, const uint16_t tidx) { - sharr[tidx] = tidx * CUDA_WARP_SIZE < CUDA_MAX_NUM_THREADS_PER_BLOCK ? sharr[tidx * CUDA_WARP_SIZE] : static_cast(0.0); +template +__device__ inline void gather_warp_execution_results(scalar_accum_t* sharr, const uint16_t tidx) { + sharr[tidx] = tidx * CUDA_WARP_SIZE < CUDA_MAX_NUM_THREADS_PER_BLOCK ? sharr[tidx * CUDA_WARP_SIZE] : static_cast(0.0); } // Reduces the contents of a shared memory array of CUDA_MAX_NUM_THREADS_PER_BLOCK using // warp-powered reduction. The final sum will be stored in the 0-th element of the shared memory array. -template -__device__ void reduce_in_block_using_warp_sums(scalar_t* __restrict__ sh_mem, +template +__device__ void reduce_in_block_using_warp_sums(scalar_accum_t* __restrict__ sh_mem, uint16_t tidx) { __syncthreads(); // Will reduce the summation to CUDA_MAX_WARPS_PER_BLOCK elements that are @@ -62,7 +70,7 @@ __device__ bool last_block(int32_t* counter, uint32_t total_blocks_count) { } -template +template __device__ void reduce_with_shared_memory( scalar_accum_t* __restrict__ sh_arr, scalar_accum_t current_thread_sum, diff --git a/nncf/torch/extensions/include/common_defs.h b/nncf/torch/extensions/include/common_defs.h index e3bc78b57ae..a554374eb53 100644 --- a/nncf/torch/extensions/include/common_defs.h +++ b/nncf/torch/extensions/include/common_defs.h @@ -6,4 +6,5 @@ #define CHECK_CUDA(x) AT_ASSERTM(x.is_cuda(), #x " must be a CUDA tensor") #define CHECK_CONTIGUOUS(x) AT_ASSERTM(x.is_contiguous(), #x " must be contiguous") + #endif // _COMMON_DEFS_H_ diff --git a/nncf/torch/extensions/include/dispatch.h b/nncf/torch/extensions/include/dispatch.h new file mode 100644 index 00000000000..b745c645bc1 --- /dev/null +++ b/nncf/torch/extensions/include/dispatch.h @@ -0,0 +1,12 @@ +#ifndef _DISPATCH_H_ +#define _DISPATCH_H_ + +#include + +// MSVC cannot even pass __VA_ARGS__ to another macro properly, in contrast with GCC. +// For the DISPATCH_TENSOR_DATA_TYPES macro to work on Windows, had to apply a workaround as described in +// https://renenyffenegger.ch/notes/development/languages/C-C-plus-plus/preprocessor/macros/__VA_ARGS__/index +#define PASS_ON(...) __VA_ARGS__ +#define DISPATCH_TENSOR_DATA_TYPES(...) 
PASS_ON(PASS_ON(AT_DISPATCH_FLOATING_TYPES_AND2)(at::kHalf, at::kBFloat16, __VA_ARGS__)) + +#endif // _DISPATCH_H_ \ No newline at end of file diff --git a/nncf/torch/extensions/src/binarization/cpu/functions_cpu.cpp b/nncf/torch/extensions/src/binarization/cpu/functions_cpu.cpp index 43a4d36bd3c..e4c19169dce 100644 --- a/nncf/torch/extensions/src/binarization/cpu/functions_cpu.cpp +++ b/nncf/torch/extensions/src/binarization/cpu/functions_cpu.cpp @@ -89,7 +89,7 @@ at::Tensor wb_forward( CHECK_INPUT(input); at::Tensor output; - AT_DISPATCH_FLOATING_TYPES_AND_HALF(input.type(), "wb_cpu_forward", ([&] { + DISPATCH_TENSOR_DATA_TYPES(input.type(), "wb_cpu_forward", ([&] { output = wb_cpu_forward(input, per_channel); })); @@ -105,7 +105,7 @@ at::Tensor ab_forward( CHECK_INPUT(thresholds); at::Tensor output; - AT_DISPATCH_FLOATING_TYPES_AND_HALF(input.type(), "ab_cpu_forward", ([&] { + DISPATCH_TENSOR_DATA_TYPES(input.type(), "ab_cpu_forward", ([&] { output = ab_cpu_forward(input, scale, thresholds); })); @@ -123,7 +123,7 @@ std::vector ab_backward( CHECK_INPUT(output); std::vector retval; - AT_DISPATCH_FLOATING_TYPES_AND_HALF(input.type(), "ab_cpu_forward", ([&] { + DISPATCH_TENSOR_DATA_TYPES(input.type(), "ab_cpu_forward", ([&] { retval = ab_cpu_backward(grad_output, input, scale, output); })); diff --git a/nncf/torch/extensions/src/binarization/cuda/functions_cuda_impl.cu b/nncf/torch/extensions/src/binarization/cuda/functions_cuda_impl.cu index c7dbf003248..e3ce31e9ac9 100644 --- a/nncf/torch/extensions/src/binarization/cuda/functions_cuda_impl.cu +++ b/nncf/torch/extensions/src/binarization/cuda/functions_cuda_impl.cu @@ -168,7 +168,7 @@ at::Tensor wb_cuda_forward( for (int ch_idx = 0; ch_idx < scale_count; ch_idx++) { - AT_DISPATCH_FLOATING_TYPES_AND_HALF(input.type(), "wb_cuda_forward_scale", ([&] { + DISPATCH_TENSOR_DATA_TYPES(input.type(), "wb_cuda_forward_scale", ([&] { using scalar_accum_t = ACCUM_TYPE_FOR(scalar_t); wb_cuda_scale_calc_kernel<<>>( input.data() + ch_idx * elements_per_scale, @@ -181,7 +181,7 @@ at::Tensor wb_cuda_forward( dev_last_block_counter.fill_(0); } - AT_DISPATCH_FLOATING_TYPES_AND_HALF(input.type(), "wb_cuda_forward_binarize", ([&] { + DISPATCH_TENSOR_DATA_TYPES(input.type(), "wb_cuda_forward_binarize", ([&] { wb_cuda_binarize_kernel<<>>( output.data(), input.data(), @@ -210,7 +210,7 @@ at::Tensor ab_cuda_forward( auto output = at::empty_like(input); - AT_DISPATCH_FLOATING_TYPES_AND_HALF(input.type(), "ab_cuda_forward", ([&] { + DISPATCH_TENSOR_DATA_TYPES(input.type(), "ab_cuda_forward", ([&] { ab_cuda_forward_kernel<<>>( output.data(), input.data(), @@ -256,7 +256,7 @@ std::vector ab_cuda_backward( int64_t total_elements_per_threshold = input.numel() / threshold_count; int64_t contiguous_elements_per_threshold = input_elements_count / input.size(0) / input.size(1); - AT_DISPATCH_FLOATING_TYPES_AND_HALF(input.type(), "ab_cuda_backward", ([&] { + DISPATCH_TENSOR_DATA_TYPES(input.type(), "ab_cuda_backward", ([&] { ab_cuda_grad_input_kernel<<>>( grad_input.data(), grad_output.data(), @@ -272,7 +272,7 @@ std::vector ab_cuda_backward( auto dev_last_block_counter = at::zeros({1}, at::device(grad_output.options().device()).dtype(at::kInt)); - AT_DISPATCH_FLOATING_TYPES_AND_HALF(input.type(), "ab_cuda_backward", ([&] { + DISPATCH_TENSOR_DATA_TYPES(input.type(), "ab_cuda_backward", ([&] { using scalar_accum_t = ACCUM_TYPE_FOR(scalar_t); ab_cuda_grad_scale_kernel<<>>( grad_scale.data(), @@ -293,7 +293,7 @@ std::vector ab_cuda_backward( for (int64_t ch_idx = 0; ch_idx < 
threshold_count; ch_idx++) { auto init_element_offset = contiguous_elements_per_threshold * ch_idx; - AT_DISPATCH_FLOATING_TYPES_AND_HALF(input.type(), "ab_cuda_backward", ([&] { + DISPATCH_TENSOR_DATA_TYPES(input.type(), "ab_cuda_backward", ([&] { using scalar_accum_t = ACCUM_TYPE_FOR(scalar_t); ab_cuda_grad_thresholds_kernel<<>>( grad_thresholds.data() + ch_idx, diff --git a/nncf/torch/extensions/src/quantization/cpu/functions_cpu.cpp b/nncf/torch/extensions/src/quantization/cpu/functions_cpu.cpp index 97cc47d0b3f..a49c230c1ca 100644 --- a/nncf/torch/extensions/src/quantization/cpu/functions_cpu.cpp +++ b/nncf/torch/extensions/src/quantization/cpu/functions_cpu.cpp @@ -80,7 +80,8 @@ at::Tensor q_forward( } at::Tensor output; - AT_DISPATCH_FLOATING_TYPES_AND_HALF(input.type(), "q_cpu_forward", ([&] { + + DISPATCH_TENSOR_DATA_TYPES(input.type(), "q_cpu_forward", ([&] { output = q_cpu_forward(input, input_low, input_range, levels); })); @@ -102,7 +103,7 @@ std::vector q_backward( CHECK_INPUT(input_range); std::vector results; - AT_DISPATCH_FLOATING_TYPES_AND_HALF(input.type(), "q_cpu_backward", ([&] { + DISPATCH_TENSOR_DATA_TYPES(input.type(), "q_cpu_backward", ([&] { results = q_cpu_backward(grad_output, input, input_low, input_range, levels, level_low, level_high, is_asymmetric); })); diff --git a/nncf/torch/extensions/src/quantization/cuda/functions_cuda_impl.cu b/nncf/torch/extensions/src/quantization/cuda/functions_cuda_impl.cu index b40a4b524ff..c7a0989c840 100644 --- a/nncf/torch/extensions/src/quantization/cuda/functions_cuda_impl.cu +++ b/nncf/torch/extensions/src/quantization/cuda/functions_cuda_impl.cu @@ -323,7 +323,7 @@ at::Tensor q_cuda_forward( auto output = at::empty_like(input); - PROFILE(AT_DISPATCH_FLOATING_TYPES_AND_HALF(input.scalar_type(), "q_cuda_forward", ([&] { + PROFILE(DISPATCH_TENSOR_DATA_TYPES(input.scalar_type(), "q_cuda_forward", ([&] { q_cuda_forward_kernel<<>>( output.data_ptr(), input.data_ptr(), @@ -361,7 +361,7 @@ std::vector q_single_scale_cuda_backward(at::Tensor grad_output, auto dev_last_block_counter_range = at::zeros({1}, at::device(grad_output.options().device()).dtype(at::kInt)); auto dev_last_block_counter_low = at::zeros({1}, at::device(grad_output.options().device()).dtype(at::kInt)); - PROFILE(AT_DISPATCH_FLOATING_TYPES_AND_HALF(input.scalar_type(), "q_single_scale_cuda_backward", ([&] { + PROFILE(DISPATCH_TENSOR_DATA_TYPES(input.scalar_type(), "q_single_scale_cuda_backward", ([&] { using scalar_accum_t = ACCUM_TYPE_FOR(scalar_t); q_single_scale_cuda_backward_kernel<<>>( grad_input.data_ptr(), @@ -409,7 +409,7 @@ std::vector q_scale_per_weight_channel_cuda_backward(at::Tensor grad auto dev_last_block_counter_range = at::zeros({grid_size.x, 1}, at::device(grad_output.options().device()).dtype(at::kInt)); auto dev_last_block_counter_low = at::zeros({grid_size.x, 1}, at::device(grad_output.options().device()).dtype(at::kInt)); - PROFILE(AT_DISPATCH_FLOATING_TYPES_AND_HALF(input.scalar_type(), "q_single_scale_cuda_backward", ([&] { + PROFILE(DISPATCH_TENSOR_DATA_TYPES(input.scalar_type(), "q_single_scale_cuda_backward", ([&] { using scalar_accum_t = ACCUM_TYPE_FOR(scalar_t); q_scale_per_weight_channel_cuda_backward_kernel<<>>( grad_input.data_ptr(), @@ -460,7 +460,7 @@ std::vector q_scale_per_activation_channel_cuda_backward(at::Tensor auto dev_last_block_counter_low = at::zeros({grid_size.x, 1}, at::device(grad_output.options().device()).dtype(at::kInt)); PROFILE( - AT_DISPATCH_FLOATING_TYPES_AND_HALF(input.scalar_type(), 
"q_scale_per_activation_channel_cuda_backward", ([&] { + DISPATCH_TENSOR_DATA_TYPES(input.scalar_type(), "q_scale_per_activation_channel_cuda_backward", ([&] { using scalar_accum_t = ACCUM_TYPE_FOR(scalar_t); q_scale_per_activation_channel_cuda_backward_kernel<<>>( grad_input.data_ptr(), diff --git a/nncf/torch/graph/graph.py b/nncf/torch/graph/graph.py index 48afd68d1bc..5c1adfac91e 100644 --- a/nncf/torch/graph/graph.py +++ b/nncf/torch/graph/graph.py @@ -35,7 +35,10 @@ def get_input_shapes_for_node(self, node_name: NNCFNodeName) -> Dict[int, Tuple] edge_attr_dict = self._nx_graph.edges[in_edge] port_id = edge_attr_dict[NNCFGraph.INPUT_PORT_ID_EDGE_ATTR] assert port_id not in retval - retval[port_id] = edge_attr_dict[NNCFGraph.ACTIVATION_SHAPE_EDGE_ATTR] + for p in [ + port_id, + ] + edge_attr_dict[NNCFGraph.PARALLEL_INPUT_PORT_IDS_ATTR]: + retval[p] = edge_attr_dict[NNCFGraph.ACTIVATION_SHAPE_EDGE_ATTR] return retval def get_input_shape_for_insertion_point(self, insertion_point: PTTargetPoint) -> Tuple[int]: diff --git a/nncf/torch/graph/graph_builder.py b/nncf/torch/graph/graph_builder.py index f133275ca87..f98c2f7b43e 100644 --- a/nncf/torch/graph/graph_builder.py +++ b/nncf/torch/graph/graph_builder.py @@ -96,6 +96,7 @@ def convert(dynamic_graph: DynamicGraph, input_infos: List[ModelInputInfo] = Non input_port_id=dynamic_graph_edge.input_port_id, output_port_id=dynamic_graph_edge.output_port_id, dtype=dynamic_graph_edge.dtype, + parallel_input_port_ids=dynamic_graph_edge.parallel_input_port_ids, ) set_nodes_attributes_in_nncf_graph(nncf_graph) diff --git a/nncf/torch/graph/operator_metatypes.py b/nncf/torch/graph/operator_metatypes.py index 474ef297bff..484685667bf 100644 --- a/nncf/torch/graph/operator_metatypes.py +++ b/nncf/torch/graph/operator_metatypes.py @@ -9,6 +9,7 @@ # See the License for the specific language governing permissions and # limitations under the License. + from typing import Dict, List, Optional, Type, TypeVar from nncf.common.graph.definitions import NNCFGraphNodeType @@ -39,21 +40,23 @@ class PTOperatorMetatype(OperatorMetatype): so that the entire group of operations is visible in the internal graph. Grouping also allows efficient application of HW specifics to compression of certain operation groups. + + :param external_op_names: Names of functions registered as operators via @register_operator to be associated + with this metatype. + :param module_to_function_names: Names of functions from 'torch.nn.function', 'torch.tensor' and 'torch' modules + respectively, which are associated with this metatype. + :param subtypes: List of subtypes of PyTorch operator. """ - # Names of functions registered as operators via @register_operator to be associated - # with this metatype - external_op_names = [] # type: List[str] + external_op_names: List[str] = [] - # Names of functions from 'torch.nn.function', 'torch.tensor' and 'torch' modules respectively, - # which are associated with this metatype. 
- module_to_function_names = { + module_to_function_names: Dict[NamespaceTarget, List[str]] = { NamespaceTarget.TORCH_NN_FUNCTIONAL: [], NamespaceTarget.TORCH_TENSOR: [], NamespaceTarget.TORCH: [], - } # type: Dict[NamespaceTarget, List[str]] + } - subtypes = [] # type: List[Type[PTOperatorMetatype]] + subtypes: List[Type["PTOperatorMetatype"]] = [] @classmethod def get_subtypes(cls) -> List[Type["PTOperatorMetatype"]]: @@ -161,6 +164,7 @@ class PTDepthwiseConv1dSubtype(PTDepthwiseConvOperatorSubtype): name = "Conv1DOp" hw_config_name = [HWConfigOpName.DEPTHWISECONVOLUTION] module_to_function_names = {NamespaceTarget.TORCH_NN_FUNCTIONAL: ["conv1d"]} + output_channel_axis = 1 @PT_OPERATOR_METATYPES.register() @@ -169,6 +173,7 @@ class PTModuleConv1dMetatype(PTModuleOperatorSubtype): hw_config_names = [HWConfigOpName.CONVOLUTION] module_to_function_names = {NamespaceTarget.TORCH_NN_FUNCTIONAL: ["conv1d"]} subtypes = [PTDepthwiseConv1dSubtype] + output_channel_axis = 1 @PT_OPERATOR_METATYPES.register() @@ -177,6 +182,7 @@ class PTConv1dMetatype(PTOperatorMetatype): hw_config_names = [HWConfigOpName.CONVOLUTION] module_to_function_names = {NamespaceTarget.TORCH_NN_FUNCTIONAL: ["conv1d"]} subtypes = [PTModuleConv1dMetatype] + output_channel_axis = 1 @PT_OPERATOR_METATYPES.register() @@ -184,6 +190,7 @@ class PTDepthwiseConv2dSubtype(PTDepthwiseConvOperatorSubtype): name = "Conv2DOp" hw_config_names = [HWConfigOpName.DEPTHWISECONVOLUTION] module_to_function_names = {NamespaceTarget.TORCH_NN_FUNCTIONAL: ["conv2d"]} + output_channel_axis = 1 @PT_OPERATOR_METATYPES.register() @@ -192,6 +199,7 @@ class PTModuleConv2dMetatype(PTModuleOperatorSubtype): hw_config_names = [HWConfigOpName.CONVOLUTION] module_to_function_names = {NamespaceTarget.TORCH_NN_FUNCTIONAL: ["conv2d"]} subtypes = [PTDepthwiseConv2dSubtype] + output_channel_axis = 1 @PT_OPERATOR_METATYPES.register() @@ -200,6 +208,7 @@ class PTConv2dMetatype(PTOperatorMetatype): hw_config_names = [HWConfigOpName.CONVOLUTION] module_to_function_names = {NamespaceTarget.TORCH_NN_FUNCTIONAL: ["conv2d"]} subtypes = [PTModuleConv2dMetatype] + output_channel_axis = 1 @PT_OPERATOR_METATYPES.register() @@ -207,6 +216,7 @@ class PTDepthwiseConv3dSubtype(PTDepthwiseConvOperatorSubtype): name = "Conv3DOp" hw_config_names = [HWConfigOpName.DEPTHWISECONVOLUTION] module_to_function_names = {NamespaceTarget.TORCH_NN_FUNCTIONAL: ["conv3d"]} + output_channel_axis = 1 @PT_OPERATOR_METATYPES.register() @@ -215,6 +225,7 @@ class PTModuleConv3dMetatype(PTModuleOperatorSubtype): hw_config_names = [HWConfigOpName.CONVOLUTION] module_to_function_names = {NamespaceTarget.TORCH_NN_FUNCTIONAL: ["conv3d"]} subtypes = [PTDepthwiseConv3dSubtype] + output_channel_axis = 1 @PT_OPERATOR_METATYPES.register() @@ -223,6 +234,7 @@ class PTConv3dMetatype(PTOperatorMetatype): hw_config_names = [HWConfigOpName.CONVOLUTION] module_to_function_names = {NamespaceTarget.TORCH_NN_FUNCTIONAL: ["conv3d"]} subtypes = [PTModuleConv3dMetatype] + output_channel_axis = 1 @PT_OPERATOR_METATYPES.register() @@ -230,6 +242,7 @@ class PTModuleConvTranspose1dMetatype(PTModuleOperatorSubtype): name = "ConvTranspose1DOp" hw_config_names = [HWConfigOpName.CONVOLUTION] module_to_function_names = {NamespaceTarget.TORCH_NN_FUNCTIONAL: ["conv_transpose1d"]} + output_channel_axis = 1 @PT_OPERATOR_METATYPES.register() @@ -238,6 +251,7 @@ class PTConvTranspose1dMetatype(PTOperatorMetatype): hw_config_names = [HWConfigOpName.CONVOLUTION] module_to_function_names = {NamespaceTarget.TORCH_NN_FUNCTIONAL: 
["conv_transpose1d"]} subtypes = [PTModuleConvTranspose1dMetatype] + output_channel_axis = 1 @PT_OPERATOR_METATYPES.register() @@ -245,6 +259,7 @@ class PTModuleConvTranspose2dMetatype(PTModuleOperatorSubtype): name = "ConvTranspose2DOp" hw_config_names = [HWConfigOpName.CONVOLUTION] module_to_function_names = {NamespaceTarget.TORCH_NN_FUNCTIONAL: ["conv_transpose2d"]} + output_channel_axis = 1 @PT_OPERATOR_METATYPES.register() @@ -253,6 +268,7 @@ class PTConvTranspose2dMetatype(PTOperatorMetatype): hw_config_names = [HWConfigOpName.CONVOLUTION] module_to_function_names = {NamespaceTarget.TORCH_NN_FUNCTIONAL: ["conv_transpose2d"]} subtypes = [PTModuleConvTranspose2dMetatype] + output_channel_axis = 1 @PT_OPERATOR_METATYPES.register() @@ -260,6 +276,7 @@ class PTModuleConvTranspose3dMetatype(PTModuleOperatorSubtype): name = "ConvTranspose3DOp" hw_config_names = [HWConfigOpName.CONVOLUTION] module_to_function_names = {NamespaceTarget.TORCH_NN_FUNCTIONAL: ["conv_transpose3d"]} + output_channel_axis = 1 @PT_OPERATOR_METATYPES.register() @@ -268,6 +285,7 @@ class PTConvTranspose3dMetatype(PTOperatorMetatype): hw_config_names = [HWConfigOpName.CONVOLUTION] module_to_function_names = {NamespaceTarget.TORCH_NN_FUNCTIONAL: ["conv_transpose3d"]} subtypes = [PTModuleConvTranspose3dMetatype] + output_channel_axis = 1 @PT_OPERATOR_METATYPES.register() @@ -288,6 +306,7 @@ class PTModuleLinearMetatype(PTModuleOperatorSubtype): name = "LinearOp" module_to_function_names = {NamespaceTarget.TORCH_NN_FUNCTIONAL: ["linear"], NamespaceTarget.TORCH: ["addmm"]} hw_config_names = [HWConfigOpName.MATMUL] + output_channel_axis = -1 @PT_OPERATOR_METATYPES.register() @@ -296,6 +315,7 @@ class PTLinearMetatype(PTOperatorMetatype): module_to_function_names = {NamespaceTarget.TORCH_NN_FUNCTIONAL: ["linear"], NamespaceTarget.TORCH: ["addmm"]} hw_config_names = [HWConfigOpName.MATMUL] subtypes = [PTModuleLinearMetatype] + output_channel_axis = -1 @PT_OPERATOR_METATYPES.register() @@ -546,6 +566,12 @@ class PTAvgPool3dMetatype(PTOperatorMetatype): hw_config_names = [HWConfigOpName.AVGPOOL] +class PTMaxPool1dMetatype(PTOperatorMetatype): + name = "MaxPool1DOp" + module_to_function_names = {NamespaceTarget.TORCH_NN_FUNCTIONAL: ["max_pool1d", "adaptive_max_pool1d"]} + hw_config_names = [HWConfigOpName.MAXPOOL] + + @PT_OPERATOR_METATYPES.register() class PTMaxPool2dMetatype(PTOperatorMetatype): name = "MaxPool2DOp" @@ -560,6 +586,18 @@ class PTMaxPool3dMetatype(PTOperatorMetatype): hw_config_names = [HWConfigOpName.MAXPOOL] +@PT_OPERATOR_METATYPES.register() +class PTMaxUnpool1dMetatype(PTOperatorMetatype): + name = "MaxUnPool1DOp" + module_to_function_names = {NamespaceTarget.TORCH_NN_FUNCTIONAL: ["max_unpool1d"]} + + +@PT_OPERATOR_METATYPES.register() +class PTMaxUnpool2dMetatype(PTOperatorMetatype): + name = "MaxUnPool2DOp" + module_to_function_names = {NamespaceTarget.TORCH_NN_FUNCTIONAL: ["max_unpool2d"]} + + @PT_OPERATOR_METATYPES.register() class PTMaxUnpool3dMetatype(PTOperatorMetatype): name = "MaxUnPool3DOp" @@ -634,10 +672,20 @@ class PTScatterMetatype(PTOperatorMetatype): class PTReshapeMetatype(PTOperatorMetatype): name = "ReshapeOp" module_to_function_names = { - NamespaceTarget.TORCH_TENSOR: ["reshape", "view", "flatten", "squeeze", "unsqueeze"], - NamespaceTarget.TORCH: ["squeeze", "flatten", "unsqueeze"], + NamespaceTarget.TORCH_TENSOR: ["reshape", "view", "flatten", "unsqueeze"], + NamespaceTarget.TORCH: ["flatten", "unsqueeze"], + } + hw_config_names = [HWConfigOpName.RESHAPE, HWConfigOpName.UNSQUEEZE, 
HWConfigOpName.FLATTEN] + + +@PT_OPERATOR_METATYPES.register() +class PTSqueezeMetatype(PTOperatorMetatype): + name = "SqueezeOp" + module_to_function_names = { + NamespaceTarget.TORCH_TENSOR: ["squeeze"], + NamespaceTarget.TORCH: ["squeeze"], } - hw_config_names = [HWConfigOpName.RESHAPE, HWConfigOpName.SQUEEZE, HWConfigOpName.UNSQUEEZE, HWConfigOpName.FLATTEN] + hw_config_names = [HWConfigOpName.SQUEEZE] @PT_OPERATOR_METATYPES.register() @@ -853,4 +901,36 @@ def get_operator_metatypes() -> List[Type[OperatorMetatype]]: PTModuleEmbeddingBagMetatype, ] +UNIFICATION_PRODUCING_METATYPES = [ + PTModuleConv1dMetatype, + PTModuleConv2dMetatype, + PTModuleConv3dMetatype, + PTDepthwiseConv1dSubtype, + PTDepthwiseConv2dSubtype, + PTDepthwiseConv3dSubtype, + PTModuleConvTranspose1dMetatype, + PTModuleConvTranspose2dMetatype, + PTModuleConvTranspose3dMetatype, + PTModuleLinearMetatype, +] + OP_NAMES_WITH_WEIGHTS = [x for meta in OPERATORS_WITH_WEIGHTS_METATYPES for x in meta.get_all_aliases()] + +# Contains the operation metatypes for which bias can be applied. +OPERATORS_WITH_BIAS_METATYPES = [ + PTModuleConv1dMetatype, + PTModuleConv2dMetatype, + PTModuleConv3dMetatype, + PTDepthwiseConv1dSubtype, + PTDepthwiseConv2dSubtype, + PTDepthwiseConv3dSubtype, + PTModuleConvTranspose1dMetatype, + PTModuleConvTranspose2dMetatype, + PTModuleConvTranspose3dMetatype, +] + +OPERATORS_FUSED_METATYPES = [ + PTModuleBatchNormMetatype, +] + +OP_NAMES_QUANTIZE_NODE = ["symmetric_quantize", "asymmetric_quantize"] diff --git a/nncf/torch/graph/transformations/command_creation.py b/nncf/torch/graph/transformations/command_creation.py new file mode 100644 index 00000000000..8408c92aa4c --- /dev/null +++ b/nncf/torch/graph/transformations/command_creation.py @@ -0,0 +1,29 @@ +# Copyright (c) 2023 Intel Corporation +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from torch import Tensor + +from nncf.common.graph.graph import NNCFNode +from nncf.common.graph.transformations.commands import TargetType +from nncf.torch.graph.transformations.commands import PTBiasCorrectionCommand +from nncf.torch.graph.transformations.commands import PTTargetPoint + + +def create_bias_correction_command(node: NNCFNode, bias_value: Tensor) -> PTBiasCorrectionCommand: + """ + Creates bias correction command. + + :param node: The node in the NNCF graph that corresponds to operation with bias. + :param bias_value: The new bias value that will be set. + :return: The `PTBiasCorrectionCommand` command to update bias. 
+ """ + target_point = PTTargetPoint(TargetType.LAYER, node.node_name) + return PTBiasCorrectionCommand(target_point, bias_value) diff --git a/nncf/torch/graph/transformations/commands.py b/nncf/torch/graph/transformations/commands.py index dbc259d0585..74d6dd43bea 100644 --- a/nncf/torch/graph/transformations/commands.py +++ b/nncf/torch/graph/transformations/commands.py @@ -1,6 +1,20 @@ +# Copyright (c) 2023 Intel Corporation +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + from typing import Any, Callable, Dict +import torch + from nncf.common.graph import NNCFNodeName +from nncf.common.graph.transformations.commands import Command from nncf.common.graph.transformations.commands import TargetPoint from nncf.common.graph.transformations.commands import TargetType from nncf.common.graph.transformations.commands import TransformationCommand @@ -22,13 +36,15 @@ class PTTargetPoint(TargetPoint): ] _HOOK_TYPES = [TargetType.OPERATOR_PRE_HOOK, TargetType.OPERATOR_POST_HOOK] + _LAYER_TYPE = [TargetType.LAYER] + _state_names = PTTargetPointStateNames def __init__(self, target_type: TargetType, target_node_name: NNCFNodeName, *, input_port_id: int = None): super().__init__(target_type) self.target_node_name = target_node_name self.target_type = target_type - if self.target_type not in self._OPERATION_TYPES + self._HOOK_TYPES: + if self.target_type not in self._OPERATION_TYPES + self._HOOK_TYPES + self._LAYER_TYPE: raise NotImplementedError("Unsupported target type: {}".format(target_type)) self.input_port_id = input_port_id @@ -43,7 +59,7 @@ def __eq__(self, other: "PTTargetPoint"): def __str__(self): prefix = str(self.target_type) retval = prefix - if self.target_type in self._OPERATION_TYPES: + if self.target_type in self._OPERATION_TYPES + self._LAYER_TYPE: retval += " {}".format(self.target_node_name) elif self.target_type in self._HOOK_TYPES: if self.input_port_id is not None: @@ -82,7 +98,42 @@ def from_state(cls, state: Dict[str, Any]) -> "PTTargetPoint": return cls(**kwargs) -class PTInsertionCommand(TransformationCommand): +class PTCommand(Command): + """ + The base class for all Command for PyTorch. + """ + + def requires_graph_rebuild(self): + """ + Return boolean flag to rebuild graph of model. + + :return: Boolean flag. + """ + return False + + +class PTTransformationCommand(TransformationCommand): + """ + The base class for all TransformationCommand for PyTorch. + """ + + def requires_graph_rebuild(self): + """ + Return boolean flag to rebuild graph of model. + + :return: Boolean flag. + """ + return False + + def union(self, other: "PTTransformationCommand") -> "PTTransformationCommand": + raise NotImplementedError() + + +class PTInsertionCommand(PTTransformationCommand): + """ + Insertion operation to the models. 
+ """ + def __init__( self, point: PTTargetPoint, @@ -93,6 +144,48 @@ def __init__( self.fn = fn # type: Callable self.priority = priority # type: TransformationPriority - def union(self, other: "TransformationCommand") -> "TransformationCommand": + def union(self, other: "PTTransformationCommand") -> "PTTransformationCommand": # TODO: keep all TransformationCommands atomic, refactor TransformationLayout instead raise NotImplementedError() + + def requires_graph_rebuild(self): + """ + Return boolean flag to rebuild graph of model. + + :return: Boolean flag. + """ + # Rebuild graph when adding quantization nodes. + return self.priority == TransformationPriority.QUANTIZATION_PRIORITY + + +class PTModelExtractionWithFusedBiasCommand(PTCommand): + """ + Extracts sequence by name with node that contain fused bias. + """ + + def __init__(self, node_name: str): + """ + :param node_name: Node name that will be extracted. + """ + super().__init__(TransformationType.EXTRACT) + self.node_name = node_name + + def union(self, other: "Command") -> "Command": + raise NotImplementedError() + + +class PTBiasCorrectionCommand(PTTransformationCommand): + """ + Corrects bias value in the model based on the input value. + """ + + def __init__(self, target_point: PTTargetPoint, bias_value: torch.Tensor): + """ + :param target_point: The TargetPoint instance for the correction that contains layer's information. + :param bias_value: The bias shift value that will be added to the original bias value. + """ + super().__init__(TransformationType.CHANGE, target_point) + self.bias_value = bias_value + + def union(self, other: "PTTransformationCommand") -> "PTTransformationCommand": + raise NotImplementedError() diff --git a/nncf/torch/graph/transformations/layout.py b/nncf/torch/graph/transformations/layout.py index 7cfa1a61f38..6d48bdd0c08 100644 --- a/nncf/torch/graph/transformations/layout.py +++ b/nncf/torch/graph/transformations/layout.py @@ -1,3 +1,14 @@ +# Copyright (c) 2023 Intel Corporation +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ from nncf.common.graph.transformations.layout import TransformationLayout diff --git a/nncf/torch/hardware/fused_patterns.py b/nncf/torch/hardware/fused_patterns.py index 4ab781898d1..c30e5b0d4c0 100644 --- a/nncf/torch/hardware/fused_patterns.py +++ b/nncf/torch/hardware/fused_patterns.py @@ -12,6 +12,7 @@ from nncf.common.graph.patterns import GraphPattern from nncf.common.graph.patterns import HWFusedPatternNames from nncf.common.utils.registry import Registry +from nncf.torch.graph.operator_metatypes import PTInputNoopMetatype from nncf.torch.graph.pattern_operations import ARITHMETIC_OPERATIONS from nncf.torch.graph.pattern_operations import ATOMIC_ACTIVATIONS_OPERATIONS from nncf.torch.graph.pattern_operations import BATCH_NORMALIZATION_OPERATIONS @@ -46,36 +47,25 @@ def create_l2_norm_operations() -> GraphPattern: return pattern -@PT_HW_FUSED_PATTERNS.register(HWFusedPatternNames.MATMUL_SOFTMAX_MATMUL) -def create_matmul_softmax_matmul() -> GraphPattern: - matmul_aliases = ["linear", "addmm", "matmul", "bmm", "mm", "baddbmm"] - pattern = GraphPattern() - softmax_1 = pattern.add_node(label="SOFTMAX", type="softmax") - mat_mul_1_1 = pattern.add_node(label="MATMUL_1", type=matmul_aliases) - mat_mul_2_1 = pattern.add_node(label="MATMUL_2", type=matmul_aliases) - - any_1 = pattern.add_node(label="ANY", type=GraphPattern.NON_PATTERN_NODE_TYPE) - - pattern.add_edge(mat_mul_1_1, softmax_1) - pattern.add_edge(softmax_1, mat_mul_2_1) - pattern.add_edge(any_1, mat_mul_2_1) - - softmax_2 = pattern.add_node(label="SOFTMAX", type="softmax") - add_2 = pattern.add_node(label="ADD", type=["add", "__add__", "__iadd__", "__radd__"]) - mat_mul_1_2 = pattern.add_node(label="MATMUL_1", type=matmul_aliases) - mat_mul_2_2 = pattern.add_node(label="MATMUL_2", type=matmul_aliases) - - any_2 = pattern.add_node(label="ANY", type=GraphPattern.NON_PATTERN_NODE_TYPE) +# COMBINATIONS - pattern.add_edge(mat_mul_1_2, add_2) - pattern.add_edge(add_2, softmax_2) - pattern.add_edge(softmax_2, mat_mul_2_2) - pattern.add_edge(any_2, mat_mul_2_2) +@PT_HW_FUSED_PATTERNS.register(HWFusedPatternNames.SHIFT_SCALE) +def create_shift_scale() -> GraphPattern: + pattern = GraphPattern() + add_node = pattern.add_node(label="ADD, SUB", type=["__add__", "__sub__"]) + truediv_node = pattern.add_node(label="MUL, DIV", type=["__mul__", "__truediv__"]) + pattern.add_edge(add_node, truediv_node) return pattern -# COMBINATIONS +@PT_HW_FUSED_PATTERNS.register(HWFusedPatternNames.INPUT_SHIFT_SCALE) +def create_input_shift_scale() -> GraphPattern: + pattern = GraphPattern() + pattern.add_node(**{GraphPattern.LABEL_ATTR: "MODEL_INPUT", GraphPattern.METATYPE_ATTR: PTInputNoopMetatype}) + shift_scale = create_shift_scale() + pattern.join_patterns(shift_scale) + return pattern @PT_HW_FUSED_PATTERNS.register(HWFusedPatternNames.LINEAR_ARITHMETIC) diff --git a/nncf/torch/initialization.py b/nncf/torch/initialization.py index 24a5893c6e5..5e06f395b02 100644 --- a/nncf/torch/initialization.py +++ b/nncf/torch/initialization.py @@ -1,3 +1,14 @@ +# Copyright (c) 2023 Intel Corporation +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + import math from contextlib import contextmanager from functools import partial diff --git a/nncf/torch/layer_utils.py b/nncf/torch/layer_utils.py index 3571329009e..6ff3f361b6b 100644 --- a/nncf/torch/layer_utils.py +++ b/nncf/torch/layer_utils.py @@ -1,3 +1,14 @@ +# Copyright (c) 2023 Intel Corporation +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + import torch from torch import nn @@ -13,6 +24,10 @@ def __init__(self, module): def __getattr__(self, name): return getattr(self._module, name) + @property + def __class__(self): + return type(self._module) + class _NNCFModuleMixin: """ diff --git a/nncf/torch/layers.py b/nncf/torch/layers.py index 090be32913a..f22dba2dac1 100644 --- a/nncf/torch/layers.py +++ b/nncf/torch/layers.py @@ -349,6 +349,7 @@ def from_module(module): class NNCFEmbedding(_NNCFModuleMixin, nn.Embedding): op_func_name = "embedding" + target_weight_dim_for_compression = 0 # Note that this does not require activation quantization because it's basically a lookup. @staticmethod @@ -449,7 +450,9 @@ def from_module(module): @api(canonical_alias="nncf.torch.register_module") -def register_module(*quantizable_field_names: str, ignored_algorithms: list = None): +def register_module( + *quantizable_field_names: str, ignored_algorithms: list = None, target_weight_dim_for_compression: int = 0 +): # quantizable_field_names will work for `weight` attributes only. Should later extend to registering # customly named attributes if it becomes necessary def wrap(cls): @@ -462,6 +465,10 @@ def wrap(cls): setattr(NNCF_WRAPPED_USER_MODULES_DICT[cls], "get_weight_shape", get_base_attributes_fn) if ignored_algorithms: setattr(NNCF_WRAPPED_USER_MODULES_DICT[cls], "ignored_algorithms", ignored_algorithms) + + setattr( + NNCF_WRAPPED_USER_MODULES_DICT[cls], "target_weight_dim_for_compression", target_weight_dim_for_compression + ) return cls return wrap diff --git a/nncf/torch/model_analyzer.py b/nncf/torch/model_analyzer.py new file mode 100644 index 00000000000..bf4a8daf57f --- /dev/null +++ b/nncf/torch/model_analyzer.py @@ -0,0 +1,87 @@ +# Copyright (c) 2023 Intel Corporation +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
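The extended `register_module` decorator above now also records `target_weight_dim_for_compression` on wrapped user modules. A hedged sketch of registering a hypothetical custom layer (the layer itself is not part of NNCF):

import torch
from torch import nn
from nncf.torch import register_module

@register_module(target_weight_dim_for_compression=0)
class MyProjection(nn.Module):
    # Hypothetical user layer; dimension 0 of `weight` is treated as the
    # output-channel dimension when NNCF compresses the weight.
    def __init__(self, in_features: int, out_features: int):
        super().__init__()
        self.weight = nn.Parameter(torch.empty(out_features, in_features))
        nn.init.xavier_uniform_(self.weight)

    def forward(self, x):
        return x @ self.weight.t()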
+ +from typing import Optional + +import torch + +from nncf.common.graph.graph import NNCFGraph +from nncf.common.graph.graph import NNCFNode +from nncf.torch.graph.operator_metatypes import OP_NAMES_QUANTIZE_NODE +from nncf.torch.graph.operator_metatypes import OPERATORS_FUSED_METATYPES +from nncf.torch.graph.operator_metatypes import OPERATORS_WITH_BIAS_METATYPES +from nncf.torch.nncf_network import NNCFNetwork + + +def get_potential_fused_node(node_name: str, nncf_graph: NNCFGraph) -> Optional[NNCFNode]: + """ + Get next node that can contain fused bias in runtime. + + :param node_name: The node name. + :param nncf_graph: The NNCF graph. + :return: The node that can be fused or None. + """ + target_node = nncf_graph.get_node_by_name(node_name) + + if target_node.metatype in OPERATORS_WITH_BIAS_METATYPES: + next_nodes = nncf_graph.get_next_nodes(target_node) + for node in next_nodes: + if node.metatype in OPERATORS_FUSED_METATYPES: + return node + return None + + +def is_node_with_fused_bias(node: NNCFNode, nncf_graph: NNCFGraph) -> bool: + """ + Checks if the node has a fused bias. + + :param node: The node to check. + :param nncf_graph: The NNCF graph. + :return: Return `True` if `node` corresponds to the operation + with bias (bias is added to the output tensor of that operation), + `False` otherwise. + """ + fused_node = get_potential_fused_node(node.node_name, nncf_graph) + + return node.metatype in OPERATORS_WITH_BIAS_METATYPES and ( + node.layer_attributes.with_bias if fused_node is None else fused_node.layer_attributes.with_bias + ) + + +def get_fused_bias_value(node: NNCFNode, model: NNCFNetwork) -> Optional[torch.Tensor]: + """ + Returns the bias tensor for the node or potential fused node. + + :param node: The node that corresponds to the operation with bias. + :param model: The model that contains this operation. + :return: The bias value that is applied to the output tensor of the node's operation. + """ + nncf_graph = model.nncf.get_graph() + fused_node = get_potential_fused_node(node.node_name, nncf_graph) + target_node_name = fused_node.node_name if fused_node else node.node_name + node_module = model.nncf.get_containing_module(target_node_name) + if node_module.bias is None: + return None + return node_module.bias.data + + +def is_quantized_weights(node: NNCFNode, nncf_graph: NNCFGraph) -> bool: + """ + Check that module have fake_quantizer for weight. + + :param node: The target node. + :param nncf_graph: The NNCF graph. + :return bool: return `True` if the node is quantized. + """ + for prev_node in nncf_graph.get_previous_nodes(node): + if prev_node.node_type in OP_NAMES_QUANTIZE_NODE: + return True + return False diff --git a/nncf/torch/model_creation.py b/nncf/torch/model_creation.py index bfb07930888..45652a702a6 100644 --- a/nncf/torch/model_creation.py +++ b/nncf/torch/model_creation.py @@ -1,15 +1,14 @@ -""" - Copyright (c) 2020-2023 Intel Corporation - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - http://www.apache.org/licenses/LICENSE-2.0 - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. 
-""" +# Copyright (c) 2023 Intel Corporation +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + from os import path as osp from typing import Any, Callable, Dict, List, Optional, Tuple @@ -19,6 +18,7 @@ from nncf.api.compression import CompressionAlgorithmController from nncf.common.compression import BaseCompressionAlgorithmController as BaseController +from nncf.common.deprecation import warning_deprecated from nncf.common.logging import nncf_logger from nncf.common.utils.api_marker import api from nncf.common.utils.debug import set_debug_log_dir @@ -93,6 +93,21 @@ def create_compressed_model( is an instance of CompositeCompressionController) and the model ready for compression parameter training wrapped as an object of NNCFNetwork. """ + if isinstance(model, NNCFNetwork): + raise RuntimeError( + "The model object has already been compressed.\n" + "NNCF for PyTorch modifies the model object in-place, and repeat calls to " + "`nncf.torch.create_compressed_model` with the same model object passed as argument " + "will lead to an incorrect attempt to compress the model twice.\n" + "Make sure that the model object you are passing has not already been compressed (for " + "instance, by testing `if isinstance(model, nncf.torch.nncf_network.NNCFNetwork)`).\n" + "If you are encountering this in a Jupyter notebook context - make sure that when " + "re-running cells involving `nncf.torch.create_compressed_model` the original model object " + "is also re-created (via constructor call)." + ) + + if config.get("target_device") == "VPU": + warning_deprecated("VPU device is deprecated and will no longer be supported in the future.") set_debug_log_dir(config.get("log_dir", ".")) diff --git a/nncf/torch/model_transformer.py b/nncf/torch/model_transformer.py new file mode 100644 index 00000000000..befee30ca2b --- /dev/null +++ b/nncf/torch/model_transformer.py @@ -0,0 +1,169 @@ +# Copyright (c) 2023 Intel Corporation +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import copy +from collections import defaultdict +from typing import Callable, Dict, List, Tuple + +from torch import Tensor +from torch import nn + +from nncf.common.graph.model_transformer import ModelTransformer +from nncf.common.graph.transformations.commands import TargetType +from nncf.common.graph.transformations.commands import TransformationPriority +from nncf.torch.graph.transformations.commands import PTBiasCorrectionCommand +from nncf.torch.graph.transformations.commands import PTInsertionCommand +from nncf.torch.graph.transformations.commands import PTModelExtractionWithFusedBiasCommand +from nncf.torch.graph.transformations.commands import PTTargetPoint +from nncf.torch.graph.transformations.layout import PTTransformationLayout +from nncf.torch.model_analyzer import get_potential_fused_node +from nncf.torch.module_operations import UpdateWeight +from nncf.torch.nncf_network import NNCFNetwork +from nncf.torch.nncf_network import PTInsertionPoint + + +class PTModelTransformer(ModelTransformer): + """ + Applies transformations upon PyTorch model. + """ + + def __init__(self, model: NNCFNetwork): + super().__init__(model) + + self._command_transformation_ordered_pairs = [ + (PTModelExtractionWithFusedBiasCommand, self._apply_extraction_with_fused_bias_transformations), + (PTInsertionCommand, self._apply_insertion_transformations), + (PTBiasCorrectionCommand, self._apply_bias_correction_transformations), + ] + + def transform(self, transformation_layout: PTTransformationLayout) -> NNCFNetwork: + transformations = transformation_layout.transformations + aggregated_transformations = defaultdict(list) + requires_graph_rebuild = False + for transformation in transformations: + aggregated_transformations[transformation.__class__].append(transformation) + requires_graph_rebuild = requires_graph_rebuild or transformation.requires_graph_rebuild() + + model = self._model + for transformation_cls, transformation_fn in self._command_transformation_ordered_pairs: + transformations = aggregated_transformations[transformation_cls] + if transformations: + model = transformation_fn(model, transformations) + + if requires_graph_rebuild: + model.nncf.rebuild_graph() + + return model + + @staticmethod + def _apply_insertion_transformations(model: NNCFNetwork, transformations: List[PTInsertionCommand]) -> NNCFNetwork: + """ + Applies insertion transformations to the model. + + :param model: Model to apply transformations. + :param transformations: List of the bias correction transformations. 
+ """ + node_to_op_address_mapping = model.nncf.get_node_to_op_address_mapping() + fns_grouped_by_points = {} # type: Dict[PTInsertionPoint, List[Tuple[Callable, TransformationPriority]]] + + for transformation_command in transformations: # type: PTInsertionCommand + target_point = transformation_command.target_point # type: PTTargetPoint + target_node_name = target_point.target_node_name + pt_ip = PTInsertionPoint( + target_type=target_point.target_type, + op_address=node_to_op_address_mapping[target_node_name], + input_port_id=target_point.input_port_id, + ) + fn = transformation_command.fn + if target_point.type is TargetType.OPERATION_WITH_WEIGHTS: + fn = UpdateWeight(fn) + tup = (fn, transformation_command.priority) + if pt_ip not in fns_grouped_by_points: + fns_grouped_by_points[pt_ip] = [tup] + else: + fns_grouped_by_points[pt_ip].append(tup) + + for pt_ip, fn_list_with_priority in fns_grouped_by_points.items(): + fn_list_with_priority = sorted(fn_list_with_priority, key=lambda x: x[1]) + model.nncf.insert_at_point(pt_ip, [x[0] for x in fn_list_with_priority]) + + return model + + @staticmethod + def _apply_extraction_with_fused_bias_transformations( + model: NNCFNetwork, transformations: List[PTModelExtractionWithFusedBiasCommand] + ) -> nn.Sequential: + """ + Extracts copy of sub-modules from the original base on node name and potential fused nodes. + + :param model: Model to apply transformations. + :param transformation: Model extraction transformation. + :return: Extracted sub-modules. + """ + transformation = transformations[-1] + return extraction_potential_fused_modules(transformation.node_name, model) + + @staticmethod + def _apply_bias_correction_transformations( + model: NNCFNetwork, transformations: List[PTBiasCorrectionCommand] + ) -> NNCFNetwork: + """ + Applies bias correction transformations on the model. + + :param model: Model to apply transformations. + :param transformations: List of the bias correction transformations. + :return: Model with corrected bias. + """ + for transformation in transformations: + update_fused_bias( + target_node_name=transformation.target_point.target_node_name, + new_bias=transformation.bias_value, + model=model, + ) + return model + + +def update_fused_bias(target_node_name: str, new_bias: Tensor, model: NNCFNetwork) -> None: + """ + Update bias for target module or potential fused module. + + :param target_node_name: The target node name. + :param new_bias: New bias value. + :param model: The model. + """ + nncf_graph = model.nncf.get_graph() + fused_node = get_potential_fused_node(target_node_name, nncf_graph) + if fused_node: + target_node_name = fused_node.node_name + + node = model.nncf.get_containing_module(target_node_name) + node.bias.data = new_bias + + +def extraction_potential_fused_modules(node_name: str, model: NNCFNetwork) -> nn.Sequential: + """ + Return Sequential from the copy of module by node_name and potential fused node if exists. + + :param node_name: The node name. + :param model: The model. + + :return nn.Sequential: Copy of the modules. 
+    """
+    extracted_node_names = [node_name]
+    nncf_graph = model.nncf.get_graph()
+    fused_node = get_potential_fused_node(node_name, nncf_graph)
+    if fused_node:
+        extracted_node_names.append(fused_node.node_name)
+
+    extracted_modules = [
+        copy.deepcopy(model.nncf.get_containing_module(node_name)) for node_name in extracted_node_names
+    ]
+    return nn.Sequential(*extracted_modules)
diff --git a/nncf/torch/nncf_module_replacement.py b/nncf/torch/nncf_module_replacement.py
index 342537d6969..b582eb9ca6b 100644
--- a/nncf/torch/nncf_module_replacement.py
+++ b/nncf/torch/nncf_module_replacement.py
@@ -44,10 +44,21 @@ def is_nncf_module(module: nn.Module) -> bool:
     return False
 
 
-def collect_all_scopes_for_extendable_and_extended_modules(module: nn.Module) -> Dict[nn.Module, Set[Scope]]:
+def collect_all_scopes_for_extendable_and_extended_modules(
+    model: nn.Module, predicate: Callable = None
+) -> Dict[nn.Module, Set[Scope]]:
+    """
+    Collects all scopes for all modules in the model that match the predicate condition.
+
+    :param model: The model.
+    :param predicate: A predicate function that can be used to filter modules.
+    By default, the predicate function selects all NNCF modules and modules that can be replaced with NNCF modules.
+    :return: A dictionary mapping modules to sets of scopes.
+    """
     retval = {}
-    predicate = lambda x: _can_extend(x) or is_nncf_module(x)
-    return _collect_modules_and_scopes_recursive_helper(module, Scope(), predicate, retval)
+    if predicate is None:
+        predicate = lambda x: _can_extend(x) or is_nncf_module(x)
+    return _collect_modules_and_scopes_recursive_helper(model, Scope(), predicate, retval)
 
 
 def collect_modules_and_scopes_by_predicate(
@@ -128,6 +139,7 @@ def replace_modules_by_nncf_modules(
     target_scopes: Optional[List[str]] = None,
     eval_op_scopes: Optional[List[Scope]] = None,
     custom_replacer: Callable[[nn.Module], None] = None,
+    predicate_fn: Optional[Callable] = None,
 ) -> Tuple[nn.Module, Dict[torch.nn.Module, List[Scope]]]:
     """
     Replaces certain modules in the model hierarchy with NNCF-wrapped versions of the same modules.
@@ -151,12 +163,13 @@ def replace_modules_by_nncf_modules(
     that end up having a scope not in this list will be considered train-only and will not be replaced).
     :param custom_replacer: The function to be used instead of the regular approach to replace a module with NNCF-
     extended counterpart.
+    :param predicate_fn: The predicate function used to select the modules to be replaced.
     :return: The model with the modules replaced and the dictionary of all extended modules vs list of scopes
     through which the module is accessible.
     The list of scope shall be sorted lexicographically w.r.t. the string representation of the Scope objects.
     The dictionary will also include the extended modules that have already been present in the model.
""" - modules_vs_scopes_dict = collect_all_scopes_for_extendable_and_extended_modules(model) + modules_vs_scopes_dict = collect_all_scopes_for_extendable_and_extended_modules(model, predicate=predicate_fn) inter_dict = {} # type: Dict[nn.Module, Set[Scope]] ret_dict = {} # type: Dict[nn.Module, List[Scope]] for module, scope_set in modules_vs_scopes_dict.items(): @@ -268,7 +281,7 @@ def _is_scopes_allow_replacement( for scope in scope_set_for_module: if matches_any(str(scope), ignored_scopes): nncf_logger.info( - f"Not processing a module that matched to an ignored scope in config; " f"module scope = {str(scope)}" + f"Not processing a module that matched to an ignored scope in config; module scope = {str(scope)}" ) return False if eval_op_scopes is not None: diff --git a/nncf/torch/nncf_network.py b/nncf/torch/nncf_network.py index 3e8fab5fb56..3ce64514848 100644 --- a/nncf/torch/nncf_network.py +++ b/nncf/torch/nncf_network.py @@ -23,12 +23,10 @@ from torch import nn from nncf import nncf_logger -from nncf.common.deprecation import warning_deprecated from nncf.common.graph import NNCFNode from nncf.common.graph import NNCFNodeName from nncf.common.graph.definitions import MODEL_INPUT_OP_NAME from nncf.common.graph.definitions import MODEL_OUTPUT_OP_NAME -from nncf.common.graph.model_transformer import ModelTransformer from nncf.common.graph.transformations.commands import TargetType from nncf.common.graph.transformations.commands import TransformationPriority from nncf.common.insertion_point_graph import InsertionPointGraph @@ -58,10 +56,8 @@ from nncf.torch.graph.operator_metatypes import OPERATORS_WITH_WEIGHTS_METATYPES from nncf.torch.graph.operator_metatypes import PTSplitMetatype from nncf.torch.graph.transformations.commands import PTTargetPoint -from nncf.torch.graph.transformations.layout import PTTransformationLayout from nncf.torch.knowledge_distillation.knowledge_distillation_handler import KnowledgeDistillationLossHandler from nncf.torch.layer_utils import _NNCFModuleMixin -from nncf.torch.module_operations import UpdateWeight from nncf.torch.nested_objects_traversal import objwalk from nncf.torch.nncf_module_replacement import replace_modules_by_nncf_modules from nncf.torch.utils import compute_FLOPs_hook @@ -151,6 +147,8 @@ def get_original_forward(self) -> Callable: Returns the forward function of the original model, unmodified by NNCF. The returned function will have its 0-th implicit `self` argument bound to the model object. """ + if self._original_instance_forward is not None: + return functools.partial(self._original_instance_forward, self._model_ref) return functools.partial(self._original_unbound_forward, self._model_ref) @contextmanager @@ -214,10 +212,12 @@ def __init__( self._original_class = model.nncf._original_class self._bound_original_forward = model.nncf._bound_original_forward self._custom_original_unbound_forward = model.nncf._custom_original_unbound_forward + self._original_instance_forward = model.nncf._original_instance_forward else: self._original_class = model.__class__ self._bound_original_forward = None self._custom_original_unbound_forward = None + self._original_instance_forward = model.__dict__.get("forward") self._forward_signature = inspect.signature(self.get_original_forward()) self._input_infos = input_infos @@ -367,7 +367,7 @@ def get_clean_shallow_copy(self) -> "NNCFNetwork": # WARNING: Will reset pre- and post-ops of the underlying model. 
Use save_nncf_module_additions # and load_nncf_module_additions to preserve these, or temporary_clean_view(). from nncf.torch.utils import load_module_state # pylint: disable=cyclic-import - from nncf.torch.utils import save_module_state + from nncf.torch.utils import save_module_state # pylint: disable=cyclic-import saved_state = save_module_state(self._model_ref) new_interface = NNCFNetworkInterface( @@ -606,9 +606,11 @@ def get_insertion_point_graph(self) -> InsertionPointGraph: # a port ID attribute. in_edges = nncf_graph.get_input_edges(node) for edge in in_edges: - port_id = edge.input_port_id - pre_hook_ip = PreHookInsertionPoint(target_node_name=node.node_name, input_port_id=port_id) - pre_hooks.append(pre_hook_ip) + for port_id in [ + edge.input_port_id, + ] + edge.parallel_input_port_ids: + pre_hook_ip = PreHookInsertionPoint(target_node_name=node.node_name, input_port_id=port_id) + pre_hooks.append(pre_hook_ip) if issubclass(node.metatype, PTSplitMetatype): # chunk returns a tuple of tensors, which can only be handled in NNCF @@ -741,6 +743,21 @@ def get_node_to_op_address_mapping(self) -> Dict[NNCFNodeName, OperationAddress] def set_compression_controller(self, ctrl: "PTCompressionAlgorithmController"): self.compression_controller = ctrl + def strip(self, do_copy: bool = True) -> "NNCFNetwork": + """ + Returns the model object with as much custom NNCF additions as possible removed + while still preserving the functioning of the model object as a compressed model. + :param do_copy: If True (default), will return a copy of the currently associated model object. If False, + will return the currently associated model object "stripped" in-place. + :return: The stripped model. + """ + if self.compression_controller is None: + # PTQ algorithm does not set compressed controller + from nncf.torch.quantization.strip import strip_quantized_model + + return strip_quantized_model(self._model_ref) + return self.compression_controller.strip(do_copy) + class NNCFNetworkMeta(type): """ @@ -814,13 +831,13 @@ def __call__( ) # Make the signature of the forward on the resulting object same as for # the original forward. 
- fn = NNCFNetwork.forward - new_forward = types.FunctionType(fn.__code__, fn.__globals__, fn.__name__, fn.__defaults__, fn.__closure__) - new_forward.__dict__.update(fn.__dict__) - new_forward.__signature__ = inspect.signature(original_class.forward) - if is_debug(): - new_forward = debuggable_forward(new_forward) - new_class.forward = new_forward + new_class.forward = _get_nncf_forward_function_with_signature(inspect.signature(original_class.forward)) + + # In case of overriding forward by code like `model.forward = wrapper(model.forward)` + forward_inst_attr_fn = original_model.__dict__.get("forward") + if forward_inst_attr_fn is not None: + new_inst_forward = _get_nncf_forward_function_with_signature(inspect.signature(forward_inst_attr_fn)) + original_model.__dict__["forward"] = functools.partial(new_inst_forward, original_model) # Make resulting class keep __module__ attributes of the original class, # otherwise these will point to NNCF @@ -850,7 +867,7 @@ def __hash__(cls): if len(cls.__bases__) == 2: original_class = cls.__bases__[1] return hash(original_class) - return id(NNCFNetwork) # conforms to a default hashing behaviour in Python for cls objects + return id(NNCFNetwork) # conforms to a default hashing behavior in Python for cls objects def __eq__(cls, other): """ @@ -865,6 +882,21 @@ def __eq__(cls, other): return other is NNCFNetwork +def _get_nncf_forward_function_with_signature(signature: inspect.Signature): + """ + Create forward function with copy signature of forward function. + :param signature: Signature of function that will used for forward function. + :return: New copy of function NNCFNetwork.forward with specified signature. + """ + fn = NNCFNetwork.forward + new_forward = types.FunctionType(fn.__code__, fn.__globals__, fn.__name__, fn.__defaults__, fn.__closure__) + new_forward.__dict__.update(fn.__dict__) + new_forward.__signature__ = signature + if is_debug(): + new_forward = debuggable_forward(new_forward) + return new_forward + + class NNCFNetwork(torch.nn.Module, metaclass=NNCFNetworkMeta): """ A mixin-like class to dynamically extend the original model object's class with. @@ -901,8 +933,15 @@ def forward(self, *args, **kwargs): args, kwargs = self.nncf._wrap_inputs_fn(args, kwargs) # For purposes of scope tracking, need the original forward call to occur as if it were - # a module call of the correponding object. - if self.nncf._bound_original_forward is None: + # a module call of the corresponding object. + if self.nncf._original_instance_forward is not None: + + def _unbound_like_original_instance_forward(_self, *args, **kwargs): + return self.nncf._original_instance_forward(*args, **kwargs) + + retval = wrap_module_call(_unbound_like_original_instance_forward)(self, *args, **kwargs) + + elif self.nncf._bound_original_forward is None: retval = wrap_module_call(self.nncf._original_unbound_forward)(self, *args, **kwargs) else: @@ -927,34 +966,13 @@ def nncf(self) -> NNCFNetworkInterface: # self._nncf is being set in the creation function defined in the NNCFNetworkMeta metaclass return self._nncf - def __getattr__(self, key): - """ - Only defined for purposes of deprecation warnings. This method should be removed after v2.5.0. - """ - try: - return super().__getattr__(key) - except AttributeError as e: - if hasattr(self._nncf, key): - warning_deprecated( - "Old style of accessing NNCF-specific attributes and methods on NNCFNetwork " - "objects is deprecated. 
" - "Access the NNCF-specific attrs through the NNCFInterface, which is " - "set up as an `nncf` attribute on the compressed model object.\n" - "For instance, instead of `compressed_model.get_graph()` " - "you should now write `compressed_model.nncf.get_graph()`.\n" - "The old style will be removed after NNCF v2.5.0" - ) - return getattr(self._nncf, key) - raise e - def __setattr__(self, key, value): # If setting `forward`, set it on the original model. if key == "forward": nncf_logger.warning( "You are setting `forward` on an NNCF-processed model object.\n" "NNCF relies on custom-wrapping the `forward` call in order to function properly.\n" - "Arbitrary adjustments to the forward function on an NNCFNetwork object have undefined " - "behaviour.\n" + "Arbitrary adjustments to the forward function on an NNCFNetwork object have undefined behavior.\n" "If you need to replace the underlying forward function of the original model so that " "NNCF should be using that instead of the original forward function that NNCF saved " "during the compressed model creation, you can do this by calling:\n" @@ -965,15 +983,6 @@ def __setattr__(self, key, value): ) super().__setattr__(key, value) - def get_nncf_wrapped_model(self) -> "NNCFNetwork": - warning_deprecated( - "Calls to NNCFNetwork.get_nncf_wrapped_model() are deprecated and will be removed " - "in NNCF v2.6.0.\n" - "Starting from NNCF v2.5.0, the compressed model object already inherits the original " - "class of the uncompressed model and the forward signature, so the call to " - ".get_nncf_wrapped_model() may be simply omitted." - ) - return self class NNCFSkippingIter: """ @@ -1021,33 +1030,3 @@ def hook_fn( def close(self): self.hook.remove() - - -class PTModelTransformer(ModelTransformer): - def __init__(self, model: NNCFNetwork): - super().__init__(model) - self._node_to_op_address_mapping = model.nncf.get_node_to_op_address_mapping() - - def transform(self, transformation_layout: PTTransformationLayout) -> NNCFNetwork: - fns_grouped_by_points = {} # type: Dict[PTInsertionPoint, List[Tuple[Callable, TransformationPriority]]] - for transformation_command in transformation_layout.transformations: # type: PTInsertionCommand - target_point = transformation_command.target_point # type: PTTargetPoint - target_node_name = target_point.target_node_name - pt_ip = PTInsertionPoint( - target_type=target_point.target_type, - op_address=self._node_to_op_address_mapping[target_node_name], - input_port_id=target_point.input_port_id, - ) - fn = transformation_command.fn - if target_point.type is TargetType.OPERATION_WITH_WEIGHTS: - fn = UpdateWeight(fn) - tup = (fn, transformation_command.priority) - if pt_ip not in fns_grouped_by_points: - fns_grouped_by_points[pt_ip] = [tup] - else: - fns_grouped_by_points[pt_ip].append(tup) - - for pt_ip, fn_list_with_priority in fns_grouped_by_points.items(): - fn_list_with_priority = sorted(fn_list_with_priority, key=lambda x: x[1]) - self._model.nncf.insert_at_point(pt_ip, [x[0] for x in fn_list_with_priority]) - return self._model diff --git a/nncf/torch/pruning/base_algo.py b/nncf/torch/pruning/base_algo.py index dac4bfab299..79c7fbe9924 100644 --- a/nncf/torch/pruning/base_algo.py +++ b/nncf/torch/pruning/base_algo.py @@ -156,7 +156,7 @@ def _prune_weights(self, target_model: NNCFNetwork): all_norm_layers = target_model_graph.get_nodes_by_types(types_to_apply_mask) for node in all_norm_layers: - if node.data["output_mask"] is None: + if node.attributes["output_mask"] is None: # Skip elements that will not be pruned 
continue diff --git a/nncf/torch/pruning/filter_pruning/algo.py b/nncf/torch/pruning/filter_pruning/algo.py index dfe3e591d7f..589583d723a 100644 --- a/nncf/torch/pruning/filter_pruning/algo.py +++ b/nncf/torch/pruning/filter_pruning/algo.py @@ -608,7 +608,7 @@ def _propagate_masks(self): for node, pruning_block, node_module in self._pruned_norms_operators: if node_module not in pruned_node_modules: # Setting masks for BN nodes - pruning_block.binary_filter_pruning_mask = node.data["output_mask"].tensor + pruning_block.binary_filter_pruning_mask = node.attributes["output_mask"].tensor pruned_node_modules.append(node_module) def prepare_for_export(self): diff --git a/nncf/torch/pruning/operations.py b/nncf/torch/pruning/operations.py index 3a09f009546..67f2fad49d3 100644 --- a/nncf/torch/pruning/operations.py +++ b/nncf/torch/pruning/operations.py @@ -80,6 +80,7 @@ from nncf.torch.graph.operator_metatypes import PTSILUMetatype from nncf.torch.graph.operator_metatypes import PTSoftmaxMetatype from nncf.torch.graph.operator_metatypes import PTSplitMetatype +from nncf.torch.graph.operator_metatypes import PTSqueezeMetatype from nncf.torch.graph.operator_metatypes import PTSubMetatype from nncf.torch.graph.operator_metatypes import PTSumMetatype from nncf.torch.graph.operator_metatypes import PTTanhMetatype @@ -170,6 +171,7 @@ class PTIdentityMaskForwardPruningOp(IdentityMaskForwardPruningOp, PTPruner): PTMaxPool2dMetatype, PTAvgPool3dMetatype, PTMaxPool3dMetatype, + PTMeanMetatype, PTDropoutMetatype, PTSILUMetatype, PTPowerMetatype, @@ -214,7 +216,7 @@ def input_prune(cls, model: NNCFNetwork, node: NNCFNode, graph: NNCFGraph, prun_ node_module.in_channels = new_num_channels node_module.weight = torch.nn.Parameter(node_module.weight[broadcasted_mask].view(new_weight_shape)) nncf_logger.debug( - f'Pruned Convolution {node.data["key"]} by input mask. ' + f"Pruned Convolution {node.node_key} by input mask. " f"Old input filters number: {old_num_channels}, new filters number: {new_num_channels}." ) else: @@ -223,7 +225,7 @@ def input_prune(cls, model: NNCFNetwork, node: NNCFNode, graph: NNCFGraph, prun_ @classmethod def output_prune(cls, model: NNCFNetwork, node: NNCFNode, graph: NNCFGraph, prun_type: PrunType) -> None: - mask = node.data["output_mask"] + mask = node.attributes["output_mask"] if mask is None: return @@ -242,7 +244,7 @@ def output_prune(cls, model: NNCFNetwork, node: NNCFNode, graph: NNCFGraph, prun node_module.bias = torch.nn.Parameter(node_module.bias[bool_mask]) nncf_logger.debug( - f'Pruned Convolution {node.data["key"]} by pruning mask. ' + f"Pruned Convolution {node.node_key} by pruning mask. " f"Old output filters number: {old_num_channels}, new filters number: {node_module.out_channels}." 
) else: @@ -263,12 +265,12 @@ def input_reorder(cls, model: NNCFNetwork, node: NNCFNode, graph: NNCFGraph): conv.weight.data = torch.index_select(conv.weight.data, 1, reorder_indexes) nncf_logger.debug( f"Reordered input channels (first 10 reorder indexes {reorder_indexes[:10]}) " - f'of Convolution: {node.data["key"]} ' + f"of Convolution: {node.node_key} " ) @classmethod def output_reorder(cls, model: NNCFNetwork, node: NNCFNode, graph: NNCFGraph): - reorder_indexes = node.data["output_mask"] + reorder_indexes = node.attributes["output_mask"] if reorder_indexes is None: return conv = model.nncf.get_containing_module(node.node_name) @@ -278,7 +280,7 @@ def output_reorder(cls, model: NNCFNetwork, node: NNCFNode, graph: NNCFGraph): conv.bias.data = torch.index_select(conv.bias.data, 0, reorder_indexes) nncf_logger.debug( f"Reordered output channels (first 10 reorder indexes {reorder_indexes[:10]}) " - f'of Convolution: {node.data["key"]} ' + f"of Convolution: {node.node_key} " ) @@ -304,7 +306,7 @@ def input_prune(cls, model: NNCFNetwork, node: NNCFNode, graph: NNCFGraph, prun_ node_module.weight = torch.nn.Parameter(node_module.weight[bool_mask]) nncf_logger.debug( - f'Pruned ConvTranspose {node.data["key"]} by input mask. ' + f"Pruned ConvTranspose {node.node_key} by input mask. " f"Old input filters number: {old_num_channels}, new filters number: {node_module.in_channels}." ) else: @@ -312,7 +314,7 @@ def input_prune(cls, model: NNCFNetwork, node: NNCFNode, graph: NNCFGraph, prun_ @classmethod def output_prune(cls, model: NNCFNetwork, node: NNCFNode, graph: NNCFGraph, prun_type: PrunType) -> None: - output_mask = node.data["output_mask"] + output_mask = node.attributes["output_mask"] if output_mask is None: return @@ -339,7 +341,7 @@ def output_prune(cls, model: NNCFNetwork, node: NNCFNode, graph: NNCFGraph, prun node_module.bias = torch.nn.Parameter(node_module.bias[bool_mask]) nncf_logger.debug( - f'Pruned ConvTranspose {node.data["key"]} by pruning mask. ' + f"Pruned ConvTranspose {node.node_key} by pruning mask. " f"Old output filters number: {old_num_channels}, new filters number: {node_module.out_channels}." ) else: @@ -375,7 +377,7 @@ def input_prune(cls, model: NNCFNetwork, node: NNCFNode, graph: NNCFGraph, prun_ node_module.weight = torch.nn.Parameter(node_module.weight[broadcasted_mask].view(new_weight_shape)) nncf_logger.debug( - f'Pruned Linear {node.data["key"]} by pruning mask. ' + f"Pruned Linear {node.node_key} by pruning mask. " f"Old input filters number: {in_features}, new filters number: {node_module.in_features}." 
) else: @@ -391,12 +393,12 @@ def input_reorder(cls, model: NNCFNetwork, node: NNCFNode, graph: NNCFGraph): fc = model.nncf.get_containing_module(node.node_name) fc.weight.data = torch.index_select(fc.weight.data, 1, reorder_indexes) nncf_logger.debug( - f"Reordered input channels (first 10 reorder indexes {reorder_indexes[:10]}) of Linear: {node.data['key']}" + f"Reordered input channels (first 10 reorder indexes {reorder_indexes[:10]}) of Linear: {node.node_key}" ) @classmethod def output_prune(cls, model: NNCFNetwork, node: NNCFNode, graph: NNCFGraph, prun_type: PrunType) -> None: - output_mask = node.data["output_mask"] + output_mask = node.attributes["output_mask"] if output_mask is None: return @@ -412,7 +414,7 @@ def output_prune(cls, model: NNCFNetwork, node: NNCFNode, graph: NNCFGraph, prun node_module.out_features = new_out_features node_module.weight = torch.nn.Parameter(node_module.weight[bool_mask]) nncf_logger.debug( - f'Pruned Linear {node.data["key"]} by pruning mask. ' + f"Pruned Linear {node.node_key} by pruning mask. " f"Old output filters number: {out_features}, new filters number: {node_module.out_features}." ) if node_module.bias is not None: @@ -424,7 +426,7 @@ def output_prune(cls, model: NNCFNetwork, node: NNCFNode, graph: NNCFGraph, prun @classmethod def output_reorder(cls, model: NNCFNetwork, node: NNCFNode, graph: NNCFGraph): - reorder_indexes = node.data["output_mask"] + reorder_indexes = node.attributes["output_mask"] if reorder_indexes is None: return fc = model.nncf.get_containing_module(node.node_name) @@ -433,7 +435,7 @@ def output_reorder(cls, model: NNCFNetwork, node: NNCFNode, graph: NNCFGraph): if fc.bias is not None: fc.bias.data = torch.index_select(fc.bias.data, 0, reorder_indexes) nncf_logger.debug( - f"Reordered output channels (first 10 reorder indexes {reorder_indexes[:10]}) of Linear: {node.data['key']}" + f"Reordered output channels (first 10 reorder indexes {reorder_indexes[:10]}) of Linear: {node.node_key}" ) @@ -465,7 +467,7 @@ def input_prune(cls, model: NNCFNetwork, node: NNCFNode, graph: NNCFGraph, prun_ node_module.running_var = torch.nn.Parameter(node_module.running_var[bool_mask], requires_grad=False) nncf_logger.debug( - f'Pruned BatchNorm {node.data["key"]} by input mask. ' + f"Pruned BatchNorm {node.node_key} by input mask. " f"Old num features: {old_num_channels}, new num features: {new_num_channels}." ) else: @@ -495,7 +497,7 @@ def input_reorder(cls, model: NNCFNetwork, node: NNCFNode, graph: NNCFGraph): bn.running_var.data = torch.index_select(bn.running_var.data, 0, reorder_indexes) nncf_logger.debug( - f'Reordered channels (first 10 reorder indexes {reorder_indexes[:10]}) of BatchNorm: {node.data["key"]} ' + f"Reordered channels (first 10 reorder indexes {reorder_indexes[:10]}) of BatchNorm: {node.node_key} " ) @@ -526,7 +528,7 @@ def input_prune(cls, model: NNCFNetwork, node: NNCFNode, graph: NNCFGraph, prun_ node_module.bias = torch.nn.Parameter(node_module.bias[bool_mask]) nncf_logger.debug( - f"Pruned GroupNorm {node.data['key']} by input mask. " + f"Pruned GroupNorm {node.node_key} by input mask. " f"Old num features: {old_num_channels}, new num features: {new_num_channels}." 
) else: @@ -552,7 +554,7 @@ def input_reorder(cls, model: NNCFNetwork, node: NNCFNode, graph: NNCFGraph): nncf_logger.debug( "Reordered channels (first 10 reorder indexes {}) of LayerNorm: {} ".format( - reorder_indexes[:10], node.data["key"] + reorder_indexes[:10], node.node_key ) ) @@ -601,7 +603,7 @@ def input_prune(cls, model: NNCFNetwork, node: NNCFNode, graph: NNCFGraph, prun_ node_module.n_channels = new_num_channels nncf_logger.debug( - f'Pruned Elementwise {node.data["key"]} by input mask. ' + f"Pruned Elementwise {node.node_key} by input mask. " f"Old num features: {old_num_channels}, new num features: {new_num_channels}." ) else: @@ -610,12 +612,12 @@ def input_prune(cls, model: NNCFNetwork, node: NNCFNode, graph: NNCFGraph, prun_ @PT_PRUNING_OPERATOR_METATYPES.register("stop_propagation_ops") class PTStopMaskForwardPruningOp(StopMaskForwardPruningOp, PTPruner): - subtypes = [PTMeanMetatype, PTMaxMetatype, PTMinMetatype, PTSumMetatype, UnknownMetatype] + subtypes = [PTMaxMetatype, PTMinMetatype, PTSumMetatype, UnknownMetatype] @PT_PRUNING_OPERATOR_METATYPES.register("reshape") class PTReshape(ReshapePruningOp, PTPruner): - subtypes = [PTReshapeMetatype] + subtypes = [PTReshapeMetatype, PTSqueezeMetatype] @PT_PRUNING_OPERATOR_METATYPES.register("concat") diff --git a/nncf/torch/pruning/utils.py b/nncf/torch/pruning/utils.py index 554154f543d..20ecccdc44a 100644 --- a/nncf/torch/pruning/utils.py +++ b/nncf/torch/pruning/utils.py @@ -57,12 +57,12 @@ def init_output_masks_in_graph(graph: NNCFGraph, nodes: List): :param nodes: list with pruned nodes """ for node in graph.get_all_nodes(): - node.data.pop("output_mask", None) + node.attributes.pop("output_mask", None) for minfo in nodes: mask = minfo.operand.binary_filter_pruning_mask nncf_node = graph.get_node_by_id(minfo.nncf_node_id) - nncf_node.data["output_mask"] = PTNNCFTensor(mask) + nncf_node.attributes["output_mask"] = PTNNCFTensor(mask) def _calculate_output_shape(graph: NNCFGraph, node: NNCFNode) -> Tuple[int, ...]: diff --git a/nncf/torch/quantization/algo.py b/nncf/torch/quantization/algo.py index 2fb131f8f50..d5b4269bf87 100644 --- a/nncf/torch/quantization/algo.py +++ b/nncf/torch/quantization/algo.py @@ -49,6 +49,7 @@ from nncf.common.logging import nncf_logger from nncf.common.logging.logger import DuplicateFilter from nncf.common.quantization.config_assignment import assign_qconfig_lists_to_modules +from nncf.common.quantization.quantizer_propagation.structs import IgnoreReason from nncf.common.quantization.quantizer_setup import DEFAULT_QUANTIZER_CONFIG from nncf.common.quantization.quantizer_setup import MultiConfigQuantizerSetup from nncf.common.quantization.quantizer_setup import QuantizationPointId @@ -88,6 +89,8 @@ from nncf.torch.debug import DebugInterface from nncf.torch.dynamic_graph.context import TracingContext from nncf.torch.graph.graph import PTNNCFGraph +from nncf.torch.graph.operator_metatypes import UNIFICATION_PRODUCING_METATYPES +from nncf.torch.graph.operator_metatypes import PTCatMetatype from nncf.torch.graph.operator_metatypes import PTDepthwiseConv2dSubtype from nncf.torch.graph.operator_metatypes import PTModuleConv2dMetatype from nncf.torch.graph.transformations.commands import PTInsertionCommand @@ -131,8 +134,7 @@ from nncf.torch.quantization.precision_init.hawq_init import HAWQPrecisionInitParams from nncf.torch.quantization.precision_init.manual_init import ManualPrecisionInitParams from nncf.torch.quantization.schedulers import QUANTIZATION_SCHEDULERS -from nncf.torch.quantization.strip 
import remove_disabled_quantizers
-from nncf.torch.quantization.strip import replace_quantizer_to_torch_native_module
+from nncf.torch.quantization.strip import strip_quantized_model
 from nncf.torch.quantization.structs import NonWeightQuantizerInfo
 from nncf.torch.quantization.structs import WeightQuantizerInfo
 from nncf.torch.quantization.translator import PTTargetPointTranslator
@@ -359,8 +361,12 @@ def generate_setup(self) -> SingleConfigQuantizerSetup:
             QuantizerPropagationSolver,  # pylint: disable=cyclic-import
         )
 
+        scales_unification_map = {PTCatMetatype: UNIFICATION_PRODUCING_METATYPES}
+        ignored_scopes_for_solver = {
+            name: IgnoreReason.USER_REQUESTED for name in self._ignored_scopes_per_group[QuantizerGroup.ACTIVATIONS]
+        }
         prop_graph_solver = QuantizerPropagationSolver(
-            activation_ignored_scopes=self._ignored_scopes_per_group[QuantizerGroup.ACTIVATIONS],
+            activation_ignored_scopes=ignored_scopes_for_solver,
             weight_ignored_scopes=self._ignored_scopes_per_group[QuantizerGroup.WEIGHTS],
             activation_target_scopes=self._target_scopes_per_group[QuantizerGroup.ACTIVATIONS],
             weight_target_scopes=self._target_scopes_per_group[QuantizerGroup.WEIGHTS],
@@ -374,6 +380,7 @@ def generate_setup(self) -> SingleConfigQuantizerSetup:
             global_constraints=self.global_quantizer_constraints,
             additional_unified_scale_op_scopes=self._unified_scale_ops,
             quantize_outputs=self._quantize_outputs,
+            scales_unification_map=scales_unification_map,
         )
 
         merged_ip_graph = insertion_point_graph.get_ip_graph_with_merged_hw_optimized_operations(
@@ -469,6 +476,8 @@ def __init__(self, config, should_init: bool = True):
         algo_config = self._get_algo_specific_config_section()
         if self._target_device == "VPU" and "preset" in algo_config:
             raise RuntimeError("The VPU target device does not support presets.")
+        if self._target_device == "CPU_SPR":
+            raise RuntimeError("The CPU_SPR target device is not supported.")
 
         self._range_init_params = None
         self._precision_init_type = None
@@ -1519,8 +1528,7 @@ def statistics(self, quickly_collected_only=False) -> NNCFStatistics:
     def strip_model(self, model: NNCFNetwork, do_copy: bool = False) -> NNCFNetwork:
         if do_copy:
             model = copy_model(model)
-        model = replace_quantizer_to_torch_native_module(model)
-        model = remove_disabled_quantizers(model)
+        model = strip_quantized_model(model)
         return model
diff --git a/nncf/torch/quantization/default_quantization.py b/nncf/torch/quantization/default_quantization.py
index 6b9979f6776..79856820dc9 100644
--- a/nncf/torch/quantization/default_quantization.py
+++ b/nncf/torch/quantization/default_quantization.py
@@ -10,13 +10,12 @@
 #  limitations under the License.
 from typing import Dict, List
 
-from nncf.common.graph.operator_metatypes import UnknownMetatype
 from nncf.common.quantization.quantizer_propagation.structs import QuantizationTrait
 from nncf.torch.graph import operator_metatypes
 from nncf.torch.graph.operator_metatypes import OPERATORS_WITH_WEIGHTS_METATYPES
 from nncf.torch.graph.operator_metatypes import PTOperatorMetatype
 
-# If there are no some metatypes it means that they are considered as QuantizationTrait.QuantizationAgnostic
+# If a metatype is not in this list, then it is considered to be QuantizationTrait.NON_QUANTIZABLE.
DEFAULT_PT_QUANT_TRAIT_TO_OP_DICT = { QuantizationTrait.INPUTS_QUANTIZABLE: [ @@ -55,18 +54,32 @@ operator_metatypes.PTAvgPool2dMetatype, operator_metatypes.PTAvgPool3dMetatype, ], - QuantizationTrait.NON_QUANTIZABLE: [ - operator_metatypes.PTSigmoidMetatype, - operator_metatypes.PTSoftmaxMetatype, - operator_metatypes.PTRELUMetatype, - operator_metatypes.PTDeformConv2dMetatype, - operator_metatypes.PTModuleDeformConv2dMetatype, - UnknownMetatype, - # Ticket: 108478 - operator_metatypes.PTAbsMetatype, - operator_metatypes.PTExpMetatype, - operator_metatypes.PTLogMetatype, - operator_metatypes.PTSqrtMetatype, + QuantizationTrait.QUANTIZATION_AGNOSTIC: [ + operator_metatypes.PTThresholdMetatype, + operator_metatypes.PTDropoutMetatype, + operator_metatypes.PTPadMetatype, + operator_metatypes.PTMaxMetatype, + operator_metatypes.PTMinMetatype, + operator_metatypes.PTTransposeMetatype, + operator_metatypes.PTGatherMetatype, + operator_metatypes.PTScatterMetatype, + operator_metatypes.PTReshapeMetatype, + operator_metatypes.PTSqueezeMetatype, + operator_metatypes.PTSplitMetatype, + operator_metatypes.PTExpandMetatype, + operator_metatypes.PTMaxPool1dMetatype, + operator_metatypes.PTMaxPool2dMetatype, + operator_metatypes.PTMaxPool3dMetatype, + operator_metatypes.PTMaxUnpool1dMetatype, + operator_metatypes.PTMaxUnpool2dMetatype, + operator_metatypes.PTMaxUnpool3dMetatype, + operator_metatypes.PTRepeatMetatype, + operator_metatypes.PTNoopMetatype, + # PTRELUMetatype is not considered to be QUANTIZATION_AGNOSTIC, because: + # 1. Runtime doesn't provide performance benefits by quantizing the stand-alone RELU's (ticket: 59548) + # 2. It's frequently better for the end accuracy to have quantizers set up after the RELU + # so that the input distribution to the quantizer is non-negative + # and we can therefore have better quantization resolution while preserving the original dynamic range ], QuantizationTrait.CONCAT: [operator_metatypes.PTCatMetatype], QuantizationTrait.OUTPUT_QUANTIZATION_AS_WEIGHTS: [ diff --git a/nncf/torch/quantization/extensions.py b/nncf/torch/quantization/extensions.py index f86b4e117a1..1ade5a28225 100644 --- a/nncf/torch/quantization/extensions.py +++ b/nncf/torch/quantization/extensions.py @@ -19,6 +19,7 @@ from nncf.torch.extensions import EXTENSIONS from nncf.torch.extensions import CudaNotAvailableStub from nncf.torch.extensions import ExtensionLoader +from nncf.torch.extensions import ExtensionLoaderTimeoutException from nncf.torch.extensions import ExtensionNamespace from nncf.torch.extensions import ExtensionsType from nncf.torch.quantization.reference import ReferenceQuantizedFunctions @@ -60,6 +61,8 @@ def load(cls): build_directory=cls.get_build_dir(), verbose=False, ) + except ExtensionLoaderTimeoutException as e: + raise e except Exception as e: # pylint:disable=broad-except nncf_logger.warning( f"Could not compile CPU quantization extensions. 
" @@ -87,6 +90,8 @@ def load(cls): build_directory=cls.get_build_dir(), verbose=False, ) + except ExtensionLoaderTimeoutException as e: + raise e except (subprocess.CalledProcessError, OSError, RuntimeError) as e: assert torch.cuda.is_available() raise RuntimeError( diff --git a/nncf/torch/quantization/ignored_patterns.py b/nncf/torch/quantization/ignored_patterns.py index ba18a3e5af5..d4816cc482e 100644 --- a/nncf/torch/quantization/ignored_patterns.py +++ b/nncf/torch/quantization/ignored_patterns.py @@ -15,35 +15,76 @@ PT_IGNORED_PATTERNS = Registry("IGNORED_PATTERNS") -@PT_IGNORED_PATTERNS.register(IgnoredPatternNames.SOFTMAX_MATMUL) -def create_softmax_matmul() -> GraphPattern: - matmul_aliases = ["linear", "addmm", "matmul", "bmm", "mm", "baddbmm"] - pattern = GraphPattern() +def _add_softmax_matmul( + pattern: GraphPattern, matmul_aliases, reshape_squeeze_aliases, gather_aliases, transpose_aliases +) -> None: + # SOFTMAX RESHAPE||TRANSPOSE||GATHER||SQUEEZE + # \ / + # \ / + # \ / + # \ / + # \ / + # MATMUL + branch_matmul_nodes = reshape_squeeze_aliases + gather_aliases + transpose_aliases softmax = pattern.add_node(**{GraphPattern.LABEL_ATTR: "SOFTMAX", GraphPattern.METATYPE_ATTR: "softmax"}) matmul = pattern.add_node(**{GraphPattern.LABEL_ATTR: "MATMUL", GraphPattern.METATYPE_ATTR: matmul_aliases}) - non_pattern_node = pattern.add_node( - **{GraphPattern.LABEL_ATTR: "ANY", GraphPattern.METATYPE_ATTR: GraphPattern.NON_PATTERN_NODE_TYPE} + matmul_branch_nodes = pattern.add_node( + **{GraphPattern.LABEL_ATTR: "NON_PATTERN", GraphPattern.METATYPE_ATTR: branch_matmul_nodes} ) pattern.add_edge(softmax, matmul) - pattern.add_edge(non_pattern_node, matmul) - return pattern + pattern.add_edge(matmul_branch_nodes, matmul) -@PT_IGNORED_PATTERNS.register(IgnoredPatternNames.SOFTMAX_RESHAPE_MATMUL) -def create_softmax_reshape_matmul() -> GraphPattern: - matmul_aliases = ["linear", "addmm", "matmul", "bmm", "mm", "baddbmm"] - pattern = GraphPattern() +def _add_softmax_reshape_matmul( + pattern: GraphPattern, matmul_aliases, reshape_squeeze_aliases, gather_aliases, transpose_aliases +) -> None: + # SOFTMAX + # \ + # \ + # \ + # RESHAPE RESHAPE||TRANSPOSE||GATHER||SQUEEZE + # \ / + # \ / + # \ / + # \ / + # \ / + # \ / + # MATMUL + branch_matmul_nodes = reshape_squeeze_aliases + gather_aliases + transpose_aliases softmax = pattern.add_node(**{GraphPattern.LABEL_ATTR: "SOFTMAX", GraphPattern.METATYPE_ATTR: "softmax"}) - reshape = pattern.add_node(**{GraphPattern.LABEL_ATTR: "RESHAPE", GraphPattern.METATYPE_ATTR: "reshape"}) - matmul = pattern.add_node(**{GraphPattern.LABEL_ATTR: "MATMUL", GraphPattern.METATYPE_ATTR: matmul_aliases}) - non_pattern_node_1 = pattern.add_node( - **{GraphPattern.LABEL_ATTR: "NON_PATTERN_1", GraphPattern.METATYPE_ATTR: GraphPattern.NON_PATTERN_NODE_TYPE} + reshape = pattern.add_node( + **{GraphPattern.LABEL_ATTR: "RESHAPE", GraphPattern.METATYPE_ATTR: reshape_squeeze_aliases} ) - non_pattern_node_2 = pattern.add_node( - **{GraphPattern.LABEL_ATTR: "NON_PATTERN_2", GraphPattern.METATYPE_ATTR: GraphPattern.NON_PATTERN_NODE_TYPE} + matmul = pattern.add_node(**{GraphPattern.LABEL_ATTR: "MATMUL", GraphPattern.METATYPE_ATTR: matmul_aliases}) + matmul_branch_nodes = pattern.add_node( + **{GraphPattern.LABEL_ATTR: "RESHAPE||TRANSPOSE||GATHER", GraphPattern.METATYPE_ATTR: branch_matmul_nodes} ) pattern.add_edge(softmax, reshape) - pattern.add_edge(non_pattern_node_1, reshape) pattern.add_edge(reshape, matmul) - pattern.add_edge(non_pattern_node_2, matmul) + 
pattern.add_edge(matmul_branch_nodes, matmul) + return pattern + + +@PT_IGNORED_PATTERNS.register(IgnoredPatternNames.MULTIHEAD_ATTENTION_OUTPUT) +def create_multihead_attention_output() -> GraphPattern: + matmul_aliases = ["linear", "addmm", "matmul", "bmm", "mm", "baddbmm"] + reshape_squeeze_aliases = ["reshape", "view", "flatten", "squeeze", "unsqueeze", "squeeze", "flatten", "unsqueeze"] + gather_aliases = ["gather", "index_select", "where", "index_select", "__getitem__"] + transpose_aliases = ["transpose", "permute", "transpose_"] + + pattern = GraphPattern() + _add_softmax_matmul( + pattern, + matmul_aliases=matmul_aliases, + reshape_squeeze_aliases=reshape_squeeze_aliases, + gather_aliases=gather_aliases, + transpose_aliases=transpose_aliases, + ) + _add_softmax_reshape_matmul( + pattern, + matmul_aliases=matmul_aliases, + reshape_squeeze_aliases=reshape_squeeze_aliases, + gather_aliases=gather_aliases, + transpose_aliases=transpose_aliases, + ) return pattern diff --git a/nncf/torch/quantization/init_precision.py b/nncf/torch/quantization/init_precision.py index 5bcf86709a0..fa3da5c6d3a 100644 --- a/nncf/torch/quantization/init_precision.py +++ b/nncf/torch/quantization/init_precision.py @@ -1,3 +1,14 @@ +# Copyright (c) 2023 Intel Corporation +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
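Both the hardware-fused patterns and the ignored patterns in this patch are assembled with the same small `GraphPattern` API: `add_node`, `add_edge`, and `join_patterns`. A minimal, hypothetical illustration of that API; the LINEAR-to-GELU chain below is purely an example and is not a pattern registered by NNCF:

```python
from nncf.common.graph.patterns import GraphPattern


def create_linear_gelu() -> GraphPattern:
    # Two nodes matched by operator-type aliases, joined by a directed edge:
    # any "linear"/"addmm" call whose output feeds a "gelu" call.
    pattern = GraphPattern()
    linear = pattern.add_node(label="LINEAR", type=["linear", "addmm"])
    gelu = pattern.add_node(label="GELU", type="gelu")
    pattern.add_edge(linear, gelu)
    return pattern
```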
+ from typing import Type from nncf.torch.quantization.precision_init.autoq_init import AutoQPrecisionInitializer diff --git a/nncf/torch/quantization/layers.py b/nncf/torch/quantization/layers.py index 5ac19d104ac..4a588ddaa8d 100644 --- a/nncf/torch/quantization/layers.py +++ b/nncf/torch/quantization/layers.py @@ -19,7 +19,6 @@ import numpy as np import torch -from pkg_resources import parse_version from torch import distributed from torch import nn @@ -331,7 +330,6 @@ def close(self): self.hook.remove() self.load_listener = LoadStateListener(self) - self._old_level_range_setting = False @property def level_low(self) -> int: @@ -379,8 +377,6 @@ def forward(self, x): # TODO: refactor to get rid of extra if's and calls on each forward if not self.is_enabled_quantization(): return x - if self._old_level_range_setting: - self.set_levels() is_exporting = is_tracing_state() if is_exporting: with no_nncf_trace(): @@ -518,7 +514,13 @@ def run_export_quantization(self, x: torch.Tensor): with torch.no_grad(): if self._export_mode == QuantizerExportMode.FAKE_QUANTIZE: x, levels, input_low, input_high = self._prepare_fq_export_quantization(x) - return ExportQuantizeToFakeQuantize.apply(x, levels, input_low, input_high, input_low, input_high) + q_min, q_max, scale, zero_point = self.get_parameters_for_torch_fq() + + ch_axis = np.argmax(self.scale_shape) if self.per_channel else None + + return ExportQuantizeToFakeQuantize.apply( + x, levels, input_low, input_high, input_low, input_high, scale, zero_point, q_min, q_max, ch_axis + ) if self._export_mode == QuantizerExportMode.ONNX_QUANTIZE_DEQUANTIZE_PAIRS: x, y_scale, y_zero_point, axis = self._prepare_qdq_export_quantization(x) return ExportQuantizeToONNXQuantDequant.apply(x, y_scale, y_zero_point, axis) @@ -658,11 +660,8 @@ def __init__(self, qspec: PTQuantizerSpec): ) ) - if parse_version(torch.__version__) >= parse_version("1.12"): - # Values of level_low, level_high must be recalculated for load new signed parameter. - self.register_load_state_dict_post_hook(lambda module, _: module.set_levels()) - else: - self._old_level_range_setting = True + # Values of level_low, level_high must be recalculated for load new signed parameter. + self.register_load_state_dict_post_hook(lambda module, _: module.set_levels()) @property def scale(self): @@ -777,7 +776,7 @@ def get_parameters_for_torch_fq(self) -> Tuple[int, int, torch.Tensor, torch.Ten scale - Quantizer scale. zero_point - Quantizer zero point. """ - with torch.no_grad(): + with torch.no_grad(), no_jit_trace(): input_low, input_high = self._get_input_low_input_high( self.scale, self.level_low, self.level_high, self.eps ) @@ -970,7 +969,7 @@ def get_parameters_for_torch_fq(self) -> Tuple[int, int, torch.Tensor, torch.Ten scale - Quantizer scale. zero_point - Quantizer zero point. 
""" - with torch.no_grad(): + with torch.no_grad(), no_jit_trace(): input_low, input_high = self._get_input_low_input_high( self.input_range, self.input_low, self.levels, self.eps ) diff --git a/nncf/torch/quantization/metrics.py b/nncf/torch/quantization/metrics.py index 49362ac2cc8..3b4960a28aa 100644 --- a/nncf/torch/quantization/metrics.py +++ b/nncf/torch/quantization/metrics.py @@ -20,6 +20,7 @@ from nncf.common.collector import StatisticsCollector from nncf.common.graph import NNCFGraph +from nncf.common.graph.graph import NNCFNode from nncf.common.graph.graph_matching import find_subgraphs_matching_pattern from nncf.common.graph.patterns.manager import PatternsManager from nncf.common.graph.patterns.manager import TargetDevice @@ -178,7 +179,7 @@ def collect(self) -> MemoryConsumptionStatistics: shape = original_nx_graph.edges[u, v][NNCFGraph.ACTIVATION_SHAPE_EDGE_ATTR] num_bits = self._get_precision_for_activation_tensor(u, v, original_nx_graph) original_nx_graph.edges[u, v]["precision"] = num_bits - u_node_name = original_nx_graph.nodes[u][NNCFGraph.NODE_NAME_ATTR] + u_node_name = original_nx_graph.nodes[u][NNCFNode.NODE_NAME_ATTR] memory_consumption_fp_model[u_node_name] = np.prod(shape) * fp_num_bits memory_consumption_compressed_model[u_node_name] = np.prod(shape) * num_bits try: @@ -195,7 +196,7 @@ def _get_precision_for_activation_tensor(self, u_node: str, v_node: str, origina precision_enter_activation_tensor = max( [0] + [original_nx_graph.edges[pred_u_node, u_node]["precision"] for pred_u_node in pred_u_nodes] ) - u_node_name = original_nx_graph.nodes[u_node][NNCFGraph.NODE_NAME_ATTR] + u_node_name = original_nx_graph.nodes[u_node][NNCFNode.NODE_NAME_ATTR] module = self._compressed_model.nncf.get_containing_module(u_node_name) if is_nncf_module(module): quantizer = self._get_weight_quantizer_for_module(module) @@ -268,7 +269,7 @@ def collect(self) -> QuantizationConfigurationStatistics: node = merged_original_graph.nodes[node_key] if node[self.IS_MERGED_GRAPH_ATTR]: last_node = node[self.NODES_GRAPH_ATTR][-1] - node_name = str(last_node[NNCFGraph.NODE_NAME_ATTR]) + node_name = str(last_node[NNCFNode.NODE_NAME_ATTR]) matched = False for aq_info in self._qctrl.non_weight_quantizers.values(): for target_point in aq_info.affected_insertions: @@ -280,7 +281,7 @@ def collect(self) -> QuantizationConfigurationStatistics: else: self._marking_edges(merged_original_graph, node_key, queue, False) else: - node_name = str(node[NNCFGraph.NODE_NAME_ATTR]) + node_name = str(node[NNCFNode.NODE_NAME_ATTR]) matched = False for aq_key in self._compressed_model.nncf.external_quantizers.keys(): @@ -291,7 +292,7 @@ def collect(self) -> QuantizationConfigurationStatistics: self._marking_edges(merged_original_graph, node_key, queue) else: is_op_non_change_precision_activation_tensor = True - node_metatype = node[NNCFGraph.METATYPE_ATTR] + node_metatype = node[NNCFNode.METATYPE_ATTR] is_op_non_change_precision_activation_tensor = ( node_metatype not in DEFAULT_PT_QUANT_TRAIT_TO_OP_DICT[QuantizationTrait.INPUTS_QUANTIZABLE] ) @@ -358,7 +359,7 @@ def get_merged_original_graph_with_patterns(self, original_graph: PTNNCFGraph): merged_nodes.append(original_graph._nx_graph.nodes[node_key]) merged_graph.remove_node(node_key) merged_node_attrs = { - PTNNCFGraph.KEY_NODE_ATTR: merged_node_key, + NNCFNode.KEY_NODE_ATTR: merged_node_key, self.NODES_GRAPH_ATTR: merged_nodes, self.IS_MERGED_GRAPH_ATTR: True, } diff --git a/nncf/torch/quantization/precision_init/adjacent_quantizers.py 
b/nncf/torch/quantization/precision_init/adjacent_quantizers.py index 4b2a6ed3927..6a08cf35fd8 100644 --- a/nncf/torch/quantization/precision_init/adjacent_quantizers.py +++ b/nncf/torch/quantization/precision_init/adjacent_quantizers.py @@ -92,7 +92,7 @@ def parse_from_quantizer_setup( for weight_quantized_module_node_name, w_qp_id in module_scope_per_weight_qp_id.items(): if weight_quantized_module_node_name not in quantized_node_per_activation_qp_id: nncf_logger.debug( - f"Module {weight_quantized_module_node_name} has quantized weights" f" and no quantized inputs!" + f"Module {weight_quantized_module_node_name} has quantized weights and no quantized inputs!" ) continue a_qp_id = quantized_node_per_activation_qp_id[weight_quantized_module_node_name] diff --git a/nncf/torch/quantization/precision_init/base_init.py b/nncf/torch/quantization/precision_init/base_init.py index 34f81c39525..4009ab9f4dc 100644 --- a/nncf/torch/quantization/precision_init/base_init.py +++ b/nncf/torch/quantization/precision_init/base_init.py @@ -1,15 +1,13 @@ -""" - Copyright (c) 2020-2023 Intel Corporation - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - http://www.apache.org/licenses/LICENSE-2.0 - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. -""" +# Copyright (c) 2023 Intel Corporation +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. from collections import OrderedDict from copy import deepcopy diff --git a/nncf/torch/quantization/precision_init/bitwidth_graph.py b/nncf/torch/quantization/precision_init/bitwidth_graph.py index f776ec6a32b..3561557872e 100644 --- a/nncf/torch/quantization/precision_init/bitwidth_graph.py +++ b/nncf/torch/quantization/precision_init/bitwidth_graph.py @@ -1,21 +1,21 @@ -""" - Copyright (c) 2020-2023 Intel Corporation - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - http://www.apache.org/licenses/LICENSE-2.0 - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. -""" +# Copyright (c) 2023 Intel Corporation +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + from collections import defaultdict from typing import Dict import networkx as nx from nncf.common.graph import NNCFGraph +from nncf.common.graph.graph import NNCFNode from nncf.common.logging import nncf_logger from nncf.common.quantization.structs import NonWeightQuantizerId from nncf.torch.layers import NNCFConv2d @@ -47,8 +47,7 @@ def __init__( flops_vs_node_group[idx] = (flops, node_set) grouped_mode = bool(groups_of_adjacent_quantizers) - for node_key in nncf_graph.get_all_node_keys(): - node = nncf_graph.get_node_by_key(node_key) + for node_key, node in nncf_graph.nodes.items(): color = "" operator_name = node.node_type module = model.nncf.get_containing_module(node.node_name) @@ -162,7 +161,7 @@ def _paint_activation_quantizer_node( bitwidth = quantizer_info.quantizer_module_ref.num_bits activation_fq_node["color"] = bitwidth_color_map[bitwidth] activation_fq_node["style"] = "filled" - node_id = activation_fq_node[NNCFGraph.ID_NODE_ATTR] + node_id = activation_fq_node[NNCFNode.ID_NODE_ATTR] activation_fq_node["label"] = "AFQ_[{}]_#{}".format( quantizer_info.quantizer_module_ref.get_quantizer_config(), str(node_id) diff --git a/nncf/torch/quantization/precision_init/hawq_debug.py b/nncf/torch/quantization/precision_init/hawq_debug.py index c5069e9f752..b2165e09e5d 100644 --- a/nncf/torch/quantization/precision_init/hawq_debug.py +++ b/nncf/torch/quantization/precision_init/hawq_debug.py @@ -1,15 +1,14 @@ -""" - Copyright (c) 2020-2023 Intel Corporation - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - http://www.apache.org/licenses/LICENSE-2.0 - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. -""" +# Copyright (c) 2023 Intel Corporation +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
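The FAKE_QUANTIZE export branch changed above now feeds torch's native fake-quantize kernels with the scale, zero point and integer range returned by get_parameters_for_torch_fq, choosing the channel axis as the argmax of scale_shape when the quantizer is per-channel. A minimal standalone sketch of that selection logic follows; the tensors and the q_min/q_max range here are invented for illustration and are not taken from the patch.

import torch

x = torch.randn(2, 3, 8, 8)
scale = torch.tensor([0.1, 0.2, 0.05])
zero_point = torch.zeros(3, dtype=torch.int32)
q_min, q_max = 0, 255              # assumed 8-bit asymmetric range
scale_shape = (1, 3, 1, 1)         # per-channel quantization over dim 1
per_channel = any(dim > 1 for dim in scale_shape)

# mirrors run_export_quantization: channel axis is the argmax of scale_shape
ch_axis = int(torch.tensor(scale_shape).argmax()) if per_channel else None
if ch_axis is not None:
    y = torch.fake_quantize_per_channel_affine(x, scale, zero_point, ch_axis, q_min, q_max)
else:
    y = torch.fake_quantize_per_tensor_affine(x, float(scale[0]), int(zero_point[0]), q_min, q_max)
print(y.shape)  # same shape as x, values snapped to the quantization grid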
+ import os from collections import OrderedDict from pathlib import Path @@ -199,13 +198,16 @@ def dump_perturbations_ratio(self): perturb.append(perturbations_for_all_observed_qconfig_sequence_in_current_layer[max_bitwidth_qconfig]) max_bitwidths.append(max_bitwidth_qconfig.num_bits) ax.plot( - [p / m / n for p, m, n in zip(perturb, self._num_weights_per_layer, self._norm_weights_per_layer)], + [ + (p / m / n).cpu().numpy() + for p, m, n in zip(perturb, self._num_weights_per_layer, self._norm_weights_per_layer) + ], label="normalized n-bit noise", ) - ax.plot(perturb, label="n-bit noise") + ax.plot([x.cpu().numpy() for x in perturb], label="n-bit noise") ax.plot(max_bitwidths, label="n") ax.plot(self._traces_per_layer.cpu().numpy(), label="trace") - ax.plot([n * p for n, p in zip(self._traces_per_layer, perturb)], label="trace * noise") + ax.plot([(n * p).cpu().numpy() for n, p in zip(self._traces_per_layer, perturb)], label="trace * noise") ax.legend() plt.savefig(os.path.join(self._dump_dir, "Quantization_noise_vs_Average_Trace")) diff --git a/nncf/torch/quantization/quantize_functions.py b/nncf/torch/quantization/quantize_functions.py index 88abbb14a45..a82721b4c59 100644 --- a/nncf/torch/quantization/quantize_functions.py +++ b/nncf/torch/quantization/quantize_functions.py @@ -136,7 +136,9 @@ def _quantize_autograd_to_range(input_, input_low, input_high, levels): # pylint:disable=abstract-method class ExportQuantizeToFakeQuantize(torch.autograd.Function): @staticmethod - def symbolic(g, input_, levels, input_low, input_high, output_low, output_high): + def symbolic( + g, input_, levels, input_low, input_high, output_low, output_high, scale, zero_point, q_min, q_max, ch_axis + ): output = g.op( add_domain("FakeQuantize"), input_, input_low, input_high, output_low, output_high, levels_i=levels ) @@ -145,8 +147,12 @@ def symbolic(g, input_, levels, input_low, input_high, output_low, output_high): return output @staticmethod - def forward(ctx, input_, levels, input_low, input_high, output_low, output_high): - return torch.clone(input_) + def forward( + ctx, input_, levels, input_low, input_high, output_low, output_high, scale, zero_point, q_min, q_max, ch_axis + ): + if ch_axis is not None: + return torch.fake_quantize_per_channel_affine(input_, scale, zero_point, ch_axis, q_min, q_max) + return torch.fake_quantize_per_tensor_affine(input_, scale, zero_point, q_min, q_max) @staticmethod def backward(ctx: Any, *grad_outputs: Any) -> Any: @@ -222,7 +228,7 @@ def forward(ctx, input_low, input_range, levels): input_low_copy[input_low_copy > 0] = 0 input_high[input_high < 0] = 0 n = levels - 1 - # Need a cast here because fp16 division yileds fp32 results sometimes + # Need a cast here because fp16 division yields fp32 results sometimes scale = (levels / (input_high - input_low_copy)).to(dtype=input_high.dtype) zp = torch.round(-input_low_copy * scale) diff --git a/nncf/torch/quantization/quantize_model.py b/nncf/torch/quantization/quantize_model.py index 2c41e26c24a..3b0c82406e5 100644 --- a/nncf/torch/quantization/quantize_model.py +++ b/nncf/torch/quantization/quantize_model.py @@ -22,7 +22,7 @@ from nncf.parameters import ModelType from nncf.parameters import TargetDevice from nncf.quantization.advanced_parameters import AdvancedQuantizationParameters -from nncf.quantization.advanced_parameters import convert_advanced_parameters_to_dict +from nncf.quantization.advanced_parameters import apply_advanced_parameters_to_config from nncf.scopes import IgnoredScope from nncf.scopes import 
convert_ignored_scope_to_list from nncf.torch.dynamic_graph.context import no_nncf_trace @@ -32,6 +32,8 @@ from nncf.torch.initialization import PTInitializingDataLoader from nncf.torch.model_creation import create_compressed_model from nncf.torch.nested_objects_traversal import objwalk +from nncf.torch.nncf_module_replacement import replace_modules_by_nncf_modules +from nncf.torch.quantization.weights_compression import insert_pre_compression_operations from nncf.torch.utils import get_model_device from nncf.torch.utils import is_tensor @@ -39,7 +41,7 @@ # TODO(alexsu52): It is a workaround and should be removed. -class CalibrarionDataLoader(PTInitializingDataLoader): +class CalibrationDataLoader(PTInitializingDataLoader): """ This class wraps the nncf.Dataset. @@ -61,7 +63,7 @@ def __iter__(self): def __len__(self): if self._length is None: data = self._data_loader.get_inference_data() - self._length = CalibrarionDataLoader._get_length(data) + self._length = CalibrationDataLoader._get_length(data) return self._length def get_inputs(self, dataloader_output: Any) -> Tuple[Tuple, Dict]: @@ -169,18 +171,13 @@ def _create_nncf_config( compression_config["ignored_scopes"].extend(_ignored_scope) else: compression_config["ignored_scopes"] = _ignored_scope + compression_config["validate_scopes"] = ignored_scope.validate if advanced_parameters is not None: - advanced_config = convert_advanced_parameters_to_dict(advanced_parameters) + compression_config = apply_advanced_parameters_to_config(compression_config, advanced_parameters) - ranges = advanced_config.get("initializer", {}).get("range") - if ranges is not None: - for rconfig in ranges: - rconfig["num_init_samples"] = subset_size - if "type" not in rconfig: - rconfig["type"] = DEFAULT_RANGE_TYPE - - compression_config.update(advanced_config) + if model_type == ModelType.TRANSFORMER: + compression_config["validate_scopes"] = False return NNCFConfig({"target_device": target_device.value, "compression": compression_config}) @@ -214,7 +211,7 @@ def quantize_impl( preset, target_device, subset_size, model_type, ignored_scope, advanced_parameters ) - calibration_data_loader = CalibrarionDataLoader(calibration_dataset) + calibration_data_loader = CalibrationDataLoader(calibration_dataset) nncf_config.register_extra_structs( [ QuantizationRangeInitArgs(data_loader=calibration_data_loader), @@ -261,3 +258,13 @@ def send_to_device(tensor): compressed_model.nncf.disable_dynamic_graph_building() return compressed_model + + +def compress_weights_impl(model: torch.nn.Module) -> torch.nn.Module: + """ + Implementation of the `compress_weights()` method for the PyTorch backend. + """ + compressed_model, _ = replace_modules_by_nncf_modules(model) + insert_pre_compression_operations(model) + + return compressed_model diff --git a/nncf/torch/quantization/reference.py b/nncf/torch/quantization/reference.py index 0a36747c6e2..b93b1617a76 100644 --- a/nncf/torch/quantization/reference.py +++ b/nncf/torch/quantization/reference.py @@ -1,3 +1,14 @@ +# Copyright (c) 2023 Intel Corporation +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + from enum import Enum from typing import List, Tuple, TypeVar @@ -78,7 +89,7 @@ def tune_range( input_high[input_high < 0] = 0 n = levels - 1 scale = levels / (input_high - input_low) - scale = scale.astype(dtype=input_high.dtype) + scale = self._astype(scale, input_high.dtype) zp = self.backend.round(-input_low * scale) new_input_low = self.backend.where(zp < n, zp / (zp - n) * input_high, input_low) @@ -87,7 +98,7 @@ def tune_range( range_1 = input_high - new_input_low range_2 = new_input_high - input_low - mask = (range_1 > range_2).astype(input_high.dtype) + mask = self._astype((range_1 > range_2), input_high.dtype) inv_mask = abs(1 - mask) new_input_low = mask * new_input_low + inv_mask * input_low diff --git a/nncf/torch/quantization/strip.py b/nncf/torch/quantization/strip.py index f4b9129709d..e5412b45acf 100644 --- a/nncf/torch/quantization/strip.py +++ b/nncf/torch/quantization/strip.py @@ -19,18 +19,22 @@ from nncf.torch.quantization.layers import BaseQuantizer from nncf.torch.quantization.layers import SymmetricQuantizer +SUPPORTED_NUM_BITS_FOR_STRIP_MODEL = [8] + def replace_quantizer_to_torch_native_module(model: NNCFNetwork) -> NNCFNetwork: """ Replace NNCF quantizer modules to PyTorch FakeQuantizer module and remove unused quantizer operators. :param model: Target model. - :return: The modified NNCF network. """ - for key in model.nncf.external_quantizers.keys(): - if model.nncf.external_quantizers[key].is_enabled_quantization(): - model.nncf.external_quantizers[key] = convert_to_torch_fakequantizer(model.nncf.external_quantizers[key]) + if hasattr(model.nncf, "external_quantizers"): + for key in model.nncf.external_quantizers.keys(): + if model.nncf.external_quantizers[key].is_enabled_quantization(): + model.nncf.external_quantizers[key] = convert_to_torch_fakequantizer( + model.nncf.external_quantizers[key] + ) for node in model.nncf.get_original_graph().get_all_nodes(): if node.node_type in ["nncf_model_input", "nncf_model_output"]: @@ -67,13 +71,17 @@ def convert_to_torch_fakequantizer(nncf_quantizer: BaseQuantizer) -> FakeQuantiz Convert BaseQuantizer module to FakeQuantize. :param quantizer: NNCF Quantizer module. - :return: Instance of FakeQuantize similar to the input quantizer. """ # Call set_ranges in case the basic parameters impacting levels had changed nncf_quantizer.set_levels() + if nncf_quantizer.num_bits not in SUPPORTED_NUM_BITS_FOR_STRIP_MODEL: + raise RuntimeError( + "Converting nncf quantizer module to torch native only supports " + f"for num_bits in {SUPPORTED_NUM_BITS_FOR_STRIP_MODEL}." + ) per_channel = nncf_quantizer.per_channel scale_shape = nncf_quantizer.scale_shape ch_axis = int(np.argmax(scale_shape)) @@ -119,10 +127,9 @@ def remove_disabled_quantizers(model: NNCFNetwork) -> NNCFNetwork: Remove all unused quantizer operators from the model. :param model: Compressed model. - :return: The modified NNCF network. 
""" - if hasattr(model, "external_quantizers"): + if hasattr(model.nncf, "external_quantizers"): for key in list(model.nncf.external_quantizers.keys()): op = model.nncf.external_quantizers[key] if isinstance(op, BaseQuantizer) and not op.is_enabled_quantization(): @@ -147,3 +154,16 @@ def remove_disabled_quantizers(model: NNCFNetwork) -> NNCFNetwork: nncf_module.remove_post_forward_operation(key) return model + + +def strip_quantized_model(model: NNCFNetwork): + """ + Returns the model with as much custom NNCF additions as possible removed + while still preserving the functioning of the model object as a compressed model. + + :param model: Compressed model. + :return: The modified NNCF network. + """ + model = replace_quantizer_to_torch_native_module(model) + model = remove_disabled_quantizers(model) + return model diff --git a/nncf/torch/quantization/weights_compression.py b/nncf/torch/quantization/weights_compression.py new file mode 100644 index 00000000000..9fc725fb235 --- /dev/null +++ b/nncf/torch/quantization/weights_compression.py @@ -0,0 +1,103 @@ +# Copyright (c) 2023 Intel Corporation +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from typing import Dict, List, Optional + +import torch +from torch import nn + +from nncf.torch.layers import NNCF_WRAPPED_USER_MODULES_DICT +from nncf.torch.layers import NNCFEmbedding +from nncf.torch.layers import NNCFLinear +from nncf.torch.quantization.quantize_functions import get_scale_zp_from_input_low_input_high + + +class WeightsDecompressor(nn.Module): + """Applies decompression of compressed weights in forward pass + + Attributes: + zero_point: zero point in quantization scheme + scale: scale in quantizatin scheme + """ + + def __init__(self, zero_point, scale): + super().__init__() + self.zero_point = zero_point + self.scale = scale + + def forward(self, layer, op_arg): + w = layer.weight.type(dtype=self.scale.dtype) + layer.weight = (w - self.zero_point) * self.scale + + +def _insert_pre_compression_operations( + module: nn.Module, allowed_types: List, level_high: int = 255, compression_hist: Dict = None +) -> Optional[nn.Module]: + """ + Inserts weights compression with dequantization for layers in `allowed_types`. + + :param module: The module to insert the weights compression. + :param allowed_types: list of allowed types for weights compression. + :param level_high: highest possible value of compressed weights (lower is 0 in assymetric quantization). + :param compression_hist: mapping between layer weight and corresponding WeightsDecompressor for finding + shared weights. + :return: The non-trainable module with inserted operations. 
+ """ + if compression_hist is None: + compression_hist = {} + for _, layer in module.named_children(): + if not type(layer) in allowed_types: + _insert_pre_compression_operations(layer, allowed_types, level_high, compression_hist) + continue + + if layer.weight.dtype in [torch.uint8, torch.int8]: + if layer.weight in compression_hist: + layer.register_pre_forward_operation(compression_hist[layer.weight]) + continue + + target_dim = layer.target_weight_dim_for_compression + stat_dim = (target_dim + 1) % 2 + input_low = torch.min(layer.weight, dim=stat_dim).values.detach() + input_high = torch.max(layer.weight, dim=stat_dim).values.detach() + scale, zero_point = get_scale_zp_from_input_low_input_high(0, level_high, input_low, input_high) + + scale = scale.unsqueeze(stat_dim) + zero_point = zero_point.unsqueeze(stat_dim) + key = layer.register_pre_forward_operation(WeightsDecompressor(zero_point, scale)) + + compressed_weight = layer.weight.data / scale + zero_point + compressed_weight = torch.clamp(torch.round(compressed_weight), 0, level_high) + + layer.weight.requires_grad = False + layer.weight.data = compressed_weight.type(dtype=torch.uint8) + + compression_hist[layer.weight] = layer.get_pre_op(key) + + +def insert_pre_compression_operations(module: nn.Module, bits: int = 8) -> Optional[nn.Module]: + """ + Inserts weights compression with dequantization for Linear and Embedding layers. + + :param module: The module to insert the weights compression. + :param bits: number of bits for compression. Note: compressed weights type is + uint8 with one element per 8 bit. + :return: The non-trainable module with inserted operations. + """ + user_types = list(NNCF_WRAPPED_USER_MODULES_DICT.values()) + allowed_types = [NNCFEmbedding, NNCFLinear] + level_high = 2**bits - 1 + + assert level_high < 256 + + for user_type in user_types: + allowed_types.append(user_type) + + _insert_pre_compression_operations(module, allowed_types, level_high) diff --git a/nncf/torch/statistics/aggregator.py b/nncf/torch/statistics/aggregator.py index 4c57699cd3c..6c2c48256c6 100644 --- a/nncf/torch/statistics/aggregator.py +++ b/nncf/torch/statistics/aggregator.py @@ -15,6 +15,7 @@ import torch from nncf.common.factory import TModel +from nncf.common.graph.graph import NNCFGraph from nncf.common.graph.transformations.commands import TransformationPriority from nncf.common.graph.transformations.layout import TransformationLayout from nncf.common.tensor_statistics.aggregator import StatisticPointsContainer @@ -25,10 +26,10 @@ class PTStatisticsAggregator(StatisticsAggregator): - def collect_statistics(self, model: NNCFNetwork) -> None: + def collect_statistics(self, model: NNCFNetwork, graph: NNCFGraph) -> None: with torch.no_grad(): with model.nncf.temporary_clean_view() as intermediate_model: - super().collect_statistics(intermediate_model) + super().collect_statistics(intermediate_model, graph) def _register_statistics( self, outputs: Dict[str, PTNNCFTensor], statistic_points: StatisticPointsContainer @@ -59,7 +60,7 @@ def _get_transformation_layout_extra_outputs( @staticmethod def _get_merged_statistic_points( - statistic_points: StatisticPointsContainer, model: TModel + statistic_points: StatisticPointsContainer, model: TModel, graph: NNCFGraph ) -> StatisticPointsContainer: # TODO: mirgate to experimental statistic collector and use common merging algorithm return statistic_points diff --git a/nncf/torch/strip.py b/nncf/torch/strip.py new file mode 100644 index 00000000000..ed4959064fd --- /dev/null +++ 
b/nncf/torch/strip.py @@ -0,0 +1,25 @@ +# Copyright (c) 2023 Intel Corporation +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +from nncf.torch.nncf_network import NNCFNetwork + + +def strip(model: NNCFNetwork, do_copy: bool = True) -> NNCFNetwork: + """ + Returns the model object with as much custom NNCF additions as possible removed + while still preserving the functioning of the model object as a compressed model. + + :param do_copy: If True (default), will return a copy of the currently associated model object. If False, + will return the currently associated model object "stripped" in-place. + :return: The stripped model. + """ + return model.nncf.strip(do_copy) diff --git a/nncf/torch/tensor_statistics/collectors.py b/nncf/torch/tensor_statistics/collectors.py index 66aa4a76e52..186bbb18393 100644 --- a/nncf/torch/tensor_statistics/collectors.py +++ b/nncf/torch/tensor_statistics/collectors.py @@ -17,6 +17,7 @@ from nncf.common.tensor import TensorElementsType from nncf.common.tensor_statistics.collectors import MeanMinMaxStatisticCollector from nncf.common.tensor_statistics.collectors import MeanPercentileStatisticCollector +from nncf.common.tensor_statistics.collectors import MeanStatisticCollector from nncf.common.tensor_statistics.collectors import MedianMADStatisticCollector from nncf.common.tensor_statistics.collectors import MinMaxStatisticCollector from nncf.common.tensor_statistics.collectors import MixedMinMaxStatisticCollector @@ -27,6 +28,7 @@ from nncf.torch.dynamic_graph.context import no_nncf_trace from nncf.torch.tensor import PTNNCFTensor from nncf.torch.tensor_statistics.reduction import expand_like +from nncf.torch.tensor_statistics.statistics import PTMeanTensorStatistic from nncf.torch.tensor_statistics.statistics import PTMedianMADTensorStatistic from nncf.torch.tensor_statistics.statistics import PTMinMaxTensorStatistic from nncf.torch.tensor_statistics.statistics import PTPercentileTensorStatistic @@ -73,6 +75,18 @@ def masked_mean(x: NNCFTensor, axis: Union[int, tuple, list], mask: NNCFTensor, def masked_median(x: NNCFTensor, axis: Union[int, tuple, list], mask: NNCFTensor, keepdims=False) -> NNCFTensor: raise NotImplementedError() + @staticmethod + def mean_per_channel(x: NNCFTensor, axis: int) -> NNCFTensor: + if len(x.shape) < 3: + return PTNNCFTensor(torch.mean(x.tensor, axis=0)) + x = torch.moveaxis(x.tensor, axis, 1) + t = x.reshape(x.shape[0], x.shape[1], -1) + return PTNNCFTensor(torch.mean(t, axis=(0, 2))) + + @staticmethod + def batch_mean(x: NNCFTensor) -> NNCFTensor: + return PTNNCFTensor(torch.mean(x.tensor, axis=0, keepdims=True)) + @staticmethod def stack(x: Union[List[NNCFTensor], Deque[NNCFTensor]], axis: int = 0) -> NNCFTensor: x = [t.tensor for t in x] @@ -96,10 +110,6 @@ def quantile( ) -> List[NNCFTensor]: raise NotImplementedError() - @staticmethod - def mean_per_channel(x: NNCFTensor, axis: int) -> NNCFTensor: - raise NotImplementedError() - @classmethod def no_outliers_map( cls, x: NNCFTensor, fn: Callable[[NNCFTensor, Optional[int]], Any], 
axis: int = 0, alpha: float = 0.01 @@ -235,3 +245,16 @@ def _get_statistics(self) -> PTPercentileTensorStatistic: stacked_pct_vals = torch.stack(list(val)) mean_percentile_values[pct] = stacked_pct_vals.mean(dim=0).view(self._reduction_shape) return PTPercentileTensorStatistic(mean_percentile_values) + + +class PTMeanStatisticCollector(MeanStatisticCollector): + @staticmethod + def _get_processor() -> NNCFCollectorTensorProcessor: + return PTNNCFCollectorTensorProcessor() + + def _register_input(self, x: torch.Tensor): + with no_nncf_trace(): + self._register_input_common(PTNNCFTensor(x)) + + def _get_statistics(self) -> PTMeanTensorStatistic: + return PTMeanTensorStatistic(self._mean_aggregate().tensor, self._shape()) diff --git a/nncf/torch/tensor_statistics/statistics.py b/nncf/torch/tensor_statistics/statistics.py index 325e1b27e81..7a251b19207 100644 --- a/nncf/torch/tensor_statistics/statistics.py +++ b/nncf/torch/tensor_statistics/statistics.py @@ -11,6 +11,7 @@ import torch +from nncf.common.tensor_statistics.statistics import MeanTensorStatistic from nncf.common.tensor_statistics.statistics import MedianMADTensorStatistic from nncf.common.tensor_statistics.statistics import MinMaxTensorStatistic from nncf.common.tensor_statistics.statistics import PercentileTensorStatistic @@ -35,6 +36,12 @@ def tensor_eq(tensor1: torch.Tensor, tensor2: torch.Tensor, rtol=1e-6) -> bool: return bool(torch.allclose(tensor1, tensor2, rtol=rtol)) +class PTMeanTensorStatistic(MeanTensorStatistic): + @staticmethod + def tensor_eq(tensor1: torch.Tensor, tensor2: torch.Tensor, rtol=1e-6) -> bool: + return bool(torch.allclose(tensor1, tensor2, rtol=rtol)) + + def pt_convert_stat_to_min_max_tensor_stat(statistic: TensorStatistic) -> PTMinMaxTensorStatistic: if isinstance(statistic, PTMinMaxTensorStatistic): return statistic diff --git a/nncf/torch/utils.py b/nncf/torch/utils.py index 68fe76157ae..5ac8f142594 100644 --- a/nncf/torch/utils.py +++ b/nncf/torch/utils.py @@ -132,7 +132,7 @@ def fp32_accum_wrapper(func): def wrapper(tensor_to_sum, ret_tensor): half = tensor_to_sum.dtype == np.float16 if half: - tensor_to_sum = tensor_to_sum.astype(np.float) + tensor_to_sum = tensor_to_sum.astype(np.float32) retval = func(tensor_to_sum, ret_tensor) if half: retval = retval.astype(np.float16) diff --git a/nncf/version.py b/nncf/version.py index 80a98bae145..6fa2e735ef1 100644 --- a/nncf/version.py +++ b/nncf/version.py @@ -1,5 +1,16 @@ -__version__ = "2.5.0" +# Copyright (c) 2023 Intel Corporation +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
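The tensor-processor additions above give the PyTorch backend working mean reducers instead of NotImplementedError stubs: batch_mean averages over the batch dimension keeping it, and mean_per_channel moves the channel axis to position 1, flattens the remaining dimensions and averages over them. A small self-contained check of what that reshape-and-mean amounts to, with shapes chosen arbitrarily for illustration:

import torch

x = torch.arange(2 * 3 * 4 * 4, dtype=torch.float32).reshape(2, 3, 4, 4)
axis = 1  # channel dimension

# the reduction performed by mean_per_channel for tensors with 3+ dims
moved = torch.moveaxis(x, axis, 1)
flat = moved.reshape(moved.shape[0], moved.shape[1], -1)
per_channel = torch.mean(flat, dim=(0, 2))

# ...which is simply the mean over every dimension except the channel one
assert torch.allclose(per_channel, x.mean(dim=(0, 2, 3)))

batch_mean = torch.mean(x, dim=0, keepdim=True)
print(per_channel.shape, batch_mean.shape)  # torch.Size([3]) torch.Size([1, 3, 4, 4])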
-BKC_TORCH_VERSION = "1.13.1" -BKC_TORCHVISION_VERSION = "0.14.1" -BKC_TF_VERSION = "2.11.*" +__version__ = "2.6.0" + +BKC_TORCH_VERSION = "2.0.1" +BKC_TORCHVISION_VERSION = "0.15.1" +BKC_TF_VERSION = "2.12.*" diff --git a/setup.py b/setup.py index 274344275de..52c142f408e 100644 --- a/setup.py +++ b/setup.py @@ -101,50 +101,55 @@ def find_version(*file_paths): INSTALL_REQUIRES = [ - "ninja>=1.10.0.post2, <1.11", - "texttable>=1.6.3", - "scipy>=1.3.2, <1.11", + "jsonschema>=3.2.0", + "jstyleson>=0.0.2", + "natsort>=7.1.0", "networkx>=2.6, <=2.8.2", # see ticket 94048 or https://github.com/networkx/networkx/issues/5962 - "numpy>=1.19.1, <1.24", + "ninja>=1.10.0.post2, <1.11", + "numpy>=1.19.1, <1.25", + "openvino-telemetry>=2023.1.1", + "packaging>=20.0", + "pandas>=1.1.5,<2.1", + "psutil", + "pydot>=1.4.1", + "pymoo>=0.6.0.1", # The recent pyparsing major version update seems to break # integration with networkx - the graphs parsed from current .dot # reference files no longer match against the graphs produced in tests. # Using 2.x versions of pyparsing seems to fix the issue. # Ticket: 69520 "pyparsing<3.0", - "pymoo==0.5.0", - "jsonschema>=3.2.0", - "pydot>=1.4.1", - "jstyleson>=0.0.2", - "tqdm>=4.54.1", - "natsort>=7.1.0", - "pandas>=1.1.5,<2.1", "scikit-learn>=0.24.0", - "openvino-telemetry", + "scipy>=1.3.2, <1.11", + "texttable>=1.6.3", + "tqdm>=4.54.1", ] TF_EXTRAS = [ - "tensorflow~=2.11.1", - # The workaround of the protobuf issue and should be fixed with migration on TF 2.12 + "tensorflow~=2.12.0", + # This is required for support of TF 2.8.4 which needs protobuf<=3.19.6 "tensorflow-metadata<=1.13.0", ] TORCH_EXTRAS = [ - "torch>=1.9.1,<1.14;python_version < '3.11'", + "torch>=1.13.0,<2.1;python_version < '3.11'", ] ONNX_EXTRAS = ["onnx~=1.13.1", "onnxruntime~=1.14.1;python_version < '3.11'"] -OPENVINO_EXTRAS = ["openvino==2023.0.0"] +OPENVINO_EXTRAS = ["openvino==2023.0.1"] EXTRAS_REQUIRE = { "dev": [ + "black==23.3.0", + "isort==5.12.0", "kaleido>=0.2.1", "matplotlib>=3.3.4, <3.6", "pillow>=9.0.0", "plotly-express>=0.4.1", + "pre-commit==3.2.2", ], "tests": ["pytest"], "docs": [], diff --git a/tests/common/accuracy_control/test_calculate_drop.py b/tests/common/accuracy_control/test_calculate_drop.py new file mode 100644 index 00000000000..f67d7284739 --- /dev/null +++ b/tests/common/accuracy_control/test_calculate_drop.py @@ -0,0 +1,98 @@ +# Copyright (c) 2023 Intel Corporation +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from dataclasses import dataclass + +import pytest + +from nncf.parameters import DropType +from nncf.quantization.algorithms.accuracy_control.algorithm import calculate_accuracy_drop + + +@dataclass +class TestCase: + initial_metric: float + quantized_metric: float + drop_type: DropType + expected_should_terminate: bool + expected_accuracy_drop: float + max_drop: float = 0.01 + + +@pytest.mark.parametrize( + "ts", + [ + # ABSOLUTE + TestCase( + initial_metric=0.2923, + quantized_metric=0.3185, + drop_type=DropType.ABSOLUTE, + expected_should_terminate=True, + expected_accuracy_drop=-0.0262, + ), + TestCase( + initial_metric=0.3185, + quantized_metric=0.2923, + drop_type=DropType.ABSOLUTE, + expected_should_terminate=False, + expected_accuracy_drop=0.0262, + ), + TestCase( + initial_metric=-0.2923, + quantized_metric=-0.3185, + drop_type=DropType.ABSOLUTE, + expected_should_terminate=False, + expected_accuracy_drop=0.0262, + ), + TestCase( + initial_metric=-0.3185, + quantized_metric=-0.2923, + drop_type=DropType.ABSOLUTE, + expected_should_terminate=True, + expected_accuracy_drop=-0.0262, + ), + # RELATIVE + TestCase( + initial_metric=0.2923, + quantized_metric=0.3185, + drop_type=DropType.RELATIVE, + expected_should_terminate=True, + expected_accuracy_drop=None, + ), + TestCase( + initial_metric=0.3185, + quantized_metric=0.2923, + drop_type=DropType.RELATIVE, + expected_should_terminate=False, + expected_accuracy_drop=0.08226059, + ), + TestCase( + initial_metric=-0.2923, + quantized_metric=-0.3185, + drop_type=DropType.RELATIVE, + expected_should_terminate=False, + expected_accuracy_drop=0.0896339, + ), + TestCase( + initial_metric=-0.3185, + quantized_metric=-0.2923, + drop_type=DropType.RELATIVE, + expected_should_terminate=True, + expected_accuracy_drop=None, + ), + ], +) +def test_calculate_accuracy_drop(ts: TestCase): + should_terminate, accuracy_drop = calculate_accuracy_drop( + ts.initial_metric, ts.quantized_metric, ts.max_drop, ts.drop_type + ) + assert should_terminate == ts.expected_should_terminate + assert pytest.approx(accuracy_drop) == ts.expected_accuracy_drop diff --git a/tests/common/accuracy_control/test_evaluator.py b/tests/common/accuracy_control/test_evaluator.py new file mode 100644 index 00000000000..080a24a5ab6 --- /dev/null +++ b/tests/common/accuracy_control/test_evaluator.py @@ -0,0 +1,100 @@ +# Copyright (c) 2023 Intel Corporation +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
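The new test module above pins down the contract of calculate_accuracy_drop for both drop types: the drop is the initial metric minus the quantized metric, RELATIVE additionally normalizes by the absolute initial metric, and when the quantized model is not worse the search terminates (with the RELATIVE drop reported as None). A simplified re-implementation that reproduces the parametrized expectations above; this is a sketch derived from the tests, not the actual NNCF function.

from nncf.parameters import DropType

def calculate_accuracy_drop_sketch(initial_metric, quantized_metric, max_drop, drop_type):
    # quantized model is at least as good as the original: stop immediately
    if quantized_metric >= initial_metric:
        if drop_type == DropType.ABSOLUTE:
            return True, initial_metric - quantized_metric
        return True, None
    drop = initial_metric - quantized_metric
    if drop_type == DropType.RELATIVE:
        drop = drop / abs(initial_metric)
    return drop <= max_drop, drop

print(calculate_accuracy_drop_sketch(0.3185, 0.2923, 0.01, DropType.RELATIVE))
# (False, 0.0822605...), matching the corresponding RELATIVE case above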
+ +from dataclasses import dataclass +from typing import List, Union + +import numpy as np +import pytest + +from nncf.data.dataset import Dataset +from nncf.quantization.algorithms.accuracy_control.evaluator import Evaluator + + +@dataclass +class TestCase: + metric_value: float + values_for_each_item: Union[None, List[float], List[List[np.ndarray]]] + expected_is_metric_mode: bool + raise_exception: bool = False + + +@pytest.mark.parametrize( + "ts", + [ + # Return: (float, None) + TestCase( + metric_value=0.1, + values_for_each_item=None, + expected_is_metric_mode=True, + ), + # Return: (float, List[float]) + TestCase( + metric_value=0.1, + values_for_each_item=[0.3], + expected_is_metric_mode=True, + ), + # Return: (float, List[List[TTensor]]) + TestCase( + metric_value=0.1, + values_for_each_item=[[np.array([1.1])]], + expected_is_metric_mode=False, + ), + # Return: (None, None) + TestCase( + metric_value=None, + values_for_each_item=None, + expected_is_metric_mode=False, + ), + # Return: (None, List[float]) + TestCase( + metric_value=None, + values_for_each_item=[0.3], + expected_is_metric_mode=None, + raise_exception=True, + ), + # Return: (None, List[List[TTensor]]) + TestCase(metric_value=None, values_for_each_item=[[np.array([1.1])]], expected_is_metric_mode=False), + # Return: (ConvertibleToFloat, List[ConvertibleToFloat]) + TestCase( + metric_value=np.array(0.1), + values_for_each_item=[np.array(0.3)], + expected_is_metric_mode=True, + ), + # Return: (ConvertibleToFloat, List[ConvertibleToFloat]) + TestCase( + metric_value=np.array([0.1]), + values_for_each_item=[np.array([0.3])], + expected_is_metric_mode=True, + ), + # Return: (NotConvertibleToFloat, None) + TestCase(metric_value=[0.1], values_for_each_item=None, expected_is_metric_mode=None, raise_exception=True), + ], +) +def test_determine_mode(ts: TestCase): + def _validation_fn(dummy_model, dummy_dataset): + return (ts.metric_value, ts.values_for_each_item) + + # pylint: disable=W0212 + if ts.raise_exception: + with pytest.raises(RuntimeError): + _ = Evaluator.determine_mode(None, Dataset([None]), _validation_fn) + else: + is_metric_mode = Evaluator.determine_mode(None, Dataset([None]), _validation_fn) + assert is_metric_mode == ts.expected_is_metric_mode + + +def test_determine_mode_2(): + def _validation_fn_with_error(dummy_model, dummy_dataset): + raise RuntimeError + + is_metric_mode = Evaluator.determine_mode(None, Dataset([None]), _validation_fn_with_error) + assert not is_metric_mode diff --git a/tests/common/accuracy_control/test_ranking.py b/tests/common/accuracy_control/test_ranking.py index d0c08dc43b4..a383f8281cc 100644 --- a/tests/common/accuracy_control/test_ranking.py +++ b/tests/common/accuracy_control/test_ranking.py @@ -14,16 +14,12 @@ import numpy as np import pytest -from nncf.data.dataset import Dataset -from nncf.quantization.algorithms.accuracy_control.algorithm import QuantizationAccuracyRestorer from nncf.quantization.algorithms.accuracy_control.rank_functions import normalized_mse -from nncf.quantization.algorithms.accuracy_control.ranker import LogitsBasedRanker -from nncf.quantization.algorithms.accuracy_control.ranker import MetricBasedRanker -from nncf.quantization.algorithms.accuracy_control.ranker import get_ranking_subset_indices +from nncf.quantization.algorithms.accuracy_control.subset_selection import get_subset_indices def create_fp32_tensor_1d(items): - return {"output": np.array(items, dtype=np.float32)} + return np.array(items, dtype=np.float32) @pytest.mark.parametrize( @@ -43,7 
+39,7 @@ def create_fp32_tensor_1d(items): ], ) def test_normalized_mse(x_ref: np.ndarray, x_approx: np.ndarray, expected_nmse: float): - actual_nmse = normalized_mse(x_ref, x_approx) + actual_nmse = normalized_mse([x_ref], [x_approx]) assert np.allclose(expected_nmse, actual_nmse) @@ -78,36 +74,6 @@ def test_normalized_mse(x_ref: np.ndarray, x_approx: np.ndarray, expected_nmse: "subset_size_greater_than_num_errors", ], ) -def test_get_ranking_subset_indices(errors: List[float], subset_size: int, expected_indices: List[int]): - actual_indices = get_ranking_subset_indices(errors, subset_size) +def test_get_subset_indices(errors: List[float], subset_size: int, expected_indices: List[int]): + actual_indices = get_subset_indices(errors, subset_size) assert expected_indices == actual_indices - - -def _validation_fn_with_error(model, val_dataset) -> float: - raise RuntimeError - - -def _validation_fn(model, val_dataset) -> float: - return 0.1 - - -class DummyAccuracyControlAlgoBackend: - @staticmethod - def prepare_for_inference(model): - return model - - -def test_create_logits_ranker(): - algo_backend = DummyAccuracyControlAlgoBackend() - dataset = Dataset([0, 1, 2]) - # pylint:disable=protected-access - ranker = QuantizationAccuracyRestorer._create_ranker(None, _validation_fn_with_error, dataset, 300, algo_backend) - assert isinstance(ranker, LogitsBasedRanker) - - -def test_create_metric_ranker(): - algo_backend = DummyAccuracyControlAlgoBackend() - dataset = Dataset([0, 1, 2]) - # pylint:disable=protected-access - ranker = QuantizationAccuracyRestorer._create_ranker(None, _validation_fn, dataset, 300, algo_backend) - assert isinstance(ranker, MetricBasedRanker) diff --git a/tests/common/conftest.py b/tests/common/conftest.py new file mode 100644 index 00000000000..b9217f8077e --- /dev/null +++ b/tests/common/conftest.py @@ -0,0 +1,12 @@ +# Copyright (c) 2023 Intel Corporation +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
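The test_evaluator.py cases added earlier in this patch encode a decision rule for Evaluator.determine_mode: metric mode requires an overall metric convertible to float, per-item raw outputs (lists of tensors) force non-metric mode, and per-item scalar values without an overall metric are rejected. The real determine_mode takes a model, a Dataset and the validation function and actually invokes it; the standalone helper below only captures that decision rule and is named hypothetically.

import numpy as np

def determine_mode_sketch(metric_value, values_for_each_item):
    # metric mode needs an overall metric value convertible to float
    if metric_value is not None:
        try:
            float(metric_value)
        except (TypeError, ValueError) as exc:
            raise RuntimeError("metric value is not convertible to float") from exc
    if values_for_each_item is None:
        return metric_value is not None
    first_item = values_for_each_item[0]
    if isinstance(first_item, (list, tuple)):
        # per-item raw model outputs, so logits (non-metric) mode
        return False
    if metric_value is None:
        # per-item metric values without an overall metric is ambiguous
        raise RuntimeError("per-item metric values returned without an overall metric")
    return True

assert determine_mode_sketch(0.1, None) is True
assert determine_mode_sketch(0.1, [[np.array([1.1])]]) is False
assert determine_mode_sketch(None, None) is False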
+ +from tests.shared.logging import nncf_caplog # pylint:disable=unused-import diff --git a/tests/common/graph/test_graph_matching.py b/tests/common/graph/test_graph_matching.py index 703575a03ba..089cd84f9dc 100644 --- a/tests/common/graph/test_graph_matching.py +++ b/tests/common/graph/test_graph_matching.py @@ -175,7 +175,7 @@ def test_matches_with_non_pattern_node_type(): ref_graph = create_graph_with_many_nodes() matches = find_subgraphs_matching_pattern(ref_graph, pattern) - assert matches == [["1", "2", "4", "3", "5", "6"]] + assert matches == [["1", "2", "3", "4", "5", "6"]] def test_matches_with_any_pattern_node_type(): @@ -202,3 +202,28 @@ def test_matches_with_any_pattern_node_type(): ref_graph = create_graph_with_many_nodes() matches = find_subgraphs_matching_pattern(ref_graph, pattern) assert matches == [["7", "1", "2", "4", "8", "3", "5", "9", "6"]] + + +def test_not_match_edges_inside_pattern(): + ref_graph = nx.DiGraph() + ref_graph.add_node("0", **{GraphPattern.METATYPE_ATTR: "0"}) + ref_graph.add_node("1", **{GraphPattern.METATYPE_ATTR: "a"}) + ref_graph.add_node("2", **{GraphPattern.METATYPE_ATTR: "b"}) + ref_graph.add_node("3", **{GraphPattern.METATYPE_ATTR: "c"}) + ref_graph.add_edge("0", "1") + ref_graph.add_edge("1", "2") + ref_graph.add_edge("2", "3") + ref_graph.add_edge("1", "3") + + pattern = GraphPattern() + node_1 = pattern.add_node(**{GraphPattern.METATYPE_ATTR: "a"}) + node_2 = pattern.add_node(**{GraphPattern.METATYPE_ATTR: "b"}) + node_3 = pattern.add_node(**{GraphPattern.METATYPE_ATTR: "c"}) + pattern.add_edge(node_1, node_2) + pattern.add_edge(node_2, node_3) + matches = find_subgraphs_matching_pattern(ref_graph, pattern) + assert not matches + + pattern.add_edge(node_1, node_3) + matches = find_subgraphs_matching_pattern(ref_graph, pattern) + assert matches == [["1", "2", "3"]] diff --git a/tests/common/graph/test_nncf_graph.py b/tests/common/graph/test_nncf_graph.py new file mode 100644 index 00000000000..ad166e1d2fe --- /dev/null +++ b/tests/common/graph/test_nncf_graph.py @@ -0,0 +1,50 @@ +# Copyright (c) 2023 Intel Corporation +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
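test_not_match_edges_inside_pattern above asserts that a pattern only matches when every edge between the matched graph nodes is also present in the pattern, i.e. matching is against node-induced subgraphs. The same behavior can be illustrated with plain networkx, the library the NNCF graph utilities build on; the node and metatype names below are invented for the illustration.

import networkx as nx
from networkx.algorithms import isomorphism

graph = nx.DiGraph()
for name, metatype in [("0", "0"), ("1", "a"), ("2", "b"), ("3", "c")]:
    graph.add_node(name, metatype=metatype)
graph.add_edges_from([("0", "1"), ("1", "2"), ("2", "3"), ("1", "3")])

pattern = nx.DiGraph()
for name, metatype in [("pa", "a"), ("pb", "b"), ("pc", "c")]:
    pattern.add_node(name, metatype=metatype)
pattern.add_edges_from([("pa", "pb"), ("pb", "pc")])

node_match = isomorphism.categorical_node_match("metatype", None)
matcher = isomorphism.DiGraphMatcher(graph, pattern, node_match=node_match)
print(list(matcher.subgraph_isomorphisms_iter()))  # [] because of the extra 1 -> 3 edge

pattern.add_edge("pa", "pc")  # once the pattern also contains the a -> c edge, it matches
matcher = isomorphism.DiGraphMatcher(graph, pattern, node_match=node_match)
print(list(matcher.subgraph_isomorphisms_iter()))  # [{'1': 'pa', '2': 'pb', '3': 'pc'}]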
+ +from nncf.common.graph.graph import NNCFGraph +from nncf.common.graph.layer_attributes import Dtype +from nncf.common.graph.patterns import GraphPattern + + +def test_find_matching_subgraphs(): + nncf_graph = NNCFGraph() + nodes = [] + for node_id in "abcdef": + nodes.append(nncf_graph.add_nncf_node(node_id, node_id, f"metatype_{node_id}")) + + for i in range(1, len(nodes)): + nncf_graph.add_edge_between_nncf_nodes( + from_node_id=nodes[i - 1].node_id, + to_node_id=nodes[i].node_id, + tensor_shape=[1], + input_port_id=0, + output_port_id=0, + dtype=Dtype.FLOAT, + ) + + graph_pattern = GraphPattern() + for patterns in ["ab", "def"]: + graph_part = GraphPattern() + pattern_nodes = [] + for metatype in patterns: + pattern_nodes.append(graph_part.add_node(**{GraphPattern.METATYPE_ATTR: metatype})) + for i in range(1, len(pattern_nodes)): + graph_part.add_edge(pattern_nodes[i - 1], pattern_nodes[i]) + graph_pattern.add_pattern_alternative(graph_part) + + matches = nncf_graph.find_matching_subgraphs(graph_pattern) + assert len(matches) == 2 + for match in matches: + if len(match) == 3: + assert match == nodes[3:] + continue + assert len(match) == 2 + assert match == nodes[:2] diff --git a/tests/common/graph/test_utils.py b/tests/common/graph/test_utils.py index bdeef405477..c1bc08b4db5 100644 --- a/tests/common/graph/test_utils.py +++ b/tests/common/graph/test_utils.py @@ -1,3 +1,14 @@ +# Copyright (c) 2023 Intel Corporation +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + import pytest from nncf.common.graph.utils import get_concat_axis diff --git a/tests/common/hyperparameter_tuner/__init__.py b/tests/common/hyperparameter_tuner/__init__.py new file mode 100644 index 00000000000..9b29b47534a --- /dev/null +++ b/tests/common/hyperparameter_tuner/__init__.py @@ -0,0 +1,10 @@ +# Copyright (c) 2023 Intel Corporation +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. diff --git a/tests/common/hyperparameter_tuner/test_hyperparameter_tuner.py b/tests/common/hyperparameter_tuner/test_hyperparameter_tuner.py new file mode 100644 index 00000000000..f057cfa18e8 --- /dev/null +++ b/tests/common/hyperparameter_tuner/test_hyperparameter_tuner.py @@ -0,0 +1,432 @@ +# Copyright (c) 2023 Intel Corporation +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import copy +from dataclasses import dataclass +from dataclasses import field +from typing import Any, Dict, List, Tuple + +import pytest + +from nncf.quantization.algorithms.hyperparameter_tuner.algorithm import apply_combination +from nncf.quantization.algorithms.hyperparameter_tuner.algorithm import create_combinations +from nncf.quantization.algorithms.hyperparameter_tuner.algorithm import find_best_combination + +CombinationKey = Tuple[int, ...] +Combination = Dict[str, Any] + + +# ========================================================= +# TEST: Create combinations +# ========================================================= + + +@pytest.mark.parametrize( + "param_settings,expected_combinations", + [ + # one_parameter + ( + # param_settings + {"x": [0, 1, 2]}, + # expected_combinations + { + (None,): {}, + (0,): {"x": 0}, + (1,): {"x": 1}, + (2,): {"x": 2}, + }, + ), + # two_parameters + ( + # param_settings + {"x": [0, 1, 2], "y": [True, False]}, + # expected_combinations + { + (None, None): {}, + (None, 0): {"y": True}, + (None, 1): {"y": False}, + (0, None): {"x": 0}, + (0, 0): {"x": 0, "y": True}, + (0, 1): {"x": 0, "y": False}, + (1, None): {"x": 1}, + (1, 0): {"x": 1, "y": True}, + (1, 1): {"x": 1, "y": False}, + (2, None): {"x": 2}, + (2, 0): {"x": 2, "y": True}, + (2, 1): {"x": 2, "y": False}, + }, + ), + # three_parameters + ( + # param_settings + {"x": [0, 1], "y": [True, False], "z": ["a", "b"]}, + # expected_combinations + { + (None, None, None): {}, + (None, None, 0): {"z": "a"}, + (None, None, 1): {"z": "b"}, + (None, 0, None): {"y": True}, + (None, 0, 0): {"y": True, "z": "a"}, + (None, 0, 1): {"y": True, "z": "b"}, + (None, 1, None): {"y": False}, + (None, 1, 0): {"y": False, "z": "a"}, + (None, 1, 1): {"y": False, "z": "b"}, + (0, None, None): {"x": 0}, + (0, None, 0): {"x": 0, "z": "a"}, + (0, None, 1): {"x": 0, "z": "b"}, + (0, 0, None): {"x": 0, "y": True}, + (0, 0, 0): {"x": 0, "y": True, "z": "a"}, + (0, 0, 1): {"x": 0, "y": True, "z": "b"}, + (0, 1, None): {"x": 0, "y": False}, + (0, 1, 0): {"x": 0, "y": False, "z": "a"}, + (0, 1, 1): {"x": 0, "y": False, "z": "b"}, + (1, None, None): {"x": 1}, + (1, None, 0): {"x": 1, "z": "a"}, + (1, None, 1): {"x": 1, "z": "b"}, + (1, 0, None): {"x": 1, "y": True}, + (1, 0, 0): {"x": 1, "y": True, "z": "a"}, + (1, 0, 1): {"x": 1, "y": True, "z": "b"}, + (1, 1, None): {"x": 1, "y": False}, + (1, 1, 0): {"x": 1, "y": False, "z": "a"}, + (1, 1, 1): {"x": 1, "y": False, "z": "b"}, + }, + ), + ], + ids=[ + "one_parameter", + "two_parameters", + "three_parameters", + ], +) +def test_create_combinations( + param_settings: Dict[str, List[Any]], expected_combinations: Dict[CombinationKey, Combination] +): + actual_combinations = create_combinations(param_settings) + assert expected_combinations == actual_combinations + + +# ========================================================= +# TEST: Apply combination +# ========================================================= + + +@dataclass +class ParamsA: + x: int = None + y: str = None + + +@dataclass +class ParamsB: + x: bool = None + a: ParamsA = field(default_factory=ParamsA) + + 
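The create_combinations cases above enumerate every way of either keeping a parameter at its default (encoded as None in the key) or picking one of its listed values by index, and the resulting combination dict contains only the parameters that were actually overridden. A hypothetical re-implementation that reproduces those expected dictionaries; this is a sketch only, the real algorithm lives in hyperparameter_tuner/algorithm.py.

import itertools
from typing import Any, Dict, List, Tuple

def create_combinations_sketch(param_settings: Dict[str, List[Any]]) -> Dict[Tuple, Dict[str, Any]]:
    names = list(param_settings)
    # each parameter contributes None ("keep default") plus an index into its value list
    index_choices = [[None, *range(len(param_settings[name]))] for name in names]
    combinations = {}
    for key in itertools.product(*index_choices):
        combinations[key] = {
            name: param_settings[name][idx] for name, idx in zip(names, key) if idx is not None
        }
    return combinations

assert create_combinations_sketch({"x": [0, 1, 2]})[(2,)] == {"x": 2}
assert create_combinations_sketch({"x": [0, 1, 2], "y": [True, False]})[(1, 0)] == {"x": 1, "y": True}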
+@dataclass +class ParamsC: + x: float = None + b: ParamsB = field(default_factory=ParamsB) + + +@pytest.mark.parametrize( + "init_params,combination,expected_params", + [ + # change_one_parameter + ( + # init_params + {"x": 0, "y": True}, + # combination + {"x": -1}, + # expected_params + {"x": -1, "y": True}, + ), + # change_two_parameters + ( + # init_params + {"x": 0, "y": True, "z": "a"}, + # combination + {"x": -1, "y": False}, + # expected_params + {"x": -1, "y": False, "z": "a"}, + ), + # change_all_parameters + ( + # init_params + {"x": 0, "y": True, "z": "a"}, + # combination + {"x": -1, "y": False, "z": "b"}, + # expected_params + {"x": -1, "y": False, "z": "b"}, + ), + # change_one_subparameter_depth_1 + ( + # init_params + {"x": 0, "p": ParamsA(), "y": True}, + # combination + {"p:x": -1}, + # expected_params + {"x": 0, "p": ParamsA(x=-1), "y": True}, + ), + # change_one_parameter_and_one_subparameter_depth_1 + ( + # init_params + {"x": 0, "p": ParamsA(), "y": True}, + # combination + {"x": -1, "p:y": "a"}, + # expected_params + {"x": -1, "p": ParamsA(y="a"), "y": True}, + ), + # change_all_subparameters_depth_1 + ( + # init_params + {"x": 0, "p": ParamsA(), "y": True}, + # combination + {"p:x": -1, "p:y": "a"}, + # expected_params + {"x": 0, "p": ParamsA(x=-1, y="a"), "y": True}, + ), + # change_one_subparameter_depth_3 + ( + # init_params + {"x": 0, "p": ParamsC()}, + # combination + {"p:b:a:x": -1}, + # expected_params + {"x": 0, "p": ParamsC(b=ParamsB(a=ParamsA(x=-1)))}, + ), + # change_one_subparameter_depth_2 + ( + # init_params + {"x": 0, "p": ParamsC()}, + # combination + {"p:b:a": ParamsA(x=-1, y="a")}, + # expected_params + {"x": 0, "p": ParamsC(b=ParamsB(a=ParamsA(x=-1, y="a")))}, + ), + # change_one_parameter_and_one_subparameter_depth_3 + ( + # init_params + {"x": 0, "p": ParamsC(), "y": True}, + # combination + {"p:b:a:x": -1, "y": False}, + # expected_params + {"x": 0, "p": ParamsC(b=ParamsB(a=ParamsA(x=-1))), "y": False}, + ), + ], + ids=[ + "change_one_parameter", + "change_two_parameters", + "change_all_parameters", + "change_one_subparameter_depth_1", + "change_one_parameter_and_one_subparameter_depth_1", + "change_all_subparameters_depth_1", + "change_one_subparameter_depth_3", + "change_one_subparameter_depth_2", + "change_one_parameter_and_one_subparameter_depth_3", + ], +) +def test_apply_combination(init_params: Dict[str, Any], combination: Combination, expected_params: Dict[str, Any]): + init_params_copy = copy.deepcopy(init_params) + actual_params = apply_combination(init_params, combination) + + # Check that `init_params` was not changed + assert init_params_copy == init_params + + assert expected_params == actual_params + + +# ========================================================= +# TEST: Find best combination +# ========================================================= + + +@pytest.mark.parametrize( + "combinations,scores,param_settings,expected_combination_key", + [ + # one_parameter-scores_different + ( + # combinations + { + (None,): {}, + (0,): {"x": 0}, + (1,): {"x": 1}, + }, + # scores + { + (None,): 0.1, + (0,): 0.3, + (1,): 0.2, + }, + # param_settings + { + "x": [0, 1], + }, + # expected_combination_key + (0,), + ), + # one_parameter-scores_equal + ( + # combinations + { + (None,): {}, + (0,): {"x": 0}, + (1,): {"x": 1}, + }, + # scores + { + (None,): 0.1, + (0,): 0.1, + (1,): 0.1, + }, + # param_settings + { + "x": [0, 1], + }, + # expected_combination_key + (None,), + ), + # two_parameter-scores_different + ( + # combinations + { + 
(None, None): {}, + (None, 0): {"y": True}, + (None, 1): {"y": False}, + (0, None): {"x": 0}, + (0, 0): {"x": 0, "y": True}, + (0, 1): {"x": 0, "y": False}, + (1, None): {"x": 1}, + (1, 0): {"x": 1, "y": True}, + (1, 1): {"x": 1, "y": False}, + (2, None): {"x": 2}, + (2, 0): {"x": 2, "y": True}, + (2, 1): {"x": 2, "y": False}, + }, + # scores + { + (None, None): 0.1, + (0, None): 0.2, + (1, None): 0.4, + (2, None): 0.1, + (1, 0): 0.6, + (1, 1): 0.7, + }, + # param_settings + { + "x": [0, 1, 2], + "y": [True, False], + }, + # expected_combination_key + (1, 1), + ), + # two_parameters-no_best_value_for_second_parameter + ( + # combinations + { + (None, None): {}, + (None, 0): {"y": True}, + (None, 1): {"y": False}, + (0, None): {"x": 0}, + (0, 0): {"x": 0, "y": True}, + (0, 1): {"x": 0, "y": False}, + (1, None): {"x": 1}, + (1, 0): {"x": 1, "y": True}, + (1, 1): {"x": 1, "y": False}, + (2, None): {"x": 2}, + (2, 0): {"x": 2, "y": True}, + (2, 1): {"x": 2, "y": False}, + }, + # scores + { + (None, None): 0.1, + (0, None): 0.5, + (1, None): 0.2, + (2, None): 0.3, + (0, 0): 0.3, + (0, 1): 0.4, + }, + # param_settings + { + "x": [0, 1, 2], + "y": [True, False], + }, + # expected_combination_key + (0, None), + ), + # three_parameters-no_best_value_for_second_parameter + ( + # combinations + { + (None, None, None): {}, + (None, None, 0): {"z": "a"}, + (None, None, 1): {"z": "b"}, + (None, 0, None): {"y": True}, + (None, 0, 0): {"y": True, "z": "a"}, + (None, 0, 1): {"y": True, "z": "b"}, + (None, 1, None): {"y": False}, + (None, 1, 0): {"y": False, "z": "a"}, + (None, 1, 1): {"y": False, "z": "b"}, + (0, None, None): {"x": 0}, + (0, None, 0): {"x": 0, "z": "a"}, + (0, None, 1): {"x": 0, "z": "b"}, + (0, 0, None): {"x": 0, "y": True}, + (0, 0, 0): {"x": 0, "y": True, "z": "a"}, + (0, 0, 1): {"x": 0, "y": True, "z": "b"}, + (0, 1, None): {"x": 0, "y": False}, + (0, 1, 0): {"x": 0, "y": False, "z": "a"}, + (0, 1, 1): {"x": 0, "y": False, "z": "b"}, + (1, None, None): {"x": 1}, + (1, None, 0): {"x": 1, "z": "a"}, + (1, None, 1): {"x": 1, "z": "b"}, + (1, 0, None): {"x": 1, "y": True}, + (1, 0, 0): {"x": 1, "y": True, "z": "a"}, + (1, 0, 1): {"x": 1, "y": True, "z": "b"}, + (1, 1, None): {"x": 1, "y": False}, + (1, 1, 0): {"x": 1, "y": False, "z": "a"}, + (1, 1, 1): {"x": 1, "y": False, "z": "b"}, + }, + # scores + { + (None, None, None): 0.5, + (0, None, None): 0.4, + (1, None, None): 0.3, + (None, 0, None): 0.6, + (None, 1, None): 0.7, + (None, 1, 0): 0.6, + (None, 1, 1): 0.5, + }, + # param_settings + { + "x": [0, 1], + "y": [2, 3], + "z": [4, 5], + }, + # expected_combination_key + (None, 1, None), + ), + ], + ids=[ + "one_parameter-scores_different", + "one_parameter-scores_equal", + "two_parameters-scores_different", + "two_parameters-no_best_value_for_second_parameter", + "three_parameters-no_best_value_for_first_and_third_parameter", + ], +) +def test_find_best_combination( + combinations: Dict[CombinationKey, Combination], + scores: Dict[CombinationKey, float], + param_settings: Dict[str, List[Any]], + expected_combination_key: CombinationKey, +): + combination_score_func = lambda x: scores[x] + actual_combination_key = find_best_combination(combinations, combination_score_func, param_settings) + assert expected_combination_key == actual_combination_key diff --git a/tests/common/pruning/dummy_types.py b/tests/common/pruning/dummy_types.py index de4e8c73274..844d5caf64a 100644 --- a/tests/common/pruning/dummy_types.py +++ b/tests/common/pruning/dummy_types.py @@ -1,3 +1,14 @@ +# Copyright (c) 
2023 Intel Corporation +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + from typing import List from nncf.common.graph.operator_metatypes import OperatorMetatype diff --git a/tests/common/pruning/test_pruning_operations.py b/tests/common/pruning/test_pruning_operations.py index 3d41ea27da1..c4ac594f5e7 100644 --- a/tests/common/pruning/test_pruning_operations.py +++ b/tests/common/pruning/test_pruning_operations.py @@ -1,3 +1,14 @@ +# Copyright (c) 2023 Intel Corporation +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + from functools import partial import numpy as np @@ -33,7 +44,7 @@ def test_stop_propagate_ops(pruning_op, metatype, accept_pruned_input): node = graph.add_nncf_node("conv_op", metatype.name, metatype) assert pruning_op.accept_pruned_input(node) == accept_pruned_input pruning_op.mask_propagation(node, graph, NPNNCFTensorProcessor) - assert node.data["output_mask"] is None + assert node.attributes["output_mask"] is None @pytest.mark.parametrize("dummy_op_class", [dummy_types.DummyIdentityMaskForward, dummy_types.DummyBatchNormPruningOp]) @@ -56,13 +67,13 @@ def test_identity_mask_propogation_prune_ops(dummy_op_class): # Check with and without masks for output_mask in [None, NPNNCFTensor(np.ones((10,)))]: conv_op = graph.get_node_by_id(conv_op.node_id) - conv_op.data["output_mask"] = output_mask + conv_op.attributes["output_mask"] = output_mask MaskPropagationAlgorithm( graph, dummy_types.DUMMY_PRUNING_OPERATOR_METATYPES, NPNNCFTensorProcessor ).mask_propagation() for identity_op in identity_ops: identity_op = graph.get_node_by_id(identity_op.node_id) - assert np.all(identity_op.data["output_mask"] == output_mask) + assert np.all(identity_op.attributes["output_mask"] == output_mask) @pytest.mark.parametrize("valid_masks", [None, True, False]) @@ -87,7 +98,7 @@ def test_elementwise_prune_ops(valid_masks): def set_masks(masks, ops): for conv_op, mask in zip(ops, masks): conv_op = graph.get_node_by_id(conv_op.node_id) - conv_op.data["output_mask"] = mask + conv_op.attributes["output_mask"] = mask if valid_masks is None or valid_masks: if valid_masks: @@ -96,7 +107,7 @@ def set_masks(masks, ops): graph, dummy_types.DUMMY_PRUNING_OPERATOR_METATYPES, NPNNCFTensorProcessor ).mask_propagation() elementwise_op = graph.get_node_by_id(elementwise_op.node_id) - assert np.all(elementwise_op.data["output_mask"] == masks[0]) + assert np.all(elementwise_op.attributes["output_mask"] == masks[0]) else: def check_wrong_masks(masks): @@ -137,7 +148,7 @@ def test_group_norm_pruning_ops(num_channels, num_groups, 
accept_pruned_input_re # Check with and without masks for output_mask in [None, NPNNCFTensor(np.ones((10,)))]: conv_op = graph.get_node_by_id(conv_op.node_id) - conv_op.data["output_mask"] = output_mask + conv_op.attributes["output_mask"] = output_mask MaskPropagationAlgorithm( graph, dummy_types.DUMMY_PRUNING_OPERATOR_METATYPES, NPNNCFTensorProcessor ).mask_propagation() @@ -145,7 +156,7 @@ def test_group_norm_pruning_ops(num_channels, num_groups, accept_pruned_input_re if not accept_pruned_input_ref: output_mask = None - assert np.all(identity_op.data["output_mask"] == output_mask) + assert np.all(identity_op.attributes["output_mask"] == output_mask) class DummyMaskProducerMetatype(dummy_types.DummyDefaultMetatype): @@ -181,6 +192,7 @@ def test_conv_pruning_ops(transpose, layer_attributes, ref_accept_pruned_input, "weight_requires_grad": True, "kernel_size": (2, 2), "stride": (1, 1), + "dilations": (1, 1), "padding_values": [0, 0], } graph = NNCFGraph() @@ -209,19 +221,19 @@ def test_conv_pruning_ops(transpose, layer_attributes, ref_accept_pruned_input, for output_mask in [None, ones_output_mask]: dummy_op_before = graph.get_node_by_id(dummy_op_before.node_id) conv_op_target = graph.get_node_by_id(conv_op_target.node_id) - dummy_op_before.data["output_mask"] = input_mask - conv_op_target.data["output_mask"] = output_mask + dummy_op_before.attributes["output_mask"] = input_mask + conv_op_target.attributes["output_mask"] = output_mask MaskPropagationAlgorithm( graph, dummy_types.DUMMY_PRUNING_OPERATOR_METATYPES, NPNNCFTensorProcessor ).mask_propagation() dummy_op_before = graph.get_node_by_id(dummy_op_before.node_id) conv_op_target = graph.get_node_by_id(conv_op_target.node_id) if conv_type == "usual_conv": - assert np.all(conv_op_target.data["output_mask"] == output_mask) + assert np.all(conv_op_target.attributes["output_mask"] == output_mask) elif conv_type in ["grouped_conv_no_depthwise", "multiply_grouped_conv"]: - assert conv_op_target.data["output_mask"] is None + assert conv_op_target.attributes["output_mask"] is None else: - assert np.all(conv_op_target.data["output_mask"] == input_mask) + assert np.all(conv_op_target.attributes["output_mask"] == input_mask) def test_linear_pruning_ops(): @@ -254,14 +266,14 @@ def test_linear_pruning_ops(): for output_mask in [None, ones_output_mask]: dummy_op_before = graph.get_node_by_id(dummy_op_before.node_id) linear_op_target = graph.get_node_by_id(linear_op_target.node_id) - dummy_op_before.data["output_mask"] = input_mask - linear_op_target.data["output_mask"] = output_mask + dummy_op_before.attributes["output_mask"] = input_mask + linear_op_target.attributes["output_mask"] = output_mask MaskPropagationAlgorithm( graph, dummy_types.DUMMY_PRUNING_OPERATOR_METATYPES, NPNNCFTensorProcessor ).mask_propagation() dummy_op_before = graph.get_node_by_id(dummy_op_before.node_id) linear_op_target = graph.get_node_by_id(linear_op_target.node_id) - assert np.all(linear_op_target.data["output_mask"] == output_mask) + assert np.all(linear_op_target.attributes["output_mask"] == output_mask) @pytest.mark.parametrize("empty_mask_left_branch", [False, True]) @@ -301,11 +313,11 @@ def test_convs_elementwise_source_before_concat( if not empty_mask_left_branch: for conv_op in [conv_op_0, conv_op_1]: conv_op = graph.get_node_by_id(conv_op.node_id) - conv_op.data["output_mask"] = NPNNCFTensor(np.ones(10)) + conv_op.attributes["output_mask"] = NPNNCFTensor(np.ones(10)) if not empty_mask_right_branch: conv_op = graph.get_node_by_id(conv_op_2.node_id) - 
conv_op.data["output_mask"] = NPNNCFTensor(np.ones(right_branch_output_channels)) + conv_op.attributes["output_mask"] = NPNNCFTensor(np.ones(right_branch_output_channels)) # Propagate masks MaskPropagationAlgorithm( @@ -314,10 +326,10 @@ def test_convs_elementwise_source_before_concat( # Check with masks concat_node = graph.get_node_by_id(concat_node.node_id) if empty_mask_left_branch and empty_mask_right_branch: - assert concat_node.data["output_mask"] is None + assert concat_node.attributes["output_mask"] is None else: reference_mask = np.ones((10 + right_branch_output_channels,)) - np.testing.assert_equal(concat_node.data["output_mask"].tensor, reference_mask) + np.testing.assert_equal(concat_node.attributes["output_mask"].tensor, reference_mask) def test_concat_output_tensor_device(): @@ -344,17 +356,17 @@ def test_concat_output_tensor_device(): ref_device = "some_test_device" for op in dummy_ops[:-1]: op = graph.get_node_by_id(op.node_id) - op.data["output_mask"] = None + op.attributes["output_mask"] = None last_op = graph.get_node_by_id(dummy_ops[-1].node_id) - last_op.data["output_mask"] = NPNNCFTensor(np.ones(10), dummy_device=ref_device) + last_op.attributes["output_mask"] = NPNNCFTensor(np.ones(10), dummy_device=ref_device) # Propagate masks MaskPropagationAlgorithm( graph, dummy_types.DUMMY_PRUNING_OPERATOR_METATYPES, NPNNCFTensorProcessor ).mask_propagation() # Check concat op has appropriate device concat_node = graph.get_node_by_id(concat_node.node_id) - assert concat_node.data["output_mask"].device == ref_device + assert concat_node.attributes["output_mask"].device == ref_device RESHAPE_TEST_CASES = [ @@ -431,16 +443,16 @@ def test_reshape_metatype_mask_prop(node_type, input_shape, output_shape, output # Get reference to graph node prev_node = graph.get_node_by_id(prev_node.node_id) reshape_node = graph.get_node_by_id(reshape_node.node_id) - prev_node.data["output_mask"] = NPNNCFTensor(output_mask_cur) if output_mask_cur is not None else None + prev_node.attributes["output_mask"] = NPNNCFTensor(output_mask_cur) if output_mask_cur is not None else None if isinstance(output_mask_ref_cur, str): with pytest.raises(AssertionError): METATYPES_MAP[node_type]["ops"].mask_propagation(reshape_node, graph, NPNNCFTensorProcessor) else: METATYPES_MAP[node_type]["ops"].mask_propagation(reshape_node, graph, NPNNCFTensorProcessor) if output_mask_ref_cur is None: - assert reshape_node.data["output_mask"] is None + assert reshape_node.attributes["output_mask"] is None else: - assert np.all(reshape_node.data["output_mask"].tensor == output_mask_ref_cur) + assert np.all(reshape_node.attributes["output_mask"].tensor == output_mask_ref_cur) @pytest.mark.parametrize("node_type", ["reshape", "flatten"]) @@ -468,9 +480,9 @@ def test_reshape_is_last_op(node_type): for output_mask in (None, NPNNCFTensor(np.ones((10,)))): prev_node = graph.get_node_by_id(prev_node.node_id) reshape_node = graph.get_node_by_id(reshape_node.node_id) - prev_node.data["output_mask"] = output_mask + prev_node.attributes["output_mask"] = output_mask METATYPES_MAP[node_type]["ops"].mask_propagation(reshape_node, graph, NPNNCFTensorProcessor) - assert reshape_node.data["output_mask"] is None + assert reshape_node.attributes["output_mask"] is None SPLIT_TEST_CASES = [ @@ -498,6 +510,7 @@ def test_split_metatype_mask_prop(empty_mask_left_branch, empty_mask_right_branc "weight_requires_grad": True, "kernel_size": (2, 2), "stride": (1, 1), + "dilations": (1, 1), "padding_values": [0, 0], } split_attributes = 
MultipleOutputLayerAttributes(chunks=2, axis=1) @@ -536,7 +549,7 @@ def test_split_metatype_mask_prop(empty_mask_left_branch, empty_mask_right_branc # Set masks conv_op_0_node = graph.get_node_by_id(conv_op_0.node_id) - conv_op_0_node.data["output_mask"] = NPNNCFTensor(np.ones(10)) + conv_op_0_node.attributes["output_mask"] = NPNNCFTensor(np.ones(10)) # Set in_channles for node in (conv_op_1, conv_op_2): @@ -550,7 +563,7 @@ def test_split_metatype_mask_prop(empty_mask_left_branch, empty_mask_right_branc # Check with masks split_node = graph.get_node_by_id(split_node.node_id) - split_output_masks = split_node.data["output_mask"] + split_output_masks = split_node.attributes["output_mask"] reference_mask = np.ones((5,)) for node in (conv_op_1, conv_op_2): conv_node = graph.get_node_by_id(conv_op_1.node_id) diff --git a/tests/common/pruning/test_symbolic_mask_processor.py b/tests/common/pruning/test_symbolic_mask_processor.py index d23988706d0..379c5b38799 100644 --- a/tests/common/pruning/test_symbolic_mask_processor.py +++ b/tests/common/pruning/test_symbolic_mask_processor.py @@ -1,3 +1,14 @@ +# Copyright (c) 2023 Intel Corporation +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + import pytest from nncf.common.pruning.symbolic_mask import AmbiguousSymbolicMask diff --git a/tests/common/pruning/test_utils.py b/tests/common/pruning/test_utils.py index ee71fb9ff35..6f04a932387 100644 --- a/tests/common/pruning/test_utils.py +++ b/tests/common/pruning/test_utils.py @@ -22,9 +22,13 @@ ) def test_is_batched_linear(batched, has_output_edges, res): graph = NNCFGraph() - linear = graph.add_nncf_node("linear", "linear", "linear", LinearLayerAttributes(True, 5, 5)) + linear = graph.add_nncf_node( + "linear", "linear", "linear", LinearLayerAttributes(True, in_features=5, out_features=5) + ) if has_output_edges: - last_linear = graph.add_nncf_node("last_linear", "linear", "linear", LinearLayerAttributes(True, 5, 5)) + last_linear = graph.add_nncf_node( + "last_linear", "linear", "linear", LinearLayerAttributes(True, in_features=5, out_features=5) + ) tensor_shape = [5, 5] if not batched else [5, 5, 5] graph.add_edge_between_nncf_nodes(linear.node_id, last_linear.node_id, tensor_shape, 0, 0, Dtype.FLOAT) assert is_batched_linear(linear, graph) == res diff --git a/tests/common/quantization/data_generators.py b/tests/common/quantization/data_generators.py index b3a1c97180f..42f612ecb62 100644 --- a/tests/common/quantization/data_generators.py +++ b/tests/common/quantization/data_generators.py @@ -172,7 +172,7 @@ def get_points_near_of_mid_points(input_data: np.array, mid_points: np.array, at :return np.array: Array of flags to indicate points is in the middle between quant points. 
""" num_elements = np.prod(input_data.shape) - is_near_mid_point = np.zeros(num_elements).astype(np.bool) + is_near_mid_point = np.zeros(num_elements).astype(bool) mid_point_ind = 0 for ind in range(num_elements): @@ -293,7 +293,7 @@ def generate_sweep_data( if is_weights: channel_count = input_size[0] inputs = np.empty(input_size) - is_near_mid_point = np.zeros(input_size).astype(np.bool) + is_near_mid_point = np.zeros(input_size).astype(bool) quant_lens = np.empty(input_size) for idx in range(0, channel_count): ch_input, ch_is_near_mid_point, ch_quant_lens = generate_sweep_for_one_channel( @@ -305,7 +305,7 @@ def generate_sweep_data( else: channel_count = input_size[1] inputs = np.empty(input_size) - is_near_mid_point = np.zeros(input_size).astype(np.bool) + is_near_mid_point = np.zeros(input_size).astype(bool) quant_lens = np.empty(input_size) for idx in range(0, channel_count): ch_input, ch_is_near_mid_point, ch_quant_lens = generate_sweep_for_one_channel( diff --git a/tests/common/quantization/metatypes.py b/tests/common/quantization/metatypes.py index 69de5d77405..0e9a5bcfe43 100644 --- a/tests/common/quantization/metatypes.py +++ b/tests/common/quantization/metatypes.py @@ -1,9 +1,19 @@ +# Copyright (c) 2023 Intel Corporation +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ from typing import List from nncf.common.graph import OperatorMetatype from nncf.common.graph.operator_metatypes import INPUT_NOOP_METATYPES from nncf.common.graph.operator_metatypes import OperatorMetatypeRegistry -from nncf.common.graph.operator_metatypes import UnknownMetatype from nncf.common.quantization.quantizer_propagation.structs import QuantizationTrait METATYPES_FOR_TEST = OperatorMetatypeRegistry("TEST_METATYPES") @@ -156,13 +166,6 @@ class ParameterTestMetatype(TestMetatype): LinearTestMetatype, AddTestMetatype, ], - QuantizationTrait.NON_QUANTIZABLE: [ - MaxPool2dTestMetatype, - DropoutTestMetatype, - MinTestMetatype, - SoftmaxTestMetatype, - UnknownMetatype, - ], QuantizationTrait.CONCAT: [CatTestMetatype], } diff --git a/tests/common/quantization/mock_graphs.py b/tests/common/quantization/mock_graphs.py index 32c3743ff02..7879066a35f 100644 --- a/tests/common/quantization/mock_graphs.py +++ b/tests/common/quantization/mock_graphs.py @@ -19,6 +19,7 @@ from nncf.common.graph import Dtype from nncf.common.graph import NNCFGraph from nncf.common.graph import NNCFNodeName +from nncf.common.graph.graph import NNCFNode from nncf.common.graph.layer_attributes import ConvolutionLayerAttributes from nncf.common.graph.operator_metatypes import UnknownMetatype from nncf.common.insertion_point_graph import InsertionPointGraph @@ -88,20 +89,20 @@ def get_nncf_graph_from_mock_nx_graph(nx_graph: nx.DiGraph, nncf_graph_cls=NNCFG for idx, curr_node_key in enumerate(lexicographical_topological_sort(nx_graph)): node = nx_graph.nodes[curr_node_key] - if NNCFGraph.NODE_NAME_ATTR in node: - node_name = node[NNCFGraph.NODE_NAME_ATTR] + if NNCFNode.NODE_NAME_ATTR in node: + node_name = node[NNCFNode.NODE_NAME_ATTR] else: node_name = "/" + curr_node_key + "_0" - if NNCFGraph.NODE_TYPE_ATTR in node: - node_type = node[NNCFGraph.NODE_TYPE_ATTR] + if NNCFNode.NODE_TYPE_ATTR in node: + node_type = node[NNCFNode.NODE_TYPE_ATTR] else: node_type = curr_node_key - layer_attributes = node.get(NNCFGraph.LAYER_ATTRIBUTES) + layer_attributes = node.get(NNCFNode.LAYER_ATTRIBUTES) - if NNCFGraph.METATYPE_ATTR in node: - metatype = node[NNCFGraph.METATYPE_ATTR] + if NNCFNode.METATYPE_ATTR in node: + metatype = node[NNCFNode.METATYPE_ATTR] else: metatype = METATYPES_FOR_TEST.get_operator_metatype_by_op_name(node_type) if metatype is not UnknownMetatype: @@ -178,10 +179,10 @@ def get_mock_nncf_node_attrs(op_name=None, scope_str=None, metatype=None, type_= if scope_str is None: scope_str = "" output = { - NNCFGraph.NODE_NAME_ATTR: f"{scope_str}/{op_name_to_set}_0", - NNCFGraph.NODE_TYPE_ATTR: type_, + NNCFNode.NODE_NAME_ATTR: f"{scope_str}/{op_name_to_set}_0", + NNCFNode.NODE_TYPE_ATTR: type_, } - for attr_name, attr_val in [(NNCFGraph.METATYPE_ATTR, metatype), (NNCFGraph.LAYER_ATTRIBUTES, layer_attributes)]: + for attr_name, attr_val in [(NNCFNode.METATYPE_ATTR, metatype), (NNCFNode.LAYER_ATTRIBUTES, layer_attributes)]: if attr_val is not None: output[attr_name] = attr_val @@ -194,7 +195,7 @@ def _add_nodes_with_layer_attrs( for node_key in node_keys: nx_graph.add_node(node_key, **get_mock_nncf_node_attrs(op_name=node_key)) if node_key in layer_attrs: - nx_graph.nodes[node_key][NNCFGraph.LAYER_ATTRIBUTES] = layer_attrs[node_key] + nx_graph.nodes[node_key][NNCFNode.LAYER_ATTRIBUTES] = layer_attrs[node_key] return nx_graph @@ -220,6 +221,7 @@ def get_mock_model_graph_with_mergeable_pattern() -> NNCFGraph: out_channels=1, kernel_size=(1, 1), stride=(1, 1), + dilations=(1, 1), groups=1, transpose=False, 
padding_values=[0, 0, 0, 0], @@ -264,6 +266,7 @@ def get_mock_model_graph_with_no_mergeable_pattern() -> NNCFGraph: out_channels=1, kernel_size=(1, 1), stride=(1, 1), + dilations=(1, 1), groups=1, transpose=False, padding_values=[0, 0, 0, 0], @@ -307,6 +310,7 @@ def get_mock_model_graph_with_broken_output_edge_pattern() -> NNCFGraph: out_channels=1, kernel_size=(1, 1), stride=(1, 1), + dilations=(1, 1), groups=1, transpose=False, padding_values=[0, 0, 0, 0], @@ -367,10 +371,10 @@ def get_randomly_connected_model_graph(op_name_keys: Set[str]) -> nx.DiGraph: shuffled_op_names = random.sample(op_name_keys, len(op_name_keys)) for idx, (_, node) in enumerate(mock_graph.nodes.items()): op_name = shuffled_op_names[idx] - node[NNCFGraph.NODE_NAME_ATTR] = get_node_name(shuffled_op_names[idx]) - node[NNCFGraph.NODE_TYPE_ATTR] = op_name + node[NNCFNode.NODE_NAME_ATTR] = get_node_name(shuffled_op_names[idx]) + node[NNCFNode.NODE_TYPE_ATTR] = op_name if op_name in OP_NAMES_IN_TEST_WITH_MODULE_ATTRIBUTES: - node[NNCFGraph.LAYER_ATTRIBUTES] = MagicMock() + node[NNCFNode.LAYER_ATTRIBUTES] = MagicMock() mark_input_ports_lexicographically_based_on_input_node_key(mock_graph) return mock_graph @@ -382,12 +386,12 @@ def get_sequentially_connected_model_graph(op_name_keys: List[str]) -> nx.DiGrap actual_keys = [] for node_key in op_name_keys: attrs = { - NNCFGraph.NODE_NAME_ATTR: get_node_name(node_key, call_order=node_key_appearances[node_key]), - NNCFGraph.NODE_TYPE_ATTR: node_key, + NNCFNode.NODE_NAME_ATTR: get_node_name(node_key, call_order=node_key_appearances[node_key]), + NNCFNode.NODE_TYPE_ATTR: node_key, } if node_key in OP_NAMES_IN_TEST_WITH_MODULE_ATTRIBUTES: - attrs[NNCFGraph.LAYER_ATTRIBUTES] = MagicMock() + attrs[NNCFNode.LAYER_ATTRIBUTES] = MagicMock() actual_key = node_key + "_{}".format(node_key_appearances[node_key]) graph.add_node(actual_key, **attrs) node_key_appearances[node_key] += 1 diff --git a/tests/common/quantization/test_filter_constant_nodes.py b/tests/common/quantization/test_filter_constant_nodes.py index a53a44028f6..1cccfcffd08 100644 --- a/tests/common/quantization/test_filter_constant_nodes.py +++ b/tests/common/quantization/test_filter_constant_nodes.py @@ -14,7 +14,7 @@ import pytest -from nncf.common.graph.graph import NNCFGraph +from nncf.common.graph.graph import NNCFNode from nncf.common.graph.operator_metatypes import InputNoopMetatype from nncf.common.graph.operator_metatypes import OutputNoopMetatype from nncf.common.insertion_point_graph import ConstantNodesFilter @@ -227,7 +227,7 @@ def test_constant_nodes_filter(model_to_test): quantizable_layer_nodes = [ QuantizableWeightedLayerNode(weight_node, [QuantizerConfig()]) for weight_node in weight_nodes ] - quantizable_layer_node_keys = [node.node.data[NNCFGraph.KEY_NODE_ATTR] for node in quantizable_layer_nodes] + quantizable_layer_node_keys = [node.node.node_key for node in quantizable_layer_nodes] ip_graph = get_ip_graph_for_test(nncf_graph, quantizable_layer_nodes) filtered_ip_graph = ConstantNodesFilter.filter(ip_graph, quantizable_layer_node_keys) diff --git a/tests/common/quantization/test_ignore_post_processing.py b/tests/common/quantization/test_ignore_post_processing.py index 42506a05c2e..a4e92944d68 100644 --- a/tests/common/quantization/test_ignore_post_processing.py +++ b/tests/common/quantization/test_ignore_post_processing.py @@ -86,7 +86,17 @@ def __init__(self): ] original_mock_graph = create_mock_graph(nodes, node_edges) self.nncf_graph = get_nncf_graph_from_mock_nx_graph(original_mock_graph) - 
self.reference_ignored_scopes = ["Identity_2", "Identity_1", "Identity_4", "Identity_5"] + self.reference_ignored_scopes = [ + "Identity_2", + "Identity_1", + "Identity_4", + "Identity_5", + "TopK_1", + "NMS_1", + "NMS_2", + "Identity_3", + "Input_2", + ] @ALL_SYNTHETIC_NNCF_GRAPH.register() @@ -129,7 +139,7 @@ def __init__(self): ] original_mock_graph = create_mock_graph(nodes, node_edges) self.nncf_graph = get_nncf_graph_from_mock_nx_graph(original_mock_graph) - self.reference_ignored_scopes = ["Identity_3", "Identity_2", "Identity_1"] + self.reference_ignored_scopes = ["Identity_3", "Identity_2", "Identity_1", "TopK_1", "TopK_2"] @ALL_SYNTHETIC_NNCF_GRAPH.register() @@ -176,7 +186,7 @@ def __init__(self): ] original_mock_graph = create_mock_graph(nodes, node_edges) self.nncf_graph = get_nncf_graph_from_mock_nx_graph(original_mock_graph) - self.reference_ignored_scopes = ["Identity_3"] + self.reference_ignored_scopes = ["Identity_3", "Identity_2", "Identity_1", "NMS_1", "TopK_1"] @ALL_SYNTHETIC_NNCF_GRAPH.register() @@ -226,16 +236,24 @@ def __init__(self): ] original_mock_graph = create_mock_graph(nodes, node_edges) self.nncf_graph = get_nncf_graph_from_mock_nx_graph(original_mock_graph) - self.reference_ignored_scopes = ["Identity_3", "Identity_2", "Identity_5", "Identity_4", "Identity_1"] + self.reference_ignored_scopes = [ + "Identity_3", + "Identity_2", + "Identity_5", + "Identity_4", + "Identity_1", + "NMS_1", + "TopK_1", + ] @ALL_SYNTHETIC_NNCF_GRAPH.register() class ModelToTest5: # Input_1 # | - # Conv_1 - # | - # Identity_1 + # NMS_1 + # \ + # Conv_1 # | \ # Identity_2 Identity_3 # | / | @@ -252,7 +270,7 @@ def __init__(self): nodes = [ NodeWithType("Input_1", InputNoopMetatype), NodeWithType("Conv_1", Conv2dTestMetatype), - NodeWithType("Identity_1", IdentityTestMetatype), + NodeWithType("NMS_1", IdentityTestMetatype), NodeWithType("Identity_2", IdentityTestMetatype), NodeWithType("Identity_3", IdentityTestMetatype), NodeWithType("Identity_4", IdentityTestMetatype), @@ -262,10 +280,10 @@ def __init__(self): NodeWithType("Identity_7", IdentityTestMetatype), ] node_edges = [ - ("Input_1", "Conv_1"), - ("Conv_1", "Identity_1"), - ("Identity_1", "Identity_2"), - ("Identity_1", "Identity_3"), + ("Input_1", "NMS_1"), + ("NMS_1", "Conv_1"), + ("Conv_1", "Identity_2"), + ("Conv_1", "Identity_3"), ("Identity_2", "Identity_4"), ("Identity_3", "Identity_5"), ("Identity_3", "Identity_4"), @@ -279,6 +297,177 @@ def __init__(self): self.reference_ignored_scopes = [] +@ALL_SYNTHETIC_NNCF_GRAPH.register() +class ModelToTest6: + # Input_1 + # | + # Conv_1 + # | + # Identity_2 + # | \ + # NMS_1 Conv_2 + # | | + # Identity_3 Output_2 + # | + # Output_1 + + def __init__(self): + nodes = [ + NodeWithType("Input_1", InputNoopMetatype), + NodeWithType("Conv_1", Conv2dTestMetatype), + NodeWithType("Identity_2", IdentityTestMetatype), + NodeWithType("NMS_1", NMSTestMetatype), + NodeWithType("Identity_3", IdentityTestMetatype), + NodeWithType("Output_1", OutputNoopMetatype), + NodeWithType("Conv_2", Conv2dTestMetatype), + NodeWithType("Output_2", OutputNoopMetatype), + ] + node_edges = [ + ("Input_1", "Conv_1"), + ("Conv_1", "Identity_2"), + ("Identity_2", "NMS_1"), + ("Identity_2", "Conv_2"), + ("NMS_1", "Identity_3"), + ("Identity_3", "Output_1"), + ("Conv_2", "Output_2"), + ] + original_mock_graph = create_mock_graph(nodes, node_edges) + self.nncf_graph = get_nncf_graph_from_mock_nx_graph(original_mock_graph) + self.reference_ignored_scopes = ["Identity_3", "Identity_2", "NMS_1"] + + 
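The reference_ignored_scopes values in this file now list the post-processing operations themselves (the NMS and TopK nodes) and, for some graphs, the nodes all the way up to the model input, rather than only the surrounding Identity nodes. A small worked check against ModelToTest6 defined just above (an illustrative sketch, assuming it is executed inside this test module where the class is defined):

    model = ModelToTest6()
    # Judging by the reference list, the chain Identity_2 -> NMS_1 -> Identity_3
    # that leads to Output_1 is treated as post-processing and ignored, while the
    # Conv_2 -> Output_2 branch stays quantizable.
    assert sorted(model.reference_ignored_scopes) == ["Identity_2", "Identity_3", "NMS_1"]
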
+@ALL_SYNTHETIC_NNCF_GRAPH.register() +class ModelToTest7: + # Input_1 + # | + # Conv_1 + # | + # Identity_1 + # | \ + # TopK_1 Identity_4 + # | / | + # Identity_2 Identity_5 + # \ / + # \ / + # Identity_3 + # | + # Output_1 + + def __init__(self): + nodes = [ + NodeWithType("Input_1", InputNoopMetatype), + NodeWithType("Conv_1", Conv2dTestMetatype), + NodeWithType("Identity_1", IdentityTestMetatype), + NodeWithType("TopK_1", TopKTestMetatype), + NodeWithType("Identity_2", IdentityTestMetatype), + NodeWithType("Identity_3", IdentityTestMetatype), + NodeWithType("Output_1", OutputNoopMetatype), + NodeWithType("Identity_4", IdentityTestMetatype), + NodeWithType("Identity_5", IdentityTestMetatype), + ] + node_edges = [ + ("Input_1", "Conv_1"), + ("Conv_1", "Identity_1"), + ("Identity_1", "TopK_1"), + ("Identity_1", "Identity_4"), + ("TopK_1", "Identity_2"), + ("Identity_2", "Identity_3"), + ("Identity_3", "Output_1"), + ("Identity_4", "Identity_2"), + ("Identity_4", "Identity_5"), + ("Identity_5", "Identity_3"), + ] + original_mock_graph = create_mock_graph(nodes, node_edges) + self.nncf_graph = get_nncf_graph_from_mock_nx_graph(original_mock_graph) + self.reference_ignored_scopes = ["Identity_3", "Identity_2", "Identity_1", "TopK_1"] + + +@ALL_SYNTHETIC_NNCF_GRAPH.register() +class ModelToTest8: + # Input_1 Input_2 + # | | + # Conv_1 Conv_2 + # | | + # Identity_1 Identity_3 + # | / | + # | / FC_1 + # | / | + # NMS_1 Identity_4 + # | | + # Identity_2 Identity_5 + # | | + # TopK_1 Output_2 + # | + # Output_1 + # + def __init__(self): + nodes = [ + NodeWithType("Input_1", InputNoopMetatype), + NodeWithType("Conv_1", Conv2dTestMetatype), + NodeWithType("Identity_1", IdentityTestMetatype), + NodeWithType("NMS_1", NMSTestMetatype), + NodeWithType("Identity_2", IdentityTestMetatype), + NodeWithType("TopK_1", TopKTestMetatype), + NodeWithType("Output_1", OutputNoopMetatype), + NodeWithType("Input_2", InputNoopMetatype), + NodeWithType("Conv_2", Conv2dTestMetatype), + NodeWithType("Identity_3", IdentityTestMetatype), + NodeWithType("FC_1", LinearTestMetatype), + NodeWithType("Identity_4", IdentityTestMetatype), + NodeWithType("Identity_5", IdentityTestMetatype), + NodeWithType("Output_2", OutputNoopMetatype), + ] + node_edges = [ + ("Input_1", "Conv_1"), + ("Conv_1", "Identity_1"), + ("Identity_1", "NMS_1"), + ("NMS_1", "Identity_2"), + ("Identity_2", "TopK_1"), + ("TopK_1", "Output_1"), + ("Input_2", "Conv_2"), + ("Conv_2", "Identity_3"), + ("Identity_3", "NMS_1"), + ("Identity_3", "FC_1"), + ("FC_1", "Identity_4"), + ("Identity_4", "Identity_5"), + ("Identity_5", "Output_2"), + ] + original_mock_graph = create_mock_graph(nodes, node_edges) + self.nncf_graph = get_nncf_graph_from_mock_nx_graph(original_mock_graph) + self.reference_ignored_scopes = ["Identity_1", "Identity_2", "Identity_3", "TopK_1", "NMS_1"] + + +@ALL_SYNTHETIC_NNCF_GRAPH.register() +class ModelToTest9: + # Input_1 + # | + # Identity_1 + # | + # TopK_1 + # | + # Identity_2 + # | + # Output_1 + + def __init__(self): + nodes = [ + NodeWithType("Input_1", InputNoopMetatype), + NodeWithType("Identity_1", IdentityTestMetatype), + NodeWithType("TopK_1", TopKTestMetatype), + NodeWithType("Identity_2", IdentityTestMetatype), + NodeWithType("Output_1", OutputNoopMetatype), + ] + node_edges = [ + ("Input_1", "Identity_1"), + ("Identity_1", "TopK_1"), + ("TopK_1", "Identity_2"), + ("Identity_2", "Output_1"), + ] + original_mock_graph = create_mock_graph(nodes, node_edges) + self.nncf_graph = 
get_nncf_graph_from_mock_nx_graph(original_mock_graph) + self.reference_ignored_scopes = ["Input_1", "Identity_1", "TopK_1", "Identity_2"] + + @pytest.mark.parametrize("model_to_test", ALL_SYNTHETIC_NNCF_GRAPH.values()) def test_node_locator_finds_postprocessing_nodes(model_to_test): model_to_test = model_to_test() diff --git a/tests/common/quantization/test_quantizer_propagation_graph.py b/tests/common/quantization/test_quantizer_propagation_graph.py index 0342c6a3a32..83ff4321572 100644 --- a/tests/common/quantization/test_quantizer_propagation_graph.py +++ b/tests/common/quantization/test_quantizer_propagation_graph.py @@ -21,6 +21,7 @@ from nncf.common.graph import Dtype from nncf.common.graph import NNCFGraph from nncf.common.graph import NNCFNodeName +from nncf.common.graph.graph import NNCFNode from nncf.common.insertion_point_graph import InsertionPointGraph from nncf.common.insertion_point_graph import PostHookInsertionPoint from nncf.common.insertion_point_graph import PreHookInsertionPoint @@ -35,7 +36,9 @@ from nncf.common.quantization.structs import QuantizationMode from nncf.common.quantization.structs import QuantizerConfig from nncf.common.quantization.structs import UnifiedScaleType +from tests.common.quantization.metatypes import WEIGHT_LAYER_METATYPES from tests.common.quantization.metatypes import CatTestMetatype +from tests.common.quantization.metatypes import Conv2dTestMetatype from tests.common.quantization.mock_graphs import get_ip_graph_for_test from tests.common.quantization.mock_graphs import get_mock_nncf_node_attrs from tests.common.quantization.mock_graphs import get_nncf_graph_from_mock_nx_graph @@ -67,6 +70,7 @@ def mock_qp_graph(): qpsg = QPSG(ip_graph) qpsg.nodes["5 /F_0"][QPSG.OPERATOR_METATYPE_NODE_ATTR] = CatTestMetatype + qpsg.nodes["6 /G_0"][QPSG.OPERATOR_METATYPE_NODE_ATTR] = Conv2dTestMetatype qpsg.skip_check = False yield qpsg if not qpsg.skip_check: @@ -285,7 +289,7 @@ def test_get_paths_to_immediately_dominating_insertion_points_grouped_by_unified ref_groups_vs_paths = start_ip_node_and_dom_node_grouped_paths.ref_groups_vs_paths test_groups_vs_paths = ( mock_qp_graph.get_paths_to_immediately_dominating_insertion_points_grouped_by_unified_scales( - start_node_key, {CatTestMetatype} + start_node_key, {CatTestMetatype}, {CatTestMetatype: WEIGHT_LAYER_METATYPES} ) ) @@ -1227,7 +1231,7 @@ def get_model_graph_with_split_node() -> QPSG: mock_node_attrs = get_mock_nncf_node_attrs(op_name=node_key) if node_key == "B": # Split have no POST_HOOK - mock_node_attrs[NNCFGraph.NODE_TYPE_ATTR] = "split" + mock_node_attrs[NNCFNode.NODE_TYPE_ATTR] = "split" mock_graph.add_node(node_key, **mock_node_attrs) mock_graph.add_edges_from([("A", "B"), ("B", "C"), ("B", "D"), ("C", "E"), ("D", "E")]) @@ -1702,3 +1706,137 @@ def test_create_quantizer_setup_with_output_quant_as_weights_ops( ) ref_quantizer_setup = output_quant_as_weights_test_struct.ref_quantizer_setup() assert test_quantizer_setup.equivalent_to(ref_quantizer_setup) + + +@pytest.mark.parametrize( + "weight_configs, activation_configs, reference_configs", + [ + ( + # Weights #1 + [ + QuantizerConfig( + num_bits=8, mode=QuantizationMode.SYMMETRIC, signedness_to_force=True, per_channel=False + ), + QuantizerConfig( + num_bits=8, mode=QuantizationMode.SYMMETRIC, signedness_to_force=True, per_channel=True + ), + ], + # Activations #1 + [ + QuantizerConfig(num_bits=8, mode=QuantizationMode.SYMMETRIC, per_channel=False), + ], + # Reference #1 + [ + QuantizerConfig( + num_bits=8, mode=QuantizationMode.SYMMETRIC, 
signedness_to_force=True, per_channel=False + ), + ], + ), + ( + # Weights #2 + [ + QuantizerConfig( + num_bits=8, mode=QuantizationMode.ASYMMETRIC, signedness_to_force=True, per_channel=False + ), + QuantizerConfig( + num_bits=8, mode=QuantizationMode.SYMMETRIC, signedness_to_force=True, per_channel=True + ), + ], + # Activations #2 + [ + QuantizerConfig(num_bits=8, mode=QuantizationMode.ASYMMETRIC, per_channel=False), + QuantizerConfig( + num_bits=8, mode=QuantizationMode.SYMMETRIC, signedness_to_force=True, per_channel=False + ), + ], + # Reference #2 + [ + QuantizerConfig( + num_bits=8, mode=QuantizationMode.ASYMMETRIC, signedness_to_force=True, per_channel=False + ), + ], + ), + ( + # Weights #3 + [ + QuantizerConfig( + num_bits=8, mode=QuantizationMode.SYMMETRIC, signedness_to_force=True, per_channel=False + ), + QuantizerConfig( + num_bits=8, mode=QuantizationMode.SYMMETRIC, signedness_to_force=True, per_channel=True + ), + ], + # Activations #3 + [ + QuantizerConfig( + num_bits=8, mode=QuantizationMode.ASYMMETRIC, signedness_to_force=True, per_channel=False + ), + ], + # Reference #3 + [], + ), + ( + # Weights #4 + [ + QuantizerConfig( + num_bits=8, mode=QuantizationMode.SYMMETRIC, signedness_to_force=True, per_channel=False + ), + QuantizerConfig( + num_bits=8, mode=QuantizationMode.SYMMETRIC, signedness_to_force=True, per_channel=True + ), + ], + # Activations #4 + [ + QuantizerConfig( + num_bits=8, mode=QuantizationMode.SYMMETRIC, signedness_to_force=False, per_channel=False + ), + ], + # Reference #4 + [], + ), + ( + # Weights #5 + [ + QuantizerConfig( + num_bits=8, mode=QuantizationMode.ASYMMETRIC, signedness_to_force=False, per_channel=False + ), + QuantizerConfig( + num_bits=8, mode=QuantizationMode.ASYMMETRIC, signedness_to_force=True, per_channel=True + ), + ], + # Activations #5 + [ + QuantizerConfig( + num_bits=8, mode=QuantizationMode.ASYMMETRIC, signedness_to_force=None, per_channel=False + ), + QuantizerConfig( + num_bits=8, mode=QuantizationMode.SYMMETRIC, signedness_to_force=None, per_channel=False + ), + ], + # Reference #5 + [ + QuantizerConfig( + num_bits=8, mode=QuantizationMode.ASYMMETRIC, signedness_to_force=False, per_channel=False + ), + ], + ), + ( + # Weights #6 + [ + QuantizerConfig(num_bits=8, mode=QuantizationMode.SYMMETRIC, per_channel=False), + ], + # Activations #6 + [ + QuantizerConfig(num_bits=8, mode=QuantizationMode.SYMMETRIC, per_channel=False), + ], + # Reference #6 + [ + QuantizerConfig(num_bits=8, mode=QuantizationMode.SYMMETRIC, per_channel=False), + ], + ), + ], +) +def test_get_weight_and_activation_qconfig_list_intersection(weight_configs, activation_configs, reference_configs): + # pylint: disable=protected-access + resulted_configs = QPSG._get_weight_and_activation_qconfig_list_intersection(weight_configs, activation_configs) + assert resulted_configs == reference_configs diff --git a/tests/common/quantization/test_quantizer_propagation_solver.py b/tests/common/quantization/test_quantizer_propagation_solver.py index 4cdd2a09352..dc065a2e7e2 100644 --- a/tests/common/quantization/test_quantizer_propagation_solver.py +++ b/tests/common/quantization/test_quantizer_propagation_solver.py @@ -1,4 +1,3 @@ -# pylint:disable=too-many-lines # Copyright (c) 2023 Intel Corporation # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -9,6 +8,8 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
# See the License for the specific language governing permissions and # limitations under the License. + +# pylint:disable=too-many-lines from collections import Counter from collections import namedtuple from itertools import permutations @@ -21,12 +22,16 @@ from nncf.common.graph import NNCFGraph from nncf.common.graph import NNCFNodeName from nncf.common.graph.definitions import MODEL_INPUT_OP_NAME +from nncf.common.graph.graph import NNCFNode +from nncf.common.graph.operator_metatypes import OutputNoopMetatype +from nncf.common.graph.operator_metatypes import UnknownMetatype from nncf.common.graph.transformations.commands import TargetType from nncf.common.insertion_point_graph import InsertionPointGraph from nncf.common.quantization.quantizer_propagation.graph import QuantizerPropagationStateGraph as QPSG from nncf.common.quantization.quantizer_propagation.solver import PropagationStrategy from nncf.common.quantization.quantizer_propagation.solver import QuantizerPropagationSolver from nncf.common.quantization.quantizer_propagation.solver import TransitionStatus +from nncf.common.quantization.quantizer_propagation.structs import IgnoreReason from nncf.common.quantization.quantizer_propagation.structs import PropagatingQuantizer from nncf.common.quantization.quantizer_propagation.structs import PropagationPath from nncf.common.quantization.quantizer_propagation.structs import QuantizationTrait @@ -67,18 +72,18 @@ class TwoFcAfterDropout: def get_graph(): graph = nx.DiGraph() dropout_node_attrs = { - NNCFGraph.NODE_NAME_ATTR: TwoFcAfterDropout.DROPOUT_NODE_NAME, - NNCFGraph.NODE_TYPE_ATTR: TwoFcAfterDropout.DROPOUT_OP_TYPE_STR, + NNCFNode.NODE_NAME_ATTR: TwoFcAfterDropout.DROPOUT_NODE_NAME, + NNCFNode.NODE_TYPE_ATTR: TwoFcAfterDropout.DROPOUT_OP_TYPE_STR, } fc_1_node_attrs = { - NNCFGraph.NODE_NAME_ATTR: TwoFcAfterDropout.FC_1_NODE_NAME, - NNCFGraph.NODE_TYPE_ATTR: TwoFcAfterDropout.FC_OP_TYPE_STR, + NNCFNode.NODE_NAME_ATTR: TwoFcAfterDropout.FC_1_NODE_NAME, + NNCFNode.NODE_TYPE_ATTR: TwoFcAfterDropout.FC_OP_TYPE_STR, } fc_2_node_attrs = { - NNCFGraph.NODE_NAME_ATTR: TwoFcAfterDropout.FC_2_NODE_NAME, - NNCFGraph.NODE_TYPE_ATTR: TwoFcAfterDropout.FC_OP_TYPE_STR, + NNCFNode.NODE_NAME_ATTR: TwoFcAfterDropout.FC_2_NODE_NAME, + NNCFNode.NODE_TYPE_ATTR: TwoFcAfterDropout.FC_OP_TYPE_STR, } graph.add_node("dropout", **dropout_node_attrs) @@ -94,21 +99,23 @@ def get_graph(): def get_branching_model_graph() -> NNCFGraph: mock_graph = nx.DiGraph() - # (0 /O) <-- treating this as an auxiliary "input" node - # | - # (1 /A) - # | - # /-(2 /B)---------\ - # / | | - # (3 /C) (4 /D) (5 /E) - # | | \ - # (6 /F) (7 /G) (8 /H) - # \ / - # (9 /I) - # | - # (10 /J) - - node_keys = ["O", "A", "B", "C", "D", "E", "F", "G", "H", "I", "J"] + # (0 /O) <-- treating this as an auxiliary "input" node + # | + # (1 /A) + # | + # /-(2 /B)---------\ + # / | | + # (3 /C) (4 /D) (5 /E) + # | / \ + # (6 /F) (7 /G) (8 /H) + # / | \ / + # (11 /K) (12 /L) (9 /I) + # | | | + # (13 /M) (14 /N) (10 /J) + # | | + # (15 /P) (16 /Q) + + node_keys = ["O", "A", "B", "C", "D", "E", "F", "G", "H", "I", "J", "K", "L", "M", "N", "P", "Q"] for node_key in node_keys: mock_node_attrs = get_mock_nncf_node_attrs(op_name=node_key) mock_graph.add_node(node_key, **mock_node_attrs) @@ -126,6 +133,12 @@ def get_branching_model_graph() -> NNCFGraph: ("G", "I"), ("H", "I"), ("I", "J"), + ("F", "K"), + ("F", "L"), + ("K", "M"), + ("L", "N"), + ("M", "P"), + ("N", "Q"), ] ) @@ -819,13 +832,19 @@ def 
test_merged_qconfig_list_is_independent_of_branch_qconfig_list_order( ), ) + class InitNodeTestStruct: + def __init__(self, quantization_trait, config, op_meta=UnknownMetatype): + self.quantization_trait = quantization_trait + self.config = config + self.op_meta = op_meta + BRANCH_TRANSITION_TEST_CASES = [ # Downward branches are quantization-agnostic BranchTransitionTestStruct( init_node_to_trait_and_configs_dict= { - '4 /D_0': (QuantizationTrait.INPUTS_QUANTIZABLE, - [QuantizerConfig()]), + '4 /D_0': InitNodeTestStruct(QuantizationTrait.INPUTS_QUANTIZABLE, + QuantizerConfig()), }, starting_primary_quantizer_ip_node=InsertionPointGraph.get_pre_hook_node_key('4 /D_0'), target_branching_node_for_primary_quantizer=InsertionPointGraph.get_post_hook_node_key('2 /B_0'), @@ -836,12 +855,12 @@ def test_merged_qconfig_list_is_independent_of_branch_qconfig_list_order( BranchTransitionTestStruct( init_node_to_trait_and_configs_dict= { - '6 /F_0': (QuantizationTrait.INPUTS_QUANTIZABLE, - [QuantizerConfig()]), - '4 /D_0': (QuantizationTrait.INPUTS_QUANTIZABLE, - [QuantizerConfig()]), - '5 /E_0': (QuantizationTrait.INPUTS_QUANTIZABLE, - [QuantizerConfig()]), + '6 /F_0': InitNodeTestStruct(QuantizationTrait.INPUTS_QUANTIZABLE, + [QuantizerConfig()]), + '4 /D_0': InitNodeTestStruct(QuantizationTrait.INPUTS_QUANTIZABLE, + [QuantizerConfig()]), + '5 /E_0': InitNodeTestStruct(QuantizationTrait.INPUTS_QUANTIZABLE, + [QuantizerConfig()]), }, starting_primary_quantizer_ip_node=InsertionPointGraph.get_pre_hook_node_key('6 /F_0'), target_branching_node_for_primary_quantizer=InsertionPointGraph.get_post_hook_node_key('2 /B_0'), @@ -851,11 +870,11 @@ def test_merged_qconfig_list_is_independent_of_branch_qconfig_list_order( BranchTransitionTestStruct( init_node_to_trait_and_configs_dict= { - '6 /F_0': (QuantizationTrait.INPUTS_QUANTIZABLE, + '6 /F_0': InitNodeTestStruct(QuantizationTrait.INPUTS_QUANTIZABLE, [QuantizerConfig(num_bits=7)]), - '4 /D_0': (QuantizationTrait.INPUTS_QUANTIZABLE, + '4 /D_0': InitNodeTestStruct(QuantizationTrait.INPUTS_QUANTIZABLE, [QuantizerConfig(num_bits=8), QuantizerConfig(num_bits=6)]), - '5 /E_0': (QuantizationTrait.INPUTS_QUANTIZABLE, + '5 /E_0': InitNodeTestStruct(QuantizationTrait.INPUTS_QUANTIZABLE, [QuantizerConfig(num_bits=4)]), }, starting_primary_quantizer_ip_node=InsertionPointGraph.get_pre_hook_node_key('5 /E_0'), @@ -866,11 +885,11 @@ def test_merged_qconfig_list_is_independent_of_branch_qconfig_list_order( BranchTransitionTestStruct( init_node_to_trait_and_configs_dict= { - '6 /F_0': (QuantizationTrait.INPUTS_QUANTIZABLE, + '6 /F_0': InitNodeTestStruct(QuantizationTrait.INPUTS_QUANTIZABLE, [QuantizerConfig(num_bits=7, mode=QuantizationMode.ASYMMETRIC)]), - '4 /D_0': (QuantizationTrait.INPUTS_QUANTIZABLE, + '4 /D_0': InitNodeTestStruct(QuantizationTrait.INPUTS_QUANTIZABLE, [QuantizerConfig(num_bits=8), QuantizerConfig(num_bits=6, mode=QuantizationMode.ASYMMETRIC)]), - '5 /E_0': (QuantizationTrait.INPUTS_QUANTIZABLE, + '5 /E_0': InitNodeTestStruct(QuantizationTrait.INPUTS_QUANTIZABLE, [QuantizerConfig(num_bits=4), QuantizerConfig(num_bits=6)]), }, starting_primary_quantizer_ip_node=InsertionPointGraph.get_pre_hook_node_key('5 /E_0'), @@ -881,22 +900,21 @@ def test_merged_qconfig_list_is_independent_of_branch_qconfig_list_order( BranchTransitionTestStruct( init_node_to_trait_and_configs_dict= { - '4 /D_0': (QuantizationTrait.INPUTS_QUANTIZABLE, + '4 /D_0': InitNodeTestStruct(QuantizationTrait.INPUTS_QUANTIZABLE, [QuantizerConfig(num_bits=4), QuantizerConfig(num_bits=6)]), - 
'3 /C_0': (QuantizationTrait.INPUTS_QUANTIZABLE, + '3 /C_0': InitNodeTestStruct(QuantizationTrait.INPUTS_QUANTIZABLE, [QuantizerConfig(num_bits=7, mode=QuantizationMode.ASYMMETRIC)]), - '5 /E_0': (QuantizationTrait.INPUTS_QUANTIZABLE, + '5 /E_0': InitNodeTestStruct(QuantizationTrait.INPUTS_QUANTIZABLE, [QuantizerConfig(num_bits=8), QuantizerConfig(num_bits=6, mode=QuantizationMode.ASYMMETRIC)]), - '6 /F_0': (QuantizationTrait.INPUTS_QUANTIZABLE, + '6 /F_0': InitNodeTestStruct(QuantizationTrait.INPUTS_QUANTIZABLE, [QuantizerConfig(num_bits=4)]), - '7 /G_0': (QuantizationTrait.INPUTS_QUANTIZABLE, + '7 /G_0': InitNodeTestStruct(QuantizationTrait.INPUTS_QUANTIZABLE, [QuantizerConfig(num_bits=4)]), - '8 /H_0': (QuantizationTrait.INPUTS_QUANTIZABLE, + '8 /H_0': InitNodeTestStruct(QuantizationTrait.INPUTS_QUANTIZABLE, [QuantizerConfig(num_bits=4)]), - }, starting_primary_quantizer_ip_node=InsertionPointGraph.get_pre_hook_node_key('4 /D_0'), target_branching_node_for_primary_quantizer=InsertionPointGraph.get_post_hook_node_key('2 /B_0'), @@ -906,11 +924,11 @@ def test_merged_qconfig_list_is_independent_of_branch_qconfig_list_order( BranchTransitionTestStruct( init_node_to_trait_and_configs_dict= { - '6 /F_0': (QuantizationTrait.INPUTS_QUANTIZABLE, + '6 /F_0': InitNodeTestStruct(QuantizationTrait.INPUTS_QUANTIZABLE, [QuantizerConfig(num_bits=5)]), - '4 /D_0': (QuantizationTrait.INPUTS_QUANTIZABLE, + '4 /D_0': InitNodeTestStruct(QuantizationTrait.INPUTS_QUANTIZABLE, [QuantizerConfig(num_bits=6)]), - '5 /E_0': (QuantizationTrait.INPUTS_QUANTIZABLE, + '5 /E_0': InitNodeTestStruct(QuantizationTrait.INPUTS_QUANTIZABLE, [QuantizerConfig(num_bits=4), QuantizerConfig(num_bits=6)]), }, starting_primary_quantizer_ip_node=InsertionPointGraph.get_pre_hook_node_key('5 /E_0'), @@ -921,11 +939,11 @@ def test_merged_qconfig_list_is_independent_of_branch_qconfig_list_order( BranchTransitionTestStruct( init_node_to_trait_and_configs_dict= { - '6 /F_0': (QuantizationTrait.INPUTS_QUANTIZABLE, + '6 /F_0': InitNodeTestStruct(QuantizationTrait.INPUTS_QUANTIZABLE, [QuantizerConfig(num_bits=7, mode=QuantizationMode.ASYMMETRIC)]), - '4 /D_0': (QuantizationTrait.INPUTS_QUANTIZABLE, + '4 /D_0': InitNodeTestStruct(QuantizationTrait.INPUTS_QUANTIZABLE, [QuantizerConfig(num_bits=6), QuantizerConfig(num_bits=8)]), - '5 /E_0': (QuantizationTrait.INPUTS_QUANTIZABLE, + '5 /E_0': InitNodeTestStruct(QuantizationTrait.INPUTS_QUANTIZABLE, [QuantizerConfig(num_bits=4), QuantizerConfig(num_bits=6)]), }, starting_primary_quantizer_ip_node=InsertionPointGraph.get_pre_hook_node_key('6 /F_0'), @@ -937,11 +955,11 @@ def test_merged_qconfig_list_is_independent_of_branch_qconfig_list_order( BranchTransitionTestStruct( init_node_to_trait_and_configs_dict= { - '6 /F_0': (QuantizationTrait.INPUTS_QUANTIZABLE, + '6 /F_0': InitNodeTestStruct(QuantizationTrait.INPUTS_QUANTIZABLE, [QuantizerConfig()]), - '4 /D_0': (QuantizationTrait.NON_QUANTIZABLE, + '4 /D_0': InitNodeTestStruct(QuantizationTrait.NON_QUANTIZABLE, []), - '5 /E_0': (QuantizationTrait.INPUTS_QUANTIZABLE, + '5 /E_0': InitNodeTestStruct(QuantizationTrait.INPUTS_QUANTIZABLE, [QuantizerConfig()]), }, starting_primary_quantizer_ip_node=InsertionPointGraph.get_pre_hook_node_key('5 /E_0'), @@ -952,11 +970,11 @@ def test_merged_qconfig_list_is_independent_of_branch_qconfig_list_order( BranchTransitionTestStruct( init_node_to_trait_and_configs_dict= { - '6 /F_0': (QuantizationTrait.INPUTS_QUANTIZABLE, + '6 /F_0': InitNodeTestStruct(QuantizationTrait.INPUTS_QUANTIZABLE, [QuantizerConfig()]), - '4 
/D_0': (QuantizationTrait.NON_QUANTIZABLE, + '4 /D_0': InitNodeTestStruct(QuantizationTrait.NON_QUANTIZABLE, []), - '10 /J_0': (QuantizationTrait.INPUTS_QUANTIZABLE, + '10 /J_0': InitNodeTestStruct(QuantizationTrait.INPUTS_QUANTIZABLE, [QuantizerConfig()]), }, starting_primary_quantizer_ip_node=InsertionPointGraph.get_pre_hook_node_key('6 /F_0'), @@ -968,11 +986,11 @@ def test_merged_qconfig_list_is_independent_of_branch_qconfig_list_order( BranchTransitionTestStruct( init_node_to_trait_and_configs_dict= { - '6 /F_0': (QuantizationTrait.INPUTS_QUANTIZABLE, + '6 /F_0': InitNodeTestStruct(QuantizationTrait.INPUTS_QUANTIZABLE, [QuantizerConfig()]), - '4 /D_0': (QuantizationTrait.NON_QUANTIZABLE, + '4 /D_0': InitNodeTestStruct(QuantizationTrait.NON_QUANTIZABLE, []), - '5 /E_0': (QuantizationTrait.INPUTS_QUANTIZABLE, + '5 /E_0': InitNodeTestStruct(QuantizationTrait.INPUTS_QUANTIZABLE, [QuantizerConfig()]), }, starting_primary_quantizer_ip_node=InsertionPointGraph.get_pre_hook_node_key('5 /E_0'), @@ -983,11 +1001,11 @@ def test_merged_qconfig_list_is_independent_of_branch_qconfig_list_order( BranchTransitionTestStruct( init_node_to_trait_and_configs_dict= { - '6 /F_0': (QuantizationTrait.INPUTS_QUANTIZABLE, + '6 /F_0': InitNodeTestStruct(QuantizationTrait.INPUTS_QUANTIZABLE, [QuantizerConfig()]), - '4 /D_0': (QuantizationTrait.NON_QUANTIZABLE, + '4 /D_0': InitNodeTestStruct(QuantizationTrait.NON_QUANTIZABLE, []), - '5 /E_0': (QuantizationTrait.INPUTS_QUANTIZABLE, + '5 /E_0': InitNodeTestStruct(QuantizationTrait.INPUTS_QUANTIZABLE, [QuantizerConfig()]), }, starting_primary_quantizer_ip_node=InsertionPointGraph.get_pre_hook_node_key('5 /E_0'), @@ -999,11 +1017,11 @@ def test_merged_qconfig_list_is_independent_of_branch_qconfig_list_order( BranchTransitionTestStruct( init_node_to_trait_and_configs_dict= { - '7 /G_0': (QuantizationTrait.INPUTS_QUANTIZABLE, + '7 /G_0': InitNodeTestStruct(QuantizationTrait.INPUTS_QUANTIZABLE, [QuantizerConfig()]), - '8 /H_0': (QuantizationTrait.QUANTIZATION_AGNOSTIC, + '8 /H_0': InitNodeTestStruct(QuantizationTrait.QUANTIZATION_AGNOSTIC, []), - '9 /I_0': (QuantizationTrait.CONCAT, + '9 /I_0': InitNodeTestStruct(QuantizationTrait.CONCAT, []), }, starting_primary_quantizer_ip_node=InsertionPointGraph.get_pre_hook_node_key('7 /G_0'), @@ -1015,17 +1033,77 @@ def test_merged_qconfig_list_is_independent_of_branch_qconfig_list_order( BranchTransitionTestStruct( init_node_to_trait_and_configs_dict= { - '7 /G_0': (QuantizationTrait.INPUTS_QUANTIZABLE, + '7 /G_0': InitNodeTestStruct(QuantizationTrait.INPUTS_QUANTIZABLE, [QuantizerConfig()]), - '8 /H_0': (QuantizationTrait.QUANTIZATION_AGNOSTIC, + '8 /H_0': InitNodeTestStruct(QuantizationTrait.QUANTIZATION_AGNOSTIC, []), - '9 /I_0': (QuantizationTrait.CONCAT, + '9 /I_0': InitNodeTestStruct(QuantizationTrait.CONCAT, [QuantizerConfig(num_bits=6)]), }, starting_primary_quantizer_ip_node=InsertionPointGraph.get_pre_hook_node_key('7 /G_0'), target_branching_node_for_primary_quantizer=InsertionPointGraph.get_post_hook_node_key('5 /E_0'), expected_status=TransitionStatus.SHOULD_WAIT_FOR_MERGE ), + BranchTransitionTestStruct( + init_node_to_trait_and_configs_dict= + { + '6 /F_0': InitNodeTestStruct(QuantizationTrait.QUANTIZATION_AGNOSTIC, + []), + '11 /K_0': InitNodeTestStruct(QuantizationTrait.QUANTIZATION_AGNOSTIC, + [], OutputNoopMetatype), + '12 /L_0': InitNodeTestStruct(QuantizationTrait.INPUTS_QUANTIZABLE, + [QuantizerConfig()]), + }, + starting_primary_quantizer_ip_node=InsertionPointGraph.get_pre_hook_node_key('12 /L_0'), + 
target_branching_node_for_primary_quantizer=InsertionPointGraph.get_pre_hook_node_key('6 /F_0'), + expected_status=TransitionStatus.SHOULD_TRANSITION + ), + BranchTransitionTestStruct( + init_node_to_trait_and_configs_dict= + { + '6 /F_0': InitNodeTestStruct(QuantizationTrait.QUANTIZATION_AGNOSTIC, + []), + '11 /K_0': InitNodeTestStruct(QuantizationTrait.NON_QUANTIZABLE, + []), + '12 /L_0': InitNodeTestStruct(QuantizationTrait.INPUTS_QUANTIZABLE, + [QuantizerConfig()]), + }, + starting_primary_quantizer_ip_node=InsertionPointGraph.get_pre_hook_node_key('12 /L_0'), + target_branching_node_for_primary_quantizer=InsertionPointGraph.get_pre_hook_node_key('6 /F_0'), + expected_status=TransitionStatus.SHOULD_NOT_TRANSITION + ), + BranchTransitionTestStruct( + init_node_to_trait_and_configs_dict= + { + '6 /F_0': InitNodeTestStruct(QuantizationTrait.QUANTIZATION_AGNOSTIC, + []), + '13 /M_0': InitNodeTestStruct(QuantizationTrait.QUANTIZATION_AGNOSTIC, + [], OutputNoopMetatype), + '12 /L_0': InitNodeTestStruct(QuantizationTrait.INPUTS_QUANTIZABLE, + [QuantizerConfig()]), + '11 /K_0': InitNodeTestStruct(QuantizationTrait.QUANTIZATION_AGNOSTIC, + []), + }, + starting_primary_quantizer_ip_node=InsertionPointGraph.get_pre_hook_node_key('12 /L_0'), + target_branching_node_for_primary_quantizer=InsertionPointGraph.get_post_hook_node_key('6 /F_0'), + expected_status=TransitionStatus.SHOULD_NOT_TRANSITION + ), + BranchTransitionTestStruct( + init_node_to_trait_and_configs_dict= + { + '6 /F_0': InitNodeTestStruct(QuantizationTrait.QUANTIZATION_AGNOSTIC, + []), + '13 /M_0': InitNodeTestStruct(QuantizationTrait.QUANTIZATION_AGNOSTIC, + [], OutputNoopMetatype), + '12 /L_0': InitNodeTestStruct(QuantizationTrait.INPUTS_QUANTIZABLE, + [QuantizerConfig()]), + '11 /K_0': InitNodeTestStruct(QuantizationTrait.NON_QUANTIZABLE, + []), + }, + starting_primary_quantizer_ip_node=InsertionPointGraph.get_pre_hook_node_key('12 /L_0'), + target_branching_node_for_primary_quantizer=InsertionPointGraph.get_post_hook_node_key('6 /F_0'), + expected_status=TransitionStatus.SHOULD_NOT_TRANSITION + ) ] # fmt: skip @staticmethod @@ -1042,14 +1120,22 @@ def test_check_branching_transition(self, branch_transition_test_struct: BranchT # Graph preparation nncf_graph = get_branching_model_graph() ip_graph = get_ip_graph_for_test(nncf_graph) + + # Metatypes must be assigned before QPSG creation, because + # QPSG detects outputs based on the metatype + metatypes = {k: v.op_meta for k, v in init_node_to_trait_and_configs_dict.items()} + for node_key, metatype in metatypes.items(): + node = ip_graph.nodes[node_key] + node[InsertionPointGraph.REGULAR_NODE_REF_NODE_ATTR].attributes[NNCFNode.METATYPE_ATTR] = metatype + quant_prop_graph = QPSG(ip_graph) for node in quant_prop_graph.nodes.values(): node[QPSG.QUANTIZATION_TRAIT_NODE_ATTR] = QuantizationTrait.QUANTIZATION_AGNOSTIC primary_prop_quant = None - for node_key, trait_and_configs_tuple in init_node_to_trait_and_configs_dict.items(): - trait = trait_and_configs_tuple[0] - qconfigs = trait_and_configs_tuple[1] + for node_key, init_node_struct in init_node_to_trait_and_configs_dict.items(): + qconfigs = init_node_struct.config + trait = init_node_struct.quantization_trait quant_prop_graph.nodes[node_key][QPSG.QUANTIZATION_TRAIT_NODE_ATTR] = trait if trait == QuantizationTrait.INPUTS_QUANTIZABLE: ip_node_key = InsertionPointGraph.get_pre_hook_node_key(node_key) @@ -1063,6 +1149,7 @@ def test_check_branching_transition(self, branch_transition_test_struct: BranchT 
quant_prop_graph.add_propagating_quantizer(qconfigs, ip_node_key) path = get_edge_paths_for_propagation(quant_prop_graph, target_node, starting_primary_quantizer_ip_node) + primary_prop_quant = quant_prop_graph.propagate_quantizer_via_path(primary_prop_quant, path[0]) quant_prop_graph.run_consistency_check() @@ -1532,7 +1619,7 @@ def test_handling_upward_branching_path_with_no_transition_creates_no_extra_quan retval_shared_input_operation_set_groups=[{1}], expected_count_finished_quant=1, expected_count_active_quant=0, - ignored_scopes=['/gelu_0', '/conv2d_0'] + ignored_scopes={'/gelu_0': IgnoreReason.USER_REQUESTED, '/conv2d_0': IgnoreReason.USER_REQUESTED} ), RunOnIpGraphTestStruct( base_nx_graph=get_sequentially_connected_model_graph(['conv2d', 'matmul']), @@ -1544,7 +1631,7 @@ def test_handling_upward_branching_path_with_no_transition_creates_no_extra_quan retval_shared_input_operation_set_groups=[{1}], expected_count_finished_quant=1, expected_count_active_quant=0, - ignored_scopes=['/conv2d_0'] + ignored_scopes={'/conv2d_0': IgnoreReason.USER_REQUESTED} ), RunOnIpGraphTestStruct( base_nx_graph=get_sequentially_connected_model_graph(['conv2d', 'matmul']), @@ -1553,7 +1640,7 @@ def test_handling_upward_branching_path_with_no_transition_creates_no_extra_quan retval_shared_input_operation_set_groups=[], expected_count_finished_quant=0, expected_count_active_quant=0, - ignored_scopes=['/conv2d_0', '/matmul_0'] + ignored_scopes={'/conv2d_0': IgnoreReason.USER_REQUESTED, '/matmul_0': IgnoreReason.USER_REQUESTED} ), RunOnIpGraphTestStruct( base_nx_graph=TwoFcAfterDropout.get_graph(), @@ -1567,7 +1654,7 @@ def test_handling_upward_branching_path_with_no_transition_creates_no_extra_quan retval_shared_input_operation_set_groups=[{1}], expected_count_finished_quant=1, expected_count_active_quant=0, - ignored_scopes=[TwoFcAfterDropout.FC_2_NODE_NAME] + ignored_scopes={TwoFcAfterDropout.FC_2_NODE_NAME: IgnoreReason.USER_REQUESTED} ) ] # fmt: skip @@ -1584,9 +1671,13 @@ def test_run_on_ip_graph(self, run_on_ip_graph_test_struct: RunOnIpGraphTestStru nncf_graph = run_on_ip_graph_test_struct.base_graph ip_graph = get_ip_graph_for_test(nncf_graph) + if run_on_ip_graph_test_struct.ignored_scopes is not None: + weight_ignored_scopes = list(run_on_ip_graph_test_struct.ignored_scopes.keys()) + else: + weight_ignored_scopes = None quant_prop_solver = QuantizerPropagationSolver( activation_ignored_scopes=run_on_ip_graph_test_struct.ignored_scopes, - weight_ignored_scopes=run_on_ip_graph_test_struct.ignored_scopes, + weight_ignored_scopes=weight_ignored_scopes, default_trait_to_metatype_map=DEFAULT_TEST_QUANT_TRAIT_MAP, run_consistency_checks=True, ) diff --git a/tests/common/requirements.txt b/tests/common/requirements.txt new file mode 100644 index 00000000000..1388c4ee806 --- /dev/null +++ b/tests/common/requirements.txt @@ -0,0 +1,3 @@ +pytest +pytest-cov +pytest-mock>=3.3.1 diff --git a/tests/common/test_framework_detection.py b/tests/common/test_framework_detection.py new file mode 100644 index 00000000000..c6f0f4c9736 --- /dev/null +++ b/tests/common/test_framework_detection.py @@ -0,0 +1,74 @@ +# Copyright (c) 2023 Intel Corporation +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +import importlib +import logging +import re +import unittest +from importlib.machinery import ModuleSpec +from typing import List +from unittest.mock import MagicMock + +import pytest + +import nncf + +SUPPORTED_FRAMEWORKS = nncf._SUPPORTED_FRAMEWORKS # pylint:disable=protected-access +_REAL_FIND_SPEC = importlib._bootstrap._find_spec # pylint:disable=protected-access + + +class FailForModules: + def __init__(self, mocked_modules: List[str], hidden_modules: List[str], origin_in_nncf: bool = False): + self._mocked_modules = mocked_modules + self._hidden_modules = hidden_modules + self._origin_in_nncf = origin_in_nncf + + def __call__(self, fullname, path=None, target=None): + if fullname in self._hidden_modules: + return None + if fullname in self._mocked_modules: + if self._origin_in_nncf: + origin = _REAL_FIND_SPEC("nncf", path, target).origin + "/foo/bar" + else: + origin = "foo/bar" + return ModuleSpec(fullname, loader=MagicMock(), origin=origin) + return _REAL_FIND_SPEC(fullname, path, target) + + +def _mock_import_and_check_availability_messages( + ref_available_frameworks: List[str], unavailable_frameworks: List[str], failer_obj: FailForModules, nncf_caplog +): + with unittest.mock.patch("importlib.util.find_spec", wraps=failer_obj): + with nncf_caplog.at_level(logging.INFO): + importlib.reload(nncf) + matches = re.search(r"Supported frameworks detected: (.*)", nncf_caplog.text) + if ref_available_frameworks: + assert matches is not None + match_text = matches[0] + for fw in ref_available_frameworks: + assert fw in match_text + for fw in unavailable_frameworks: + assert fw not in match_text + else: + assert matches is None + + +@pytest.mark.parametrize("ref_available_frameworks", [["torch"], ["torch", "tensorflow"], ["onnx", "openvino"], []]) +def test_frameworks_detected(ref_available_frameworks: List[str], nncf_caplog): + unavailable_frameworks = [fw for fw in SUPPORTED_FRAMEWORKS if fw not in ref_available_frameworks] + failer = FailForModules(ref_available_frameworks, unavailable_frameworks) + _mock_import_and_check_availability_messages(ref_available_frameworks, unavailable_frameworks, failer, nncf_caplog) + + +@pytest.mark.parametrize("ref_available_frameworks", [[fw] for fw in SUPPORTED_FRAMEWORKS]) +def test_frameworks_detected_if_origin_in_nncf(ref_available_frameworks, nncf_caplog): + unavailable_frameworks = [fw for fw in SUPPORTED_FRAMEWORKS if fw not in ref_available_frameworks] + failer = FailForModules(ref_available_frameworks, unavailable_frameworks, origin_in_nncf=True) + _mock_import_and_check_availability_messages(ref_available_frameworks, unavailable_frameworks, failer, nncf_caplog) diff --git a/tests/common/test_hardware_config.py b/tests/common/test_hardware_config.py index a8ebdcc174d..fa88a963787 100644 --- a/tests/common/test_hardware_config.py +++ b/tests/common/test_hardware_config.py @@ -24,8 +24,3 @@ def test_get_hw_config_type(target_device): def test_get_hw_config_type_trial(): assert get_hw_config_type("TRIAL") is None - - -def test_get_hw_config_type_cpu_spr(): - with pytest.raises(ValueError): - get_hw_config_type("CPU_SPR") diff --git 
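
The `FailForModules` mock above leans on the standard `importlib.util.find_spec` contract: it returns a `ModuleSpec` for modules that can be imported and `None` for modules that cannot, which is what the framework-detection code being tested keys off when it logs the "Supported frameworks detected" message. A small standalone illustration of that contract (module names here are arbitrary):

```python
import importlib.util

# find_spec returns a ModuleSpec when the module is importable and None otherwise.
for name in ("json", "some_package_that_is_not_installed"):
    spec = importlib.util.find_spec(name)
    print(f"{name}: {'available' if spec is not None else 'missing'}")
```
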
a/tests/common/test_logging.py b/tests/common/test_logging.py index d371d4f508a..870afab404c 100644 --- a/tests/common/test_logging.py +++ b/tests/common/test_logging.py @@ -55,6 +55,7 @@ def test_set_log_file(messages, expected): assert actual_line.rstrip("\n") == expected_line handlers_to_remove = [] + # pylint: disable=no-member for handler in nncf_logger.handlers: if isinstance(handler, logging.FileHandler) and str(tmp_dir) in handler.baseFilename: handler.close() # so that the log file is released and temp dir can be deleted diff --git a/tests/common/test_scopes.py b/tests/common/test_scopes.py index ec4d6351a62..0ebfc3914d0 100644 --- a/tests/common/test_scopes.py +++ b/tests/common/test_scopes.py @@ -32,6 +32,9 @@ ], ) def test_get_not_matched_scopes(scope, ref): - node_lists = [NNCFNode(1, "A"), NNCFNode(2, "B")] + node_lists = [ + NNCFNode({NNCFNode.ID_NODE_ATTR: 1, NNCFNode.NODE_NAME_ATTR: "A"}), + NNCFNode({NNCFNode.ID_NODE_ATTR: 2, NNCFNode.NODE_NAME_ATTR: "B"}), + ] not_matched = get_not_matched_scopes(scope, node_lists) assert not set(not_matched) - set(ref) diff --git a/tests/common/test_statistics_aggregator.py b/tests/common/test_statistics_aggregator.py index 69f35ab6530..cd0545a4580 100644 --- a/tests/common/test_statistics_aggregator.py +++ b/tests/common/test_statistics_aggregator.py @@ -8,8 +8,8 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. - from abc import abstractmethod +from collections import Counter from dataclasses import dataclass from enum import Enum from itertools import product @@ -375,40 +375,37 @@ def test_statistics_aggregator_min_max( inplace_statistics, is_backend_support_custom_estimators, ): - algo_backend = self.get_min_max_algo_backend_cls() model = self.get_backend_model(dataset_samples) - nncf_graph = NNCFGraphFactory.create(model) - quantizer_config = QuantizerConfig( mode=test_parameters.quantization_mode, per_channel=test_parameters.per_channel ) - target_point = self.get_target_point(test_parameters.target_type) - is_standart_estimator = test_parameters.range_estimator_params in [ + is_standard_estimator = test_parameters.range_estimator_params in [ RangeEstimatorParametersSet.MINMAX, RangeEstimatorParametersSet.MEAN_MINMAX, ] - if not is_standart_estimator and not is_backend_support_custom_estimators: + if not is_standard_estimator and not is_backend_support_custom_estimators: pytest.skip("Custom estimators are not supported for this backend yet") - tensor_collector = algo_backend.get_statistic_collector( + target_point = self.get_target_point(test_parameters.target_type) + algorithm_name = "TestAlgo" + statistic_point = self.create_statistics_point( + model, + quantizer_config, + target_point, + len(dataset_samples), + algorithm_name, + inplace_statistics, test_parameters.range_estimator_params, - nncf_graph=nncf_graph, - target_point=target_point, - quantizer_config=quantizer_config, - num_samples=len(dataset_samples), - inplace=inplace_statistics, ) - statistics_points = StatisticPointsContainer() - algorithm_name = "TestAlgo" - statistics_points.add_statistic_point( - StatisticPoint(target_point=target_point, tensor_collector=tensor_collector, algorithm=algorithm_name) - ) + statistics_points.add_statistic_point(statistic_point) + dataset = self.get_dataset(dataset_samples) statistics_aggregator = self.get_statistics_aggregator(dataset) statistics_aggregator.register_statistic_points(statistics_points) - 
statistics_aggregator.collect_statistics(model) + graph = NNCFGraphFactory.create(model) + statistics_aggregator.collect_statistics(model, graph) def filter_func(point): return ( @@ -443,7 +440,7 @@ class BiasCorrectionAlgos(Enum): class BCStatsCollectors(Enum): MEAN = "mean" - BATCH_MEAN = "batch_mean" + RAW = "raw" @dataclass class BCTestParameters: @@ -520,7 +517,7 @@ class BCTestParameters: axis=2, ), BCTestParameters( - BiasCorrectionAlgos.BIAS_CORRECTION, BCStatsCollectors.BATCH_MEAN, TargetType.POST_LAYER_OPERATION + BiasCorrectionAlgos.BIAS_CORRECTION, BCStatsCollectors.RAW, TargetType.POST_LAYER_OPERATION ), # TargeType: weights BCTestParameters( @@ -579,8 +576,8 @@ def test_statistics_aggregator_bias_correction( tensor_collector = algo_backend.mean_statistic_collector( test_params.axis, inplace_statistics, len(dataset_samples) ) - elif test_params.collector_type == self.BCStatsCollectors.BATCH_MEAN: - tensor_collector = algo_backend.batch_statistic_collector(inplace_statistics, len(dataset_samples)) + elif test_params.collector_type == self.BCStatsCollectors.RAW: + tensor_collector = algo_backend.raw_statistic_collector(inplace_statistics, len(dataset_samples)) else: raise RuntimeError() @@ -595,7 +592,8 @@ def test_statistics_aggregator_bias_correction( statistics_aggregator = self.get_statistics_aggregator(dataset) statistics_aggregator.register_statistic_points(statistics_points) model = self.get_backend_model(dataset_samples) - statistics_aggregator.collect_statistics(model) + graph = NNCFGraphFactory.create(model) + statistics_aggregator.collect_statistics(model, graph) def filter_func(point): return ( @@ -611,7 +609,7 @@ def filter_func(point): stat = tensor_collector.get_statistics() if test_params.collector_type == self.BCStatsCollectors.MEAN: ret_val = [stat.mean_values, stat.shape] - elif test_params.collector_type == self.BCStatsCollectors.BATCH_MEAN: + elif test_params.collector_type == self.BCStatsCollectors.RAW: ret_val = stat.values test_params.ref_values = dataset_samples if not is_stat_in_shape_of_scale: @@ -624,73 +622,59 @@ def filter_func(point): assert ref.shape == val.shape assert np.allclose(val, ref) - def test_statistics_merging_simple(self, dataset_samples, inplace_statistics): + def create_statistics_point( + self, model, q_config, target_point, subset_size, algorithm_name, inplace_statistics, range_estimator + ): algo_backend = self.get_min_max_algo_backend_cls() - model = self.get_backend_model(dataset_samples) nncf_graph = NNCFGraphFactory.create(model) - - quantizer_config = QuantizerConfig(mode=QuantizationMode.SYMMETRIC, per_channel=False) - pre_layer_target_point = self.get_target_point(TargetType.PRE_LAYER_OPERATION) - pre_tensor_collector = algo_backend.get_statistic_collector( - RangeEstimatorParametersSet.MINMAX, + tensor_collector = algo_backend.get_statistic_collector( + range_estimator, nncf_graph=nncf_graph, - target_point=pre_layer_target_point, - quantizer_config=quantizer_config, - num_samples=len(dataset_samples), + target_point=target_point, + quantizer_config=q_config, + num_samples=subset_size, inplace=inplace_statistics, ) + return StatisticPoint(target_point=target_point, tensor_collector=tensor_collector, algorithm=algorithm_name) - post_layer_target_point = self.get_target_point(TargetType.POST_LAYER_OPERATION) - post_tensor_collector = algo_backend.get_statistic_collector( - RangeEstimatorParametersSet.MINMAX, - nncf_graph=nncf_graph, - target_point=post_layer_target_point, - quantizer_config=quantizer_config, - 
num_samples=len(dataset_samples), - inplace=inplace_statistics, - ) - unique_post_tensor_collector = algo_backend.get_statistic_collector( - RangeEstimatorParametersSet.MEAN_MINMAX, - nncf_graph=nncf_graph, - target_point=post_layer_target_point, - quantizer_config=quantizer_config, - num_samples=len(dataset_samples), - inplace=inplace_statistics, - ) + @pytest.mark.parametrize( + "statistic_point_params", + ( + ( + ("AAA", RangeEstimatorParametersSet.MINMAX, TargetType.PRE_LAYER_OPERATION, -128.0, 128), + ("BBB", RangeEstimatorParametersSet.MINMAX, TargetType.POST_LAYER_OPERATION, -128.0, 128), + ("CCC", RangeEstimatorParametersSet.MEAN_MINMAX, TargetType.POST_LAYER_OPERATION, -63.5, 64.5), + ), + ), + ) + def test_statistics_merging_simple(self, dataset_samples, inplace_statistics, statistic_point_params): + model = self.get_backend_model(dataset_samples) + quantizer_config = QuantizerConfig(mode=QuantizationMode.SYMMETRIC, per_channel=False) + subset_size = len(dataset_samples) statistics_points = StatisticPointsContainer() - algorithm_names = ["AAA", "BBB", "CCC"] - statistics_points.add_statistic_point( - StatisticPoint( - target_point=pre_layer_target_point, tensor_collector=pre_tensor_collector, algorithm=algorithm_names[0] + ref_val = {} + + for statistic_point_param in statistic_point_params: + algorithm_name, range_estimator, target_point_type, ref_min_val, ref_max_val = statistic_point_param + ref_val[algorithm_name] = (ref_min_val, ref_max_val) + target_point = self.get_target_point(target_point_type) + statistics_point = self.create_statistics_point( + model, quantizer_config, target_point, subset_size, algorithm_name, inplace_statistics, range_estimator ) - ) - statistics_points.add_statistic_point( - StatisticPoint( - target_point=post_layer_target_point, - tensor_collector=post_tensor_collector, - algorithm=algorithm_names[1], - ) - ) - statistics_points.add_statistic_point( - StatisticPoint( - target_point=post_layer_target_point, - tensor_collector=unique_post_tensor_collector, - algorithm=algorithm_names[2], - ) - ) + statistics_points.add_statistic_point(statistics_point) + dataset = self.get_dataset(dataset_samples) statistics_aggregator = self.get_statistics_aggregator(dataset) statistics_aggregator.register_statistic_points(statistics_points) - statistics_aggregator.collect_statistics(model) + graph = NNCFGraphFactory.create(model) + statistics_aggregator.collect_statistics(model, graph) tensor_collectors = list(statistics_points.get_tensor_collectors()) assert len(tensor_collectors) == 3 - for _, _, tensor_collector in tensor_collectors: + for algorithm, _, tensor_collector in tensor_collectors: stat = tensor_collector.get_statistics() - ref_min_val, ref_max_val = -128.0, 128 - if tensor_collector is unique_post_tensor_collector: - ref_min_val, ref_max_val = -63.5, 64.5 + ref_min_val, ref_max_val = ref_val[algorithm] assert np.allclose(stat.min_values, ref_min_val) assert np.allclose(stat.max_values, ref_max_val) @@ -790,7 +774,7 @@ def test_statistic_merging(self, test_params, key, dataset_samples, inplace_stat dataset = self.get_dataset(dataset_samples) statistics_aggregator = self.get_statistics_aggregator(dataset) # pylint: disable=protected-access - merged_statistics = statistics_aggregator._get_merged_statistic_points(statistics_points, model) + merged_statistics = statistics_aggregator._get_merged_statistic_points(statistics_points, model, nncf_graph) merged_stats_checkers_map = { "split_concat": self._check_split_concat_merged_stats, "shared_conv": 
self._check_shared_convs_merged_stats, @@ -798,7 +782,7 @@ def test_statistic_merging(self, test_params, key, dataset_samples, inplace_stat merged_stats_checkers_map[key](merged_statistics) statistics_aggregator.register_statistic_points(statistics_points) - statistics_aggregator.collect_statistics(model) + statistics_aggregator.collect_statistics(model, nncf_graph) for collector, ref in collectors_and_refs: stat = collector.get_statistics() @@ -861,4 +845,43 @@ def product_dict(**kwargs): statistics_aggregator = self.get_statistics_aggregator(dataset) statistics_aggregator.register_statistic_points(statistics_points) # Run statistic collection to check output names matches reduer names - statistics_aggregator.collect_statistics(model) + graph = NNCFGraphFactory.create(model) + statistics_aggregator.collect_statistics(model, graph) + + @pytest.mark.parametrize( + "statistic_point_params", + ( + ( + ("AAA", RangeEstimatorParametersSet.MINMAX, TargetType.PRE_LAYER_OPERATION, 100), + ("BBB", RangeEstimatorParametersSet.MINMAX, TargetType.POST_LAYER_OPERATION, 10), + ("CCC", RangeEstimatorParametersSet.MEAN_MINMAX, TargetType.PRE_LAYER_OPERATION, None), + ("CCC", RangeEstimatorParametersSet.MEAN_MINMAX, TargetType.PRE_LAYER_OPERATION, -1), + ), + ), + ) + def test_register_statistics(self, dataset_samples, statistic_point_params): + model = self.get_backend_model(dataset_samples) + quantizer_config = QuantizerConfig(mode=QuantizationMode.SYMMETRIC, per_channel=False) + statistics_points = StatisticPointsContainer() + ref_val = {} + + for statistic_point_param in statistic_point_params: + algorithm_name, range_estimator, target_point_type, subset_size = statistic_point_param + ref_val[algorithm_name] = subset_size + target_point = self.get_target_point(target_point_type) + statistics_point = self.create_statistics_point( + model, quantizer_config, target_point, subset_size, algorithm_name, True, range_estimator + ) + statistics_points.add_statistic_point(statistics_point) + + dataset = self.get_dataset(dataset_samples) + statistics_aggregator = self.get_statistics_aggregator(dataset) + statistics_aggregator.register_statistic_points(statistics_points) + assert Counter(statistics_points) == Counter(statistics_aggregator.statistic_points) + ref_subset_size = None + for subset_size in ref_val.values(): + if subset_size and ref_subset_size: + ref_subset_size = max(ref_subset_size, subset_size) + else: + ref_subset_size = subset_size + assert statistics_aggregator.stat_subset_size == ref_subset_size diff --git a/tests/common/test_telemetry.py b/tests/common/test_telemetry.py index d64d69f46a6..6fbb4bd753c 100644 --- a/tests/common/test_telemetry.py +++ b/tests/common/test_telemetry.py @@ -24,6 +24,7 @@ from nncf.telemetry import tracked_function from nncf.telemetry.extractors import CollectedEvent from nncf.telemetry.wrapper import NNCFTelemetryStub +from nncf.telemetry.wrapper import skip_if_raised @pytest.fixture(name="hide_pytest") @@ -184,3 +185,23 @@ def test_nested_function_different_categories(mocker, spies): assert start_session_event_spy.call_args_list == expected_session_call_args_list assert end_session_event_spy.call_args_list == list(reversed(expected_session_call_args_list)) + + +class Raises: + def __init__(self): + self.call_count = 0 + + def __call__(self, arg1, arg2): + self.call_count += 1 + raise Exception() + + +def test_skip_if_raised(): + raises = Raises() + wrapped = skip_if_raised(raises) + wrapped(1, 2) + assert raises.call_count == 1 + + # Incorrect args + wrapped(1, 2, 3) 
+ assert raises.call_count == 1 diff --git a/tests/common/test_tensor.py b/tests/common/test_tensor.py new file mode 100644 index 00000000000..b45f4d725e0 --- /dev/null +++ b/tests/common/test_tensor.py @@ -0,0 +1,20 @@ +# Copyright (c) 2023 Intel Corporation +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import numpy as np + +from tests.shared.test_templates.template_test_nncf_tensor import TemplateTestNNCFTensorOperators + + +class TestNPNNCFTensorOperators(TemplateTestNNCFTensorOperators): + @staticmethod + def to_tensor(x): + return np.array(x) diff --git a/tests/cross_fw/examples/conftest.py b/tests/cross_fw/examples/conftest.py new file mode 100644 index 00000000000..cbf665402d8 --- /dev/null +++ b/tests/cross_fw/examples/conftest.py @@ -0,0 +1,40 @@ +# Copyright (c) 2023 Intel Corporation +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
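
The new `test_skip_if_raised` case above pins down the expected contract of `skip_if_raised` from `nncf.telemetry.wrapper`: the wrapped callable is invoked normally, but any exception it raises, including a signature mismatch, is swallowed instead of propagating to the caller. A minimal sketch of a decorator with that behavior, assuming the goal is simply that telemetry failures never break the calling code (this is not the actual NNCF implementation):

```python
import functools
import logging


def skip_if_raised_sketch(func):
    """Call func, but swallow (and only log) any exception instead of propagating it."""

    @functools.wraps(func)
    def wrapped(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except Exception:  # deliberately broad: telemetry must never break the caller
            logging.getLogger(__name__).debug("Call to %s skipped due to an exception", func.__name__)
            return None

    return wrapped
```
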
+ +import pytest + +TESTED_BACKENDS = ["torch", "tf", "onnx", "openvino"] + + +def pytest_addoption(parser): + parser.addoption( + "--backend", + type=str, + help="Backend to test installation for.", + choices=[*TESTED_BACKENDS, "all"], + nargs="+", + default=["all"], + ) + parser.addoption( + "--check_performance", + default=False, + help="If the parameter is set then the performance metrics will be tested as well", + ) + + +@pytest.fixture(scope="module") +def backends_list(request): + return request.config.getoption("--backend") + + +@pytest.fixture(scope="module") +def is_check_performance(request): + return request.config.getoption("--check_performance") diff --git a/tests/cross_fw/examples/example_scope.json b/tests/cross_fw/examples/example_scope.json new file mode 100644 index 00000000000..a215b096679 --- /dev/null +++ b/tests/cross_fw/examples/example_scope.json @@ -0,0 +1,147 @@ +{ + "post_training_quantization_onnx_mobilenet_v2": { + "backend": "onnx", + "requirements": "examples/post_training_quantization/onnx/mobilenet_v2/requirements.txt", + "cpu": "Intel(R) Core(TM) i9-10980XE CPU @ 3.00GHz", + "accuracy_metrics": { + "fp32_top1": 0.9864968152866243, + "int8_top1": 0.9844585987261146, + "accuracy_drop": 0.0020382165605096203 + }, + "performance_metrics": { + "fp32_fps": 1732.8, + "int8_fps": 6013.68, + "performance_speed_up": 3.470498614958449 + } + }, + "post_training_quantization_openvino_mobilenet_v2_quantize": { + "backend": "openvino", + "requirements": "examples/post_training_quantization/openvino/mobilenet_v2/requirements.txt", + "cpu": "Intel(R) Core(TM) i9-10980XE CPU @ 3.00GHz", + "accuracy_metrics": { + "fp32_top1": 0.9864968152866243, + "int8_top1": 0.9844585987261146, + "accuracy_drop": 0.0020382165605096203 + }, + "performance_metrics": { + "fp32_fps": 1725.97, + "int8_fps": 6003.05, + "performance_speed_up": 3.478073199418298 + }, + "model_size_metrics": { + "fp32_model_size": 8.605232238769531, + "int8_model_size": 2.6038694381713867, + "model_compression_rate": 3.30478637393306 + } + }, + "post_training_quantization_openvino_yolo8_quantize": { + "backend": "openvino", + "requirements": "examples/post_training_quantization/openvino/yolov8/requirements.txt", + "cpu": "Intel(R) Core(TM) i9-10980XE CPU @ 3.00GHz", + "accuracy_metrics": { + "fp32_mAP": 0.45252755065175254, + "int8_mAP": 0.4425154975267813, + "accuracy_drop": 0.01001205312497122 + }, + "performance_metrics": { + "fp32_fps": 170.69, + "int8_fps": 444.15, + "performance_speed_up": 2.6020856523522173 + } + }, + "post_training_quantization_openvino_anomaly_stfpm_quantize_with_accuracy_control": { + "backend": "openvino", + "requirements": "examples/post_training_quantization/openvino/anomaly_stfpm_quantize_with_accuracy_control/requirements.txt", + "cpu": "Intel(R) Core(TM) i9-10980XE CPU @ 3.00GHz", + "accuracy_metrics": { + "fp32_top1": 0.9680365324020386, + "int8_top1": 0.9683257937431335, + "accuracy_drop": -0.0002892613410949707 + }, + "performance_metrics": { + "fp32_fps": 346.23, + "int8_fps": 833.56, + "performance_speed_up": 2.407532565057909 + }, + "model_size_metrics": { + "fp32_model_size": 21.33917808532715, + "int8_model_size": 5.725968360900879, + "model_compression_rate": 3.726736988461077 + } + }, + "post_training_quantization_openvino_yolo8_quantize_with_accuracy_control": { + "backend": "openvino", + "requirements": "examples/post_training_quantization/openvino/yolov8_quantize_with_accuracy_control/requirements.txt", + "cpu": "Intel(R) Core(TM) i9-10980XE CPU @ 3.00GHz", + 
"accuracy_metrics": { + "fp32_mAP": 0.44964819083938, + "int8_mAP": 0.4428344895334926, + "accuracy_drop": 0.006813701305887432 + }, + "performance_metrics": { + "fp32_fps": 134.48, + "int8_fps": 274.58, + "performance_speed_up": 2.0417906008328375 + } + }, + "post_training_quantization_tensorflow_mobilenet_v2": { + "backend": "tensorflow", + "requirements": "examples/post_training_quantization/tensorflow/mobilenet_v2/requirements.txt", + "cpu": "Intel(R) Core(TM) i9-10980XE CPU @ 3.00GHz", + "accuracy_metrics": { + "fp32_top1": 0.987770676612854, + "int8_top1": 0.9775795936584473, + "accuracy_drop": 0.010191082954406738 + }, + "performance_metrics": { + "fp32_fps": 1703.04, + "int8_fps": 5796.3, + "performance_speed_up": 3.403501972942503 + }, + "model_size_metrics": { + "fp32_model_size": 8.596238136291504, + "int8_model_size": 2.69466495513916, + "model_compression_rate": 3.1900953474371994 + } + }, + "post_training_quantization_torch_mobilenet_v2": { + "backend": "torch", + "requirements": "examples/post_training_quantization/torch/mobilenet_v2/requirements.txt", + "cpu": "Intel(R) Core(TM) i9-10980XE CPU @ 3.00GHz", + "accuracy_metrics": { + "fp32_top1": 0.9864968152866243, + "int8_top1": 0.9829299363057324, + "accuracy_drop": 0.0035668789808918078 + }, + "performance_metrics": { + "fp32_fps": 1624.67, + "int8_fps": 5981.95, + "performance_speed_up": 3.681947718613626 + }, + "model_size_metrics": { + "fp32_model_size": 8.605537414550781, + "int8_model_size": 2.748152732849121, + "model_compression_rate": 3.131389792018245 + } + }, + "post_training_quantization_torch_ssd300_vgg16": { + "backend": "torch", + "requirements": "examples/post_training_quantization/torch/ssd300_vgg16/requirements.txt", + "cpu": "Intel(R) Core(TM) i9-10980XE CPU @ 3.00GHz", + "accuracy_metrics": { + "fp32_mAP": 0.5232756733894348, + "int8_mAP": 0.5140125155448914, + "accuracy_drop": 0.009263157844543457 + }, + "performance_metrics": { + "fp32_fps": 33.7, + "int8_fps": 132.81, + "performance_speed_up": 3.940949554896142 + }, + "model_size_metrics": { + "fp32_model_size": 137.3418207168579, + "int8_model_size": 35.55147361755371, + "model_compression_rate": 3.8631822183889652 + } + } +} \ No newline at end of file diff --git a/tests/cross_fw/examples/requirements.txt b/tests/cross_fw/examples/requirements.txt new file mode 100644 index 00000000000..9955deccd94 --- /dev/null +++ b/tests/cross_fw/examples/requirements.txt @@ -0,0 +1,2 @@ +pytest +pytest-cov diff --git a/tests/cross_fw/examples/run_example.py b/tests/cross_fw/examples/run_example.py new file mode 100644 index 00000000000..790994dec25 --- /dev/null +++ b/tests/cross_fw/examples/run_example.py @@ -0,0 +1,151 @@ +# Copyright (c) 2023 Intel Corporation +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import json +import sys +from argparse import ArgumentParser +from typing import Dict, Tuple + +from tests.shared.paths import PROJECT_ROOT + +# pylint: disable=maybe-no-member +# pylint: disable=import-error + + +def post_training_quantization_mobilenet_v2(example_root_dir: str) -> Dict[str, float]: + sys.path.append(example_root_dir) + import main as mobilenet_v2 + + metrics = { + "fp32_top1": float(mobilenet_v2.fp32_top1), + "int8_top1": float(mobilenet_v2.int8_top1), + "accuracy_drop": float(mobilenet_v2.fp32_top1 - mobilenet_v2.int8_top1), + "fp32_fps": mobilenet_v2.fp32_fps, + "int8_fps": mobilenet_v2.int8_fps, + "performance_speed_up": mobilenet_v2.int8_fps / mobilenet_v2.fp32_fps, + } + + if hasattr(mobilenet_v2, "fp32_model_size") and hasattr(mobilenet_v2, "int8_model_size"): + metrics["fp32_model_size"] = mobilenet_v2.fp32_model_size + metrics["int8_model_size"] = mobilenet_v2.int8_model_size + metrics["model_compression_rate"] = mobilenet_v2.fp32_model_size / mobilenet_v2.int8_model_size + + return metrics + + +def post_training_quantization_onnx_mobilenet_v2() -> Dict[str, float]: + example_root = str(PROJECT_ROOT / "examples" / "post_training_quantization" / "onnx" / "mobilenet_v2") + return post_training_quantization_mobilenet_v2(example_root) + + +def post_training_quantization_openvino_mobilenet_v2_quantize() -> Dict[str, float]: + example_root = str(PROJECT_ROOT / "examples" / "post_training_quantization" / "openvino" / "mobilenet_v2") + return post_training_quantization_mobilenet_v2(example_root) + + +def post_training_quantization_tensorflow_mobilenet_v2() -> Dict[str, float]: + example_root = str(PROJECT_ROOT / "examples" / "post_training_quantization" / "tensorflow" / "mobilenet_v2") + return post_training_quantization_mobilenet_v2(example_root) + + +def post_training_quantization_torch_mobilenet_v2() -> Dict[str, float]: + example_root = str(PROJECT_ROOT / "examples" / "post_training_quantization" / "torch" / "mobilenet_v2") + return post_training_quantization_mobilenet_v2(example_root) + + +def format_results(results: Tuple[float]) -> Dict[str, float]: + return { + "fp32_mAP": results[0], + "int8_mAP": results[1], + "accuracy_drop": results[0] - results[1], + "fp32_fps": results[2], + "int8_fps": results[3], + "performance_speed_up": results[3] / results[2], + } + + +def post_training_quantization_openvino_yolo8_quantize() -> Dict[str, float]: + from examples.post_training_quantization.openvino.yolov8.main import main as yolo8_main + + results = yolo8_main() + + return format_results(results) + + +def post_training_quantization_openvino_yolo8_quantize_with_accuracy_control() -> Dict[str, float]: + from examples.post_training_quantization.openvino.yolov8_quantize_with_accuracy_control.main import ( + main as yolo8_main, + ) + + results = yolo8_main() + + return format_results(results) + + +def post_training_quantization_openvino_anomaly_stfpm_quantize_with_accuracy_control() -> Dict[str, float]: + sys.path.append( + str( + PROJECT_ROOT + / "examples" + / "post_training_quantization" + / "openvino" + / "anomaly_stfpm_quantize_with_accuracy_control" + ) + ) + import main as stfpm + + return { + "fp32_top1": float(stfpm.fp32_top1), + "int8_top1": float(stfpm.int8_top1), + "accuracy_drop": float(stfpm.fp32_top1 - stfpm.int8_top1), + "fp32_fps": stfpm.fp32_fps, + "int8_fps": stfpm.int8_fps, + "performance_speed_up": stfpm.int8_fps / stfpm.fp32_fps, + "fp32_model_size": stfpm.fp32_size, + "int8_model_size": stfpm.int8_size, + "model_compression_rate": stfpm.fp32_size / 
stfpm.int8_size, + } + + +def post_training_quantization_torch_ssd300_vgg16() -> Dict[str, float]: + from examples.post_training_quantization.torch.ssd300_vgg16.main import main as ssd300_vgg16_main + + results = ssd300_vgg16_main() + + return { + "fp32_mAP": float(results[0]), + "int8_mAP": float(results[1]), + "accuracy_drop": float(results[0] - results[1]), + "fp32_fps": results[2], + "int8_fps": results[3], + "performance_speed_up": results[3] / results[2], + "fp32_model_size": results[4], + "int8_model_size": results[5], + "model_compression_rate": results[4] / results[5], + } + + +def main(argv): + parser = ArgumentParser() + parser.add_argument("--name", help="Example name", required=True) + parser.add_argument("-o", "--output", help="Path to the json file to save example metrics", required=True) + args = parser.parse_args(args=argv) + + metrics = globals()[args.name]() + + with open(args.output, "w", encoding="utf8") as json_file: + return json.dump(metrics, json_file) + + +if __name__ == "__main__": + self_argv = sys.argv[1:] + sys.argv = sys.argv[:1] + main(self_argv) diff --git a/tests/cross_fw/examples/test_examples.py b/tests/cross_fw/examples/test_examples.py new file mode 100644 index 00000000000..fc3c0d8f815 --- /dev/null +++ b/tests/cross_fw/examples/test_examples.py @@ -0,0 +1,76 @@ +# Copyright (c) 2023 Intel Corporation +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
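
The derived fields stored in `example_scope.json` above follow the same formulas that `run_example.py` uses when collecting metrics: `accuracy_drop = fp32_top1 - int8_top1` and `performance_speed_up = int8_fps / fp32_fps`. A quick sanity check against the `post_training_quantization_onnx_mobilenet_v2` entry (values copied from the scope file above):

```python
# Values from the post_training_quantization_onnx_mobilenet_v2 entry of example_scope.json.
fp32_top1, int8_top1 = 0.9864968152866243, 0.9844585987261146
fp32_fps, int8_fps = 1732.8, 6013.68

# accuracy_drop and performance_speed_up in the scope file are exactly these derived values.
assert abs((fp32_top1 - int8_top1) - 0.0020382165605096203) < 1e-12
assert abs((int8_fps / fp32_fps) - 3.470498614958449) < 1e-12
```
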
+ +import os +import subprocess + +import pytest + +from tests.shared.case_collection import skip_if_backend_not_selected +from tests.shared.command import Command +from tests.shared.helpers import create_venv_with_nncf +from tests.shared.helpers import get_pip_executable_with_venv +from tests.shared.helpers import get_python_executable_with_venv +from tests.shared.helpers import load_json +from tests.shared.paths import PROJECT_ROOT +from tests.shared.paths import TEST_ROOT + +EXAMPLE_TEST_ROOT = TEST_ROOT / "cross_fw" / "examples" +EXAMPLE_SCOPE_PATH = EXAMPLE_TEST_ROOT / "example_scope.json" + +ACCURACY_TOLERANCE = 0.002 +PERFORMANCE_RELATIVE_TOLERANCE = 0.05 +MODEL_SIZE_RELATIVE_TOLERANCE = 0.05 + +ACCURACY_METRICS = "accuracy_metrics" +MODEL_SIZE_METRICS = "model_size_metrics" +PERFORMNACE_METRICS = "performance_metrics" + + +def example_test_cases(): + example_scope = load_json(EXAMPLE_SCOPE_PATH) + for example_name, example_params in example_scope.items(): + yield pytest.param(example_name, example_params, id=example_name) + + +@pytest.mark.parametrize("example_name, example_params", example_test_cases()) +def test_examples(tmp_path, example_name, example_params, backends_list, is_check_performance): + backend = example_params["backend"] + skip_if_backend_not_selected(backend, backends_list) + venv_path = create_venv_with_nncf(tmp_path, "pip_e_local", "venv", set([backend])) + if "requirements" in example_params: + pip_with_venv = get_pip_executable_with_venv(venv_path) + requirements = PROJECT_ROOT / example_params["requirements"] + run_cmd_line = f"{pip_with_venv} install -r {requirements}" + subprocess.run(run_cmd_line, check=True, shell=True) + + env = os.environ.copy() + env["PYTHONPATH"] = str(PROJECT_ROOT) # need this to be able to import from tests.* in run_example.py + + metrics_file_path = tmp_path / "metrics.json" + python_executable_with_venv = get_python_executable_with_venv(venv_path) + run_example_py = EXAMPLE_TEST_ROOT / "run_example.py" + run_cmd_line = f"{python_executable_with_venv} {run_example_py} --name {example_name} --output {metrics_file_path}" + cmd = Command(run_cmd_line, cwd=PROJECT_ROOT, env=env) + cmd.run() + + measured_metrics = load_json(metrics_file_path) + + for name, value in example_params[ACCURACY_METRICS].items(): + assert measured_metrics[name] == pytest.approx(value, abs=ACCURACY_TOLERANCE) + + if MODEL_SIZE_METRICS in example_params: + for name, value in example_params[MODEL_SIZE_METRICS].items(): + assert measured_metrics[name] == pytest.approx(value, rel=MODEL_SIZE_RELATIVE_TOLERANCE) + + if is_check_performance and PERFORMNACE_METRICS in example_params: + for name, value in example_params[PERFORMNACE_METRICS].items(): + assert measured_metrics[name] == pytest.approx(value, rel=PERFORMANCE_RELATIVE_TOLERANCE) diff --git a/tests/cross_fw/install/install_checks_torch.py b/tests/cross_fw/install/install_checks_torch.py index 6480e089c18..d22dd5e2302 100644 --- a/tests/cross_fw/install/install_checks_torch.py +++ b/tests/cross_fw/install/install_checks_torch.py @@ -13,22 +13,16 @@ import torch -# Do not remove - these imports are for testing purposes. 
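
The assertions in `test_examples` above compare measured metrics against the scope file with `pytest.approx`, using an absolute tolerance for accuracy metrics and relative tolerances for model-size and performance metrics. A standalone illustration of the difference between the two modes (the numbers are made up and chosen only to sit inside the tolerances):

```python
from pytest import approx

# Absolute tolerance: |measured - reference| <= 0.002, as used for ACCURACY_METRICS.
reference_top1, measured_top1 = 0.9865, 0.9851
assert measured_top1 == approx(reference_top1, abs=0.002)

# Relative tolerance: |measured - reference| <= 0.05 * |reference|, as used for model size.
reference_size_mb, measured_size_mb = 8.61, 8.95
assert measured_size_mb == approx(reference_size_mb, rel=0.05)
```
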
-# pylint:disable=unused-import -import nncf - if len(sys.argv) != 3: raise RuntimeError("Must be run with an execution type as argument (either 'cpu' or 'gpu') and package type") execution_type = sys.argv[1] package_type = sys.argv[2] -if package_type == "pip_pypi": - try: - from nncf.torch import create_compressed_model - except ImportError: - from nncf import create_compressed_model -else: - from nncf.torch import create_compressed_model +# Do not remove - these imports are for testing purposes. +# pylint:disable=unused-import +# pylint:disable=wrong-import-position +import nncf +from nncf.torch import create_compressed_model input_low_tensor = torch.zeros([1]) input_tensor = torch.ones([1, 1, 1, 1]) @@ -38,16 +32,9 @@ levels = 256 if execution_type == "cpu": - if package_type == "pip_pypi": - try: - from nncf.torch.binarization.extensions import BinarizedFunctionsCPU - from nncf.torch.quantization.extensions import QuantizedFunctionsCPU - except ImportError: - from nncf.binarization.extensions import BinarizedFunctionsCPU - from nncf.quantization.extensions import QuantizedFunctionsCPU - else: - from nncf.torch.binarization.extensions import BinarizedFunctionsCPU - from nncf.torch.quantization.extensions import QuantizedFunctionsCPU + from nncf.torch.binarization.extensions import BinarizedFunctionsCPU + from nncf.torch.quantization.extensions import QuantizedFunctionsCPU + output_tensor = QuantizedFunctionsCPU.get("Quantize_forward")( input_tensor, input_low_tensor, input_high_tensor, levels ) @@ -61,16 +48,9 @@ input_high_tensor = input_high_tensor.cuda() scale_tensor = scale_tensor.cuda() threshold_tensor = threshold_tensor.cuda() - if package_type == "pip_pypi": - try: - from nncf.torch.binarization.extensions import BinarizedFunctionsCUDA - from nncf.torch.quantization.extensions import QuantizedFunctionsCUDA - except ImportError: - from nncf.binarization.extensions import BinarizedFunctionsCUDA - from nncf.quantization.extensions import QuantizedFunctionsCUDA - else: - from nncf.torch.binarization.extensions import BinarizedFunctionsCUDA - from nncf.torch.quantization.extensions import QuantizedFunctionsCUDA + from nncf.torch.binarization.extensions import BinarizedFunctionsCUDA + from nncf.torch.quantization.extensions import QuantizedFunctionsCUDA + output_tensor = QuantizedFunctionsCUDA.get("Quantize_forward")( input_tensor, input_low_tensor, input_high_tensor, levels ) @@ -79,4 +59,4 @@ ) output_tensor = BinarizedFunctionsCUDA.get("WeightBinarize_forward")(output_tensor, True) else: - raise RuntimeError("Invalid execution type!") + raise RuntimeError(f"Invalid execution type {execution_type} (expected 'cpu' or 'gpu')!") diff --git a/tests/cross_fw/install/requirements.txt b/tests/cross_fw/install/requirements.txt index 5d0edefc9fa..8af77860ebe 100644 --- a/tests/cross_fw/install/requirements.txt +++ b/tests/cross_fw/install/requirements.txt @@ -1,2 +1,3 @@ pytest +pytest-cov virtualenv diff --git a/tests/cross_fw/install/test_install.py b/tests/cross_fw/install/test_install.py index 513fad1c809..c7953d2c6f5 100644 --- a/tests/cross_fw/install/test_install.py +++ b/tests/cross_fw/install/test_install.py @@ -12,7 +12,6 @@ import os import shutil import subprocess -import sys from pathlib import Path from typing import List @@ -21,6 +20,7 @@ from nncf.common.utils.os import is_linux from nncf.common.utils.os import is_windows from tests.cross_fw.install.conftest import TESTED_BACKENDS +from tests.shared.case_collection import skip_if_backend_not_selected from 
tests.shared.helpers import create_venv_with_nncf from tests.shared.helpers import get_pip_executable_with_venv from tests.shared.helpers import get_python_executable_with_venv @@ -97,11 +97,6 @@ def backend_to_test_(request, backend_clopt: List[str]): return request.param -def skip_if_backend_not_selected(backend: str, backends_from_cl: List[str]): - if "all" not in backends_from_cl and backend not in backends_from_cl: - pytest.skip("not selected for testing") - - @pytest.mark.parametrize("backend", TESTED_BACKENDS) class TestInstall: @staticmethod @@ -114,10 +109,8 @@ def test_install( host_configuration_clopt: str, ): skip_if_backend_not_selected(backend, backend_clopt) - if backend == "openvino" and "pypi" in package_type: - pytest.xfail("Disabled until OV backend is exposed in a release") - if backend == "torch" and "pypi" in package_type: - pytest.xfail("Disabled until NNCF with torch version supporting CUDA 11.6 backend is exposed in a release") + if "pypi" in package_type: + pytest.xfail("Disabled until NNCF is exposed in a release") venv_path = create_venv_with_nncf(tmp_path, package_type, venv_type, extra_reqs={backend}) run_install_checks(venv_path, tmp_path, package_type, backend=backend, install_type=host_configuration_clopt) @@ -131,10 +124,8 @@ def test_install_with_tests_requirements( host_configuration_clopt: str, ): skip_if_backend_not_selected(backend, backend_clopt) - if backend == "openvino" and "pypi" in package_type: - pytest.xfail("Disabled until OV backend is exposed in a release") - if backend == "torch" and "pypi" in package_type: - pytest.xfail("Disabled until NNCF with torch version supporting CUDA 11.6 backend is exposed in a release") + if "pypi" in package_type: + pytest.xfail("Disabled until NNCF is exposed in a release") venv_path = create_venv_with_nncf(tmp_path, package_type, venv_type, extra_reqs={backend}) if is_linux(): diff --git a/tests/experimental/common/test_statistic_collector.py b/tests/experimental/common/test_statistic_collector.py index cfd49b4bb32..9346b176bd6 100644 --- a/tests/experimental/common/test_statistic_collector.py +++ b/tests/experimental/common/test_statistic_collector.py @@ -9,6 +9,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+from abc import abstractmethod from typing import List, Optional import numpy as np @@ -22,6 +23,7 @@ from nncf.experimental.common.tensor_statistics.collectors import TensorType +# pylint: disable=(protected-access) class DummyTensorReducer(TensorReducerBase): def __init__(self, output_name: str, inplace: bool = False, inplace_mock=None): super().__init__(inplace=inplace) @@ -52,7 +54,7 @@ def __init__(self, num_samples: Optional[int]): def _register_reduced_input_impl(self, x: TensorType): return self._container.append(x) - def aggregate(self): + def _aggregate_impl(self): return self._container[0] @@ -266,5 +268,48 @@ def test_multiple_branch_reducer(): ref_stats = {"0": NNCFTensor(np.array(0)), "1": NNCFTensor(np.array(1))} stats = collector.get_statistics() assert len(ref_stats) == len(stats) - for key in ref_stats: - assert ref_stats[key] == stats[key] + for key, value in ref_stats.items(): + assert value == stats[key] + + +class TemplateTestStatisticCollector: + @abstractmethod + def get_nncf_tensor_cls(self): + pass + + @pytest.mark.parametrize("inplace", [False, True]) + @pytest.mark.parametrize("any_not_empty", [False, True]) + def test_empty_tensors_register(self, inplace, any_not_empty): + collector = TensorCollector() + reducer = DummyTensorReducer("Dummy", inplace) + aggregator = DummyTensorAggregator(5) + collector.register_statistic_branch("A", reducer, aggregator) + input_name = "input_name" + full_inputs = TensorCollector.get_tensor_collector_inputs( + {input_name: self.get_nncf_tensor_cls()(np.array([100]))}, [(hash(reducer), [input_name])] + ) + empty_inputs = TensorCollector.get_tensor_collector_inputs( + {input_name: self.get_nncf_tensor_cls()(np.array([]))}, [(hash(reducer), [input_name])] + ) + + stats = collector.get_statistics() + assert len(stats) == 1 + assert stats["A"] is None + + inputs = [full_inputs, empty_inputs, full_inputs] if any_not_empty else [empty_inputs, empty_inputs] + for input_ in inputs: + collector.register_inputs(input_) + + if any_not_empty: + assert len(aggregator._container) == 2 + assert aggregator._collected_samples == 2 + stats = collector.get_statistics() + assert len(stats) == 1 + assert stats["A"] == self.get_nncf_tensor_cls()([100]) + return + + assert len(aggregator._container) == 0 + assert aggregator._collected_samples == 0 + stats = collector.get_statistics() + assert len(stats) == 1 + assert stats["A"] is None diff --git a/tests/onnx/README.md b/tests/onnx/README.md index 274fa2b179c..b094b810571 100644 --- a/tests/onnx/README.md +++ b/tests/onnx/README.md @@ -7,9 +7,9 @@ We provide two types of tests. This is a test that the CI server runs for every PR. It consists of unit tests of ONNX features of NNCF. To run the pre-commit test, please execute the following command. ```bash - $ pytest tests/onnx --junitxml nncf-tests.xml + pytest tests/onnx --junitxml nncf-tests.xml # (alias) - $ make test-onnx + make test-onnx ``` 2. E2E test (pytest markers: `e2e_ptq` and `e2e_eval_reference_model`) @@ -17,7 +17,7 @@ We provide two types of tests. This is a test to validate ONNX PTQ API functionality for the models in ONNX Model ZOO. It compares the quantized model accuracy with the references. To run the E2E test, please execute the following command. 
```bash - $ pytest tests/onnx -m e2e_ptq --model-dir (model_dir) --data-dir (data_dir) --output-dir (output_dir) --ckpt-dir (ckpt_dir) --anno-dir (anno_dir) --eval-size (eval_size) --ptq-size (ptq_size) + pytest tests/onnx -m e2e_ptq --model-dir (model_dir) --data-dir (data_dir) --output-dir (output_dir) --ckpt-dir (ckpt_dir) --anno-dir (anno_dir) --eval-size (eval_size) --ptq-size (ptq_size) ``` You should give three arguments to run this test. @@ -30,13 +30,11 @@ We provide two types of tests. 6. (Optional) `--anno-dir`: Directory path for dataset annotations. Please refer to [OpenVINO accuracy checker](https://github.com/openvinotoolkit/open_model_zoo/tree/master/tools/accuracy_checker). 7. (Optional) `--eval-size`: The number of samples for evaluation. 8. (Optional) `--ptq-size`: The number of samples for calibrating quantization parameters. - 9. (Optional) `--enable-ov-ep`: If the parameter is set then the accuracy validation of the quantized models - will be enabled for OpenVINOExecutionProvider. - 10. (Optional) `--disable-cpu-ep`: If the parameter is set then the accuracy validation of the quantized models - will be disabled for CPUExecutionProvider. + 9. (Optional) `--enable-ov-ep`: If the parameter is set then the accuracy validation of the quantized models will be enabled for OpenVINOExecutionProvider. + 10. (Optional) `--disable-cpu-ep`: If the parameter is set then the accuracy validation of the quantized models will be disabled for CPUExecutionProvider. If you want to test the reference (not quantized) model accuracy - try the following command. ```bash - $ pytest tests/onnx -m e2e_eval_reference_model --model-dir (model_dir) --data-dir (data_dir) --output-dir (output_dir) --ckpt-dir (ckpt_dir) --anno-dir (anno_dir) --eval-size (eval_size) --ptq-size (ptq_size) + pytest tests/onnx -m e2e_eval_reference_model \--model-dir (model_dir) --data-dir (data_dir) --output-dir (output_dir) --ckpt-dir (ckpt_dir) --anno-dir (anno_dir) --eval-size (eval_size) --ptq-size (ptq_size) ``` diff --git a/tests/onnx/benchmarking/README.md b/tests/onnx/benchmarking/README.md index e83c61f5cf4..014caa8e5c0 100644 --- a/tests/onnx/benchmarking/README.md +++ b/tests/onnx/benchmarking/README.md @@ -1,4 +1,4 @@ -## Benchmark for ONNX Model Zoo +# Benchmark for ONNX Model Zoo ## Installation @@ -9,7 +9,7 @@ NNCF [here](https://github.com/openvinotoolkit/nncf#user-content-installation). To work with the example you should install the corresponding Python package dependencies: -``` +```bash pip install -r requirements.txt ``` @@ -20,7 +20,7 @@ uses [OpenVINO™ Accuracy Checker](https://github.com/openvinotoolkit/open_mode tool to preprocess data for quantization parameters calibration and for final accuracy validation. The benchmarking supports the following models: -* Classification +- Classification 1. [bvlcalexnet-12](https://github.com/onnx/models/blob/main/vision/classification/alexnet/model/bvlcalexnet-12.onnx) 2. [caffenet-12](https://github.com/onnx/models/blob/main/vision/classification/caffenet/model/caffenet-12.onnx) @@ -37,7 +37,7 @@ The benchmarking supports the following models: 13. [vgg16-12](https://github.com/onnx/models/blob/main/vision/classification/vgg/model/vgg16-12.onnx) 14. [zfnet512-12](https://github.com/onnx/models/blob/main/vision/classification/zfnet-512/model/zfnet512-12.onnx) -* Object detection and segmentation models +- Object detection and segmentation models 1. 
[FasterRCNN-12](https://github.com/onnx/models/blob/main/vision/object_detection_segmentation/faster-rcnn/model/FasterRCNN-12.onnx) 2. [MaskRCNN-12](https://github.com/onnx/models/blob/main/vision/object_detection_segmentation/mask-rcnn/model/MaskRCNN-12.onnx) @@ -60,7 +60,7 @@ and [object_detection_segmentation](./object_detection_segmentation/onnx_models_ ### 1. Prepare dataset -* Classification models +- Classification models Because we use [OpenVINO™ Accuracy Checker](https://github.com/openvinotoolkit/open_model_zoo/tree/master/tools/accuracy_checker) @@ -68,7 +68,7 @@ tool, you should prepare ILSVRC2012 validation dataset by following the [dataset preparation guide](https://github.com/openvinotoolkit/open_model_zoo/blob/2022.1.0/data/datasets.md#imagenet) . After preparation, your dataset directory will be: -``` +```text DATASET_DIR/ +-- ILSVRC2012_img_val/ | +-- ILSVRC2012_val_00000001.JPEG @@ -78,7 +78,7 @@ DATASET_DIR/ +-- val.txt ``` -* Object detection and segmentation models +- Object detection and segmentation models We use [COCO](https://github.com/openvinotoolkit/open_model_zoo/blob/2022.1.0/data/datasets.md#common-objects-in-context-coco) @@ -86,9 +86,9 @@ use [COCO](https://github.com/openvinotoolkit/open_model_zoo/blob/2022.1.0/data/ and [CityScapes](https://github.com/openvinotoolkit/open_model_zoo/blob/cf9003a95ddb742aabea341aa1573c3fa25ebbe1/data/dataset_definitions.yml#L1300-L1307) datasets. Please follow the link to prepare datasets. After preparation, your dataset directory will be: -``` +```text DATASET_DIR/ -+-- annotations/ (COCO annotatios) ++-- annotations/ (COCO annotations) | +-- instances_val2017.json | +-- ... +-- val2017/ (COCO images) @@ -114,26 +114,26 @@ You can run the benchmarking for particular model with the following command: ### Results -1. Classification models +#### 1. Classification models | Model Name | Dataset | FP32 Accuracy (%) | INT8 accuracy (%) | Accuracy Drop (%) | |-------------------------|----------|-------------------|-------------------|-------------------| -| bvlcalexnet-12 | ImageNet | 52.02 | 51.96 | 0.06 | -| caffenet-12 | ImageNet | 54.26 | 54.22 | 0.04 | -| densenet-12 | ImageNet | 60.96 | 60.16 | 0.8 | -| efficientnet-lite4-11 | ImageNet | 77.58 | 77.43 | 0.15 | -| googlenet-12 | ImageNet | 66.67 | 66.36 | 0.31 | -| inception-v1-12 | ImageNet | 65.21 | 64.87 | 0.34 | -| mobilenetv2-12 | ImageNet | 71.87 | 71.38 | 0.49 | -| resnet50-v1-12 | ImageNet | 74.11 | 73.92 | 0.19 | -| resnet50-v2-7 | ImageNet | 74.84 | 74.63 | 0.21 | -| shufflenet-9 | ImageNet | 47.43 | 47.25 | 0.18 | -| shufflenet-v2-12 | ImageNet | 69.36 | 68.93 | 0.43 | -| squeezenet1.0-12 | ImageNet | 54.84 | 54.3 | 0.54 | -| vgg16-12 | ImageNet | 72.02 | 72.02 | 0.0 | -| zfnet512-12 | ImageNet | 58.57 | 58.53 | 0.04 | - -2. 
Object detection and segmentation models +| bvlcalexnet-12 | ImageNet | 52.02 | 51.96 | 0.06 | +| caffenet-12 | ImageNet | 54.26 | 54.22 | 0.04 | +| densenet-12 | ImageNet | 60.96 | 60.16 | 0.8 | +| efficientnet-lite4-11 | ImageNet | 77.58 | 77.43 | 0.15 | +| googlenet-12 | ImageNet | 66.67 | 66.36 | 0.31 | +| inception-v1-12 | ImageNet | 65.21 | 64.87 | 0.34 | +| mobilenetv2-12 | ImageNet | 71.87 | 71.38 | 0.49 | +| resnet50-v1-12 | ImageNet | 74.11 | 73.92 | 0.19 | +| resnet50-v2-7 | ImageNet | 74.84 | 74.63 | 0.21 | +| shufflenet-9 | ImageNet | 47.43 | 47.25 | 0.18 | +| shufflenet-v2-12 | ImageNet | 69.36 | 68.93 | 0.43 | +| squeezenet1.0-12 | ImageNet | 54.84 | 54.3 | 0.54 | +| vgg16-12 | ImageNet | 72.02 | 72.02 | 0.0 | +| zfnet512-12 | ImageNet | 58.57 | 58.53 | 0.04 | + +#### 2. Object detection and segmentation models | Model Name | Dataset | FP32 mAP (%) | INT8 mAP (%) | mAP diff. (%) | |----------------------|-----------|--------------|---------------|---------------| diff --git a/tests/onnx/benchmarking/requirements.txt b/tests/onnx/benchmarking/requirements.txt index 2b8956c971b..1dc7965fa6a 100644 --- a/tests/onnx/benchmarking/requirements.txt +++ b/tests/onnx/benchmarking/requirements.txt @@ -1,2 +1,2 @@ pycocotools -openvino-dev==2023.0.0 +openvino-dev==2023.0.1 diff --git a/tests/onnx/conftest.py b/tests/onnx/conftest.py index 69d080c12f9..161cdce53a9 100644 --- a/tests/onnx/conftest.py +++ b/tests/onnx/conftest.py @@ -1,18 +1,14 @@ -""" -Copyright (c) 2023 Intel Corporation +# Copyright (c) 2023 Intel Corporation +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. 
-""" import os from tests.shared.paths import TEST_ROOT diff --git a/tests/onnx/data/models/bertsquad-12.onnx b/tests/onnx/data/models/bertsquad-12.onnx new file mode 100644 index 00000000000..bb1bdc1027d --- /dev/null +++ b/tests/onnx/data/models/bertsquad-12.onnx @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:38f84909992a38af62841f78f6ef3f0da4e510a97894efb3a5d2b2afb911a4a5 +size 294498 diff --git a/tests/onnx/data/models/gpt2-10.onnx b/tests/onnx/data/models/gpt2-10.onnx new file mode 100644 index 00000000000..da9d2cf0124 --- /dev/null +++ b/tests/onnx/data/models/gpt2-10.onnx @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:2f0bf7c71e7c26d5d658fb239046b5fa8a0c7c80a21a9c01ad105cab8986d780 +size 148238 diff --git a/tests/onnx/data/reference_graphs/original_nncf_graph/synthetic/activation_matmul_model.dot b/tests/onnx/data/reference_graphs/original_nncf_graph/synthetic/activation_matmul_model.dot new file mode 100644 index 00000000000..b4d759b980f --- /dev/null +++ b/tests/onnx/data/reference_graphs/original_nncf_graph/synthetic/activation_matmul_model.dot @@ -0,0 +1,11 @@ +strict digraph { +"0 MatMul" [id=0, type=MatMul]; +"1 Softmax" [id=1, type=Softmax]; +"2 nncf_model_input_0" [id=2, type=nncf_model_input]; +"3 nncf_model_input_1" [id=3, type=nncf_model_input]; +"4 nncf_model_output_0" [id=4, type=nncf_model_output]; +"0 MatMul" -> "1 Softmax" [label="[10, 10]", style=solid]; +"1 Softmax" -> "4 nncf_model_output_0" [label="[10, 10]", style=solid]; +"2 nncf_model_input_0" -> "0 MatMul" [label="[10, 1]", style=solid]; +"3 nncf_model_input_1" -> "0 MatMul" [label="[1, 10]", style=solid]; +} diff --git a/tests/onnx/data/reference_graphs/original_nncf_graph/synthetic/embedding_model.dot b/tests/onnx/data/reference_graphs/original_nncf_graph/synthetic/embedding_model.dot new file mode 100644 index 00000000000..5cf14d30c4f --- /dev/null +++ b/tests/onnx/data/reference_graphs/original_nncf_graph/synthetic/embedding_model.dot @@ -0,0 +1,13 @@ +strict digraph { +"0 Identity" [id=0, type=Identity]; +"1 Embedding" [id=1, type=Gather]; +"2 Gather" [id=2, type=Gather]; +"3 MatMul" [id=3, type=MatMul]; +"4 nncf_model_input_0" [id=4, type=nncf_model_input]; +"5 nncf_model_output_0" [id=5, type=nncf_model_output]; +"0 Identity" -> "1 Embedding" [label="[10, 20]", style=solid]; +"1 Embedding" -> "2 Gather" [label="[1, 10, 20]", style=solid]; +"2 Gather" -> "3 MatMul" [label="[10, 20]", style=solid]; +"3 MatMul" -> "5 nncf_model_output_0" [label="[1, 10]", style=solid]; +"4 nncf_model_input_0" -> "1 Embedding" [label="[1, 10]", style=dashed]; +} diff --git a/tests/onnx/data/reference_graphs/original_nncf_graph/synthetic/gemm_weight_transpose_model.dot b/tests/onnx/data/reference_graphs/original_nncf_graph/synthetic/gemm_weight_transpose_model.dot new file mode 100644 index 00000000000..bc1db31a0af --- /dev/null +++ b/tests/onnx/data/reference_graphs/original_nncf_graph/synthetic/gemm_weight_transpose_model.dot @@ -0,0 +1,11 @@ +strict digraph { +"0 Identity" [id=0, type=Identity]; +"1 Gemm" [id=1, type=Gemm]; +"2 Softmax" [id=2, type=Softmax]; +"3 nncf_model_input_0" [id=3, type=nncf_model_input]; +"4 nncf_model_output_0" [id=4, type=nncf_model_output]; +"0 Identity" -> "1 Gemm" [label="[1, 10]", style=solid]; +"1 Gemm" -> "2 Softmax" [label="[1, 5]", style=solid]; +"2 Softmax" -> "4 nncf_model_output_0" [label="[1, 5]", style=solid]; +"3 nncf_model_input_0" -> "0 Identity" [label="[1, 10]", style=solid]; +} diff --git 
a/tests/onnx/data/reference_graphs/original_nncf_graph/synthetic/unified_embedding_model.dot b/tests/onnx/data/reference_graphs/original_nncf_graph/synthetic/unified_embedding_model.dot new file mode 100644 index 00000000000..d347156730d --- /dev/null +++ b/tests/onnx/data/reference_graphs/original_nncf_graph/synthetic/unified_embedding_model.dot @@ -0,0 +1,17 @@ +strict digraph { +"0 Cast" [id=0, type=Cast]; +"1 Embedding" [id=1, type=Gather]; +"2 MatMul_1" [id=2, type=MatMul]; +"3 Reshape" [id=3, type=Reshape]; +"4 Concat" [id=4, type=Concat]; +"5 MatMul_2" [id=5, type=MatMul]; +"6 nncf_model_input_0" [id=6, type=nncf_model_input]; +"7 nncf_model_output_0" [id=7, type=nncf_model_output]; +"0 Cast" -> "1 Embedding" [label="[1, 3]", style=dashed]; +"1 Embedding" -> "4 Concat" [label="[1, 3, 5]", style=solid]; +"2 MatMul_1" -> "3 Reshape" [label="[3, 1, 5]", style=solid]; +"4 Concat" -> "5 MatMul_2" [label="[]", style=solid]; +"5 MatMul_2" -> "7 nncf_model_output_0" [label="[1, 6]", style=solid]; +"6 nncf_model_input_0" -> "0 Cast" [label="[1, 3]", style=solid]; +"6 nncf_model_input_0" -> "2 MatMul_1" [label="[1, 3]", style=solid]; +} diff --git a/tests/onnx/data/reference_graphs/original_nncf_graph/synthetic/weight_matmul_model.dot b/tests/onnx/data/reference_graphs/original_nncf_graph/synthetic/weight_matmul_model.dot new file mode 100644 index 00000000000..2748ebf71f4 --- /dev/null +++ b/tests/onnx/data/reference_graphs/original_nncf_graph/synthetic/weight_matmul_model.dot @@ -0,0 +1,9 @@ +strict digraph { +"0 MatMul" [id=0, type=MatMul]; +"1 Softmax" [id=1, type=Softmax]; +"2 nncf_model_input_0" [id=2, type=nncf_model_input]; +"3 nncf_model_output_0" [id=3, type=nncf_model_output]; +"0 MatMul" -> "1 Softmax" [label="[1, 5]", style=solid]; +"1 Softmax" -> "3 nncf_model_output_0" [label="[1, 5]", style=solid]; +"2 nncf_model_input_0" -> "0 MatMul" [label="[1, 10]", style=solid]; +} diff --git a/tests/onnx/data/reference_graphs/original_nncf_graph/synthetic/weight_propagation_conv_model.dot b/tests/onnx/data/reference_graphs/original_nncf_graph/synthetic/weight_propagation_conv_model.dot new file mode 100644 index 00000000000..2004cb1a5f0 --- /dev/null +++ b/tests/onnx/data/reference_graphs/original_nncf_graph/synthetic/weight_propagation_conv_model.dot @@ -0,0 +1,25 @@ +strict digraph { +"0 reshape" [id=0, type=Reshape]; +"1 transpose" [id=1, type=Transpose]; +"2 identity1" [id=2, type=Identity]; +"3 conv1" [id=3, type=Conv]; +"4 constant" [id=4, type=Constant]; +"5 reshape2" [id=5, type=Reshape]; +"6 identity2" [id=6, type=Identity]; +"7 conv2" [id=7, type=Conv]; +"8 constant2" [id=8, type=Constant]; +"9 conv4" [id=9, type=Conv]; +"10 nncf_model_input_0" [id=10, type=nncf_model_input]; +"11 nncf_model_output_0" [id=11, type=nncf_model_output]; +"0 reshape" -> "1 transpose" [label="[1, 1, 3, 3]", style=solid]; +"1 transpose" -> "2 identity1" [label="[1, 1, 3, 3]", style=solid]; +"2 identity1" -> "3 conv1" [label="[1, 1, 3, 3]", style=solid]; +"3 conv1" -> "7 conv2" [label="[1, 1, 28, 28]", style=solid]; +"4 constant" -> "5 reshape2" [label="[1, 1, 3, 3]", style=solid]; +"5 reshape2" -> "6 identity2" [label="[1, 1, 3, 3]", style=solid]; +"6 identity2" -> "7 conv2" [label="[1, 1, 3, 3]", style=solid]; +"7 conv2" -> "9 conv4" [label="[1, 1, 28, 28]", style=solid]; +"8 constant2" -> "9 conv4" [label="[1, 1, 3, 3]", style=solid]; +"9 conv4" -> "11 nncf_model_output_0" [label="[1, 1, 28, 28]", style=solid]; +"10 nncf_model_input_0" -> "3 conv1" [label="[1, 1, 28, 28]", style=solid]; +} diff 
--git a/tests/onnx/data/reference_graphs/original_nncf_graph/synthetic/weight_propagation_matmul_model.dot b/tests/onnx/data/reference_graphs/original_nncf_graph/synthetic/weight_propagation_matmul_model.dot new file mode 100644 index 00000000000..35a471e440d --- /dev/null +++ b/tests/onnx/data/reference_graphs/original_nncf_graph/synthetic/weight_propagation_matmul_model.dot @@ -0,0 +1,15 @@ +strict digraph { +"0 Identity_1" [id=0, type=Identity]; +"1 Identity_2" [id=1, type=Identity]; +"2 MatMul_1" [id=2, type=MatMul]; +"3 constant" [id=3, type=Constant]; +"4 MatMul_2" [id=4, type=MatMul]; +"5 nncf_model_input_0" [id=5, type=nncf_model_input]; +"6 nncf_model_output_0" [id=6, type=nncf_model_output]; +"0 Identity_1" -> "1 Identity_2" [label="[10, 5]", style=solid]; +"1 Identity_2" -> "2 MatMul_1" [label="[10, 5]", style=solid]; +"2 MatMul_1" -> "4 MatMul_2" [label="[1, 5]", style=solid]; +"3 constant" -> "4 MatMul_2" [label="[5, 10]", style=solid]; +"4 MatMul_2" -> "6 nncf_model_output_0" [label="[1, 10]", style=solid]; +"5 nncf_model_input_0" -> "2 MatMul_1" [label="[1, 10]", style=solid]; +} diff --git a/tests/onnx/data/reference_graphs/original_nncf_graph/synthetic/weight_sharing_model.dot b/tests/onnx/data/reference_graphs/original_nncf_graph/synthetic/weight_sharing_model.dot index f705c66a3c5..ccaf73538f3 100644 --- a/tests/onnx/data/reference_graphs/original_nncf_graph/synthetic/weight_sharing_model.dot +++ b/tests/onnx/data/reference_graphs/original_nncf_graph/synthetic/weight_sharing_model.dot @@ -7,8 +7,8 @@ strict digraph { "5 nncf_model_output_0" [id=5, type=nncf_model_output]; "0 Relu" -> "1 Conv1" [label="[1, 1, 5, 5]", style=solid]; "0 Relu" -> "2 Conv2" [label="[1, 1, 5, 5]", style=solid]; -"1 Conv1" -> "3 Add" [label="[1, 1, 5, 5]", style=solid]; -"2 Conv2" -> "3 Add" [label="[1, 1, 5, 5]", style=solid]; -"3 Add" -> "5 nncf_model_output_0" [label="[1, 1, 5, 5]", style=solid]; +"1 Conv1" -> "3 Add" [label="[1, 5, 5, 5]", style=solid]; +"2 Conv2" -> "3 Add" [label="[1, 5, 5, 5]", style=solid]; +"3 Add" -> "5 nncf_model_output_0" [label="[1, 5, 5, 5]", style=solid]; "4 nncf_model_input_0" -> "0 Relu" [label="[1, 1, 5, 5]", style=solid]; } diff --git a/tests/onnx/data/reference_graphs/quantization/MaskRCNN-12.dot b/tests/onnx/data/reference_graphs/quantization/MaskRCNN-12.dot index 6e97b94a4a1..0775c236c76 100644 --- a/tests/onnx/data/reference_graphs/quantization/MaskRCNN-12.dot +++ b/tests/onnx/data/reference_graphs/quantization/MaskRCNN-12.dot @@ -1749,2503 +1749,2541 @@ strict digraph { "1747 1172" [id=1747, type=Gather]; "1748 2479" [id=1748, type=Concat]; "1749 2490" [id=1749, type=Gather]; -"1750 2532" [id=1750, type=Slice]; -"1751 2534" [id=1751, type=Gather]; -"1752 2525" [id=1752, type=Slice]; -"1753 2527" [id=1753, type=Gather]; -"1754 2535" [id=1754, type=Sub]; -"1755 2537" [id=1755, type=Add]; -"1756 2515" [id=1756, type=Slice]; -"1757 2517" [id=1757, type=Gather]; -"1758 2508" [id=1758, type=Slice]; -"1759 2510" [id=1759, type=Gather]; -"1760 2518" [id=1760, type=Sub]; -"1761 2520" [id=1761, type=Add]; -"1762 2538" [id=1762, type=Mul]; -"1763 2539" [id=1763, type=Sqrt]; -"1764 2542" [id=1764, type=Div]; -"1765 2543" [id=1765, type=Add]; -"1766 2544" [id=1766, type=Log]; -"1767 2546" [id=1767, type=Div]; -"1768 2548" [id=1768, type=Add]; -"1769 2549" [id=1769, type=Floor]; -"1770 2550" [id=1770, type=Clip]; -"1771 2551" [id=1771, type=Cast]; -"1772 2553" [id=1772, type=Sub]; -"1773 2555" [id=1773, type=Equal]; -"1774 2557" [id=1774, type=Cast]; -"1775 2558" 
[id=1775, type=NonZero]; -"1776 2559" [id=1776, type=Transpose]; -"1777 2560" [id=1777, type=Squeeze]; -"1778 2561" [id=1778, type=Cast]; -"1779 2495" [id=1779, type=Slice]; -"1780 2500" [id=1780, type=Slice]; -"1781 2501" [id=1781, type=Shape]; -"1782 2502" [id=1782, type=ConstantOfShape]; -"1783 2503" [id=1783, type=Concat]; -"1784 2562" [id=1784, type=Gather]; -"1785 2568" [id=1785, type=Gather]; -"1786 2564" [id=1786, type=Gather]; -"1787 2565" [id=1787, type=Squeeze]; -"1788 2566" [id=1788, type=Cast]; -"1789 2569" [id=1789, type=RoiAlign]; -"1790 2570" [id=1790, type=Cast]; -"1791 2658" [id=1791, type=Shape]; -"1792 2659" [id=1792, type=Gather]; -"1793 2663" [id=1793, type=Unsqueeze]; -"1794 2655" [id=1794, type=Shape]; -"1795 2656" [id=1795, type=Gather]; -"1796 2662" [id=1796, type=Unsqueeze]; -"1797 2652" [id=1797, type=Shape]; -"1798 2653" [id=1798, type=Gather]; -"1799 2661" [id=1799, type=Unsqueeze]; -"1800 2641" [id=1800, type=Equal]; -"1801 2643" [id=1801, type=Cast]; -"1802 2644" [id=1802, type=NonZero]; -"1803 2645" [id=1803, type=Transpose]; -"1804 2647" [id=1804, type=Reshape]; -"1805 2649" [id=1805, type=Shape]; -"1806 2650" [id=1806, type=Gather]; -"1807 2660" [id=1807, type=Unsqueeze]; -"1808 2664" [id=1808, type=Concat]; -"1809 2665" [id=1809, type=Expand]; -"1810 2666" [id=1810, type=Cast]; -"1811 2632" [id=1811, type=Shape]; -"1812 2633" [id=1812, type=Gather]; -"1813 2637" [id=1813, type=Unsqueeze]; -"1814 2629" [id=1814, type=Shape]; -"1815 2630" [id=1815, type=Gather]; -"1816 2636" [id=1816, type=Unsqueeze]; -"1817 2626" [id=1817, type=Shape]; -"1818 2627" [id=1818, type=Gather]; -"1819 2635" [id=1819, type=Unsqueeze]; -"1820 2623" [id=1820, type=Shape]; -"1821 2624" [id=1821, type=Gather]; -"1822 2634" [id=1822, type=Unsqueeze]; -"1823 2638" [id=1823, type=Concat]; -"1824 2639" [id=1824, type=ConstantOfShape]; -"1825 2667" [id=1825, type=ScatterElements]; -"1826 2572" [id=1826, type=Equal]; -"1827 2574" [id=1827, type=Cast]; -"1828 2575" [id=1828, type=NonZero]; -"1829 2576" [id=1829, type=Transpose]; -"1830 2577" [id=1830, type=Squeeze]; -"1831 2578" [id=1831, type=Cast]; -"1832 2579" [id=1832, type=Gather]; -"1833 2585" [id=1833, type=Gather]; -"1834 2581" [id=1834, type=Gather]; -"1835 2582" [id=1835, type=Squeeze]; -"1836 2583" [id=1836, type=Cast]; -"1837 2586" [id=1837, type=RoiAlign]; -"1838 2587" [id=1838, type=Cast]; -"1839 2686" [id=1839, type=Shape]; -"1840 2687" [id=1840, type=Gather]; -"1841 2691" [id=1841, type=Unsqueeze]; -"1842 2683" [id=1842, type=Shape]; -"1843 2684" [id=1843, type=Gather]; -"1844 2690" [id=1844, type=Unsqueeze]; -"1845 2680" [id=1845, type=Shape]; -"1846 2681" [id=1846, type=Gather]; -"1847 2689" [id=1847, type=Unsqueeze]; -"1848 2669" [id=1848, type=Equal]; -"1849 2671" [id=1849, type=Cast]; -"1850 2672" [id=1850, type=NonZero]; -"1851 2673" [id=1851, type=Transpose]; -"1852 2675" [id=1852, type=Reshape]; -"1853 2677" [id=1853, type=Shape]; -"1854 2678" [id=1854, type=Gather]; -"1855 2688" [id=1855, type=Unsqueeze]; -"1856 2692" [id=1856, type=Concat]; -"1857 2693" [id=1857, type=Expand]; -"1858 2694" [id=1858, type=Cast]; -"1859 2695" [id=1859, type=ScatterElements]; -"1860 2589" [id=1860, type=Equal]; -"1861 2591" [id=1861, type=Cast]; -"1862 2592" [id=1862, type=NonZero]; -"1863 2593" [id=1863, type=Transpose]; -"1864 2594" [id=1864, type=Squeeze]; -"1865 2595" [id=1865, type=Cast]; -"1866 2596" [id=1866, type=Gather]; -"1867 2602" [id=1867, type=Gather]; -"1868 2598" [id=1868, type=Gather]; -"1869 2599" [id=1869, 
type=Squeeze]; -"1870 2600" [id=1870, type=Cast]; -"1871 2603" [id=1871, type=RoiAlign]; -"1872 2604" [id=1872, type=Cast]; -"1873 2714" [id=1873, type=Shape]; -"1874 2715" [id=1874, type=Gather]; -"1875 2719" [id=1875, type=Unsqueeze]; -"1876 2711" [id=1876, type=Shape]; -"1877 2712" [id=1877, type=Gather]; -"1878 2718" [id=1878, type=Unsqueeze]; -"1879 2708" [id=1879, type=Shape]; -"1880 2709" [id=1880, type=Gather]; -"1881 2717" [id=1881, type=Unsqueeze]; -"1882 2697" [id=1882, type=Equal]; -"1883 2699" [id=1883, type=Cast]; -"1884 2700" [id=1884, type=NonZero]; -"1885 2701" [id=1885, type=Transpose]; -"1886 2703" [id=1886, type=Reshape]; -"1887 2705" [id=1887, type=Shape]; -"1888 2706" [id=1888, type=Gather]; -"1889 2716" [id=1889, type=Unsqueeze]; -"1890 2720" [id=1890, type=Concat]; -"1891 2721" [id=1891, type=Expand]; -"1892 2722" [id=1892, type=Cast]; -"1893 2723" [id=1893, type=ScatterElements]; -"1894 2606" [id=1894, type=Equal]; -"1895 2608" [id=1895, type=Cast]; -"1896 2609" [id=1896, type=NonZero]; -"1897 2610" [id=1897, type=Transpose]; -"1898 2611" [id=1898, type=Squeeze]; -"1899 2612" [id=1899, type=Cast]; -"1900 2613" [id=1900, type=Gather]; -"1901 2619" [id=1901, type=Gather]; -"1902 2615" [id=1902, type=Gather]; -"1903 2616" [id=1903, type=Squeeze]; -"1904 2617" [id=1904, type=Cast]; -"1905 2620" [id=1905, type=RoiAlign]; -"1906 2621" [id=1906, type=Cast]; -"1907 2742" [id=1907, type=Shape]; -"1908 2743" [id=1908, type=Gather]; -"1909 2747" [id=1909, type=Unsqueeze]; -"1910 2739" [id=1910, type=Shape]; -"1911 2740" [id=1911, type=Gather]; -"1912 2746" [id=1912, type=Unsqueeze]; -"1913 2736" [id=1913, type=Shape]; -"1914 2737" [id=1914, type=Gather]; -"1915 2745" [id=1915, type=Unsqueeze]; -"1916 2725" [id=1916, type=Equal]; -"1917 2727" [id=1917, type=Cast]; -"1918 2728" [id=1918, type=NonZero]; -"1919 2729" [id=1919, type=Transpose]; -"1920 2731" [id=1920, type=Reshape]; -"1921 2733" [id=1921, type=Shape]; -"1922 2734" [id=1922, type=Gather]; -"1923 2744" [id=1923, type=Unsqueeze]; -"1924 2748" [id=1924, type=Concat]; -"1925 2749" [id=1925, type=Expand]; -"1926 2750" [id=1926, type=Cast]; -"1927 2751" [id=1927, type=ScatterElements]; -"1928 2757" [id=1928, type=Unsqueeze]; -"1929 2753" [id=1929, type=Shape]; -"1930 2754" [id=1930, type=Gather]; -"1931 2756" [id=1931, type=Unsqueeze]; -"1932 2758" [id=1932, type=Concat]; -"1933 2759" [id=1933, type=Reshape]; -"1934 2762_MatMul" [id=1934, type=MatMul]; -"1935 2762_Add" [id=1935, type=Add]; -"1936 2763" [id=1936, type=Relu]; -"1937 2766_MatMul" [id=1937, type=MatMul]; -"1938 2766_Add" [id=1938, type=Add]; -"1939 2767" [id=1939, type=Relu]; -"1940 2770_MatMul" [id=1940, type=MatMul]; -"1941 2770_Add" [id=1941, type=Add]; -"1942 2774" [id=1942, type=Softmax]; -"1943 2950" [id=1943, type=Shape]; -"1944 2951" [id=1944, type=Gather]; -"1945 2992" [id=1945, type=Unsqueeze]; -"1946 2991" [id=1946, type=Unsqueeze]; -"1947 2993" [id=1947, type=Concat]; -"1948 2955" [id=1948, type=Reshape]; -"1949 2994" [id=1949, type=Reshape]; -"1950 2996" [id=1950, type=Greater]; -"1951 2997" [id=1951, type=Cast]; -"1952 6478" [id=1952, type=Slice]; -"1953 6480" [id=1953, type=Gather]; -"1954 6481" [id=1954, type=Cast]; -"1955 6482" [id=1955, type=NonZero]; -"1956 6483" [id=1956, type=Transpose]; -"1957 6484" [id=1957, type=Squeeze]; -"1958 6487" [id=1958, type=Cast]; -"1959 6486" [id=1959, type=Gather]; -"1960 6488" [id=1960, type=Gather]; -"1961 6497" [id=1961, type=Unsqueeze]; -"1962 6498" [id=1962, type=Unsqueeze]; -"1963 2984" [id=1963, 
type=Mul]; -"1964 2987" [id=1964, type=Unsqueeze]; -"1965 2986" [id=1965, type=Unsqueeze]; -"1966 2988" [id=1966, type=Concat]; -"1967 2773_MatMul" [id=1967, type=MatMul]; -"1968 2773_Add" [id=1968, type=Add]; -"1969 2776" [id=1969, type=Flatten]; -"1970 2947" [id=1970, type=Shape]; -"1971 2775" [id=1971, type=Concat]; -"1972 2777" [id=1972, type=Cast]; -"1973 2806" [id=1973, type=Slice]; -"1974 2808" [id=1974, type=Gather]; -"1975 2799" [id=1975, type=Slice]; -"1976 2801" [id=1976, type=Gather]; -"1977 2809" [id=1977, type=Sub]; -"1978 2811" [id=1978, type=Add]; -"1979 2923" [id=1979, type=Slice]; -"1980 2924" [id=1980, type=Unsqueeze]; -"1981 2872" [id=1981, type=Slice]; -"1982 2877" [id=1982, type=Slice]; -"1983 2879" [id=1983, type=Div]; -"1984 2881" [id=1984, type=Clip]; -"1985 2918" [id=1985, type=Exp]; -"1986 2925" [id=1986, type=Mul]; -"1987 2938" [id=1987, type=Mul]; -"1988 2830" [id=1988, type=Mul]; -"1989 2826" [id=1989, type=Slice]; -"1990 2828" [id=1990, type=Gather]; -"1991 2831" [id=1991, type=Add]; -"1992 2907" [id=1992, type=Slice]; -"1993 2908" [id=1993, type=Unsqueeze]; -"1994 2900" [id=1994, type=Slice]; -"1995 2901" [id=1995, type=Unsqueeze]; -"1996 2848" [id=1996, type=Slice]; -"1997 2853" [id=1997, type=Slice]; -"1998 2855" [id=1998, type=Div]; -"1999 2902" [id=1999, type=Mul]; -"2000 2909" [id=2000, type=Add]; -"2001 2939" [id=2001, type=Add]; -"2002 2941" [id=2002, type=Sub]; -"2003 2945" [id=2003, type=Unsqueeze]; -"2004 2789" [id=2004, type=Slice]; -"2005 2791" [id=2005, type=Gather]; -"2006 2782" [id=2006, type=Slice]; -"2007 2784" [id=2007, type=Gather]; -"2008 2792" [id=2008, type=Sub]; -"2009 2794" [id=2009, type=Add]; -"2010 2915" [id=2010, type=Slice]; -"2011 2916" [id=2011, type=Unsqueeze]; -"2012 2860" [id=2012, type=Slice]; -"2013 2865" [id=2013, type=Slice]; -"2014 2867" [id=2014, type=Div]; -"2015 2880" [id=2015, type=Clip]; -"2016 2910" [id=2016, type=Exp]; -"2017 2917" [id=2017, type=Mul]; -"2018 2933" [id=2018, type=Mul]; -"2019 2820" [id=2019, type=Mul]; -"2020 2816" [id=2020, type=Slice]; -"2021 2818" [id=2021, type=Gather]; -"2022 2821" [id=2022, type=Add]; -"2023 2893" [id=2023, type=Slice]; -"2024 2894" [id=2024, type=Unsqueeze]; -"2025 2886" [id=2025, type=Slice]; -"2026 2887" [id=2026, type=Unsqueeze]; -"2027 2836" [id=2027, type=Slice]; -"2028 2841" [id=2028, type=Slice]; -"2029 2843" [id=2029, type=Div]; -"2030 2888" [id=2030, type=Mul]; -"2031 2895" [id=2031, type=Add]; -"2032 2934" [id=2032, type=Add]; -"2033 2936" [id=2033, type=Sub]; -"2034 2944" [id=2034, type=Unsqueeze]; -"2035 2930" [id=2035, type=Mul]; -"2036 2931" [id=2036, type=Sub]; -"2037 2943" [id=2037, type=Unsqueeze]; -"2038 2927" [id=2038, type=Mul]; -"2039 2928" [id=2039, type=Sub]; -"2040 2942" [id=2040, type=Unsqueeze]; -"2041 2946" [id=2041, type=Concat]; -"2042 2948" [id=2042, type=Reshape]; -"2043 2953" [id=2043, type=Reshape]; -"2044 2971" [id=2044, type=Slice]; -"2045 2976" [id=2045, type=Slice]; -"2046 2977" [id=2046, type=Clip]; -"2047 2979" [id=2047, type=Unsqueeze]; -"2048 2960" [id=2048, type=Slice]; -"2049 2965" [id=2049, type=Slice]; -"2050 2966" [id=2050, type=Clip]; -"2051 2978" [id=2051, type=Unsqueeze]; -"2052 2980" [id=2052, type=Concat]; -"2053 2982" [id=2053, type=Reshape]; -"2054 2989" [id=2054, type=Reshape]; -"2055 6493" [id=2055, type=Slice]; -"2056 6495" [id=2056, type=Gather]; -"2057 6496" [id=2057, type=Unsqueeze]; -"2058 6501" [id=2058, type=NonMaxSuppression]; -"2059 6503" [id=2059, type=Gather]; -"2060 6504" [id=2060, type=Squeeze]; -"2061 
6508" [id=2061, type=Gather]; -"2062 6434" [id=2062, type=Slice]; -"2063 6436" [id=2063, type=Gather]; -"2064 6437" [id=2064, type=Cast]; -"2065 6438" [id=2065, type=NonZero]; -"2066 6439" [id=2066, type=Transpose]; -"2067 6440" [id=2067, type=Squeeze]; -"2068 6443" [id=2068, type=Cast]; -"2069 6442" [id=2069, type=Gather]; -"2070 6444" [id=2070, type=Gather]; -"2071 6453" [id=2071, type=Unsqueeze]; -"2072 6454" [id=2072, type=Unsqueeze]; -"2073 6449" [id=2073, type=Slice]; -"2074 6451" [id=2074, type=Gather]; -"2075 6452" [id=2075, type=Unsqueeze]; -"2076 6457" [id=2076, type=NonMaxSuppression]; -"2077 6459" [id=2077, type=Gather]; -"2078 6460" [id=2078, type=Squeeze]; -"2079 6464" [id=2079, type=Gather]; -"2080 6390" [id=2080, type=Slice]; -"2081 6392" [id=2081, type=Gather]; -"2082 6393" [id=2082, type=Cast]; -"2083 6394" [id=2083, type=NonZero]; -"2084 6395" [id=2084, type=Transpose]; -"2085 6396" [id=2085, type=Squeeze]; -"2086 6399" [id=2086, type=Cast]; -"2087 6398" [id=2087, type=Gather]; -"2088 6400" [id=2088, type=Gather]; -"2089 6409" [id=2089, type=Unsqueeze]; -"2090 6410" [id=2090, type=Unsqueeze]; -"2091 6405" [id=2091, type=Slice]; -"2092 6407" [id=2092, type=Gather]; -"2093 6408" [id=2093, type=Unsqueeze]; -"2094 6413" [id=2094, type=NonMaxSuppression]; -"2095 6415" [id=2095, type=Gather]; -"2096 6416" [id=2096, type=Squeeze]; -"2097 6420" [id=2097, type=Gather]; -"2098 6346" [id=2098, type=Slice]; -"2099 6348" [id=2099, type=Gather]; -"2100 6349" [id=2100, type=Cast]; -"2101 6350" [id=2101, type=NonZero]; -"2102 6351" [id=2102, type=Transpose]; -"2103 6352" [id=2103, type=Squeeze]; -"2104 6355" [id=2104, type=Cast]; -"2105 6354" [id=2105, type=Gather]; -"2106 6356" [id=2106, type=Gather]; -"2107 6365" [id=2107, type=Unsqueeze]; -"2108 6366" [id=2108, type=Unsqueeze]; -"2109 6361" [id=2109, type=Slice]; -"2110 6363" [id=2110, type=Gather]; -"2111 6364" [id=2111, type=Unsqueeze]; -"2112 6369" [id=2112, type=NonMaxSuppression]; -"2113 6371" [id=2113, type=Gather]; -"2114 6372" [id=2114, type=Squeeze]; -"2115 6376" [id=2115, type=Gather]; -"2116 6302" [id=2116, type=Slice]; -"2117 6304" [id=2117, type=Gather]; -"2118 6305" [id=2118, type=Cast]; -"2119 6306" [id=2119, type=NonZero]; -"2120 6307" [id=2120, type=Transpose]; -"2121 6308" [id=2121, type=Squeeze]; -"2122 6311" [id=2122, type=Cast]; -"2123 6310" [id=2123, type=Gather]; -"2124 6312" [id=2124, type=Gather]; -"2125 6321" [id=2125, type=Unsqueeze]; -"2126 6322" [id=2126, type=Unsqueeze]; -"2127 6317" [id=2127, type=Slice]; -"2128 6319" [id=2128, type=Gather]; -"2129 6320" [id=2129, type=Unsqueeze]; -"2130 6325" [id=2130, type=NonMaxSuppression]; -"2131 6327" [id=2131, type=Gather]; -"2132 6328" [id=2132, type=Squeeze]; -"2133 6332" [id=2133, type=Gather]; -"2134 6258" [id=2134, type=Slice]; -"2135 6260" [id=2135, type=Gather]; -"2136 6261" [id=2136, type=Cast]; -"2137 6262" [id=2137, type=NonZero]; -"2138 6263" [id=2138, type=Transpose]; -"2139 6264" [id=2139, type=Squeeze]; -"2140 6267" [id=2140, type=Cast]; -"2141 6266" [id=2141, type=Gather]; -"2142 6268" [id=2142, type=Gather]; -"2143 6277" [id=2143, type=Unsqueeze]; -"2144 6278" [id=2144, type=Unsqueeze]; -"2145 6273" [id=2145, type=Slice]; -"2146 6275" [id=2146, type=Gather]; -"2147 6276" [id=2147, type=Unsqueeze]; -"2148 6281" [id=2148, type=NonMaxSuppression]; -"2149 6283" [id=2149, type=Gather]; -"2150 6284" [id=2150, type=Squeeze]; -"2151 6288" [id=2151, type=Gather]; -"2152 6214" [id=2152, type=Slice]; -"2153 6216" [id=2153, type=Gather]; -"2154 6217" 
[id=2154, type=Cast]; -"2155 6218" [id=2155, type=NonZero]; -"2156 6219" [id=2156, type=Transpose]; -"2157 6220" [id=2157, type=Squeeze]; -"2158 6223" [id=2158, type=Cast]; -"2159 6222" [id=2159, type=Gather]; -"2160 6224" [id=2160, type=Gather]; -"2161 6233" [id=2161, type=Unsqueeze]; -"2162 6234" [id=2162, type=Unsqueeze]; -"2163 6229" [id=2163, type=Slice]; -"2164 6231" [id=2164, type=Gather]; -"2165 6232" [id=2165, type=Unsqueeze]; -"2166 6237" [id=2166, type=NonMaxSuppression]; -"2167 6239" [id=2167, type=Gather]; -"2168 6240" [id=2168, type=Squeeze]; -"2169 6244" [id=2169, type=Gather]; -"2170 6170" [id=2170, type=Slice]; -"2171 6172" [id=2171, type=Gather]; -"2172 6173" [id=2172, type=Cast]; -"2173 6174" [id=2173, type=NonZero]; -"2174 6175" [id=2174, type=Transpose]; -"2175 6176" [id=2175, type=Squeeze]; -"2176 6179" [id=2176, type=Cast]; -"2177 6178" [id=2177, type=Gather]; -"2178 6180" [id=2178, type=Gather]; -"2179 6189" [id=2179, type=Unsqueeze]; -"2180 6190" [id=2180, type=Unsqueeze]; -"2181 6185" [id=2181, type=Slice]; -"2182 6187" [id=2182, type=Gather]; -"2183 6188" [id=2183, type=Unsqueeze]; -"2184 6193" [id=2184, type=NonMaxSuppression]; -"2185 6195" [id=2185, type=Gather]; -"2186 6196" [id=2186, type=Squeeze]; -"2187 6200" [id=2187, type=Gather]; -"2188 6126" [id=2188, type=Slice]; -"2189 6128" [id=2189, type=Gather]; -"2190 6129" [id=2190, type=Cast]; -"2191 6130" [id=2191, type=NonZero]; -"2192 6131" [id=2192, type=Transpose]; -"2193 6132" [id=2193, type=Squeeze]; -"2194 6135" [id=2194, type=Cast]; -"2195 6134" [id=2195, type=Gather]; -"2196 6136" [id=2196, type=Gather]; -"2197 6145" [id=2197, type=Unsqueeze]; -"2198 6146" [id=2198, type=Unsqueeze]; -"2199 6141" [id=2199, type=Slice]; -"2200 6143" [id=2200, type=Gather]; -"2201 6144" [id=2201, type=Unsqueeze]; -"2202 6149" [id=2202, type=NonMaxSuppression]; -"2203 6151" [id=2203, type=Gather]; -"2204 6152" [id=2204, type=Squeeze]; -"2205 6156" [id=2205, type=Gather]; -"2206 6082" [id=2206, type=Slice]; -"2207 6084" [id=2207, type=Gather]; -"2208 6085" [id=2208, type=Cast]; -"2209 6086" [id=2209, type=NonZero]; -"2210 6087" [id=2210, type=Transpose]; -"2211 6088" [id=2211, type=Squeeze]; -"2212 6091" [id=2212, type=Cast]; -"2213 6090" [id=2213, type=Gather]; -"2214 6092" [id=2214, type=Gather]; -"2215 6101" [id=2215, type=Unsqueeze]; -"2216 6102" [id=2216, type=Unsqueeze]; -"2217 6097" [id=2217, type=Slice]; -"2218 6099" [id=2218, type=Gather]; -"2219 6100" [id=2219, type=Unsqueeze]; -"2220 6105" [id=2220, type=NonMaxSuppression]; -"2221 6107" [id=2221, type=Gather]; -"2222 6108" [id=2222, type=Squeeze]; -"2223 6112" [id=2223, type=Gather]; -"2224 6038" [id=2224, type=Slice]; -"2225 6040" [id=2225, type=Gather]; -"2226 6041" [id=2226, type=Cast]; -"2227 6042" [id=2227, type=NonZero]; -"2228 6043" [id=2228, type=Transpose]; -"2229 6044" [id=2229, type=Squeeze]; -"2230 6047" [id=2230, type=Cast]; -"2231 6046" [id=2231, type=Gather]; -"2232 6048" [id=2232, type=Gather]; -"2233 6057" [id=2233, type=Unsqueeze]; -"2234 6058" [id=2234, type=Unsqueeze]; -"2235 6053" [id=2235, type=Slice]; -"2236 6055" [id=2236, type=Gather]; -"2237 6056" [id=2237, type=Unsqueeze]; -"2238 6061" [id=2238, type=NonMaxSuppression]; -"2239 6063" [id=2239, type=Gather]; -"2240 6064" [id=2240, type=Squeeze]; -"2241 6068" [id=2241, type=Gather]; -"2242 5994" [id=2242, type=Slice]; -"2243 5996" [id=2243, type=Gather]; -"2244 5997" [id=2244, type=Cast]; -"2245 5998" [id=2245, type=NonZero]; -"2246 5999" [id=2246, type=Transpose]; -"2247 6000" [id=2247, 
type=Squeeze]; -"2248 6003" [id=2248, type=Cast]; -"2249 6002" [id=2249, type=Gather]; -"2250 6004" [id=2250, type=Gather]; -"2251 6013" [id=2251, type=Unsqueeze]; -"2252 6014" [id=2252, type=Unsqueeze]; -"2253 6009" [id=2253, type=Slice]; -"2254 6011" [id=2254, type=Gather]; -"2255 6012" [id=2255, type=Unsqueeze]; -"2256 6017" [id=2256, type=NonMaxSuppression]; -"2257 6019" [id=2257, type=Gather]; -"2258 6020" [id=2258, type=Squeeze]; -"2259 6024" [id=2259, type=Gather]; -"2260 5950" [id=2260, type=Slice]; -"2261 5952" [id=2261, type=Gather]; -"2262 5953" [id=2262, type=Cast]; -"2263 5954" [id=2263, type=NonZero]; -"2264 5955" [id=2264, type=Transpose]; -"2265 5956" [id=2265, type=Squeeze]; -"2266 5959" [id=2266, type=Cast]; -"2267 5958" [id=2267, type=Gather]; -"2268 5960" [id=2268, type=Gather]; -"2269 5969" [id=2269, type=Unsqueeze]; -"2270 5970" [id=2270, type=Unsqueeze]; -"2271 5965" [id=2271, type=Slice]; -"2272 5967" [id=2272, type=Gather]; -"2273 5968" [id=2273, type=Unsqueeze]; -"2274 5973" [id=2274, type=NonMaxSuppression]; -"2275 5975" [id=2275, type=Gather]; -"2276 5976" [id=2276, type=Squeeze]; -"2277 5980" [id=2277, type=Gather]; -"2278 5906" [id=2278, type=Slice]; -"2279 5908" [id=2279, type=Gather]; -"2280 5909" [id=2280, type=Cast]; -"2281 5910" [id=2281, type=NonZero]; -"2282 5911" [id=2282, type=Transpose]; -"2283 5912" [id=2283, type=Squeeze]; -"2284 5915" [id=2284, type=Cast]; -"2285 5914" [id=2285, type=Gather]; -"2286 5916" [id=2286, type=Gather]; -"2287 5925" [id=2287, type=Unsqueeze]; -"2288 5926" [id=2288, type=Unsqueeze]; -"2289 5921" [id=2289, type=Slice]; -"2290 5923" [id=2290, type=Gather]; -"2291 5924" [id=2291, type=Unsqueeze]; -"2292 5929" [id=2292, type=NonMaxSuppression]; -"2293 5931" [id=2293, type=Gather]; -"2294 5932" [id=2294, type=Squeeze]; -"2295 5936" [id=2295, type=Gather]; -"2296 5862" [id=2296, type=Slice]; -"2297 5864" [id=2297, type=Gather]; -"2298 5865" [id=2298, type=Cast]; -"2299 5866" [id=2299, type=NonZero]; -"2300 5867" [id=2300, type=Transpose]; -"2301 5868" [id=2301, type=Squeeze]; -"2302 5871" [id=2302, type=Cast]; -"2303 5870" [id=2303, type=Gather]; -"2304 5872" [id=2304, type=Gather]; -"2305 5881" [id=2305, type=Unsqueeze]; -"2306 5882" [id=2306, type=Unsqueeze]; -"2307 5877" [id=2307, type=Slice]; -"2308 5879" [id=2308, type=Gather]; -"2309 5880" [id=2309, type=Unsqueeze]; -"2310 5885" [id=2310, type=NonMaxSuppression]; -"2311 5887" [id=2311, type=Gather]; -"2312 5888" [id=2312, type=Squeeze]; -"2313 5892" [id=2313, type=Gather]; -"2314 5818" [id=2314, type=Slice]; -"2315 5820" [id=2315, type=Gather]; -"2316 5821" [id=2316, type=Cast]; -"2317 5822" [id=2317, type=NonZero]; -"2318 5823" [id=2318, type=Transpose]; -"2319 5824" [id=2319, type=Squeeze]; -"2320 5827" [id=2320, type=Cast]; -"2321 5826" [id=2321, type=Gather]; -"2322 5828" [id=2322, type=Gather]; -"2323 5837" [id=2323, type=Unsqueeze]; -"2324 5838" [id=2324, type=Unsqueeze]; -"2325 5833" [id=2325, type=Slice]; -"2326 5835" [id=2326, type=Gather]; -"2327 5836" [id=2327, type=Unsqueeze]; -"2328 5841" [id=2328, type=NonMaxSuppression]; -"2329 5843" [id=2329, type=Gather]; -"2330 5844" [id=2330, type=Squeeze]; -"2331 5848" [id=2331, type=Gather]; -"2332 5774" [id=2332, type=Slice]; -"2333 5776" [id=2333, type=Gather]; -"2334 5777" [id=2334, type=Cast]; -"2335 5778" [id=2335, type=NonZero]; -"2336 5779" [id=2336, type=Transpose]; -"2337 5780" [id=2337, type=Squeeze]; -"2338 5783" [id=2338, type=Cast]; -"2339 5782" [id=2339, type=Gather]; -"2340 5784" [id=2340, type=Gather]; 
-"2341 5793" [id=2341, type=Unsqueeze]; -"2342 5794" [id=2342, type=Unsqueeze]; -"2343 5789" [id=2343, type=Slice]; -"2344 5791" [id=2344, type=Gather]; -"2345 5792" [id=2345, type=Unsqueeze]; -"2346 5797" [id=2346, type=NonMaxSuppression]; -"2347 5799" [id=2347, type=Gather]; -"2348 5800" [id=2348, type=Squeeze]; -"2349 5804" [id=2349, type=Gather]; -"2350 5730" [id=2350, type=Slice]; -"2351 5732" [id=2351, type=Gather]; -"2352 5733" [id=2352, type=Cast]; -"2353 5734" [id=2353, type=NonZero]; -"2354 5735" [id=2354, type=Transpose]; -"2355 5736" [id=2355, type=Squeeze]; -"2356 5739" [id=2356, type=Cast]; -"2357 5738" [id=2357, type=Gather]; -"2358 5740" [id=2358, type=Gather]; -"2359 5749" [id=2359, type=Unsqueeze]; -"2360 5750" [id=2360, type=Unsqueeze]; -"2361 5745" [id=2361, type=Slice]; -"2362 5747" [id=2362, type=Gather]; -"2363 5748" [id=2363, type=Unsqueeze]; -"2364 5753" [id=2364, type=NonMaxSuppression]; -"2365 5755" [id=2365, type=Gather]; -"2366 5756" [id=2366, type=Squeeze]; -"2367 5760" [id=2367, type=Gather]; -"2368 5686" [id=2368, type=Slice]; -"2369 5688" [id=2369, type=Gather]; -"2370 5689" [id=2370, type=Cast]; -"2371 5690" [id=2371, type=NonZero]; -"2372 5691" [id=2372, type=Transpose]; -"2373 5692" [id=2373, type=Squeeze]; -"2374 5695" [id=2374, type=Cast]; -"2375 5694" [id=2375, type=Gather]; -"2376 5696" [id=2376, type=Gather]; -"2377 5705" [id=2377, type=Unsqueeze]; -"2378 5706" [id=2378, type=Unsqueeze]; -"2379 5701" [id=2379, type=Slice]; -"2380 5703" [id=2380, type=Gather]; -"2381 5704" [id=2381, type=Unsqueeze]; -"2382 5709" [id=2382, type=NonMaxSuppression]; -"2383 5711" [id=2383, type=Gather]; -"2384 5712" [id=2384, type=Squeeze]; -"2385 5716" [id=2385, type=Gather]; -"2386 5642" [id=2386, type=Slice]; -"2387 5644" [id=2387, type=Gather]; -"2388 5645" [id=2388, type=Cast]; -"2389 5646" [id=2389, type=NonZero]; -"2390 5647" [id=2390, type=Transpose]; -"2391 5648" [id=2391, type=Squeeze]; -"2392 5651" [id=2392, type=Cast]; -"2393 5650" [id=2393, type=Gather]; -"2394 5652" [id=2394, type=Gather]; -"2395 5661" [id=2395, type=Unsqueeze]; -"2396 5662" [id=2396, type=Unsqueeze]; -"2397 5657" [id=2397, type=Slice]; -"2398 5659" [id=2398, type=Gather]; -"2399 5660" [id=2399, type=Unsqueeze]; -"2400 5665" [id=2400, type=NonMaxSuppression]; -"2401 5667" [id=2401, type=Gather]; -"2402 5668" [id=2402, type=Squeeze]; -"2403 5672" [id=2403, type=Gather]; -"2404 5598" [id=2404, type=Slice]; -"2405 5600" [id=2405, type=Gather]; -"2406 5601" [id=2406, type=Cast]; -"2407 5602" [id=2407, type=NonZero]; -"2408 5603" [id=2408, type=Transpose]; -"2409 5604" [id=2409, type=Squeeze]; -"2410 5607" [id=2410, type=Cast]; -"2411 5606" [id=2411, type=Gather]; -"2412 5608" [id=2412, type=Gather]; -"2413 5617" [id=2413, type=Unsqueeze]; -"2414 5618" [id=2414, type=Unsqueeze]; -"2415 5613" [id=2415, type=Slice]; -"2416 5615" [id=2416, type=Gather]; -"2417 5616" [id=2417, type=Unsqueeze]; -"2418 5621" [id=2418, type=NonMaxSuppression]; -"2419 5623" [id=2419, type=Gather]; -"2420 5624" [id=2420, type=Squeeze]; -"2421 5628" [id=2421, type=Gather]; -"2422 5554" [id=2422, type=Slice]; -"2423 5556" [id=2423, type=Gather]; -"2424 5557" [id=2424, type=Cast]; -"2425 5558" [id=2425, type=NonZero]; -"2426 5559" [id=2426, type=Transpose]; -"2427 5560" [id=2427, type=Squeeze]; -"2428 5563" [id=2428, type=Cast]; -"2429 5562" [id=2429, type=Gather]; -"2430 5564" [id=2430, type=Gather]; -"2431 5573" [id=2431, type=Unsqueeze]; -"2432 5574" [id=2432, type=Unsqueeze]; -"2433 5569" [id=2433, type=Slice]; -"2434 
5571" [id=2434, type=Gather]; -"2435 5572" [id=2435, type=Unsqueeze]; -"2436 5577" [id=2436, type=NonMaxSuppression]; -"2437 5579" [id=2437, type=Gather]; -"2438 5580" [id=2438, type=Squeeze]; -"2439 5584" [id=2439, type=Gather]; -"2440 5510" [id=2440, type=Slice]; -"2441 5512" [id=2441, type=Gather]; -"2442 5513" [id=2442, type=Cast]; -"2443 5514" [id=2443, type=NonZero]; -"2444 5515" [id=2444, type=Transpose]; -"2445 5516" [id=2445, type=Squeeze]; -"2446 5519" [id=2446, type=Cast]; -"2447 5518" [id=2447, type=Gather]; -"2448 5520" [id=2448, type=Gather]; -"2449 5529" [id=2449, type=Unsqueeze]; -"2450 5530" [id=2450, type=Unsqueeze]; -"2451 5525" [id=2451, type=Slice]; -"2452 5527" [id=2452, type=Gather]; -"2453 5528" [id=2453, type=Unsqueeze]; -"2454 5533" [id=2454, type=NonMaxSuppression]; -"2455 5535" [id=2455, type=Gather]; -"2456 5536" [id=2456, type=Squeeze]; -"2457 5540" [id=2457, type=Gather]; -"2458 5466" [id=2458, type=Slice]; -"2459 5468" [id=2459, type=Gather]; -"2460 5469" [id=2460, type=Cast]; -"2461 5470" [id=2461, type=NonZero]; -"2462 5471" [id=2462, type=Transpose]; -"2463 5472" [id=2463, type=Squeeze]; -"2464 5475" [id=2464, type=Cast]; -"2465 5474" [id=2465, type=Gather]; -"2466 5476" [id=2466, type=Gather]; -"2467 5485" [id=2467, type=Unsqueeze]; -"2468 5486" [id=2468, type=Unsqueeze]; -"2469 5481" [id=2469, type=Slice]; -"2470 5483" [id=2470, type=Gather]; -"2471 5484" [id=2471, type=Unsqueeze]; -"2472 5489" [id=2472, type=NonMaxSuppression]; -"2473 5491" [id=2473, type=Gather]; -"2474 5492" [id=2474, type=Squeeze]; -"2475 5496" [id=2475, type=Gather]; -"2476 5422" [id=2476, type=Slice]; -"2477 5424" [id=2477, type=Gather]; -"2478 5425" [id=2478, type=Cast]; -"2479 5426" [id=2479, type=NonZero]; -"2480 5427" [id=2480, type=Transpose]; -"2481 5428" [id=2481, type=Squeeze]; -"2482 5431" [id=2482, type=Cast]; -"2483 5430" [id=2483, type=Gather]; -"2484 5432" [id=2484, type=Gather]; -"2485 5441" [id=2485, type=Unsqueeze]; -"2486 5442" [id=2486, type=Unsqueeze]; -"2487 5437" [id=2487, type=Slice]; -"2488 5439" [id=2488, type=Gather]; -"2489 5440" [id=2489, type=Unsqueeze]; -"2490 5445" [id=2490, type=NonMaxSuppression]; -"2491 5447" [id=2491, type=Gather]; -"2492 5448" [id=2492, type=Squeeze]; -"2493 5452" [id=2493, type=Gather]; -"2494 5378" [id=2494, type=Slice]; -"2495 5380" [id=2495, type=Gather]; -"2496 5381" [id=2496, type=Cast]; -"2497 5382" [id=2497, type=NonZero]; -"2498 5383" [id=2498, type=Transpose]; -"2499 5384" [id=2499, type=Squeeze]; -"2500 5387" [id=2500, type=Cast]; -"2501 5386" [id=2501, type=Gather]; -"2502 5388" [id=2502, type=Gather]; -"2503 5397" [id=2503, type=Unsqueeze]; -"2504 5398" [id=2504, type=Unsqueeze]; -"2505 5393" [id=2505, type=Slice]; -"2506 5395" [id=2506, type=Gather]; -"2507 5396" [id=2507, type=Unsqueeze]; -"2508 5401" [id=2508, type=NonMaxSuppression]; -"2509 5403" [id=2509, type=Gather]; -"2510 5404" [id=2510, type=Squeeze]; -"2511 5408" [id=2511, type=Gather]; -"2512 5334" [id=2512, type=Slice]; -"2513 5336" [id=2513, type=Gather]; -"2514 5337" [id=2514, type=Cast]; -"2515 5338" [id=2515, type=NonZero]; -"2516 5339" [id=2516, type=Transpose]; -"2517 5340" [id=2517, type=Squeeze]; -"2518 5343" [id=2518, type=Cast]; -"2519 5342" [id=2519, type=Gather]; -"2520 5344" [id=2520, type=Gather]; -"2521 5353" [id=2521, type=Unsqueeze]; -"2522 5354" [id=2522, type=Unsqueeze]; -"2523 5349" [id=2523, type=Slice]; -"2524 5351" [id=2524, type=Gather]; -"2525 5352" [id=2525, type=Unsqueeze]; -"2526 5357" [id=2526, type=NonMaxSuppression]; -"2527 
5359" [id=2527, type=Gather]; -"2528 5360" [id=2528, type=Squeeze]; -"2529 5364" [id=2529, type=Gather]; -"2530 5290" [id=2530, type=Slice]; -"2531 5292" [id=2531, type=Gather]; -"2532 5293" [id=2532, type=Cast]; -"2533 5294" [id=2533, type=NonZero]; -"2534 5295" [id=2534, type=Transpose]; -"2535 5296" [id=2535, type=Squeeze]; -"2536 5299" [id=2536, type=Cast]; -"2537 5298" [id=2537, type=Gather]; -"2538 5300" [id=2538, type=Gather]; -"2539 5309" [id=2539, type=Unsqueeze]; -"2540 5310" [id=2540, type=Unsqueeze]; -"2541 5305" [id=2541, type=Slice]; -"2542 5307" [id=2542, type=Gather]; -"2543 5308" [id=2543, type=Unsqueeze]; -"2544 5313" [id=2544, type=NonMaxSuppression]; -"2545 5315" [id=2545, type=Gather]; -"2546 5316" [id=2546, type=Squeeze]; -"2547 5320" [id=2547, type=Gather]; -"2548 5246" [id=2548, type=Slice]; -"2549 5248" [id=2549, type=Gather]; -"2550 5249" [id=2550, type=Cast]; -"2551 5250" [id=2551, type=NonZero]; -"2552 5251" [id=2552, type=Transpose]; -"2553 5252" [id=2553, type=Squeeze]; -"2554 5255" [id=2554, type=Cast]; -"2555 5254" [id=2555, type=Gather]; -"2556 5256" [id=2556, type=Gather]; -"2557 5265" [id=2557, type=Unsqueeze]; -"2558 5266" [id=2558, type=Unsqueeze]; -"2559 5261" [id=2559, type=Slice]; -"2560 5263" [id=2560, type=Gather]; -"2561 5264" [id=2561, type=Unsqueeze]; -"2562 5269" [id=2562, type=NonMaxSuppression]; -"2563 5271" [id=2563, type=Gather]; -"2564 5272" [id=2564, type=Squeeze]; -"2565 5276" [id=2565, type=Gather]; -"2566 5202" [id=2566, type=Slice]; -"2567 5204" [id=2567, type=Gather]; -"2568 5205" [id=2568, type=Cast]; -"2569 5206" [id=2569, type=NonZero]; -"2570 5207" [id=2570, type=Transpose]; -"2571 5208" [id=2571, type=Squeeze]; -"2572 5211" [id=2572, type=Cast]; -"2573 5210" [id=2573, type=Gather]; -"2574 5212" [id=2574, type=Gather]; -"2575 5221" [id=2575, type=Unsqueeze]; -"2576 5222" [id=2576, type=Unsqueeze]; -"2577 5217" [id=2577, type=Slice]; -"2578 5219" [id=2578, type=Gather]; -"2579 5220" [id=2579, type=Unsqueeze]; -"2580 5225" [id=2580, type=NonMaxSuppression]; -"2581 5227" [id=2581, type=Gather]; -"2582 5228" [id=2582, type=Squeeze]; -"2583 5232" [id=2583, type=Gather]; -"2584 5158" [id=2584, type=Slice]; -"2585 5160" [id=2585, type=Gather]; -"2586 5161" [id=2586, type=Cast]; -"2587 5162" [id=2587, type=NonZero]; -"2588 5163" [id=2588, type=Transpose]; -"2589 5164" [id=2589, type=Squeeze]; -"2590 5167" [id=2590, type=Cast]; -"2591 5166" [id=2591, type=Gather]; -"2592 5168" [id=2592, type=Gather]; -"2593 5177" [id=2593, type=Unsqueeze]; -"2594 5178" [id=2594, type=Unsqueeze]; -"2595 5173" [id=2595, type=Slice]; -"2596 5175" [id=2596, type=Gather]; -"2597 5176" [id=2597, type=Unsqueeze]; -"2598 5181" [id=2598, type=NonMaxSuppression]; -"2599 5183" [id=2599, type=Gather]; -"2600 5184" [id=2600, type=Squeeze]; -"2601 5188" [id=2601, type=Gather]; -"2602 5114" [id=2602, type=Slice]; -"2603 5116" [id=2603, type=Gather]; -"2604 5117" [id=2604, type=Cast]; -"2605 5118" [id=2605, type=NonZero]; -"2606 5119" [id=2606, type=Transpose]; -"2607 5120" [id=2607, type=Squeeze]; -"2608 5123" [id=2608, type=Cast]; -"2609 5122" [id=2609, type=Gather]; -"2610 5124" [id=2610, type=Gather]; -"2611 5133" [id=2611, type=Unsqueeze]; -"2612 5134" [id=2612, type=Unsqueeze]; -"2613 5129" [id=2613, type=Slice]; -"2614 5131" [id=2614, type=Gather]; -"2615 5132" [id=2615, type=Unsqueeze]; -"2616 5137" [id=2616, type=NonMaxSuppression]; -"2617 5139" [id=2617, type=Gather]; -"2618 5140" [id=2618, type=Squeeze]; -"2619 5144" [id=2619, type=Gather]; -"2620 5070" 
[id=2620, type=Slice]; -"2621 5072" [id=2621, type=Gather]; -"2622 5073" [id=2622, type=Cast]; -"2623 5074" [id=2623, type=NonZero]; -"2624 5075" [id=2624, type=Transpose]; -"2625 5076" [id=2625, type=Squeeze]; -"2626 5079" [id=2626, type=Cast]; -"2627 5078" [id=2627, type=Gather]; -"2628 5080" [id=2628, type=Gather]; -"2629 5089" [id=2629, type=Unsqueeze]; -"2630 5090" [id=2630, type=Unsqueeze]; -"2631 5085" [id=2631, type=Slice]; -"2632 5087" [id=2632, type=Gather]; -"2633 5088" [id=2633, type=Unsqueeze]; -"2634 5093" [id=2634, type=NonMaxSuppression]; -"2635 5095" [id=2635, type=Gather]; -"2636 5096" [id=2636, type=Squeeze]; -"2637 5100" [id=2637, type=Gather]; -"2638 5026" [id=2638, type=Slice]; -"2639 5028" [id=2639, type=Gather]; -"2640 5029" [id=2640, type=Cast]; -"2641 5030" [id=2641, type=NonZero]; -"2642 5031" [id=2642, type=Transpose]; -"2643 5032" [id=2643, type=Squeeze]; -"2644 5035" [id=2644, type=Cast]; -"2645 5034" [id=2645, type=Gather]; -"2646 5036" [id=2646, type=Gather]; -"2647 5045" [id=2647, type=Unsqueeze]; -"2648 5046" [id=2648, type=Unsqueeze]; -"2649 5041" [id=2649, type=Slice]; -"2650 5043" [id=2650, type=Gather]; -"2651 5044" [id=2651, type=Unsqueeze]; -"2652 5049" [id=2652, type=NonMaxSuppression]; -"2653 5051" [id=2653, type=Gather]; -"2654 5052" [id=2654, type=Squeeze]; -"2655 5056" [id=2655, type=Gather]; -"2656 4982" [id=2656, type=Slice]; -"2657 4984" [id=2657, type=Gather]; -"2658 4985" [id=2658, type=Cast]; -"2659 4986" [id=2659, type=NonZero]; -"2660 4987" [id=2660, type=Transpose]; -"2661 4988" [id=2661, type=Squeeze]; -"2662 4991" [id=2662, type=Cast]; -"2663 4990" [id=2663, type=Gather]; -"2664 4992" [id=2664, type=Gather]; -"2665 5001" [id=2665, type=Unsqueeze]; -"2666 5002" [id=2666, type=Unsqueeze]; -"2667 4997" [id=2667, type=Slice]; -"2668 4999" [id=2668, type=Gather]; -"2669 5000" [id=2669, type=Unsqueeze]; -"2670 5005" [id=2670, type=NonMaxSuppression]; -"2671 5007" [id=2671, type=Gather]; -"2672 5008" [id=2672, type=Squeeze]; -"2673 5012" [id=2673, type=Gather]; -"2674 4938" [id=2674, type=Slice]; -"2675 4940" [id=2675, type=Gather]; -"2676 4941" [id=2676, type=Cast]; -"2677 4942" [id=2677, type=NonZero]; -"2678 4943" [id=2678, type=Transpose]; -"2679 4944" [id=2679, type=Squeeze]; -"2680 4947" [id=2680, type=Cast]; -"2681 4946" [id=2681, type=Gather]; -"2682 4948" [id=2682, type=Gather]; -"2683 4957" [id=2683, type=Unsqueeze]; -"2684 4958" [id=2684, type=Unsqueeze]; -"2685 4953" [id=2685, type=Slice]; -"2686 4955" [id=2686, type=Gather]; -"2687 4956" [id=2687, type=Unsqueeze]; -"2688 4961" [id=2688, type=NonMaxSuppression]; -"2689 4963" [id=2689, type=Gather]; -"2690 4964" [id=2690, type=Squeeze]; -"2691 4968" [id=2691, type=Gather]; -"2692 4894" [id=2692, type=Slice]; -"2693 4896" [id=2693, type=Gather]; -"2694 4897" [id=2694, type=Cast]; -"2695 4898" [id=2695, type=NonZero]; -"2696 4899" [id=2696, type=Transpose]; -"2697 4900" [id=2697, type=Squeeze]; -"2698 4903" [id=2698, type=Cast]; -"2699 4902" [id=2699, type=Gather]; -"2700 4904" [id=2700, type=Gather]; -"2701 4913" [id=2701, type=Unsqueeze]; -"2702 4914" [id=2702, type=Unsqueeze]; -"2703 4909" [id=2703, type=Slice]; -"2704 4911" [id=2704, type=Gather]; -"2705 4912" [id=2705, type=Unsqueeze]; -"2706 4917" [id=2706, type=NonMaxSuppression]; -"2707 4919" [id=2707, type=Gather]; -"2708 4920" [id=2708, type=Squeeze]; -"2709 4924" [id=2709, type=Gather]; -"2710 4850" [id=2710, type=Slice]; -"2711 4852" [id=2711, type=Gather]; -"2712 4853" [id=2712, type=Cast]; -"2713 4854" [id=2713, 
type=NonZero]; -"2714 4855" [id=2714, type=Transpose]; -"2715 4856" [id=2715, type=Squeeze]; -"2716 4859" [id=2716, type=Cast]; -"2717 4858" [id=2717, type=Gather]; -"2718 4860" [id=2718, type=Gather]; -"2719 4869" [id=2719, type=Unsqueeze]; -"2720 4870" [id=2720, type=Unsqueeze]; -"2721 4865" [id=2721, type=Slice]; -"2722 4867" [id=2722, type=Gather]; -"2723 4868" [id=2723, type=Unsqueeze]; -"2724 4873" [id=2724, type=NonMaxSuppression]; -"2725 4875" [id=2725, type=Gather]; -"2726 4876" [id=2726, type=Squeeze]; -"2727 4880" [id=2727, type=Gather]; -"2728 4806" [id=2728, type=Slice]; -"2729 4808" [id=2729, type=Gather]; -"2730 4809" [id=2730, type=Cast]; -"2731 4810" [id=2731, type=NonZero]; -"2732 4811" [id=2732, type=Transpose]; -"2733 4812" [id=2733, type=Squeeze]; -"2734 4815" [id=2734, type=Cast]; -"2735 4814" [id=2735, type=Gather]; -"2736 4816" [id=2736, type=Gather]; -"2737 4825" [id=2737, type=Unsqueeze]; -"2738 4826" [id=2738, type=Unsqueeze]; -"2739 4821" [id=2739, type=Slice]; -"2740 4823" [id=2740, type=Gather]; -"2741 4824" [id=2741, type=Unsqueeze]; -"2742 4829" [id=2742, type=NonMaxSuppression]; -"2743 4831" [id=2743, type=Gather]; -"2744 4832" [id=2744, type=Squeeze]; -"2745 4836" [id=2745, type=Gather]; -"2746 4762" [id=2746, type=Slice]; -"2747 4764" [id=2747, type=Gather]; -"2748 4765" [id=2748, type=Cast]; -"2749 4766" [id=2749, type=NonZero]; -"2750 4767" [id=2750, type=Transpose]; -"2751 4768" [id=2751, type=Squeeze]; -"2752 4771" [id=2752, type=Cast]; -"2753 4770" [id=2753, type=Gather]; -"2754 4772" [id=2754, type=Gather]; -"2755 4781" [id=2755, type=Unsqueeze]; -"2756 4782" [id=2756, type=Unsqueeze]; -"2757 4777" [id=2757, type=Slice]; -"2758 4779" [id=2758, type=Gather]; -"2759 4780" [id=2759, type=Unsqueeze]; -"2760 4785" [id=2760, type=NonMaxSuppression]; -"2761 4787" [id=2761, type=Gather]; -"2762 4788" [id=2762, type=Squeeze]; -"2763 4792" [id=2763, type=Gather]; -"2764 4718" [id=2764, type=Slice]; -"2765 4720" [id=2765, type=Gather]; -"2766 4721" [id=2766, type=Cast]; -"2767 4722" [id=2767, type=NonZero]; -"2768 4723" [id=2768, type=Transpose]; -"2769 4724" [id=2769, type=Squeeze]; -"2770 4727" [id=2770, type=Cast]; -"2771 4726" [id=2771, type=Gather]; -"2772 4728" [id=2772, type=Gather]; -"2773 4737" [id=2773, type=Unsqueeze]; -"2774 4738" [id=2774, type=Unsqueeze]; -"2775 4733" [id=2775, type=Slice]; -"2776 4735" [id=2776, type=Gather]; -"2777 4736" [id=2777, type=Unsqueeze]; -"2778 4741" [id=2778, type=NonMaxSuppression]; -"2779 4743" [id=2779, type=Gather]; -"2780 4744" [id=2780, type=Squeeze]; -"2781 4748" [id=2781, type=Gather]; -"2782 4674" [id=2782, type=Slice]; -"2783 4676" [id=2783, type=Gather]; -"2784 4677" [id=2784, type=Cast]; -"2785 4678" [id=2785, type=NonZero]; -"2786 4679" [id=2786, type=Transpose]; -"2787 4680" [id=2787, type=Squeeze]; -"2788 4683" [id=2788, type=Cast]; -"2789 4682" [id=2789, type=Gather]; -"2790 4684" [id=2790, type=Gather]; -"2791 4693" [id=2791, type=Unsqueeze]; -"2792 4694" [id=2792, type=Unsqueeze]; -"2793 4689" [id=2793, type=Slice]; -"2794 4691" [id=2794, type=Gather]; -"2795 4692" [id=2795, type=Unsqueeze]; -"2796 4697" [id=2796, type=NonMaxSuppression]; -"2797 4699" [id=2797, type=Gather]; -"2798 4700" [id=2798, type=Squeeze]; -"2799 4704" [id=2799, type=Gather]; -"2800 4630" [id=2800, type=Slice]; -"2801 4632" [id=2801, type=Gather]; -"2802 4633" [id=2802, type=Cast]; -"2803 4634" [id=2803, type=NonZero]; -"2804 4635" [id=2804, type=Transpose]; -"2805 4636" [id=2805, type=Squeeze]; -"2806 4639" [id=2806, 
type=Cast]; -"2807 4638" [id=2807, type=Gather]; -"2808 4640" [id=2808, type=Gather]; -"2809 4649" [id=2809, type=Unsqueeze]; -"2810 4650" [id=2810, type=Unsqueeze]; -"2811 4645" [id=2811, type=Slice]; -"2812 4647" [id=2812, type=Gather]; -"2813 4648" [id=2813, type=Unsqueeze]; -"2814 4653" [id=2814, type=NonMaxSuppression]; -"2815 4655" [id=2815, type=Gather]; -"2816 4656" [id=2816, type=Squeeze]; -"2817 4660" [id=2817, type=Gather]; -"2818 4586" [id=2818, type=Slice]; -"2819 4588" [id=2819, type=Gather]; -"2820 4589" [id=2820, type=Cast]; -"2821 4590" [id=2821, type=NonZero]; -"2822 4591" [id=2822, type=Transpose]; -"2823 4592" [id=2823, type=Squeeze]; -"2824 4595" [id=2824, type=Cast]; -"2825 4594" [id=2825, type=Gather]; -"2826 4596" [id=2826, type=Gather]; -"2827 4605" [id=2827, type=Unsqueeze]; -"2828 4606" [id=2828, type=Unsqueeze]; -"2829 4601" [id=2829, type=Slice]; -"2830 4603" [id=2830, type=Gather]; -"2831 4604" [id=2831, type=Unsqueeze]; -"2832 4609" [id=2832, type=NonMaxSuppression]; -"2833 4611" [id=2833, type=Gather]; -"2834 4612" [id=2834, type=Squeeze]; -"2835 4616" [id=2835, type=Gather]; -"2836 4542" [id=2836, type=Slice]; -"2837 4544" [id=2837, type=Gather]; -"2838 4545" [id=2838, type=Cast]; -"2839 4546" [id=2839, type=NonZero]; -"2840 4547" [id=2840, type=Transpose]; -"2841 4548" [id=2841, type=Squeeze]; -"2842 4551" [id=2842, type=Cast]; -"2843 4550" [id=2843, type=Gather]; -"2844 4552" [id=2844, type=Gather]; -"2845 4561" [id=2845, type=Unsqueeze]; -"2846 4562" [id=2846, type=Unsqueeze]; -"2847 4557" [id=2847, type=Slice]; -"2848 4559" [id=2848, type=Gather]; -"2849 4560" [id=2849, type=Unsqueeze]; -"2850 4565" [id=2850, type=NonMaxSuppression]; -"2851 4567" [id=2851, type=Gather]; -"2852 4568" [id=2852, type=Squeeze]; -"2853 4572" [id=2853, type=Gather]; -"2854 4498" [id=2854, type=Slice]; -"2855 4500" [id=2855, type=Gather]; -"2856 4501" [id=2856, type=Cast]; -"2857 4502" [id=2857, type=NonZero]; -"2858 4503" [id=2858, type=Transpose]; -"2859 4504" [id=2859, type=Squeeze]; -"2860 4507" [id=2860, type=Cast]; -"2861 4506" [id=2861, type=Gather]; -"2862 4508" [id=2862, type=Gather]; -"2863 4517" [id=2863, type=Unsqueeze]; -"2864 4518" [id=2864, type=Unsqueeze]; -"2865 4513" [id=2865, type=Slice]; -"2866 4515" [id=2866, type=Gather]; -"2867 4516" [id=2867, type=Unsqueeze]; -"2868 4521" [id=2868, type=NonMaxSuppression]; -"2869 4523" [id=2869, type=Gather]; -"2870 4524" [id=2870, type=Squeeze]; -"2871 4528" [id=2871, type=Gather]; -"2872 4454" [id=2872, type=Slice]; -"2873 4456" [id=2873, type=Gather]; -"2874 4457" [id=2874, type=Cast]; -"2875 4458" [id=2875, type=NonZero]; -"2876 4459" [id=2876, type=Transpose]; -"2877 4460" [id=2877, type=Squeeze]; -"2878 4463" [id=2878, type=Cast]; -"2879 4462" [id=2879, type=Gather]; -"2880 4464" [id=2880, type=Gather]; -"2881 4473" [id=2881, type=Unsqueeze]; -"2882 4474" [id=2882, type=Unsqueeze]; -"2883 4469" [id=2883, type=Slice]; -"2884 4471" [id=2884, type=Gather]; -"2885 4472" [id=2885, type=Unsqueeze]; -"2886 4477" [id=2886, type=NonMaxSuppression]; -"2887 4479" [id=2887, type=Gather]; -"2888 4480" [id=2888, type=Squeeze]; -"2889 4484" [id=2889, type=Gather]; -"2890 4410" [id=2890, type=Slice]; -"2891 4412" [id=2891, type=Gather]; -"2892 4413" [id=2892, type=Cast]; -"2893 4414" [id=2893, type=NonZero]; -"2894 4415" [id=2894, type=Transpose]; -"2895 4416" [id=2895, type=Squeeze]; -"2896 4419" [id=2896, type=Cast]; -"2897 4418" [id=2897, type=Gather]; -"2898 4420" [id=2898, type=Gather]; -"2899 4429" [id=2899, 
type=Unsqueeze]; -"2900 4430" [id=2900, type=Unsqueeze]; -"2901 4425" [id=2901, type=Slice]; -"2902 4427" [id=2902, type=Gather]; -"2903 4428" [id=2903, type=Unsqueeze]; -"2904 4433" [id=2904, type=NonMaxSuppression]; -"2905 4435" [id=2905, type=Gather]; -"2906 4436" [id=2906, type=Squeeze]; -"2907 4440" [id=2907, type=Gather]; -"2908 4366" [id=2908, type=Slice]; -"2909 4368" [id=2909, type=Gather]; -"2910 4369" [id=2910, type=Cast]; -"2911 4370" [id=2911, type=NonZero]; -"2912 4371" [id=2912, type=Transpose]; -"2913 4372" [id=2913, type=Squeeze]; -"2914 4375" [id=2914, type=Cast]; -"2915 4374" [id=2915, type=Gather]; -"2916 4376" [id=2916, type=Gather]; -"2917 4385" [id=2917, type=Unsqueeze]; -"2918 4386" [id=2918, type=Unsqueeze]; -"2919 4381" [id=2919, type=Slice]; -"2920 4383" [id=2920, type=Gather]; -"2921 4384" [id=2921, type=Unsqueeze]; -"2922 4389" [id=2922, type=NonMaxSuppression]; -"2923 4391" [id=2923, type=Gather]; -"2924 4392" [id=2924, type=Squeeze]; -"2925 4396" [id=2925, type=Gather]; -"2926 4322" [id=2926, type=Slice]; -"2927 4324" [id=2927, type=Gather]; -"2928 4325" [id=2928, type=Cast]; -"2929 4326" [id=2929, type=NonZero]; -"2930 4327" [id=2930, type=Transpose]; -"2931 4328" [id=2931, type=Squeeze]; -"2932 4331" [id=2932, type=Cast]; -"2933 4330" [id=2933, type=Gather]; -"2934 4332" [id=2934, type=Gather]; -"2935 4341" [id=2935, type=Unsqueeze]; -"2936 4342" [id=2936, type=Unsqueeze]; -"2937 4337" [id=2937, type=Slice]; -"2938 4339" [id=2938, type=Gather]; -"2939 4340" [id=2939, type=Unsqueeze]; -"2940 4345" [id=2940, type=NonMaxSuppression]; -"2941 4347" [id=2941, type=Gather]; -"2942 4348" [id=2942, type=Squeeze]; -"2943 4352" [id=2943, type=Gather]; -"2944 4278" [id=2944, type=Slice]; -"2945 4280" [id=2945, type=Gather]; -"2946 4281" [id=2946, type=Cast]; -"2947 4282" [id=2947, type=NonZero]; -"2948 4283" [id=2948, type=Transpose]; -"2949 4284" [id=2949, type=Squeeze]; -"2950 4287" [id=2950, type=Cast]; -"2951 4286" [id=2951, type=Gather]; -"2952 4288" [id=2952, type=Gather]; -"2953 4297" [id=2953, type=Unsqueeze]; -"2954 4298" [id=2954, type=Unsqueeze]; -"2955 4293" [id=2955, type=Slice]; -"2956 4295" [id=2956, type=Gather]; -"2957 4296" [id=2957, type=Unsqueeze]; -"2958 4301" [id=2958, type=NonMaxSuppression]; -"2959 4303" [id=2959, type=Gather]; -"2960 4304" [id=2960, type=Squeeze]; -"2961 4308" [id=2961, type=Gather]; -"2962 4234" [id=2962, type=Slice]; -"2963 4236" [id=2963, type=Gather]; -"2964 4237" [id=2964, type=Cast]; -"2965 4238" [id=2965, type=NonZero]; -"2966 4239" [id=2966, type=Transpose]; -"2967 4240" [id=2967, type=Squeeze]; -"2968 4243" [id=2968, type=Cast]; -"2969 4242" [id=2969, type=Gather]; -"2970 4244" [id=2970, type=Gather]; -"2971 4253" [id=2971, type=Unsqueeze]; -"2972 4254" [id=2972, type=Unsqueeze]; -"2973 4249" [id=2973, type=Slice]; -"2974 4251" [id=2974, type=Gather]; -"2975 4252" [id=2975, type=Unsqueeze]; -"2976 4257" [id=2976, type=NonMaxSuppression]; -"2977 4259" [id=2977, type=Gather]; -"2978 4260" [id=2978, type=Squeeze]; -"2979 4264" [id=2979, type=Gather]; -"2980 4190" [id=2980, type=Slice]; -"2981 4192" [id=2981, type=Gather]; -"2982 4193" [id=2982, type=Cast]; -"2983 4194" [id=2983, type=NonZero]; -"2984 4195" [id=2984, type=Transpose]; -"2985 4196" [id=2985, type=Squeeze]; -"2986 4199" [id=2986, type=Cast]; -"2987 4198" [id=2987, type=Gather]; -"2988 4200" [id=2988, type=Gather]; -"2989 4209" [id=2989, type=Unsqueeze]; -"2990 4210" [id=2990, type=Unsqueeze]; -"2991 4205" [id=2991, type=Slice]; -"2992 4207" [id=2992, 
type=Gather]; -"2993 4208" [id=2993, type=Unsqueeze]; -"2994 4213" [id=2994, type=NonMaxSuppression]; -"2995 4215" [id=2995, type=Gather]; -"2996 4216" [id=2996, type=Squeeze]; -"2997 4220" [id=2997, type=Gather]; -"2998 4146" [id=2998, type=Slice]; -"2999 4148" [id=2999, type=Gather]; -"3000 4149" [id=3000, type=Cast]; -"3001 4150" [id=3001, type=NonZero]; -"3002 4151" [id=3002, type=Transpose]; -"3003 4152" [id=3003, type=Squeeze]; -"3004 4155" [id=3004, type=Cast]; -"3005 4154" [id=3005, type=Gather]; -"3006 4156" [id=3006, type=Gather]; -"3007 4165" [id=3007, type=Unsqueeze]; -"3008 4166" [id=3008, type=Unsqueeze]; -"3009 4161" [id=3009, type=Slice]; -"3010 4163" [id=3010, type=Gather]; -"3011 4164" [id=3011, type=Unsqueeze]; -"3012 4169" [id=3012, type=NonMaxSuppression]; -"3013 4171" [id=3013, type=Gather]; -"3014 4172" [id=3014, type=Squeeze]; -"3015 4176" [id=3015, type=Gather]; -"3016 4102" [id=3016, type=Slice]; -"3017 4104" [id=3017, type=Gather]; -"3018 4105" [id=3018, type=Cast]; -"3019 4106" [id=3019, type=NonZero]; -"3020 4107" [id=3020, type=Transpose]; -"3021 4108" [id=3021, type=Squeeze]; -"3022 4111" [id=3022, type=Cast]; -"3023 4110" [id=3023, type=Gather]; -"3024 4112" [id=3024, type=Gather]; -"3025 4121" [id=3025, type=Unsqueeze]; -"3026 4122" [id=3026, type=Unsqueeze]; -"3027 4117" [id=3027, type=Slice]; -"3028 4119" [id=3028, type=Gather]; -"3029 4120" [id=3029, type=Unsqueeze]; -"3030 4125" [id=3030, type=NonMaxSuppression]; -"3031 4127" [id=3031, type=Gather]; -"3032 4128" [id=3032, type=Squeeze]; -"3033 4132" [id=3033, type=Gather]; -"3034 4058" [id=3034, type=Slice]; -"3035 4060" [id=3035, type=Gather]; -"3036 4061" [id=3036, type=Cast]; -"3037 4062" [id=3037, type=NonZero]; -"3038 4063" [id=3038, type=Transpose]; -"3039 4064" [id=3039, type=Squeeze]; -"3040 4067" [id=3040, type=Cast]; -"3041 4066" [id=3041, type=Gather]; -"3042 4068" [id=3042, type=Gather]; -"3043 4077" [id=3043, type=Unsqueeze]; -"3044 4078" [id=3044, type=Unsqueeze]; -"3045 4073" [id=3045, type=Slice]; -"3046 4075" [id=3046, type=Gather]; -"3047 4076" [id=3047, type=Unsqueeze]; -"3048 4081" [id=3048, type=NonMaxSuppression]; -"3049 4083" [id=3049, type=Gather]; -"3050 4084" [id=3050, type=Squeeze]; -"3051 4088" [id=3051, type=Gather]; -"3052 4014" [id=3052, type=Slice]; -"3053 4016" [id=3053, type=Gather]; -"3054 4017" [id=3054, type=Cast]; -"3055 4018" [id=3055, type=NonZero]; -"3056 4019" [id=3056, type=Transpose]; -"3057 4020" [id=3057, type=Squeeze]; -"3058 4023" [id=3058, type=Cast]; -"3059 4022" [id=3059, type=Gather]; -"3060 4024" [id=3060, type=Gather]; -"3061 4033" [id=3061, type=Unsqueeze]; -"3062 4034" [id=3062, type=Unsqueeze]; -"3063 4029" [id=3063, type=Slice]; -"3064 4031" [id=3064, type=Gather]; -"3065 4032" [id=3065, type=Unsqueeze]; -"3066 4037" [id=3066, type=NonMaxSuppression]; -"3067 4039" [id=3067, type=Gather]; -"3068 4040" [id=3068, type=Squeeze]; -"3069 4044" [id=3069, type=Gather]; -"3070 3970" [id=3070, type=Slice]; -"3071 3972" [id=3071, type=Gather]; -"3072 3973" [id=3072, type=Cast]; -"3073 3974" [id=3073, type=NonZero]; -"3074 3975" [id=3074, type=Transpose]; -"3075 3976" [id=3075, type=Squeeze]; -"3076 3979" [id=3076, type=Cast]; -"3077 3978" [id=3077, type=Gather]; -"3078 3980" [id=3078, type=Gather]; -"3079 3989" [id=3079, type=Unsqueeze]; -"3080 3990" [id=3080, type=Unsqueeze]; -"3081 3985" [id=3081, type=Slice]; -"3082 3987" [id=3082, type=Gather]; -"3083 3988" [id=3083, type=Unsqueeze]; -"3084 3993" [id=3084, type=NonMaxSuppression]; -"3085 3995" [id=3085, 
type=Gather]; -"3086 3996" [id=3086, type=Squeeze]; -"3087 4000" [id=3087, type=Gather]; -"3088 3926" [id=3088, type=Slice]; -"3089 3928" [id=3089, type=Gather]; -"3090 3929" [id=3090, type=Cast]; -"3091 3930" [id=3091, type=NonZero]; -"3092 3931" [id=3092, type=Transpose]; -"3093 3932" [id=3093, type=Squeeze]; -"3094 3935" [id=3094, type=Cast]; -"3095 3934" [id=3095, type=Gather]; -"3096 3936" [id=3096, type=Gather]; -"3097 3945" [id=3097, type=Unsqueeze]; -"3098 3946" [id=3098, type=Unsqueeze]; -"3099 3941" [id=3099, type=Slice]; -"3100 3943" [id=3100, type=Gather]; -"3101 3944" [id=3101, type=Unsqueeze]; -"3102 3949" [id=3102, type=NonMaxSuppression]; -"3103 3951" [id=3103, type=Gather]; -"3104 3952" [id=3104, type=Squeeze]; -"3105 3956" [id=3105, type=Gather]; -"3106 3882" [id=3106, type=Slice]; -"3107 3884" [id=3107, type=Gather]; -"3108 3885" [id=3108, type=Cast]; -"3109 3886" [id=3109, type=NonZero]; -"3110 3887" [id=3110, type=Transpose]; -"3111 3888" [id=3111, type=Squeeze]; -"3112 3891" [id=3112, type=Cast]; -"3113 3890" [id=3113, type=Gather]; -"3114 3892" [id=3114, type=Gather]; -"3115 3901" [id=3115, type=Unsqueeze]; -"3116 3902" [id=3116, type=Unsqueeze]; -"3117 3897" [id=3117, type=Slice]; -"3118 3899" [id=3118, type=Gather]; -"3119 3900" [id=3119, type=Unsqueeze]; -"3120 3905" [id=3120, type=NonMaxSuppression]; -"3121 3907" [id=3121, type=Gather]; -"3122 3908" [id=3122, type=Squeeze]; -"3123 3912" [id=3123, type=Gather]; -"3124 3838" [id=3124, type=Slice]; -"3125 3840" [id=3125, type=Gather]; -"3126 3841" [id=3126, type=Cast]; -"3127 3842" [id=3127, type=NonZero]; -"3128 3843" [id=3128, type=Transpose]; -"3129 3844" [id=3129, type=Squeeze]; -"3130 3847" [id=3130, type=Cast]; -"3131 3846" [id=3131, type=Gather]; -"3132 3848" [id=3132, type=Gather]; -"3133 3857" [id=3133, type=Unsqueeze]; -"3134 3858" [id=3134, type=Unsqueeze]; -"3135 3853" [id=3135, type=Slice]; -"3136 3855" [id=3136, type=Gather]; -"3137 3856" [id=3137, type=Unsqueeze]; -"3138 3861" [id=3138, type=NonMaxSuppression]; -"3139 3863" [id=3139, type=Gather]; -"3140 3864" [id=3140, type=Squeeze]; -"3141 3868" [id=3141, type=Gather]; -"3142 3794" [id=3142, type=Slice]; -"3143 3796" [id=3143, type=Gather]; -"3144 3797" [id=3144, type=Cast]; -"3145 3798" [id=3145, type=NonZero]; -"3146 3799" [id=3146, type=Transpose]; -"3147 3800" [id=3147, type=Squeeze]; -"3148 3803" [id=3148, type=Cast]; -"3149 3802" [id=3149, type=Gather]; -"3150 3804" [id=3150, type=Gather]; -"3151 3813" [id=3151, type=Unsqueeze]; -"3152 3814" [id=3152, type=Unsqueeze]; -"3153 3809" [id=3153, type=Slice]; -"3154 3811" [id=3154, type=Gather]; -"3155 3812" [id=3155, type=Unsqueeze]; -"3156 3817" [id=3156, type=NonMaxSuppression]; -"3157 3819" [id=3157, type=Gather]; -"3158 3820" [id=3158, type=Squeeze]; -"3159 3824" [id=3159, type=Gather]; -"3160 3750" [id=3160, type=Slice]; -"3161 3752" [id=3161, type=Gather]; -"3162 3753" [id=3162, type=Cast]; -"3163 3754" [id=3163, type=NonZero]; -"3164 3755" [id=3164, type=Transpose]; -"3165 3756" [id=3165, type=Squeeze]; -"3166 3759" [id=3166, type=Cast]; -"3167 3758" [id=3167, type=Gather]; -"3168 3760" [id=3168, type=Gather]; -"3169 3769" [id=3169, type=Unsqueeze]; -"3170 3770" [id=3170, type=Unsqueeze]; -"3171 3765" [id=3171, type=Slice]; -"3172 3767" [id=3172, type=Gather]; -"3173 3768" [id=3173, type=Unsqueeze]; -"3174 3773" [id=3174, type=NonMaxSuppression]; -"3175 3775" [id=3175, type=Gather]; -"3176 3776" [id=3176, type=Squeeze]; -"3177 3780" [id=3177, type=Gather]; -"3178 3706" [id=3178, type=Slice]; 
-"3179 3708" [id=3179, type=Gather]; -"3180 3709" [id=3180, type=Cast]; -"3181 3710" [id=3181, type=NonZero]; -"3182 3711" [id=3182, type=Transpose]; -"3183 3712" [id=3183, type=Squeeze]; -"3184 3715" [id=3184, type=Cast]; -"3185 3714" [id=3185, type=Gather]; -"3186 3716" [id=3186, type=Gather]; -"3187 3725" [id=3187, type=Unsqueeze]; -"3188 3726" [id=3188, type=Unsqueeze]; -"3189 3721" [id=3189, type=Slice]; -"3190 3723" [id=3190, type=Gather]; -"3191 3724" [id=3191, type=Unsqueeze]; -"3192 3729" [id=3192, type=NonMaxSuppression]; -"3193 3731" [id=3193, type=Gather]; -"3194 3732" [id=3194, type=Squeeze]; -"3195 3736" [id=3195, type=Gather]; -"3196 3662" [id=3196, type=Slice]; -"3197 3664" [id=3197, type=Gather]; -"3198 3665" [id=3198, type=Cast]; -"3199 3666" [id=3199, type=NonZero]; -"3200 3667" [id=3200, type=Transpose]; -"3201 3668" [id=3201, type=Squeeze]; -"3202 3671" [id=3202, type=Cast]; -"3203 3670" [id=3203, type=Gather]; -"3204 3672" [id=3204, type=Gather]; -"3205 3681" [id=3205, type=Unsqueeze]; -"3206 3682" [id=3206, type=Unsqueeze]; -"3207 3677" [id=3207, type=Slice]; -"3208 3679" [id=3208, type=Gather]; -"3209 3680" [id=3209, type=Unsqueeze]; -"3210 3685" [id=3210, type=NonMaxSuppression]; -"3211 3687" [id=3211, type=Gather]; -"3212 3688" [id=3212, type=Squeeze]; -"3213 3692" [id=3213, type=Gather]; -"3214 3618" [id=3214, type=Slice]; -"3215 3620" [id=3215, type=Gather]; -"3216 3621" [id=3216, type=Cast]; -"3217 3622" [id=3217, type=NonZero]; -"3218 3623" [id=3218, type=Transpose]; -"3219 3624" [id=3219, type=Squeeze]; -"3220 3627" [id=3220, type=Cast]; -"3221 3626" [id=3221, type=Gather]; -"3222 3628" [id=3222, type=Gather]; -"3223 3637" [id=3223, type=Unsqueeze]; -"3224 3638" [id=3224, type=Unsqueeze]; -"3225 3633" [id=3225, type=Slice]; -"3226 3635" [id=3226, type=Gather]; -"3227 3636" [id=3227, type=Unsqueeze]; -"3228 3641" [id=3228, type=NonMaxSuppression]; -"3229 3643" [id=3229, type=Gather]; -"3230 3644" [id=3230, type=Squeeze]; -"3231 3648" [id=3231, type=Gather]; -"3232 3574" [id=3232, type=Slice]; -"3233 3576" [id=3233, type=Gather]; -"3234 3577" [id=3234, type=Cast]; -"3235 3578" [id=3235, type=NonZero]; -"3236 3579" [id=3236, type=Transpose]; -"3237 3580" [id=3237, type=Squeeze]; -"3238 3583" [id=3238, type=Cast]; -"3239 3582" [id=3239, type=Gather]; -"3240 3584" [id=3240, type=Gather]; -"3241 3593" [id=3241, type=Unsqueeze]; -"3242 3594" [id=3242, type=Unsqueeze]; -"3243 3589" [id=3243, type=Slice]; -"3244 3591" [id=3244, type=Gather]; -"3245 3592" [id=3245, type=Unsqueeze]; -"3246 3597" [id=3246, type=NonMaxSuppression]; -"3247 3599" [id=3247, type=Gather]; -"3248 3600" [id=3248, type=Squeeze]; -"3249 3604" [id=3249, type=Gather]; -"3250 3530" [id=3250, type=Slice]; -"3251 3532" [id=3251, type=Gather]; -"3252 3533" [id=3252, type=Cast]; -"3253 3534" [id=3253, type=NonZero]; -"3254 3535" [id=3254, type=Transpose]; -"3255 3536" [id=3255, type=Squeeze]; -"3256 3539" [id=3256, type=Cast]; -"3257 3538" [id=3257, type=Gather]; -"3258 3540" [id=3258, type=Gather]; -"3259 3549" [id=3259, type=Unsqueeze]; -"3260 3550" [id=3260, type=Unsqueeze]; -"3261 3545" [id=3261, type=Slice]; -"3262 3547" [id=3262, type=Gather]; -"3263 3548" [id=3263, type=Unsqueeze]; -"3264 3553" [id=3264, type=NonMaxSuppression]; -"3265 3555" [id=3265, type=Gather]; -"3266 3556" [id=3266, type=Squeeze]; -"3267 3560" [id=3267, type=Gather]; -"3268 3486" [id=3268, type=Slice]; -"3269 3488" [id=3269, type=Gather]; -"3270 3489" [id=3270, type=Cast]; -"3271 3490" [id=3271, type=NonZero]; -"3272 3491" 
[id=3272, type=Transpose]; -"3273 3492" [id=3273, type=Squeeze]; -"3274 3495" [id=3274, type=Cast]; -"3275 3494" [id=3275, type=Gather]; -"3276 3496" [id=3276, type=Gather]; -"3277 3505" [id=3277, type=Unsqueeze]; -"3278 3506" [id=3278, type=Unsqueeze]; -"3279 3501" [id=3279, type=Slice]; -"3280 3503" [id=3280, type=Gather]; -"3281 3504" [id=3281, type=Unsqueeze]; -"3282 3509" [id=3282, type=NonMaxSuppression]; -"3283 3511" [id=3283, type=Gather]; -"3284 3512" [id=3284, type=Squeeze]; -"3285 3516" [id=3285, type=Gather]; -"3286 3442" [id=3286, type=Slice]; -"3287 3444" [id=3287, type=Gather]; -"3288 3445" [id=3288, type=Cast]; -"3289 3446" [id=3289, type=NonZero]; -"3290 3447" [id=3290, type=Transpose]; -"3291 3448" [id=3291, type=Squeeze]; -"3292 3451" [id=3292, type=Cast]; -"3293 3450" [id=3293, type=Gather]; -"3294 3452" [id=3294, type=Gather]; -"3295 3461" [id=3295, type=Unsqueeze]; -"3296 3462" [id=3296, type=Unsqueeze]; -"3297 3457" [id=3297, type=Slice]; -"3298 3459" [id=3298, type=Gather]; -"3299 3460" [id=3299, type=Unsqueeze]; -"3300 3465" [id=3300, type=NonMaxSuppression]; -"3301 3467" [id=3301, type=Gather]; -"3302 3468" [id=3302, type=Squeeze]; -"3303 3472" [id=3303, type=Gather]; -"3304 3398" [id=3304, type=Slice]; -"3305 3400" [id=3305, type=Gather]; -"3306 3401" [id=3306, type=Cast]; -"3307 3402" [id=3307, type=NonZero]; -"3308 3403" [id=3308, type=Transpose]; -"3309 3404" [id=3309, type=Squeeze]; -"3310 3407" [id=3310, type=Cast]; -"3311 3406" [id=3311, type=Gather]; -"3312 3408" [id=3312, type=Gather]; -"3313 3417" [id=3313, type=Unsqueeze]; -"3314 3418" [id=3314, type=Unsqueeze]; -"3315 3413" [id=3315, type=Slice]; -"3316 3415" [id=3316, type=Gather]; -"3317 3416" [id=3317, type=Unsqueeze]; -"3318 3421" [id=3318, type=NonMaxSuppression]; -"3319 3423" [id=3319, type=Gather]; -"3320 3424" [id=3320, type=Squeeze]; -"3321 3428" [id=3321, type=Gather]; -"3322 3354" [id=3322, type=Slice]; -"3323 3356" [id=3323, type=Gather]; -"3324 3357" [id=3324, type=Cast]; -"3325 3358" [id=3325, type=NonZero]; -"3326 3359" [id=3326, type=Transpose]; -"3327 3360" [id=3327, type=Squeeze]; -"3328 3363" [id=3328, type=Cast]; -"3329 3362" [id=3329, type=Gather]; -"3330 3364" [id=3330, type=Gather]; -"3331 3373" [id=3331, type=Unsqueeze]; -"3332 3374" [id=3332, type=Unsqueeze]; -"3333 3369" [id=3333, type=Slice]; -"3334 3371" [id=3334, type=Gather]; -"3335 3372" [id=3335, type=Unsqueeze]; -"3336 3377" [id=3336, type=NonMaxSuppression]; -"3337 3379" [id=3337, type=Gather]; -"3338 3380" [id=3338, type=Squeeze]; -"3339 3384" [id=3339, type=Gather]; -"3340 3310" [id=3340, type=Slice]; -"3341 3312" [id=3341, type=Gather]; -"3342 3313" [id=3342, type=Cast]; -"3343 3314" [id=3343, type=NonZero]; -"3344 3315" [id=3344, type=Transpose]; -"3345 3316" [id=3345, type=Squeeze]; -"3346 3319" [id=3346, type=Cast]; -"3347 3318" [id=3347, type=Gather]; -"3348 3320" [id=3348, type=Gather]; -"3349 3329" [id=3349, type=Unsqueeze]; -"3350 3330" [id=3350, type=Unsqueeze]; -"3351 3325" [id=3351, type=Slice]; -"3352 3327" [id=3352, type=Gather]; -"3353 3328" [id=3353, type=Unsqueeze]; -"3354 3333" [id=3354, type=NonMaxSuppression]; -"3355 3335" [id=3355, type=Gather]; -"3356 3336" [id=3356, type=Squeeze]; -"3357 3340" [id=3357, type=Gather]; -"3358 3266" [id=3358, type=Slice]; -"3359 3268" [id=3359, type=Gather]; -"3360 3269" [id=3360, type=Cast]; -"3361 3270" [id=3361, type=NonZero]; -"3362 3271" [id=3362, type=Transpose]; -"3363 3272" [id=3363, type=Squeeze]; -"3364 3275" [id=3364, type=Cast]; -"3365 3274" [id=3365, 
type=Gather]; -"3366 3276" [id=3366, type=Gather]; -"3367 3285" [id=3367, type=Unsqueeze]; -"3368 3286" [id=3368, type=Unsqueeze]; -"3369 3281" [id=3369, type=Slice]; -"3370 3283" [id=3370, type=Gather]; -"3371 3284" [id=3371, type=Unsqueeze]; -"3372 3289" [id=3372, type=NonMaxSuppression]; -"3373 3291" [id=3373, type=Gather]; -"3374 3292" [id=3374, type=Squeeze]; -"3375 3296" [id=3375, type=Gather]; -"3376 3222" [id=3376, type=Slice]; -"3377 3224" [id=3377, type=Gather]; -"3378 3225" [id=3378, type=Cast]; -"3379 3226" [id=3379, type=NonZero]; -"3380 3227" [id=3380, type=Transpose]; -"3381 3228" [id=3381, type=Squeeze]; -"3382 3231" [id=3382, type=Cast]; -"3383 3230" [id=3383, type=Gather]; -"3384 3232" [id=3384, type=Gather]; -"3385 3241" [id=3385, type=Unsqueeze]; -"3386 3242" [id=3386, type=Unsqueeze]; -"3387 3237" [id=3387, type=Slice]; -"3388 3239" [id=3388, type=Gather]; -"3389 3240" [id=3389, type=Unsqueeze]; -"3390 3245" [id=3390, type=NonMaxSuppression]; -"3391 3247" [id=3391, type=Gather]; -"3392 3248" [id=3392, type=Squeeze]; -"3393 3252" [id=3393, type=Gather]; -"3394 3178" [id=3394, type=Slice]; -"3395 3180" [id=3395, type=Gather]; -"3396 3181" [id=3396, type=Cast]; -"3397 3182" [id=3397, type=NonZero]; -"3398 3183" [id=3398, type=Transpose]; -"3399 3184" [id=3399, type=Squeeze]; -"3400 3187" [id=3400, type=Cast]; -"3401 3186" [id=3401, type=Gather]; -"3402 3188" [id=3402, type=Gather]; -"3403 3197" [id=3403, type=Unsqueeze]; -"3404 3198" [id=3404, type=Unsqueeze]; -"3405 3193" [id=3405, type=Slice]; -"3406 3195" [id=3406, type=Gather]; -"3407 3196" [id=3407, type=Unsqueeze]; -"3408 3201" [id=3408, type=NonMaxSuppression]; -"3409 3203" [id=3409, type=Gather]; -"3410 3204" [id=3410, type=Squeeze]; -"3411 3208" [id=3411, type=Gather]; -"3412 3134" [id=3412, type=Slice]; -"3413 3136" [id=3413, type=Gather]; -"3414 3137" [id=3414, type=Cast]; -"3415 3138" [id=3415, type=NonZero]; -"3416 3139" [id=3416, type=Transpose]; -"3417 3140" [id=3417, type=Squeeze]; -"3418 3143" [id=3418, type=Cast]; -"3419 3142" [id=3419, type=Gather]; -"3420 3144" [id=3420, type=Gather]; -"3421 3153" [id=3421, type=Unsqueeze]; -"3422 3154" [id=3422, type=Unsqueeze]; -"3423 3149" [id=3423, type=Slice]; -"3424 3151" [id=3424, type=Gather]; -"3425 3152" [id=3425, type=Unsqueeze]; -"3426 3157" [id=3426, type=NonMaxSuppression]; -"3427 3159" [id=3427, type=Gather]; -"3428 3160" [id=3428, type=Squeeze]; -"3429 3164" [id=3429, type=Gather]; -"3430 3090" [id=3430, type=Slice]; -"3431 3092" [id=3431, type=Gather]; -"3432 3093" [id=3432, type=Cast]; -"3433 3094" [id=3433, type=NonZero]; -"3434 3095" [id=3434, type=Transpose]; -"3435 3096" [id=3435, type=Squeeze]; -"3436 3099" [id=3436, type=Cast]; -"3437 3098" [id=3437, type=Gather]; -"3438 3100" [id=3438, type=Gather]; -"3439 3109" [id=3439, type=Unsqueeze]; -"3440 3110" [id=3440, type=Unsqueeze]; -"3441 3105" [id=3441, type=Slice]; -"3442 3107" [id=3442, type=Gather]; -"3443 3108" [id=3443, type=Unsqueeze]; -"3444 3113" [id=3444, type=NonMaxSuppression]; -"3445 3115" [id=3445, type=Gather]; -"3446 3116" [id=3446, type=Squeeze]; -"3447 3120" [id=3447, type=Gather]; -"3448 3046" [id=3448, type=Slice]; -"3449 3048" [id=3449, type=Gather]; -"3450 3049" [id=3450, type=Cast]; -"3451 3050" [id=3451, type=NonZero]; -"3452 3051" [id=3452, type=Transpose]; -"3453 3052" [id=3453, type=Squeeze]; -"3454 3055" [id=3454, type=Cast]; -"3455 3054" [id=3455, type=Gather]; -"3456 3056" [id=3456, type=Gather]; -"3457 3065" [id=3457, type=Unsqueeze]; -"3458 3066" [id=3458, 
type=Unsqueeze]; -"3459 3061" [id=3459, type=Slice]; -"3460 3063" [id=3460, type=Gather]; -"3461 3064" [id=3461, type=Unsqueeze]; -"3462 3069" [id=3462, type=NonMaxSuppression]; -"3463 3071" [id=3463, type=Gather]; -"3464 3072" [id=3464, type=Squeeze]; -"3465 3076" [id=3465, type=Gather]; -"3466 3002" [id=3466, type=Slice]; -"3467 3004" [id=3467, type=Gather]; -"3468 3005" [id=3468, type=Cast]; -"3469 3006" [id=3469, type=NonZero]; -"3470 3007" [id=3470, type=Transpose]; -"3471 3008" [id=3471, type=Squeeze]; -"3472 3011" [id=3472, type=Cast]; -"3473 3010" [id=3473, type=Gather]; -"3474 3012" [id=3474, type=Gather]; -"3475 3021" [id=3475, type=Unsqueeze]; -"3476 3022" [id=3476, type=Unsqueeze]; -"3477 3017" [id=3477, type=Slice]; -"3478 3019" [id=3478, type=Gather]; -"3479 3020" [id=3479, type=Unsqueeze]; -"3480 3025" [id=3480, type=NonMaxSuppression]; -"3481 3027" [id=3481, type=Gather]; -"3482 3028" [id=3482, type=Squeeze]; -"3483 3032" [id=3483, type=Gather]; -"3484 6520" [id=3484, type=Concat]; -"3485 6521" [id=3485, type=Shape]; -"3486 6523" [id=3486, type=Concat]; -"3487 6524" [id=3487, type=Cast]; -"3488 6525" [id=3488, type=ReduceMin]; -"3489 6526" [id=3489, type=Cast]; -"3490 6527" [id=3490, type=Unsqueeze]; -"3491 6528" [id=3491, type=TopK]; -"3492 6529" [id=3492, type=Cast]; -"3493 6505" [id=3493, type=Cast]; -"3494 6506" [id=3494, type=Gather]; -"3495 6461" [id=3495, type=Cast]; -"3496 6462" [id=3496, type=Gather]; -"3497 6417" [id=3497, type=Cast]; -"3498 6418" [id=3498, type=Gather]; -"3499 6373" [id=3499, type=Cast]; -"3500 6374" [id=3500, type=Gather]; -"3501 6329" [id=3501, type=Cast]; -"3502 6330" [id=3502, type=Gather]; -"3503 6285" [id=3503, type=Cast]; -"3504 6286" [id=3504, type=Gather]; -"3505 6241" [id=3505, type=Cast]; -"3506 6242" [id=3506, type=Gather]; -"3507 6197" [id=3507, type=Cast]; -"3508 6198" [id=3508, type=Gather]; -"3509 6153" [id=3509, type=Cast]; -"3510 6154" [id=3510, type=Gather]; -"3511 6109" [id=3511, type=Cast]; -"3512 6110" [id=3512, type=Gather]; -"3513 6065" [id=3513, type=Cast]; -"3514 6066" [id=3514, type=Gather]; -"3515 6021" [id=3515, type=Cast]; -"3516 6022" [id=3516, type=Gather]; -"3517 5977" [id=3517, type=Cast]; -"3518 5978" [id=3518, type=Gather]; -"3519 5933" [id=3519, type=Cast]; -"3520 5934" [id=3520, type=Gather]; -"3521 5889" [id=3521, type=Cast]; -"3522 5890" [id=3522, type=Gather]; -"3523 5845" [id=3523, type=Cast]; -"3524 5846" [id=3524, type=Gather]; -"3525 5801" [id=3525, type=Cast]; -"3526 5802" [id=3526, type=Gather]; -"3527 5757" [id=3527, type=Cast]; -"3528 5758" [id=3528, type=Gather]; -"3529 5713" [id=3529, type=Cast]; -"3530 5714" [id=3530, type=Gather]; -"3531 5669" [id=3531, type=Cast]; -"3532 5670" [id=3532, type=Gather]; -"3533 5625" [id=3533, type=Cast]; -"3534 5626" [id=3534, type=Gather]; -"3535 5581" [id=3535, type=Cast]; -"3536 5582" [id=3536, type=Gather]; -"3537 5537" [id=3537, type=Cast]; -"3538 5538" [id=3538, type=Gather]; -"3539 5493" [id=3539, type=Cast]; -"3540 5494" [id=3540, type=Gather]; -"3541 5449" [id=3541, type=Cast]; -"3542 5450" [id=3542, type=Gather]; -"3543 5405" [id=3543, type=Cast]; -"3544 5406" [id=3544, type=Gather]; -"3545 5361" [id=3545, type=Cast]; -"3546 5362" [id=3546, type=Gather]; -"3547 5317" [id=3547, type=Cast]; -"3548 5318" [id=3548, type=Gather]; -"3549 5273" [id=3549, type=Cast]; -"3550 5274" [id=3550, type=Gather]; -"3551 5229" [id=3551, type=Cast]; -"3552 5230" [id=3552, type=Gather]; -"3553 5185" [id=3553, type=Cast]; -"3554 5186" [id=3554, type=Gather]; -"3555 5141" 
[id=3555, type=Cast]; -"3556 5142" [id=3556, type=Gather]; -"3557 5097" [id=3557, type=Cast]; -"3558 5098" [id=3558, type=Gather]; -"3559 5053" [id=3559, type=Cast]; -"3560 5054" [id=3560, type=Gather]; -"3561 5009" [id=3561, type=Cast]; -"3562 5010" [id=3562, type=Gather]; -"3563 4965" [id=3563, type=Cast]; -"3564 4966" [id=3564, type=Gather]; -"3565 4921" [id=3565, type=Cast]; -"3566 4922" [id=3566, type=Gather]; -"3567 4877" [id=3567, type=Cast]; -"3568 4878" [id=3568, type=Gather]; -"3569 4833" [id=3569, type=Cast]; -"3570 4834" [id=3570, type=Gather]; -"3571 4789" [id=3571, type=Cast]; -"3572 4790" [id=3572, type=Gather]; -"3573 4745" [id=3573, type=Cast]; -"3574 4746" [id=3574, type=Gather]; -"3575 4701" [id=3575, type=Cast]; -"3576 4702" [id=3576, type=Gather]; -"3577 4657" [id=3577, type=Cast]; -"3578 4658" [id=3578, type=Gather]; -"3579 4613" [id=3579, type=Cast]; -"3580 4614" [id=3580, type=Gather]; -"3581 4569" [id=3581, type=Cast]; -"3582 4570" [id=3582, type=Gather]; -"3583 4525" [id=3583, type=Cast]; -"3584 4526" [id=3584, type=Gather]; -"3585 4481" [id=3585, type=Cast]; -"3586 4482" [id=3586, type=Gather]; -"3587 4437" [id=3587, type=Cast]; -"3588 4438" [id=3588, type=Gather]; -"3589 4393" [id=3589, type=Cast]; -"3590 4394" [id=3590, type=Gather]; -"3591 4349" [id=3591, type=Cast]; -"3592 4350" [id=3592, type=Gather]; -"3593 4305" [id=3593, type=Cast]; -"3594 4306" [id=3594, type=Gather]; -"3595 4261" [id=3595, type=Cast]; -"3596 4262" [id=3596, type=Gather]; -"3597 4217" [id=3597, type=Cast]; -"3598 4218" [id=3598, type=Gather]; -"3599 4173" [id=3599, type=Cast]; -"3600 4174" [id=3600, type=Gather]; -"3601 4129" [id=3601, type=Cast]; -"3602 4130" [id=3602, type=Gather]; -"3603 4085" [id=3603, type=Cast]; -"3604 4086" [id=3604, type=Gather]; -"3605 4041" [id=3605, type=Cast]; -"3606 4042" [id=3606, type=Gather]; -"3607 3997" [id=3607, type=Cast]; -"3608 3998" [id=3608, type=Gather]; -"3609 3953" [id=3609, type=Cast]; -"3610 3954" [id=3610, type=Gather]; -"3611 3909" [id=3611, type=Cast]; -"3612 3910" [id=3612, type=Gather]; -"3613 3865" [id=3613, type=Cast]; -"3614 3866" [id=3614, type=Gather]; -"3615 3821" [id=3615, type=Cast]; -"3616 3822" [id=3616, type=Gather]; -"3617 3777" [id=3617, type=Cast]; -"3618 3778" [id=3618, type=Gather]; -"3619 3733" [id=3619, type=Cast]; -"3620 3734" [id=3620, type=Gather]; -"3621 3689" [id=3621, type=Cast]; -"3622 3690" [id=3622, type=Gather]; -"3623 3645" [id=3623, type=Cast]; -"3624 3646" [id=3624, type=Gather]; -"3625 3601" [id=3625, type=Cast]; -"3626 3602" [id=3626, type=Gather]; -"3627 3557" [id=3627, type=Cast]; -"3628 3558" [id=3628, type=Gather]; -"3629 3513" [id=3629, type=Cast]; -"3630 3514" [id=3630, type=Gather]; -"3631 3469" [id=3631, type=Cast]; -"3632 3470" [id=3632, type=Gather]; -"3633 3425" [id=3633, type=Cast]; -"3634 3426" [id=3634, type=Gather]; -"3635 3381" [id=3635, type=Cast]; -"3636 3382" [id=3636, type=Gather]; -"3637 3337" [id=3637, type=Cast]; -"3638 3338" [id=3638, type=Gather]; -"3639 3293" [id=3639, type=Cast]; -"3640 3294" [id=3640, type=Gather]; -"3641 3249" [id=3641, type=Cast]; -"3642 3250" [id=3642, type=Gather]; -"3643 3205" [id=3643, type=Cast]; -"3644 3206" [id=3644, type=Gather]; -"3645 3161" [id=3645, type=Cast]; -"3646 3162" [id=3646, type=Gather]; -"3647 3117" [id=3647, type=Cast]; -"3648 3118" [id=3648, type=Gather]; -"3649 3073" [id=3649, type=Cast]; -"3650 3074" [id=3650, type=Gather]; -"3651 3029" [id=3651, type=Cast]; -"3652 3030" [id=3652, type=Gather]; -"3653 6518" [id=3653, type=Concat]; 
-"3654 6530" [id=3654, type=Gather]; -"3655 QuantizeLinear_6568_4" [id=3655, type=QuantizeLinear]; -"3656 DequantizeLinear_6568_4" [id=3656, type=DequantizeLinear]; -"3657 QuantizeLinear_6568_3" [id=3657, type=QuantizeLinear]; -"3658 DequantizeLinear_6568_3" [id=3658, type=DequantizeLinear]; -"3659 QuantizeLinear_6568_2" [id=3659, type=QuantizeLinear]; -"3660 DequantizeLinear_6568_2" [id=3660, type=DequantizeLinear]; -"3661 QuantizeLinear_6568_1" [id=3661, type=QuantizeLinear]; -"3662 DequantizeLinear_6568_1" [id=3662, type=DequantizeLinear]; -"3663 6576" [id=3663, type=Slice]; -"3664 6578" [id=3664, type=Gather]; -"3665 6569" [id=3665, type=Slice]; -"3666 6571" [id=3666, type=Gather]; -"3667 6579" [id=3667, type=Sub]; -"3668 QuantizeLinear_6617_1" [id=3668, type=QuantizeLinear]; -"3669 DequantizeLinear_6617_1" [id=3669, type=DequantizeLinear]; -"3670 6581" [id=3670, type=Add]; -"3671 6559" [id=3671, type=Slice]; -"3672 6561" [id=3672, type=Gather]; -"3673 6552" [id=3673, type=Slice]; -"3674 6554" [id=3674, type=Gather]; -"3675 6562" [id=3675, type=Sub]; -"3676 QuantizeLinear_6600_1" [id=3676, type=QuantizeLinear]; -"3677 DequantizeLinear_6600_1" [id=3677, type=DequantizeLinear]; -"3678 6564" [id=3678, type=Add]; -"3679 QuantizeLinear_6619_1" [id=3679, type=QuantizeLinear]; -"3680 DequantizeLinear_6619_1" [id=3680, type=DequantizeLinear]; -"3681 QuantizeLinear_6602_1" [id=3681, type=QuantizeLinear]; -"3682 DequantizeLinear_6602_1" [id=3682, type=DequantizeLinear]; -"3683 6582" [id=3683, type=Mul]; -"3684 QuantizeLinear_6620_1" [id=3684, type=QuantizeLinear]; -"3685 DequantizeLinear_6620_1" [id=3685, type=DequantizeLinear]; -"3686 6583" [id=3686, type=Sqrt]; -"3687 6586" [id=3687, type=Div]; -"3688 QuantizeLinear_6624_1" [id=3688, type=QuantizeLinear]; -"3689 DequantizeLinear_6624_1" [id=3689, type=DequantizeLinear]; -"3690 6587" [id=3690, type=Add]; -"3691 6588" [id=3691, type=Log]; -"3692 6590" [id=3692, type=Div]; -"3693 QuantizeLinear_6628_1" [id=3693, type=QuantizeLinear]; -"3694 DequantizeLinear_6628_1" [id=3694, type=DequantizeLinear]; -"3695 6592" [id=3695, type=Add]; -"3696 QuantizeLinear_6630_1" [id=3696, type=QuantizeLinear]; -"3697 DequantizeLinear_6630_1" [id=3697, type=DequantizeLinear]; -"3698 6593" [id=3698, type=Floor]; -"3699 6594" [id=3699, type=Clip]; -"3700 6595" [id=3700, type=Cast]; -"3701 6597" [id=3701, type=Sub]; -"3702 6599" [id=3702, type=Equal]; -"3703 6601" [id=3703, type=Cast]; -"3704 6602" [id=3704, type=NonZero]; -"3705 6603" [id=3705, type=Transpose]; -"3706 6604" [id=3706, type=Squeeze]; -"3707 6605" [id=3707, type=Cast]; -"3708 6539" [id=3708, type=Slice]; -"3709 6544" [id=3709, type=Slice]; -"3710 6545" [id=3710, type=Shape]; -"3711 6546" [id=3711, type=ConstantOfShape]; -"3712 6547" [id=3712, type=Concat]; -"3713 6606" [id=3713, type=Gather]; -"3714 6612" [id=3714, type=Gather]; -"3715 6608" [id=3715, type=Gather]; -"3716 6609" [id=3716, type=Squeeze]; -"3717 6610" [id=3717, type=Cast]; -"3718 6613" [id=3718, type=RoiAlign]; -"3719 6614" [id=3719, type=Cast]; -"3720 6702" [id=3720, type=Shape]; -"3721 6703" [id=3721, type=Gather]; -"3722 6707" [id=3722, type=Unsqueeze]; -"3723 6699" [id=3723, type=Shape]; -"3724 6700" [id=3724, type=Gather]; -"3725 6706" [id=3725, type=Unsqueeze]; -"3726 6696" [id=3726, type=Shape]; -"3727 6697" [id=3727, type=Gather]; -"3728 6705" [id=3728, type=Unsqueeze]; -"3729 6685" [id=3729, type=Equal]; -"3730 6687" [id=3730, type=Cast]; -"3731 6688" [id=3731, type=NonZero]; -"3732 6689" [id=3732, type=Transpose]; -"3733 6691" 
[id=3733, type=Reshape]; -"3734 6693" [id=3734, type=Shape]; -"3735 6694" [id=3735, type=Gather]; -"3736 6704" [id=3736, type=Unsqueeze]; -"3737 6708" [id=3737, type=Concat]; -"3738 6709" [id=3738, type=Expand]; -"3739 6710" [id=3739, type=Cast]; -"3740 6676" [id=3740, type=Shape]; -"3741 6677" [id=3741, type=Gather]; -"3742 6681" [id=3742, type=Unsqueeze]; -"3743 6673" [id=3743, type=Shape]; -"3744 6674" [id=3744, type=Gather]; -"3745 6680" [id=3745, type=Unsqueeze]; -"3746 6670" [id=3746, type=Shape]; -"3747 6671" [id=3747, type=Gather]; -"3748 6679" [id=3748, type=Unsqueeze]; -"3749 6667" [id=3749, type=Shape]; -"3750 6668" [id=3750, type=Gather]; -"3751 6678" [id=3751, type=Unsqueeze]; -"3752 6682" [id=3752, type=Concat]; -"3753 6683" [id=3753, type=ConstantOfShape]; -"3754 6711" [id=3754, type=ScatterElements]; -"3755 6616" [id=3755, type=Equal]; -"3756 6618" [id=3756, type=Cast]; -"3757 6619" [id=3757, type=NonZero]; -"3758 6620" [id=3758, type=Transpose]; -"3759 6621" [id=3759, type=Squeeze]; -"3760 6622" [id=3760, type=Cast]; -"3761 6623" [id=3761, type=Gather]; -"3762 6629" [id=3762, type=Gather]; -"3763 6625" [id=3763, type=Gather]; -"3764 6626" [id=3764, type=Squeeze]; -"3765 6627" [id=3765, type=Cast]; -"3766 6630" [id=3766, type=RoiAlign]; -"3767 6631" [id=3767, type=Cast]; -"3768 6730" [id=3768, type=Shape]; -"3769 6731" [id=3769, type=Gather]; -"3770 6735" [id=3770, type=Unsqueeze]; -"3771 6727" [id=3771, type=Shape]; -"3772 6728" [id=3772, type=Gather]; -"3773 6734" [id=3773, type=Unsqueeze]; -"3774 6724" [id=3774, type=Shape]; -"3775 6725" [id=3775, type=Gather]; -"3776 6733" [id=3776, type=Unsqueeze]; -"3777 6713" [id=3777, type=Equal]; -"3778 6715" [id=3778, type=Cast]; -"3779 6716" [id=3779, type=NonZero]; -"3780 6717" [id=3780, type=Transpose]; -"3781 6719" [id=3781, type=Reshape]; -"3782 6721" [id=3782, type=Shape]; -"3783 6722" [id=3783, type=Gather]; -"3784 6732" [id=3784, type=Unsqueeze]; -"3785 6736" [id=3785, type=Concat]; -"3786 6737" [id=3786, type=Expand]; -"3787 6738" [id=3787, type=Cast]; -"3788 6739" [id=3788, type=ScatterElements]; -"3789 6633" [id=3789, type=Equal]; -"3790 6635" [id=3790, type=Cast]; -"3791 6636" [id=3791, type=NonZero]; -"3792 6637" [id=3792, type=Transpose]; -"3793 6638" [id=3793, type=Squeeze]; -"3794 6639" [id=3794, type=Cast]; -"3795 6640" [id=3795, type=Gather]; -"3796 6646" [id=3796, type=Gather]; -"3797 6642" [id=3797, type=Gather]; -"3798 6643" [id=3798, type=Squeeze]; -"3799 6644" [id=3799, type=Cast]; -"3800 6647" [id=3800, type=RoiAlign]; -"3801 6648" [id=3801, type=Cast]; -"3802 6758" [id=3802, type=Shape]; -"3803 6759" [id=3803, type=Gather]; -"3804 6763" [id=3804, type=Unsqueeze]; -"3805 6755" [id=3805, type=Shape]; -"3806 6756" [id=3806, type=Gather]; -"3807 6762" [id=3807, type=Unsqueeze]; -"3808 6752" [id=3808, type=Shape]; -"3809 6753" [id=3809, type=Gather]; -"3810 6761" [id=3810, type=Unsqueeze]; -"3811 6741" [id=3811, type=Equal]; -"3812 6743" [id=3812, type=Cast]; -"3813 6744" [id=3813, type=NonZero]; -"3814 6745" [id=3814, type=Transpose]; -"3815 6747" [id=3815, type=Reshape]; -"3816 6749" [id=3816, type=Shape]; -"3817 6750" [id=3817, type=Gather]; -"3818 6760" [id=3818, type=Unsqueeze]; -"3819 6764" [id=3819, type=Concat]; -"3820 6765" [id=3820, type=Expand]; -"3821 6766" [id=3821, type=Cast]; -"3822 6767" [id=3822, type=ScatterElements]; -"3823 6650" [id=3823, type=Equal]; -"3824 6652" [id=3824, type=Cast]; -"3825 6653" [id=3825, type=NonZero]; -"3826 6654" [id=3826, type=Transpose]; -"3827 6655" [id=3827, 
type=Squeeze]; -"3828 6656" [id=3828, type=Cast]; -"3829 6657" [id=3829, type=Gather]; -"3830 6663" [id=3830, type=Gather]; -"3831 6659" [id=3831, type=Gather]; -"3832 6660" [id=3832, type=Squeeze]; -"3833 6661" [id=3833, type=Cast]; -"3834 6664" [id=3834, type=RoiAlign]; -"3835 6665" [id=3835, type=Cast]; -"3836 6786" [id=3836, type=Shape]; -"3837 6787" [id=3837, type=Gather]; -"3838 6791" [id=3838, type=Unsqueeze]; -"3839 6783" [id=3839, type=Shape]; -"3840 6784" [id=3840, type=Gather]; -"3841 6790" [id=3841, type=Unsqueeze]; -"3842 6780" [id=3842, type=Shape]; -"3843 6781" [id=3843, type=Gather]; -"3844 6789" [id=3844, type=Unsqueeze]; -"3845 6769" [id=3845, type=Equal]; -"3846 6771" [id=3846, type=Cast]; -"3847 6772" [id=3847, type=NonZero]; -"3848 6773" [id=3848, type=Transpose]; -"3849 6775" [id=3849, type=Reshape]; -"3850 6777" [id=3850, type=Shape]; -"3851 6778" [id=3851, type=Gather]; -"3852 6788" [id=3852, type=Unsqueeze]; -"3853 6792" [id=3853, type=Concat]; -"3854 6793" [id=3854, type=Expand]; -"3855 6794" [id=3855, type=Cast]; -"3856 6795" [id=3856, type=ScatterElements]; -"3857 QuantizeLinear_6833_1" [id=3857, type=QuantizeLinear]; -"3858 DequantizeLinear_6833_1" [id=3858, type=DequantizeLinear]; -"3859 QuantizeLinear_6834_1" [id=3859, type=QuantizeLinear]; -"3860 DequantizeLinear_6834_1" [id=3860, type=DequantizeLinear]; -"3861 6798" [id=3861, type=Conv]; -"3862 6799" [id=3862, type=Relu]; -"3863 QuantizeLinear_6837_1" [id=3863, type=QuantizeLinear]; -"3864 DequantizeLinear_6837_1" [id=3864, type=DequantizeLinear]; -"3865 QuantizeLinear_6838_1" [id=3865, type=QuantizeLinear]; -"3866 DequantizeLinear_6838_1" [id=3866, type=DequantizeLinear]; -"3867 6802" [id=3867, type=Conv]; -"3868 6803" [id=3868, type=Relu]; -"3869 QuantizeLinear_6841_1" [id=3869, type=QuantizeLinear]; -"3870 DequantizeLinear_6841_1" [id=3870, type=DequantizeLinear]; -"3871 QuantizeLinear_6842_1" [id=3871, type=QuantizeLinear]; -"3872 DequantizeLinear_6842_1" [id=3872, type=DequantizeLinear]; -"3873 6806" [id=3873, type=Conv]; -"3874 6807" [id=3874, type=Relu]; -"3875 QuantizeLinear_6845_1" [id=3875, type=QuantizeLinear]; -"3876 DequantizeLinear_6845_1" [id=3876, type=DequantizeLinear]; -"3877 QuantizeLinear_6846_1" [id=3877, type=QuantizeLinear]; -"3878 DequantizeLinear_6846_1" [id=3878, type=DequantizeLinear]; -"3879 6810" [id=3879, type=Conv]; -"3880 6811" [id=3880, type=Relu]; -"3881 QuantizeLinear_6849_1" [id=3881, type=QuantizeLinear]; -"3882 DequantizeLinear_6849_1" [id=3882, type=DequantizeLinear]; -"3883 QuantizeLinear_6850_1" [id=3883, type=QuantizeLinear]; -"3884 DequantizeLinear_6850_1" [id=3884, type=DequantizeLinear]; -"3885 6814" [id=3885, type=ConvTranspose]; -"3886 6815" [id=3886, type=Relu]; -"3887 QuantizeLinear_6853_1" [id=3887, type=QuantizeLinear]; -"3888 DequantizeLinear_6853_1" [id=3888, type=DequantizeLinear]; -"3889 QuantizeLinear_6854_1" [id=3889, type=QuantizeLinear]; -"3890 DequantizeLinear_6854_1" [id=3890, type=DequantizeLinear]; -"3891 6818" [id=3891, type=Conv]; -"3892 6819" [id=3892, type=Sigmoid]; -"3893 6844" [id=3893, type=Shape]; -"3894 6845" [id=3894, type=Gather]; -"3895 6822" [id=3895, type=Shape]; -"3896 6823" [id=3896, type=Gather]; -"3897 6824" [id=3897, type=Unsqueeze]; -"3898 6825" [id=3898, type=Concat]; -"3899 6826" [id=3899, type=ConstantOfShape]; -"3900 6827" [id=3900, type=Cast]; -"3901 6828" [id=3901, type=NonZero]; -"3902 6829" [id=3902, type=Transpose]; -"3903 6830" [id=3903, type=Squeeze]; -"3904 6846" [id=3904, type=Mul]; -"3905 6513" [id=3905, 
type=Slice]; -"3906 6515" [id=3906, type=Gather]; -"3907 6516" [id=3907, type=Shape]; -"3908 6517" [id=3908, type=ConstantOfShape]; -"3909 6469" [id=3909, type=Slice]; -"3910 6471" [id=3910, type=Gather]; -"3911 6472" [id=3911, type=Shape]; -"3912 6473" [id=3912, type=ConstantOfShape]; -"3913 6425" [id=3913, type=Slice]; -"3914 6427" [id=3914, type=Gather]; -"3915 6428" [id=3915, type=Shape]; -"3916 6429" [id=3916, type=ConstantOfShape]; -"3917 6381" [id=3917, type=Slice]; -"3918 6383" [id=3918, type=Gather]; -"3919 6384" [id=3919, type=Shape]; -"3920 6385" [id=3920, type=ConstantOfShape]; -"3921 6337" [id=3921, type=Slice]; -"3922 6339" [id=3922, type=Gather]; -"3923 6340" [id=3923, type=Shape]; -"3924 6341" [id=3924, type=ConstantOfShape]; -"3925 6293" [id=3925, type=Slice]; -"3926 6295" [id=3926, type=Gather]; -"3927 6296" [id=3927, type=Shape]; -"3928 6297" [id=3928, type=ConstantOfShape]; -"3929 6249" [id=3929, type=Slice]; -"3930 6251" [id=3930, type=Gather]; -"3931 6252" [id=3931, type=Shape]; -"3932 6253" [id=3932, type=ConstantOfShape]; -"3933 6205" [id=3933, type=Slice]; -"3934 6207" [id=3934, type=Gather]; -"3935 6208" [id=3935, type=Shape]; -"3936 6209" [id=3936, type=ConstantOfShape]; -"3937 6161" [id=3937, type=Slice]; -"3938 6163" [id=3938, type=Gather]; -"3939 6164" [id=3939, type=Shape]; -"3940 6165" [id=3940, type=ConstantOfShape]; -"3941 6117" [id=3941, type=Slice]; -"3942 6119" [id=3942, type=Gather]; -"3943 6120" [id=3943, type=Shape]; -"3944 6121" [id=3944, type=ConstantOfShape]; -"3945 6073" [id=3945, type=Slice]; -"3946 6075" [id=3946, type=Gather]; -"3947 6076" [id=3947, type=Shape]; -"3948 6077" [id=3948, type=ConstantOfShape]; -"3949 6029" [id=3949, type=Slice]; -"3950 6031" [id=3950, type=Gather]; -"3951 6032" [id=3951, type=Shape]; -"3952 6033" [id=3952, type=ConstantOfShape]; -"3953 5985" [id=3953, type=Slice]; -"3954 5987" [id=3954, type=Gather]; -"3955 5988" [id=3955, type=Shape]; -"3956 5989" [id=3956, type=ConstantOfShape]; -"3957 5941" [id=3957, type=Slice]; -"3958 5943" [id=3958, type=Gather]; -"3959 5944" [id=3959, type=Shape]; -"3960 5945" [id=3960, type=ConstantOfShape]; -"3961 5897" [id=3961, type=Slice]; -"3962 5899" [id=3962, type=Gather]; -"3963 5900" [id=3963, type=Shape]; -"3964 5901" [id=3964, type=ConstantOfShape]; -"3965 5853" [id=3965, type=Slice]; -"3966 5855" [id=3966, type=Gather]; -"3967 5856" [id=3967, type=Shape]; -"3968 5857" [id=3968, type=ConstantOfShape]; -"3969 5809" [id=3969, type=Slice]; -"3970 5811" [id=3970, type=Gather]; -"3971 5812" [id=3971, type=Shape]; -"3972 5813" [id=3972, type=ConstantOfShape]; -"3973 5765" [id=3973, type=Slice]; -"3974 5767" [id=3974, type=Gather]; -"3975 5768" [id=3975, type=Shape]; -"3976 5769" [id=3976, type=ConstantOfShape]; -"3977 5721" [id=3977, type=Slice]; -"3978 5723" [id=3978, type=Gather]; -"3979 5724" [id=3979, type=Shape]; -"3980 5725" [id=3980, type=ConstantOfShape]; -"3981 5677" [id=3981, type=Slice]; -"3982 5679" [id=3982, type=Gather]; -"3983 5680" [id=3983, type=Shape]; -"3984 5681" [id=3984, type=ConstantOfShape]; -"3985 5633" [id=3985, type=Slice]; -"3986 5635" [id=3986, type=Gather]; -"3987 5636" [id=3987, type=Shape]; -"3988 5637" [id=3988, type=ConstantOfShape]; -"3989 5589" [id=3989, type=Slice]; -"3990 5591" [id=3990, type=Gather]; -"3991 5592" [id=3991, type=Shape]; -"3992 5593" [id=3992, type=ConstantOfShape]; -"3993 5545" [id=3993, type=Slice]; -"3994 5547" [id=3994, type=Gather]; -"3995 5548" [id=3995, type=Shape]; -"3996 5549" [id=3996, type=ConstantOfShape]; -"3997 5501" 
[id=3997, type=Slice]; -"3998 5503" [id=3998, type=Gather]; -"3999 5504" [id=3999, type=Shape]; -"4000 5505" [id=4000, type=ConstantOfShape]; -"4001 5457" [id=4001, type=Slice]; -"4002 5459" [id=4002, type=Gather]; -"4003 5460" [id=4003, type=Shape]; -"4004 5461" [id=4004, type=ConstantOfShape]; -"4005 5413" [id=4005, type=Slice]; -"4006 5415" [id=4006, type=Gather]; -"4007 5416" [id=4007, type=Shape]; -"4008 5417" [id=4008, type=ConstantOfShape]; -"4009 5369" [id=4009, type=Slice]; -"4010 5371" [id=4010, type=Gather]; -"4011 5372" [id=4011, type=Shape]; -"4012 5373" [id=4012, type=ConstantOfShape]; -"4013 5325" [id=4013, type=Slice]; -"4014 5327" [id=4014, type=Gather]; -"4015 5328" [id=4015, type=Shape]; -"4016 5329" [id=4016, type=ConstantOfShape]; -"4017 5281" [id=4017, type=Slice]; -"4018 5283" [id=4018, type=Gather]; -"4019 5284" [id=4019, type=Shape]; -"4020 5285" [id=4020, type=ConstantOfShape]; -"4021 5237" [id=4021, type=Slice]; -"4022 5239" [id=4022, type=Gather]; -"4023 5240" [id=4023, type=Shape]; -"4024 5241" [id=4024, type=ConstantOfShape]; -"4025 5193" [id=4025, type=Slice]; -"4026 5195" [id=4026, type=Gather]; -"4027 5196" [id=4027, type=Shape]; -"4028 5197" [id=4028, type=ConstantOfShape]; -"4029 5149" [id=4029, type=Slice]; -"4030 5151" [id=4030, type=Gather]; -"4031 5152" [id=4031, type=Shape]; -"4032 5153" [id=4032, type=ConstantOfShape]; -"4033 5105" [id=4033, type=Slice]; -"4034 5107" [id=4034, type=Gather]; -"4035 5108" [id=4035, type=Shape]; -"4036 5109" [id=4036, type=ConstantOfShape]; -"4037 5061" [id=4037, type=Slice]; -"4038 5063" [id=4038, type=Gather]; -"4039 5064" [id=4039, type=Shape]; -"4040 5065" [id=4040, type=ConstantOfShape]; -"4041 5017" [id=4041, type=Slice]; -"4042 5019" [id=4042, type=Gather]; -"4043 5020" [id=4043, type=Shape]; -"4044 5021" [id=4044, type=ConstantOfShape]; -"4045 4973" [id=4045, type=Slice]; -"4046 4975" [id=4046, type=Gather]; -"4047 4976" [id=4047, type=Shape]; -"4048 4977" [id=4048, type=ConstantOfShape]; -"4049 4929" [id=4049, type=Slice]; -"4050 4931" [id=4050, type=Gather]; -"4051 4932" [id=4051, type=Shape]; -"4052 4933" [id=4052, type=ConstantOfShape]; -"4053 4885" [id=4053, type=Slice]; -"4054 4887" [id=4054, type=Gather]; -"4055 4888" [id=4055, type=Shape]; -"4056 4889" [id=4056, type=ConstantOfShape]; -"4057 4841" [id=4057, type=Slice]; -"4058 4843" [id=4058, type=Gather]; -"4059 4844" [id=4059, type=Shape]; -"4060 4845" [id=4060, type=ConstantOfShape]; -"4061 4797" [id=4061, type=Slice]; -"4062 4799" [id=4062, type=Gather]; -"4063 4800" [id=4063, type=Shape]; -"4064 4801" [id=4064, type=ConstantOfShape]; -"4065 4753" [id=4065, type=Slice]; -"4066 4755" [id=4066, type=Gather]; -"4067 4756" [id=4067, type=Shape]; -"4068 4757" [id=4068, type=ConstantOfShape]; -"4069 4709" [id=4069, type=Slice]; -"4070 4711" [id=4070, type=Gather]; -"4071 4712" [id=4071, type=Shape]; -"4072 4713" [id=4072, type=ConstantOfShape]; -"4073 4665" [id=4073, type=Slice]; -"4074 4667" [id=4074, type=Gather]; -"4075 4668" [id=4075, type=Shape]; -"4076 4669" [id=4076, type=ConstantOfShape]; -"4077 4621" [id=4077, type=Slice]; -"4078 4623" [id=4078, type=Gather]; -"4079 4624" [id=4079, type=Shape]; -"4080 4625" [id=4080, type=ConstantOfShape]; -"4081 4577" [id=4081, type=Slice]; -"4082 4579" [id=4082, type=Gather]; -"4083 4580" [id=4083, type=Shape]; -"4084 4581" [id=4084, type=ConstantOfShape]; -"4085 4533" [id=4085, type=Slice]; -"4086 4535" [id=4086, type=Gather]; -"4087 4536" [id=4087, type=Shape]; -"4088 4537" [id=4088, type=ConstantOfShape]; 
-"4089 4489" [id=4089, type=Slice]; -"4090 4491" [id=4090, type=Gather]; -"4091 4492" [id=4091, type=Shape]; -"4092 4493" [id=4092, type=ConstantOfShape]; -"4093 4445" [id=4093, type=Slice]; -"4094 4447" [id=4094, type=Gather]; -"4095 4448" [id=4095, type=Shape]; -"4096 4449" [id=4096, type=ConstantOfShape]; -"4097 4401" [id=4097, type=Slice]; -"4098 4403" [id=4098, type=Gather]; -"4099 4404" [id=4099, type=Shape]; -"4100 4405" [id=4100, type=ConstantOfShape]; -"4101 4357" [id=4101, type=Slice]; -"4102 4359" [id=4102, type=Gather]; -"4103 4360" [id=4103, type=Shape]; -"4104 4361" [id=4104, type=ConstantOfShape]; -"4105 4313" [id=4105, type=Slice]; -"4106 4315" [id=4106, type=Gather]; -"4107 4316" [id=4107, type=Shape]; -"4108 4317" [id=4108, type=ConstantOfShape]; -"4109 4269" [id=4109, type=Slice]; -"4110 4271" [id=4110, type=Gather]; -"4111 4272" [id=4111, type=Shape]; -"4112 4273" [id=4112, type=ConstantOfShape]; -"4113 4225" [id=4113, type=Slice]; -"4114 4227" [id=4114, type=Gather]; -"4115 4228" [id=4115, type=Shape]; -"4116 4229" [id=4116, type=ConstantOfShape]; -"4117 4181" [id=4117, type=Slice]; -"4118 4183" [id=4118, type=Gather]; -"4119 4184" [id=4119, type=Shape]; -"4120 4185" [id=4120, type=ConstantOfShape]; -"4121 4137" [id=4121, type=Slice]; -"4122 4139" [id=4122, type=Gather]; -"4123 4140" [id=4123, type=Shape]; -"4124 4141" [id=4124, type=ConstantOfShape]; -"4125 4093" [id=4125, type=Slice]; -"4126 4095" [id=4126, type=Gather]; -"4127 4096" [id=4127, type=Shape]; -"4128 4097" [id=4128, type=ConstantOfShape]; -"4129 4049" [id=4129, type=Slice]; -"4130 4051" [id=4130, type=Gather]; -"4131 4052" [id=4131, type=Shape]; -"4132 4053" [id=4132, type=ConstantOfShape]; -"4133 4005" [id=4133, type=Slice]; -"4134 4007" [id=4134, type=Gather]; -"4135 4008" [id=4135, type=Shape]; -"4136 4009" [id=4136, type=ConstantOfShape]; -"4137 3961" [id=4137, type=Slice]; -"4138 3963" [id=4138, type=Gather]; -"4139 3964" [id=4139, type=Shape]; -"4140 3965" [id=4140, type=ConstantOfShape]; -"4141 3917" [id=4141, type=Slice]; -"4142 3919" [id=4142, type=Gather]; -"4143 3920" [id=4143, type=Shape]; -"4144 3921" [id=4144, type=ConstantOfShape]; -"4145 3873" [id=4145, type=Slice]; -"4146 3875" [id=4146, type=Gather]; -"4147 3876" [id=4147, type=Shape]; -"4148 3877" [id=4148, type=ConstantOfShape]; -"4149 3829" [id=4149, type=Slice]; -"4150 3831" [id=4150, type=Gather]; -"4151 3832" [id=4151, type=Shape]; -"4152 3833" [id=4152, type=ConstantOfShape]; -"4153 3785" [id=4153, type=Slice]; -"4154 3787" [id=4154, type=Gather]; -"4155 3788" [id=4155, type=Shape]; -"4156 3789" [id=4156, type=ConstantOfShape]; -"4157 3741" [id=4157, type=Slice]; -"4158 3743" [id=4158, type=Gather]; -"4159 3744" [id=4159, type=Shape]; -"4160 3745" [id=4160, type=ConstantOfShape]; -"4161 3697" [id=4161, type=Slice]; -"4162 3699" [id=4162, type=Gather]; -"4163 3700" [id=4163, type=Shape]; -"4164 3701" [id=4164, type=ConstantOfShape]; -"4165 3653" [id=4165, type=Slice]; -"4166 3655" [id=4166, type=Gather]; -"4167 3656" [id=4167, type=Shape]; -"4168 3657" [id=4168, type=ConstantOfShape]; -"4169 3609" [id=4169, type=Slice]; -"4170 3611" [id=4170, type=Gather]; -"4171 3612" [id=4171, type=Shape]; -"4172 3613" [id=4172, type=ConstantOfShape]; -"4173 3565" [id=4173, type=Slice]; -"4174 3567" [id=4174, type=Gather]; -"4175 3568" [id=4175, type=Shape]; -"4176 3569" [id=4176, type=ConstantOfShape]; -"4177 3521" [id=4177, type=Slice]; -"4178 3523" [id=4178, type=Gather]; -"4179 3524" [id=4179, type=Shape]; -"4180 3525" [id=4180, 
type=ConstantOfShape]; -"4181 3477" [id=4181, type=Slice]; -"4182 3479" [id=4182, type=Gather]; -"4183 3480" [id=4183, type=Shape]; -"4184 3481" [id=4184, type=ConstantOfShape]; -"4185 3433" [id=4185, type=Slice]; -"4186 3435" [id=4186, type=Gather]; -"4187 3436" [id=4187, type=Shape]; -"4188 3437" [id=4188, type=ConstantOfShape]; -"4189 3389" [id=4189, type=Slice]; -"4190 3391" [id=4190, type=Gather]; -"4191 3392" [id=4191, type=Shape]; -"4192 3393" [id=4192, type=ConstantOfShape]; -"4193 3345" [id=4193, type=Slice]; -"4194 3347" [id=4194, type=Gather]; -"4195 3348" [id=4195, type=Shape]; -"4196 3349" [id=4196, type=ConstantOfShape]; -"4197 3301" [id=4197, type=Slice]; -"4198 3303" [id=4198, type=Gather]; -"4199 3304" [id=4199, type=Shape]; -"4200 3305" [id=4200, type=ConstantOfShape]; -"4201 3257" [id=4201, type=Slice]; -"4202 3259" [id=4202, type=Gather]; -"4203 3260" [id=4203, type=Shape]; -"4204 3261" [id=4204, type=ConstantOfShape]; -"4205 3213" [id=4205, type=Slice]; -"4206 3215" [id=4206, type=Gather]; -"4207 3216" [id=4207, type=Shape]; -"4208 3217" [id=4208, type=ConstantOfShape]; -"4209 3169" [id=4209, type=Slice]; -"4210 3171" [id=4210, type=Gather]; -"4211 3172" [id=4211, type=Shape]; -"4212 3173" [id=4212, type=ConstantOfShape]; -"4213 3125" [id=4213, type=Slice]; -"4214 3127" [id=4214, type=Gather]; -"4215 3128" [id=4215, type=Shape]; -"4216 3129" [id=4216, type=ConstantOfShape]; -"4217 3081" [id=4217, type=Slice]; -"4218 3083" [id=4218, type=Gather]; -"4219 3084" [id=4219, type=Shape]; -"4220 3085" [id=4220, type=ConstantOfShape]; -"4221 3037" [id=4221, type=Slice]; -"4222 3039" [id=4222, type=Gather]; -"4223 3040" [id=4223, type=Shape]; -"4224 3041" [id=4224, type=ConstantOfShape]; -"4225 6519" [id=4225, type=Concat]; -"4226 6532" [id=4226, type=Gather]; -"4227 6820" [id=4227, type=Concat]; -"4228 6847" [id=4228, type=Add]; -"4229 6835" [id=4229, type=Shape]; -"4230 6836" [id=4230, type=Gather]; -"4231 6840" [id=4231, type=Unsqueeze]; -"4232 6832" [id=4232, type=Shape]; -"4233 6833" [id=4233, type=Gather]; -"4234 6839" [id=4234, type=Unsqueeze]; -"4235 6838" [id=4235, type=Unsqueeze]; -"4236 6841" [id=4236, type=Concat]; -"4237 6842" [id=4237, type=Reshape]; -"4238 6848" [id=4238, type=Gather]; -"4239 6849" [id=4239, type=Unsqueeze]; -"4240 6533" [id=4240, type=Cast]; -"4241 6534" [id=4241, type=Gather]; -"4242 nncf_model_input_0" [id=4242, type=nncf_model_input]; -"4243 nncf_model_output_0" [id=4243, type=nncf_model_output]; -"4244 nncf_model_output_1" [id=4244, type=nncf_model_output]; -"4245 nncf_model_output_2" [id=4245, type=nncf_model_output]; -"4246 nncf_model_output_3" [id=4246, type=nncf_model_output]; +"1750 QuantizeLinear_2527_4" [id=1750, type=QuantizeLinear]; +"1751 DequantizeLinear_2527_4" [id=1751, type=DequantizeLinear]; +"1752 QuantizeLinear_2527_3" [id=1752, type=QuantizeLinear]; +"1753 DequantizeLinear_2527_3" [id=1753, type=DequantizeLinear]; +"1754 QuantizeLinear_2527_2" [id=1754, type=QuantizeLinear]; +"1755 DequantizeLinear_2527_2" [id=1755, type=DequantizeLinear]; +"1756 QuantizeLinear_2527_1" [id=1756, type=QuantizeLinear]; +"1757 DequantizeLinear_2527_1" [id=1757, type=DequantizeLinear]; +"1758 2532" [id=1758, type=Slice]; +"1759 2534" [id=1759, type=Gather]; +"1760 2525" [id=1760, type=Slice]; +"1761 2527" [id=1761, type=Gather]; +"1762 2535" [id=1762, type=Sub]; +"1763 QuantizeLinear_2572_1" [id=1763, type=QuantizeLinear]; +"1764 DequantizeLinear_2572_1" [id=1764, type=DequantizeLinear]; +"1765 2537" [id=1765, type=Add]; +"1766 2515" [id=1766, 
type=Slice]; +"1767 2517" [id=1767, type=Gather]; +"1768 2508" [id=1768, type=Slice]; +"1769 2510" [id=1769, type=Gather]; +"1770 2518" [id=1770, type=Sub]; +"1771 QuantizeLinear_2555_1" [id=1771, type=QuantizeLinear]; +"1772 DequantizeLinear_2555_1" [id=1772, type=DequantizeLinear]; +"1773 2520" [id=1773, type=Add]; +"1774 QuantizeLinear_2574_1" [id=1774, type=QuantizeLinear]; +"1775 DequantizeLinear_2574_1" [id=1775, type=DequantizeLinear]; +"1776 QuantizeLinear_2557_1" [id=1776, type=QuantizeLinear]; +"1777 DequantizeLinear_2557_1" [id=1777, type=DequantizeLinear]; +"1778 2538" [id=1778, type=Mul]; +"1779 QuantizeLinear_2575_1" [id=1779, type=QuantizeLinear]; +"1780 DequantizeLinear_2575_1" [id=1780, type=DequantizeLinear]; +"1781 2539" [id=1781, type=Sqrt]; +"1782 2542" [id=1782, type=Div]; +"1783 QuantizeLinear_2579_1" [id=1783, type=QuantizeLinear]; +"1784 DequantizeLinear_2579_1" [id=1784, type=DequantizeLinear]; +"1785 2543" [id=1785, type=Add]; +"1786 2544" [id=1786, type=Log]; +"1787 2546" [id=1787, type=Div]; +"1788 QuantizeLinear_2583_1" [id=1788, type=QuantizeLinear]; +"1789 DequantizeLinear_2583_1" [id=1789, type=DequantizeLinear]; +"1790 2548" [id=1790, type=Add]; +"1791 QuantizeLinear_2585_1" [id=1791, type=QuantizeLinear]; +"1792 DequantizeLinear_2585_1" [id=1792, type=DequantizeLinear]; +"1793 2549" [id=1793, type=Floor]; +"1794 2550" [id=1794, type=Clip]; +"1795 2551" [id=1795, type=Cast]; +"1796 2553" [id=1796, type=Sub]; +"1797 2555" [id=1797, type=Equal]; +"1798 2557" [id=1798, type=Cast]; +"1799 2558" [id=1799, type=NonZero]; +"1800 2559" [id=1800, type=Transpose]; +"1801 2560" [id=1801, type=Squeeze]; +"1802 2561" [id=1802, type=Cast]; +"1803 2495" [id=1803, type=Slice]; +"1804 2500" [id=1804, type=Slice]; +"1805 2501" [id=1805, type=Shape]; +"1806 2502" [id=1806, type=ConstantOfShape]; +"1807 2503" [id=1807, type=Concat]; +"1808 2562" [id=1808, type=Gather]; +"1809 2568" [id=1809, type=Gather]; +"1810 2564" [id=1810, type=Gather]; +"1811 2565" [id=1811, type=Squeeze]; +"1812 2566" [id=1812, type=Cast]; +"1813 2569" [id=1813, type=RoiAlign]; +"1814 2570" [id=1814, type=Cast]; +"1815 2658" [id=1815, type=Shape]; +"1816 2659" [id=1816, type=Gather]; +"1817 2663" [id=1817, type=Unsqueeze]; +"1818 2655" [id=1818, type=Shape]; +"1819 2656" [id=1819, type=Gather]; +"1820 2662" [id=1820, type=Unsqueeze]; +"1821 2652" [id=1821, type=Shape]; +"1822 2653" [id=1822, type=Gather]; +"1823 2661" [id=1823, type=Unsqueeze]; +"1824 2641" [id=1824, type=Equal]; +"1825 2643" [id=1825, type=Cast]; +"1826 2644" [id=1826, type=NonZero]; +"1827 2645" [id=1827, type=Transpose]; +"1828 2647" [id=1828, type=Reshape]; +"1829 2649" [id=1829, type=Shape]; +"1830 2650" [id=1830, type=Gather]; +"1831 2660" [id=1831, type=Unsqueeze]; +"1832 2664" [id=1832, type=Concat]; +"1833 2665" [id=1833, type=Expand]; +"1834 2666" [id=1834, type=Cast]; +"1835 2632" [id=1835, type=Shape]; +"1836 2633" [id=1836, type=Gather]; +"1837 2637" [id=1837, type=Unsqueeze]; +"1838 2629" [id=1838, type=Shape]; +"1839 2630" [id=1839, type=Gather]; +"1840 2636" [id=1840, type=Unsqueeze]; +"1841 2626" [id=1841, type=Shape]; +"1842 2627" [id=1842, type=Gather]; +"1843 2635" [id=1843, type=Unsqueeze]; +"1844 2623" [id=1844, type=Shape]; +"1845 2624" [id=1845, type=Gather]; +"1846 2634" [id=1846, type=Unsqueeze]; +"1847 2638" [id=1847, type=Concat]; +"1848 2639" [id=1848, type=ConstantOfShape]; +"1849 2667" [id=1849, type=ScatterElements]; +"1850 2572" [id=1850, type=Equal]; +"1851 2574" [id=1851, type=Cast]; +"1852 2575" 
[id=1852, type=NonZero]; +"1853 2576" [id=1853, type=Transpose]; +"1854 2577" [id=1854, type=Squeeze]; +"1855 2578" [id=1855, type=Cast]; +"1856 2579" [id=1856, type=Gather]; +"1857 2585" [id=1857, type=Gather]; +"1858 2581" [id=1858, type=Gather]; +"1859 2582" [id=1859, type=Squeeze]; +"1860 2583" [id=1860, type=Cast]; +"1861 2586" [id=1861, type=RoiAlign]; +"1862 2587" [id=1862, type=Cast]; +"1863 2686" [id=1863, type=Shape]; +"1864 2687" [id=1864, type=Gather]; +"1865 2691" [id=1865, type=Unsqueeze]; +"1866 2683" [id=1866, type=Shape]; +"1867 2684" [id=1867, type=Gather]; +"1868 2690" [id=1868, type=Unsqueeze]; +"1869 2680" [id=1869, type=Shape]; +"1870 2681" [id=1870, type=Gather]; +"1871 2689" [id=1871, type=Unsqueeze]; +"1872 2669" [id=1872, type=Equal]; +"1873 2671" [id=1873, type=Cast]; +"1874 2672" [id=1874, type=NonZero]; +"1875 2673" [id=1875, type=Transpose]; +"1876 2675" [id=1876, type=Reshape]; +"1877 2677" [id=1877, type=Shape]; +"1878 2678" [id=1878, type=Gather]; +"1879 2688" [id=1879, type=Unsqueeze]; +"1880 2692" [id=1880, type=Concat]; +"1881 2693" [id=1881, type=Expand]; +"1882 2694" [id=1882, type=Cast]; +"1883 2695" [id=1883, type=ScatterElements]; +"1884 2589" [id=1884, type=Equal]; +"1885 2591" [id=1885, type=Cast]; +"1886 2592" [id=1886, type=NonZero]; +"1887 2593" [id=1887, type=Transpose]; +"1888 2594" [id=1888, type=Squeeze]; +"1889 2595" [id=1889, type=Cast]; +"1890 2596" [id=1890, type=Gather]; +"1891 2602" [id=1891, type=Gather]; +"1892 2598" [id=1892, type=Gather]; +"1893 2599" [id=1893, type=Squeeze]; +"1894 2600" [id=1894, type=Cast]; +"1895 2603" [id=1895, type=RoiAlign]; +"1896 2604" [id=1896, type=Cast]; +"1897 2714" [id=1897, type=Shape]; +"1898 2715" [id=1898, type=Gather]; +"1899 2719" [id=1899, type=Unsqueeze]; +"1900 2711" [id=1900, type=Shape]; +"1901 2712" [id=1901, type=Gather]; +"1902 2718" [id=1902, type=Unsqueeze]; +"1903 2708" [id=1903, type=Shape]; +"1904 2709" [id=1904, type=Gather]; +"1905 2717" [id=1905, type=Unsqueeze]; +"1906 2697" [id=1906, type=Equal]; +"1907 2699" [id=1907, type=Cast]; +"1908 2700" [id=1908, type=NonZero]; +"1909 2701" [id=1909, type=Transpose]; +"1910 2703" [id=1910, type=Reshape]; +"1911 2705" [id=1911, type=Shape]; +"1912 2706" [id=1912, type=Gather]; +"1913 2716" [id=1913, type=Unsqueeze]; +"1914 2720" [id=1914, type=Concat]; +"1915 2721" [id=1915, type=Expand]; +"1916 2722" [id=1916, type=Cast]; +"1917 2723" [id=1917, type=ScatterElements]; +"1918 2606" [id=1918, type=Equal]; +"1919 2608" [id=1919, type=Cast]; +"1920 2609" [id=1920, type=NonZero]; +"1921 2610" [id=1921, type=Transpose]; +"1922 2611" [id=1922, type=Squeeze]; +"1923 2612" [id=1923, type=Cast]; +"1924 2613" [id=1924, type=Gather]; +"1925 2619" [id=1925, type=Gather]; +"1926 2615" [id=1926, type=Gather]; +"1927 2616" [id=1927, type=Squeeze]; +"1928 2617" [id=1928, type=Cast]; +"1929 2620" [id=1929, type=RoiAlign]; +"1930 2621" [id=1930, type=Cast]; +"1931 2742" [id=1931, type=Shape]; +"1932 2743" [id=1932, type=Gather]; +"1933 2747" [id=1933, type=Unsqueeze]; +"1934 2739" [id=1934, type=Shape]; +"1935 2740" [id=1935, type=Gather]; +"1936 2746" [id=1936, type=Unsqueeze]; +"1937 2736" [id=1937, type=Shape]; +"1938 2737" [id=1938, type=Gather]; +"1939 2745" [id=1939, type=Unsqueeze]; +"1940 2725" [id=1940, type=Equal]; +"1941 2727" [id=1941, type=Cast]; +"1942 2728" [id=1942, type=NonZero]; +"1943 2729" [id=1943, type=Transpose]; +"1944 2731" [id=1944, type=Reshape]; +"1945 2733" [id=1945, type=Shape]; +"1946 2734" [id=1946, type=Gather]; +"1947 
2744" [id=1947, type=Unsqueeze]; +"1948 2748" [id=1948, type=Concat]; +"1949 2749" [id=1949, type=Expand]; +"1950 2750" [id=1950, type=Cast]; +"1951 2751" [id=1951, type=ScatterElements]; +"1952 2757" [id=1952, type=Unsqueeze]; +"1953 QuantizeLinear_2788_1" [id=1953, type=QuantizeLinear]; +"1954 DequantizeLinear_2788_1" [id=1954, type=DequantizeLinear]; +"1955 2753" [id=1955, type=Shape]; +"1956 2754" [id=1956, type=Gather]; +"1957 2756" [id=1957, type=Unsqueeze]; +"1958 2758" [id=1958, type=Concat]; +"1959 2759" [id=1959, type=Reshape]; +"1960 QuantizeLinear_2797_1" [id=1960, type=QuantizeLinear]; +"1961 DequantizeLinear_2797_1" [id=1961, type=DequantizeLinear]; +"1962 2762_MatMul" [id=1962, type=MatMul]; +"1963 2762_Add" [id=1963, type=Add]; +"1964 2763" [id=1964, type=Relu]; +"1965 QuantizeLinear_2800_1" [id=1965, type=QuantizeLinear]; +"1966 DequantizeLinear_2800_1" [id=1966, type=DequantizeLinear]; +"1967 QuantizeLinear_2801_1" [id=1967, type=QuantizeLinear]; +"1968 DequantizeLinear_2801_1" [id=1968, type=DequantizeLinear]; +"1969 2766_MatMul" [id=1969, type=MatMul]; +"1970 2766_Add" [id=1970, type=Add]; +"1971 2767" [id=1971, type=Relu]; +"1972 QuantizeLinear_2804_1" [id=1972, type=QuantizeLinear]; +"1973 DequantizeLinear_2804_1" [id=1973, type=DequantizeLinear]; +"1974 QuantizeLinear_2805_1" [id=1974, type=QuantizeLinear]; +"1975 DequantizeLinear_2805_1" [id=1975, type=DequantizeLinear]; +"1976 2770_MatMul" [id=1976, type=MatMul]; +"1977 2770_Add" [id=1977, type=Add]; +"1978 2774" [id=1978, type=Softmax]; +"1979 2950" [id=1979, type=Shape]; +"1980 2951" [id=1980, type=Gather]; +"1981 2992" [id=1981, type=Unsqueeze]; +"1982 2991" [id=1982, type=Unsqueeze]; +"1983 2993" [id=1983, type=Concat]; +"1984 2955" [id=1984, type=Reshape]; +"1985 2994" [id=1985, type=Reshape]; +"1986 2996" [id=1986, type=Greater]; +"1987 2997" [id=1987, type=Cast]; +"1988 6478" [id=1988, type=Slice]; +"1989 6480" [id=1989, type=Gather]; +"1990 6481" [id=1990, type=Cast]; +"1991 6482" [id=1991, type=NonZero]; +"1992 6483" [id=1992, type=Transpose]; +"1993 6484" [id=1993, type=Squeeze]; +"1994 6487" [id=1994, type=Cast]; +"1995 6486" [id=1995, type=Gather]; +"1996 6488" [id=1996, type=Gather]; +"1997 6497" [id=1997, type=Unsqueeze]; +"1998 6498" [id=1998, type=Unsqueeze]; +"1999 2984" [id=1999, type=Mul]; +"2000 2987" [id=2000, type=Unsqueeze]; +"2001 2986" [id=2001, type=Unsqueeze]; +"2002 2988" [id=2002, type=Concat]; +"2003 QuantizeLinear_2808_1" [id=2003, type=QuantizeLinear]; +"2004 DequantizeLinear_2808_1" [id=2004, type=DequantizeLinear]; +"2005 2773_MatMul" [id=2005, type=MatMul]; +"2006 2773_Add" [id=2006, type=Add]; +"2007 2776" [id=2007, type=Flatten]; +"2008 2947" [id=2008, type=Shape]; +"2009 2775" [id=2009, type=Concat]; +"2010 2777" [id=2010, type=Cast]; +"2011 2806" [id=2011, type=Slice]; +"2012 2808" [id=2012, type=Gather]; +"2013 2799" [id=2013, type=Slice]; +"2014 2801" [id=2014, type=Gather]; +"2015 2809" [id=2015, type=Sub]; +"2016 2811" [id=2016, type=Add]; +"2017 2923" [id=2017, type=Slice]; +"2018 2924" [id=2018, type=Unsqueeze]; +"2019 2872" [id=2019, type=Slice]; +"2020 2877" [id=2020, type=Slice]; +"2021 2879" [id=2021, type=Div]; +"2022 2881" [id=2022, type=Clip]; +"2023 2918" [id=2023, type=Exp]; +"2024 2925" [id=2024, type=Mul]; +"2025 2938" [id=2025, type=Mul]; +"2026 2830" [id=2026, type=Mul]; +"2027 2826" [id=2027, type=Slice]; +"2028 2828" [id=2028, type=Gather]; +"2029 2831" [id=2029, type=Add]; +"2030 2907" [id=2030, type=Slice]; +"2031 2908" [id=2031, type=Unsqueeze]; +"2032 
2900" [id=2032, type=Slice]; +"2033 2901" [id=2033, type=Unsqueeze]; +"2034 2848" [id=2034, type=Slice]; +"2035 2853" [id=2035, type=Slice]; +"2036 2855" [id=2036, type=Div]; +"2037 2902" [id=2037, type=Mul]; +"2038 2909" [id=2038, type=Add]; +"2039 2939" [id=2039, type=Add]; +"2040 2941" [id=2040, type=Sub]; +"2041 2945" [id=2041, type=Unsqueeze]; +"2042 2789" [id=2042, type=Slice]; +"2043 2791" [id=2043, type=Gather]; +"2044 2782" [id=2044, type=Slice]; +"2045 2784" [id=2045, type=Gather]; +"2046 2792" [id=2046, type=Sub]; +"2047 2794" [id=2047, type=Add]; +"2048 2915" [id=2048, type=Slice]; +"2049 2916" [id=2049, type=Unsqueeze]; +"2050 2860" [id=2050, type=Slice]; +"2051 2865" [id=2051, type=Slice]; +"2052 2867" [id=2052, type=Div]; +"2053 2880" [id=2053, type=Clip]; +"2054 2910" [id=2054, type=Exp]; +"2055 2917" [id=2055, type=Mul]; +"2056 2933" [id=2056, type=Mul]; +"2057 2820" [id=2057, type=Mul]; +"2058 2816" [id=2058, type=Slice]; +"2059 2818" [id=2059, type=Gather]; +"2060 2821" [id=2060, type=Add]; +"2061 2893" [id=2061, type=Slice]; +"2062 2894" [id=2062, type=Unsqueeze]; +"2063 2886" [id=2063, type=Slice]; +"2064 2887" [id=2064, type=Unsqueeze]; +"2065 2836" [id=2065, type=Slice]; +"2066 2841" [id=2066, type=Slice]; +"2067 2843" [id=2067, type=Div]; +"2068 2888" [id=2068, type=Mul]; +"2069 2895" [id=2069, type=Add]; +"2070 2934" [id=2070, type=Add]; +"2071 2936" [id=2071, type=Sub]; +"2072 2944" [id=2072, type=Unsqueeze]; +"2073 2930" [id=2073, type=Mul]; +"2074 2931" [id=2074, type=Sub]; +"2075 2943" [id=2075, type=Unsqueeze]; +"2076 2927" [id=2076, type=Mul]; +"2077 2928" [id=2077, type=Sub]; +"2078 2942" [id=2078, type=Unsqueeze]; +"2079 2946" [id=2079, type=Concat]; +"2080 2948" [id=2080, type=Reshape]; +"2081 2953" [id=2081, type=Reshape]; +"2082 2971" [id=2082, type=Slice]; +"2083 2976" [id=2083, type=Slice]; +"2084 2977" [id=2084, type=Clip]; +"2085 2979" [id=2085, type=Unsqueeze]; +"2086 2960" [id=2086, type=Slice]; +"2087 2965" [id=2087, type=Slice]; +"2088 2966" [id=2088, type=Clip]; +"2089 2978" [id=2089, type=Unsqueeze]; +"2090 2980" [id=2090, type=Concat]; +"2091 2982" [id=2091, type=Reshape]; +"2092 2989" [id=2092, type=Reshape]; +"2093 6493" [id=2093, type=Slice]; +"2094 6495" [id=2094, type=Gather]; +"2095 6496" [id=2095, type=Unsqueeze]; +"2096 6501" [id=2096, type=NonMaxSuppression]; +"2097 6503" [id=2097, type=Gather]; +"2098 6504" [id=2098, type=Squeeze]; +"2099 6508" [id=2099, type=Gather]; +"2100 6434" [id=2100, type=Slice]; +"2101 6436" [id=2101, type=Gather]; +"2102 6437" [id=2102, type=Cast]; +"2103 6438" [id=2103, type=NonZero]; +"2104 6439" [id=2104, type=Transpose]; +"2105 6440" [id=2105, type=Squeeze]; +"2106 6443" [id=2106, type=Cast]; +"2107 6442" [id=2107, type=Gather]; +"2108 6444" [id=2108, type=Gather]; +"2109 6453" [id=2109, type=Unsqueeze]; +"2110 6454" [id=2110, type=Unsqueeze]; +"2111 6449" [id=2111, type=Slice]; +"2112 6451" [id=2112, type=Gather]; +"2113 6452" [id=2113, type=Unsqueeze]; +"2114 6457" [id=2114, type=NonMaxSuppression]; +"2115 6459" [id=2115, type=Gather]; +"2116 6460" [id=2116, type=Squeeze]; +"2117 6464" [id=2117, type=Gather]; +"2118 6390" [id=2118, type=Slice]; +"2119 6392" [id=2119, type=Gather]; +"2120 6393" [id=2120, type=Cast]; +"2121 6394" [id=2121, type=NonZero]; +"2122 6395" [id=2122, type=Transpose]; +"2123 6396" [id=2123, type=Squeeze]; +"2124 6399" [id=2124, type=Cast]; +"2125 6398" [id=2125, type=Gather]; +"2126 6400" [id=2126, type=Gather]; +"2127 6409" [id=2127, type=Unsqueeze]; +"2128 6410" [id=2128, 
type=Unsqueeze]; +"2129 6405" [id=2129, type=Slice]; +"2130 6407" [id=2130, type=Gather]; +"2131 6408" [id=2131, type=Unsqueeze]; +"2132 6413" [id=2132, type=NonMaxSuppression]; +"2133 6415" [id=2133, type=Gather]; +"2134 6416" [id=2134, type=Squeeze]; +"2135 6420" [id=2135, type=Gather]; +"2136 6346" [id=2136, type=Slice]; +"2137 6348" [id=2137, type=Gather]; +"2138 6349" [id=2138, type=Cast]; +"2139 6350" [id=2139, type=NonZero]; +"2140 6351" [id=2140, type=Transpose]; +"2141 6352" [id=2141, type=Squeeze]; +"2142 6355" [id=2142, type=Cast]; +"2143 6354" [id=2143, type=Gather]; +"2144 6356" [id=2144, type=Gather]; +"2145 6365" [id=2145, type=Unsqueeze]; +"2146 6366" [id=2146, type=Unsqueeze]; +"2147 6361" [id=2147, type=Slice]; +"2148 6363" [id=2148, type=Gather]; +"2149 6364" [id=2149, type=Unsqueeze]; +"2150 6369" [id=2150, type=NonMaxSuppression]; +"2151 6371" [id=2151, type=Gather]; +"2152 6372" [id=2152, type=Squeeze]; +"2153 6376" [id=2153, type=Gather]; +"2154 6302" [id=2154, type=Slice]; +"2155 6304" [id=2155, type=Gather]; +"2156 6305" [id=2156, type=Cast]; +"2157 6306" [id=2157, type=NonZero]; +"2158 6307" [id=2158, type=Transpose]; +"2159 6308" [id=2159, type=Squeeze]; +"2160 6311" [id=2160, type=Cast]; +"2161 6310" [id=2161, type=Gather]; +"2162 6312" [id=2162, type=Gather]; +"2163 6321" [id=2163, type=Unsqueeze]; +"2164 6322" [id=2164, type=Unsqueeze]; +"2165 6317" [id=2165, type=Slice]; +"2166 6319" [id=2166, type=Gather]; +"2167 6320" [id=2167, type=Unsqueeze]; +"2168 6325" [id=2168, type=NonMaxSuppression]; +"2169 6327" [id=2169, type=Gather]; +"2170 6328" [id=2170, type=Squeeze]; +"2171 6332" [id=2171, type=Gather]; +"2172 6258" [id=2172, type=Slice]; +"2173 6260" [id=2173, type=Gather]; +"2174 6261" [id=2174, type=Cast]; +"2175 6262" [id=2175, type=NonZero]; +"2176 6263" [id=2176, type=Transpose]; +"2177 6264" [id=2177, type=Squeeze]; +"2178 6267" [id=2178, type=Cast]; +"2179 6266" [id=2179, type=Gather]; +"2180 6268" [id=2180, type=Gather]; +"2181 6277" [id=2181, type=Unsqueeze]; +"2182 6278" [id=2182, type=Unsqueeze]; +"2183 6273" [id=2183, type=Slice]; +"2184 6275" [id=2184, type=Gather]; +"2185 6276" [id=2185, type=Unsqueeze]; +"2186 6281" [id=2186, type=NonMaxSuppression]; +"2187 6283" [id=2187, type=Gather]; +"2188 6284" [id=2188, type=Squeeze]; +"2189 6288" [id=2189, type=Gather]; +"2190 6214" [id=2190, type=Slice]; +"2191 6216" [id=2191, type=Gather]; +"2192 6217" [id=2192, type=Cast]; +"2193 6218" [id=2193, type=NonZero]; +"2194 6219" [id=2194, type=Transpose]; +"2195 6220" [id=2195, type=Squeeze]; +"2196 6223" [id=2196, type=Cast]; +"2197 6222" [id=2197, type=Gather]; +"2198 6224" [id=2198, type=Gather]; +"2199 6233" [id=2199, type=Unsqueeze]; +"2200 6234" [id=2200, type=Unsqueeze]; +"2201 6229" [id=2201, type=Slice]; +"2202 6231" [id=2202, type=Gather]; +"2203 6232" [id=2203, type=Unsqueeze]; +"2204 6237" [id=2204, type=NonMaxSuppression]; +"2205 6239" [id=2205, type=Gather]; +"2206 6240" [id=2206, type=Squeeze]; +"2207 6244" [id=2207, type=Gather]; +"2208 6170" [id=2208, type=Slice]; +"2209 6172" [id=2209, type=Gather]; +"2210 6173" [id=2210, type=Cast]; +"2211 6174" [id=2211, type=NonZero]; +"2212 6175" [id=2212, type=Transpose]; +"2213 6176" [id=2213, type=Squeeze]; +"2214 6179" [id=2214, type=Cast]; +"2215 6178" [id=2215, type=Gather]; +"2216 6180" [id=2216, type=Gather]; +"2217 6189" [id=2217, type=Unsqueeze]; +"2218 6190" [id=2218, type=Unsqueeze]; +"2219 6185" [id=2219, type=Slice]; +"2220 6187" [id=2220, type=Gather]; +"2221 6188" [id=2221, 
type=Unsqueeze]; +"2222 6193" [id=2222, type=NonMaxSuppression]; +"2223 6195" [id=2223, type=Gather]; +"2224 6196" [id=2224, type=Squeeze]; +"2225 6200" [id=2225, type=Gather]; +"2226 6126" [id=2226, type=Slice]; +"2227 6128" [id=2227, type=Gather]; +"2228 6129" [id=2228, type=Cast]; +"2229 6130" [id=2229, type=NonZero]; +"2230 6131" [id=2230, type=Transpose]; +"2231 6132" [id=2231, type=Squeeze]; +"2232 6135" [id=2232, type=Cast]; +"2233 6134" [id=2233, type=Gather]; +"2234 6136" [id=2234, type=Gather]; +"2235 6145" [id=2235, type=Unsqueeze]; +"2236 6146" [id=2236, type=Unsqueeze]; +"2237 6141" [id=2237, type=Slice]; +"2238 6143" [id=2238, type=Gather]; +"2239 6144" [id=2239, type=Unsqueeze]; +"2240 6149" [id=2240, type=NonMaxSuppression]; +"2241 6151" [id=2241, type=Gather]; +"2242 6152" [id=2242, type=Squeeze]; +"2243 6156" [id=2243, type=Gather]; +"2244 6082" [id=2244, type=Slice]; +"2245 6084" [id=2245, type=Gather]; +"2246 6085" [id=2246, type=Cast]; +"2247 6086" [id=2247, type=NonZero]; +"2248 6087" [id=2248, type=Transpose]; +"2249 6088" [id=2249, type=Squeeze]; +"2250 6091" [id=2250, type=Cast]; +"2251 6090" [id=2251, type=Gather]; +"2252 6092" [id=2252, type=Gather]; +"2253 6101" [id=2253, type=Unsqueeze]; +"2254 6102" [id=2254, type=Unsqueeze]; +"2255 6097" [id=2255, type=Slice]; +"2256 6099" [id=2256, type=Gather]; +"2257 6100" [id=2257, type=Unsqueeze]; +"2258 6105" [id=2258, type=NonMaxSuppression]; +"2259 6107" [id=2259, type=Gather]; +"2260 6108" [id=2260, type=Squeeze]; +"2261 6112" [id=2261, type=Gather]; +"2262 6038" [id=2262, type=Slice]; +"2263 6040" [id=2263, type=Gather]; +"2264 6041" [id=2264, type=Cast]; +"2265 6042" [id=2265, type=NonZero]; +"2266 6043" [id=2266, type=Transpose]; +"2267 6044" [id=2267, type=Squeeze]; +"2268 6047" [id=2268, type=Cast]; +"2269 6046" [id=2269, type=Gather]; +"2270 6048" [id=2270, type=Gather]; +"2271 6057" [id=2271, type=Unsqueeze]; +"2272 6058" [id=2272, type=Unsqueeze]; +"2273 6053" [id=2273, type=Slice]; +"2274 6055" [id=2274, type=Gather]; +"2275 6056" [id=2275, type=Unsqueeze]; +"2276 6061" [id=2276, type=NonMaxSuppression]; +"2277 6063" [id=2277, type=Gather]; +"2278 6064" [id=2278, type=Squeeze]; +"2279 6068" [id=2279, type=Gather]; +"2280 5994" [id=2280, type=Slice]; +"2281 5996" [id=2281, type=Gather]; +"2282 5997" [id=2282, type=Cast]; +"2283 5998" [id=2283, type=NonZero]; +"2284 5999" [id=2284, type=Transpose]; +"2285 6000" [id=2285, type=Squeeze]; +"2286 6003" [id=2286, type=Cast]; +"2287 6002" [id=2287, type=Gather]; +"2288 6004" [id=2288, type=Gather]; +"2289 6013" [id=2289, type=Unsqueeze]; +"2290 6014" [id=2290, type=Unsqueeze]; +"2291 6009" [id=2291, type=Slice]; +"2292 6011" [id=2292, type=Gather]; +"2293 6012" [id=2293, type=Unsqueeze]; +"2294 6017" [id=2294, type=NonMaxSuppression]; +"2295 6019" [id=2295, type=Gather]; +"2296 6020" [id=2296, type=Squeeze]; +"2297 6024" [id=2297, type=Gather]; +"2298 5950" [id=2298, type=Slice]; +"2299 5952" [id=2299, type=Gather]; +"2300 5953" [id=2300, type=Cast]; +"2301 5954" [id=2301, type=NonZero]; +"2302 5955" [id=2302, type=Transpose]; +"2303 5956" [id=2303, type=Squeeze]; +"2304 5959" [id=2304, type=Cast]; +"2305 5958" [id=2305, type=Gather]; +"2306 5960" [id=2306, type=Gather]; +"2307 5969" [id=2307, type=Unsqueeze]; +"2308 5970" [id=2308, type=Unsqueeze]; +"2309 5965" [id=2309, type=Slice]; +"2310 5967" [id=2310, type=Gather]; +"2311 5968" [id=2311, type=Unsqueeze]; +"2312 5973" [id=2312, type=NonMaxSuppression]; +"2313 5975" [id=2313, type=Gather]; +"2314 5976" [id=2314, 
type=Squeeze]; +"2315 5980" [id=2315, type=Gather]; +"2316 5906" [id=2316, type=Slice]; +"2317 5908" [id=2317, type=Gather]; +"2318 5909" [id=2318, type=Cast]; +"2319 5910" [id=2319, type=NonZero]; +"2320 5911" [id=2320, type=Transpose]; +"2321 5912" [id=2321, type=Squeeze]; +"2322 5915" [id=2322, type=Cast]; +"2323 5914" [id=2323, type=Gather]; +"2324 5916" [id=2324, type=Gather]; +"2325 5925" [id=2325, type=Unsqueeze]; +"2326 5926" [id=2326, type=Unsqueeze]; +"2327 5921" [id=2327, type=Slice]; +"2328 5923" [id=2328, type=Gather]; +"2329 5924" [id=2329, type=Unsqueeze]; +"2330 5929" [id=2330, type=NonMaxSuppression]; +"2331 5931" [id=2331, type=Gather]; +"2332 5932" [id=2332, type=Squeeze]; +"2333 5936" [id=2333, type=Gather]; +"2334 5862" [id=2334, type=Slice]; +"2335 5864" [id=2335, type=Gather]; +"2336 5865" [id=2336, type=Cast]; +"2337 5866" [id=2337, type=NonZero]; +"2338 5867" [id=2338, type=Transpose]; +"2339 5868" [id=2339, type=Squeeze]; +"2340 5871" [id=2340, type=Cast]; +"2341 5870" [id=2341, type=Gather]; +"2342 5872" [id=2342, type=Gather]; +"2343 5881" [id=2343, type=Unsqueeze]; +"2344 5882" [id=2344, type=Unsqueeze]; +"2345 5877" [id=2345, type=Slice]; +"2346 5879" [id=2346, type=Gather]; +"2347 5880" [id=2347, type=Unsqueeze]; +"2348 5885" [id=2348, type=NonMaxSuppression]; +"2349 5887" [id=2349, type=Gather]; +"2350 5888" [id=2350, type=Squeeze]; +"2351 5892" [id=2351, type=Gather]; +"2352 5818" [id=2352, type=Slice]; +"2353 5820" [id=2353, type=Gather]; +"2354 5821" [id=2354, type=Cast]; +"2355 5822" [id=2355, type=NonZero]; +"2356 5823" [id=2356, type=Transpose]; +"2357 5824" [id=2357, type=Squeeze]; +"2358 5827" [id=2358, type=Cast]; +"2359 5826" [id=2359, type=Gather]; +"2360 5828" [id=2360, type=Gather]; +"2361 5837" [id=2361, type=Unsqueeze]; +"2362 5838" [id=2362, type=Unsqueeze]; +"2363 5833" [id=2363, type=Slice]; +"2364 5835" [id=2364, type=Gather]; +"2365 5836" [id=2365, type=Unsqueeze]; +"2366 5841" [id=2366, type=NonMaxSuppression]; +"2367 5843" [id=2367, type=Gather]; +"2368 5844" [id=2368, type=Squeeze]; +"2369 5848" [id=2369, type=Gather]; +"2370 5774" [id=2370, type=Slice]; +"2371 5776" [id=2371, type=Gather]; +"2372 5777" [id=2372, type=Cast]; +"2373 5778" [id=2373, type=NonZero]; +"2374 5779" [id=2374, type=Transpose]; +"2375 5780" [id=2375, type=Squeeze]; +"2376 5783" [id=2376, type=Cast]; +"2377 5782" [id=2377, type=Gather]; +"2378 5784" [id=2378, type=Gather]; +"2379 5793" [id=2379, type=Unsqueeze]; +"2380 5794" [id=2380, type=Unsqueeze]; +"2381 5789" [id=2381, type=Slice]; +"2382 5791" [id=2382, type=Gather]; +"2383 5792" [id=2383, type=Unsqueeze]; +"2384 5797" [id=2384, type=NonMaxSuppression]; +"2385 5799" [id=2385, type=Gather]; +"2386 5800" [id=2386, type=Squeeze]; +"2387 5804" [id=2387, type=Gather]; +"2388 5730" [id=2388, type=Slice]; +"2389 5732" [id=2389, type=Gather]; +"2390 5733" [id=2390, type=Cast]; +"2391 5734" [id=2391, type=NonZero]; +"2392 5735" [id=2392, type=Transpose]; +"2393 5736" [id=2393, type=Squeeze]; +"2394 5739" [id=2394, type=Cast]; +"2395 5738" [id=2395, type=Gather]; +"2396 5740" [id=2396, type=Gather]; +"2397 5749" [id=2397, type=Unsqueeze]; +"2398 5750" [id=2398, type=Unsqueeze]; +"2399 5745" [id=2399, type=Slice]; +"2400 5747" [id=2400, type=Gather]; +"2401 5748" [id=2401, type=Unsqueeze]; +"2402 5753" [id=2402, type=NonMaxSuppression]; +"2403 5755" [id=2403, type=Gather]; +"2404 5756" [id=2404, type=Squeeze]; +"2405 5760" [id=2405, type=Gather]; +"2406 5686" [id=2406, type=Slice]; +"2407 5688" [id=2407, type=Gather]; 
+"2408 5689" [id=2408, type=Cast]; +"2409 5690" [id=2409, type=NonZero]; +"2410 5691" [id=2410, type=Transpose]; +"2411 5692" [id=2411, type=Squeeze]; +"2412 5695" [id=2412, type=Cast]; +"2413 5694" [id=2413, type=Gather]; +"2414 5696" [id=2414, type=Gather]; +"2415 5705" [id=2415, type=Unsqueeze]; +"2416 5706" [id=2416, type=Unsqueeze]; +"2417 5701" [id=2417, type=Slice]; +"2418 5703" [id=2418, type=Gather]; +"2419 5704" [id=2419, type=Unsqueeze]; +"2420 5709" [id=2420, type=NonMaxSuppression]; +"2421 5711" [id=2421, type=Gather]; +"2422 5712" [id=2422, type=Squeeze]; +"2423 5716" [id=2423, type=Gather]; +"2424 5642" [id=2424, type=Slice]; +"2425 5644" [id=2425, type=Gather]; +"2426 5645" [id=2426, type=Cast]; +"2427 5646" [id=2427, type=NonZero]; +"2428 5647" [id=2428, type=Transpose]; +"2429 5648" [id=2429, type=Squeeze]; +"2430 5651" [id=2430, type=Cast]; +"2431 5650" [id=2431, type=Gather]; +"2432 5652" [id=2432, type=Gather]; +"2433 5661" [id=2433, type=Unsqueeze]; +"2434 5662" [id=2434, type=Unsqueeze]; +"2435 5657" [id=2435, type=Slice]; +"2436 5659" [id=2436, type=Gather]; +"2437 5660" [id=2437, type=Unsqueeze]; +"2438 5665" [id=2438, type=NonMaxSuppression]; +"2439 5667" [id=2439, type=Gather]; +"2440 5668" [id=2440, type=Squeeze]; +"2441 5672" [id=2441, type=Gather]; +"2442 5598" [id=2442, type=Slice]; +"2443 5600" [id=2443, type=Gather]; +"2444 5601" [id=2444, type=Cast]; +"2445 5602" [id=2445, type=NonZero]; +"2446 5603" [id=2446, type=Transpose]; +"2447 5604" [id=2447, type=Squeeze]; +"2448 5607" [id=2448, type=Cast]; +"2449 5606" [id=2449, type=Gather]; +"2450 5608" [id=2450, type=Gather]; +"2451 5617" [id=2451, type=Unsqueeze]; +"2452 5618" [id=2452, type=Unsqueeze]; +"2453 5613" [id=2453, type=Slice]; +"2454 5615" [id=2454, type=Gather]; +"2455 5616" [id=2455, type=Unsqueeze]; +"2456 5621" [id=2456, type=NonMaxSuppression]; +"2457 5623" [id=2457, type=Gather]; +"2458 5624" [id=2458, type=Squeeze]; +"2459 5628" [id=2459, type=Gather]; +"2460 5554" [id=2460, type=Slice]; +"2461 5556" [id=2461, type=Gather]; +"2462 5557" [id=2462, type=Cast]; +"2463 5558" [id=2463, type=NonZero]; +"2464 5559" [id=2464, type=Transpose]; +"2465 5560" [id=2465, type=Squeeze]; +"2466 5563" [id=2466, type=Cast]; +"2467 5562" [id=2467, type=Gather]; +"2468 5564" [id=2468, type=Gather]; +"2469 5573" [id=2469, type=Unsqueeze]; +"2470 5574" [id=2470, type=Unsqueeze]; +"2471 5569" [id=2471, type=Slice]; +"2472 5571" [id=2472, type=Gather]; +"2473 5572" [id=2473, type=Unsqueeze]; +"2474 5577" [id=2474, type=NonMaxSuppression]; +"2475 5579" [id=2475, type=Gather]; +"2476 5580" [id=2476, type=Squeeze]; +"2477 5584" [id=2477, type=Gather]; +"2478 5510" [id=2478, type=Slice]; +"2479 5512" [id=2479, type=Gather]; +"2480 5513" [id=2480, type=Cast]; +"2481 5514" [id=2481, type=NonZero]; +"2482 5515" [id=2482, type=Transpose]; +"2483 5516" [id=2483, type=Squeeze]; +"2484 5519" [id=2484, type=Cast]; +"2485 5518" [id=2485, type=Gather]; +"2486 5520" [id=2486, type=Gather]; +"2487 5529" [id=2487, type=Unsqueeze]; +"2488 5530" [id=2488, type=Unsqueeze]; +"2489 5525" [id=2489, type=Slice]; +"2490 5527" [id=2490, type=Gather]; +"2491 5528" [id=2491, type=Unsqueeze]; +"2492 5533" [id=2492, type=NonMaxSuppression]; +"2493 5535" [id=2493, type=Gather]; +"2494 5536" [id=2494, type=Squeeze]; +"2495 5540" [id=2495, type=Gather]; +"2496 5466" [id=2496, type=Slice]; +"2497 5468" [id=2497, type=Gather]; +"2498 5469" [id=2498, type=Cast]; +"2499 5470" [id=2499, type=NonZero]; +"2500 5471" [id=2500, type=Transpose]; +"2501 
5472" [id=2501, type=Squeeze]; +"2502 5475" [id=2502, type=Cast]; +"2503 5474" [id=2503, type=Gather]; +"2504 5476" [id=2504, type=Gather]; +"2505 5485" [id=2505, type=Unsqueeze]; +"2506 5486" [id=2506, type=Unsqueeze]; +"2507 5481" [id=2507, type=Slice]; +"2508 5483" [id=2508, type=Gather]; +"2509 5484" [id=2509, type=Unsqueeze]; +"2510 5489" [id=2510, type=NonMaxSuppression]; +"2511 5491" [id=2511, type=Gather]; +"2512 5492" [id=2512, type=Squeeze]; +"2513 5496" [id=2513, type=Gather]; +"2514 5422" [id=2514, type=Slice]; +"2515 5424" [id=2515, type=Gather]; +"2516 5425" [id=2516, type=Cast]; +"2517 5426" [id=2517, type=NonZero]; +"2518 5427" [id=2518, type=Transpose]; +"2519 5428" [id=2519, type=Squeeze]; +"2520 5431" [id=2520, type=Cast]; +"2521 5430" [id=2521, type=Gather]; +"2522 5432" [id=2522, type=Gather]; +"2523 5441" [id=2523, type=Unsqueeze]; +"2524 5442" [id=2524, type=Unsqueeze]; +"2525 5437" [id=2525, type=Slice]; +"2526 5439" [id=2526, type=Gather]; +"2527 5440" [id=2527, type=Unsqueeze]; +"2528 5445" [id=2528, type=NonMaxSuppression]; +"2529 5447" [id=2529, type=Gather]; +"2530 5448" [id=2530, type=Squeeze]; +"2531 5452" [id=2531, type=Gather]; +"2532 5378" [id=2532, type=Slice]; +"2533 5380" [id=2533, type=Gather]; +"2534 5381" [id=2534, type=Cast]; +"2535 5382" [id=2535, type=NonZero]; +"2536 5383" [id=2536, type=Transpose]; +"2537 5384" [id=2537, type=Squeeze]; +"2538 5387" [id=2538, type=Cast]; +"2539 5386" [id=2539, type=Gather]; +"2540 5388" [id=2540, type=Gather]; +"2541 5397" [id=2541, type=Unsqueeze]; +"2542 5398" [id=2542, type=Unsqueeze]; +"2543 5393" [id=2543, type=Slice]; +"2544 5395" [id=2544, type=Gather]; +"2545 5396" [id=2545, type=Unsqueeze]; +"2546 5401" [id=2546, type=NonMaxSuppression]; +"2547 5403" [id=2547, type=Gather]; +"2548 5404" [id=2548, type=Squeeze]; +"2549 5408" [id=2549, type=Gather]; +"2550 5334" [id=2550, type=Slice]; +"2551 5336" [id=2551, type=Gather]; +"2552 5337" [id=2552, type=Cast]; +"2553 5338" [id=2553, type=NonZero]; +"2554 5339" [id=2554, type=Transpose]; +"2555 5340" [id=2555, type=Squeeze]; +"2556 5343" [id=2556, type=Cast]; +"2557 5342" [id=2557, type=Gather]; +"2558 5344" [id=2558, type=Gather]; +"2559 5353" [id=2559, type=Unsqueeze]; +"2560 5354" [id=2560, type=Unsqueeze]; +"2561 5349" [id=2561, type=Slice]; +"2562 5351" [id=2562, type=Gather]; +"2563 5352" [id=2563, type=Unsqueeze]; +"2564 5357" [id=2564, type=NonMaxSuppression]; +"2565 5359" [id=2565, type=Gather]; +"2566 5360" [id=2566, type=Squeeze]; +"2567 5364" [id=2567, type=Gather]; +"2568 5290" [id=2568, type=Slice]; +"2569 5292" [id=2569, type=Gather]; +"2570 5293" [id=2570, type=Cast]; +"2571 5294" [id=2571, type=NonZero]; +"2572 5295" [id=2572, type=Transpose]; +"2573 5296" [id=2573, type=Squeeze]; +"2574 5299" [id=2574, type=Cast]; +"2575 5298" [id=2575, type=Gather]; +"2576 5300" [id=2576, type=Gather]; +"2577 5309" [id=2577, type=Unsqueeze]; +"2578 5310" [id=2578, type=Unsqueeze]; +"2579 5305" [id=2579, type=Slice]; +"2580 5307" [id=2580, type=Gather]; +"2581 5308" [id=2581, type=Unsqueeze]; +"2582 5313" [id=2582, type=NonMaxSuppression]; +"2583 5315" [id=2583, type=Gather]; +"2584 5316" [id=2584, type=Squeeze]; +"2585 5320" [id=2585, type=Gather]; +"2586 5246" [id=2586, type=Slice]; +"2587 5248" [id=2587, type=Gather]; +"2588 5249" [id=2588, type=Cast]; +"2589 5250" [id=2589, type=NonZero]; +"2590 5251" [id=2590, type=Transpose]; +"2591 5252" [id=2591, type=Squeeze]; +"2592 5255" [id=2592, type=Cast]; +"2593 5254" [id=2593, type=Gather]; +"2594 5256" 
[id=2594, type=Gather]; +"2595 5265" [id=2595, type=Unsqueeze]; +"2596 5266" [id=2596, type=Unsqueeze]; +"2597 5261" [id=2597, type=Slice]; +"2598 5263" [id=2598, type=Gather]; +"2599 5264" [id=2599, type=Unsqueeze]; +"2600 5269" [id=2600, type=NonMaxSuppression]; +"2601 5271" [id=2601, type=Gather]; +"2602 5272" [id=2602, type=Squeeze]; +"2603 5276" [id=2603, type=Gather]; +"2604 5202" [id=2604, type=Slice]; +"2605 5204" [id=2605, type=Gather]; +"2606 5205" [id=2606, type=Cast]; +"2607 5206" [id=2607, type=NonZero]; +"2608 5207" [id=2608, type=Transpose]; +"2609 5208" [id=2609, type=Squeeze]; +"2610 5211" [id=2610, type=Cast]; +"2611 5210" [id=2611, type=Gather]; +"2612 5212" [id=2612, type=Gather]; +"2613 5221" [id=2613, type=Unsqueeze]; +"2614 5222" [id=2614, type=Unsqueeze]; +"2615 5217" [id=2615, type=Slice]; +"2616 5219" [id=2616, type=Gather]; +"2617 5220" [id=2617, type=Unsqueeze]; +"2618 5225" [id=2618, type=NonMaxSuppression]; +"2619 5227" [id=2619, type=Gather]; +"2620 5228" [id=2620, type=Squeeze]; +"2621 5232" [id=2621, type=Gather]; +"2622 5158" [id=2622, type=Slice]; +"2623 5160" [id=2623, type=Gather]; +"2624 5161" [id=2624, type=Cast]; +"2625 5162" [id=2625, type=NonZero]; +"2626 5163" [id=2626, type=Transpose]; +"2627 5164" [id=2627, type=Squeeze]; +"2628 5167" [id=2628, type=Cast]; +"2629 5166" [id=2629, type=Gather]; +"2630 5168" [id=2630, type=Gather]; +"2631 5177" [id=2631, type=Unsqueeze]; +"2632 5178" [id=2632, type=Unsqueeze]; +"2633 5173" [id=2633, type=Slice]; +"2634 5175" [id=2634, type=Gather]; +"2635 5176" [id=2635, type=Unsqueeze]; +"2636 5181" [id=2636, type=NonMaxSuppression]; +"2637 5183" [id=2637, type=Gather]; +"2638 5184" [id=2638, type=Squeeze]; +"2639 5188" [id=2639, type=Gather]; +"2640 5114" [id=2640, type=Slice]; +"2641 5116" [id=2641, type=Gather]; +"2642 5117" [id=2642, type=Cast]; +"2643 5118" [id=2643, type=NonZero]; +"2644 5119" [id=2644, type=Transpose]; +"2645 5120" [id=2645, type=Squeeze]; +"2646 5123" [id=2646, type=Cast]; +"2647 5122" [id=2647, type=Gather]; +"2648 5124" [id=2648, type=Gather]; +"2649 5133" [id=2649, type=Unsqueeze]; +"2650 5134" [id=2650, type=Unsqueeze]; +"2651 5129" [id=2651, type=Slice]; +"2652 5131" [id=2652, type=Gather]; +"2653 5132" [id=2653, type=Unsqueeze]; +"2654 5137" [id=2654, type=NonMaxSuppression]; +"2655 5139" [id=2655, type=Gather]; +"2656 5140" [id=2656, type=Squeeze]; +"2657 5144" [id=2657, type=Gather]; +"2658 5070" [id=2658, type=Slice]; +"2659 5072" [id=2659, type=Gather]; +"2660 5073" [id=2660, type=Cast]; +"2661 5074" [id=2661, type=NonZero]; +"2662 5075" [id=2662, type=Transpose]; +"2663 5076" [id=2663, type=Squeeze]; +"2664 5079" [id=2664, type=Cast]; +"2665 5078" [id=2665, type=Gather]; +"2666 5080" [id=2666, type=Gather]; +"2667 5089" [id=2667, type=Unsqueeze]; +"2668 5090" [id=2668, type=Unsqueeze]; +"2669 5085" [id=2669, type=Slice]; +"2670 5087" [id=2670, type=Gather]; +"2671 5088" [id=2671, type=Unsqueeze]; +"2672 5093" [id=2672, type=NonMaxSuppression]; +"2673 5095" [id=2673, type=Gather]; +"2674 5096" [id=2674, type=Squeeze]; +"2675 5100" [id=2675, type=Gather]; +"2676 5026" [id=2676, type=Slice]; +"2677 5028" [id=2677, type=Gather]; +"2678 5029" [id=2678, type=Cast]; +"2679 5030" [id=2679, type=NonZero]; +"2680 5031" [id=2680, type=Transpose]; +"2681 5032" [id=2681, type=Squeeze]; +"2682 5035" [id=2682, type=Cast]; +"2683 5034" [id=2683, type=Gather]; +"2684 5036" [id=2684, type=Gather]; +"2685 5045" [id=2685, type=Unsqueeze]; +"2686 5046" [id=2686, type=Unsqueeze]; +"2687 5041" 
[id=2687, type=Slice]; +"2688 5043" [id=2688, type=Gather]; +"2689 5044" [id=2689, type=Unsqueeze]; +"2690 5049" [id=2690, type=NonMaxSuppression]; +"2691 5051" [id=2691, type=Gather]; +"2692 5052" [id=2692, type=Squeeze]; +"2693 5056" [id=2693, type=Gather]; +"2694 4982" [id=2694, type=Slice]; +"2695 4984" [id=2695, type=Gather]; +"2696 4985" [id=2696, type=Cast]; +"2697 4986" [id=2697, type=NonZero]; +"2698 4987" [id=2698, type=Transpose]; +"2699 4988" [id=2699, type=Squeeze]; +"2700 4991" [id=2700, type=Cast]; +"2701 4990" [id=2701, type=Gather]; +"2702 4992" [id=2702, type=Gather]; +"2703 5001" [id=2703, type=Unsqueeze]; +"2704 5002" [id=2704, type=Unsqueeze]; +"2705 4997" [id=2705, type=Slice]; +"2706 4999" [id=2706, type=Gather]; +"2707 5000" [id=2707, type=Unsqueeze]; +"2708 5005" [id=2708, type=NonMaxSuppression]; +"2709 5007" [id=2709, type=Gather]; +"2710 5008" [id=2710, type=Squeeze]; +"2711 5012" [id=2711, type=Gather]; +"2712 4938" [id=2712, type=Slice]; +"2713 4940" [id=2713, type=Gather]; +"2714 4941" [id=2714, type=Cast]; +"2715 4942" [id=2715, type=NonZero]; +"2716 4943" [id=2716, type=Transpose]; +"2717 4944" [id=2717, type=Squeeze]; +"2718 4947" [id=2718, type=Cast]; +"2719 4946" [id=2719, type=Gather]; +"2720 4948" [id=2720, type=Gather]; +"2721 4957" [id=2721, type=Unsqueeze]; +"2722 4958" [id=2722, type=Unsqueeze]; +"2723 4953" [id=2723, type=Slice]; +"2724 4955" [id=2724, type=Gather]; +"2725 4956" [id=2725, type=Unsqueeze]; +"2726 4961" [id=2726, type=NonMaxSuppression]; +"2727 4963" [id=2727, type=Gather]; +"2728 4964" [id=2728, type=Squeeze]; +"2729 4968" [id=2729, type=Gather]; +"2730 4894" [id=2730, type=Slice]; +"2731 4896" [id=2731, type=Gather]; +"2732 4897" [id=2732, type=Cast]; +"2733 4898" [id=2733, type=NonZero]; +"2734 4899" [id=2734, type=Transpose]; +"2735 4900" [id=2735, type=Squeeze]; +"2736 4903" [id=2736, type=Cast]; +"2737 4902" [id=2737, type=Gather]; +"2738 4904" [id=2738, type=Gather]; +"2739 4913" [id=2739, type=Unsqueeze]; +"2740 4914" [id=2740, type=Unsqueeze]; +"2741 4909" [id=2741, type=Slice]; +"2742 4911" [id=2742, type=Gather]; +"2743 4912" [id=2743, type=Unsqueeze]; +"2744 4917" [id=2744, type=NonMaxSuppression]; +"2745 4919" [id=2745, type=Gather]; +"2746 4920" [id=2746, type=Squeeze]; +"2747 4924" [id=2747, type=Gather]; +"2748 4850" [id=2748, type=Slice]; +"2749 4852" [id=2749, type=Gather]; +"2750 4853" [id=2750, type=Cast]; +"2751 4854" [id=2751, type=NonZero]; +"2752 4855" [id=2752, type=Transpose]; +"2753 4856" [id=2753, type=Squeeze]; +"2754 4859" [id=2754, type=Cast]; +"2755 4858" [id=2755, type=Gather]; +"2756 4860" [id=2756, type=Gather]; +"2757 4869" [id=2757, type=Unsqueeze]; +"2758 4870" [id=2758, type=Unsqueeze]; +"2759 4865" [id=2759, type=Slice]; +"2760 4867" [id=2760, type=Gather]; +"2761 4868" [id=2761, type=Unsqueeze]; +"2762 4873" [id=2762, type=NonMaxSuppression]; +"2763 4875" [id=2763, type=Gather]; +"2764 4876" [id=2764, type=Squeeze]; +"2765 4880" [id=2765, type=Gather]; +"2766 4806" [id=2766, type=Slice]; +"2767 4808" [id=2767, type=Gather]; +"2768 4809" [id=2768, type=Cast]; +"2769 4810" [id=2769, type=NonZero]; +"2770 4811" [id=2770, type=Transpose]; +"2771 4812" [id=2771, type=Squeeze]; +"2772 4815" [id=2772, type=Cast]; +"2773 4814" [id=2773, type=Gather]; +"2774 4816" [id=2774, type=Gather]; +"2775 4825" [id=2775, type=Unsqueeze]; +"2776 4826" [id=2776, type=Unsqueeze]; +"2777 4821" [id=2777, type=Slice]; +"2778 4823" [id=2778, type=Gather]; +"2779 4824" [id=2779, type=Unsqueeze]; +"2780 4829" [id=2780, 
type=NonMaxSuppression]; +"2781 4831" [id=2781, type=Gather]; +"2782 4832" [id=2782, type=Squeeze]; +"2783 4836" [id=2783, type=Gather]; +"2784 4762" [id=2784, type=Slice]; +"2785 4764" [id=2785, type=Gather]; +"2786 4765" [id=2786, type=Cast]; +"2787 4766" [id=2787, type=NonZero]; +"2788 4767" [id=2788, type=Transpose]; +"2789 4768" [id=2789, type=Squeeze]; +"2790 4771" [id=2790, type=Cast]; +"2791 4770" [id=2791, type=Gather]; +"2792 4772" [id=2792, type=Gather]; +"2793 4781" [id=2793, type=Unsqueeze]; +"2794 4782" [id=2794, type=Unsqueeze]; +"2795 4777" [id=2795, type=Slice]; +"2796 4779" [id=2796, type=Gather]; +"2797 4780" [id=2797, type=Unsqueeze]; +"2798 4785" [id=2798, type=NonMaxSuppression]; +"2799 4787" [id=2799, type=Gather]; +"2800 4788" [id=2800, type=Squeeze]; +"2801 4792" [id=2801, type=Gather]; +"2802 4718" [id=2802, type=Slice]; +"2803 4720" [id=2803, type=Gather]; +"2804 4721" [id=2804, type=Cast]; +"2805 4722" [id=2805, type=NonZero]; +"2806 4723" [id=2806, type=Transpose]; +"2807 4724" [id=2807, type=Squeeze]; +"2808 4727" [id=2808, type=Cast]; +"2809 4726" [id=2809, type=Gather]; +"2810 4728" [id=2810, type=Gather]; +"2811 4737" [id=2811, type=Unsqueeze]; +"2812 4738" [id=2812, type=Unsqueeze]; +"2813 4733" [id=2813, type=Slice]; +"2814 4735" [id=2814, type=Gather]; +"2815 4736" [id=2815, type=Unsqueeze]; +"2816 4741" [id=2816, type=NonMaxSuppression]; +"2817 4743" [id=2817, type=Gather]; +"2818 4744" [id=2818, type=Squeeze]; +"2819 4748" [id=2819, type=Gather]; +"2820 4674" [id=2820, type=Slice]; +"2821 4676" [id=2821, type=Gather]; +"2822 4677" [id=2822, type=Cast]; +"2823 4678" [id=2823, type=NonZero]; +"2824 4679" [id=2824, type=Transpose]; +"2825 4680" [id=2825, type=Squeeze]; +"2826 4683" [id=2826, type=Cast]; +"2827 4682" [id=2827, type=Gather]; +"2828 4684" [id=2828, type=Gather]; +"2829 4693" [id=2829, type=Unsqueeze]; +"2830 4694" [id=2830, type=Unsqueeze]; +"2831 4689" [id=2831, type=Slice]; +"2832 4691" [id=2832, type=Gather]; +"2833 4692" [id=2833, type=Unsqueeze]; +"2834 4697" [id=2834, type=NonMaxSuppression]; +"2835 4699" [id=2835, type=Gather]; +"2836 4700" [id=2836, type=Squeeze]; +"2837 4704" [id=2837, type=Gather]; +"2838 4630" [id=2838, type=Slice]; +"2839 4632" [id=2839, type=Gather]; +"2840 4633" [id=2840, type=Cast]; +"2841 4634" [id=2841, type=NonZero]; +"2842 4635" [id=2842, type=Transpose]; +"2843 4636" [id=2843, type=Squeeze]; +"2844 4639" [id=2844, type=Cast]; +"2845 4638" [id=2845, type=Gather]; +"2846 4640" [id=2846, type=Gather]; +"2847 4649" [id=2847, type=Unsqueeze]; +"2848 4650" [id=2848, type=Unsqueeze]; +"2849 4645" [id=2849, type=Slice]; +"2850 4647" [id=2850, type=Gather]; +"2851 4648" [id=2851, type=Unsqueeze]; +"2852 4653" [id=2852, type=NonMaxSuppression]; +"2853 4655" [id=2853, type=Gather]; +"2854 4656" [id=2854, type=Squeeze]; +"2855 4660" [id=2855, type=Gather]; +"2856 4586" [id=2856, type=Slice]; +"2857 4588" [id=2857, type=Gather]; +"2858 4589" [id=2858, type=Cast]; +"2859 4590" [id=2859, type=NonZero]; +"2860 4591" [id=2860, type=Transpose]; +"2861 4592" [id=2861, type=Squeeze]; +"2862 4595" [id=2862, type=Cast]; +"2863 4594" [id=2863, type=Gather]; +"2864 4596" [id=2864, type=Gather]; +"2865 4605" [id=2865, type=Unsqueeze]; +"2866 4606" [id=2866, type=Unsqueeze]; +"2867 4601" [id=2867, type=Slice]; +"2868 4603" [id=2868, type=Gather]; +"2869 4604" [id=2869, type=Unsqueeze]; +"2870 4609" [id=2870, type=NonMaxSuppression]; +"2871 4611" [id=2871, type=Gather]; +"2872 4612" [id=2872, type=Squeeze]; +"2873 4616" [id=2873, 
type=Gather]; +"2874 4542" [id=2874, type=Slice]; +"2875 4544" [id=2875, type=Gather]; +"2876 4545" [id=2876, type=Cast]; +"2877 4546" [id=2877, type=NonZero]; +"2878 4547" [id=2878, type=Transpose]; +"2879 4548" [id=2879, type=Squeeze]; +"2880 4551" [id=2880, type=Cast]; +"2881 4550" [id=2881, type=Gather]; +"2882 4552" [id=2882, type=Gather]; +"2883 4561" [id=2883, type=Unsqueeze]; +"2884 4562" [id=2884, type=Unsqueeze]; +"2885 4557" [id=2885, type=Slice]; +"2886 4559" [id=2886, type=Gather]; +"2887 4560" [id=2887, type=Unsqueeze]; +"2888 4565" [id=2888, type=NonMaxSuppression]; +"2889 4567" [id=2889, type=Gather]; +"2890 4568" [id=2890, type=Squeeze]; +"2891 4572" [id=2891, type=Gather]; +"2892 4498" [id=2892, type=Slice]; +"2893 4500" [id=2893, type=Gather]; +"2894 4501" [id=2894, type=Cast]; +"2895 4502" [id=2895, type=NonZero]; +"2896 4503" [id=2896, type=Transpose]; +"2897 4504" [id=2897, type=Squeeze]; +"2898 4507" [id=2898, type=Cast]; +"2899 4506" [id=2899, type=Gather]; +"2900 4508" [id=2900, type=Gather]; +"2901 4517" [id=2901, type=Unsqueeze]; +"2902 4518" [id=2902, type=Unsqueeze]; +"2903 4513" [id=2903, type=Slice]; +"2904 4515" [id=2904, type=Gather]; +"2905 4516" [id=2905, type=Unsqueeze]; +"2906 4521" [id=2906, type=NonMaxSuppression]; +"2907 4523" [id=2907, type=Gather]; +"2908 4524" [id=2908, type=Squeeze]; +"2909 4528" [id=2909, type=Gather]; +"2910 4454" [id=2910, type=Slice]; +"2911 4456" [id=2911, type=Gather]; +"2912 4457" [id=2912, type=Cast]; +"2913 4458" [id=2913, type=NonZero]; +"2914 4459" [id=2914, type=Transpose]; +"2915 4460" [id=2915, type=Squeeze]; +"2916 4463" [id=2916, type=Cast]; +"2917 4462" [id=2917, type=Gather]; +"2918 4464" [id=2918, type=Gather]; +"2919 4473" [id=2919, type=Unsqueeze]; +"2920 4474" [id=2920, type=Unsqueeze]; +"2921 4469" [id=2921, type=Slice]; +"2922 4471" [id=2922, type=Gather]; +"2923 4472" [id=2923, type=Unsqueeze]; +"2924 4477" [id=2924, type=NonMaxSuppression]; +"2925 4479" [id=2925, type=Gather]; +"2926 4480" [id=2926, type=Squeeze]; +"2927 4484" [id=2927, type=Gather]; +"2928 4410" [id=2928, type=Slice]; +"2929 4412" [id=2929, type=Gather]; +"2930 4413" [id=2930, type=Cast]; +"2931 4414" [id=2931, type=NonZero]; +"2932 4415" [id=2932, type=Transpose]; +"2933 4416" [id=2933, type=Squeeze]; +"2934 4419" [id=2934, type=Cast]; +"2935 4418" [id=2935, type=Gather]; +"2936 4420" [id=2936, type=Gather]; +"2937 4429" [id=2937, type=Unsqueeze]; +"2938 4430" [id=2938, type=Unsqueeze]; +"2939 4425" [id=2939, type=Slice]; +"2940 4427" [id=2940, type=Gather]; +"2941 4428" [id=2941, type=Unsqueeze]; +"2942 4433" [id=2942, type=NonMaxSuppression]; +"2943 4435" [id=2943, type=Gather]; +"2944 4436" [id=2944, type=Squeeze]; +"2945 4440" [id=2945, type=Gather]; +"2946 4366" [id=2946, type=Slice]; +"2947 4368" [id=2947, type=Gather]; +"2948 4369" [id=2948, type=Cast]; +"2949 4370" [id=2949, type=NonZero]; +"2950 4371" [id=2950, type=Transpose]; +"2951 4372" [id=2951, type=Squeeze]; +"2952 4375" [id=2952, type=Cast]; +"2953 4374" [id=2953, type=Gather]; +"2954 4376" [id=2954, type=Gather]; +"2955 4385" [id=2955, type=Unsqueeze]; +"2956 4386" [id=2956, type=Unsqueeze]; +"2957 4381" [id=2957, type=Slice]; +"2958 4383" [id=2958, type=Gather]; +"2959 4384" [id=2959, type=Unsqueeze]; +"2960 4389" [id=2960, type=NonMaxSuppression]; +"2961 4391" [id=2961, type=Gather]; +"2962 4392" [id=2962, type=Squeeze]; +"2963 4396" [id=2963, type=Gather]; +"2964 4322" [id=2964, type=Slice]; +"2965 4324" [id=2965, type=Gather]; +"2966 4325" [id=2966, type=Cast]; 
+"2967 4326" [id=2967, type=NonZero]; +"2968 4327" [id=2968, type=Transpose]; +"2969 4328" [id=2969, type=Squeeze]; +"2970 4331" [id=2970, type=Cast]; +"2971 4330" [id=2971, type=Gather]; +"2972 4332" [id=2972, type=Gather]; +"2973 4341" [id=2973, type=Unsqueeze]; +"2974 4342" [id=2974, type=Unsqueeze]; +"2975 4337" [id=2975, type=Slice]; +"2976 4339" [id=2976, type=Gather]; +"2977 4340" [id=2977, type=Unsqueeze]; +"2978 4345" [id=2978, type=NonMaxSuppression]; +"2979 4347" [id=2979, type=Gather]; +"2980 4348" [id=2980, type=Squeeze]; +"2981 4352" [id=2981, type=Gather]; +"2982 4278" [id=2982, type=Slice]; +"2983 4280" [id=2983, type=Gather]; +"2984 4281" [id=2984, type=Cast]; +"2985 4282" [id=2985, type=NonZero]; +"2986 4283" [id=2986, type=Transpose]; +"2987 4284" [id=2987, type=Squeeze]; +"2988 4287" [id=2988, type=Cast]; +"2989 4286" [id=2989, type=Gather]; +"2990 4288" [id=2990, type=Gather]; +"2991 4297" [id=2991, type=Unsqueeze]; +"2992 4298" [id=2992, type=Unsqueeze]; +"2993 4293" [id=2993, type=Slice]; +"2994 4295" [id=2994, type=Gather]; +"2995 4296" [id=2995, type=Unsqueeze]; +"2996 4301" [id=2996, type=NonMaxSuppression]; +"2997 4303" [id=2997, type=Gather]; +"2998 4304" [id=2998, type=Squeeze]; +"2999 4308" [id=2999, type=Gather]; +"3000 4234" [id=3000, type=Slice]; +"3001 4236" [id=3001, type=Gather]; +"3002 4237" [id=3002, type=Cast]; +"3003 4238" [id=3003, type=NonZero]; +"3004 4239" [id=3004, type=Transpose]; +"3005 4240" [id=3005, type=Squeeze]; +"3006 4243" [id=3006, type=Cast]; +"3007 4242" [id=3007, type=Gather]; +"3008 4244" [id=3008, type=Gather]; +"3009 4253" [id=3009, type=Unsqueeze]; +"3010 4254" [id=3010, type=Unsqueeze]; +"3011 4249" [id=3011, type=Slice]; +"3012 4251" [id=3012, type=Gather]; +"3013 4252" [id=3013, type=Unsqueeze]; +"3014 4257" [id=3014, type=NonMaxSuppression]; +"3015 4259" [id=3015, type=Gather]; +"3016 4260" [id=3016, type=Squeeze]; +"3017 4264" [id=3017, type=Gather]; +"3018 4190" [id=3018, type=Slice]; +"3019 4192" [id=3019, type=Gather]; +"3020 4193" [id=3020, type=Cast]; +"3021 4194" [id=3021, type=NonZero]; +"3022 4195" [id=3022, type=Transpose]; +"3023 4196" [id=3023, type=Squeeze]; +"3024 4199" [id=3024, type=Cast]; +"3025 4198" [id=3025, type=Gather]; +"3026 4200" [id=3026, type=Gather]; +"3027 4209" [id=3027, type=Unsqueeze]; +"3028 4210" [id=3028, type=Unsqueeze]; +"3029 4205" [id=3029, type=Slice]; +"3030 4207" [id=3030, type=Gather]; +"3031 4208" [id=3031, type=Unsqueeze]; +"3032 4213" [id=3032, type=NonMaxSuppression]; +"3033 4215" [id=3033, type=Gather]; +"3034 4216" [id=3034, type=Squeeze]; +"3035 4220" [id=3035, type=Gather]; +"3036 4146" [id=3036, type=Slice]; +"3037 4148" [id=3037, type=Gather]; +"3038 4149" [id=3038, type=Cast]; +"3039 4150" [id=3039, type=NonZero]; +"3040 4151" [id=3040, type=Transpose]; +"3041 4152" [id=3041, type=Squeeze]; +"3042 4155" [id=3042, type=Cast]; +"3043 4154" [id=3043, type=Gather]; +"3044 4156" [id=3044, type=Gather]; +"3045 4165" [id=3045, type=Unsqueeze]; +"3046 4166" [id=3046, type=Unsqueeze]; +"3047 4161" [id=3047, type=Slice]; +"3048 4163" [id=3048, type=Gather]; +"3049 4164" [id=3049, type=Unsqueeze]; +"3050 4169" [id=3050, type=NonMaxSuppression]; +"3051 4171" [id=3051, type=Gather]; +"3052 4172" [id=3052, type=Squeeze]; +"3053 4176" [id=3053, type=Gather]; +"3054 4102" [id=3054, type=Slice]; +"3055 4104" [id=3055, type=Gather]; +"3056 4105" [id=3056, type=Cast]; +"3057 4106" [id=3057, type=NonZero]; +"3058 4107" [id=3058, type=Transpose]; +"3059 4108" [id=3059, type=Squeeze]; +"3060 
4111" [id=3060, type=Cast]; +"3061 4110" [id=3061, type=Gather]; +"3062 4112" [id=3062, type=Gather]; +"3063 4121" [id=3063, type=Unsqueeze]; +"3064 4122" [id=3064, type=Unsqueeze]; +"3065 4117" [id=3065, type=Slice]; +"3066 4119" [id=3066, type=Gather]; +"3067 4120" [id=3067, type=Unsqueeze]; +"3068 4125" [id=3068, type=NonMaxSuppression]; +"3069 4127" [id=3069, type=Gather]; +"3070 4128" [id=3070, type=Squeeze]; +"3071 4132" [id=3071, type=Gather]; +"3072 4058" [id=3072, type=Slice]; +"3073 4060" [id=3073, type=Gather]; +"3074 4061" [id=3074, type=Cast]; +"3075 4062" [id=3075, type=NonZero]; +"3076 4063" [id=3076, type=Transpose]; +"3077 4064" [id=3077, type=Squeeze]; +"3078 4067" [id=3078, type=Cast]; +"3079 4066" [id=3079, type=Gather]; +"3080 4068" [id=3080, type=Gather]; +"3081 4077" [id=3081, type=Unsqueeze]; +"3082 4078" [id=3082, type=Unsqueeze]; +"3083 4073" [id=3083, type=Slice]; +"3084 4075" [id=3084, type=Gather]; +"3085 4076" [id=3085, type=Unsqueeze]; +"3086 4081" [id=3086, type=NonMaxSuppression]; +"3087 4083" [id=3087, type=Gather]; +"3088 4084" [id=3088, type=Squeeze]; +"3089 4088" [id=3089, type=Gather]; +"3090 4014" [id=3090, type=Slice]; +"3091 4016" [id=3091, type=Gather]; +"3092 4017" [id=3092, type=Cast]; +"3093 4018" [id=3093, type=NonZero]; +"3094 4019" [id=3094, type=Transpose]; +"3095 4020" [id=3095, type=Squeeze]; +"3096 4023" [id=3096, type=Cast]; +"3097 4022" [id=3097, type=Gather]; +"3098 4024" [id=3098, type=Gather]; +"3099 4033" [id=3099, type=Unsqueeze]; +"3100 4034" [id=3100, type=Unsqueeze]; +"3101 4029" [id=3101, type=Slice]; +"3102 4031" [id=3102, type=Gather]; +"3103 4032" [id=3103, type=Unsqueeze]; +"3104 4037" [id=3104, type=NonMaxSuppression]; +"3105 4039" [id=3105, type=Gather]; +"3106 4040" [id=3106, type=Squeeze]; +"3107 4044" [id=3107, type=Gather]; +"3108 3970" [id=3108, type=Slice]; +"3109 3972" [id=3109, type=Gather]; +"3110 3973" [id=3110, type=Cast]; +"3111 3974" [id=3111, type=NonZero]; +"3112 3975" [id=3112, type=Transpose]; +"3113 3976" [id=3113, type=Squeeze]; +"3114 3979" [id=3114, type=Cast]; +"3115 3978" [id=3115, type=Gather]; +"3116 3980" [id=3116, type=Gather]; +"3117 3989" [id=3117, type=Unsqueeze]; +"3118 3990" [id=3118, type=Unsqueeze]; +"3119 3985" [id=3119, type=Slice]; +"3120 3987" [id=3120, type=Gather]; +"3121 3988" [id=3121, type=Unsqueeze]; +"3122 3993" [id=3122, type=NonMaxSuppression]; +"3123 3995" [id=3123, type=Gather]; +"3124 3996" [id=3124, type=Squeeze]; +"3125 4000" [id=3125, type=Gather]; +"3126 3926" [id=3126, type=Slice]; +"3127 3928" [id=3127, type=Gather]; +"3128 3929" [id=3128, type=Cast]; +"3129 3930" [id=3129, type=NonZero]; +"3130 3931" [id=3130, type=Transpose]; +"3131 3932" [id=3131, type=Squeeze]; +"3132 3935" [id=3132, type=Cast]; +"3133 3934" [id=3133, type=Gather]; +"3134 3936" [id=3134, type=Gather]; +"3135 3945" [id=3135, type=Unsqueeze]; +"3136 3946" [id=3136, type=Unsqueeze]; +"3137 3941" [id=3137, type=Slice]; +"3138 3943" [id=3138, type=Gather]; +"3139 3944" [id=3139, type=Unsqueeze]; +"3140 3949" [id=3140, type=NonMaxSuppression]; +"3141 3951" [id=3141, type=Gather]; +"3142 3952" [id=3142, type=Squeeze]; +"3143 3956" [id=3143, type=Gather]; +"3144 3882" [id=3144, type=Slice]; +"3145 3884" [id=3145, type=Gather]; +"3146 3885" [id=3146, type=Cast]; +"3147 3886" [id=3147, type=NonZero]; +"3148 3887" [id=3148, type=Transpose]; +"3149 3888" [id=3149, type=Squeeze]; +"3150 3891" [id=3150, type=Cast]; +"3151 3890" [id=3151, type=Gather]; +"3152 3892" [id=3152, type=Gather]; +"3153 3901" [id=3153, 
type=Unsqueeze]; +"3154 3902" [id=3154, type=Unsqueeze]; +"3155 3897" [id=3155, type=Slice]; +"3156 3899" [id=3156, type=Gather]; +"3157 3900" [id=3157, type=Unsqueeze]; +"3158 3905" [id=3158, type=NonMaxSuppression]; +"3159 3907" [id=3159, type=Gather]; +"3160 3908" [id=3160, type=Squeeze]; +"3161 3912" [id=3161, type=Gather]; +"3162 3838" [id=3162, type=Slice]; +"3163 3840" [id=3163, type=Gather]; +"3164 3841" [id=3164, type=Cast]; +"3165 3842" [id=3165, type=NonZero]; +"3166 3843" [id=3166, type=Transpose]; +"3167 3844" [id=3167, type=Squeeze]; +"3168 3847" [id=3168, type=Cast]; +"3169 3846" [id=3169, type=Gather]; +"3170 3848" [id=3170, type=Gather]; +"3171 3857" [id=3171, type=Unsqueeze]; +"3172 3858" [id=3172, type=Unsqueeze]; +"3173 3853" [id=3173, type=Slice]; +"3174 3855" [id=3174, type=Gather]; +"3175 3856" [id=3175, type=Unsqueeze]; +"3176 3861" [id=3176, type=NonMaxSuppression]; +"3177 3863" [id=3177, type=Gather]; +"3178 3864" [id=3178, type=Squeeze]; +"3179 3868" [id=3179, type=Gather]; +"3180 3794" [id=3180, type=Slice]; +"3181 3796" [id=3181, type=Gather]; +"3182 3797" [id=3182, type=Cast]; +"3183 3798" [id=3183, type=NonZero]; +"3184 3799" [id=3184, type=Transpose]; +"3185 3800" [id=3185, type=Squeeze]; +"3186 3803" [id=3186, type=Cast]; +"3187 3802" [id=3187, type=Gather]; +"3188 3804" [id=3188, type=Gather]; +"3189 3813" [id=3189, type=Unsqueeze]; +"3190 3814" [id=3190, type=Unsqueeze]; +"3191 3809" [id=3191, type=Slice]; +"3192 3811" [id=3192, type=Gather]; +"3193 3812" [id=3193, type=Unsqueeze]; +"3194 3817" [id=3194, type=NonMaxSuppression]; +"3195 3819" [id=3195, type=Gather]; +"3196 3820" [id=3196, type=Squeeze]; +"3197 3824" [id=3197, type=Gather]; +"3198 3750" [id=3198, type=Slice]; +"3199 3752" [id=3199, type=Gather]; +"3200 3753" [id=3200, type=Cast]; +"3201 3754" [id=3201, type=NonZero]; +"3202 3755" [id=3202, type=Transpose]; +"3203 3756" [id=3203, type=Squeeze]; +"3204 3759" [id=3204, type=Cast]; +"3205 3758" [id=3205, type=Gather]; +"3206 3760" [id=3206, type=Gather]; +"3207 3769" [id=3207, type=Unsqueeze]; +"3208 3770" [id=3208, type=Unsqueeze]; +"3209 3765" [id=3209, type=Slice]; +"3210 3767" [id=3210, type=Gather]; +"3211 3768" [id=3211, type=Unsqueeze]; +"3212 3773" [id=3212, type=NonMaxSuppression]; +"3213 3775" [id=3213, type=Gather]; +"3214 3776" [id=3214, type=Squeeze]; +"3215 3780" [id=3215, type=Gather]; +"3216 3706" [id=3216, type=Slice]; +"3217 3708" [id=3217, type=Gather]; +"3218 3709" [id=3218, type=Cast]; +"3219 3710" [id=3219, type=NonZero]; +"3220 3711" [id=3220, type=Transpose]; +"3221 3712" [id=3221, type=Squeeze]; +"3222 3715" [id=3222, type=Cast]; +"3223 3714" [id=3223, type=Gather]; +"3224 3716" [id=3224, type=Gather]; +"3225 3725" [id=3225, type=Unsqueeze]; +"3226 3726" [id=3226, type=Unsqueeze]; +"3227 3721" [id=3227, type=Slice]; +"3228 3723" [id=3228, type=Gather]; +"3229 3724" [id=3229, type=Unsqueeze]; +"3230 3729" [id=3230, type=NonMaxSuppression]; +"3231 3731" [id=3231, type=Gather]; +"3232 3732" [id=3232, type=Squeeze]; +"3233 3736" [id=3233, type=Gather]; +"3234 3662" [id=3234, type=Slice]; +"3235 3664" [id=3235, type=Gather]; +"3236 3665" [id=3236, type=Cast]; +"3237 3666" [id=3237, type=NonZero]; +"3238 3667" [id=3238, type=Transpose]; +"3239 3668" [id=3239, type=Squeeze]; +"3240 3671" [id=3240, type=Cast]; +"3241 3670" [id=3241, type=Gather]; +"3242 3672" [id=3242, type=Gather]; +"3243 3681" [id=3243, type=Unsqueeze]; +"3244 3682" [id=3244, type=Unsqueeze]; +"3245 3677" [id=3245, type=Slice]; +"3246 3679" [id=3246, 
type=Gather]; +"3247 3680" [id=3247, type=Unsqueeze]; +"3248 3685" [id=3248, type=NonMaxSuppression]; +"3249 3687" [id=3249, type=Gather]; +"3250 3688" [id=3250, type=Squeeze]; +"3251 3692" [id=3251, type=Gather]; +"3252 3618" [id=3252, type=Slice]; +"3253 3620" [id=3253, type=Gather]; +"3254 3621" [id=3254, type=Cast]; +"3255 3622" [id=3255, type=NonZero]; +"3256 3623" [id=3256, type=Transpose]; +"3257 3624" [id=3257, type=Squeeze]; +"3258 3627" [id=3258, type=Cast]; +"3259 3626" [id=3259, type=Gather]; +"3260 3628" [id=3260, type=Gather]; +"3261 3637" [id=3261, type=Unsqueeze]; +"3262 3638" [id=3262, type=Unsqueeze]; +"3263 3633" [id=3263, type=Slice]; +"3264 3635" [id=3264, type=Gather]; +"3265 3636" [id=3265, type=Unsqueeze]; +"3266 3641" [id=3266, type=NonMaxSuppression]; +"3267 3643" [id=3267, type=Gather]; +"3268 3644" [id=3268, type=Squeeze]; +"3269 3648" [id=3269, type=Gather]; +"3270 3574" [id=3270, type=Slice]; +"3271 3576" [id=3271, type=Gather]; +"3272 3577" [id=3272, type=Cast]; +"3273 3578" [id=3273, type=NonZero]; +"3274 3579" [id=3274, type=Transpose]; +"3275 3580" [id=3275, type=Squeeze]; +"3276 3583" [id=3276, type=Cast]; +"3277 3582" [id=3277, type=Gather]; +"3278 3584" [id=3278, type=Gather]; +"3279 3593" [id=3279, type=Unsqueeze]; +"3280 3594" [id=3280, type=Unsqueeze]; +"3281 3589" [id=3281, type=Slice]; +"3282 3591" [id=3282, type=Gather]; +"3283 3592" [id=3283, type=Unsqueeze]; +"3284 3597" [id=3284, type=NonMaxSuppression]; +"3285 3599" [id=3285, type=Gather]; +"3286 3600" [id=3286, type=Squeeze]; +"3287 3604" [id=3287, type=Gather]; +"3288 3530" [id=3288, type=Slice]; +"3289 3532" [id=3289, type=Gather]; +"3290 3533" [id=3290, type=Cast]; +"3291 3534" [id=3291, type=NonZero]; +"3292 3535" [id=3292, type=Transpose]; +"3293 3536" [id=3293, type=Squeeze]; +"3294 3539" [id=3294, type=Cast]; +"3295 3538" [id=3295, type=Gather]; +"3296 3540" [id=3296, type=Gather]; +"3297 3549" [id=3297, type=Unsqueeze]; +"3298 3550" [id=3298, type=Unsqueeze]; +"3299 3545" [id=3299, type=Slice]; +"3300 3547" [id=3300, type=Gather]; +"3301 3548" [id=3301, type=Unsqueeze]; +"3302 3553" [id=3302, type=NonMaxSuppression]; +"3303 3555" [id=3303, type=Gather]; +"3304 3556" [id=3304, type=Squeeze]; +"3305 3560" [id=3305, type=Gather]; +"3306 3486" [id=3306, type=Slice]; +"3307 3488" [id=3307, type=Gather]; +"3308 3489" [id=3308, type=Cast]; +"3309 3490" [id=3309, type=NonZero]; +"3310 3491" [id=3310, type=Transpose]; +"3311 3492" [id=3311, type=Squeeze]; +"3312 3495" [id=3312, type=Cast]; +"3313 3494" [id=3313, type=Gather]; +"3314 3496" [id=3314, type=Gather]; +"3315 3505" [id=3315, type=Unsqueeze]; +"3316 3506" [id=3316, type=Unsqueeze]; +"3317 3501" [id=3317, type=Slice]; +"3318 3503" [id=3318, type=Gather]; +"3319 3504" [id=3319, type=Unsqueeze]; +"3320 3509" [id=3320, type=NonMaxSuppression]; +"3321 3511" [id=3321, type=Gather]; +"3322 3512" [id=3322, type=Squeeze]; +"3323 3516" [id=3323, type=Gather]; +"3324 3442" [id=3324, type=Slice]; +"3325 3444" [id=3325, type=Gather]; +"3326 3445" [id=3326, type=Cast]; +"3327 3446" [id=3327, type=NonZero]; +"3328 3447" [id=3328, type=Transpose]; +"3329 3448" [id=3329, type=Squeeze]; +"3330 3451" [id=3330, type=Cast]; +"3331 3450" [id=3331, type=Gather]; +"3332 3452" [id=3332, type=Gather]; +"3333 3461" [id=3333, type=Unsqueeze]; +"3334 3462" [id=3334, type=Unsqueeze]; +"3335 3457" [id=3335, type=Slice]; +"3336 3459" [id=3336, type=Gather]; +"3337 3460" [id=3337, type=Unsqueeze]; +"3338 3465" [id=3338, type=NonMaxSuppression]; +"3339 3467" [id=3339, 
type=Gather]; +"3340 3468" [id=3340, type=Squeeze]; +"3341 3472" [id=3341, type=Gather]; +"3342 3398" [id=3342, type=Slice]; +"3343 3400" [id=3343, type=Gather]; +"3344 3401" [id=3344, type=Cast]; +"3345 3402" [id=3345, type=NonZero]; +"3346 3403" [id=3346, type=Transpose]; +"3347 3404" [id=3347, type=Squeeze]; +"3348 3407" [id=3348, type=Cast]; +"3349 3406" [id=3349, type=Gather]; +"3350 3408" [id=3350, type=Gather]; +"3351 3417" [id=3351, type=Unsqueeze]; +"3352 3418" [id=3352, type=Unsqueeze]; +"3353 3413" [id=3353, type=Slice]; +"3354 3415" [id=3354, type=Gather]; +"3355 3416" [id=3355, type=Unsqueeze]; +"3356 3421" [id=3356, type=NonMaxSuppression]; +"3357 3423" [id=3357, type=Gather]; +"3358 3424" [id=3358, type=Squeeze]; +"3359 3428" [id=3359, type=Gather]; +"3360 3354" [id=3360, type=Slice]; +"3361 3356" [id=3361, type=Gather]; +"3362 3357" [id=3362, type=Cast]; +"3363 3358" [id=3363, type=NonZero]; +"3364 3359" [id=3364, type=Transpose]; +"3365 3360" [id=3365, type=Squeeze]; +"3366 3363" [id=3366, type=Cast]; +"3367 3362" [id=3367, type=Gather]; +"3368 3364" [id=3368, type=Gather]; +"3369 3373" [id=3369, type=Unsqueeze]; +"3370 3374" [id=3370, type=Unsqueeze]; +"3371 3369" [id=3371, type=Slice]; +"3372 3371" [id=3372, type=Gather]; +"3373 3372" [id=3373, type=Unsqueeze]; +"3374 3377" [id=3374, type=NonMaxSuppression]; +"3375 3379" [id=3375, type=Gather]; +"3376 3380" [id=3376, type=Squeeze]; +"3377 3384" [id=3377, type=Gather]; +"3378 3310" [id=3378, type=Slice]; +"3379 3312" [id=3379, type=Gather]; +"3380 3313" [id=3380, type=Cast]; +"3381 3314" [id=3381, type=NonZero]; +"3382 3315" [id=3382, type=Transpose]; +"3383 3316" [id=3383, type=Squeeze]; +"3384 3319" [id=3384, type=Cast]; +"3385 3318" [id=3385, type=Gather]; +"3386 3320" [id=3386, type=Gather]; +"3387 3329" [id=3387, type=Unsqueeze]; +"3388 3330" [id=3388, type=Unsqueeze]; +"3389 3325" [id=3389, type=Slice]; +"3390 3327" [id=3390, type=Gather]; +"3391 3328" [id=3391, type=Unsqueeze]; +"3392 3333" [id=3392, type=NonMaxSuppression]; +"3393 3335" [id=3393, type=Gather]; +"3394 3336" [id=3394, type=Squeeze]; +"3395 3340" [id=3395, type=Gather]; +"3396 3266" [id=3396, type=Slice]; +"3397 3268" [id=3397, type=Gather]; +"3398 3269" [id=3398, type=Cast]; +"3399 3270" [id=3399, type=NonZero]; +"3400 3271" [id=3400, type=Transpose]; +"3401 3272" [id=3401, type=Squeeze]; +"3402 3275" [id=3402, type=Cast]; +"3403 3274" [id=3403, type=Gather]; +"3404 3276" [id=3404, type=Gather]; +"3405 3285" [id=3405, type=Unsqueeze]; +"3406 3286" [id=3406, type=Unsqueeze]; +"3407 3281" [id=3407, type=Slice]; +"3408 3283" [id=3408, type=Gather]; +"3409 3284" [id=3409, type=Unsqueeze]; +"3410 3289" [id=3410, type=NonMaxSuppression]; +"3411 3291" [id=3411, type=Gather]; +"3412 3292" [id=3412, type=Squeeze]; +"3413 3296" [id=3413, type=Gather]; +"3414 3222" [id=3414, type=Slice]; +"3415 3224" [id=3415, type=Gather]; +"3416 3225" [id=3416, type=Cast]; +"3417 3226" [id=3417, type=NonZero]; +"3418 3227" [id=3418, type=Transpose]; +"3419 3228" [id=3419, type=Squeeze]; +"3420 3231" [id=3420, type=Cast]; +"3421 3230" [id=3421, type=Gather]; +"3422 3232" [id=3422, type=Gather]; +"3423 3241" [id=3423, type=Unsqueeze]; +"3424 3242" [id=3424, type=Unsqueeze]; +"3425 3237" [id=3425, type=Slice]; +"3426 3239" [id=3426, type=Gather]; +"3427 3240" [id=3427, type=Unsqueeze]; +"3428 3245" [id=3428, type=NonMaxSuppression]; +"3429 3247" [id=3429, type=Gather]; +"3430 3248" [id=3430, type=Squeeze]; +"3431 3252" [id=3431, type=Gather]; +"3432 3178" [id=3432, type=Slice]; 
+"3433 3180" [id=3433, type=Gather]; +"3434 3181" [id=3434, type=Cast]; +"3435 3182" [id=3435, type=NonZero]; +"3436 3183" [id=3436, type=Transpose]; +"3437 3184" [id=3437, type=Squeeze]; +"3438 3187" [id=3438, type=Cast]; +"3439 3186" [id=3439, type=Gather]; +"3440 3188" [id=3440, type=Gather]; +"3441 3197" [id=3441, type=Unsqueeze]; +"3442 3198" [id=3442, type=Unsqueeze]; +"3443 3193" [id=3443, type=Slice]; +"3444 3195" [id=3444, type=Gather]; +"3445 3196" [id=3445, type=Unsqueeze]; +"3446 3201" [id=3446, type=NonMaxSuppression]; +"3447 3203" [id=3447, type=Gather]; +"3448 3204" [id=3448, type=Squeeze]; +"3449 3208" [id=3449, type=Gather]; +"3450 3134" [id=3450, type=Slice]; +"3451 3136" [id=3451, type=Gather]; +"3452 3137" [id=3452, type=Cast]; +"3453 3138" [id=3453, type=NonZero]; +"3454 3139" [id=3454, type=Transpose]; +"3455 3140" [id=3455, type=Squeeze]; +"3456 3143" [id=3456, type=Cast]; +"3457 3142" [id=3457, type=Gather]; +"3458 3144" [id=3458, type=Gather]; +"3459 3153" [id=3459, type=Unsqueeze]; +"3460 3154" [id=3460, type=Unsqueeze]; +"3461 3149" [id=3461, type=Slice]; +"3462 3151" [id=3462, type=Gather]; +"3463 3152" [id=3463, type=Unsqueeze]; +"3464 3157" [id=3464, type=NonMaxSuppression]; +"3465 3159" [id=3465, type=Gather]; +"3466 3160" [id=3466, type=Squeeze]; +"3467 3164" [id=3467, type=Gather]; +"3468 3090" [id=3468, type=Slice]; +"3469 3092" [id=3469, type=Gather]; +"3470 3093" [id=3470, type=Cast]; +"3471 3094" [id=3471, type=NonZero]; +"3472 3095" [id=3472, type=Transpose]; +"3473 3096" [id=3473, type=Squeeze]; +"3474 3099" [id=3474, type=Cast]; +"3475 3098" [id=3475, type=Gather]; +"3476 3100" [id=3476, type=Gather]; +"3477 3109" [id=3477, type=Unsqueeze]; +"3478 3110" [id=3478, type=Unsqueeze]; +"3479 3105" [id=3479, type=Slice]; +"3480 3107" [id=3480, type=Gather]; +"3481 3108" [id=3481, type=Unsqueeze]; +"3482 3113" [id=3482, type=NonMaxSuppression]; +"3483 3115" [id=3483, type=Gather]; +"3484 3116" [id=3484, type=Squeeze]; +"3485 3120" [id=3485, type=Gather]; +"3486 3046" [id=3486, type=Slice]; +"3487 3048" [id=3487, type=Gather]; +"3488 3049" [id=3488, type=Cast]; +"3489 3050" [id=3489, type=NonZero]; +"3490 3051" [id=3490, type=Transpose]; +"3491 3052" [id=3491, type=Squeeze]; +"3492 3055" [id=3492, type=Cast]; +"3493 3054" [id=3493, type=Gather]; +"3494 3056" [id=3494, type=Gather]; +"3495 3065" [id=3495, type=Unsqueeze]; +"3496 3066" [id=3496, type=Unsqueeze]; +"3497 3061" [id=3497, type=Slice]; +"3498 3063" [id=3498, type=Gather]; +"3499 3064" [id=3499, type=Unsqueeze]; +"3500 3069" [id=3500, type=NonMaxSuppression]; +"3501 3071" [id=3501, type=Gather]; +"3502 3072" [id=3502, type=Squeeze]; +"3503 3076" [id=3503, type=Gather]; +"3504 3002" [id=3504, type=Slice]; +"3505 3004" [id=3505, type=Gather]; +"3506 3005" [id=3506, type=Cast]; +"3507 3006" [id=3507, type=NonZero]; +"3508 3007" [id=3508, type=Transpose]; +"3509 3008" [id=3509, type=Squeeze]; +"3510 3011" [id=3510, type=Cast]; +"3511 3010" [id=3511, type=Gather]; +"3512 3012" [id=3512, type=Gather]; +"3513 3021" [id=3513, type=Unsqueeze]; +"3514 3022" [id=3514, type=Unsqueeze]; +"3515 3017" [id=3515, type=Slice]; +"3516 3019" [id=3516, type=Gather]; +"3517 3020" [id=3517, type=Unsqueeze]; +"3518 3025" [id=3518, type=NonMaxSuppression]; +"3519 3027" [id=3519, type=Gather]; +"3520 3028" [id=3520, type=Squeeze]; +"3521 3032" [id=3521, type=Gather]; +"3522 6520" [id=3522, type=Concat]; +"3523 6521" [id=3523, type=Shape]; +"3524 6523" [id=3524, type=Concat]; +"3525 6524" [id=3525, type=Cast]; +"3526 6525" 
[id=3526, type=ReduceMin]; +"3527 6526" [id=3527, type=Cast]; +"3528 6527" [id=3528, type=Unsqueeze]; +"3529 6528" [id=3529, type=TopK]; +"3530 6529" [id=3530, type=Cast]; +"3531 6505" [id=3531, type=Cast]; +"3532 6506" [id=3532, type=Gather]; +"3533 6461" [id=3533, type=Cast]; +"3534 6462" [id=3534, type=Gather]; +"3535 6417" [id=3535, type=Cast]; +"3536 6418" [id=3536, type=Gather]; +"3537 6373" [id=3537, type=Cast]; +"3538 6374" [id=3538, type=Gather]; +"3539 6329" [id=3539, type=Cast]; +"3540 6330" [id=3540, type=Gather]; +"3541 6285" [id=3541, type=Cast]; +"3542 6286" [id=3542, type=Gather]; +"3543 6241" [id=3543, type=Cast]; +"3544 6242" [id=3544, type=Gather]; +"3545 6197" [id=3545, type=Cast]; +"3546 6198" [id=3546, type=Gather]; +"3547 6153" [id=3547, type=Cast]; +"3548 6154" [id=3548, type=Gather]; +"3549 6109" [id=3549, type=Cast]; +"3550 6110" [id=3550, type=Gather]; +"3551 6065" [id=3551, type=Cast]; +"3552 6066" [id=3552, type=Gather]; +"3553 6021" [id=3553, type=Cast]; +"3554 6022" [id=3554, type=Gather]; +"3555 5977" [id=3555, type=Cast]; +"3556 5978" [id=3556, type=Gather]; +"3557 5933" [id=3557, type=Cast]; +"3558 5934" [id=3558, type=Gather]; +"3559 5889" [id=3559, type=Cast]; +"3560 5890" [id=3560, type=Gather]; +"3561 5845" [id=3561, type=Cast]; +"3562 5846" [id=3562, type=Gather]; +"3563 5801" [id=3563, type=Cast]; +"3564 5802" [id=3564, type=Gather]; +"3565 5757" [id=3565, type=Cast]; +"3566 5758" [id=3566, type=Gather]; +"3567 5713" [id=3567, type=Cast]; +"3568 5714" [id=3568, type=Gather]; +"3569 5669" [id=3569, type=Cast]; +"3570 5670" [id=3570, type=Gather]; +"3571 5625" [id=3571, type=Cast]; +"3572 5626" [id=3572, type=Gather]; +"3573 5581" [id=3573, type=Cast]; +"3574 5582" [id=3574, type=Gather]; +"3575 5537" [id=3575, type=Cast]; +"3576 5538" [id=3576, type=Gather]; +"3577 5493" [id=3577, type=Cast]; +"3578 5494" [id=3578, type=Gather]; +"3579 5449" [id=3579, type=Cast]; +"3580 5450" [id=3580, type=Gather]; +"3581 5405" [id=3581, type=Cast]; +"3582 5406" [id=3582, type=Gather]; +"3583 5361" [id=3583, type=Cast]; +"3584 5362" [id=3584, type=Gather]; +"3585 5317" [id=3585, type=Cast]; +"3586 5318" [id=3586, type=Gather]; +"3587 5273" [id=3587, type=Cast]; +"3588 5274" [id=3588, type=Gather]; +"3589 5229" [id=3589, type=Cast]; +"3590 5230" [id=3590, type=Gather]; +"3591 5185" [id=3591, type=Cast]; +"3592 5186" [id=3592, type=Gather]; +"3593 5141" [id=3593, type=Cast]; +"3594 5142" [id=3594, type=Gather]; +"3595 5097" [id=3595, type=Cast]; +"3596 5098" [id=3596, type=Gather]; +"3597 5053" [id=3597, type=Cast]; +"3598 5054" [id=3598, type=Gather]; +"3599 5009" [id=3599, type=Cast]; +"3600 5010" [id=3600, type=Gather]; +"3601 4965" [id=3601, type=Cast]; +"3602 4966" [id=3602, type=Gather]; +"3603 4921" [id=3603, type=Cast]; +"3604 4922" [id=3604, type=Gather]; +"3605 4877" [id=3605, type=Cast]; +"3606 4878" [id=3606, type=Gather]; +"3607 4833" [id=3607, type=Cast]; +"3608 4834" [id=3608, type=Gather]; +"3609 4789" [id=3609, type=Cast]; +"3610 4790" [id=3610, type=Gather]; +"3611 4745" [id=3611, type=Cast]; +"3612 4746" [id=3612, type=Gather]; +"3613 4701" [id=3613, type=Cast]; +"3614 4702" [id=3614, type=Gather]; +"3615 4657" [id=3615, type=Cast]; +"3616 4658" [id=3616, type=Gather]; +"3617 4613" [id=3617, type=Cast]; +"3618 4614" [id=3618, type=Gather]; +"3619 4569" [id=3619, type=Cast]; +"3620 4570" [id=3620, type=Gather]; +"3621 4525" [id=3621, type=Cast]; +"3622 4526" [id=3622, type=Gather]; +"3623 4481" [id=3623, type=Cast]; +"3624 4482" [id=3624, 
type=Gather]; +"3625 4437" [id=3625, type=Cast]; +"3626 4438" [id=3626, type=Gather]; +"3627 4393" [id=3627, type=Cast]; +"3628 4394" [id=3628, type=Gather]; +"3629 4349" [id=3629, type=Cast]; +"3630 4350" [id=3630, type=Gather]; +"3631 4305" [id=3631, type=Cast]; +"3632 4306" [id=3632, type=Gather]; +"3633 4261" [id=3633, type=Cast]; +"3634 4262" [id=3634, type=Gather]; +"3635 4217" [id=3635, type=Cast]; +"3636 4218" [id=3636, type=Gather]; +"3637 4173" [id=3637, type=Cast]; +"3638 4174" [id=3638, type=Gather]; +"3639 4129" [id=3639, type=Cast]; +"3640 4130" [id=3640, type=Gather]; +"3641 4085" [id=3641, type=Cast]; +"3642 4086" [id=3642, type=Gather]; +"3643 4041" [id=3643, type=Cast]; +"3644 4042" [id=3644, type=Gather]; +"3645 3997" [id=3645, type=Cast]; +"3646 3998" [id=3646, type=Gather]; +"3647 3953" [id=3647, type=Cast]; +"3648 3954" [id=3648, type=Gather]; +"3649 3909" [id=3649, type=Cast]; +"3650 3910" [id=3650, type=Gather]; +"3651 3865" [id=3651, type=Cast]; +"3652 3866" [id=3652, type=Gather]; +"3653 3821" [id=3653, type=Cast]; +"3654 3822" [id=3654, type=Gather]; +"3655 3777" [id=3655, type=Cast]; +"3656 3778" [id=3656, type=Gather]; +"3657 3733" [id=3657, type=Cast]; +"3658 3734" [id=3658, type=Gather]; +"3659 3689" [id=3659, type=Cast]; +"3660 3690" [id=3660, type=Gather]; +"3661 3645" [id=3661, type=Cast]; +"3662 3646" [id=3662, type=Gather]; +"3663 3601" [id=3663, type=Cast]; +"3664 3602" [id=3664, type=Gather]; +"3665 3557" [id=3665, type=Cast]; +"3666 3558" [id=3666, type=Gather]; +"3667 3513" [id=3667, type=Cast]; +"3668 3514" [id=3668, type=Gather]; +"3669 3469" [id=3669, type=Cast]; +"3670 3470" [id=3670, type=Gather]; +"3671 3425" [id=3671, type=Cast]; +"3672 3426" [id=3672, type=Gather]; +"3673 3381" [id=3673, type=Cast]; +"3674 3382" [id=3674, type=Gather]; +"3675 3337" [id=3675, type=Cast]; +"3676 3338" [id=3676, type=Gather]; +"3677 3293" [id=3677, type=Cast]; +"3678 3294" [id=3678, type=Gather]; +"3679 3249" [id=3679, type=Cast]; +"3680 3250" [id=3680, type=Gather]; +"3681 3205" [id=3681, type=Cast]; +"3682 3206" [id=3682, type=Gather]; +"3683 3161" [id=3683, type=Cast]; +"3684 3162" [id=3684, type=Gather]; +"3685 3117" [id=3685, type=Cast]; +"3686 3118" [id=3686, type=Gather]; +"3687 3073" [id=3687, type=Cast]; +"3688 3074" [id=3688, type=Gather]; +"3689 3029" [id=3689, type=Cast]; +"3690 3030" [id=3690, type=Gather]; +"3691 6518" [id=3691, type=Concat]; +"3692 6530" [id=3692, type=Gather]; +"3693 QuantizeLinear_6568_4" [id=3693, type=QuantizeLinear]; +"3694 DequantizeLinear_6568_4" [id=3694, type=DequantizeLinear]; +"3695 QuantizeLinear_6568_3" [id=3695, type=QuantizeLinear]; +"3696 DequantizeLinear_6568_3" [id=3696, type=DequantizeLinear]; +"3697 QuantizeLinear_6568_2" [id=3697, type=QuantizeLinear]; +"3698 DequantizeLinear_6568_2" [id=3698, type=DequantizeLinear]; +"3699 QuantizeLinear_6568_1" [id=3699, type=QuantizeLinear]; +"3700 DequantizeLinear_6568_1" [id=3700, type=DequantizeLinear]; +"3701 6576" [id=3701, type=Slice]; +"3702 6578" [id=3702, type=Gather]; +"3703 6569" [id=3703, type=Slice]; +"3704 6571" [id=3704, type=Gather]; +"3705 6579" [id=3705, type=Sub]; +"3706 QuantizeLinear_6617_1" [id=3706, type=QuantizeLinear]; +"3707 DequantizeLinear_6617_1" [id=3707, type=DequantizeLinear]; +"3708 6581" [id=3708, type=Add]; +"3709 6559" [id=3709, type=Slice]; +"3710 6561" [id=3710, type=Gather]; +"3711 6552" [id=3711, type=Slice]; +"3712 6554" [id=3712, type=Gather]; +"3713 6562" [id=3713, type=Sub]; +"3714 QuantizeLinear_6600_1" [id=3714, 
type=QuantizeLinear]; +"3715 DequantizeLinear_6600_1" [id=3715, type=DequantizeLinear]; +"3716 6564" [id=3716, type=Add]; +"3717 QuantizeLinear_6619_1" [id=3717, type=QuantizeLinear]; +"3718 DequantizeLinear_6619_1" [id=3718, type=DequantizeLinear]; +"3719 QuantizeLinear_6602_1" [id=3719, type=QuantizeLinear]; +"3720 DequantizeLinear_6602_1" [id=3720, type=DequantizeLinear]; +"3721 6582" [id=3721, type=Mul]; +"3722 QuantizeLinear_6620_1" [id=3722, type=QuantizeLinear]; +"3723 DequantizeLinear_6620_1" [id=3723, type=DequantizeLinear]; +"3724 6583" [id=3724, type=Sqrt]; +"3725 6586" [id=3725, type=Div]; +"3726 QuantizeLinear_6624_1" [id=3726, type=QuantizeLinear]; +"3727 DequantizeLinear_6624_1" [id=3727, type=DequantizeLinear]; +"3728 6587" [id=3728, type=Add]; +"3729 6588" [id=3729, type=Log]; +"3730 6590" [id=3730, type=Div]; +"3731 QuantizeLinear_6628_1" [id=3731, type=QuantizeLinear]; +"3732 DequantizeLinear_6628_1" [id=3732, type=DequantizeLinear]; +"3733 6592" [id=3733, type=Add]; +"3734 QuantizeLinear_6630_1" [id=3734, type=QuantizeLinear]; +"3735 DequantizeLinear_6630_1" [id=3735, type=DequantizeLinear]; +"3736 6593" [id=3736, type=Floor]; +"3737 6594" [id=3737, type=Clip]; +"3738 6595" [id=3738, type=Cast]; +"3739 6597" [id=3739, type=Sub]; +"3740 6599" [id=3740, type=Equal]; +"3741 6601" [id=3741, type=Cast]; +"3742 6602" [id=3742, type=NonZero]; +"3743 6603" [id=3743, type=Transpose]; +"3744 6604" [id=3744, type=Squeeze]; +"3745 6605" [id=3745, type=Cast]; +"3746 6539" [id=3746, type=Slice]; +"3747 6544" [id=3747, type=Slice]; +"3748 6545" [id=3748, type=Shape]; +"3749 6546" [id=3749, type=ConstantOfShape]; +"3750 6547" [id=3750, type=Concat]; +"3751 6606" [id=3751, type=Gather]; +"3752 6612" [id=3752, type=Gather]; +"3753 6608" [id=3753, type=Gather]; +"3754 6609" [id=3754, type=Squeeze]; +"3755 6610" [id=3755, type=Cast]; +"3756 6613" [id=3756, type=RoiAlign]; +"3757 6614" [id=3757, type=Cast]; +"3758 6702" [id=3758, type=Shape]; +"3759 6703" [id=3759, type=Gather]; +"3760 6707" [id=3760, type=Unsqueeze]; +"3761 6699" [id=3761, type=Shape]; +"3762 6700" [id=3762, type=Gather]; +"3763 6706" [id=3763, type=Unsqueeze]; +"3764 6696" [id=3764, type=Shape]; +"3765 6697" [id=3765, type=Gather]; +"3766 6705" [id=3766, type=Unsqueeze]; +"3767 6685" [id=3767, type=Equal]; +"3768 6687" [id=3768, type=Cast]; +"3769 6688" [id=3769, type=NonZero]; +"3770 6689" [id=3770, type=Transpose]; +"3771 6691" [id=3771, type=Reshape]; +"3772 6693" [id=3772, type=Shape]; +"3773 6694" [id=3773, type=Gather]; +"3774 6704" [id=3774, type=Unsqueeze]; +"3775 6708" [id=3775, type=Concat]; +"3776 6709" [id=3776, type=Expand]; +"3777 6710" [id=3777, type=Cast]; +"3778 6676" [id=3778, type=Shape]; +"3779 6677" [id=3779, type=Gather]; +"3780 6681" [id=3780, type=Unsqueeze]; +"3781 6673" [id=3781, type=Shape]; +"3782 6674" [id=3782, type=Gather]; +"3783 6680" [id=3783, type=Unsqueeze]; +"3784 6670" [id=3784, type=Shape]; +"3785 6671" [id=3785, type=Gather]; +"3786 6679" [id=3786, type=Unsqueeze]; +"3787 6667" [id=3787, type=Shape]; +"3788 6668" [id=3788, type=Gather]; +"3789 6678" [id=3789, type=Unsqueeze]; +"3790 6682" [id=3790, type=Concat]; +"3791 6683" [id=3791, type=ConstantOfShape]; +"3792 6711" [id=3792, type=ScatterElements]; +"3793 6616" [id=3793, type=Equal]; +"3794 6618" [id=3794, type=Cast]; +"3795 6619" [id=3795, type=NonZero]; +"3796 6620" [id=3796, type=Transpose]; +"3797 6621" [id=3797, type=Squeeze]; +"3798 6622" [id=3798, type=Cast]; +"3799 6623" [id=3799, type=Gather]; +"3800 6629" [id=3800, 
type=Gather]; +"3801 6625" [id=3801, type=Gather]; +"3802 6626" [id=3802, type=Squeeze]; +"3803 6627" [id=3803, type=Cast]; +"3804 6630" [id=3804, type=RoiAlign]; +"3805 6631" [id=3805, type=Cast]; +"3806 6730" [id=3806, type=Shape]; +"3807 6731" [id=3807, type=Gather]; +"3808 6735" [id=3808, type=Unsqueeze]; +"3809 6727" [id=3809, type=Shape]; +"3810 6728" [id=3810, type=Gather]; +"3811 6734" [id=3811, type=Unsqueeze]; +"3812 6724" [id=3812, type=Shape]; +"3813 6725" [id=3813, type=Gather]; +"3814 6733" [id=3814, type=Unsqueeze]; +"3815 6713" [id=3815, type=Equal]; +"3816 6715" [id=3816, type=Cast]; +"3817 6716" [id=3817, type=NonZero]; +"3818 6717" [id=3818, type=Transpose]; +"3819 6719" [id=3819, type=Reshape]; +"3820 6721" [id=3820, type=Shape]; +"3821 6722" [id=3821, type=Gather]; +"3822 6732" [id=3822, type=Unsqueeze]; +"3823 6736" [id=3823, type=Concat]; +"3824 6737" [id=3824, type=Expand]; +"3825 6738" [id=3825, type=Cast]; +"3826 6739" [id=3826, type=ScatterElements]; +"3827 6633" [id=3827, type=Equal]; +"3828 6635" [id=3828, type=Cast]; +"3829 6636" [id=3829, type=NonZero]; +"3830 6637" [id=3830, type=Transpose]; +"3831 6638" [id=3831, type=Squeeze]; +"3832 6639" [id=3832, type=Cast]; +"3833 6640" [id=3833, type=Gather]; +"3834 6646" [id=3834, type=Gather]; +"3835 6642" [id=3835, type=Gather]; +"3836 6643" [id=3836, type=Squeeze]; +"3837 6644" [id=3837, type=Cast]; +"3838 6647" [id=3838, type=RoiAlign]; +"3839 6648" [id=3839, type=Cast]; +"3840 6758" [id=3840, type=Shape]; +"3841 6759" [id=3841, type=Gather]; +"3842 6763" [id=3842, type=Unsqueeze]; +"3843 6755" [id=3843, type=Shape]; +"3844 6756" [id=3844, type=Gather]; +"3845 6762" [id=3845, type=Unsqueeze]; +"3846 6752" [id=3846, type=Shape]; +"3847 6753" [id=3847, type=Gather]; +"3848 6761" [id=3848, type=Unsqueeze]; +"3849 6741" [id=3849, type=Equal]; +"3850 6743" [id=3850, type=Cast]; +"3851 6744" [id=3851, type=NonZero]; +"3852 6745" [id=3852, type=Transpose]; +"3853 6747" [id=3853, type=Reshape]; +"3854 6749" [id=3854, type=Shape]; +"3855 6750" [id=3855, type=Gather]; +"3856 6760" [id=3856, type=Unsqueeze]; +"3857 6764" [id=3857, type=Concat]; +"3858 6765" [id=3858, type=Expand]; +"3859 6766" [id=3859, type=Cast]; +"3860 6767" [id=3860, type=ScatterElements]; +"3861 6650" [id=3861, type=Equal]; +"3862 6652" [id=3862, type=Cast]; +"3863 6653" [id=3863, type=NonZero]; +"3864 6654" [id=3864, type=Transpose]; +"3865 6655" [id=3865, type=Squeeze]; +"3866 6656" [id=3866, type=Cast]; +"3867 6657" [id=3867, type=Gather]; +"3868 6663" [id=3868, type=Gather]; +"3869 6659" [id=3869, type=Gather]; +"3870 6660" [id=3870, type=Squeeze]; +"3871 6661" [id=3871, type=Cast]; +"3872 6664" [id=3872, type=RoiAlign]; +"3873 6665" [id=3873, type=Cast]; +"3874 6786" [id=3874, type=Shape]; +"3875 6787" [id=3875, type=Gather]; +"3876 6791" [id=3876, type=Unsqueeze]; +"3877 6783" [id=3877, type=Shape]; +"3878 6784" [id=3878, type=Gather]; +"3879 6790" [id=3879, type=Unsqueeze]; +"3880 6780" [id=3880, type=Shape]; +"3881 6781" [id=3881, type=Gather]; +"3882 6789" [id=3882, type=Unsqueeze]; +"3883 6769" [id=3883, type=Equal]; +"3884 6771" [id=3884, type=Cast]; +"3885 6772" [id=3885, type=NonZero]; +"3886 6773" [id=3886, type=Transpose]; +"3887 6775" [id=3887, type=Reshape]; +"3888 6777" [id=3888, type=Shape]; +"3889 6778" [id=3889, type=Gather]; +"3890 6788" [id=3890, type=Unsqueeze]; +"3891 6792" [id=3891, type=Concat]; +"3892 6793" [id=3892, type=Expand]; +"3893 6794" [id=3893, type=Cast]; +"3894 6795" [id=3894, type=ScatterElements]; +"3895 
QuantizeLinear_6833_1" [id=3895, type=QuantizeLinear]; +"3896 DequantizeLinear_6833_1" [id=3896, type=DequantizeLinear]; +"3897 QuantizeLinear_6834_1" [id=3897, type=QuantizeLinear]; +"3898 DequantizeLinear_6834_1" [id=3898, type=DequantizeLinear]; +"3899 6798" [id=3899, type=Conv]; +"3900 6799" [id=3900, type=Relu]; +"3901 QuantizeLinear_6837_1" [id=3901, type=QuantizeLinear]; +"3902 DequantizeLinear_6837_1" [id=3902, type=DequantizeLinear]; +"3903 QuantizeLinear_6838_1" [id=3903, type=QuantizeLinear]; +"3904 DequantizeLinear_6838_1" [id=3904, type=DequantizeLinear]; +"3905 6802" [id=3905, type=Conv]; +"3906 6803" [id=3906, type=Relu]; +"3907 QuantizeLinear_6841_1" [id=3907, type=QuantizeLinear]; +"3908 DequantizeLinear_6841_1" [id=3908, type=DequantizeLinear]; +"3909 QuantizeLinear_6842_1" [id=3909, type=QuantizeLinear]; +"3910 DequantizeLinear_6842_1" [id=3910, type=DequantizeLinear]; +"3911 6806" [id=3911, type=Conv]; +"3912 6807" [id=3912, type=Relu]; +"3913 QuantizeLinear_6845_1" [id=3913, type=QuantizeLinear]; +"3914 DequantizeLinear_6845_1" [id=3914, type=DequantizeLinear]; +"3915 QuantizeLinear_6846_1" [id=3915, type=QuantizeLinear]; +"3916 DequantizeLinear_6846_1" [id=3916, type=DequantizeLinear]; +"3917 6810" [id=3917, type=Conv]; +"3918 6811" [id=3918, type=Relu]; +"3919 QuantizeLinear_6849_1" [id=3919, type=QuantizeLinear]; +"3920 DequantizeLinear_6849_1" [id=3920, type=DequantizeLinear]; +"3921 QuantizeLinear_6850_1" [id=3921, type=QuantizeLinear]; +"3922 DequantizeLinear_6850_1" [id=3922, type=DequantizeLinear]; +"3923 6814" [id=3923, type=ConvTranspose]; +"3924 6815" [id=3924, type=Relu]; +"3925 QuantizeLinear_6853_1" [id=3925, type=QuantizeLinear]; +"3926 DequantizeLinear_6853_1" [id=3926, type=DequantizeLinear]; +"3927 QuantizeLinear_6854_1" [id=3927, type=QuantizeLinear]; +"3928 DequantizeLinear_6854_1" [id=3928, type=DequantizeLinear]; +"3929 6818" [id=3929, type=Conv]; +"3930 6819" [id=3930, type=Sigmoid]; +"3931 6844" [id=3931, type=Shape]; +"3932 6845" [id=3932, type=Gather]; +"3933 6822" [id=3933, type=Shape]; +"3934 6823" [id=3934, type=Gather]; +"3935 6824" [id=3935, type=Unsqueeze]; +"3936 6825" [id=3936, type=Concat]; +"3937 6826" [id=3937, type=ConstantOfShape]; +"3938 6827" [id=3938, type=Cast]; +"3939 6828" [id=3939, type=NonZero]; +"3940 6829" [id=3940, type=Transpose]; +"3941 6830" [id=3941, type=Squeeze]; +"3942 6846" [id=3942, type=Mul]; +"3943 6513" [id=3943, type=Slice]; +"3944 6515" [id=3944, type=Gather]; +"3945 6516" [id=3945, type=Shape]; +"3946 6517" [id=3946, type=ConstantOfShape]; +"3947 6469" [id=3947, type=Slice]; +"3948 6471" [id=3948, type=Gather]; +"3949 6472" [id=3949, type=Shape]; +"3950 6473" [id=3950, type=ConstantOfShape]; +"3951 6425" [id=3951, type=Slice]; +"3952 6427" [id=3952, type=Gather]; +"3953 6428" [id=3953, type=Shape]; +"3954 6429" [id=3954, type=ConstantOfShape]; +"3955 6381" [id=3955, type=Slice]; +"3956 6383" [id=3956, type=Gather]; +"3957 6384" [id=3957, type=Shape]; +"3958 6385" [id=3958, type=ConstantOfShape]; +"3959 6337" [id=3959, type=Slice]; +"3960 6339" [id=3960, type=Gather]; +"3961 6340" [id=3961, type=Shape]; +"3962 6341" [id=3962, type=ConstantOfShape]; +"3963 6293" [id=3963, type=Slice]; +"3964 6295" [id=3964, type=Gather]; +"3965 6296" [id=3965, type=Shape]; +"3966 6297" [id=3966, type=ConstantOfShape]; +"3967 6249" [id=3967, type=Slice]; +"3968 6251" [id=3968, type=Gather]; +"3969 6252" [id=3969, type=Shape]; +"3970 6253" [id=3970, type=ConstantOfShape]; +"3971 6205" [id=3971, type=Slice]; +"3972 6207" 
[id=3972, type=Gather]; +"3973 6208" [id=3973, type=Shape]; +"3974 6209" [id=3974, type=ConstantOfShape]; +"3975 6161" [id=3975, type=Slice]; +"3976 6163" [id=3976, type=Gather]; +"3977 6164" [id=3977, type=Shape]; +"3978 6165" [id=3978, type=ConstantOfShape]; +"3979 6117" [id=3979, type=Slice]; +"3980 6119" [id=3980, type=Gather]; +"3981 6120" [id=3981, type=Shape]; +"3982 6121" [id=3982, type=ConstantOfShape]; +"3983 6073" [id=3983, type=Slice]; +"3984 6075" [id=3984, type=Gather]; +"3985 6076" [id=3985, type=Shape]; +"3986 6077" [id=3986, type=ConstantOfShape]; +"3987 6029" [id=3987, type=Slice]; +"3988 6031" [id=3988, type=Gather]; +"3989 6032" [id=3989, type=Shape]; +"3990 6033" [id=3990, type=ConstantOfShape]; +"3991 5985" [id=3991, type=Slice]; +"3992 5987" [id=3992, type=Gather]; +"3993 5988" [id=3993, type=Shape]; +"3994 5989" [id=3994, type=ConstantOfShape]; +"3995 5941" [id=3995, type=Slice]; +"3996 5943" [id=3996, type=Gather]; +"3997 5944" [id=3997, type=Shape]; +"3998 5945" [id=3998, type=ConstantOfShape]; +"3999 5897" [id=3999, type=Slice]; +"4000 5899" [id=4000, type=Gather]; +"4001 5900" [id=4001, type=Shape]; +"4002 5901" [id=4002, type=ConstantOfShape]; +"4003 5853" [id=4003, type=Slice]; +"4004 5855" [id=4004, type=Gather]; +"4005 5856" [id=4005, type=Shape]; +"4006 5857" [id=4006, type=ConstantOfShape]; +"4007 5809" [id=4007, type=Slice]; +"4008 5811" [id=4008, type=Gather]; +"4009 5812" [id=4009, type=Shape]; +"4010 5813" [id=4010, type=ConstantOfShape]; +"4011 5765" [id=4011, type=Slice]; +"4012 5767" [id=4012, type=Gather]; +"4013 5768" [id=4013, type=Shape]; +"4014 5769" [id=4014, type=ConstantOfShape]; +"4015 5721" [id=4015, type=Slice]; +"4016 5723" [id=4016, type=Gather]; +"4017 5724" [id=4017, type=Shape]; +"4018 5725" [id=4018, type=ConstantOfShape]; +"4019 5677" [id=4019, type=Slice]; +"4020 5679" [id=4020, type=Gather]; +"4021 5680" [id=4021, type=Shape]; +"4022 5681" [id=4022, type=ConstantOfShape]; +"4023 5633" [id=4023, type=Slice]; +"4024 5635" [id=4024, type=Gather]; +"4025 5636" [id=4025, type=Shape]; +"4026 5637" [id=4026, type=ConstantOfShape]; +"4027 5589" [id=4027, type=Slice]; +"4028 5591" [id=4028, type=Gather]; +"4029 5592" [id=4029, type=Shape]; +"4030 5593" [id=4030, type=ConstantOfShape]; +"4031 5545" [id=4031, type=Slice]; +"4032 5547" [id=4032, type=Gather]; +"4033 5548" [id=4033, type=Shape]; +"4034 5549" [id=4034, type=ConstantOfShape]; +"4035 5501" [id=4035, type=Slice]; +"4036 5503" [id=4036, type=Gather]; +"4037 5504" [id=4037, type=Shape]; +"4038 5505" [id=4038, type=ConstantOfShape]; +"4039 5457" [id=4039, type=Slice]; +"4040 5459" [id=4040, type=Gather]; +"4041 5460" [id=4041, type=Shape]; +"4042 5461" [id=4042, type=ConstantOfShape]; +"4043 5413" [id=4043, type=Slice]; +"4044 5415" [id=4044, type=Gather]; +"4045 5416" [id=4045, type=Shape]; +"4046 5417" [id=4046, type=ConstantOfShape]; +"4047 5369" [id=4047, type=Slice]; +"4048 5371" [id=4048, type=Gather]; +"4049 5372" [id=4049, type=Shape]; +"4050 5373" [id=4050, type=ConstantOfShape]; +"4051 5325" [id=4051, type=Slice]; +"4052 5327" [id=4052, type=Gather]; +"4053 5328" [id=4053, type=Shape]; +"4054 5329" [id=4054, type=ConstantOfShape]; +"4055 5281" [id=4055, type=Slice]; +"4056 5283" [id=4056, type=Gather]; +"4057 5284" [id=4057, type=Shape]; +"4058 5285" [id=4058, type=ConstantOfShape]; +"4059 5237" [id=4059, type=Slice]; +"4060 5239" [id=4060, type=Gather]; +"4061 5240" [id=4061, type=Shape]; +"4062 5241" [id=4062, type=ConstantOfShape]; +"4063 5193" [id=4063, type=Slice]; 
+"4064 5195" [id=4064, type=Gather]; +"4065 5196" [id=4065, type=Shape]; +"4066 5197" [id=4066, type=ConstantOfShape]; +"4067 5149" [id=4067, type=Slice]; +"4068 5151" [id=4068, type=Gather]; +"4069 5152" [id=4069, type=Shape]; +"4070 5153" [id=4070, type=ConstantOfShape]; +"4071 5105" [id=4071, type=Slice]; +"4072 5107" [id=4072, type=Gather]; +"4073 5108" [id=4073, type=Shape]; +"4074 5109" [id=4074, type=ConstantOfShape]; +"4075 5061" [id=4075, type=Slice]; +"4076 5063" [id=4076, type=Gather]; +"4077 5064" [id=4077, type=Shape]; +"4078 5065" [id=4078, type=ConstantOfShape]; +"4079 5017" [id=4079, type=Slice]; +"4080 5019" [id=4080, type=Gather]; +"4081 5020" [id=4081, type=Shape]; +"4082 5021" [id=4082, type=ConstantOfShape]; +"4083 4973" [id=4083, type=Slice]; +"4084 4975" [id=4084, type=Gather]; +"4085 4976" [id=4085, type=Shape]; +"4086 4977" [id=4086, type=ConstantOfShape]; +"4087 4929" [id=4087, type=Slice]; +"4088 4931" [id=4088, type=Gather]; +"4089 4932" [id=4089, type=Shape]; +"4090 4933" [id=4090, type=ConstantOfShape]; +"4091 4885" [id=4091, type=Slice]; +"4092 4887" [id=4092, type=Gather]; +"4093 4888" [id=4093, type=Shape]; +"4094 4889" [id=4094, type=ConstantOfShape]; +"4095 4841" [id=4095, type=Slice]; +"4096 4843" [id=4096, type=Gather]; +"4097 4844" [id=4097, type=Shape]; +"4098 4845" [id=4098, type=ConstantOfShape]; +"4099 4797" [id=4099, type=Slice]; +"4100 4799" [id=4100, type=Gather]; +"4101 4800" [id=4101, type=Shape]; +"4102 4801" [id=4102, type=ConstantOfShape]; +"4103 4753" [id=4103, type=Slice]; +"4104 4755" [id=4104, type=Gather]; +"4105 4756" [id=4105, type=Shape]; +"4106 4757" [id=4106, type=ConstantOfShape]; +"4107 4709" [id=4107, type=Slice]; +"4108 4711" [id=4108, type=Gather]; +"4109 4712" [id=4109, type=Shape]; +"4110 4713" [id=4110, type=ConstantOfShape]; +"4111 4665" [id=4111, type=Slice]; +"4112 4667" [id=4112, type=Gather]; +"4113 4668" [id=4113, type=Shape]; +"4114 4669" [id=4114, type=ConstantOfShape]; +"4115 4621" [id=4115, type=Slice]; +"4116 4623" [id=4116, type=Gather]; +"4117 4624" [id=4117, type=Shape]; +"4118 4625" [id=4118, type=ConstantOfShape]; +"4119 4577" [id=4119, type=Slice]; +"4120 4579" [id=4120, type=Gather]; +"4121 4580" [id=4121, type=Shape]; +"4122 4581" [id=4122, type=ConstantOfShape]; +"4123 4533" [id=4123, type=Slice]; +"4124 4535" [id=4124, type=Gather]; +"4125 4536" [id=4125, type=Shape]; +"4126 4537" [id=4126, type=ConstantOfShape]; +"4127 4489" [id=4127, type=Slice]; +"4128 4491" [id=4128, type=Gather]; +"4129 4492" [id=4129, type=Shape]; +"4130 4493" [id=4130, type=ConstantOfShape]; +"4131 4445" [id=4131, type=Slice]; +"4132 4447" [id=4132, type=Gather]; +"4133 4448" [id=4133, type=Shape]; +"4134 4449" [id=4134, type=ConstantOfShape]; +"4135 4401" [id=4135, type=Slice]; +"4136 4403" [id=4136, type=Gather]; +"4137 4404" [id=4137, type=Shape]; +"4138 4405" [id=4138, type=ConstantOfShape]; +"4139 4357" [id=4139, type=Slice]; +"4140 4359" [id=4140, type=Gather]; +"4141 4360" [id=4141, type=Shape]; +"4142 4361" [id=4142, type=ConstantOfShape]; +"4143 4313" [id=4143, type=Slice]; +"4144 4315" [id=4144, type=Gather]; +"4145 4316" [id=4145, type=Shape]; +"4146 4317" [id=4146, type=ConstantOfShape]; +"4147 4269" [id=4147, type=Slice]; +"4148 4271" [id=4148, type=Gather]; +"4149 4272" [id=4149, type=Shape]; +"4150 4273" [id=4150, type=ConstantOfShape]; +"4151 4225" [id=4151, type=Slice]; +"4152 4227" [id=4152, type=Gather]; +"4153 4228" [id=4153, type=Shape]; +"4154 4229" [id=4154, type=ConstantOfShape]; +"4155 4181" [id=4155, 
type=Slice]; +"4156 4183" [id=4156, type=Gather]; +"4157 4184" [id=4157, type=Shape]; +"4158 4185" [id=4158, type=ConstantOfShape]; +"4159 4137" [id=4159, type=Slice]; +"4160 4139" [id=4160, type=Gather]; +"4161 4140" [id=4161, type=Shape]; +"4162 4141" [id=4162, type=ConstantOfShape]; +"4163 4093" [id=4163, type=Slice]; +"4164 4095" [id=4164, type=Gather]; +"4165 4096" [id=4165, type=Shape]; +"4166 4097" [id=4166, type=ConstantOfShape]; +"4167 4049" [id=4167, type=Slice]; +"4168 4051" [id=4168, type=Gather]; +"4169 4052" [id=4169, type=Shape]; +"4170 4053" [id=4170, type=ConstantOfShape]; +"4171 4005" [id=4171, type=Slice]; +"4172 4007" [id=4172, type=Gather]; +"4173 4008" [id=4173, type=Shape]; +"4174 4009" [id=4174, type=ConstantOfShape]; +"4175 3961" [id=4175, type=Slice]; +"4176 3963" [id=4176, type=Gather]; +"4177 3964" [id=4177, type=Shape]; +"4178 3965" [id=4178, type=ConstantOfShape]; +"4179 3917" [id=4179, type=Slice]; +"4180 3919" [id=4180, type=Gather]; +"4181 3920" [id=4181, type=Shape]; +"4182 3921" [id=4182, type=ConstantOfShape]; +"4183 3873" [id=4183, type=Slice]; +"4184 3875" [id=4184, type=Gather]; +"4185 3876" [id=4185, type=Shape]; +"4186 3877" [id=4186, type=ConstantOfShape]; +"4187 3829" [id=4187, type=Slice]; +"4188 3831" [id=4188, type=Gather]; +"4189 3832" [id=4189, type=Shape]; +"4190 3833" [id=4190, type=ConstantOfShape]; +"4191 3785" [id=4191, type=Slice]; +"4192 3787" [id=4192, type=Gather]; +"4193 3788" [id=4193, type=Shape]; +"4194 3789" [id=4194, type=ConstantOfShape]; +"4195 3741" [id=4195, type=Slice]; +"4196 3743" [id=4196, type=Gather]; +"4197 3744" [id=4197, type=Shape]; +"4198 3745" [id=4198, type=ConstantOfShape]; +"4199 3697" [id=4199, type=Slice]; +"4200 3699" [id=4200, type=Gather]; +"4201 3700" [id=4201, type=Shape]; +"4202 3701" [id=4202, type=ConstantOfShape]; +"4203 3653" [id=4203, type=Slice]; +"4204 3655" [id=4204, type=Gather]; +"4205 3656" [id=4205, type=Shape]; +"4206 3657" [id=4206, type=ConstantOfShape]; +"4207 3609" [id=4207, type=Slice]; +"4208 3611" [id=4208, type=Gather]; +"4209 3612" [id=4209, type=Shape]; +"4210 3613" [id=4210, type=ConstantOfShape]; +"4211 3565" [id=4211, type=Slice]; +"4212 3567" [id=4212, type=Gather]; +"4213 3568" [id=4213, type=Shape]; +"4214 3569" [id=4214, type=ConstantOfShape]; +"4215 3521" [id=4215, type=Slice]; +"4216 3523" [id=4216, type=Gather]; +"4217 3524" [id=4217, type=Shape]; +"4218 3525" [id=4218, type=ConstantOfShape]; +"4219 3477" [id=4219, type=Slice]; +"4220 3479" [id=4220, type=Gather]; +"4221 3480" [id=4221, type=Shape]; +"4222 3481" [id=4222, type=ConstantOfShape]; +"4223 3433" [id=4223, type=Slice]; +"4224 3435" [id=4224, type=Gather]; +"4225 3436" [id=4225, type=Shape]; +"4226 3437" [id=4226, type=ConstantOfShape]; +"4227 3389" [id=4227, type=Slice]; +"4228 3391" [id=4228, type=Gather]; +"4229 3392" [id=4229, type=Shape]; +"4230 3393" [id=4230, type=ConstantOfShape]; +"4231 3345" [id=4231, type=Slice]; +"4232 3347" [id=4232, type=Gather]; +"4233 3348" [id=4233, type=Shape]; +"4234 3349" [id=4234, type=ConstantOfShape]; +"4235 3301" [id=4235, type=Slice]; +"4236 3303" [id=4236, type=Gather]; +"4237 3304" [id=4237, type=Shape]; +"4238 3305" [id=4238, type=ConstantOfShape]; +"4239 3257" [id=4239, type=Slice]; +"4240 3259" [id=4240, type=Gather]; +"4241 3260" [id=4241, type=Shape]; +"4242 3261" [id=4242, type=ConstantOfShape]; +"4243 3213" [id=4243, type=Slice]; +"4244 3215" [id=4244, type=Gather]; +"4245 3216" [id=4245, type=Shape]; +"4246 3217" [id=4246, type=ConstantOfShape]; +"4247 3169" 
[id=4247, type=Slice]; +"4248 3171" [id=4248, type=Gather]; +"4249 3172" [id=4249, type=Shape]; +"4250 3173" [id=4250, type=ConstantOfShape]; +"4251 3125" [id=4251, type=Slice]; +"4252 3127" [id=4252, type=Gather]; +"4253 3128" [id=4253, type=Shape]; +"4254 3129" [id=4254, type=ConstantOfShape]; +"4255 3081" [id=4255, type=Slice]; +"4256 3083" [id=4256, type=Gather]; +"4257 3084" [id=4257, type=Shape]; +"4258 3085" [id=4258, type=ConstantOfShape]; +"4259 3037" [id=4259, type=Slice]; +"4260 3039" [id=4260, type=Gather]; +"4261 3040" [id=4261, type=Shape]; +"4262 3041" [id=4262, type=ConstantOfShape]; +"4263 6519" [id=4263, type=Concat]; +"4264 6532" [id=4264, type=Gather]; +"4265 6820" [id=4265, type=Concat]; +"4266 6847" [id=4266, type=Add]; +"4267 6835" [id=4267, type=Shape]; +"4268 6836" [id=4268, type=Gather]; +"4269 6840" [id=4269, type=Unsqueeze]; +"4270 6832" [id=4270, type=Shape]; +"4271 6833" [id=4271, type=Gather]; +"4272 6839" [id=4272, type=Unsqueeze]; +"4273 6838" [id=4273, type=Unsqueeze]; +"4274 6841" [id=4274, type=Concat]; +"4275 6842" [id=4275, type=Reshape]; +"4276 6848" [id=4276, type=Gather]; +"4277 6849" [id=4277, type=Unsqueeze]; +"4278 6533" [id=4278, type=Cast]; +"4279 6534" [id=4279, type=Gather]; +"4280 nncf_model_input_0" [id=4280, type=nncf_model_input]; +"4281 nncf_model_output_0" [id=4281, type=nncf_model_output]; +"4282 nncf_model_output_1" [id=4282, type=nncf_model_output]; +"4283 nncf_model_output_2" [id=4283, type=nncf_model_output]; +"4284 nncf_model_output_3" [id=4284, type=nncf_model_output]; "0 2396" -> "499 2397" [label="[1]", style=dashed]; "1 2395" -> "499 2397" [label="[1]", style=dashed]; "2 QuantizeLinear_image_1" -> "3 DequantizeLinear_image_1" [label="[3, -1, -1]", style=dashed]; @@ -4754,8 +4792,8 @@ strict digraph { "481 390" -> "484 QuantizeLinear_391_1" [label="[1, 256, -1, -1]", style=solid]; "481 390" -> "784 536" [label="[1, 256, -1, -1]", style=solid]; "481 390" -> "787 533" [label="[1, 256, -1, -1]", style=solid]; -"481 390" -> "1905 2620" [label="[1, 256, -1, -1]", style=solid]; -"481 390" -> "3834 6664" [label="[1, 256, -1, -1]", style=solid]; +"481 390" -> "1929 2620" [label="[1, 256, -1, -1]", style=solid]; +"481 390" -> "3872 6664" [label="[1, 256, -1, -1]", style=solid]; "482 QuantizeLinear_391_2" -> "483 DequantizeLinear_391_2" [label="[1, 256, -1, -1]", style=dashed]; "483 DequantizeLinear_391_2" -> "722 506" [label="[1, 256, -1, -1]", style=solid]; "484 QuantizeLinear_391_1" -> "485 DequantizeLinear_391_1" [label="[1, 256, -1, -1]", style=dashed]; @@ -5372,8 +5410,8 @@ strict digraph { "979 422" -> "980 QuantizeLinear_423_1" [label="[-1, 256, -1, -1]", style=solid]; "979 422" -> "1044 530" [label="[-1, 256, -1, -1]", style=solid]; "979 422" -> "1047 527" [label="[-1, 256, -1, -1]", style=solid]; -"979 422" -> "1871 2603" [label="[-1, 256, -1, -1]", style=solid]; -"979 422" -> "3800 6647" [label="[-1, 256, -1, -1]", style=solid]; +"979 422" -> "1895 2603" [label="[-1, 256, -1, -1]", style=solid]; +"979 422" -> "3838 6647" [label="[-1, 256, -1, -1]", style=solid]; "980 QuantizeLinear_423_1" -> "981 DequantizeLinear_423_1" [label="[-1, 256, -1, -1]", style=dashed]; "981 DequantizeLinear_423_1" -> "982 502" [label="[-1, 256, -1, -1]", style=solid]; "982 502" -> "983 503" [label="[-1, 256, -1, -1]", style=solid]; @@ -5689,8 +5727,8 @@ strict digraph { "1239 454" -> "1240 QuantizeLinear_455_1" [label="[-1, 256, -1, -1]", style=solid]; "1239 454" -> "1304 524" [label="[-1, 256, -1, -1]", style=solid]; "1239 454" -> "1307 521" 
[label="[-1, 256, -1, -1]", style=solid]; -"1239 454" -> "1837 2586" [label="[-1, 256, -1, -1]", style=solid]; -"1239 454" -> "3766 6630" [label="[-1, 256, -1, -1]", style=solid]; +"1239 454" -> "1861 2586" [label="[-1, 256, -1, -1]", style=solid]; +"1239 454" -> "3804 6630" [label="[-1, 256, -1, -1]", style=solid]; "1240 QuantizeLinear_455_1" -> "1241 DequantizeLinear_455_1" [label="[-1, 256, -1, -1]", style=dashed]; "1241 DequantizeLinear_455_1" -> "1242 498" [label="[-1, 256, -1, -1]", style=solid]; "1242 498" -> "1243 499" [label="[-1, 256, -1, -1]", style=solid]; @@ -6002,8 +6040,8 @@ strict digraph { "1499 486" -> "1500 QuantizeLinear_487_1" [label="[-1, 256, -1, -1]", style=solid]; "1499 486" -> "1564 518" [label="[-1, 256, -1, -1]", style=solid]; "1499 486" -> "1567 515" [label="[-1, 256, -1, -1]", style=solid]; -"1499 486" -> "1789 2569" [label="[-1, 256, -1, -1]", style=solid]; -"1499 486" -> "3718 6613" [label="[-1, 256, -1, -1]", style=solid]; +"1499 486" -> "1813 2569" [label="[-1, 256, -1, -1]", style=solid]; +"1499 486" -> "3756 6613" [label="[-1, 256, -1, -1]", style=solid]; "1500 QuantizeLinear_487_1" -> "1501 DequantizeLinear_487_1" [label="[-1, 256, -1, -1]", style=dashed]; "1501 DequantizeLinear_487_1" -> "1502 490" [label="[-1, 256, -1, -1]", style=solid]; "1502 490" -> "1503 491" [label="[-1, 256, -1, -1]", style=solid]; @@ -6303,3247 +6341,3285 @@ strict digraph { "1746 1171" -> "1747 1172" [label="[-1]", style=dashed]; "1747 1172" -> "1748 2479" [label="[-1, 4]", style=solid]; "1748 2479" -> "1749 2490" [label="[-1, 4]", style=solid]; -"1749 2490" -> "1750 2532" [label="[]", style=solid]; -"1749 2490" -> "1752 2525" [label="[]", style=solid]; -"1749 2490" -> "1756 2515" [label="[]", style=solid]; -"1749 2490" -> "1758 2508" [label="[]", style=solid]; -"1749 2490" -> "1779 2495" [label="[]", style=solid]; -"1749 2490" -> "1783 2503" [label="[]", style=solid]; -"1749 2490" -> "1971 2775" [label="[]", style=solid]; -"1750 2532" -> "1751 2534" [label="[]", style=solid]; -"1751 2534" -> "1754 2535" [label="[]", style=solid]; -"1752 2525" -> "1753 2527" [label="[]", style=solid]; -"1753 2527" -> "1754 2535" [label="[]", style=solid]; -"1754 2535" -> "1755 2537" [label="[]", style=solid]; -"1755 2537" -> "1762 2538" [label="[]", style=solid]; -"1756 2515" -> "1757 2517" [label="[]", style=solid]; -"1757 2517" -> "1760 2518" [label="[]", style=solid]; -"1758 2508" -> "1759 2510" [label="[]", style=solid]; -"1759 2510" -> "1760 2518" [label="[]", style=solid]; -"1760 2518" -> "1761 2520" [label="[]", style=solid]; -"1761 2520" -> "1762 2538" [label="[]", style=solid]; -"1762 2538" -> "1763 2539" [label="[]", style=solid]; -"1763 2539" -> "1764 2542" [label="[]", style=solid]; -"1764 2542" -> "1765 2543" [label="[]", style=solid]; -"1765 2543" -> "1766 2544" [label="[]", style=solid]; -"1766 2544" -> "1767 2546" [label="[]", style=solid]; -"1767 2546" -> "1768 2548" [label="[]", style=solid]; -"1768 2548" -> "1769 2549" [label="[]", style=solid]; -"1769 2549" -> "1770 2550" [label="[]", style=solid]; -"1770 2550" -> "1771 2551" [label="[]", style=solid]; -"1771 2551" -> "1772 2553" [label="[]", style=dashed]; -"1772 2553" -> "1773 2555" [label="[]", style=dashed]; -"1772 2553" -> "1800 2641" [label="[]", style=dashed]; -"1772 2553" -> "1820 2623" [label="[]", style=dashed]; -"1772 2553" -> "1826 2572" [label="[]", style=dashed]; -"1772 2553" -> "1848 2669" [label="[]", style=dashed]; -"1772 2553" -> "1860 2589" [label="[]", style=dashed]; -"1772 2553" -> "1882 2697" 
[label="[]", style=dashed]; -"1772 2553" -> "1894 2606" [label="[]", style=dashed]; -"1772 2553" -> "1916 2725" [label="[]", style=dashed]; -"1773 2555" -> "1774 2557" [label="[]", style=dashed]; -"1774 2557" -> "1775 2558" [label="[]", style=solid]; -"1775 2558" -> "1776 2559" [label="[-1, -1]", style=dashed]; -"1776 2559" -> "1777 2560" [label="[-1, -1]", style=dashed]; -"1777 2560" -> "1778 2561" [label="[-1]", style=dashed]; -"1778 2561" -> "1784 2562" [label="[-1]", style=dashed]; -"1779 2495" -> "1780 2500" [label="[]", style=solid]; -"1780 2500" -> "1781 2501" [label="[]", style=solid]; -"1781 2501" -> "1782 2502" [label="[-1]", style=dashed]; -"1782 2502" -> "1783 2503" [label="[]", style=solid]; -"1783 2503" -> "1784 2562" [label="[]", style=solid]; -"1783 2503" -> "1832 2579" [label="[]", style=solid]; -"1783 2503" -> "1866 2596" [label="[]", style=solid]; -"1783 2503" -> "1900 2613" [label="[]", style=solid]; -"1784 2562" -> "1785 2568" [label="[]", style=solid]; -"1784 2562" -> "1786 2564" [label="[]", style=solid]; -"1785 2568" -> "1789 2569" [label="[]", style=solid]; -"1786 2564" -> "1787 2565" [label="[]", style=solid]; -"1787 2565" -> "1788 2566" [label="[]", style=solid]; -"1788 2566" -> "1789 2569" [label="[]", style=dashed]; -"1789 2569" -> "1790 2570" [label="[-1, 256, 7, 7]", style=solid]; -"1790 2570" -> "1791 2658" [label="[-1, 256, 7, 7]", style=solid]; -"1790 2570" -> "1794 2655" [label="[-1, 256, 7, 7]", style=solid]; -"1790 2570" -> "1797 2652" [label="[-1, 256, 7, 7]", style=solid]; -"1790 2570" -> "1811 2632" [label="[-1, 256, 7, 7]", style=solid]; -"1790 2570" -> "1814 2629" [label="[-1, 256, 7, 7]", style=solid]; -"1790 2570" -> "1817 2626" [label="[-1, 256, 7, 7]", style=solid]; -"1790 2570" -> "1825 2667" [label="[-1, 256, 7, 7]", style=solid]; -"1791 2658" -> "1792 2659" [label="[4]", style=dashed]; -"1792 2659" -> "1793 2663" [label="[]", style=dashed]; -"1793 2663" -> "1808 2664" [label="[1]", style=dashed]; -"1794 2655" -> "1795 2656" [label="[4]", style=dashed]; -"1795 2656" -> "1796 2662" [label="[]", style=dashed]; -"1796 2662" -> "1808 2664" [label="[1]", style=dashed]; -"1797 2652" -> "1798 2653" [label="[4]", style=dashed]; -"1798 2653" -> "1799 2661" [label="[]", style=dashed]; -"1799 2661" -> "1808 2664" [label="[1]", style=dashed]; -"1800 2641" -> "1801 2643" [label="[]", style=dashed]; -"1801 2643" -> "1802 2644" [label="[]", style=solid]; -"1802 2644" -> "1803 2645" [label="[-1, -1]", style=dashed]; -"1803 2645" -> "1804 2647" [label="[-1, -1]", style=dashed]; -"1804 2647" -> "1805 2649" [label="[-1, 1, 1, 1]", style=dashed]; -"1804 2647" -> "1809 2665" [label="[-1, 1, 1, 1]", style=dashed]; -"1805 2649" -> "1806 2650" [label="[4]", style=dashed]; -"1806 2650" -> "1807 2660" [label="[]", style=dashed]; -"1807 2660" -> "1808 2664" [label="[1]", style=dashed]; -"1808 2664" -> "1809 2665" [label="[4]", style=dashed]; -"1809 2665" -> "1810 2666" [label="[-1, -1, -1, -1]", style=dashed]; -"1810 2666" -> "1825 2667" [label="[-1, -1, -1, -1]", style=dashed]; -"1811 2632" -> "1812 2633" [label="[4]", style=dashed]; -"1812 2633" -> "1813 2637" [label="[]", style=dashed]; -"1813 2637" -> "1823 2638" [label="[1]", style=dashed]; -"1814 2629" -> "1815 2630" [label="[4]", style=dashed]; -"1815 2630" -> "1816 2636" [label="[]", style=dashed]; -"1816 2636" -> "1823 2638" [label="[1]", style=dashed]; -"1817 2626" -> "1818 2627" [label="[4]", style=dashed]; -"1818 2627" -> "1819 2635" [label="[]", style=dashed]; -"1819 2635" -> "1823 2638" [label="[1]", 
style=dashed]; -"1820 2623" -> "1821 2624" [label="[-1]", style=dashed]; -"1821 2624" -> "1822 2634" [label="[]", style=dashed]; -"1822 2634" -> "1823 2638" [label="[1]", style=dashed]; -"1823 2638" -> "1824 2639" [label="[4]", style=dashed]; -"1824 2639" -> "1825 2667" [label="[-1, -1, -1, -1]", style=solid]; -"1825 2667" -> "1859 2695" [label="[-1, -1, -1, -1]", style=solid]; -"1826 2572" -> "1827 2574" [label="[]", style=dashed]; -"1827 2574" -> "1828 2575" [label="[]", style=solid]; -"1828 2575" -> "1829 2576" [label="[-1, -1]", style=dashed]; -"1829 2576" -> "1830 2577" [label="[-1, -1]", style=dashed]; -"1830 2577" -> "1831 2578" [label="[-1]", style=dashed]; -"1831 2578" -> "1832 2579" [label="[-1]", style=dashed]; -"1832 2579" -> "1833 2585" [label="[]", style=solid]; -"1832 2579" -> "1834 2581" [label="[]", style=solid]; -"1833 2585" -> "1837 2586" [label="[]", style=solid]; -"1834 2581" -> "1835 2582" [label="[]", style=solid]; -"1835 2582" -> "1836 2583" [label="[]", style=solid]; -"1836 2583" -> "1837 2586" [label="[]", style=dashed]; -"1837 2586" -> "1838 2587" [label="[-1, 256, 7, 7]", style=solid]; -"1838 2587" -> "1839 2686" [label="[-1, 256, 7, 7]", style=solid]; -"1838 2587" -> "1842 2683" [label="[-1, 256, 7, 7]", style=solid]; -"1838 2587" -> "1845 2680" [label="[-1, 256, 7, 7]", style=solid]; -"1838 2587" -> "1859 2695" [label="[-1, 256, 7, 7]", style=solid]; -"1839 2686" -> "1840 2687" [label="[4]", style=dashed]; -"1840 2687" -> "1841 2691" [label="[]", style=dashed]; -"1841 2691" -> "1856 2692" [label="[1]", style=dashed]; -"1842 2683" -> "1843 2684" [label="[4]", style=dashed]; -"1843 2684" -> "1844 2690" [label="[]", style=dashed]; -"1844 2690" -> "1856 2692" [label="[1]", style=dashed]; -"1845 2680" -> "1846 2681" [label="[4]", style=dashed]; -"1846 2681" -> "1847 2689" [label="[]", style=dashed]; -"1847 2689" -> "1856 2692" [label="[1]", style=dashed]; -"1848 2669" -> "1849 2671" [label="[]", style=dashed]; -"1849 2671" -> "1850 2672" [label="[]", style=solid]; -"1850 2672" -> "1851 2673" [label="[-1, -1]", style=dashed]; -"1851 2673" -> "1852 2675" [label="[-1, -1]", style=dashed]; -"1852 2675" -> "1853 2677" [label="[-1, 1, 1, 1]", style=dashed]; -"1852 2675" -> "1857 2693" [label="[-1, 1, 1, 1]", style=dashed]; -"1853 2677" -> "1854 2678" [label="[4]", style=dashed]; -"1854 2678" -> "1855 2688" [label="[]", style=dashed]; -"1855 2688" -> "1856 2692" [label="[1]", style=dashed]; -"1856 2692" -> "1857 2693" [label="[4]", style=dashed]; -"1857 2693" -> "1858 2694" [label="[-1, -1, -1, -1]", style=dashed]; -"1858 2694" -> "1859 2695" [label="[-1, -1, -1, -1]", style=dashed]; -"1859 2695" -> "1893 2723" [label="[-1, -1, -1, -1]", style=solid]; -"1860 2589" -> "1861 2591" [label="[]", style=dashed]; -"1861 2591" -> "1862 2592" [label="[]", style=solid]; -"1862 2592" -> "1863 2593" [label="[-1, -1]", style=dashed]; -"1863 2593" -> "1864 2594" [label="[-1, -1]", style=dashed]; -"1864 2594" -> "1865 2595" [label="[-1]", style=dashed]; -"1865 2595" -> "1866 2596" [label="[-1]", style=dashed]; -"1866 2596" -> "1867 2602" [label="[]", style=solid]; -"1866 2596" -> "1868 2598" [label="[]", style=solid]; -"1867 2602" -> "1871 2603" [label="[]", style=solid]; -"1868 2598" -> "1869 2599" [label="[]", style=solid]; -"1869 2599" -> "1870 2600" [label="[]", style=solid]; -"1870 2600" -> "1871 2603" [label="[]", style=dashed]; -"1871 2603" -> "1872 2604" [label="[-1, 256, 7, 7]", style=solid]; -"1872 2604" -> "1873 2714" [label="[-1, 256, 7, 7]", style=solid]; -"1872 2604" -> 
"1876 2711" [label="[-1, 256, 7, 7]", style=solid]; -"1872 2604" -> "1879 2708" [label="[-1, 256, 7, 7]", style=solid]; -"1872 2604" -> "1893 2723" [label="[-1, 256, 7, 7]", style=solid]; -"1873 2714" -> "1874 2715" [label="[4]", style=dashed]; -"1874 2715" -> "1875 2719" [label="[]", style=dashed]; -"1875 2719" -> "1890 2720" [label="[1]", style=dashed]; -"1876 2711" -> "1877 2712" [label="[4]", style=dashed]; -"1877 2712" -> "1878 2718" [label="[]", style=dashed]; -"1878 2718" -> "1890 2720" [label="[1]", style=dashed]; -"1879 2708" -> "1880 2709" [label="[4]", style=dashed]; -"1880 2709" -> "1881 2717" [label="[]", style=dashed]; -"1881 2717" -> "1890 2720" [label="[1]", style=dashed]; -"1882 2697" -> "1883 2699" [label="[]", style=dashed]; -"1883 2699" -> "1884 2700" [label="[]", style=solid]; -"1884 2700" -> "1885 2701" [label="[-1, -1]", style=dashed]; -"1885 2701" -> "1886 2703" [label="[-1, -1]", style=dashed]; -"1886 2703" -> "1887 2705" [label="[-1, 1, 1, 1]", style=dashed]; -"1886 2703" -> "1891 2721" [label="[-1, 1, 1, 1]", style=dashed]; -"1887 2705" -> "1888 2706" [label="[4]", style=dashed]; -"1888 2706" -> "1889 2716" [label="[]", style=dashed]; -"1889 2716" -> "1890 2720" [label="[1]", style=dashed]; -"1890 2720" -> "1891 2721" [label="[4]", style=dashed]; -"1891 2721" -> "1892 2722" [label="[-1, -1, -1, -1]", style=dashed]; -"1892 2722" -> "1893 2723" [label="[-1, -1, -1, -1]", style=dashed]; -"1893 2723" -> "1927 2751" [label="[-1, -1, -1, -1]", style=solid]; -"1894 2606" -> "1895 2608" [label="[]", style=dashed]; -"1895 2608" -> "1896 2609" [label="[]", style=solid]; -"1896 2609" -> "1897 2610" [label="[-1, -1]", style=dashed]; -"1897 2610" -> "1898 2611" [label="[-1, -1]", style=dashed]; -"1898 2611" -> "1899 2612" [label="[-1]", style=dashed]; -"1899 2612" -> "1900 2613" [label="[-1]", style=dashed]; -"1900 2613" -> "1901 2619" [label="[]", style=solid]; -"1900 2613" -> "1902 2615" [label="[]", style=solid]; -"1901 2619" -> "1905 2620" [label="[]", style=solid]; -"1902 2615" -> "1903 2616" [label="[]", style=solid]; -"1903 2616" -> "1904 2617" [label="[]", style=solid]; -"1904 2617" -> "1905 2620" [label="[]", style=dashed]; -"1905 2620" -> "1906 2621" [label="[-1, 256, 7, 7]", style=solid]; -"1906 2621" -> "1907 2742" [label="[-1, 256, 7, 7]", style=solid]; -"1906 2621" -> "1910 2739" [label="[-1, 256, 7, 7]", style=solid]; -"1906 2621" -> "1913 2736" [label="[-1, 256, 7, 7]", style=solid]; -"1906 2621" -> "1927 2751" [label="[-1, 256, 7, 7]", style=solid]; -"1907 2742" -> "1908 2743" [label="[4]", style=dashed]; -"1908 2743" -> "1909 2747" [label="[]", style=dashed]; -"1909 2747" -> "1924 2748" [label="[1]", style=dashed]; -"1910 2739" -> "1911 2740" [label="[4]", style=dashed]; -"1911 2740" -> "1912 2746" [label="[]", style=dashed]; -"1912 2746" -> "1924 2748" [label="[1]", style=dashed]; -"1913 2736" -> "1914 2737" [label="[4]", style=dashed]; -"1914 2737" -> "1915 2745" [label="[]", style=dashed]; -"1915 2745" -> "1924 2748" [label="[1]", style=dashed]; -"1916 2725" -> "1917 2727" [label="[]", style=dashed]; -"1917 2727" -> "1918 2728" [label="[]", style=solid]; -"1918 2728" -> "1919 2729" [label="[-1, -1]", style=dashed]; -"1919 2729" -> "1920 2731" [label="[-1, -1]", style=dashed]; -"1920 2731" -> "1921 2733" [label="[-1, 1, 1, 1]", style=dashed]; -"1920 2731" -> "1925 2749" [label="[-1, 1, 1, 1]", style=dashed]; -"1921 2733" -> "1922 2734" [label="[4]", style=dashed]; -"1922 2734" -> "1923 2744" [label="[]", style=dashed]; -"1923 2744" -> "1924 2748" 
[label="[1]", style=dashed]; -"1924 2748" -> "1925 2749" [label="[4]", style=dashed]; -"1925 2749" -> "1926 2750" [label="[-1, -1, -1, -1]", style=dashed]; -"1926 2750" -> "1927 2751" [label="[-1, -1, -1, -1]", style=dashed]; -"1927 2751" -> "1929 2753" [label="[-1, -1, -1, -1]", style=solid]; -"1927 2751" -> "1933 2759" [label="[-1, -1, -1, -1]", style=solid]; -"1928 2757" -> "1932 2758" [label="[1]", style=dashed]; -"1929 2753" -> "1930 2754" [label="[4]", style=dashed]; -"1930 2754" -> "1931 2756" [label="[]", style=dashed]; -"1931 2756" -> "1932 2758" [label="[1]", style=dashed]; -"1932 2758" -> "1933 2759" [label="[2]", style=dashed]; -"1933 2759" -> "1934 2762_MatMul" [label="[]", style=solid]; -"1934 2762_MatMul" -> "1935 2762_Add" [label="[]", style=solid]; -"1935 2762_Add" -> "1936 2763" [label="[]", style=solid]; -"1936 2763" -> "1937 2766_MatMul" [label="[]", style=solid]; -"1937 2766_MatMul" -> "1938 2766_Add" [label="[]", style=solid]; -"1938 2766_Add" -> "1939 2767" [label="[]", style=solid]; -"1939 2767" -> "1940 2770_MatMul" [label="[]", style=solid]; -"1939 2767" -> "1967 2773_MatMul" [label="[]", style=solid]; -"1940 2770_MatMul" -> "1941 2770_Add" [label="[]", style=solid]; -"1941 2770_Add" -> "1942 2774" [label="[]", style=solid]; -"1942 2774" -> "1943 2950" [label="[]", style=solid]; -"1942 2774" -> "1948 2955" [label="[]", style=solid]; -"1943 2950" -> "1944 2951" [label="[-1]", style=dashed]; -"1944 2951" -> "1945 2992" [label="[]", style=dashed]; -"1944 2951" -> "1963 2984" [label="[]", style=dashed]; -"1945 2992" -> "1947 2993" [label="[1]", style=dashed]; -"1946 2991" -> "1947 2993" [label="[1]", style=dashed]; -"1947 2993" -> "1949 2994" [label="[2]", style=dashed]; -"1948 2955" -> "1949 2994" [label="[-1]", style=solid]; -"1949 2994" -> "1950 2996" [label="[]", style=solid]; -"1949 2994" -> "1959 6486" [label="[]", style=solid]; -"1949 2994" -> "2069 6442" [label="[]", style=solid]; -"1949 2994" -> "2087 6398" [label="[]", style=solid]; -"1949 2994" -> "2105 6354" [label="[]", style=solid]; -"1949 2994" -> "2123 6310" [label="[]", style=solid]; -"1949 2994" -> "2141 6266" [label="[]", style=solid]; -"1949 2994" -> "2159 6222" [label="[]", style=solid]; -"1949 2994" -> "2177 6178" [label="[]", style=solid]; -"1949 2994" -> "2195 6134" [label="[]", style=solid]; -"1949 2994" -> "2213 6090" [label="[]", style=solid]; -"1949 2994" -> "2231 6046" [label="[]", style=solid]; -"1949 2994" -> "2249 6002" [label="[]", style=solid]; -"1949 2994" -> "2267 5958" [label="[]", style=solid]; -"1949 2994" -> "2285 5914" [label="[]", style=solid]; -"1949 2994" -> "2303 5870" [label="[]", style=solid]; -"1949 2994" -> "2321 5826" [label="[]", style=solid]; -"1949 2994" -> "2339 5782" [label="[]", style=solid]; -"1949 2994" -> "2357 5738" [label="[]", style=solid]; -"1949 2994" -> "2375 5694" [label="[]", style=solid]; -"1949 2994" -> "2393 5650" [label="[]", style=solid]; -"1949 2994" -> "2411 5606" [label="[]", style=solid]; -"1949 2994" -> "2429 5562" [label="[]", style=solid]; -"1949 2994" -> "2447 5518" [label="[]", style=solid]; -"1949 2994" -> "2465 5474" [label="[]", style=solid]; -"1949 2994" -> "2483 5430" [label="[]", style=solid]; -"1949 2994" -> "2501 5386" [label="[]", style=solid]; -"1949 2994" -> "2519 5342" [label="[]", style=solid]; -"1949 2994" -> "2537 5298" [label="[]", style=solid]; -"1949 2994" -> "2555 5254" [label="[]", style=solid]; -"1949 2994" -> "2573 5210" [label="[]", style=solid]; -"1949 2994" -> "2591 5166" [label="[]", style=solid]; -"1949 2994" -> 
"2609 5122" [label="[]", style=solid]; -"1949 2994" -> "2627 5078" [label="[]", style=solid]; -"1949 2994" -> "2645 5034" [label="[]", style=solid]; -"1949 2994" -> "2663 4990" [label="[]", style=solid]; -"1949 2994" -> "2681 4946" [label="[]", style=solid]; -"1949 2994" -> "2699 4902" [label="[]", style=solid]; -"1949 2994" -> "2717 4858" [label="[]", style=solid]; -"1949 2994" -> "2735 4814" [label="[]", style=solid]; -"1949 2994" -> "2753 4770" [label="[]", style=solid]; -"1949 2994" -> "2771 4726" [label="[]", style=solid]; -"1949 2994" -> "2789 4682" [label="[]", style=solid]; -"1949 2994" -> "2807 4638" [label="[]", style=solid]; -"1949 2994" -> "2825 4594" [label="[]", style=solid]; -"1949 2994" -> "2843 4550" [label="[]", style=solid]; -"1949 2994" -> "2861 4506" [label="[]", style=solid]; -"1949 2994" -> "2879 4462" [label="[]", style=solid]; -"1949 2994" -> "2897 4418" [label="[]", style=solid]; -"1949 2994" -> "2915 4374" [label="[]", style=solid]; -"1949 2994" -> "2933 4330" [label="[]", style=solid]; -"1949 2994" -> "2951 4286" [label="[]", style=solid]; -"1949 2994" -> "2969 4242" [label="[]", style=solid]; -"1949 2994" -> "2987 4198" [label="[]", style=solid]; -"1949 2994" -> "3005 4154" [label="[]", style=solid]; -"1949 2994" -> "3023 4110" [label="[]", style=solid]; -"1949 2994" -> "3041 4066" [label="[]", style=solid]; -"1949 2994" -> "3059 4022" [label="[]", style=solid]; -"1949 2994" -> "3077 3978" [label="[]", style=solid]; -"1949 2994" -> "3095 3934" [label="[]", style=solid]; -"1949 2994" -> "3113 3890" [label="[]", style=solid]; -"1949 2994" -> "3131 3846" [label="[]", style=solid]; -"1949 2994" -> "3149 3802" [label="[]", style=solid]; -"1949 2994" -> "3167 3758" [label="[]", style=solid]; -"1949 2994" -> "3185 3714" [label="[]", style=solid]; -"1949 2994" -> "3203 3670" [label="[]", style=solid]; -"1949 2994" -> "3221 3626" [label="[]", style=solid]; -"1949 2994" -> "3239 3582" [label="[]", style=solid]; -"1949 2994" -> "3257 3538" [label="[]", style=solid]; -"1949 2994" -> "3275 3494" [label="[]", style=solid]; -"1949 2994" -> "3293 3450" [label="[]", style=solid]; -"1949 2994" -> "3311 3406" [label="[]", style=solid]; -"1949 2994" -> "3329 3362" [label="[]", style=solid]; -"1949 2994" -> "3347 3318" [label="[]", style=solid]; -"1949 2994" -> "3365 3274" [label="[]", style=solid]; -"1949 2994" -> "3383 3230" [label="[]", style=solid]; -"1949 2994" -> "3401 3186" [label="[]", style=solid]; -"1949 2994" -> "3419 3142" [label="[]", style=solid]; -"1949 2994" -> "3437 3098" [label="[]", style=solid]; -"1949 2994" -> "3455 3054" [label="[]", style=solid]; -"1949 2994" -> "3473 3010" [label="[]", style=solid]; -"1950 2996" -> "1951 2997" [label="[]", style=dashed]; -"1951 2997" -> "1952 6478" [label="[]", style=dashed]; -"1951 2997" -> "2062 6434" [label="[]", style=dashed]; -"1951 2997" -> "2080 6390" [label="[]", style=dashed]; -"1951 2997" -> "2098 6346" [label="[]", style=dashed]; -"1951 2997" -> "2116 6302" [label="[]", style=dashed]; -"1951 2997" -> "2134 6258" [label="[]", style=dashed]; -"1951 2997" -> "2152 6214" [label="[]", style=dashed]; -"1951 2997" -> "2170 6170" [label="[]", style=dashed]; -"1951 2997" -> "2188 6126" [label="[]", style=dashed]; -"1951 2997" -> "2206 6082" [label="[]", style=dashed]; -"1951 2997" -> "2224 6038" [label="[]", style=dashed]; -"1951 2997" -> "2242 5994" [label="[]", style=dashed]; -"1951 2997" -> "2260 5950" [label="[]", style=dashed]; -"1951 2997" -> "2278 5906" [label="[]", style=dashed]; -"1951 2997" -> "2296 5862" 
[label="[]", style=dashed]; -"1951 2997" -> "2314 5818" [label="[]", style=dashed]; -"1951 2997" -> "2332 5774" [label="[]", style=dashed]; -"1951 2997" -> "2350 5730" [label="[]", style=dashed]; -"1951 2997" -> "2368 5686" [label="[]", style=dashed]; -"1951 2997" -> "2386 5642" [label="[]", style=dashed]; -"1951 2997" -> "2404 5598" [label="[]", style=dashed]; -"1951 2997" -> "2422 5554" [label="[]", style=dashed]; -"1951 2997" -> "2440 5510" [label="[]", style=dashed]; -"1951 2997" -> "2458 5466" [label="[]", style=dashed]; -"1951 2997" -> "2476 5422" [label="[]", style=dashed]; -"1951 2997" -> "2494 5378" [label="[]", style=dashed]; -"1951 2997" -> "2512 5334" [label="[]", style=dashed]; -"1951 2997" -> "2530 5290" [label="[]", style=dashed]; -"1951 2997" -> "2548 5246" [label="[]", style=dashed]; -"1951 2997" -> "2566 5202" [label="[]", style=dashed]; -"1951 2997" -> "2584 5158" [label="[]", style=dashed]; -"1951 2997" -> "2602 5114" [label="[]", style=dashed]; -"1951 2997" -> "2620 5070" [label="[]", style=dashed]; -"1951 2997" -> "2638 5026" [label="[]", style=dashed]; -"1951 2997" -> "2656 4982" [label="[]", style=dashed]; -"1951 2997" -> "2674 4938" [label="[]", style=dashed]; -"1951 2997" -> "2692 4894" [label="[]", style=dashed]; -"1951 2997" -> "2710 4850" [label="[]", style=dashed]; -"1951 2997" -> "2728 4806" [label="[]", style=dashed]; -"1951 2997" -> "2746 4762" [label="[]", style=dashed]; -"1951 2997" -> "2764 4718" [label="[]", style=dashed]; -"1951 2997" -> "2782 4674" [label="[]", style=dashed]; -"1951 2997" -> "2800 4630" [label="[]", style=dashed]; -"1951 2997" -> "2818 4586" [label="[]", style=dashed]; -"1951 2997" -> "2836 4542" [label="[]", style=dashed]; -"1951 2997" -> "2854 4498" [label="[]", style=dashed]; -"1951 2997" -> "2872 4454" [label="[]", style=dashed]; -"1951 2997" -> "2890 4410" [label="[]", style=dashed]; -"1951 2997" -> "2908 4366" [label="[]", style=dashed]; -"1951 2997" -> "2926 4322" [label="[]", style=dashed]; -"1951 2997" -> "2944 4278" [label="[]", style=dashed]; -"1951 2997" -> "2962 4234" [label="[]", style=dashed]; -"1951 2997" -> "2980 4190" [label="[]", style=dashed]; -"1951 2997" -> "2998 4146" [label="[]", style=dashed]; -"1951 2997" -> "3016 4102" [label="[]", style=dashed]; -"1951 2997" -> "3034 4058" [label="[]", style=dashed]; -"1951 2997" -> "3052 4014" [label="[]", style=dashed]; -"1951 2997" -> "3070 3970" [label="[]", style=dashed]; -"1951 2997" -> "3088 3926" [label="[]", style=dashed]; -"1951 2997" -> "3106 3882" [label="[]", style=dashed]; -"1951 2997" -> "3124 3838" [label="[]", style=dashed]; -"1951 2997" -> "3142 3794" [label="[]", style=dashed]; -"1951 2997" -> "3160 3750" [label="[]", style=dashed]; -"1951 2997" -> "3178 3706" [label="[]", style=dashed]; -"1951 2997" -> "3196 3662" [label="[]", style=dashed]; -"1951 2997" -> "3214 3618" [label="[]", style=dashed]; -"1951 2997" -> "3232 3574" [label="[]", style=dashed]; -"1951 2997" -> "3250 3530" [label="[]", style=dashed]; -"1951 2997" -> "3268 3486" [label="[]", style=dashed]; -"1951 2997" -> "3286 3442" [label="[]", style=dashed]; -"1951 2997" -> "3304 3398" [label="[]", style=dashed]; -"1951 2997" -> "3322 3354" [label="[]", style=dashed]; -"1951 2997" -> "3340 3310" [label="[]", style=dashed]; -"1951 2997" -> "3358 3266" [label="[]", style=dashed]; -"1951 2997" -> "3376 3222" [label="[]", style=dashed]; -"1951 2997" -> "3394 3178" [label="[]", style=dashed]; -"1951 2997" -> "3412 3134" [label="[]", style=dashed]; -"1951 2997" -> "3430 3090" [label="[]", 
style=dashed]; -"1951 2997" -> "3448 3046" [label="[]", style=dashed]; -"1951 2997" -> "3466 3002" [label="[]", style=dashed]; -"1952 6478" -> "1953 6480" [label="[]", style=dashed]; -"1953 6480" -> "1954 6481" [label="[]", style=dashed]; -"1954 6481" -> "1955 6482" [label="[]", style=solid]; -"1955 6482" -> "1956 6483" [label="[-1, -1]", style=dashed]; -"1956 6483" -> "1957 6484" [label="[-1, -1]", style=dashed]; -"1957 6484" -> "1958 6487" [label="[-1]", style=dashed]; -"1957 6484" -> "2056 6495" [label="[-1]", style=dashed]; -"1958 6487" -> "1960 6488" [label="[-1]", style=dashed]; -"1959 6486" -> "1960 6488" [label="[]", style=solid]; -"1960 6488" -> "1961 6497" [label="[]", style=solid]; -"1960 6488" -> "2061 6508" [label="[]", style=solid]; -"1961 6497" -> "1962 6498" [label="[]", style=solid]; -"1962 6498" -> "2058 6501" [label="[]", style=solid]; -"1963 2984" -> "1964 2987" [label="[]", style=dashed]; -"1964 2987" -> "1966 2988" [label="[1]", style=dashed]; -"1965 2986" -> "1966 2988" [label="[1]", style=dashed]; -"1966 2988" -> "2054 2989" [label="[2]", style=dashed]; -"1967 2773_MatMul" -> "1968 2773_Add" [label="[]", style=solid]; -"1968 2773_Add" -> "1969 2776" [label="[]", style=solid]; -"1969 2776" -> "1970 2947" [label="[]", style=solid]; -"1969 2776" -> "1981 2872" [label="[]", style=solid]; -"1969 2776" -> "1996 2848" [label="[]", style=solid]; -"1969 2776" -> "2012 2860" [label="[]", style=solid]; -"1969 2776" -> "2027 2836" [label="[]", style=solid]; -"1970 2947" -> "2042 2948" [label="[-1]", style=dashed]; -"1971 2775" -> "1972 2777" [label="[]", style=solid]; -"1972 2777" -> "1973 2806" [label="[]", style=solid]; -"1972 2777" -> "1975 2799" [label="[]", style=solid]; -"1972 2777" -> "1989 2826" [label="[]", style=solid]; -"1972 2777" -> "2004 2789" [label="[]", style=solid]; -"1972 2777" -> "2006 2782" [label="[]", style=solid]; -"1972 2777" -> "2020 2816" [label="[]", style=solid]; -"1973 2806" -> "1974 2808" [label="[]", style=solid]; -"1974 2808" -> "1977 2809" [label="[]", style=solid]; -"1975 2799" -> "1976 2801" [label="[]", style=solid]; -"1976 2801" -> "1977 2809" [label="[]", style=solid]; -"1977 2809" -> "1978 2811" [label="[]", style=solid]; -"1978 2811" -> "1979 2923" [label="[]", style=solid]; -"1978 2811" -> "1988 2830" [label="[]", style=solid]; -"1978 2811" -> "1994 2900" [label="[]", style=solid]; -"1979 2923" -> "1980 2924" [label="[]", style=solid]; -"1980 2924" -> "1986 2925" [label="[]", style=solid]; -"1981 2872" -> "1982 2877" [label="[]", style=solid]; -"1982 2877" -> "1983 2879" [label="[]", style=solid]; -"1983 2879" -> "1984 2881" [label="[]", style=solid]; -"1984 2881" -> "1985 2918" [label="[]", style=solid]; -"1985 2918" -> "1986 2925" [label="[]", style=solid]; -"1986 2925" -> "1987 2938" [label="[]", style=solid]; -"1986 2925" -> "2035 2930" [label="[]", style=solid]; -"1987 2938" -> "2001 2939" [label="[]", style=solid]; -"1988 2830" -> "1991 2831" [label="[]", style=solid]; -"1989 2826" -> "1990 2828" [label="[]", style=solid]; -"1990 2828" -> "1991 2831" [label="[]", style=solid]; -"1991 2831" -> "1992 2907" [label="[]", style=solid]; -"1992 2907" -> "1993 2908" [label="[]", style=solid]; -"1993 2908" -> "2000 2909" [label="[]", style=solid]; -"1994 2900" -> "1995 2901" [label="[]", style=solid]; -"1995 2901" -> "1999 2902" [label="[]", style=solid]; -"1996 2848" -> "1997 2853" [label="[]", style=solid]; -"1997 2853" -> "1998 2855" [label="[]", style=solid]; -"1998 2855" -> "1999 2902" [label="[]", style=solid]; -"1999 2902" -> "2000 
2909" [label="[]", style=solid]; -"2000 2909" -> "2001 2939" [label="[]", style=solid]; -"2000 2909" -> "2036 2931" [label="[]", style=solid]; -"2001 2939" -> "2002 2941" [label="[]", style=solid]; -"2002 2941" -> "2003 2945" [label="[]", style=solid]; -"2003 2945" -> "2041 2946" [label="[]", style=solid]; -"2004 2789" -> "2005 2791" [label="[]", style=solid]; -"2005 2791" -> "2008 2792" [label="[]", style=solid]; -"2006 2782" -> "2007 2784" [label="[]", style=solid]; -"2007 2784" -> "2008 2792" [label="[]", style=solid]; -"2008 2792" -> "2009 2794" [label="[]", style=solid]; -"2009 2794" -> "2010 2915" [label="[]", style=solid]; -"2009 2794" -> "2019 2820" [label="[]", style=solid]; -"2009 2794" -> "2025 2886" [label="[]", style=solid]; -"2010 2915" -> "2011 2916" [label="[]", style=solid]; -"2011 2916" -> "2017 2917" [label="[]", style=solid]; -"2012 2860" -> "2013 2865" [label="[]", style=solid]; -"2013 2865" -> "2014 2867" [label="[]", style=solid]; -"2014 2867" -> "2015 2880" [label="[]", style=solid]; -"2015 2880" -> "2016 2910" [label="[]", style=solid]; -"2016 2910" -> "2017 2917" [label="[]", style=solid]; -"2017 2917" -> "2018 2933" [label="[]", style=solid]; -"2017 2917" -> "2038 2927" [label="[]", style=solid]; -"2018 2933" -> "2032 2934" [label="[]", style=solid]; -"2019 2820" -> "2022 2821" [label="[]", style=solid]; -"2020 2816" -> "2021 2818" [label="[]", style=solid]; -"2021 2818" -> "2022 2821" [label="[]", style=solid]; -"2022 2821" -> "2023 2893" [label="[]", style=solid]; -"2023 2893" -> "2024 2894" [label="[]", style=solid]; -"2024 2894" -> "2031 2895" [label="[]", style=solid]; -"2025 2886" -> "2026 2887" [label="[]", style=solid]; -"2026 2887" -> "2030 2888" [label="[]", style=solid]; -"2027 2836" -> "2028 2841" [label="[]", style=solid]; -"2028 2841" -> "2029 2843" [label="[]", style=solid]; -"2029 2843" -> "2030 2888" [label="[]", style=solid]; -"2030 2888" -> "2031 2895" [label="[]", style=solid]; -"2031 2895" -> "2032 2934" [label="[]", style=solid]; -"2031 2895" -> "2039 2928" [label="[]", style=solid]; -"2032 2934" -> "2033 2936" [label="[]", style=solid]; -"2033 2936" -> "2034 2944" [label="[]", style=solid]; -"2034 2944" -> "2041 2946" [label="[]", style=solid]; -"2035 2930" -> "2036 2931" [label="[]", style=solid]; -"2036 2931" -> "2037 2943" [label="[]", style=solid]; -"2037 2943" -> "2041 2946" [label="[]", style=solid]; -"2038 2927" -> "2039 2928" [label="[]", style=solid]; -"2039 2928" -> "2040 2942" [label="[]", style=solid]; -"2040 2942" -> "2041 2946" [label="[]", style=solid]; -"2041 2946" -> "2042 2948" [label="[]", style=solid]; -"2042 2948" -> "2043 2953" [label="[]", style=solid]; -"2043 2953" -> "2044 2971" [label="[-1, 4]", style=solid]; -"2043 2953" -> "2048 2960" [label="[-1, 4]", style=solid]; -"2044 2971" -> "2045 2976" [label="[-1, 4]", style=solid]; -"2045 2976" -> "2046 2977" [label="[-1, 2]", style=solid]; -"2046 2977" -> "2047 2979" [label="[-1, 2]", style=solid]; -"2047 2979" -> "2052 2980" [label="[-1, 2, 1]", style=solid]; -"2048 2960" -> "2049 2965" [label="[-1, 4]", style=solid]; -"2049 2965" -> "2050 2966" [label="[-1, 2]", style=solid]; -"2050 2966" -> "2051 2978" [label="[-1, 2]", style=solid]; -"2051 2978" -> "2052 2980" [label="[-1, 2, 1]", style=solid]; -"2052 2980" -> "2053 2982" [label="[-1, 2, 2]", style=solid]; -"2053 2982" -> "2054 2989" [label="[-1, 4]", style=solid]; -"2054 2989" -> "2055 6493" [label="[]", style=solid]; -"2054 2989" -> "2073 6449" [label="[]", style=solid]; -"2054 2989" -> "2091 6405" [label="[]", 
style=solid]; -"2054 2989" -> "2109 6361" [label="[]", style=solid]; -"2054 2989" -> "2127 6317" [label="[]", style=solid]; -"2054 2989" -> "2145 6273" [label="[]", style=solid]; -"2054 2989" -> "2163 6229" [label="[]", style=solid]; -"2054 2989" -> "2181 6185" [label="[]", style=solid]; -"2054 2989" -> "2199 6141" [label="[]", style=solid]; -"2054 2989" -> "2217 6097" [label="[]", style=solid]; -"2054 2989" -> "2235 6053" [label="[]", style=solid]; -"2054 2989" -> "2253 6009" [label="[]", style=solid]; -"2054 2989" -> "2271 5965" [label="[]", style=solid]; -"2054 2989" -> "2289 5921" [label="[]", style=solid]; -"2054 2989" -> "2307 5877" [label="[]", style=solid]; -"2054 2989" -> "2325 5833" [label="[]", style=solid]; -"2054 2989" -> "2343 5789" [label="[]", style=solid]; -"2054 2989" -> "2361 5745" [label="[]", style=solid]; -"2054 2989" -> "2379 5701" [label="[]", style=solid]; -"2054 2989" -> "2397 5657" [label="[]", style=solid]; -"2054 2989" -> "2415 5613" [label="[]", style=solid]; -"2054 2989" -> "2433 5569" [label="[]", style=solid]; -"2054 2989" -> "2451 5525" [label="[]", style=solid]; -"2054 2989" -> "2469 5481" [label="[]", style=solid]; -"2054 2989" -> "2487 5437" [label="[]", style=solid]; -"2054 2989" -> "2505 5393" [label="[]", style=solid]; -"2054 2989" -> "2523 5349" [label="[]", style=solid]; -"2054 2989" -> "2541 5305" [label="[]", style=solid]; -"2054 2989" -> "2559 5261" [label="[]", style=solid]; -"2054 2989" -> "2577 5217" [label="[]", style=solid]; -"2054 2989" -> "2595 5173" [label="[]", style=solid]; -"2054 2989" -> "2613 5129" [label="[]", style=solid]; -"2054 2989" -> "2631 5085" [label="[]", style=solid]; -"2054 2989" -> "2649 5041" [label="[]", style=solid]; -"2054 2989" -> "2667 4997" [label="[]", style=solid]; -"2054 2989" -> "2685 4953" [label="[]", style=solid]; -"2054 2989" -> "2703 4909" [label="[]", style=solid]; -"2054 2989" -> "2721 4865" [label="[]", style=solid]; -"2054 2989" -> "2739 4821" [label="[]", style=solid]; -"2054 2989" -> "2757 4777" [label="[]", style=solid]; -"2054 2989" -> "2775 4733" [label="[]", style=solid]; -"2054 2989" -> "2793 4689" [label="[]", style=solid]; -"2054 2989" -> "2811 4645" [label="[]", style=solid]; -"2054 2989" -> "2829 4601" [label="[]", style=solid]; -"2054 2989" -> "2847 4557" [label="[]", style=solid]; -"2054 2989" -> "2865 4513" [label="[]", style=solid]; -"2054 2989" -> "2883 4469" [label="[]", style=solid]; -"2054 2989" -> "2901 4425" [label="[]", style=solid]; -"2054 2989" -> "2919 4381" [label="[]", style=solid]; -"2054 2989" -> "2937 4337" [label="[]", style=solid]; -"2054 2989" -> "2955 4293" [label="[]", style=solid]; -"2054 2989" -> "2973 4249" [label="[]", style=solid]; -"2054 2989" -> "2991 4205" [label="[]", style=solid]; -"2054 2989" -> "3009 4161" [label="[]", style=solid]; -"2054 2989" -> "3027 4117" [label="[]", style=solid]; -"2054 2989" -> "3045 4073" [label="[]", style=solid]; -"2054 2989" -> "3063 4029" [label="[]", style=solid]; -"2054 2989" -> "3081 3985" [label="[]", style=solid]; -"2054 2989" -> "3099 3941" [label="[]", style=solid]; -"2054 2989" -> "3117 3897" [label="[]", style=solid]; -"2054 2989" -> "3135 3853" [label="[]", style=solid]; -"2054 2989" -> "3153 3809" [label="[]", style=solid]; -"2054 2989" -> "3171 3765" [label="[]", style=solid]; -"2054 2989" -> "3189 3721" [label="[]", style=solid]; -"2054 2989" -> "3207 3677" [label="[]", style=solid]; -"2054 2989" -> "3225 3633" [label="[]", style=solid]; -"2054 2989" -> "3243 3589" [label="[]", style=solid]; -"2054 2989" -> 
"3261 3545" [label="[]", style=solid]; -"2054 2989" -> "3279 3501" [label="[]", style=solid]; -"2054 2989" -> "3297 3457" [label="[]", style=solid]; -"2054 2989" -> "3315 3413" [label="[]", style=solid]; -"2054 2989" -> "3333 3369" [label="[]", style=solid]; -"2054 2989" -> "3351 3325" [label="[]", style=solid]; -"2054 2989" -> "3369 3281" [label="[]", style=solid]; -"2054 2989" -> "3387 3237" [label="[]", style=solid]; -"2054 2989" -> "3405 3193" [label="[]", style=solid]; -"2054 2989" -> "3423 3149" [label="[]", style=solid]; -"2054 2989" -> "3441 3105" [label="[]", style=solid]; -"2054 2989" -> "3459 3061" [label="[]", style=solid]; -"2054 2989" -> "3477 3017" [label="[]", style=solid]; -"2055 6493" -> "2056 6495" [label="[]", style=solid]; -"2056 6495" -> "2057 6496" [label="[]", style=solid]; -"2056 6495" -> "3494 6506" [label="[]", style=solid]; -"2057 6496" -> "2058 6501" [label="[]", style=solid]; -"2058 6501" -> "2059 6503" [label="[-1, 3]", style=dashed]; -"2059 6503" -> "2060 6504" [label="[-1, 1]", style=dashed]; -"2060 6504" -> "2061 6508" [label="[-1]", style=dashed]; -"2060 6504" -> "3493 6505" [label="[-1]", style=dashed]; -"2061 6508" -> "3484 6520" [label="[]", style=solid]; -"2062 6434" -> "2063 6436" [label="[]", style=dashed]; -"2063 6436" -> "2064 6437" [label="[]", style=dashed]; -"2064 6437" -> "2065 6438" [label="[]", style=solid]; -"2065 6438" -> "2066 6439" [label="[-1, -1]", style=dashed]; -"2066 6439" -> "2067 6440" [label="[-1, -1]", style=dashed]; -"2067 6440" -> "2068 6443" [label="[-1]", style=dashed]; -"2067 6440" -> "2074 6451" [label="[-1]", style=dashed]; -"2068 6443" -> "2070 6444" [label="[-1]", style=dashed]; -"2069 6442" -> "2070 6444" [label="[]", style=solid]; -"2070 6444" -> "2071 6453" [label="[]", style=solid]; -"2070 6444" -> "2079 6464" [label="[]", style=solid]; -"2071 6453" -> "2072 6454" [label="[]", style=solid]; -"2072 6454" -> "2076 6457" [label="[]", style=solid]; -"2073 6449" -> "2074 6451" [label="[]", style=solid]; -"2074 6451" -> "2075 6452" [label="[]", style=solid]; -"2074 6451" -> "3496 6462" [label="[]", style=solid]; -"2075 6452" -> "2076 6457" [label="[]", style=solid]; -"2076 6457" -> "2077 6459" [label="[-1, 3]", style=dashed]; -"2077 6459" -> "2078 6460" [label="[-1, 1]", style=dashed]; -"2078 6460" -> "2079 6464" [label="[-1]", style=dashed]; -"2078 6460" -> "3495 6461" [label="[-1]", style=dashed]; -"2079 6464" -> "3484 6520" [label="[]", style=solid]; -"2080 6390" -> "2081 6392" [label="[]", style=dashed]; -"2081 6392" -> "2082 6393" [label="[]", style=dashed]; -"2082 6393" -> "2083 6394" [label="[]", style=solid]; -"2083 6394" -> "2084 6395" [label="[-1, -1]", style=dashed]; -"2084 6395" -> "2085 6396" [label="[-1, -1]", style=dashed]; -"2085 6396" -> "2086 6399" [label="[-1]", style=dashed]; -"2085 6396" -> "2092 6407" [label="[-1]", style=dashed]; -"2086 6399" -> "2088 6400" [label="[-1]", style=dashed]; -"2087 6398" -> "2088 6400" [label="[]", style=solid]; -"2088 6400" -> "2089 6409" [label="[]", style=solid]; -"2088 6400" -> "2097 6420" [label="[]", style=solid]; -"2089 6409" -> "2090 6410" [label="[]", style=solid]; -"2090 6410" -> "2094 6413" [label="[]", style=solid]; -"2091 6405" -> "2092 6407" [label="[]", style=solid]; -"2092 6407" -> "2093 6408" [label="[]", style=solid]; -"2092 6407" -> "3498 6418" [label="[]", style=solid]; -"2093 6408" -> "2094 6413" [label="[]", style=solid]; -"2094 6413" -> "2095 6415" [label="[-1, 3]", style=dashed]; -"2095 6415" -> "2096 6416" [label="[-1, 1]", style=dashed]; -"2096 
6416" -> "2097 6420" [label="[-1]", style=dashed]; -"2096 6416" -> "3497 6417" [label="[-1]", style=dashed]; -"2097 6420" -> "3484 6520" [label="[]", style=solid]; -"2098 6346" -> "2099 6348" [label="[]", style=dashed]; -"2099 6348" -> "2100 6349" [label="[]", style=dashed]; -"2100 6349" -> "2101 6350" [label="[]", style=solid]; -"2101 6350" -> "2102 6351" [label="[-1, -1]", style=dashed]; -"2102 6351" -> "2103 6352" [label="[-1, -1]", style=dashed]; -"2103 6352" -> "2104 6355" [label="[-1]", style=dashed]; -"2103 6352" -> "2110 6363" [label="[-1]", style=dashed]; -"2104 6355" -> "2106 6356" [label="[-1]", style=dashed]; -"2105 6354" -> "2106 6356" [label="[]", style=solid]; -"2106 6356" -> "2107 6365" [label="[]", style=solid]; -"2106 6356" -> "2115 6376" [label="[]", style=solid]; -"2107 6365" -> "2108 6366" [label="[]", style=solid]; -"2108 6366" -> "2112 6369" [label="[]", style=solid]; -"2109 6361" -> "2110 6363" [label="[]", style=solid]; -"2110 6363" -> "2111 6364" [label="[]", style=solid]; -"2110 6363" -> "3500 6374" [label="[]", style=solid]; -"2111 6364" -> "2112 6369" [label="[]", style=solid]; -"2112 6369" -> "2113 6371" [label="[-1, 3]", style=dashed]; -"2113 6371" -> "2114 6372" [label="[-1, 1]", style=dashed]; -"2114 6372" -> "2115 6376" [label="[-1]", style=dashed]; -"2114 6372" -> "3499 6373" [label="[-1]", style=dashed]; -"2115 6376" -> "3484 6520" [label="[]", style=solid]; -"2116 6302" -> "2117 6304" [label="[]", style=dashed]; -"2117 6304" -> "2118 6305" [label="[]", style=dashed]; -"2118 6305" -> "2119 6306" [label="[]", style=solid]; -"2119 6306" -> "2120 6307" [label="[-1, -1]", style=dashed]; -"2120 6307" -> "2121 6308" [label="[-1, -1]", style=dashed]; -"2121 6308" -> "2122 6311" [label="[-1]", style=dashed]; -"2121 6308" -> "2128 6319" [label="[-1]", style=dashed]; -"2122 6311" -> "2124 6312" [label="[-1]", style=dashed]; -"2123 6310" -> "2124 6312" [label="[]", style=solid]; -"2124 6312" -> "2125 6321" [label="[]", style=solid]; -"2124 6312" -> "2133 6332" [label="[]", style=solid]; -"2125 6321" -> "2126 6322" [label="[]", style=solid]; -"2126 6322" -> "2130 6325" [label="[]", style=solid]; -"2127 6317" -> "2128 6319" [label="[]", style=solid]; -"2128 6319" -> "2129 6320" [label="[]", style=solid]; -"2128 6319" -> "3502 6330" [label="[]", style=solid]; -"2129 6320" -> "2130 6325" [label="[]", style=solid]; -"2130 6325" -> "2131 6327" [label="[-1, 3]", style=dashed]; -"2131 6327" -> "2132 6328" [label="[-1, 1]", style=dashed]; -"2132 6328" -> "2133 6332" [label="[-1]", style=dashed]; -"2132 6328" -> "3501 6329" [label="[-1]", style=dashed]; -"2133 6332" -> "3484 6520" [label="[]", style=solid]; -"2134 6258" -> "2135 6260" [label="[]", style=dashed]; -"2135 6260" -> "2136 6261" [label="[]", style=dashed]; -"2136 6261" -> "2137 6262" [label="[]", style=solid]; -"2137 6262" -> "2138 6263" [label="[-1, -1]", style=dashed]; -"2138 6263" -> "2139 6264" [label="[-1, -1]", style=dashed]; -"2139 6264" -> "2140 6267" [label="[-1]", style=dashed]; -"2139 6264" -> "2146 6275" [label="[-1]", style=dashed]; -"2140 6267" -> "2142 6268" [label="[-1]", style=dashed]; -"2141 6266" -> "2142 6268" [label="[]", style=solid]; -"2142 6268" -> "2143 6277" [label="[]", style=solid]; -"2142 6268" -> "2151 6288" [label="[]", style=solid]; -"2143 6277" -> "2144 6278" [label="[]", style=solid]; -"2144 6278" -> "2148 6281" [label="[]", style=solid]; -"2145 6273" -> "2146 6275" [label="[]", style=solid]; -"2146 6275" -> "2147 6276" [label="[]", style=solid]; -"2146 6275" -> "3504 6286" 
[label="[]", style=solid]; -"2147 6276" -> "2148 6281" [label="[]", style=solid]; -"2148 6281" -> "2149 6283" [label="[-1, 3]", style=dashed]; -"2149 6283" -> "2150 6284" [label="[-1, 1]", style=dashed]; -"2150 6284" -> "2151 6288" [label="[-1]", style=dashed]; -"2150 6284" -> "3503 6285" [label="[-1]", style=dashed]; -"2151 6288" -> "3484 6520" [label="[]", style=solid]; -"2152 6214" -> "2153 6216" [label="[]", style=dashed]; -"2153 6216" -> "2154 6217" [label="[]", style=dashed]; -"2154 6217" -> "2155 6218" [label="[]", style=solid]; -"2155 6218" -> "2156 6219" [label="[-1, -1]", style=dashed]; -"2156 6219" -> "2157 6220" [label="[-1, -1]", style=dashed]; -"2157 6220" -> "2158 6223" [label="[-1]", style=dashed]; -"2157 6220" -> "2164 6231" [label="[-1]", style=dashed]; -"2158 6223" -> "2160 6224" [label="[-1]", style=dashed]; -"2159 6222" -> "2160 6224" [label="[]", style=solid]; -"2160 6224" -> "2161 6233" [label="[]", style=solid]; -"2160 6224" -> "2169 6244" [label="[]", style=solid]; -"2161 6233" -> "2162 6234" [label="[]", style=solid]; -"2162 6234" -> "2166 6237" [label="[]", style=solid]; -"2163 6229" -> "2164 6231" [label="[]", style=solid]; -"2164 6231" -> "2165 6232" [label="[]", style=solid]; -"2164 6231" -> "3506 6242" [label="[]", style=solid]; -"2165 6232" -> "2166 6237" [label="[]", style=solid]; -"2166 6237" -> "2167 6239" [label="[-1, 3]", style=dashed]; -"2167 6239" -> "2168 6240" [label="[-1, 1]", style=dashed]; -"2168 6240" -> "2169 6244" [label="[-1]", style=dashed]; -"2168 6240" -> "3505 6241" [label="[-1]", style=dashed]; -"2169 6244" -> "3484 6520" [label="[]", style=solid]; -"2170 6170" -> "2171 6172" [label="[]", style=dashed]; -"2171 6172" -> "2172 6173" [label="[]", style=dashed]; -"2172 6173" -> "2173 6174" [label="[]", style=solid]; -"2173 6174" -> "2174 6175" [label="[-1, -1]", style=dashed]; -"2174 6175" -> "2175 6176" [label="[-1, -1]", style=dashed]; -"2175 6176" -> "2176 6179" [label="[-1]", style=dashed]; -"2175 6176" -> "2182 6187" [label="[-1]", style=dashed]; -"2176 6179" -> "2178 6180" [label="[-1]", style=dashed]; -"2177 6178" -> "2178 6180" [label="[]", style=solid]; -"2178 6180" -> "2179 6189" [label="[]", style=solid]; -"2178 6180" -> "2187 6200" [label="[]", style=solid]; -"2179 6189" -> "2180 6190" [label="[]", style=solid]; -"2180 6190" -> "2184 6193" [label="[]", style=solid]; -"2181 6185" -> "2182 6187" [label="[]", style=solid]; -"2182 6187" -> "2183 6188" [label="[]", style=solid]; -"2182 6187" -> "3508 6198" [label="[]", style=solid]; -"2183 6188" -> "2184 6193" [label="[]", style=solid]; -"2184 6193" -> "2185 6195" [label="[-1, 3]", style=dashed]; -"2185 6195" -> "2186 6196" [label="[-1, 1]", style=dashed]; -"2186 6196" -> "2187 6200" [label="[-1]", style=dashed]; -"2186 6196" -> "3507 6197" [label="[-1]", style=dashed]; -"2187 6200" -> "3484 6520" [label="[]", style=solid]; -"2188 6126" -> "2189 6128" [label="[]", style=dashed]; -"2189 6128" -> "2190 6129" [label="[]", style=dashed]; -"2190 6129" -> "2191 6130" [label="[]", style=solid]; -"2191 6130" -> "2192 6131" [label="[-1, -1]", style=dashed]; -"2192 6131" -> "2193 6132" [label="[-1, -1]", style=dashed]; -"2193 6132" -> "2194 6135" [label="[-1]", style=dashed]; -"2193 6132" -> "2200 6143" [label="[-1]", style=dashed]; -"2194 6135" -> "2196 6136" [label="[-1]", style=dashed]; -"2195 6134" -> "2196 6136" [label="[]", style=solid]; -"2196 6136" -> "2197 6145" [label="[]", style=solid]; -"2196 6136" -> "2205 6156" [label="[]", style=solid]; -"2197 6145" -> "2198 6146" [label="[]", 
style=solid]; -"2198 6146" -> "2202 6149" [label="[]", style=solid]; -"2199 6141" -> "2200 6143" [label="[]", style=solid]; -"2200 6143" -> "2201 6144" [label="[]", style=solid]; -"2200 6143" -> "3510 6154" [label="[]", style=solid]; -"2201 6144" -> "2202 6149" [label="[]", style=solid]; -"2202 6149" -> "2203 6151" [label="[-1, 3]", style=dashed]; -"2203 6151" -> "2204 6152" [label="[-1, 1]", style=dashed]; -"2204 6152" -> "2205 6156" [label="[-1]", style=dashed]; -"2204 6152" -> "3509 6153" [label="[-1]", style=dashed]; -"2205 6156" -> "3484 6520" [label="[]", style=solid]; -"2206 6082" -> "2207 6084" [label="[]", style=dashed]; -"2207 6084" -> "2208 6085" [label="[]", style=dashed]; -"2208 6085" -> "2209 6086" [label="[]", style=solid]; -"2209 6086" -> "2210 6087" [label="[-1, -1]", style=dashed]; -"2210 6087" -> "2211 6088" [label="[-1, -1]", style=dashed]; -"2211 6088" -> "2212 6091" [label="[-1]", style=dashed]; -"2211 6088" -> "2218 6099" [label="[-1]", style=dashed]; -"2212 6091" -> "2214 6092" [label="[-1]", style=dashed]; -"2213 6090" -> "2214 6092" [label="[]", style=solid]; -"2214 6092" -> "2215 6101" [label="[]", style=solid]; -"2214 6092" -> "2223 6112" [label="[]", style=solid]; -"2215 6101" -> "2216 6102" [label="[]", style=solid]; -"2216 6102" -> "2220 6105" [label="[]", style=solid]; -"2217 6097" -> "2218 6099" [label="[]", style=solid]; -"2218 6099" -> "2219 6100" [label="[]", style=solid]; -"2218 6099" -> "3512 6110" [label="[]", style=solid]; -"2219 6100" -> "2220 6105" [label="[]", style=solid]; -"2220 6105" -> "2221 6107" [label="[-1, 3]", style=dashed]; -"2221 6107" -> "2222 6108" [label="[-1, 1]", style=dashed]; -"2222 6108" -> "2223 6112" [label="[-1]", style=dashed]; -"2222 6108" -> "3511 6109" [label="[-1]", style=dashed]; -"2223 6112" -> "3484 6520" [label="[]", style=solid]; -"2224 6038" -> "2225 6040" [label="[]", style=dashed]; -"2225 6040" -> "2226 6041" [label="[]", style=dashed]; -"2226 6041" -> "2227 6042" [label="[]", style=solid]; -"2227 6042" -> "2228 6043" [label="[-1, -1]", style=dashed]; -"2228 6043" -> "2229 6044" [label="[-1, -1]", style=dashed]; -"2229 6044" -> "2230 6047" [label="[-1]", style=dashed]; -"2229 6044" -> "2236 6055" [label="[-1]", style=dashed]; -"2230 6047" -> "2232 6048" [label="[-1]", style=dashed]; -"2231 6046" -> "2232 6048" [label="[]", style=solid]; -"2232 6048" -> "2233 6057" [label="[]", style=solid]; -"2232 6048" -> "2241 6068" [label="[]", style=solid]; -"2233 6057" -> "2234 6058" [label="[]", style=solid]; -"2234 6058" -> "2238 6061" [label="[]", style=solid]; -"2235 6053" -> "2236 6055" [label="[]", style=solid]; -"2236 6055" -> "2237 6056" [label="[]", style=solid]; -"2236 6055" -> "3514 6066" [label="[]", style=solid]; -"2237 6056" -> "2238 6061" [label="[]", style=solid]; -"2238 6061" -> "2239 6063" [label="[-1, 3]", style=dashed]; -"2239 6063" -> "2240 6064" [label="[-1, 1]", style=dashed]; -"2240 6064" -> "2241 6068" [label="[-1]", style=dashed]; -"2240 6064" -> "3513 6065" [label="[-1]", style=dashed]; -"2241 6068" -> "3484 6520" [label="[]", style=solid]; -"2242 5994" -> "2243 5996" [label="[]", style=dashed]; -"2243 5996" -> "2244 5997" [label="[]", style=dashed]; -"2244 5997" -> "2245 5998" [label="[]", style=solid]; -"2245 5998" -> "2246 5999" [label="[-1, -1]", style=dashed]; -"2246 5999" -> "2247 6000" [label="[-1, -1]", style=dashed]; -"2247 6000" -> "2248 6003" [label="[-1]", style=dashed]; -"2247 6000" -> "2254 6011" [label="[-1]", style=dashed]; -"2248 6003" -> "2250 6004" [label="[-1]", style=dashed]; 
-"2249 6002" -> "2250 6004" [label="[]", style=solid]; -"2250 6004" -> "2251 6013" [label="[]", style=solid]; -"2250 6004" -> "2259 6024" [label="[]", style=solid]; -"2251 6013" -> "2252 6014" [label="[]", style=solid]; -"2252 6014" -> "2256 6017" [label="[]", style=solid]; -"2253 6009" -> "2254 6011" [label="[]", style=solid]; -"2254 6011" -> "2255 6012" [label="[]", style=solid]; -"2254 6011" -> "3516 6022" [label="[]", style=solid]; -"2255 6012" -> "2256 6017" [label="[]", style=solid]; -"2256 6017" -> "2257 6019" [label="[-1, 3]", style=dashed]; -"2257 6019" -> "2258 6020" [label="[-1, 1]", style=dashed]; -"2258 6020" -> "2259 6024" [label="[-1]", style=dashed]; -"2258 6020" -> "3515 6021" [label="[-1]", style=dashed]; -"2259 6024" -> "3484 6520" [label="[]", style=solid]; -"2260 5950" -> "2261 5952" [label="[]", style=dashed]; -"2261 5952" -> "2262 5953" [label="[]", style=dashed]; -"2262 5953" -> "2263 5954" [label="[]", style=solid]; -"2263 5954" -> "2264 5955" [label="[-1, -1]", style=dashed]; -"2264 5955" -> "2265 5956" [label="[-1, -1]", style=dashed]; -"2265 5956" -> "2266 5959" [label="[-1]", style=dashed]; -"2265 5956" -> "2272 5967" [label="[-1]", style=dashed]; -"2266 5959" -> "2268 5960" [label="[-1]", style=dashed]; -"2267 5958" -> "2268 5960" [label="[]", style=solid]; -"2268 5960" -> "2269 5969" [label="[]", style=solid]; -"2268 5960" -> "2277 5980" [label="[]", style=solid]; -"2269 5969" -> "2270 5970" [label="[]", style=solid]; -"2270 5970" -> "2274 5973" [label="[]", style=solid]; -"2271 5965" -> "2272 5967" [label="[]", style=solid]; -"2272 5967" -> "2273 5968" [label="[]", style=solid]; -"2272 5967" -> "3518 5978" [label="[]", style=solid]; -"2273 5968" -> "2274 5973" [label="[]", style=solid]; -"2274 5973" -> "2275 5975" [label="[-1, 3]", style=dashed]; -"2275 5975" -> "2276 5976" [label="[-1, 1]", style=dashed]; -"2276 5976" -> "2277 5980" [label="[-1]", style=dashed]; -"2276 5976" -> "3517 5977" [label="[-1]", style=dashed]; -"2277 5980" -> "3484 6520" [label="[]", style=solid]; -"2278 5906" -> "2279 5908" [label="[]", style=dashed]; -"2279 5908" -> "2280 5909" [label="[]", style=dashed]; -"2280 5909" -> "2281 5910" [label="[]", style=solid]; -"2281 5910" -> "2282 5911" [label="[-1, -1]", style=dashed]; -"2282 5911" -> "2283 5912" [label="[-1, -1]", style=dashed]; -"2283 5912" -> "2284 5915" [label="[-1]", style=dashed]; -"2283 5912" -> "2290 5923" [label="[-1]", style=dashed]; -"2284 5915" -> "2286 5916" [label="[-1]", style=dashed]; -"2285 5914" -> "2286 5916" [label="[]", style=solid]; -"2286 5916" -> "2287 5925" [label="[]", style=solid]; -"2286 5916" -> "2295 5936" [label="[]", style=solid]; -"2287 5925" -> "2288 5926" [label="[]", style=solid]; -"2288 5926" -> "2292 5929" [label="[]", style=solid]; -"2289 5921" -> "2290 5923" [label="[]", style=solid]; -"2290 5923" -> "2291 5924" [label="[]", style=solid]; -"2290 5923" -> "3520 5934" [label="[]", style=solid]; -"2291 5924" -> "2292 5929" [label="[]", style=solid]; -"2292 5929" -> "2293 5931" [label="[-1, 3]", style=dashed]; -"2293 5931" -> "2294 5932" [label="[-1, 1]", style=dashed]; -"2294 5932" -> "2295 5936" [label="[-1]", style=dashed]; -"2294 5932" -> "3519 5933" [label="[-1]", style=dashed]; -"2295 5936" -> "3484 6520" [label="[]", style=solid]; -"2296 5862" -> "2297 5864" [label="[]", style=dashed]; -"2297 5864" -> "2298 5865" [label="[]", style=dashed]; -"2298 5865" -> "2299 5866" [label="[]", style=solid]; -"2299 5866" -> "2300 5867" [label="[-1, -1]", style=dashed]; -"2300 5867" -> "2301 5868" 
[label="[-1, -1]", style=dashed]; -"2301 5868" -> "2302 5871" [label="[-1]", style=dashed]; -"2301 5868" -> "2308 5879" [label="[-1]", style=dashed]; -"2302 5871" -> "2304 5872" [label="[-1]", style=dashed]; -"2303 5870" -> "2304 5872" [label="[]", style=solid]; -"2304 5872" -> "2305 5881" [label="[]", style=solid]; -"2304 5872" -> "2313 5892" [label="[]", style=solid]; -"2305 5881" -> "2306 5882" [label="[]", style=solid]; -"2306 5882" -> "2310 5885" [label="[]", style=solid]; -"2307 5877" -> "2308 5879" [label="[]", style=solid]; -"2308 5879" -> "2309 5880" [label="[]", style=solid]; -"2308 5879" -> "3522 5890" [label="[]", style=solid]; -"2309 5880" -> "2310 5885" [label="[]", style=solid]; -"2310 5885" -> "2311 5887" [label="[-1, 3]", style=dashed]; -"2311 5887" -> "2312 5888" [label="[-1, 1]", style=dashed]; -"2312 5888" -> "2313 5892" [label="[-1]", style=dashed]; -"2312 5888" -> "3521 5889" [label="[-1]", style=dashed]; -"2313 5892" -> "3484 6520" [label="[]", style=solid]; -"2314 5818" -> "2315 5820" [label="[]", style=dashed]; -"2315 5820" -> "2316 5821" [label="[]", style=dashed]; -"2316 5821" -> "2317 5822" [label="[]", style=solid]; -"2317 5822" -> "2318 5823" [label="[-1, -1]", style=dashed]; -"2318 5823" -> "2319 5824" [label="[-1, -1]", style=dashed]; -"2319 5824" -> "2320 5827" [label="[-1]", style=dashed]; -"2319 5824" -> "2326 5835" [label="[-1]", style=dashed]; -"2320 5827" -> "2322 5828" [label="[-1]", style=dashed]; -"2321 5826" -> "2322 5828" [label="[]", style=solid]; -"2322 5828" -> "2323 5837" [label="[]", style=solid]; -"2322 5828" -> "2331 5848" [label="[]", style=solid]; -"2323 5837" -> "2324 5838" [label="[]", style=solid]; -"2324 5838" -> "2328 5841" [label="[]", style=solid]; -"2325 5833" -> "2326 5835" [label="[]", style=solid]; -"2326 5835" -> "2327 5836" [label="[]", style=solid]; -"2326 5835" -> "3524 5846" [label="[]", style=solid]; -"2327 5836" -> "2328 5841" [label="[]", style=solid]; -"2328 5841" -> "2329 5843" [label="[-1, 3]", style=dashed]; -"2329 5843" -> "2330 5844" [label="[-1, 1]", style=dashed]; -"2330 5844" -> "2331 5848" [label="[-1]", style=dashed]; -"2330 5844" -> "3523 5845" [label="[-1]", style=dashed]; -"2331 5848" -> "3484 6520" [label="[]", style=solid]; -"2332 5774" -> "2333 5776" [label="[]", style=dashed]; -"2333 5776" -> "2334 5777" [label="[]", style=dashed]; -"2334 5777" -> "2335 5778" [label="[]", style=solid]; -"2335 5778" -> "2336 5779" [label="[-1, -1]", style=dashed]; -"2336 5779" -> "2337 5780" [label="[-1, -1]", style=dashed]; -"2337 5780" -> "2338 5783" [label="[-1]", style=dashed]; -"2337 5780" -> "2344 5791" [label="[-1]", style=dashed]; -"2338 5783" -> "2340 5784" [label="[-1]", style=dashed]; -"2339 5782" -> "2340 5784" [label="[]", style=solid]; -"2340 5784" -> "2341 5793" [label="[]", style=solid]; -"2340 5784" -> "2349 5804" [label="[]", style=solid]; -"2341 5793" -> "2342 5794" [label="[]", style=solid]; -"2342 5794" -> "2346 5797" [label="[]", style=solid]; -"2343 5789" -> "2344 5791" [label="[]", style=solid]; -"2344 5791" -> "2345 5792" [label="[]", style=solid]; -"2344 5791" -> "3526 5802" [label="[]", style=solid]; -"2345 5792" -> "2346 5797" [label="[]", style=solid]; -"2346 5797" -> "2347 5799" [label="[-1, 3]", style=dashed]; -"2347 5799" -> "2348 5800" [label="[-1, 1]", style=dashed]; -"2348 5800" -> "2349 5804" [label="[-1]", style=dashed]; -"2348 5800" -> "3525 5801" [label="[-1]", style=dashed]; -"2349 5804" -> "3484 6520" [label="[]", style=solid]; -"2350 5730" -> "2351 5732" [label="[]", 
style=dashed]; -"2351 5732" -> "2352 5733" [label="[]", style=dashed]; -"2352 5733" -> "2353 5734" [label="[]", style=solid]; -"2353 5734" -> "2354 5735" [label="[-1, -1]", style=dashed]; -"2354 5735" -> "2355 5736" [label="[-1, -1]", style=dashed]; -"2355 5736" -> "2356 5739" [label="[-1]", style=dashed]; -"2355 5736" -> "2362 5747" [label="[-1]", style=dashed]; -"2356 5739" -> "2358 5740" [label="[-1]", style=dashed]; -"2357 5738" -> "2358 5740" [label="[]", style=solid]; -"2358 5740" -> "2359 5749" [label="[]", style=solid]; -"2358 5740" -> "2367 5760" [label="[]", style=solid]; -"2359 5749" -> "2360 5750" [label="[]", style=solid]; -"2360 5750" -> "2364 5753" [label="[]", style=solid]; -"2361 5745" -> "2362 5747" [label="[]", style=solid]; -"2362 5747" -> "2363 5748" [label="[]", style=solid]; -"2362 5747" -> "3528 5758" [label="[]", style=solid]; -"2363 5748" -> "2364 5753" [label="[]", style=solid]; -"2364 5753" -> "2365 5755" [label="[-1, 3]", style=dashed]; -"2365 5755" -> "2366 5756" [label="[-1, 1]", style=dashed]; -"2366 5756" -> "2367 5760" [label="[-1]", style=dashed]; -"2366 5756" -> "3527 5757" [label="[-1]", style=dashed]; -"2367 5760" -> "3484 6520" [label="[]", style=solid]; -"2368 5686" -> "2369 5688" [label="[]", style=dashed]; -"2369 5688" -> "2370 5689" [label="[]", style=dashed]; -"2370 5689" -> "2371 5690" [label="[]", style=solid]; -"2371 5690" -> "2372 5691" [label="[-1, -1]", style=dashed]; -"2372 5691" -> "2373 5692" [label="[-1, -1]", style=dashed]; -"2373 5692" -> "2374 5695" [label="[-1]", style=dashed]; -"2373 5692" -> "2380 5703" [label="[-1]", style=dashed]; -"2374 5695" -> "2376 5696" [label="[-1]", style=dashed]; -"2375 5694" -> "2376 5696" [label="[]", style=solid]; -"2376 5696" -> "2377 5705" [label="[]", style=solid]; -"2376 5696" -> "2385 5716" [label="[]", style=solid]; -"2377 5705" -> "2378 5706" [label="[]", style=solid]; -"2378 5706" -> "2382 5709" [label="[]", style=solid]; -"2379 5701" -> "2380 5703" [label="[]", style=solid]; -"2380 5703" -> "2381 5704" [label="[]", style=solid]; -"2380 5703" -> "3530 5714" [label="[]", style=solid]; -"2381 5704" -> "2382 5709" [label="[]", style=solid]; -"2382 5709" -> "2383 5711" [label="[-1, 3]", style=dashed]; -"2383 5711" -> "2384 5712" [label="[-1, 1]", style=dashed]; -"2384 5712" -> "2385 5716" [label="[-1]", style=dashed]; -"2384 5712" -> "3529 5713" [label="[-1]", style=dashed]; -"2385 5716" -> "3484 6520" [label="[]", style=solid]; -"2386 5642" -> "2387 5644" [label="[]", style=dashed]; -"2387 5644" -> "2388 5645" [label="[]", style=dashed]; -"2388 5645" -> "2389 5646" [label="[]", style=solid]; -"2389 5646" -> "2390 5647" [label="[-1, -1]", style=dashed]; -"2390 5647" -> "2391 5648" [label="[-1, -1]", style=dashed]; -"2391 5648" -> "2392 5651" [label="[-1]", style=dashed]; -"2391 5648" -> "2398 5659" [label="[-1]", style=dashed]; -"2392 5651" -> "2394 5652" [label="[-1]", style=dashed]; -"2393 5650" -> "2394 5652" [label="[]", style=solid]; -"2394 5652" -> "2395 5661" [label="[]", style=solid]; -"2394 5652" -> "2403 5672" [label="[]", style=solid]; -"2395 5661" -> "2396 5662" [label="[]", style=solid]; -"2396 5662" -> "2400 5665" [label="[]", style=solid]; -"2397 5657" -> "2398 5659" [label="[]", style=solid]; -"2398 5659" -> "2399 5660" [label="[]", style=solid]; -"2398 5659" -> "3532 5670" [label="[]", style=solid]; -"2399 5660" -> "2400 5665" [label="[]", style=solid]; -"2400 5665" -> "2401 5667" [label="[-1, 3]", style=dashed]; -"2401 5667" -> "2402 5668" [label="[-1, 1]", style=dashed]; -"2402 
5668" -> "2403 5672" [label="[-1]", style=dashed]; -"2402 5668" -> "3531 5669" [label="[-1]", style=dashed]; -"2403 5672" -> "3484 6520" [label="[]", style=solid]; -"2404 5598" -> "2405 5600" [label="[]", style=dashed]; -"2405 5600" -> "2406 5601" [label="[]", style=dashed]; -"2406 5601" -> "2407 5602" [label="[]", style=solid]; -"2407 5602" -> "2408 5603" [label="[-1, -1]", style=dashed]; -"2408 5603" -> "2409 5604" [label="[-1, -1]", style=dashed]; -"2409 5604" -> "2410 5607" [label="[-1]", style=dashed]; -"2409 5604" -> "2416 5615" [label="[-1]", style=dashed]; -"2410 5607" -> "2412 5608" [label="[-1]", style=dashed]; -"2411 5606" -> "2412 5608" [label="[]", style=solid]; -"2412 5608" -> "2413 5617" [label="[]", style=solid]; -"2412 5608" -> "2421 5628" [label="[]", style=solid]; -"2413 5617" -> "2414 5618" [label="[]", style=solid]; -"2414 5618" -> "2418 5621" [label="[]", style=solid]; -"2415 5613" -> "2416 5615" [label="[]", style=solid]; -"2416 5615" -> "2417 5616" [label="[]", style=solid]; -"2416 5615" -> "3534 5626" [label="[]", style=solid]; -"2417 5616" -> "2418 5621" [label="[]", style=solid]; -"2418 5621" -> "2419 5623" [label="[-1, 3]", style=dashed]; -"2419 5623" -> "2420 5624" [label="[-1, 1]", style=dashed]; -"2420 5624" -> "2421 5628" [label="[-1]", style=dashed]; -"2420 5624" -> "3533 5625" [label="[-1]", style=dashed]; -"2421 5628" -> "3484 6520" [label="[]", style=solid]; -"2422 5554" -> "2423 5556" [label="[]", style=dashed]; -"2423 5556" -> "2424 5557" [label="[]", style=dashed]; -"2424 5557" -> "2425 5558" [label="[]", style=solid]; -"2425 5558" -> "2426 5559" [label="[-1, -1]", style=dashed]; -"2426 5559" -> "2427 5560" [label="[-1, -1]", style=dashed]; -"2427 5560" -> "2428 5563" [label="[-1]", style=dashed]; -"2427 5560" -> "2434 5571" [label="[-1]", style=dashed]; -"2428 5563" -> "2430 5564" [label="[-1]", style=dashed]; -"2429 5562" -> "2430 5564" [label="[]", style=solid]; -"2430 5564" -> "2431 5573" [label="[]", style=solid]; -"2430 5564" -> "2439 5584" [label="[]", style=solid]; -"2431 5573" -> "2432 5574" [label="[]", style=solid]; -"2432 5574" -> "2436 5577" [label="[]", style=solid]; -"2433 5569" -> "2434 5571" [label="[]", style=solid]; -"2434 5571" -> "2435 5572" [label="[]", style=solid]; -"2434 5571" -> "3536 5582" [label="[]", style=solid]; -"2435 5572" -> "2436 5577" [label="[]", style=solid]; -"2436 5577" -> "2437 5579" [label="[-1, 3]", style=dashed]; -"2437 5579" -> "2438 5580" [label="[-1, 1]", style=dashed]; -"2438 5580" -> "2439 5584" [label="[-1]", style=dashed]; -"2438 5580" -> "3535 5581" [label="[-1]", style=dashed]; -"2439 5584" -> "3484 6520" [label="[]", style=solid]; -"2440 5510" -> "2441 5512" [label="[]", style=dashed]; -"2441 5512" -> "2442 5513" [label="[]", style=dashed]; -"2442 5513" -> "2443 5514" [label="[]", style=solid]; -"2443 5514" -> "2444 5515" [label="[-1, -1]", style=dashed]; -"2444 5515" -> "2445 5516" [label="[-1, -1]", style=dashed]; -"2445 5516" -> "2446 5519" [label="[-1]", style=dashed]; -"2445 5516" -> "2452 5527" [label="[-1]", style=dashed]; -"2446 5519" -> "2448 5520" [label="[-1]", style=dashed]; -"2447 5518" -> "2448 5520" [label="[]", style=solid]; -"2448 5520" -> "2449 5529" [label="[]", style=solid]; -"2448 5520" -> "2457 5540" [label="[]", style=solid]; -"2449 5529" -> "2450 5530" [label="[]", style=solid]; -"2450 5530" -> "2454 5533" [label="[]", style=solid]; -"2451 5525" -> "2452 5527" [label="[]", style=solid]; -"2452 5527" -> "2453 5528" [label="[]", style=solid]; -"2452 5527" -> "3538 5538" 
[label="[]", style=solid]; -"2453 5528" -> "2454 5533" [label="[]", style=solid]; -"2454 5533" -> "2455 5535" [label="[-1, 3]", style=dashed]; -"2455 5535" -> "2456 5536" [label="[-1, 1]", style=dashed]; -"2456 5536" -> "2457 5540" [label="[-1]", style=dashed]; -"2456 5536" -> "3537 5537" [label="[-1]", style=dashed]; -"2457 5540" -> "3484 6520" [label="[]", style=solid]; -"2458 5466" -> "2459 5468" [label="[]", style=dashed]; -"2459 5468" -> "2460 5469" [label="[]", style=dashed]; -"2460 5469" -> "2461 5470" [label="[]", style=solid]; -"2461 5470" -> "2462 5471" [label="[-1, -1]", style=dashed]; -"2462 5471" -> "2463 5472" [label="[-1, -1]", style=dashed]; -"2463 5472" -> "2464 5475" [label="[-1]", style=dashed]; -"2463 5472" -> "2470 5483" [label="[-1]", style=dashed]; -"2464 5475" -> "2466 5476" [label="[-1]", style=dashed]; -"2465 5474" -> "2466 5476" [label="[]", style=solid]; -"2466 5476" -> "2467 5485" [label="[]", style=solid]; -"2466 5476" -> "2475 5496" [label="[]", style=solid]; -"2467 5485" -> "2468 5486" [label="[]", style=solid]; -"2468 5486" -> "2472 5489" [label="[]", style=solid]; -"2469 5481" -> "2470 5483" [label="[]", style=solid]; -"2470 5483" -> "2471 5484" [label="[]", style=solid]; -"2470 5483" -> "3540 5494" [label="[]", style=solid]; -"2471 5484" -> "2472 5489" [label="[]", style=solid]; -"2472 5489" -> "2473 5491" [label="[-1, 3]", style=dashed]; -"2473 5491" -> "2474 5492" [label="[-1, 1]", style=dashed]; -"2474 5492" -> "2475 5496" [label="[-1]", style=dashed]; -"2474 5492" -> "3539 5493" [label="[-1]", style=dashed]; -"2475 5496" -> "3484 6520" [label="[]", style=solid]; -"2476 5422" -> "2477 5424" [label="[]", style=dashed]; -"2477 5424" -> "2478 5425" [label="[]", style=dashed]; -"2478 5425" -> "2479 5426" [label="[]", style=solid]; -"2479 5426" -> "2480 5427" [label="[-1, -1]", style=dashed]; -"2480 5427" -> "2481 5428" [label="[-1, -1]", style=dashed]; -"2481 5428" -> "2482 5431" [label="[-1]", style=dashed]; -"2481 5428" -> "2488 5439" [label="[-1]", style=dashed]; -"2482 5431" -> "2484 5432" [label="[-1]", style=dashed]; -"2483 5430" -> "2484 5432" [label="[]", style=solid]; -"2484 5432" -> "2485 5441" [label="[]", style=solid]; -"2484 5432" -> "2493 5452" [label="[]", style=solid]; -"2485 5441" -> "2486 5442" [label="[]", style=solid]; -"2486 5442" -> "2490 5445" [label="[]", style=solid]; -"2487 5437" -> "2488 5439" [label="[]", style=solid]; -"2488 5439" -> "2489 5440" [label="[]", style=solid]; -"2488 5439" -> "3542 5450" [label="[]", style=solid]; -"2489 5440" -> "2490 5445" [label="[]", style=solid]; -"2490 5445" -> "2491 5447" [label="[-1, 3]", style=dashed]; -"2491 5447" -> "2492 5448" [label="[-1, 1]", style=dashed]; -"2492 5448" -> "2493 5452" [label="[-1]", style=dashed]; -"2492 5448" -> "3541 5449" [label="[-1]", style=dashed]; -"2493 5452" -> "3484 6520" [label="[]", style=solid]; -"2494 5378" -> "2495 5380" [label="[]", style=dashed]; -"2495 5380" -> "2496 5381" [label="[]", style=dashed]; -"2496 5381" -> "2497 5382" [label="[]", style=solid]; -"2497 5382" -> "2498 5383" [label="[-1, -1]", style=dashed]; -"2498 5383" -> "2499 5384" [label="[-1, -1]", style=dashed]; -"2499 5384" -> "2500 5387" [label="[-1]", style=dashed]; -"2499 5384" -> "2506 5395" [label="[-1]", style=dashed]; -"2500 5387" -> "2502 5388" [label="[-1]", style=dashed]; -"2501 5386" -> "2502 5388" [label="[]", style=solid]; -"2502 5388" -> "2503 5397" [label="[]", style=solid]; -"2502 5388" -> "2511 5408" [label="[]", style=solid]; -"2503 5397" -> "2504 5398" [label="[]", 
style=solid]; -"2504 5398" -> "2508 5401" [label="[]", style=solid]; -"2505 5393" -> "2506 5395" [label="[]", style=solid]; -"2506 5395" -> "2507 5396" [label="[]", style=solid]; -"2506 5395" -> "3544 5406" [label="[]", style=solid]; -"2507 5396" -> "2508 5401" [label="[]", style=solid]; -"2508 5401" -> "2509 5403" [label="[-1, 3]", style=dashed]; -"2509 5403" -> "2510 5404" [label="[-1, 1]", style=dashed]; -"2510 5404" -> "2511 5408" [label="[-1]", style=dashed]; -"2510 5404" -> "3543 5405" [label="[-1]", style=dashed]; -"2511 5408" -> "3484 6520" [label="[]", style=solid]; -"2512 5334" -> "2513 5336" [label="[]", style=dashed]; -"2513 5336" -> "2514 5337" [label="[]", style=dashed]; -"2514 5337" -> "2515 5338" [label="[]", style=solid]; -"2515 5338" -> "2516 5339" [label="[-1, -1]", style=dashed]; -"2516 5339" -> "2517 5340" [label="[-1, -1]", style=dashed]; -"2517 5340" -> "2518 5343" [label="[-1]", style=dashed]; -"2517 5340" -> "2524 5351" [label="[-1]", style=dashed]; -"2518 5343" -> "2520 5344" [label="[-1]", style=dashed]; -"2519 5342" -> "2520 5344" [label="[]", style=solid]; -"2520 5344" -> "2521 5353" [label="[]", style=solid]; -"2520 5344" -> "2529 5364" [label="[]", style=solid]; -"2521 5353" -> "2522 5354" [label="[]", style=solid]; -"2522 5354" -> "2526 5357" [label="[]", style=solid]; -"2523 5349" -> "2524 5351" [label="[]", style=solid]; -"2524 5351" -> "2525 5352" [label="[]", style=solid]; -"2524 5351" -> "3546 5362" [label="[]", style=solid]; -"2525 5352" -> "2526 5357" [label="[]", style=solid]; -"2526 5357" -> "2527 5359" [label="[-1, 3]", style=dashed]; -"2527 5359" -> "2528 5360" [label="[-1, 1]", style=dashed]; -"2528 5360" -> "2529 5364" [label="[-1]", style=dashed]; -"2528 5360" -> "3545 5361" [label="[-1]", style=dashed]; -"2529 5364" -> "3484 6520" [label="[]", style=solid]; -"2530 5290" -> "2531 5292" [label="[]", style=dashed]; -"2531 5292" -> "2532 5293" [label="[]", style=dashed]; -"2532 5293" -> "2533 5294" [label="[]", style=solid]; -"2533 5294" -> "2534 5295" [label="[-1, -1]", style=dashed]; -"2534 5295" -> "2535 5296" [label="[-1, -1]", style=dashed]; -"2535 5296" -> "2536 5299" [label="[-1]", style=dashed]; -"2535 5296" -> "2542 5307" [label="[-1]", style=dashed]; -"2536 5299" -> "2538 5300" [label="[-1]", style=dashed]; -"2537 5298" -> "2538 5300" [label="[]", style=solid]; -"2538 5300" -> "2539 5309" [label="[]", style=solid]; -"2538 5300" -> "2547 5320" [label="[]", style=solid]; -"2539 5309" -> "2540 5310" [label="[]", style=solid]; -"2540 5310" -> "2544 5313" [label="[]", style=solid]; -"2541 5305" -> "2542 5307" [label="[]", style=solid]; -"2542 5307" -> "2543 5308" [label="[]", style=solid]; -"2542 5307" -> "3548 5318" [label="[]", style=solid]; -"2543 5308" -> "2544 5313" [label="[]", style=solid]; -"2544 5313" -> "2545 5315" [label="[-1, 3]", style=dashed]; -"2545 5315" -> "2546 5316" [label="[-1, 1]", style=dashed]; -"2546 5316" -> "2547 5320" [label="[-1]", style=dashed]; -"2546 5316" -> "3547 5317" [label="[-1]", style=dashed]; -"2547 5320" -> "3484 6520" [label="[]", style=solid]; -"2548 5246" -> "2549 5248" [label="[]", style=dashed]; -"2549 5248" -> "2550 5249" [label="[]", style=dashed]; -"2550 5249" -> "2551 5250" [label="[]", style=solid]; -"2551 5250" -> "2552 5251" [label="[-1, -1]", style=dashed]; -"2552 5251" -> "2553 5252" [label="[-1, -1]", style=dashed]; -"2553 5252" -> "2554 5255" [label="[-1]", style=dashed]; -"2553 5252" -> "2560 5263" [label="[-1]", style=dashed]; -"2554 5255" -> "2556 5256" [label="[-1]", style=dashed]; 
-"2555 5254" -> "2556 5256" [label="[]", style=solid]; -"2556 5256" -> "2557 5265" [label="[]", style=solid]; -"2556 5256" -> "2565 5276" [label="[]", style=solid]; -"2557 5265" -> "2558 5266" [label="[]", style=solid]; -"2558 5266" -> "2562 5269" [label="[]", style=solid]; -"2559 5261" -> "2560 5263" [label="[]", style=solid]; -"2560 5263" -> "2561 5264" [label="[]", style=solid]; -"2560 5263" -> "3550 5274" [label="[]", style=solid]; -"2561 5264" -> "2562 5269" [label="[]", style=solid]; -"2562 5269" -> "2563 5271" [label="[-1, 3]", style=dashed]; -"2563 5271" -> "2564 5272" [label="[-1, 1]", style=dashed]; -"2564 5272" -> "2565 5276" [label="[-1]", style=dashed]; -"2564 5272" -> "3549 5273" [label="[-1]", style=dashed]; -"2565 5276" -> "3484 6520" [label="[]", style=solid]; -"2566 5202" -> "2567 5204" [label="[]", style=dashed]; -"2567 5204" -> "2568 5205" [label="[]", style=dashed]; -"2568 5205" -> "2569 5206" [label="[]", style=solid]; -"2569 5206" -> "2570 5207" [label="[-1, -1]", style=dashed]; -"2570 5207" -> "2571 5208" [label="[-1, -1]", style=dashed]; -"2571 5208" -> "2572 5211" [label="[-1]", style=dashed]; -"2571 5208" -> "2578 5219" [label="[-1]", style=dashed]; -"2572 5211" -> "2574 5212" [label="[-1]", style=dashed]; -"2573 5210" -> "2574 5212" [label="[]", style=solid]; -"2574 5212" -> "2575 5221" [label="[]", style=solid]; -"2574 5212" -> "2583 5232" [label="[]", style=solid]; -"2575 5221" -> "2576 5222" [label="[]", style=solid]; -"2576 5222" -> "2580 5225" [label="[]", style=solid]; -"2577 5217" -> "2578 5219" [label="[]", style=solid]; -"2578 5219" -> "2579 5220" [label="[]", style=solid]; -"2578 5219" -> "3552 5230" [label="[]", style=solid]; -"2579 5220" -> "2580 5225" [label="[]", style=solid]; -"2580 5225" -> "2581 5227" [label="[-1, 3]", style=dashed]; -"2581 5227" -> "2582 5228" [label="[-1, 1]", style=dashed]; -"2582 5228" -> "2583 5232" [label="[-1]", style=dashed]; -"2582 5228" -> "3551 5229" [label="[-1]", style=dashed]; -"2583 5232" -> "3484 6520" [label="[]", style=solid]; -"2584 5158" -> "2585 5160" [label="[]", style=dashed]; -"2585 5160" -> "2586 5161" [label="[]", style=dashed]; -"2586 5161" -> "2587 5162" [label="[]", style=solid]; -"2587 5162" -> "2588 5163" [label="[-1, -1]", style=dashed]; -"2588 5163" -> "2589 5164" [label="[-1, -1]", style=dashed]; -"2589 5164" -> "2590 5167" [label="[-1]", style=dashed]; -"2589 5164" -> "2596 5175" [label="[-1]", style=dashed]; -"2590 5167" -> "2592 5168" [label="[-1]", style=dashed]; -"2591 5166" -> "2592 5168" [label="[]", style=solid]; -"2592 5168" -> "2593 5177" [label="[]", style=solid]; -"2592 5168" -> "2601 5188" [label="[]", style=solid]; -"2593 5177" -> "2594 5178" [label="[]", style=solid]; -"2594 5178" -> "2598 5181" [label="[]", style=solid]; -"2595 5173" -> "2596 5175" [label="[]", style=solid]; -"2596 5175" -> "2597 5176" [label="[]", style=solid]; -"2596 5175" -> "3554 5186" [label="[]", style=solid]; -"2597 5176" -> "2598 5181" [label="[]", style=solid]; -"2598 5181" -> "2599 5183" [label="[-1, 3]", style=dashed]; -"2599 5183" -> "2600 5184" [label="[-1, 1]", style=dashed]; -"2600 5184" -> "2601 5188" [label="[-1]", style=dashed]; -"2600 5184" -> "3553 5185" [label="[-1]", style=dashed]; -"2601 5188" -> "3484 6520" [label="[]", style=solid]; -"2602 5114" -> "2603 5116" [label="[]", style=dashed]; -"2603 5116" -> "2604 5117" [label="[]", style=dashed]; -"2604 5117" -> "2605 5118" [label="[]", style=solid]; -"2605 5118" -> "2606 5119" [label="[-1, -1]", style=dashed]; -"2606 5119" -> "2607 5120" 
[label="[-1, -1]", style=dashed]; -"2607 5120" -> "2608 5123" [label="[-1]", style=dashed]; -"2607 5120" -> "2614 5131" [label="[-1]", style=dashed]; -"2608 5123" -> "2610 5124" [label="[-1]", style=dashed]; -"2609 5122" -> "2610 5124" [label="[]", style=solid]; -"2610 5124" -> "2611 5133" [label="[]", style=solid]; -"2610 5124" -> "2619 5144" [label="[]", style=solid]; -"2611 5133" -> "2612 5134" [label="[]", style=solid]; -"2612 5134" -> "2616 5137" [label="[]", style=solid]; -"2613 5129" -> "2614 5131" [label="[]", style=solid]; -"2614 5131" -> "2615 5132" [label="[]", style=solid]; -"2614 5131" -> "3556 5142" [label="[]", style=solid]; -"2615 5132" -> "2616 5137" [label="[]", style=solid]; -"2616 5137" -> "2617 5139" [label="[-1, 3]", style=dashed]; -"2617 5139" -> "2618 5140" [label="[-1, 1]", style=dashed]; -"2618 5140" -> "2619 5144" [label="[-1]", style=dashed]; -"2618 5140" -> "3555 5141" [label="[-1]", style=dashed]; -"2619 5144" -> "3484 6520" [label="[]", style=solid]; -"2620 5070" -> "2621 5072" [label="[]", style=dashed]; -"2621 5072" -> "2622 5073" [label="[]", style=dashed]; -"2622 5073" -> "2623 5074" [label="[]", style=solid]; -"2623 5074" -> "2624 5075" [label="[-1, -1]", style=dashed]; -"2624 5075" -> "2625 5076" [label="[-1, -1]", style=dashed]; -"2625 5076" -> "2626 5079" [label="[-1]", style=dashed]; -"2625 5076" -> "2632 5087" [label="[-1]", style=dashed]; -"2626 5079" -> "2628 5080" [label="[-1]", style=dashed]; -"2627 5078" -> "2628 5080" [label="[]", style=solid]; -"2628 5080" -> "2629 5089" [label="[]", style=solid]; -"2628 5080" -> "2637 5100" [label="[]", style=solid]; -"2629 5089" -> "2630 5090" [label="[]", style=solid]; -"2630 5090" -> "2634 5093" [label="[]", style=solid]; -"2631 5085" -> "2632 5087" [label="[]", style=solid]; -"2632 5087" -> "2633 5088" [label="[]", style=solid]; -"2632 5087" -> "3558 5098" [label="[]", style=solid]; -"2633 5088" -> "2634 5093" [label="[]", style=solid]; -"2634 5093" -> "2635 5095" [label="[-1, 3]", style=dashed]; -"2635 5095" -> "2636 5096" [label="[-1, 1]", style=dashed]; -"2636 5096" -> "2637 5100" [label="[-1]", style=dashed]; -"2636 5096" -> "3557 5097" [label="[-1]", style=dashed]; -"2637 5100" -> "3484 6520" [label="[]", style=solid]; -"2638 5026" -> "2639 5028" [label="[]", style=dashed]; -"2639 5028" -> "2640 5029" [label="[]", style=dashed]; -"2640 5029" -> "2641 5030" [label="[]", style=solid]; -"2641 5030" -> "2642 5031" [label="[-1, -1]", style=dashed]; -"2642 5031" -> "2643 5032" [label="[-1, -1]", style=dashed]; -"2643 5032" -> "2644 5035" [label="[-1]", style=dashed]; -"2643 5032" -> "2650 5043" [label="[-1]", style=dashed]; -"2644 5035" -> "2646 5036" [label="[-1]", style=dashed]; -"2645 5034" -> "2646 5036" [label="[]", style=solid]; -"2646 5036" -> "2647 5045" [label="[]", style=solid]; -"2646 5036" -> "2655 5056" [label="[]", style=solid]; -"2647 5045" -> "2648 5046" [label="[]", style=solid]; -"2648 5046" -> "2652 5049" [label="[]", style=solid]; -"2649 5041" -> "2650 5043" [label="[]", style=solid]; -"2650 5043" -> "2651 5044" [label="[]", style=solid]; -"2650 5043" -> "3560 5054" [label="[]", style=solid]; -"2651 5044" -> "2652 5049" [label="[]", style=solid]; -"2652 5049" -> "2653 5051" [label="[-1, 3]", style=dashed]; -"2653 5051" -> "2654 5052" [label="[-1, 1]", style=dashed]; -"2654 5052" -> "2655 5056" [label="[-1]", style=dashed]; -"2654 5052" -> "3559 5053" [label="[-1]", style=dashed]; -"2655 5056" -> "3484 6520" [label="[]", style=solid]; -"2656 4982" -> "2657 4984" [label="[]", 
style=dashed]; -"2657 4984" -> "2658 4985" [label="[]", style=dashed]; -"2658 4985" -> "2659 4986" [label="[]", style=solid]; -"2659 4986" -> "2660 4987" [label="[-1, -1]", style=dashed]; -"2660 4987" -> "2661 4988" [label="[-1, -1]", style=dashed]; -"2661 4988" -> "2662 4991" [label="[-1]", style=dashed]; -"2661 4988" -> "2668 4999" [label="[-1]", style=dashed]; -"2662 4991" -> "2664 4992" [label="[-1]", style=dashed]; -"2663 4990" -> "2664 4992" [label="[]", style=solid]; -"2664 4992" -> "2665 5001" [label="[]", style=solid]; -"2664 4992" -> "2673 5012" [label="[]", style=solid]; -"2665 5001" -> "2666 5002" [label="[]", style=solid]; -"2666 5002" -> "2670 5005" [label="[]", style=solid]; -"2667 4997" -> "2668 4999" [label="[]", style=solid]; -"2668 4999" -> "2669 5000" [label="[]", style=solid]; -"2668 4999" -> "3562 5010" [label="[]", style=solid]; -"2669 5000" -> "2670 5005" [label="[]", style=solid]; -"2670 5005" -> "2671 5007" [label="[-1, 3]", style=dashed]; -"2671 5007" -> "2672 5008" [label="[-1, 1]", style=dashed]; -"2672 5008" -> "2673 5012" [label="[-1]", style=dashed]; -"2672 5008" -> "3561 5009" [label="[-1]", style=dashed]; -"2673 5012" -> "3484 6520" [label="[]", style=solid]; -"2674 4938" -> "2675 4940" [label="[]", style=dashed]; -"2675 4940" -> "2676 4941" [label="[]", style=dashed]; -"2676 4941" -> "2677 4942" [label="[]", style=solid]; -"2677 4942" -> "2678 4943" [label="[-1, -1]", style=dashed]; -"2678 4943" -> "2679 4944" [label="[-1, -1]", style=dashed]; -"2679 4944" -> "2680 4947" [label="[-1]", style=dashed]; -"2679 4944" -> "2686 4955" [label="[-1]", style=dashed]; -"2680 4947" -> "2682 4948" [label="[-1]", style=dashed]; -"2681 4946" -> "2682 4948" [label="[]", style=solid]; -"2682 4948" -> "2683 4957" [label="[]", style=solid]; -"2682 4948" -> "2691 4968" [label="[]", style=solid]; -"2683 4957" -> "2684 4958" [label="[]", style=solid]; -"2684 4958" -> "2688 4961" [label="[]", style=solid]; -"2685 4953" -> "2686 4955" [label="[]", style=solid]; -"2686 4955" -> "2687 4956" [label="[]", style=solid]; -"2686 4955" -> "3564 4966" [label="[]", style=solid]; -"2687 4956" -> "2688 4961" [label="[]", style=solid]; -"2688 4961" -> "2689 4963" [label="[-1, 3]", style=dashed]; -"2689 4963" -> "2690 4964" [label="[-1, 1]", style=dashed]; -"2690 4964" -> "2691 4968" [label="[-1]", style=dashed]; -"2690 4964" -> "3563 4965" [label="[-1]", style=dashed]; -"2691 4968" -> "3484 6520" [label="[]", style=solid]; -"2692 4894" -> "2693 4896" [label="[]", style=dashed]; -"2693 4896" -> "2694 4897" [label="[]", style=dashed]; -"2694 4897" -> "2695 4898" [label="[]", style=solid]; -"2695 4898" -> "2696 4899" [label="[-1, -1]", style=dashed]; -"2696 4899" -> "2697 4900" [label="[-1, -1]", style=dashed]; -"2697 4900" -> "2698 4903" [label="[-1]", style=dashed]; -"2697 4900" -> "2704 4911" [label="[-1]", style=dashed]; -"2698 4903" -> "2700 4904" [label="[-1]", style=dashed]; -"2699 4902" -> "2700 4904" [label="[]", style=solid]; -"2700 4904" -> "2701 4913" [label="[]", style=solid]; -"2700 4904" -> "2709 4924" [label="[]", style=solid]; -"2701 4913" -> "2702 4914" [label="[]", style=solid]; -"2702 4914" -> "2706 4917" [label="[]", style=solid]; -"2703 4909" -> "2704 4911" [label="[]", style=solid]; -"2704 4911" -> "2705 4912" [label="[]", style=solid]; -"2704 4911" -> "3566 4922" [label="[]", style=solid]; -"2705 4912" -> "2706 4917" [label="[]", style=solid]; -"2706 4917" -> "2707 4919" [label="[-1, 3]", style=dashed]; -"2707 4919" -> "2708 4920" [label="[-1, 1]", style=dashed]; -"2708 
4920" -> "2709 4924" [label="[-1]", style=dashed]; -"2708 4920" -> "3565 4921" [label="[-1]", style=dashed]; -"2709 4924" -> "3484 6520" [label="[]", style=solid]; -"2710 4850" -> "2711 4852" [label="[]", style=dashed]; -"2711 4852" -> "2712 4853" [label="[]", style=dashed]; -"2712 4853" -> "2713 4854" [label="[]", style=solid]; -"2713 4854" -> "2714 4855" [label="[-1, -1]", style=dashed]; -"2714 4855" -> "2715 4856" [label="[-1, -1]", style=dashed]; -"2715 4856" -> "2716 4859" [label="[-1]", style=dashed]; -"2715 4856" -> "2722 4867" [label="[-1]", style=dashed]; -"2716 4859" -> "2718 4860" [label="[-1]", style=dashed]; -"2717 4858" -> "2718 4860" [label="[]", style=solid]; -"2718 4860" -> "2719 4869" [label="[]", style=solid]; -"2718 4860" -> "2727 4880" [label="[]", style=solid]; -"2719 4869" -> "2720 4870" [label="[]", style=solid]; -"2720 4870" -> "2724 4873" [label="[]", style=solid]; -"2721 4865" -> "2722 4867" [label="[]", style=solid]; -"2722 4867" -> "2723 4868" [label="[]", style=solid]; -"2722 4867" -> "3568 4878" [label="[]", style=solid]; -"2723 4868" -> "2724 4873" [label="[]", style=solid]; -"2724 4873" -> "2725 4875" [label="[-1, 3]", style=dashed]; -"2725 4875" -> "2726 4876" [label="[-1, 1]", style=dashed]; -"2726 4876" -> "2727 4880" [label="[-1]", style=dashed]; -"2726 4876" -> "3567 4877" [label="[-1]", style=dashed]; -"2727 4880" -> "3484 6520" [label="[]", style=solid]; -"2728 4806" -> "2729 4808" [label="[]", style=dashed]; -"2729 4808" -> "2730 4809" [label="[]", style=dashed]; -"2730 4809" -> "2731 4810" [label="[]", style=solid]; -"2731 4810" -> "2732 4811" [label="[-1, -1]", style=dashed]; -"2732 4811" -> "2733 4812" [label="[-1, -1]", style=dashed]; -"2733 4812" -> "2734 4815" [label="[-1]", style=dashed]; -"2733 4812" -> "2740 4823" [label="[-1]", style=dashed]; -"2734 4815" -> "2736 4816" [label="[-1]", style=dashed]; -"2735 4814" -> "2736 4816" [label="[]", style=solid]; -"2736 4816" -> "2737 4825" [label="[]", style=solid]; -"2736 4816" -> "2745 4836" [label="[]", style=solid]; -"2737 4825" -> "2738 4826" [label="[]", style=solid]; -"2738 4826" -> "2742 4829" [label="[]", style=solid]; -"2739 4821" -> "2740 4823" [label="[]", style=solid]; -"2740 4823" -> "2741 4824" [label="[]", style=solid]; -"2740 4823" -> "3570 4834" [label="[]", style=solid]; -"2741 4824" -> "2742 4829" [label="[]", style=solid]; -"2742 4829" -> "2743 4831" [label="[-1, 3]", style=dashed]; -"2743 4831" -> "2744 4832" [label="[-1, 1]", style=dashed]; -"2744 4832" -> "2745 4836" [label="[-1]", style=dashed]; -"2744 4832" -> "3569 4833" [label="[-1]", style=dashed]; -"2745 4836" -> "3484 6520" [label="[]", style=solid]; -"2746 4762" -> "2747 4764" [label="[]", style=dashed]; -"2747 4764" -> "2748 4765" [label="[]", style=dashed]; -"2748 4765" -> "2749 4766" [label="[]", style=solid]; -"2749 4766" -> "2750 4767" [label="[-1, -1]", style=dashed]; -"2750 4767" -> "2751 4768" [label="[-1, -1]", style=dashed]; -"2751 4768" -> "2752 4771" [label="[-1]", style=dashed]; -"2751 4768" -> "2758 4779" [label="[-1]", style=dashed]; -"2752 4771" -> "2754 4772" [label="[-1]", style=dashed]; -"2753 4770" -> "2754 4772" [label="[]", style=solid]; -"2754 4772" -> "2755 4781" [label="[]", style=solid]; -"2754 4772" -> "2763 4792" [label="[]", style=solid]; -"2755 4781" -> "2756 4782" [label="[]", style=solid]; -"2756 4782" -> "2760 4785" [label="[]", style=solid]; -"2757 4777" -> "2758 4779" [label="[]", style=solid]; -"2758 4779" -> "2759 4780" [label="[]", style=solid]; -"2758 4779" -> "3572 4790" 
[label="[]", style=solid]; -"2759 4780" -> "2760 4785" [label="[]", style=solid]; -"2760 4785" -> "2761 4787" [label="[-1, 3]", style=dashed]; -"2761 4787" -> "2762 4788" [label="[-1, 1]", style=dashed]; -"2762 4788" -> "2763 4792" [label="[-1]", style=dashed]; -"2762 4788" -> "3571 4789" [label="[-1]", style=dashed]; -"2763 4792" -> "3484 6520" [label="[]", style=solid]; -"2764 4718" -> "2765 4720" [label="[]", style=dashed]; -"2765 4720" -> "2766 4721" [label="[]", style=dashed]; -"2766 4721" -> "2767 4722" [label="[]", style=solid]; -"2767 4722" -> "2768 4723" [label="[-1, -1]", style=dashed]; -"2768 4723" -> "2769 4724" [label="[-1, -1]", style=dashed]; -"2769 4724" -> "2770 4727" [label="[-1]", style=dashed]; -"2769 4724" -> "2776 4735" [label="[-1]", style=dashed]; -"2770 4727" -> "2772 4728" [label="[-1]", style=dashed]; -"2771 4726" -> "2772 4728" [label="[]", style=solid]; -"2772 4728" -> "2773 4737" [label="[]", style=solid]; -"2772 4728" -> "2781 4748" [label="[]", style=solid]; -"2773 4737" -> "2774 4738" [label="[]", style=solid]; -"2774 4738" -> "2778 4741" [label="[]", style=solid]; -"2775 4733" -> "2776 4735" [label="[]", style=solid]; -"2776 4735" -> "2777 4736" [label="[]", style=solid]; -"2776 4735" -> "3574 4746" [label="[]", style=solid]; -"2777 4736" -> "2778 4741" [label="[]", style=solid]; -"2778 4741" -> "2779 4743" [label="[-1, 3]", style=dashed]; -"2779 4743" -> "2780 4744" [label="[-1, 1]", style=dashed]; -"2780 4744" -> "2781 4748" [label="[-1]", style=dashed]; -"2780 4744" -> "3573 4745" [label="[-1]", style=dashed]; -"2781 4748" -> "3484 6520" [label="[]", style=solid]; -"2782 4674" -> "2783 4676" [label="[]", style=dashed]; -"2783 4676" -> "2784 4677" [label="[]", style=dashed]; -"2784 4677" -> "2785 4678" [label="[]", style=solid]; -"2785 4678" -> "2786 4679" [label="[-1, -1]", style=dashed]; -"2786 4679" -> "2787 4680" [label="[-1, -1]", style=dashed]; -"2787 4680" -> "2788 4683" [label="[-1]", style=dashed]; -"2787 4680" -> "2794 4691" [label="[-1]", style=dashed]; -"2788 4683" -> "2790 4684" [label="[-1]", style=dashed]; -"2789 4682" -> "2790 4684" [label="[]", style=solid]; -"2790 4684" -> "2791 4693" [label="[]", style=solid]; -"2790 4684" -> "2799 4704" [label="[]", style=solid]; -"2791 4693" -> "2792 4694" [label="[]", style=solid]; -"2792 4694" -> "2796 4697" [label="[]", style=solid]; -"2793 4689" -> "2794 4691" [label="[]", style=solid]; -"2794 4691" -> "2795 4692" [label="[]", style=solid]; -"2794 4691" -> "3576 4702" [label="[]", style=solid]; -"2795 4692" -> "2796 4697" [label="[]", style=solid]; -"2796 4697" -> "2797 4699" [label="[-1, 3]", style=dashed]; -"2797 4699" -> "2798 4700" [label="[-1, 1]", style=dashed]; -"2798 4700" -> "2799 4704" [label="[-1]", style=dashed]; -"2798 4700" -> "3575 4701" [label="[-1]", style=dashed]; -"2799 4704" -> "3484 6520" [label="[]", style=solid]; -"2800 4630" -> "2801 4632" [label="[]", style=dashed]; -"2801 4632" -> "2802 4633" [label="[]", style=dashed]; -"2802 4633" -> "2803 4634" [label="[]", style=solid]; -"2803 4634" -> "2804 4635" [label="[-1, -1]", style=dashed]; -"2804 4635" -> "2805 4636" [label="[-1, -1]", style=dashed]; -"2805 4636" -> "2806 4639" [label="[-1]", style=dashed]; -"2805 4636" -> "2812 4647" [label="[-1]", style=dashed]; -"2806 4639" -> "2808 4640" [label="[-1]", style=dashed]; -"2807 4638" -> "2808 4640" [label="[]", style=solid]; -"2808 4640" -> "2809 4649" [label="[]", style=solid]; -"2808 4640" -> "2817 4660" [label="[]", style=solid]; -"2809 4649" -> "2810 4650" [label="[]", 
style=solid]; -"2810 4650" -> "2814 4653" [label="[]", style=solid]; -"2811 4645" -> "2812 4647" [label="[]", style=solid]; -"2812 4647" -> "2813 4648" [label="[]", style=solid]; -"2812 4647" -> "3578 4658" [label="[]", style=solid]; -"2813 4648" -> "2814 4653" [label="[]", style=solid]; -"2814 4653" -> "2815 4655" [label="[-1, 3]", style=dashed]; -"2815 4655" -> "2816 4656" [label="[-1, 1]", style=dashed]; -"2816 4656" -> "2817 4660" [label="[-1]", style=dashed]; -"2816 4656" -> "3577 4657" [label="[-1]", style=dashed]; -"2817 4660" -> "3484 6520" [label="[]", style=solid]; -"2818 4586" -> "2819 4588" [label="[]", style=dashed]; -"2819 4588" -> "2820 4589" [label="[]", style=dashed]; -"2820 4589" -> "2821 4590" [label="[]", style=solid]; -"2821 4590" -> "2822 4591" [label="[-1, -1]", style=dashed]; -"2822 4591" -> "2823 4592" [label="[-1, -1]", style=dashed]; -"2823 4592" -> "2824 4595" [label="[-1]", style=dashed]; -"2823 4592" -> "2830 4603" [label="[-1]", style=dashed]; -"2824 4595" -> "2826 4596" [label="[-1]", style=dashed]; -"2825 4594" -> "2826 4596" [label="[]", style=solid]; -"2826 4596" -> "2827 4605" [label="[]", style=solid]; -"2826 4596" -> "2835 4616" [label="[]", style=solid]; -"2827 4605" -> "2828 4606" [label="[]", style=solid]; -"2828 4606" -> "2832 4609" [label="[]", style=solid]; -"2829 4601" -> "2830 4603" [label="[]", style=solid]; -"2830 4603" -> "2831 4604" [label="[]", style=solid]; -"2830 4603" -> "3580 4614" [label="[]", style=solid]; -"2831 4604" -> "2832 4609" [label="[]", style=solid]; -"2832 4609" -> "2833 4611" [label="[-1, 3]", style=dashed]; -"2833 4611" -> "2834 4612" [label="[-1, 1]", style=dashed]; -"2834 4612" -> "2835 4616" [label="[-1]", style=dashed]; -"2834 4612" -> "3579 4613" [label="[-1]", style=dashed]; -"2835 4616" -> "3484 6520" [label="[]", style=solid]; -"2836 4542" -> "2837 4544" [label="[]", style=dashed]; -"2837 4544" -> "2838 4545" [label="[]", style=dashed]; -"2838 4545" -> "2839 4546" [label="[]", style=solid]; -"2839 4546" -> "2840 4547" [label="[-1, -1]", style=dashed]; -"2840 4547" -> "2841 4548" [label="[-1, -1]", style=dashed]; -"2841 4548" -> "2842 4551" [label="[-1]", style=dashed]; -"2841 4548" -> "2848 4559" [label="[-1]", style=dashed]; -"2842 4551" -> "2844 4552" [label="[-1]", style=dashed]; -"2843 4550" -> "2844 4552" [label="[]", style=solid]; -"2844 4552" -> "2845 4561" [label="[]", style=solid]; -"2844 4552" -> "2853 4572" [label="[]", style=solid]; -"2845 4561" -> "2846 4562" [label="[]", style=solid]; -"2846 4562" -> "2850 4565" [label="[]", style=solid]; -"2847 4557" -> "2848 4559" [label="[]", style=solid]; -"2848 4559" -> "2849 4560" [label="[]", style=solid]; -"2848 4559" -> "3582 4570" [label="[]", style=solid]; -"2849 4560" -> "2850 4565" [label="[]", style=solid]; -"2850 4565" -> "2851 4567" [label="[-1, 3]", style=dashed]; -"2851 4567" -> "2852 4568" [label="[-1, 1]", style=dashed]; -"2852 4568" -> "2853 4572" [label="[-1]", style=dashed]; -"2852 4568" -> "3581 4569" [label="[-1]", style=dashed]; -"2853 4572" -> "3484 6520" [label="[]", style=solid]; -"2854 4498" -> "2855 4500" [label="[]", style=dashed]; -"2855 4500" -> "2856 4501" [label="[]", style=dashed]; -"2856 4501" -> "2857 4502" [label="[]", style=solid]; -"2857 4502" -> "2858 4503" [label="[-1, -1]", style=dashed]; -"2858 4503" -> "2859 4504" [label="[-1, -1]", style=dashed]; -"2859 4504" -> "2860 4507" [label="[-1]", style=dashed]; -"2859 4504" -> "2866 4515" [label="[-1]", style=dashed]; -"2860 4507" -> "2862 4508" [label="[-1]", style=dashed]; 
-"2861 4506" -> "2862 4508" [label="[]", style=solid]; -"2862 4508" -> "2863 4517" [label="[]", style=solid]; -"2862 4508" -> "2871 4528" [label="[]", style=solid]; -"2863 4517" -> "2864 4518" [label="[]", style=solid]; -"2864 4518" -> "2868 4521" [label="[]", style=solid]; -"2865 4513" -> "2866 4515" [label="[]", style=solid]; -"2866 4515" -> "2867 4516" [label="[]", style=solid]; -"2866 4515" -> "3584 4526" [label="[]", style=solid]; -"2867 4516" -> "2868 4521" [label="[]", style=solid]; -"2868 4521" -> "2869 4523" [label="[-1, 3]", style=dashed]; -"2869 4523" -> "2870 4524" [label="[-1, 1]", style=dashed]; -"2870 4524" -> "2871 4528" [label="[-1]", style=dashed]; -"2870 4524" -> "3583 4525" [label="[-1]", style=dashed]; -"2871 4528" -> "3484 6520" [label="[]", style=solid]; -"2872 4454" -> "2873 4456" [label="[]", style=dashed]; -"2873 4456" -> "2874 4457" [label="[]", style=dashed]; -"2874 4457" -> "2875 4458" [label="[]", style=solid]; -"2875 4458" -> "2876 4459" [label="[-1, -1]", style=dashed]; -"2876 4459" -> "2877 4460" [label="[-1, -1]", style=dashed]; -"2877 4460" -> "2878 4463" [label="[-1]", style=dashed]; -"2877 4460" -> "2884 4471" [label="[-1]", style=dashed]; -"2878 4463" -> "2880 4464" [label="[-1]", style=dashed]; -"2879 4462" -> "2880 4464" [label="[]", style=solid]; -"2880 4464" -> "2881 4473" [label="[]", style=solid]; -"2880 4464" -> "2889 4484" [label="[]", style=solid]; -"2881 4473" -> "2882 4474" [label="[]", style=solid]; -"2882 4474" -> "2886 4477" [label="[]", style=solid]; -"2883 4469" -> "2884 4471" [label="[]", style=solid]; -"2884 4471" -> "2885 4472" [label="[]", style=solid]; -"2884 4471" -> "3586 4482" [label="[]", style=solid]; -"2885 4472" -> "2886 4477" [label="[]", style=solid]; -"2886 4477" -> "2887 4479" [label="[-1, 3]", style=dashed]; -"2887 4479" -> "2888 4480" [label="[-1, 1]", style=dashed]; -"2888 4480" -> "2889 4484" [label="[-1]", style=dashed]; -"2888 4480" -> "3585 4481" [label="[-1]", style=dashed]; -"2889 4484" -> "3484 6520" [label="[]", style=solid]; -"2890 4410" -> "2891 4412" [label="[]", style=dashed]; -"2891 4412" -> "2892 4413" [label="[]", style=dashed]; -"2892 4413" -> "2893 4414" [label="[]", style=solid]; -"2893 4414" -> "2894 4415" [label="[-1, -1]", style=dashed]; -"2894 4415" -> "2895 4416" [label="[-1, -1]", style=dashed]; -"2895 4416" -> "2896 4419" [label="[-1]", style=dashed]; -"2895 4416" -> "2902 4427" [label="[-1]", style=dashed]; -"2896 4419" -> "2898 4420" [label="[-1]", style=dashed]; -"2897 4418" -> "2898 4420" [label="[]", style=solid]; -"2898 4420" -> "2899 4429" [label="[]", style=solid]; -"2898 4420" -> "2907 4440" [label="[]", style=solid]; -"2899 4429" -> "2900 4430" [label="[]", style=solid]; -"2900 4430" -> "2904 4433" [label="[]", style=solid]; -"2901 4425" -> "2902 4427" [label="[]", style=solid]; -"2902 4427" -> "2903 4428" [label="[]", style=solid]; -"2902 4427" -> "3588 4438" [label="[]", style=solid]; -"2903 4428" -> "2904 4433" [label="[]", style=solid]; -"2904 4433" -> "2905 4435" [label="[-1, 3]", style=dashed]; -"2905 4435" -> "2906 4436" [label="[-1, 1]", style=dashed]; -"2906 4436" -> "2907 4440" [label="[-1]", style=dashed]; -"2906 4436" -> "3587 4437" [label="[-1]", style=dashed]; -"2907 4440" -> "3484 6520" [label="[]", style=solid]; -"2908 4366" -> "2909 4368" [label="[]", style=dashed]; -"2909 4368" -> "2910 4369" [label="[]", style=dashed]; -"2910 4369" -> "2911 4370" [label="[]", style=solid]; -"2911 4370" -> "2912 4371" [label="[-1, -1]", style=dashed]; -"2912 4371" -> "2913 4372" 
[label="[-1, -1]", style=dashed]; -"2913 4372" -> "2914 4375" [label="[-1]", style=dashed]; -"2913 4372" -> "2920 4383" [label="[-1]", style=dashed]; -"2914 4375" -> "2916 4376" [label="[-1]", style=dashed]; -"2915 4374" -> "2916 4376" [label="[]", style=solid]; -"2916 4376" -> "2917 4385" [label="[]", style=solid]; -"2916 4376" -> "2925 4396" [label="[]", style=solid]; -"2917 4385" -> "2918 4386" [label="[]", style=solid]; -"2918 4386" -> "2922 4389" [label="[]", style=solid]; -"2919 4381" -> "2920 4383" [label="[]", style=solid]; -"2920 4383" -> "2921 4384" [label="[]", style=solid]; -"2920 4383" -> "3590 4394" [label="[]", style=solid]; -"2921 4384" -> "2922 4389" [label="[]", style=solid]; -"2922 4389" -> "2923 4391" [label="[-1, 3]", style=dashed]; -"2923 4391" -> "2924 4392" [label="[-1, 1]", style=dashed]; -"2924 4392" -> "2925 4396" [label="[-1]", style=dashed]; -"2924 4392" -> "3589 4393" [label="[-1]", style=dashed]; -"2925 4396" -> "3484 6520" [label="[]", style=solid]; -"2926 4322" -> "2927 4324" [label="[]", style=dashed]; -"2927 4324" -> "2928 4325" [label="[]", style=dashed]; -"2928 4325" -> "2929 4326" [label="[]", style=solid]; -"2929 4326" -> "2930 4327" [label="[-1, -1]", style=dashed]; -"2930 4327" -> "2931 4328" [label="[-1, -1]", style=dashed]; -"2931 4328" -> "2932 4331" [label="[-1]", style=dashed]; -"2931 4328" -> "2938 4339" [label="[-1]", style=dashed]; -"2932 4331" -> "2934 4332" [label="[-1]", style=dashed]; -"2933 4330" -> "2934 4332" [label="[]", style=solid]; -"2934 4332" -> "2935 4341" [label="[]", style=solid]; -"2934 4332" -> "2943 4352" [label="[]", style=solid]; -"2935 4341" -> "2936 4342" [label="[]", style=solid]; -"2936 4342" -> "2940 4345" [label="[]", style=solid]; -"2937 4337" -> "2938 4339" [label="[]", style=solid]; -"2938 4339" -> "2939 4340" [label="[]", style=solid]; -"2938 4339" -> "3592 4350" [label="[]", style=solid]; -"2939 4340" -> "2940 4345" [label="[]", style=solid]; -"2940 4345" -> "2941 4347" [label="[-1, 3]", style=dashed]; -"2941 4347" -> "2942 4348" [label="[-1, 1]", style=dashed]; -"2942 4348" -> "2943 4352" [label="[-1]", style=dashed]; -"2942 4348" -> "3591 4349" [label="[-1]", style=dashed]; -"2943 4352" -> "3484 6520" [label="[]", style=solid]; -"2944 4278" -> "2945 4280" [label="[]", style=dashed]; -"2945 4280" -> "2946 4281" [label="[]", style=dashed]; -"2946 4281" -> "2947 4282" [label="[]", style=solid]; -"2947 4282" -> "2948 4283" [label="[-1, -1]", style=dashed]; -"2948 4283" -> "2949 4284" [label="[-1, -1]", style=dashed]; -"2949 4284" -> "2950 4287" [label="[-1]", style=dashed]; -"2949 4284" -> "2956 4295" [label="[-1]", style=dashed]; -"2950 4287" -> "2952 4288" [label="[-1]", style=dashed]; -"2951 4286" -> "2952 4288" [label="[]", style=solid]; -"2952 4288" -> "2953 4297" [label="[]", style=solid]; -"2952 4288" -> "2961 4308" [label="[]", style=solid]; -"2953 4297" -> "2954 4298" [label="[]", style=solid]; -"2954 4298" -> "2958 4301" [label="[]", style=solid]; -"2955 4293" -> "2956 4295" [label="[]", style=solid]; -"2956 4295" -> "2957 4296" [label="[]", style=solid]; -"2956 4295" -> "3594 4306" [label="[]", style=solid]; -"2957 4296" -> "2958 4301" [label="[]", style=solid]; -"2958 4301" -> "2959 4303" [label="[-1, 3]", style=dashed]; -"2959 4303" -> "2960 4304" [label="[-1, 1]", style=dashed]; -"2960 4304" -> "2961 4308" [label="[-1]", style=dashed]; -"2960 4304" -> "3593 4305" [label="[-1]", style=dashed]; -"2961 4308" -> "3484 6520" [label="[]", style=solid]; -"2962 4234" -> "2963 4236" [label="[]", 
style=dashed]; -"2963 4236" -> "2964 4237" [label="[]", style=dashed]; -"2964 4237" -> "2965 4238" [label="[]", style=solid]; -"2965 4238" -> "2966 4239" [label="[-1, -1]", style=dashed]; -"2966 4239" -> "2967 4240" [label="[-1, -1]", style=dashed]; -"2967 4240" -> "2968 4243" [label="[-1]", style=dashed]; -"2967 4240" -> "2974 4251" [label="[-1]", style=dashed]; -"2968 4243" -> "2970 4244" [label="[-1]", style=dashed]; -"2969 4242" -> "2970 4244" [label="[]", style=solid]; -"2970 4244" -> "2971 4253" [label="[]", style=solid]; -"2970 4244" -> "2979 4264" [label="[]", style=solid]; -"2971 4253" -> "2972 4254" [label="[]", style=solid]; -"2972 4254" -> "2976 4257" [label="[]", style=solid]; -"2973 4249" -> "2974 4251" [label="[]", style=solid]; -"2974 4251" -> "2975 4252" [label="[]", style=solid]; -"2974 4251" -> "3596 4262" [label="[]", style=solid]; -"2975 4252" -> "2976 4257" [label="[]", style=solid]; -"2976 4257" -> "2977 4259" [label="[-1, 3]", style=dashed]; -"2977 4259" -> "2978 4260" [label="[-1, 1]", style=dashed]; -"2978 4260" -> "2979 4264" [label="[-1]", style=dashed]; -"2978 4260" -> "3595 4261" [label="[-1]", style=dashed]; -"2979 4264" -> "3484 6520" [label="[]", style=solid]; -"2980 4190" -> "2981 4192" [label="[]", style=dashed]; -"2981 4192" -> "2982 4193" [label="[]", style=dashed]; -"2982 4193" -> "2983 4194" [label="[]", style=solid]; -"2983 4194" -> "2984 4195" [label="[-1, -1]", style=dashed]; -"2984 4195" -> "2985 4196" [label="[-1, -1]", style=dashed]; -"2985 4196" -> "2986 4199" [label="[-1]", style=dashed]; -"2985 4196" -> "2992 4207" [label="[-1]", style=dashed]; -"2986 4199" -> "2988 4200" [label="[-1]", style=dashed]; -"2987 4198" -> "2988 4200" [label="[]", style=solid]; -"2988 4200" -> "2989 4209" [label="[]", style=solid]; -"2988 4200" -> "2997 4220" [label="[]", style=solid]; -"2989 4209" -> "2990 4210" [label="[]", style=solid]; -"2990 4210" -> "2994 4213" [label="[]", style=solid]; -"2991 4205" -> "2992 4207" [label="[]", style=solid]; -"2992 4207" -> "2993 4208" [label="[]", style=solid]; -"2992 4207" -> "3598 4218" [label="[]", style=solid]; -"2993 4208" -> "2994 4213" [label="[]", style=solid]; -"2994 4213" -> "2995 4215" [label="[-1, 3]", style=dashed]; -"2995 4215" -> "2996 4216" [label="[-1, 1]", style=dashed]; -"2996 4216" -> "2997 4220" [label="[-1]", style=dashed]; -"2996 4216" -> "3597 4217" [label="[-1]", style=dashed]; -"2997 4220" -> "3484 6520" [label="[]", style=solid]; -"2998 4146" -> "2999 4148" [label="[]", style=dashed]; -"2999 4148" -> "3000 4149" [label="[]", style=dashed]; -"3000 4149" -> "3001 4150" [label="[]", style=solid]; -"3001 4150" -> "3002 4151" [label="[-1, -1]", style=dashed]; -"3002 4151" -> "3003 4152" [label="[-1, -1]", style=dashed]; -"3003 4152" -> "3004 4155" [label="[-1]", style=dashed]; -"3003 4152" -> "3010 4163" [label="[-1]", style=dashed]; -"3004 4155" -> "3006 4156" [label="[-1]", style=dashed]; -"3005 4154" -> "3006 4156" [label="[]", style=solid]; -"3006 4156" -> "3007 4165" [label="[]", style=solid]; -"3006 4156" -> "3015 4176" [label="[]", style=solid]; -"3007 4165" -> "3008 4166" [label="[]", style=solid]; -"3008 4166" -> "3012 4169" [label="[]", style=solid]; -"3009 4161" -> "3010 4163" [label="[]", style=solid]; -"3010 4163" -> "3011 4164" [label="[]", style=solid]; -"3010 4163" -> "3600 4174" [label="[]", style=solid]; -"3011 4164" -> "3012 4169" [label="[]", style=solid]; -"3012 4169" -> "3013 4171" [label="[-1, 3]", style=dashed]; -"3013 4171" -> "3014 4172" [label="[-1, 1]", style=dashed]; -"3014 
4172" -> "3015 4176" [label="[-1]", style=dashed]; -"3014 4172" -> "3599 4173" [label="[-1]", style=dashed]; -"3015 4176" -> "3484 6520" [label="[]", style=solid]; -"3016 4102" -> "3017 4104" [label="[]", style=dashed]; -"3017 4104" -> "3018 4105" [label="[]", style=dashed]; -"3018 4105" -> "3019 4106" [label="[]", style=solid]; -"3019 4106" -> "3020 4107" [label="[-1, -1]", style=dashed]; -"3020 4107" -> "3021 4108" [label="[-1, -1]", style=dashed]; -"3021 4108" -> "3022 4111" [label="[-1]", style=dashed]; -"3021 4108" -> "3028 4119" [label="[-1]", style=dashed]; -"3022 4111" -> "3024 4112" [label="[-1]", style=dashed]; -"3023 4110" -> "3024 4112" [label="[]", style=solid]; -"3024 4112" -> "3025 4121" [label="[]", style=solid]; -"3024 4112" -> "3033 4132" [label="[]", style=solid]; -"3025 4121" -> "3026 4122" [label="[]", style=solid]; -"3026 4122" -> "3030 4125" [label="[]", style=solid]; -"3027 4117" -> "3028 4119" [label="[]", style=solid]; -"3028 4119" -> "3029 4120" [label="[]", style=solid]; -"3028 4119" -> "3602 4130" [label="[]", style=solid]; -"3029 4120" -> "3030 4125" [label="[]", style=solid]; -"3030 4125" -> "3031 4127" [label="[-1, 3]", style=dashed]; -"3031 4127" -> "3032 4128" [label="[-1, 1]", style=dashed]; -"3032 4128" -> "3033 4132" [label="[-1]", style=dashed]; -"3032 4128" -> "3601 4129" [label="[-1]", style=dashed]; -"3033 4132" -> "3484 6520" [label="[]", style=solid]; -"3034 4058" -> "3035 4060" [label="[]", style=dashed]; -"3035 4060" -> "3036 4061" [label="[]", style=dashed]; -"3036 4061" -> "3037 4062" [label="[]", style=solid]; -"3037 4062" -> "3038 4063" [label="[-1, -1]", style=dashed]; -"3038 4063" -> "3039 4064" [label="[-1, -1]", style=dashed]; -"3039 4064" -> "3040 4067" [label="[-1]", style=dashed]; -"3039 4064" -> "3046 4075" [label="[-1]", style=dashed]; -"3040 4067" -> "3042 4068" [label="[-1]", style=dashed]; -"3041 4066" -> "3042 4068" [label="[]", style=solid]; -"3042 4068" -> "3043 4077" [label="[]", style=solid]; -"3042 4068" -> "3051 4088" [label="[]", style=solid]; -"3043 4077" -> "3044 4078" [label="[]", style=solid]; -"3044 4078" -> "3048 4081" [label="[]", style=solid]; -"3045 4073" -> "3046 4075" [label="[]", style=solid]; -"3046 4075" -> "3047 4076" [label="[]", style=solid]; -"3046 4075" -> "3604 4086" [label="[]", style=solid]; -"3047 4076" -> "3048 4081" [label="[]", style=solid]; -"3048 4081" -> "3049 4083" [label="[-1, 3]", style=dashed]; -"3049 4083" -> "3050 4084" [label="[-1, 1]", style=dashed]; -"3050 4084" -> "3051 4088" [label="[-1]", style=dashed]; -"3050 4084" -> "3603 4085" [label="[-1]", style=dashed]; -"3051 4088" -> "3484 6520" [label="[]", style=solid]; -"3052 4014" -> "3053 4016" [label="[]", style=dashed]; -"3053 4016" -> "3054 4017" [label="[]", style=dashed]; -"3054 4017" -> "3055 4018" [label="[]", style=solid]; -"3055 4018" -> "3056 4019" [label="[-1, -1]", style=dashed]; -"3056 4019" -> "3057 4020" [label="[-1, -1]", style=dashed]; -"3057 4020" -> "3058 4023" [label="[-1]", style=dashed]; -"3057 4020" -> "3064 4031" [label="[-1]", style=dashed]; -"3058 4023" -> "3060 4024" [label="[-1]", style=dashed]; -"3059 4022" -> "3060 4024" [label="[]", style=solid]; -"3060 4024" -> "3061 4033" [label="[]", style=solid]; -"3060 4024" -> "3069 4044" [label="[]", style=solid]; -"3061 4033" -> "3062 4034" [label="[]", style=solid]; -"3062 4034" -> "3066 4037" [label="[]", style=solid]; -"3063 4029" -> "3064 4031" [label="[]", style=solid]; -"3064 4031" -> "3065 4032" [label="[]", style=solid]; -"3064 4031" -> "3606 4042" 
[label="[]", style=solid]; -"3065 4032" -> "3066 4037" [label="[]", style=solid]; -"3066 4037" -> "3067 4039" [label="[-1, 3]", style=dashed]; -"3067 4039" -> "3068 4040" [label="[-1, 1]", style=dashed]; -"3068 4040" -> "3069 4044" [label="[-1]", style=dashed]; -"3068 4040" -> "3605 4041" [label="[-1]", style=dashed]; -"3069 4044" -> "3484 6520" [label="[]", style=solid]; -"3070 3970" -> "3071 3972" [label="[]", style=dashed]; -"3071 3972" -> "3072 3973" [label="[]", style=dashed]; -"3072 3973" -> "3073 3974" [label="[]", style=solid]; -"3073 3974" -> "3074 3975" [label="[-1, -1]", style=dashed]; -"3074 3975" -> "3075 3976" [label="[-1, -1]", style=dashed]; -"3075 3976" -> "3076 3979" [label="[-1]", style=dashed]; -"3075 3976" -> "3082 3987" [label="[-1]", style=dashed]; -"3076 3979" -> "3078 3980" [label="[-1]", style=dashed]; -"3077 3978" -> "3078 3980" [label="[]", style=solid]; -"3078 3980" -> "3079 3989" [label="[]", style=solid]; -"3078 3980" -> "3087 4000" [label="[]", style=solid]; -"3079 3989" -> "3080 3990" [label="[]", style=solid]; -"3080 3990" -> "3084 3993" [label="[]", style=solid]; -"3081 3985" -> "3082 3987" [label="[]", style=solid]; -"3082 3987" -> "3083 3988" [label="[]", style=solid]; -"3082 3987" -> "3608 3998" [label="[]", style=solid]; -"3083 3988" -> "3084 3993" [label="[]", style=solid]; -"3084 3993" -> "3085 3995" [label="[-1, 3]", style=dashed]; -"3085 3995" -> "3086 3996" [label="[-1, 1]", style=dashed]; -"3086 3996" -> "3087 4000" [label="[-1]", style=dashed]; -"3086 3996" -> "3607 3997" [label="[-1]", style=dashed]; -"3087 4000" -> "3484 6520" [label="[]", style=solid]; -"3088 3926" -> "3089 3928" [label="[]", style=dashed]; -"3089 3928" -> "3090 3929" [label="[]", style=dashed]; -"3090 3929" -> "3091 3930" [label="[]", style=solid]; -"3091 3930" -> "3092 3931" [label="[-1, -1]", style=dashed]; -"3092 3931" -> "3093 3932" [label="[-1, -1]", style=dashed]; -"3093 3932" -> "3094 3935" [label="[-1]", style=dashed]; -"3093 3932" -> "3100 3943" [label="[-1]", style=dashed]; -"3094 3935" -> "3096 3936" [label="[-1]", style=dashed]; -"3095 3934" -> "3096 3936" [label="[]", style=solid]; -"3096 3936" -> "3097 3945" [label="[]", style=solid]; -"3096 3936" -> "3105 3956" [label="[]", style=solid]; -"3097 3945" -> "3098 3946" [label="[]", style=solid]; -"3098 3946" -> "3102 3949" [label="[]", style=solid]; -"3099 3941" -> "3100 3943" [label="[]", style=solid]; -"3100 3943" -> "3101 3944" [label="[]", style=solid]; -"3100 3943" -> "3610 3954" [label="[]", style=solid]; -"3101 3944" -> "3102 3949" [label="[]", style=solid]; -"3102 3949" -> "3103 3951" [label="[-1, 3]", style=dashed]; -"3103 3951" -> "3104 3952" [label="[-1, 1]", style=dashed]; -"3104 3952" -> "3105 3956" [label="[-1]", style=dashed]; -"3104 3952" -> "3609 3953" [label="[-1]", style=dashed]; -"3105 3956" -> "3484 6520" [label="[]", style=solid]; -"3106 3882" -> "3107 3884" [label="[]", style=dashed]; -"3107 3884" -> "3108 3885" [label="[]", style=dashed]; -"3108 3885" -> "3109 3886" [label="[]", style=solid]; -"3109 3886" -> "3110 3887" [label="[-1, -1]", style=dashed]; -"3110 3887" -> "3111 3888" [label="[-1, -1]", style=dashed]; -"3111 3888" -> "3112 3891" [label="[-1]", style=dashed]; -"3111 3888" -> "3118 3899" [label="[-1]", style=dashed]; -"3112 3891" -> "3114 3892" [label="[-1]", style=dashed]; -"3113 3890" -> "3114 3892" [label="[]", style=solid]; -"3114 3892" -> "3115 3901" [label="[]", style=solid]; -"3114 3892" -> "3123 3912" [label="[]", style=solid]; -"3115 3901" -> "3116 3902" [label="[]", 
style=solid]; -"3116 3902" -> "3120 3905" [label="[]", style=solid]; -"3117 3897" -> "3118 3899" [label="[]", style=solid]; -"3118 3899" -> "3119 3900" [label="[]", style=solid]; -"3118 3899" -> "3612 3910" [label="[]", style=solid]; -"3119 3900" -> "3120 3905" [label="[]", style=solid]; -"3120 3905" -> "3121 3907" [label="[-1, 3]", style=dashed]; -"3121 3907" -> "3122 3908" [label="[-1, 1]", style=dashed]; -"3122 3908" -> "3123 3912" [label="[-1]", style=dashed]; -"3122 3908" -> "3611 3909" [label="[-1]", style=dashed]; -"3123 3912" -> "3484 6520" [label="[]", style=solid]; -"3124 3838" -> "3125 3840" [label="[]", style=dashed]; -"3125 3840" -> "3126 3841" [label="[]", style=dashed]; -"3126 3841" -> "3127 3842" [label="[]", style=solid]; -"3127 3842" -> "3128 3843" [label="[-1, -1]", style=dashed]; -"3128 3843" -> "3129 3844" [label="[-1, -1]", style=dashed]; -"3129 3844" -> "3130 3847" [label="[-1]", style=dashed]; -"3129 3844" -> "3136 3855" [label="[-1]", style=dashed]; -"3130 3847" -> "3132 3848" [label="[-1]", style=dashed]; -"3131 3846" -> "3132 3848" [label="[]", style=solid]; -"3132 3848" -> "3133 3857" [label="[]", style=solid]; -"3132 3848" -> "3141 3868" [label="[]", style=solid]; -"3133 3857" -> "3134 3858" [label="[]", style=solid]; -"3134 3858" -> "3138 3861" [label="[]", style=solid]; -"3135 3853" -> "3136 3855" [label="[]", style=solid]; -"3136 3855" -> "3137 3856" [label="[]", style=solid]; -"3136 3855" -> "3614 3866" [label="[]", style=solid]; -"3137 3856" -> "3138 3861" [label="[]", style=solid]; -"3138 3861" -> "3139 3863" [label="[-1, 3]", style=dashed]; -"3139 3863" -> "3140 3864" [label="[-1, 1]", style=dashed]; -"3140 3864" -> "3141 3868" [label="[-1]", style=dashed]; -"3140 3864" -> "3613 3865" [label="[-1]", style=dashed]; -"3141 3868" -> "3484 6520" [label="[]", style=solid]; -"3142 3794" -> "3143 3796" [label="[]", style=dashed]; -"3143 3796" -> "3144 3797" [label="[]", style=dashed]; -"3144 3797" -> "3145 3798" [label="[]", style=solid]; -"3145 3798" -> "3146 3799" [label="[-1, -1]", style=dashed]; -"3146 3799" -> "3147 3800" [label="[-1, -1]", style=dashed]; -"3147 3800" -> "3148 3803" [label="[-1]", style=dashed]; -"3147 3800" -> "3154 3811" [label="[-1]", style=dashed]; -"3148 3803" -> "3150 3804" [label="[-1]", style=dashed]; -"3149 3802" -> "3150 3804" [label="[]", style=solid]; -"3150 3804" -> "3151 3813" [label="[]", style=solid]; -"3150 3804" -> "3159 3824" [label="[]", style=solid]; -"3151 3813" -> "3152 3814" [label="[]", style=solid]; -"3152 3814" -> "3156 3817" [label="[]", style=solid]; -"3153 3809" -> "3154 3811" [label="[]", style=solid]; -"3154 3811" -> "3155 3812" [label="[]", style=solid]; -"3154 3811" -> "3616 3822" [label="[]", style=solid]; -"3155 3812" -> "3156 3817" [label="[]", style=solid]; -"3156 3817" -> "3157 3819" [label="[-1, 3]", style=dashed]; -"3157 3819" -> "3158 3820" [label="[-1, 1]", style=dashed]; -"3158 3820" -> "3159 3824" [label="[-1]", style=dashed]; -"3158 3820" -> "3615 3821" [label="[-1]", style=dashed]; -"3159 3824" -> "3484 6520" [label="[]", style=solid]; -"3160 3750" -> "3161 3752" [label="[]", style=dashed]; -"3161 3752" -> "3162 3753" [label="[]", style=dashed]; -"3162 3753" -> "3163 3754" [label="[]", style=solid]; -"3163 3754" -> "3164 3755" [label="[-1, -1]", style=dashed]; -"3164 3755" -> "3165 3756" [label="[-1, -1]", style=dashed]; -"3165 3756" -> "3166 3759" [label="[-1]", style=dashed]; -"3165 3756" -> "3172 3767" [label="[-1]", style=dashed]; -"3166 3759" -> "3168 3760" [label="[-1]", style=dashed]; 
-"3167 3758" -> "3168 3760" [label="[]", style=solid]; -"3168 3760" -> "3169 3769" [label="[]", style=solid]; -"3168 3760" -> "3177 3780" [label="[]", style=solid]; -"3169 3769" -> "3170 3770" [label="[]", style=solid]; -"3170 3770" -> "3174 3773" [label="[]", style=solid]; -"3171 3765" -> "3172 3767" [label="[]", style=solid]; -"3172 3767" -> "3173 3768" [label="[]", style=solid]; -"3172 3767" -> "3618 3778" [label="[]", style=solid]; -"3173 3768" -> "3174 3773" [label="[]", style=solid]; -"3174 3773" -> "3175 3775" [label="[-1, 3]", style=dashed]; -"3175 3775" -> "3176 3776" [label="[-1, 1]", style=dashed]; -"3176 3776" -> "3177 3780" [label="[-1]", style=dashed]; -"3176 3776" -> "3617 3777" [label="[-1]", style=dashed]; -"3177 3780" -> "3484 6520" [label="[]", style=solid]; -"3178 3706" -> "3179 3708" [label="[]", style=dashed]; -"3179 3708" -> "3180 3709" [label="[]", style=dashed]; -"3180 3709" -> "3181 3710" [label="[]", style=solid]; -"3181 3710" -> "3182 3711" [label="[-1, -1]", style=dashed]; -"3182 3711" -> "3183 3712" [label="[-1, -1]", style=dashed]; -"3183 3712" -> "3184 3715" [label="[-1]", style=dashed]; -"3183 3712" -> "3190 3723" [label="[-1]", style=dashed]; -"3184 3715" -> "3186 3716" [label="[-1]", style=dashed]; -"3185 3714" -> "3186 3716" [label="[]", style=solid]; -"3186 3716" -> "3187 3725" [label="[]", style=solid]; -"3186 3716" -> "3195 3736" [label="[]", style=solid]; -"3187 3725" -> "3188 3726" [label="[]", style=solid]; -"3188 3726" -> "3192 3729" [label="[]", style=solid]; -"3189 3721" -> "3190 3723" [label="[]", style=solid]; -"3190 3723" -> "3191 3724" [label="[]", style=solid]; -"3190 3723" -> "3620 3734" [label="[]", style=solid]; -"3191 3724" -> "3192 3729" [label="[]", style=solid]; -"3192 3729" -> "3193 3731" [label="[-1, 3]", style=dashed]; -"3193 3731" -> "3194 3732" [label="[-1, 1]", style=dashed]; -"3194 3732" -> "3195 3736" [label="[-1]", style=dashed]; -"3194 3732" -> "3619 3733" [label="[-1]", style=dashed]; -"3195 3736" -> "3484 6520" [label="[]", style=solid]; -"3196 3662" -> "3197 3664" [label="[]", style=dashed]; -"3197 3664" -> "3198 3665" [label="[]", style=dashed]; -"3198 3665" -> "3199 3666" [label="[]", style=solid]; -"3199 3666" -> "3200 3667" [label="[-1, -1]", style=dashed]; -"3200 3667" -> "3201 3668" [label="[-1, -1]", style=dashed]; -"3201 3668" -> "3202 3671" [label="[-1]", style=dashed]; -"3201 3668" -> "3208 3679" [label="[-1]", style=dashed]; -"3202 3671" -> "3204 3672" [label="[-1]", style=dashed]; -"3203 3670" -> "3204 3672" [label="[]", style=solid]; -"3204 3672" -> "3205 3681" [label="[]", style=solid]; -"3204 3672" -> "3213 3692" [label="[]", style=solid]; -"3205 3681" -> "3206 3682" [label="[]", style=solid]; -"3206 3682" -> "3210 3685" [label="[]", style=solid]; -"3207 3677" -> "3208 3679" [label="[]", style=solid]; -"3208 3679" -> "3209 3680" [label="[]", style=solid]; -"3208 3679" -> "3622 3690" [label="[]", style=solid]; -"3209 3680" -> "3210 3685" [label="[]", style=solid]; -"3210 3685" -> "3211 3687" [label="[-1, 3]", style=dashed]; -"3211 3687" -> "3212 3688" [label="[-1, 1]", style=dashed]; -"3212 3688" -> "3213 3692" [label="[-1]", style=dashed]; -"3212 3688" -> "3621 3689" [label="[-1]", style=dashed]; -"3213 3692" -> "3484 6520" [label="[]", style=solid]; -"3214 3618" -> "3215 3620" [label="[]", style=dashed]; -"3215 3620" -> "3216 3621" [label="[]", style=dashed]; -"3216 3621" -> "3217 3622" [label="[]", style=solid]; -"3217 3622" -> "3218 3623" [label="[-1, -1]", style=dashed]; -"3218 3623" -> "3219 3624" 
[label="[-1, -1]", style=dashed]; -"3219 3624" -> "3220 3627" [label="[-1]", style=dashed]; -"3219 3624" -> "3226 3635" [label="[-1]", style=dashed]; -"3220 3627" -> "3222 3628" [label="[-1]", style=dashed]; -"3221 3626" -> "3222 3628" [label="[]", style=solid]; -"3222 3628" -> "3223 3637" [label="[]", style=solid]; -"3222 3628" -> "3231 3648" [label="[]", style=solid]; -"3223 3637" -> "3224 3638" [label="[]", style=solid]; -"3224 3638" -> "3228 3641" [label="[]", style=solid]; -"3225 3633" -> "3226 3635" [label="[]", style=solid]; -"3226 3635" -> "3227 3636" [label="[]", style=solid]; -"3226 3635" -> "3624 3646" [label="[]", style=solid]; -"3227 3636" -> "3228 3641" [label="[]", style=solid]; -"3228 3641" -> "3229 3643" [label="[-1, 3]", style=dashed]; -"3229 3643" -> "3230 3644" [label="[-1, 1]", style=dashed]; -"3230 3644" -> "3231 3648" [label="[-1]", style=dashed]; -"3230 3644" -> "3623 3645" [label="[-1]", style=dashed]; -"3231 3648" -> "3484 6520" [label="[]", style=solid]; -"3232 3574" -> "3233 3576" [label="[]", style=dashed]; -"3233 3576" -> "3234 3577" [label="[]", style=dashed]; -"3234 3577" -> "3235 3578" [label="[]", style=solid]; -"3235 3578" -> "3236 3579" [label="[-1, -1]", style=dashed]; -"3236 3579" -> "3237 3580" [label="[-1, -1]", style=dashed]; -"3237 3580" -> "3238 3583" [label="[-1]", style=dashed]; -"3237 3580" -> "3244 3591" [label="[-1]", style=dashed]; -"3238 3583" -> "3240 3584" [label="[-1]", style=dashed]; -"3239 3582" -> "3240 3584" [label="[]", style=solid]; -"3240 3584" -> "3241 3593" [label="[]", style=solid]; -"3240 3584" -> "3249 3604" [label="[]", style=solid]; -"3241 3593" -> "3242 3594" [label="[]", style=solid]; -"3242 3594" -> "3246 3597" [label="[]", style=solid]; -"3243 3589" -> "3244 3591" [label="[]", style=solid]; -"3244 3591" -> "3245 3592" [label="[]", style=solid]; -"3244 3591" -> "3626 3602" [label="[]", style=solid]; -"3245 3592" -> "3246 3597" [label="[]", style=solid]; -"3246 3597" -> "3247 3599" [label="[-1, 3]", style=dashed]; -"3247 3599" -> "3248 3600" [label="[-1, 1]", style=dashed]; -"3248 3600" -> "3249 3604" [label="[-1]", style=dashed]; -"3248 3600" -> "3625 3601" [label="[-1]", style=dashed]; -"3249 3604" -> "3484 6520" [label="[]", style=solid]; -"3250 3530" -> "3251 3532" [label="[]", style=dashed]; -"3251 3532" -> "3252 3533" [label="[]", style=dashed]; -"3252 3533" -> "3253 3534" [label="[]", style=solid]; -"3253 3534" -> "3254 3535" [label="[-1, -1]", style=dashed]; -"3254 3535" -> "3255 3536" [label="[-1, -1]", style=dashed]; -"3255 3536" -> "3256 3539" [label="[-1]", style=dashed]; -"3255 3536" -> "3262 3547" [label="[-1]", style=dashed]; -"3256 3539" -> "3258 3540" [label="[-1]", style=dashed]; -"3257 3538" -> "3258 3540" [label="[]", style=solid]; -"3258 3540" -> "3259 3549" [label="[]", style=solid]; -"3258 3540" -> "3267 3560" [label="[]", style=solid]; -"3259 3549" -> "3260 3550" [label="[]", style=solid]; -"3260 3550" -> "3264 3553" [label="[]", style=solid]; -"3261 3545" -> "3262 3547" [label="[]", style=solid]; -"3262 3547" -> "3263 3548" [label="[]", style=solid]; -"3262 3547" -> "3628 3558" [label="[]", style=solid]; -"3263 3548" -> "3264 3553" [label="[]", style=solid]; -"3264 3553" -> "3265 3555" [label="[-1, 3]", style=dashed]; -"3265 3555" -> "3266 3556" [label="[-1, 1]", style=dashed]; -"3266 3556" -> "3267 3560" [label="[-1]", style=dashed]; -"3266 3556" -> "3627 3557" [label="[-1]", style=dashed]; -"3267 3560" -> "3484 6520" [label="[]", style=solid]; -"3268 3486" -> "3269 3488" [label="[]", 
style=dashed]; -"3269 3488" -> "3270 3489" [label="[]", style=dashed]; -"3270 3489" -> "3271 3490" [label="[]", style=solid]; -"3271 3490" -> "3272 3491" [label="[-1, -1]", style=dashed]; -"3272 3491" -> "3273 3492" [label="[-1, -1]", style=dashed]; -"3273 3492" -> "3274 3495" [label="[-1]", style=dashed]; -"3273 3492" -> "3280 3503" [label="[-1]", style=dashed]; -"3274 3495" -> "3276 3496" [label="[-1]", style=dashed]; -"3275 3494" -> "3276 3496" [label="[]", style=solid]; -"3276 3496" -> "3277 3505" [label="[]", style=solid]; -"3276 3496" -> "3285 3516" [label="[]", style=solid]; -"3277 3505" -> "3278 3506" [label="[]", style=solid]; -"3278 3506" -> "3282 3509" [label="[]", style=solid]; -"3279 3501" -> "3280 3503" [label="[]", style=solid]; -"3280 3503" -> "3281 3504" [label="[]", style=solid]; -"3280 3503" -> "3630 3514" [label="[]", style=solid]; -"3281 3504" -> "3282 3509" [label="[]", style=solid]; -"3282 3509" -> "3283 3511" [label="[-1, 3]", style=dashed]; -"3283 3511" -> "3284 3512" [label="[-1, 1]", style=dashed]; -"3284 3512" -> "3285 3516" [label="[-1]", style=dashed]; -"3284 3512" -> "3629 3513" [label="[-1]", style=dashed]; -"3285 3516" -> "3484 6520" [label="[]", style=solid]; -"3286 3442" -> "3287 3444" [label="[]", style=dashed]; -"3287 3444" -> "3288 3445" [label="[]", style=dashed]; -"3288 3445" -> "3289 3446" [label="[]", style=solid]; -"3289 3446" -> "3290 3447" [label="[-1, -1]", style=dashed]; -"3290 3447" -> "3291 3448" [label="[-1, -1]", style=dashed]; -"3291 3448" -> "3292 3451" [label="[-1]", style=dashed]; -"3291 3448" -> "3298 3459" [label="[-1]", style=dashed]; -"3292 3451" -> "3294 3452" [label="[-1]", style=dashed]; -"3293 3450" -> "3294 3452" [label="[]", style=solid]; -"3294 3452" -> "3295 3461" [label="[]", style=solid]; -"3294 3452" -> "3303 3472" [label="[]", style=solid]; -"3295 3461" -> "3296 3462" [label="[]", style=solid]; -"3296 3462" -> "3300 3465" [label="[]", style=solid]; -"3297 3457" -> "3298 3459" [label="[]", style=solid]; -"3298 3459" -> "3299 3460" [label="[]", style=solid]; -"3298 3459" -> "3632 3470" [label="[]", style=solid]; -"3299 3460" -> "3300 3465" [label="[]", style=solid]; -"3300 3465" -> "3301 3467" [label="[-1, 3]", style=dashed]; -"3301 3467" -> "3302 3468" [label="[-1, 1]", style=dashed]; -"3302 3468" -> "3303 3472" [label="[-1]", style=dashed]; -"3302 3468" -> "3631 3469" [label="[-1]", style=dashed]; -"3303 3472" -> "3484 6520" [label="[]", style=solid]; -"3304 3398" -> "3305 3400" [label="[]", style=dashed]; -"3305 3400" -> "3306 3401" [label="[]", style=dashed]; -"3306 3401" -> "3307 3402" [label="[]", style=solid]; -"3307 3402" -> "3308 3403" [label="[-1, -1]", style=dashed]; -"3308 3403" -> "3309 3404" [label="[-1, -1]", style=dashed]; -"3309 3404" -> "3310 3407" [label="[-1]", style=dashed]; -"3309 3404" -> "3316 3415" [label="[-1]", style=dashed]; -"3310 3407" -> "3312 3408" [label="[-1]", style=dashed]; -"3311 3406" -> "3312 3408" [label="[]", style=solid]; -"3312 3408" -> "3313 3417" [label="[]", style=solid]; -"3312 3408" -> "3321 3428" [label="[]", style=solid]; -"3313 3417" -> "3314 3418" [label="[]", style=solid]; -"3314 3418" -> "3318 3421" [label="[]", style=solid]; -"3315 3413" -> "3316 3415" [label="[]", style=solid]; -"3316 3415" -> "3317 3416" [label="[]", style=solid]; -"3316 3415" -> "3634 3426" [label="[]", style=solid]; -"3317 3416" -> "3318 3421" [label="[]", style=solid]; -"3318 3421" -> "3319 3423" [label="[-1, 3]", style=dashed]; -"3319 3423" -> "3320 3424" [label="[-1, 1]", style=dashed]; -"3320 
3424" -> "3321 3428" [label="[-1]", style=dashed]; -"3320 3424" -> "3633 3425" [label="[-1]", style=dashed]; -"3321 3428" -> "3484 6520" [label="[]", style=solid]; -"3322 3354" -> "3323 3356" [label="[]", style=dashed]; -"3323 3356" -> "3324 3357" [label="[]", style=dashed]; -"3324 3357" -> "3325 3358" [label="[]", style=solid]; -"3325 3358" -> "3326 3359" [label="[-1, -1]", style=dashed]; -"3326 3359" -> "3327 3360" [label="[-1, -1]", style=dashed]; -"3327 3360" -> "3328 3363" [label="[-1]", style=dashed]; -"3327 3360" -> "3334 3371" [label="[-1]", style=dashed]; -"3328 3363" -> "3330 3364" [label="[-1]", style=dashed]; -"3329 3362" -> "3330 3364" [label="[]", style=solid]; -"3330 3364" -> "3331 3373" [label="[]", style=solid]; -"3330 3364" -> "3339 3384" [label="[]", style=solid]; -"3331 3373" -> "3332 3374" [label="[]", style=solid]; -"3332 3374" -> "3336 3377" [label="[]", style=solid]; -"3333 3369" -> "3334 3371" [label="[]", style=solid]; -"3334 3371" -> "3335 3372" [label="[]", style=solid]; -"3334 3371" -> "3636 3382" [label="[]", style=solid]; -"3335 3372" -> "3336 3377" [label="[]", style=solid]; -"3336 3377" -> "3337 3379" [label="[-1, 3]", style=dashed]; -"3337 3379" -> "3338 3380" [label="[-1, 1]", style=dashed]; -"3338 3380" -> "3339 3384" [label="[-1]", style=dashed]; -"3338 3380" -> "3635 3381" [label="[-1]", style=dashed]; -"3339 3384" -> "3484 6520" [label="[]", style=solid]; -"3340 3310" -> "3341 3312" [label="[]", style=dashed]; -"3341 3312" -> "3342 3313" [label="[]", style=dashed]; -"3342 3313" -> "3343 3314" [label="[]", style=solid]; -"3343 3314" -> "3344 3315" [label="[-1, -1]", style=dashed]; -"3344 3315" -> "3345 3316" [label="[-1, -1]", style=dashed]; -"3345 3316" -> "3346 3319" [label="[-1]", style=dashed]; -"3345 3316" -> "3352 3327" [label="[-1]", style=dashed]; -"3346 3319" -> "3348 3320" [label="[-1]", style=dashed]; -"3347 3318" -> "3348 3320" [label="[]", style=solid]; -"3348 3320" -> "3349 3329" [label="[]", style=solid]; -"3348 3320" -> "3357 3340" [label="[]", style=solid]; -"3349 3329" -> "3350 3330" [label="[]", style=solid]; -"3350 3330" -> "3354 3333" [label="[]", style=solid]; -"3351 3325" -> "3352 3327" [label="[]", style=solid]; -"3352 3327" -> "3353 3328" [label="[]", style=solid]; -"3352 3327" -> "3638 3338" [label="[]", style=solid]; -"3353 3328" -> "3354 3333" [label="[]", style=solid]; -"3354 3333" -> "3355 3335" [label="[-1, 3]", style=dashed]; -"3355 3335" -> "3356 3336" [label="[-1, 1]", style=dashed]; -"3356 3336" -> "3357 3340" [label="[-1]", style=dashed]; -"3356 3336" -> "3637 3337" [label="[-1]", style=dashed]; -"3357 3340" -> "3484 6520" [label="[]", style=solid]; -"3358 3266" -> "3359 3268" [label="[]", style=dashed]; -"3359 3268" -> "3360 3269" [label="[]", style=dashed]; -"3360 3269" -> "3361 3270" [label="[]", style=solid]; -"3361 3270" -> "3362 3271" [label="[-1, -1]", style=dashed]; -"3362 3271" -> "3363 3272" [label="[-1, -1]", style=dashed]; -"3363 3272" -> "3364 3275" [label="[-1]", style=dashed]; -"3363 3272" -> "3370 3283" [label="[-1]", style=dashed]; -"3364 3275" -> "3366 3276" [label="[-1]", style=dashed]; -"3365 3274" -> "3366 3276" [label="[]", style=solid]; -"3366 3276" -> "3367 3285" [label="[]", style=solid]; -"3366 3276" -> "3375 3296" [label="[]", style=solid]; -"3367 3285" -> "3368 3286" [label="[]", style=solid]; -"3368 3286" -> "3372 3289" [label="[]", style=solid]; -"3369 3281" -> "3370 3283" [label="[]", style=solid]; -"3370 3283" -> "3371 3284" [label="[]", style=solid]; -"3370 3283" -> "3640 3294" 
[label="[]", style=solid]; -"3371 3284" -> "3372 3289" [label="[]", style=solid]; -"3372 3289" -> "3373 3291" [label="[-1, 3]", style=dashed]; -"3373 3291" -> "3374 3292" [label="[-1, 1]", style=dashed]; -"3374 3292" -> "3375 3296" [label="[-1]", style=dashed]; -"3374 3292" -> "3639 3293" [label="[-1]", style=dashed]; -"3375 3296" -> "3484 6520" [label="[]", style=solid]; -"3376 3222" -> "3377 3224" [label="[]", style=dashed]; -"3377 3224" -> "3378 3225" [label="[]", style=dashed]; -"3378 3225" -> "3379 3226" [label="[]", style=solid]; -"3379 3226" -> "3380 3227" [label="[-1, -1]", style=dashed]; -"3380 3227" -> "3381 3228" [label="[-1, -1]", style=dashed]; -"3381 3228" -> "3382 3231" [label="[-1]", style=dashed]; -"3381 3228" -> "3388 3239" [label="[-1]", style=dashed]; -"3382 3231" -> "3384 3232" [label="[-1]", style=dashed]; -"3383 3230" -> "3384 3232" [label="[]", style=solid]; -"3384 3232" -> "3385 3241" [label="[]", style=solid]; -"3384 3232" -> "3393 3252" [label="[]", style=solid]; -"3385 3241" -> "3386 3242" [label="[]", style=solid]; -"3386 3242" -> "3390 3245" [label="[]", style=solid]; -"3387 3237" -> "3388 3239" [label="[]", style=solid]; -"3388 3239" -> "3389 3240" [label="[]", style=solid]; -"3388 3239" -> "3642 3250" [label="[]", style=solid]; -"3389 3240" -> "3390 3245" [label="[]", style=solid]; -"3390 3245" -> "3391 3247" [label="[-1, 3]", style=dashed]; -"3391 3247" -> "3392 3248" [label="[-1, 1]", style=dashed]; -"3392 3248" -> "3393 3252" [label="[-1]", style=dashed]; -"3392 3248" -> "3641 3249" [label="[-1]", style=dashed]; -"3393 3252" -> "3484 6520" [label="[]", style=solid]; -"3394 3178" -> "3395 3180" [label="[]", style=dashed]; -"3395 3180" -> "3396 3181" [label="[]", style=dashed]; -"3396 3181" -> "3397 3182" [label="[]", style=solid]; -"3397 3182" -> "3398 3183" [label="[-1, -1]", style=dashed]; -"3398 3183" -> "3399 3184" [label="[-1, -1]", style=dashed]; -"3399 3184" -> "3400 3187" [label="[-1]", style=dashed]; -"3399 3184" -> "3406 3195" [label="[-1]", style=dashed]; -"3400 3187" -> "3402 3188" [label="[-1]", style=dashed]; -"3401 3186" -> "3402 3188" [label="[]", style=solid]; -"3402 3188" -> "3403 3197" [label="[]", style=solid]; -"3402 3188" -> "3411 3208" [label="[]", style=solid]; -"3403 3197" -> "3404 3198" [label="[]", style=solid]; -"3404 3198" -> "3408 3201" [label="[]", style=solid]; -"3405 3193" -> "3406 3195" [label="[]", style=solid]; -"3406 3195" -> "3407 3196" [label="[]", style=solid]; -"3406 3195" -> "3644 3206" [label="[]", style=solid]; -"3407 3196" -> "3408 3201" [label="[]", style=solid]; -"3408 3201" -> "3409 3203" [label="[-1, 3]", style=dashed]; -"3409 3203" -> "3410 3204" [label="[-1, 1]", style=dashed]; -"3410 3204" -> "3411 3208" [label="[-1]", style=dashed]; -"3410 3204" -> "3643 3205" [label="[-1]", style=dashed]; -"3411 3208" -> "3484 6520" [label="[]", style=solid]; -"3412 3134" -> "3413 3136" [label="[]", style=dashed]; -"3413 3136" -> "3414 3137" [label="[]", style=dashed]; -"3414 3137" -> "3415 3138" [label="[]", style=solid]; -"3415 3138" -> "3416 3139" [label="[-1, -1]", style=dashed]; -"3416 3139" -> "3417 3140" [label="[-1, -1]", style=dashed]; -"3417 3140" -> "3418 3143" [label="[-1]", style=dashed]; -"3417 3140" -> "3424 3151" [label="[-1]", style=dashed]; -"3418 3143" -> "3420 3144" [label="[-1]", style=dashed]; -"3419 3142" -> "3420 3144" [label="[]", style=solid]; -"3420 3144" -> "3421 3153" [label="[]", style=solid]; -"3420 3144" -> "3429 3164" [label="[]", style=solid]; -"3421 3153" -> "3422 3154" [label="[]", 
style=solid]; -"3422 3154" -> "3426 3157" [label="[]", style=solid]; -"3423 3149" -> "3424 3151" [label="[]", style=solid]; -"3424 3151" -> "3425 3152" [label="[]", style=solid]; -"3424 3151" -> "3646 3162" [label="[]", style=solid]; -"3425 3152" -> "3426 3157" [label="[]", style=solid]; -"3426 3157" -> "3427 3159" [label="[-1, 3]", style=dashed]; -"3427 3159" -> "3428 3160" [label="[-1, 1]", style=dashed]; -"3428 3160" -> "3429 3164" [label="[-1]", style=dashed]; -"3428 3160" -> "3645 3161" [label="[-1]", style=dashed]; -"3429 3164" -> "3484 6520" [label="[]", style=solid]; -"3430 3090" -> "3431 3092" [label="[]", style=dashed]; -"3431 3092" -> "3432 3093" [label="[]", style=dashed]; -"3432 3093" -> "3433 3094" [label="[]", style=solid]; -"3433 3094" -> "3434 3095" [label="[-1, -1]", style=dashed]; -"3434 3095" -> "3435 3096" [label="[-1, -1]", style=dashed]; -"3435 3096" -> "3436 3099" [label="[-1]", style=dashed]; -"3435 3096" -> "3442 3107" [label="[-1]", style=dashed]; -"3436 3099" -> "3438 3100" [label="[-1]", style=dashed]; -"3437 3098" -> "3438 3100" [label="[]", style=solid]; -"3438 3100" -> "3439 3109" [label="[]", style=solid]; -"3438 3100" -> "3447 3120" [label="[]", style=solid]; -"3439 3109" -> "3440 3110" [label="[]", style=solid]; -"3440 3110" -> "3444 3113" [label="[]", style=solid]; -"3441 3105" -> "3442 3107" [label="[]", style=solid]; -"3442 3107" -> "3443 3108" [label="[]", style=solid]; -"3442 3107" -> "3648 3118" [label="[]", style=solid]; -"3443 3108" -> "3444 3113" [label="[]", style=solid]; -"3444 3113" -> "3445 3115" [label="[-1, 3]", style=dashed]; -"3445 3115" -> "3446 3116" [label="[-1, 1]", style=dashed]; -"3446 3116" -> "3447 3120" [label="[-1]", style=dashed]; -"3446 3116" -> "3647 3117" [label="[-1]", style=dashed]; -"3447 3120" -> "3484 6520" [label="[]", style=solid]; -"3448 3046" -> "3449 3048" [label="[]", style=dashed]; -"3449 3048" -> "3450 3049" [label="[]", style=dashed]; -"3450 3049" -> "3451 3050" [label="[]", style=solid]; -"3451 3050" -> "3452 3051" [label="[-1, -1]", style=dashed]; -"3452 3051" -> "3453 3052" [label="[-1, -1]", style=dashed]; -"3453 3052" -> "3454 3055" [label="[-1]", style=dashed]; -"3453 3052" -> "3460 3063" [label="[-1]", style=dashed]; -"3454 3055" -> "3456 3056" [label="[-1]", style=dashed]; -"3455 3054" -> "3456 3056" [label="[]", style=solid]; -"3456 3056" -> "3457 3065" [label="[]", style=solid]; -"3456 3056" -> "3465 3076" [label="[]", style=solid]; -"3457 3065" -> "3458 3066" [label="[]", style=solid]; -"3458 3066" -> "3462 3069" [label="[]", style=solid]; -"3459 3061" -> "3460 3063" [label="[]", style=solid]; -"3460 3063" -> "3461 3064" [label="[]", style=solid]; -"3460 3063" -> "3650 3074" [label="[]", style=solid]; -"3461 3064" -> "3462 3069" [label="[]", style=solid]; -"3462 3069" -> "3463 3071" [label="[-1, 3]", style=dashed]; -"3463 3071" -> "3464 3072" [label="[-1, 1]", style=dashed]; -"3464 3072" -> "3465 3076" [label="[-1]", style=dashed]; -"3464 3072" -> "3649 3073" [label="[-1]", style=dashed]; -"3465 3076" -> "3484 6520" [label="[]", style=solid]; -"3466 3002" -> "3467 3004" [label="[]", style=dashed]; -"3467 3004" -> "3468 3005" [label="[]", style=dashed]; -"3468 3005" -> "3469 3006" [label="[]", style=solid]; -"3469 3006" -> "3470 3007" [label="[-1, -1]", style=dashed]; -"3470 3007" -> "3471 3008" [label="[-1, -1]", style=dashed]; -"3471 3008" -> "3472 3011" [label="[-1]", style=dashed]; -"3471 3008" -> "3478 3019" [label="[-1]", style=dashed]; -"3472 3011" -> "3474 3012" [label="[-1]", style=dashed]; 
-"3473 3010" -> "3474 3012" [label="[]", style=solid]; -"3474 3012" -> "3475 3021" [label="[]", style=solid]; -"3474 3012" -> "3483 3032" [label="[]", style=solid]; -"3475 3021" -> "3476 3022" [label="[]", style=solid]; -"3476 3022" -> "3480 3025" [label="[]", style=solid]; -"3477 3017" -> "3478 3019" [label="[]", style=solid]; -"3478 3019" -> "3479 3020" [label="[]", style=solid]; -"3478 3019" -> "3652 3030" [label="[]", style=solid]; -"3479 3020" -> "3480 3025" [label="[]", style=solid]; -"3480 3025" -> "3481 3027" [label="[-1, 3]", style=dashed]; -"3481 3027" -> "3482 3028" [label="[-1, 1]", style=dashed]; -"3482 3028" -> "3483 3032" [label="[-1]", style=dashed]; -"3482 3028" -> "3651 3029" [label="[-1]", style=dashed]; -"3483 3032" -> "3484 6520" [label="[]", style=solid]; -"3484 6520" -> "3485 6521" [label="[]", style=solid]; -"3484 6520" -> "3491 6528" [label="[]", style=solid]; -"3484 6520" -> "4241 6534" [label="[]", style=solid]; -"3485 6521" -> "3486 6523" [label="[-1]", style=dashed]; -"3486 6523" -> "3487 6524" [label="[-1]", style=dashed]; -"3487 6524" -> "3488 6525" [label="[-1]", style=dashed]; -"3488 6525" -> "3489 6526" [label="[]", style=dashed]; -"3489 6526" -> "3490 6527" [label="[]", style=dashed]; -"3490 6527" -> "3491 6528" [label="[1]", style=dashed]; -"3491 6528" -> "3492 6529" [label="[]", style=dashed]; -"3491 6528" -> "4226 6532" [label="[]", style=dashed]; -"3491 6528" -> "4240 6533" [label="[]", style=dashed]; -"3492 6529" -> "3654 6530" [label="[]", style=dashed]; -"3493 6505" -> "3494 6506" [label="[-1]", style=dashed]; -"3494 6506" -> "3653 6518" [label="[]", style=solid]; -"3494 6506" -> "3905 6513" [label="[]", style=solid]; -"3495 6461" -> "3496 6462" [label="[-1]", style=dashed]; -"3496 6462" -> "3653 6518" [label="[]", style=solid]; -"3496 6462" -> "3909 6469" [label="[]", style=solid]; -"3497 6417" -> "3498 6418" [label="[-1]", style=dashed]; -"3498 6418" -> "3653 6518" [label="[]", style=solid]; -"3498 6418" -> "3913 6425" [label="[]", style=solid]; -"3499 6373" -> "3500 6374" [label="[-1]", style=dashed]; -"3500 6374" -> "3653 6518" [label="[]", style=solid]; -"3500 6374" -> "3917 6381" [label="[]", style=solid]; -"3501 6329" -> "3502 6330" [label="[-1]", style=dashed]; -"3502 6330" -> "3653 6518" [label="[]", style=solid]; -"3502 6330" -> "3921 6337" [label="[]", style=solid]; -"3503 6285" -> "3504 6286" [label="[-1]", style=dashed]; -"3504 6286" -> "3653 6518" [label="[]", style=solid]; -"3504 6286" -> "3925 6293" [label="[]", style=solid]; -"3505 6241" -> "3506 6242" [label="[-1]", style=dashed]; -"3506 6242" -> "3653 6518" [label="[]", style=solid]; -"3506 6242" -> "3929 6249" [label="[]", style=solid]; -"3507 6197" -> "3508 6198" [label="[-1]", style=dashed]; -"3508 6198" -> "3653 6518" [label="[]", style=solid]; -"3508 6198" -> "3933 6205" [label="[]", style=solid]; -"3509 6153" -> "3510 6154" [label="[-1]", style=dashed]; -"3510 6154" -> "3653 6518" [label="[]", style=solid]; -"3510 6154" -> "3937 6161" [label="[]", style=solid]; -"3511 6109" -> "3512 6110" [label="[-1]", style=dashed]; -"3512 6110" -> "3653 6518" [label="[]", style=solid]; -"3512 6110" -> "3941 6117" [label="[]", style=solid]; -"3513 6065" -> "3514 6066" [label="[-1]", style=dashed]; -"3514 6066" -> "3653 6518" [label="[]", style=solid]; -"3514 6066" -> "3945 6073" [label="[]", style=solid]; -"3515 6021" -> "3516 6022" [label="[-1]", style=dashed]; -"3516 6022" -> "3653 6518" [label="[]", style=solid]; -"3516 6022" -> "3949 6029" [label="[]", style=solid]; -"3517 5977" -> 
"3518 5978" [label="[-1]", style=dashed]; -"3518 5978" -> "3653 6518" [label="[]", style=solid]; -"3518 5978" -> "3953 5985" [label="[]", style=solid]; -"3519 5933" -> "3520 5934" [label="[-1]", style=dashed]; -"3520 5934" -> "3653 6518" [label="[]", style=solid]; -"3520 5934" -> "3957 5941" [label="[]", style=solid]; -"3521 5889" -> "3522 5890" [label="[-1]", style=dashed]; -"3522 5890" -> "3653 6518" [label="[]", style=solid]; -"3522 5890" -> "3961 5897" [label="[]", style=solid]; -"3523 5845" -> "3524 5846" [label="[-1]", style=dashed]; -"3524 5846" -> "3653 6518" [label="[]", style=solid]; -"3524 5846" -> "3965 5853" [label="[]", style=solid]; -"3525 5801" -> "3526 5802" [label="[-1]", style=dashed]; -"3526 5802" -> "3653 6518" [label="[]", style=solid]; -"3526 5802" -> "3969 5809" [label="[]", style=solid]; -"3527 5757" -> "3528 5758" [label="[-1]", style=dashed]; -"3528 5758" -> "3653 6518" [label="[]", style=solid]; -"3528 5758" -> "3973 5765" [label="[]", style=solid]; -"3529 5713" -> "3530 5714" [label="[-1]", style=dashed]; -"3530 5714" -> "3653 6518" [label="[]", style=solid]; -"3530 5714" -> "3977 5721" [label="[]", style=solid]; -"3531 5669" -> "3532 5670" [label="[-1]", style=dashed]; -"3532 5670" -> "3653 6518" [label="[]", style=solid]; -"3532 5670" -> "3981 5677" [label="[]", style=solid]; -"3533 5625" -> "3534 5626" [label="[-1]", style=dashed]; -"3534 5626" -> "3653 6518" [label="[]", style=solid]; -"3534 5626" -> "3985 5633" [label="[]", style=solid]; -"3535 5581" -> "3536 5582" [label="[-1]", style=dashed]; -"3536 5582" -> "3653 6518" [label="[]", style=solid]; -"3536 5582" -> "3989 5589" [label="[]", style=solid]; -"3537 5537" -> "3538 5538" [label="[-1]", style=dashed]; -"3538 5538" -> "3653 6518" [label="[]", style=solid]; -"3538 5538" -> "3993 5545" [label="[]", style=solid]; -"3539 5493" -> "3540 5494" [label="[-1]", style=dashed]; -"3540 5494" -> "3653 6518" [label="[]", style=solid]; -"3540 5494" -> "3997 5501" [label="[]", style=solid]; -"3541 5449" -> "3542 5450" [label="[-1]", style=dashed]; -"3542 5450" -> "3653 6518" [label="[]", style=solid]; -"3542 5450" -> "4001 5457" [label="[]", style=solid]; -"3543 5405" -> "3544 5406" [label="[-1]", style=dashed]; -"3544 5406" -> "3653 6518" [label="[]", style=solid]; -"3544 5406" -> "4005 5413" [label="[]", style=solid]; -"3545 5361" -> "3546 5362" [label="[-1]", style=dashed]; -"3546 5362" -> "3653 6518" [label="[]", style=solid]; -"3546 5362" -> "4009 5369" [label="[]", style=solid]; -"3547 5317" -> "3548 5318" [label="[-1]", style=dashed]; -"3548 5318" -> "3653 6518" [label="[]", style=solid]; -"3548 5318" -> "4013 5325" [label="[]", style=solid]; -"3549 5273" -> "3550 5274" [label="[-1]", style=dashed]; -"3550 5274" -> "3653 6518" [label="[]", style=solid]; -"3550 5274" -> "4017 5281" [label="[]", style=solid]; -"3551 5229" -> "3552 5230" [label="[-1]", style=dashed]; -"3552 5230" -> "3653 6518" [label="[]", style=solid]; -"3552 5230" -> "4021 5237" [label="[]", style=solid]; -"3553 5185" -> "3554 5186" [label="[-1]", style=dashed]; -"3554 5186" -> "3653 6518" [label="[]", style=solid]; -"3554 5186" -> "4025 5193" [label="[]", style=solid]; -"3555 5141" -> "3556 5142" [label="[-1]", style=dashed]; -"3556 5142" -> "3653 6518" [label="[]", style=solid]; -"3556 5142" -> "4029 5149" [label="[]", style=solid]; -"3557 5097" -> "3558 5098" [label="[-1]", style=dashed]; -"3558 5098" -> "3653 6518" [label="[]", style=solid]; -"3558 5098" -> "4033 5105" [label="[]", style=solid]; -"3559 5053" -> "3560 5054" [label="[-1]", 
style=dashed]; -"3560 5054" -> "3653 6518" [label="[]", style=solid]; -"3560 5054" -> "4037 5061" [label="[]", style=solid]; -"3561 5009" -> "3562 5010" [label="[-1]", style=dashed]; -"3562 5010" -> "3653 6518" [label="[]", style=solid]; -"3562 5010" -> "4041 5017" [label="[]", style=solid]; -"3563 4965" -> "3564 4966" [label="[-1]", style=dashed]; -"3564 4966" -> "3653 6518" [label="[]", style=solid]; -"3564 4966" -> "4045 4973" [label="[]", style=solid]; -"3565 4921" -> "3566 4922" [label="[-1]", style=dashed]; -"3566 4922" -> "3653 6518" [label="[]", style=solid]; -"3566 4922" -> "4049 4929" [label="[]", style=solid]; -"3567 4877" -> "3568 4878" [label="[-1]", style=dashed]; -"3568 4878" -> "3653 6518" [label="[]", style=solid]; -"3568 4878" -> "4053 4885" [label="[]", style=solid]; -"3569 4833" -> "3570 4834" [label="[-1]", style=dashed]; -"3570 4834" -> "3653 6518" [label="[]", style=solid]; -"3570 4834" -> "4057 4841" [label="[]", style=solid]; -"3571 4789" -> "3572 4790" [label="[-1]", style=dashed]; -"3572 4790" -> "3653 6518" [label="[]", style=solid]; -"3572 4790" -> "4061 4797" [label="[]", style=solid]; -"3573 4745" -> "3574 4746" [label="[-1]", style=dashed]; -"3574 4746" -> "3653 6518" [label="[]", style=solid]; -"3574 4746" -> "4065 4753" [label="[]", style=solid]; -"3575 4701" -> "3576 4702" [label="[-1]", style=dashed]; -"3576 4702" -> "3653 6518" [label="[]", style=solid]; -"3576 4702" -> "4069 4709" [label="[]", style=solid]; -"3577 4657" -> "3578 4658" [label="[-1]", style=dashed]; -"3578 4658" -> "3653 6518" [label="[]", style=solid]; -"3578 4658" -> "4073 4665" [label="[]", style=solid]; -"3579 4613" -> "3580 4614" [label="[-1]", style=dashed]; -"3580 4614" -> "3653 6518" [label="[]", style=solid]; -"3580 4614" -> "4077 4621" [label="[]", style=solid]; -"3581 4569" -> "3582 4570" [label="[-1]", style=dashed]; -"3582 4570" -> "3653 6518" [label="[]", style=solid]; -"3582 4570" -> "4081 4577" [label="[]", style=solid]; -"3583 4525" -> "3584 4526" [label="[-1]", style=dashed]; -"3584 4526" -> "3653 6518" [label="[]", style=solid]; -"3584 4526" -> "4085 4533" [label="[]", style=solid]; -"3585 4481" -> "3586 4482" [label="[-1]", style=dashed]; -"3586 4482" -> "3653 6518" [label="[]", style=solid]; -"3586 4482" -> "4089 4489" [label="[]", style=solid]; -"3587 4437" -> "3588 4438" [label="[-1]", style=dashed]; -"3588 4438" -> "3653 6518" [label="[]", style=solid]; -"3588 4438" -> "4093 4445" [label="[]", style=solid]; -"3589 4393" -> "3590 4394" [label="[-1]", style=dashed]; -"3590 4394" -> "3653 6518" [label="[]", style=solid]; -"3590 4394" -> "4097 4401" [label="[]", style=solid]; -"3591 4349" -> "3592 4350" [label="[-1]", style=dashed]; -"3592 4350" -> "3653 6518" [label="[]", style=solid]; -"3592 4350" -> "4101 4357" [label="[]", style=solid]; -"3593 4305" -> "3594 4306" [label="[-1]", style=dashed]; -"3594 4306" -> "3653 6518" [label="[]", style=solid]; -"3594 4306" -> "4105 4313" [label="[]", style=solid]; -"3595 4261" -> "3596 4262" [label="[-1]", style=dashed]; -"3596 4262" -> "3653 6518" [label="[]", style=solid]; -"3596 4262" -> "4109 4269" [label="[]", style=solid]; -"3597 4217" -> "3598 4218" [label="[-1]", style=dashed]; -"3598 4218" -> "3653 6518" [label="[]", style=solid]; -"3598 4218" -> "4113 4225" [label="[]", style=solid]; -"3599 4173" -> "3600 4174" [label="[-1]", style=dashed]; -"3600 4174" -> "3653 6518" [label="[]", style=solid]; -"3600 4174" -> "4117 4181" [label="[]", style=solid]; -"3601 4129" -> "3602 4130" [label="[-1]", style=dashed]; -"3602 
4130" -> "3653 6518" [label="[]", style=solid]; -"3602 4130" -> "4121 4137" [label="[]", style=solid]; -"3603 4085" -> "3604 4086" [label="[-1]", style=dashed]; -"3604 4086" -> "3653 6518" [label="[]", style=solid]; -"3604 4086" -> "4125 4093" [label="[]", style=solid]; -"3605 4041" -> "3606 4042" [label="[-1]", style=dashed]; -"3606 4042" -> "3653 6518" [label="[]", style=solid]; -"3606 4042" -> "4129 4049" [label="[]", style=solid]; -"3607 3997" -> "3608 3998" [label="[-1]", style=dashed]; -"3608 3998" -> "3653 6518" [label="[]", style=solid]; -"3608 3998" -> "4133 4005" [label="[]", style=solid]; -"3609 3953" -> "3610 3954" [label="[-1]", style=dashed]; -"3610 3954" -> "3653 6518" [label="[]", style=solid]; -"3610 3954" -> "4137 3961" [label="[]", style=solid]; -"3611 3909" -> "3612 3910" [label="[-1]", style=dashed]; -"3612 3910" -> "3653 6518" [label="[]", style=solid]; -"3612 3910" -> "4141 3917" [label="[]", style=solid]; -"3613 3865" -> "3614 3866" [label="[-1]", style=dashed]; -"3614 3866" -> "3653 6518" [label="[]", style=solid]; -"3614 3866" -> "4145 3873" [label="[]", style=solid]; -"3615 3821" -> "3616 3822" [label="[-1]", style=dashed]; -"3616 3822" -> "3653 6518" [label="[]", style=solid]; -"3616 3822" -> "4149 3829" [label="[]", style=solid]; -"3617 3777" -> "3618 3778" [label="[-1]", style=dashed]; -"3618 3778" -> "3653 6518" [label="[]", style=solid]; -"3618 3778" -> "4153 3785" [label="[]", style=solid]; -"3619 3733" -> "3620 3734" [label="[-1]", style=dashed]; -"3620 3734" -> "3653 6518" [label="[]", style=solid]; -"3620 3734" -> "4157 3741" [label="[]", style=solid]; -"3621 3689" -> "3622 3690" [label="[-1]", style=dashed]; -"3622 3690" -> "3653 6518" [label="[]", style=solid]; -"3622 3690" -> "4161 3697" [label="[]", style=solid]; -"3623 3645" -> "3624 3646" [label="[-1]", style=dashed]; -"3624 3646" -> "3653 6518" [label="[]", style=solid]; -"3624 3646" -> "4165 3653" [label="[]", style=solid]; -"3625 3601" -> "3626 3602" [label="[-1]", style=dashed]; -"3626 3602" -> "3653 6518" [label="[]", style=solid]; -"3626 3602" -> "4169 3609" [label="[]", style=solid]; -"3627 3557" -> "3628 3558" [label="[-1]", style=dashed]; -"3628 3558" -> "3653 6518" [label="[]", style=solid]; -"3628 3558" -> "4173 3565" [label="[]", style=solid]; -"3629 3513" -> "3630 3514" [label="[-1]", style=dashed]; -"3630 3514" -> "3653 6518" [label="[]", style=solid]; -"3630 3514" -> "4177 3521" [label="[]", style=solid]; -"3631 3469" -> "3632 3470" [label="[-1]", style=dashed]; -"3632 3470" -> "3653 6518" [label="[]", style=solid]; -"3632 3470" -> "4181 3477" [label="[]", style=solid]; -"3633 3425" -> "3634 3426" [label="[-1]", style=dashed]; -"3634 3426" -> "3653 6518" [label="[]", style=solid]; -"3634 3426" -> "4185 3433" [label="[]", style=solid]; -"3635 3381" -> "3636 3382" [label="[-1]", style=dashed]; -"3636 3382" -> "3653 6518" [label="[]", style=solid]; -"3636 3382" -> "4189 3389" [label="[]", style=solid]; -"3637 3337" -> "3638 3338" [label="[-1]", style=dashed]; -"3638 3338" -> "3653 6518" [label="[]", style=solid]; -"3638 3338" -> "4193 3345" [label="[]", style=solid]; -"3639 3293" -> "3640 3294" [label="[-1]", style=dashed]; -"3640 3294" -> "3653 6518" [label="[]", style=solid]; -"3640 3294" -> "4197 3301" [label="[]", style=solid]; -"3641 3249" -> "3642 3250" [label="[-1]", style=dashed]; -"3642 3250" -> "3653 6518" [label="[]", style=solid]; -"3642 3250" -> "4201 3257" [label="[]", style=solid]; -"3643 3205" -> "3644 3206" [label="[-1]", style=dashed]; -"3644 3206" -> "3653 6518" 
[label="[]", style=solid]; -"3644 3206" -> "4205 3213" [label="[]", style=solid]; -"3645 3161" -> "3646 3162" [label="[-1]", style=dashed]; -"3646 3162" -> "3653 6518" [label="[]", style=solid]; -"3646 3162" -> "4209 3169" [label="[]", style=solid]; -"3647 3117" -> "3648 3118" [label="[-1]", style=dashed]; -"3648 3118" -> "3653 6518" [label="[]", style=solid]; -"3648 3118" -> "4213 3125" [label="[]", style=solid]; -"3649 3073" -> "3650 3074" [label="[-1]", style=dashed]; -"3650 3074" -> "3653 6518" [label="[]", style=solid]; -"3650 3074" -> "4217 3081" [label="[]", style=solid]; -"3651 3029" -> "3652 3030" [label="[-1]", style=dashed]; -"3652 3030" -> "3653 6518" [label="[]", style=solid]; -"3652 3030" -> "4221 3037" [label="[]", style=solid]; -"3653 6518" -> "3654 6530" [label="[]", style=solid]; -"3654 6530" -> "3655 QuantizeLinear_6568_4" [label="[-1, 4]", style=solid]; -"3654 6530" -> "3657 QuantizeLinear_6568_3" [label="[-1, 4]", style=solid]; -"3654 6530" -> "3659 QuantizeLinear_6568_2" [label="[-1, 4]", style=solid]; -"3654 6530" -> "3661 QuantizeLinear_6568_1" [label="[-1, 4]", style=solid]; -"3654 6530" -> "3708 6539" [label="[-1, 4]", style=solid]; -"3654 6530" -> "3712 6547" [label="[-1, 4]", style=solid]; -"3654 6530" -> "4243 nncf_model_output_0" [label="[-1, 4]", style=solid]; -"3655 QuantizeLinear_6568_4" -> "3656 DequantizeLinear_6568_4" [label="[-1, 4]", style=dashed]; -"3656 DequantizeLinear_6568_4" -> "3673 6552" [label="[-1, 4]", style=solid]; -"3657 QuantizeLinear_6568_3" -> "3658 DequantizeLinear_6568_3" [label="[-1, 4]", style=dashed]; -"3658 DequantizeLinear_6568_3" -> "3671 6559" [label="[-1, 4]", style=solid]; -"3659 QuantizeLinear_6568_2" -> "3660 DequantizeLinear_6568_2" [label="[-1, 4]", style=dashed]; -"3660 DequantizeLinear_6568_2" -> "3665 6569" [label="[-1, 4]", style=solid]; -"3661 QuantizeLinear_6568_1" -> "3662 DequantizeLinear_6568_1" [label="[-1, 4]", style=dashed]; -"3662 DequantizeLinear_6568_1" -> "3663 6576" [label="[-1, 4]", style=solid]; -"3663 6576" -> "3664 6578" [label="[-1, 4]", style=solid]; -"3664 6578" -> "3667 6579" [label="[-1]", style=solid]; -"3665 6569" -> "3666 6571" [label="[-1, 4]", style=solid]; -"3666 6571" -> "3667 6579" [label="[-1]", style=solid]; -"3667 6579" -> "3668 QuantizeLinear_6617_1" [label="[-1]", style=solid]; -"3668 QuantizeLinear_6617_1" -> "3669 DequantizeLinear_6617_1" [label="[-1]", style=dashed]; -"3669 DequantizeLinear_6617_1" -> "3670 6581" [label="[-1]", style=solid]; -"3670 6581" -> "3679 QuantizeLinear_6619_1" [label="[-1]", style=solid]; -"3671 6559" -> "3672 6561" [label="[-1, 4]", style=solid]; -"3672 6561" -> "3675 6562" [label="[-1]", style=solid]; -"3673 6552" -> "3674 6554" [label="[-1, 4]", style=solid]; -"3674 6554" -> "3675 6562" [label="[-1]", style=solid]; -"3675 6562" -> "3676 QuantizeLinear_6600_1" [label="[-1]", style=solid]; -"3676 QuantizeLinear_6600_1" -> "3677 DequantizeLinear_6600_1" [label="[-1]", style=dashed]; -"3677 DequantizeLinear_6600_1" -> "3678 6564" [label="[-1]", style=solid]; -"3678 6564" -> "3681 QuantizeLinear_6602_1" [label="[-1]", style=solid]; -"3679 QuantizeLinear_6619_1" -> "3680 DequantizeLinear_6619_1" [label="[-1]", style=dashed]; -"3680 DequantizeLinear_6619_1" -> "3683 6582" [label="[-1]", style=solid]; -"3681 QuantizeLinear_6602_1" -> "3682 DequantizeLinear_6602_1" [label="[-1]", style=dashed]; -"3682 DequantizeLinear_6602_1" -> "3683 6582" [label="[-1]", style=solid]; -"3683 6582" -> "3684 QuantizeLinear_6620_1" [label="[-1]", style=solid]; -"3684 
QuantizeLinear_6620_1" -> "3685 DequantizeLinear_6620_1" [label="[-1]", style=dashed]; -"3685 DequantizeLinear_6620_1" -> "3686 6583" [label="[-1]", style=solid]; -"3686 6583" -> "3687 6586" [label="[-1]", style=solid]; -"3687 6586" -> "3688 QuantizeLinear_6624_1" [label="[-1]", style=solid]; -"3688 QuantizeLinear_6624_1" -> "3689 DequantizeLinear_6624_1" [label="[-1]", style=dashed]; -"3689 DequantizeLinear_6624_1" -> "3690 6587" [label="[-1]", style=solid]; -"3690 6587" -> "3691 6588" [label="[-1]", style=solid]; -"3691 6588" -> "3692 6590" [label="[-1]", style=solid]; -"3692 6590" -> "3693 QuantizeLinear_6628_1" [label="[-1]", style=solid]; -"3693 QuantizeLinear_6628_1" -> "3694 DequantizeLinear_6628_1" [label="[-1]", style=dashed]; -"3694 DequantizeLinear_6628_1" -> "3695 6592" [label="[-1]", style=solid]; -"3695 6592" -> "3696 QuantizeLinear_6630_1" [label="[-1]", style=solid]; -"3696 QuantizeLinear_6630_1" -> "3697 DequantizeLinear_6630_1" [label="[-1]", style=dashed]; -"3697 DequantizeLinear_6630_1" -> "3698 6593" [label="[-1]", style=solid]; -"3698 6593" -> "3699 6594" [label="[-1]", style=solid]; -"3699 6594" -> "3700 6595" [label="[-1]", style=solid]; -"3700 6595" -> "3701 6597" [label="[-1]", style=dashed]; -"3701 6597" -> "3702 6599" [label="[-1]", style=dashed]; -"3701 6597" -> "3729 6685" [label="[-1]", style=dashed]; -"3701 6597" -> "3749 6667" [label="[-1]", style=dashed]; -"3701 6597" -> "3755 6616" [label="[-1]", style=dashed]; -"3701 6597" -> "3777 6713" [label="[-1]", style=dashed]; -"3701 6597" -> "3789 6633" [label="[-1]", style=dashed]; -"3701 6597" -> "3811 6741" [label="[-1]", style=dashed]; -"3701 6597" -> "3823 6650" [label="[-1]", style=dashed]; -"3701 6597" -> "3845 6769" [label="[-1]", style=dashed]; -"3702 6599" -> "3703 6601" [label="[-1]", style=dashed]; -"3703 6601" -> "3704 6602" [label="[-1]", style=solid]; -"3704 6602" -> "3705 6603" [label="[1, -1]", style=dashed]; -"3705 6603" -> "3706 6604" [label="[-1, 1]", style=dashed]; -"3706 6604" -> "3707 6605" [label="[-1]", style=dashed]; -"3707 6605" -> "3713 6606" [label="[-1]", style=dashed]; -"3708 6539" -> "3709 6544" [label="[-1, 4]", style=solid]; -"3709 6544" -> "3710 6545" [label="[-1, 1]", style=solid]; -"3710 6545" -> "3711 6546" [label="[2]", style=dashed]; -"3711 6546" -> "3712 6547" [label="[-1, -1]", style=solid]; -"3712 6547" -> "3713 6606" [label="[-1, -1]", style=solid]; -"3712 6547" -> "3761 6623" [label="[-1, -1]", style=solid]; -"3712 6547" -> "3795 6640" [label="[-1, -1]", style=solid]; -"3712 6547" -> "3829 6657" [label="[-1, -1]", style=solid]; -"3713 6606" -> "3714 6612" [label="[-1, -1]", style=solid]; -"3713 6606" -> "3715 6608" [label="[-1, -1]", style=solid]; -"3714 6612" -> "3718 6613" [label="[-1, 4]", style=solid]; -"3715 6608" -> "3716 6609" [label="[-1, 1]", style=solid]; -"3716 6609" -> "3717 6610" [label="[-1]", style=solid]; -"3717 6610" -> "3718 6613" [label="[-1]", style=dashed]; -"3718 6613" -> "3719 6614" [label="[-1, 256, 14, 14]", style=solid]; -"3719 6614" -> "3720 6702" [label="[-1, 256, 14, 14]", style=solid]; -"3719 6614" -> "3723 6699" [label="[-1, 256, 14, 14]", style=solid]; -"3719 6614" -> "3726 6696" [label="[-1, 256, 14, 14]", style=solid]; -"3719 6614" -> "3740 6676" [label="[-1, 256, 14, 14]", style=solid]; -"3719 6614" -> "3743 6673" [label="[-1, 256, 14, 14]", style=solid]; -"3719 6614" -> "3746 6670" [label="[-1, 256, 14, 14]", style=solid]; -"3719 6614" -> "3754 6711" [label="[-1, 256, 14, 14]", style=solid]; -"3720 6702" -> "3721 6703" [label="[4]", 
style=dashed]; -"3721 6703" -> "3722 6707" [label="[]", style=dashed]; -"3722 6707" -> "3737 6708" [label="[1]", style=dashed]; -"3723 6699" -> "3724 6700" [label="[4]", style=dashed]; -"3724 6700" -> "3725 6706" [label="[]", style=dashed]; -"3725 6706" -> "3737 6708" [label="[1]", style=dashed]; -"3726 6696" -> "3727 6697" [label="[4]", style=dashed]; -"3727 6697" -> "3728 6705" [label="[]", style=dashed]; -"3728 6705" -> "3737 6708" [label="[1]", style=dashed]; -"3729 6685" -> "3730 6687" [label="[-1]", style=dashed]; -"3730 6687" -> "3731 6688" [label="[-1]", style=solid]; -"3731 6688" -> "3732 6689" [label="[1, -1]", style=dashed]; -"3732 6689" -> "3733 6691" [label="[-1, 1]", style=dashed]; -"3733 6691" -> "3734 6693" [label="[-1, 1, 1, 1]", style=dashed]; -"3733 6691" -> "3738 6709" [label="[-1, 1, 1, 1]", style=dashed]; -"3734 6693" -> "3735 6694" [label="[4]", style=dashed]; -"3735 6694" -> "3736 6704" [label="[]", style=dashed]; -"3736 6704" -> "3737 6708" [label="[1]", style=dashed]; -"3737 6708" -> "3738 6709" [label="[4]", style=dashed]; -"3738 6709" -> "3739 6710" [label="[-1, -1, -1, -1]", style=dashed]; -"3739 6710" -> "3754 6711" [label="[-1, -1, -1, -1]", style=dashed]; -"3740 6676" -> "3741 6677" [label="[4]", style=dashed]; -"3741 6677" -> "3742 6681" [label="[]", style=dashed]; -"3742 6681" -> "3752 6682" [label="[1]", style=dashed]; -"3743 6673" -> "3744 6674" [label="[4]", style=dashed]; -"3744 6674" -> "3745 6680" [label="[]", style=dashed]; -"3745 6680" -> "3752 6682" [label="[1]", style=dashed]; -"3746 6670" -> "3747 6671" [label="[4]", style=dashed]; -"3747 6671" -> "3748 6679" [label="[]", style=dashed]; -"3748 6679" -> "3752 6682" [label="[1]", style=dashed]; -"3749 6667" -> "3750 6668" [label="[1]", style=dashed]; -"3750 6668" -> "3751 6678" [label="[]", style=dashed]; -"3751 6678" -> "3752 6682" [label="[1]", style=dashed]; -"3752 6682" -> "3753 6683" [label="[4]", style=dashed]; -"3753 6683" -> "3754 6711" [label="[-1, -1, -1, -1]", style=solid]; -"3754 6711" -> "3788 6739" [label="[-1, -1, -1, -1]", style=solid]; -"3755 6616" -> "3756 6618" [label="[-1]", style=dashed]; -"3756 6618" -> "3757 6619" [label="[-1]", style=solid]; -"3757 6619" -> "3758 6620" [label="[1, -1]", style=dashed]; -"3758 6620" -> "3759 6621" [label="[-1, 1]", style=dashed]; -"3759 6621" -> "3760 6622" [label="[-1]", style=dashed]; -"3760 6622" -> "3761 6623" [label="[-1]", style=dashed]; -"3761 6623" -> "3762 6629" [label="[-1, -1]", style=solid]; -"3761 6623" -> "3763 6625" [label="[-1, -1]", style=solid]; -"3762 6629" -> "3766 6630" [label="[-1, 4]", style=solid]; -"3763 6625" -> "3764 6626" [label="[-1, 1]", style=solid]; -"3764 6626" -> "3765 6627" [label="[-1]", style=solid]; -"3765 6627" -> "3766 6630" [label="[-1]", style=dashed]; -"3766 6630" -> "3767 6631" [label="[-1, 256, 14, 14]", style=solid]; -"3767 6631" -> "3768 6730" [label="[-1, 256, 14, 14]", style=solid]; -"3767 6631" -> "3771 6727" [label="[-1, 256, 14, 14]", style=solid]; -"3767 6631" -> "3774 6724" [label="[-1, 256, 14, 14]", style=solid]; -"3767 6631" -> "3788 6739" [label="[-1, 256, 14, 14]", style=solid]; -"3768 6730" -> "3769 6731" [label="[4]", style=dashed]; -"3769 6731" -> "3770 6735" [label="[]", style=dashed]; -"3770 6735" -> "3785 6736" [label="[1]", style=dashed]; -"3771 6727" -> "3772 6728" [label="[4]", style=dashed]; -"3772 6728" -> "3773 6734" [label="[]", style=dashed]; -"3773 6734" -> "3785 6736" [label="[1]", style=dashed]; -"3774 6724" -> "3775 6725" [label="[4]", style=dashed]; -"3775 6725" -> 
"3776 6733" [label="[]", style=dashed]; -"3776 6733" -> "3785 6736" [label="[1]", style=dashed]; -"3777 6713" -> "3778 6715" [label="[-1]", style=dashed]; -"3778 6715" -> "3779 6716" [label="[-1]", style=solid]; -"3779 6716" -> "3780 6717" [label="[1, -1]", style=dashed]; -"3780 6717" -> "3781 6719" [label="[-1, 1]", style=dashed]; -"3781 6719" -> "3782 6721" [label="[-1, 1, 1, 1]", style=dashed]; -"3781 6719" -> "3786 6737" [label="[-1, 1, 1, 1]", style=dashed]; -"3782 6721" -> "3783 6722" [label="[4]", style=dashed]; -"3783 6722" -> "3784 6732" [label="[]", style=dashed]; -"3784 6732" -> "3785 6736" [label="[1]", style=dashed]; -"3785 6736" -> "3786 6737" [label="[4]", style=dashed]; -"3786 6737" -> "3787 6738" [label="[-1, -1, -1, -1]", style=dashed]; -"3787 6738" -> "3788 6739" [label="[-1, -1, -1, -1]", style=dashed]; -"3788 6739" -> "3822 6767" [label="[-1, -1, -1, -1]", style=solid]; -"3789 6633" -> "3790 6635" [label="[-1]", style=dashed]; -"3790 6635" -> "3791 6636" [label="[-1]", style=solid]; -"3791 6636" -> "3792 6637" [label="[1, -1]", style=dashed]; -"3792 6637" -> "3793 6638" [label="[-1, 1]", style=dashed]; -"3793 6638" -> "3794 6639" [label="[-1]", style=dashed]; -"3794 6639" -> "3795 6640" [label="[-1]", style=dashed]; -"3795 6640" -> "3796 6646" [label="[-1, -1]", style=solid]; -"3795 6640" -> "3797 6642" [label="[-1, -1]", style=solid]; -"3796 6646" -> "3800 6647" [label="[-1, 4]", style=solid]; -"3797 6642" -> "3798 6643" [label="[-1, 1]", style=solid]; -"3798 6643" -> "3799 6644" [label="[-1]", style=solid]; -"3799 6644" -> "3800 6647" [label="[-1]", style=dashed]; -"3800 6647" -> "3801 6648" [label="[-1, 256, 14, 14]", style=solid]; -"3801 6648" -> "3802 6758" [label="[-1, 256, 14, 14]", style=solid]; -"3801 6648" -> "3805 6755" [label="[-1, 256, 14, 14]", style=solid]; -"3801 6648" -> "3808 6752" [label="[-1, 256, 14, 14]", style=solid]; -"3801 6648" -> "3822 6767" [label="[-1, 256, 14, 14]", style=solid]; -"3802 6758" -> "3803 6759" [label="[4]", style=dashed]; -"3803 6759" -> "3804 6763" [label="[]", style=dashed]; -"3804 6763" -> "3819 6764" [label="[1]", style=dashed]; -"3805 6755" -> "3806 6756" [label="[4]", style=dashed]; -"3806 6756" -> "3807 6762" [label="[]", style=dashed]; -"3807 6762" -> "3819 6764" [label="[1]", style=dashed]; -"3808 6752" -> "3809 6753" [label="[4]", style=dashed]; -"3809 6753" -> "3810 6761" [label="[]", style=dashed]; -"3810 6761" -> "3819 6764" [label="[1]", style=dashed]; -"3811 6741" -> "3812 6743" [label="[-1]", style=dashed]; -"3812 6743" -> "3813 6744" [label="[-1]", style=solid]; -"3813 6744" -> "3814 6745" [label="[1, -1]", style=dashed]; -"3814 6745" -> "3815 6747" [label="[-1, 1]", style=dashed]; -"3815 6747" -> "3816 6749" [label="[-1, 1, 1, 1]", style=dashed]; -"3815 6747" -> "3820 6765" [label="[-1, 1, 1, 1]", style=dashed]; -"3816 6749" -> "3817 6750" [label="[4]", style=dashed]; -"3817 6750" -> "3818 6760" [label="[]", style=dashed]; -"3818 6760" -> "3819 6764" [label="[1]", style=dashed]; -"3819 6764" -> "3820 6765" [label="[4]", style=dashed]; -"3820 6765" -> "3821 6766" [label="[-1, -1, -1, -1]", style=dashed]; -"3821 6766" -> "3822 6767" [label="[-1, -1, -1, -1]", style=dashed]; -"3822 6767" -> "3856 6795" [label="[-1, -1, -1, -1]", style=solid]; -"3823 6650" -> "3824 6652" [label="[-1]", style=dashed]; -"3824 6652" -> "3825 6653" [label="[-1]", style=solid]; -"3825 6653" -> "3826 6654" [label="[1, -1]", style=dashed]; -"3826 6654" -> "3827 6655" [label="[-1, 1]", style=dashed]; -"3827 6655" -> "3828 6656" 
[label="[-1]", style=dashed]; -"3828 6656" -> "3829 6657" [label="[-1]", style=dashed]; -"3829 6657" -> "3830 6663" [label="[-1, -1]", style=solid]; -"3829 6657" -> "3831 6659" [label="[-1, -1]", style=solid]; -"3830 6663" -> "3834 6664" [label="[-1, 4]", style=solid]; -"3831 6659" -> "3832 6660" [label="[-1, 1]", style=solid]; -"3832 6660" -> "3833 6661" [label="[-1]", style=solid]; -"3833 6661" -> "3834 6664" [label="[-1]", style=dashed]; -"3834 6664" -> "3835 6665" [label="[-1, 256, 14, 14]", style=solid]; -"3835 6665" -> "3836 6786" [label="[-1, 256, 14, 14]", style=solid]; -"3835 6665" -> "3839 6783" [label="[-1, 256, 14, 14]", style=solid]; -"3835 6665" -> "3842 6780" [label="[-1, 256, 14, 14]", style=solid]; -"3835 6665" -> "3856 6795" [label="[-1, 256, 14, 14]", style=solid]; -"3836 6786" -> "3837 6787" [label="[4]", style=dashed]; -"3837 6787" -> "3838 6791" [label="[]", style=dashed]; -"3838 6791" -> "3853 6792" [label="[1]", style=dashed]; -"3839 6783" -> "3840 6784" [label="[4]", style=dashed]; -"3840 6784" -> "3841 6790" [label="[]", style=dashed]; -"3841 6790" -> "3853 6792" [label="[1]", style=dashed]; -"3842 6780" -> "3843 6781" [label="[4]", style=dashed]; -"3843 6781" -> "3844 6789" [label="[]", style=dashed]; -"3844 6789" -> "3853 6792" [label="[1]", style=dashed]; -"3845 6769" -> "3846 6771" [label="[-1]", style=dashed]; -"3846 6771" -> "3847 6772" [label="[-1]", style=solid]; -"3847 6772" -> "3848 6773" [label="[1, -1]", style=dashed]; -"3848 6773" -> "3849 6775" [label="[-1, 1]", style=dashed]; -"3849 6775" -> "3850 6777" [label="[-1, 1, 1, 1]", style=dashed]; -"3849 6775" -> "3854 6793" [label="[-1, 1, 1, 1]", style=dashed]; -"3850 6777" -> "3851 6778" [label="[4]", style=dashed]; -"3851 6778" -> "3852 6788" [label="[]", style=dashed]; -"3852 6788" -> "3853 6792" [label="[1]", style=dashed]; -"3853 6792" -> "3854 6793" [label="[4]", style=dashed]; -"3854 6793" -> "3855 6794" [label="[-1, -1, -1, -1]", style=dashed]; -"3855 6794" -> "3856 6795" [label="[-1, -1, -1, -1]", style=dashed]; -"3856 6795" -> "3857 QuantizeLinear_6833_1" [label="[-1, -1, -1, -1]", style=solid]; -"3857 QuantizeLinear_6833_1" -> "3858 DequantizeLinear_6833_1" [label="[-1, -1, -1, -1]", style=dashed]; -"3858 DequantizeLinear_6833_1" -> "3861 6798" [label="[-1, -1, -1, -1]", style=solid]; -"3859 QuantizeLinear_6834_1" -> "3860 DequantizeLinear_6834_1" [label="[256, 256, 3, 3]", style=dashed]; -"3860 DequantizeLinear_6834_1" -> "3861 6798" [label="[256, 256, 3, 3]", style=solid]; -"3861 6798" -> "3862 6799" [label="[-1, 256, -1, -1]", style=solid]; -"3862 6799" -> "3863 QuantizeLinear_6837_1" [label="[-1, 256, -1, -1]", style=solid]; -"3863 QuantizeLinear_6837_1" -> "3864 DequantizeLinear_6837_1" [label="[-1, 256, -1, -1]", style=dashed]; -"3864 DequantizeLinear_6837_1" -> "3867 6802" [label="[-1, 256, -1, -1]", style=solid]; -"3865 QuantizeLinear_6838_1" -> "3866 DequantizeLinear_6838_1" [label="[256, 256, 3, 3]", style=dashed]; -"3866 DequantizeLinear_6838_1" -> "3867 6802" [label="[256, 256, 3, 3]", style=solid]; -"3867 6802" -> "3868 6803" [label="[-1, 256, -1, -1]", style=solid]; -"3868 6803" -> "3869 QuantizeLinear_6841_1" [label="[-1, 256, -1, -1]", style=solid]; -"3869 QuantizeLinear_6841_1" -> "3870 DequantizeLinear_6841_1" [label="[-1, 256, -1, -1]", style=dashed]; -"3870 DequantizeLinear_6841_1" -> "3873 6806" [label="[-1, 256, -1, -1]", style=solid]; -"3871 QuantizeLinear_6842_1" -> "3872 DequantizeLinear_6842_1" [label="[256, 256, 3, 3]", style=dashed]; -"3872 DequantizeLinear_6842_1" 
-> "3873 6806" [label="[256, 256, 3, 3]", style=solid]; -"3873 6806" -> "3874 6807" [label="[-1, 256, -1, -1]", style=solid]; -"3874 6807" -> "3875 QuantizeLinear_6845_1" [label="[-1, 256, -1, -1]", style=solid]; -"3875 QuantizeLinear_6845_1" -> "3876 DequantizeLinear_6845_1" [label="[-1, 256, -1, -1]", style=dashed]; -"3876 DequantizeLinear_6845_1" -> "3879 6810" [label="[-1, 256, -1, -1]", style=solid]; -"3877 QuantizeLinear_6846_1" -> "3878 DequantizeLinear_6846_1" [label="[256, 256, 3, 3]", style=dashed]; -"3878 DequantizeLinear_6846_1" -> "3879 6810" [label="[256, 256, 3, 3]", style=solid]; -"3879 6810" -> "3880 6811" [label="[-1, 256, -1, -1]", style=solid]; -"3880 6811" -> "3881 QuantizeLinear_6849_1" [label="[-1, 256, -1, -1]", style=solid]; -"3881 QuantizeLinear_6849_1" -> "3882 DequantizeLinear_6849_1" [label="[-1, 256, -1, -1]", style=dashed]; -"3882 DequantizeLinear_6849_1" -> "3885 6814" [label="[-1, 256, -1, -1]", style=solid]; -"3883 QuantizeLinear_6850_1" -> "3884 DequantizeLinear_6850_1" [label="[256, 256, 2, 2]", style=dashed]; -"3884 DequantizeLinear_6850_1" -> "3885 6814" [label="[256, 256, 2, 2]", style=solid]; -"3885 6814" -> "3886 6815" [label="[-1, 256, -1, -1]", style=solid]; -"3886 6815" -> "3887 QuantizeLinear_6853_1" [label="[-1, 256, -1, -1]", style=solid]; -"3887 QuantizeLinear_6853_1" -> "3888 DequantizeLinear_6853_1" [label="[-1, 256, -1, -1]", style=dashed]; -"3888 DequantizeLinear_6853_1" -> "3891 6818" [label="[-1, 256, -1, -1]", style=solid]; -"3889 QuantizeLinear_6854_1" -> "3890 DequantizeLinear_6854_1" [label="[81, 256, 1, 1]", style=dashed]; -"3890 DequantizeLinear_6854_1" -> "3891 6818" [label="[81, 256, 1, 1]", style=solid]; -"3891 6818" -> "3892 6819" [label="[-1, 81, -1, -1]", style=solid]; -"3891 6818" -> "3895 6822" [label="[-1, 81, -1, -1]", style=solid]; -"3892 6819" -> "3893 6844" [label="[-1, 81, -1, -1]", style=solid]; -"3892 6819" -> "4229 6835" [label="[-1, 81, -1, -1]", style=solid]; -"3892 6819" -> "4232 6832" [label="[-1, 81, -1, -1]", style=solid]; -"3892 6819" -> "4237 6842" [label="[-1, 81, -1, -1]", style=solid]; -"3893 6844" -> "3894 6845" [label="[4]", style=dashed]; -"3894 6845" -> "3904 6846" [label="[]", style=dashed]; -"3895 6822" -> "3896 6823" [label="[4]", style=dashed]; -"3896 6823" -> "3897 6824" [label="[]", style=dashed]; -"3897 6824" -> "3898 6825" [label="[1]", style=dashed]; -"3898 6825" -> "3899 6826" [label="[1]", style=dashed]; -"3899 6826" -> "3900 6827" [label="[-1]", style=dashed]; -"3900 6827" -> "3901 6828" [label="[-1]", style=solid]; -"3901 6828" -> "3902 6829" [label="[1, -1]", style=dashed]; -"3902 6829" -> "3903 6830" [label="[-1, 1]", style=dashed]; -"3903 6830" -> "3904 6846" [label="[-1]", style=dashed]; -"3904 6846" -> "4228 6847" [label="[-1]", style=dashed]; -"3905 6513" -> "3906 6515" [label="[]", style=solid]; -"3906 6515" -> "3907 6516" [label="[]", style=solid]; -"3907 6516" -> "3908 6517" [label="[-1]", style=dashed]; -"3908 6517" -> "4225 6519" [label="[]", style=dashed]; -"3909 6469" -> "3910 6471" [label="[]", style=solid]; -"3910 6471" -> "3911 6472" [label="[]", style=solid]; -"3911 6472" -> "3912 6473" [label="[-1]", style=dashed]; -"3912 6473" -> "4225 6519" [label="[]", style=dashed]; -"3913 6425" -> "3914 6427" [label="[]", style=solid]; -"3914 6427" -> "3915 6428" [label="[]", style=solid]; -"3915 6428" -> "3916 6429" [label="[-1]", style=dashed]; -"3916 6429" -> "4225 6519" [label="[]", style=dashed]; -"3917 6381" -> "3918 6383" [label="[]", style=solid]; -"3918 6383" -> "3919 
6384" [label="[]", style=solid]; -"3919 6384" -> "3920 6385" [label="[-1]", style=dashed]; -"3920 6385" -> "4225 6519" [label="[]", style=dashed]; -"3921 6337" -> "3922 6339" [label="[]", style=solid]; -"3922 6339" -> "3923 6340" [label="[]", style=solid]; -"3923 6340" -> "3924 6341" [label="[-1]", style=dashed]; -"3924 6341" -> "4225 6519" [label="[]", style=dashed]; -"3925 6293" -> "3926 6295" [label="[]", style=solid]; -"3926 6295" -> "3927 6296" [label="[]", style=solid]; -"3927 6296" -> "3928 6297" [label="[-1]", style=dashed]; -"3928 6297" -> "4225 6519" [label="[]", style=dashed]; -"3929 6249" -> "3930 6251" [label="[]", style=solid]; -"3930 6251" -> "3931 6252" [label="[]", style=solid]; -"3931 6252" -> "3932 6253" [label="[-1]", style=dashed]; -"3932 6253" -> "4225 6519" [label="[]", style=dashed]; -"3933 6205" -> "3934 6207" [label="[]", style=solid]; -"3934 6207" -> "3935 6208" [label="[]", style=solid]; -"3935 6208" -> "3936 6209" [label="[-1]", style=dashed]; -"3936 6209" -> "4225 6519" [label="[]", style=dashed]; -"3937 6161" -> "3938 6163" [label="[]", style=solid]; -"3938 6163" -> "3939 6164" [label="[]", style=solid]; -"3939 6164" -> "3940 6165" [label="[-1]", style=dashed]; -"3940 6165" -> "4225 6519" [label="[]", style=dashed]; -"3941 6117" -> "3942 6119" [label="[]", style=solid]; -"3942 6119" -> "3943 6120" [label="[]", style=solid]; -"3943 6120" -> "3944 6121" [label="[-1]", style=dashed]; -"3944 6121" -> "4225 6519" [label="[]", style=dashed]; -"3945 6073" -> "3946 6075" [label="[]", style=solid]; -"3946 6075" -> "3947 6076" [label="[]", style=solid]; -"3947 6076" -> "3948 6077" [label="[-1]", style=dashed]; -"3948 6077" -> "4225 6519" [label="[]", style=dashed]; -"3949 6029" -> "3950 6031" [label="[]", style=solid]; -"3950 6031" -> "3951 6032" [label="[]", style=solid]; -"3951 6032" -> "3952 6033" [label="[-1]", style=dashed]; -"3952 6033" -> "4225 6519" [label="[]", style=dashed]; -"3953 5985" -> "3954 5987" [label="[]", style=solid]; -"3954 5987" -> "3955 5988" [label="[]", style=solid]; -"3955 5988" -> "3956 5989" [label="[-1]", style=dashed]; -"3956 5989" -> "4225 6519" [label="[]", style=dashed]; -"3957 5941" -> "3958 5943" [label="[]", style=solid]; -"3958 5943" -> "3959 5944" [label="[]", style=solid]; -"3959 5944" -> "3960 5945" [label="[-1]", style=dashed]; -"3960 5945" -> "4225 6519" [label="[]", style=dashed]; -"3961 5897" -> "3962 5899" [label="[]", style=solid]; -"3962 5899" -> "3963 5900" [label="[]", style=solid]; -"3963 5900" -> "3964 5901" [label="[-1]", style=dashed]; -"3964 5901" -> "4225 6519" [label="[]", style=dashed]; -"3965 5853" -> "3966 5855" [label="[]", style=solid]; -"3966 5855" -> "3967 5856" [label="[]", style=solid]; -"3967 5856" -> "3968 5857" [label="[-1]", style=dashed]; -"3968 5857" -> "4225 6519" [label="[]", style=dashed]; -"3969 5809" -> "3970 5811" [label="[]", style=solid]; -"3970 5811" -> "3971 5812" [label="[]", style=solid]; -"3971 5812" -> "3972 5813" [label="[-1]", style=dashed]; -"3972 5813" -> "4225 6519" [label="[]", style=dashed]; -"3973 5765" -> "3974 5767" [label="[]", style=solid]; -"3974 5767" -> "3975 5768" [label="[]", style=solid]; -"3975 5768" -> "3976 5769" [label="[-1]", style=dashed]; -"3976 5769" -> "4225 6519" [label="[]", style=dashed]; -"3977 5721" -> "3978 5723" [label="[]", style=solid]; -"3978 5723" -> "3979 5724" [label="[]", style=solid]; -"3979 5724" -> "3980 5725" [label="[-1]", style=dashed]; -"3980 5725" -> "4225 6519" [label="[]", style=dashed]; -"3981 5677" -> "3982 5679" [label="[]", 
style=solid]; -"3982 5679" -> "3983 5680" [label="[]", style=solid]; -"3983 5680" -> "3984 5681" [label="[-1]", style=dashed]; -"3984 5681" -> "4225 6519" [label="[]", style=dashed]; -"3985 5633" -> "3986 5635" [label="[]", style=solid]; -"3986 5635" -> "3987 5636" [label="[]", style=solid]; -"3987 5636" -> "3988 5637" [label="[-1]", style=dashed]; -"3988 5637" -> "4225 6519" [label="[]", style=dashed]; -"3989 5589" -> "3990 5591" [label="[]", style=solid]; -"3990 5591" -> "3991 5592" [label="[]", style=solid]; -"3991 5592" -> "3992 5593" [label="[-1]", style=dashed]; -"3992 5593" -> "4225 6519" [label="[]", style=dashed]; -"3993 5545" -> "3994 5547" [label="[]", style=solid]; -"3994 5547" -> "3995 5548" [label="[]", style=solid]; -"3995 5548" -> "3996 5549" [label="[-1]", style=dashed]; -"3996 5549" -> "4225 6519" [label="[]", style=dashed]; -"3997 5501" -> "3998 5503" [label="[]", style=solid]; -"3998 5503" -> "3999 5504" [label="[]", style=solid]; -"3999 5504" -> "4000 5505" [label="[-1]", style=dashed]; -"4000 5505" -> "4225 6519" [label="[]", style=dashed]; -"4001 5457" -> "4002 5459" [label="[]", style=solid]; -"4002 5459" -> "4003 5460" [label="[]", style=solid]; -"4003 5460" -> "4004 5461" [label="[-1]", style=dashed]; -"4004 5461" -> "4225 6519" [label="[]", style=dashed]; -"4005 5413" -> "4006 5415" [label="[]", style=solid]; -"4006 5415" -> "4007 5416" [label="[]", style=solid]; -"4007 5416" -> "4008 5417" [label="[-1]", style=dashed]; -"4008 5417" -> "4225 6519" [label="[]", style=dashed]; -"4009 5369" -> "4010 5371" [label="[]", style=solid]; -"4010 5371" -> "4011 5372" [label="[]", style=solid]; -"4011 5372" -> "4012 5373" [label="[-1]", style=dashed]; -"4012 5373" -> "4225 6519" [label="[]", style=dashed]; -"4013 5325" -> "4014 5327" [label="[]", style=solid]; -"4014 5327" -> "4015 5328" [label="[]", style=solid]; -"4015 5328" -> "4016 5329" [label="[-1]", style=dashed]; -"4016 5329" -> "4225 6519" [label="[]", style=dashed]; -"4017 5281" -> "4018 5283" [label="[]", style=solid]; -"4018 5283" -> "4019 5284" [label="[]", style=solid]; -"4019 5284" -> "4020 5285" [label="[-1]", style=dashed]; -"4020 5285" -> "4225 6519" [label="[]", style=dashed]; -"4021 5237" -> "4022 5239" [label="[]", style=solid]; -"4022 5239" -> "4023 5240" [label="[]", style=solid]; -"4023 5240" -> "4024 5241" [label="[-1]", style=dashed]; -"4024 5241" -> "4225 6519" [label="[]", style=dashed]; -"4025 5193" -> "4026 5195" [label="[]", style=solid]; -"4026 5195" -> "4027 5196" [label="[]", style=solid]; -"4027 5196" -> "4028 5197" [label="[-1]", style=dashed]; -"4028 5197" -> "4225 6519" [label="[]", style=dashed]; -"4029 5149" -> "4030 5151" [label="[]", style=solid]; -"4030 5151" -> "4031 5152" [label="[]", style=solid]; -"4031 5152" -> "4032 5153" [label="[-1]", style=dashed]; -"4032 5153" -> "4225 6519" [label="[]", style=dashed]; -"4033 5105" -> "4034 5107" [label="[]", style=solid]; -"4034 5107" -> "4035 5108" [label="[]", style=solid]; -"4035 5108" -> "4036 5109" [label="[-1]", style=dashed]; -"4036 5109" -> "4225 6519" [label="[]", style=dashed]; -"4037 5061" -> "4038 5063" [label="[]", style=solid]; -"4038 5063" -> "4039 5064" [label="[]", style=solid]; -"4039 5064" -> "4040 5065" [label="[-1]", style=dashed]; -"4040 5065" -> "4225 6519" [label="[]", style=dashed]; -"4041 5017" -> "4042 5019" [label="[]", style=solid]; -"4042 5019" -> "4043 5020" [label="[]", style=solid]; -"4043 5020" -> "4044 5021" [label="[-1]", style=dashed]; -"4044 5021" -> "4225 6519" [label="[]", style=dashed]; -"4045 
4973" -> "4046 4975" [label="[]", style=solid]; -"4046 4975" -> "4047 4976" [label="[]", style=solid]; -"4047 4976" -> "4048 4977" [label="[-1]", style=dashed]; -"4048 4977" -> "4225 6519" [label="[]", style=dashed]; -"4049 4929" -> "4050 4931" [label="[]", style=solid]; -"4050 4931" -> "4051 4932" [label="[]", style=solid]; -"4051 4932" -> "4052 4933" [label="[-1]", style=dashed]; -"4052 4933" -> "4225 6519" [label="[]", style=dashed]; -"4053 4885" -> "4054 4887" [label="[]", style=solid]; -"4054 4887" -> "4055 4888" [label="[]", style=solid]; -"4055 4888" -> "4056 4889" [label="[-1]", style=dashed]; -"4056 4889" -> "4225 6519" [label="[]", style=dashed]; -"4057 4841" -> "4058 4843" [label="[]", style=solid]; -"4058 4843" -> "4059 4844" [label="[]", style=solid]; -"4059 4844" -> "4060 4845" [label="[-1]", style=dashed]; -"4060 4845" -> "4225 6519" [label="[]", style=dashed]; -"4061 4797" -> "4062 4799" [label="[]", style=solid]; -"4062 4799" -> "4063 4800" [label="[]", style=solid]; -"4063 4800" -> "4064 4801" [label="[-1]", style=dashed]; -"4064 4801" -> "4225 6519" [label="[]", style=dashed]; -"4065 4753" -> "4066 4755" [label="[]", style=solid]; -"4066 4755" -> "4067 4756" [label="[]", style=solid]; -"4067 4756" -> "4068 4757" [label="[-1]", style=dashed]; -"4068 4757" -> "4225 6519" [label="[]", style=dashed]; -"4069 4709" -> "4070 4711" [label="[]", style=solid]; -"4070 4711" -> "4071 4712" [label="[]", style=solid]; -"4071 4712" -> "4072 4713" [label="[-1]", style=dashed]; -"4072 4713" -> "4225 6519" [label="[]", style=dashed]; -"4073 4665" -> "4074 4667" [label="[]", style=solid]; -"4074 4667" -> "4075 4668" [label="[]", style=solid]; -"4075 4668" -> "4076 4669" [label="[-1]", style=dashed]; -"4076 4669" -> "4225 6519" [label="[]", style=dashed]; -"4077 4621" -> "4078 4623" [label="[]", style=solid]; -"4078 4623" -> "4079 4624" [label="[]", style=solid]; -"4079 4624" -> "4080 4625" [label="[-1]", style=dashed]; -"4080 4625" -> "4225 6519" [label="[]", style=dashed]; -"4081 4577" -> "4082 4579" [label="[]", style=solid]; -"4082 4579" -> "4083 4580" [label="[]", style=solid]; -"4083 4580" -> "4084 4581" [label="[-1]", style=dashed]; -"4084 4581" -> "4225 6519" [label="[]", style=dashed]; -"4085 4533" -> "4086 4535" [label="[]", style=solid]; -"4086 4535" -> "4087 4536" [label="[]", style=solid]; -"4087 4536" -> "4088 4537" [label="[-1]", style=dashed]; -"4088 4537" -> "4225 6519" [label="[]", style=dashed]; -"4089 4489" -> "4090 4491" [label="[]", style=solid]; -"4090 4491" -> "4091 4492" [label="[]", style=solid]; -"4091 4492" -> "4092 4493" [label="[-1]", style=dashed]; -"4092 4493" -> "4225 6519" [label="[]", style=dashed]; -"4093 4445" -> "4094 4447" [label="[]", style=solid]; -"4094 4447" -> "4095 4448" [label="[]", style=solid]; -"4095 4448" -> "4096 4449" [label="[-1]", style=dashed]; -"4096 4449" -> "4225 6519" [label="[]", style=dashed]; -"4097 4401" -> "4098 4403" [label="[]", style=solid]; -"4098 4403" -> "4099 4404" [label="[]", style=solid]; -"4099 4404" -> "4100 4405" [label="[-1]", style=dashed]; -"4100 4405" -> "4225 6519" [label="[]", style=dashed]; -"4101 4357" -> "4102 4359" [label="[]", style=solid]; -"4102 4359" -> "4103 4360" [label="[]", style=solid]; -"4103 4360" -> "4104 4361" [label="[-1]", style=dashed]; -"4104 4361" -> "4225 6519" [label="[]", style=dashed]; -"4105 4313" -> "4106 4315" [label="[]", style=solid]; -"4106 4315" -> "4107 4316" [label="[]", style=solid]; -"4107 4316" -> "4108 4317" [label="[-1]", style=dashed]; -"4108 4317" -> "4225 6519" 
[label="[]", style=dashed]; -"4109 4269" -> "4110 4271" [label="[]", style=solid]; -"4110 4271" -> "4111 4272" [label="[]", style=solid]; -"4111 4272" -> "4112 4273" [label="[-1]", style=dashed]; -"4112 4273" -> "4225 6519" [label="[]", style=dashed]; -"4113 4225" -> "4114 4227" [label="[]", style=solid]; -"4114 4227" -> "4115 4228" [label="[]", style=solid]; -"4115 4228" -> "4116 4229" [label="[-1]", style=dashed]; -"4116 4229" -> "4225 6519" [label="[]", style=dashed]; -"4117 4181" -> "4118 4183" [label="[]", style=solid]; -"4118 4183" -> "4119 4184" [label="[]", style=solid]; -"4119 4184" -> "4120 4185" [label="[-1]", style=dashed]; -"4120 4185" -> "4225 6519" [label="[]", style=dashed]; -"4121 4137" -> "4122 4139" [label="[]", style=solid]; -"4122 4139" -> "4123 4140" [label="[]", style=solid]; -"4123 4140" -> "4124 4141" [label="[-1]", style=dashed]; -"4124 4141" -> "4225 6519" [label="[]", style=dashed]; -"4125 4093" -> "4126 4095" [label="[]", style=solid]; -"4126 4095" -> "4127 4096" [label="[]", style=solid]; -"4127 4096" -> "4128 4097" [label="[-1]", style=dashed]; -"4128 4097" -> "4225 6519" [label="[]", style=dashed]; -"4129 4049" -> "4130 4051" [label="[]", style=solid]; -"4130 4051" -> "4131 4052" [label="[]", style=solid]; -"4131 4052" -> "4132 4053" [label="[-1]", style=dashed]; -"4132 4053" -> "4225 6519" [label="[]", style=dashed]; -"4133 4005" -> "4134 4007" [label="[]", style=solid]; -"4134 4007" -> "4135 4008" [label="[]", style=solid]; -"4135 4008" -> "4136 4009" [label="[-1]", style=dashed]; -"4136 4009" -> "4225 6519" [label="[]", style=dashed]; -"4137 3961" -> "4138 3963" [label="[]", style=solid]; -"4138 3963" -> "4139 3964" [label="[]", style=solid]; -"4139 3964" -> "4140 3965" [label="[-1]", style=dashed]; -"4140 3965" -> "4225 6519" [label="[]", style=dashed]; -"4141 3917" -> "4142 3919" [label="[]", style=solid]; -"4142 3919" -> "4143 3920" [label="[]", style=solid]; -"4143 3920" -> "4144 3921" [label="[-1]", style=dashed]; -"4144 3921" -> "4225 6519" [label="[]", style=dashed]; -"4145 3873" -> "4146 3875" [label="[]", style=solid]; -"4146 3875" -> "4147 3876" [label="[]", style=solid]; -"4147 3876" -> "4148 3877" [label="[-1]", style=dashed]; -"4148 3877" -> "4225 6519" [label="[]", style=dashed]; -"4149 3829" -> "4150 3831" [label="[]", style=solid]; -"4150 3831" -> "4151 3832" [label="[]", style=solid]; -"4151 3832" -> "4152 3833" [label="[-1]", style=dashed]; -"4152 3833" -> "4225 6519" [label="[]", style=dashed]; -"4153 3785" -> "4154 3787" [label="[]", style=solid]; -"4154 3787" -> "4155 3788" [label="[]", style=solid]; -"4155 3788" -> "4156 3789" [label="[-1]", style=dashed]; -"4156 3789" -> "4225 6519" [label="[]", style=dashed]; -"4157 3741" -> "4158 3743" [label="[]", style=solid]; -"4158 3743" -> "4159 3744" [label="[]", style=solid]; -"4159 3744" -> "4160 3745" [label="[-1]", style=dashed]; -"4160 3745" -> "4225 6519" [label="[]", style=dashed]; -"4161 3697" -> "4162 3699" [label="[]", style=solid]; -"4162 3699" -> "4163 3700" [label="[]", style=solid]; -"4163 3700" -> "4164 3701" [label="[-1]", style=dashed]; -"4164 3701" -> "4225 6519" [label="[]", style=dashed]; -"4165 3653" -> "4166 3655" [label="[]", style=solid]; -"4166 3655" -> "4167 3656" [label="[]", style=solid]; -"4167 3656" -> "4168 3657" [label="[-1]", style=dashed]; -"4168 3657" -> "4225 6519" [label="[]", style=dashed]; -"4169 3609" -> "4170 3611" [label="[]", style=solid]; -"4170 3611" -> "4171 3612" [label="[]", style=solid]; -"4171 3612" -> "4172 3613" [label="[-1]", 
style=dashed]; -"4172 3613" -> "4225 6519" [label="[]", style=dashed]; -"4173 3565" -> "4174 3567" [label="[]", style=solid]; -"4174 3567" -> "4175 3568" [label="[]", style=solid]; -"4175 3568" -> "4176 3569" [label="[-1]", style=dashed]; -"4176 3569" -> "4225 6519" [label="[]", style=dashed]; -"4177 3521" -> "4178 3523" [label="[]", style=solid]; -"4178 3523" -> "4179 3524" [label="[]", style=solid]; -"4179 3524" -> "4180 3525" [label="[-1]", style=dashed]; -"4180 3525" -> "4225 6519" [label="[]", style=dashed]; -"4181 3477" -> "4182 3479" [label="[]", style=solid]; -"4182 3479" -> "4183 3480" [label="[]", style=solid]; -"4183 3480" -> "4184 3481" [label="[-1]", style=dashed]; -"4184 3481" -> "4225 6519" [label="[]", style=dashed]; -"4185 3433" -> "4186 3435" [label="[]", style=solid]; -"4186 3435" -> "4187 3436" [label="[]", style=solid]; -"4187 3436" -> "4188 3437" [label="[-1]", style=dashed]; -"4188 3437" -> "4225 6519" [label="[]", style=dashed]; -"4189 3389" -> "4190 3391" [label="[]", style=solid]; -"4190 3391" -> "4191 3392" [label="[]", style=solid]; -"4191 3392" -> "4192 3393" [label="[-1]", style=dashed]; -"4192 3393" -> "4225 6519" [label="[]", style=dashed]; -"4193 3345" -> "4194 3347" [label="[]", style=solid]; -"4194 3347" -> "4195 3348" [label="[]", style=solid]; -"4195 3348" -> "4196 3349" [label="[-1]", style=dashed]; -"4196 3349" -> "4225 6519" [label="[]", style=dashed]; -"4197 3301" -> "4198 3303" [label="[]", style=solid]; -"4198 3303" -> "4199 3304" [label="[]", style=solid]; -"4199 3304" -> "4200 3305" [label="[-1]", style=dashed]; -"4200 3305" -> "4225 6519" [label="[]", style=dashed]; -"4201 3257" -> "4202 3259" [label="[]", style=solid]; -"4202 3259" -> "4203 3260" [label="[]", style=solid]; -"4203 3260" -> "4204 3261" [label="[-1]", style=dashed]; -"4204 3261" -> "4225 6519" [label="[]", style=dashed]; -"4205 3213" -> "4206 3215" [label="[]", style=solid]; -"4206 3215" -> "4207 3216" [label="[]", style=solid]; -"4207 3216" -> "4208 3217" [label="[-1]", style=dashed]; -"4208 3217" -> "4225 6519" [label="[]", style=dashed]; -"4209 3169" -> "4210 3171" [label="[]", style=solid]; -"4210 3171" -> "4211 3172" [label="[]", style=solid]; -"4211 3172" -> "4212 3173" [label="[-1]", style=dashed]; -"4212 3173" -> "4225 6519" [label="[]", style=dashed]; -"4213 3125" -> "4214 3127" [label="[]", style=solid]; -"4214 3127" -> "4215 3128" [label="[]", style=solid]; -"4215 3128" -> "4216 3129" [label="[-1]", style=dashed]; -"4216 3129" -> "4225 6519" [label="[]", style=dashed]; -"4217 3081" -> "4218 3083" [label="[]", style=solid]; -"4218 3083" -> "4219 3084" [label="[]", style=solid]; -"4219 3084" -> "4220 3085" [label="[-1]", style=dashed]; -"4220 3085" -> "4225 6519" [label="[]", style=dashed]; -"4221 3037" -> "4222 3039" [label="[]", style=solid]; -"4222 3039" -> "4223 3040" [label="[]", style=solid]; -"4223 3040" -> "4224 3041" [label="[-1]", style=dashed]; -"4224 3041" -> "4225 6519" [label="[]", style=dashed]; -"4225 6519" -> "4226 6532" [label="[]", style=dashed]; -"4226 6532" -> "4227 6820" [label="[-1]", style=dashed]; -"4226 6532" -> "4244 nncf_model_output_1" [label="[-1]", style=dashed]; -"4227 6820" -> "4228 6847" [label="[-1]", style=dashed]; -"4228 6847" -> "4238 6848" [label="[-1]", style=dashed]; -"4229 6835" -> "4230 6836" [label="[4]", style=dashed]; -"4230 6836" -> "4231 6840" [label="[]", style=dashed]; -"4231 6840" -> "4236 6841" [label="[1]", style=dashed]; -"4232 6832" -> "4233 6833" [label="[4]", style=dashed]; -"4233 6833" -> "4234 6839" [label="[]", 
style=dashed]; -"4234 6839" -> "4236 6841" [label="[1]", style=dashed]; -"4235 6838" -> "4236 6841" [label="[1]", style=dashed]; -"4236 6841" -> "4237 6842" [label="[3]", style=dashed]; -"4237 6842" -> "4238 6848" [label="[]", style=solid]; -"4238 6848" -> "4239 6849" [label="[]", style=solid]; -"4239 6849" -> "4246 nncf_model_output_3" [label="[-1, 1, 28, 28]", style=solid]; -"4240 6533" -> "4241 6534" [label="[]", style=dashed]; -"4241 6534" -> "4245 nncf_model_output_2" [label="[-1]", style=solid]; -"4242 nncf_model_input_0" -> "2 QuantizeLinear_image_1" [label="[3, -1, -1]", style=solid]; +"1749 2490" -> "1750 QuantizeLinear_2527_4" [label="[]", style=solid]; +"1749 2490" -> "1752 QuantizeLinear_2527_3" [label="[]", style=solid]; +"1749 2490" -> "1754 QuantizeLinear_2527_2" [label="[]", style=solid]; +"1749 2490" -> "1756 QuantizeLinear_2527_1" [label="[]", style=solid]; +"1749 2490" -> "1803 2495" [label="[]", style=solid]; +"1749 2490" -> "1807 2503" [label="[]", style=solid]; +"1749 2490" -> "2009 2775" [label="[]", style=solid]; +"1750 QuantizeLinear_2527_4" -> "1751 DequantizeLinear_2527_4" [label="[]", style=dashed]; +"1751 DequantizeLinear_2527_4" -> "1768 2508" [label="[]", style=solid]; +"1752 QuantizeLinear_2527_3" -> "1753 DequantizeLinear_2527_3" [label="[]", style=dashed]; +"1753 DequantizeLinear_2527_3" -> "1766 2515" [label="[]", style=solid]; +"1754 QuantizeLinear_2527_2" -> "1755 DequantizeLinear_2527_2" [label="[]", style=dashed]; +"1755 DequantizeLinear_2527_2" -> "1760 2525" [label="[]", style=solid]; +"1756 QuantizeLinear_2527_1" -> "1757 DequantizeLinear_2527_1" [label="[]", style=dashed]; +"1757 DequantizeLinear_2527_1" -> "1758 2532" [label="[]", style=solid]; +"1758 2532" -> "1759 2534" [label="[]", style=solid]; +"1759 2534" -> "1762 2535" [label="[]", style=solid]; +"1760 2525" -> "1761 2527" [label="[]", style=solid]; +"1761 2527" -> "1762 2535" [label="[]", style=solid]; +"1762 2535" -> "1763 QuantizeLinear_2572_1" [label="[]", style=solid]; +"1763 QuantizeLinear_2572_1" -> "1764 DequantizeLinear_2572_1" [label="[]", style=dashed]; +"1764 DequantizeLinear_2572_1" -> "1765 2537" [label="[]", style=solid]; +"1765 2537" -> "1774 QuantizeLinear_2574_1" [label="[]", style=solid]; +"1766 2515" -> "1767 2517" [label="[]", style=solid]; +"1767 2517" -> "1770 2518" [label="[]", style=solid]; +"1768 2508" -> "1769 2510" [label="[]", style=solid]; +"1769 2510" -> "1770 2518" [label="[]", style=solid]; +"1770 2518" -> "1771 QuantizeLinear_2555_1" [label="[]", style=solid]; +"1771 QuantizeLinear_2555_1" -> "1772 DequantizeLinear_2555_1" [label="[]", style=dashed]; +"1772 DequantizeLinear_2555_1" -> "1773 2520" [label="[]", style=solid]; +"1773 2520" -> "1776 QuantizeLinear_2557_1" [label="[]", style=solid]; +"1774 QuantizeLinear_2574_1" -> "1775 DequantizeLinear_2574_1" [label="[]", style=dashed]; +"1775 DequantizeLinear_2574_1" -> "1778 2538" [label="[]", style=solid]; +"1776 QuantizeLinear_2557_1" -> "1777 DequantizeLinear_2557_1" [label="[]", style=dashed]; +"1777 DequantizeLinear_2557_1" -> "1778 2538" [label="[]", style=solid]; +"1778 2538" -> "1779 QuantizeLinear_2575_1" [label="[]", style=solid]; +"1779 QuantizeLinear_2575_1" -> "1780 DequantizeLinear_2575_1" [label="[]", style=dashed]; +"1780 DequantizeLinear_2575_1" -> "1781 2539" [label="[]", style=solid]; +"1781 2539" -> "1782 2542" [label="[]", style=solid]; +"1782 2542" -> "1783 QuantizeLinear_2579_1" [label="[]", style=solid]; +"1783 QuantizeLinear_2579_1" -> "1784 DequantizeLinear_2579_1" [label="[]", 
style=dashed]; +"1784 DequantizeLinear_2579_1" -> "1785 2543" [label="[]", style=solid]; +"1785 2543" -> "1786 2544" [label="[]", style=solid]; +"1786 2544" -> "1787 2546" [label="[]", style=solid]; +"1787 2546" -> "1788 QuantizeLinear_2583_1" [label="[]", style=solid]; +"1788 QuantizeLinear_2583_1" -> "1789 DequantizeLinear_2583_1" [label="[]", style=dashed]; +"1789 DequantizeLinear_2583_1" -> "1790 2548" [label="[]", style=solid]; +"1790 2548" -> "1791 QuantizeLinear_2585_1" [label="[]", style=solid]; +"1791 QuantizeLinear_2585_1" -> "1792 DequantizeLinear_2585_1" [label="[]", style=dashed]; +"1792 DequantizeLinear_2585_1" -> "1793 2549" [label="[]", style=solid]; +"1793 2549" -> "1794 2550" [label="[]", style=solid]; +"1794 2550" -> "1795 2551" [label="[]", style=solid]; +"1795 2551" -> "1796 2553" [label="[]", style=dashed]; +"1796 2553" -> "1797 2555" [label="[]", style=dashed]; +"1796 2553" -> "1824 2641" [label="[]", style=dashed]; +"1796 2553" -> "1844 2623" [label="[]", style=dashed]; +"1796 2553" -> "1850 2572" [label="[]", style=dashed]; +"1796 2553" -> "1872 2669" [label="[]", style=dashed]; +"1796 2553" -> "1884 2589" [label="[]", style=dashed]; +"1796 2553" -> "1906 2697" [label="[]", style=dashed]; +"1796 2553" -> "1918 2606" [label="[]", style=dashed]; +"1796 2553" -> "1940 2725" [label="[]", style=dashed]; +"1797 2555" -> "1798 2557" [label="[]", style=dashed]; +"1798 2557" -> "1799 2558" [label="[]", style=solid]; +"1799 2558" -> "1800 2559" [label="[-1, -1]", style=dashed]; +"1800 2559" -> "1801 2560" [label="[-1, -1]", style=dashed]; +"1801 2560" -> "1802 2561" [label="[-1]", style=dashed]; +"1802 2561" -> "1808 2562" [label="[-1]", style=dashed]; +"1803 2495" -> "1804 2500" [label="[]", style=solid]; +"1804 2500" -> "1805 2501" [label="[]", style=solid]; +"1805 2501" -> "1806 2502" [label="[-1]", style=dashed]; +"1806 2502" -> "1807 2503" [label="[]", style=solid]; +"1807 2503" -> "1808 2562" [label="[]", style=solid]; +"1807 2503" -> "1856 2579" [label="[]", style=solid]; +"1807 2503" -> "1890 2596" [label="[]", style=solid]; +"1807 2503" -> "1924 2613" [label="[]", style=solid]; +"1808 2562" -> "1809 2568" [label="[]", style=solid]; +"1808 2562" -> "1810 2564" [label="[]", style=solid]; +"1809 2568" -> "1813 2569" [label="[]", style=solid]; +"1810 2564" -> "1811 2565" [label="[]", style=solid]; +"1811 2565" -> "1812 2566" [label="[]", style=solid]; +"1812 2566" -> "1813 2569" [label="[]", style=dashed]; +"1813 2569" -> "1814 2570" [label="[-1, 256, 7, 7]", style=solid]; +"1814 2570" -> "1815 2658" [label="[-1, 256, 7, 7]", style=solid]; +"1814 2570" -> "1818 2655" [label="[-1, 256, 7, 7]", style=solid]; +"1814 2570" -> "1821 2652" [label="[-1, 256, 7, 7]", style=solid]; +"1814 2570" -> "1835 2632" [label="[-1, 256, 7, 7]", style=solid]; +"1814 2570" -> "1838 2629" [label="[-1, 256, 7, 7]", style=solid]; +"1814 2570" -> "1841 2626" [label="[-1, 256, 7, 7]", style=solid]; +"1814 2570" -> "1849 2667" [label="[-1, 256, 7, 7]", style=solid]; +"1815 2658" -> "1816 2659" [label="[4]", style=dashed]; +"1816 2659" -> "1817 2663" [label="[]", style=dashed]; +"1817 2663" -> "1832 2664" [label="[1]", style=dashed]; +"1818 2655" -> "1819 2656" [label="[4]", style=dashed]; +"1819 2656" -> "1820 2662" [label="[]", style=dashed]; +"1820 2662" -> "1832 2664" [label="[1]", style=dashed]; +"1821 2652" -> "1822 2653" [label="[4]", style=dashed]; +"1822 2653" -> "1823 2661" [label="[]", style=dashed]; +"1823 2661" -> "1832 2664" [label="[1]", style=dashed]; +"1824 2641" -> "1825 2643" 
[label="[]", style=dashed]; +"1825 2643" -> "1826 2644" [label="[]", style=solid]; +"1826 2644" -> "1827 2645" [label="[-1, -1]", style=dashed]; +"1827 2645" -> "1828 2647" [label="[-1, -1]", style=dashed]; +"1828 2647" -> "1829 2649" [label="[-1, 1, 1, 1]", style=dashed]; +"1828 2647" -> "1833 2665" [label="[-1, 1, 1, 1]", style=dashed]; +"1829 2649" -> "1830 2650" [label="[4]", style=dashed]; +"1830 2650" -> "1831 2660" [label="[]", style=dashed]; +"1831 2660" -> "1832 2664" [label="[1]", style=dashed]; +"1832 2664" -> "1833 2665" [label="[4]", style=dashed]; +"1833 2665" -> "1834 2666" [label="[-1, -1, -1, -1]", style=dashed]; +"1834 2666" -> "1849 2667" [label="[-1, -1, -1, -1]", style=dashed]; +"1835 2632" -> "1836 2633" [label="[4]", style=dashed]; +"1836 2633" -> "1837 2637" [label="[]", style=dashed]; +"1837 2637" -> "1847 2638" [label="[1]", style=dashed]; +"1838 2629" -> "1839 2630" [label="[4]", style=dashed]; +"1839 2630" -> "1840 2636" [label="[]", style=dashed]; +"1840 2636" -> "1847 2638" [label="[1]", style=dashed]; +"1841 2626" -> "1842 2627" [label="[4]", style=dashed]; +"1842 2627" -> "1843 2635" [label="[]", style=dashed]; +"1843 2635" -> "1847 2638" [label="[1]", style=dashed]; +"1844 2623" -> "1845 2624" [label="[-1]", style=dashed]; +"1845 2624" -> "1846 2634" [label="[]", style=dashed]; +"1846 2634" -> "1847 2638" [label="[1]", style=dashed]; +"1847 2638" -> "1848 2639" [label="[4]", style=dashed]; +"1848 2639" -> "1849 2667" [label="[-1, -1, -1, -1]", style=solid]; +"1849 2667" -> "1883 2695" [label="[-1, -1, -1, -1]", style=solid]; +"1850 2572" -> "1851 2574" [label="[]", style=dashed]; +"1851 2574" -> "1852 2575" [label="[]", style=solid]; +"1852 2575" -> "1853 2576" [label="[-1, -1]", style=dashed]; +"1853 2576" -> "1854 2577" [label="[-1, -1]", style=dashed]; +"1854 2577" -> "1855 2578" [label="[-1]", style=dashed]; +"1855 2578" -> "1856 2579" [label="[-1]", style=dashed]; +"1856 2579" -> "1857 2585" [label="[]", style=solid]; +"1856 2579" -> "1858 2581" [label="[]", style=solid]; +"1857 2585" -> "1861 2586" [label="[]", style=solid]; +"1858 2581" -> "1859 2582" [label="[]", style=solid]; +"1859 2582" -> "1860 2583" [label="[]", style=solid]; +"1860 2583" -> "1861 2586" [label="[]", style=dashed]; +"1861 2586" -> "1862 2587" [label="[-1, 256, 7, 7]", style=solid]; +"1862 2587" -> "1863 2686" [label="[-1, 256, 7, 7]", style=solid]; +"1862 2587" -> "1866 2683" [label="[-1, 256, 7, 7]", style=solid]; +"1862 2587" -> "1869 2680" [label="[-1, 256, 7, 7]", style=solid]; +"1862 2587" -> "1883 2695" [label="[-1, 256, 7, 7]", style=solid]; +"1863 2686" -> "1864 2687" [label="[4]", style=dashed]; +"1864 2687" -> "1865 2691" [label="[]", style=dashed]; +"1865 2691" -> "1880 2692" [label="[1]", style=dashed]; +"1866 2683" -> "1867 2684" [label="[4]", style=dashed]; +"1867 2684" -> "1868 2690" [label="[]", style=dashed]; +"1868 2690" -> "1880 2692" [label="[1]", style=dashed]; +"1869 2680" -> "1870 2681" [label="[4]", style=dashed]; +"1870 2681" -> "1871 2689" [label="[]", style=dashed]; +"1871 2689" -> "1880 2692" [label="[1]", style=dashed]; +"1872 2669" -> "1873 2671" [label="[]", style=dashed]; +"1873 2671" -> "1874 2672" [label="[]", style=solid]; +"1874 2672" -> "1875 2673" [label="[-1, -1]", style=dashed]; +"1875 2673" -> "1876 2675" [label="[-1, -1]", style=dashed]; +"1876 2675" -> "1877 2677" [label="[-1, 1, 1, 1]", style=dashed]; +"1876 2675" -> "1881 2693" [label="[-1, 1, 1, 1]", style=dashed]; +"1877 2677" -> "1878 2678" [label="[4]", style=dashed]; +"1878 2678" 
-> "1879 2688" [label="[]", style=dashed]; +"1879 2688" -> "1880 2692" [label="[1]", style=dashed]; +"1880 2692" -> "1881 2693" [label="[4]", style=dashed]; +"1881 2693" -> "1882 2694" [label="[-1, -1, -1, -1]", style=dashed]; +"1882 2694" -> "1883 2695" [label="[-1, -1, -1, -1]", style=dashed]; +"1883 2695" -> "1917 2723" [label="[-1, -1, -1, -1]", style=solid]; +"1884 2589" -> "1885 2591" [label="[]", style=dashed]; +"1885 2591" -> "1886 2592" [label="[]", style=solid]; +"1886 2592" -> "1887 2593" [label="[-1, -1]", style=dashed]; +"1887 2593" -> "1888 2594" [label="[-1, -1]", style=dashed]; +"1888 2594" -> "1889 2595" [label="[-1]", style=dashed]; +"1889 2595" -> "1890 2596" [label="[-1]", style=dashed]; +"1890 2596" -> "1891 2602" [label="[]", style=solid]; +"1890 2596" -> "1892 2598" [label="[]", style=solid]; +"1891 2602" -> "1895 2603" [label="[]", style=solid]; +"1892 2598" -> "1893 2599" [label="[]", style=solid]; +"1893 2599" -> "1894 2600" [label="[]", style=solid]; +"1894 2600" -> "1895 2603" [label="[]", style=dashed]; +"1895 2603" -> "1896 2604" [label="[-1, 256, 7, 7]", style=solid]; +"1896 2604" -> "1897 2714" [label="[-1, 256, 7, 7]", style=solid]; +"1896 2604" -> "1900 2711" [label="[-1, 256, 7, 7]", style=solid]; +"1896 2604" -> "1903 2708" [label="[-1, 256, 7, 7]", style=solid]; +"1896 2604" -> "1917 2723" [label="[-1, 256, 7, 7]", style=solid]; +"1897 2714" -> "1898 2715" [label="[4]", style=dashed]; +"1898 2715" -> "1899 2719" [label="[]", style=dashed]; +"1899 2719" -> "1914 2720" [label="[1]", style=dashed]; +"1900 2711" -> "1901 2712" [label="[4]", style=dashed]; +"1901 2712" -> "1902 2718" [label="[]", style=dashed]; +"1902 2718" -> "1914 2720" [label="[1]", style=dashed]; +"1903 2708" -> "1904 2709" [label="[4]", style=dashed]; +"1904 2709" -> "1905 2717" [label="[]", style=dashed]; +"1905 2717" -> "1914 2720" [label="[1]", style=dashed]; +"1906 2697" -> "1907 2699" [label="[]", style=dashed]; +"1907 2699" -> "1908 2700" [label="[]", style=solid]; +"1908 2700" -> "1909 2701" [label="[-1, -1]", style=dashed]; +"1909 2701" -> "1910 2703" [label="[-1, -1]", style=dashed]; +"1910 2703" -> "1911 2705" [label="[-1, 1, 1, 1]", style=dashed]; +"1910 2703" -> "1915 2721" [label="[-1, 1, 1, 1]", style=dashed]; +"1911 2705" -> "1912 2706" [label="[4]", style=dashed]; +"1912 2706" -> "1913 2716" [label="[]", style=dashed]; +"1913 2716" -> "1914 2720" [label="[1]", style=dashed]; +"1914 2720" -> "1915 2721" [label="[4]", style=dashed]; +"1915 2721" -> "1916 2722" [label="[-1, -1, -1, -1]", style=dashed]; +"1916 2722" -> "1917 2723" [label="[-1, -1, -1, -1]", style=dashed]; +"1917 2723" -> "1951 2751" [label="[-1, -1, -1, -1]", style=solid]; +"1918 2606" -> "1919 2608" [label="[]", style=dashed]; +"1919 2608" -> "1920 2609" [label="[]", style=solid]; +"1920 2609" -> "1921 2610" [label="[-1, -1]", style=dashed]; +"1921 2610" -> "1922 2611" [label="[-1, -1]", style=dashed]; +"1922 2611" -> "1923 2612" [label="[-1]", style=dashed]; +"1923 2612" -> "1924 2613" [label="[-1]", style=dashed]; +"1924 2613" -> "1925 2619" [label="[]", style=solid]; +"1924 2613" -> "1926 2615" [label="[]", style=solid]; +"1925 2619" -> "1929 2620" [label="[]", style=solid]; +"1926 2615" -> "1927 2616" [label="[]", style=solid]; +"1927 2616" -> "1928 2617" [label="[]", style=solid]; +"1928 2617" -> "1929 2620" [label="[]", style=dashed]; +"1929 2620" -> "1930 2621" [label="[-1, 256, 7, 7]", style=solid]; +"1930 2621" -> "1931 2742" [label="[-1, 256, 7, 7]", style=solid]; +"1930 2621" -> "1934 2739" 
[label="[-1, 256, 7, 7]", style=solid]; +"1930 2621" -> "1937 2736" [label="[-1, 256, 7, 7]", style=solid]; +"1930 2621" -> "1951 2751" [label="[-1, 256, 7, 7]", style=solid]; +"1931 2742" -> "1932 2743" [label="[4]", style=dashed]; +"1932 2743" -> "1933 2747" [label="[]", style=dashed]; +"1933 2747" -> "1948 2748" [label="[1]", style=dashed]; +"1934 2739" -> "1935 2740" [label="[4]", style=dashed]; +"1935 2740" -> "1936 2746" [label="[]", style=dashed]; +"1936 2746" -> "1948 2748" [label="[1]", style=dashed]; +"1937 2736" -> "1938 2737" [label="[4]", style=dashed]; +"1938 2737" -> "1939 2745" [label="[]", style=dashed]; +"1939 2745" -> "1948 2748" [label="[1]", style=dashed]; +"1940 2725" -> "1941 2727" [label="[]", style=dashed]; +"1941 2727" -> "1942 2728" [label="[]", style=solid]; +"1942 2728" -> "1943 2729" [label="[-1, -1]", style=dashed]; +"1943 2729" -> "1944 2731" [label="[-1, -1]", style=dashed]; +"1944 2731" -> "1945 2733" [label="[-1, 1, 1, 1]", style=dashed]; +"1944 2731" -> "1949 2749" [label="[-1, 1, 1, 1]", style=dashed]; +"1945 2733" -> "1946 2734" [label="[4]", style=dashed]; +"1946 2734" -> "1947 2744" [label="[]", style=dashed]; +"1947 2744" -> "1948 2748" [label="[1]", style=dashed]; +"1948 2748" -> "1949 2749" [label="[4]", style=dashed]; +"1949 2749" -> "1950 2750" [label="[-1, -1, -1, -1]", style=dashed]; +"1950 2750" -> "1951 2751" [label="[-1, -1, -1, -1]", style=dashed]; +"1951 2751" -> "1953 QuantizeLinear_2788_1" [label="[-1, -1, -1, -1]", style=solid]; +"1952 2757" -> "1958 2758" [label="[1]", style=dashed]; +"1953 QuantizeLinear_2788_1" -> "1954 DequantizeLinear_2788_1" [label="[-1, -1, -1, -1]", style=dashed]; +"1954 DequantizeLinear_2788_1" -> "1955 2753" [label="[-1, -1, -1, -1]", style=solid]; +"1954 DequantizeLinear_2788_1" -> "1959 2759" [label="[-1, -1, -1, -1]", style=solid]; +"1955 2753" -> "1956 2754" [label="[4]", style=dashed]; +"1956 2754" -> "1957 2756" [label="[]", style=dashed]; +"1957 2756" -> "1958 2758" [label="[1]", style=dashed]; +"1958 2758" -> "1959 2759" [label="[2]", style=dashed]; +"1959 2759" -> "1962 2762_MatMul" [label="[]", style=solid]; +"1960 QuantizeLinear_2797_1" -> "1961 DequantizeLinear_2797_1" [label="[12544, 1024]", style=dashed]; +"1961 DequantizeLinear_2797_1" -> "1962 2762_MatMul" [label="[12544, 1024]", style=solid]; +"1962 2762_MatMul" -> "1963 2762_Add" [label="[]", style=solid]; +"1963 2762_Add" -> "1964 2763" [label="[]", style=solid]; +"1964 2763" -> "1965 QuantizeLinear_2800_1" [label="[]", style=solid]; +"1965 QuantizeLinear_2800_1" -> "1966 DequantizeLinear_2800_1" [label="[]", style=dashed]; +"1966 DequantizeLinear_2800_1" -> "1969 2766_MatMul" [label="[]", style=solid]; +"1967 QuantizeLinear_2801_1" -> "1968 DequantizeLinear_2801_1" [label="[1024, 1024]", style=dashed]; +"1968 DequantizeLinear_2801_1" -> "1969 2766_MatMul" [label="[1024, 1024]", style=solid]; +"1969 2766_MatMul" -> "1970 2766_Add" [label="[]", style=solid]; +"1970 2766_Add" -> "1971 2767" [label="[]", style=solid]; +"1971 2767" -> "1972 QuantizeLinear_2804_1" [label="[]", style=solid]; +"1972 QuantizeLinear_2804_1" -> "1973 DequantizeLinear_2804_1" [label="[]", style=dashed]; +"1973 DequantizeLinear_2804_1" -> "1976 2770_MatMul" [label="[]", style=solid]; +"1973 DequantizeLinear_2804_1" -> "2005 2773_MatMul" [label="[]", style=solid]; +"1974 QuantizeLinear_2805_1" -> "1975 DequantizeLinear_2805_1" [label="[1024, 81]", style=dashed]; +"1975 DequantizeLinear_2805_1" -> "1976 2770_MatMul" [label="[1024, 81]", style=solid]; +"1976 2770_MatMul" 
-> "1977 2770_Add" [label="[]", style=solid]; +"1977 2770_Add" -> "1978 2774" [label="[]", style=solid]; +"1978 2774" -> "1979 2950" [label="[]", style=solid]; +"1978 2774" -> "1984 2955" [label="[]", style=solid]; +"1979 2950" -> "1980 2951" [label="[-1]", style=dashed]; +"1980 2951" -> "1981 2992" [label="[]", style=dashed]; +"1980 2951" -> "1999 2984" [label="[]", style=dashed]; +"1981 2992" -> "1983 2993" [label="[1]", style=dashed]; +"1982 2991" -> "1983 2993" [label="[1]", style=dashed]; +"1983 2993" -> "1985 2994" [label="[2]", style=dashed]; +"1984 2955" -> "1985 2994" [label="[-1]", style=solid]; +"1985 2994" -> "1986 2996" [label="[]", style=solid]; +"1985 2994" -> "1995 6486" [label="[]", style=solid]; +"1985 2994" -> "2107 6442" [label="[]", style=solid]; +"1985 2994" -> "2125 6398" [label="[]", style=solid]; +"1985 2994" -> "2143 6354" [label="[]", style=solid]; +"1985 2994" -> "2161 6310" [label="[]", style=solid]; +"1985 2994" -> "2179 6266" [label="[]", style=solid]; +"1985 2994" -> "2197 6222" [label="[]", style=solid]; +"1985 2994" -> "2215 6178" [label="[]", style=solid]; +"1985 2994" -> "2233 6134" [label="[]", style=solid]; +"1985 2994" -> "2251 6090" [label="[]", style=solid]; +"1985 2994" -> "2269 6046" [label="[]", style=solid]; +"1985 2994" -> "2287 6002" [label="[]", style=solid]; +"1985 2994" -> "2305 5958" [label="[]", style=solid]; +"1985 2994" -> "2323 5914" [label="[]", style=solid]; +"1985 2994" -> "2341 5870" [label="[]", style=solid]; +"1985 2994" -> "2359 5826" [label="[]", style=solid]; +"1985 2994" -> "2377 5782" [label="[]", style=solid]; +"1985 2994" -> "2395 5738" [label="[]", style=solid]; +"1985 2994" -> "2413 5694" [label="[]", style=solid]; +"1985 2994" -> "2431 5650" [label="[]", style=solid]; +"1985 2994" -> "2449 5606" [label="[]", style=solid]; +"1985 2994" -> "2467 5562" [label="[]", style=solid]; +"1985 2994" -> "2485 5518" [label="[]", style=solid]; +"1985 2994" -> "2503 5474" [label="[]", style=solid]; +"1985 2994" -> "2521 5430" [label="[]", style=solid]; +"1985 2994" -> "2539 5386" [label="[]", style=solid]; +"1985 2994" -> "2557 5342" [label="[]", style=solid]; +"1985 2994" -> "2575 5298" [label="[]", style=solid]; +"1985 2994" -> "2593 5254" [label="[]", style=solid]; +"1985 2994" -> "2611 5210" [label="[]", style=solid]; +"1985 2994" -> "2629 5166" [label="[]", style=solid]; +"1985 2994" -> "2647 5122" [label="[]", style=solid]; +"1985 2994" -> "2665 5078" [label="[]", style=solid]; +"1985 2994" -> "2683 5034" [label="[]", style=solid]; +"1985 2994" -> "2701 4990" [label="[]", style=solid]; +"1985 2994" -> "2719 4946" [label="[]", style=solid]; +"1985 2994" -> "2737 4902" [label="[]", style=solid]; +"1985 2994" -> "2755 4858" [label="[]", style=solid]; +"1985 2994" -> "2773 4814" [label="[]", style=solid]; +"1985 2994" -> "2791 4770" [label="[]", style=solid]; +"1985 2994" -> "2809 4726" [label="[]", style=solid]; +"1985 2994" -> "2827 4682" [label="[]", style=solid]; +"1985 2994" -> "2845 4638" [label="[]", style=solid]; +"1985 2994" -> "2863 4594" [label="[]", style=solid]; +"1985 2994" -> "2881 4550" [label="[]", style=solid]; +"1985 2994" -> "2899 4506" [label="[]", style=solid]; +"1985 2994" -> "2917 4462" [label="[]", style=solid]; +"1985 2994" -> "2935 4418" [label="[]", style=solid]; +"1985 2994" -> "2953 4374" [label="[]", style=solid]; +"1985 2994" -> "2971 4330" [label="[]", style=solid]; +"1985 2994" -> "2989 4286" [label="[]", style=solid]; +"1985 2994" -> "3007 4242" [label="[]", style=solid]; +"1985 2994" -> "3025 
4198" [label="[]", style=solid]; +"1985 2994" -> "3043 4154" [label="[]", style=solid]; +"1985 2994" -> "3061 4110" [label="[]", style=solid]; +"1985 2994" -> "3079 4066" [label="[]", style=solid]; +"1985 2994" -> "3097 4022" [label="[]", style=solid]; +"1985 2994" -> "3115 3978" [label="[]", style=solid]; +"1985 2994" -> "3133 3934" [label="[]", style=solid]; +"1985 2994" -> "3151 3890" [label="[]", style=solid]; +"1985 2994" -> "3169 3846" [label="[]", style=solid]; +"1985 2994" -> "3187 3802" [label="[]", style=solid]; +"1985 2994" -> "3205 3758" [label="[]", style=solid]; +"1985 2994" -> "3223 3714" [label="[]", style=solid]; +"1985 2994" -> "3241 3670" [label="[]", style=solid]; +"1985 2994" -> "3259 3626" [label="[]", style=solid]; +"1985 2994" -> "3277 3582" [label="[]", style=solid]; +"1985 2994" -> "3295 3538" [label="[]", style=solid]; +"1985 2994" -> "3313 3494" [label="[]", style=solid]; +"1985 2994" -> "3331 3450" [label="[]", style=solid]; +"1985 2994" -> "3349 3406" [label="[]", style=solid]; +"1985 2994" -> "3367 3362" [label="[]", style=solid]; +"1985 2994" -> "3385 3318" [label="[]", style=solid]; +"1985 2994" -> "3403 3274" [label="[]", style=solid]; +"1985 2994" -> "3421 3230" [label="[]", style=solid]; +"1985 2994" -> "3439 3186" [label="[]", style=solid]; +"1985 2994" -> "3457 3142" [label="[]", style=solid]; +"1985 2994" -> "3475 3098" [label="[]", style=solid]; +"1985 2994" -> "3493 3054" [label="[]", style=solid]; +"1985 2994" -> "3511 3010" [label="[]", style=solid]; +"1986 2996" -> "1987 2997" [label="[]", style=dashed]; +"1987 2997" -> "1988 6478" [label="[]", style=dashed]; +"1987 2997" -> "2100 6434" [label="[]", style=dashed]; +"1987 2997" -> "2118 6390" [label="[]", style=dashed]; +"1987 2997" -> "2136 6346" [label="[]", style=dashed]; +"1987 2997" -> "2154 6302" [label="[]", style=dashed]; +"1987 2997" -> "2172 6258" [label="[]", style=dashed]; +"1987 2997" -> "2190 6214" [label="[]", style=dashed]; +"1987 2997" -> "2208 6170" [label="[]", style=dashed]; +"1987 2997" -> "2226 6126" [label="[]", style=dashed]; +"1987 2997" -> "2244 6082" [label="[]", style=dashed]; +"1987 2997" -> "2262 6038" [label="[]", style=dashed]; +"1987 2997" -> "2280 5994" [label="[]", style=dashed]; +"1987 2997" -> "2298 5950" [label="[]", style=dashed]; +"1987 2997" -> "2316 5906" [label="[]", style=dashed]; +"1987 2997" -> "2334 5862" [label="[]", style=dashed]; +"1987 2997" -> "2352 5818" [label="[]", style=dashed]; +"1987 2997" -> "2370 5774" [label="[]", style=dashed]; +"1987 2997" -> "2388 5730" [label="[]", style=dashed]; +"1987 2997" -> "2406 5686" [label="[]", style=dashed]; +"1987 2997" -> "2424 5642" [label="[]", style=dashed]; +"1987 2997" -> "2442 5598" [label="[]", style=dashed]; +"1987 2997" -> "2460 5554" [label="[]", style=dashed]; +"1987 2997" -> "2478 5510" [label="[]", style=dashed]; +"1987 2997" -> "2496 5466" [label="[]", style=dashed]; +"1987 2997" -> "2514 5422" [label="[]", style=dashed]; +"1987 2997" -> "2532 5378" [label="[]", style=dashed]; +"1987 2997" -> "2550 5334" [label="[]", style=dashed]; +"1987 2997" -> "2568 5290" [label="[]", style=dashed]; +"1987 2997" -> "2586 5246" [label="[]", style=dashed]; +"1987 2997" -> "2604 5202" [label="[]", style=dashed]; +"1987 2997" -> "2622 5158" [label="[]", style=dashed]; +"1987 2997" -> "2640 5114" [label="[]", style=dashed]; +"1987 2997" -> "2658 5070" [label="[]", style=dashed]; +"1987 2997" -> "2676 5026" [label="[]", style=dashed]; +"1987 2997" -> "2694 4982" [label="[]", style=dashed]; +"1987 2997" -> 
"2712 4938" [label="[]", style=dashed]; +"1987 2997" -> "2730 4894" [label="[]", style=dashed]; +"1987 2997" -> "2748 4850" [label="[]", style=dashed]; +"1987 2997" -> "2766 4806" [label="[]", style=dashed]; +"1987 2997" -> "2784 4762" [label="[]", style=dashed]; +"1987 2997" -> "2802 4718" [label="[]", style=dashed]; +"1987 2997" -> "2820 4674" [label="[]", style=dashed]; +"1987 2997" -> "2838 4630" [label="[]", style=dashed]; +"1987 2997" -> "2856 4586" [label="[]", style=dashed]; +"1987 2997" -> "2874 4542" [label="[]", style=dashed]; +"1987 2997" -> "2892 4498" [label="[]", style=dashed]; +"1987 2997" -> "2910 4454" [label="[]", style=dashed]; +"1987 2997" -> "2928 4410" [label="[]", style=dashed]; +"1987 2997" -> "2946 4366" [label="[]", style=dashed]; +"1987 2997" -> "2964 4322" [label="[]", style=dashed]; +"1987 2997" -> "2982 4278" [label="[]", style=dashed]; +"1987 2997" -> "3000 4234" [label="[]", style=dashed]; +"1987 2997" -> "3018 4190" [label="[]", style=dashed]; +"1987 2997" -> "3036 4146" [label="[]", style=dashed]; +"1987 2997" -> "3054 4102" [label="[]", style=dashed]; +"1987 2997" -> "3072 4058" [label="[]", style=dashed]; +"1987 2997" -> "3090 4014" [label="[]", style=dashed]; +"1987 2997" -> "3108 3970" [label="[]", style=dashed]; +"1987 2997" -> "3126 3926" [label="[]", style=dashed]; +"1987 2997" -> "3144 3882" [label="[]", style=dashed]; +"1987 2997" -> "3162 3838" [label="[]", style=dashed]; +"1987 2997" -> "3180 3794" [label="[]", style=dashed]; +"1987 2997" -> "3198 3750" [label="[]", style=dashed]; +"1987 2997" -> "3216 3706" [label="[]", style=dashed]; +"1987 2997" -> "3234 3662" [label="[]", style=dashed]; +"1987 2997" -> "3252 3618" [label="[]", style=dashed]; +"1987 2997" -> "3270 3574" [label="[]", style=dashed]; +"1987 2997" -> "3288 3530" [label="[]", style=dashed]; +"1987 2997" -> "3306 3486" [label="[]", style=dashed]; +"1987 2997" -> "3324 3442" [label="[]", style=dashed]; +"1987 2997" -> "3342 3398" [label="[]", style=dashed]; +"1987 2997" -> "3360 3354" [label="[]", style=dashed]; +"1987 2997" -> "3378 3310" [label="[]", style=dashed]; +"1987 2997" -> "3396 3266" [label="[]", style=dashed]; +"1987 2997" -> "3414 3222" [label="[]", style=dashed]; +"1987 2997" -> "3432 3178" [label="[]", style=dashed]; +"1987 2997" -> "3450 3134" [label="[]", style=dashed]; +"1987 2997" -> "3468 3090" [label="[]", style=dashed]; +"1987 2997" -> "3486 3046" [label="[]", style=dashed]; +"1987 2997" -> "3504 3002" [label="[]", style=dashed]; +"1988 6478" -> "1989 6480" [label="[]", style=dashed]; +"1989 6480" -> "1990 6481" [label="[]", style=dashed]; +"1990 6481" -> "1991 6482" [label="[]", style=solid]; +"1991 6482" -> "1992 6483" [label="[-1, -1]", style=dashed]; +"1992 6483" -> "1993 6484" [label="[-1, -1]", style=dashed]; +"1993 6484" -> "1994 6487" [label="[-1]", style=dashed]; +"1993 6484" -> "2094 6495" [label="[-1]", style=dashed]; +"1994 6487" -> "1996 6488" [label="[-1]", style=dashed]; +"1995 6486" -> "1996 6488" [label="[]", style=solid]; +"1996 6488" -> "1997 6497" [label="[]", style=solid]; +"1996 6488" -> "2099 6508" [label="[]", style=solid]; +"1997 6497" -> "1998 6498" [label="[]", style=solid]; +"1998 6498" -> "2096 6501" [label="[]", style=solid]; +"1999 2984" -> "2000 2987" [label="[]", style=dashed]; +"2000 2987" -> "2002 2988" [label="[1]", style=dashed]; +"2001 2986" -> "2002 2988" [label="[1]", style=dashed]; +"2002 2988" -> "2092 2989" [label="[2]", style=dashed]; +"2003 QuantizeLinear_2808_1" -> "2004 DequantizeLinear_2808_1" [label="[1024, 
324]", style=dashed]; +"2004 DequantizeLinear_2808_1" -> "2005 2773_MatMul" [label="[1024, 324]", style=solid]; +"2005 2773_MatMul" -> "2006 2773_Add" [label="[]", style=solid]; +"2006 2773_Add" -> "2007 2776" [label="[]", style=solid]; +"2007 2776" -> "2008 2947" [label="[]", style=solid]; +"2007 2776" -> "2019 2872" [label="[]", style=solid]; +"2007 2776" -> "2034 2848" [label="[]", style=solid]; +"2007 2776" -> "2050 2860" [label="[]", style=solid]; +"2007 2776" -> "2065 2836" [label="[]", style=solid]; +"2008 2947" -> "2080 2948" [label="[-1]", style=dashed]; +"2009 2775" -> "2010 2777" [label="[]", style=solid]; +"2010 2777" -> "2011 2806" [label="[]", style=solid]; +"2010 2777" -> "2013 2799" [label="[]", style=solid]; +"2010 2777" -> "2027 2826" [label="[]", style=solid]; +"2010 2777" -> "2042 2789" [label="[]", style=solid]; +"2010 2777" -> "2044 2782" [label="[]", style=solid]; +"2010 2777" -> "2058 2816" [label="[]", style=solid]; +"2011 2806" -> "2012 2808" [label="[]", style=solid]; +"2012 2808" -> "2015 2809" [label="[]", style=solid]; +"2013 2799" -> "2014 2801" [label="[]", style=solid]; +"2014 2801" -> "2015 2809" [label="[]", style=solid]; +"2015 2809" -> "2016 2811" [label="[]", style=solid]; +"2016 2811" -> "2017 2923" [label="[]", style=solid]; +"2016 2811" -> "2026 2830" [label="[]", style=solid]; +"2016 2811" -> "2032 2900" [label="[]", style=solid]; +"2017 2923" -> "2018 2924" [label="[]", style=solid]; +"2018 2924" -> "2024 2925" [label="[]", style=solid]; +"2019 2872" -> "2020 2877" [label="[]", style=solid]; +"2020 2877" -> "2021 2879" [label="[]", style=solid]; +"2021 2879" -> "2022 2881" [label="[]", style=solid]; +"2022 2881" -> "2023 2918" [label="[]", style=solid]; +"2023 2918" -> "2024 2925" [label="[]", style=solid]; +"2024 2925" -> "2025 2938" [label="[]", style=solid]; +"2024 2925" -> "2073 2930" [label="[]", style=solid]; +"2025 2938" -> "2039 2939" [label="[]", style=solid]; +"2026 2830" -> "2029 2831" [label="[]", style=solid]; +"2027 2826" -> "2028 2828" [label="[]", style=solid]; +"2028 2828" -> "2029 2831" [label="[]", style=solid]; +"2029 2831" -> "2030 2907" [label="[]", style=solid]; +"2030 2907" -> "2031 2908" [label="[]", style=solid]; +"2031 2908" -> "2038 2909" [label="[]", style=solid]; +"2032 2900" -> "2033 2901" [label="[]", style=solid]; +"2033 2901" -> "2037 2902" [label="[]", style=solid]; +"2034 2848" -> "2035 2853" [label="[]", style=solid]; +"2035 2853" -> "2036 2855" [label="[]", style=solid]; +"2036 2855" -> "2037 2902" [label="[]", style=solid]; +"2037 2902" -> "2038 2909" [label="[]", style=solid]; +"2038 2909" -> "2039 2939" [label="[]", style=solid]; +"2038 2909" -> "2074 2931" [label="[]", style=solid]; +"2039 2939" -> "2040 2941" [label="[]", style=solid]; +"2040 2941" -> "2041 2945" [label="[]", style=solid]; +"2041 2945" -> "2079 2946" [label="[]", style=solid]; +"2042 2789" -> "2043 2791" [label="[]", style=solid]; +"2043 2791" -> "2046 2792" [label="[]", style=solid]; +"2044 2782" -> "2045 2784" [label="[]", style=solid]; +"2045 2784" -> "2046 2792" [label="[]", style=solid]; +"2046 2792" -> "2047 2794" [label="[]", style=solid]; +"2047 2794" -> "2048 2915" [label="[]", style=solid]; +"2047 2794" -> "2057 2820" [label="[]", style=solid]; +"2047 2794" -> "2063 2886" [label="[]", style=solid]; +"2048 2915" -> "2049 2916" [label="[]", style=solid]; +"2049 2916" -> "2055 2917" [label="[]", style=solid]; +"2050 2860" -> "2051 2865" [label="[]", style=solid]; +"2051 2865" -> "2052 2867" [label="[]", style=solid]; +"2052 2867" 
-> "2053 2880" [label="[]", style=solid]; +"2053 2880" -> "2054 2910" [label="[]", style=solid]; +"2054 2910" -> "2055 2917" [label="[]", style=solid]; +"2055 2917" -> "2056 2933" [label="[]", style=solid]; +"2055 2917" -> "2076 2927" [label="[]", style=solid]; +"2056 2933" -> "2070 2934" [label="[]", style=solid]; +"2057 2820" -> "2060 2821" [label="[]", style=solid]; +"2058 2816" -> "2059 2818" [label="[]", style=solid]; +"2059 2818" -> "2060 2821" [label="[]", style=solid]; +"2060 2821" -> "2061 2893" [label="[]", style=solid]; +"2061 2893" -> "2062 2894" [label="[]", style=solid]; +"2062 2894" -> "2069 2895" [label="[]", style=solid]; +"2063 2886" -> "2064 2887" [label="[]", style=solid]; +"2064 2887" -> "2068 2888" [label="[]", style=solid]; +"2065 2836" -> "2066 2841" [label="[]", style=solid]; +"2066 2841" -> "2067 2843" [label="[]", style=solid]; +"2067 2843" -> "2068 2888" [label="[]", style=solid]; +"2068 2888" -> "2069 2895" [label="[]", style=solid]; +"2069 2895" -> "2070 2934" [label="[]", style=solid]; +"2069 2895" -> "2077 2928" [label="[]", style=solid]; +"2070 2934" -> "2071 2936" [label="[]", style=solid]; +"2071 2936" -> "2072 2944" [label="[]", style=solid]; +"2072 2944" -> "2079 2946" [label="[]", style=solid]; +"2073 2930" -> "2074 2931" [label="[]", style=solid]; +"2074 2931" -> "2075 2943" [label="[]", style=solid]; +"2075 2943" -> "2079 2946" [label="[]", style=solid]; +"2076 2927" -> "2077 2928" [label="[]", style=solid]; +"2077 2928" -> "2078 2942" [label="[]", style=solid]; +"2078 2942" -> "2079 2946" [label="[]", style=solid]; +"2079 2946" -> "2080 2948" [label="[]", style=solid]; +"2080 2948" -> "2081 2953" [label="[]", style=solid]; +"2081 2953" -> "2082 2971" [label="[-1, 4]", style=solid]; +"2081 2953" -> "2086 2960" [label="[-1, 4]", style=solid]; +"2082 2971" -> "2083 2976" [label="[-1, 4]", style=solid]; +"2083 2976" -> "2084 2977" [label="[-1, 2]", style=solid]; +"2084 2977" -> "2085 2979" [label="[-1, 2]", style=solid]; +"2085 2979" -> "2090 2980" [label="[-1, 2, 1]", style=solid]; +"2086 2960" -> "2087 2965" [label="[-1, 4]", style=solid]; +"2087 2965" -> "2088 2966" [label="[-1, 2]", style=solid]; +"2088 2966" -> "2089 2978" [label="[-1, 2]", style=solid]; +"2089 2978" -> "2090 2980" [label="[-1, 2, 1]", style=solid]; +"2090 2980" -> "2091 2982" [label="[-1, 2, 2]", style=solid]; +"2091 2982" -> "2092 2989" [label="[-1, 4]", style=solid]; +"2092 2989" -> "2093 6493" [label="[]", style=solid]; +"2092 2989" -> "2111 6449" [label="[]", style=solid]; +"2092 2989" -> "2129 6405" [label="[]", style=solid]; +"2092 2989" -> "2147 6361" [label="[]", style=solid]; +"2092 2989" -> "2165 6317" [label="[]", style=solid]; +"2092 2989" -> "2183 6273" [label="[]", style=solid]; +"2092 2989" -> "2201 6229" [label="[]", style=solid]; +"2092 2989" -> "2219 6185" [label="[]", style=solid]; +"2092 2989" -> "2237 6141" [label="[]", style=solid]; +"2092 2989" -> "2255 6097" [label="[]", style=solid]; +"2092 2989" -> "2273 6053" [label="[]", style=solid]; +"2092 2989" -> "2291 6009" [label="[]", style=solid]; +"2092 2989" -> "2309 5965" [label="[]", style=solid]; +"2092 2989" -> "2327 5921" [label="[]", style=solid]; +"2092 2989" -> "2345 5877" [label="[]", style=solid]; +"2092 2989" -> "2363 5833" [label="[]", style=solid]; +"2092 2989" -> "2381 5789" [label="[]", style=solid]; +"2092 2989" -> "2399 5745" [label="[]", style=solid]; +"2092 2989" -> "2417 5701" [label="[]", style=solid]; +"2092 2989" -> "2435 5657" [label="[]", style=solid]; +"2092 2989" -> "2453 5613" 
[label="[]", style=solid]; +"2092 2989" -> "2471 5569" [label="[]", style=solid]; +"2092 2989" -> "2489 5525" [label="[]", style=solid]; +"2092 2989" -> "2507 5481" [label="[]", style=solid]; +"2092 2989" -> "2525 5437" [label="[]", style=solid]; +"2092 2989" -> "2543 5393" [label="[]", style=solid]; +"2092 2989" -> "2561 5349" [label="[]", style=solid]; +"2092 2989" -> "2579 5305" [label="[]", style=solid]; +"2092 2989" -> "2597 5261" [label="[]", style=solid]; +"2092 2989" -> "2615 5217" [label="[]", style=solid]; +"2092 2989" -> "2633 5173" [label="[]", style=solid]; +"2092 2989" -> "2651 5129" [label="[]", style=solid]; +"2092 2989" -> "2669 5085" [label="[]", style=solid]; +"2092 2989" -> "2687 5041" [label="[]", style=solid]; +"2092 2989" -> "2705 4997" [label="[]", style=solid]; +"2092 2989" -> "2723 4953" [label="[]", style=solid]; +"2092 2989" -> "2741 4909" [label="[]", style=solid]; +"2092 2989" -> "2759 4865" [label="[]", style=solid]; +"2092 2989" -> "2777 4821" [label="[]", style=solid]; +"2092 2989" -> "2795 4777" [label="[]", style=solid]; +"2092 2989" -> "2813 4733" [label="[]", style=solid]; +"2092 2989" -> "2831 4689" [label="[]", style=solid]; +"2092 2989" -> "2849 4645" [label="[]", style=solid]; +"2092 2989" -> "2867 4601" [label="[]", style=solid]; +"2092 2989" -> "2885 4557" [label="[]", style=solid]; +"2092 2989" -> "2903 4513" [label="[]", style=solid]; +"2092 2989" -> "2921 4469" [label="[]", style=solid]; +"2092 2989" -> "2939 4425" [label="[]", style=solid]; +"2092 2989" -> "2957 4381" [label="[]", style=solid]; +"2092 2989" -> "2975 4337" [label="[]", style=solid]; +"2092 2989" -> "2993 4293" [label="[]", style=solid]; +"2092 2989" -> "3011 4249" [label="[]", style=solid]; +"2092 2989" -> "3029 4205" [label="[]", style=solid]; +"2092 2989" -> "3047 4161" [label="[]", style=solid]; +"2092 2989" -> "3065 4117" [label="[]", style=solid]; +"2092 2989" -> "3083 4073" [label="[]", style=solid]; +"2092 2989" -> "3101 4029" [label="[]", style=solid]; +"2092 2989" -> "3119 3985" [label="[]", style=solid]; +"2092 2989" -> "3137 3941" [label="[]", style=solid]; +"2092 2989" -> "3155 3897" [label="[]", style=solid]; +"2092 2989" -> "3173 3853" [label="[]", style=solid]; +"2092 2989" -> "3191 3809" [label="[]", style=solid]; +"2092 2989" -> "3209 3765" [label="[]", style=solid]; +"2092 2989" -> "3227 3721" [label="[]", style=solid]; +"2092 2989" -> "3245 3677" [label="[]", style=solid]; +"2092 2989" -> "3263 3633" [label="[]", style=solid]; +"2092 2989" -> "3281 3589" [label="[]", style=solid]; +"2092 2989" -> "3299 3545" [label="[]", style=solid]; +"2092 2989" -> "3317 3501" [label="[]", style=solid]; +"2092 2989" -> "3335 3457" [label="[]", style=solid]; +"2092 2989" -> "3353 3413" [label="[]", style=solid]; +"2092 2989" -> "3371 3369" [label="[]", style=solid]; +"2092 2989" -> "3389 3325" [label="[]", style=solid]; +"2092 2989" -> "3407 3281" [label="[]", style=solid]; +"2092 2989" -> "3425 3237" [label="[]", style=solid]; +"2092 2989" -> "3443 3193" [label="[]", style=solid]; +"2092 2989" -> "3461 3149" [label="[]", style=solid]; +"2092 2989" -> "3479 3105" [label="[]", style=solid]; +"2092 2989" -> "3497 3061" [label="[]", style=solid]; +"2092 2989" -> "3515 3017" [label="[]", style=solid]; +"2093 6493" -> "2094 6495" [label="[]", style=solid]; +"2094 6495" -> "2095 6496" [label="[]", style=solid]; +"2094 6495" -> "3532 6506" [label="[]", style=solid]; +"2095 6496" -> "2096 6501" [label="[]", style=solid]; +"2096 6501" -> "2097 6503" [label="[-1, 3]", style=dashed]; 
+"2097 6503" -> "2098 6504" [label="[-1, 1]", style=dashed]; +"2098 6504" -> "2099 6508" [label="[-1]", style=dashed]; +"2098 6504" -> "3531 6505" [label="[-1]", style=dashed]; +"2099 6508" -> "3522 6520" [label="[]", style=solid]; +"2100 6434" -> "2101 6436" [label="[]", style=dashed]; +"2101 6436" -> "2102 6437" [label="[]", style=dashed]; +"2102 6437" -> "2103 6438" [label="[]", style=solid]; +"2103 6438" -> "2104 6439" [label="[-1, -1]", style=dashed]; +"2104 6439" -> "2105 6440" [label="[-1, -1]", style=dashed]; +"2105 6440" -> "2106 6443" [label="[-1]", style=dashed]; +"2105 6440" -> "2112 6451" [label="[-1]", style=dashed]; +"2106 6443" -> "2108 6444" [label="[-1]", style=dashed]; +"2107 6442" -> "2108 6444" [label="[]", style=solid]; +"2108 6444" -> "2109 6453" [label="[]", style=solid]; +"2108 6444" -> "2117 6464" [label="[]", style=solid]; +"2109 6453" -> "2110 6454" [label="[]", style=solid]; +"2110 6454" -> "2114 6457" [label="[]", style=solid]; +"2111 6449" -> "2112 6451" [label="[]", style=solid]; +"2112 6451" -> "2113 6452" [label="[]", style=solid]; +"2112 6451" -> "3534 6462" [label="[]", style=solid]; +"2113 6452" -> "2114 6457" [label="[]", style=solid]; +"2114 6457" -> "2115 6459" [label="[-1, 3]", style=dashed]; +"2115 6459" -> "2116 6460" [label="[-1, 1]", style=dashed]; +"2116 6460" -> "2117 6464" [label="[-1]", style=dashed]; +"2116 6460" -> "3533 6461" [label="[-1]", style=dashed]; +"2117 6464" -> "3522 6520" [label="[]", style=solid]; +"2118 6390" -> "2119 6392" [label="[]", style=dashed]; +"2119 6392" -> "2120 6393" [label="[]", style=dashed]; +"2120 6393" -> "2121 6394" [label="[]", style=solid]; +"2121 6394" -> "2122 6395" [label="[-1, -1]", style=dashed]; +"2122 6395" -> "2123 6396" [label="[-1, -1]", style=dashed]; +"2123 6396" -> "2124 6399" [label="[-1]", style=dashed]; +"2123 6396" -> "2130 6407" [label="[-1]", style=dashed]; +"2124 6399" -> "2126 6400" [label="[-1]", style=dashed]; +"2125 6398" -> "2126 6400" [label="[]", style=solid]; +"2126 6400" -> "2127 6409" [label="[]", style=solid]; +"2126 6400" -> "2135 6420" [label="[]", style=solid]; +"2127 6409" -> "2128 6410" [label="[]", style=solid]; +"2128 6410" -> "2132 6413" [label="[]", style=solid]; +"2129 6405" -> "2130 6407" [label="[]", style=solid]; +"2130 6407" -> "2131 6408" [label="[]", style=solid]; +"2130 6407" -> "3536 6418" [label="[]", style=solid]; +"2131 6408" -> "2132 6413" [label="[]", style=solid]; +"2132 6413" -> "2133 6415" [label="[-1, 3]", style=dashed]; +"2133 6415" -> "2134 6416" [label="[-1, 1]", style=dashed]; +"2134 6416" -> "2135 6420" [label="[-1]", style=dashed]; +"2134 6416" -> "3535 6417" [label="[-1]", style=dashed]; +"2135 6420" -> "3522 6520" [label="[]", style=solid]; +"2136 6346" -> "2137 6348" [label="[]", style=dashed]; +"2137 6348" -> "2138 6349" [label="[]", style=dashed]; +"2138 6349" -> "2139 6350" [label="[]", style=solid]; +"2139 6350" -> "2140 6351" [label="[-1, -1]", style=dashed]; +"2140 6351" -> "2141 6352" [label="[-1, -1]", style=dashed]; +"2141 6352" -> "2142 6355" [label="[-1]", style=dashed]; +"2141 6352" -> "2148 6363" [label="[-1]", style=dashed]; +"2142 6355" -> "2144 6356" [label="[-1]", style=dashed]; +"2143 6354" -> "2144 6356" [label="[]", style=solid]; +"2144 6356" -> "2145 6365" [label="[]", style=solid]; +"2144 6356" -> "2153 6376" [label="[]", style=solid]; +"2145 6365" -> "2146 6366" [label="[]", style=solid]; +"2146 6366" -> "2150 6369" [label="[]", style=solid]; +"2147 6361" -> "2148 6363" [label="[]", style=solid]; +"2148 6363" -> "2149 
6364" [label="[]", style=solid]; +"2148 6363" -> "3538 6374" [label="[]", style=solid]; +"2149 6364" -> "2150 6369" [label="[]", style=solid]; +"2150 6369" -> "2151 6371" [label="[-1, 3]", style=dashed]; +"2151 6371" -> "2152 6372" [label="[-1, 1]", style=dashed]; +"2152 6372" -> "2153 6376" [label="[-1]", style=dashed]; +"2152 6372" -> "3537 6373" [label="[-1]", style=dashed]; +"2153 6376" -> "3522 6520" [label="[]", style=solid]; +"2154 6302" -> "2155 6304" [label="[]", style=dashed]; +"2155 6304" -> "2156 6305" [label="[]", style=dashed]; +"2156 6305" -> "2157 6306" [label="[]", style=solid]; +"2157 6306" -> "2158 6307" [label="[-1, -1]", style=dashed]; +"2158 6307" -> "2159 6308" [label="[-1, -1]", style=dashed]; +"2159 6308" -> "2160 6311" [label="[-1]", style=dashed]; +"2159 6308" -> "2166 6319" [label="[-1]", style=dashed]; +"2160 6311" -> "2162 6312" [label="[-1]", style=dashed]; +"2161 6310" -> "2162 6312" [label="[]", style=solid]; +"2162 6312" -> "2163 6321" [label="[]", style=solid]; +"2162 6312" -> "2171 6332" [label="[]", style=solid]; +"2163 6321" -> "2164 6322" [label="[]", style=solid]; +"2164 6322" -> "2168 6325" [label="[]", style=solid]; +"2165 6317" -> "2166 6319" [label="[]", style=solid]; +"2166 6319" -> "2167 6320" [label="[]", style=solid]; +"2166 6319" -> "3540 6330" [label="[]", style=solid]; +"2167 6320" -> "2168 6325" [label="[]", style=solid]; +"2168 6325" -> "2169 6327" [label="[-1, 3]", style=dashed]; +"2169 6327" -> "2170 6328" [label="[-1, 1]", style=dashed]; +"2170 6328" -> "2171 6332" [label="[-1]", style=dashed]; +"2170 6328" -> "3539 6329" [label="[-1]", style=dashed]; +"2171 6332" -> "3522 6520" [label="[]", style=solid]; +"2172 6258" -> "2173 6260" [label="[]", style=dashed]; +"2173 6260" -> "2174 6261" [label="[]", style=dashed]; +"2174 6261" -> "2175 6262" [label="[]", style=solid]; +"2175 6262" -> "2176 6263" [label="[-1, -1]", style=dashed]; +"2176 6263" -> "2177 6264" [label="[-1, -1]", style=dashed]; +"2177 6264" -> "2178 6267" [label="[-1]", style=dashed]; +"2177 6264" -> "2184 6275" [label="[-1]", style=dashed]; +"2178 6267" -> "2180 6268" [label="[-1]", style=dashed]; +"2179 6266" -> "2180 6268" [label="[]", style=solid]; +"2180 6268" -> "2181 6277" [label="[]", style=solid]; +"2180 6268" -> "2189 6288" [label="[]", style=solid]; +"2181 6277" -> "2182 6278" [label="[]", style=solid]; +"2182 6278" -> "2186 6281" [label="[]", style=solid]; +"2183 6273" -> "2184 6275" [label="[]", style=solid]; +"2184 6275" -> "2185 6276" [label="[]", style=solid]; +"2184 6275" -> "3542 6286" [label="[]", style=solid]; +"2185 6276" -> "2186 6281" [label="[]", style=solid]; +"2186 6281" -> "2187 6283" [label="[-1, 3]", style=dashed]; +"2187 6283" -> "2188 6284" [label="[-1, 1]", style=dashed]; +"2188 6284" -> "2189 6288" [label="[-1]", style=dashed]; +"2188 6284" -> "3541 6285" [label="[-1]", style=dashed]; +"2189 6288" -> "3522 6520" [label="[]", style=solid]; +"2190 6214" -> "2191 6216" [label="[]", style=dashed]; +"2191 6216" -> "2192 6217" [label="[]", style=dashed]; +"2192 6217" -> "2193 6218" [label="[]", style=solid]; +"2193 6218" -> "2194 6219" [label="[-1, -1]", style=dashed]; +"2194 6219" -> "2195 6220" [label="[-1, -1]", style=dashed]; +"2195 6220" -> "2196 6223" [label="[-1]", style=dashed]; +"2195 6220" -> "2202 6231" [label="[-1]", style=dashed]; +"2196 6223" -> "2198 6224" [label="[-1]", style=dashed]; +"2197 6222" -> "2198 6224" [label="[]", style=solid]; +"2198 6224" -> "2199 6233" [label="[]", style=solid]; +"2198 6224" -> "2207 6244" 
[label="[]", style=solid]; +"2199 6233" -> "2200 6234" [label="[]", style=solid]; +"2200 6234" -> "2204 6237" [label="[]", style=solid]; +"2201 6229" -> "2202 6231" [label="[]", style=solid]; +"2202 6231" -> "2203 6232" [label="[]", style=solid]; +"2202 6231" -> "3544 6242" [label="[]", style=solid]; +"2203 6232" -> "2204 6237" [label="[]", style=solid]; +"2204 6237" -> "2205 6239" [label="[-1, 3]", style=dashed]; +"2205 6239" -> "2206 6240" [label="[-1, 1]", style=dashed]; +"2206 6240" -> "2207 6244" [label="[-1]", style=dashed]; +"2206 6240" -> "3543 6241" [label="[-1]", style=dashed]; +"2207 6244" -> "3522 6520" [label="[]", style=solid]; +"2208 6170" -> "2209 6172" [label="[]", style=dashed]; +"2209 6172" -> "2210 6173" [label="[]", style=dashed]; +"2210 6173" -> "2211 6174" [label="[]", style=solid]; +"2211 6174" -> "2212 6175" [label="[-1, -1]", style=dashed]; +"2212 6175" -> "2213 6176" [label="[-1, -1]", style=dashed]; +"2213 6176" -> "2214 6179" [label="[-1]", style=dashed]; +"2213 6176" -> "2220 6187" [label="[-1]", style=dashed]; +"2214 6179" -> "2216 6180" [label="[-1]", style=dashed]; +"2215 6178" -> "2216 6180" [label="[]", style=solid]; +"2216 6180" -> "2217 6189" [label="[]", style=solid]; +"2216 6180" -> "2225 6200" [label="[]", style=solid]; +"2217 6189" -> "2218 6190" [label="[]", style=solid]; +"2218 6190" -> "2222 6193" [label="[]", style=solid]; +"2219 6185" -> "2220 6187" [label="[]", style=solid]; +"2220 6187" -> "2221 6188" [label="[]", style=solid]; +"2220 6187" -> "3546 6198" [label="[]", style=solid]; +"2221 6188" -> "2222 6193" [label="[]", style=solid]; +"2222 6193" -> "2223 6195" [label="[-1, 3]", style=dashed]; +"2223 6195" -> "2224 6196" [label="[-1, 1]", style=dashed]; +"2224 6196" -> "2225 6200" [label="[-1]", style=dashed]; +"2224 6196" -> "3545 6197" [label="[-1]", style=dashed]; +"2225 6200" -> "3522 6520" [label="[]", style=solid]; +"2226 6126" -> "2227 6128" [label="[]", style=dashed]; +"2227 6128" -> "2228 6129" [label="[]", style=dashed]; +"2228 6129" -> "2229 6130" [label="[]", style=solid]; +"2229 6130" -> "2230 6131" [label="[-1, -1]", style=dashed]; +"2230 6131" -> "2231 6132" [label="[-1, -1]", style=dashed]; +"2231 6132" -> "2232 6135" [label="[-1]", style=dashed]; +"2231 6132" -> "2238 6143" [label="[-1]", style=dashed]; +"2232 6135" -> "2234 6136" [label="[-1]", style=dashed]; +"2233 6134" -> "2234 6136" [label="[]", style=solid]; +"2234 6136" -> "2235 6145" [label="[]", style=solid]; +"2234 6136" -> "2243 6156" [label="[]", style=solid]; +"2235 6145" -> "2236 6146" [label="[]", style=solid]; +"2236 6146" -> "2240 6149" [label="[]", style=solid]; +"2237 6141" -> "2238 6143" [label="[]", style=solid]; +"2238 6143" -> "2239 6144" [label="[]", style=solid]; +"2238 6143" -> "3548 6154" [label="[]", style=solid]; +"2239 6144" -> "2240 6149" [label="[]", style=solid]; +"2240 6149" -> "2241 6151" [label="[-1, 3]", style=dashed]; +"2241 6151" -> "2242 6152" [label="[-1, 1]", style=dashed]; +"2242 6152" -> "2243 6156" [label="[-1]", style=dashed]; +"2242 6152" -> "3547 6153" [label="[-1]", style=dashed]; +"2243 6156" -> "3522 6520" [label="[]", style=solid]; +"2244 6082" -> "2245 6084" [label="[]", style=dashed]; +"2245 6084" -> "2246 6085" [label="[]", style=dashed]; +"2246 6085" -> "2247 6086" [label="[]", style=solid]; +"2247 6086" -> "2248 6087" [label="[-1, -1]", style=dashed]; +"2248 6087" -> "2249 6088" [label="[-1, -1]", style=dashed]; +"2249 6088" -> "2250 6091" [label="[-1]", style=dashed]; +"2249 6088" -> "2256 6099" [label="[-1]", 
style=dashed]; +"2250 6091" -> "2252 6092" [label="[-1]", style=dashed]; +"2251 6090" -> "2252 6092" [label="[]", style=solid]; +"2252 6092" -> "2253 6101" [label="[]", style=solid]; +"2252 6092" -> "2261 6112" [label="[]", style=solid]; +"2253 6101" -> "2254 6102" [label="[]", style=solid]; +"2254 6102" -> "2258 6105" [label="[]", style=solid]; +"2255 6097" -> "2256 6099" [label="[]", style=solid]; +"2256 6099" -> "2257 6100" [label="[]", style=solid]; +"2256 6099" -> "3550 6110" [label="[]", style=solid]; +"2257 6100" -> "2258 6105" [label="[]", style=solid]; +"2258 6105" -> "2259 6107" [label="[-1, 3]", style=dashed]; +"2259 6107" -> "2260 6108" [label="[-1, 1]", style=dashed]; +"2260 6108" -> "2261 6112" [label="[-1]", style=dashed]; +"2260 6108" -> "3549 6109" [label="[-1]", style=dashed]; +"2261 6112" -> "3522 6520" [label="[]", style=solid]; +"2262 6038" -> "2263 6040" [label="[]", style=dashed]; +"2263 6040" -> "2264 6041" [label="[]", style=dashed]; +"2264 6041" -> "2265 6042" [label="[]", style=solid]; +"2265 6042" -> "2266 6043" [label="[-1, -1]", style=dashed]; +"2266 6043" -> "2267 6044" [label="[-1, -1]", style=dashed]; +"2267 6044" -> "2268 6047" [label="[-1]", style=dashed]; +"2267 6044" -> "2274 6055" [label="[-1]", style=dashed]; +"2268 6047" -> "2270 6048" [label="[-1]", style=dashed]; +"2269 6046" -> "2270 6048" [label="[]", style=solid]; +"2270 6048" -> "2271 6057" [label="[]", style=solid]; +"2270 6048" -> "2279 6068" [label="[]", style=solid]; +"2271 6057" -> "2272 6058" [label="[]", style=solid]; +"2272 6058" -> "2276 6061" [label="[]", style=solid]; +"2273 6053" -> "2274 6055" [label="[]", style=solid]; +"2274 6055" -> "2275 6056" [label="[]", style=solid]; +"2274 6055" -> "3552 6066" [label="[]", style=solid]; +"2275 6056" -> "2276 6061" [label="[]", style=solid]; +"2276 6061" -> "2277 6063" [label="[-1, 3]", style=dashed]; +"2277 6063" -> "2278 6064" [label="[-1, 1]", style=dashed]; +"2278 6064" -> "2279 6068" [label="[-1]", style=dashed]; +"2278 6064" -> "3551 6065" [label="[-1]", style=dashed]; +"2279 6068" -> "3522 6520" [label="[]", style=solid]; +"2280 5994" -> "2281 5996" [label="[]", style=dashed]; +"2281 5996" -> "2282 5997" [label="[]", style=dashed]; +"2282 5997" -> "2283 5998" [label="[]", style=solid]; +"2283 5998" -> "2284 5999" [label="[-1, -1]", style=dashed]; +"2284 5999" -> "2285 6000" [label="[-1, -1]", style=dashed]; +"2285 6000" -> "2286 6003" [label="[-1]", style=dashed]; +"2285 6000" -> "2292 6011" [label="[-1]", style=dashed]; +"2286 6003" -> "2288 6004" [label="[-1]", style=dashed]; +"2287 6002" -> "2288 6004" [label="[]", style=solid]; +"2288 6004" -> "2289 6013" [label="[]", style=solid]; +"2288 6004" -> "2297 6024" [label="[]", style=solid]; +"2289 6013" -> "2290 6014" [label="[]", style=solid]; +"2290 6014" -> "2294 6017" [label="[]", style=solid]; +"2291 6009" -> "2292 6011" [label="[]", style=solid]; +"2292 6011" -> "2293 6012" [label="[]", style=solid]; +"2292 6011" -> "3554 6022" [label="[]", style=solid]; +"2293 6012" -> "2294 6017" [label="[]", style=solid]; +"2294 6017" -> "2295 6019" [label="[-1, 3]", style=dashed]; +"2295 6019" -> "2296 6020" [label="[-1, 1]", style=dashed]; +"2296 6020" -> "2297 6024" [label="[-1]", style=dashed]; +"2296 6020" -> "3553 6021" [label="[-1]", style=dashed]; +"2297 6024" -> "3522 6520" [label="[]", style=solid]; +"2298 5950" -> "2299 5952" [label="[]", style=dashed]; +"2299 5952" -> "2300 5953" [label="[]", style=dashed]; +"2300 5953" -> "2301 5954" [label="[]", style=solid]; +"2301 5954" -> 
"2302 5955" [label="[-1, -1]", style=dashed]; +"2302 5955" -> "2303 5956" [label="[-1, -1]", style=dashed]; +"2303 5956" -> "2304 5959" [label="[-1]", style=dashed]; +"2303 5956" -> "2310 5967" [label="[-1]", style=dashed]; +"2304 5959" -> "2306 5960" [label="[-1]", style=dashed]; +"2305 5958" -> "2306 5960" [label="[]", style=solid]; +"2306 5960" -> "2307 5969" [label="[]", style=solid]; +"2306 5960" -> "2315 5980" [label="[]", style=solid]; +"2307 5969" -> "2308 5970" [label="[]", style=solid]; +"2308 5970" -> "2312 5973" [label="[]", style=solid]; +"2309 5965" -> "2310 5967" [label="[]", style=solid]; +"2310 5967" -> "2311 5968" [label="[]", style=solid]; +"2310 5967" -> "3556 5978" [label="[]", style=solid]; +"2311 5968" -> "2312 5973" [label="[]", style=solid]; +"2312 5973" -> "2313 5975" [label="[-1, 3]", style=dashed]; +"2313 5975" -> "2314 5976" [label="[-1, 1]", style=dashed]; +"2314 5976" -> "2315 5980" [label="[-1]", style=dashed]; +"2314 5976" -> "3555 5977" [label="[-1]", style=dashed]; +"2315 5980" -> "3522 6520" [label="[]", style=solid]; +"2316 5906" -> "2317 5908" [label="[]", style=dashed]; +"2317 5908" -> "2318 5909" [label="[]", style=dashed]; +"2318 5909" -> "2319 5910" [label="[]", style=solid]; +"2319 5910" -> "2320 5911" [label="[-1, -1]", style=dashed]; +"2320 5911" -> "2321 5912" [label="[-1, -1]", style=dashed]; +"2321 5912" -> "2322 5915" [label="[-1]", style=dashed]; +"2321 5912" -> "2328 5923" [label="[-1]", style=dashed]; +"2322 5915" -> "2324 5916" [label="[-1]", style=dashed]; +"2323 5914" -> "2324 5916" [label="[]", style=solid]; +"2324 5916" -> "2325 5925" [label="[]", style=solid]; +"2324 5916" -> "2333 5936" [label="[]", style=solid]; +"2325 5925" -> "2326 5926" [label="[]", style=solid]; +"2326 5926" -> "2330 5929" [label="[]", style=solid]; +"2327 5921" -> "2328 5923" [label="[]", style=solid]; +"2328 5923" -> "2329 5924" [label="[]", style=solid]; +"2328 5923" -> "3558 5934" [label="[]", style=solid]; +"2329 5924" -> "2330 5929" [label="[]", style=solid]; +"2330 5929" -> "2331 5931" [label="[-1, 3]", style=dashed]; +"2331 5931" -> "2332 5932" [label="[-1, 1]", style=dashed]; +"2332 5932" -> "2333 5936" [label="[-1]", style=dashed]; +"2332 5932" -> "3557 5933" [label="[-1]", style=dashed]; +"2333 5936" -> "3522 6520" [label="[]", style=solid]; +"2334 5862" -> "2335 5864" [label="[]", style=dashed]; +"2335 5864" -> "2336 5865" [label="[]", style=dashed]; +"2336 5865" -> "2337 5866" [label="[]", style=solid]; +"2337 5866" -> "2338 5867" [label="[-1, -1]", style=dashed]; +"2338 5867" -> "2339 5868" [label="[-1, -1]", style=dashed]; +"2339 5868" -> "2340 5871" [label="[-1]", style=dashed]; +"2339 5868" -> "2346 5879" [label="[-1]", style=dashed]; +"2340 5871" -> "2342 5872" [label="[-1]", style=dashed]; +"2341 5870" -> "2342 5872" [label="[]", style=solid]; +"2342 5872" -> "2343 5881" [label="[]", style=solid]; +"2342 5872" -> "2351 5892" [label="[]", style=solid]; +"2343 5881" -> "2344 5882" [label="[]", style=solid]; +"2344 5882" -> "2348 5885" [label="[]", style=solid]; +"2345 5877" -> "2346 5879" [label="[]", style=solid]; +"2346 5879" -> "2347 5880" [label="[]", style=solid]; +"2346 5879" -> "3560 5890" [label="[]", style=solid]; +"2347 5880" -> "2348 5885" [label="[]", style=solid]; +"2348 5885" -> "2349 5887" [label="[-1, 3]", style=dashed]; +"2349 5887" -> "2350 5888" [label="[-1, 1]", style=dashed]; +"2350 5888" -> "2351 5892" [label="[-1]", style=dashed]; +"2350 5888" -> "3559 5889" [label="[-1]", style=dashed]; +"2351 5892" -> "3522 6520" 
[label="[]", style=solid]; +"2352 5818" -> "2353 5820" [label="[]", style=dashed]; +"2353 5820" -> "2354 5821" [label="[]", style=dashed]; +"2354 5821" -> "2355 5822" [label="[]", style=solid]; +"2355 5822" -> "2356 5823" [label="[-1, -1]", style=dashed]; +"2356 5823" -> "2357 5824" [label="[-1, -1]", style=dashed]; +"2357 5824" -> "2358 5827" [label="[-1]", style=dashed]; +"2357 5824" -> "2364 5835" [label="[-1]", style=dashed]; +"2358 5827" -> "2360 5828" [label="[-1]", style=dashed]; +"2359 5826" -> "2360 5828" [label="[]", style=solid]; +"2360 5828" -> "2361 5837" [label="[]", style=solid]; +"2360 5828" -> "2369 5848" [label="[]", style=solid]; +"2361 5837" -> "2362 5838" [label="[]", style=solid]; +"2362 5838" -> "2366 5841" [label="[]", style=solid]; +"2363 5833" -> "2364 5835" [label="[]", style=solid]; +"2364 5835" -> "2365 5836" [label="[]", style=solid]; +"2364 5835" -> "3562 5846" [label="[]", style=solid]; +"2365 5836" -> "2366 5841" [label="[]", style=solid]; +"2366 5841" -> "2367 5843" [label="[-1, 3]", style=dashed]; +"2367 5843" -> "2368 5844" [label="[-1, 1]", style=dashed]; +"2368 5844" -> "2369 5848" [label="[-1]", style=dashed]; +"2368 5844" -> "3561 5845" [label="[-1]", style=dashed]; +"2369 5848" -> "3522 6520" [label="[]", style=solid]; +"2370 5774" -> "2371 5776" [label="[]", style=dashed]; +"2371 5776" -> "2372 5777" [label="[]", style=dashed]; +"2372 5777" -> "2373 5778" [label="[]", style=solid]; +"2373 5778" -> "2374 5779" [label="[-1, -1]", style=dashed]; +"2374 5779" -> "2375 5780" [label="[-1, -1]", style=dashed]; +"2375 5780" -> "2376 5783" [label="[-1]", style=dashed]; +"2375 5780" -> "2382 5791" [label="[-1]", style=dashed]; +"2376 5783" -> "2378 5784" [label="[-1]", style=dashed]; +"2377 5782" -> "2378 5784" [label="[]", style=solid]; +"2378 5784" -> "2379 5793" [label="[]", style=solid]; +"2378 5784" -> "2387 5804" [label="[]", style=solid]; +"2379 5793" -> "2380 5794" [label="[]", style=solid]; +"2380 5794" -> "2384 5797" [label="[]", style=solid]; +"2381 5789" -> "2382 5791" [label="[]", style=solid]; +"2382 5791" -> "2383 5792" [label="[]", style=solid]; +"2382 5791" -> "3564 5802" [label="[]", style=solid]; +"2383 5792" -> "2384 5797" [label="[]", style=solid]; +"2384 5797" -> "2385 5799" [label="[-1, 3]", style=dashed]; +"2385 5799" -> "2386 5800" [label="[-1, 1]", style=dashed]; +"2386 5800" -> "2387 5804" [label="[-1]", style=dashed]; +"2386 5800" -> "3563 5801" [label="[-1]", style=dashed]; +"2387 5804" -> "3522 6520" [label="[]", style=solid]; +"2388 5730" -> "2389 5732" [label="[]", style=dashed]; +"2389 5732" -> "2390 5733" [label="[]", style=dashed]; +"2390 5733" -> "2391 5734" [label="[]", style=solid]; +"2391 5734" -> "2392 5735" [label="[-1, -1]", style=dashed]; +"2392 5735" -> "2393 5736" [label="[-1, -1]", style=dashed]; +"2393 5736" -> "2394 5739" [label="[-1]", style=dashed]; +"2393 5736" -> "2400 5747" [label="[-1]", style=dashed]; +"2394 5739" -> "2396 5740" [label="[-1]", style=dashed]; +"2395 5738" -> "2396 5740" [label="[]", style=solid]; +"2396 5740" -> "2397 5749" [label="[]", style=solid]; +"2396 5740" -> "2405 5760" [label="[]", style=solid]; +"2397 5749" -> "2398 5750" [label="[]", style=solid]; +"2398 5750" -> "2402 5753" [label="[]", style=solid]; +"2399 5745" -> "2400 5747" [label="[]", style=solid]; +"2400 5747" -> "2401 5748" [label="[]", style=solid]; +"2400 5747" -> "3566 5758" [label="[]", style=solid]; +"2401 5748" -> "2402 5753" [label="[]", style=solid]; +"2402 5753" -> "2403 5755" [label="[-1, 3]", style=dashed]; 
+"2403 5755" -> "2404 5756" [label="[-1, 1]", style=dashed]; +"2404 5756" -> "2405 5760" [label="[-1]", style=dashed]; +"2404 5756" -> "3565 5757" [label="[-1]", style=dashed]; +"2405 5760" -> "3522 6520" [label="[]", style=solid]; +"2406 5686" -> "2407 5688" [label="[]", style=dashed]; +"2407 5688" -> "2408 5689" [label="[]", style=dashed]; +"2408 5689" -> "2409 5690" [label="[]", style=solid]; +"2409 5690" -> "2410 5691" [label="[-1, -1]", style=dashed]; +"2410 5691" -> "2411 5692" [label="[-1, -1]", style=dashed]; +"2411 5692" -> "2412 5695" [label="[-1]", style=dashed]; +"2411 5692" -> "2418 5703" [label="[-1]", style=dashed]; +"2412 5695" -> "2414 5696" [label="[-1]", style=dashed]; +"2413 5694" -> "2414 5696" [label="[]", style=solid]; +"2414 5696" -> "2415 5705" [label="[]", style=solid]; +"2414 5696" -> "2423 5716" [label="[]", style=solid]; +"2415 5705" -> "2416 5706" [label="[]", style=solid]; +"2416 5706" -> "2420 5709" [label="[]", style=solid]; +"2417 5701" -> "2418 5703" [label="[]", style=solid]; +"2418 5703" -> "2419 5704" [label="[]", style=solid]; +"2418 5703" -> "3568 5714" [label="[]", style=solid]; +"2419 5704" -> "2420 5709" [label="[]", style=solid]; +"2420 5709" -> "2421 5711" [label="[-1, 3]", style=dashed]; +"2421 5711" -> "2422 5712" [label="[-1, 1]", style=dashed]; +"2422 5712" -> "2423 5716" [label="[-1]", style=dashed]; +"2422 5712" -> "3567 5713" [label="[-1]", style=dashed]; +"2423 5716" -> "3522 6520" [label="[]", style=solid]; +"2424 5642" -> "2425 5644" [label="[]", style=dashed]; +"2425 5644" -> "2426 5645" [label="[]", style=dashed]; +"2426 5645" -> "2427 5646" [label="[]", style=solid]; +"2427 5646" -> "2428 5647" [label="[-1, -1]", style=dashed]; +"2428 5647" -> "2429 5648" [label="[-1, -1]", style=dashed]; +"2429 5648" -> "2430 5651" [label="[-1]", style=dashed]; +"2429 5648" -> "2436 5659" [label="[-1]", style=dashed]; +"2430 5651" -> "2432 5652" [label="[-1]", style=dashed]; +"2431 5650" -> "2432 5652" [label="[]", style=solid]; +"2432 5652" -> "2433 5661" [label="[]", style=solid]; +"2432 5652" -> "2441 5672" [label="[]", style=solid]; +"2433 5661" -> "2434 5662" [label="[]", style=solid]; +"2434 5662" -> "2438 5665" [label="[]", style=solid]; +"2435 5657" -> "2436 5659" [label="[]", style=solid]; +"2436 5659" -> "2437 5660" [label="[]", style=solid]; +"2436 5659" -> "3570 5670" [label="[]", style=solid]; +"2437 5660" -> "2438 5665" [label="[]", style=solid]; +"2438 5665" -> "2439 5667" [label="[-1, 3]", style=dashed]; +"2439 5667" -> "2440 5668" [label="[-1, 1]", style=dashed]; +"2440 5668" -> "2441 5672" [label="[-1]", style=dashed]; +"2440 5668" -> "3569 5669" [label="[-1]", style=dashed]; +"2441 5672" -> "3522 6520" [label="[]", style=solid]; +"2442 5598" -> "2443 5600" [label="[]", style=dashed]; +"2443 5600" -> "2444 5601" [label="[]", style=dashed]; +"2444 5601" -> "2445 5602" [label="[]", style=solid]; +"2445 5602" -> "2446 5603" [label="[-1, -1]", style=dashed]; +"2446 5603" -> "2447 5604" [label="[-1, -1]", style=dashed]; +"2447 5604" -> "2448 5607" [label="[-1]", style=dashed]; +"2447 5604" -> "2454 5615" [label="[-1]", style=dashed]; +"2448 5607" -> "2450 5608" [label="[-1]", style=dashed]; +"2449 5606" -> "2450 5608" [label="[]", style=solid]; +"2450 5608" -> "2451 5617" [label="[]", style=solid]; +"2450 5608" -> "2459 5628" [label="[]", style=solid]; +"2451 5617" -> "2452 5618" [label="[]", style=solid]; +"2452 5618" -> "2456 5621" [label="[]", style=solid]; +"2453 5613" -> "2454 5615" [label="[]", style=solid]; +"2454 5615" -> "2455 
5616" [label="[]", style=solid]; +"2454 5615" -> "3572 5626" [label="[]", style=solid]; +"2455 5616" -> "2456 5621" [label="[]", style=solid]; +"2456 5621" -> "2457 5623" [label="[-1, 3]", style=dashed]; +"2457 5623" -> "2458 5624" [label="[-1, 1]", style=dashed]; +"2458 5624" -> "2459 5628" [label="[-1]", style=dashed]; +"2458 5624" -> "3571 5625" [label="[-1]", style=dashed]; +"2459 5628" -> "3522 6520" [label="[]", style=solid]; +"2460 5554" -> "2461 5556" [label="[]", style=dashed]; +"2461 5556" -> "2462 5557" [label="[]", style=dashed]; +"2462 5557" -> "2463 5558" [label="[]", style=solid]; +"2463 5558" -> "2464 5559" [label="[-1, -1]", style=dashed]; +"2464 5559" -> "2465 5560" [label="[-1, -1]", style=dashed]; +"2465 5560" -> "2466 5563" [label="[-1]", style=dashed]; +"2465 5560" -> "2472 5571" [label="[-1]", style=dashed]; +"2466 5563" -> "2468 5564" [label="[-1]", style=dashed]; +"2467 5562" -> "2468 5564" [label="[]", style=solid]; +"2468 5564" -> "2469 5573" [label="[]", style=solid]; +"2468 5564" -> "2477 5584" [label="[]", style=solid]; +"2469 5573" -> "2470 5574" [label="[]", style=solid]; +"2470 5574" -> "2474 5577" [label="[]", style=solid]; +"2471 5569" -> "2472 5571" [label="[]", style=solid]; +"2472 5571" -> "2473 5572" [label="[]", style=solid]; +"2472 5571" -> "3574 5582" [label="[]", style=solid]; +"2473 5572" -> "2474 5577" [label="[]", style=solid]; +"2474 5577" -> "2475 5579" [label="[-1, 3]", style=dashed]; +"2475 5579" -> "2476 5580" [label="[-1, 1]", style=dashed]; +"2476 5580" -> "2477 5584" [label="[-1]", style=dashed]; +"2476 5580" -> "3573 5581" [label="[-1]", style=dashed]; +"2477 5584" -> "3522 6520" [label="[]", style=solid]; +"2478 5510" -> "2479 5512" [label="[]", style=dashed]; +"2479 5512" -> "2480 5513" [label="[]", style=dashed]; +"2480 5513" -> "2481 5514" [label="[]", style=solid]; +"2481 5514" -> "2482 5515" [label="[-1, -1]", style=dashed]; +"2482 5515" -> "2483 5516" [label="[-1, -1]", style=dashed]; +"2483 5516" -> "2484 5519" [label="[-1]", style=dashed]; +"2483 5516" -> "2490 5527" [label="[-1]", style=dashed]; +"2484 5519" -> "2486 5520" [label="[-1]", style=dashed]; +"2485 5518" -> "2486 5520" [label="[]", style=solid]; +"2486 5520" -> "2487 5529" [label="[]", style=solid]; +"2486 5520" -> "2495 5540" [label="[]", style=solid]; +"2487 5529" -> "2488 5530" [label="[]", style=solid]; +"2488 5530" -> "2492 5533" [label="[]", style=solid]; +"2489 5525" -> "2490 5527" [label="[]", style=solid]; +"2490 5527" -> "2491 5528" [label="[]", style=solid]; +"2490 5527" -> "3576 5538" [label="[]", style=solid]; +"2491 5528" -> "2492 5533" [label="[]", style=solid]; +"2492 5533" -> "2493 5535" [label="[-1, 3]", style=dashed]; +"2493 5535" -> "2494 5536" [label="[-1, 1]", style=dashed]; +"2494 5536" -> "2495 5540" [label="[-1]", style=dashed]; +"2494 5536" -> "3575 5537" [label="[-1]", style=dashed]; +"2495 5540" -> "3522 6520" [label="[]", style=solid]; +"2496 5466" -> "2497 5468" [label="[]", style=dashed]; +"2497 5468" -> "2498 5469" [label="[]", style=dashed]; +"2498 5469" -> "2499 5470" [label="[]", style=solid]; +"2499 5470" -> "2500 5471" [label="[-1, -1]", style=dashed]; +"2500 5471" -> "2501 5472" [label="[-1, -1]", style=dashed]; +"2501 5472" -> "2502 5475" [label="[-1]", style=dashed]; +"2501 5472" -> "2508 5483" [label="[-1]", style=dashed]; +"2502 5475" -> "2504 5476" [label="[-1]", style=dashed]; +"2503 5474" -> "2504 5476" [label="[]", style=solid]; +"2504 5476" -> "2505 5485" [label="[]", style=solid]; +"2504 5476" -> "2513 5496" 
[label="[]", style=solid]; +"2505 5485" -> "2506 5486" [label="[]", style=solid]; +"2506 5486" -> "2510 5489" [label="[]", style=solid]; +"2507 5481" -> "2508 5483" [label="[]", style=solid]; +"2508 5483" -> "2509 5484" [label="[]", style=solid]; +"2508 5483" -> "3578 5494" [label="[]", style=solid]; +"2509 5484" -> "2510 5489" [label="[]", style=solid]; +"2510 5489" -> "2511 5491" [label="[-1, 3]", style=dashed]; +"2511 5491" -> "2512 5492" [label="[-1, 1]", style=dashed]; +"2512 5492" -> "2513 5496" [label="[-1]", style=dashed]; +"2512 5492" -> "3577 5493" [label="[-1]", style=dashed]; +"2513 5496" -> "3522 6520" [label="[]", style=solid]; +"2514 5422" -> "2515 5424" [label="[]", style=dashed]; +"2515 5424" -> "2516 5425" [label="[]", style=dashed]; +"2516 5425" -> "2517 5426" [label="[]", style=solid]; +"2517 5426" -> "2518 5427" [label="[-1, -1]", style=dashed]; +"2518 5427" -> "2519 5428" [label="[-1, -1]", style=dashed]; +"2519 5428" -> "2520 5431" [label="[-1]", style=dashed]; +"2519 5428" -> "2526 5439" [label="[-1]", style=dashed]; +"2520 5431" -> "2522 5432" [label="[-1]", style=dashed]; +"2521 5430" -> "2522 5432" [label="[]", style=solid]; +"2522 5432" -> "2523 5441" [label="[]", style=solid]; +"2522 5432" -> "2531 5452" [label="[]", style=solid]; +"2523 5441" -> "2524 5442" [label="[]", style=solid]; +"2524 5442" -> "2528 5445" [label="[]", style=solid]; +"2525 5437" -> "2526 5439" [label="[]", style=solid]; +"2526 5439" -> "2527 5440" [label="[]", style=solid]; +"2526 5439" -> "3580 5450" [label="[]", style=solid]; +"2527 5440" -> "2528 5445" [label="[]", style=solid]; +"2528 5445" -> "2529 5447" [label="[-1, 3]", style=dashed]; +"2529 5447" -> "2530 5448" [label="[-1, 1]", style=dashed]; +"2530 5448" -> "2531 5452" [label="[-1]", style=dashed]; +"2530 5448" -> "3579 5449" [label="[-1]", style=dashed]; +"2531 5452" -> "3522 6520" [label="[]", style=solid]; +"2532 5378" -> "2533 5380" [label="[]", style=dashed]; +"2533 5380" -> "2534 5381" [label="[]", style=dashed]; +"2534 5381" -> "2535 5382" [label="[]", style=solid]; +"2535 5382" -> "2536 5383" [label="[-1, -1]", style=dashed]; +"2536 5383" -> "2537 5384" [label="[-1, -1]", style=dashed]; +"2537 5384" -> "2538 5387" [label="[-1]", style=dashed]; +"2537 5384" -> "2544 5395" [label="[-1]", style=dashed]; +"2538 5387" -> "2540 5388" [label="[-1]", style=dashed]; +"2539 5386" -> "2540 5388" [label="[]", style=solid]; +"2540 5388" -> "2541 5397" [label="[]", style=solid]; +"2540 5388" -> "2549 5408" [label="[]", style=solid]; +"2541 5397" -> "2542 5398" [label="[]", style=solid]; +"2542 5398" -> "2546 5401" [label="[]", style=solid]; +"2543 5393" -> "2544 5395" [label="[]", style=solid]; +"2544 5395" -> "2545 5396" [label="[]", style=solid]; +"2544 5395" -> "3582 5406" [label="[]", style=solid]; +"2545 5396" -> "2546 5401" [label="[]", style=solid]; +"2546 5401" -> "2547 5403" [label="[-1, 3]", style=dashed]; +"2547 5403" -> "2548 5404" [label="[-1, 1]", style=dashed]; +"2548 5404" -> "2549 5408" [label="[-1]", style=dashed]; +"2548 5404" -> "3581 5405" [label="[-1]", style=dashed]; +"2549 5408" -> "3522 6520" [label="[]", style=solid]; +"2550 5334" -> "2551 5336" [label="[]", style=dashed]; +"2551 5336" -> "2552 5337" [label="[]", style=dashed]; +"2552 5337" -> "2553 5338" [label="[]", style=solid]; +"2553 5338" -> "2554 5339" [label="[-1, -1]", style=dashed]; +"2554 5339" -> "2555 5340" [label="[-1, -1]", style=dashed]; +"2555 5340" -> "2556 5343" [label="[-1]", style=dashed]; +"2555 5340" -> "2562 5351" [label="[-1]", 
style=dashed]; +"2556 5343" -> "2558 5344" [label="[-1]", style=dashed]; +"2557 5342" -> "2558 5344" [label="[]", style=solid]; +"2558 5344" -> "2559 5353" [label="[]", style=solid]; +"2558 5344" -> "2567 5364" [label="[]", style=solid]; +"2559 5353" -> "2560 5354" [label="[]", style=solid]; +"2560 5354" -> "2564 5357" [label="[]", style=solid]; +"2561 5349" -> "2562 5351" [label="[]", style=solid]; +"2562 5351" -> "2563 5352" [label="[]", style=solid]; +"2562 5351" -> "3584 5362" [label="[]", style=solid]; +"2563 5352" -> "2564 5357" [label="[]", style=solid]; +"2564 5357" -> "2565 5359" [label="[-1, 3]", style=dashed]; +"2565 5359" -> "2566 5360" [label="[-1, 1]", style=dashed]; +"2566 5360" -> "2567 5364" [label="[-1]", style=dashed]; +"2566 5360" -> "3583 5361" [label="[-1]", style=dashed]; +"2567 5364" -> "3522 6520" [label="[]", style=solid]; +"2568 5290" -> "2569 5292" [label="[]", style=dashed]; +"2569 5292" -> "2570 5293" [label="[]", style=dashed]; +"2570 5293" -> "2571 5294" [label="[]", style=solid]; +"2571 5294" -> "2572 5295" [label="[-1, -1]", style=dashed]; +"2572 5295" -> "2573 5296" [label="[-1, -1]", style=dashed]; +"2573 5296" -> "2574 5299" [label="[-1]", style=dashed]; +"2573 5296" -> "2580 5307" [label="[-1]", style=dashed]; +"2574 5299" -> "2576 5300" [label="[-1]", style=dashed]; +"2575 5298" -> "2576 5300" [label="[]", style=solid]; +"2576 5300" -> "2577 5309" [label="[]", style=solid]; +"2576 5300" -> "2585 5320" [label="[]", style=solid]; +"2577 5309" -> "2578 5310" [label="[]", style=solid]; +"2578 5310" -> "2582 5313" [label="[]", style=solid]; +"2579 5305" -> "2580 5307" [label="[]", style=solid]; +"2580 5307" -> "2581 5308" [label="[]", style=solid]; +"2580 5307" -> "3586 5318" [label="[]", style=solid]; +"2581 5308" -> "2582 5313" [label="[]", style=solid]; +"2582 5313" -> "2583 5315" [label="[-1, 3]", style=dashed]; +"2583 5315" -> "2584 5316" [label="[-1, 1]", style=dashed]; +"2584 5316" -> "2585 5320" [label="[-1]", style=dashed]; +"2584 5316" -> "3585 5317" [label="[-1]", style=dashed]; +"2585 5320" -> "3522 6520" [label="[]", style=solid]; +"2586 5246" -> "2587 5248" [label="[]", style=dashed]; +"2587 5248" -> "2588 5249" [label="[]", style=dashed]; +"2588 5249" -> "2589 5250" [label="[]", style=solid]; +"2589 5250" -> "2590 5251" [label="[-1, -1]", style=dashed]; +"2590 5251" -> "2591 5252" [label="[-1, -1]", style=dashed]; +"2591 5252" -> "2592 5255" [label="[-1]", style=dashed]; +"2591 5252" -> "2598 5263" [label="[-1]", style=dashed]; +"2592 5255" -> "2594 5256" [label="[-1]", style=dashed]; +"2593 5254" -> "2594 5256" [label="[]", style=solid]; +"2594 5256" -> "2595 5265" [label="[]", style=solid]; +"2594 5256" -> "2603 5276" [label="[]", style=solid]; +"2595 5265" -> "2596 5266" [label="[]", style=solid]; +"2596 5266" -> "2600 5269" [label="[]", style=solid]; +"2597 5261" -> "2598 5263" [label="[]", style=solid]; +"2598 5263" -> "2599 5264" [label="[]", style=solid]; +"2598 5263" -> "3588 5274" [label="[]", style=solid]; +"2599 5264" -> "2600 5269" [label="[]", style=solid]; +"2600 5269" -> "2601 5271" [label="[-1, 3]", style=dashed]; +"2601 5271" -> "2602 5272" [label="[-1, 1]", style=dashed]; +"2602 5272" -> "2603 5276" [label="[-1]", style=dashed]; +"2602 5272" -> "3587 5273" [label="[-1]", style=dashed]; +"2603 5276" -> "3522 6520" [label="[]", style=solid]; +"2604 5202" -> "2605 5204" [label="[]", style=dashed]; +"2605 5204" -> "2606 5205" [label="[]", style=dashed]; +"2606 5205" -> "2607 5206" [label="[]", style=solid]; +"2607 5206" -> 
"2608 5207" [label="[-1, -1]", style=dashed]; +"2608 5207" -> "2609 5208" [label="[-1, -1]", style=dashed]; +"2609 5208" -> "2610 5211" [label="[-1]", style=dashed]; +"2609 5208" -> "2616 5219" [label="[-1]", style=dashed]; +"2610 5211" -> "2612 5212" [label="[-1]", style=dashed]; +"2611 5210" -> "2612 5212" [label="[]", style=solid]; +"2612 5212" -> "2613 5221" [label="[]", style=solid]; +"2612 5212" -> "2621 5232" [label="[]", style=solid]; +"2613 5221" -> "2614 5222" [label="[]", style=solid]; +"2614 5222" -> "2618 5225" [label="[]", style=solid]; +"2615 5217" -> "2616 5219" [label="[]", style=solid]; +"2616 5219" -> "2617 5220" [label="[]", style=solid]; +"2616 5219" -> "3590 5230" [label="[]", style=solid]; +"2617 5220" -> "2618 5225" [label="[]", style=solid]; +"2618 5225" -> "2619 5227" [label="[-1, 3]", style=dashed]; +"2619 5227" -> "2620 5228" [label="[-1, 1]", style=dashed]; +"2620 5228" -> "2621 5232" [label="[-1]", style=dashed]; +"2620 5228" -> "3589 5229" [label="[-1]", style=dashed]; +"2621 5232" -> "3522 6520" [label="[]", style=solid]; +"2622 5158" -> "2623 5160" [label="[]", style=dashed]; +"2623 5160" -> "2624 5161" [label="[]", style=dashed]; +"2624 5161" -> "2625 5162" [label="[]", style=solid]; +"2625 5162" -> "2626 5163" [label="[-1, -1]", style=dashed]; +"2626 5163" -> "2627 5164" [label="[-1, -1]", style=dashed]; +"2627 5164" -> "2628 5167" [label="[-1]", style=dashed]; +"2627 5164" -> "2634 5175" [label="[-1]", style=dashed]; +"2628 5167" -> "2630 5168" [label="[-1]", style=dashed]; +"2629 5166" -> "2630 5168" [label="[]", style=solid]; +"2630 5168" -> "2631 5177" [label="[]", style=solid]; +"2630 5168" -> "2639 5188" [label="[]", style=solid]; +"2631 5177" -> "2632 5178" [label="[]", style=solid]; +"2632 5178" -> "2636 5181" [label="[]", style=solid]; +"2633 5173" -> "2634 5175" [label="[]", style=solid]; +"2634 5175" -> "2635 5176" [label="[]", style=solid]; +"2634 5175" -> "3592 5186" [label="[]", style=solid]; +"2635 5176" -> "2636 5181" [label="[]", style=solid]; +"2636 5181" -> "2637 5183" [label="[-1, 3]", style=dashed]; +"2637 5183" -> "2638 5184" [label="[-1, 1]", style=dashed]; +"2638 5184" -> "2639 5188" [label="[-1]", style=dashed]; +"2638 5184" -> "3591 5185" [label="[-1]", style=dashed]; +"2639 5188" -> "3522 6520" [label="[]", style=solid]; +"2640 5114" -> "2641 5116" [label="[]", style=dashed]; +"2641 5116" -> "2642 5117" [label="[]", style=dashed]; +"2642 5117" -> "2643 5118" [label="[]", style=solid]; +"2643 5118" -> "2644 5119" [label="[-1, -1]", style=dashed]; +"2644 5119" -> "2645 5120" [label="[-1, -1]", style=dashed]; +"2645 5120" -> "2646 5123" [label="[-1]", style=dashed]; +"2645 5120" -> "2652 5131" [label="[-1]", style=dashed]; +"2646 5123" -> "2648 5124" [label="[-1]", style=dashed]; +"2647 5122" -> "2648 5124" [label="[]", style=solid]; +"2648 5124" -> "2649 5133" [label="[]", style=solid]; +"2648 5124" -> "2657 5144" [label="[]", style=solid]; +"2649 5133" -> "2650 5134" [label="[]", style=solid]; +"2650 5134" -> "2654 5137" [label="[]", style=solid]; +"2651 5129" -> "2652 5131" [label="[]", style=solid]; +"2652 5131" -> "2653 5132" [label="[]", style=solid]; +"2652 5131" -> "3594 5142" [label="[]", style=solid]; +"2653 5132" -> "2654 5137" [label="[]", style=solid]; +"2654 5137" -> "2655 5139" [label="[-1, 3]", style=dashed]; +"2655 5139" -> "2656 5140" [label="[-1, 1]", style=dashed]; +"2656 5140" -> "2657 5144" [label="[-1]", style=dashed]; +"2656 5140" -> "3593 5141" [label="[-1]", style=dashed]; +"2657 5144" -> "3522 6520" 
[label="[]", style=solid]; +"2658 5070" -> "2659 5072" [label="[]", style=dashed]; +"2659 5072" -> "2660 5073" [label="[]", style=dashed]; +"2660 5073" -> "2661 5074" [label="[]", style=solid]; +"2661 5074" -> "2662 5075" [label="[-1, -1]", style=dashed]; +"2662 5075" -> "2663 5076" [label="[-1, -1]", style=dashed]; +"2663 5076" -> "2664 5079" [label="[-1]", style=dashed]; +"2663 5076" -> "2670 5087" [label="[-1]", style=dashed]; +"2664 5079" -> "2666 5080" [label="[-1]", style=dashed]; +"2665 5078" -> "2666 5080" [label="[]", style=solid]; +"2666 5080" -> "2667 5089" [label="[]", style=solid]; +"2666 5080" -> "2675 5100" [label="[]", style=solid]; +"2667 5089" -> "2668 5090" [label="[]", style=solid]; +"2668 5090" -> "2672 5093" [label="[]", style=solid]; +"2669 5085" -> "2670 5087" [label="[]", style=solid]; +"2670 5087" -> "2671 5088" [label="[]", style=solid]; +"2670 5087" -> "3596 5098" [label="[]", style=solid]; +"2671 5088" -> "2672 5093" [label="[]", style=solid]; +"2672 5093" -> "2673 5095" [label="[-1, 3]", style=dashed]; +"2673 5095" -> "2674 5096" [label="[-1, 1]", style=dashed]; +"2674 5096" -> "2675 5100" [label="[-1]", style=dashed]; +"2674 5096" -> "3595 5097" [label="[-1]", style=dashed]; +"2675 5100" -> "3522 6520" [label="[]", style=solid]; +"2676 5026" -> "2677 5028" [label="[]", style=dashed]; +"2677 5028" -> "2678 5029" [label="[]", style=dashed]; +"2678 5029" -> "2679 5030" [label="[]", style=solid]; +"2679 5030" -> "2680 5031" [label="[-1, -1]", style=dashed]; +"2680 5031" -> "2681 5032" [label="[-1, -1]", style=dashed]; +"2681 5032" -> "2682 5035" [label="[-1]", style=dashed]; +"2681 5032" -> "2688 5043" [label="[-1]", style=dashed]; +"2682 5035" -> "2684 5036" [label="[-1]", style=dashed]; +"2683 5034" -> "2684 5036" [label="[]", style=solid]; +"2684 5036" -> "2685 5045" [label="[]", style=solid]; +"2684 5036" -> "2693 5056" [label="[]", style=solid]; +"2685 5045" -> "2686 5046" [label="[]", style=solid]; +"2686 5046" -> "2690 5049" [label="[]", style=solid]; +"2687 5041" -> "2688 5043" [label="[]", style=solid]; +"2688 5043" -> "2689 5044" [label="[]", style=solid]; +"2688 5043" -> "3598 5054" [label="[]", style=solid]; +"2689 5044" -> "2690 5049" [label="[]", style=solid]; +"2690 5049" -> "2691 5051" [label="[-1, 3]", style=dashed]; +"2691 5051" -> "2692 5052" [label="[-1, 1]", style=dashed]; +"2692 5052" -> "2693 5056" [label="[-1]", style=dashed]; +"2692 5052" -> "3597 5053" [label="[-1]", style=dashed]; +"2693 5056" -> "3522 6520" [label="[]", style=solid]; +"2694 4982" -> "2695 4984" [label="[]", style=dashed]; +"2695 4984" -> "2696 4985" [label="[]", style=dashed]; +"2696 4985" -> "2697 4986" [label="[]", style=solid]; +"2697 4986" -> "2698 4987" [label="[-1, -1]", style=dashed]; +"2698 4987" -> "2699 4988" [label="[-1, -1]", style=dashed]; +"2699 4988" -> "2700 4991" [label="[-1]", style=dashed]; +"2699 4988" -> "2706 4999" [label="[-1]", style=dashed]; +"2700 4991" -> "2702 4992" [label="[-1]", style=dashed]; +"2701 4990" -> "2702 4992" [label="[]", style=solid]; +"2702 4992" -> "2703 5001" [label="[]", style=solid]; +"2702 4992" -> "2711 5012" [label="[]", style=solid]; +"2703 5001" -> "2704 5002" [label="[]", style=solid]; +"2704 5002" -> "2708 5005" [label="[]", style=solid]; +"2705 4997" -> "2706 4999" [label="[]", style=solid]; +"2706 4999" -> "2707 5000" [label="[]", style=solid]; +"2706 4999" -> "3600 5010" [label="[]", style=solid]; +"2707 5000" -> "2708 5005" [label="[]", style=solid]; +"2708 5005" -> "2709 5007" [label="[-1, 3]", style=dashed]; 
+"2709 5007" -> "2710 5008" [label="[-1, 1]", style=dashed]; +"2710 5008" -> "2711 5012" [label="[-1]", style=dashed]; +"2710 5008" -> "3599 5009" [label="[-1]", style=dashed]; +"2711 5012" -> "3522 6520" [label="[]", style=solid]; +"2712 4938" -> "2713 4940" [label="[]", style=dashed]; +"2713 4940" -> "2714 4941" [label="[]", style=dashed]; +"2714 4941" -> "2715 4942" [label="[]", style=solid]; +"2715 4942" -> "2716 4943" [label="[-1, -1]", style=dashed]; +"2716 4943" -> "2717 4944" [label="[-1, -1]", style=dashed]; +"2717 4944" -> "2718 4947" [label="[-1]", style=dashed]; +"2717 4944" -> "2724 4955" [label="[-1]", style=dashed]; +"2718 4947" -> "2720 4948" [label="[-1]", style=dashed]; +"2719 4946" -> "2720 4948" [label="[]", style=solid]; +"2720 4948" -> "2721 4957" [label="[]", style=solid]; +"2720 4948" -> "2729 4968" [label="[]", style=solid]; +"2721 4957" -> "2722 4958" [label="[]", style=solid]; +"2722 4958" -> "2726 4961" [label="[]", style=solid]; +"2723 4953" -> "2724 4955" [label="[]", style=solid]; +"2724 4955" -> "2725 4956" [label="[]", style=solid]; +"2724 4955" -> "3602 4966" [label="[]", style=solid]; +"2725 4956" -> "2726 4961" [label="[]", style=solid]; +"2726 4961" -> "2727 4963" [label="[-1, 3]", style=dashed]; +"2727 4963" -> "2728 4964" [label="[-1, 1]", style=dashed]; +"2728 4964" -> "2729 4968" [label="[-1]", style=dashed]; +"2728 4964" -> "3601 4965" [label="[-1]", style=dashed]; +"2729 4968" -> "3522 6520" [label="[]", style=solid]; +"2730 4894" -> "2731 4896" [label="[]", style=dashed]; +"2731 4896" -> "2732 4897" [label="[]", style=dashed]; +"2732 4897" -> "2733 4898" [label="[]", style=solid]; +"2733 4898" -> "2734 4899" [label="[-1, -1]", style=dashed]; +"2734 4899" -> "2735 4900" [label="[-1, -1]", style=dashed]; +"2735 4900" -> "2736 4903" [label="[-1]", style=dashed]; +"2735 4900" -> "2742 4911" [label="[-1]", style=dashed]; +"2736 4903" -> "2738 4904" [label="[-1]", style=dashed]; +"2737 4902" -> "2738 4904" [label="[]", style=solid]; +"2738 4904" -> "2739 4913" [label="[]", style=solid]; +"2738 4904" -> "2747 4924" [label="[]", style=solid]; +"2739 4913" -> "2740 4914" [label="[]", style=solid]; +"2740 4914" -> "2744 4917" [label="[]", style=solid]; +"2741 4909" -> "2742 4911" [label="[]", style=solid]; +"2742 4911" -> "2743 4912" [label="[]", style=solid]; +"2742 4911" -> "3604 4922" [label="[]", style=solid]; +"2743 4912" -> "2744 4917" [label="[]", style=solid]; +"2744 4917" -> "2745 4919" [label="[-1, 3]", style=dashed]; +"2745 4919" -> "2746 4920" [label="[-1, 1]", style=dashed]; +"2746 4920" -> "2747 4924" [label="[-1]", style=dashed]; +"2746 4920" -> "3603 4921" [label="[-1]", style=dashed]; +"2747 4924" -> "3522 6520" [label="[]", style=solid]; +"2748 4850" -> "2749 4852" [label="[]", style=dashed]; +"2749 4852" -> "2750 4853" [label="[]", style=dashed]; +"2750 4853" -> "2751 4854" [label="[]", style=solid]; +"2751 4854" -> "2752 4855" [label="[-1, -1]", style=dashed]; +"2752 4855" -> "2753 4856" [label="[-1, -1]", style=dashed]; +"2753 4856" -> "2754 4859" [label="[-1]", style=dashed]; +"2753 4856" -> "2760 4867" [label="[-1]", style=dashed]; +"2754 4859" -> "2756 4860" [label="[-1]", style=dashed]; +"2755 4858" -> "2756 4860" [label="[]", style=solid]; +"2756 4860" -> "2757 4869" [label="[]", style=solid]; +"2756 4860" -> "2765 4880" [label="[]", style=solid]; +"2757 4869" -> "2758 4870" [label="[]", style=solid]; +"2758 4870" -> "2762 4873" [label="[]", style=solid]; +"2759 4865" -> "2760 4867" [label="[]", style=solid]; +"2760 4867" -> "2761 
4868" [label="[]", style=solid]; +"2760 4867" -> "3606 4878" [label="[]", style=solid]; +"2761 4868" -> "2762 4873" [label="[]", style=solid]; +"2762 4873" -> "2763 4875" [label="[-1, 3]", style=dashed]; +"2763 4875" -> "2764 4876" [label="[-1, 1]", style=dashed]; +"2764 4876" -> "2765 4880" [label="[-1]", style=dashed]; +"2764 4876" -> "3605 4877" [label="[-1]", style=dashed]; +"2765 4880" -> "3522 6520" [label="[]", style=solid]; +"2766 4806" -> "2767 4808" [label="[]", style=dashed]; +"2767 4808" -> "2768 4809" [label="[]", style=dashed]; +"2768 4809" -> "2769 4810" [label="[]", style=solid]; +"2769 4810" -> "2770 4811" [label="[-1, -1]", style=dashed]; +"2770 4811" -> "2771 4812" [label="[-1, -1]", style=dashed]; +"2771 4812" -> "2772 4815" [label="[-1]", style=dashed]; +"2771 4812" -> "2778 4823" [label="[-1]", style=dashed]; +"2772 4815" -> "2774 4816" [label="[-1]", style=dashed]; +"2773 4814" -> "2774 4816" [label="[]", style=solid]; +"2774 4816" -> "2775 4825" [label="[]", style=solid]; +"2774 4816" -> "2783 4836" [label="[]", style=solid]; +"2775 4825" -> "2776 4826" [label="[]", style=solid]; +"2776 4826" -> "2780 4829" [label="[]", style=solid]; +"2777 4821" -> "2778 4823" [label="[]", style=solid]; +"2778 4823" -> "2779 4824" [label="[]", style=solid]; +"2778 4823" -> "3608 4834" [label="[]", style=solid]; +"2779 4824" -> "2780 4829" [label="[]", style=solid]; +"2780 4829" -> "2781 4831" [label="[-1, 3]", style=dashed]; +"2781 4831" -> "2782 4832" [label="[-1, 1]", style=dashed]; +"2782 4832" -> "2783 4836" [label="[-1]", style=dashed]; +"2782 4832" -> "3607 4833" [label="[-1]", style=dashed]; +"2783 4836" -> "3522 6520" [label="[]", style=solid]; +"2784 4762" -> "2785 4764" [label="[]", style=dashed]; +"2785 4764" -> "2786 4765" [label="[]", style=dashed]; +"2786 4765" -> "2787 4766" [label="[]", style=solid]; +"2787 4766" -> "2788 4767" [label="[-1, -1]", style=dashed]; +"2788 4767" -> "2789 4768" [label="[-1, -1]", style=dashed]; +"2789 4768" -> "2790 4771" [label="[-1]", style=dashed]; +"2789 4768" -> "2796 4779" [label="[-1]", style=dashed]; +"2790 4771" -> "2792 4772" [label="[-1]", style=dashed]; +"2791 4770" -> "2792 4772" [label="[]", style=solid]; +"2792 4772" -> "2793 4781" [label="[]", style=solid]; +"2792 4772" -> "2801 4792" [label="[]", style=solid]; +"2793 4781" -> "2794 4782" [label="[]", style=solid]; +"2794 4782" -> "2798 4785" [label="[]", style=solid]; +"2795 4777" -> "2796 4779" [label="[]", style=solid]; +"2796 4779" -> "2797 4780" [label="[]", style=solid]; +"2796 4779" -> "3610 4790" [label="[]", style=solid]; +"2797 4780" -> "2798 4785" [label="[]", style=solid]; +"2798 4785" -> "2799 4787" [label="[-1, 3]", style=dashed]; +"2799 4787" -> "2800 4788" [label="[-1, 1]", style=dashed]; +"2800 4788" -> "2801 4792" [label="[-1]", style=dashed]; +"2800 4788" -> "3609 4789" [label="[-1]", style=dashed]; +"2801 4792" -> "3522 6520" [label="[]", style=solid]; +"2802 4718" -> "2803 4720" [label="[]", style=dashed]; +"2803 4720" -> "2804 4721" [label="[]", style=dashed]; +"2804 4721" -> "2805 4722" [label="[]", style=solid]; +"2805 4722" -> "2806 4723" [label="[-1, -1]", style=dashed]; +"2806 4723" -> "2807 4724" [label="[-1, -1]", style=dashed]; +"2807 4724" -> "2808 4727" [label="[-1]", style=dashed]; +"2807 4724" -> "2814 4735" [label="[-1]", style=dashed]; +"2808 4727" -> "2810 4728" [label="[-1]", style=dashed]; +"2809 4726" -> "2810 4728" [label="[]", style=solid]; +"2810 4728" -> "2811 4737" [label="[]", style=solid]; +"2810 4728" -> "2819 4748" 
[label="[]", style=solid]; +"2811 4737" -> "2812 4738" [label="[]", style=solid]; +"2812 4738" -> "2816 4741" [label="[]", style=solid]; +"2813 4733" -> "2814 4735" [label="[]", style=solid]; +"2814 4735" -> "2815 4736" [label="[]", style=solid]; +"2814 4735" -> "3612 4746" [label="[]", style=solid]; +"2815 4736" -> "2816 4741" [label="[]", style=solid]; +"2816 4741" -> "2817 4743" [label="[-1, 3]", style=dashed]; +"2817 4743" -> "2818 4744" [label="[-1, 1]", style=dashed]; +"2818 4744" -> "2819 4748" [label="[-1]", style=dashed]; +"2818 4744" -> "3611 4745" [label="[-1]", style=dashed]; +"2819 4748" -> "3522 6520" [label="[]", style=solid]; +"2820 4674" -> "2821 4676" [label="[]", style=dashed]; +"2821 4676" -> "2822 4677" [label="[]", style=dashed]; +"2822 4677" -> "2823 4678" [label="[]", style=solid]; +"2823 4678" -> "2824 4679" [label="[-1, -1]", style=dashed]; +"2824 4679" -> "2825 4680" [label="[-1, -1]", style=dashed]; +"2825 4680" -> "2826 4683" [label="[-1]", style=dashed]; +"2825 4680" -> "2832 4691" [label="[-1]", style=dashed]; +"2826 4683" -> "2828 4684" [label="[-1]", style=dashed]; +"2827 4682" -> "2828 4684" [label="[]", style=solid]; +"2828 4684" -> "2829 4693" [label="[]", style=solid]; +"2828 4684" -> "2837 4704" [label="[]", style=solid]; +"2829 4693" -> "2830 4694" [label="[]", style=solid]; +"2830 4694" -> "2834 4697" [label="[]", style=solid]; +"2831 4689" -> "2832 4691" [label="[]", style=solid]; +"2832 4691" -> "2833 4692" [label="[]", style=solid]; +"2832 4691" -> "3614 4702" [label="[]", style=solid]; +"2833 4692" -> "2834 4697" [label="[]", style=solid]; +"2834 4697" -> "2835 4699" [label="[-1, 3]", style=dashed]; +"2835 4699" -> "2836 4700" [label="[-1, 1]", style=dashed]; +"2836 4700" -> "2837 4704" [label="[-1]", style=dashed]; +"2836 4700" -> "3613 4701" [label="[-1]", style=dashed]; +"2837 4704" -> "3522 6520" [label="[]", style=solid]; +"2838 4630" -> "2839 4632" [label="[]", style=dashed]; +"2839 4632" -> "2840 4633" [label="[]", style=dashed]; +"2840 4633" -> "2841 4634" [label="[]", style=solid]; +"2841 4634" -> "2842 4635" [label="[-1, -1]", style=dashed]; +"2842 4635" -> "2843 4636" [label="[-1, -1]", style=dashed]; +"2843 4636" -> "2844 4639" [label="[-1]", style=dashed]; +"2843 4636" -> "2850 4647" [label="[-1]", style=dashed]; +"2844 4639" -> "2846 4640" [label="[-1]", style=dashed]; +"2845 4638" -> "2846 4640" [label="[]", style=solid]; +"2846 4640" -> "2847 4649" [label="[]", style=solid]; +"2846 4640" -> "2855 4660" [label="[]", style=solid]; +"2847 4649" -> "2848 4650" [label="[]", style=solid]; +"2848 4650" -> "2852 4653" [label="[]", style=solid]; +"2849 4645" -> "2850 4647" [label="[]", style=solid]; +"2850 4647" -> "2851 4648" [label="[]", style=solid]; +"2850 4647" -> "3616 4658" [label="[]", style=solid]; +"2851 4648" -> "2852 4653" [label="[]", style=solid]; +"2852 4653" -> "2853 4655" [label="[-1, 3]", style=dashed]; +"2853 4655" -> "2854 4656" [label="[-1, 1]", style=dashed]; +"2854 4656" -> "2855 4660" [label="[-1]", style=dashed]; +"2854 4656" -> "3615 4657" [label="[-1]", style=dashed]; +"2855 4660" -> "3522 6520" [label="[]", style=solid]; +"2856 4586" -> "2857 4588" [label="[]", style=dashed]; +"2857 4588" -> "2858 4589" [label="[]", style=dashed]; +"2858 4589" -> "2859 4590" [label="[]", style=solid]; +"2859 4590" -> "2860 4591" [label="[-1, -1]", style=dashed]; +"2860 4591" -> "2861 4592" [label="[-1, -1]", style=dashed]; +"2861 4592" -> "2862 4595" [label="[-1]", style=dashed]; +"2861 4592" -> "2868 4603" [label="[-1]", 
style=dashed]; +"2862 4595" -> "2864 4596" [label="[-1]", style=dashed]; +"2863 4594" -> "2864 4596" [label="[]", style=solid]; +"2864 4596" -> "2865 4605" [label="[]", style=solid]; +"2864 4596" -> "2873 4616" [label="[]", style=solid]; +"2865 4605" -> "2866 4606" [label="[]", style=solid]; +"2866 4606" -> "2870 4609" [label="[]", style=solid]; +"2867 4601" -> "2868 4603" [label="[]", style=solid]; +"2868 4603" -> "2869 4604" [label="[]", style=solid]; +"2868 4603" -> "3618 4614" [label="[]", style=solid]; +"2869 4604" -> "2870 4609" [label="[]", style=solid]; +"2870 4609" -> "2871 4611" [label="[-1, 3]", style=dashed]; +"2871 4611" -> "2872 4612" [label="[-1, 1]", style=dashed]; +"2872 4612" -> "2873 4616" [label="[-1]", style=dashed]; +"2872 4612" -> "3617 4613" [label="[-1]", style=dashed]; +"2873 4616" -> "3522 6520" [label="[]", style=solid]; +"2874 4542" -> "2875 4544" [label="[]", style=dashed]; +"2875 4544" -> "2876 4545" [label="[]", style=dashed]; +"2876 4545" -> "2877 4546" [label="[]", style=solid]; +"2877 4546" -> "2878 4547" [label="[-1, -1]", style=dashed]; +"2878 4547" -> "2879 4548" [label="[-1, -1]", style=dashed]; +"2879 4548" -> "2880 4551" [label="[-1]", style=dashed]; +"2879 4548" -> "2886 4559" [label="[-1]", style=dashed]; +"2880 4551" -> "2882 4552" [label="[-1]", style=dashed]; +"2881 4550" -> "2882 4552" [label="[]", style=solid]; +"2882 4552" -> "2883 4561" [label="[]", style=solid]; +"2882 4552" -> "2891 4572" [label="[]", style=solid]; +"2883 4561" -> "2884 4562" [label="[]", style=solid]; +"2884 4562" -> "2888 4565" [label="[]", style=solid]; +"2885 4557" -> "2886 4559" [label="[]", style=solid]; +"2886 4559" -> "2887 4560" [label="[]", style=solid]; +"2886 4559" -> "3620 4570" [label="[]", style=solid]; +"2887 4560" -> "2888 4565" [label="[]", style=solid]; +"2888 4565" -> "2889 4567" [label="[-1, 3]", style=dashed]; +"2889 4567" -> "2890 4568" [label="[-1, 1]", style=dashed]; +"2890 4568" -> "2891 4572" [label="[-1]", style=dashed]; +"2890 4568" -> "3619 4569" [label="[-1]", style=dashed]; +"2891 4572" -> "3522 6520" [label="[]", style=solid]; +"2892 4498" -> "2893 4500" [label="[]", style=dashed]; +"2893 4500" -> "2894 4501" [label="[]", style=dashed]; +"2894 4501" -> "2895 4502" [label="[]", style=solid]; +"2895 4502" -> "2896 4503" [label="[-1, -1]", style=dashed]; +"2896 4503" -> "2897 4504" [label="[-1, -1]", style=dashed]; +"2897 4504" -> "2898 4507" [label="[-1]", style=dashed]; +"2897 4504" -> "2904 4515" [label="[-1]", style=dashed]; +"2898 4507" -> "2900 4508" [label="[-1]", style=dashed]; +"2899 4506" -> "2900 4508" [label="[]", style=solid]; +"2900 4508" -> "2901 4517" [label="[]", style=solid]; +"2900 4508" -> "2909 4528" [label="[]", style=solid]; +"2901 4517" -> "2902 4518" [label="[]", style=solid]; +"2902 4518" -> "2906 4521" [label="[]", style=solid]; +"2903 4513" -> "2904 4515" [label="[]", style=solid]; +"2904 4515" -> "2905 4516" [label="[]", style=solid]; +"2904 4515" -> "3622 4526" [label="[]", style=solid]; +"2905 4516" -> "2906 4521" [label="[]", style=solid]; +"2906 4521" -> "2907 4523" [label="[-1, 3]", style=dashed]; +"2907 4523" -> "2908 4524" [label="[-1, 1]", style=dashed]; +"2908 4524" -> "2909 4528" [label="[-1]", style=dashed]; +"2908 4524" -> "3621 4525" [label="[-1]", style=dashed]; +"2909 4528" -> "3522 6520" [label="[]", style=solid]; +"2910 4454" -> "2911 4456" [label="[]", style=dashed]; +"2911 4456" -> "2912 4457" [label="[]", style=dashed]; +"2912 4457" -> "2913 4458" [label="[]", style=solid]; +"2913 4458" -> 
"2914 4459" [label="[-1, -1]", style=dashed]; +"2914 4459" -> "2915 4460" [label="[-1, -1]", style=dashed]; +"2915 4460" -> "2916 4463" [label="[-1]", style=dashed]; +"2915 4460" -> "2922 4471" [label="[-1]", style=dashed]; +"2916 4463" -> "2918 4464" [label="[-1]", style=dashed]; +"2917 4462" -> "2918 4464" [label="[]", style=solid]; +"2918 4464" -> "2919 4473" [label="[]", style=solid]; +"2918 4464" -> "2927 4484" [label="[]", style=solid]; +"2919 4473" -> "2920 4474" [label="[]", style=solid]; +"2920 4474" -> "2924 4477" [label="[]", style=solid]; +"2921 4469" -> "2922 4471" [label="[]", style=solid]; +"2922 4471" -> "2923 4472" [label="[]", style=solid]; +"2922 4471" -> "3624 4482" [label="[]", style=solid]; +"2923 4472" -> "2924 4477" [label="[]", style=solid]; +"2924 4477" -> "2925 4479" [label="[-1, 3]", style=dashed]; +"2925 4479" -> "2926 4480" [label="[-1, 1]", style=dashed]; +"2926 4480" -> "2927 4484" [label="[-1]", style=dashed]; +"2926 4480" -> "3623 4481" [label="[-1]", style=dashed]; +"2927 4484" -> "3522 6520" [label="[]", style=solid]; +"2928 4410" -> "2929 4412" [label="[]", style=dashed]; +"2929 4412" -> "2930 4413" [label="[]", style=dashed]; +"2930 4413" -> "2931 4414" [label="[]", style=solid]; +"2931 4414" -> "2932 4415" [label="[-1, -1]", style=dashed]; +"2932 4415" -> "2933 4416" [label="[-1, -1]", style=dashed]; +"2933 4416" -> "2934 4419" [label="[-1]", style=dashed]; +"2933 4416" -> "2940 4427" [label="[-1]", style=dashed]; +"2934 4419" -> "2936 4420" [label="[-1]", style=dashed]; +"2935 4418" -> "2936 4420" [label="[]", style=solid]; +"2936 4420" -> "2937 4429" [label="[]", style=solid]; +"2936 4420" -> "2945 4440" [label="[]", style=solid]; +"2937 4429" -> "2938 4430" [label="[]", style=solid]; +"2938 4430" -> "2942 4433" [label="[]", style=solid]; +"2939 4425" -> "2940 4427" [label="[]", style=solid]; +"2940 4427" -> "2941 4428" [label="[]", style=solid]; +"2940 4427" -> "3626 4438" [label="[]", style=solid]; +"2941 4428" -> "2942 4433" [label="[]", style=solid]; +"2942 4433" -> "2943 4435" [label="[-1, 3]", style=dashed]; +"2943 4435" -> "2944 4436" [label="[-1, 1]", style=dashed]; +"2944 4436" -> "2945 4440" [label="[-1]", style=dashed]; +"2944 4436" -> "3625 4437" [label="[-1]", style=dashed]; +"2945 4440" -> "3522 6520" [label="[]", style=solid]; +"2946 4366" -> "2947 4368" [label="[]", style=dashed]; +"2947 4368" -> "2948 4369" [label="[]", style=dashed]; +"2948 4369" -> "2949 4370" [label="[]", style=solid]; +"2949 4370" -> "2950 4371" [label="[-1, -1]", style=dashed]; +"2950 4371" -> "2951 4372" [label="[-1, -1]", style=dashed]; +"2951 4372" -> "2952 4375" [label="[-1]", style=dashed]; +"2951 4372" -> "2958 4383" [label="[-1]", style=dashed]; +"2952 4375" -> "2954 4376" [label="[-1]", style=dashed]; +"2953 4374" -> "2954 4376" [label="[]", style=solid]; +"2954 4376" -> "2955 4385" [label="[]", style=solid]; +"2954 4376" -> "2963 4396" [label="[]", style=solid]; +"2955 4385" -> "2956 4386" [label="[]", style=solid]; +"2956 4386" -> "2960 4389" [label="[]", style=solid]; +"2957 4381" -> "2958 4383" [label="[]", style=solid]; +"2958 4383" -> "2959 4384" [label="[]", style=solid]; +"2958 4383" -> "3628 4394" [label="[]", style=solid]; +"2959 4384" -> "2960 4389" [label="[]", style=solid]; +"2960 4389" -> "2961 4391" [label="[-1, 3]", style=dashed]; +"2961 4391" -> "2962 4392" [label="[-1, 1]", style=dashed]; +"2962 4392" -> "2963 4396" [label="[-1]", style=dashed]; +"2962 4392" -> "3627 4393" [label="[-1]", style=dashed]; +"2963 4396" -> "3522 6520" 
[label="[]", style=solid]; +"2964 4322" -> "2965 4324" [label="[]", style=dashed]; +"2965 4324" -> "2966 4325" [label="[]", style=dashed]; +"2966 4325" -> "2967 4326" [label="[]", style=solid]; +"2967 4326" -> "2968 4327" [label="[-1, -1]", style=dashed]; +"2968 4327" -> "2969 4328" [label="[-1, -1]", style=dashed]; +"2969 4328" -> "2970 4331" [label="[-1]", style=dashed]; +"2969 4328" -> "2976 4339" [label="[-1]", style=dashed]; +"2970 4331" -> "2972 4332" [label="[-1]", style=dashed]; +"2971 4330" -> "2972 4332" [label="[]", style=solid]; +"2972 4332" -> "2973 4341" [label="[]", style=solid]; +"2972 4332" -> "2981 4352" [label="[]", style=solid]; +"2973 4341" -> "2974 4342" [label="[]", style=solid]; +"2974 4342" -> "2978 4345" [label="[]", style=solid]; +"2975 4337" -> "2976 4339" [label="[]", style=solid]; +"2976 4339" -> "2977 4340" [label="[]", style=solid]; +"2976 4339" -> "3630 4350" [label="[]", style=solid]; +"2977 4340" -> "2978 4345" [label="[]", style=solid]; +"2978 4345" -> "2979 4347" [label="[-1, 3]", style=dashed]; +"2979 4347" -> "2980 4348" [label="[-1, 1]", style=dashed]; +"2980 4348" -> "2981 4352" [label="[-1]", style=dashed]; +"2980 4348" -> "3629 4349" [label="[-1]", style=dashed]; +"2981 4352" -> "3522 6520" [label="[]", style=solid]; +"2982 4278" -> "2983 4280" [label="[]", style=dashed]; +"2983 4280" -> "2984 4281" [label="[]", style=dashed]; +"2984 4281" -> "2985 4282" [label="[]", style=solid]; +"2985 4282" -> "2986 4283" [label="[-1, -1]", style=dashed]; +"2986 4283" -> "2987 4284" [label="[-1, -1]", style=dashed]; +"2987 4284" -> "2988 4287" [label="[-1]", style=dashed]; +"2987 4284" -> "2994 4295" [label="[-1]", style=dashed]; +"2988 4287" -> "2990 4288" [label="[-1]", style=dashed]; +"2989 4286" -> "2990 4288" [label="[]", style=solid]; +"2990 4288" -> "2991 4297" [label="[]", style=solid]; +"2990 4288" -> "2999 4308" [label="[]", style=solid]; +"2991 4297" -> "2992 4298" [label="[]", style=solid]; +"2992 4298" -> "2996 4301" [label="[]", style=solid]; +"2993 4293" -> "2994 4295" [label="[]", style=solid]; +"2994 4295" -> "2995 4296" [label="[]", style=solid]; +"2994 4295" -> "3632 4306" [label="[]", style=solid]; +"2995 4296" -> "2996 4301" [label="[]", style=solid]; +"2996 4301" -> "2997 4303" [label="[-1, 3]", style=dashed]; +"2997 4303" -> "2998 4304" [label="[-1, 1]", style=dashed]; +"2998 4304" -> "2999 4308" [label="[-1]", style=dashed]; +"2998 4304" -> "3631 4305" [label="[-1]", style=dashed]; +"2999 4308" -> "3522 6520" [label="[]", style=solid]; +"3000 4234" -> "3001 4236" [label="[]", style=dashed]; +"3001 4236" -> "3002 4237" [label="[]", style=dashed]; +"3002 4237" -> "3003 4238" [label="[]", style=solid]; +"3003 4238" -> "3004 4239" [label="[-1, -1]", style=dashed]; +"3004 4239" -> "3005 4240" [label="[-1, -1]", style=dashed]; +"3005 4240" -> "3006 4243" [label="[-1]", style=dashed]; +"3005 4240" -> "3012 4251" [label="[-1]", style=dashed]; +"3006 4243" -> "3008 4244" [label="[-1]", style=dashed]; +"3007 4242" -> "3008 4244" [label="[]", style=solid]; +"3008 4244" -> "3009 4253" [label="[]", style=solid]; +"3008 4244" -> "3017 4264" [label="[]", style=solid]; +"3009 4253" -> "3010 4254" [label="[]", style=solid]; +"3010 4254" -> "3014 4257" [label="[]", style=solid]; +"3011 4249" -> "3012 4251" [label="[]", style=solid]; +"3012 4251" -> "3013 4252" [label="[]", style=solid]; +"3012 4251" -> "3634 4262" [label="[]", style=solid]; +"3013 4252" -> "3014 4257" [label="[]", style=solid]; +"3014 4257" -> "3015 4259" [label="[-1, 3]", style=dashed]; 
+"3015 4259" -> "3016 4260" [label="[-1, 1]", style=dashed]; +"3016 4260" -> "3017 4264" [label="[-1]", style=dashed]; +"3016 4260" -> "3633 4261" [label="[-1]", style=dashed]; +"3017 4264" -> "3522 6520" [label="[]", style=solid]; +"3018 4190" -> "3019 4192" [label="[]", style=dashed]; +"3019 4192" -> "3020 4193" [label="[]", style=dashed]; +"3020 4193" -> "3021 4194" [label="[]", style=solid]; +"3021 4194" -> "3022 4195" [label="[-1, -1]", style=dashed]; +"3022 4195" -> "3023 4196" [label="[-1, -1]", style=dashed]; +"3023 4196" -> "3024 4199" [label="[-1]", style=dashed]; +"3023 4196" -> "3030 4207" [label="[-1]", style=dashed]; +"3024 4199" -> "3026 4200" [label="[-1]", style=dashed]; +"3025 4198" -> "3026 4200" [label="[]", style=solid]; +"3026 4200" -> "3027 4209" [label="[]", style=solid]; +"3026 4200" -> "3035 4220" [label="[]", style=solid]; +"3027 4209" -> "3028 4210" [label="[]", style=solid]; +"3028 4210" -> "3032 4213" [label="[]", style=solid]; +"3029 4205" -> "3030 4207" [label="[]", style=solid]; +"3030 4207" -> "3031 4208" [label="[]", style=solid]; +"3030 4207" -> "3636 4218" [label="[]", style=solid]; +"3031 4208" -> "3032 4213" [label="[]", style=solid]; +"3032 4213" -> "3033 4215" [label="[-1, 3]", style=dashed]; +"3033 4215" -> "3034 4216" [label="[-1, 1]", style=dashed]; +"3034 4216" -> "3035 4220" [label="[-1]", style=dashed]; +"3034 4216" -> "3635 4217" [label="[-1]", style=dashed]; +"3035 4220" -> "3522 6520" [label="[]", style=solid]; +"3036 4146" -> "3037 4148" [label="[]", style=dashed]; +"3037 4148" -> "3038 4149" [label="[]", style=dashed]; +"3038 4149" -> "3039 4150" [label="[]", style=solid]; +"3039 4150" -> "3040 4151" [label="[-1, -1]", style=dashed]; +"3040 4151" -> "3041 4152" [label="[-1, -1]", style=dashed]; +"3041 4152" -> "3042 4155" [label="[-1]", style=dashed]; +"3041 4152" -> "3048 4163" [label="[-1]", style=dashed]; +"3042 4155" -> "3044 4156" [label="[-1]", style=dashed]; +"3043 4154" -> "3044 4156" [label="[]", style=solid]; +"3044 4156" -> "3045 4165" [label="[]", style=solid]; +"3044 4156" -> "3053 4176" [label="[]", style=solid]; +"3045 4165" -> "3046 4166" [label="[]", style=solid]; +"3046 4166" -> "3050 4169" [label="[]", style=solid]; +"3047 4161" -> "3048 4163" [label="[]", style=solid]; +"3048 4163" -> "3049 4164" [label="[]", style=solid]; +"3048 4163" -> "3638 4174" [label="[]", style=solid]; +"3049 4164" -> "3050 4169" [label="[]", style=solid]; +"3050 4169" -> "3051 4171" [label="[-1, 3]", style=dashed]; +"3051 4171" -> "3052 4172" [label="[-1, 1]", style=dashed]; +"3052 4172" -> "3053 4176" [label="[-1]", style=dashed]; +"3052 4172" -> "3637 4173" [label="[-1]", style=dashed]; +"3053 4176" -> "3522 6520" [label="[]", style=solid]; +"3054 4102" -> "3055 4104" [label="[]", style=dashed]; +"3055 4104" -> "3056 4105" [label="[]", style=dashed]; +"3056 4105" -> "3057 4106" [label="[]", style=solid]; +"3057 4106" -> "3058 4107" [label="[-1, -1]", style=dashed]; +"3058 4107" -> "3059 4108" [label="[-1, -1]", style=dashed]; +"3059 4108" -> "3060 4111" [label="[-1]", style=dashed]; +"3059 4108" -> "3066 4119" [label="[-1]", style=dashed]; +"3060 4111" -> "3062 4112" [label="[-1]", style=dashed]; +"3061 4110" -> "3062 4112" [label="[]", style=solid]; +"3062 4112" -> "3063 4121" [label="[]", style=solid]; +"3062 4112" -> "3071 4132" [label="[]", style=solid]; +"3063 4121" -> "3064 4122" [label="[]", style=solid]; +"3064 4122" -> "3068 4125" [label="[]", style=solid]; +"3065 4117" -> "3066 4119" [label="[]", style=solid]; +"3066 4119" -> "3067 
4120" [label="[]", style=solid]; +"3066 4119" -> "3640 4130" [label="[]", style=solid]; +"3067 4120" -> "3068 4125" [label="[]", style=solid]; +"3068 4125" -> "3069 4127" [label="[-1, 3]", style=dashed]; +"3069 4127" -> "3070 4128" [label="[-1, 1]", style=dashed]; +"3070 4128" -> "3071 4132" [label="[-1]", style=dashed]; +"3070 4128" -> "3639 4129" [label="[-1]", style=dashed]; +"3071 4132" -> "3522 6520" [label="[]", style=solid]; +"3072 4058" -> "3073 4060" [label="[]", style=dashed]; +"3073 4060" -> "3074 4061" [label="[]", style=dashed]; +"3074 4061" -> "3075 4062" [label="[]", style=solid]; +"3075 4062" -> "3076 4063" [label="[-1, -1]", style=dashed]; +"3076 4063" -> "3077 4064" [label="[-1, -1]", style=dashed]; +"3077 4064" -> "3078 4067" [label="[-1]", style=dashed]; +"3077 4064" -> "3084 4075" [label="[-1]", style=dashed]; +"3078 4067" -> "3080 4068" [label="[-1]", style=dashed]; +"3079 4066" -> "3080 4068" [label="[]", style=solid]; +"3080 4068" -> "3081 4077" [label="[]", style=solid]; +"3080 4068" -> "3089 4088" [label="[]", style=solid]; +"3081 4077" -> "3082 4078" [label="[]", style=solid]; +"3082 4078" -> "3086 4081" [label="[]", style=solid]; +"3083 4073" -> "3084 4075" [label="[]", style=solid]; +"3084 4075" -> "3085 4076" [label="[]", style=solid]; +"3084 4075" -> "3642 4086" [label="[]", style=solid]; +"3085 4076" -> "3086 4081" [label="[]", style=solid]; +"3086 4081" -> "3087 4083" [label="[-1, 3]", style=dashed]; +"3087 4083" -> "3088 4084" [label="[-1, 1]", style=dashed]; +"3088 4084" -> "3089 4088" [label="[-1]", style=dashed]; +"3088 4084" -> "3641 4085" [label="[-1]", style=dashed]; +"3089 4088" -> "3522 6520" [label="[]", style=solid]; +"3090 4014" -> "3091 4016" [label="[]", style=dashed]; +"3091 4016" -> "3092 4017" [label="[]", style=dashed]; +"3092 4017" -> "3093 4018" [label="[]", style=solid]; +"3093 4018" -> "3094 4019" [label="[-1, -1]", style=dashed]; +"3094 4019" -> "3095 4020" [label="[-1, -1]", style=dashed]; +"3095 4020" -> "3096 4023" [label="[-1]", style=dashed]; +"3095 4020" -> "3102 4031" [label="[-1]", style=dashed]; +"3096 4023" -> "3098 4024" [label="[-1]", style=dashed]; +"3097 4022" -> "3098 4024" [label="[]", style=solid]; +"3098 4024" -> "3099 4033" [label="[]", style=solid]; +"3098 4024" -> "3107 4044" [label="[]", style=solid]; +"3099 4033" -> "3100 4034" [label="[]", style=solid]; +"3100 4034" -> "3104 4037" [label="[]", style=solid]; +"3101 4029" -> "3102 4031" [label="[]", style=solid]; +"3102 4031" -> "3103 4032" [label="[]", style=solid]; +"3102 4031" -> "3644 4042" [label="[]", style=solid]; +"3103 4032" -> "3104 4037" [label="[]", style=solid]; +"3104 4037" -> "3105 4039" [label="[-1, 3]", style=dashed]; +"3105 4039" -> "3106 4040" [label="[-1, 1]", style=dashed]; +"3106 4040" -> "3107 4044" [label="[-1]", style=dashed]; +"3106 4040" -> "3643 4041" [label="[-1]", style=dashed]; +"3107 4044" -> "3522 6520" [label="[]", style=solid]; +"3108 3970" -> "3109 3972" [label="[]", style=dashed]; +"3109 3972" -> "3110 3973" [label="[]", style=dashed]; +"3110 3973" -> "3111 3974" [label="[]", style=solid]; +"3111 3974" -> "3112 3975" [label="[-1, -1]", style=dashed]; +"3112 3975" -> "3113 3976" [label="[-1, -1]", style=dashed]; +"3113 3976" -> "3114 3979" [label="[-1]", style=dashed]; +"3113 3976" -> "3120 3987" [label="[-1]", style=dashed]; +"3114 3979" -> "3116 3980" [label="[-1]", style=dashed]; +"3115 3978" -> "3116 3980" [label="[]", style=solid]; +"3116 3980" -> "3117 3989" [label="[]", style=solid]; +"3116 3980" -> "3125 4000" 
[label="[]", style=solid]; +"3117 3989" -> "3118 3990" [label="[]", style=solid]; +"3118 3990" -> "3122 3993" [label="[]", style=solid]; +"3119 3985" -> "3120 3987" [label="[]", style=solid]; +"3120 3987" -> "3121 3988" [label="[]", style=solid]; +"3120 3987" -> "3646 3998" [label="[]", style=solid]; +"3121 3988" -> "3122 3993" [label="[]", style=solid]; +"3122 3993" -> "3123 3995" [label="[-1, 3]", style=dashed]; +"3123 3995" -> "3124 3996" [label="[-1, 1]", style=dashed]; +"3124 3996" -> "3125 4000" [label="[-1]", style=dashed]; +"3124 3996" -> "3645 3997" [label="[-1]", style=dashed]; +"3125 4000" -> "3522 6520" [label="[]", style=solid]; +"3126 3926" -> "3127 3928" [label="[]", style=dashed]; +"3127 3928" -> "3128 3929" [label="[]", style=dashed]; +"3128 3929" -> "3129 3930" [label="[]", style=solid]; +"3129 3930" -> "3130 3931" [label="[-1, -1]", style=dashed]; +"3130 3931" -> "3131 3932" [label="[-1, -1]", style=dashed]; +"3131 3932" -> "3132 3935" [label="[-1]", style=dashed]; +"3131 3932" -> "3138 3943" [label="[-1]", style=dashed]; +"3132 3935" -> "3134 3936" [label="[-1]", style=dashed]; +"3133 3934" -> "3134 3936" [label="[]", style=solid]; +"3134 3936" -> "3135 3945" [label="[]", style=solid]; +"3134 3936" -> "3143 3956" [label="[]", style=solid]; +"3135 3945" -> "3136 3946" [label="[]", style=solid]; +"3136 3946" -> "3140 3949" [label="[]", style=solid]; +"3137 3941" -> "3138 3943" [label="[]", style=solid]; +"3138 3943" -> "3139 3944" [label="[]", style=solid]; +"3138 3943" -> "3648 3954" [label="[]", style=solid]; +"3139 3944" -> "3140 3949" [label="[]", style=solid]; +"3140 3949" -> "3141 3951" [label="[-1, 3]", style=dashed]; +"3141 3951" -> "3142 3952" [label="[-1, 1]", style=dashed]; +"3142 3952" -> "3143 3956" [label="[-1]", style=dashed]; +"3142 3952" -> "3647 3953" [label="[-1]", style=dashed]; +"3143 3956" -> "3522 6520" [label="[]", style=solid]; +"3144 3882" -> "3145 3884" [label="[]", style=dashed]; +"3145 3884" -> "3146 3885" [label="[]", style=dashed]; +"3146 3885" -> "3147 3886" [label="[]", style=solid]; +"3147 3886" -> "3148 3887" [label="[-1, -1]", style=dashed]; +"3148 3887" -> "3149 3888" [label="[-1, -1]", style=dashed]; +"3149 3888" -> "3150 3891" [label="[-1]", style=dashed]; +"3149 3888" -> "3156 3899" [label="[-1]", style=dashed]; +"3150 3891" -> "3152 3892" [label="[-1]", style=dashed]; +"3151 3890" -> "3152 3892" [label="[]", style=solid]; +"3152 3892" -> "3153 3901" [label="[]", style=solid]; +"3152 3892" -> "3161 3912" [label="[]", style=solid]; +"3153 3901" -> "3154 3902" [label="[]", style=solid]; +"3154 3902" -> "3158 3905" [label="[]", style=solid]; +"3155 3897" -> "3156 3899" [label="[]", style=solid]; +"3156 3899" -> "3157 3900" [label="[]", style=solid]; +"3156 3899" -> "3650 3910" [label="[]", style=solid]; +"3157 3900" -> "3158 3905" [label="[]", style=solid]; +"3158 3905" -> "3159 3907" [label="[-1, 3]", style=dashed]; +"3159 3907" -> "3160 3908" [label="[-1, 1]", style=dashed]; +"3160 3908" -> "3161 3912" [label="[-1]", style=dashed]; +"3160 3908" -> "3649 3909" [label="[-1]", style=dashed]; +"3161 3912" -> "3522 6520" [label="[]", style=solid]; +"3162 3838" -> "3163 3840" [label="[]", style=dashed]; +"3163 3840" -> "3164 3841" [label="[]", style=dashed]; +"3164 3841" -> "3165 3842" [label="[]", style=solid]; +"3165 3842" -> "3166 3843" [label="[-1, -1]", style=dashed]; +"3166 3843" -> "3167 3844" [label="[-1, -1]", style=dashed]; +"3167 3844" -> "3168 3847" [label="[-1]", style=dashed]; +"3167 3844" -> "3174 3855" [label="[-1]", 
style=dashed]; +"3168 3847" -> "3170 3848" [label="[-1]", style=dashed]; +"3169 3846" -> "3170 3848" [label="[]", style=solid]; +"3170 3848" -> "3171 3857" [label="[]", style=solid]; +"3170 3848" -> "3179 3868" [label="[]", style=solid]; +"3171 3857" -> "3172 3858" [label="[]", style=solid]; +"3172 3858" -> "3176 3861" [label="[]", style=solid]; +"3173 3853" -> "3174 3855" [label="[]", style=solid]; +"3174 3855" -> "3175 3856" [label="[]", style=solid]; +"3174 3855" -> "3652 3866" [label="[]", style=solid]; +"3175 3856" -> "3176 3861" [label="[]", style=solid]; +"3176 3861" -> "3177 3863" [label="[-1, 3]", style=dashed]; +"3177 3863" -> "3178 3864" [label="[-1, 1]", style=dashed]; +"3178 3864" -> "3179 3868" [label="[-1]", style=dashed]; +"3178 3864" -> "3651 3865" [label="[-1]", style=dashed]; +"3179 3868" -> "3522 6520" [label="[]", style=solid]; +"3180 3794" -> "3181 3796" [label="[]", style=dashed]; +"3181 3796" -> "3182 3797" [label="[]", style=dashed]; +"3182 3797" -> "3183 3798" [label="[]", style=solid]; +"3183 3798" -> "3184 3799" [label="[-1, -1]", style=dashed]; +"3184 3799" -> "3185 3800" [label="[-1, -1]", style=dashed]; +"3185 3800" -> "3186 3803" [label="[-1]", style=dashed]; +"3185 3800" -> "3192 3811" [label="[-1]", style=dashed]; +"3186 3803" -> "3188 3804" [label="[-1]", style=dashed]; +"3187 3802" -> "3188 3804" [label="[]", style=solid]; +"3188 3804" -> "3189 3813" [label="[]", style=solid]; +"3188 3804" -> "3197 3824" [label="[]", style=solid]; +"3189 3813" -> "3190 3814" [label="[]", style=solid]; +"3190 3814" -> "3194 3817" [label="[]", style=solid]; +"3191 3809" -> "3192 3811" [label="[]", style=solid]; +"3192 3811" -> "3193 3812" [label="[]", style=solid]; +"3192 3811" -> "3654 3822" [label="[]", style=solid]; +"3193 3812" -> "3194 3817" [label="[]", style=solid]; +"3194 3817" -> "3195 3819" [label="[-1, 3]", style=dashed]; +"3195 3819" -> "3196 3820" [label="[-1, 1]", style=dashed]; +"3196 3820" -> "3197 3824" [label="[-1]", style=dashed]; +"3196 3820" -> "3653 3821" [label="[-1]", style=dashed]; +"3197 3824" -> "3522 6520" [label="[]", style=solid]; +"3198 3750" -> "3199 3752" [label="[]", style=dashed]; +"3199 3752" -> "3200 3753" [label="[]", style=dashed]; +"3200 3753" -> "3201 3754" [label="[]", style=solid]; +"3201 3754" -> "3202 3755" [label="[-1, -1]", style=dashed]; +"3202 3755" -> "3203 3756" [label="[-1, -1]", style=dashed]; +"3203 3756" -> "3204 3759" [label="[-1]", style=dashed]; +"3203 3756" -> "3210 3767" [label="[-1]", style=dashed]; +"3204 3759" -> "3206 3760" [label="[-1]", style=dashed]; +"3205 3758" -> "3206 3760" [label="[]", style=solid]; +"3206 3760" -> "3207 3769" [label="[]", style=solid]; +"3206 3760" -> "3215 3780" [label="[]", style=solid]; +"3207 3769" -> "3208 3770" [label="[]", style=solid]; +"3208 3770" -> "3212 3773" [label="[]", style=solid]; +"3209 3765" -> "3210 3767" [label="[]", style=solid]; +"3210 3767" -> "3211 3768" [label="[]", style=solid]; +"3210 3767" -> "3656 3778" [label="[]", style=solid]; +"3211 3768" -> "3212 3773" [label="[]", style=solid]; +"3212 3773" -> "3213 3775" [label="[-1, 3]", style=dashed]; +"3213 3775" -> "3214 3776" [label="[-1, 1]", style=dashed]; +"3214 3776" -> "3215 3780" [label="[-1]", style=dashed]; +"3214 3776" -> "3655 3777" [label="[-1]", style=dashed]; +"3215 3780" -> "3522 6520" [label="[]", style=solid]; +"3216 3706" -> "3217 3708" [label="[]", style=dashed]; +"3217 3708" -> "3218 3709" [label="[]", style=dashed]; +"3218 3709" -> "3219 3710" [label="[]", style=solid]; +"3219 3710" -> 
"3220 3711" [label="[-1, -1]", style=dashed]; +"3220 3711" -> "3221 3712" [label="[-1, -1]", style=dashed]; +"3221 3712" -> "3222 3715" [label="[-1]", style=dashed]; +"3221 3712" -> "3228 3723" [label="[-1]", style=dashed]; +"3222 3715" -> "3224 3716" [label="[-1]", style=dashed]; +"3223 3714" -> "3224 3716" [label="[]", style=solid]; +"3224 3716" -> "3225 3725" [label="[]", style=solid]; +"3224 3716" -> "3233 3736" [label="[]", style=solid]; +"3225 3725" -> "3226 3726" [label="[]", style=solid]; +"3226 3726" -> "3230 3729" [label="[]", style=solid]; +"3227 3721" -> "3228 3723" [label="[]", style=solid]; +"3228 3723" -> "3229 3724" [label="[]", style=solid]; +"3228 3723" -> "3658 3734" [label="[]", style=solid]; +"3229 3724" -> "3230 3729" [label="[]", style=solid]; +"3230 3729" -> "3231 3731" [label="[-1, 3]", style=dashed]; +"3231 3731" -> "3232 3732" [label="[-1, 1]", style=dashed]; +"3232 3732" -> "3233 3736" [label="[-1]", style=dashed]; +"3232 3732" -> "3657 3733" [label="[-1]", style=dashed]; +"3233 3736" -> "3522 6520" [label="[]", style=solid]; +"3234 3662" -> "3235 3664" [label="[]", style=dashed]; +"3235 3664" -> "3236 3665" [label="[]", style=dashed]; +"3236 3665" -> "3237 3666" [label="[]", style=solid]; +"3237 3666" -> "3238 3667" [label="[-1, -1]", style=dashed]; +"3238 3667" -> "3239 3668" [label="[-1, -1]", style=dashed]; +"3239 3668" -> "3240 3671" [label="[-1]", style=dashed]; +"3239 3668" -> "3246 3679" [label="[-1]", style=dashed]; +"3240 3671" -> "3242 3672" [label="[-1]", style=dashed]; +"3241 3670" -> "3242 3672" [label="[]", style=solid]; +"3242 3672" -> "3243 3681" [label="[]", style=solid]; +"3242 3672" -> "3251 3692" [label="[]", style=solid]; +"3243 3681" -> "3244 3682" [label="[]", style=solid]; +"3244 3682" -> "3248 3685" [label="[]", style=solid]; +"3245 3677" -> "3246 3679" [label="[]", style=solid]; +"3246 3679" -> "3247 3680" [label="[]", style=solid]; +"3246 3679" -> "3660 3690" [label="[]", style=solid]; +"3247 3680" -> "3248 3685" [label="[]", style=solid]; +"3248 3685" -> "3249 3687" [label="[-1, 3]", style=dashed]; +"3249 3687" -> "3250 3688" [label="[-1, 1]", style=dashed]; +"3250 3688" -> "3251 3692" [label="[-1]", style=dashed]; +"3250 3688" -> "3659 3689" [label="[-1]", style=dashed]; +"3251 3692" -> "3522 6520" [label="[]", style=solid]; +"3252 3618" -> "3253 3620" [label="[]", style=dashed]; +"3253 3620" -> "3254 3621" [label="[]", style=dashed]; +"3254 3621" -> "3255 3622" [label="[]", style=solid]; +"3255 3622" -> "3256 3623" [label="[-1, -1]", style=dashed]; +"3256 3623" -> "3257 3624" [label="[-1, -1]", style=dashed]; +"3257 3624" -> "3258 3627" [label="[-1]", style=dashed]; +"3257 3624" -> "3264 3635" [label="[-1]", style=dashed]; +"3258 3627" -> "3260 3628" [label="[-1]", style=dashed]; +"3259 3626" -> "3260 3628" [label="[]", style=solid]; +"3260 3628" -> "3261 3637" [label="[]", style=solid]; +"3260 3628" -> "3269 3648" [label="[]", style=solid]; +"3261 3637" -> "3262 3638" [label="[]", style=solid]; +"3262 3638" -> "3266 3641" [label="[]", style=solid]; +"3263 3633" -> "3264 3635" [label="[]", style=solid]; +"3264 3635" -> "3265 3636" [label="[]", style=solid]; +"3264 3635" -> "3662 3646" [label="[]", style=solid]; +"3265 3636" -> "3266 3641" [label="[]", style=solid]; +"3266 3641" -> "3267 3643" [label="[-1, 3]", style=dashed]; +"3267 3643" -> "3268 3644" [label="[-1, 1]", style=dashed]; +"3268 3644" -> "3269 3648" [label="[-1]", style=dashed]; +"3268 3644" -> "3661 3645" [label="[-1]", style=dashed]; +"3269 3648" -> "3522 6520" 
[label="[]", style=solid]; +"3270 3574" -> "3271 3576" [label="[]", style=dashed]; +"3271 3576" -> "3272 3577" [label="[]", style=dashed]; +"3272 3577" -> "3273 3578" [label="[]", style=solid]; +"3273 3578" -> "3274 3579" [label="[-1, -1]", style=dashed]; +"3274 3579" -> "3275 3580" [label="[-1, -1]", style=dashed]; +"3275 3580" -> "3276 3583" [label="[-1]", style=dashed]; +"3275 3580" -> "3282 3591" [label="[-1]", style=dashed]; +"3276 3583" -> "3278 3584" [label="[-1]", style=dashed]; +"3277 3582" -> "3278 3584" [label="[]", style=solid]; +"3278 3584" -> "3279 3593" [label="[]", style=solid]; +"3278 3584" -> "3287 3604" [label="[]", style=solid]; +"3279 3593" -> "3280 3594" [label="[]", style=solid]; +"3280 3594" -> "3284 3597" [label="[]", style=solid]; +"3281 3589" -> "3282 3591" [label="[]", style=solid]; +"3282 3591" -> "3283 3592" [label="[]", style=solid]; +"3282 3591" -> "3664 3602" [label="[]", style=solid]; +"3283 3592" -> "3284 3597" [label="[]", style=solid]; +"3284 3597" -> "3285 3599" [label="[-1, 3]", style=dashed]; +"3285 3599" -> "3286 3600" [label="[-1, 1]", style=dashed]; +"3286 3600" -> "3287 3604" [label="[-1]", style=dashed]; +"3286 3600" -> "3663 3601" [label="[-1]", style=dashed]; +"3287 3604" -> "3522 6520" [label="[]", style=solid]; +"3288 3530" -> "3289 3532" [label="[]", style=dashed]; +"3289 3532" -> "3290 3533" [label="[]", style=dashed]; +"3290 3533" -> "3291 3534" [label="[]", style=solid]; +"3291 3534" -> "3292 3535" [label="[-1, -1]", style=dashed]; +"3292 3535" -> "3293 3536" [label="[-1, -1]", style=dashed]; +"3293 3536" -> "3294 3539" [label="[-1]", style=dashed]; +"3293 3536" -> "3300 3547" [label="[-1]", style=dashed]; +"3294 3539" -> "3296 3540" [label="[-1]", style=dashed]; +"3295 3538" -> "3296 3540" [label="[]", style=solid]; +"3296 3540" -> "3297 3549" [label="[]", style=solid]; +"3296 3540" -> "3305 3560" [label="[]", style=solid]; +"3297 3549" -> "3298 3550" [label="[]", style=solid]; +"3298 3550" -> "3302 3553" [label="[]", style=solid]; +"3299 3545" -> "3300 3547" [label="[]", style=solid]; +"3300 3547" -> "3301 3548" [label="[]", style=solid]; +"3300 3547" -> "3666 3558" [label="[]", style=solid]; +"3301 3548" -> "3302 3553" [label="[]", style=solid]; +"3302 3553" -> "3303 3555" [label="[-1, 3]", style=dashed]; +"3303 3555" -> "3304 3556" [label="[-1, 1]", style=dashed]; +"3304 3556" -> "3305 3560" [label="[-1]", style=dashed]; +"3304 3556" -> "3665 3557" [label="[-1]", style=dashed]; +"3305 3560" -> "3522 6520" [label="[]", style=solid]; +"3306 3486" -> "3307 3488" [label="[]", style=dashed]; +"3307 3488" -> "3308 3489" [label="[]", style=dashed]; +"3308 3489" -> "3309 3490" [label="[]", style=solid]; +"3309 3490" -> "3310 3491" [label="[-1, -1]", style=dashed]; +"3310 3491" -> "3311 3492" [label="[-1, -1]", style=dashed]; +"3311 3492" -> "3312 3495" [label="[-1]", style=dashed]; +"3311 3492" -> "3318 3503" [label="[-1]", style=dashed]; +"3312 3495" -> "3314 3496" [label="[-1]", style=dashed]; +"3313 3494" -> "3314 3496" [label="[]", style=solid]; +"3314 3496" -> "3315 3505" [label="[]", style=solid]; +"3314 3496" -> "3323 3516" [label="[]", style=solid]; +"3315 3505" -> "3316 3506" [label="[]", style=solid]; +"3316 3506" -> "3320 3509" [label="[]", style=solid]; +"3317 3501" -> "3318 3503" [label="[]", style=solid]; +"3318 3503" -> "3319 3504" [label="[]", style=solid]; +"3318 3503" -> "3668 3514" [label="[]", style=solid]; +"3319 3504" -> "3320 3509" [label="[]", style=solid]; +"3320 3509" -> "3321 3511" [label="[-1, 3]", style=dashed]; 
+"3321 3511" -> "3322 3512" [label="[-1, 1]", style=dashed]; +"3322 3512" -> "3323 3516" [label="[-1]", style=dashed]; +"3322 3512" -> "3667 3513" [label="[-1]", style=dashed]; +"3323 3516" -> "3522 6520" [label="[]", style=solid]; +"3324 3442" -> "3325 3444" [label="[]", style=dashed]; +"3325 3444" -> "3326 3445" [label="[]", style=dashed]; +"3326 3445" -> "3327 3446" [label="[]", style=solid]; +"3327 3446" -> "3328 3447" [label="[-1, -1]", style=dashed]; +"3328 3447" -> "3329 3448" [label="[-1, -1]", style=dashed]; +"3329 3448" -> "3330 3451" [label="[-1]", style=dashed]; +"3329 3448" -> "3336 3459" [label="[-1]", style=dashed]; +"3330 3451" -> "3332 3452" [label="[-1]", style=dashed]; +"3331 3450" -> "3332 3452" [label="[]", style=solid]; +"3332 3452" -> "3333 3461" [label="[]", style=solid]; +"3332 3452" -> "3341 3472" [label="[]", style=solid]; +"3333 3461" -> "3334 3462" [label="[]", style=solid]; +"3334 3462" -> "3338 3465" [label="[]", style=solid]; +"3335 3457" -> "3336 3459" [label="[]", style=solid]; +"3336 3459" -> "3337 3460" [label="[]", style=solid]; +"3336 3459" -> "3670 3470" [label="[]", style=solid]; +"3337 3460" -> "3338 3465" [label="[]", style=solid]; +"3338 3465" -> "3339 3467" [label="[-1, 3]", style=dashed]; +"3339 3467" -> "3340 3468" [label="[-1, 1]", style=dashed]; +"3340 3468" -> "3341 3472" [label="[-1]", style=dashed]; +"3340 3468" -> "3669 3469" [label="[-1]", style=dashed]; +"3341 3472" -> "3522 6520" [label="[]", style=solid]; +"3342 3398" -> "3343 3400" [label="[]", style=dashed]; +"3343 3400" -> "3344 3401" [label="[]", style=dashed]; +"3344 3401" -> "3345 3402" [label="[]", style=solid]; +"3345 3402" -> "3346 3403" [label="[-1, -1]", style=dashed]; +"3346 3403" -> "3347 3404" [label="[-1, -1]", style=dashed]; +"3347 3404" -> "3348 3407" [label="[-1]", style=dashed]; +"3347 3404" -> "3354 3415" [label="[-1]", style=dashed]; +"3348 3407" -> "3350 3408" [label="[-1]", style=dashed]; +"3349 3406" -> "3350 3408" [label="[]", style=solid]; +"3350 3408" -> "3351 3417" [label="[]", style=solid]; +"3350 3408" -> "3359 3428" [label="[]", style=solid]; +"3351 3417" -> "3352 3418" [label="[]", style=solid]; +"3352 3418" -> "3356 3421" [label="[]", style=solid]; +"3353 3413" -> "3354 3415" [label="[]", style=solid]; +"3354 3415" -> "3355 3416" [label="[]", style=solid]; +"3354 3415" -> "3672 3426" [label="[]", style=solid]; +"3355 3416" -> "3356 3421" [label="[]", style=solid]; +"3356 3421" -> "3357 3423" [label="[-1, 3]", style=dashed]; +"3357 3423" -> "3358 3424" [label="[-1, 1]", style=dashed]; +"3358 3424" -> "3359 3428" [label="[-1]", style=dashed]; +"3358 3424" -> "3671 3425" [label="[-1]", style=dashed]; +"3359 3428" -> "3522 6520" [label="[]", style=solid]; +"3360 3354" -> "3361 3356" [label="[]", style=dashed]; +"3361 3356" -> "3362 3357" [label="[]", style=dashed]; +"3362 3357" -> "3363 3358" [label="[]", style=solid]; +"3363 3358" -> "3364 3359" [label="[-1, -1]", style=dashed]; +"3364 3359" -> "3365 3360" [label="[-1, -1]", style=dashed]; +"3365 3360" -> "3366 3363" [label="[-1]", style=dashed]; +"3365 3360" -> "3372 3371" [label="[-1]", style=dashed]; +"3366 3363" -> "3368 3364" [label="[-1]", style=dashed]; +"3367 3362" -> "3368 3364" [label="[]", style=solid]; +"3368 3364" -> "3369 3373" [label="[]", style=solid]; +"3368 3364" -> "3377 3384" [label="[]", style=solid]; +"3369 3373" -> "3370 3374" [label="[]", style=solid]; +"3370 3374" -> "3374 3377" [label="[]", style=solid]; +"3371 3369" -> "3372 3371" [label="[]", style=solid]; +"3372 3371" -> "3373 
3372" [label="[]", style=solid]; +"3372 3371" -> "3674 3382" [label="[]", style=solid]; +"3373 3372" -> "3374 3377" [label="[]", style=solid]; +"3374 3377" -> "3375 3379" [label="[-1, 3]", style=dashed]; +"3375 3379" -> "3376 3380" [label="[-1, 1]", style=dashed]; +"3376 3380" -> "3377 3384" [label="[-1]", style=dashed]; +"3376 3380" -> "3673 3381" [label="[-1]", style=dashed]; +"3377 3384" -> "3522 6520" [label="[]", style=solid]; +"3378 3310" -> "3379 3312" [label="[]", style=dashed]; +"3379 3312" -> "3380 3313" [label="[]", style=dashed]; +"3380 3313" -> "3381 3314" [label="[]", style=solid]; +"3381 3314" -> "3382 3315" [label="[-1, -1]", style=dashed]; +"3382 3315" -> "3383 3316" [label="[-1, -1]", style=dashed]; +"3383 3316" -> "3384 3319" [label="[-1]", style=dashed]; +"3383 3316" -> "3390 3327" [label="[-1]", style=dashed]; +"3384 3319" -> "3386 3320" [label="[-1]", style=dashed]; +"3385 3318" -> "3386 3320" [label="[]", style=solid]; +"3386 3320" -> "3387 3329" [label="[]", style=solid]; +"3386 3320" -> "3395 3340" [label="[]", style=solid]; +"3387 3329" -> "3388 3330" [label="[]", style=solid]; +"3388 3330" -> "3392 3333" [label="[]", style=solid]; +"3389 3325" -> "3390 3327" [label="[]", style=solid]; +"3390 3327" -> "3391 3328" [label="[]", style=solid]; +"3390 3327" -> "3676 3338" [label="[]", style=solid]; +"3391 3328" -> "3392 3333" [label="[]", style=solid]; +"3392 3333" -> "3393 3335" [label="[-1, 3]", style=dashed]; +"3393 3335" -> "3394 3336" [label="[-1, 1]", style=dashed]; +"3394 3336" -> "3395 3340" [label="[-1]", style=dashed]; +"3394 3336" -> "3675 3337" [label="[-1]", style=dashed]; +"3395 3340" -> "3522 6520" [label="[]", style=solid]; +"3396 3266" -> "3397 3268" [label="[]", style=dashed]; +"3397 3268" -> "3398 3269" [label="[]", style=dashed]; +"3398 3269" -> "3399 3270" [label="[]", style=solid]; +"3399 3270" -> "3400 3271" [label="[-1, -1]", style=dashed]; +"3400 3271" -> "3401 3272" [label="[-1, -1]", style=dashed]; +"3401 3272" -> "3402 3275" [label="[-1]", style=dashed]; +"3401 3272" -> "3408 3283" [label="[-1]", style=dashed]; +"3402 3275" -> "3404 3276" [label="[-1]", style=dashed]; +"3403 3274" -> "3404 3276" [label="[]", style=solid]; +"3404 3276" -> "3405 3285" [label="[]", style=solid]; +"3404 3276" -> "3413 3296" [label="[]", style=solid]; +"3405 3285" -> "3406 3286" [label="[]", style=solid]; +"3406 3286" -> "3410 3289" [label="[]", style=solid]; +"3407 3281" -> "3408 3283" [label="[]", style=solid]; +"3408 3283" -> "3409 3284" [label="[]", style=solid]; +"3408 3283" -> "3678 3294" [label="[]", style=solid]; +"3409 3284" -> "3410 3289" [label="[]", style=solid]; +"3410 3289" -> "3411 3291" [label="[-1, 3]", style=dashed]; +"3411 3291" -> "3412 3292" [label="[-1, 1]", style=dashed]; +"3412 3292" -> "3413 3296" [label="[-1]", style=dashed]; +"3412 3292" -> "3677 3293" [label="[-1]", style=dashed]; +"3413 3296" -> "3522 6520" [label="[]", style=solid]; +"3414 3222" -> "3415 3224" [label="[]", style=dashed]; +"3415 3224" -> "3416 3225" [label="[]", style=dashed]; +"3416 3225" -> "3417 3226" [label="[]", style=solid]; +"3417 3226" -> "3418 3227" [label="[-1, -1]", style=dashed]; +"3418 3227" -> "3419 3228" [label="[-1, -1]", style=dashed]; +"3419 3228" -> "3420 3231" [label="[-1]", style=dashed]; +"3419 3228" -> "3426 3239" [label="[-1]", style=dashed]; +"3420 3231" -> "3422 3232" [label="[-1]", style=dashed]; +"3421 3230" -> "3422 3232" [label="[]", style=solid]; +"3422 3232" -> "3423 3241" [label="[]", style=solid]; +"3422 3232" -> "3431 3252" 
[label="[]", style=solid]; +"3423 3241" -> "3424 3242" [label="[]", style=solid]; +"3424 3242" -> "3428 3245" [label="[]", style=solid]; +"3425 3237" -> "3426 3239" [label="[]", style=solid]; +"3426 3239" -> "3427 3240" [label="[]", style=solid]; +"3426 3239" -> "3680 3250" [label="[]", style=solid]; +"3427 3240" -> "3428 3245" [label="[]", style=solid]; +"3428 3245" -> "3429 3247" [label="[-1, 3]", style=dashed]; +"3429 3247" -> "3430 3248" [label="[-1, 1]", style=dashed]; +"3430 3248" -> "3431 3252" [label="[-1]", style=dashed]; +"3430 3248" -> "3679 3249" [label="[-1]", style=dashed]; +"3431 3252" -> "3522 6520" [label="[]", style=solid]; +"3432 3178" -> "3433 3180" [label="[]", style=dashed]; +"3433 3180" -> "3434 3181" [label="[]", style=dashed]; +"3434 3181" -> "3435 3182" [label="[]", style=solid]; +"3435 3182" -> "3436 3183" [label="[-1, -1]", style=dashed]; +"3436 3183" -> "3437 3184" [label="[-1, -1]", style=dashed]; +"3437 3184" -> "3438 3187" [label="[-1]", style=dashed]; +"3437 3184" -> "3444 3195" [label="[-1]", style=dashed]; +"3438 3187" -> "3440 3188" [label="[-1]", style=dashed]; +"3439 3186" -> "3440 3188" [label="[]", style=solid]; +"3440 3188" -> "3441 3197" [label="[]", style=solid]; +"3440 3188" -> "3449 3208" [label="[]", style=solid]; +"3441 3197" -> "3442 3198" [label="[]", style=solid]; +"3442 3198" -> "3446 3201" [label="[]", style=solid]; +"3443 3193" -> "3444 3195" [label="[]", style=solid]; +"3444 3195" -> "3445 3196" [label="[]", style=solid]; +"3444 3195" -> "3682 3206" [label="[]", style=solid]; +"3445 3196" -> "3446 3201" [label="[]", style=solid]; +"3446 3201" -> "3447 3203" [label="[-1, 3]", style=dashed]; +"3447 3203" -> "3448 3204" [label="[-1, 1]", style=dashed]; +"3448 3204" -> "3449 3208" [label="[-1]", style=dashed]; +"3448 3204" -> "3681 3205" [label="[-1]", style=dashed]; +"3449 3208" -> "3522 6520" [label="[]", style=solid]; +"3450 3134" -> "3451 3136" [label="[]", style=dashed]; +"3451 3136" -> "3452 3137" [label="[]", style=dashed]; +"3452 3137" -> "3453 3138" [label="[]", style=solid]; +"3453 3138" -> "3454 3139" [label="[-1, -1]", style=dashed]; +"3454 3139" -> "3455 3140" [label="[-1, -1]", style=dashed]; +"3455 3140" -> "3456 3143" [label="[-1]", style=dashed]; +"3455 3140" -> "3462 3151" [label="[-1]", style=dashed]; +"3456 3143" -> "3458 3144" [label="[-1]", style=dashed]; +"3457 3142" -> "3458 3144" [label="[]", style=solid]; +"3458 3144" -> "3459 3153" [label="[]", style=solid]; +"3458 3144" -> "3467 3164" [label="[]", style=solid]; +"3459 3153" -> "3460 3154" [label="[]", style=solid]; +"3460 3154" -> "3464 3157" [label="[]", style=solid]; +"3461 3149" -> "3462 3151" [label="[]", style=solid]; +"3462 3151" -> "3463 3152" [label="[]", style=solid]; +"3462 3151" -> "3684 3162" [label="[]", style=solid]; +"3463 3152" -> "3464 3157" [label="[]", style=solid]; +"3464 3157" -> "3465 3159" [label="[-1, 3]", style=dashed]; +"3465 3159" -> "3466 3160" [label="[-1, 1]", style=dashed]; +"3466 3160" -> "3467 3164" [label="[-1]", style=dashed]; +"3466 3160" -> "3683 3161" [label="[-1]", style=dashed]; +"3467 3164" -> "3522 6520" [label="[]", style=solid]; +"3468 3090" -> "3469 3092" [label="[]", style=dashed]; +"3469 3092" -> "3470 3093" [label="[]", style=dashed]; +"3470 3093" -> "3471 3094" [label="[]", style=solid]; +"3471 3094" -> "3472 3095" [label="[-1, -1]", style=dashed]; +"3472 3095" -> "3473 3096" [label="[-1, -1]", style=dashed]; +"3473 3096" -> "3474 3099" [label="[-1]", style=dashed]; +"3473 3096" -> "3480 3107" [label="[-1]", 
style=dashed]; +"3474 3099" -> "3476 3100" [label="[-1]", style=dashed]; +"3475 3098" -> "3476 3100" [label="[]", style=solid]; +"3476 3100" -> "3477 3109" [label="[]", style=solid]; +"3476 3100" -> "3485 3120" [label="[]", style=solid]; +"3477 3109" -> "3478 3110" [label="[]", style=solid]; +"3478 3110" -> "3482 3113" [label="[]", style=solid]; +"3479 3105" -> "3480 3107" [label="[]", style=solid]; +"3480 3107" -> "3481 3108" [label="[]", style=solid]; +"3480 3107" -> "3686 3118" [label="[]", style=solid]; +"3481 3108" -> "3482 3113" [label="[]", style=solid]; +"3482 3113" -> "3483 3115" [label="[-1, 3]", style=dashed]; +"3483 3115" -> "3484 3116" [label="[-1, 1]", style=dashed]; +"3484 3116" -> "3485 3120" [label="[-1]", style=dashed]; +"3484 3116" -> "3685 3117" [label="[-1]", style=dashed]; +"3485 3120" -> "3522 6520" [label="[]", style=solid]; +"3486 3046" -> "3487 3048" [label="[]", style=dashed]; +"3487 3048" -> "3488 3049" [label="[]", style=dashed]; +"3488 3049" -> "3489 3050" [label="[]", style=solid]; +"3489 3050" -> "3490 3051" [label="[-1, -1]", style=dashed]; +"3490 3051" -> "3491 3052" [label="[-1, -1]", style=dashed]; +"3491 3052" -> "3492 3055" [label="[-1]", style=dashed]; +"3491 3052" -> "3498 3063" [label="[-1]", style=dashed]; +"3492 3055" -> "3494 3056" [label="[-1]", style=dashed]; +"3493 3054" -> "3494 3056" [label="[]", style=solid]; +"3494 3056" -> "3495 3065" [label="[]", style=solid]; +"3494 3056" -> "3503 3076" [label="[]", style=solid]; +"3495 3065" -> "3496 3066" [label="[]", style=solid]; +"3496 3066" -> "3500 3069" [label="[]", style=solid]; +"3497 3061" -> "3498 3063" [label="[]", style=solid]; +"3498 3063" -> "3499 3064" [label="[]", style=solid]; +"3498 3063" -> "3688 3074" [label="[]", style=solid]; +"3499 3064" -> "3500 3069" [label="[]", style=solid]; +"3500 3069" -> "3501 3071" [label="[-1, 3]", style=dashed]; +"3501 3071" -> "3502 3072" [label="[-1, 1]", style=dashed]; +"3502 3072" -> "3503 3076" [label="[-1]", style=dashed]; +"3502 3072" -> "3687 3073" [label="[-1]", style=dashed]; +"3503 3076" -> "3522 6520" [label="[]", style=solid]; +"3504 3002" -> "3505 3004" [label="[]", style=dashed]; +"3505 3004" -> "3506 3005" [label="[]", style=dashed]; +"3506 3005" -> "3507 3006" [label="[]", style=solid]; +"3507 3006" -> "3508 3007" [label="[-1, -1]", style=dashed]; +"3508 3007" -> "3509 3008" [label="[-1, -1]", style=dashed]; +"3509 3008" -> "3510 3011" [label="[-1]", style=dashed]; +"3509 3008" -> "3516 3019" [label="[-1]", style=dashed]; +"3510 3011" -> "3512 3012" [label="[-1]", style=dashed]; +"3511 3010" -> "3512 3012" [label="[]", style=solid]; +"3512 3012" -> "3513 3021" [label="[]", style=solid]; +"3512 3012" -> "3521 3032" [label="[]", style=solid]; +"3513 3021" -> "3514 3022" [label="[]", style=solid]; +"3514 3022" -> "3518 3025" [label="[]", style=solid]; +"3515 3017" -> "3516 3019" [label="[]", style=solid]; +"3516 3019" -> "3517 3020" [label="[]", style=solid]; +"3516 3019" -> "3690 3030" [label="[]", style=solid]; +"3517 3020" -> "3518 3025" [label="[]", style=solid]; +"3518 3025" -> "3519 3027" [label="[-1, 3]", style=dashed]; +"3519 3027" -> "3520 3028" [label="[-1, 1]", style=dashed]; +"3520 3028" -> "3521 3032" [label="[-1]", style=dashed]; +"3520 3028" -> "3689 3029" [label="[-1]", style=dashed]; +"3521 3032" -> "3522 6520" [label="[]", style=solid]; +"3522 6520" -> "3523 6521" [label="[]", style=solid]; +"3522 6520" -> "3529 6528" [label="[]", style=solid]; +"3522 6520" -> "4279 6534" [label="[]", style=solid]; +"3523 6521" -> "3524 
6523" [label="[-1]", style=dashed]; +"3524 6523" -> "3525 6524" [label="[-1]", style=dashed]; +"3525 6524" -> "3526 6525" [label="[-1]", style=dashed]; +"3526 6525" -> "3527 6526" [label="[]", style=dashed]; +"3527 6526" -> "3528 6527" [label="[]", style=dashed]; +"3528 6527" -> "3529 6528" [label="[1]", style=dashed]; +"3529 6528" -> "3530 6529" [label="[]", style=dashed]; +"3529 6528" -> "4264 6532" [label="[]", style=dashed]; +"3529 6528" -> "4278 6533" [label="[]", style=dashed]; +"3530 6529" -> "3692 6530" [label="[]", style=dashed]; +"3531 6505" -> "3532 6506" [label="[-1]", style=dashed]; +"3532 6506" -> "3691 6518" [label="[]", style=solid]; +"3532 6506" -> "3943 6513" [label="[]", style=solid]; +"3533 6461" -> "3534 6462" [label="[-1]", style=dashed]; +"3534 6462" -> "3691 6518" [label="[]", style=solid]; +"3534 6462" -> "3947 6469" [label="[]", style=solid]; +"3535 6417" -> "3536 6418" [label="[-1]", style=dashed]; +"3536 6418" -> "3691 6518" [label="[]", style=solid]; +"3536 6418" -> "3951 6425" [label="[]", style=solid]; +"3537 6373" -> "3538 6374" [label="[-1]", style=dashed]; +"3538 6374" -> "3691 6518" [label="[]", style=solid]; +"3538 6374" -> "3955 6381" [label="[]", style=solid]; +"3539 6329" -> "3540 6330" [label="[-1]", style=dashed]; +"3540 6330" -> "3691 6518" [label="[]", style=solid]; +"3540 6330" -> "3959 6337" [label="[]", style=solid]; +"3541 6285" -> "3542 6286" [label="[-1]", style=dashed]; +"3542 6286" -> "3691 6518" [label="[]", style=solid]; +"3542 6286" -> "3963 6293" [label="[]", style=solid]; +"3543 6241" -> "3544 6242" [label="[-1]", style=dashed]; +"3544 6242" -> "3691 6518" [label="[]", style=solid]; +"3544 6242" -> "3967 6249" [label="[]", style=solid]; +"3545 6197" -> "3546 6198" [label="[-1]", style=dashed]; +"3546 6198" -> "3691 6518" [label="[]", style=solid]; +"3546 6198" -> "3971 6205" [label="[]", style=solid]; +"3547 6153" -> "3548 6154" [label="[-1]", style=dashed]; +"3548 6154" -> "3691 6518" [label="[]", style=solid]; +"3548 6154" -> "3975 6161" [label="[]", style=solid]; +"3549 6109" -> "3550 6110" [label="[-1]", style=dashed]; +"3550 6110" -> "3691 6518" [label="[]", style=solid]; +"3550 6110" -> "3979 6117" [label="[]", style=solid]; +"3551 6065" -> "3552 6066" [label="[-1]", style=dashed]; +"3552 6066" -> "3691 6518" [label="[]", style=solid]; +"3552 6066" -> "3983 6073" [label="[]", style=solid]; +"3553 6021" -> "3554 6022" [label="[-1]", style=dashed]; +"3554 6022" -> "3691 6518" [label="[]", style=solid]; +"3554 6022" -> "3987 6029" [label="[]", style=solid]; +"3555 5977" -> "3556 5978" [label="[-1]", style=dashed]; +"3556 5978" -> "3691 6518" [label="[]", style=solid]; +"3556 5978" -> "3991 5985" [label="[]", style=solid]; +"3557 5933" -> "3558 5934" [label="[-1]", style=dashed]; +"3558 5934" -> "3691 6518" [label="[]", style=solid]; +"3558 5934" -> "3995 5941" [label="[]", style=solid]; +"3559 5889" -> "3560 5890" [label="[-1]", style=dashed]; +"3560 5890" -> "3691 6518" [label="[]", style=solid]; +"3560 5890" -> "3999 5897" [label="[]", style=solid]; +"3561 5845" -> "3562 5846" [label="[-1]", style=dashed]; +"3562 5846" -> "3691 6518" [label="[]", style=solid]; +"3562 5846" -> "4003 5853" [label="[]", style=solid]; +"3563 5801" -> "3564 5802" [label="[-1]", style=dashed]; +"3564 5802" -> "3691 6518" [label="[]", style=solid]; +"3564 5802" -> "4007 5809" [label="[]", style=solid]; +"3565 5757" -> "3566 5758" [label="[-1]", style=dashed]; +"3566 5758" -> "3691 6518" [label="[]", style=solid]; +"3566 5758" -> "4011 5765" [label="[]", 
style=solid]; +"3567 5713" -> "3568 5714" [label="[-1]", style=dashed]; +"3568 5714" -> "3691 6518" [label="[]", style=solid]; +"3568 5714" -> "4015 5721" [label="[]", style=solid]; +"3569 5669" -> "3570 5670" [label="[-1]", style=dashed]; +"3570 5670" -> "3691 6518" [label="[]", style=solid]; +"3570 5670" -> "4019 5677" [label="[]", style=solid]; +"3571 5625" -> "3572 5626" [label="[-1]", style=dashed]; +"3572 5626" -> "3691 6518" [label="[]", style=solid]; +"3572 5626" -> "4023 5633" [label="[]", style=solid]; +"3573 5581" -> "3574 5582" [label="[-1]", style=dashed]; +"3574 5582" -> "3691 6518" [label="[]", style=solid]; +"3574 5582" -> "4027 5589" [label="[]", style=solid]; +"3575 5537" -> "3576 5538" [label="[-1]", style=dashed]; +"3576 5538" -> "3691 6518" [label="[]", style=solid]; +"3576 5538" -> "4031 5545" [label="[]", style=solid]; +"3577 5493" -> "3578 5494" [label="[-1]", style=dashed]; +"3578 5494" -> "3691 6518" [label="[]", style=solid]; +"3578 5494" -> "4035 5501" [label="[]", style=solid]; +"3579 5449" -> "3580 5450" [label="[-1]", style=dashed]; +"3580 5450" -> "3691 6518" [label="[]", style=solid]; +"3580 5450" -> "4039 5457" [label="[]", style=solid]; +"3581 5405" -> "3582 5406" [label="[-1]", style=dashed]; +"3582 5406" -> "3691 6518" [label="[]", style=solid]; +"3582 5406" -> "4043 5413" [label="[]", style=solid]; +"3583 5361" -> "3584 5362" [label="[-1]", style=dashed]; +"3584 5362" -> "3691 6518" [label="[]", style=solid]; +"3584 5362" -> "4047 5369" [label="[]", style=solid]; +"3585 5317" -> "3586 5318" [label="[-1]", style=dashed]; +"3586 5318" -> "3691 6518" [label="[]", style=solid]; +"3586 5318" -> "4051 5325" [label="[]", style=solid]; +"3587 5273" -> "3588 5274" [label="[-1]", style=dashed]; +"3588 5274" -> "3691 6518" [label="[]", style=solid]; +"3588 5274" -> "4055 5281" [label="[]", style=solid]; +"3589 5229" -> "3590 5230" [label="[-1]", style=dashed]; +"3590 5230" -> "3691 6518" [label="[]", style=solid]; +"3590 5230" -> "4059 5237" [label="[]", style=solid]; +"3591 5185" -> "3592 5186" [label="[-1]", style=dashed]; +"3592 5186" -> "3691 6518" [label="[]", style=solid]; +"3592 5186" -> "4063 5193" [label="[]", style=solid]; +"3593 5141" -> "3594 5142" [label="[-1]", style=dashed]; +"3594 5142" -> "3691 6518" [label="[]", style=solid]; +"3594 5142" -> "4067 5149" [label="[]", style=solid]; +"3595 5097" -> "3596 5098" [label="[-1]", style=dashed]; +"3596 5098" -> "3691 6518" [label="[]", style=solid]; +"3596 5098" -> "4071 5105" [label="[]", style=solid]; +"3597 5053" -> "3598 5054" [label="[-1]", style=dashed]; +"3598 5054" -> "3691 6518" [label="[]", style=solid]; +"3598 5054" -> "4075 5061" [label="[]", style=solid]; +"3599 5009" -> "3600 5010" [label="[-1]", style=dashed]; +"3600 5010" -> "3691 6518" [label="[]", style=solid]; +"3600 5010" -> "4079 5017" [label="[]", style=solid]; +"3601 4965" -> "3602 4966" [label="[-1]", style=dashed]; +"3602 4966" -> "3691 6518" [label="[]", style=solid]; +"3602 4966" -> "4083 4973" [label="[]", style=solid]; +"3603 4921" -> "3604 4922" [label="[-1]", style=dashed]; +"3604 4922" -> "3691 6518" [label="[]", style=solid]; +"3604 4922" -> "4087 4929" [label="[]", style=solid]; +"3605 4877" -> "3606 4878" [label="[-1]", style=dashed]; +"3606 4878" -> "3691 6518" [label="[]", style=solid]; +"3606 4878" -> "4091 4885" [label="[]", style=solid]; +"3607 4833" -> "3608 4834" [label="[-1]", style=dashed]; +"3608 4834" -> "3691 6518" [label="[]", style=solid]; +"3608 4834" -> "4095 4841" [label="[]", style=solid]; +"3609 4789" 
-> "3610 4790" [label="[-1]", style=dashed]; +"3610 4790" -> "3691 6518" [label="[]", style=solid]; +"3610 4790" -> "4099 4797" [label="[]", style=solid]; +"3611 4745" -> "3612 4746" [label="[-1]", style=dashed]; +"3612 4746" -> "3691 6518" [label="[]", style=solid]; +"3612 4746" -> "4103 4753" [label="[]", style=solid]; +"3613 4701" -> "3614 4702" [label="[-1]", style=dashed]; +"3614 4702" -> "3691 6518" [label="[]", style=solid]; +"3614 4702" -> "4107 4709" [label="[]", style=solid]; +"3615 4657" -> "3616 4658" [label="[-1]", style=dashed]; +"3616 4658" -> "3691 6518" [label="[]", style=solid]; +"3616 4658" -> "4111 4665" [label="[]", style=solid]; +"3617 4613" -> "3618 4614" [label="[-1]", style=dashed]; +"3618 4614" -> "3691 6518" [label="[]", style=solid]; +"3618 4614" -> "4115 4621" [label="[]", style=solid]; +"3619 4569" -> "3620 4570" [label="[-1]", style=dashed]; +"3620 4570" -> "3691 6518" [label="[]", style=solid]; +"3620 4570" -> "4119 4577" [label="[]", style=solid]; +"3621 4525" -> "3622 4526" [label="[-1]", style=dashed]; +"3622 4526" -> "3691 6518" [label="[]", style=solid]; +"3622 4526" -> "4123 4533" [label="[]", style=solid]; +"3623 4481" -> "3624 4482" [label="[-1]", style=dashed]; +"3624 4482" -> "3691 6518" [label="[]", style=solid]; +"3624 4482" -> "4127 4489" [label="[]", style=solid]; +"3625 4437" -> "3626 4438" [label="[-1]", style=dashed]; +"3626 4438" -> "3691 6518" [label="[]", style=solid]; +"3626 4438" -> "4131 4445" [label="[]", style=solid]; +"3627 4393" -> "3628 4394" [label="[-1]", style=dashed]; +"3628 4394" -> "3691 6518" [label="[]", style=solid]; +"3628 4394" -> "4135 4401" [label="[]", style=solid]; +"3629 4349" -> "3630 4350" [label="[-1]", style=dashed]; +"3630 4350" -> "3691 6518" [label="[]", style=solid]; +"3630 4350" -> "4139 4357" [label="[]", style=solid]; +"3631 4305" -> "3632 4306" [label="[-1]", style=dashed]; +"3632 4306" -> "3691 6518" [label="[]", style=solid]; +"3632 4306" -> "4143 4313" [label="[]", style=solid]; +"3633 4261" -> "3634 4262" [label="[-1]", style=dashed]; +"3634 4262" -> "3691 6518" [label="[]", style=solid]; +"3634 4262" -> "4147 4269" [label="[]", style=solid]; +"3635 4217" -> "3636 4218" [label="[-1]", style=dashed]; +"3636 4218" -> "3691 6518" [label="[]", style=solid]; +"3636 4218" -> "4151 4225" [label="[]", style=solid]; +"3637 4173" -> "3638 4174" [label="[-1]", style=dashed]; +"3638 4174" -> "3691 6518" [label="[]", style=solid]; +"3638 4174" -> "4155 4181" [label="[]", style=solid]; +"3639 4129" -> "3640 4130" [label="[-1]", style=dashed]; +"3640 4130" -> "3691 6518" [label="[]", style=solid]; +"3640 4130" -> "4159 4137" [label="[]", style=solid]; +"3641 4085" -> "3642 4086" [label="[-1]", style=dashed]; +"3642 4086" -> "3691 6518" [label="[]", style=solid]; +"3642 4086" -> "4163 4093" [label="[]", style=solid]; +"3643 4041" -> "3644 4042" [label="[-1]", style=dashed]; +"3644 4042" -> "3691 6518" [label="[]", style=solid]; +"3644 4042" -> "4167 4049" [label="[]", style=solid]; +"3645 3997" -> "3646 3998" [label="[-1]", style=dashed]; +"3646 3998" -> "3691 6518" [label="[]", style=solid]; +"3646 3998" -> "4171 4005" [label="[]", style=solid]; +"3647 3953" -> "3648 3954" [label="[-1]", style=dashed]; +"3648 3954" -> "3691 6518" [label="[]", style=solid]; +"3648 3954" -> "4175 3961" [label="[]", style=solid]; +"3649 3909" -> "3650 3910" [label="[-1]", style=dashed]; +"3650 3910" -> "3691 6518" [label="[]", style=solid]; +"3650 3910" -> "4179 3917" [label="[]", style=solid]; +"3651 3865" -> "3652 3866" 
[label="[-1]", style=dashed]; +"3652 3866" -> "3691 6518" [label="[]", style=solid]; +"3652 3866" -> "4183 3873" [label="[]", style=solid]; +"3653 3821" -> "3654 3822" [label="[-1]", style=dashed]; +"3654 3822" -> "3691 6518" [label="[]", style=solid]; +"3654 3822" -> "4187 3829" [label="[]", style=solid]; +"3655 3777" -> "3656 3778" [label="[-1]", style=dashed]; +"3656 3778" -> "3691 6518" [label="[]", style=solid]; +"3656 3778" -> "4191 3785" [label="[]", style=solid]; +"3657 3733" -> "3658 3734" [label="[-1]", style=dashed]; +"3658 3734" -> "3691 6518" [label="[]", style=solid]; +"3658 3734" -> "4195 3741" [label="[]", style=solid]; +"3659 3689" -> "3660 3690" [label="[-1]", style=dashed]; +"3660 3690" -> "3691 6518" [label="[]", style=solid]; +"3660 3690" -> "4199 3697" [label="[]", style=solid]; +"3661 3645" -> "3662 3646" [label="[-1]", style=dashed]; +"3662 3646" -> "3691 6518" [label="[]", style=solid]; +"3662 3646" -> "4203 3653" [label="[]", style=solid]; +"3663 3601" -> "3664 3602" [label="[-1]", style=dashed]; +"3664 3602" -> "3691 6518" [label="[]", style=solid]; +"3664 3602" -> "4207 3609" [label="[]", style=solid]; +"3665 3557" -> "3666 3558" [label="[-1]", style=dashed]; +"3666 3558" -> "3691 6518" [label="[]", style=solid]; +"3666 3558" -> "4211 3565" [label="[]", style=solid]; +"3667 3513" -> "3668 3514" [label="[-1]", style=dashed]; +"3668 3514" -> "3691 6518" [label="[]", style=solid]; +"3668 3514" -> "4215 3521" [label="[]", style=solid]; +"3669 3469" -> "3670 3470" [label="[-1]", style=dashed]; +"3670 3470" -> "3691 6518" [label="[]", style=solid]; +"3670 3470" -> "4219 3477" [label="[]", style=solid]; +"3671 3425" -> "3672 3426" [label="[-1]", style=dashed]; +"3672 3426" -> "3691 6518" [label="[]", style=solid]; +"3672 3426" -> "4223 3433" [label="[]", style=solid]; +"3673 3381" -> "3674 3382" [label="[-1]", style=dashed]; +"3674 3382" -> "3691 6518" [label="[]", style=solid]; +"3674 3382" -> "4227 3389" [label="[]", style=solid]; +"3675 3337" -> "3676 3338" [label="[-1]", style=dashed]; +"3676 3338" -> "3691 6518" [label="[]", style=solid]; +"3676 3338" -> "4231 3345" [label="[]", style=solid]; +"3677 3293" -> "3678 3294" [label="[-1]", style=dashed]; +"3678 3294" -> "3691 6518" [label="[]", style=solid]; +"3678 3294" -> "4235 3301" [label="[]", style=solid]; +"3679 3249" -> "3680 3250" [label="[-1]", style=dashed]; +"3680 3250" -> "3691 6518" [label="[]", style=solid]; +"3680 3250" -> "4239 3257" [label="[]", style=solid]; +"3681 3205" -> "3682 3206" [label="[-1]", style=dashed]; +"3682 3206" -> "3691 6518" [label="[]", style=solid]; +"3682 3206" -> "4243 3213" [label="[]", style=solid]; +"3683 3161" -> "3684 3162" [label="[-1]", style=dashed]; +"3684 3162" -> "3691 6518" [label="[]", style=solid]; +"3684 3162" -> "4247 3169" [label="[]", style=solid]; +"3685 3117" -> "3686 3118" [label="[-1]", style=dashed]; +"3686 3118" -> "3691 6518" [label="[]", style=solid]; +"3686 3118" -> "4251 3125" [label="[]", style=solid]; +"3687 3073" -> "3688 3074" [label="[-1]", style=dashed]; +"3688 3074" -> "3691 6518" [label="[]", style=solid]; +"3688 3074" -> "4255 3081" [label="[]", style=solid]; +"3689 3029" -> "3690 3030" [label="[-1]", style=dashed]; +"3690 3030" -> "3691 6518" [label="[]", style=solid]; +"3690 3030" -> "4259 3037" [label="[]", style=solid]; +"3691 6518" -> "3692 6530" [label="[]", style=solid]; +"3692 6530" -> "3693 QuantizeLinear_6568_4" [label="[-1, 4]", style=solid]; +"3692 6530" -> "3695 QuantizeLinear_6568_3" [label="[-1, 4]", style=solid]; +"3692 6530" 
-> "3697 QuantizeLinear_6568_2" [label="[-1, 4]", style=solid]; +"3692 6530" -> "3699 QuantizeLinear_6568_1" [label="[-1, 4]", style=solid]; +"3692 6530" -> "3746 6539" [label="[-1, 4]", style=solid]; +"3692 6530" -> "3750 6547" [label="[-1, 4]", style=solid]; +"3692 6530" -> "4281 nncf_model_output_0" [label="[-1, 4]", style=solid]; +"3693 QuantizeLinear_6568_4" -> "3694 DequantizeLinear_6568_4" [label="[-1, 4]", style=dashed]; +"3694 DequantizeLinear_6568_4" -> "3711 6552" [label="[-1, 4]", style=solid]; +"3695 QuantizeLinear_6568_3" -> "3696 DequantizeLinear_6568_3" [label="[-1, 4]", style=dashed]; +"3696 DequantizeLinear_6568_3" -> "3709 6559" [label="[-1, 4]", style=solid]; +"3697 QuantizeLinear_6568_2" -> "3698 DequantizeLinear_6568_2" [label="[-1, 4]", style=dashed]; +"3698 DequantizeLinear_6568_2" -> "3703 6569" [label="[-1, 4]", style=solid]; +"3699 QuantizeLinear_6568_1" -> "3700 DequantizeLinear_6568_1" [label="[-1, 4]", style=dashed]; +"3700 DequantizeLinear_6568_1" -> "3701 6576" [label="[-1, 4]", style=solid]; +"3701 6576" -> "3702 6578" [label="[-1, 4]", style=solid]; +"3702 6578" -> "3705 6579" [label="[-1]", style=solid]; +"3703 6569" -> "3704 6571" [label="[-1, 4]", style=solid]; +"3704 6571" -> "3705 6579" [label="[-1]", style=solid]; +"3705 6579" -> "3706 QuantizeLinear_6617_1" [label="[-1]", style=solid]; +"3706 QuantizeLinear_6617_1" -> "3707 DequantizeLinear_6617_1" [label="[-1]", style=dashed]; +"3707 DequantizeLinear_6617_1" -> "3708 6581" [label="[-1]", style=solid]; +"3708 6581" -> "3717 QuantizeLinear_6619_1" [label="[-1]", style=solid]; +"3709 6559" -> "3710 6561" [label="[-1, 4]", style=solid]; +"3710 6561" -> "3713 6562" [label="[-1]", style=solid]; +"3711 6552" -> "3712 6554" [label="[-1, 4]", style=solid]; +"3712 6554" -> "3713 6562" [label="[-1]", style=solid]; +"3713 6562" -> "3714 QuantizeLinear_6600_1" [label="[-1]", style=solid]; +"3714 QuantizeLinear_6600_1" -> "3715 DequantizeLinear_6600_1" [label="[-1]", style=dashed]; +"3715 DequantizeLinear_6600_1" -> "3716 6564" [label="[-1]", style=solid]; +"3716 6564" -> "3719 QuantizeLinear_6602_1" [label="[-1]", style=solid]; +"3717 QuantizeLinear_6619_1" -> "3718 DequantizeLinear_6619_1" [label="[-1]", style=dashed]; +"3718 DequantizeLinear_6619_1" -> "3721 6582" [label="[-1]", style=solid]; +"3719 QuantizeLinear_6602_1" -> "3720 DequantizeLinear_6602_1" [label="[-1]", style=dashed]; +"3720 DequantizeLinear_6602_1" -> "3721 6582" [label="[-1]", style=solid]; +"3721 6582" -> "3722 QuantizeLinear_6620_1" [label="[-1]", style=solid]; +"3722 QuantizeLinear_6620_1" -> "3723 DequantizeLinear_6620_1" [label="[-1]", style=dashed]; +"3723 DequantizeLinear_6620_1" -> "3724 6583" [label="[-1]", style=solid]; +"3724 6583" -> "3725 6586" [label="[-1]", style=solid]; +"3725 6586" -> "3726 QuantizeLinear_6624_1" [label="[-1]", style=solid]; +"3726 QuantizeLinear_6624_1" -> "3727 DequantizeLinear_6624_1" [label="[-1]", style=dashed]; +"3727 DequantizeLinear_6624_1" -> "3728 6587" [label="[-1]", style=solid]; +"3728 6587" -> "3729 6588" [label="[-1]", style=solid]; +"3729 6588" -> "3730 6590" [label="[-1]", style=solid]; +"3730 6590" -> "3731 QuantizeLinear_6628_1" [label="[-1]", style=solid]; +"3731 QuantizeLinear_6628_1" -> "3732 DequantizeLinear_6628_1" [label="[-1]", style=dashed]; +"3732 DequantizeLinear_6628_1" -> "3733 6592" [label="[-1]", style=solid]; +"3733 6592" -> "3734 QuantizeLinear_6630_1" [label="[-1]", style=solid]; +"3734 QuantizeLinear_6630_1" -> "3735 DequantizeLinear_6630_1" [label="[-1]", style=dashed]; 
+"3735 DequantizeLinear_6630_1" -> "3736 6593" [label="[-1]", style=solid]; +"3736 6593" -> "3737 6594" [label="[-1]", style=solid]; +"3737 6594" -> "3738 6595" [label="[-1]", style=solid]; +"3738 6595" -> "3739 6597" [label="[-1]", style=dashed]; +"3739 6597" -> "3740 6599" [label="[-1]", style=dashed]; +"3739 6597" -> "3767 6685" [label="[-1]", style=dashed]; +"3739 6597" -> "3787 6667" [label="[-1]", style=dashed]; +"3739 6597" -> "3793 6616" [label="[-1]", style=dashed]; +"3739 6597" -> "3815 6713" [label="[-1]", style=dashed]; +"3739 6597" -> "3827 6633" [label="[-1]", style=dashed]; +"3739 6597" -> "3849 6741" [label="[-1]", style=dashed]; +"3739 6597" -> "3861 6650" [label="[-1]", style=dashed]; +"3739 6597" -> "3883 6769" [label="[-1]", style=dashed]; +"3740 6599" -> "3741 6601" [label="[-1]", style=dashed]; +"3741 6601" -> "3742 6602" [label="[-1]", style=solid]; +"3742 6602" -> "3743 6603" [label="[1, -1]", style=dashed]; +"3743 6603" -> "3744 6604" [label="[-1, 1]", style=dashed]; +"3744 6604" -> "3745 6605" [label="[-1]", style=dashed]; +"3745 6605" -> "3751 6606" [label="[-1]", style=dashed]; +"3746 6539" -> "3747 6544" [label="[-1, 4]", style=solid]; +"3747 6544" -> "3748 6545" [label="[-1, 1]", style=solid]; +"3748 6545" -> "3749 6546" [label="[2]", style=dashed]; +"3749 6546" -> "3750 6547" [label="[-1, -1]", style=solid]; +"3750 6547" -> "3751 6606" [label="[-1, -1]", style=solid]; +"3750 6547" -> "3799 6623" [label="[-1, -1]", style=solid]; +"3750 6547" -> "3833 6640" [label="[-1, -1]", style=solid]; +"3750 6547" -> "3867 6657" [label="[-1, -1]", style=solid]; +"3751 6606" -> "3752 6612" [label="[-1, -1]", style=solid]; +"3751 6606" -> "3753 6608" [label="[-1, -1]", style=solid]; +"3752 6612" -> "3756 6613" [label="[-1, 4]", style=solid]; +"3753 6608" -> "3754 6609" [label="[-1, 1]", style=solid]; +"3754 6609" -> "3755 6610" [label="[-1]", style=solid]; +"3755 6610" -> "3756 6613" [label="[-1]", style=dashed]; +"3756 6613" -> "3757 6614" [label="[-1, 256, 14, 14]", style=solid]; +"3757 6614" -> "3758 6702" [label="[-1, 256, 14, 14]", style=solid]; +"3757 6614" -> "3761 6699" [label="[-1, 256, 14, 14]", style=solid]; +"3757 6614" -> "3764 6696" [label="[-1, 256, 14, 14]", style=solid]; +"3757 6614" -> "3778 6676" [label="[-1, 256, 14, 14]", style=solid]; +"3757 6614" -> "3781 6673" [label="[-1, 256, 14, 14]", style=solid]; +"3757 6614" -> "3784 6670" [label="[-1, 256, 14, 14]", style=solid]; +"3757 6614" -> "3792 6711" [label="[-1, 256, 14, 14]", style=solid]; +"3758 6702" -> "3759 6703" [label="[4]", style=dashed]; +"3759 6703" -> "3760 6707" [label="[]", style=dashed]; +"3760 6707" -> "3775 6708" [label="[1]", style=dashed]; +"3761 6699" -> "3762 6700" [label="[4]", style=dashed]; +"3762 6700" -> "3763 6706" [label="[]", style=dashed]; +"3763 6706" -> "3775 6708" [label="[1]", style=dashed]; +"3764 6696" -> "3765 6697" [label="[4]", style=dashed]; +"3765 6697" -> "3766 6705" [label="[]", style=dashed]; +"3766 6705" -> "3775 6708" [label="[1]", style=dashed]; +"3767 6685" -> "3768 6687" [label="[-1]", style=dashed]; +"3768 6687" -> "3769 6688" [label="[-1]", style=solid]; +"3769 6688" -> "3770 6689" [label="[1, -1]", style=dashed]; +"3770 6689" -> "3771 6691" [label="[-1, 1]", style=dashed]; +"3771 6691" -> "3772 6693" [label="[-1, 1, 1, 1]", style=dashed]; +"3771 6691" -> "3776 6709" [label="[-1, 1, 1, 1]", style=dashed]; +"3772 6693" -> "3773 6694" [label="[4]", style=dashed]; +"3773 6694" -> "3774 6704" [label="[]", style=dashed]; +"3774 6704" -> "3775 6708" 
[label="[1]", style=dashed]; +"3775 6708" -> "3776 6709" [label="[4]", style=dashed]; +"3776 6709" -> "3777 6710" [label="[-1, -1, -1, -1]", style=dashed]; +"3777 6710" -> "3792 6711" [label="[-1, -1, -1, -1]", style=dashed]; +"3778 6676" -> "3779 6677" [label="[4]", style=dashed]; +"3779 6677" -> "3780 6681" [label="[]", style=dashed]; +"3780 6681" -> "3790 6682" [label="[1]", style=dashed]; +"3781 6673" -> "3782 6674" [label="[4]", style=dashed]; +"3782 6674" -> "3783 6680" [label="[]", style=dashed]; +"3783 6680" -> "3790 6682" [label="[1]", style=dashed]; +"3784 6670" -> "3785 6671" [label="[4]", style=dashed]; +"3785 6671" -> "3786 6679" [label="[]", style=dashed]; +"3786 6679" -> "3790 6682" [label="[1]", style=dashed]; +"3787 6667" -> "3788 6668" [label="[1]", style=dashed]; +"3788 6668" -> "3789 6678" [label="[]", style=dashed]; +"3789 6678" -> "3790 6682" [label="[1]", style=dashed]; +"3790 6682" -> "3791 6683" [label="[4]", style=dashed]; +"3791 6683" -> "3792 6711" [label="[-1, -1, -1, -1]", style=solid]; +"3792 6711" -> "3826 6739" [label="[-1, -1, -1, -1]", style=solid]; +"3793 6616" -> "3794 6618" [label="[-1]", style=dashed]; +"3794 6618" -> "3795 6619" [label="[-1]", style=solid]; +"3795 6619" -> "3796 6620" [label="[1, -1]", style=dashed]; +"3796 6620" -> "3797 6621" [label="[-1, 1]", style=dashed]; +"3797 6621" -> "3798 6622" [label="[-1]", style=dashed]; +"3798 6622" -> "3799 6623" [label="[-1]", style=dashed]; +"3799 6623" -> "3800 6629" [label="[-1, -1]", style=solid]; +"3799 6623" -> "3801 6625" [label="[-1, -1]", style=solid]; +"3800 6629" -> "3804 6630" [label="[-1, 4]", style=solid]; +"3801 6625" -> "3802 6626" [label="[-1, 1]", style=solid]; +"3802 6626" -> "3803 6627" [label="[-1]", style=solid]; +"3803 6627" -> "3804 6630" [label="[-1]", style=dashed]; +"3804 6630" -> "3805 6631" [label="[-1, 256, 14, 14]", style=solid]; +"3805 6631" -> "3806 6730" [label="[-1, 256, 14, 14]", style=solid]; +"3805 6631" -> "3809 6727" [label="[-1, 256, 14, 14]", style=solid]; +"3805 6631" -> "3812 6724" [label="[-1, 256, 14, 14]", style=solid]; +"3805 6631" -> "3826 6739" [label="[-1, 256, 14, 14]", style=solid]; +"3806 6730" -> "3807 6731" [label="[4]", style=dashed]; +"3807 6731" -> "3808 6735" [label="[]", style=dashed]; +"3808 6735" -> "3823 6736" [label="[1]", style=dashed]; +"3809 6727" -> "3810 6728" [label="[4]", style=dashed]; +"3810 6728" -> "3811 6734" [label="[]", style=dashed]; +"3811 6734" -> "3823 6736" [label="[1]", style=dashed]; +"3812 6724" -> "3813 6725" [label="[4]", style=dashed]; +"3813 6725" -> "3814 6733" [label="[]", style=dashed]; +"3814 6733" -> "3823 6736" [label="[1]", style=dashed]; +"3815 6713" -> "3816 6715" [label="[-1]", style=dashed]; +"3816 6715" -> "3817 6716" [label="[-1]", style=solid]; +"3817 6716" -> "3818 6717" [label="[1, -1]", style=dashed]; +"3818 6717" -> "3819 6719" [label="[-1, 1]", style=dashed]; +"3819 6719" -> "3820 6721" [label="[-1, 1, 1, 1]", style=dashed]; +"3819 6719" -> "3824 6737" [label="[-1, 1, 1, 1]", style=dashed]; +"3820 6721" -> "3821 6722" [label="[4]", style=dashed]; +"3821 6722" -> "3822 6732" [label="[]", style=dashed]; +"3822 6732" -> "3823 6736" [label="[1]", style=dashed]; +"3823 6736" -> "3824 6737" [label="[4]", style=dashed]; +"3824 6737" -> "3825 6738" [label="[-1, -1, -1, -1]", style=dashed]; +"3825 6738" -> "3826 6739" [label="[-1, -1, -1, -1]", style=dashed]; +"3826 6739" -> "3860 6767" [label="[-1, -1, -1, -1]", style=solid]; +"3827 6633" -> "3828 6635" [label="[-1]", style=dashed]; +"3828 6635" -> 
"3829 6636" [label="[-1]", style=solid]; +"3829 6636" -> "3830 6637" [label="[1, -1]", style=dashed]; +"3830 6637" -> "3831 6638" [label="[-1, 1]", style=dashed]; +"3831 6638" -> "3832 6639" [label="[-1]", style=dashed]; +"3832 6639" -> "3833 6640" [label="[-1]", style=dashed]; +"3833 6640" -> "3834 6646" [label="[-1, -1]", style=solid]; +"3833 6640" -> "3835 6642" [label="[-1, -1]", style=solid]; +"3834 6646" -> "3838 6647" [label="[-1, 4]", style=solid]; +"3835 6642" -> "3836 6643" [label="[-1, 1]", style=solid]; +"3836 6643" -> "3837 6644" [label="[-1]", style=solid]; +"3837 6644" -> "3838 6647" [label="[-1]", style=dashed]; +"3838 6647" -> "3839 6648" [label="[-1, 256, 14, 14]", style=solid]; +"3839 6648" -> "3840 6758" [label="[-1, 256, 14, 14]", style=solid]; +"3839 6648" -> "3843 6755" [label="[-1, 256, 14, 14]", style=solid]; +"3839 6648" -> "3846 6752" [label="[-1, 256, 14, 14]", style=solid]; +"3839 6648" -> "3860 6767" [label="[-1, 256, 14, 14]", style=solid]; +"3840 6758" -> "3841 6759" [label="[4]", style=dashed]; +"3841 6759" -> "3842 6763" [label="[]", style=dashed]; +"3842 6763" -> "3857 6764" [label="[1]", style=dashed]; +"3843 6755" -> "3844 6756" [label="[4]", style=dashed]; +"3844 6756" -> "3845 6762" [label="[]", style=dashed]; +"3845 6762" -> "3857 6764" [label="[1]", style=dashed]; +"3846 6752" -> "3847 6753" [label="[4]", style=dashed]; +"3847 6753" -> "3848 6761" [label="[]", style=dashed]; +"3848 6761" -> "3857 6764" [label="[1]", style=dashed]; +"3849 6741" -> "3850 6743" [label="[-1]", style=dashed]; +"3850 6743" -> "3851 6744" [label="[-1]", style=solid]; +"3851 6744" -> "3852 6745" [label="[1, -1]", style=dashed]; +"3852 6745" -> "3853 6747" [label="[-1, 1]", style=dashed]; +"3853 6747" -> "3854 6749" [label="[-1, 1, 1, 1]", style=dashed]; +"3853 6747" -> "3858 6765" [label="[-1, 1, 1, 1]", style=dashed]; +"3854 6749" -> "3855 6750" [label="[4]", style=dashed]; +"3855 6750" -> "3856 6760" [label="[]", style=dashed]; +"3856 6760" -> "3857 6764" [label="[1]", style=dashed]; +"3857 6764" -> "3858 6765" [label="[4]", style=dashed]; +"3858 6765" -> "3859 6766" [label="[-1, -1, -1, -1]", style=dashed]; +"3859 6766" -> "3860 6767" [label="[-1, -1, -1, -1]", style=dashed]; +"3860 6767" -> "3894 6795" [label="[-1, -1, -1, -1]", style=solid]; +"3861 6650" -> "3862 6652" [label="[-1]", style=dashed]; +"3862 6652" -> "3863 6653" [label="[-1]", style=solid]; +"3863 6653" -> "3864 6654" [label="[1, -1]", style=dashed]; +"3864 6654" -> "3865 6655" [label="[-1, 1]", style=dashed]; +"3865 6655" -> "3866 6656" [label="[-1]", style=dashed]; +"3866 6656" -> "3867 6657" [label="[-1]", style=dashed]; +"3867 6657" -> "3868 6663" [label="[-1, -1]", style=solid]; +"3867 6657" -> "3869 6659" [label="[-1, -1]", style=solid]; +"3868 6663" -> "3872 6664" [label="[-1, 4]", style=solid]; +"3869 6659" -> "3870 6660" [label="[-1, 1]", style=solid]; +"3870 6660" -> "3871 6661" [label="[-1]", style=solid]; +"3871 6661" -> "3872 6664" [label="[-1]", style=dashed]; +"3872 6664" -> "3873 6665" [label="[-1, 256, 14, 14]", style=solid]; +"3873 6665" -> "3874 6786" [label="[-1, 256, 14, 14]", style=solid]; +"3873 6665" -> "3877 6783" [label="[-1, 256, 14, 14]", style=solid]; +"3873 6665" -> "3880 6780" [label="[-1, 256, 14, 14]", style=solid]; +"3873 6665" -> "3894 6795" [label="[-1, 256, 14, 14]", style=solid]; +"3874 6786" -> "3875 6787" [label="[4]", style=dashed]; +"3875 6787" -> "3876 6791" [label="[]", style=dashed]; +"3876 6791" -> "3891 6792" [label="[1]", style=dashed]; +"3877 6783" -> "3878 
6784" [label="[4]", style=dashed]; +"3878 6784" -> "3879 6790" [label="[]", style=dashed]; +"3879 6790" -> "3891 6792" [label="[1]", style=dashed]; +"3880 6780" -> "3881 6781" [label="[4]", style=dashed]; +"3881 6781" -> "3882 6789" [label="[]", style=dashed]; +"3882 6789" -> "3891 6792" [label="[1]", style=dashed]; +"3883 6769" -> "3884 6771" [label="[-1]", style=dashed]; +"3884 6771" -> "3885 6772" [label="[-1]", style=solid]; +"3885 6772" -> "3886 6773" [label="[1, -1]", style=dashed]; +"3886 6773" -> "3887 6775" [label="[-1, 1]", style=dashed]; +"3887 6775" -> "3888 6777" [label="[-1, 1, 1, 1]", style=dashed]; +"3887 6775" -> "3892 6793" [label="[-1, 1, 1, 1]", style=dashed]; +"3888 6777" -> "3889 6778" [label="[4]", style=dashed]; +"3889 6778" -> "3890 6788" [label="[]", style=dashed]; +"3890 6788" -> "3891 6792" [label="[1]", style=dashed]; +"3891 6792" -> "3892 6793" [label="[4]", style=dashed]; +"3892 6793" -> "3893 6794" [label="[-1, -1, -1, -1]", style=dashed]; +"3893 6794" -> "3894 6795" [label="[-1, -1, -1, -1]", style=dashed]; +"3894 6795" -> "3895 QuantizeLinear_6833_1" [label="[-1, -1, -1, -1]", style=solid]; +"3895 QuantizeLinear_6833_1" -> "3896 DequantizeLinear_6833_1" [label="[-1, -1, -1, -1]", style=dashed]; +"3896 DequantizeLinear_6833_1" -> "3899 6798" [label="[-1, -1, -1, -1]", style=solid]; +"3897 QuantizeLinear_6834_1" -> "3898 DequantizeLinear_6834_1" [label="[256, 256, 3, 3]", style=dashed]; +"3898 DequantizeLinear_6834_1" -> "3899 6798" [label="[256, 256, 3, 3]", style=solid]; +"3899 6798" -> "3900 6799" [label="[-1, 256, -1, -1]", style=solid]; +"3900 6799" -> "3901 QuantizeLinear_6837_1" [label="[-1, 256, -1, -1]", style=solid]; +"3901 QuantizeLinear_6837_1" -> "3902 DequantizeLinear_6837_1" [label="[-1, 256, -1, -1]", style=dashed]; +"3902 DequantizeLinear_6837_1" -> "3905 6802" [label="[-1, 256, -1, -1]", style=solid]; +"3903 QuantizeLinear_6838_1" -> "3904 DequantizeLinear_6838_1" [label="[256, 256, 3, 3]", style=dashed]; +"3904 DequantizeLinear_6838_1" -> "3905 6802" [label="[256, 256, 3, 3]", style=solid]; +"3905 6802" -> "3906 6803" [label="[-1, 256, -1, -1]", style=solid]; +"3906 6803" -> "3907 QuantizeLinear_6841_1" [label="[-1, 256, -1, -1]", style=solid]; +"3907 QuantizeLinear_6841_1" -> "3908 DequantizeLinear_6841_1" [label="[-1, 256, -1, -1]", style=dashed]; +"3908 DequantizeLinear_6841_1" -> "3911 6806" [label="[-1, 256, -1, -1]", style=solid]; +"3909 QuantizeLinear_6842_1" -> "3910 DequantizeLinear_6842_1" [label="[256, 256, 3, 3]", style=dashed]; +"3910 DequantizeLinear_6842_1" -> "3911 6806" [label="[256, 256, 3, 3]", style=solid]; +"3911 6806" -> "3912 6807" [label="[-1, 256, -1, -1]", style=solid]; +"3912 6807" -> "3913 QuantizeLinear_6845_1" [label="[-1, 256, -1, -1]", style=solid]; +"3913 QuantizeLinear_6845_1" -> "3914 DequantizeLinear_6845_1" [label="[-1, 256, -1, -1]", style=dashed]; +"3914 DequantizeLinear_6845_1" -> "3917 6810" [label="[-1, 256, -1, -1]", style=solid]; +"3915 QuantizeLinear_6846_1" -> "3916 DequantizeLinear_6846_1" [label="[256, 256, 3, 3]", style=dashed]; +"3916 DequantizeLinear_6846_1" -> "3917 6810" [label="[256, 256, 3, 3]", style=solid]; +"3917 6810" -> "3918 6811" [label="[-1, 256, -1, -1]", style=solid]; +"3918 6811" -> "3919 QuantizeLinear_6849_1" [label="[-1, 256, -1, -1]", style=solid]; +"3919 QuantizeLinear_6849_1" -> "3920 DequantizeLinear_6849_1" [label="[-1, 256, -1, -1]", style=dashed]; +"3920 DequantizeLinear_6849_1" -> "3923 6814" [label="[-1, 256, -1, -1]", style=solid]; +"3921 QuantizeLinear_6850_1" 
-> "3922 DequantizeLinear_6850_1" [label="[256, 256, 2, 2]", style=dashed]; +"3922 DequantizeLinear_6850_1" -> "3923 6814" [label="[256, 256, 2, 2]", style=solid]; +"3923 6814" -> "3924 6815" [label="[-1, 256, -1, -1]", style=solid]; +"3924 6815" -> "3925 QuantizeLinear_6853_1" [label="[-1, 256, -1, -1]", style=solid]; +"3925 QuantizeLinear_6853_1" -> "3926 DequantizeLinear_6853_1" [label="[-1, 256, -1, -1]", style=dashed]; +"3926 DequantizeLinear_6853_1" -> "3929 6818" [label="[-1, 256, -1, -1]", style=solid]; +"3927 QuantizeLinear_6854_1" -> "3928 DequantizeLinear_6854_1" [label="[81, 256, 1, 1]", style=dashed]; +"3928 DequantizeLinear_6854_1" -> "3929 6818" [label="[81, 256, 1, 1]", style=solid]; +"3929 6818" -> "3930 6819" [label="[-1, 81, -1, -1]", style=solid]; +"3929 6818" -> "3933 6822" [label="[-1, 81, -1, -1]", style=solid]; +"3930 6819" -> "3931 6844" [label="[-1, 81, -1, -1]", style=solid]; +"3930 6819" -> "4267 6835" [label="[-1, 81, -1, -1]", style=solid]; +"3930 6819" -> "4270 6832" [label="[-1, 81, -1, -1]", style=solid]; +"3930 6819" -> "4275 6842" [label="[-1, 81, -1, -1]", style=solid]; +"3931 6844" -> "3932 6845" [label="[4]", style=dashed]; +"3932 6845" -> "3942 6846" [label="[]", style=dashed]; +"3933 6822" -> "3934 6823" [label="[4]", style=dashed]; +"3934 6823" -> "3935 6824" [label="[]", style=dashed]; +"3935 6824" -> "3936 6825" [label="[1]", style=dashed]; +"3936 6825" -> "3937 6826" [label="[1]", style=dashed]; +"3937 6826" -> "3938 6827" [label="[-1]", style=dashed]; +"3938 6827" -> "3939 6828" [label="[-1]", style=solid]; +"3939 6828" -> "3940 6829" [label="[1, -1]", style=dashed]; +"3940 6829" -> "3941 6830" [label="[-1, 1]", style=dashed]; +"3941 6830" -> "3942 6846" [label="[-1]", style=dashed]; +"3942 6846" -> "4266 6847" [label="[-1]", style=dashed]; +"3943 6513" -> "3944 6515" [label="[]", style=solid]; +"3944 6515" -> "3945 6516" [label="[]", style=solid]; +"3945 6516" -> "3946 6517" [label="[-1]", style=dashed]; +"3946 6517" -> "4263 6519" [label="[]", style=dashed]; +"3947 6469" -> "3948 6471" [label="[]", style=solid]; +"3948 6471" -> "3949 6472" [label="[]", style=solid]; +"3949 6472" -> "3950 6473" [label="[-1]", style=dashed]; +"3950 6473" -> "4263 6519" [label="[]", style=dashed]; +"3951 6425" -> "3952 6427" [label="[]", style=solid]; +"3952 6427" -> "3953 6428" [label="[]", style=solid]; +"3953 6428" -> "3954 6429" [label="[-1]", style=dashed]; +"3954 6429" -> "4263 6519" [label="[]", style=dashed]; +"3955 6381" -> "3956 6383" [label="[]", style=solid]; +"3956 6383" -> "3957 6384" [label="[]", style=solid]; +"3957 6384" -> "3958 6385" [label="[-1]", style=dashed]; +"3958 6385" -> "4263 6519" [label="[]", style=dashed]; +"3959 6337" -> "3960 6339" [label="[]", style=solid]; +"3960 6339" -> "3961 6340" [label="[]", style=solid]; +"3961 6340" -> "3962 6341" [label="[-1]", style=dashed]; +"3962 6341" -> "4263 6519" [label="[]", style=dashed]; +"3963 6293" -> "3964 6295" [label="[]", style=solid]; +"3964 6295" -> "3965 6296" [label="[]", style=solid]; +"3965 6296" -> "3966 6297" [label="[-1]", style=dashed]; +"3966 6297" -> "4263 6519" [label="[]", style=dashed]; +"3967 6249" -> "3968 6251" [label="[]", style=solid]; +"3968 6251" -> "3969 6252" [label="[]", style=solid]; +"3969 6252" -> "3970 6253" [label="[-1]", style=dashed]; +"3970 6253" -> "4263 6519" [label="[]", style=dashed]; +"3971 6205" -> "3972 6207" [label="[]", style=solid]; +"3972 6207" -> "3973 6208" [label="[]", style=solid]; +"3973 6208" -> "3974 6209" [label="[-1]", style=dashed]; 
+"3974 6209" -> "4263 6519" [label="[]", style=dashed]; +"3975 6161" -> "3976 6163" [label="[]", style=solid]; +"3976 6163" -> "3977 6164" [label="[]", style=solid]; +"3977 6164" -> "3978 6165" [label="[-1]", style=dashed]; +"3978 6165" -> "4263 6519" [label="[]", style=dashed]; +"3979 6117" -> "3980 6119" [label="[]", style=solid]; +"3980 6119" -> "3981 6120" [label="[]", style=solid]; +"3981 6120" -> "3982 6121" [label="[-1]", style=dashed]; +"3982 6121" -> "4263 6519" [label="[]", style=dashed]; +"3983 6073" -> "3984 6075" [label="[]", style=solid]; +"3984 6075" -> "3985 6076" [label="[]", style=solid]; +"3985 6076" -> "3986 6077" [label="[-1]", style=dashed]; +"3986 6077" -> "4263 6519" [label="[]", style=dashed]; +"3987 6029" -> "3988 6031" [label="[]", style=solid]; +"3988 6031" -> "3989 6032" [label="[]", style=solid]; +"3989 6032" -> "3990 6033" [label="[-1]", style=dashed]; +"3990 6033" -> "4263 6519" [label="[]", style=dashed]; +"3991 5985" -> "3992 5987" [label="[]", style=solid]; +"3992 5987" -> "3993 5988" [label="[]", style=solid]; +"3993 5988" -> "3994 5989" [label="[-1]", style=dashed]; +"3994 5989" -> "4263 6519" [label="[]", style=dashed]; +"3995 5941" -> "3996 5943" [label="[]", style=solid]; +"3996 5943" -> "3997 5944" [label="[]", style=solid]; +"3997 5944" -> "3998 5945" [label="[-1]", style=dashed]; +"3998 5945" -> "4263 6519" [label="[]", style=dashed]; +"3999 5897" -> "4000 5899" [label="[]", style=solid]; +"4000 5899" -> "4001 5900" [label="[]", style=solid]; +"4001 5900" -> "4002 5901" [label="[-1]", style=dashed]; +"4002 5901" -> "4263 6519" [label="[]", style=dashed]; +"4003 5853" -> "4004 5855" [label="[]", style=solid]; +"4004 5855" -> "4005 5856" [label="[]", style=solid]; +"4005 5856" -> "4006 5857" [label="[-1]", style=dashed]; +"4006 5857" -> "4263 6519" [label="[]", style=dashed]; +"4007 5809" -> "4008 5811" [label="[]", style=solid]; +"4008 5811" -> "4009 5812" [label="[]", style=solid]; +"4009 5812" -> "4010 5813" [label="[-1]", style=dashed]; +"4010 5813" -> "4263 6519" [label="[]", style=dashed]; +"4011 5765" -> "4012 5767" [label="[]", style=solid]; +"4012 5767" -> "4013 5768" [label="[]", style=solid]; +"4013 5768" -> "4014 5769" [label="[-1]", style=dashed]; +"4014 5769" -> "4263 6519" [label="[]", style=dashed]; +"4015 5721" -> "4016 5723" [label="[]", style=solid]; +"4016 5723" -> "4017 5724" [label="[]", style=solid]; +"4017 5724" -> "4018 5725" [label="[-1]", style=dashed]; +"4018 5725" -> "4263 6519" [label="[]", style=dashed]; +"4019 5677" -> "4020 5679" [label="[]", style=solid]; +"4020 5679" -> "4021 5680" [label="[]", style=solid]; +"4021 5680" -> "4022 5681" [label="[-1]", style=dashed]; +"4022 5681" -> "4263 6519" [label="[]", style=dashed]; +"4023 5633" -> "4024 5635" [label="[]", style=solid]; +"4024 5635" -> "4025 5636" [label="[]", style=solid]; +"4025 5636" -> "4026 5637" [label="[-1]", style=dashed]; +"4026 5637" -> "4263 6519" [label="[]", style=dashed]; +"4027 5589" -> "4028 5591" [label="[]", style=solid]; +"4028 5591" -> "4029 5592" [label="[]", style=solid]; +"4029 5592" -> "4030 5593" [label="[-1]", style=dashed]; +"4030 5593" -> "4263 6519" [label="[]", style=dashed]; +"4031 5545" -> "4032 5547" [label="[]", style=solid]; +"4032 5547" -> "4033 5548" [label="[]", style=solid]; +"4033 5548" -> "4034 5549" [label="[-1]", style=dashed]; +"4034 5549" -> "4263 6519" [label="[]", style=dashed]; +"4035 5501" -> "4036 5503" [label="[]", style=solid]; +"4036 5503" -> "4037 5504" [label="[]", style=solid]; +"4037 5504" -> "4038 5505" 
[label="[-1]", style=dashed]; +"4038 5505" -> "4263 6519" [label="[]", style=dashed]; +"4039 5457" -> "4040 5459" [label="[]", style=solid]; +"4040 5459" -> "4041 5460" [label="[]", style=solid]; +"4041 5460" -> "4042 5461" [label="[-1]", style=dashed]; +"4042 5461" -> "4263 6519" [label="[]", style=dashed]; +"4043 5413" -> "4044 5415" [label="[]", style=solid]; +"4044 5415" -> "4045 5416" [label="[]", style=solid]; +"4045 5416" -> "4046 5417" [label="[-1]", style=dashed]; +"4046 5417" -> "4263 6519" [label="[]", style=dashed]; +"4047 5369" -> "4048 5371" [label="[]", style=solid]; +"4048 5371" -> "4049 5372" [label="[]", style=solid]; +"4049 5372" -> "4050 5373" [label="[-1]", style=dashed]; +"4050 5373" -> "4263 6519" [label="[]", style=dashed]; +"4051 5325" -> "4052 5327" [label="[]", style=solid]; +"4052 5327" -> "4053 5328" [label="[]", style=solid]; +"4053 5328" -> "4054 5329" [label="[-1]", style=dashed]; +"4054 5329" -> "4263 6519" [label="[]", style=dashed]; +"4055 5281" -> "4056 5283" [label="[]", style=solid]; +"4056 5283" -> "4057 5284" [label="[]", style=solid]; +"4057 5284" -> "4058 5285" [label="[-1]", style=dashed]; +"4058 5285" -> "4263 6519" [label="[]", style=dashed]; +"4059 5237" -> "4060 5239" [label="[]", style=solid]; +"4060 5239" -> "4061 5240" [label="[]", style=solid]; +"4061 5240" -> "4062 5241" [label="[-1]", style=dashed]; +"4062 5241" -> "4263 6519" [label="[]", style=dashed]; +"4063 5193" -> "4064 5195" [label="[]", style=solid]; +"4064 5195" -> "4065 5196" [label="[]", style=solid]; +"4065 5196" -> "4066 5197" [label="[-1]", style=dashed]; +"4066 5197" -> "4263 6519" [label="[]", style=dashed]; +"4067 5149" -> "4068 5151" [label="[]", style=solid]; +"4068 5151" -> "4069 5152" [label="[]", style=solid]; +"4069 5152" -> "4070 5153" [label="[-1]", style=dashed]; +"4070 5153" -> "4263 6519" [label="[]", style=dashed]; +"4071 5105" -> "4072 5107" [label="[]", style=solid]; +"4072 5107" -> "4073 5108" [label="[]", style=solid]; +"4073 5108" -> "4074 5109" [label="[-1]", style=dashed]; +"4074 5109" -> "4263 6519" [label="[]", style=dashed]; +"4075 5061" -> "4076 5063" [label="[]", style=solid]; +"4076 5063" -> "4077 5064" [label="[]", style=solid]; +"4077 5064" -> "4078 5065" [label="[-1]", style=dashed]; +"4078 5065" -> "4263 6519" [label="[]", style=dashed]; +"4079 5017" -> "4080 5019" [label="[]", style=solid]; +"4080 5019" -> "4081 5020" [label="[]", style=solid]; +"4081 5020" -> "4082 5021" [label="[-1]", style=dashed]; +"4082 5021" -> "4263 6519" [label="[]", style=dashed]; +"4083 4973" -> "4084 4975" [label="[]", style=solid]; +"4084 4975" -> "4085 4976" [label="[]", style=solid]; +"4085 4976" -> "4086 4977" [label="[-1]", style=dashed]; +"4086 4977" -> "4263 6519" [label="[]", style=dashed]; +"4087 4929" -> "4088 4931" [label="[]", style=solid]; +"4088 4931" -> "4089 4932" [label="[]", style=solid]; +"4089 4932" -> "4090 4933" [label="[-1]", style=dashed]; +"4090 4933" -> "4263 6519" [label="[]", style=dashed]; +"4091 4885" -> "4092 4887" [label="[]", style=solid]; +"4092 4887" -> "4093 4888" [label="[]", style=solid]; +"4093 4888" -> "4094 4889" [label="[-1]", style=dashed]; +"4094 4889" -> "4263 6519" [label="[]", style=dashed]; +"4095 4841" -> "4096 4843" [label="[]", style=solid]; +"4096 4843" -> "4097 4844" [label="[]", style=solid]; +"4097 4844" -> "4098 4845" [label="[-1]", style=dashed]; +"4098 4845" -> "4263 6519" [label="[]", style=dashed]; +"4099 4797" -> "4100 4799" [label="[]", style=solid]; +"4100 4799" -> "4101 4800" [label="[]", 
style=solid]; +"4101 4800" -> "4102 4801" [label="[-1]", style=dashed]; +"4102 4801" -> "4263 6519" [label="[]", style=dashed]; +"4103 4753" -> "4104 4755" [label="[]", style=solid]; +"4104 4755" -> "4105 4756" [label="[]", style=solid]; +"4105 4756" -> "4106 4757" [label="[-1]", style=dashed]; +"4106 4757" -> "4263 6519" [label="[]", style=dashed]; +"4107 4709" -> "4108 4711" [label="[]", style=solid]; +"4108 4711" -> "4109 4712" [label="[]", style=solid]; +"4109 4712" -> "4110 4713" [label="[-1]", style=dashed]; +"4110 4713" -> "4263 6519" [label="[]", style=dashed]; +"4111 4665" -> "4112 4667" [label="[]", style=solid]; +"4112 4667" -> "4113 4668" [label="[]", style=solid]; +"4113 4668" -> "4114 4669" [label="[-1]", style=dashed]; +"4114 4669" -> "4263 6519" [label="[]", style=dashed]; +"4115 4621" -> "4116 4623" [label="[]", style=solid]; +"4116 4623" -> "4117 4624" [label="[]", style=solid]; +"4117 4624" -> "4118 4625" [label="[-1]", style=dashed]; +"4118 4625" -> "4263 6519" [label="[]", style=dashed]; +"4119 4577" -> "4120 4579" [label="[]", style=solid]; +"4120 4579" -> "4121 4580" [label="[]", style=solid]; +"4121 4580" -> "4122 4581" [label="[-1]", style=dashed]; +"4122 4581" -> "4263 6519" [label="[]", style=dashed]; +"4123 4533" -> "4124 4535" [label="[]", style=solid]; +"4124 4535" -> "4125 4536" [label="[]", style=solid]; +"4125 4536" -> "4126 4537" [label="[-1]", style=dashed]; +"4126 4537" -> "4263 6519" [label="[]", style=dashed]; +"4127 4489" -> "4128 4491" [label="[]", style=solid]; +"4128 4491" -> "4129 4492" [label="[]", style=solid]; +"4129 4492" -> "4130 4493" [label="[-1]", style=dashed]; +"4130 4493" -> "4263 6519" [label="[]", style=dashed]; +"4131 4445" -> "4132 4447" [label="[]", style=solid]; +"4132 4447" -> "4133 4448" [label="[]", style=solid]; +"4133 4448" -> "4134 4449" [label="[-1]", style=dashed]; +"4134 4449" -> "4263 6519" [label="[]", style=dashed]; +"4135 4401" -> "4136 4403" [label="[]", style=solid]; +"4136 4403" -> "4137 4404" [label="[]", style=solid]; +"4137 4404" -> "4138 4405" [label="[-1]", style=dashed]; +"4138 4405" -> "4263 6519" [label="[]", style=dashed]; +"4139 4357" -> "4140 4359" [label="[]", style=solid]; +"4140 4359" -> "4141 4360" [label="[]", style=solid]; +"4141 4360" -> "4142 4361" [label="[-1]", style=dashed]; +"4142 4361" -> "4263 6519" [label="[]", style=dashed]; +"4143 4313" -> "4144 4315" [label="[]", style=solid]; +"4144 4315" -> "4145 4316" [label="[]", style=solid]; +"4145 4316" -> "4146 4317" [label="[-1]", style=dashed]; +"4146 4317" -> "4263 6519" [label="[]", style=dashed]; +"4147 4269" -> "4148 4271" [label="[]", style=solid]; +"4148 4271" -> "4149 4272" [label="[]", style=solid]; +"4149 4272" -> "4150 4273" [label="[-1]", style=dashed]; +"4150 4273" -> "4263 6519" [label="[]", style=dashed]; +"4151 4225" -> "4152 4227" [label="[]", style=solid]; +"4152 4227" -> "4153 4228" [label="[]", style=solid]; +"4153 4228" -> "4154 4229" [label="[-1]", style=dashed]; +"4154 4229" -> "4263 6519" [label="[]", style=dashed]; +"4155 4181" -> "4156 4183" [label="[]", style=solid]; +"4156 4183" -> "4157 4184" [label="[]", style=solid]; +"4157 4184" -> "4158 4185" [label="[-1]", style=dashed]; +"4158 4185" -> "4263 6519" [label="[]", style=dashed]; +"4159 4137" -> "4160 4139" [label="[]", style=solid]; +"4160 4139" -> "4161 4140" [label="[]", style=solid]; +"4161 4140" -> "4162 4141" [label="[-1]", style=dashed]; +"4162 4141" -> "4263 6519" [label="[]", style=dashed]; +"4163 4093" -> "4164 4095" [label="[]", style=solid]; +"4164 
4095" -> "4165 4096" [label="[]", style=solid]; +"4165 4096" -> "4166 4097" [label="[-1]", style=dashed]; +"4166 4097" -> "4263 6519" [label="[]", style=dashed]; +"4167 4049" -> "4168 4051" [label="[]", style=solid]; +"4168 4051" -> "4169 4052" [label="[]", style=solid]; +"4169 4052" -> "4170 4053" [label="[-1]", style=dashed]; +"4170 4053" -> "4263 6519" [label="[]", style=dashed]; +"4171 4005" -> "4172 4007" [label="[]", style=solid]; +"4172 4007" -> "4173 4008" [label="[]", style=solid]; +"4173 4008" -> "4174 4009" [label="[-1]", style=dashed]; +"4174 4009" -> "4263 6519" [label="[]", style=dashed]; +"4175 3961" -> "4176 3963" [label="[]", style=solid]; +"4176 3963" -> "4177 3964" [label="[]", style=solid]; +"4177 3964" -> "4178 3965" [label="[-1]", style=dashed]; +"4178 3965" -> "4263 6519" [label="[]", style=dashed]; +"4179 3917" -> "4180 3919" [label="[]", style=solid]; +"4180 3919" -> "4181 3920" [label="[]", style=solid]; +"4181 3920" -> "4182 3921" [label="[-1]", style=dashed]; +"4182 3921" -> "4263 6519" [label="[]", style=dashed]; +"4183 3873" -> "4184 3875" [label="[]", style=solid]; +"4184 3875" -> "4185 3876" [label="[]", style=solid]; +"4185 3876" -> "4186 3877" [label="[-1]", style=dashed]; +"4186 3877" -> "4263 6519" [label="[]", style=dashed]; +"4187 3829" -> "4188 3831" [label="[]", style=solid]; +"4188 3831" -> "4189 3832" [label="[]", style=solid]; +"4189 3832" -> "4190 3833" [label="[-1]", style=dashed]; +"4190 3833" -> "4263 6519" [label="[]", style=dashed]; +"4191 3785" -> "4192 3787" [label="[]", style=solid]; +"4192 3787" -> "4193 3788" [label="[]", style=solid]; +"4193 3788" -> "4194 3789" [label="[-1]", style=dashed]; +"4194 3789" -> "4263 6519" [label="[]", style=dashed]; +"4195 3741" -> "4196 3743" [label="[]", style=solid]; +"4196 3743" -> "4197 3744" [label="[]", style=solid]; +"4197 3744" -> "4198 3745" [label="[-1]", style=dashed]; +"4198 3745" -> "4263 6519" [label="[]", style=dashed]; +"4199 3697" -> "4200 3699" [label="[]", style=solid]; +"4200 3699" -> "4201 3700" [label="[]", style=solid]; +"4201 3700" -> "4202 3701" [label="[-1]", style=dashed]; +"4202 3701" -> "4263 6519" [label="[]", style=dashed]; +"4203 3653" -> "4204 3655" [label="[]", style=solid]; +"4204 3655" -> "4205 3656" [label="[]", style=solid]; +"4205 3656" -> "4206 3657" [label="[-1]", style=dashed]; +"4206 3657" -> "4263 6519" [label="[]", style=dashed]; +"4207 3609" -> "4208 3611" [label="[]", style=solid]; +"4208 3611" -> "4209 3612" [label="[]", style=solid]; +"4209 3612" -> "4210 3613" [label="[-1]", style=dashed]; +"4210 3613" -> "4263 6519" [label="[]", style=dashed]; +"4211 3565" -> "4212 3567" [label="[]", style=solid]; +"4212 3567" -> "4213 3568" [label="[]", style=solid]; +"4213 3568" -> "4214 3569" [label="[-1]", style=dashed]; +"4214 3569" -> "4263 6519" [label="[]", style=dashed]; +"4215 3521" -> "4216 3523" [label="[]", style=solid]; +"4216 3523" -> "4217 3524" [label="[]", style=solid]; +"4217 3524" -> "4218 3525" [label="[-1]", style=dashed]; +"4218 3525" -> "4263 6519" [label="[]", style=dashed]; +"4219 3477" -> "4220 3479" [label="[]", style=solid]; +"4220 3479" -> "4221 3480" [label="[]", style=solid]; +"4221 3480" -> "4222 3481" [label="[-1]", style=dashed]; +"4222 3481" -> "4263 6519" [label="[]", style=dashed]; +"4223 3433" -> "4224 3435" [label="[]", style=solid]; +"4224 3435" -> "4225 3436" [label="[]", style=solid]; +"4225 3436" -> "4226 3437" [label="[-1]", style=dashed]; +"4226 3437" -> "4263 6519" [label="[]", style=dashed]; +"4227 3389" -> "4228 3391" 
[label="[]", style=solid]; +"4228 3391" -> "4229 3392" [label="[]", style=solid]; +"4229 3392" -> "4230 3393" [label="[-1]", style=dashed]; +"4230 3393" -> "4263 6519" [label="[]", style=dashed]; +"4231 3345" -> "4232 3347" [label="[]", style=solid]; +"4232 3347" -> "4233 3348" [label="[]", style=solid]; +"4233 3348" -> "4234 3349" [label="[-1]", style=dashed]; +"4234 3349" -> "4263 6519" [label="[]", style=dashed]; +"4235 3301" -> "4236 3303" [label="[]", style=solid]; +"4236 3303" -> "4237 3304" [label="[]", style=solid]; +"4237 3304" -> "4238 3305" [label="[-1]", style=dashed]; +"4238 3305" -> "4263 6519" [label="[]", style=dashed]; +"4239 3257" -> "4240 3259" [label="[]", style=solid]; +"4240 3259" -> "4241 3260" [label="[]", style=solid]; +"4241 3260" -> "4242 3261" [label="[-1]", style=dashed]; +"4242 3261" -> "4263 6519" [label="[]", style=dashed]; +"4243 3213" -> "4244 3215" [label="[]", style=solid]; +"4244 3215" -> "4245 3216" [label="[]", style=solid]; +"4245 3216" -> "4246 3217" [label="[-1]", style=dashed]; +"4246 3217" -> "4263 6519" [label="[]", style=dashed]; +"4247 3169" -> "4248 3171" [label="[]", style=solid]; +"4248 3171" -> "4249 3172" [label="[]", style=solid]; +"4249 3172" -> "4250 3173" [label="[-1]", style=dashed]; +"4250 3173" -> "4263 6519" [label="[]", style=dashed]; +"4251 3125" -> "4252 3127" [label="[]", style=solid]; +"4252 3127" -> "4253 3128" [label="[]", style=solid]; +"4253 3128" -> "4254 3129" [label="[-1]", style=dashed]; +"4254 3129" -> "4263 6519" [label="[]", style=dashed]; +"4255 3081" -> "4256 3083" [label="[]", style=solid]; +"4256 3083" -> "4257 3084" [label="[]", style=solid]; +"4257 3084" -> "4258 3085" [label="[-1]", style=dashed]; +"4258 3085" -> "4263 6519" [label="[]", style=dashed]; +"4259 3037" -> "4260 3039" [label="[]", style=solid]; +"4260 3039" -> "4261 3040" [label="[]", style=solid]; +"4261 3040" -> "4262 3041" [label="[-1]", style=dashed]; +"4262 3041" -> "4263 6519" [label="[]", style=dashed]; +"4263 6519" -> "4264 6532" [label="[]", style=dashed]; +"4264 6532" -> "4265 6820" [label="[-1]", style=dashed]; +"4264 6532" -> "4282 nncf_model_output_1" [label="[-1]", style=dashed]; +"4265 6820" -> "4266 6847" [label="[-1]", style=dashed]; +"4266 6847" -> "4276 6848" [label="[-1]", style=dashed]; +"4267 6835" -> "4268 6836" [label="[4]", style=dashed]; +"4268 6836" -> "4269 6840" [label="[]", style=dashed]; +"4269 6840" -> "4274 6841" [label="[1]", style=dashed]; +"4270 6832" -> "4271 6833" [label="[4]", style=dashed]; +"4271 6833" -> "4272 6839" [label="[]", style=dashed]; +"4272 6839" -> "4274 6841" [label="[1]", style=dashed]; +"4273 6838" -> "4274 6841" [label="[1]", style=dashed]; +"4274 6841" -> "4275 6842" [label="[3]", style=dashed]; +"4275 6842" -> "4276 6848" [label="[]", style=solid]; +"4276 6848" -> "4277 6849" [label="[]", style=solid]; +"4277 6849" -> "4284 nncf_model_output_3" [label="[-1, 1, 28, 28]", style=solid]; +"4278 6533" -> "4279 6534" [label="[]", style=dashed]; +"4279 6534" -> "4283 nncf_model_output_2" [label="[-1]", style=solid]; +"4280 nncf_model_input_0" -> "2 QuantizeLinear_image_1" [label="[3, -1, -1]", style=solid]; } diff --git a/tests/onnx/data/reference_graphs/quantization/bertsquad-12.dot b/tests/onnx/data/reference_graphs/quantization/bertsquad-12.dot new file mode 100644 index 00000000000..246765a6663 --- /dev/null +++ b/tests/onnx/data/reference_graphs/quantization/bertsquad-12.dot @@ -0,0 +1,3278 @@ +strict digraph { +"0 unique_ids_graph_outputs_Identity__10" [id=0, type=Identity]; +"1 
bert/encoder/ones/packed_Unsqueeze__20" [id=1, type=Unsqueeze]; +"2 bert/encoder/ones/packed_Unsqueeze__19" [id=2, type=Unsqueeze]; +"3 bert/encoder/layer_9/attention/self/Reshape_3/shape_Unsqueeze__83" [id=3, type=Unsqueeze]; +"4 bert/encoder/layer_9/attention/self/Reshape_2/shape_Unsqueeze__88" [id=4, type=Unsqueeze]; +"5 bert/encoder/layer_9/attention/self/Reshape_2/shape_Unsqueeze__87" [id=5, type=Unsqueeze]; +"6 bert/encoder/layer_9/attention/self/Reshape_2/shape_Unsqueeze__86" [id=6, type=Unsqueeze]; +"7 bert/encoder/layer_9/attention/self/Reshape_1/shape_Unsqueeze__93" [id=7, type=Unsqueeze]; +"8 bert/encoder/layer_9/attention/self/Reshape_1/shape_Unsqueeze__92" [id=8, type=Unsqueeze]; +"9 bert/encoder/layer_9/attention/self/Reshape_1/shape_Unsqueeze__91" [id=9, type=Unsqueeze]; +"10 bert/encoder/layer_9/attention/self/Reshape/shape_Unsqueeze__98" [id=10, type=Unsqueeze]; +"11 bert/encoder/layer_9/attention/self/Reshape/shape_Unsqueeze__97" [id=11, type=Unsqueeze]; +"12 bert/encoder/layer_9/attention/self/Reshape/shape_Unsqueeze__96" [id=12, type=Unsqueeze]; +"13 bert/encoder/layer_8/attention/self/Reshape_3/shape_Unsqueeze__101" [id=13, type=Unsqueeze]; +"14 bert/encoder/layer_8/attention/self/Reshape_2/shape_Unsqueeze__106" [id=14, type=Unsqueeze]; +"15 bert/encoder/layer_8/attention/self/Reshape_2/shape_Unsqueeze__105" [id=15, type=Unsqueeze]; +"16 bert/encoder/layer_8/attention/self/Reshape_2/shape_Unsqueeze__104" [id=16, type=Unsqueeze]; +"17 bert/encoder/layer_8/attention/self/Reshape_1/shape_Unsqueeze__111" [id=17, type=Unsqueeze]; +"18 bert/encoder/layer_8/attention/self/Reshape_1/shape_Unsqueeze__110" [id=18, type=Unsqueeze]; +"19 bert/encoder/layer_8/attention/self/Reshape_1/shape_Unsqueeze__109" [id=19, type=Unsqueeze]; +"20 bert/encoder/layer_8/attention/self/Reshape/shape_Unsqueeze__116" [id=20, type=Unsqueeze]; +"21 bert/encoder/layer_8/attention/self/Reshape/shape_Unsqueeze__115" [id=21, type=Unsqueeze]; +"22 bert/encoder/layer_8/attention/self/Reshape/shape_Unsqueeze__114" [id=22, type=Unsqueeze]; +"23 bert/encoder/layer_7/attention/self/Reshape_3/shape_Unsqueeze__119" [id=23, type=Unsqueeze]; +"24 bert/encoder/layer_7/attention/self/Reshape_2/shape_Unsqueeze__124" [id=24, type=Unsqueeze]; +"25 bert/encoder/layer_7/attention/self/Reshape_2/shape_Unsqueeze__123" [id=25, type=Unsqueeze]; +"26 bert/encoder/layer_7/attention/self/Reshape_2/shape_Unsqueeze__122" [id=26, type=Unsqueeze]; +"27 bert/encoder/layer_7/attention/self/Reshape_1/shape_Unsqueeze__129" [id=27, type=Unsqueeze]; +"28 bert/encoder/layer_7/attention/self/Reshape_1/shape_Unsqueeze__128" [id=28, type=Unsqueeze]; +"29 bert/encoder/layer_7/attention/self/Reshape_1/shape_Unsqueeze__127" [id=29, type=Unsqueeze]; +"30 bert/encoder/layer_7/attention/self/Reshape/shape_Unsqueeze__134" [id=30, type=Unsqueeze]; +"31 bert/encoder/layer_7/attention/self/Reshape/shape_Unsqueeze__133" [id=31, type=Unsqueeze]; +"32 bert/encoder/layer_7/attention/self/Reshape/shape_Unsqueeze__132" [id=32, type=Unsqueeze]; +"33 bert/encoder/layer_6/attention/self/Reshape_3/shape_Unsqueeze__137" [id=33, type=Unsqueeze]; +"34 bert/encoder/layer_6/attention/self/Reshape_2/shape_Unsqueeze__142" [id=34, type=Unsqueeze]; +"35 bert/encoder/layer_6/attention/self/Reshape_2/shape_Unsqueeze__141" [id=35, type=Unsqueeze]; +"36 bert/encoder/layer_6/attention/self/Reshape_2/shape_Unsqueeze__140" [id=36, type=Unsqueeze]; +"37 bert/encoder/layer_6/attention/self/Reshape_1/shape_Unsqueeze__147" [id=37, type=Unsqueeze]; +"38 
bert/encoder/layer_6/attention/self/Reshape_1/shape_Unsqueeze__146" [id=38, type=Unsqueeze]; +"39 bert/encoder/layer_6/attention/self/Reshape_1/shape_Unsqueeze__145" [id=39, type=Unsqueeze]; +"40 bert/encoder/layer_6/attention/self/Reshape/shape_Unsqueeze__152" [id=40, type=Unsqueeze]; +"41 bert/encoder/layer_6/attention/self/Reshape/shape_Unsqueeze__151" [id=41, type=Unsqueeze]; +"42 bert/encoder/layer_6/attention/self/Reshape/shape_Unsqueeze__150" [id=42, type=Unsqueeze]; +"43 bert/encoder/layer_5/attention/self/Reshape_3/shape_Unsqueeze__155" [id=43, type=Unsqueeze]; +"44 bert/encoder/layer_5/attention/self/Reshape_2/shape_Unsqueeze__160" [id=44, type=Unsqueeze]; +"45 bert/encoder/layer_5/attention/self/Reshape_2/shape_Unsqueeze__159" [id=45, type=Unsqueeze]; +"46 bert/encoder/layer_5/attention/self/Reshape_2/shape_Unsqueeze__158" [id=46, type=Unsqueeze]; +"47 bert/encoder/layer_5/attention/self/Reshape_1/shape_Unsqueeze__165" [id=47, type=Unsqueeze]; +"48 bert/encoder/layer_5/attention/self/Reshape_1/shape_Unsqueeze__164" [id=48, type=Unsqueeze]; +"49 bert/encoder/layer_5/attention/self/Reshape_1/shape_Unsqueeze__163" [id=49, type=Unsqueeze]; +"50 bert/encoder/layer_5/attention/self/Reshape/shape_Unsqueeze__170" [id=50, type=Unsqueeze]; +"51 bert/encoder/layer_5/attention/self/Reshape/shape_Unsqueeze__169" [id=51, type=Unsqueeze]; +"52 bert/encoder/layer_5/attention/self/Reshape/shape_Unsqueeze__168" [id=52, type=Unsqueeze]; +"53 bert/encoder/layer_4/attention/self/Reshape_3/shape_Unsqueeze__173" [id=53, type=Unsqueeze]; +"54 bert/encoder/layer_4/attention/self/Reshape_2/shape_Unsqueeze__178" [id=54, type=Unsqueeze]; +"55 bert/encoder/layer_4/attention/self/Reshape_2/shape_Unsqueeze__177" [id=55, type=Unsqueeze]; +"56 bert/encoder/layer_4/attention/self/Reshape_2/shape_Unsqueeze__176" [id=56, type=Unsqueeze]; +"57 bert/encoder/layer_4/attention/self/Reshape_1/shape_Unsqueeze__183" [id=57, type=Unsqueeze]; +"58 bert/encoder/layer_4/attention/self/Reshape_1/shape_Unsqueeze__182" [id=58, type=Unsqueeze]; +"59 bert/encoder/layer_4/attention/self/Reshape_1/shape_Unsqueeze__181" [id=59, type=Unsqueeze]; +"60 bert/encoder/layer_4/attention/self/Reshape/shape_Unsqueeze__188" [id=60, type=Unsqueeze]; +"61 bert/encoder/layer_4/attention/self/Reshape/shape_Unsqueeze__187" [id=61, type=Unsqueeze]; +"62 bert/encoder/layer_4/attention/self/Reshape/shape_Unsqueeze__186" [id=62, type=Unsqueeze]; +"63 bert/encoder/layer_3/attention/self/Reshape_3/shape_Unsqueeze__191" [id=63, type=Unsqueeze]; +"64 bert/encoder/layer_3/attention/self/Reshape_2/shape_Unsqueeze__196" [id=64, type=Unsqueeze]; +"65 bert/encoder/layer_3/attention/self/Reshape_2/shape_Unsqueeze__195" [id=65, type=Unsqueeze]; +"66 bert/encoder/layer_3/attention/self/Reshape_2/shape_Unsqueeze__194" [id=66, type=Unsqueeze]; +"67 bert/encoder/layer_3/attention/self/Reshape_1/shape_Unsqueeze__201" [id=67, type=Unsqueeze]; +"68 bert/encoder/layer_3/attention/self/Reshape_1/shape_Unsqueeze__200" [id=68, type=Unsqueeze]; +"69 bert/encoder/layer_3/attention/self/Reshape_1/shape_Unsqueeze__199" [id=69, type=Unsqueeze]; +"70 bert/encoder/layer_3/attention/self/Reshape/shape_Unsqueeze__206" [id=70, type=Unsqueeze]; +"71 bert/encoder/layer_3/attention/self/Reshape/shape_Unsqueeze__205" [id=71, type=Unsqueeze]; +"72 bert/encoder/layer_3/attention/self/Reshape/shape_Unsqueeze__204" [id=72, type=Unsqueeze]; +"73 bert/encoder/layer_2/attention/self/Reshape_3/shape_Unsqueeze__209" [id=73, type=Unsqueeze]; +"74 
bert/encoder/layer_2/attention/self/Reshape_2/shape_Unsqueeze__214" [id=74, type=Unsqueeze]; +"75 bert/encoder/layer_2/attention/self/Reshape_2/shape_Unsqueeze__213" [id=75, type=Unsqueeze]; +"76 bert/encoder/layer_2/attention/self/Reshape_2/shape_Unsqueeze__212" [id=76, type=Unsqueeze]; +"77 bert/encoder/layer_2/attention/self/Reshape_1/shape_Unsqueeze__219" [id=77, type=Unsqueeze]; +"78 bert/encoder/layer_2/attention/self/Reshape_1/shape_Unsqueeze__218" [id=78, type=Unsqueeze]; +"79 bert/encoder/layer_2/attention/self/Reshape_1/shape_Unsqueeze__217" [id=79, type=Unsqueeze]; +"80 bert/encoder/layer_2/attention/self/Reshape/shape_Unsqueeze__224" [id=80, type=Unsqueeze]; +"81 bert/encoder/layer_2/attention/self/Reshape/shape_Unsqueeze__223" [id=81, type=Unsqueeze]; +"82 bert/encoder/layer_2/attention/self/Reshape/shape_Unsqueeze__222" [id=82, type=Unsqueeze]; +"83 bert/encoder/layer_11/attention/self/Reshape_3/shape_Unsqueeze__227" [id=83, type=Unsqueeze]; +"84 bert/encoder/layer_11/attention/self/Reshape_2/shape_Unsqueeze__232" [id=84, type=Unsqueeze]; +"85 bert/encoder/layer_11/attention/self/Reshape_2/shape_Unsqueeze__231" [id=85, type=Unsqueeze]; +"86 bert/encoder/layer_11/attention/self/Reshape_2/shape_Unsqueeze__230" [id=86, type=Unsqueeze]; +"87 bert/encoder/layer_11/attention/self/Reshape_1/shape_Unsqueeze__237" [id=87, type=Unsqueeze]; +"88 bert/encoder/layer_11/attention/self/Reshape_1/shape_Unsqueeze__236" [id=88, type=Unsqueeze]; +"89 bert/encoder/layer_11/attention/self/Reshape_1/shape_Unsqueeze__235" [id=89, type=Unsqueeze]; +"90 bert/encoder/layer_11/attention/self/Reshape/shape_Unsqueeze__242" [id=90, type=Unsqueeze]; +"91 bert/encoder/layer_11/attention/self/Reshape/shape_Unsqueeze__241" [id=91, type=Unsqueeze]; +"92 bert/encoder/layer_11/attention/self/Reshape/shape_Unsqueeze__240" [id=92, type=Unsqueeze]; +"93 bert/encoder/layer_10/attention/self/Reshape_3/shape_Unsqueeze__245" [id=93, type=Unsqueeze]; +"94 bert/encoder/layer_10/attention/self/Reshape_2/shape_Unsqueeze__250" [id=94, type=Unsqueeze]; +"95 bert/encoder/layer_10/attention/self/Reshape_2/shape_Unsqueeze__249" [id=95, type=Unsqueeze]; +"96 bert/encoder/layer_10/attention/self/Reshape_2/shape_Unsqueeze__248" [id=96, type=Unsqueeze]; +"97 bert/encoder/layer_10/attention/self/Reshape_1/shape_Unsqueeze__255" [id=97, type=Unsqueeze]; +"98 bert/encoder/layer_10/attention/self/Reshape_1/shape_Unsqueeze__254" [id=98, type=Unsqueeze]; +"99 bert/encoder/layer_10/attention/self/Reshape_1/shape_Unsqueeze__253" [id=99, type=Unsqueeze]; +"100 bert/encoder/layer_10/attention/self/Reshape/shape_Unsqueeze__260" [id=100, type=Unsqueeze]; +"101 bert/encoder/layer_10/attention/self/Reshape/shape_Unsqueeze__259" [id=101, type=Unsqueeze]; +"102 bert/encoder/layer_10/attention/self/Reshape/shape_Unsqueeze__258" [id=102, type=Unsqueeze]; +"103 bert/encoder/layer_1/attention/self/Reshape_3/shape_Unsqueeze__263" [id=103, type=Unsqueeze]; +"104 bert/encoder/layer_1/attention/self/Reshape_2/shape_Unsqueeze__268" [id=104, type=Unsqueeze]; +"105 bert/encoder/layer_1/attention/self/Reshape_2/shape_Unsqueeze__267" [id=105, type=Unsqueeze]; +"106 bert/encoder/layer_1/attention/self/Reshape_2/shape_Unsqueeze__266" [id=106, type=Unsqueeze]; +"107 bert/encoder/layer_1/attention/self/Reshape_1/shape_Unsqueeze__273" [id=107, type=Unsqueeze]; +"108 bert/encoder/layer_1/attention/self/Reshape_1/shape_Unsqueeze__272" [id=108, type=Unsqueeze]; +"109 bert/encoder/layer_1/attention/self/Reshape_1/shape_Unsqueeze__271" [id=109, type=Unsqueeze]; +"110 
bert/encoder/layer_1/attention/self/Reshape/shape_Unsqueeze__278" [id=110, type=Unsqueeze]; +"111 bert/encoder/layer_1/attention/self/Reshape/shape_Unsqueeze__277" [id=111, type=Unsqueeze]; +"112 bert/encoder/layer_1/attention/self/Reshape/shape_Unsqueeze__276" [id=112, type=Unsqueeze]; +"113 bert/encoder/layer_0/attention/self/Reshape_3/shape_Unsqueeze__281" [id=113, type=Unsqueeze]; +"114 bert/encoder/layer_0/attention/self/Reshape_2/shape_Unsqueeze__286" [id=114, type=Unsqueeze]; +"115 bert/encoder/layer_0/attention/self/Reshape_2/shape_Unsqueeze__285" [id=115, type=Unsqueeze]; +"116 bert/encoder/layer_0/attention/self/Reshape_2/shape_Unsqueeze__284" [id=116, type=Unsqueeze]; +"117 bert/encoder/layer_0/attention/self/Reshape_1/shape_Unsqueeze__291" [id=117, type=Unsqueeze]; +"118 bert/encoder/layer_0/attention/self/Reshape_1/shape_Unsqueeze__290" [id=118, type=Unsqueeze]; +"119 bert/encoder/layer_0/attention/self/Reshape_1/shape_Unsqueeze__289" [id=119, type=Unsqueeze]; +"120 bert/encoder/layer_0/attention/self/Reshape/shape_Unsqueeze__296" [id=120, type=Unsqueeze]; +"121 bert/encoder/layer_0/attention/self/Reshape/shape_Unsqueeze__295" [id=121, type=Unsqueeze]; +"122 bert/encoder/layer_0/attention/self/Reshape/shape_Unsqueeze__294" [id=122, type=Unsqueeze]; +"123 bert/encoder/Shape" [id=123, type=Shape]; +"124 bert/encoder/Shape__12" [id=124, type=Cast]; +"125 bert/encoder/strided_slice" [id=125, type=Slice]; +"126 bert/encoder/strided_slice__16" [id=126, type=Squeeze]; +"127 bert/encoder/strided_slice__17" [id=127, type=Cast]; +"128 bert/encoder/ones/packed_Unsqueeze__18" [id=128, type=Unsqueeze]; +"129 bert/encoder/ones/packed_Concat__21" [id=129, type=Concat]; +"130 bert/encoder/ones__22" [id=130, type=Cast]; +"131 bert/encoder/ones" [id=131, type=ConstantOfShape]; +"132 bert/encoder/Reshape_13/shape_Unsqueeze__300" [id=132, type=Unsqueeze]; +"133 bert/encoder/Reshape_13/shape_Unsqueeze__299" [id=133, type=Unsqueeze]; +"134 bert/encoder/Reshape_1__302" [id=134, type=Cast]; +"135 bert/encoder/Reshape/shape_Unsqueeze__23" [id=135, type=Unsqueeze]; +"136 bert/encoder/Reshape/shape_Unsqueeze__25" [id=136, type=Unsqueeze]; +"137 bert/encoder/Reshape/shape_Unsqueeze__24" [id=137, type=Unsqueeze]; +"138 bert/encoder/Reshape/shape_Concat__26" [id=138, type=Concat]; +"139 bert/encoder/Reshape__27" [id=139, type=Cast]; +"140 bert/encoder/Reshape" [id=140, type=Reshape]; +"141 bert/encoder/Cast" [id=141, type=Cast]; +"142 bert/encoder/mul" [id=142, type=Mul]; +"143 bert/encoder/layer_9/attention/self/ExpandDims" [id=143, type=Reshape]; +"144 bert/encoder/layer_9/attention/self/sub" [id=144, type=Sub]; +"145 bert/encoder/layer_9/attention/self/mul_1" [id=145, type=Mul]; +"146 bert/encoder/layer_8/attention/self/ExpandDims" [id=146, type=Reshape]; +"147 bert/encoder/layer_8/attention/self/sub" [id=147, type=Sub]; +"148 bert/encoder/layer_8/attention/self/mul_1" [id=148, type=Mul]; +"149 bert/encoder/layer_7/attention/self/ExpandDims" [id=149, type=Reshape]; +"150 bert/encoder/layer_7/attention/self/sub" [id=150, type=Sub]; +"151 bert/encoder/layer_7/attention/self/mul_1" [id=151, type=Mul]; +"152 bert/encoder/layer_6/attention/self/ExpandDims" [id=152, type=Reshape]; +"153 bert/encoder/layer_6/attention/self/sub" [id=153, type=Sub]; +"154 bert/encoder/layer_6/attention/self/mul_1" [id=154, type=Mul]; +"155 bert/encoder/layer_5/attention/self/ExpandDims" [id=155, type=Reshape]; +"156 bert/encoder/layer_5/attention/self/sub" [id=156, type=Sub]; +"157 bert/encoder/layer_5/attention/self/mul_1" 
[id=157, type=Mul]; +"158 bert/encoder/layer_4/attention/self/ExpandDims" [id=158, type=Reshape]; +"159 bert/encoder/layer_4/attention/self/sub" [id=159, type=Sub]; +"160 bert/encoder/layer_4/attention/self/mul_1" [id=160, type=Mul]; +"161 bert/encoder/layer_3/attention/self/ExpandDims" [id=161, type=Reshape]; +"162 bert/encoder/layer_3/attention/self/sub" [id=162, type=Sub]; +"163 bert/encoder/layer_3/attention/self/mul_1" [id=163, type=Mul]; +"164 bert/encoder/layer_2/attention/self/ExpandDims" [id=164, type=Reshape]; +"165 bert/encoder/layer_2/attention/self/sub" [id=165, type=Sub]; +"166 bert/encoder/layer_2/attention/self/mul_1" [id=166, type=Mul]; +"167 bert/encoder/layer_11/attention/self/ExpandDims" [id=167, type=Reshape]; +"168 bert/encoder/layer_11/attention/self/sub" [id=168, type=Sub]; +"169 bert/encoder/layer_11/attention/self/mul_1" [id=169, type=Mul]; +"170 bert/encoder/layer_10/attention/self/ExpandDims" [id=170, type=Reshape]; +"171 bert/encoder/layer_10/attention/self/sub" [id=171, type=Sub]; +"172 bert/encoder/layer_10/attention/self/mul_1" [id=172, type=Mul]; +"173 bert/encoder/layer_1/attention/self/ExpandDims" [id=173, type=Reshape]; +"174 bert/encoder/layer_1/attention/self/sub" [id=174, type=Sub]; +"175 bert/encoder/layer_1/attention/self/mul_1" [id=175, type=Mul]; +"176 bert/encoder/layer_0/attention/self/ExpandDims" [id=176, type=Reshape]; +"177 bert/encoder/layer_0/attention/self/sub" [id=177, type=Sub]; +"178 bert/encoder/layer_0/attention/self/mul_1" [id=178, type=Mul]; +"179 bert/embeddings/Slice" [id=179, type=Slice]; +"180 bert/embeddings/Reshape_4__42" [id=180, type=Cast]; +"181 bert/embeddings/Reshape_4" [id=181, type=Reshape]; +"182 bert/embeddings/Reshape_3/shape_Unsqueeze__69" [id=182, type=Unsqueeze]; +"183 bert/embeddings/Reshape_3/shape_Unsqueeze__68" [id=183, type=Unsqueeze]; +"184 bert/embeddings/Reshape_2__43" [id=184, type=Cast]; +"185 bert/embeddings/Reshape_2" [id=185, type=Reshape]; +"186 bert/embeddings/Reshape_1/shape_Unsqueeze__57" [id=186, type=Unsqueeze]; +"187 bert/embeddings/Reshape_1/shape_Unsqueeze__56" [id=187, type=Unsqueeze]; +"188 bert/embeddings/Reshape__59" [id=188, type=Cast]; +"189 bert/embeddings/ExpandDims" [id=189, type=Reshape]; +"190 bert/embeddings/Shape" [id=190, type=Shape]; +"191 bert/embeddings/Shape__49" [id=191, type=Cast]; +"192 bert/embeddings/strided_slice" [id=192, type=Slice]; +"193 bert/embeddings/strided_slice__53" [id=193, type=Squeeze]; +"194 bert/embeddings/strided_slice__54" [id=194, type=Cast]; +"195 bert/embeddings/Reshape_1/shape_Unsqueeze__55" [id=195, type=Unsqueeze]; +"196 bert/embeddings/Reshape_1/shape_Concat__58" [id=196, type=Concat]; +"197 bert/embeddings/Reshape_1__60" [id=197, type=Cast]; +"198 bert/embeddings/Reshape" [id=198, type=Reshape]; +"199 QuantizeLinear_bert/embeddings/word_embeddings^0_1" [id=199, label="199 QuantizeLinear_bert/embeddings/word_embeddings:0_1", type=QuantizeLinear]; +"200 DequantizeLinear_bert/embeddings/word_embeddings^0_1" [id=200, label="200 DequantizeLinear_bert/embeddings/word_embeddings:0_1", type=DequantizeLinear]; +"201 bert/embeddings/GatherV2" [id=201, type=Gather]; +"202 bert/embeddings/Reshape_1" [id=202, type=Reshape]; +"203 bert/embeddings/Shape_1" [id=203, type=Shape]; +"204 bert/embeddings/Shape_1__61" [id=204, type=Cast]; +"205 bert/embeddings/strided_slice_1" [id=205, type=Slice]; +"206 bert/embeddings/strided_slice_1__65" [id=206, type=Squeeze]; +"207 bert/embeddings/strided_slice_1__66" [id=207, type=Cast]; +"208 
bert/embeddings/Reshape_3/shape_Unsqueeze__67" [id=208, type=Unsqueeze]; +"209 bert/embeddings/Reshape_3/shape_Concat__70" [id=209, type=Concat]; +"210 bert/embeddings/Reshape_3__71" [id=210, type=Cast]; +"211 Unsqueeze__46" [id=211, type=Unsqueeze]; +"212 Unsqueeze__45" [id=212, type=Unsqueeze]; +"213 Unsqueeze__44" [id=213, type=Unsqueeze]; +"214 Reshape_1/shape_Unsqueeze__480" [id=214, type=Unsqueeze]; +"215 Reshape_1/shape_Unsqueeze__479" [id=215, type=Unsqueeze]; +"216 Reshape/shape_Unsqueeze__483" [id=216, type=Unsqueeze]; +"217 MatMul__486" [id=217, type=Transpose]; +"218 Concat__47" [id=218, type=Concat]; +"219 bert/embeddings/one_hot" [id=219, type=OneHot]; +"220 QuantizeLinear_bert/embeddings/one_hot^0_1" [id=220, label="220 QuantizeLinear_bert/embeddings/one_hot:0_1", type=QuantizeLinear]; +"221 DequantizeLinear_bert/embeddings/one_hot^0_1" [id=221, label="221 DequantizeLinear_bert/embeddings/one_hot:0_1", type=DequantizeLinear]; +"222 QuantizeLinear_bert/embeddings/token_type_embeddings^0_1" [id=222, label="222 QuantizeLinear_bert/embeddings/token_type_embeddings:0_1", type=QuantizeLinear]; +"223 DequantizeLinear_bert/embeddings/token_type_embeddings^0_1" [id=223, label="223 DequantizeLinear_bert/embeddings/token_type_embeddings:0_1", type=DequantizeLinear]; +"224 bert/embeddings/MatMul" [id=224, type=MatMul]; +"225 bert/embeddings/Reshape_3" [id=225, type=Reshape]; +"226 bert/embeddings/add" [id=226, type=Add]; +"227 bert/embeddings/add_1" [id=227, type=Add]; +"228 bert/embeddings/LayerNorm/moments/mean" [id=228, type=ReduceMean]; +"229 bert/embeddings/LayerNorm/moments/StopGradient" [id=229, type=Identity]; +"230 bert/embeddings/LayerNorm/moments/SquaredDifference" [id=230, type=Sub]; +"231 bert/embeddings/LayerNorm/moments/SquaredDifference__72" [id=231, type=Mul]; +"232 bert/embeddings/LayerNorm/moments/variance" [id=232, type=ReduceMean]; +"233 bert/embeddings/LayerNorm/batchnorm/add" [id=233, type=Add]; +"234 bert/embeddings/LayerNorm/batchnorm/Rsqrt" [id=234, type=Sqrt]; +"235 bert/embeddings/LayerNorm/batchnorm/Rsqrt__74" [id=235, type=Reciprocal]; +"236 bert/embeddings/LayerNorm/batchnorm/mul" [id=236, type=Mul]; +"237 bert/embeddings/LayerNorm/batchnorm/mul_2" [id=237, type=Mul]; +"238 bert/embeddings/LayerNorm/batchnorm/sub" [id=238, type=Sub]; +"239 bert/embeddings/LayerNorm/batchnorm/mul_1" [id=239, type=Mul]; +"240 bert/embeddings/LayerNorm/batchnorm/add_1" [id=240, type=Add]; +"241 bert/encoder/Shape_2" [id=241, type=Shape]; +"242 bert/encoder/Shape_2__76" [id=242, type=Cast]; +"243 bert/encoder/strided_slice_2" [id=243, type=Slice]; +"244 bert/encoder/strided_slice_2__80" [id=244, type=Squeeze]; +"245 bert/encoder/strided_slice_2__81" [id=245, type=Cast]; +"246 bert/encoder/layer_9/attention/self/mul_2" [id=246, type=Mul]; +"247 bert/encoder/layer_9/attention/self/Reshape_3/shape_Unsqueeze__82" [id=247, type=Unsqueeze]; +"248 bert/encoder/layer_9/attention/self/Reshape_3/shape_Concat__84" [id=248, type=Concat]; +"249 bert/encoder/layer_9/attention/self/Reshape_3__434" [id=249, type=Cast]; +"250 bert/encoder/layer_9/attention/self/Reshape_2/shape_Unsqueeze__85" [id=250, type=Unsqueeze]; +"251 bert/encoder/layer_9/attention/self/Reshape_2/shape_Concat__89" [id=251, type=Concat]; +"252 bert/encoder/layer_9/attention/self/Reshape_2__429" [id=252, type=Cast]; +"253 bert/encoder/layer_9/attention/self/Reshape_1/shape_Unsqueeze__90" [id=253, type=Unsqueeze]; +"254 bert/encoder/layer_9/attention/self/Reshape_1/shape_Concat__94" [id=254, type=Concat]; +"255 
bert/encoder/layer_9/attention/self/Reshape_1__431" [id=255, type=Cast]; +"256 bert/encoder/layer_9/attention/self/Reshape/shape_Unsqueeze__95" [id=256, type=Unsqueeze]; +"257 bert/encoder/layer_9/attention/self/Reshape/shape_Concat__99" [id=257, type=Concat]; +"258 bert/encoder/layer_9/attention/self/Reshape__430" [id=258, type=Cast]; +"259 bert/encoder/layer_8/attention/self/mul_2" [id=259, type=Mul]; +"260 bert/encoder/layer_8/attention/self/Reshape_3/shape_Unsqueeze__100" [id=260, type=Unsqueeze]; +"261 bert/encoder/layer_8/attention/self/Reshape_3/shape_Concat__102" [id=261, type=Concat]; +"262 bert/encoder/layer_8/attention/self/Reshape_3__420" [id=262, type=Cast]; +"263 bert/encoder/layer_8/attention/self/Reshape_2/shape_Unsqueeze__103" [id=263, type=Unsqueeze]; +"264 bert/encoder/layer_8/attention/self/Reshape_2/shape_Concat__107" [id=264, type=Concat]; +"265 bert/encoder/layer_8/attention/self/Reshape_2__415" [id=265, type=Cast]; +"266 bert/encoder/layer_8/attention/self/Reshape_1/shape_Unsqueeze__108" [id=266, type=Unsqueeze]; +"267 bert/encoder/layer_8/attention/self/Reshape_1/shape_Concat__112" [id=267, type=Concat]; +"268 bert/encoder/layer_8/attention/self/Reshape_1__417" [id=268, type=Cast]; +"269 bert/encoder/layer_8/attention/self/Reshape/shape_Unsqueeze__113" [id=269, type=Unsqueeze]; +"270 bert/encoder/layer_8/attention/self/Reshape/shape_Concat__117" [id=270, type=Concat]; +"271 bert/encoder/layer_8/attention/self/Reshape__416" [id=271, type=Cast]; +"272 bert/encoder/layer_7/attention/self/mul_2" [id=272, type=Mul]; +"273 bert/encoder/layer_7/attention/self/Reshape_3/shape_Unsqueeze__118" [id=273, type=Unsqueeze]; +"274 bert/encoder/layer_7/attention/self/Reshape_3/shape_Concat__120" [id=274, type=Concat]; +"275 bert/encoder/layer_7/attention/self/Reshape_3__406" [id=275, type=Cast]; +"276 bert/encoder/layer_7/attention/self/Reshape_2/shape_Unsqueeze__121" [id=276, type=Unsqueeze]; +"277 bert/encoder/layer_7/attention/self/Reshape_2/shape_Concat__125" [id=277, type=Concat]; +"278 bert/encoder/layer_7/attention/self/Reshape_2__401" [id=278, type=Cast]; +"279 bert/encoder/layer_7/attention/self/Reshape_1/shape_Unsqueeze__126" [id=279, type=Unsqueeze]; +"280 bert/encoder/layer_7/attention/self/Reshape_1/shape_Concat__130" [id=280, type=Concat]; +"281 bert/encoder/layer_7/attention/self/Reshape_1__403" [id=281, type=Cast]; +"282 bert/encoder/layer_7/attention/self/Reshape/shape_Unsqueeze__131" [id=282, type=Unsqueeze]; +"283 bert/encoder/layer_7/attention/self/Reshape/shape_Concat__135" [id=283, type=Concat]; +"284 bert/encoder/layer_7/attention/self/Reshape__402" [id=284, type=Cast]; +"285 bert/encoder/layer_6/attention/self/mul_2" [id=285, type=Mul]; +"286 bert/encoder/layer_6/attention/self/Reshape_3/shape_Unsqueeze__136" [id=286, type=Unsqueeze]; +"287 bert/encoder/layer_6/attention/self/Reshape_3/shape_Concat__138" [id=287, type=Concat]; +"288 bert/encoder/layer_6/attention/self/Reshape_3__392" [id=288, type=Cast]; +"289 bert/encoder/layer_6/attention/self/Reshape_2/shape_Unsqueeze__139" [id=289, type=Unsqueeze]; +"290 bert/encoder/layer_6/attention/self/Reshape_2/shape_Concat__143" [id=290, type=Concat]; +"291 bert/encoder/layer_6/attention/self/Reshape_2__387" [id=291, type=Cast]; +"292 bert/encoder/layer_6/attention/self/Reshape_1/shape_Unsqueeze__144" [id=292, type=Unsqueeze]; +"293 bert/encoder/layer_6/attention/self/Reshape_1/shape_Concat__148" [id=293, type=Concat]; +"294 bert/encoder/layer_6/attention/self/Reshape_1__389" [id=294, type=Cast]; +"295 
bert/encoder/layer_6/attention/self/Reshape/shape_Unsqueeze__149" [id=295, type=Unsqueeze]; +"296 bert/encoder/layer_6/attention/self/Reshape/shape_Concat__153" [id=296, type=Concat]; +"297 bert/encoder/layer_6/attention/self/Reshape__388" [id=297, type=Cast]; +"298 bert/encoder/layer_5/attention/self/mul_2" [id=298, type=Mul]; +"299 bert/encoder/layer_5/attention/self/Reshape_3/shape_Unsqueeze__154" [id=299, type=Unsqueeze]; +"300 bert/encoder/layer_5/attention/self/Reshape_3/shape_Concat__156" [id=300, type=Concat]; +"301 bert/encoder/layer_5/attention/self/Reshape_3__378" [id=301, type=Cast]; +"302 bert/encoder/layer_5/attention/self/Reshape_2/shape_Unsqueeze__157" [id=302, type=Unsqueeze]; +"303 bert/encoder/layer_5/attention/self/Reshape_2/shape_Concat__161" [id=303, type=Concat]; +"304 bert/encoder/layer_5/attention/self/Reshape_2__373" [id=304, type=Cast]; +"305 bert/encoder/layer_5/attention/self/Reshape_1/shape_Unsqueeze__162" [id=305, type=Unsqueeze]; +"306 bert/encoder/layer_5/attention/self/Reshape_1/shape_Concat__166" [id=306, type=Concat]; +"307 bert/encoder/layer_5/attention/self/Reshape_1__375" [id=307, type=Cast]; +"308 bert/encoder/layer_5/attention/self/Reshape/shape_Unsqueeze__167" [id=308, type=Unsqueeze]; +"309 bert/encoder/layer_5/attention/self/Reshape/shape_Concat__171" [id=309, type=Concat]; +"310 bert/encoder/layer_5/attention/self/Reshape__374" [id=310, type=Cast]; +"311 bert/encoder/layer_4/attention/self/mul_2" [id=311, type=Mul]; +"312 bert/encoder/layer_4/attention/self/Reshape_3/shape_Unsqueeze__172" [id=312, type=Unsqueeze]; +"313 bert/encoder/layer_4/attention/self/Reshape_3/shape_Concat__174" [id=313, type=Concat]; +"314 bert/encoder/layer_4/attention/self/Reshape_3__364" [id=314, type=Cast]; +"315 bert/encoder/layer_4/attention/self/Reshape_2/shape_Unsqueeze__175" [id=315, type=Unsqueeze]; +"316 bert/encoder/layer_4/attention/self/Reshape_2/shape_Concat__179" [id=316, type=Concat]; +"317 bert/encoder/layer_4/attention/self/Reshape_2__359" [id=317, type=Cast]; +"318 bert/encoder/layer_4/attention/self/Reshape_1/shape_Unsqueeze__180" [id=318, type=Unsqueeze]; +"319 bert/encoder/layer_4/attention/self/Reshape_1/shape_Concat__184" [id=319, type=Concat]; +"320 bert/encoder/layer_4/attention/self/Reshape_1__361" [id=320, type=Cast]; +"321 bert/encoder/layer_4/attention/self/Reshape/shape_Unsqueeze__185" [id=321, type=Unsqueeze]; +"322 bert/encoder/layer_4/attention/self/Reshape/shape_Concat__189" [id=322, type=Concat]; +"323 bert/encoder/layer_4/attention/self/Reshape__360" [id=323, type=Cast]; +"324 bert/encoder/layer_3/attention/self/mul_2" [id=324, type=Mul]; +"325 bert/encoder/layer_3/attention/self/Reshape_3/shape_Unsqueeze__190" [id=325, type=Unsqueeze]; +"326 bert/encoder/layer_3/attention/self/Reshape_3/shape_Concat__192" [id=326, type=Concat]; +"327 bert/encoder/layer_3/attention/self/Reshape_3__350" [id=327, type=Cast]; +"328 bert/encoder/layer_3/attention/self/Reshape_2/shape_Unsqueeze__193" [id=328, type=Unsqueeze]; +"329 bert/encoder/layer_3/attention/self/Reshape_2/shape_Concat__197" [id=329, type=Concat]; +"330 bert/encoder/layer_3/attention/self/Reshape_2__345" [id=330, type=Cast]; +"331 bert/encoder/layer_3/attention/self/Reshape_1/shape_Unsqueeze__198" [id=331, type=Unsqueeze]; +"332 bert/encoder/layer_3/attention/self/Reshape_1/shape_Concat__202" [id=332, type=Concat]; +"333 bert/encoder/layer_3/attention/self/Reshape_1__347" [id=333, type=Cast]; +"334 bert/encoder/layer_3/attention/self/Reshape/shape_Unsqueeze__203" [id=334, 
type=Unsqueeze]; +"335 bert/encoder/layer_3/attention/self/Reshape/shape_Concat__207" [id=335, type=Concat]; +"336 bert/encoder/layer_3/attention/self/Reshape__346" [id=336, type=Cast]; +"337 bert/encoder/layer_2/attention/self/mul_2" [id=337, type=Mul]; +"338 bert/encoder/layer_2/attention/self/Reshape_3/shape_Unsqueeze__208" [id=338, type=Unsqueeze]; +"339 bert/encoder/layer_2/attention/self/Reshape_3/shape_Concat__210" [id=339, type=Concat]; +"340 bert/encoder/layer_2/attention/self/Reshape_3__336" [id=340, type=Cast]; +"341 bert/encoder/layer_2/attention/self/Reshape_2/shape_Unsqueeze__211" [id=341, type=Unsqueeze]; +"342 bert/encoder/layer_2/attention/self/Reshape_2/shape_Concat__215" [id=342, type=Concat]; +"343 bert/encoder/layer_2/attention/self/Reshape_2__331" [id=343, type=Cast]; +"344 bert/encoder/layer_2/attention/self/Reshape_1/shape_Unsqueeze__216" [id=344, type=Unsqueeze]; +"345 bert/encoder/layer_2/attention/self/Reshape_1/shape_Concat__220" [id=345, type=Concat]; +"346 bert/encoder/layer_2/attention/self/Reshape_1__333" [id=346, type=Cast]; +"347 bert/encoder/layer_2/attention/self/Reshape/shape_Unsqueeze__221" [id=347, type=Unsqueeze]; +"348 bert/encoder/layer_2/attention/self/Reshape/shape_Concat__225" [id=348, type=Concat]; +"349 bert/encoder/layer_2/attention/self/Reshape__332" [id=349, type=Cast]; +"350 bert/encoder/layer_11/attention/self/mul_2" [id=350, type=Mul]; +"351 bert/encoder/layer_11/attention/self/Reshape_3/shape_Unsqueeze__226" [id=351, type=Unsqueeze]; +"352 bert/encoder/layer_11/attention/self/Reshape_3/shape_Concat__228" [id=352, type=Concat]; +"353 bert/encoder/layer_11/attention/self/Reshape_3__462" [id=353, type=Cast]; +"354 bert/encoder/layer_11/attention/self/Reshape_2/shape_Unsqueeze__229" [id=354, type=Unsqueeze]; +"355 bert/encoder/layer_11/attention/self/Reshape_2/shape_Concat__233" [id=355, type=Concat]; +"356 bert/encoder/layer_11/attention/self/Reshape_2__457" [id=356, type=Cast]; +"357 bert/encoder/layer_11/attention/self/Reshape_1/shape_Unsqueeze__234" [id=357, type=Unsqueeze]; +"358 bert/encoder/layer_11/attention/self/Reshape_1/shape_Concat__238" [id=358, type=Concat]; +"359 bert/encoder/layer_11/attention/self/Reshape_1__459" [id=359, type=Cast]; +"360 bert/encoder/layer_11/attention/self/Reshape/shape_Unsqueeze__239" [id=360, type=Unsqueeze]; +"361 bert/encoder/layer_11/attention/self/Reshape/shape_Concat__243" [id=361, type=Concat]; +"362 bert/encoder/layer_11/attention/self/Reshape__458" [id=362, type=Cast]; +"363 bert/encoder/layer_10/attention/self/mul_2" [id=363, type=Mul]; +"364 bert/encoder/layer_10/attention/self/Reshape_3/shape_Unsqueeze__244" [id=364, type=Unsqueeze]; +"365 bert/encoder/layer_10/attention/self/Reshape_3/shape_Concat__246" [id=365, type=Concat]; +"366 bert/encoder/layer_10/attention/self/Reshape_3__448" [id=366, type=Cast]; +"367 bert/encoder/layer_10/attention/self/Reshape_2/shape_Unsqueeze__247" [id=367, type=Unsqueeze]; +"368 bert/encoder/layer_10/attention/self/Reshape_2/shape_Concat__251" [id=368, type=Concat]; +"369 bert/encoder/layer_10/attention/self/Reshape_2__443" [id=369, type=Cast]; +"370 bert/encoder/layer_10/attention/self/Reshape_1/shape_Unsqueeze__252" [id=370, type=Unsqueeze]; +"371 bert/encoder/layer_10/attention/self/Reshape_1/shape_Concat__256" [id=371, type=Concat]; +"372 bert/encoder/layer_10/attention/self/Reshape_1__445" [id=372, type=Cast]; +"373 bert/encoder/layer_10/attention/self/Reshape/shape_Unsqueeze__257" [id=373, type=Unsqueeze]; +"374 
bert/encoder/layer_10/attention/self/Reshape/shape_Concat__261" [id=374, type=Concat]; +"375 bert/encoder/layer_10/attention/self/Reshape__444" [id=375, type=Cast]; +"376 bert/encoder/layer_1/attention/self/mul_2" [id=376, type=Mul]; +"377 bert/encoder/layer_1/attention/self/Reshape_3/shape_Unsqueeze__262" [id=377, type=Unsqueeze]; +"378 bert/encoder/layer_1/attention/self/Reshape_3/shape_Concat__264" [id=378, type=Concat]; +"379 bert/encoder/layer_1/attention/self/Reshape_3__322" [id=379, type=Cast]; +"380 bert/encoder/layer_1/attention/self/Reshape_2/shape_Unsqueeze__265" [id=380, type=Unsqueeze]; +"381 bert/encoder/layer_1/attention/self/Reshape_2/shape_Concat__269" [id=381, type=Concat]; +"382 bert/encoder/layer_1/attention/self/Reshape_2__317" [id=382, type=Cast]; +"383 bert/encoder/layer_1/attention/self/Reshape_1/shape_Unsqueeze__270" [id=383, type=Unsqueeze]; +"384 bert/encoder/layer_1/attention/self/Reshape_1/shape_Concat__274" [id=384, type=Concat]; +"385 bert/encoder/layer_1/attention/self/Reshape_1__319" [id=385, type=Cast]; +"386 bert/encoder/layer_1/attention/self/Reshape/shape_Unsqueeze__275" [id=386, type=Unsqueeze]; +"387 bert/encoder/layer_1/attention/self/Reshape/shape_Concat__279" [id=387, type=Concat]; +"388 bert/encoder/layer_1/attention/self/Reshape__318" [id=388, type=Cast]; +"389 bert/encoder/layer_0/attention/self/mul_2" [id=389, type=Mul]; +"390 bert/encoder/layer_0/attention/self/Reshape_3/shape_Unsqueeze__280" [id=390, type=Unsqueeze]; +"391 bert/encoder/layer_0/attention/self/Reshape_3/shape_Concat__282" [id=391, type=Concat]; +"392 bert/encoder/layer_0/attention/self/Reshape_3__308" [id=392, type=Cast]; +"393 bert/encoder/layer_0/attention/self/Reshape_2/shape_Unsqueeze__283" [id=393, type=Unsqueeze]; +"394 bert/encoder/layer_0/attention/self/Reshape_2/shape_Concat__287" [id=394, type=Concat]; +"395 bert/encoder/layer_0/attention/self/Reshape_2__303" [id=395, type=Cast]; +"396 bert/encoder/layer_0/attention/self/Reshape_1/shape_Unsqueeze__288" [id=396, type=Unsqueeze]; +"397 bert/encoder/layer_0/attention/self/Reshape_1/shape_Concat__292" [id=397, type=Concat]; +"398 bert/encoder/layer_0/attention/self/Reshape_1__305" [id=398, type=Cast]; +"399 bert/encoder/layer_0/attention/self/Reshape/shape_Unsqueeze__293" [id=399, type=Unsqueeze]; +"400 bert/encoder/layer_0/attention/self/Reshape/shape_Concat__297" [id=400, type=Concat]; +"401 bert/encoder/layer_0/attention/self/Reshape__304" [id=401, type=Cast]; +"402 bert/encoder/Reshape_13/shape_Unsqueeze__298" [id=402, type=Unsqueeze]; +"403 bert/encoder/Reshape_13/shape_Concat__301" [id=403, type=Concat]; +"404 bert/encoder/Reshape_13__471" [id=404, type=Cast]; +"405 bert/encoder/Reshape_1" [id=405, type=Reshape]; +"406 QuantizeLinear_bert/encoder/Reshape_1^0_3" [id=406, label="406 QuantizeLinear_bert/encoder/Reshape_1:0_3", type=QuantizeLinear]; +"407 DequantizeLinear_bert/encoder/Reshape_1^0_3" [id=407, label="407 DequantizeLinear_bert/encoder/Reshape_1:0_3", type=DequantizeLinear]; +"408 QuantizeLinear_bert/encoder/Reshape_1^0_2" [id=408, label="408 QuantizeLinear_bert/encoder/Reshape_1:0_2", type=QuantizeLinear]; +"409 DequantizeLinear_bert/encoder/Reshape_1^0_2" [id=409, label="409 DequantizeLinear_bert/encoder/Reshape_1:0_2", type=DequantizeLinear]; +"410 QuantizeLinear_bert/encoder/Reshape_1^0_1" [id=410, label="410 QuantizeLinear_bert/encoder/Reshape_1:0_1", type=QuantizeLinear]; +"411 DequantizeLinear_bert/encoder/Reshape_1^0_1" [id=411, label="411 DequantizeLinear_bert/encoder/Reshape_1:0_1", 
type=DequantizeLinear]; +"412 QuantizeLinear_bert/encoder/layer_0/attention/self/value/kernel^0_1" [id=412, label="412 QuantizeLinear_bert/encoder/layer_0/attention/self/value/kernel:0_1", type=QuantizeLinear]; +"413 DequantizeLinear_bert/encoder/layer_0/attention/self/value/kernel^0_1" [id=413, label="413 DequantizeLinear_bert/encoder/layer_0/attention/self/value/kernel:0_1", type=DequantizeLinear]; +"414 bert/encoder/layer_0/attention/self/value/MatMul" [id=414, type=MatMul]; +"415 bert/encoder/layer_0/attention/self/value/BiasAdd" [id=415, type=Add]; +"416 bert/encoder/layer_0/attention/self/Reshape_2" [id=416, type=Reshape]; +"417 bert/encoder/layer_0/attention/self/transpose_2" [id=417, type=Transpose]; +"418 QuantizeLinear_bert/encoder/layer_0/attention/self/query/kernel^0_1" [id=418, label="418 QuantizeLinear_bert/encoder/layer_0/attention/self/query/kernel:0_1", type=QuantizeLinear]; +"419 DequantizeLinear_bert/encoder/layer_0/attention/self/query/kernel^0_1" [id=419, label="419 DequantizeLinear_bert/encoder/layer_0/attention/self/query/kernel:0_1", type=DequantizeLinear]; +"420 bert/encoder/layer_0/attention/self/query/MatMul" [id=420, type=MatMul]; +"421 bert/encoder/layer_0/attention/self/query/BiasAdd" [id=421, type=Add]; +"422 QuantizeLinear_bert/encoder/layer_0/attention/self/query/BiasAdd^0_1" [id=422, label="422 QuantizeLinear_bert/encoder/layer_0/attention/self/query/BiasAdd:0_1", type=QuantizeLinear]; +"423 DequantizeLinear_bert/encoder/layer_0/attention/self/query/BiasAdd^0_1" [id=423, label="423 DequantizeLinear_bert/encoder/layer_0/attention/self/query/BiasAdd:0_1", type=DequantizeLinear]; +"424 bert/encoder/layer_0/attention/self/Reshape" [id=424, type=Reshape]; +"425 bert/encoder/layer_0/attention/self/transpose" [id=425, type=Transpose]; +"426 QuantizeLinear_bert/encoder/layer_0/attention/self/key/kernel^0_1" [id=426, label="426 QuantizeLinear_bert/encoder/layer_0/attention/self/key/kernel:0_1", type=QuantizeLinear]; +"427 DequantizeLinear_bert/encoder/layer_0/attention/self/key/kernel^0_1" [id=427, label="427 DequantizeLinear_bert/encoder/layer_0/attention/self/key/kernel:0_1", type=DequantizeLinear]; +"428 bert/encoder/layer_0/attention/self/key/MatMul" [id=428, type=MatMul]; +"429 bert/encoder/layer_0/attention/self/key/BiasAdd" [id=429, type=Add]; +"430 QuantizeLinear_bert/encoder/layer_0/attention/self/key/BiasAdd^0_1" [id=430, label="430 QuantizeLinear_bert/encoder/layer_0/attention/self/key/BiasAdd:0_1", type=QuantizeLinear]; +"431 DequantizeLinear_bert/encoder/layer_0/attention/self/key/BiasAdd^0_1" [id=431, label="431 DequantizeLinear_bert/encoder/layer_0/attention/self/key/BiasAdd:0_1", type=DequantizeLinear]; +"432 bert/encoder/layer_0/attention/self/Reshape_1" [id=432, type=Reshape]; +"433 bert/encoder/layer_0/attention/self/transpose_1" [id=433, type=Transpose]; +"434 bert/encoder/layer_0/attention/self/MatMul__306" [id=434, type=Transpose]; +"435 bert/encoder/layer_0/attention/self/MatMul" [id=435, type=MatMul]; +"436 bert/encoder/layer_0/attention/self/Mul" [id=436, type=Mul]; +"437 bert/encoder/layer_0/attention/self/add" [id=437, type=Add]; +"438 bert/encoder/layer_0/attention/self/Softmax" [id=438, type=Softmax]; +"439 bert/encoder/layer_0/attention/self/MatMul_1" [id=439, type=MatMul]; +"440 QuantizeLinear_bert/encoder/layer_0/attention/self/MatMul_1^0_1" [id=440, label="440 QuantizeLinear_bert/encoder/layer_0/attention/self/MatMul_1:0_1", type=QuantizeLinear]; +"441 DequantizeLinear_bert/encoder/layer_0/attention/self/MatMul_1^0_1" [id=441, 
label="441 DequantizeLinear_bert/encoder/layer_0/attention/self/MatMul_1:0_1", type=DequantizeLinear]; +"442 bert/encoder/layer_0/attention/self/transpose_3" [id=442, type=Transpose]; +"443 bert/encoder/layer_0/attention/self/Reshape_3" [id=443, type=Reshape]; +"444 QuantizeLinear_bert/encoder/layer_0/attention/output/dense/kernel^0_1" [id=444, label="444 QuantizeLinear_bert/encoder/layer_0/attention/output/dense/kernel:0_1", type=QuantizeLinear]; +"445 DequantizeLinear_bert/encoder/layer_0/attention/output/dense/kernel^0_1" [id=445, label="445 DequantizeLinear_bert/encoder/layer_0/attention/output/dense/kernel:0_1", type=DequantizeLinear]; +"446 bert/encoder/layer_0/attention/output/dense/MatMul" [id=446, type=MatMul]; +"447 bert/encoder/layer_0/attention/output/dense/BiasAdd" [id=447, type=Add]; +"448 bert/encoder/layer_0/attention/output/add" [id=448, type=Add]; +"449 bert/encoder/layer_0/attention/output/LayerNorm/moments/mean" [id=449, type=ReduceMean]; +"450 bert/encoder/layer_0/attention/output/LayerNorm/moments/StopGradient" [id=450, type=Identity]; +"451 bert/encoder/layer_0/attention/output/LayerNorm/moments/SquaredDifference" [id=451, type=Sub]; +"452 bert/encoder/layer_0/attention/output/LayerNorm/moments/SquaredDifference__309" [id=452, type=Mul]; +"453 bert/encoder/layer_0/attention/output/LayerNorm/moments/variance" [id=453, type=ReduceMean]; +"454 bert/encoder/layer_0/attention/output/LayerNorm/batchnorm/add" [id=454, type=Add]; +"455 bert/encoder/layer_0/attention/output/LayerNorm/batchnorm/Rsqrt" [id=455, type=Sqrt]; +"456 bert/encoder/layer_0/attention/output/LayerNorm/batchnorm/Rsqrt__311" [id=456, type=Reciprocal]; +"457 bert/encoder/layer_0/attention/output/LayerNorm/batchnorm/mul" [id=457, type=Mul]; +"458 bert/encoder/layer_0/attention/output/LayerNorm/batchnorm/mul_2" [id=458, type=Mul]; +"459 bert/encoder/layer_0/attention/output/LayerNorm/batchnorm/sub" [id=459, type=Sub]; +"460 bert/encoder/layer_0/attention/output/LayerNorm/batchnorm/mul_1" [id=460, type=Mul]; +"461 bert/encoder/layer_0/attention/output/LayerNorm/batchnorm/add_1" [id=461, type=Add]; +"462 QuantizeLinear_bert/encoder/layer_0/attention/output/LayerNorm/batchnorm/add_1^0_1" [id=462, label="462 QuantizeLinear_bert/encoder/layer_0/attention/output/LayerNorm/batchnorm/add_1:0_1", type=QuantizeLinear]; +"463 DequantizeLinear_bert/encoder/layer_0/attention/output/LayerNorm/batchnorm/add_1^0_1" [id=463, label="463 DequantizeLinear_bert/encoder/layer_0/attention/output/LayerNorm/batchnorm/add_1:0_1", type=DequantizeLinear]; +"464 QuantizeLinear_bert/encoder/layer_0/intermediate/dense/kernel^0_1" [id=464, label="464 QuantizeLinear_bert/encoder/layer_0/intermediate/dense/kernel:0_1", type=QuantizeLinear]; +"465 DequantizeLinear_bert/encoder/layer_0/intermediate/dense/kernel^0_1" [id=465, label="465 DequantizeLinear_bert/encoder/layer_0/intermediate/dense/kernel:0_1", type=DequantizeLinear]; +"466 bert/encoder/layer_0/intermediate/dense/MatMul" [id=466, type=MatMul]; +"467 bert/encoder/layer_0/intermediate/dense/BiasAdd" [id=467, type=Add]; +"468 bert/encoder/layer_0/intermediate/dense/Pow" [id=468, type=Pow]; +"469 bert/encoder/layer_0/intermediate/dense/mul" [id=469, type=Mul]; +"470 bert/encoder/layer_0/intermediate/dense/add" [id=470, type=Add]; +"471 bert/encoder/layer_0/intermediate/dense/mul_1" [id=471, type=Mul]; +"472 bert/encoder/layer_0/intermediate/dense/Tanh" [id=472, type=Tanh]; +"473 bert/encoder/layer_0/intermediate/dense/add_1" [id=473, type=Add]; +"474 
bert/encoder/layer_0/intermediate/dense/mul_2" [id=474, type=Mul]; +"475 bert/encoder/layer_0/intermediate/dense/mul_3" [id=475, type=Mul]; +"476 QuantizeLinear_bert/encoder/layer_0/intermediate/dense/mul_3^0_1" [id=476, label="476 QuantizeLinear_bert/encoder/layer_0/intermediate/dense/mul_3:0_1", type=QuantizeLinear]; +"477 DequantizeLinear_bert/encoder/layer_0/intermediate/dense/mul_3^0_1" [id=477, label="477 DequantizeLinear_bert/encoder/layer_0/intermediate/dense/mul_3:0_1", type=DequantizeLinear]; +"478 QuantizeLinear_bert/encoder/layer_0/output/dense/kernel^0_1" [id=478, label="478 QuantizeLinear_bert/encoder/layer_0/output/dense/kernel:0_1", type=QuantizeLinear]; +"479 DequantizeLinear_bert/encoder/layer_0/output/dense/kernel^0_1" [id=479, label="479 DequantizeLinear_bert/encoder/layer_0/output/dense/kernel:0_1", type=DequantizeLinear]; +"480 bert/encoder/layer_0/output/dense/MatMul" [id=480, type=MatMul]; +"481 bert/encoder/layer_0/output/dense/BiasAdd" [id=481, type=Add]; +"482 bert/encoder/layer_0/output/add" [id=482, type=Add]; +"483 bert/encoder/layer_0/output/LayerNorm/moments/mean" [id=483, type=ReduceMean]; +"484 bert/encoder/layer_0/output/LayerNorm/moments/StopGradient" [id=484, type=Identity]; +"485 bert/encoder/layer_0/output/LayerNorm/moments/SquaredDifference" [id=485, type=Sub]; +"486 bert/encoder/layer_0/output/LayerNorm/moments/SquaredDifference__313" [id=486, type=Mul]; +"487 bert/encoder/layer_0/output/LayerNorm/moments/variance" [id=487, type=ReduceMean]; +"488 bert/encoder/layer_0/output/LayerNorm/batchnorm/add" [id=488, type=Add]; +"489 bert/encoder/layer_0/output/LayerNorm/batchnorm/Rsqrt" [id=489, type=Sqrt]; +"490 bert/encoder/layer_0/output/LayerNorm/batchnorm/Rsqrt__315" [id=490, type=Reciprocal]; +"491 bert/encoder/layer_0/output/LayerNorm/batchnorm/mul" [id=491, type=Mul]; +"492 bert/encoder/layer_0/output/LayerNorm/batchnorm/mul_2" [id=492, type=Mul]; +"493 bert/encoder/layer_0/output/LayerNorm/batchnorm/sub" [id=493, type=Sub]; +"494 bert/encoder/layer_0/output/LayerNorm/batchnorm/mul_1" [id=494, type=Mul]; +"495 bert/encoder/layer_0/output/LayerNorm/batchnorm/add_1" [id=495, type=Add]; +"496 QuantizeLinear_bert/encoder/layer_0/output/LayerNorm/batchnorm/add_1^0_3" [id=496, label="496 QuantizeLinear_bert/encoder/layer_0/output/LayerNorm/batchnorm/add_1:0_3", type=QuantizeLinear]; +"497 DequantizeLinear_bert/encoder/layer_0/output/LayerNorm/batchnorm/add_1^0_3" [id=497, label="497 DequantizeLinear_bert/encoder/layer_0/output/LayerNorm/batchnorm/add_1:0_3", type=DequantizeLinear]; +"498 QuantizeLinear_bert/encoder/layer_0/output/LayerNorm/batchnorm/add_1^0_2" [id=498, label="498 QuantizeLinear_bert/encoder/layer_0/output/LayerNorm/batchnorm/add_1:0_2", type=QuantizeLinear]; +"499 DequantizeLinear_bert/encoder/layer_0/output/LayerNorm/batchnorm/add_1^0_2" [id=499, label="499 DequantizeLinear_bert/encoder/layer_0/output/LayerNorm/batchnorm/add_1:0_2", type=DequantizeLinear]; +"500 QuantizeLinear_bert/encoder/layer_0/output/LayerNorm/batchnorm/add_1^0_1" [id=500, label="500 QuantizeLinear_bert/encoder/layer_0/output/LayerNorm/batchnorm/add_1:0_1", type=QuantizeLinear]; +"501 DequantizeLinear_bert/encoder/layer_0/output/LayerNorm/batchnorm/add_1^0_1" [id=501, label="501 DequantizeLinear_bert/encoder/layer_0/output/LayerNorm/batchnorm/add_1:0_1", type=DequantizeLinear]; +"502 QuantizeLinear_bert/encoder/layer_1/attention/self/value/kernel^0_1" [id=502, label="502 QuantizeLinear_bert/encoder/layer_1/attention/self/value/kernel:0_1", type=QuantizeLinear]; +"503 
DequantizeLinear_bert/encoder/layer_1/attention/self/value/kernel^0_1" [id=503, label="503 DequantizeLinear_bert/encoder/layer_1/attention/self/value/kernel:0_1", type=DequantizeLinear]; +"504 bert/encoder/layer_1/attention/self/value/MatMul" [id=504, type=MatMul]; +"505 bert/encoder/layer_1/attention/self/value/BiasAdd" [id=505, type=Add]; +"506 bert/encoder/layer_1/attention/self/Reshape_2" [id=506, type=Reshape]; +"507 bert/encoder/layer_1/attention/self/transpose_2" [id=507, type=Transpose]; +"508 QuantizeLinear_bert/encoder/layer_1/attention/self/query/kernel^0_1" [id=508, label="508 QuantizeLinear_bert/encoder/layer_1/attention/self/query/kernel:0_1", type=QuantizeLinear]; +"509 DequantizeLinear_bert/encoder/layer_1/attention/self/query/kernel^0_1" [id=509, label="509 DequantizeLinear_bert/encoder/layer_1/attention/self/query/kernel:0_1", type=DequantizeLinear]; +"510 bert/encoder/layer_1/attention/self/query/MatMul" [id=510, type=MatMul]; +"511 bert/encoder/layer_1/attention/self/query/BiasAdd" [id=511, type=Add]; +"512 QuantizeLinear_bert/encoder/layer_1/attention/self/query/BiasAdd^0_1" [id=512, label="512 QuantizeLinear_bert/encoder/layer_1/attention/self/query/BiasAdd:0_1", type=QuantizeLinear]; +"513 DequantizeLinear_bert/encoder/layer_1/attention/self/query/BiasAdd^0_1" [id=513, label="513 DequantizeLinear_bert/encoder/layer_1/attention/self/query/BiasAdd:0_1", type=DequantizeLinear]; +"514 bert/encoder/layer_1/attention/self/Reshape" [id=514, type=Reshape]; +"515 bert/encoder/layer_1/attention/self/transpose" [id=515, type=Transpose]; +"516 QuantizeLinear_bert/encoder/layer_1/attention/self/key/kernel^0_1" [id=516, label="516 QuantizeLinear_bert/encoder/layer_1/attention/self/key/kernel:0_1", type=QuantizeLinear]; +"517 DequantizeLinear_bert/encoder/layer_1/attention/self/key/kernel^0_1" [id=517, label="517 DequantizeLinear_bert/encoder/layer_1/attention/self/key/kernel:0_1", type=DequantizeLinear]; +"518 bert/encoder/layer_1/attention/self/key/MatMul" [id=518, type=MatMul]; +"519 bert/encoder/layer_1/attention/self/key/BiasAdd" [id=519, type=Add]; +"520 QuantizeLinear_bert/encoder/layer_1/attention/self/key/BiasAdd^0_1" [id=520, label="520 QuantizeLinear_bert/encoder/layer_1/attention/self/key/BiasAdd:0_1", type=QuantizeLinear]; +"521 DequantizeLinear_bert/encoder/layer_1/attention/self/key/BiasAdd^0_1" [id=521, label="521 DequantizeLinear_bert/encoder/layer_1/attention/self/key/BiasAdd:0_1", type=DequantizeLinear]; +"522 bert/encoder/layer_1/attention/self/Reshape_1" [id=522, type=Reshape]; +"523 bert/encoder/layer_1/attention/self/transpose_1" [id=523, type=Transpose]; +"524 bert/encoder/layer_1/attention/self/MatMul__320" [id=524, type=Transpose]; +"525 bert/encoder/layer_1/attention/self/MatMul" [id=525, type=MatMul]; +"526 bert/encoder/layer_1/attention/self/Mul" [id=526, type=Mul]; +"527 bert/encoder/layer_1/attention/self/add" [id=527, type=Add]; +"528 bert/encoder/layer_1/attention/self/Softmax" [id=528, type=Softmax]; +"529 bert/encoder/layer_1/attention/self/MatMul_1" [id=529, type=MatMul]; +"530 QuantizeLinear_bert/encoder/layer_1/attention/self/MatMul_1^0_1" [id=530, label="530 QuantizeLinear_bert/encoder/layer_1/attention/self/MatMul_1:0_1", type=QuantizeLinear]; +"531 DequantizeLinear_bert/encoder/layer_1/attention/self/MatMul_1^0_1" [id=531, label="531 DequantizeLinear_bert/encoder/layer_1/attention/self/MatMul_1:0_1", type=DequantizeLinear]; +"532 bert/encoder/layer_1/attention/self/transpose_3" [id=532, type=Transpose]; +"533 
bert/encoder/layer_1/attention/self/Reshape_3" [id=533, type=Reshape]; +"534 QuantizeLinear_bert/encoder/layer_1/attention/output/dense/kernel^0_1" [id=534, label="534 QuantizeLinear_bert/encoder/layer_1/attention/output/dense/kernel:0_1", type=QuantizeLinear]; +"535 DequantizeLinear_bert/encoder/layer_1/attention/output/dense/kernel^0_1" [id=535, label="535 DequantizeLinear_bert/encoder/layer_1/attention/output/dense/kernel:0_1", type=DequantizeLinear]; +"536 bert/encoder/layer_1/attention/output/dense/MatMul" [id=536, type=MatMul]; +"537 bert/encoder/layer_1/attention/output/dense/BiasAdd" [id=537, type=Add]; +"538 bert/encoder/layer_1/attention/output/add" [id=538, type=Add]; +"539 bert/encoder/layer_1/attention/output/LayerNorm/moments/mean" [id=539, type=ReduceMean]; +"540 bert/encoder/layer_1/attention/output/LayerNorm/moments/StopGradient" [id=540, type=Identity]; +"541 bert/encoder/layer_1/attention/output/LayerNorm/moments/SquaredDifference" [id=541, type=Sub]; +"542 bert/encoder/layer_1/attention/output/LayerNorm/moments/SquaredDifference__323" [id=542, type=Mul]; +"543 bert/encoder/layer_1/attention/output/LayerNorm/moments/variance" [id=543, type=ReduceMean]; +"544 bert/encoder/layer_1/attention/output/LayerNorm/batchnorm/add" [id=544, type=Add]; +"545 bert/encoder/layer_1/attention/output/LayerNorm/batchnorm/Rsqrt" [id=545, type=Sqrt]; +"546 bert/encoder/layer_1/attention/output/LayerNorm/batchnorm/Rsqrt__325" [id=546, type=Reciprocal]; +"547 bert/encoder/layer_1/attention/output/LayerNorm/batchnorm/mul" [id=547, type=Mul]; +"548 bert/encoder/layer_1/attention/output/LayerNorm/batchnorm/mul_2" [id=548, type=Mul]; +"549 bert/encoder/layer_1/attention/output/LayerNorm/batchnorm/sub" [id=549, type=Sub]; +"550 bert/encoder/layer_1/attention/output/LayerNorm/batchnorm/mul_1" [id=550, type=Mul]; +"551 bert/encoder/layer_1/attention/output/LayerNorm/batchnorm/add_1" [id=551, type=Add]; +"552 QuantizeLinear_bert/encoder/layer_1/attention/output/LayerNorm/batchnorm/add_1^0_1" [id=552, label="552 QuantizeLinear_bert/encoder/layer_1/attention/output/LayerNorm/batchnorm/add_1:0_1", type=QuantizeLinear]; +"553 DequantizeLinear_bert/encoder/layer_1/attention/output/LayerNorm/batchnorm/add_1^0_1" [id=553, label="553 DequantizeLinear_bert/encoder/layer_1/attention/output/LayerNorm/batchnorm/add_1:0_1", type=DequantizeLinear]; +"554 QuantizeLinear_bert/encoder/layer_1/intermediate/dense/kernel^0_1" [id=554, label="554 QuantizeLinear_bert/encoder/layer_1/intermediate/dense/kernel:0_1", type=QuantizeLinear]; +"555 DequantizeLinear_bert/encoder/layer_1/intermediate/dense/kernel^0_1" [id=555, label="555 DequantizeLinear_bert/encoder/layer_1/intermediate/dense/kernel:0_1", type=DequantizeLinear]; +"556 bert/encoder/layer_1/intermediate/dense/MatMul" [id=556, type=MatMul]; +"557 bert/encoder/layer_1/intermediate/dense/BiasAdd" [id=557, type=Add]; +"558 bert/encoder/layer_1/intermediate/dense/Pow" [id=558, type=Pow]; +"559 bert/encoder/layer_1/intermediate/dense/mul" [id=559, type=Mul]; +"560 bert/encoder/layer_1/intermediate/dense/add" [id=560, type=Add]; +"561 bert/encoder/layer_1/intermediate/dense/mul_1" [id=561, type=Mul]; +"562 bert/encoder/layer_1/intermediate/dense/Tanh" [id=562, type=Tanh]; +"563 bert/encoder/layer_1/intermediate/dense/add_1" [id=563, type=Add]; +"564 bert/encoder/layer_1/intermediate/dense/mul_2" [id=564, type=Mul]; +"565 bert/encoder/layer_1/intermediate/dense/mul_3" [id=565, type=Mul]; +"566 QuantizeLinear_bert/encoder/layer_1/intermediate/dense/mul_3^0_1" [id=566, 
label="566 QuantizeLinear_bert/encoder/layer_1/intermediate/dense/mul_3:0_1", type=QuantizeLinear]; +"567 DequantizeLinear_bert/encoder/layer_1/intermediate/dense/mul_3^0_1" [id=567, label="567 DequantizeLinear_bert/encoder/layer_1/intermediate/dense/mul_3:0_1", type=DequantizeLinear]; +"568 QuantizeLinear_bert/encoder/layer_1/output/dense/kernel^0_1" [id=568, label="568 QuantizeLinear_bert/encoder/layer_1/output/dense/kernel:0_1", type=QuantizeLinear]; +"569 DequantizeLinear_bert/encoder/layer_1/output/dense/kernel^0_1" [id=569, label="569 DequantizeLinear_bert/encoder/layer_1/output/dense/kernel:0_1", type=DequantizeLinear]; +"570 bert/encoder/layer_1/output/dense/MatMul" [id=570, type=MatMul]; +"571 bert/encoder/layer_1/output/dense/BiasAdd" [id=571, type=Add]; +"572 bert/encoder/layer_1/output/add" [id=572, type=Add]; +"573 bert/encoder/layer_1/output/LayerNorm/moments/mean" [id=573, type=ReduceMean]; +"574 bert/encoder/layer_1/output/LayerNorm/moments/StopGradient" [id=574, type=Identity]; +"575 bert/encoder/layer_1/output/LayerNorm/moments/SquaredDifference" [id=575, type=Sub]; +"576 bert/encoder/layer_1/output/LayerNorm/moments/SquaredDifference__327" [id=576, type=Mul]; +"577 bert/encoder/layer_1/output/LayerNorm/moments/variance" [id=577, type=ReduceMean]; +"578 bert/encoder/layer_1/output/LayerNorm/batchnorm/add" [id=578, type=Add]; +"579 bert/encoder/layer_1/output/LayerNorm/batchnorm/Rsqrt" [id=579, type=Sqrt]; +"580 bert/encoder/layer_1/output/LayerNorm/batchnorm/Rsqrt__329" [id=580, type=Reciprocal]; +"581 bert/encoder/layer_1/output/LayerNorm/batchnorm/mul" [id=581, type=Mul]; +"582 bert/encoder/layer_1/output/LayerNorm/batchnorm/mul_2" [id=582, type=Mul]; +"583 bert/encoder/layer_1/output/LayerNorm/batchnorm/sub" [id=583, type=Sub]; +"584 bert/encoder/layer_1/output/LayerNorm/batchnorm/mul_1" [id=584, type=Mul]; +"585 bert/encoder/layer_1/output/LayerNorm/batchnorm/add_1" [id=585, type=Add]; +"586 QuantizeLinear_bert/encoder/layer_1/output/LayerNorm/batchnorm/add_1^0_3" [id=586, label="586 QuantizeLinear_bert/encoder/layer_1/output/LayerNorm/batchnorm/add_1:0_3", type=QuantizeLinear]; +"587 DequantizeLinear_bert/encoder/layer_1/output/LayerNorm/batchnorm/add_1^0_3" [id=587, label="587 DequantizeLinear_bert/encoder/layer_1/output/LayerNorm/batchnorm/add_1:0_3", type=DequantizeLinear]; +"588 QuantizeLinear_bert/encoder/layer_1/output/LayerNorm/batchnorm/add_1^0_2" [id=588, label="588 QuantizeLinear_bert/encoder/layer_1/output/LayerNorm/batchnorm/add_1:0_2", type=QuantizeLinear]; +"589 DequantizeLinear_bert/encoder/layer_1/output/LayerNorm/batchnorm/add_1^0_2" [id=589, label="589 DequantizeLinear_bert/encoder/layer_1/output/LayerNorm/batchnorm/add_1:0_2", type=DequantizeLinear]; +"590 QuantizeLinear_bert/encoder/layer_1/output/LayerNorm/batchnorm/add_1^0_1" [id=590, label="590 QuantizeLinear_bert/encoder/layer_1/output/LayerNorm/batchnorm/add_1:0_1", type=QuantizeLinear]; +"591 DequantizeLinear_bert/encoder/layer_1/output/LayerNorm/batchnorm/add_1^0_1" [id=591, label="591 DequantizeLinear_bert/encoder/layer_1/output/LayerNorm/batchnorm/add_1:0_1", type=DequantizeLinear]; +"592 QuantizeLinear_bert/encoder/layer_2/attention/self/value/kernel^0_1" [id=592, label="592 QuantizeLinear_bert/encoder/layer_2/attention/self/value/kernel:0_1", type=QuantizeLinear]; +"593 DequantizeLinear_bert/encoder/layer_2/attention/self/value/kernel^0_1" [id=593, label="593 DequantizeLinear_bert/encoder/layer_2/attention/self/value/kernel:0_1", type=DequantizeLinear]; +"594 
bert/encoder/layer_2/attention/self/value/MatMul" [id=594, type=MatMul]; +"595 bert/encoder/layer_2/attention/self/value/BiasAdd" [id=595, type=Add]; +"596 bert/encoder/layer_2/attention/self/Reshape_2" [id=596, type=Reshape]; +"597 bert/encoder/layer_2/attention/self/transpose_2" [id=597, type=Transpose]; +"598 QuantizeLinear_bert/encoder/layer_2/attention/self/query/kernel^0_1" [id=598, label="598 QuantizeLinear_bert/encoder/layer_2/attention/self/query/kernel:0_1", type=QuantizeLinear]; +"599 DequantizeLinear_bert/encoder/layer_2/attention/self/query/kernel^0_1" [id=599, label="599 DequantizeLinear_bert/encoder/layer_2/attention/self/query/kernel:0_1", type=DequantizeLinear]; +"600 bert/encoder/layer_2/attention/self/query/MatMul" [id=600, type=MatMul]; +"601 bert/encoder/layer_2/attention/self/query/BiasAdd" [id=601, type=Add]; +"602 QuantizeLinear_bert/encoder/layer_2/attention/self/query/BiasAdd^0_1" [id=602, label="602 QuantizeLinear_bert/encoder/layer_2/attention/self/query/BiasAdd:0_1", type=QuantizeLinear]; +"603 DequantizeLinear_bert/encoder/layer_2/attention/self/query/BiasAdd^0_1" [id=603, label="603 DequantizeLinear_bert/encoder/layer_2/attention/self/query/BiasAdd:0_1", type=DequantizeLinear]; +"604 bert/encoder/layer_2/attention/self/Reshape" [id=604, type=Reshape]; +"605 bert/encoder/layer_2/attention/self/transpose" [id=605, type=Transpose]; +"606 QuantizeLinear_bert/encoder/layer_2/attention/self/key/kernel^0_1" [id=606, label="606 QuantizeLinear_bert/encoder/layer_2/attention/self/key/kernel:0_1", type=QuantizeLinear]; +"607 DequantizeLinear_bert/encoder/layer_2/attention/self/key/kernel^0_1" [id=607, label="607 DequantizeLinear_bert/encoder/layer_2/attention/self/key/kernel:0_1", type=DequantizeLinear]; +"608 bert/encoder/layer_2/attention/self/key/MatMul" [id=608, type=MatMul]; +"609 bert/encoder/layer_2/attention/self/key/BiasAdd" [id=609, type=Add]; +"610 QuantizeLinear_bert/encoder/layer_2/attention/self/key/BiasAdd^0_1" [id=610, label="610 QuantizeLinear_bert/encoder/layer_2/attention/self/key/BiasAdd:0_1", type=QuantizeLinear]; +"611 DequantizeLinear_bert/encoder/layer_2/attention/self/key/BiasAdd^0_1" [id=611, label="611 DequantizeLinear_bert/encoder/layer_2/attention/self/key/BiasAdd:0_1", type=DequantizeLinear]; +"612 bert/encoder/layer_2/attention/self/Reshape_1" [id=612, type=Reshape]; +"613 bert/encoder/layer_2/attention/self/transpose_1" [id=613, type=Transpose]; +"614 bert/encoder/layer_2/attention/self/MatMul__334" [id=614, type=Transpose]; +"615 bert/encoder/layer_2/attention/self/MatMul" [id=615, type=MatMul]; +"616 bert/encoder/layer_2/attention/self/Mul" [id=616, type=Mul]; +"617 bert/encoder/layer_2/attention/self/add" [id=617, type=Add]; +"618 bert/encoder/layer_2/attention/self/Softmax" [id=618, type=Softmax]; +"619 bert/encoder/layer_2/attention/self/MatMul_1" [id=619, type=MatMul]; +"620 QuantizeLinear_bert/encoder/layer_2/attention/self/MatMul_1^0_1" [id=620, label="620 QuantizeLinear_bert/encoder/layer_2/attention/self/MatMul_1:0_1", type=QuantizeLinear]; +"621 DequantizeLinear_bert/encoder/layer_2/attention/self/MatMul_1^0_1" [id=621, label="621 DequantizeLinear_bert/encoder/layer_2/attention/self/MatMul_1:0_1", type=DequantizeLinear]; +"622 bert/encoder/layer_2/attention/self/transpose_3" [id=622, type=Transpose]; +"623 bert/encoder/layer_2/attention/self/Reshape_3" [id=623, type=Reshape]; +"624 QuantizeLinear_bert/encoder/layer_2/attention/output/dense/kernel^0_1" [id=624, label="624 
QuantizeLinear_bert/encoder/layer_2/attention/output/dense/kernel:0_1", type=QuantizeLinear]; +"625 DequantizeLinear_bert/encoder/layer_2/attention/output/dense/kernel^0_1" [id=625, label="625 DequantizeLinear_bert/encoder/layer_2/attention/output/dense/kernel:0_1", type=DequantizeLinear]; +"626 bert/encoder/layer_2/attention/output/dense/MatMul" [id=626, type=MatMul]; +"627 bert/encoder/layer_2/attention/output/dense/BiasAdd" [id=627, type=Add]; +"628 bert/encoder/layer_2/attention/output/add" [id=628, type=Add]; +"629 bert/encoder/layer_2/attention/output/LayerNorm/moments/mean" [id=629, type=ReduceMean]; +"630 bert/encoder/layer_2/attention/output/LayerNorm/moments/StopGradient" [id=630, type=Identity]; +"631 bert/encoder/layer_2/attention/output/LayerNorm/moments/SquaredDifference" [id=631, type=Sub]; +"632 bert/encoder/layer_2/attention/output/LayerNorm/moments/SquaredDifference__337" [id=632, type=Mul]; +"633 bert/encoder/layer_2/attention/output/LayerNorm/moments/variance" [id=633, type=ReduceMean]; +"634 bert/encoder/layer_2/attention/output/LayerNorm/batchnorm/add" [id=634, type=Add]; +"635 bert/encoder/layer_2/attention/output/LayerNorm/batchnorm/Rsqrt" [id=635, type=Sqrt]; +"636 bert/encoder/layer_2/attention/output/LayerNorm/batchnorm/Rsqrt__339" [id=636, type=Reciprocal]; +"637 bert/encoder/layer_2/attention/output/LayerNorm/batchnorm/mul" [id=637, type=Mul]; +"638 bert/encoder/layer_2/attention/output/LayerNorm/batchnorm/mul_2" [id=638, type=Mul]; +"639 bert/encoder/layer_2/attention/output/LayerNorm/batchnorm/sub" [id=639, type=Sub]; +"640 bert/encoder/layer_2/attention/output/LayerNorm/batchnorm/mul_1" [id=640, type=Mul]; +"641 bert/encoder/layer_2/attention/output/LayerNorm/batchnorm/add_1" [id=641, type=Add]; +"642 QuantizeLinear_bert/encoder/layer_2/attention/output/LayerNorm/batchnorm/add_1^0_1" [id=642, label="642 QuantizeLinear_bert/encoder/layer_2/attention/output/LayerNorm/batchnorm/add_1:0_1", type=QuantizeLinear]; +"643 DequantizeLinear_bert/encoder/layer_2/attention/output/LayerNorm/batchnorm/add_1^0_1" [id=643, label="643 DequantizeLinear_bert/encoder/layer_2/attention/output/LayerNorm/batchnorm/add_1:0_1", type=DequantizeLinear]; +"644 QuantizeLinear_bert/encoder/layer_2/intermediate/dense/kernel^0_1" [id=644, label="644 QuantizeLinear_bert/encoder/layer_2/intermediate/dense/kernel:0_1", type=QuantizeLinear]; +"645 DequantizeLinear_bert/encoder/layer_2/intermediate/dense/kernel^0_1" [id=645, label="645 DequantizeLinear_bert/encoder/layer_2/intermediate/dense/kernel:0_1", type=DequantizeLinear]; +"646 bert/encoder/layer_2/intermediate/dense/MatMul" [id=646, type=MatMul]; +"647 bert/encoder/layer_2/intermediate/dense/BiasAdd" [id=647, type=Add]; +"648 bert/encoder/layer_2/intermediate/dense/Pow" [id=648, type=Pow]; +"649 bert/encoder/layer_2/intermediate/dense/mul" [id=649, type=Mul]; +"650 bert/encoder/layer_2/intermediate/dense/add" [id=650, type=Add]; +"651 bert/encoder/layer_2/intermediate/dense/mul_1" [id=651, type=Mul]; +"652 bert/encoder/layer_2/intermediate/dense/Tanh" [id=652, type=Tanh]; +"653 bert/encoder/layer_2/intermediate/dense/add_1" [id=653, type=Add]; +"654 bert/encoder/layer_2/intermediate/dense/mul_2" [id=654, type=Mul]; +"655 bert/encoder/layer_2/intermediate/dense/mul_3" [id=655, type=Mul]; +"656 QuantizeLinear_bert/encoder/layer_2/intermediate/dense/mul_3^0_1" [id=656, label="656 QuantizeLinear_bert/encoder/layer_2/intermediate/dense/mul_3:0_1", type=QuantizeLinear]; +"657 DequantizeLinear_bert/encoder/layer_2/intermediate/dense/mul_3^0_1" 
[id=657, label="657 DequantizeLinear_bert/encoder/layer_2/intermediate/dense/mul_3:0_1", type=DequantizeLinear]; +"658 QuantizeLinear_bert/encoder/layer_2/output/dense/kernel^0_1" [id=658, label="658 QuantizeLinear_bert/encoder/layer_2/output/dense/kernel:0_1", type=QuantizeLinear]; +"659 DequantizeLinear_bert/encoder/layer_2/output/dense/kernel^0_1" [id=659, label="659 DequantizeLinear_bert/encoder/layer_2/output/dense/kernel:0_1", type=DequantizeLinear]; +"660 bert/encoder/layer_2/output/dense/MatMul" [id=660, type=MatMul]; +"661 bert/encoder/layer_2/output/dense/BiasAdd" [id=661, type=Add]; +"662 bert/encoder/layer_2/output/add" [id=662, type=Add]; +"663 bert/encoder/layer_2/output/LayerNorm/moments/mean" [id=663, type=ReduceMean]; +"664 bert/encoder/layer_2/output/LayerNorm/moments/StopGradient" [id=664, type=Identity]; +"665 bert/encoder/layer_2/output/LayerNorm/moments/SquaredDifference" [id=665, type=Sub]; +"666 bert/encoder/layer_2/output/LayerNorm/moments/SquaredDifference__341" [id=666, type=Mul]; +"667 bert/encoder/layer_2/output/LayerNorm/moments/variance" [id=667, type=ReduceMean]; +"668 bert/encoder/layer_2/output/LayerNorm/batchnorm/add" [id=668, type=Add]; +"669 bert/encoder/layer_2/output/LayerNorm/batchnorm/Rsqrt" [id=669, type=Sqrt]; +"670 bert/encoder/layer_2/output/LayerNorm/batchnorm/Rsqrt__343" [id=670, type=Reciprocal]; +"671 bert/encoder/layer_2/output/LayerNorm/batchnorm/mul" [id=671, type=Mul]; +"672 bert/encoder/layer_2/output/LayerNorm/batchnorm/mul_2" [id=672, type=Mul]; +"673 bert/encoder/layer_2/output/LayerNorm/batchnorm/sub" [id=673, type=Sub]; +"674 bert/encoder/layer_2/output/LayerNorm/batchnorm/mul_1" [id=674, type=Mul]; +"675 bert/encoder/layer_2/output/LayerNorm/batchnorm/add_1" [id=675, type=Add]; +"676 QuantizeLinear_bert/encoder/layer_2/output/LayerNorm/batchnorm/add_1^0_3" [id=676, label="676 QuantizeLinear_bert/encoder/layer_2/output/LayerNorm/batchnorm/add_1:0_3", type=QuantizeLinear]; +"677 DequantizeLinear_bert/encoder/layer_2/output/LayerNorm/batchnorm/add_1^0_3" [id=677, label="677 DequantizeLinear_bert/encoder/layer_2/output/LayerNorm/batchnorm/add_1:0_3", type=DequantizeLinear]; +"678 QuantizeLinear_bert/encoder/layer_2/output/LayerNorm/batchnorm/add_1^0_2" [id=678, label="678 QuantizeLinear_bert/encoder/layer_2/output/LayerNorm/batchnorm/add_1:0_2", type=QuantizeLinear]; +"679 DequantizeLinear_bert/encoder/layer_2/output/LayerNorm/batchnorm/add_1^0_2" [id=679, label="679 DequantizeLinear_bert/encoder/layer_2/output/LayerNorm/batchnorm/add_1:0_2", type=DequantizeLinear]; +"680 QuantizeLinear_bert/encoder/layer_2/output/LayerNorm/batchnorm/add_1^0_1" [id=680, label="680 QuantizeLinear_bert/encoder/layer_2/output/LayerNorm/batchnorm/add_1:0_1", type=QuantizeLinear]; +"681 DequantizeLinear_bert/encoder/layer_2/output/LayerNorm/batchnorm/add_1^0_1" [id=681, label="681 DequantizeLinear_bert/encoder/layer_2/output/LayerNorm/batchnorm/add_1:0_1", type=DequantizeLinear]; +"682 QuantizeLinear_bert/encoder/layer_3/attention/self/value/kernel^0_1" [id=682, label="682 QuantizeLinear_bert/encoder/layer_3/attention/self/value/kernel:0_1", type=QuantizeLinear]; +"683 DequantizeLinear_bert/encoder/layer_3/attention/self/value/kernel^0_1" [id=683, label="683 DequantizeLinear_bert/encoder/layer_3/attention/self/value/kernel:0_1", type=DequantizeLinear]; +"684 bert/encoder/layer_3/attention/self/value/MatMul" [id=684, type=MatMul]; +"685 bert/encoder/layer_3/attention/self/value/BiasAdd" [id=685, type=Add]; +"686 
bert/encoder/layer_3/attention/self/Reshape_2" [id=686, type=Reshape]; +"687 bert/encoder/layer_3/attention/self/transpose_2" [id=687, type=Transpose]; +"688 QuantizeLinear_bert/encoder/layer_3/attention/self/query/kernel^0_1" [id=688, label="688 QuantizeLinear_bert/encoder/layer_3/attention/self/query/kernel:0_1", type=QuantizeLinear]; +"689 DequantizeLinear_bert/encoder/layer_3/attention/self/query/kernel^0_1" [id=689, label="689 DequantizeLinear_bert/encoder/layer_3/attention/self/query/kernel:0_1", type=DequantizeLinear]; +"690 bert/encoder/layer_3/attention/self/query/MatMul" [id=690, type=MatMul]; +"691 bert/encoder/layer_3/attention/self/query/BiasAdd" [id=691, type=Add]; +"692 QuantizeLinear_bert/encoder/layer_3/attention/self/query/BiasAdd^0_1" [id=692, label="692 QuantizeLinear_bert/encoder/layer_3/attention/self/query/BiasAdd:0_1", type=QuantizeLinear]; +"693 DequantizeLinear_bert/encoder/layer_3/attention/self/query/BiasAdd^0_1" [id=693, label="693 DequantizeLinear_bert/encoder/layer_3/attention/self/query/BiasAdd:0_1", type=DequantizeLinear]; +"694 bert/encoder/layer_3/attention/self/Reshape" [id=694, type=Reshape]; +"695 bert/encoder/layer_3/attention/self/transpose" [id=695, type=Transpose]; +"696 QuantizeLinear_bert/encoder/layer_3/attention/self/key/kernel^0_1" [id=696, label="696 QuantizeLinear_bert/encoder/layer_3/attention/self/key/kernel:0_1", type=QuantizeLinear]; +"697 DequantizeLinear_bert/encoder/layer_3/attention/self/key/kernel^0_1" [id=697, label="697 DequantizeLinear_bert/encoder/layer_3/attention/self/key/kernel:0_1", type=DequantizeLinear]; +"698 bert/encoder/layer_3/attention/self/key/MatMul" [id=698, type=MatMul]; +"699 bert/encoder/layer_3/attention/self/key/BiasAdd" [id=699, type=Add]; +"700 QuantizeLinear_bert/encoder/layer_3/attention/self/key/BiasAdd^0_1" [id=700, label="700 QuantizeLinear_bert/encoder/layer_3/attention/self/key/BiasAdd:0_1", type=QuantizeLinear]; +"701 DequantizeLinear_bert/encoder/layer_3/attention/self/key/BiasAdd^0_1" [id=701, label="701 DequantizeLinear_bert/encoder/layer_3/attention/self/key/BiasAdd:0_1", type=DequantizeLinear]; +"702 bert/encoder/layer_3/attention/self/Reshape_1" [id=702, type=Reshape]; +"703 bert/encoder/layer_3/attention/self/transpose_1" [id=703, type=Transpose]; +"704 bert/encoder/layer_3/attention/self/MatMul__348" [id=704, type=Transpose]; +"705 bert/encoder/layer_3/attention/self/MatMul" [id=705, type=MatMul]; +"706 bert/encoder/layer_3/attention/self/Mul" [id=706, type=Mul]; +"707 bert/encoder/layer_3/attention/self/add" [id=707, type=Add]; +"708 bert/encoder/layer_3/attention/self/Softmax" [id=708, type=Softmax]; +"709 bert/encoder/layer_3/attention/self/MatMul_1" [id=709, type=MatMul]; +"710 QuantizeLinear_bert/encoder/layer_3/attention/self/MatMul_1^0_1" [id=710, label="710 QuantizeLinear_bert/encoder/layer_3/attention/self/MatMul_1:0_1", type=QuantizeLinear]; +"711 DequantizeLinear_bert/encoder/layer_3/attention/self/MatMul_1^0_1" [id=711, label="711 DequantizeLinear_bert/encoder/layer_3/attention/self/MatMul_1:0_1", type=DequantizeLinear]; +"712 bert/encoder/layer_3/attention/self/transpose_3" [id=712, type=Transpose]; +"713 bert/encoder/layer_3/attention/self/Reshape_3" [id=713, type=Reshape]; +"714 QuantizeLinear_bert/encoder/layer_3/attention/output/dense/kernel^0_1" [id=714, label="714 QuantizeLinear_bert/encoder/layer_3/attention/output/dense/kernel:0_1", type=QuantizeLinear]; +"715 DequantizeLinear_bert/encoder/layer_3/attention/output/dense/kernel^0_1" [id=715, label="715 
DequantizeLinear_bert/encoder/layer_3/attention/output/dense/kernel:0_1", type=DequantizeLinear]; +"716 bert/encoder/layer_3/attention/output/dense/MatMul" [id=716, type=MatMul]; +"717 bert/encoder/layer_3/attention/output/dense/BiasAdd" [id=717, type=Add]; +"718 bert/encoder/layer_3/attention/output/add" [id=718, type=Add]; +"719 bert/encoder/layer_3/attention/output/LayerNorm/moments/mean" [id=719, type=ReduceMean]; +"720 bert/encoder/layer_3/attention/output/LayerNorm/moments/StopGradient" [id=720, type=Identity]; +"721 bert/encoder/layer_3/attention/output/LayerNorm/moments/SquaredDifference" [id=721, type=Sub]; +"722 bert/encoder/layer_3/attention/output/LayerNorm/moments/SquaredDifference__351" [id=722, type=Mul]; +"723 bert/encoder/layer_3/attention/output/LayerNorm/moments/variance" [id=723, type=ReduceMean]; +"724 bert/encoder/layer_3/attention/output/LayerNorm/batchnorm/add" [id=724, type=Add]; +"725 bert/encoder/layer_3/attention/output/LayerNorm/batchnorm/Rsqrt" [id=725, type=Sqrt]; +"726 bert/encoder/layer_3/attention/output/LayerNorm/batchnorm/Rsqrt__353" [id=726, type=Reciprocal]; +"727 bert/encoder/layer_3/attention/output/LayerNorm/batchnorm/mul" [id=727, type=Mul]; +"728 bert/encoder/layer_3/attention/output/LayerNorm/batchnorm/mul_2" [id=728, type=Mul]; +"729 bert/encoder/layer_3/attention/output/LayerNorm/batchnorm/sub" [id=729, type=Sub]; +"730 bert/encoder/layer_3/attention/output/LayerNorm/batchnorm/mul_1" [id=730, type=Mul]; +"731 bert/encoder/layer_3/attention/output/LayerNorm/batchnorm/add_1" [id=731, type=Add]; +"732 QuantizeLinear_bert/encoder/layer_3/attention/output/LayerNorm/batchnorm/add_1^0_1" [id=732, label="732 QuantizeLinear_bert/encoder/layer_3/attention/output/LayerNorm/batchnorm/add_1:0_1", type=QuantizeLinear]; +"733 DequantizeLinear_bert/encoder/layer_3/attention/output/LayerNorm/batchnorm/add_1^0_1" [id=733, label="733 DequantizeLinear_bert/encoder/layer_3/attention/output/LayerNorm/batchnorm/add_1:0_1", type=DequantizeLinear]; +"734 QuantizeLinear_bert/encoder/layer_3/intermediate/dense/kernel^0_1" [id=734, label="734 QuantizeLinear_bert/encoder/layer_3/intermediate/dense/kernel:0_1", type=QuantizeLinear]; +"735 DequantizeLinear_bert/encoder/layer_3/intermediate/dense/kernel^0_1" [id=735, label="735 DequantizeLinear_bert/encoder/layer_3/intermediate/dense/kernel:0_1", type=DequantizeLinear]; +"736 bert/encoder/layer_3/intermediate/dense/MatMul" [id=736, type=MatMul]; +"737 bert/encoder/layer_3/intermediate/dense/BiasAdd" [id=737, type=Add]; +"738 bert/encoder/layer_3/intermediate/dense/Pow" [id=738, type=Pow]; +"739 bert/encoder/layer_3/intermediate/dense/mul" [id=739, type=Mul]; +"740 bert/encoder/layer_3/intermediate/dense/add" [id=740, type=Add]; +"741 bert/encoder/layer_3/intermediate/dense/mul_1" [id=741, type=Mul]; +"742 bert/encoder/layer_3/intermediate/dense/Tanh" [id=742, type=Tanh]; +"743 bert/encoder/layer_3/intermediate/dense/add_1" [id=743, type=Add]; +"744 bert/encoder/layer_3/intermediate/dense/mul_2" [id=744, type=Mul]; +"745 bert/encoder/layer_3/intermediate/dense/mul_3" [id=745, type=Mul]; +"746 QuantizeLinear_bert/encoder/layer_3/intermediate/dense/mul_3^0_1" [id=746, label="746 QuantizeLinear_bert/encoder/layer_3/intermediate/dense/mul_3:0_1", type=QuantizeLinear]; +"747 DequantizeLinear_bert/encoder/layer_3/intermediate/dense/mul_3^0_1" [id=747, label="747 DequantizeLinear_bert/encoder/layer_3/intermediate/dense/mul_3:0_1", type=DequantizeLinear]; +"748 QuantizeLinear_bert/encoder/layer_3/output/dense/kernel^0_1" [id=748, 
label="748 QuantizeLinear_bert/encoder/layer_3/output/dense/kernel:0_1", type=QuantizeLinear]; +"749 DequantizeLinear_bert/encoder/layer_3/output/dense/kernel^0_1" [id=749, label="749 DequantizeLinear_bert/encoder/layer_3/output/dense/kernel:0_1", type=DequantizeLinear]; +"750 bert/encoder/layer_3/output/dense/MatMul" [id=750, type=MatMul]; +"751 bert/encoder/layer_3/output/dense/BiasAdd" [id=751, type=Add]; +"752 bert/encoder/layer_3/output/add" [id=752, type=Add]; +"753 bert/encoder/layer_3/output/LayerNorm/moments/mean" [id=753, type=ReduceMean]; +"754 bert/encoder/layer_3/output/LayerNorm/moments/StopGradient" [id=754, type=Identity]; +"755 bert/encoder/layer_3/output/LayerNorm/moments/SquaredDifference" [id=755, type=Sub]; +"756 bert/encoder/layer_3/output/LayerNorm/moments/SquaredDifference__355" [id=756, type=Mul]; +"757 bert/encoder/layer_3/output/LayerNorm/moments/variance" [id=757, type=ReduceMean]; +"758 bert/encoder/layer_3/output/LayerNorm/batchnorm/add" [id=758, type=Add]; +"759 bert/encoder/layer_3/output/LayerNorm/batchnorm/Rsqrt" [id=759, type=Sqrt]; +"760 bert/encoder/layer_3/output/LayerNorm/batchnorm/Rsqrt__357" [id=760, type=Reciprocal]; +"761 bert/encoder/layer_3/output/LayerNorm/batchnorm/mul" [id=761, type=Mul]; +"762 bert/encoder/layer_3/output/LayerNorm/batchnorm/mul_2" [id=762, type=Mul]; +"763 bert/encoder/layer_3/output/LayerNorm/batchnorm/sub" [id=763, type=Sub]; +"764 bert/encoder/layer_3/output/LayerNorm/batchnorm/mul_1" [id=764, type=Mul]; +"765 bert/encoder/layer_3/output/LayerNorm/batchnorm/add_1" [id=765, type=Add]; +"766 QuantizeLinear_bert/encoder/layer_3/output/LayerNorm/batchnorm/add_1^0_3" [id=766, label="766 QuantizeLinear_bert/encoder/layer_3/output/LayerNorm/batchnorm/add_1:0_3", type=QuantizeLinear]; +"767 DequantizeLinear_bert/encoder/layer_3/output/LayerNorm/batchnorm/add_1^0_3" [id=767, label="767 DequantizeLinear_bert/encoder/layer_3/output/LayerNorm/batchnorm/add_1:0_3", type=DequantizeLinear]; +"768 QuantizeLinear_bert/encoder/layer_3/output/LayerNorm/batchnorm/add_1^0_2" [id=768, label="768 QuantizeLinear_bert/encoder/layer_3/output/LayerNorm/batchnorm/add_1:0_2", type=QuantizeLinear]; +"769 DequantizeLinear_bert/encoder/layer_3/output/LayerNorm/batchnorm/add_1^0_2" [id=769, label="769 DequantizeLinear_bert/encoder/layer_3/output/LayerNorm/batchnorm/add_1:0_2", type=DequantizeLinear]; +"770 QuantizeLinear_bert/encoder/layer_3/output/LayerNorm/batchnorm/add_1^0_1" [id=770, label="770 QuantizeLinear_bert/encoder/layer_3/output/LayerNorm/batchnorm/add_1:0_1", type=QuantizeLinear]; +"771 DequantizeLinear_bert/encoder/layer_3/output/LayerNorm/batchnorm/add_1^0_1" [id=771, label="771 DequantizeLinear_bert/encoder/layer_3/output/LayerNorm/batchnorm/add_1:0_1", type=DequantizeLinear]; +"772 QuantizeLinear_bert/encoder/layer_4/attention/self/value/kernel^0_1" [id=772, label="772 QuantizeLinear_bert/encoder/layer_4/attention/self/value/kernel:0_1", type=QuantizeLinear]; +"773 DequantizeLinear_bert/encoder/layer_4/attention/self/value/kernel^0_1" [id=773, label="773 DequantizeLinear_bert/encoder/layer_4/attention/self/value/kernel:0_1", type=DequantizeLinear]; +"774 bert/encoder/layer_4/attention/self/value/MatMul" [id=774, type=MatMul]; +"775 bert/encoder/layer_4/attention/self/value/BiasAdd" [id=775, type=Add]; +"776 bert/encoder/layer_4/attention/self/Reshape_2" [id=776, type=Reshape]; +"777 bert/encoder/layer_4/attention/self/transpose_2" [id=777, type=Transpose]; +"778 QuantizeLinear_bert/encoder/layer_4/attention/self/query/kernel^0_1" 
[id=778, label="778 QuantizeLinear_bert/encoder/layer_4/attention/self/query/kernel:0_1", type=QuantizeLinear]; +"779 DequantizeLinear_bert/encoder/layer_4/attention/self/query/kernel^0_1" [id=779, label="779 DequantizeLinear_bert/encoder/layer_4/attention/self/query/kernel:0_1", type=DequantizeLinear]; +"780 bert/encoder/layer_4/attention/self/query/MatMul" [id=780, type=MatMul]; +"781 bert/encoder/layer_4/attention/self/query/BiasAdd" [id=781, type=Add]; +"782 QuantizeLinear_bert/encoder/layer_4/attention/self/query/BiasAdd^0_1" [id=782, label="782 QuantizeLinear_bert/encoder/layer_4/attention/self/query/BiasAdd:0_1", type=QuantizeLinear]; +"783 DequantizeLinear_bert/encoder/layer_4/attention/self/query/BiasAdd^0_1" [id=783, label="783 DequantizeLinear_bert/encoder/layer_4/attention/self/query/BiasAdd:0_1", type=DequantizeLinear]; +"784 bert/encoder/layer_4/attention/self/Reshape" [id=784, type=Reshape]; +"785 bert/encoder/layer_4/attention/self/transpose" [id=785, type=Transpose]; +"786 QuantizeLinear_bert/encoder/layer_4/attention/self/key/kernel^0_1" [id=786, label="786 QuantizeLinear_bert/encoder/layer_4/attention/self/key/kernel:0_1", type=QuantizeLinear]; +"787 DequantizeLinear_bert/encoder/layer_4/attention/self/key/kernel^0_1" [id=787, label="787 DequantizeLinear_bert/encoder/layer_4/attention/self/key/kernel:0_1", type=DequantizeLinear]; +"788 bert/encoder/layer_4/attention/self/key/MatMul" [id=788, type=MatMul]; +"789 bert/encoder/layer_4/attention/self/key/BiasAdd" [id=789, type=Add]; +"790 QuantizeLinear_bert/encoder/layer_4/attention/self/key/BiasAdd^0_1" [id=790, label="790 QuantizeLinear_bert/encoder/layer_4/attention/self/key/BiasAdd:0_1", type=QuantizeLinear]; +"791 DequantizeLinear_bert/encoder/layer_4/attention/self/key/BiasAdd^0_1" [id=791, label="791 DequantizeLinear_bert/encoder/layer_4/attention/self/key/BiasAdd:0_1", type=DequantizeLinear]; +"792 bert/encoder/layer_4/attention/self/Reshape_1" [id=792, type=Reshape]; +"793 bert/encoder/layer_4/attention/self/transpose_1" [id=793, type=Transpose]; +"794 bert/encoder/layer_4/attention/self/MatMul__362" [id=794, type=Transpose]; +"795 bert/encoder/layer_4/attention/self/MatMul" [id=795, type=MatMul]; +"796 bert/encoder/layer_4/attention/self/Mul" [id=796, type=Mul]; +"797 bert/encoder/layer_4/attention/self/add" [id=797, type=Add]; +"798 bert/encoder/layer_4/attention/self/Softmax" [id=798, type=Softmax]; +"799 bert/encoder/layer_4/attention/self/MatMul_1" [id=799, type=MatMul]; +"800 QuantizeLinear_bert/encoder/layer_4/attention/self/MatMul_1^0_1" [id=800, label="800 QuantizeLinear_bert/encoder/layer_4/attention/self/MatMul_1:0_1", type=QuantizeLinear]; +"801 DequantizeLinear_bert/encoder/layer_4/attention/self/MatMul_1^0_1" [id=801, label="801 DequantizeLinear_bert/encoder/layer_4/attention/self/MatMul_1:0_1", type=DequantizeLinear]; +"802 bert/encoder/layer_4/attention/self/transpose_3" [id=802, type=Transpose]; +"803 bert/encoder/layer_4/attention/self/Reshape_3" [id=803, type=Reshape]; +"804 QuantizeLinear_bert/encoder/layer_4/attention/output/dense/kernel^0_1" [id=804, label="804 QuantizeLinear_bert/encoder/layer_4/attention/output/dense/kernel:0_1", type=QuantizeLinear]; +"805 DequantizeLinear_bert/encoder/layer_4/attention/output/dense/kernel^0_1" [id=805, label="805 DequantizeLinear_bert/encoder/layer_4/attention/output/dense/kernel:0_1", type=DequantizeLinear]; +"806 bert/encoder/layer_4/attention/output/dense/MatMul" [id=806, type=MatMul]; +"807 bert/encoder/layer_4/attention/output/dense/BiasAdd" [id=807, 
type=Add]; +"808 bert/encoder/layer_4/attention/output/add" [id=808, type=Add]; +"809 bert/encoder/layer_4/attention/output/LayerNorm/moments/mean" [id=809, type=ReduceMean]; +"810 bert/encoder/layer_4/attention/output/LayerNorm/moments/StopGradient" [id=810, type=Identity]; +"811 bert/encoder/layer_4/attention/output/LayerNorm/moments/SquaredDifference" [id=811, type=Sub]; +"812 bert/encoder/layer_4/attention/output/LayerNorm/moments/SquaredDifference__365" [id=812, type=Mul]; +"813 bert/encoder/layer_4/attention/output/LayerNorm/moments/variance" [id=813, type=ReduceMean]; +"814 bert/encoder/layer_4/attention/output/LayerNorm/batchnorm/add" [id=814, type=Add]; +"815 bert/encoder/layer_4/attention/output/LayerNorm/batchnorm/Rsqrt" [id=815, type=Sqrt]; +"816 bert/encoder/layer_4/attention/output/LayerNorm/batchnorm/Rsqrt__367" [id=816, type=Reciprocal]; +"817 bert/encoder/layer_4/attention/output/LayerNorm/batchnorm/mul" [id=817, type=Mul]; +"818 bert/encoder/layer_4/attention/output/LayerNorm/batchnorm/mul_2" [id=818, type=Mul]; +"819 bert/encoder/layer_4/attention/output/LayerNorm/batchnorm/sub" [id=819, type=Sub]; +"820 bert/encoder/layer_4/attention/output/LayerNorm/batchnorm/mul_1" [id=820, type=Mul]; +"821 bert/encoder/layer_4/attention/output/LayerNorm/batchnorm/add_1" [id=821, type=Add]; +"822 QuantizeLinear_bert/encoder/layer_4/attention/output/LayerNorm/batchnorm/add_1^0_1" [id=822, label="822 QuantizeLinear_bert/encoder/layer_4/attention/output/LayerNorm/batchnorm/add_1:0_1", type=QuantizeLinear]; +"823 DequantizeLinear_bert/encoder/layer_4/attention/output/LayerNorm/batchnorm/add_1^0_1" [id=823, label="823 DequantizeLinear_bert/encoder/layer_4/attention/output/LayerNorm/batchnorm/add_1:0_1", type=DequantizeLinear]; +"824 QuantizeLinear_bert/encoder/layer_4/intermediate/dense/kernel^0_1" [id=824, label="824 QuantizeLinear_bert/encoder/layer_4/intermediate/dense/kernel:0_1", type=QuantizeLinear]; +"825 DequantizeLinear_bert/encoder/layer_4/intermediate/dense/kernel^0_1" [id=825, label="825 DequantizeLinear_bert/encoder/layer_4/intermediate/dense/kernel:0_1", type=DequantizeLinear]; +"826 bert/encoder/layer_4/intermediate/dense/MatMul" [id=826, type=MatMul]; +"827 bert/encoder/layer_4/intermediate/dense/BiasAdd" [id=827, type=Add]; +"828 bert/encoder/layer_4/intermediate/dense/Pow" [id=828, type=Pow]; +"829 bert/encoder/layer_4/intermediate/dense/mul" [id=829, type=Mul]; +"830 bert/encoder/layer_4/intermediate/dense/add" [id=830, type=Add]; +"831 bert/encoder/layer_4/intermediate/dense/mul_1" [id=831, type=Mul]; +"832 bert/encoder/layer_4/intermediate/dense/Tanh" [id=832, type=Tanh]; +"833 bert/encoder/layer_4/intermediate/dense/add_1" [id=833, type=Add]; +"834 bert/encoder/layer_4/intermediate/dense/mul_2" [id=834, type=Mul]; +"835 bert/encoder/layer_4/intermediate/dense/mul_3" [id=835, type=Mul]; +"836 QuantizeLinear_bert/encoder/layer_4/intermediate/dense/mul_3^0_1" [id=836, label="836 QuantizeLinear_bert/encoder/layer_4/intermediate/dense/mul_3:0_1", type=QuantizeLinear]; +"837 DequantizeLinear_bert/encoder/layer_4/intermediate/dense/mul_3^0_1" [id=837, label="837 DequantizeLinear_bert/encoder/layer_4/intermediate/dense/mul_3:0_1", type=DequantizeLinear]; +"838 QuantizeLinear_bert/encoder/layer_4/output/dense/kernel^0_1" [id=838, label="838 QuantizeLinear_bert/encoder/layer_4/output/dense/kernel:0_1", type=QuantizeLinear]; +"839 DequantizeLinear_bert/encoder/layer_4/output/dense/kernel^0_1" [id=839, label="839 DequantizeLinear_bert/encoder/layer_4/output/dense/kernel:0_1", 
type=DequantizeLinear]; +"840 bert/encoder/layer_4/output/dense/MatMul" [id=840, type=MatMul]; +"841 bert/encoder/layer_4/output/dense/BiasAdd" [id=841, type=Add]; +"842 bert/encoder/layer_4/output/add" [id=842, type=Add]; +"843 bert/encoder/layer_4/output/LayerNorm/moments/mean" [id=843, type=ReduceMean]; +"844 bert/encoder/layer_4/output/LayerNorm/moments/StopGradient" [id=844, type=Identity]; +"845 bert/encoder/layer_4/output/LayerNorm/moments/SquaredDifference" [id=845, type=Sub]; +"846 bert/encoder/layer_4/output/LayerNorm/moments/SquaredDifference__369" [id=846, type=Mul]; +"847 bert/encoder/layer_4/output/LayerNorm/moments/variance" [id=847, type=ReduceMean]; +"848 bert/encoder/layer_4/output/LayerNorm/batchnorm/add" [id=848, type=Add]; +"849 bert/encoder/layer_4/output/LayerNorm/batchnorm/Rsqrt" [id=849, type=Sqrt]; +"850 bert/encoder/layer_4/output/LayerNorm/batchnorm/Rsqrt__371" [id=850, type=Reciprocal]; +"851 bert/encoder/layer_4/output/LayerNorm/batchnorm/mul" [id=851, type=Mul]; +"852 bert/encoder/layer_4/output/LayerNorm/batchnorm/mul_2" [id=852, type=Mul]; +"853 bert/encoder/layer_4/output/LayerNorm/batchnorm/sub" [id=853, type=Sub]; +"854 bert/encoder/layer_4/output/LayerNorm/batchnorm/mul_1" [id=854, type=Mul]; +"855 bert/encoder/layer_4/output/LayerNorm/batchnorm/add_1" [id=855, type=Add]; +"856 QuantizeLinear_bert/encoder/layer_4/output/LayerNorm/batchnorm/add_1^0_3" [id=856, label="856 QuantizeLinear_bert/encoder/layer_4/output/LayerNorm/batchnorm/add_1:0_3", type=QuantizeLinear]; +"857 DequantizeLinear_bert/encoder/layer_4/output/LayerNorm/batchnorm/add_1^0_3" [id=857, label="857 DequantizeLinear_bert/encoder/layer_4/output/LayerNorm/batchnorm/add_1:0_3", type=DequantizeLinear]; +"858 QuantizeLinear_bert/encoder/layer_4/output/LayerNorm/batchnorm/add_1^0_2" [id=858, label="858 QuantizeLinear_bert/encoder/layer_4/output/LayerNorm/batchnorm/add_1:0_2", type=QuantizeLinear]; +"859 DequantizeLinear_bert/encoder/layer_4/output/LayerNorm/batchnorm/add_1^0_2" [id=859, label="859 DequantizeLinear_bert/encoder/layer_4/output/LayerNorm/batchnorm/add_1:0_2", type=DequantizeLinear]; +"860 QuantizeLinear_bert/encoder/layer_4/output/LayerNorm/batchnorm/add_1^0_1" [id=860, label="860 QuantizeLinear_bert/encoder/layer_4/output/LayerNorm/batchnorm/add_1:0_1", type=QuantizeLinear]; +"861 DequantizeLinear_bert/encoder/layer_4/output/LayerNorm/batchnorm/add_1^0_1" [id=861, label="861 DequantizeLinear_bert/encoder/layer_4/output/LayerNorm/batchnorm/add_1:0_1", type=DequantizeLinear]; +"862 QuantizeLinear_bert/encoder/layer_5/attention/self/value/kernel^0_1" [id=862, label="862 QuantizeLinear_bert/encoder/layer_5/attention/self/value/kernel:0_1", type=QuantizeLinear]; +"863 DequantizeLinear_bert/encoder/layer_5/attention/self/value/kernel^0_1" [id=863, label="863 DequantizeLinear_bert/encoder/layer_5/attention/self/value/kernel:0_1", type=DequantizeLinear]; +"864 bert/encoder/layer_5/attention/self/value/MatMul" [id=864, type=MatMul]; +"865 bert/encoder/layer_5/attention/self/value/BiasAdd" [id=865, type=Add]; +"866 bert/encoder/layer_5/attention/self/Reshape_2" [id=866, type=Reshape]; +"867 bert/encoder/layer_5/attention/self/transpose_2" [id=867, type=Transpose]; +"868 QuantizeLinear_bert/encoder/layer_5/attention/self/query/kernel^0_1" [id=868, label="868 QuantizeLinear_bert/encoder/layer_5/attention/self/query/kernel:0_1", type=QuantizeLinear]; +"869 DequantizeLinear_bert/encoder/layer_5/attention/self/query/kernel^0_1" [id=869, label="869 
DequantizeLinear_bert/encoder/layer_5/attention/self/query/kernel:0_1", type=DequantizeLinear]; +"870 bert/encoder/layer_5/attention/self/query/MatMul" [id=870, type=MatMul]; +"871 bert/encoder/layer_5/attention/self/query/BiasAdd" [id=871, type=Add]; +"872 QuantizeLinear_bert/encoder/layer_5/attention/self/query/BiasAdd^0_1" [id=872, label="872 QuantizeLinear_bert/encoder/layer_5/attention/self/query/BiasAdd:0_1", type=QuantizeLinear]; +"873 DequantizeLinear_bert/encoder/layer_5/attention/self/query/BiasAdd^0_1" [id=873, label="873 DequantizeLinear_bert/encoder/layer_5/attention/self/query/BiasAdd:0_1", type=DequantizeLinear]; +"874 bert/encoder/layer_5/attention/self/Reshape" [id=874, type=Reshape]; +"875 bert/encoder/layer_5/attention/self/transpose" [id=875, type=Transpose]; +"876 QuantizeLinear_bert/encoder/layer_5/attention/self/key/kernel^0_1" [id=876, label="876 QuantizeLinear_bert/encoder/layer_5/attention/self/key/kernel:0_1", type=QuantizeLinear]; +"877 DequantizeLinear_bert/encoder/layer_5/attention/self/key/kernel^0_1" [id=877, label="877 DequantizeLinear_bert/encoder/layer_5/attention/self/key/kernel:0_1", type=DequantizeLinear]; +"878 bert/encoder/layer_5/attention/self/key/MatMul" [id=878, type=MatMul]; +"879 bert/encoder/layer_5/attention/self/key/BiasAdd" [id=879, type=Add]; +"880 QuantizeLinear_bert/encoder/layer_5/attention/self/key/BiasAdd^0_1" [id=880, label="880 QuantizeLinear_bert/encoder/layer_5/attention/self/key/BiasAdd:0_1", type=QuantizeLinear]; +"881 DequantizeLinear_bert/encoder/layer_5/attention/self/key/BiasAdd^0_1" [id=881, label="881 DequantizeLinear_bert/encoder/layer_5/attention/self/key/BiasAdd:0_1", type=DequantizeLinear]; +"882 bert/encoder/layer_5/attention/self/Reshape_1" [id=882, type=Reshape]; +"883 bert/encoder/layer_5/attention/self/transpose_1" [id=883, type=Transpose]; +"884 bert/encoder/layer_5/attention/self/MatMul__376" [id=884, type=Transpose]; +"885 bert/encoder/layer_5/attention/self/MatMul" [id=885, type=MatMul]; +"886 bert/encoder/layer_5/attention/self/Mul" [id=886, type=Mul]; +"887 bert/encoder/layer_5/attention/self/add" [id=887, type=Add]; +"888 bert/encoder/layer_5/attention/self/Softmax" [id=888, type=Softmax]; +"889 bert/encoder/layer_5/attention/self/MatMul_1" [id=889, type=MatMul]; +"890 QuantizeLinear_bert/encoder/layer_5/attention/self/MatMul_1^0_1" [id=890, label="890 QuantizeLinear_bert/encoder/layer_5/attention/self/MatMul_1:0_1", type=QuantizeLinear]; +"891 DequantizeLinear_bert/encoder/layer_5/attention/self/MatMul_1^0_1" [id=891, label="891 DequantizeLinear_bert/encoder/layer_5/attention/self/MatMul_1:0_1", type=DequantizeLinear]; +"892 bert/encoder/layer_5/attention/self/transpose_3" [id=892, type=Transpose]; +"893 bert/encoder/layer_5/attention/self/Reshape_3" [id=893, type=Reshape]; +"894 QuantizeLinear_bert/encoder/layer_5/attention/output/dense/kernel^0_1" [id=894, label="894 QuantizeLinear_bert/encoder/layer_5/attention/output/dense/kernel:0_1", type=QuantizeLinear]; +"895 DequantizeLinear_bert/encoder/layer_5/attention/output/dense/kernel^0_1" [id=895, label="895 DequantizeLinear_bert/encoder/layer_5/attention/output/dense/kernel:0_1", type=DequantizeLinear]; +"896 bert/encoder/layer_5/attention/output/dense/MatMul" [id=896, type=MatMul]; +"897 bert/encoder/layer_5/attention/output/dense/BiasAdd" [id=897, type=Add]; +"898 bert/encoder/layer_5/attention/output/add" [id=898, type=Add]; +"899 bert/encoder/layer_5/attention/output/LayerNorm/moments/mean" [id=899, type=ReduceMean]; +"900 
bert/encoder/layer_5/attention/output/LayerNorm/moments/StopGradient" [id=900, type=Identity]; +"901 bert/encoder/layer_5/attention/output/LayerNorm/moments/SquaredDifference" [id=901, type=Sub]; +"902 bert/encoder/layer_5/attention/output/LayerNorm/moments/SquaredDifference__379" [id=902, type=Mul]; +"903 bert/encoder/layer_5/attention/output/LayerNorm/moments/variance" [id=903, type=ReduceMean]; +"904 bert/encoder/layer_5/attention/output/LayerNorm/batchnorm/add" [id=904, type=Add]; +"905 bert/encoder/layer_5/attention/output/LayerNorm/batchnorm/Rsqrt" [id=905, type=Sqrt]; +"906 bert/encoder/layer_5/attention/output/LayerNorm/batchnorm/Rsqrt__381" [id=906, type=Reciprocal]; +"907 bert/encoder/layer_5/attention/output/LayerNorm/batchnorm/mul" [id=907, type=Mul]; +"908 bert/encoder/layer_5/attention/output/LayerNorm/batchnorm/mul_2" [id=908, type=Mul]; +"909 bert/encoder/layer_5/attention/output/LayerNorm/batchnorm/sub" [id=909, type=Sub]; +"910 bert/encoder/layer_5/attention/output/LayerNorm/batchnorm/mul_1" [id=910, type=Mul]; +"911 bert/encoder/layer_5/attention/output/LayerNorm/batchnorm/add_1" [id=911, type=Add]; +"912 QuantizeLinear_bert/encoder/layer_5/attention/output/LayerNorm/batchnorm/add_1^0_1" [id=912, label="912 QuantizeLinear_bert/encoder/layer_5/attention/output/LayerNorm/batchnorm/add_1:0_1", type=QuantizeLinear]; +"913 DequantizeLinear_bert/encoder/layer_5/attention/output/LayerNorm/batchnorm/add_1^0_1" [id=913, label="913 DequantizeLinear_bert/encoder/layer_5/attention/output/LayerNorm/batchnorm/add_1:0_1", type=DequantizeLinear]; +"914 QuantizeLinear_bert/encoder/layer_5/intermediate/dense/kernel^0_1" [id=914, label="914 QuantizeLinear_bert/encoder/layer_5/intermediate/dense/kernel:0_1", type=QuantizeLinear]; +"915 DequantizeLinear_bert/encoder/layer_5/intermediate/dense/kernel^0_1" [id=915, label="915 DequantizeLinear_bert/encoder/layer_5/intermediate/dense/kernel:0_1", type=DequantizeLinear]; +"916 bert/encoder/layer_5/intermediate/dense/MatMul" [id=916, type=MatMul]; +"917 bert/encoder/layer_5/intermediate/dense/BiasAdd" [id=917, type=Add]; +"918 bert/encoder/layer_5/intermediate/dense/Pow" [id=918, type=Pow]; +"919 bert/encoder/layer_5/intermediate/dense/mul" [id=919, type=Mul]; +"920 bert/encoder/layer_5/intermediate/dense/add" [id=920, type=Add]; +"921 bert/encoder/layer_5/intermediate/dense/mul_1" [id=921, type=Mul]; +"922 bert/encoder/layer_5/intermediate/dense/Tanh" [id=922, type=Tanh]; +"923 bert/encoder/layer_5/intermediate/dense/add_1" [id=923, type=Add]; +"924 bert/encoder/layer_5/intermediate/dense/mul_2" [id=924, type=Mul]; +"925 bert/encoder/layer_5/intermediate/dense/mul_3" [id=925, type=Mul]; +"926 QuantizeLinear_bert/encoder/layer_5/intermediate/dense/mul_3^0_1" [id=926, label="926 QuantizeLinear_bert/encoder/layer_5/intermediate/dense/mul_3:0_1", type=QuantizeLinear]; +"927 DequantizeLinear_bert/encoder/layer_5/intermediate/dense/mul_3^0_1" [id=927, label="927 DequantizeLinear_bert/encoder/layer_5/intermediate/dense/mul_3:0_1", type=DequantizeLinear]; +"928 QuantizeLinear_bert/encoder/layer_5/output/dense/kernel^0_1" [id=928, label="928 QuantizeLinear_bert/encoder/layer_5/output/dense/kernel:0_1", type=QuantizeLinear]; +"929 DequantizeLinear_bert/encoder/layer_5/output/dense/kernel^0_1" [id=929, label="929 DequantizeLinear_bert/encoder/layer_5/output/dense/kernel:0_1", type=DequantizeLinear]; +"930 bert/encoder/layer_5/output/dense/MatMul" [id=930, type=MatMul]; +"931 bert/encoder/layer_5/output/dense/BiasAdd" [id=931, type=Add]; +"932 
bert/encoder/layer_5/output/add" [id=932, type=Add]; +"933 bert/encoder/layer_5/output/LayerNorm/moments/mean" [id=933, type=ReduceMean]; +"934 bert/encoder/layer_5/output/LayerNorm/moments/StopGradient" [id=934, type=Identity]; +"935 bert/encoder/layer_5/output/LayerNorm/moments/SquaredDifference" [id=935, type=Sub]; +"936 bert/encoder/layer_5/output/LayerNorm/moments/SquaredDifference__383" [id=936, type=Mul]; +"937 bert/encoder/layer_5/output/LayerNorm/moments/variance" [id=937, type=ReduceMean]; +"938 bert/encoder/layer_5/output/LayerNorm/batchnorm/add" [id=938, type=Add]; +"939 bert/encoder/layer_5/output/LayerNorm/batchnorm/Rsqrt" [id=939, type=Sqrt]; +"940 bert/encoder/layer_5/output/LayerNorm/batchnorm/Rsqrt__385" [id=940, type=Reciprocal]; +"941 bert/encoder/layer_5/output/LayerNorm/batchnorm/mul" [id=941, type=Mul]; +"942 bert/encoder/layer_5/output/LayerNorm/batchnorm/mul_2" [id=942, type=Mul]; +"943 bert/encoder/layer_5/output/LayerNorm/batchnorm/sub" [id=943, type=Sub]; +"944 bert/encoder/layer_5/output/LayerNorm/batchnorm/mul_1" [id=944, type=Mul]; +"945 bert/encoder/layer_5/output/LayerNorm/batchnorm/add_1" [id=945, type=Add]; +"946 QuantizeLinear_bert/encoder/layer_5/output/LayerNorm/batchnorm/add_1^0_3" [id=946, label="946 QuantizeLinear_bert/encoder/layer_5/output/LayerNorm/batchnorm/add_1:0_3", type=QuantizeLinear]; +"947 DequantizeLinear_bert/encoder/layer_5/output/LayerNorm/batchnorm/add_1^0_3" [id=947, label="947 DequantizeLinear_bert/encoder/layer_5/output/LayerNorm/batchnorm/add_1:0_3", type=DequantizeLinear]; +"948 QuantizeLinear_bert/encoder/layer_5/output/LayerNorm/batchnorm/add_1^0_2" [id=948, label="948 QuantizeLinear_bert/encoder/layer_5/output/LayerNorm/batchnorm/add_1:0_2", type=QuantizeLinear]; +"949 DequantizeLinear_bert/encoder/layer_5/output/LayerNorm/batchnorm/add_1^0_2" [id=949, label="949 DequantizeLinear_bert/encoder/layer_5/output/LayerNorm/batchnorm/add_1:0_2", type=DequantizeLinear]; +"950 QuantizeLinear_bert/encoder/layer_5/output/LayerNorm/batchnorm/add_1^0_1" [id=950, label="950 QuantizeLinear_bert/encoder/layer_5/output/LayerNorm/batchnorm/add_1:0_1", type=QuantizeLinear]; +"951 DequantizeLinear_bert/encoder/layer_5/output/LayerNorm/batchnorm/add_1^0_1" [id=951, label="951 DequantizeLinear_bert/encoder/layer_5/output/LayerNorm/batchnorm/add_1:0_1", type=DequantizeLinear]; +"952 QuantizeLinear_bert/encoder/layer_6/attention/self/value/kernel^0_1" [id=952, label="952 QuantizeLinear_bert/encoder/layer_6/attention/self/value/kernel:0_1", type=QuantizeLinear]; +"953 DequantizeLinear_bert/encoder/layer_6/attention/self/value/kernel^0_1" [id=953, label="953 DequantizeLinear_bert/encoder/layer_6/attention/self/value/kernel:0_1", type=DequantizeLinear]; +"954 bert/encoder/layer_6/attention/self/value/MatMul" [id=954, type=MatMul]; +"955 bert/encoder/layer_6/attention/self/value/BiasAdd" [id=955, type=Add]; +"956 bert/encoder/layer_6/attention/self/Reshape_2" [id=956, type=Reshape]; +"957 bert/encoder/layer_6/attention/self/transpose_2" [id=957, type=Transpose]; +"958 QuantizeLinear_bert/encoder/layer_6/attention/self/query/kernel^0_1" [id=958, label="958 QuantizeLinear_bert/encoder/layer_6/attention/self/query/kernel:0_1", type=QuantizeLinear]; +"959 DequantizeLinear_bert/encoder/layer_6/attention/self/query/kernel^0_1" [id=959, label="959 DequantizeLinear_bert/encoder/layer_6/attention/self/query/kernel:0_1", type=DequantizeLinear]; +"960 bert/encoder/layer_6/attention/self/query/MatMul" [id=960, type=MatMul]; +"961 
bert/encoder/layer_6/attention/self/query/BiasAdd" [id=961, type=Add]; +"962 QuantizeLinear_bert/encoder/layer_6/attention/self/query/BiasAdd^0_1" [id=962, label="962 QuantizeLinear_bert/encoder/layer_6/attention/self/query/BiasAdd:0_1", type=QuantizeLinear]; +"963 DequantizeLinear_bert/encoder/layer_6/attention/self/query/BiasAdd^0_1" [id=963, label="963 DequantizeLinear_bert/encoder/layer_6/attention/self/query/BiasAdd:0_1", type=DequantizeLinear]; +"964 bert/encoder/layer_6/attention/self/Reshape" [id=964, type=Reshape]; +"965 bert/encoder/layer_6/attention/self/transpose" [id=965, type=Transpose]; +"966 QuantizeLinear_bert/encoder/layer_6/attention/self/key/kernel^0_1" [id=966, label="966 QuantizeLinear_bert/encoder/layer_6/attention/self/key/kernel:0_1", type=QuantizeLinear]; +"967 DequantizeLinear_bert/encoder/layer_6/attention/self/key/kernel^0_1" [id=967, label="967 DequantizeLinear_bert/encoder/layer_6/attention/self/key/kernel:0_1", type=DequantizeLinear]; +"968 bert/encoder/layer_6/attention/self/key/MatMul" [id=968, type=MatMul]; +"969 bert/encoder/layer_6/attention/self/key/BiasAdd" [id=969, type=Add]; +"970 QuantizeLinear_bert/encoder/layer_6/attention/self/key/BiasAdd^0_1" [id=970, label="970 QuantizeLinear_bert/encoder/layer_6/attention/self/key/BiasAdd:0_1", type=QuantizeLinear]; +"971 DequantizeLinear_bert/encoder/layer_6/attention/self/key/BiasAdd^0_1" [id=971, label="971 DequantizeLinear_bert/encoder/layer_6/attention/self/key/BiasAdd:0_1", type=DequantizeLinear]; +"972 bert/encoder/layer_6/attention/self/Reshape_1" [id=972, type=Reshape]; +"973 bert/encoder/layer_6/attention/self/transpose_1" [id=973, type=Transpose]; +"974 bert/encoder/layer_6/attention/self/MatMul__390" [id=974, type=Transpose]; +"975 bert/encoder/layer_6/attention/self/MatMul" [id=975, type=MatMul]; +"976 bert/encoder/layer_6/attention/self/Mul" [id=976, type=Mul]; +"977 bert/encoder/layer_6/attention/self/add" [id=977, type=Add]; +"978 bert/encoder/layer_6/attention/self/Softmax" [id=978, type=Softmax]; +"979 bert/encoder/layer_6/attention/self/MatMul_1" [id=979, type=MatMul]; +"980 QuantizeLinear_bert/encoder/layer_6/attention/self/MatMul_1^0_1" [id=980, label="980 QuantizeLinear_bert/encoder/layer_6/attention/self/MatMul_1:0_1", type=QuantizeLinear]; +"981 DequantizeLinear_bert/encoder/layer_6/attention/self/MatMul_1^0_1" [id=981, label="981 DequantizeLinear_bert/encoder/layer_6/attention/self/MatMul_1:0_1", type=DequantizeLinear]; +"982 bert/encoder/layer_6/attention/self/transpose_3" [id=982, type=Transpose]; +"983 bert/encoder/layer_6/attention/self/Reshape_3" [id=983, type=Reshape]; +"984 QuantizeLinear_bert/encoder/layer_6/attention/output/dense/kernel^0_1" [id=984, label="984 QuantizeLinear_bert/encoder/layer_6/attention/output/dense/kernel:0_1", type=QuantizeLinear]; +"985 DequantizeLinear_bert/encoder/layer_6/attention/output/dense/kernel^0_1" [id=985, label="985 DequantizeLinear_bert/encoder/layer_6/attention/output/dense/kernel:0_1", type=DequantizeLinear]; +"986 bert/encoder/layer_6/attention/output/dense/MatMul" [id=986, type=MatMul]; +"987 bert/encoder/layer_6/attention/output/dense/BiasAdd" [id=987, type=Add]; +"988 bert/encoder/layer_6/attention/output/add" [id=988, type=Add]; +"989 bert/encoder/layer_6/attention/output/LayerNorm/moments/mean" [id=989, type=ReduceMean]; +"990 bert/encoder/layer_6/attention/output/LayerNorm/moments/StopGradient" [id=990, type=Identity]; +"991 bert/encoder/layer_6/attention/output/LayerNorm/moments/SquaredDifference" [id=991, type=Sub]; +"992 
bert/encoder/layer_6/attention/output/LayerNorm/moments/SquaredDifference__393" [id=992, type=Mul]; +"993 bert/encoder/layer_6/attention/output/LayerNorm/moments/variance" [id=993, type=ReduceMean]; +"994 bert/encoder/layer_6/attention/output/LayerNorm/batchnorm/add" [id=994, type=Add]; +"995 bert/encoder/layer_6/attention/output/LayerNorm/batchnorm/Rsqrt" [id=995, type=Sqrt]; +"996 bert/encoder/layer_6/attention/output/LayerNorm/batchnorm/Rsqrt__395" [id=996, type=Reciprocal]; +"997 bert/encoder/layer_6/attention/output/LayerNorm/batchnorm/mul" [id=997, type=Mul]; +"998 bert/encoder/layer_6/attention/output/LayerNorm/batchnorm/mul_2" [id=998, type=Mul]; +"999 bert/encoder/layer_6/attention/output/LayerNorm/batchnorm/sub" [id=999, type=Sub]; +"1000 bert/encoder/layer_6/attention/output/LayerNorm/batchnorm/mul_1" [id=1000, type=Mul]; +"1001 bert/encoder/layer_6/attention/output/LayerNorm/batchnorm/add_1" [id=1001, type=Add]; +"1002 QuantizeLinear_bert/encoder/layer_6/attention/output/LayerNorm/batchnorm/add_1^0_1" [id=1002, label="1002 QuantizeLinear_bert/encoder/layer_6/attention/output/LayerNorm/batchnorm/add_1:0_1", type=QuantizeLinear]; +"1003 DequantizeLinear_bert/encoder/layer_6/attention/output/LayerNorm/batchnorm/add_1^0_1" [id=1003, label="1003 DequantizeLinear_bert/encoder/layer_6/attention/output/LayerNorm/batchnorm/add_1:0_1", type=DequantizeLinear]; +"1004 QuantizeLinear_bert/encoder/layer_6/intermediate/dense/kernel^0_1" [id=1004, label="1004 QuantizeLinear_bert/encoder/layer_6/intermediate/dense/kernel:0_1", type=QuantizeLinear]; +"1005 DequantizeLinear_bert/encoder/layer_6/intermediate/dense/kernel^0_1" [id=1005, label="1005 DequantizeLinear_bert/encoder/layer_6/intermediate/dense/kernel:0_1", type=DequantizeLinear]; +"1006 bert/encoder/layer_6/intermediate/dense/MatMul" [id=1006, type=MatMul]; +"1007 bert/encoder/layer_6/intermediate/dense/BiasAdd" [id=1007, type=Add]; +"1008 bert/encoder/layer_6/intermediate/dense/Pow" [id=1008, type=Pow]; +"1009 bert/encoder/layer_6/intermediate/dense/mul" [id=1009, type=Mul]; +"1010 bert/encoder/layer_6/intermediate/dense/add" [id=1010, type=Add]; +"1011 bert/encoder/layer_6/intermediate/dense/mul_1" [id=1011, type=Mul]; +"1012 bert/encoder/layer_6/intermediate/dense/Tanh" [id=1012, type=Tanh]; +"1013 bert/encoder/layer_6/intermediate/dense/add_1" [id=1013, type=Add]; +"1014 bert/encoder/layer_6/intermediate/dense/mul_2" [id=1014, type=Mul]; +"1015 bert/encoder/layer_6/intermediate/dense/mul_3" [id=1015, type=Mul]; +"1016 QuantizeLinear_bert/encoder/layer_6/intermediate/dense/mul_3^0_1" [id=1016, label="1016 QuantizeLinear_bert/encoder/layer_6/intermediate/dense/mul_3:0_1", type=QuantizeLinear]; +"1017 DequantizeLinear_bert/encoder/layer_6/intermediate/dense/mul_3^0_1" [id=1017, label="1017 DequantizeLinear_bert/encoder/layer_6/intermediate/dense/mul_3:0_1", type=DequantizeLinear]; +"1018 QuantizeLinear_bert/encoder/layer_6/output/dense/kernel^0_1" [id=1018, label="1018 QuantizeLinear_bert/encoder/layer_6/output/dense/kernel:0_1", type=QuantizeLinear]; +"1019 DequantizeLinear_bert/encoder/layer_6/output/dense/kernel^0_1" [id=1019, label="1019 DequantizeLinear_bert/encoder/layer_6/output/dense/kernel:0_1", type=DequantizeLinear]; +"1020 bert/encoder/layer_6/output/dense/MatMul" [id=1020, type=MatMul]; +"1021 bert/encoder/layer_6/output/dense/BiasAdd" [id=1021, type=Add]; +"1022 bert/encoder/layer_6/output/add" [id=1022, type=Add]; +"1023 bert/encoder/layer_6/output/LayerNorm/moments/mean" [id=1023, type=ReduceMean]; +"1024 
bert/encoder/layer_6/output/LayerNorm/moments/StopGradient" [id=1024, type=Identity]; +"1025 bert/encoder/layer_6/output/LayerNorm/moments/SquaredDifference" [id=1025, type=Sub]; +"1026 bert/encoder/layer_6/output/LayerNorm/moments/SquaredDifference__397" [id=1026, type=Mul]; +"1027 bert/encoder/layer_6/output/LayerNorm/moments/variance" [id=1027, type=ReduceMean]; +"1028 bert/encoder/layer_6/output/LayerNorm/batchnorm/add" [id=1028, type=Add]; +"1029 bert/encoder/layer_6/output/LayerNorm/batchnorm/Rsqrt" [id=1029, type=Sqrt]; +"1030 bert/encoder/layer_6/output/LayerNorm/batchnorm/Rsqrt__399" [id=1030, type=Reciprocal]; +"1031 bert/encoder/layer_6/output/LayerNorm/batchnorm/mul" [id=1031, type=Mul]; +"1032 bert/encoder/layer_6/output/LayerNorm/batchnorm/mul_2" [id=1032, type=Mul]; +"1033 bert/encoder/layer_6/output/LayerNorm/batchnorm/sub" [id=1033, type=Sub]; +"1034 bert/encoder/layer_6/output/LayerNorm/batchnorm/mul_1" [id=1034, type=Mul]; +"1035 bert/encoder/layer_6/output/LayerNorm/batchnorm/add_1" [id=1035, type=Add]; +"1036 QuantizeLinear_bert/encoder/layer_6/output/LayerNorm/batchnorm/add_1^0_3" [id=1036, label="1036 QuantizeLinear_bert/encoder/layer_6/output/LayerNorm/batchnorm/add_1:0_3", type=QuantizeLinear]; +"1037 DequantizeLinear_bert/encoder/layer_6/output/LayerNorm/batchnorm/add_1^0_3" [id=1037, label="1037 DequantizeLinear_bert/encoder/layer_6/output/LayerNorm/batchnorm/add_1:0_3", type=DequantizeLinear]; +"1038 QuantizeLinear_bert/encoder/layer_6/output/LayerNorm/batchnorm/add_1^0_2" [id=1038, label="1038 QuantizeLinear_bert/encoder/layer_6/output/LayerNorm/batchnorm/add_1:0_2", type=QuantizeLinear]; +"1039 DequantizeLinear_bert/encoder/layer_6/output/LayerNorm/batchnorm/add_1^0_2" [id=1039, label="1039 DequantizeLinear_bert/encoder/layer_6/output/LayerNorm/batchnorm/add_1:0_2", type=DequantizeLinear]; +"1040 QuantizeLinear_bert/encoder/layer_6/output/LayerNorm/batchnorm/add_1^0_1" [id=1040, label="1040 QuantizeLinear_bert/encoder/layer_6/output/LayerNorm/batchnorm/add_1:0_1", type=QuantizeLinear]; +"1041 DequantizeLinear_bert/encoder/layer_6/output/LayerNorm/batchnorm/add_1^0_1" [id=1041, label="1041 DequantizeLinear_bert/encoder/layer_6/output/LayerNorm/batchnorm/add_1:0_1", type=DequantizeLinear]; +"1042 QuantizeLinear_bert/encoder/layer_7/attention/self/value/kernel^0_1" [id=1042, label="1042 QuantizeLinear_bert/encoder/layer_7/attention/self/value/kernel:0_1", type=QuantizeLinear]; +"1043 DequantizeLinear_bert/encoder/layer_7/attention/self/value/kernel^0_1" [id=1043, label="1043 DequantizeLinear_bert/encoder/layer_7/attention/self/value/kernel:0_1", type=DequantizeLinear]; +"1044 bert/encoder/layer_7/attention/self/value/MatMul" [id=1044, type=MatMul]; +"1045 bert/encoder/layer_7/attention/self/value/BiasAdd" [id=1045, type=Add]; +"1046 bert/encoder/layer_7/attention/self/Reshape_2" [id=1046, type=Reshape]; +"1047 bert/encoder/layer_7/attention/self/transpose_2" [id=1047, type=Transpose]; +"1048 QuantizeLinear_bert/encoder/layer_7/attention/self/query/kernel^0_1" [id=1048, label="1048 QuantizeLinear_bert/encoder/layer_7/attention/self/query/kernel:0_1", type=QuantizeLinear]; +"1049 DequantizeLinear_bert/encoder/layer_7/attention/self/query/kernel^0_1" [id=1049, label="1049 DequantizeLinear_bert/encoder/layer_7/attention/self/query/kernel:0_1", type=DequantizeLinear]; +"1050 bert/encoder/layer_7/attention/self/query/MatMul" [id=1050, type=MatMul]; +"1051 bert/encoder/layer_7/attention/self/query/BiasAdd" [id=1051, type=Add]; +"1052 
QuantizeLinear_bert/encoder/layer_7/attention/self/query/BiasAdd^0_1" [id=1052, label="1052 QuantizeLinear_bert/encoder/layer_7/attention/self/query/BiasAdd:0_1", type=QuantizeLinear]; +"1053 DequantizeLinear_bert/encoder/layer_7/attention/self/query/BiasAdd^0_1" [id=1053, label="1053 DequantizeLinear_bert/encoder/layer_7/attention/self/query/BiasAdd:0_1", type=DequantizeLinear]; +"1054 bert/encoder/layer_7/attention/self/Reshape" [id=1054, type=Reshape]; +"1055 bert/encoder/layer_7/attention/self/transpose" [id=1055, type=Transpose]; +"1056 QuantizeLinear_bert/encoder/layer_7/attention/self/key/kernel^0_1" [id=1056, label="1056 QuantizeLinear_bert/encoder/layer_7/attention/self/key/kernel:0_1", type=QuantizeLinear]; +"1057 DequantizeLinear_bert/encoder/layer_7/attention/self/key/kernel^0_1" [id=1057, label="1057 DequantizeLinear_bert/encoder/layer_7/attention/self/key/kernel:0_1", type=DequantizeLinear]; +"1058 bert/encoder/layer_7/attention/self/key/MatMul" [id=1058, type=MatMul]; +"1059 bert/encoder/layer_7/attention/self/key/BiasAdd" [id=1059, type=Add]; +"1060 QuantizeLinear_bert/encoder/layer_7/attention/self/key/BiasAdd^0_1" [id=1060, label="1060 QuantizeLinear_bert/encoder/layer_7/attention/self/key/BiasAdd:0_1", type=QuantizeLinear]; +"1061 DequantizeLinear_bert/encoder/layer_7/attention/self/key/BiasAdd^0_1" [id=1061, label="1061 DequantizeLinear_bert/encoder/layer_7/attention/self/key/BiasAdd:0_1", type=DequantizeLinear]; +"1062 bert/encoder/layer_7/attention/self/Reshape_1" [id=1062, type=Reshape]; +"1063 bert/encoder/layer_7/attention/self/transpose_1" [id=1063, type=Transpose]; +"1064 bert/encoder/layer_7/attention/self/MatMul__404" [id=1064, type=Transpose]; +"1065 bert/encoder/layer_7/attention/self/MatMul" [id=1065, type=MatMul]; +"1066 bert/encoder/layer_7/attention/self/Mul" [id=1066, type=Mul]; +"1067 bert/encoder/layer_7/attention/self/add" [id=1067, type=Add]; +"1068 bert/encoder/layer_7/attention/self/Softmax" [id=1068, type=Softmax]; +"1069 bert/encoder/layer_7/attention/self/MatMul_1" [id=1069, type=MatMul]; +"1070 QuantizeLinear_bert/encoder/layer_7/attention/self/MatMul_1^0_1" [id=1070, label="1070 QuantizeLinear_bert/encoder/layer_7/attention/self/MatMul_1:0_1", type=QuantizeLinear]; +"1071 DequantizeLinear_bert/encoder/layer_7/attention/self/MatMul_1^0_1" [id=1071, label="1071 DequantizeLinear_bert/encoder/layer_7/attention/self/MatMul_1:0_1", type=DequantizeLinear]; +"1072 bert/encoder/layer_7/attention/self/transpose_3" [id=1072, type=Transpose]; +"1073 bert/encoder/layer_7/attention/self/Reshape_3" [id=1073, type=Reshape]; +"1074 QuantizeLinear_bert/encoder/layer_7/attention/output/dense/kernel^0_1" [id=1074, label="1074 QuantizeLinear_bert/encoder/layer_7/attention/output/dense/kernel:0_1", type=QuantizeLinear]; +"1075 DequantizeLinear_bert/encoder/layer_7/attention/output/dense/kernel^0_1" [id=1075, label="1075 DequantizeLinear_bert/encoder/layer_7/attention/output/dense/kernel:0_1", type=DequantizeLinear]; +"1076 bert/encoder/layer_7/attention/output/dense/MatMul" [id=1076, type=MatMul]; +"1077 bert/encoder/layer_7/attention/output/dense/BiasAdd" [id=1077, type=Add]; +"1078 bert/encoder/layer_7/attention/output/add" [id=1078, type=Add]; +"1079 bert/encoder/layer_7/attention/output/LayerNorm/moments/mean" [id=1079, type=ReduceMean]; +"1080 bert/encoder/layer_7/attention/output/LayerNorm/moments/StopGradient" [id=1080, type=Identity]; +"1081 bert/encoder/layer_7/attention/output/LayerNorm/moments/SquaredDifference" [id=1081, type=Sub]; +"1082 
bert/encoder/layer_7/attention/output/LayerNorm/moments/SquaredDifference__407" [id=1082, type=Mul]; +"1083 bert/encoder/layer_7/attention/output/LayerNorm/moments/variance" [id=1083, type=ReduceMean]; +"1084 bert/encoder/layer_7/attention/output/LayerNorm/batchnorm/add" [id=1084, type=Add]; +"1085 bert/encoder/layer_7/attention/output/LayerNorm/batchnorm/Rsqrt" [id=1085, type=Sqrt]; +"1086 bert/encoder/layer_7/attention/output/LayerNorm/batchnorm/Rsqrt__409" [id=1086, type=Reciprocal]; +"1087 bert/encoder/layer_7/attention/output/LayerNorm/batchnorm/mul" [id=1087, type=Mul]; +"1088 bert/encoder/layer_7/attention/output/LayerNorm/batchnorm/mul_2" [id=1088, type=Mul]; +"1089 bert/encoder/layer_7/attention/output/LayerNorm/batchnorm/sub" [id=1089, type=Sub]; +"1090 bert/encoder/layer_7/attention/output/LayerNorm/batchnorm/mul_1" [id=1090, type=Mul]; +"1091 bert/encoder/layer_7/attention/output/LayerNorm/batchnorm/add_1" [id=1091, type=Add]; +"1092 QuantizeLinear_bert/encoder/layer_7/attention/output/LayerNorm/batchnorm/add_1^0_1" [id=1092, label="1092 QuantizeLinear_bert/encoder/layer_7/attention/output/LayerNorm/batchnorm/add_1:0_1", type=QuantizeLinear]; +"1093 DequantizeLinear_bert/encoder/layer_7/attention/output/LayerNorm/batchnorm/add_1^0_1" [id=1093, label="1093 DequantizeLinear_bert/encoder/layer_7/attention/output/LayerNorm/batchnorm/add_1:0_1", type=DequantizeLinear]; +"1094 QuantizeLinear_bert/encoder/layer_7/intermediate/dense/kernel^0_1" [id=1094, label="1094 QuantizeLinear_bert/encoder/layer_7/intermediate/dense/kernel:0_1", type=QuantizeLinear]; +"1095 DequantizeLinear_bert/encoder/layer_7/intermediate/dense/kernel^0_1" [id=1095, label="1095 DequantizeLinear_bert/encoder/layer_7/intermediate/dense/kernel:0_1", type=DequantizeLinear]; +"1096 bert/encoder/layer_7/intermediate/dense/MatMul" [id=1096, type=MatMul]; +"1097 bert/encoder/layer_7/intermediate/dense/BiasAdd" [id=1097, type=Add]; +"1098 bert/encoder/layer_7/intermediate/dense/Pow" [id=1098, type=Pow]; +"1099 bert/encoder/layer_7/intermediate/dense/mul" [id=1099, type=Mul]; +"1100 bert/encoder/layer_7/intermediate/dense/add" [id=1100, type=Add]; +"1101 bert/encoder/layer_7/intermediate/dense/mul_1" [id=1101, type=Mul]; +"1102 bert/encoder/layer_7/intermediate/dense/Tanh" [id=1102, type=Tanh]; +"1103 bert/encoder/layer_7/intermediate/dense/add_1" [id=1103, type=Add]; +"1104 bert/encoder/layer_7/intermediate/dense/mul_2" [id=1104, type=Mul]; +"1105 bert/encoder/layer_7/intermediate/dense/mul_3" [id=1105, type=Mul]; +"1106 QuantizeLinear_bert/encoder/layer_7/intermediate/dense/mul_3^0_1" [id=1106, label="1106 QuantizeLinear_bert/encoder/layer_7/intermediate/dense/mul_3:0_1", type=QuantizeLinear]; +"1107 DequantizeLinear_bert/encoder/layer_7/intermediate/dense/mul_3^0_1" [id=1107, label="1107 DequantizeLinear_bert/encoder/layer_7/intermediate/dense/mul_3:0_1", type=DequantizeLinear]; +"1108 QuantizeLinear_bert/encoder/layer_7/output/dense/kernel^0_1" [id=1108, label="1108 QuantizeLinear_bert/encoder/layer_7/output/dense/kernel:0_1", type=QuantizeLinear]; +"1109 DequantizeLinear_bert/encoder/layer_7/output/dense/kernel^0_1" [id=1109, label="1109 DequantizeLinear_bert/encoder/layer_7/output/dense/kernel:0_1", type=DequantizeLinear]; +"1110 bert/encoder/layer_7/output/dense/MatMul" [id=1110, type=MatMul]; +"1111 bert/encoder/layer_7/output/dense/BiasAdd" [id=1111, type=Add]; +"1112 bert/encoder/layer_7/output/add" [id=1112, type=Add]; +"1113 bert/encoder/layer_7/output/LayerNorm/moments/mean" [id=1113, type=ReduceMean]; +"1114 
bert/encoder/layer_7/output/LayerNorm/moments/StopGradient" [id=1114, type=Identity]; +"1115 bert/encoder/layer_7/output/LayerNorm/moments/SquaredDifference" [id=1115, type=Sub]; +"1116 bert/encoder/layer_7/output/LayerNorm/moments/SquaredDifference__411" [id=1116, type=Mul]; +"1117 bert/encoder/layer_7/output/LayerNorm/moments/variance" [id=1117, type=ReduceMean]; +"1118 bert/encoder/layer_7/output/LayerNorm/batchnorm/add" [id=1118, type=Add]; +"1119 bert/encoder/layer_7/output/LayerNorm/batchnorm/Rsqrt" [id=1119, type=Sqrt]; +"1120 bert/encoder/layer_7/output/LayerNorm/batchnorm/Rsqrt__413" [id=1120, type=Reciprocal]; +"1121 bert/encoder/layer_7/output/LayerNorm/batchnorm/mul" [id=1121, type=Mul]; +"1122 bert/encoder/layer_7/output/LayerNorm/batchnorm/mul_2" [id=1122, type=Mul]; +"1123 bert/encoder/layer_7/output/LayerNorm/batchnorm/sub" [id=1123, type=Sub]; +"1124 bert/encoder/layer_7/output/LayerNorm/batchnorm/mul_1" [id=1124, type=Mul]; +"1125 bert/encoder/layer_7/output/LayerNorm/batchnorm/add_1" [id=1125, type=Add]; +"1126 QuantizeLinear_bert/encoder/layer_7/output/LayerNorm/batchnorm/add_1^0_3" [id=1126, label="1126 QuantizeLinear_bert/encoder/layer_7/output/LayerNorm/batchnorm/add_1:0_3", type=QuantizeLinear]; +"1127 DequantizeLinear_bert/encoder/layer_7/output/LayerNorm/batchnorm/add_1^0_3" [id=1127, label="1127 DequantizeLinear_bert/encoder/layer_7/output/LayerNorm/batchnorm/add_1:0_3", type=DequantizeLinear]; +"1128 QuantizeLinear_bert/encoder/layer_7/output/LayerNorm/batchnorm/add_1^0_2" [id=1128, label="1128 QuantizeLinear_bert/encoder/layer_7/output/LayerNorm/batchnorm/add_1:0_2", type=QuantizeLinear]; +"1129 DequantizeLinear_bert/encoder/layer_7/output/LayerNorm/batchnorm/add_1^0_2" [id=1129, label="1129 DequantizeLinear_bert/encoder/layer_7/output/LayerNorm/batchnorm/add_1:0_2", type=DequantizeLinear]; +"1130 QuantizeLinear_bert/encoder/layer_7/output/LayerNorm/batchnorm/add_1^0_1" [id=1130, label="1130 QuantizeLinear_bert/encoder/layer_7/output/LayerNorm/batchnorm/add_1:0_1", type=QuantizeLinear]; +"1131 DequantizeLinear_bert/encoder/layer_7/output/LayerNorm/batchnorm/add_1^0_1" [id=1131, label="1131 DequantizeLinear_bert/encoder/layer_7/output/LayerNorm/batchnorm/add_1:0_1", type=DequantizeLinear]; +"1132 QuantizeLinear_bert/encoder/layer_8/attention/self/value/kernel^0_1" [id=1132, label="1132 QuantizeLinear_bert/encoder/layer_8/attention/self/value/kernel:0_1", type=QuantizeLinear]; +"1133 DequantizeLinear_bert/encoder/layer_8/attention/self/value/kernel^0_1" [id=1133, label="1133 DequantizeLinear_bert/encoder/layer_8/attention/self/value/kernel:0_1", type=DequantizeLinear]; +"1134 bert/encoder/layer_8/attention/self/value/MatMul" [id=1134, type=MatMul]; +"1135 bert/encoder/layer_8/attention/self/value/BiasAdd" [id=1135, type=Add]; +"1136 bert/encoder/layer_8/attention/self/Reshape_2" [id=1136, type=Reshape]; +"1137 bert/encoder/layer_8/attention/self/transpose_2" [id=1137, type=Transpose]; +"1138 QuantizeLinear_bert/encoder/layer_8/attention/self/query/kernel^0_1" [id=1138, label="1138 QuantizeLinear_bert/encoder/layer_8/attention/self/query/kernel:0_1", type=QuantizeLinear]; +"1139 DequantizeLinear_bert/encoder/layer_8/attention/self/query/kernel^0_1" [id=1139, label="1139 DequantizeLinear_bert/encoder/layer_8/attention/self/query/kernel:0_1", type=DequantizeLinear]; +"1140 bert/encoder/layer_8/attention/self/query/MatMul" [id=1140, type=MatMul]; +"1141 bert/encoder/layer_8/attention/self/query/BiasAdd" [id=1141, type=Add]; +"1142 
QuantizeLinear_bert/encoder/layer_8/attention/self/query/BiasAdd^0_1" [id=1142, label="1142 QuantizeLinear_bert/encoder/layer_8/attention/self/query/BiasAdd:0_1", type=QuantizeLinear]; +"1143 DequantizeLinear_bert/encoder/layer_8/attention/self/query/BiasAdd^0_1" [id=1143, label="1143 DequantizeLinear_bert/encoder/layer_8/attention/self/query/BiasAdd:0_1", type=DequantizeLinear]; +"1144 bert/encoder/layer_8/attention/self/Reshape" [id=1144, type=Reshape]; +"1145 bert/encoder/layer_8/attention/self/transpose" [id=1145, type=Transpose]; +"1146 QuantizeLinear_bert/encoder/layer_8/attention/self/key/kernel^0_1" [id=1146, label="1146 QuantizeLinear_bert/encoder/layer_8/attention/self/key/kernel:0_1", type=QuantizeLinear]; +"1147 DequantizeLinear_bert/encoder/layer_8/attention/self/key/kernel^0_1" [id=1147, label="1147 DequantizeLinear_bert/encoder/layer_8/attention/self/key/kernel:0_1", type=DequantizeLinear]; +"1148 bert/encoder/layer_8/attention/self/key/MatMul" [id=1148, type=MatMul]; +"1149 bert/encoder/layer_8/attention/self/key/BiasAdd" [id=1149, type=Add]; +"1150 QuantizeLinear_bert/encoder/layer_8/attention/self/key/BiasAdd^0_1" [id=1150, label="1150 QuantizeLinear_bert/encoder/layer_8/attention/self/key/BiasAdd:0_1", type=QuantizeLinear]; +"1151 DequantizeLinear_bert/encoder/layer_8/attention/self/key/BiasAdd^0_1" [id=1151, label="1151 DequantizeLinear_bert/encoder/layer_8/attention/self/key/BiasAdd:0_1", type=DequantizeLinear]; +"1152 bert/encoder/layer_8/attention/self/Reshape_1" [id=1152, type=Reshape]; +"1153 bert/encoder/layer_8/attention/self/transpose_1" [id=1153, type=Transpose]; +"1154 bert/encoder/layer_8/attention/self/MatMul__418" [id=1154, type=Transpose]; +"1155 bert/encoder/layer_8/attention/self/MatMul" [id=1155, type=MatMul]; +"1156 bert/encoder/layer_8/attention/self/Mul" [id=1156, type=Mul]; +"1157 bert/encoder/layer_8/attention/self/add" [id=1157, type=Add]; +"1158 bert/encoder/layer_8/attention/self/Softmax" [id=1158, type=Softmax]; +"1159 bert/encoder/layer_8/attention/self/MatMul_1" [id=1159, type=MatMul]; +"1160 QuantizeLinear_bert/encoder/layer_8/attention/self/MatMul_1^0_1" [id=1160, label="1160 QuantizeLinear_bert/encoder/layer_8/attention/self/MatMul_1:0_1", type=QuantizeLinear]; +"1161 DequantizeLinear_bert/encoder/layer_8/attention/self/MatMul_1^0_1" [id=1161, label="1161 DequantizeLinear_bert/encoder/layer_8/attention/self/MatMul_1:0_1", type=DequantizeLinear]; +"1162 bert/encoder/layer_8/attention/self/transpose_3" [id=1162, type=Transpose]; +"1163 bert/encoder/layer_8/attention/self/Reshape_3" [id=1163, type=Reshape]; +"1164 QuantizeLinear_bert/encoder/layer_8/attention/output/dense/kernel^0_1" [id=1164, label="1164 QuantizeLinear_bert/encoder/layer_8/attention/output/dense/kernel:0_1", type=QuantizeLinear]; +"1165 DequantizeLinear_bert/encoder/layer_8/attention/output/dense/kernel^0_1" [id=1165, label="1165 DequantizeLinear_bert/encoder/layer_8/attention/output/dense/kernel:0_1", type=DequantizeLinear]; +"1166 bert/encoder/layer_8/attention/output/dense/MatMul" [id=1166, type=MatMul]; +"1167 bert/encoder/layer_8/attention/output/dense/BiasAdd" [id=1167, type=Add]; +"1168 bert/encoder/layer_8/attention/output/add" [id=1168, type=Add]; +"1169 bert/encoder/layer_8/attention/output/LayerNorm/moments/mean" [id=1169, type=ReduceMean]; +"1170 bert/encoder/layer_8/attention/output/LayerNorm/moments/StopGradient" [id=1170, type=Identity]; +"1171 bert/encoder/layer_8/attention/output/LayerNorm/moments/SquaredDifference" [id=1171, type=Sub]; +"1172 
bert/encoder/layer_8/attention/output/LayerNorm/moments/SquaredDifference__421" [id=1172, type=Mul]; +"1173 bert/encoder/layer_8/attention/output/LayerNorm/moments/variance" [id=1173, type=ReduceMean]; +"1174 bert/encoder/layer_8/attention/output/LayerNorm/batchnorm/add" [id=1174, type=Add]; +"1175 bert/encoder/layer_8/attention/output/LayerNorm/batchnorm/Rsqrt" [id=1175, type=Sqrt]; +"1176 bert/encoder/layer_8/attention/output/LayerNorm/batchnorm/Rsqrt__423" [id=1176, type=Reciprocal]; +"1177 bert/encoder/layer_8/attention/output/LayerNorm/batchnorm/mul" [id=1177, type=Mul]; +"1178 bert/encoder/layer_8/attention/output/LayerNorm/batchnorm/mul_2" [id=1178, type=Mul]; +"1179 bert/encoder/layer_8/attention/output/LayerNorm/batchnorm/sub" [id=1179, type=Sub]; +"1180 bert/encoder/layer_8/attention/output/LayerNorm/batchnorm/mul_1" [id=1180, type=Mul]; +"1181 bert/encoder/layer_8/attention/output/LayerNorm/batchnorm/add_1" [id=1181, type=Add]; +"1182 QuantizeLinear_bert/encoder/layer_8/attention/output/LayerNorm/batchnorm/add_1^0_1" [id=1182, label="1182 QuantizeLinear_bert/encoder/layer_8/attention/output/LayerNorm/batchnorm/add_1:0_1", type=QuantizeLinear]; +"1183 DequantizeLinear_bert/encoder/layer_8/attention/output/LayerNorm/batchnorm/add_1^0_1" [id=1183, label="1183 DequantizeLinear_bert/encoder/layer_8/attention/output/LayerNorm/batchnorm/add_1:0_1", type=DequantizeLinear]; +"1184 QuantizeLinear_bert/encoder/layer_8/intermediate/dense/kernel^0_1" [id=1184, label="1184 QuantizeLinear_bert/encoder/layer_8/intermediate/dense/kernel:0_1", type=QuantizeLinear]; +"1185 DequantizeLinear_bert/encoder/layer_8/intermediate/dense/kernel^0_1" [id=1185, label="1185 DequantizeLinear_bert/encoder/layer_8/intermediate/dense/kernel:0_1", type=DequantizeLinear]; +"1186 bert/encoder/layer_8/intermediate/dense/MatMul" [id=1186, type=MatMul]; +"1187 bert/encoder/layer_8/intermediate/dense/BiasAdd" [id=1187, type=Add]; +"1188 bert/encoder/layer_8/intermediate/dense/Pow" [id=1188, type=Pow]; +"1189 bert/encoder/layer_8/intermediate/dense/mul" [id=1189, type=Mul]; +"1190 bert/encoder/layer_8/intermediate/dense/add" [id=1190, type=Add]; +"1191 bert/encoder/layer_8/intermediate/dense/mul_1" [id=1191, type=Mul]; +"1192 bert/encoder/layer_8/intermediate/dense/Tanh" [id=1192, type=Tanh]; +"1193 bert/encoder/layer_8/intermediate/dense/add_1" [id=1193, type=Add]; +"1194 bert/encoder/layer_8/intermediate/dense/mul_2" [id=1194, type=Mul]; +"1195 bert/encoder/layer_8/intermediate/dense/mul_3" [id=1195, type=Mul]; +"1196 QuantizeLinear_bert/encoder/layer_8/intermediate/dense/mul_3^0_1" [id=1196, label="1196 QuantizeLinear_bert/encoder/layer_8/intermediate/dense/mul_3:0_1", type=QuantizeLinear]; +"1197 DequantizeLinear_bert/encoder/layer_8/intermediate/dense/mul_3^0_1" [id=1197, label="1197 DequantizeLinear_bert/encoder/layer_8/intermediate/dense/mul_3:0_1", type=DequantizeLinear]; +"1198 QuantizeLinear_bert/encoder/layer_8/output/dense/kernel^0_1" [id=1198, label="1198 QuantizeLinear_bert/encoder/layer_8/output/dense/kernel:0_1", type=QuantizeLinear]; +"1199 DequantizeLinear_bert/encoder/layer_8/output/dense/kernel^0_1" [id=1199, label="1199 DequantizeLinear_bert/encoder/layer_8/output/dense/kernel:0_1", type=DequantizeLinear]; +"1200 bert/encoder/layer_8/output/dense/MatMul" [id=1200, type=MatMul]; +"1201 bert/encoder/layer_8/output/dense/BiasAdd" [id=1201, type=Add]; +"1202 bert/encoder/layer_8/output/add" [id=1202, type=Add]; +"1203 bert/encoder/layer_8/output/LayerNorm/moments/mean" [id=1203, type=ReduceMean]; +"1204 
bert/encoder/layer_8/output/LayerNorm/moments/StopGradient" [id=1204, type=Identity]; +"1205 bert/encoder/layer_8/output/LayerNorm/moments/SquaredDifference" [id=1205, type=Sub]; +"1206 bert/encoder/layer_8/output/LayerNorm/moments/SquaredDifference__425" [id=1206, type=Mul]; +"1207 bert/encoder/layer_8/output/LayerNorm/moments/variance" [id=1207, type=ReduceMean]; +"1208 bert/encoder/layer_8/output/LayerNorm/batchnorm/add" [id=1208, type=Add]; +"1209 bert/encoder/layer_8/output/LayerNorm/batchnorm/Rsqrt" [id=1209, type=Sqrt]; +"1210 bert/encoder/layer_8/output/LayerNorm/batchnorm/Rsqrt__427" [id=1210, type=Reciprocal]; +"1211 bert/encoder/layer_8/output/LayerNorm/batchnorm/mul" [id=1211, type=Mul]; +"1212 bert/encoder/layer_8/output/LayerNorm/batchnorm/mul_2" [id=1212, type=Mul]; +"1213 bert/encoder/layer_8/output/LayerNorm/batchnorm/sub" [id=1213, type=Sub]; +"1214 bert/encoder/layer_8/output/LayerNorm/batchnorm/mul_1" [id=1214, type=Mul]; +"1215 bert/encoder/layer_8/output/LayerNorm/batchnorm/add_1" [id=1215, type=Add]; +"1216 QuantizeLinear_bert/encoder/layer_8/output/LayerNorm/batchnorm/add_1^0_3" [id=1216, label="1216 QuantizeLinear_bert/encoder/layer_8/output/LayerNorm/batchnorm/add_1:0_3", type=QuantizeLinear]; +"1217 DequantizeLinear_bert/encoder/layer_8/output/LayerNorm/batchnorm/add_1^0_3" [id=1217, label="1217 DequantizeLinear_bert/encoder/layer_8/output/LayerNorm/batchnorm/add_1:0_3", type=DequantizeLinear]; +"1218 QuantizeLinear_bert/encoder/layer_8/output/LayerNorm/batchnorm/add_1^0_2" [id=1218, label="1218 QuantizeLinear_bert/encoder/layer_8/output/LayerNorm/batchnorm/add_1:0_2", type=QuantizeLinear]; +"1219 DequantizeLinear_bert/encoder/layer_8/output/LayerNorm/batchnorm/add_1^0_2" [id=1219, label="1219 DequantizeLinear_bert/encoder/layer_8/output/LayerNorm/batchnorm/add_1:0_2", type=DequantizeLinear]; +"1220 QuantizeLinear_bert/encoder/layer_8/output/LayerNorm/batchnorm/add_1^0_1" [id=1220, label="1220 QuantizeLinear_bert/encoder/layer_8/output/LayerNorm/batchnorm/add_1:0_1", type=QuantizeLinear]; +"1221 DequantizeLinear_bert/encoder/layer_8/output/LayerNorm/batchnorm/add_1^0_1" [id=1221, label="1221 DequantizeLinear_bert/encoder/layer_8/output/LayerNorm/batchnorm/add_1:0_1", type=DequantizeLinear]; +"1222 QuantizeLinear_bert/encoder/layer_9/attention/self/value/kernel^0_1" [id=1222, label="1222 QuantizeLinear_bert/encoder/layer_9/attention/self/value/kernel:0_1", type=QuantizeLinear]; +"1223 DequantizeLinear_bert/encoder/layer_9/attention/self/value/kernel^0_1" [id=1223, label="1223 DequantizeLinear_bert/encoder/layer_9/attention/self/value/kernel:0_1", type=DequantizeLinear]; +"1224 bert/encoder/layer_9/attention/self/value/MatMul" [id=1224, type=MatMul]; +"1225 bert/encoder/layer_9/attention/self/value/BiasAdd" [id=1225, type=Add]; +"1226 bert/encoder/layer_9/attention/self/Reshape_2" [id=1226, type=Reshape]; +"1227 bert/encoder/layer_9/attention/self/transpose_2" [id=1227, type=Transpose]; +"1228 QuantizeLinear_bert/encoder/layer_9/attention/self/query/kernel^0_1" [id=1228, label="1228 QuantizeLinear_bert/encoder/layer_9/attention/self/query/kernel:0_1", type=QuantizeLinear]; +"1229 DequantizeLinear_bert/encoder/layer_9/attention/self/query/kernel^0_1" [id=1229, label="1229 DequantizeLinear_bert/encoder/layer_9/attention/self/query/kernel:0_1", type=DequantizeLinear]; +"1230 bert/encoder/layer_9/attention/self/query/MatMul" [id=1230, type=MatMul]; +"1231 bert/encoder/layer_9/attention/self/query/BiasAdd" [id=1231, type=Add]; +"1232 
QuantizeLinear_bert/encoder/layer_9/attention/self/query/BiasAdd^0_1" [id=1232, label="1232 QuantizeLinear_bert/encoder/layer_9/attention/self/query/BiasAdd:0_1", type=QuantizeLinear]; +"1233 DequantizeLinear_bert/encoder/layer_9/attention/self/query/BiasAdd^0_1" [id=1233, label="1233 DequantizeLinear_bert/encoder/layer_9/attention/self/query/BiasAdd:0_1", type=DequantizeLinear]; +"1234 bert/encoder/layer_9/attention/self/Reshape" [id=1234, type=Reshape]; +"1235 bert/encoder/layer_9/attention/self/transpose" [id=1235, type=Transpose]; +"1236 QuantizeLinear_bert/encoder/layer_9/attention/self/key/kernel^0_1" [id=1236, label="1236 QuantizeLinear_bert/encoder/layer_9/attention/self/key/kernel:0_1", type=QuantizeLinear]; +"1237 DequantizeLinear_bert/encoder/layer_9/attention/self/key/kernel^0_1" [id=1237, label="1237 DequantizeLinear_bert/encoder/layer_9/attention/self/key/kernel:0_1", type=DequantizeLinear]; +"1238 bert/encoder/layer_9/attention/self/key/MatMul" [id=1238, type=MatMul]; +"1239 bert/encoder/layer_9/attention/self/key/BiasAdd" [id=1239, type=Add]; +"1240 QuantizeLinear_bert/encoder/layer_9/attention/self/key/BiasAdd^0_1" [id=1240, label="1240 QuantizeLinear_bert/encoder/layer_9/attention/self/key/BiasAdd:0_1", type=QuantizeLinear]; +"1241 DequantizeLinear_bert/encoder/layer_9/attention/self/key/BiasAdd^0_1" [id=1241, label="1241 DequantizeLinear_bert/encoder/layer_9/attention/self/key/BiasAdd:0_1", type=DequantizeLinear]; +"1242 bert/encoder/layer_9/attention/self/Reshape_1" [id=1242, type=Reshape]; +"1243 bert/encoder/layer_9/attention/self/transpose_1" [id=1243, type=Transpose]; +"1244 bert/encoder/layer_9/attention/self/MatMul__432" [id=1244, type=Transpose]; +"1245 bert/encoder/layer_9/attention/self/MatMul" [id=1245, type=MatMul]; +"1246 bert/encoder/layer_9/attention/self/Mul" [id=1246, type=Mul]; +"1247 bert/encoder/layer_9/attention/self/add" [id=1247, type=Add]; +"1248 bert/encoder/layer_9/attention/self/Softmax" [id=1248, type=Softmax]; +"1249 bert/encoder/layer_9/attention/self/MatMul_1" [id=1249, type=MatMul]; +"1250 QuantizeLinear_bert/encoder/layer_9/attention/self/MatMul_1^0_1" [id=1250, label="1250 QuantizeLinear_bert/encoder/layer_9/attention/self/MatMul_1:0_1", type=QuantizeLinear]; +"1251 DequantizeLinear_bert/encoder/layer_9/attention/self/MatMul_1^0_1" [id=1251, label="1251 DequantizeLinear_bert/encoder/layer_9/attention/self/MatMul_1:0_1", type=DequantizeLinear]; +"1252 bert/encoder/layer_9/attention/self/transpose_3" [id=1252, type=Transpose]; +"1253 bert/encoder/layer_9/attention/self/Reshape_3" [id=1253, type=Reshape]; +"1254 QuantizeLinear_bert/encoder/layer_9/attention/output/dense/kernel^0_1" [id=1254, label="1254 QuantizeLinear_bert/encoder/layer_9/attention/output/dense/kernel:0_1", type=QuantizeLinear]; +"1255 DequantizeLinear_bert/encoder/layer_9/attention/output/dense/kernel^0_1" [id=1255, label="1255 DequantizeLinear_bert/encoder/layer_9/attention/output/dense/kernel:0_1", type=DequantizeLinear]; +"1256 bert/encoder/layer_9/attention/output/dense/MatMul" [id=1256, type=MatMul]; +"1257 bert/encoder/layer_9/attention/output/dense/BiasAdd" [id=1257, type=Add]; +"1258 bert/encoder/layer_9/attention/output/add" [id=1258, type=Add]; +"1259 bert/encoder/layer_9/attention/output/LayerNorm/moments/mean" [id=1259, type=ReduceMean]; +"1260 bert/encoder/layer_9/attention/output/LayerNorm/moments/StopGradient" [id=1260, type=Identity]; +"1261 bert/encoder/layer_9/attention/output/LayerNorm/moments/SquaredDifference" [id=1261, type=Sub]; +"1262 
bert/encoder/layer_9/attention/output/LayerNorm/moments/SquaredDifference__435" [id=1262, type=Mul]; +"1263 bert/encoder/layer_9/attention/output/LayerNorm/moments/variance" [id=1263, type=ReduceMean]; +"1264 bert/encoder/layer_9/attention/output/LayerNorm/batchnorm/add" [id=1264, type=Add]; +"1265 bert/encoder/layer_9/attention/output/LayerNorm/batchnorm/Rsqrt" [id=1265, type=Sqrt]; +"1266 bert/encoder/layer_9/attention/output/LayerNorm/batchnorm/Rsqrt__437" [id=1266, type=Reciprocal]; +"1267 bert/encoder/layer_9/attention/output/LayerNorm/batchnorm/mul" [id=1267, type=Mul]; +"1268 bert/encoder/layer_9/attention/output/LayerNorm/batchnorm/mul_2" [id=1268, type=Mul]; +"1269 bert/encoder/layer_9/attention/output/LayerNorm/batchnorm/sub" [id=1269, type=Sub]; +"1270 bert/encoder/layer_9/attention/output/LayerNorm/batchnorm/mul_1" [id=1270, type=Mul]; +"1271 bert/encoder/layer_9/attention/output/LayerNorm/batchnorm/add_1" [id=1271, type=Add]; +"1272 QuantizeLinear_bert/encoder/layer_9/attention/output/LayerNorm/batchnorm/add_1^0_1" [id=1272, label="1272 QuantizeLinear_bert/encoder/layer_9/attention/output/LayerNorm/batchnorm/add_1:0_1", type=QuantizeLinear]; +"1273 DequantizeLinear_bert/encoder/layer_9/attention/output/LayerNorm/batchnorm/add_1^0_1" [id=1273, label="1273 DequantizeLinear_bert/encoder/layer_9/attention/output/LayerNorm/batchnorm/add_1:0_1", type=DequantizeLinear]; +"1274 QuantizeLinear_bert/encoder/layer_9/intermediate/dense/kernel^0_1" [id=1274, label="1274 QuantizeLinear_bert/encoder/layer_9/intermediate/dense/kernel:0_1", type=QuantizeLinear]; +"1275 DequantizeLinear_bert/encoder/layer_9/intermediate/dense/kernel^0_1" [id=1275, label="1275 DequantizeLinear_bert/encoder/layer_9/intermediate/dense/kernel:0_1", type=DequantizeLinear]; +"1276 bert/encoder/layer_9/intermediate/dense/MatMul" [id=1276, type=MatMul]; +"1277 bert/encoder/layer_9/intermediate/dense/BiasAdd" [id=1277, type=Add]; +"1278 bert/encoder/layer_9/intermediate/dense/Pow" [id=1278, type=Pow]; +"1279 bert/encoder/layer_9/intermediate/dense/mul" [id=1279, type=Mul]; +"1280 bert/encoder/layer_9/intermediate/dense/add" [id=1280, type=Add]; +"1281 bert/encoder/layer_9/intermediate/dense/mul_1" [id=1281, type=Mul]; +"1282 bert/encoder/layer_9/intermediate/dense/Tanh" [id=1282, type=Tanh]; +"1283 bert/encoder/layer_9/intermediate/dense/add_1" [id=1283, type=Add]; +"1284 bert/encoder/layer_9/intermediate/dense/mul_2" [id=1284, type=Mul]; +"1285 bert/encoder/layer_9/intermediate/dense/mul_3" [id=1285, type=Mul]; +"1286 QuantizeLinear_bert/encoder/layer_9/intermediate/dense/mul_3^0_1" [id=1286, label="1286 QuantizeLinear_bert/encoder/layer_9/intermediate/dense/mul_3:0_1", type=QuantizeLinear]; +"1287 DequantizeLinear_bert/encoder/layer_9/intermediate/dense/mul_3^0_1" [id=1287, label="1287 DequantizeLinear_bert/encoder/layer_9/intermediate/dense/mul_3:0_1", type=DequantizeLinear]; +"1288 QuantizeLinear_bert/encoder/layer_9/output/dense/kernel^0_1" [id=1288, label="1288 QuantizeLinear_bert/encoder/layer_9/output/dense/kernel:0_1", type=QuantizeLinear]; +"1289 DequantizeLinear_bert/encoder/layer_9/output/dense/kernel^0_1" [id=1289, label="1289 DequantizeLinear_bert/encoder/layer_9/output/dense/kernel:0_1", type=DequantizeLinear]; +"1290 bert/encoder/layer_9/output/dense/MatMul" [id=1290, type=MatMul]; +"1291 bert/encoder/layer_9/output/dense/BiasAdd" [id=1291, type=Add]; +"1292 bert/encoder/layer_9/output/add" [id=1292, type=Add]; +"1293 bert/encoder/layer_9/output/LayerNorm/moments/mean" [id=1293, type=ReduceMean]; +"1294 
bert/encoder/layer_9/output/LayerNorm/moments/StopGradient" [id=1294, type=Identity]; +"1295 bert/encoder/layer_9/output/LayerNorm/moments/SquaredDifference" [id=1295, type=Sub]; +"1296 bert/encoder/layer_9/output/LayerNorm/moments/SquaredDifference__439" [id=1296, type=Mul]; +"1297 bert/encoder/layer_9/output/LayerNorm/moments/variance" [id=1297, type=ReduceMean]; +"1298 bert/encoder/layer_9/output/LayerNorm/batchnorm/add" [id=1298, type=Add]; +"1299 bert/encoder/layer_9/output/LayerNorm/batchnorm/Rsqrt" [id=1299, type=Sqrt]; +"1300 bert/encoder/layer_9/output/LayerNorm/batchnorm/Rsqrt__441" [id=1300, type=Reciprocal]; +"1301 bert/encoder/layer_9/output/LayerNorm/batchnorm/mul" [id=1301, type=Mul]; +"1302 bert/encoder/layer_9/output/LayerNorm/batchnorm/mul_2" [id=1302, type=Mul]; +"1303 bert/encoder/layer_9/output/LayerNorm/batchnorm/sub" [id=1303, type=Sub]; +"1304 bert/encoder/layer_9/output/LayerNorm/batchnorm/mul_1" [id=1304, type=Mul]; +"1305 bert/encoder/layer_9/output/LayerNorm/batchnorm/add_1" [id=1305, type=Add]; +"1306 QuantizeLinear_bert/encoder/layer_9/output/LayerNorm/batchnorm/add_1^0_3" [id=1306, label="1306 QuantizeLinear_bert/encoder/layer_9/output/LayerNorm/batchnorm/add_1:0_3", type=QuantizeLinear]; +"1307 DequantizeLinear_bert/encoder/layer_9/output/LayerNorm/batchnorm/add_1^0_3" [id=1307, label="1307 DequantizeLinear_bert/encoder/layer_9/output/LayerNorm/batchnorm/add_1:0_3", type=DequantizeLinear]; +"1308 QuantizeLinear_bert/encoder/layer_9/output/LayerNorm/batchnorm/add_1^0_2" [id=1308, label="1308 QuantizeLinear_bert/encoder/layer_9/output/LayerNorm/batchnorm/add_1:0_2", type=QuantizeLinear]; +"1309 DequantizeLinear_bert/encoder/layer_9/output/LayerNorm/batchnorm/add_1^0_2" [id=1309, label="1309 DequantizeLinear_bert/encoder/layer_9/output/LayerNorm/batchnorm/add_1:0_2", type=DequantizeLinear]; +"1310 QuantizeLinear_bert/encoder/layer_9/output/LayerNorm/batchnorm/add_1^0_1" [id=1310, label="1310 QuantizeLinear_bert/encoder/layer_9/output/LayerNorm/batchnorm/add_1:0_1", type=QuantizeLinear]; +"1311 DequantizeLinear_bert/encoder/layer_9/output/LayerNorm/batchnorm/add_1^0_1" [id=1311, label="1311 DequantizeLinear_bert/encoder/layer_9/output/LayerNorm/batchnorm/add_1:0_1", type=DequantizeLinear]; +"1312 QuantizeLinear_bert/encoder/layer_10/attention/self/value/kernel^0_1" [id=1312, label="1312 QuantizeLinear_bert/encoder/layer_10/attention/self/value/kernel:0_1", type=QuantizeLinear]; +"1313 DequantizeLinear_bert/encoder/layer_10/attention/self/value/kernel^0_1" [id=1313, label="1313 DequantizeLinear_bert/encoder/layer_10/attention/self/value/kernel:0_1", type=DequantizeLinear]; +"1314 bert/encoder/layer_10/attention/self/value/MatMul" [id=1314, type=MatMul]; +"1315 bert/encoder/layer_10/attention/self/value/BiasAdd" [id=1315, type=Add]; +"1316 bert/encoder/layer_10/attention/self/Reshape_2" [id=1316, type=Reshape]; +"1317 bert/encoder/layer_10/attention/self/transpose_2" [id=1317, type=Transpose]; +"1318 QuantizeLinear_bert/encoder/layer_10/attention/self/query/kernel^0_1" [id=1318, label="1318 QuantizeLinear_bert/encoder/layer_10/attention/self/query/kernel:0_1", type=QuantizeLinear]; +"1319 DequantizeLinear_bert/encoder/layer_10/attention/self/query/kernel^0_1" [id=1319, label="1319 DequantizeLinear_bert/encoder/layer_10/attention/self/query/kernel:0_1", type=DequantizeLinear]; +"1320 bert/encoder/layer_10/attention/self/query/MatMul" [id=1320, type=MatMul]; +"1321 bert/encoder/layer_10/attention/self/query/BiasAdd" [id=1321, type=Add]; +"1322 
QuantizeLinear_bert/encoder/layer_10/attention/self/query/BiasAdd^0_1" [id=1322, label="1322 QuantizeLinear_bert/encoder/layer_10/attention/self/query/BiasAdd:0_1", type=QuantizeLinear]; +"1323 DequantizeLinear_bert/encoder/layer_10/attention/self/query/BiasAdd^0_1" [id=1323, label="1323 DequantizeLinear_bert/encoder/layer_10/attention/self/query/BiasAdd:0_1", type=DequantizeLinear]; +"1324 bert/encoder/layer_10/attention/self/Reshape" [id=1324, type=Reshape]; +"1325 bert/encoder/layer_10/attention/self/transpose" [id=1325, type=Transpose]; +"1326 QuantizeLinear_bert/encoder/layer_10/attention/self/key/kernel^0_1" [id=1326, label="1326 QuantizeLinear_bert/encoder/layer_10/attention/self/key/kernel:0_1", type=QuantizeLinear]; +"1327 DequantizeLinear_bert/encoder/layer_10/attention/self/key/kernel^0_1" [id=1327, label="1327 DequantizeLinear_bert/encoder/layer_10/attention/self/key/kernel:0_1", type=DequantizeLinear]; +"1328 bert/encoder/layer_10/attention/self/key/MatMul" [id=1328, type=MatMul]; +"1329 bert/encoder/layer_10/attention/self/key/BiasAdd" [id=1329, type=Add]; +"1330 QuantizeLinear_bert/encoder/layer_10/attention/self/key/BiasAdd^0_1" [id=1330, label="1330 QuantizeLinear_bert/encoder/layer_10/attention/self/key/BiasAdd:0_1", type=QuantizeLinear]; +"1331 DequantizeLinear_bert/encoder/layer_10/attention/self/key/BiasAdd^0_1" [id=1331, label="1331 DequantizeLinear_bert/encoder/layer_10/attention/self/key/BiasAdd:0_1", type=DequantizeLinear]; +"1332 bert/encoder/layer_10/attention/self/Reshape_1" [id=1332, type=Reshape]; +"1333 bert/encoder/layer_10/attention/self/transpose_1" [id=1333, type=Transpose]; +"1334 bert/encoder/layer_10/attention/self/MatMul__446" [id=1334, type=Transpose]; +"1335 bert/encoder/layer_10/attention/self/MatMul" [id=1335, type=MatMul]; +"1336 bert/encoder/layer_10/attention/self/Mul" [id=1336, type=Mul]; +"1337 bert/encoder/layer_10/attention/self/add" [id=1337, type=Add]; +"1338 bert/encoder/layer_10/attention/self/Softmax" [id=1338, type=Softmax]; +"1339 bert/encoder/layer_10/attention/self/MatMul_1" [id=1339, type=MatMul]; +"1340 QuantizeLinear_bert/encoder/layer_10/attention/self/MatMul_1^0_1" [id=1340, label="1340 QuantizeLinear_bert/encoder/layer_10/attention/self/MatMul_1:0_1", type=QuantizeLinear]; +"1341 DequantizeLinear_bert/encoder/layer_10/attention/self/MatMul_1^0_1" [id=1341, label="1341 DequantizeLinear_bert/encoder/layer_10/attention/self/MatMul_1:0_1", type=DequantizeLinear]; +"1342 bert/encoder/layer_10/attention/self/transpose_3" [id=1342, type=Transpose]; +"1343 bert/encoder/layer_10/attention/self/Reshape_3" [id=1343, type=Reshape]; +"1344 QuantizeLinear_bert/encoder/layer_10/attention/output/dense/kernel^0_1" [id=1344, label="1344 QuantizeLinear_bert/encoder/layer_10/attention/output/dense/kernel:0_1", type=QuantizeLinear]; +"1345 DequantizeLinear_bert/encoder/layer_10/attention/output/dense/kernel^0_1" [id=1345, label="1345 DequantizeLinear_bert/encoder/layer_10/attention/output/dense/kernel:0_1", type=DequantizeLinear]; +"1346 bert/encoder/layer_10/attention/output/dense/MatMul" [id=1346, type=MatMul]; +"1347 bert/encoder/layer_10/attention/output/dense/BiasAdd" [id=1347, type=Add]; +"1348 bert/encoder/layer_10/attention/output/add" [id=1348, type=Add]; +"1349 bert/encoder/layer_10/attention/output/LayerNorm/moments/mean" [id=1349, type=ReduceMean]; +"1350 bert/encoder/layer_10/attention/output/LayerNorm/moments/StopGradient" [id=1350, type=Identity]; +"1351 bert/encoder/layer_10/attention/output/LayerNorm/moments/SquaredDifference" 
[id=1351, type=Sub]; +"1352 bert/encoder/layer_10/attention/output/LayerNorm/moments/SquaredDifference__449" [id=1352, type=Mul]; +"1353 bert/encoder/layer_10/attention/output/LayerNorm/moments/variance" [id=1353, type=ReduceMean]; +"1354 bert/encoder/layer_10/attention/output/LayerNorm/batchnorm/add" [id=1354, type=Add]; +"1355 bert/encoder/layer_10/attention/output/LayerNorm/batchnorm/Rsqrt" [id=1355, type=Sqrt]; +"1356 bert/encoder/layer_10/attention/output/LayerNorm/batchnorm/Rsqrt__451" [id=1356, type=Reciprocal]; +"1357 bert/encoder/layer_10/attention/output/LayerNorm/batchnorm/mul" [id=1357, type=Mul]; +"1358 bert/encoder/layer_10/attention/output/LayerNorm/batchnorm/mul_2" [id=1358, type=Mul]; +"1359 bert/encoder/layer_10/attention/output/LayerNorm/batchnorm/sub" [id=1359, type=Sub]; +"1360 bert/encoder/layer_10/attention/output/LayerNorm/batchnorm/mul_1" [id=1360, type=Mul]; +"1361 bert/encoder/layer_10/attention/output/LayerNorm/batchnorm/add_1" [id=1361, type=Add]; +"1362 QuantizeLinear_bert/encoder/layer_10/attention/output/LayerNorm/batchnorm/add_1^0_1" [id=1362, label="1362 QuantizeLinear_bert/encoder/layer_10/attention/output/LayerNorm/batchnorm/add_1:0_1", type=QuantizeLinear]; +"1363 DequantizeLinear_bert/encoder/layer_10/attention/output/LayerNorm/batchnorm/add_1^0_1" [id=1363, label="1363 DequantizeLinear_bert/encoder/layer_10/attention/output/LayerNorm/batchnorm/add_1:0_1", type=DequantizeLinear]; +"1364 QuantizeLinear_bert/encoder/layer_10/intermediate/dense/kernel^0_1" [id=1364, label="1364 QuantizeLinear_bert/encoder/layer_10/intermediate/dense/kernel:0_1", type=QuantizeLinear]; +"1365 DequantizeLinear_bert/encoder/layer_10/intermediate/dense/kernel^0_1" [id=1365, label="1365 DequantizeLinear_bert/encoder/layer_10/intermediate/dense/kernel:0_1", type=DequantizeLinear]; +"1366 bert/encoder/layer_10/intermediate/dense/MatMul" [id=1366, type=MatMul]; +"1367 bert/encoder/layer_10/intermediate/dense/BiasAdd" [id=1367, type=Add]; +"1368 bert/encoder/layer_10/intermediate/dense/Pow" [id=1368, type=Pow]; +"1369 bert/encoder/layer_10/intermediate/dense/mul" [id=1369, type=Mul]; +"1370 bert/encoder/layer_10/intermediate/dense/add" [id=1370, type=Add]; +"1371 bert/encoder/layer_10/intermediate/dense/mul_1" [id=1371, type=Mul]; +"1372 bert/encoder/layer_10/intermediate/dense/Tanh" [id=1372, type=Tanh]; +"1373 bert/encoder/layer_10/intermediate/dense/add_1" [id=1373, type=Add]; +"1374 bert/encoder/layer_10/intermediate/dense/mul_2" [id=1374, type=Mul]; +"1375 bert/encoder/layer_10/intermediate/dense/mul_3" [id=1375, type=Mul]; +"1376 QuantizeLinear_bert/encoder/layer_10/intermediate/dense/mul_3^0_1" [id=1376, label="1376 QuantizeLinear_bert/encoder/layer_10/intermediate/dense/mul_3:0_1", type=QuantizeLinear]; +"1377 DequantizeLinear_bert/encoder/layer_10/intermediate/dense/mul_3^0_1" [id=1377, label="1377 DequantizeLinear_bert/encoder/layer_10/intermediate/dense/mul_3:0_1", type=DequantizeLinear]; +"1378 QuantizeLinear_bert/encoder/layer_10/output/dense/kernel^0_1" [id=1378, label="1378 QuantizeLinear_bert/encoder/layer_10/output/dense/kernel:0_1", type=QuantizeLinear]; +"1379 DequantizeLinear_bert/encoder/layer_10/output/dense/kernel^0_1" [id=1379, label="1379 DequantizeLinear_bert/encoder/layer_10/output/dense/kernel:0_1", type=DequantizeLinear]; +"1380 bert/encoder/layer_10/output/dense/MatMul" [id=1380, type=MatMul]; +"1381 bert/encoder/layer_10/output/dense/BiasAdd" [id=1381, type=Add]; +"1382 bert/encoder/layer_10/output/add" [id=1382, type=Add]; +"1383 
bert/encoder/layer_10/output/LayerNorm/moments/mean" [id=1383, type=ReduceMean]; +"1384 bert/encoder/layer_10/output/LayerNorm/moments/StopGradient" [id=1384, type=Identity]; +"1385 bert/encoder/layer_10/output/LayerNorm/moments/SquaredDifference" [id=1385, type=Sub]; +"1386 bert/encoder/layer_10/output/LayerNorm/moments/SquaredDifference__453" [id=1386, type=Mul]; +"1387 bert/encoder/layer_10/output/LayerNorm/moments/variance" [id=1387, type=ReduceMean]; +"1388 bert/encoder/layer_10/output/LayerNorm/batchnorm/add" [id=1388, type=Add]; +"1389 bert/encoder/layer_10/output/LayerNorm/batchnorm/Rsqrt" [id=1389, type=Sqrt]; +"1390 bert/encoder/layer_10/output/LayerNorm/batchnorm/Rsqrt__455" [id=1390, type=Reciprocal]; +"1391 bert/encoder/layer_10/output/LayerNorm/batchnorm/mul" [id=1391, type=Mul]; +"1392 bert/encoder/layer_10/output/LayerNorm/batchnorm/mul_2" [id=1392, type=Mul]; +"1393 bert/encoder/layer_10/output/LayerNorm/batchnorm/sub" [id=1393, type=Sub]; +"1394 bert/encoder/layer_10/output/LayerNorm/batchnorm/mul_1" [id=1394, type=Mul]; +"1395 bert/encoder/layer_10/output/LayerNorm/batchnorm/add_1" [id=1395, type=Add]; +"1396 QuantizeLinear_bert/encoder/layer_10/output/LayerNorm/batchnorm/add_1^0_3" [id=1396, label="1396 QuantizeLinear_bert/encoder/layer_10/output/LayerNorm/batchnorm/add_1:0_3", type=QuantizeLinear]; +"1397 DequantizeLinear_bert/encoder/layer_10/output/LayerNorm/batchnorm/add_1^0_3" [id=1397, label="1397 DequantizeLinear_bert/encoder/layer_10/output/LayerNorm/batchnorm/add_1:0_3", type=DequantizeLinear]; +"1398 QuantizeLinear_bert/encoder/layer_10/output/LayerNorm/batchnorm/add_1^0_2" [id=1398, label="1398 QuantizeLinear_bert/encoder/layer_10/output/LayerNorm/batchnorm/add_1:0_2", type=QuantizeLinear]; +"1399 DequantizeLinear_bert/encoder/layer_10/output/LayerNorm/batchnorm/add_1^0_2" [id=1399, label="1399 DequantizeLinear_bert/encoder/layer_10/output/LayerNorm/batchnorm/add_1:0_2", type=DequantizeLinear]; +"1400 QuantizeLinear_bert/encoder/layer_10/output/LayerNorm/batchnorm/add_1^0_1" [id=1400, label="1400 QuantizeLinear_bert/encoder/layer_10/output/LayerNorm/batchnorm/add_1:0_1", type=QuantizeLinear]; +"1401 DequantizeLinear_bert/encoder/layer_10/output/LayerNorm/batchnorm/add_1^0_1" [id=1401, label="1401 DequantizeLinear_bert/encoder/layer_10/output/LayerNorm/batchnorm/add_1:0_1", type=DequantizeLinear]; +"1402 QuantizeLinear_bert/encoder/layer_11/attention/self/value/kernel^0_1" [id=1402, label="1402 QuantizeLinear_bert/encoder/layer_11/attention/self/value/kernel:0_1", type=QuantizeLinear]; +"1403 DequantizeLinear_bert/encoder/layer_11/attention/self/value/kernel^0_1" [id=1403, label="1403 DequantizeLinear_bert/encoder/layer_11/attention/self/value/kernel:0_1", type=DequantizeLinear]; +"1404 bert/encoder/layer_11/attention/self/value/MatMul" [id=1404, type=MatMul]; +"1405 bert/encoder/layer_11/attention/self/value/BiasAdd" [id=1405, type=Add]; +"1406 bert/encoder/layer_11/attention/self/Reshape_2" [id=1406, type=Reshape]; +"1407 bert/encoder/layer_11/attention/self/transpose_2" [id=1407, type=Transpose]; +"1408 QuantizeLinear_bert/encoder/layer_11/attention/self/query/kernel^0_1" [id=1408, label="1408 QuantizeLinear_bert/encoder/layer_11/attention/self/query/kernel:0_1", type=QuantizeLinear]; +"1409 DequantizeLinear_bert/encoder/layer_11/attention/self/query/kernel^0_1" [id=1409, label="1409 DequantizeLinear_bert/encoder/layer_11/attention/self/query/kernel:0_1", type=DequantizeLinear]; +"1410 bert/encoder/layer_11/attention/self/query/MatMul" [id=1410, 
type=MatMul]; +"1411 bert/encoder/layer_11/attention/self/query/BiasAdd" [id=1411, type=Add]; +"1412 QuantizeLinear_bert/encoder/layer_11/attention/self/query/BiasAdd^0_1" [id=1412, label="1412 QuantizeLinear_bert/encoder/layer_11/attention/self/query/BiasAdd:0_1", type=QuantizeLinear]; +"1413 DequantizeLinear_bert/encoder/layer_11/attention/self/query/BiasAdd^0_1" [id=1413, label="1413 DequantizeLinear_bert/encoder/layer_11/attention/self/query/BiasAdd:0_1", type=DequantizeLinear]; +"1414 bert/encoder/layer_11/attention/self/Reshape" [id=1414, type=Reshape]; +"1415 bert/encoder/layer_11/attention/self/transpose" [id=1415, type=Transpose]; +"1416 QuantizeLinear_bert/encoder/layer_11/attention/self/key/kernel^0_1" [id=1416, label="1416 QuantizeLinear_bert/encoder/layer_11/attention/self/key/kernel:0_1", type=QuantizeLinear]; +"1417 DequantizeLinear_bert/encoder/layer_11/attention/self/key/kernel^0_1" [id=1417, label="1417 DequantizeLinear_bert/encoder/layer_11/attention/self/key/kernel:0_1", type=DequantizeLinear]; +"1418 bert/encoder/layer_11/attention/self/key/MatMul" [id=1418, type=MatMul]; +"1419 bert/encoder/layer_11/attention/self/key/BiasAdd" [id=1419, type=Add]; +"1420 QuantizeLinear_bert/encoder/layer_11/attention/self/key/BiasAdd^0_1" [id=1420, label="1420 QuantizeLinear_bert/encoder/layer_11/attention/self/key/BiasAdd:0_1", type=QuantizeLinear]; +"1421 DequantizeLinear_bert/encoder/layer_11/attention/self/key/BiasAdd^0_1" [id=1421, label="1421 DequantizeLinear_bert/encoder/layer_11/attention/self/key/BiasAdd:0_1", type=DequantizeLinear]; +"1422 bert/encoder/layer_11/attention/self/Reshape_1" [id=1422, type=Reshape]; +"1423 bert/encoder/layer_11/attention/self/transpose_1" [id=1423, type=Transpose]; +"1424 bert/encoder/layer_11/attention/self/MatMul__460" [id=1424, type=Transpose]; +"1425 bert/encoder/layer_11/attention/self/MatMul" [id=1425, type=MatMul]; +"1426 bert/encoder/layer_11/attention/self/Mul" [id=1426, type=Mul]; +"1427 bert/encoder/layer_11/attention/self/add" [id=1427, type=Add]; +"1428 bert/encoder/layer_11/attention/self/Softmax" [id=1428, type=Softmax]; +"1429 bert/encoder/layer_11/attention/self/MatMul_1" [id=1429, type=MatMul]; +"1430 QuantizeLinear_bert/encoder/layer_11/attention/self/MatMul_1^0_1" [id=1430, label="1430 QuantizeLinear_bert/encoder/layer_11/attention/self/MatMul_1:0_1", type=QuantizeLinear]; +"1431 DequantizeLinear_bert/encoder/layer_11/attention/self/MatMul_1^0_1" [id=1431, label="1431 DequantizeLinear_bert/encoder/layer_11/attention/self/MatMul_1:0_1", type=DequantizeLinear]; +"1432 bert/encoder/layer_11/attention/self/transpose_3" [id=1432, type=Transpose]; +"1433 bert/encoder/layer_11/attention/self/Reshape_3" [id=1433, type=Reshape]; +"1434 QuantizeLinear_bert/encoder/layer_11/attention/output/dense/kernel^0_1" [id=1434, label="1434 QuantizeLinear_bert/encoder/layer_11/attention/output/dense/kernel:0_1", type=QuantizeLinear]; +"1435 DequantizeLinear_bert/encoder/layer_11/attention/output/dense/kernel^0_1" [id=1435, label="1435 DequantizeLinear_bert/encoder/layer_11/attention/output/dense/kernel:0_1", type=DequantizeLinear]; +"1436 bert/encoder/layer_11/attention/output/dense/MatMul" [id=1436, type=MatMul]; +"1437 bert/encoder/layer_11/attention/output/dense/BiasAdd" [id=1437, type=Add]; +"1438 bert/encoder/layer_11/attention/output/add" [id=1438, type=Add]; +"1439 bert/encoder/layer_11/attention/output/LayerNorm/moments/mean" [id=1439, type=ReduceMean]; +"1440 bert/encoder/layer_11/attention/output/LayerNorm/moments/StopGradient" [id=1440, 
type=Identity]; +"1441 bert/encoder/layer_11/attention/output/LayerNorm/moments/SquaredDifference" [id=1441, type=Sub]; +"1442 bert/encoder/layer_11/attention/output/LayerNorm/moments/SquaredDifference__463" [id=1442, type=Mul]; +"1443 bert/encoder/layer_11/attention/output/LayerNorm/moments/variance" [id=1443, type=ReduceMean]; +"1444 bert/encoder/layer_11/attention/output/LayerNorm/batchnorm/add" [id=1444, type=Add]; +"1445 bert/encoder/layer_11/attention/output/LayerNorm/batchnorm/Rsqrt" [id=1445, type=Sqrt]; +"1446 bert/encoder/layer_11/attention/output/LayerNorm/batchnorm/Rsqrt__465" [id=1446, type=Reciprocal]; +"1447 bert/encoder/layer_11/attention/output/LayerNorm/batchnorm/mul" [id=1447, type=Mul]; +"1448 bert/encoder/layer_11/attention/output/LayerNorm/batchnorm/mul_2" [id=1448, type=Mul]; +"1449 bert/encoder/layer_11/attention/output/LayerNorm/batchnorm/sub" [id=1449, type=Sub]; +"1450 bert/encoder/layer_11/attention/output/LayerNorm/batchnorm/mul_1" [id=1450, type=Mul]; +"1451 bert/encoder/layer_11/attention/output/LayerNorm/batchnorm/add_1" [id=1451, type=Add]; +"1452 QuantizeLinear_bert/encoder/layer_11/attention/output/LayerNorm/batchnorm/add_1^0_1" [id=1452, label="1452 QuantizeLinear_bert/encoder/layer_11/attention/output/LayerNorm/batchnorm/add_1:0_1", type=QuantizeLinear]; +"1453 DequantizeLinear_bert/encoder/layer_11/attention/output/LayerNorm/batchnorm/add_1^0_1" [id=1453, label="1453 DequantizeLinear_bert/encoder/layer_11/attention/output/LayerNorm/batchnorm/add_1:0_1", type=DequantizeLinear]; +"1454 QuantizeLinear_bert/encoder/layer_11/intermediate/dense/kernel^0_1" [id=1454, label="1454 QuantizeLinear_bert/encoder/layer_11/intermediate/dense/kernel:0_1", type=QuantizeLinear]; +"1455 DequantizeLinear_bert/encoder/layer_11/intermediate/dense/kernel^0_1" [id=1455, label="1455 DequantizeLinear_bert/encoder/layer_11/intermediate/dense/kernel:0_1", type=DequantizeLinear]; +"1456 bert/encoder/layer_11/intermediate/dense/MatMul" [id=1456, type=MatMul]; +"1457 bert/encoder/layer_11/intermediate/dense/BiasAdd" [id=1457, type=Add]; +"1458 bert/encoder/layer_11/intermediate/dense/Pow" [id=1458, type=Pow]; +"1459 bert/encoder/layer_11/intermediate/dense/mul" [id=1459, type=Mul]; +"1460 bert/encoder/layer_11/intermediate/dense/add" [id=1460, type=Add]; +"1461 bert/encoder/layer_11/intermediate/dense/mul_1" [id=1461, type=Mul]; +"1462 bert/encoder/layer_11/intermediate/dense/Tanh" [id=1462, type=Tanh]; +"1463 bert/encoder/layer_11/intermediate/dense/add_1" [id=1463, type=Add]; +"1464 bert/encoder/layer_11/intermediate/dense/mul_2" [id=1464, type=Mul]; +"1465 bert/encoder/layer_11/intermediate/dense/mul_3" [id=1465, type=Mul]; +"1466 QuantizeLinear_bert/encoder/layer_11/intermediate/dense/mul_3^0_1" [id=1466, label="1466 QuantizeLinear_bert/encoder/layer_11/intermediate/dense/mul_3:0_1", type=QuantizeLinear]; +"1467 DequantizeLinear_bert/encoder/layer_11/intermediate/dense/mul_3^0_1" [id=1467, label="1467 DequantizeLinear_bert/encoder/layer_11/intermediate/dense/mul_3:0_1", type=DequantizeLinear]; +"1468 QuantizeLinear_bert/encoder/layer_11/output/dense/kernel^0_1" [id=1468, label="1468 QuantizeLinear_bert/encoder/layer_11/output/dense/kernel:0_1", type=QuantizeLinear]; +"1469 DequantizeLinear_bert/encoder/layer_11/output/dense/kernel^0_1" [id=1469, label="1469 DequantizeLinear_bert/encoder/layer_11/output/dense/kernel:0_1", type=DequantizeLinear]; +"1470 bert/encoder/layer_11/output/dense/MatMul" [id=1470, type=MatMul]; +"1471 bert/encoder/layer_11/output/dense/BiasAdd" [id=1471, 
type=Add]; +"1472 bert/encoder/layer_11/output/add" [id=1472, type=Add]; +"1473 bert/encoder/layer_11/output/LayerNorm/moments/mean" [id=1473, type=ReduceMean]; +"1474 bert/encoder/layer_11/output/LayerNorm/moments/StopGradient" [id=1474, type=Identity]; +"1475 bert/encoder/layer_11/output/LayerNorm/moments/SquaredDifference" [id=1475, type=Sub]; +"1476 bert/encoder/layer_11/output/LayerNorm/moments/SquaredDifference__467" [id=1476, type=Mul]; +"1477 bert/encoder/layer_11/output/LayerNorm/moments/variance" [id=1477, type=ReduceMean]; +"1478 bert/encoder/layer_11/output/LayerNorm/batchnorm/add" [id=1478, type=Add]; +"1479 bert/encoder/layer_11/output/LayerNorm/batchnorm/Rsqrt" [id=1479, type=Sqrt]; +"1480 bert/encoder/layer_11/output/LayerNorm/batchnorm/Rsqrt__469" [id=1480, type=Reciprocal]; +"1481 bert/encoder/layer_11/output/LayerNorm/batchnorm/mul" [id=1481, type=Mul]; +"1482 bert/encoder/layer_11/output/LayerNorm/batchnorm/mul_2" [id=1482, type=Mul]; +"1483 bert/encoder/layer_11/output/LayerNorm/batchnorm/sub" [id=1483, type=Sub]; +"1484 bert/encoder/layer_11/output/LayerNorm/batchnorm/mul_1" [id=1484, type=Mul]; +"1485 bert/encoder/layer_11/output/LayerNorm/batchnorm/add_1" [id=1485, type=Add]; +"1486 QuantizeLinear_bert/encoder/layer_11/output/LayerNorm/batchnorm/add_1^0_1" [id=1486, label="1486 QuantizeLinear_bert/encoder/layer_11/output/LayerNorm/batchnorm/add_1:0_1", type=QuantizeLinear]; +"1487 DequantizeLinear_bert/encoder/layer_11/output/LayerNorm/batchnorm/add_1^0_1" [id=1487, label="1487 DequantizeLinear_bert/encoder/layer_11/output/LayerNorm/batchnorm/add_1:0_1", type=DequantizeLinear]; +"1488 bert/encoder/Reshape_13" [id=1488, type=Reshape]; +"1489 Shape_1" [id=1489, type=Shape]; +"1490 Shape_1__472" [id=1490, type=Cast]; +"1491 strided_slice_1" [id=1491, type=Slice]; +"1492 strided_slice_1__476" [id=1492, type=Squeeze]; +"1493 strided_slice_1__477" [id=1493, type=Cast]; +"1494 mul" [id=1494, type=Mul]; +"1495 Reshape/shape_Unsqueeze__482" [id=1495, type=Unsqueeze]; +"1496 Reshape/shape_Concat__484" [id=1496, type=Concat]; +"1497 Reshape__485" [id=1497, type=Cast]; +"1498 Reshape_1/shape_Unsqueeze__478" [id=1498, type=Unsqueeze]; +"1499 Reshape_1/shape_Concat__481" [id=1499, type=Concat]; +"1500 Reshape_1__487" [id=1500, type=Cast]; +"1501 Reshape" [id=1501, type=Reshape]; +"1502 QuantizeLinear_MatMul__486^0_1" [id=1502, label="1502 QuantizeLinear_MatMul__486:0_1", type=QuantizeLinear]; +"1503 DequantizeLinear_MatMul__486^0_1" [id=1503, label="1503 DequantizeLinear_MatMul__486:0_1", type=DequantizeLinear]; +"1504 MatMul" [id=1504, type=MatMul]; +"1505 BiasAdd" [id=1505, type=Add]; +"1506 Reshape_1" [id=1506, type=Reshape]; +"1507 transpose" [id=1507, type=Transpose]; +"1508 unstack" [id=1508, type=Split]; +"1509 unstack__490" [id=1509, type=Squeeze]; +"1510 unstack_graph_outputs_Identity__4" [id=1510, type=Identity]; +"1511 unstack__488" [id=1511, type=Squeeze]; +"1512 unstack_graph_outputs_Identity__7" [id=1512, type=Identity]; +"1513 nncf_model_input_0" [id=1513, type=nncf_model_input]; +"1514 nncf_model_input_1" [id=1514, type=nncf_model_input]; +"1515 nncf_model_input_2" [id=1515, type=nncf_model_input]; +"1516 nncf_model_input_3" [id=1516, type=nncf_model_input]; +"1517 nncf_model_output_0" [id=1517, type=nncf_model_output]; +"1518 nncf_model_output_1" [id=1518, type=nncf_model_output]; +"1519 nncf_model_output_2" [id=1519, type=nncf_model_output]; +"0 unique_ids_graph_outputs_Identity__10" -> "1519 nncf_model_output_2" [label="[-1]", style=dashed]; +"1 
bert/encoder/ones/packed_Unsqueeze__20" -> "129 bert/encoder/ones/packed_Concat__21" [label="[1]", style=dashed]; +"2 bert/encoder/ones/packed_Unsqueeze__19" -> "129 bert/encoder/ones/packed_Concat__21" [label="[1]", style=dashed]; +"3 bert/encoder/layer_9/attention/self/Reshape_3/shape_Unsqueeze__83" -> "248 bert/encoder/layer_9/attention/self/Reshape_3/shape_Concat__84" [label="[1]", style=dashed]; +"4 bert/encoder/layer_9/attention/self/Reshape_2/shape_Unsqueeze__88" -> "251 bert/encoder/layer_9/attention/self/Reshape_2/shape_Concat__89" [label="[1]", style=dashed]; +"5 bert/encoder/layer_9/attention/self/Reshape_2/shape_Unsqueeze__87" -> "251 bert/encoder/layer_9/attention/self/Reshape_2/shape_Concat__89" [label="[1]", style=dashed]; +"6 bert/encoder/layer_9/attention/self/Reshape_2/shape_Unsqueeze__86" -> "251 bert/encoder/layer_9/attention/self/Reshape_2/shape_Concat__89" [label="[1]", style=dashed]; +"7 bert/encoder/layer_9/attention/self/Reshape_1/shape_Unsqueeze__93" -> "254 bert/encoder/layer_9/attention/self/Reshape_1/shape_Concat__94" [label="[1]", style=dashed]; +"8 bert/encoder/layer_9/attention/self/Reshape_1/shape_Unsqueeze__92" -> "254 bert/encoder/layer_9/attention/self/Reshape_1/shape_Concat__94" [label="[1]", style=dashed]; +"9 bert/encoder/layer_9/attention/self/Reshape_1/shape_Unsqueeze__91" -> "254 bert/encoder/layer_9/attention/self/Reshape_1/shape_Concat__94" [label="[1]", style=dashed]; +"10 bert/encoder/layer_9/attention/self/Reshape/shape_Unsqueeze__98" -> "257 bert/encoder/layer_9/attention/self/Reshape/shape_Concat__99" [label="[1]", style=dashed]; +"11 bert/encoder/layer_9/attention/self/Reshape/shape_Unsqueeze__97" -> "257 bert/encoder/layer_9/attention/self/Reshape/shape_Concat__99" [label="[1]", style=dashed]; +"12 bert/encoder/layer_9/attention/self/Reshape/shape_Unsqueeze__96" -> "257 bert/encoder/layer_9/attention/self/Reshape/shape_Concat__99" [label="[1]", style=dashed]; +"13 bert/encoder/layer_8/attention/self/Reshape_3/shape_Unsqueeze__101" -> "261 bert/encoder/layer_8/attention/self/Reshape_3/shape_Concat__102" [label="[1]", style=dashed]; +"14 bert/encoder/layer_8/attention/self/Reshape_2/shape_Unsqueeze__106" -> "264 bert/encoder/layer_8/attention/self/Reshape_2/shape_Concat__107" [label="[1]", style=dashed]; +"15 bert/encoder/layer_8/attention/self/Reshape_2/shape_Unsqueeze__105" -> "264 bert/encoder/layer_8/attention/self/Reshape_2/shape_Concat__107" [label="[1]", style=dashed]; +"16 bert/encoder/layer_8/attention/self/Reshape_2/shape_Unsqueeze__104" -> "264 bert/encoder/layer_8/attention/self/Reshape_2/shape_Concat__107" [label="[1]", style=dashed]; +"17 bert/encoder/layer_8/attention/self/Reshape_1/shape_Unsqueeze__111" -> "267 bert/encoder/layer_8/attention/self/Reshape_1/shape_Concat__112" [label="[1]", style=dashed]; +"18 bert/encoder/layer_8/attention/self/Reshape_1/shape_Unsqueeze__110" -> "267 bert/encoder/layer_8/attention/self/Reshape_1/shape_Concat__112" [label="[1]", style=dashed]; +"19 bert/encoder/layer_8/attention/self/Reshape_1/shape_Unsqueeze__109" -> "267 bert/encoder/layer_8/attention/self/Reshape_1/shape_Concat__112" [label="[1]", style=dashed]; +"20 bert/encoder/layer_8/attention/self/Reshape/shape_Unsqueeze__116" -> "270 bert/encoder/layer_8/attention/self/Reshape/shape_Concat__117" [label="[1]", style=dashed]; +"21 bert/encoder/layer_8/attention/self/Reshape/shape_Unsqueeze__115" -> "270 bert/encoder/layer_8/attention/self/Reshape/shape_Concat__117" [label="[1]", style=dashed]; +"22 
bert/encoder/layer_8/attention/self/Reshape/shape_Unsqueeze__114" -> "270 bert/encoder/layer_8/attention/self/Reshape/shape_Concat__117" [label="[1]", style=dashed]; +"23 bert/encoder/layer_7/attention/self/Reshape_3/shape_Unsqueeze__119" -> "274 bert/encoder/layer_7/attention/self/Reshape_3/shape_Concat__120" [label="[1]", style=dashed]; +"24 bert/encoder/layer_7/attention/self/Reshape_2/shape_Unsqueeze__124" -> "277 bert/encoder/layer_7/attention/self/Reshape_2/shape_Concat__125" [label="[1]", style=dashed]; +"25 bert/encoder/layer_7/attention/self/Reshape_2/shape_Unsqueeze__123" -> "277 bert/encoder/layer_7/attention/self/Reshape_2/shape_Concat__125" [label="[1]", style=dashed]; +"26 bert/encoder/layer_7/attention/self/Reshape_2/shape_Unsqueeze__122" -> "277 bert/encoder/layer_7/attention/self/Reshape_2/shape_Concat__125" [label="[1]", style=dashed]; +"27 bert/encoder/layer_7/attention/self/Reshape_1/shape_Unsqueeze__129" -> "280 bert/encoder/layer_7/attention/self/Reshape_1/shape_Concat__130" [label="[1]", style=dashed]; +"28 bert/encoder/layer_7/attention/self/Reshape_1/shape_Unsqueeze__128" -> "280 bert/encoder/layer_7/attention/self/Reshape_1/shape_Concat__130" [label="[1]", style=dashed]; +"29 bert/encoder/layer_7/attention/self/Reshape_1/shape_Unsqueeze__127" -> "280 bert/encoder/layer_7/attention/self/Reshape_1/shape_Concat__130" [label="[1]", style=dashed]; +"30 bert/encoder/layer_7/attention/self/Reshape/shape_Unsqueeze__134" -> "283 bert/encoder/layer_7/attention/self/Reshape/shape_Concat__135" [label="[1]", style=dashed]; +"31 bert/encoder/layer_7/attention/self/Reshape/shape_Unsqueeze__133" -> "283 bert/encoder/layer_7/attention/self/Reshape/shape_Concat__135" [label="[1]", style=dashed]; +"32 bert/encoder/layer_7/attention/self/Reshape/shape_Unsqueeze__132" -> "283 bert/encoder/layer_7/attention/self/Reshape/shape_Concat__135" [label="[1]", style=dashed]; +"33 bert/encoder/layer_6/attention/self/Reshape_3/shape_Unsqueeze__137" -> "287 bert/encoder/layer_6/attention/self/Reshape_3/shape_Concat__138" [label="[1]", style=dashed]; +"34 bert/encoder/layer_6/attention/self/Reshape_2/shape_Unsqueeze__142" -> "290 bert/encoder/layer_6/attention/self/Reshape_2/shape_Concat__143" [label="[1]", style=dashed]; +"35 bert/encoder/layer_6/attention/self/Reshape_2/shape_Unsqueeze__141" -> "290 bert/encoder/layer_6/attention/self/Reshape_2/shape_Concat__143" [label="[1]", style=dashed]; +"36 bert/encoder/layer_6/attention/self/Reshape_2/shape_Unsqueeze__140" -> "290 bert/encoder/layer_6/attention/self/Reshape_2/shape_Concat__143" [label="[1]", style=dashed]; +"37 bert/encoder/layer_6/attention/self/Reshape_1/shape_Unsqueeze__147" -> "293 bert/encoder/layer_6/attention/self/Reshape_1/shape_Concat__148" [label="[1]", style=dashed]; +"38 bert/encoder/layer_6/attention/self/Reshape_1/shape_Unsqueeze__146" -> "293 bert/encoder/layer_6/attention/self/Reshape_1/shape_Concat__148" [label="[1]", style=dashed]; +"39 bert/encoder/layer_6/attention/self/Reshape_1/shape_Unsqueeze__145" -> "293 bert/encoder/layer_6/attention/self/Reshape_1/shape_Concat__148" [label="[1]", style=dashed]; +"40 bert/encoder/layer_6/attention/self/Reshape/shape_Unsqueeze__152" -> "296 bert/encoder/layer_6/attention/self/Reshape/shape_Concat__153" [label="[1]", style=dashed]; +"41 bert/encoder/layer_6/attention/self/Reshape/shape_Unsqueeze__151" -> "296 bert/encoder/layer_6/attention/self/Reshape/shape_Concat__153" [label="[1]", style=dashed]; +"42 bert/encoder/layer_6/attention/self/Reshape/shape_Unsqueeze__150" -> "296 
bert/encoder/layer_6/attention/self/Reshape/shape_Concat__153" [label="[1]", style=dashed]; +"43 bert/encoder/layer_5/attention/self/Reshape_3/shape_Unsqueeze__155" -> "300 bert/encoder/layer_5/attention/self/Reshape_3/shape_Concat__156" [label="[1]", style=dashed]; +"44 bert/encoder/layer_5/attention/self/Reshape_2/shape_Unsqueeze__160" -> "303 bert/encoder/layer_5/attention/self/Reshape_2/shape_Concat__161" [label="[1]", style=dashed]; +"45 bert/encoder/layer_5/attention/self/Reshape_2/shape_Unsqueeze__159" -> "303 bert/encoder/layer_5/attention/self/Reshape_2/shape_Concat__161" [label="[1]", style=dashed]; +"46 bert/encoder/layer_5/attention/self/Reshape_2/shape_Unsqueeze__158" -> "303 bert/encoder/layer_5/attention/self/Reshape_2/shape_Concat__161" [label="[1]", style=dashed]; +"47 bert/encoder/layer_5/attention/self/Reshape_1/shape_Unsqueeze__165" -> "306 bert/encoder/layer_5/attention/self/Reshape_1/shape_Concat__166" [label="[1]", style=dashed]; +"48 bert/encoder/layer_5/attention/self/Reshape_1/shape_Unsqueeze__164" -> "306 bert/encoder/layer_5/attention/self/Reshape_1/shape_Concat__166" [label="[1]", style=dashed]; +"49 bert/encoder/layer_5/attention/self/Reshape_1/shape_Unsqueeze__163" -> "306 bert/encoder/layer_5/attention/self/Reshape_1/shape_Concat__166" [label="[1]", style=dashed]; +"50 bert/encoder/layer_5/attention/self/Reshape/shape_Unsqueeze__170" -> "309 bert/encoder/layer_5/attention/self/Reshape/shape_Concat__171" [label="[1]", style=dashed]; +"51 bert/encoder/layer_5/attention/self/Reshape/shape_Unsqueeze__169" -> "309 bert/encoder/layer_5/attention/self/Reshape/shape_Concat__171" [label="[1]", style=dashed]; +"52 bert/encoder/layer_5/attention/self/Reshape/shape_Unsqueeze__168" -> "309 bert/encoder/layer_5/attention/self/Reshape/shape_Concat__171" [label="[1]", style=dashed]; +"53 bert/encoder/layer_4/attention/self/Reshape_3/shape_Unsqueeze__173" -> "313 bert/encoder/layer_4/attention/self/Reshape_3/shape_Concat__174" [label="[1]", style=dashed]; +"54 bert/encoder/layer_4/attention/self/Reshape_2/shape_Unsqueeze__178" -> "316 bert/encoder/layer_4/attention/self/Reshape_2/shape_Concat__179" [label="[1]", style=dashed]; +"55 bert/encoder/layer_4/attention/self/Reshape_2/shape_Unsqueeze__177" -> "316 bert/encoder/layer_4/attention/self/Reshape_2/shape_Concat__179" [label="[1]", style=dashed]; +"56 bert/encoder/layer_4/attention/self/Reshape_2/shape_Unsqueeze__176" -> "316 bert/encoder/layer_4/attention/self/Reshape_2/shape_Concat__179" [label="[1]", style=dashed]; +"57 bert/encoder/layer_4/attention/self/Reshape_1/shape_Unsqueeze__183" -> "319 bert/encoder/layer_4/attention/self/Reshape_1/shape_Concat__184" [label="[1]", style=dashed]; +"58 bert/encoder/layer_4/attention/self/Reshape_1/shape_Unsqueeze__182" -> "319 bert/encoder/layer_4/attention/self/Reshape_1/shape_Concat__184" [label="[1]", style=dashed]; +"59 bert/encoder/layer_4/attention/self/Reshape_1/shape_Unsqueeze__181" -> "319 bert/encoder/layer_4/attention/self/Reshape_1/shape_Concat__184" [label="[1]", style=dashed]; +"60 bert/encoder/layer_4/attention/self/Reshape/shape_Unsqueeze__188" -> "322 bert/encoder/layer_4/attention/self/Reshape/shape_Concat__189" [label="[1]", style=dashed]; +"61 bert/encoder/layer_4/attention/self/Reshape/shape_Unsqueeze__187" -> "322 bert/encoder/layer_4/attention/self/Reshape/shape_Concat__189" [label="[1]", style=dashed]; +"62 bert/encoder/layer_4/attention/self/Reshape/shape_Unsqueeze__186" -> "322 bert/encoder/layer_4/attention/self/Reshape/shape_Concat__189" [label="[1]", 
style=dashed]; +"63 bert/encoder/layer_3/attention/self/Reshape_3/shape_Unsqueeze__191" -> "326 bert/encoder/layer_3/attention/self/Reshape_3/shape_Concat__192" [label="[1]", style=dashed]; +"64 bert/encoder/layer_3/attention/self/Reshape_2/shape_Unsqueeze__196" -> "329 bert/encoder/layer_3/attention/self/Reshape_2/shape_Concat__197" [label="[1]", style=dashed]; +"65 bert/encoder/layer_3/attention/self/Reshape_2/shape_Unsqueeze__195" -> "329 bert/encoder/layer_3/attention/self/Reshape_2/shape_Concat__197" [label="[1]", style=dashed]; +"66 bert/encoder/layer_3/attention/self/Reshape_2/shape_Unsqueeze__194" -> "329 bert/encoder/layer_3/attention/self/Reshape_2/shape_Concat__197" [label="[1]", style=dashed]; +"67 bert/encoder/layer_3/attention/self/Reshape_1/shape_Unsqueeze__201" -> "332 bert/encoder/layer_3/attention/self/Reshape_1/shape_Concat__202" [label="[1]", style=dashed]; +"68 bert/encoder/layer_3/attention/self/Reshape_1/shape_Unsqueeze__200" -> "332 bert/encoder/layer_3/attention/self/Reshape_1/shape_Concat__202" [label="[1]", style=dashed]; +"69 bert/encoder/layer_3/attention/self/Reshape_1/shape_Unsqueeze__199" -> "332 bert/encoder/layer_3/attention/self/Reshape_1/shape_Concat__202" [label="[1]", style=dashed]; +"70 bert/encoder/layer_3/attention/self/Reshape/shape_Unsqueeze__206" -> "335 bert/encoder/layer_3/attention/self/Reshape/shape_Concat__207" [label="[1]", style=dashed]; +"71 bert/encoder/layer_3/attention/self/Reshape/shape_Unsqueeze__205" -> "335 bert/encoder/layer_3/attention/self/Reshape/shape_Concat__207" [label="[1]", style=dashed]; +"72 bert/encoder/layer_3/attention/self/Reshape/shape_Unsqueeze__204" -> "335 bert/encoder/layer_3/attention/self/Reshape/shape_Concat__207" [label="[1]", style=dashed]; +"73 bert/encoder/layer_2/attention/self/Reshape_3/shape_Unsqueeze__209" -> "339 bert/encoder/layer_2/attention/self/Reshape_3/shape_Concat__210" [label="[1]", style=dashed]; +"74 bert/encoder/layer_2/attention/self/Reshape_2/shape_Unsqueeze__214" -> "342 bert/encoder/layer_2/attention/self/Reshape_2/shape_Concat__215" [label="[1]", style=dashed]; +"75 bert/encoder/layer_2/attention/self/Reshape_2/shape_Unsqueeze__213" -> "342 bert/encoder/layer_2/attention/self/Reshape_2/shape_Concat__215" [label="[1]", style=dashed]; +"76 bert/encoder/layer_2/attention/self/Reshape_2/shape_Unsqueeze__212" -> "342 bert/encoder/layer_2/attention/self/Reshape_2/shape_Concat__215" [label="[1]", style=dashed]; +"77 bert/encoder/layer_2/attention/self/Reshape_1/shape_Unsqueeze__219" -> "345 bert/encoder/layer_2/attention/self/Reshape_1/shape_Concat__220" [label="[1]", style=dashed]; +"78 bert/encoder/layer_2/attention/self/Reshape_1/shape_Unsqueeze__218" -> "345 bert/encoder/layer_2/attention/self/Reshape_1/shape_Concat__220" [label="[1]", style=dashed]; +"79 bert/encoder/layer_2/attention/self/Reshape_1/shape_Unsqueeze__217" -> "345 bert/encoder/layer_2/attention/self/Reshape_1/shape_Concat__220" [label="[1]", style=dashed]; +"80 bert/encoder/layer_2/attention/self/Reshape/shape_Unsqueeze__224" -> "348 bert/encoder/layer_2/attention/self/Reshape/shape_Concat__225" [label="[1]", style=dashed]; +"81 bert/encoder/layer_2/attention/self/Reshape/shape_Unsqueeze__223" -> "348 bert/encoder/layer_2/attention/self/Reshape/shape_Concat__225" [label="[1]", style=dashed]; +"82 bert/encoder/layer_2/attention/self/Reshape/shape_Unsqueeze__222" -> "348 bert/encoder/layer_2/attention/self/Reshape/shape_Concat__225" [label="[1]", style=dashed]; +"83 
bert/encoder/layer_11/attention/self/Reshape_3/shape_Unsqueeze__227" -> "352 bert/encoder/layer_11/attention/self/Reshape_3/shape_Concat__228" [label="[1]", style=dashed]; +"84 bert/encoder/layer_11/attention/self/Reshape_2/shape_Unsqueeze__232" -> "355 bert/encoder/layer_11/attention/self/Reshape_2/shape_Concat__233" [label="[1]", style=dashed]; +"85 bert/encoder/layer_11/attention/self/Reshape_2/shape_Unsqueeze__231" -> "355 bert/encoder/layer_11/attention/self/Reshape_2/shape_Concat__233" [label="[1]", style=dashed]; +"86 bert/encoder/layer_11/attention/self/Reshape_2/shape_Unsqueeze__230" -> "355 bert/encoder/layer_11/attention/self/Reshape_2/shape_Concat__233" [label="[1]", style=dashed]; +"87 bert/encoder/layer_11/attention/self/Reshape_1/shape_Unsqueeze__237" -> "358 bert/encoder/layer_11/attention/self/Reshape_1/shape_Concat__238" [label="[1]", style=dashed]; +"88 bert/encoder/layer_11/attention/self/Reshape_1/shape_Unsqueeze__236" -> "358 bert/encoder/layer_11/attention/self/Reshape_1/shape_Concat__238" [label="[1]", style=dashed]; +"89 bert/encoder/layer_11/attention/self/Reshape_1/shape_Unsqueeze__235" -> "358 bert/encoder/layer_11/attention/self/Reshape_1/shape_Concat__238" [label="[1]", style=dashed]; +"90 bert/encoder/layer_11/attention/self/Reshape/shape_Unsqueeze__242" -> "361 bert/encoder/layer_11/attention/self/Reshape/shape_Concat__243" [label="[1]", style=dashed]; +"91 bert/encoder/layer_11/attention/self/Reshape/shape_Unsqueeze__241" -> "361 bert/encoder/layer_11/attention/self/Reshape/shape_Concat__243" [label="[1]", style=dashed]; +"92 bert/encoder/layer_11/attention/self/Reshape/shape_Unsqueeze__240" -> "361 bert/encoder/layer_11/attention/self/Reshape/shape_Concat__243" [label="[1]", style=dashed]; +"93 bert/encoder/layer_10/attention/self/Reshape_3/shape_Unsqueeze__245" -> "365 bert/encoder/layer_10/attention/self/Reshape_3/shape_Concat__246" [label="[1]", style=dashed]; +"94 bert/encoder/layer_10/attention/self/Reshape_2/shape_Unsqueeze__250" -> "368 bert/encoder/layer_10/attention/self/Reshape_2/shape_Concat__251" [label="[1]", style=dashed]; +"95 bert/encoder/layer_10/attention/self/Reshape_2/shape_Unsqueeze__249" -> "368 bert/encoder/layer_10/attention/self/Reshape_2/shape_Concat__251" [label="[1]", style=dashed]; +"96 bert/encoder/layer_10/attention/self/Reshape_2/shape_Unsqueeze__248" -> "368 bert/encoder/layer_10/attention/self/Reshape_2/shape_Concat__251" [label="[1]", style=dashed]; +"97 bert/encoder/layer_10/attention/self/Reshape_1/shape_Unsqueeze__255" -> "371 bert/encoder/layer_10/attention/self/Reshape_1/shape_Concat__256" [label="[1]", style=dashed]; +"98 bert/encoder/layer_10/attention/self/Reshape_1/shape_Unsqueeze__254" -> "371 bert/encoder/layer_10/attention/self/Reshape_1/shape_Concat__256" [label="[1]", style=dashed]; +"99 bert/encoder/layer_10/attention/self/Reshape_1/shape_Unsqueeze__253" -> "371 bert/encoder/layer_10/attention/self/Reshape_1/shape_Concat__256" [label="[1]", style=dashed]; +"100 bert/encoder/layer_10/attention/self/Reshape/shape_Unsqueeze__260" -> "374 bert/encoder/layer_10/attention/self/Reshape/shape_Concat__261" [label="[1]", style=dashed]; +"101 bert/encoder/layer_10/attention/self/Reshape/shape_Unsqueeze__259" -> "374 bert/encoder/layer_10/attention/self/Reshape/shape_Concat__261" [label="[1]", style=dashed]; +"102 bert/encoder/layer_10/attention/self/Reshape/shape_Unsqueeze__258" -> "374 bert/encoder/layer_10/attention/self/Reshape/shape_Concat__261" [label="[1]", style=dashed]; +"103 
bert/encoder/layer_1/attention/self/Reshape_3/shape_Unsqueeze__263" -> "378 bert/encoder/layer_1/attention/self/Reshape_3/shape_Concat__264" [label="[1]", style=dashed]; +"104 bert/encoder/layer_1/attention/self/Reshape_2/shape_Unsqueeze__268" -> "381 bert/encoder/layer_1/attention/self/Reshape_2/shape_Concat__269" [label="[1]", style=dashed]; +"105 bert/encoder/layer_1/attention/self/Reshape_2/shape_Unsqueeze__267" -> "381 bert/encoder/layer_1/attention/self/Reshape_2/shape_Concat__269" [label="[1]", style=dashed]; +"106 bert/encoder/layer_1/attention/self/Reshape_2/shape_Unsqueeze__266" -> "381 bert/encoder/layer_1/attention/self/Reshape_2/shape_Concat__269" [label="[1]", style=dashed]; +"107 bert/encoder/layer_1/attention/self/Reshape_1/shape_Unsqueeze__273" -> "384 bert/encoder/layer_1/attention/self/Reshape_1/shape_Concat__274" [label="[1]", style=dashed]; +"108 bert/encoder/layer_1/attention/self/Reshape_1/shape_Unsqueeze__272" -> "384 bert/encoder/layer_1/attention/self/Reshape_1/shape_Concat__274" [label="[1]", style=dashed]; +"109 bert/encoder/layer_1/attention/self/Reshape_1/shape_Unsqueeze__271" -> "384 bert/encoder/layer_1/attention/self/Reshape_1/shape_Concat__274" [label="[1]", style=dashed]; +"110 bert/encoder/layer_1/attention/self/Reshape/shape_Unsqueeze__278" -> "387 bert/encoder/layer_1/attention/self/Reshape/shape_Concat__279" [label="[1]", style=dashed]; +"111 bert/encoder/layer_1/attention/self/Reshape/shape_Unsqueeze__277" -> "387 bert/encoder/layer_1/attention/self/Reshape/shape_Concat__279" [label="[1]", style=dashed]; +"112 bert/encoder/layer_1/attention/self/Reshape/shape_Unsqueeze__276" -> "387 bert/encoder/layer_1/attention/self/Reshape/shape_Concat__279" [label="[1]", style=dashed]; +"113 bert/encoder/layer_0/attention/self/Reshape_3/shape_Unsqueeze__281" -> "391 bert/encoder/layer_0/attention/self/Reshape_3/shape_Concat__282" [label="[1]", style=dashed]; +"114 bert/encoder/layer_0/attention/self/Reshape_2/shape_Unsqueeze__286" -> "394 bert/encoder/layer_0/attention/self/Reshape_2/shape_Concat__287" [label="[1]", style=dashed]; +"115 bert/encoder/layer_0/attention/self/Reshape_2/shape_Unsqueeze__285" -> "394 bert/encoder/layer_0/attention/self/Reshape_2/shape_Concat__287" [label="[1]", style=dashed]; +"116 bert/encoder/layer_0/attention/self/Reshape_2/shape_Unsqueeze__284" -> "394 bert/encoder/layer_0/attention/self/Reshape_2/shape_Concat__287" [label="[1]", style=dashed]; +"117 bert/encoder/layer_0/attention/self/Reshape_1/shape_Unsqueeze__291" -> "397 bert/encoder/layer_0/attention/self/Reshape_1/shape_Concat__292" [label="[1]", style=dashed]; +"118 bert/encoder/layer_0/attention/self/Reshape_1/shape_Unsqueeze__290" -> "397 bert/encoder/layer_0/attention/self/Reshape_1/shape_Concat__292" [label="[1]", style=dashed]; +"119 bert/encoder/layer_0/attention/self/Reshape_1/shape_Unsqueeze__289" -> "397 bert/encoder/layer_0/attention/self/Reshape_1/shape_Concat__292" [label="[1]", style=dashed]; +"120 bert/encoder/layer_0/attention/self/Reshape/shape_Unsqueeze__296" -> "400 bert/encoder/layer_0/attention/self/Reshape/shape_Concat__297" [label="[1]", style=dashed]; +"121 bert/encoder/layer_0/attention/self/Reshape/shape_Unsqueeze__295" -> "400 bert/encoder/layer_0/attention/self/Reshape/shape_Concat__297" [label="[1]", style=dashed]; +"122 bert/encoder/layer_0/attention/self/Reshape/shape_Unsqueeze__294" -> "400 bert/encoder/layer_0/attention/self/Reshape/shape_Concat__297" [label="[1]", style=dashed]; +"123 bert/encoder/Shape" -> "124 bert/encoder/Shape__12" 
[label="[2]", style=dashed]; +"124 bert/encoder/Shape__12" -> "125 bert/encoder/strided_slice" [label="[2]", style=solid]; +"125 bert/encoder/strided_slice" -> "126 bert/encoder/strided_slice__16" [label="[1]", style=solid]; +"126 bert/encoder/strided_slice__16" -> "127 bert/encoder/strided_slice__17" [label="[]", style=solid]; +"127 bert/encoder/strided_slice__17" -> "128 bert/encoder/ones/packed_Unsqueeze__18" [label="[]", style=dashed]; +"127 bert/encoder/strided_slice__17" -> "135 bert/encoder/Reshape/shape_Unsqueeze__23" [label="[]", style=dashed]; +"128 bert/encoder/ones/packed_Unsqueeze__18" -> "129 bert/encoder/ones/packed_Concat__21" [label="[1]", style=dashed]; +"129 bert/encoder/ones/packed_Concat__21" -> "130 bert/encoder/ones__22" [label="[3]", style=dashed]; +"130 bert/encoder/ones__22" -> "131 bert/encoder/ones" [label="[3]", style=dashed]; +"131 bert/encoder/ones" -> "142 bert/encoder/mul" [label="[-1, -1, -1]", style=solid]; +"132 bert/encoder/Reshape_13/shape_Unsqueeze__300" -> "403 bert/encoder/Reshape_13/shape_Concat__301" [label="[1]", style=dashed]; +"133 bert/encoder/Reshape_13/shape_Unsqueeze__299" -> "403 bert/encoder/Reshape_13/shape_Concat__301" [label="[1]", style=dashed]; +"134 bert/encoder/Reshape_1__302" -> "405 bert/encoder/Reshape_1" [label="[2]", style=dashed]; +"135 bert/encoder/Reshape/shape_Unsqueeze__23" -> "138 bert/encoder/Reshape/shape_Concat__26" [label="[1]", style=dashed]; +"136 bert/encoder/Reshape/shape_Unsqueeze__25" -> "138 bert/encoder/Reshape/shape_Concat__26" [label="[1]", style=dashed]; +"137 bert/encoder/Reshape/shape_Unsqueeze__24" -> "138 bert/encoder/Reshape/shape_Concat__26" [label="[1]", style=dashed]; +"138 bert/encoder/Reshape/shape_Concat__26" -> "139 bert/encoder/Reshape__27" [label="[3]", style=dashed]; +"139 bert/encoder/Reshape__27" -> "140 bert/encoder/Reshape" [label="[3]", style=dashed]; +"140 bert/encoder/Reshape" -> "141 bert/encoder/Cast" [label="[]", style=dashed]; +"141 bert/encoder/Cast" -> "142 bert/encoder/mul" [label="[]", style=solid]; +"142 bert/encoder/mul" -> "143 bert/encoder/layer_9/attention/self/ExpandDims" [label="[]", style=solid]; +"142 bert/encoder/mul" -> "146 bert/encoder/layer_8/attention/self/ExpandDims" [label="[]", style=solid]; +"142 bert/encoder/mul" -> "149 bert/encoder/layer_7/attention/self/ExpandDims" [label="[]", style=solid]; +"142 bert/encoder/mul" -> "152 bert/encoder/layer_6/attention/self/ExpandDims" [label="[]", style=solid]; +"142 bert/encoder/mul" -> "155 bert/encoder/layer_5/attention/self/ExpandDims" [label="[]", style=solid]; +"142 bert/encoder/mul" -> "158 bert/encoder/layer_4/attention/self/ExpandDims" [label="[]", style=solid]; +"142 bert/encoder/mul" -> "161 bert/encoder/layer_3/attention/self/ExpandDims" [label="[]", style=solid]; +"142 bert/encoder/mul" -> "164 bert/encoder/layer_2/attention/self/ExpandDims" [label="[]", style=solid]; +"142 bert/encoder/mul" -> "167 bert/encoder/layer_11/attention/self/ExpandDims" [label="[]", style=solid]; +"142 bert/encoder/mul" -> "170 bert/encoder/layer_10/attention/self/ExpandDims" [label="[]", style=solid]; +"142 bert/encoder/mul" -> "173 bert/encoder/layer_1/attention/self/ExpandDims" [label="[]", style=solid]; +"142 bert/encoder/mul" -> "176 bert/encoder/layer_0/attention/self/ExpandDims" [label="[]", style=solid]; +"143 bert/encoder/layer_9/attention/self/ExpandDims" -> "144 bert/encoder/layer_9/attention/self/sub" [label="[-1, 1, 256, 256]", style=solid]; +"144 bert/encoder/layer_9/attention/self/sub" -> "145 
bert/encoder/layer_9/attention/self/mul_1" [label="[-1, 1, 256, 256]", style=solid]; +"145 bert/encoder/layer_9/attention/self/mul_1" -> "1247 bert/encoder/layer_9/attention/self/add" [label="[-1, 1, 256, 256]", style=solid]; +"146 bert/encoder/layer_8/attention/self/ExpandDims" -> "147 bert/encoder/layer_8/attention/self/sub" [label="[-1, 1, 256, 256]", style=solid]; +"147 bert/encoder/layer_8/attention/self/sub" -> "148 bert/encoder/layer_8/attention/self/mul_1" [label="[-1, 1, 256, 256]", style=solid]; +"148 bert/encoder/layer_8/attention/self/mul_1" -> "1157 bert/encoder/layer_8/attention/self/add" [label="[-1, 1, 256, 256]", style=solid]; +"149 bert/encoder/layer_7/attention/self/ExpandDims" -> "150 bert/encoder/layer_7/attention/self/sub" [label="[-1, 1, 256, 256]", style=solid]; +"150 bert/encoder/layer_7/attention/self/sub" -> "151 bert/encoder/layer_7/attention/self/mul_1" [label="[-1, 1, 256, 256]", style=solid]; +"151 bert/encoder/layer_7/attention/self/mul_1" -> "1067 bert/encoder/layer_7/attention/self/add" [label="[-1, 1, 256, 256]", style=solid]; +"152 bert/encoder/layer_6/attention/self/ExpandDims" -> "153 bert/encoder/layer_6/attention/self/sub" [label="[-1, 1, 256, 256]", style=solid]; +"153 bert/encoder/layer_6/attention/self/sub" -> "154 bert/encoder/layer_6/attention/self/mul_1" [label="[-1, 1, 256, 256]", style=solid]; +"154 bert/encoder/layer_6/attention/self/mul_1" -> "977 bert/encoder/layer_6/attention/self/add" [label="[-1, 1, 256, 256]", style=solid]; +"155 bert/encoder/layer_5/attention/self/ExpandDims" -> "156 bert/encoder/layer_5/attention/self/sub" [label="[-1, 1, 256, 256]", style=solid]; +"156 bert/encoder/layer_5/attention/self/sub" -> "157 bert/encoder/layer_5/attention/self/mul_1" [label="[-1, 1, 256, 256]", style=solid]; +"157 bert/encoder/layer_5/attention/self/mul_1" -> "887 bert/encoder/layer_5/attention/self/add" [label="[-1, 1, 256, 256]", style=solid]; +"158 bert/encoder/layer_4/attention/self/ExpandDims" -> "159 bert/encoder/layer_4/attention/self/sub" [label="[-1, 1, 256, 256]", style=solid]; +"159 bert/encoder/layer_4/attention/self/sub" -> "160 bert/encoder/layer_4/attention/self/mul_1" [label="[-1, 1, 256, 256]", style=solid]; +"160 bert/encoder/layer_4/attention/self/mul_1" -> "797 bert/encoder/layer_4/attention/self/add" [label="[-1, 1, 256, 256]", style=solid]; +"161 bert/encoder/layer_3/attention/self/ExpandDims" -> "162 bert/encoder/layer_3/attention/self/sub" [label="[-1, 1, 256, 256]", style=solid]; +"162 bert/encoder/layer_3/attention/self/sub" -> "163 bert/encoder/layer_3/attention/self/mul_1" [label="[-1, 1, 256, 256]", style=solid]; +"163 bert/encoder/layer_3/attention/self/mul_1" -> "707 bert/encoder/layer_3/attention/self/add" [label="[-1, 1, 256, 256]", style=solid]; +"164 bert/encoder/layer_2/attention/self/ExpandDims" -> "165 bert/encoder/layer_2/attention/self/sub" [label="[-1, 1, 256, 256]", style=solid]; +"165 bert/encoder/layer_2/attention/self/sub" -> "166 bert/encoder/layer_2/attention/self/mul_1" [label="[-1, 1, 256, 256]", style=solid]; +"166 bert/encoder/layer_2/attention/self/mul_1" -> "617 bert/encoder/layer_2/attention/self/add" [label="[-1, 1, 256, 256]", style=solid]; +"167 bert/encoder/layer_11/attention/self/ExpandDims" -> "168 bert/encoder/layer_11/attention/self/sub" [label="[-1, 1, 256, 256]", style=solid]; +"168 bert/encoder/layer_11/attention/self/sub" -> "169 bert/encoder/layer_11/attention/self/mul_1" [label="[-1, 1, 256, 256]", style=solid]; +"169 bert/encoder/layer_11/attention/self/mul_1" -> "1427 
bert/encoder/layer_11/attention/self/add" [label="[-1, 1, 256, 256]", style=solid]; +"170 bert/encoder/layer_10/attention/self/ExpandDims" -> "171 bert/encoder/layer_10/attention/self/sub" [label="[-1, 1, 256, 256]", style=solid]; +"171 bert/encoder/layer_10/attention/self/sub" -> "172 bert/encoder/layer_10/attention/self/mul_1" [label="[-1, 1, 256, 256]", style=solid]; +"172 bert/encoder/layer_10/attention/self/mul_1" -> "1337 bert/encoder/layer_10/attention/self/add" [label="[-1, 1, 256, 256]", style=solid]; +"173 bert/encoder/layer_1/attention/self/ExpandDims" -> "174 bert/encoder/layer_1/attention/self/sub" [label="[-1, 1, 256, 256]", style=solid]; +"174 bert/encoder/layer_1/attention/self/sub" -> "175 bert/encoder/layer_1/attention/self/mul_1" [label="[-1, 1, 256, 256]", style=solid]; +"175 bert/encoder/layer_1/attention/self/mul_1" -> "527 bert/encoder/layer_1/attention/self/add" [label="[-1, 1, 256, 256]", style=solid]; +"176 bert/encoder/layer_0/attention/self/ExpandDims" -> "177 bert/encoder/layer_0/attention/self/sub" [label="[-1, 1, 256, 256]", style=solid]; +"177 bert/encoder/layer_0/attention/self/sub" -> "178 bert/encoder/layer_0/attention/self/mul_1" [label="[-1, 1, 256, 256]", style=solid]; +"178 bert/encoder/layer_0/attention/self/mul_1" -> "437 bert/encoder/layer_0/attention/self/add" [label="[-1, 1, 256, 256]", style=solid]; +"179 bert/embeddings/Slice" -> "181 bert/embeddings/Reshape_4" [label="[256, 768]", style=solid]; +"180 bert/embeddings/Reshape_4__42" -> "181 bert/embeddings/Reshape_4" [label="[3]", style=dashed]; +"181 bert/embeddings/Reshape_4" -> "227 bert/embeddings/add_1" [label="[]", style=solid]; +"182 bert/embeddings/Reshape_3/shape_Unsqueeze__69" -> "209 bert/embeddings/Reshape_3/shape_Concat__70" [label="[1]", style=dashed]; +"183 bert/embeddings/Reshape_3/shape_Unsqueeze__68" -> "209 bert/embeddings/Reshape_3/shape_Concat__70" [label="[1]", style=dashed]; +"184 bert/embeddings/Reshape_2__43" -> "185 bert/embeddings/Reshape_2" [label="[1]", style=dashed]; +"185 bert/embeddings/Reshape_2" -> "219 bert/embeddings/one_hot" [label="[]", style=dashed]; +"186 bert/embeddings/Reshape_1/shape_Unsqueeze__57" -> "196 bert/embeddings/Reshape_1/shape_Concat__58" [label="[1]", style=dashed]; +"187 bert/embeddings/Reshape_1/shape_Unsqueeze__56" -> "196 bert/embeddings/Reshape_1/shape_Concat__58" [label="[1]", style=dashed]; +"188 bert/embeddings/Reshape__59" -> "198 bert/embeddings/Reshape" [label="[1]", style=dashed]; +"189 bert/embeddings/ExpandDims" -> "190 bert/embeddings/Shape" [label="[-1, 256, 1]", style=dashed]; +"189 bert/embeddings/ExpandDims" -> "198 bert/embeddings/Reshape" [label="[-1, 256, 1]", style=dashed]; +"190 bert/embeddings/Shape" -> "191 bert/embeddings/Shape__49" [label="[3]", style=dashed]; +"191 bert/embeddings/Shape__49" -> "192 bert/embeddings/strided_slice" [label="[3]", style=solid]; +"192 bert/embeddings/strided_slice" -> "193 bert/embeddings/strided_slice__53" [label="[1]", style=solid]; +"193 bert/embeddings/strided_slice__53" -> "194 bert/embeddings/strided_slice__54" [label="[]", style=solid]; +"194 bert/embeddings/strided_slice__54" -> "195 bert/embeddings/Reshape_1/shape_Unsqueeze__55" [label="[]", style=dashed]; +"195 bert/embeddings/Reshape_1/shape_Unsqueeze__55" -> "196 bert/embeddings/Reshape_1/shape_Concat__58" [label="[1]", style=dashed]; +"196 bert/embeddings/Reshape_1/shape_Concat__58" -> "197 bert/embeddings/Reshape_1__60" [label="[3]", style=dashed]; +"197 bert/embeddings/Reshape_1__60" -> "202 bert/embeddings/Reshape_1" 
[label="[3]", style=dashed]; +"198 bert/embeddings/Reshape" -> "201 bert/embeddings/GatherV2" [label="[]", style=dashed]; +"199 QuantizeLinear_bert/embeddings/word_embeddings^0_1" -> "200 DequantizeLinear_bert/embeddings/word_embeddings^0_1" [label="[30522, 768]", style=dashed]; +"200 DequantizeLinear_bert/embeddings/word_embeddings^0_1" -> "201 bert/embeddings/GatherV2" [label="[30522, 768]", style=solid]; +"201 bert/embeddings/GatherV2" -> "202 bert/embeddings/Reshape_1" [label="[]", style=solid]; +"202 bert/embeddings/Reshape_1" -> "203 bert/embeddings/Shape_1" [label="[]", style=solid]; +"202 bert/embeddings/Reshape_1" -> "226 bert/embeddings/add" [label="[]", style=solid]; +"203 bert/embeddings/Shape_1" -> "204 bert/embeddings/Shape_1__61" [label="[-1]", style=dashed]; +"204 bert/embeddings/Shape_1__61" -> "205 bert/embeddings/strided_slice_1" [label="[-1]", style=solid]; +"205 bert/embeddings/strided_slice_1" -> "206 bert/embeddings/strided_slice_1__65" [label="[-1]", style=solid]; +"206 bert/embeddings/strided_slice_1__65" -> "207 bert/embeddings/strided_slice_1__66" [label="[]", style=solid]; +"207 bert/embeddings/strided_slice_1__66" -> "208 bert/embeddings/Reshape_3/shape_Unsqueeze__67" [label="[]", style=dashed]; +"208 bert/embeddings/Reshape_3/shape_Unsqueeze__67" -> "209 bert/embeddings/Reshape_3/shape_Concat__70" [label="[1]", style=dashed]; +"209 bert/embeddings/Reshape_3/shape_Concat__70" -> "210 bert/embeddings/Reshape_3__71" [label="[3]", style=dashed]; +"210 bert/embeddings/Reshape_3__71" -> "225 bert/embeddings/Reshape_3" [label="[3]", style=dashed]; +"211 Unsqueeze__46" -> "218 Concat__47" [label="[1]", style=solid]; +"212 Unsqueeze__45" -> "218 Concat__47" [label="[1]", style=solid]; +"213 Unsqueeze__44" -> "219 bert/embeddings/one_hot" [label="[1]", style=dashed]; +"214 Reshape_1/shape_Unsqueeze__480" -> "1499 Reshape_1/shape_Concat__481" [label="[1]", style=dashed]; +"215 Reshape_1/shape_Unsqueeze__479" -> "1499 Reshape_1/shape_Concat__481" [label="[1]", style=dashed]; +"216 Reshape/shape_Unsqueeze__483" -> "1496 Reshape/shape_Concat__484" [label="[1]", style=dashed]; +"217 MatMul__486" -> "1502 QuantizeLinear_MatMul__486^0_1" [label="[768, 2]", style=solid]; +"218 Concat__47" -> "219 bert/embeddings/one_hot" [label="[2]", style=solid]; +"219 bert/embeddings/one_hot" -> "220 QuantizeLinear_bert/embeddings/one_hot^0_1" [label="[]", style=solid]; +"220 QuantizeLinear_bert/embeddings/one_hot^0_1" -> "221 DequantizeLinear_bert/embeddings/one_hot^0_1" [label="[]", style=dashed]; +"221 DequantizeLinear_bert/embeddings/one_hot^0_1" -> "224 bert/embeddings/MatMul" [label="[]", style=solid]; +"222 QuantizeLinear_bert/embeddings/token_type_embeddings^0_1" -> "223 DequantizeLinear_bert/embeddings/token_type_embeddings^0_1" [label="[2, 768]", style=dashed]; +"223 DequantizeLinear_bert/embeddings/token_type_embeddings^0_1" -> "224 bert/embeddings/MatMul" [label="[2, 768]", style=solid]; +"224 bert/embeddings/MatMul" -> "225 bert/embeddings/Reshape_3" [label="[]", style=solid]; +"225 bert/embeddings/Reshape_3" -> "226 bert/embeddings/add" [label="[]", style=solid]; +"226 bert/embeddings/add" -> "227 bert/embeddings/add_1" [label="[]", style=solid]; +"227 bert/embeddings/add_1" -> "228 bert/embeddings/LayerNorm/moments/mean" [label="[]", style=solid]; +"227 bert/embeddings/add_1" -> "230 bert/embeddings/LayerNorm/moments/SquaredDifference" [label="[]", style=solid]; +"227 bert/embeddings/add_1" -> "239 bert/embeddings/LayerNorm/batchnorm/mul_1" [label="[]", style=solid]; +"228 
bert/embeddings/LayerNorm/moments/mean" -> "229 bert/embeddings/LayerNorm/moments/StopGradient" [label="[]", style=solid]; +"228 bert/embeddings/LayerNorm/moments/mean" -> "237 bert/embeddings/LayerNorm/batchnorm/mul_2" [label="[]", style=solid]; +"229 bert/embeddings/LayerNorm/moments/StopGradient" -> "230 bert/embeddings/LayerNorm/moments/SquaredDifference" [label="[]", style=solid]; +"230 bert/embeddings/LayerNorm/moments/SquaredDifference" -> "231 bert/embeddings/LayerNorm/moments/SquaredDifference__72" [label="[]", style=solid]; +"231 bert/embeddings/LayerNorm/moments/SquaredDifference__72" -> "232 bert/embeddings/LayerNorm/moments/variance" [label="[]", style=solid]; +"232 bert/embeddings/LayerNorm/moments/variance" -> "233 bert/embeddings/LayerNorm/batchnorm/add" [label="[]", style=solid]; +"233 bert/embeddings/LayerNorm/batchnorm/add" -> "234 bert/embeddings/LayerNorm/batchnorm/Rsqrt" [label="[]", style=solid]; +"234 bert/embeddings/LayerNorm/batchnorm/Rsqrt" -> "235 bert/embeddings/LayerNorm/batchnorm/Rsqrt__74" [label="[]", style=solid]; +"235 bert/embeddings/LayerNorm/batchnorm/Rsqrt__74" -> "236 bert/embeddings/LayerNorm/batchnorm/mul" [label="[]", style=solid]; +"236 bert/embeddings/LayerNorm/batchnorm/mul" -> "237 bert/embeddings/LayerNorm/batchnorm/mul_2" [label="[]", style=solid]; +"236 bert/embeddings/LayerNorm/batchnorm/mul" -> "239 bert/embeddings/LayerNorm/batchnorm/mul_1" [label="[]", style=solid]; +"237 bert/embeddings/LayerNorm/batchnorm/mul_2" -> "238 bert/embeddings/LayerNorm/batchnorm/sub" [label="[]", style=solid]; +"238 bert/embeddings/LayerNorm/batchnorm/sub" -> "240 bert/embeddings/LayerNorm/batchnorm/add_1" [label="[]", style=solid]; +"239 bert/embeddings/LayerNorm/batchnorm/mul_1" -> "240 bert/embeddings/LayerNorm/batchnorm/add_1" [label="[]", style=solid]; +"240 bert/embeddings/LayerNorm/batchnorm/add_1" -> "241 bert/encoder/Shape_2" [label="[]", style=solid]; +"240 bert/embeddings/LayerNorm/batchnorm/add_1" -> "405 bert/encoder/Reshape_1" [label="[]", style=solid]; +"241 bert/encoder/Shape_2" -> "242 bert/encoder/Shape_2__76" [label="[-1]", style=dashed]; +"242 bert/encoder/Shape_2__76" -> "243 bert/encoder/strided_slice_2" [label="[-1]", style=solid]; +"243 bert/encoder/strided_slice_2" -> "244 bert/encoder/strided_slice_2__80" [label="[-1]", style=solid]; +"244 bert/encoder/strided_slice_2__80" -> "245 bert/encoder/strided_slice_2__81" [label="[]", style=solid]; +"245 bert/encoder/strided_slice_2__81" -> "246 bert/encoder/layer_9/attention/self/mul_2" [label="[]", style=dashed]; +"245 bert/encoder/strided_slice_2__81" -> "250 bert/encoder/layer_9/attention/self/Reshape_2/shape_Unsqueeze__85" [label="[]", style=dashed]; +"245 bert/encoder/strided_slice_2__81" -> "253 bert/encoder/layer_9/attention/self/Reshape_1/shape_Unsqueeze__90" [label="[]", style=dashed]; +"245 bert/encoder/strided_slice_2__81" -> "256 bert/encoder/layer_9/attention/self/Reshape/shape_Unsqueeze__95" [label="[]", style=dashed]; +"245 bert/encoder/strided_slice_2__81" -> "259 bert/encoder/layer_8/attention/self/mul_2" [label="[]", style=dashed]; +"245 bert/encoder/strided_slice_2__81" -> "263 bert/encoder/layer_8/attention/self/Reshape_2/shape_Unsqueeze__103" [label="[]", style=dashed]; +"245 bert/encoder/strided_slice_2__81" -> "266 bert/encoder/layer_8/attention/self/Reshape_1/shape_Unsqueeze__108" [label="[]", style=dashed]; +"245 bert/encoder/strided_slice_2__81" -> "269 bert/encoder/layer_8/attention/self/Reshape/shape_Unsqueeze__113" [label="[]", style=dashed]; +"245 
bert/encoder/strided_slice_2__81" -> "272 bert/encoder/layer_7/attention/self/mul_2" [label="[]", style=dashed]; +"245 bert/encoder/strided_slice_2__81" -> "276 bert/encoder/layer_7/attention/self/Reshape_2/shape_Unsqueeze__121" [label="[]", style=dashed]; +"245 bert/encoder/strided_slice_2__81" -> "279 bert/encoder/layer_7/attention/self/Reshape_1/shape_Unsqueeze__126" [label="[]", style=dashed]; +"245 bert/encoder/strided_slice_2__81" -> "282 bert/encoder/layer_7/attention/self/Reshape/shape_Unsqueeze__131" [label="[]", style=dashed]; +"245 bert/encoder/strided_slice_2__81" -> "285 bert/encoder/layer_6/attention/self/mul_2" [label="[]", style=dashed]; +"245 bert/encoder/strided_slice_2__81" -> "289 bert/encoder/layer_6/attention/self/Reshape_2/shape_Unsqueeze__139" [label="[]", style=dashed]; +"245 bert/encoder/strided_slice_2__81" -> "292 bert/encoder/layer_6/attention/self/Reshape_1/shape_Unsqueeze__144" [label="[]", style=dashed]; +"245 bert/encoder/strided_slice_2__81" -> "295 bert/encoder/layer_6/attention/self/Reshape/shape_Unsqueeze__149" [label="[]", style=dashed]; +"245 bert/encoder/strided_slice_2__81" -> "298 bert/encoder/layer_5/attention/self/mul_2" [label="[]", style=dashed]; +"245 bert/encoder/strided_slice_2__81" -> "302 bert/encoder/layer_5/attention/self/Reshape_2/shape_Unsqueeze__157" [label="[]", style=dashed]; +"245 bert/encoder/strided_slice_2__81" -> "305 bert/encoder/layer_5/attention/self/Reshape_1/shape_Unsqueeze__162" [label="[]", style=dashed]; +"245 bert/encoder/strided_slice_2__81" -> "308 bert/encoder/layer_5/attention/self/Reshape/shape_Unsqueeze__167" [label="[]", style=dashed]; +"245 bert/encoder/strided_slice_2__81" -> "311 bert/encoder/layer_4/attention/self/mul_2" [label="[]", style=dashed]; +"245 bert/encoder/strided_slice_2__81" -> "315 bert/encoder/layer_4/attention/self/Reshape_2/shape_Unsqueeze__175" [label="[]", style=dashed]; +"245 bert/encoder/strided_slice_2__81" -> "318 bert/encoder/layer_4/attention/self/Reshape_1/shape_Unsqueeze__180" [label="[]", style=dashed]; +"245 bert/encoder/strided_slice_2__81" -> "321 bert/encoder/layer_4/attention/self/Reshape/shape_Unsqueeze__185" [label="[]", style=dashed]; +"245 bert/encoder/strided_slice_2__81" -> "324 bert/encoder/layer_3/attention/self/mul_2" [label="[]", style=dashed]; +"245 bert/encoder/strided_slice_2__81" -> "328 bert/encoder/layer_3/attention/self/Reshape_2/shape_Unsqueeze__193" [label="[]", style=dashed]; +"245 bert/encoder/strided_slice_2__81" -> "331 bert/encoder/layer_3/attention/self/Reshape_1/shape_Unsqueeze__198" [label="[]", style=dashed]; +"245 bert/encoder/strided_slice_2__81" -> "334 bert/encoder/layer_3/attention/self/Reshape/shape_Unsqueeze__203" [label="[]", style=dashed]; +"245 bert/encoder/strided_slice_2__81" -> "337 bert/encoder/layer_2/attention/self/mul_2" [label="[]", style=dashed]; +"245 bert/encoder/strided_slice_2__81" -> "341 bert/encoder/layer_2/attention/self/Reshape_2/shape_Unsqueeze__211" [label="[]", style=dashed]; +"245 bert/encoder/strided_slice_2__81" -> "344 bert/encoder/layer_2/attention/self/Reshape_1/shape_Unsqueeze__216" [label="[]", style=dashed]; +"245 bert/encoder/strided_slice_2__81" -> "347 bert/encoder/layer_2/attention/self/Reshape/shape_Unsqueeze__221" [label="[]", style=dashed]; +"245 bert/encoder/strided_slice_2__81" -> "350 bert/encoder/layer_11/attention/self/mul_2" [label="[]", style=dashed]; +"245 bert/encoder/strided_slice_2__81" -> "354 bert/encoder/layer_11/attention/self/Reshape_2/shape_Unsqueeze__229" [label="[]", style=dashed]; 
+"245 bert/encoder/strided_slice_2__81" -> "357 bert/encoder/layer_11/attention/self/Reshape_1/shape_Unsqueeze__234" [label="[]", style=dashed]; +"245 bert/encoder/strided_slice_2__81" -> "360 bert/encoder/layer_11/attention/self/Reshape/shape_Unsqueeze__239" [label="[]", style=dashed]; +"245 bert/encoder/strided_slice_2__81" -> "363 bert/encoder/layer_10/attention/self/mul_2" [label="[]", style=dashed]; +"245 bert/encoder/strided_slice_2__81" -> "367 bert/encoder/layer_10/attention/self/Reshape_2/shape_Unsqueeze__247" [label="[]", style=dashed]; +"245 bert/encoder/strided_slice_2__81" -> "370 bert/encoder/layer_10/attention/self/Reshape_1/shape_Unsqueeze__252" [label="[]", style=dashed]; +"245 bert/encoder/strided_slice_2__81" -> "373 bert/encoder/layer_10/attention/self/Reshape/shape_Unsqueeze__257" [label="[]", style=dashed]; +"245 bert/encoder/strided_slice_2__81" -> "376 bert/encoder/layer_1/attention/self/mul_2" [label="[]", style=dashed]; +"245 bert/encoder/strided_slice_2__81" -> "380 bert/encoder/layer_1/attention/self/Reshape_2/shape_Unsqueeze__265" [label="[]", style=dashed]; +"245 bert/encoder/strided_slice_2__81" -> "383 bert/encoder/layer_1/attention/self/Reshape_1/shape_Unsqueeze__270" [label="[]", style=dashed]; +"245 bert/encoder/strided_slice_2__81" -> "386 bert/encoder/layer_1/attention/self/Reshape/shape_Unsqueeze__275" [label="[]", style=dashed]; +"245 bert/encoder/strided_slice_2__81" -> "389 bert/encoder/layer_0/attention/self/mul_2" [label="[]", style=dashed]; +"245 bert/encoder/strided_slice_2__81" -> "393 bert/encoder/layer_0/attention/self/Reshape_2/shape_Unsqueeze__283" [label="[]", style=dashed]; +"245 bert/encoder/strided_slice_2__81" -> "396 bert/encoder/layer_0/attention/self/Reshape_1/shape_Unsqueeze__288" [label="[]", style=dashed]; +"245 bert/encoder/strided_slice_2__81" -> "399 bert/encoder/layer_0/attention/self/Reshape/shape_Unsqueeze__293" [label="[]", style=dashed]; +"245 bert/encoder/strided_slice_2__81" -> "402 bert/encoder/Reshape_13/shape_Unsqueeze__298" [label="[]", style=dashed]; +"246 bert/encoder/layer_9/attention/self/mul_2" -> "247 bert/encoder/layer_9/attention/self/Reshape_3/shape_Unsqueeze__82" [label="[]", style=dashed]; +"247 bert/encoder/layer_9/attention/self/Reshape_3/shape_Unsqueeze__82" -> "248 bert/encoder/layer_9/attention/self/Reshape_3/shape_Concat__84" [label="[1]", style=dashed]; +"248 bert/encoder/layer_9/attention/self/Reshape_3/shape_Concat__84" -> "249 bert/encoder/layer_9/attention/self/Reshape_3__434" [label="[2]", style=dashed]; +"249 bert/encoder/layer_9/attention/self/Reshape_3__434" -> "1253 bert/encoder/layer_9/attention/self/Reshape_3" [label="[2]", style=dashed]; +"250 bert/encoder/layer_9/attention/self/Reshape_2/shape_Unsqueeze__85" -> "251 bert/encoder/layer_9/attention/self/Reshape_2/shape_Concat__89" [label="[1]", style=dashed]; +"251 bert/encoder/layer_9/attention/self/Reshape_2/shape_Concat__89" -> "252 bert/encoder/layer_9/attention/self/Reshape_2__429" [label="[4]", style=dashed]; +"252 bert/encoder/layer_9/attention/self/Reshape_2__429" -> "1226 bert/encoder/layer_9/attention/self/Reshape_2" [label="[4]", style=dashed]; +"253 bert/encoder/layer_9/attention/self/Reshape_1/shape_Unsqueeze__90" -> "254 bert/encoder/layer_9/attention/self/Reshape_1/shape_Concat__94" [label="[1]", style=dashed]; +"254 bert/encoder/layer_9/attention/self/Reshape_1/shape_Concat__94" -> "255 bert/encoder/layer_9/attention/self/Reshape_1__431" [label="[4]", style=dashed]; +"255 
bert/encoder/layer_9/attention/self/Reshape_1__431" -> "1242 bert/encoder/layer_9/attention/self/Reshape_1" [label="[4]", style=dashed]; +"256 bert/encoder/layer_9/attention/self/Reshape/shape_Unsqueeze__95" -> "257 bert/encoder/layer_9/attention/self/Reshape/shape_Concat__99" [label="[1]", style=dashed]; +"257 bert/encoder/layer_9/attention/self/Reshape/shape_Concat__99" -> "258 bert/encoder/layer_9/attention/self/Reshape__430" [label="[4]", style=dashed]; +"258 bert/encoder/layer_9/attention/self/Reshape__430" -> "1234 bert/encoder/layer_9/attention/self/Reshape" [label="[4]", style=dashed]; +"259 bert/encoder/layer_8/attention/self/mul_2" -> "260 bert/encoder/layer_8/attention/self/Reshape_3/shape_Unsqueeze__100" [label="[]", style=dashed]; +"260 bert/encoder/layer_8/attention/self/Reshape_3/shape_Unsqueeze__100" -> "261 bert/encoder/layer_8/attention/self/Reshape_3/shape_Concat__102" [label="[1]", style=dashed]; +"261 bert/encoder/layer_8/attention/self/Reshape_3/shape_Concat__102" -> "262 bert/encoder/layer_8/attention/self/Reshape_3__420" [label="[2]", style=dashed]; +"262 bert/encoder/layer_8/attention/self/Reshape_3__420" -> "1163 bert/encoder/layer_8/attention/self/Reshape_3" [label="[2]", style=dashed]; +"263 bert/encoder/layer_8/attention/self/Reshape_2/shape_Unsqueeze__103" -> "264 bert/encoder/layer_8/attention/self/Reshape_2/shape_Concat__107" [label="[1]", style=dashed]; +"264 bert/encoder/layer_8/attention/self/Reshape_2/shape_Concat__107" -> "265 bert/encoder/layer_8/attention/self/Reshape_2__415" [label="[4]", style=dashed]; +"265 bert/encoder/layer_8/attention/self/Reshape_2__415" -> "1136 bert/encoder/layer_8/attention/self/Reshape_2" [label="[4]", style=dashed]; +"266 bert/encoder/layer_8/attention/self/Reshape_1/shape_Unsqueeze__108" -> "267 bert/encoder/layer_8/attention/self/Reshape_1/shape_Concat__112" [label="[1]", style=dashed]; +"267 bert/encoder/layer_8/attention/self/Reshape_1/shape_Concat__112" -> "268 bert/encoder/layer_8/attention/self/Reshape_1__417" [label="[4]", style=dashed]; +"268 bert/encoder/layer_8/attention/self/Reshape_1__417" -> "1152 bert/encoder/layer_8/attention/self/Reshape_1" [label="[4]", style=dashed]; +"269 bert/encoder/layer_8/attention/self/Reshape/shape_Unsqueeze__113" -> "270 bert/encoder/layer_8/attention/self/Reshape/shape_Concat__117" [label="[1]", style=dashed]; +"270 bert/encoder/layer_8/attention/self/Reshape/shape_Concat__117" -> "271 bert/encoder/layer_8/attention/self/Reshape__416" [label="[4]", style=dashed]; +"271 bert/encoder/layer_8/attention/self/Reshape__416" -> "1144 bert/encoder/layer_8/attention/self/Reshape" [label="[4]", style=dashed]; +"272 bert/encoder/layer_7/attention/self/mul_2" -> "273 bert/encoder/layer_7/attention/self/Reshape_3/shape_Unsqueeze__118" [label="[]", style=dashed]; +"273 bert/encoder/layer_7/attention/self/Reshape_3/shape_Unsqueeze__118" -> "274 bert/encoder/layer_7/attention/self/Reshape_3/shape_Concat__120" [label="[1]", style=dashed]; +"274 bert/encoder/layer_7/attention/self/Reshape_3/shape_Concat__120" -> "275 bert/encoder/layer_7/attention/self/Reshape_3__406" [label="[2]", style=dashed]; +"275 bert/encoder/layer_7/attention/self/Reshape_3__406" -> "1073 bert/encoder/layer_7/attention/self/Reshape_3" [label="[2]", style=dashed]; +"276 bert/encoder/layer_7/attention/self/Reshape_2/shape_Unsqueeze__121" -> "277 bert/encoder/layer_7/attention/self/Reshape_2/shape_Concat__125" [label="[1]", style=dashed]; +"277 bert/encoder/layer_7/attention/self/Reshape_2/shape_Concat__125" -> "278 
bert/encoder/layer_7/attention/self/Reshape_2__401" [label="[4]", style=dashed]; +"278 bert/encoder/layer_7/attention/self/Reshape_2__401" -> "1046 bert/encoder/layer_7/attention/self/Reshape_2" [label="[4]", style=dashed]; +"279 bert/encoder/layer_7/attention/self/Reshape_1/shape_Unsqueeze__126" -> "280 bert/encoder/layer_7/attention/self/Reshape_1/shape_Concat__130" [label="[1]", style=dashed]; +"280 bert/encoder/layer_7/attention/self/Reshape_1/shape_Concat__130" -> "281 bert/encoder/layer_7/attention/self/Reshape_1__403" [label="[4]", style=dashed]; +"281 bert/encoder/layer_7/attention/self/Reshape_1__403" -> "1062 bert/encoder/layer_7/attention/self/Reshape_1" [label="[4]", style=dashed]; +"282 bert/encoder/layer_7/attention/self/Reshape/shape_Unsqueeze__131" -> "283 bert/encoder/layer_7/attention/self/Reshape/shape_Concat__135" [label="[1]", style=dashed]; +"283 bert/encoder/layer_7/attention/self/Reshape/shape_Concat__135" -> "284 bert/encoder/layer_7/attention/self/Reshape__402" [label="[4]", style=dashed]; +"284 bert/encoder/layer_7/attention/self/Reshape__402" -> "1054 bert/encoder/layer_7/attention/self/Reshape" [label="[4]", style=dashed]; +"285 bert/encoder/layer_6/attention/self/mul_2" -> "286 bert/encoder/layer_6/attention/self/Reshape_3/shape_Unsqueeze__136" [label="[]", style=dashed]; +"286 bert/encoder/layer_6/attention/self/Reshape_3/shape_Unsqueeze__136" -> "287 bert/encoder/layer_6/attention/self/Reshape_3/shape_Concat__138" [label="[1]", style=dashed]; +"287 bert/encoder/layer_6/attention/self/Reshape_3/shape_Concat__138" -> "288 bert/encoder/layer_6/attention/self/Reshape_3__392" [label="[2]", style=dashed]; +"288 bert/encoder/layer_6/attention/self/Reshape_3__392" -> "983 bert/encoder/layer_6/attention/self/Reshape_3" [label="[2]", style=dashed]; +"289 bert/encoder/layer_6/attention/self/Reshape_2/shape_Unsqueeze__139" -> "290 bert/encoder/layer_6/attention/self/Reshape_2/shape_Concat__143" [label="[1]", style=dashed]; +"290 bert/encoder/layer_6/attention/self/Reshape_2/shape_Concat__143" -> "291 bert/encoder/layer_6/attention/self/Reshape_2__387" [label="[4]", style=dashed]; +"291 bert/encoder/layer_6/attention/self/Reshape_2__387" -> "956 bert/encoder/layer_6/attention/self/Reshape_2" [label="[4]", style=dashed]; +"292 bert/encoder/layer_6/attention/self/Reshape_1/shape_Unsqueeze__144" -> "293 bert/encoder/layer_6/attention/self/Reshape_1/shape_Concat__148" [label="[1]", style=dashed]; +"293 bert/encoder/layer_6/attention/self/Reshape_1/shape_Concat__148" -> "294 bert/encoder/layer_6/attention/self/Reshape_1__389" [label="[4]", style=dashed]; +"294 bert/encoder/layer_6/attention/self/Reshape_1__389" -> "972 bert/encoder/layer_6/attention/self/Reshape_1" [label="[4]", style=dashed]; +"295 bert/encoder/layer_6/attention/self/Reshape/shape_Unsqueeze__149" -> "296 bert/encoder/layer_6/attention/self/Reshape/shape_Concat__153" [label="[1]", style=dashed]; +"296 bert/encoder/layer_6/attention/self/Reshape/shape_Concat__153" -> "297 bert/encoder/layer_6/attention/self/Reshape__388" [label="[4]", style=dashed]; +"297 bert/encoder/layer_6/attention/self/Reshape__388" -> "964 bert/encoder/layer_6/attention/self/Reshape" [label="[4]", style=dashed]; +"298 bert/encoder/layer_5/attention/self/mul_2" -> "299 bert/encoder/layer_5/attention/self/Reshape_3/shape_Unsqueeze__154" [label="[]", style=dashed]; +"299 bert/encoder/layer_5/attention/self/Reshape_3/shape_Unsqueeze__154" -> "300 bert/encoder/layer_5/attention/self/Reshape_3/shape_Concat__156" [label="[1]", style=dashed]; 
+"300 bert/encoder/layer_5/attention/self/Reshape_3/shape_Concat__156" -> "301 bert/encoder/layer_5/attention/self/Reshape_3__378" [label="[2]", style=dashed]; +"301 bert/encoder/layer_5/attention/self/Reshape_3__378" -> "893 bert/encoder/layer_5/attention/self/Reshape_3" [label="[2]", style=dashed]; +"302 bert/encoder/layer_5/attention/self/Reshape_2/shape_Unsqueeze__157" -> "303 bert/encoder/layer_5/attention/self/Reshape_2/shape_Concat__161" [label="[1]", style=dashed]; +"303 bert/encoder/layer_5/attention/self/Reshape_2/shape_Concat__161" -> "304 bert/encoder/layer_5/attention/self/Reshape_2__373" [label="[4]", style=dashed]; +"304 bert/encoder/layer_5/attention/self/Reshape_2__373" -> "866 bert/encoder/layer_5/attention/self/Reshape_2" [label="[4]", style=dashed]; +"305 bert/encoder/layer_5/attention/self/Reshape_1/shape_Unsqueeze__162" -> "306 bert/encoder/layer_5/attention/self/Reshape_1/shape_Concat__166" [label="[1]", style=dashed]; +"306 bert/encoder/layer_5/attention/self/Reshape_1/shape_Concat__166" -> "307 bert/encoder/layer_5/attention/self/Reshape_1__375" [label="[4]", style=dashed]; +"307 bert/encoder/layer_5/attention/self/Reshape_1__375" -> "882 bert/encoder/layer_5/attention/self/Reshape_1" [label="[4]", style=dashed]; +"308 bert/encoder/layer_5/attention/self/Reshape/shape_Unsqueeze__167" -> "309 bert/encoder/layer_5/attention/self/Reshape/shape_Concat__171" [label="[1]", style=dashed]; +"309 bert/encoder/layer_5/attention/self/Reshape/shape_Concat__171" -> "310 bert/encoder/layer_5/attention/self/Reshape__374" [label="[4]", style=dashed]; +"310 bert/encoder/layer_5/attention/self/Reshape__374" -> "874 bert/encoder/layer_5/attention/self/Reshape" [label="[4]", style=dashed]; +"311 bert/encoder/layer_4/attention/self/mul_2" -> "312 bert/encoder/layer_4/attention/self/Reshape_3/shape_Unsqueeze__172" [label="[]", style=dashed]; +"312 bert/encoder/layer_4/attention/self/Reshape_3/shape_Unsqueeze__172" -> "313 bert/encoder/layer_4/attention/self/Reshape_3/shape_Concat__174" [label="[1]", style=dashed]; +"313 bert/encoder/layer_4/attention/self/Reshape_3/shape_Concat__174" -> "314 bert/encoder/layer_4/attention/self/Reshape_3__364" [label="[2]", style=dashed]; +"314 bert/encoder/layer_4/attention/self/Reshape_3__364" -> "803 bert/encoder/layer_4/attention/self/Reshape_3" [label="[2]", style=dashed]; +"315 bert/encoder/layer_4/attention/self/Reshape_2/shape_Unsqueeze__175" -> "316 bert/encoder/layer_4/attention/self/Reshape_2/shape_Concat__179" [label="[1]", style=dashed]; +"316 bert/encoder/layer_4/attention/self/Reshape_2/shape_Concat__179" -> "317 bert/encoder/layer_4/attention/self/Reshape_2__359" [label="[4]", style=dashed]; +"317 bert/encoder/layer_4/attention/self/Reshape_2__359" -> "776 bert/encoder/layer_4/attention/self/Reshape_2" [label="[4]", style=dashed]; +"318 bert/encoder/layer_4/attention/self/Reshape_1/shape_Unsqueeze__180" -> "319 bert/encoder/layer_4/attention/self/Reshape_1/shape_Concat__184" [label="[1]", style=dashed]; +"319 bert/encoder/layer_4/attention/self/Reshape_1/shape_Concat__184" -> "320 bert/encoder/layer_4/attention/self/Reshape_1__361" [label="[4]", style=dashed]; +"320 bert/encoder/layer_4/attention/self/Reshape_1__361" -> "792 bert/encoder/layer_4/attention/self/Reshape_1" [label="[4]", style=dashed]; +"321 bert/encoder/layer_4/attention/self/Reshape/shape_Unsqueeze__185" -> "322 bert/encoder/layer_4/attention/self/Reshape/shape_Concat__189" [label="[1]", style=dashed]; +"322 bert/encoder/layer_4/attention/self/Reshape/shape_Concat__189" -> 
"323 bert/encoder/layer_4/attention/self/Reshape__360" [label="[4]", style=dashed]; +"323 bert/encoder/layer_4/attention/self/Reshape__360" -> "784 bert/encoder/layer_4/attention/self/Reshape" [label="[4]", style=dashed]; +"324 bert/encoder/layer_3/attention/self/mul_2" -> "325 bert/encoder/layer_3/attention/self/Reshape_3/shape_Unsqueeze__190" [label="[]", style=dashed]; +"325 bert/encoder/layer_3/attention/self/Reshape_3/shape_Unsqueeze__190" -> "326 bert/encoder/layer_3/attention/self/Reshape_3/shape_Concat__192" [label="[1]", style=dashed]; +"326 bert/encoder/layer_3/attention/self/Reshape_3/shape_Concat__192" -> "327 bert/encoder/layer_3/attention/self/Reshape_3__350" [label="[2]", style=dashed]; +"327 bert/encoder/layer_3/attention/self/Reshape_3__350" -> "713 bert/encoder/layer_3/attention/self/Reshape_3" [label="[2]", style=dashed]; +"328 bert/encoder/layer_3/attention/self/Reshape_2/shape_Unsqueeze__193" -> "329 bert/encoder/layer_3/attention/self/Reshape_2/shape_Concat__197" [label="[1]", style=dashed]; +"329 bert/encoder/layer_3/attention/self/Reshape_2/shape_Concat__197" -> "330 bert/encoder/layer_3/attention/self/Reshape_2__345" [label="[4]", style=dashed]; +"330 bert/encoder/layer_3/attention/self/Reshape_2__345" -> "686 bert/encoder/layer_3/attention/self/Reshape_2" [label="[4]", style=dashed]; +"331 bert/encoder/layer_3/attention/self/Reshape_1/shape_Unsqueeze__198" -> "332 bert/encoder/layer_3/attention/self/Reshape_1/shape_Concat__202" [label="[1]", style=dashed]; +"332 bert/encoder/layer_3/attention/self/Reshape_1/shape_Concat__202" -> "333 bert/encoder/layer_3/attention/self/Reshape_1__347" [label="[4]", style=dashed]; +"333 bert/encoder/layer_3/attention/self/Reshape_1__347" -> "702 bert/encoder/layer_3/attention/self/Reshape_1" [label="[4]", style=dashed]; +"334 bert/encoder/layer_3/attention/self/Reshape/shape_Unsqueeze__203" -> "335 bert/encoder/layer_3/attention/self/Reshape/shape_Concat__207" [label="[1]", style=dashed]; +"335 bert/encoder/layer_3/attention/self/Reshape/shape_Concat__207" -> "336 bert/encoder/layer_3/attention/self/Reshape__346" [label="[4]", style=dashed]; +"336 bert/encoder/layer_3/attention/self/Reshape__346" -> "694 bert/encoder/layer_3/attention/self/Reshape" [label="[4]", style=dashed]; +"337 bert/encoder/layer_2/attention/self/mul_2" -> "338 bert/encoder/layer_2/attention/self/Reshape_3/shape_Unsqueeze__208" [label="[]", style=dashed]; +"338 bert/encoder/layer_2/attention/self/Reshape_3/shape_Unsqueeze__208" -> "339 bert/encoder/layer_2/attention/self/Reshape_3/shape_Concat__210" [label="[1]", style=dashed]; +"339 bert/encoder/layer_2/attention/self/Reshape_3/shape_Concat__210" -> "340 bert/encoder/layer_2/attention/self/Reshape_3__336" [label="[2]", style=dashed]; +"340 bert/encoder/layer_2/attention/self/Reshape_3__336" -> "623 bert/encoder/layer_2/attention/self/Reshape_3" [label="[2]", style=dashed]; +"341 bert/encoder/layer_2/attention/self/Reshape_2/shape_Unsqueeze__211" -> "342 bert/encoder/layer_2/attention/self/Reshape_2/shape_Concat__215" [label="[1]", style=dashed]; +"342 bert/encoder/layer_2/attention/self/Reshape_2/shape_Concat__215" -> "343 bert/encoder/layer_2/attention/self/Reshape_2__331" [label="[4]", style=dashed]; +"343 bert/encoder/layer_2/attention/self/Reshape_2__331" -> "596 bert/encoder/layer_2/attention/self/Reshape_2" [label="[4]", style=dashed]; +"344 bert/encoder/layer_2/attention/self/Reshape_1/shape_Unsqueeze__216" -> "345 bert/encoder/layer_2/attention/self/Reshape_1/shape_Concat__220" [label="[1]", 
style=dashed]; +"345 bert/encoder/layer_2/attention/self/Reshape_1/shape_Concat__220" -> "346 bert/encoder/layer_2/attention/self/Reshape_1__333" [label="[4]", style=dashed]; +"346 bert/encoder/layer_2/attention/self/Reshape_1__333" -> "612 bert/encoder/layer_2/attention/self/Reshape_1" [label="[4]", style=dashed]; +"347 bert/encoder/layer_2/attention/self/Reshape/shape_Unsqueeze__221" -> "348 bert/encoder/layer_2/attention/self/Reshape/shape_Concat__225" [label="[1]", style=dashed]; +"348 bert/encoder/layer_2/attention/self/Reshape/shape_Concat__225" -> "349 bert/encoder/layer_2/attention/self/Reshape__332" [label="[4]", style=dashed]; +"349 bert/encoder/layer_2/attention/self/Reshape__332" -> "604 bert/encoder/layer_2/attention/self/Reshape" [label="[4]", style=dashed]; +"350 bert/encoder/layer_11/attention/self/mul_2" -> "351 bert/encoder/layer_11/attention/self/Reshape_3/shape_Unsqueeze__226" [label="[]", style=dashed]; +"351 bert/encoder/layer_11/attention/self/Reshape_3/shape_Unsqueeze__226" -> "352 bert/encoder/layer_11/attention/self/Reshape_3/shape_Concat__228" [label="[1]", style=dashed]; +"352 bert/encoder/layer_11/attention/self/Reshape_3/shape_Concat__228" -> "353 bert/encoder/layer_11/attention/self/Reshape_3__462" [label="[2]", style=dashed]; +"353 bert/encoder/layer_11/attention/self/Reshape_3__462" -> "1433 bert/encoder/layer_11/attention/self/Reshape_3" [label="[2]", style=dashed]; +"354 bert/encoder/layer_11/attention/self/Reshape_2/shape_Unsqueeze__229" -> "355 bert/encoder/layer_11/attention/self/Reshape_2/shape_Concat__233" [label="[1]", style=dashed]; +"355 bert/encoder/layer_11/attention/self/Reshape_2/shape_Concat__233" -> "356 bert/encoder/layer_11/attention/self/Reshape_2__457" [label="[4]", style=dashed]; +"356 bert/encoder/layer_11/attention/self/Reshape_2__457" -> "1406 bert/encoder/layer_11/attention/self/Reshape_2" [label="[4]", style=dashed]; +"357 bert/encoder/layer_11/attention/self/Reshape_1/shape_Unsqueeze__234" -> "358 bert/encoder/layer_11/attention/self/Reshape_1/shape_Concat__238" [label="[1]", style=dashed]; +"358 bert/encoder/layer_11/attention/self/Reshape_1/shape_Concat__238" -> "359 bert/encoder/layer_11/attention/self/Reshape_1__459" [label="[4]", style=dashed]; +"359 bert/encoder/layer_11/attention/self/Reshape_1__459" -> "1422 bert/encoder/layer_11/attention/self/Reshape_1" [label="[4]", style=dashed]; +"360 bert/encoder/layer_11/attention/self/Reshape/shape_Unsqueeze__239" -> "361 bert/encoder/layer_11/attention/self/Reshape/shape_Concat__243" [label="[1]", style=dashed]; +"361 bert/encoder/layer_11/attention/self/Reshape/shape_Concat__243" -> "362 bert/encoder/layer_11/attention/self/Reshape__458" [label="[4]", style=dashed]; +"362 bert/encoder/layer_11/attention/self/Reshape__458" -> "1414 bert/encoder/layer_11/attention/self/Reshape" [label="[4]", style=dashed]; +"363 bert/encoder/layer_10/attention/self/mul_2" -> "364 bert/encoder/layer_10/attention/self/Reshape_3/shape_Unsqueeze__244" [label="[]", style=dashed]; +"364 bert/encoder/layer_10/attention/self/Reshape_3/shape_Unsqueeze__244" -> "365 bert/encoder/layer_10/attention/self/Reshape_3/shape_Concat__246" [label="[1]", style=dashed]; +"365 bert/encoder/layer_10/attention/self/Reshape_3/shape_Concat__246" -> "366 bert/encoder/layer_10/attention/self/Reshape_3__448" [label="[2]", style=dashed]; +"366 bert/encoder/layer_10/attention/self/Reshape_3__448" -> "1343 bert/encoder/layer_10/attention/self/Reshape_3" [label="[2]", style=dashed]; +"367 
bert/encoder/layer_10/attention/self/Reshape_2/shape_Unsqueeze__247" -> "368 bert/encoder/layer_10/attention/self/Reshape_2/shape_Concat__251" [label="[1]", style=dashed]; +"368 bert/encoder/layer_10/attention/self/Reshape_2/shape_Concat__251" -> "369 bert/encoder/layer_10/attention/self/Reshape_2__443" [label="[4]", style=dashed]; +"369 bert/encoder/layer_10/attention/self/Reshape_2__443" -> "1316 bert/encoder/layer_10/attention/self/Reshape_2" [label="[4]", style=dashed]; +"370 bert/encoder/layer_10/attention/self/Reshape_1/shape_Unsqueeze__252" -> "371 bert/encoder/layer_10/attention/self/Reshape_1/shape_Concat__256" [label="[1]", style=dashed]; +"371 bert/encoder/layer_10/attention/self/Reshape_1/shape_Concat__256" -> "372 bert/encoder/layer_10/attention/self/Reshape_1__445" [label="[4]", style=dashed]; +"372 bert/encoder/layer_10/attention/self/Reshape_1__445" -> "1332 bert/encoder/layer_10/attention/self/Reshape_1" [label="[4]", style=dashed]; +"373 bert/encoder/layer_10/attention/self/Reshape/shape_Unsqueeze__257" -> "374 bert/encoder/layer_10/attention/self/Reshape/shape_Concat__261" [label="[1]", style=dashed]; +"374 bert/encoder/layer_10/attention/self/Reshape/shape_Concat__261" -> "375 bert/encoder/layer_10/attention/self/Reshape__444" [label="[4]", style=dashed]; +"375 bert/encoder/layer_10/attention/self/Reshape__444" -> "1324 bert/encoder/layer_10/attention/self/Reshape" [label="[4]", style=dashed]; +"376 bert/encoder/layer_1/attention/self/mul_2" -> "377 bert/encoder/layer_1/attention/self/Reshape_3/shape_Unsqueeze__262" [label="[]", style=dashed]; +"377 bert/encoder/layer_1/attention/self/Reshape_3/shape_Unsqueeze__262" -> "378 bert/encoder/layer_1/attention/self/Reshape_3/shape_Concat__264" [label="[1]", style=dashed]; +"378 bert/encoder/layer_1/attention/self/Reshape_3/shape_Concat__264" -> "379 bert/encoder/layer_1/attention/self/Reshape_3__322" [label="[2]", style=dashed]; +"379 bert/encoder/layer_1/attention/self/Reshape_3__322" -> "533 bert/encoder/layer_1/attention/self/Reshape_3" [label="[2]", style=dashed]; +"380 bert/encoder/layer_1/attention/self/Reshape_2/shape_Unsqueeze__265" -> "381 bert/encoder/layer_1/attention/self/Reshape_2/shape_Concat__269" [label="[1]", style=dashed]; +"381 bert/encoder/layer_1/attention/self/Reshape_2/shape_Concat__269" -> "382 bert/encoder/layer_1/attention/self/Reshape_2__317" [label="[4]", style=dashed]; +"382 bert/encoder/layer_1/attention/self/Reshape_2__317" -> "506 bert/encoder/layer_1/attention/self/Reshape_2" [label="[4]", style=dashed]; +"383 bert/encoder/layer_1/attention/self/Reshape_1/shape_Unsqueeze__270" -> "384 bert/encoder/layer_1/attention/self/Reshape_1/shape_Concat__274" [label="[1]", style=dashed]; +"384 bert/encoder/layer_1/attention/self/Reshape_1/shape_Concat__274" -> "385 bert/encoder/layer_1/attention/self/Reshape_1__319" [label="[4]", style=dashed]; +"385 bert/encoder/layer_1/attention/self/Reshape_1__319" -> "522 bert/encoder/layer_1/attention/self/Reshape_1" [label="[4]", style=dashed]; +"386 bert/encoder/layer_1/attention/self/Reshape/shape_Unsqueeze__275" -> "387 bert/encoder/layer_1/attention/self/Reshape/shape_Concat__279" [label="[1]", style=dashed]; +"387 bert/encoder/layer_1/attention/self/Reshape/shape_Concat__279" -> "388 bert/encoder/layer_1/attention/self/Reshape__318" [label="[4]", style=dashed]; +"388 bert/encoder/layer_1/attention/self/Reshape__318" -> "514 bert/encoder/layer_1/attention/self/Reshape" [label="[4]", style=dashed]; +"389 bert/encoder/layer_0/attention/self/mul_2" -> "390 
bert/encoder/layer_0/attention/self/Reshape_3/shape_Unsqueeze__280" [label="[]", style=dashed]; +"390 bert/encoder/layer_0/attention/self/Reshape_3/shape_Unsqueeze__280" -> "391 bert/encoder/layer_0/attention/self/Reshape_3/shape_Concat__282" [label="[1]", style=dashed]; +"391 bert/encoder/layer_0/attention/self/Reshape_3/shape_Concat__282" -> "392 bert/encoder/layer_0/attention/self/Reshape_3__308" [label="[2]", style=dashed]; +"392 bert/encoder/layer_0/attention/self/Reshape_3__308" -> "443 bert/encoder/layer_0/attention/self/Reshape_3" [label="[2]", style=dashed]; +"393 bert/encoder/layer_0/attention/self/Reshape_2/shape_Unsqueeze__283" -> "394 bert/encoder/layer_0/attention/self/Reshape_2/shape_Concat__287" [label="[1]", style=dashed]; +"394 bert/encoder/layer_0/attention/self/Reshape_2/shape_Concat__287" -> "395 bert/encoder/layer_0/attention/self/Reshape_2__303" [label="[4]", style=dashed]; +"395 bert/encoder/layer_0/attention/self/Reshape_2__303" -> "416 bert/encoder/layer_0/attention/self/Reshape_2" [label="[4]", style=dashed]; +"396 bert/encoder/layer_0/attention/self/Reshape_1/shape_Unsqueeze__288" -> "397 bert/encoder/layer_0/attention/self/Reshape_1/shape_Concat__292" [label="[1]", style=dashed]; +"397 bert/encoder/layer_0/attention/self/Reshape_1/shape_Concat__292" -> "398 bert/encoder/layer_0/attention/self/Reshape_1__305" [label="[4]", style=dashed]; +"398 bert/encoder/layer_0/attention/self/Reshape_1__305" -> "432 bert/encoder/layer_0/attention/self/Reshape_1" [label="[4]", style=dashed]; +"399 bert/encoder/layer_0/attention/self/Reshape/shape_Unsqueeze__293" -> "400 bert/encoder/layer_0/attention/self/Reshape/shape_Concat__297" [label="[1]", style=dashed]; +"400 bert/encoder/layer_0/attention/self/Reshape/shape_Concat__297" -> "401 bert/encoder/layer_0/attention/self/Reshape__304" [label="[4]", style=dashed]; +"401 bert/encoder/layer_0/attention/self/Reshape__304" -> "424 bert/encoder/layer_0/attention/self/Reshape" [label="[4]", style=dashed]; +"402 bert/encoder/Reshape_13/shape_Unsqueeze__298" -> "403 bert/encoder/Reshape_13/shape_Concat__301" [label="[1]", style=dashed]; +"403 bert/encoder/Reshape_13/shape_Concat__301" -> "404 bert/encoder/Reshape_13__471" [label="[3]", style=dashed]; +"404 bert/encoder/Reshape_13__471" -> "1488 bert/encoder/Reshape_13" [label="[3]", style=dashed]; +"405 bert/encoder/Reshape_1" -> "406 QuantizeLinear_bert/encoder/Reshape_1^0_3" [label="[]", style=solid]; +"405 bert/encoder/Reshape_1" -> "408 QuantizeLinear_bert/encoder/Reshape_1^0_2" [label="[]", style=solid]; +"405 bert/encoder/Reshape_1" -> "410 QuantizeLinear_bert/encoder/Reshape_1^0_1" [label="[]", style=solid]; +"405 bert/encoder/Reshape_1" -> "448 bert/encoder/layer_0/attention/output/add" [label="[]", style=solid]; +"406 QuantizeLinear_bert/encoder/Reshape_1^0_3" -> "407 DequantizeLinear_bert/encoder/Reshape_1^0_3" [label="[]", style=dashed]; +"407 DequantizeLinear_bert/encoder/Reshape_1^0_3" -> "428 bert/encoder/layer_0/attention/self/key/MatMul" [label="[]", style=solid]; +"408 QuantizeLinear_bert/encoder/Reshape_1^0_2" -> "409 DequantizeLinear_bert/encoder/Reshape_1^0_2" [label="[]", style=dashed]; +"409 DequantizeLinear_bert/encoder/Reshape_1^0_2" -> "420 bert/encoder/layer_0/attention/self/query/MatMul" [label="[]", style=solid]; +"410 QuantizeLinear_bert/encoder/Reshape_1^0_1" -> "411 DequantizeLinear_bert/encoder/Reshape_1^0_1" [label="[]", style=dashed]; +"411 DequantizeLinear_bert/encoder/Reshape_1^0_1" -> "414 bert/encoder/layer_0/attention/self/value/MatMul" 
[label="[]", style=solid]; +"412 QuantizeLinear_bert/encoder/layer_0/attention/self/value/kernel^0_1" -> "413 DequantizeLinear_bert/encoder/layer_0/attention/self/value/kernel^0_1" [label="[768, 768]", style=dashed]; +"413 DequantizeLinear_bert/encoder/layer_0/attention/self/value/kernel^0_1" -> "414 bert/encoder/layer_0/attention/self/value/MatMul" [label="[768, 768]", style=solid]; +"414 bert/encoder/layer_0/attention/self/value/MatMul" -> "415 bert/encoder/layer_0/attention/self/value/BiasAdd" [label="[]", style=solid]; +"415 bert/encoder/layer_0/attention/self/value/BiasAdd" -> "416 bert/encoder/layer_0/attention/self/Reshape_2" [label="[]", style=solid]; +"416 bert/encoder/layer_0/attention/self/Reshape_2" -> "417 bert/encoder/layer_0/attention/self/transpose_2" [label="[]", style=solid]; +"417 bert/encoder/layer_0/attention/self/transpose_2" -> "439 bert/encoder/layer_0/attention/self/MatMul_1" [label="[]", style=solid]; +"418 QuantizeLinear_bert/encoder/layer_0/attention/self/query/kernel^0_1" -> "419 DequantizeLinear_bert/encoder/layer_0/attention/self/query/kernel^0_1" [label="[768, 768]", style=dashed]; +"419 DequantizeLinear_bert/encoder/layer_0/attention/self/query/kernel^0_1" -> "420 bert/encoder/layer_0/attention/self/query/MatMul" [label="[768, 768]", style=solid]; +"420 bert/encoder/layer_0/attention/self/query/MatMul" -> "421 bert/encoder/layer_0/attention/self/query/BiasAdd" [label="[]", style=solid]; +"421 bert/encoder/layer_0/attention/self/query/BiasAdd" -> "422 QuantizeLinear_bert/encoder/layer_0/attention/self/query/BiasAdd^0_1" [label="[]", style=solid]; +"422 QuantizeLinear_bert/encoder/layer_0/attention/self/query/BiasAdd^0_1" -> "423 DequantizeLinear_bert/encoder/layer_0/attention/self/query/BiasAdd^0_1" [label="[]", style=dashed]; +"423 DequantizeLinear_bert/encoder/layer_0/attention/self/query/BiasAdd^0_1" -> "424 bert/encoder/layer_0/attention/self/Reshape" [label="[]", style=solid]; +"424 bert/encoder/layer_0/attention/self/Reshape" -> "425 bert/encoder/layer_0/attention/self/transpose" [label="[]", style=solid]; +"425 bert/encoder/layer_0/attention/self/transpose" -> "435 bert/encoder/layer_0/attention/self/MatMul" [label="[]", style=solid]; +"426 QuantizeLinear_bert/encoder/layer_0/attention/self/key/kernel^0_1" -> "427 DequantizeLinear_bert/encoder/layer_0/attention/self/key/kernel^0_1" [label="[768, 768]", style=dashed]; +"427 DequantizeLinear_bert/encoder/layer_0/attention/self/key/kernel^0_1" -> "428 bert/encoder/layer_0/attention/self/key/MatMul" [label="[768, 768]", style=solid]; +"428 bert/encoder/layer_0/attention/self/key/MatMul" -> "429 bert/encoder/layer_0/attention/self/key/BiasAdd" [label="[]", style=solid]; +"429 bert/encoder/layer_0/attention/self/key/BiasAdd" -> "430 QuantizeLinear_bert/encoder/layer_0/attention/self/key/BiasAdd^0_1" [label="[]", style=solid]; +"430 QuantizeLinear_bert/encoder/layer_0/attention/self/key/BiasAdd^0_1" -> "431 DequantizeLinear_bert/encoder/layer_0/attention/self/key/BiasAdd^0_1" [label="[]", style=dashed]; +"431 DequantizeLinear_bert/encoder/layer_0/attention/self/key/BiasAdd^0_1" -> "432 bert/encoder/layer_0/attention/self/Reshape_1" [label="[]", style=solid]; +"432 bert/encoder/layer_0/attention/self/Reshape_1" -> "433 bert/encoder/layer_0/attention/self/transpose_1" [label="[]", style=solid]; +"433 bert/encoder/layer_0/attention/self/transpose_1" -> "434 bert/encoder/layer_0/attention/self/MatMul__306" [label="[]", style=solid]; +"434 bert/encoder/layer_0/attention/self/MatMul__306" -> "435 
bert/encoder/layer_0/attention/self/MatMul" [label="[]", style=solid]; +"435 bert/encoder/layer_0/attention/self/MatMul" -> "436 bert/encoder/layer_0/attention/self/Mul" [label="[]", style=solid]; +"436 bert/encoder/layer_0/attention/self/Mul" -> "437 bert/encoder/layer_0/attention/self/add" [label="[]", style=solid]; +"437 bert/encoder/layer_0/attention/self/add" -> "438 bert/encoder/layer_0/attention/self/Softmax" [label="[]", style=solid]; +"438 bert/encoder/layer_0/attention/self/Softmax" -> "439 bert/encoder/layer_0/attention/self/MatMul_1" [label="[]", style=solid]; +"439 bert/encoder/layer_0/attention/self/MatMul_1" -> "440 QuantizeLinear_bert/encoder/layer_0/attention/self/MatMul_1^0_1" [label="[]", style=solid]; +"440 QuantizeLinear_bert/encoder/layer_0/attention/self/MatMul_1^0_1" -> "441 DequantizeLinear_bert/encoder/layer_0/attention/self/MatMul_1^0_1" [label="[]", style=dashed]; +"441 DequantizeLinear_bert/encoder/layer_0/attention/self/MatMul_1^0_1" -> "442 bert/encoder/layer_0/attention/self/transpose_3" [label="[]", style=solid]; +"442 bert/encoder/layer_0/attention/self/transpose_3" -> "443 bert/encoder/layer_0/attention/self/Reshape_3" [label="[]", style=solid]; +"443 bert/encoder/layer_0/attention/self/Reshape_3" -> "446 bert/encoder/layer_0/attention/output/dense/MatMul" [label="[]", style=solid]; +"444 QuantizeLinear_bert/encoder/layer_0/attention/output/dense/kernel^0_1" -> "445 DequantizeLinear_bert/encoder/layer_0/attention/output/dense/kernel^0_1" [label="[768, 768]", style=dashed]; +"445 DequantizeLinear_bert/encoder/layer_0/attention/output/dense/kernel^0_1" -> "446 bert/encoder/layer_0/attention/output/dense/MatMul" [label="[768, 768]", style=solid]; +"446 bert/encoder/layer_0/attention/output/dense/MatMul" -> "447 bert/encoder/layer_0/attention/output/dense/BiasAdd" [label="[]", style=solid]; +"447 bert/encoder/layer_0/attention/output/dense/BiasAdd" -> "448 bert/encoder/layer_0/attention/output/add" [label="[]", style=solid]; +"448 bert/encoder/layer_0/attention/output/add" -> "449 bert/encoder/layer_0/attention/output/LayerNorm/moments/mean" [label="[]", style=solid]; +"448 bert/encoder/layer_0/attention/output/add" -> "451 bert/encoder/layer_0/attention/output/LayerNorm/moments/SquaredDifference" [label="[]", style=solid]; +"448 bert/encoder/layer_0/attention/output/add" -> "460 bert/encoder/layer_0/attention/output/LayerNorm/batchnorm/mul_1" [label="[]", style=solid]; +"449 bert/encoder/layer_0/attention/output/LayerNorm/moments/mean" -> "450 bert/encoder/layer_0/attention/output/LayerNorm/moments/StopGradient" [label="[]", style=solid]; +"449 bert/encoder/layer_0/attention/output/LayerNorm/moments/mean" -> "458 bert/encoder/layer_0/attention/output/LayerNorm/batchnorm/mul_2" [label="[]", style=solid]; +"450 bert/encoder/layer_0/attention/output/LayerNorm/moments/StopGradient" -> "451 bert/encoder/layer_0/attention/output/LayerNorm/moments/SquaredDifference" [label="[]", style=solid]; +"451 bert/encoder/layer_0/attention/output/LayerNorm/moments/SquaredDifference" -> "452 bert/encoder/layer_0/attention/output/LayerNorm/moments/SquaredDifference__309" [label="[]", style=solid]; +"452 bert/encoder/layer_0/attention/output/LayerNorm/moments/SquaredDifference__309" -> "453 bert/encoder/layer_0/attention/output/LayerNorm/moments/variance" [label="[]", style=solid]; +"453 bert/encoder/layer_0/attention/output/LayerNorm/moments/variance" -> "454 bert/encoder/layer_0/attention/output/LayerNorm/batchnorm/add" [label="[]", style=solid]; +"454 
bert/encoder/layer_0/attention/output/LayerNorm/batchnorm/add" -> "455 bert/encoder/layer_0/attention/output/LayerNorm/batchnorm/Rsqrt" [label="[]", style=solid]; +"455 bert/encoder/layer_0/attention/output/LayerNorm/batchnorm/Rsqrt" -> "456 bert/encoder/layer_0/attention/output/LayerNorm/batchnorm/Rsqrt__311" [label="[]", style=solid]; +"456 bert/encoder/layer_0/attention/output/LayerNorm/batchnorm/Rsqrt__311" -> "457 bert/encoder/layer_0/attention/output/LayerNorm/batchnorm/mul" [label="[]", style=solid]; +"457 bert/encoder/layer_0/attention/output/LayerNorm/batchnorm/mul" -> "458 bert/encoder/layer_0/attention/output/LayerNorm/batchnorm/mul_2" [label="[]", style=solid]; +"457 bert/encoder/layer_0/attention/output/LayerNorm/batchnorm/mul" -> "460 bert/encoder/layer_0/attention/output/LayerNorm/batchnorm/mul_1" [label="[]", style=solid]; +"458 bert/encoder/layer_0/attention/output/LayerNorm/batchnorm/mul_2" -> "459 bert/encoder/layer_0/attention/output/LayerNorm/batchnorm/sub" [label="[]", style=solid]; +"459 bert/encoder/layer_0/attention/output/LayerNorm/batchnorm/sub" -> "461 bert/encoder/layer_0/attention/output/LayerNorm/batchnorm/add_1" [label="[]", style=solid]; +"460 bert/encoder/layer_0/attention/output/LayerNorm/batchnorm/mul_1" -> "461 bert/encoder/layer_0/attention/output/LayerNorm/batchnorm/add_1" [label="[]", style=solid]; +"461 bert/encoder/layer_0/attention/output/LayerNorm/batchnorm/add_1" -> "462 QuantizeLinear_bert/encoder/layer_0/attention/output/LayerNorm/batchnorm/add_1^0_1" [label="[]", style=solid]; +"461 bert/encoder/layer_0/attention/output/LayerNorm/batchnorm/add_1" -> "482 bert/encoder/layer_0/output/add" [label="[]", style=solid]; +"462 QuantizeLinear_bert/encoder/layer_0/attention/output/LayerNorm/batchnorm/add_1^0_1" -> "463 DequantizeLinear_bert/encoder/layer_0/attention/output/LayerNorm/batchnorm/add_1^0_1" [label="[]", style=dashed]; +"463 DequantizeLinear_bert/encoder/layer_0/attention/output/LayerNorm/batchnorm/add_1^0_1" -> "466 bert/encoder/layer_0/intermediate/dense/MatMul" [label="[]", style=solid]; +"464 QuantizeLinear_bert/encoder/layer_0/intermediate/dense/kernel^0_1" -> "465 DequantizeLinear_bert/encoder/layer_0/intermediate/dense/kernel^0_1" [label="[768, 3072]", style=dashed]; +"465 DequantizeLinear_bert/encoder/layer_0/intermediate/dense/kernel^0_1" -> "466 bert/encoder/layer_0/intermediate/dense/MatMul" [label="[768, 3072]", style=solid]; +"466 bert/encoder/layer_0/intermediate/dense/MatMul" -> "467 bert/encoder/layer_0/intermediate/dense/BiasAdd" [label="[]", style=solid]; +"467 bert/encoder/layer_0/intermediate/dense/BiasAdd" -> "468 bert/encoder/layer_0/intermediate/dense/Pow" [label="[]", style=solid]; +"467 bert/encoder/layer_0/intermediate/dense/BiasAdd" -> "470 bert/encoder/layer_0/intermediate/dense/add" [label="[]", style=solid]; +"467 bert/encoder/layer_0/intermediate/dense/BiasAdd" -> "475 bert/encoder/layer_0/intermediate/dense/mul_3" [label="[]", style=solid]; +"468 bert/encoder/layer_0/intermediate/dense/Pow" -> "469 bert/encoder/layer_0/intermediate/dense/mul" [label="[]", style=solid]; +"469 bert/encoder/layer_0/intermediate/dense/mul" -> "470 bert/encoder/layer_0/intermediate/dense/add" [label="[]", style=solid]; +"470 bert/encoder/layer_0/intermediate/dense/add" -> "471 bert/encoder/layer_0/intermediate/dense/mul_1" [label="[]", style=solid]; +"471 bert/encoder/layer_0/intermediate/dense/mul_1" -> "472 bert/encoder/layer_0/intermediate/dense/Tanh" [label="[]", style=solid]; +"472 
bert/encoder/layer_0/intermediate/dense/Tanh" -> "473 bert/encoder/layer_0/intermediate/dense/add_1" [label="[]", style=solid]; +"473 bert/encoder/layer_0/intermediate/dense/add_1" -> "474 bert/encoder/layer_0/intermediate/dense/mul_2" [label="[]", style=solid]; +"474 bert/encoder/layer_0/intermediate/dense/mul_2" -> "475 bert/encoder/layer_0/intermediate/dense/mul_3" [label="[]", style=solid]; +"475 bert/encoder/layer_0/intermediate/dense/mul_3" -> "476 QuantizeLinear_bert/encoder/layer_0/intermediate/dense/mul_3^0_1" [label="[]", style=solid]; +"476 QuantizeLinear_bert/encoder/layer_0/intermediate/dense/mul_3^0_1" -> "477 DequantizeLinear_bert/encoder/layer_0/intermediate/dense/mul_3^0_1" [label="[]", style=dashed]; +"477 DequantizeLinear_bert/encoder/layer_0/intermediate/dense/mul_3^0_1" -> "480 bert/encoder/layer_0/output/dense/MatMul" [label="[]", style=solid]; +"478 QuantizeLinear_bert/encoder/layer_0/output/dense/kernel^0_1" -> "479 DequantizeLinear_bert/encoder/layer_0/output/dense/kernel^0_1" [label="[3072, 768]", style=dashed]; +"479 DequantizeLinear_bert/encoder/layer_0/output/dense/kernel^0_1" -> "480 bert/encoder/layer_0/output/dense/MatMul" [label="[3072, 768]", style=solid]; +"480 bert/encoder/layer_0/output/dense/MatMul" -> "481 bert/encoder/layer_0/output/dense/BiasAdd" [label="[]", style=solid]; +"481 bert/encoder/layer_0/output/dense/BiasAdd" -> "482 bert/encoder/layer_0/output/add" [label="[]", style=solid]; +"482 bert/encoder/layer_0/output/add" -> "483 bert/encoder/layer_0/output/LayerNorm/moments/mean" [label="[]", style=solid]; +"482 bert/encoder/layer_0/output/add" -> "485 bert/encoder/layer_0/output/LayerNorm/moments/SquaredDifference" [label="[]", style=solid]; +"482 bert/encoder/layer_0/output/add" -> "494 bert/encoder/layer_0/output/LayerNorm/batchnorm/mul_1" [label="[]", style=solid]; +"483 bert/encoder/layer_0/output/LayerNorm/moments/mean" -> "484 bert/encoder/layer_0/output/LayerNorm/moments/StopGradient" [label="[]", style=solid]; +"483 bert/encoder/layer_0/output/LayerNorm/moments/mean" -> "492 bert/encoder/layer_0/output/LayerNorm/batchnorm/mul_2" [label="[]", style=solid]; +"484 bert/encoder/layer_0/output/LayerNorm/moments/StopGradient" -> "485 bert/encoder/layer_0/output/LayerNorm/moments/SquaredDifference" [label="[]", style=solid]; +"485 bert/encoder/layer_0/output/LayerNorm/moments/SquaredDifference" -> "486 bert/encoder/layer_0/output/LayerNorm/moments/SquaredDifference__313" [label="[]", style=solid]; +"486 bert/encoder/layer_0/output/LayerNorm/moments/SquaredDifference__313" -> "487 bert/encoder/layer_0/output/LayerNorm/moments/variance" [label="[]", style=solid]; +"487 bert/encoder/layer_0/output/LayerNorm/moments/variance" -> "488 bert/encoder/layer_0/output/LayerNorm/batchnorm/add" [label="[]", style=solid]; +"488 bert/encoder/layer_0/output/LayerNorm/batchnorm/add" -> "489 bert/encoder/layer_0/output/LayerNorm/batchnorm/Rsqrt" [label="[]", style=solid]; +"489 bert/encoder/layer_0/output/LayerNorm/batchnorm/Rsqrt" -> "490 bert/encoder/layer_0/output/LayerNorm/batchnorm/Rsqrt__315" [label="[]", style=solid]; +"490 bert/encoder/layer_0/output/LayerNorm/batchnorm/Rsqrt__315" -> "491 bert/encoder/layer_0/output/LayerNorm/batchnorm/mul" [label="[]", style=solid]; +"491 bert/encoder/layer_0/output/LayerNorm/batchnorm/mul" -> "492 bert/encoder/layer_0/output/LayerNorm/batchnorm/mul_2" [label="[]", style=solid]; +"491 bert/encoder/layer_0/output/LayerNorm/batchnorm/mul" -> "494 bert/encoder/layer_0/output/LayerNorm/batchnorm/mul_1" [label="[]", 
style=solid]; +"492 bert/encoder/layer_0/output/LayerNorm/batchnorm/mul_2" -> "493 bert/encoder/layer_0/output/LayerNorm/batchnorm/sub" [label="[]", style=solid]; +"493 bert/encoder/layer_0/output/LayerNorm/batchnorm/sub" -> "495 bert/encoder/layer_0/output/LayerNorm/batchnorm/add_1" [label="[]", style=solid]; +"494 bert/encoder/layer_0/output/LayerNorm/batchnorm/mul_1" -> "495 bert/encoder/layer_0/output/LayerNorm/batchnorm/add_1" [label="[]", style=solid]; +"495 bert/encoder/layer_0/output/LayerNorm/batchnorm/add_1" -> "496 QuantizeLinear_bert/encoder/layer_0/output/LayerNorm/batchnorm/add_1^0_3" [label="[]", style=solid]; +"495 bert/encoder/layer_0/output/LayerNorm/batchnorm/add_1" -> "498 QuantizeLinear_bert/encoder/layer_0/output/LayerNorm/batchnorm/add_1^0_2" [label="[]", style=solid]; +"495 bert/encoder/layer_0/output/LayerNorm/batchnorm/add_1" -> "500 QuantizeLinear_bert/encoder/layer_0/output/LayerNorm/batchnorm/add_1^0_1" [label="[]", style=solid]; +"495 bert/encoder/layer_0/output/LayerNorm/batchnorm/add_1" -> "538 bert/encoder/layer_1/attention/output/add" [label="[]", style=solid]; +"496 QuantizeLinear_bert/encoder/layer_0/output/LayerNorm/batchnorm/add_1^0_3" -> "497 DequantizeLinear_bert/encoder/layer_0/output/LayerNorm/batchnorm/add_1^0_3" [label="[]", style=dashed]; +"497 DequantizeLinear_bert/encoder/layer_0/output/LayerNorm/batchnorm/add_1^0_3" -> "518 bert/encoder/layer_1/attention/self/key/MatMul" [label="[]", style=solid]; +"498 QuantizeLinear_bert/encoder/layer_0/output/LayerNorm/batchnorm/add_1^0_2" -> "499 DequantizeLinear_bert/encoder/layer_0/output/LayerNorm/batchnorm/add_1^0_2" [label="[]", style=dashed]; +"499 DequantizeLinear_bert/encoder/layer_0/output/LayerNorm/batchnorm/add_1^0_2" -> "510 bert/encoder/layer_1/attention/self/query/MatMul" [label="[]", style=solid]; +"500 QuantizeLinear_bert/encoder/layer_0/output/LayerNorm/batchnorm/add_1^0_1" -> "501 DequantizeLinear_bert/encoder/layer_0/output/LayerNorm/batchnorm/add_1^0_1" [label="[]", style=dashed]; +"501 DequantizeLinear_bert/encoder/layer_0/output/LayerNorm/batchnorm/add_1^0_1" -> "504 bert/encoder/layer_1/attention/self/value/MatMul" [label="[]", style=solid]; +"502 QuantizeLinear_bert/encoder/layer_1/attention/self/value/kernel^0_1" -> "503 DequantizeLinear_bert/encoder/layer_1/attention/self/value/kernel^0_1" [label="[768, 768]", style=dashed]; +"503 DequantizeLinear_bert/encoder/layer_1/attention/self/value/kernel^0_1" -> "504 bert/encoder/layer_1/attention/self/value/MatMul" [label="[768, 768]", style=solid]; +"504 bert/encoder/layer_1/attention/self/value/MatMul" -> "505 bert/encoder/layer_1/attention/self/value/BiasAdd" [label="[]", style=solid]; +"505 bert/encoder/layer_1/attention/self/value/BiasAdd" -> "506 bert/encoder/layer_1/attention/self/Reshape_2" [label="[]", style=solid]; +"506 bert/encoder/layer_1/attention/self/Reshape_2" -> "507 bert/encoder/layer_1/attention/self/transpose_2" [label="[]", style=solid]; +"507 bert/encoder/layer_1/attention/self/transpose_2" -> "529 bert/encoder/layer_1/attention/self/MatMul_1" [label="[]", style=solid]; +"508 QuantizeLinear_bert/encoder/layer_1/attention/self/query/kernel^0_1" -> "509 DequantizeLinear_bert/encoder/layer_1/attention/self/query/kernel^0_1" [label="[768, 768]", style=dashed]; +"509 DequantizeLinear_bert/encoder/layer_1/attention/self/query/kernel^0_1" -> "510 bert/encoder/layer_1/attention/self/query/MatMul" [label="[768, 768]", style=solid]; +"510 bert/encoder/layer_1/attention/self/query/MatMul" -> "511 
bert/encoder/layer_1/attention/self/query/BiasAdd" [label="[]", style=solid]; +"511 bert/encoder/layer_1/attention/self/query/BiasAdd" -> "512 QuantizeLinear_bert/encoder/layer_1/attention/self/query/BiasAdd^0_1" [label="[]", style=solid]; +"512 QuantizeLinear_bert/encoder/layer_1/attention/self/query/BiasAdd^0_1" -> "513 DequantizeLinear_bert/encoder/layer_1/attention/self/query/BiasAdd^0_1" [label="[]", style=dashed]; +"513 DequantizeLinear_bert/encoder/layer_1/attention/self/query/BiasAdd^0_1" -> "514 bert/encoder/layer_1/attention/self/Reshape" [label="[]", style=solid]; +"514 bert/encoder/layer_1/attention/self/Reshape" -> "515 bert/encoder/layer_1/attention/self/transpose" [label="[]", style=solid]; +"515 bert/encoder/layer_1/attention/self/transpose" -> "525 bert/encoder/layer_1/attention/self/MatMul" [label="[]", style=solid]; +"516 QuantizeLinear_bert/encoder/layer_1/attention/self/key/kernel^0_1" -> "517 DequantizeLinear_bert/encoder/layer_1/attention/self/key/kernel^0_1" [label="[768, 768]", style=dashed]; +"517 DequantizeLinear_bert/encoder/layer_1/attention/self/key/kernel^0_1" -> "518 bert/encoder/layer_1/attention/self/key/MatMul" [label="[768, 768]", style=solid]; +"518 bert/encoder/layer_1/attention/self/key/MatMul" -> "519 bert/encoder/layer_1/attention/self/key/BiasAdd" [label="[]", style=solid]; +"519 bert/encoder/layer_1/attention/self/key/BiasAdd" -> "520 QuantizeLinear_bert/encoder/layer_1/attention/self/key/BiasAdd^0_1" [label="[]", style=solid]; +"520 QuantizeLinear_bert/encoder/layer_1/attention/self/key/BiasAdd^0_1" -> "521 DequantizeLinear_bert/encoder/layer_1/attention/self/key/BiasAdd^0_1" [label="[]", style=dashed]; +"521 DequantizeLinear_bert/encoder/layer_1/attention/self/key/BiasAdd^0_1" -> "522 bert/encoder/layer_1/attention/self/Reshape_1" [label="[]", style=solid]; +"522 bert/encoder/layer_1/attention/self/Reshape_1" -> "523 bert/encoder/layer_1/attention/self/transpose_1" [label="[]", style=solid]; +"523 bert/encoder/layer_1/attention/self/transpose_1" -> "524 bert/encoder/layer_1/attention/self/MatMul__320" [label="[]", style=solid]; +"524 bert/encoder/layer_1/attention/self/MatMul__320" -> "525 bert/encoder/layer_1/attention/self/MatMul" [label="[]", style=solid]; +"525 bert/encoder/layer_1/attention/self/MatMul" -> "526 bert/encoder/layer_1/attention/self/Mul" [label="[]", style=solid]; +"526 bert/encoder/layer_1/attention/self/Mul" -> "527 bert/encoder/layer_1/attention/self/add" [label="[]", style=solid]; +"527 bert/encoder/layer_1/attention/self/add" -> "528 bert/encoder/layer_1/attention/self/Softmax" [label="[]", style=solid]; +"528 bert/encoder/layer_1/attention/self/Softmax" -> "529 bert/encoder/layer_1/attention/self/MatMul_1" [label="[]", style=solid]; +"529 bert/encoder/layer_1/attention/self/MatMul_1" -> "530 QuantizeLinear_bert/encoder/layer_1/attention/self/MatMul_1^0_1" [label="[]", style=solid]; +"530 QuantizeLinear_bert/encoder/layer_1/attention/self/MatMul_1^0_1" -> "531 DequantizeLinear_bert/encoder/layer_1/attention/self/MatMul_1^0_1" [label="[]", style=dashed]; +"531 DequantizeLinear_bert/encoder/layer_1/attention/self/MatMul_1^0_1" -> "532 bert/encoder/layer_1/attention/self/transpose_3" [label="[]", style=solid]; +"532 bert/encoder/layer_1/attention/self/transpose_3" -> "533 bert/encoder/layer_1/attention/self/Reshape_3" [label="[]", style=solid]; +"533 bert/encoder/layer_1/attention/self/Reshape_3" -> "536 bert/encoder/layer_1/attention/output/dense/MatMul" [label="[]", style=solid]; +"534 
QuantizeLinear_bert/encoder/layer_1/attention/output/dense/kernel^0_1" -> "535 DequantizeLinear_bert/encoder/layer_1/attention/output/dense/kernel^0_1" [label="[768, 768]", style=dashed]; +"535 DequantizeLinear_bert/encoder/layer_1/attention/output/dense/kernel^0_1" -> "536 bert/encoder/layer_1/attention/output/dense/MatMul" [label="[768, 768]", style=solid]; +"536 bert/encoder/layer_1/attention/output/dense/MatMul" -> "537 bert/encoder/layer_1/attention/output/dense/BiasAdd" [label="[]", style=solid]; +"537 bert/encoder/layer_1/attention/output/dense/BiasAdd" -> "538 bert/encoder/layer_1/attention/output/add" [label="[]", style=solid]; +"538 bert/encoder/layer_1/attention/output/add" -> "539 bert/encoder/layer_1/attention/output/LayerNorm/moments/mean" [label="[]", style=solid]; +"538 bert/encoder/layer_1/attention/output/add" -> "541 bert/encoder/layer_1/attention/output/LayerNorm/moments/SquaredDifference" [label="[]", style=solid]; +"538 bert/encoder/layer_1/attention/output/add" -> "550 bert/encoder/layer_1/attention/output/LayerNorm/batchnorm/mul_1" [label="[]", style=solid]; +"539 bert/encoder/layer_1/attention/output/LayerNorm/moments/mean" -> "540 bert/encoder/layer_1/attention/output/LayerNorm/moments/StopGradient" [label="[]", style=solid]; +"539 bert/encoder/layer_1/attention/output/LayerNorm/moments/mean" -> "548 bert/encoder/layer_1/attention/output/LayerNorm/batchnorm/mul_2" [label="[]", style=solid]; +"540 bert/encoder/layer_1/attention/output/LayerNorm/moments/StopGradient" -> "541 bert/encoder/layer_1/attention/output/LayerNorm/moments/SquaredDifference" [label="[]", style=solid]; +"541 bert/encoder/layer_1/attention/output/LayerNorm/moments/SquaredDifference" -> "542 bert/encoder/layer_1/attention/output/LayerNorm/moments/SquaredDifference__323" [label="[]", style=solid]; +"542 bert/encoder/layer_1/attention/output/LayerNorm/moments/SquaredDifference__323" -> "543 bert/encoder/layer_1/attention/output/LayerNorm/moments/variance" [label="[]", style=solid]; +"543 bert/encoder/layer_1/attention/output/LayerNorm/moments/variance" -> "544 bert/encoder/layer_1/attention/output/LayerNorm/batchnorm/add" [label="[]", style=solid]; +"544 bert/encoder/layer_1/attention/output/LayerNorm/batchnorm/add" -> "545 bert/encoder/layer_1/attention/output/LayerNorm/batchnorm/Rsqrt" [label="[]", style=solid]; +"545 bert/encoder/layer_1/attention/output/LayerNorm/batchnorm/Rsqrt" -> "546 bert/encoder/layer_1/attention/output/LayerNorm/batchnorm/Rsqrt__325" [label="[]", style=solid]; +"546 bert/encoder/layer_1/attention/output/LayerNorm/batchnorm/Rsqrt__325" -> "547 bert/encoder/layer_1/attention/output/LayerNorm/batchnorm/mul" [label="[]", style=solid]; +"547 bert/encoder/layer_1/attention/output/LayerNorm/batchnorm/mul" -> "548 bert/encoder/layer_1/attention/output/LayerNorm/batchnorm/mul_2" [label="[]", style=solid]; +"547 bert/encoder/layer_1/attention/output/LayerNorm/batchnorm/mul" -> "550 bert/encoder/layer_1/attention/output/LayerNorm/batchnorm/mul_1" [label="[]", style=solid]; +"548 bert/encoder/layer_1/attention/output/LayerNorm/batchnorm/mul_2" -> "549 bert/encoder/layer_1/attention/output/LayerNorm/batchnorm/sub" [label="[]", style=solid]; +"549 bert/encoder/layer_1/attention/output/LayerNorm/batchnorm/sub" -> "551 bert/encoder/layer_1/attention/output/LayerNorm/batchnorm/add_1" [label="[]", style=solid]; +"550 bert/encoder/layer_1/attention/output/LayerNorm/batchnorm/mul_1" -> "551 bert/encoder/layer_1/attention/output/LayerNorm/batchnorm/add_1" [label="[]", style=solid]; +"551 
bert/encoder/layer_1/attention/output/LayerNorm/batchnorm/add_1" -> "552 QuantizeLinear_bert/encoder/layer_1/attention/output/LayerNorm/batchnorm/add_1^0_1" [label="[]", style=solid]; +"551 bert/encoder/layer_1/attention/output/LayerNorm/batchnorm/add_1" -> "572 bert/encoder/layer_1/output/add" [label="[]", style=solid]; +"552 QuantizeLinear_bert/encoder/layer_1/attention/output/LayerNorm/batchnorm/add_1^0_1" -> "553 DequantizeLinear_bert/encoder/layer_1/attention/output/LayerNorm/batchnorm/add_1^0_1" [label="[]", style=dashed]; +"553 DequantizeLinear_bert/encoder/layer_1/attention/output/LayerNorm/batchnorm/add_1^0_1" -> "556 bert/encoder/layer_1/intermediate/dense/MatMul" [label="[]", style=solid]; +"554 QuantizeLinear_bert/encoder/layer_1/intermediate/dense/kernel^0_1" -> "555 DequantizeLinear_bert/encoder/layer_1/intermediate/dense/kernel^0_1" [label="[768, 3072]", style=dashed]; +"555 DequantizeLinear_bert/encoder/layer_1/intermediate/dense/kernel^0_1" -> "556 bert/encoder/layer_1/intermediate/dense/MatMul" [label="[768, 3072]", style=solid]; +"556 bert/encoder/layer_1/intermediate/dense/MatMul" -> "557 bert/encoder/layer_1/intermediate/dense/BiasAdd" [label="[]", style=solid]; +"557 bert/encoder/layer_1/intermediate/dense/BiasAdd" -> "558 bert/encoder/layer_1/intermediate/dense/Pow" [label="[]", style=solid]; +"557 bert/encoder/layer_1/intermediate/dense/BiasAdd" -> "560 bert/encoder/layer_1/intermediate/dense/add" [label="[]", style=solid]; +"557 bert/encoder/layer_1/intermediate/dense/BiasAdd" -> "565 bert/encoder/layer_1/intermediate/dense/mul_3" [label="[]", style=solid]; +"558 bert/encoder/layer_1/intermediate/dense/Pow" -> "559 bert/encoder/layer_1/intermediate/dense/mul" [label="[]", style=solid]; +"559 bert/encoder/layer_1/intermediate/dense/mul" -> "560 bert/encoder/layer_1/intermediate/dense/add" [label="[]", style=solid]; +"560 bert/encoder/layer_1/intermediate/dense/add" -> "561 bert/encoder/layer_1/intermediate/dense/mul_1" [label="[]", style=solid]; +"561 bert/encoder/layer_1/intermediate/dense/mul_1" -> "562 bert/encoder/layer_1/intermediate/dense/Tanh" [label="[]", style=solid]; +"562 bert/encoder/layer_1/intermediate/dense/Tanh" -> "563 bert/encoder/layer_1/intermediate/dense/add_1" [label="[]", style=solid]; +"563 bert/encoder/layer_1/intermediate/dense/add_1" -> "564 bert/encoder/layer_1/intermediate/dense/mul_2" [label="[]", style=solid]; +"564 bert/encoder/layer_1/intermediate/dense/mul_2" -> "565 bert/encoder/layer_1/intermediate/dense/mul_3" [label="[]", style=solid]; +"565 bert/encoder/layer_1/intermediate/dense/mul_3" -> "566 QuantizeLinear_bert/encoder/layer_1/intermediate/dense/mul_3^0_1" [label="[]", style=solid]; +"566 QuantizeLinear_bert/encoder/layer_1/intermediate/dense/mul_3^0_1" -> "567 DequantizeLinear_bert/encoder/layer_1/intermediate/dense/mul_3^0_1" [label="[]", style=dashed]; +"567 DequantizeLinear_bert/encoder/layer_1/intermediate/dense/mul_3^0_1" -> "570 bert/encoder/layer_1/output/dense/MatMul" [label="[]", style=solid]; +"568 QuantizeLinear_bert/encoder/layer_1/output/dense/kernel^0_1" -> "569 DequantizeLinear_bert/encoder/layer_1/output/dense/kernel^0_1" [label="[3072, 768]", style=dashed]; +"569 DequantizeLinear_bert/encoder/layer_1/output/dense/kernel^0_1" -> "570 bert/encoder/layer_1/output/dense/MatMul" [label="[3072, 768]", style=solid]; +"570 bert/encoder/layer_1/output/dense/MatMul" -> "571 bert/encoder/layer_1/output/dense/BiasAdd" [label="[]", style=solid]; +"571 bert/encoder/layer_1/output/dense/BiasAdd" -> "572 
bert/encoder/layer_1/output/add" [label="[]", style=solid]; +"572 bert/encoder/layer_1/output/add" -> "573 bert/encoder/layer_1/output/LayerNorm/moments/mean" [label="[]", style=solid]; +"572 bert/encoder/layer_1/output/add" -> "575 bert/encoder/layer_1/output/LayerNorm/moments/SquaredDifference" [label="[]", style=solid]; +"572 bert/encoder/layer_1/output/add" -> "584 bert/encoder/layer_1/output/LayerNorm/batchnorm/mul_1" [label="[]", style=solid]; +"573 bert/encoder/layer_1/output/LayerNorm/moments/mean" -> "574 bert/encoder/layer_1/output/LayerNorm/moments/StopGradient" [label="[]", style=solid]; +"573 bert/encoder/layer_1/output/LayerNorm/moments/mean" -> "582 bert/encoder/layer_1/output/LayerNorm/batchnorm/mul_2" [label="[]", style=solid]; +"574 bert/encoder/layer_1/output/LayerNorm/moments/StopGradient" -> "575 bert/encoder/layer_1/output/LayerNorm/moments/SquaredDifference" [label="[]", style=solid]; +"575 bert/encoder/layer_1/output/LayerNorm/moments/SquaredDifference" -> "576 bert/encoder/layer_1/output/LayerNorm/moments/SquaredDifference__327" [label="[]", style=solid]; +"576 bert/encoder/layer_1/output/LayerNorm/moments/SquaredDifference__327" -> "577 bert/encoder/layer_1/output/LayerNorm/moments/variance" [label="[]", style=solid]; +"577 bert/encoder/layer_1/output/LayerNorm/moments/variance" -> "578 bert/encoder/layer_1/output/LayerNorm/batchnorm/add" [label="[]", style=solid]; +"578 bert/encoder/layer_1/output/LayerNorm/batchnorm/add" -> "579 bert/encoder/layer_1/output/LayerNorm/batchnorm/Rsqrt" [label="[]", style=solid]; +"579 bert/encoder/layer_1/output/LayerNorm/batchnorm/Rsqrt" -> "580 bert/encoder/layer_1/output/LayerNorm/batchnorm/Rsqrt__329" [label="[]", style=solid]; +"580 bert/encoder/layer_1/output/LayerNorm/batchnorm/Rsqrt__329" -> "581 bert/encoder/layer_1/output/LayerNorm/batchnorm/mul" [label="[]", style=solid]; +"581 bert/encoder/layer_1/output/LayerNorm/batchnorm/mul" -> "582 bert/encoder/layer_1/output/LayerNorm/batchnorm/mul_2" [label="[]", style=solid]; +"581 bert/encoder/layer_1/output/LayerNorm/batchnorm/mul" -> "584 bert/encoder/layer_1/output/LayerNorm/batchnorm/mul_1" [label="[]", style=solid]; +"582 bert/encoder/layer_1/output/LayerNorm/batchnorm/mul_2" -> "583 bert/encoder/layer_1/output/LayerNorm/batchnorm/sub" [label="[]", style=solid]; +"583 bert/encoder/layer_1/output/LayerNorm/batchnorm/sub" -> "585 bert/encoder/layer_1/output/LayerNorm/batchnorm/add_1" [label="[]", style=solid]; +"584 bert/encoder/layer_1/output/LayerNorm/batchnorm/mul_1" -> "585 bert/encoder/layer_1/output/LayerNorm/batchnorm/add_1" [label="[]", style=solid]; +"585 bert/encoder/layer_1/output/LayerNorm/batchnorm/add_1" -> "586 QuantizeLinear_bert/encoder/layer_1/output/LayerNorm/batchnorm/add_1^0_3" [label="[]", style=solid]; +"585 bert/encoder/layer_1/output/LayerNorm/batchnorm/add_1" -> "588 QuantizeLinear_bert/encoder/layer_1/output/LayerNorm/batchnorm/add_1^0_2" [label="[]", style=solid]; +"585 bert/encoder/layer_1/output/LayerNorm/batchnorm/add_1" -> "590 QuantizeLinear_bert/encoder/layer_1/output/LayerNorm/batchnorm/add_1^0_1" [label="[]", style=solid]; +"585 bert/encoder/layer_1/output/LayerNorm/batchnorm/add_1" -> "628 bert/encoder/layer_2/attention/output/add" [label="[]", style=solid]; +"586 QuantizeLinear_bert/encoder/layer_1/output/LayerNorm/batchnorm/add_1^0_3" -> "587 DequantizeLinear_bert/encoder/layer_1/output/LayerNorm/batchnorm/add_1^0_3" [label="[]", style=dashed]; +"587 DequantizeLinear_bert/encoder/layer_1/output/LayerNorm/batchnorm/add_1^0_3" -> "608 
bert/encoder/layer_2/attention/self/key/MatMul" [label="[]", style=solid]; +"588 QuantizeLinear_bert/encoder/layer_1/output/LayerNorm/batchnorm/add_1^0_2" -> "589 DequantizeLinear_bert/encoder/layer_1/output/LayerNorm/batchnorm/add_1^0_2" [label="[]", style=dashed]; +"589 DequantizeLinear_bert/encoder/layer_1/output/LayerNorm/batchnorm/add_1^0_2" -> "600 bert/encoder/layer_2/attention/self/query/MatMul" [label="[]", style=solid]; +"590 QuantizeLinear_bert/encoder/layer_1/output/LayerNorm/batchnorm/add_1^0_1" -> "591 DequantizeLinear_bert/encoder/layer_1/output/LayerNorm/batchnorm/add_1^0_1" [label="[]", style=dashed]; +"591 DequantizeLinear_bert/encoder/layer_1/output/LayerNorm/batchnorm/add_1^0_1" -> "594 bert/encoder/layer_2/attention/self/value/MatMul" [label="[]", style=solid]; +"592 QuantizeLinear_bert/encoder/layer_2/attention/self/value/kernel^0_1" -> "593 DequantizeLinear_bert/encoder/layer_2/attention/self/value/kernel^0_1" [label="[768, 768]", style=dashed]; +"593 DequantizeLinear_bert/encoder/layer_2/attention/self/value/kernel^0_1" -> "594 bert/encoder/layer_2/attention/self/value/MatMul" [label="[768, 768]", style=solid]; +"594 bert/encoder/layer_2/attention/self/value/MatMul" -> "595 bert/encoder/layer_2/attention/self/value/BiasAdd" [label="[]", style=solid]; +"595 bert/encoder/layer_2/attention/self/value/BiasAdd" -> "596 bert/encoder/layer_2/attention/self/Reshape_2" [label="[]", style=solid]; +"596 bert/encoder/layer_2/attention/self/Reshape_2" -> "597 bert/encoder/layer_2/attention/self/transpose_2" [label="[]", style=solid]; +"597 bert/encoder/layer_2/attention/self/transpose_2" -> "619 bert/encoder/layer_2/attention/self/MatMul_1" [label="[]", style=solid]; +"598 QuantizeLinear_bert/encoder/layer_2/attention/self/query/kernel^0_1" -> "599 DequantizeLinear_bert/encoder/layer_2/attention/self/query/kernel^0_1" [label="[768, 768]", style=dashed]; +"599 DequantizeLinear_bert/encoder/layer_2/attention/self/query/kernel^0_1" -> "600 bert/encoder/layer_2/attention/self/query/MatMul" [label="[768, 768]", style=solid]; +"600 bert/encoder/layer_2/attention/self/query/MatMul" -> "601 bert/encoder/layer_2/attention/self/query/BiasAdd" [label="[]", style=solid]; +"601 bert/encoder/layer_2/attention/self/query/BiasAdd" -> "602 QuantizeLinear_bert/encoder/layer_2/attention/self/query/BiasAdd^0_1" [label="[]", style=solid]; +"602 QuantizeLinear_bert/encoder/layer_2/attention/self/query/BiasAdd^0_1" -> "603 DequantizeLinear_bert/encoder/layer_2/attention/self/query/BiasAdd^0_1" [label="[]", style=dashed]; +"603 DequantizeLinear_bert/encoder/layer_2/attention/self/query/BiasAdd^0_1" -> "604 bert/encoder/layer_2/attention/self/Reshape" [label="[]", style=solid]; +"604 bert/encoder/layer_2/attention/self/Reshape" -> "605 bert/encoder/layer_2/attention/self/transpose" [label="[]", style=solid]; +"605 bert/encoder/layer_2/attention/self/transpose" -> "615 bert/encoder/layer_2/attention/self/MatMul" [label="[]", style=solid]; +"606 QuantizeLinear_bert/encoder/layer_2/attention/self/key/kernel^0_1" -> "607 DequantizeLinear_bert/encoder/layer_2/attention/self/key/kernel^0_1" [label="[768, 768]", style=dashed]; +"607 DequantizeLinear_bert/encoder/layer_2/attention/self/key/kernel^0_1" -> "608 bert/encoder/layer_2/attention/self/key/MatMul" [label="[768, 768]", style=solid]; +"608 bert/encoder/layer_2/attention/self/key/MatMul" -> "609 bert/encoder/layer_2/attention/self/key/BiasAdd" [label="[]", style=solid]; +"609 bert/encoder/layer_2/attention/self/key/BiasAdd" -> "610 
QuantizeLinear_bert/encoder/layer_2/attention/self/key/BiasAdd^0_1" [label="[]", style=solid]; +"610 QuantizeLinear_bert/encoder/layer_2/attention/self/key/BiasAdd^0_1" -> "611 DequantizeLinear_bert/encoder/layer_2/attention/self/key/BiasAdd^0_1" [label="[]", style=dashed]; +"611 DequantizeLinear_bert/encoder/layer_2/attention/self/key/BiasAdd^0_1" -> "612 bert/encoder/layer_2/attention/self/Reshape_1" [label="[]", style=solid]; +"612 bert/encoder/layer_2/attention/self/Reshape_1" -> "613 bert/encoder/layer_2/attention/self/transpose_1" [label="[]", style=solid]; +"613 bert/encoder/layer_2/attention/self/transpose_1" -> "614 bert/encoder/layer_2/attention/self/MatMul__334" [label="[]", style=solid]; +"614 bert/encoder/layer_2/attention/self/MatMul__334" -> "615 bert/encoder/layer_2/attention/self/MatMul" [label="[]", style=solid]; +"615 bert/encoder/layer_2/attention/self/MatMul" -> "616 bert/encoder/layer_2/attention/self/Mul" [label="[]", style=solid]; +"616 bert/encoder/layer_2/attention/self/Mul" -> "617 bert/encoder/layer_2/attention/self/add" [label="[]", style=solid]; +"617 bert/encoder/layer_2/attention/self/add" -> "618 bert/encoder/layer_2/attention/self/Softmax" [label="[]", style=solid]; +"618 bert/encoder/layer_2/attention/self/Softmax" -> "619 bert/encoder/layer_2/attention/self/MatMul_1" [label="[]", style=solid]; +"619 bert/encoder/layer_2/attention/self/MatMul_1" -> "620 QuantizeLinear_bert/encoder/layer_2/attention/self/MatMul_1^0_1" [label="[]", style=solid]; +"620 QuantizeLinear_bert/encoder/layer_2/attention/self/MatMul_1^0_1" -> "621 DequantizeLinear_bert/encoder/layer_2/attention/self/MatMul_1^0_1" [label="[]", style=dashed]; +"621 DequantizeLinear_bert/encoder/layer_2/attention/self/MatMul_1^0_1" -> "622 bert/encoder/layer_2/attention/self/transpose_3" [label="[]", style=solid]; +"622 bert/encoder/layer_2/attention/self/transpose_3" -> "623 bert/encoder/layer_2/attention/self/Reshape_3" [label="[]", style=solid]; +"623 bert/encoder/layer_2/attention/self/Reshape_3" -> "626 bert/encoder/layer_2/attention/output/dense/MatMul" [label="[]", style=solid]; +"624 QuantizeLinear_bert/encoder/layer_2/attention/output/dense/kernel^0_1" -> "625 DequantizeLinear_bert/encoder/layer_2/attention/output/dense/kernel^0_1" [label="[768, 768]", style=dashed]; +"625 DequantizeLinear_bert/encoder/layer_2/attention/output/dense/kernel^0_1" -> "626 bert/encoder/layer_2/attention/output/dense/MatMul" [label="[768, 768]", style=solid]; +"626 bert/encoder/layer_2/attention/output/dense/MatMul" -> "627 bert/encoder/layer_2/attention/output/dense/BiasAdd" [label="[]", style=solid]; +"627 bert/encoder/layer_2/attention/output/dense/BiasAdd" -> "628 bert/encoder/layer_2/attention/output/add" [label="[]", style=solid]; +"628 bert/encoder/layer_2/attention/output/add" -> "629 bert/encoder/layer_2/attention/output/LayerNorm/moments/mean" [label="[]", style=solid]; +"628 bert/encoder/layer_2/attention/output/add" -> "631 bert/encoder/layer_2/attention/output/LayerNorm/moments/SquaredDifference" [label="[]", style=solid]; +"628 bert/encoder/layer_2/attention/output/add" -> "640 bert/encoder/layer_2/attention/output/LayerNorm/batchnorm/mul_1" [label="[]", style=solid]; +"629 bert/encoder/layer_2/attention/output/LayerNorm/moments/mean" -> "630 bert/encoder/layer_2/attention/output/LayerNorm/moments/StopGradient" [label="[]", style=solid]; +"629 bert/encoder/layer_2/attention/output/LayerNorm/moments/mean" -> "638 bert/encoder/layer_2/attention/output/LayerNorm/batchnorm/mul_2" [label="[]", 
style=solid]; +"630 bert/encoder/layer_2/attention/output/LayerNorm/moments/StopGradient" -> "631 bert/encoder/layer_2/attention/output/LayerNorm/moments/SquaredDifference" [label="[]", style=solid]; +"631 bert/encoder/layer_2/attention/output/LayerNorm/moments/SquaredDifference" -> "632 bert/encoder/layer_2/attention/output/LayerNorm/moments/SquaredDifference__337" [label="[]", style=solid]; +"632 bert/encoder/layer_2/attention/output/LayerNorm/moments/SquaredDifference__337" -> "633 bert/encoder/layer_2/attention/output/LayerNorm/moments/variance" [label="[]", style=solid]; +"633 bert/encoder/layer_2/attention/output/LayerNorm/moments/variance" -> "634 bert/encoder/layer_2/attention/output/LayerNorm/batchnorm/add" [label="[]", style=solid]; +"634 bert/encoder/layer_2/attention/output/LayerNorm/batchnorm/add" -> "635 bert/encoder/layer_2/attention/output/LayerNorm/batchnorm/Rsqrt" [label="[]", style=solid]; +"635 bert/encoder/layer_2/attention/output/LayerNorm/batchnorm/Rsqrt" -> "636 bert/encoder/layer_2/attention/output/LayerNorm/batchnorm/Rsqrt__339" [label="[]", style=solid]; +"636 bert/encoder/layer_2/attention/output/LayerNorm/batchnorm/Rsqrt__339" -> "637 bert/encoder/layer_2/attention/output/LayerNorm/batchnorm/mul" [label="[]", style=solid]; +"637 bert/encoder/layer_2/attention/output/LayerNorm/batchnorm/mul" -> "638 bert/encoder/layer_2/attention/output/LayerNorm/batchnorm/mul_2" [label="[]", style=solid]; +"637 bert/encoder/layer_2/attention/output/LayerNorm/batchnorm/mul" -> "640 bert/encoder/layer_2/attention/output/LayerNorm/batchnorm/mul_1" [label="[]", style=solid]; +"638 bert/encoder/layer_2/attention/output/LayerNorm/batchnorm/mul_2" -> "639 bert/encoder/layer_2/attention/output/LayerNorm/batchnorm/sub" [label="[]", style=solid]; +"639 bert/encoder/layer_2/attention/output/LayerNorm/batchnorm/sub" -> "641 bert/encoder/layer_2/attention/output/LayerNorm/batchnorm/add_1" [label="[]", style=solid]; +"640 bert/encoder/layer_2/attention/output/LayerNorm/batchnorm/mul_1" -> "641 bert/encoder/layer_2/attention/output/LayerNorm/batchnorm/add_1" [label="[]", style=solid]; +"641 bert/encoder/layer_2/attention/output/LayerNorm/batchnorm/add_1" -> "642 QuantizeLinear_bert/encoder/layer_2/attention/output/LayerNorm/batchnorm/add_1^0_1" [label="[]", style=solid]; +"641 bert/encoder/layer_2/attention/output/LayerNorm/batchnorm/add_1" -> "662 bert/encoder/layer_2/output/add" [label="[]", style=solid]; +"642 QuantizeLinear_bert/encoder/layer_2/attention/output/LayerNorm/batchnorm/add_1^0_1" -> "643 DequantizeLinear_bert/encoder/layer_2/attention/output/LayerNorm/batchnorm/add_1^0_1" [label="[]", style=dashed]; +"643 DequantizeLinear_bert/encoder/layer_2/attention/output/LayerNorm/batchnorm/add_1^0_1" -> "646 bert/encoder/layer_2/intermediate/dense/MatMul" [label="[]", style=solid]; +"644 QuantizeLinear_bert/encoder/layer_2/intermediate/dense/kernel^0_1" -> "645 DequantizeLinear_bert/encoder/layer_2/intermediate/dense/kernel^0_1" [label="[768, 3072]", style=dashed]; +"645 DequantizeLinear_bert/encoder/layer_2/intermediate/dense/kernel^0_1" -> "646 bert/encoder/layer_2/intermediate/dense/MatMul" [label="[768, 3072]", style=solid]; +"646 bert/encoder/layer_2/intermediate/dense/MatMul" -> "647 bert/encoder/layer_2/intermediate/dense/BiasAdd" [label="[]", style=solid]; +"647 bert/encoder/layer_2/intermediate/dense/BiasAdd" -> "648 bert/encoder/layer_2/intermediate/dense/Pow" [label="[]", style=solid]; +"647 bert/encoder/layer_2/intermediate/dense/BiasAdd" -> "650 
bert/encoder/layer_2/intermediate/dense/add" [label="[]", style=solid]; +"647 bert/encoder/layer_2/intermediate/dense/BiasAdd" -> "655 bert/encoder/layer_2/intermediate/dense/mul_3" [label="[]", style=solid]; +"648 bert/encoder/layer_2/intermediate/dense/Pow" -> "649 bert/encoder/layer_2/intermediate/dense/mul" [label="[]", style=solid]; +"649 bert/encoder/layer_2/intermediate/dense/mul" -> "650 bert/encoder/layer_2/intermediate/dense/add" [label="[]", style=solid]; +"650 bert/encoder/layer_2/intermediate/dense/add" -> "651 bert/encoder/layer_2/intermediate/dense/mul_1" [label="[]", style=solid]; +"651 bert/encoder/layer_2/intermediate/dense/mul_1" -> "652 bert/encoder/layer_2/intermediate/dense/Tanh" [label="[]", style=solid]; +"652 bert/encoder/layer_2/intermediate/dense/Tanh" -> "653 bert/encoder/layer_2/intermediate/dense/add_1" [label="[]", style=solid]; +"653 bert/encoder/layer_2/intermediate/dense/add_1" -> "654 bert/encoder/layer_2/intermediate/dense/mul_2" [label="[]", style=solid]; +"654 bert/encoder/layer_2/intermediate/dense/mul_2" -> "655 bert/encoder/layer_2/intermediate/dense/mul_3" [label="[]", style=solid]; +"655 bert/encoder/layer_2/intermediate/dense/mul_3" -> "656 QuantizeLinear_bert/encoder/layer_2/intermediate/dense/mul_3^0_1" [label="[]", style=solid]; +"656 QuantizeLinear_bert/encoder/layer_2/intermediate/dense/mul_3^0_1" -> "657 DequantizeLinear_bert/encoder/layer_2/intermediate/dense/mul_3^0_1" [label="[]", style=dashed]; +"657 DequantizeLinear_bert/encoder/layer_2/intermediate/dense/mul_3^0_1" -> "660 bert/encoder/layer_2/output/dense/MatMul" [label="[]", style=solid]; +"658 QuantizeLinear_bert/encoder/layer_2/output/dense/kernel^0_1" -> "659 DequantizeLinear_bert/encoder/layer_2/output/dense/kernel^0_1" [label="[3072, 768]", style=dashed]; +"659 DequantizeLinear_bert/encoder/layer_2/output/dense/kernel^0_1" -> "660 bert/encoder/layer_2/output/dense/MatMul" [label="[3072, 768]", style=solid]; +"660 bert/encoder/layer_2/output/dense/MatMul" -> "661 bert/encoder/layer_2/output/dense/BiasAdd" [label="[]", style=solid]; +"661 bert/encoder/layer_2/output/dense/BiasAdd" -> "662 bert/encoder/layer_2/output/add" [label="[]", style=solid]; +"662 bert/encoder/layer_2/output/add" -> "663 bert/encoder/layer_2/output/LayerNorm/moments/mean" [label="[]", style=solid]; +"662 bert/encoder/layer_2/output/add" -> "665 bert/encoder/layer_2/output/LayerNorm/moments/SquaredDifference" [label="[]", style=solid]; +"662 bert/encoder/layer_2/output/add" -> "674 bert/encoder/layer_2/output/LayerNorm/batchnorm/mul_1" [label="[]", style=solid]; +"663 bert/encoder/layer_2/output/LayerNorm/moments/mean" -> "664 bert/encoder/layer_2/output/LayerNorm/moments/StopGradient" [label="[]", style=solid]; +"663 bert/encoder/layer_2/output/LayerNorm/moments/mean" -> "672 bert/encoder/layer_2/output/LayerNorm/batchnorm/mul_2" [label="[]", style=solid]; +"664 bert/encoder/layer_2/output/LayerNorm/moments/StopGradient" -> "665 bert/encoder/layer_2/output/LayerNorm/moments/SquaredDifference" [label="[]", style=solid]; +"665 bert/encoder/layer_2/output/LayerNorm/moments/SquaredDifference" -> "666 bert/encoder/layer_2/output/LayerNorm/moments/SquaredDifference__341" [label="[]", style=solid]; +"666 bert/encoder/layer_2/output/LayerNorm/moments/SquaredDifference__341" -> "667 bert/encoder/layer_2/output/LayerNorm/moments/variance" [label="[]", style=solid]; +"667 bert/encoder/layer_2/output/LayerNorm/moments/variance" -> "668 bert/encoder/layer_2/output/LayerNorm/batchnorm/add" [label="[]", style=solid]; 
+"668 bert/encoder/layer_2/output/LayerNorm/batchnorm/add" -> "669 bert/encoder/layer_2/output/LayerNorm/batchnorm/Rsqrt" [label="[]", style=solid]; +"669 bert/encoder/layer_2/output/LayerNorm/batchnorm/Rsqrt" -> "670 bert/encoder/layer_2/output/LayerNorm/batchnorm/Rsqrt__343" [label="[]", style=solid]; +"670 bert/encoder/layer_2/output/LayerNorm/batchnorm/Rsqrt__343" -> "671 bert/encoder/layer_2/output/LayerNorm/batchnorm/mul" [label="[]", style=solid]; +"671 bert/encoder/layer_2/output/LayerNorm/batchnorm/mul" -> "672 bert/encoder/layer_2/output/LayerNorm/batchnorm/mul_2" [label="[]", style=solid]; +"671 bert/encoder/layer_2/output/LayerNorm/batchnorm/mul" -> "674 bert/encoder/layer_2/output/LayerNorm/batchnorm/mul_1" [label="[]", style=solid]; +"672 bert/encoder/layer_2/output/LayerNorm/batchnorm/mul_2" -> "673 bert/encoder/layer_2/output/LayerNorm/batchnorm/sub" [label="[]", style=solid]; +"673 bert/encoder/layer_2/output/LayerNorm/batchnorm/sub" -> "675 bert/encoder/layer_2/output/LayerNorm/batchnorm/add_1" [label="[]", style=solid]; +"674 bert/encoder/layer_2/output/LayerNorm/batchnorm/mul_1" -> "675 bert/encoder/layer_2/output/LayerNorm/batchnorm/add_1" [label="[]", style=solid]; +"675 bert/encoder/layer_2/output/LayerNorm/batchnorm/add_1" -> "676 QuantizeLinear_bert/encoder/layer_2/output/LayerNorm/batchnorm/add_1^0_3" [label="[]", style=solid]; +"675 bert/encoder/layer_2/output/LayerNorm/batchnorm/add_1" -> "678 QuantizeLinear_bert/encoder/layer_2/output/LayerNorm/batchnorm/add_1^0_2" [label="[]", style=solid]; +"675 bert/encoder/layer_2/output/LayerNorm/batchnorm/add_1" -> "680 QuantizeLinear_bert/encoder/layer_2/output/LayerNorm/batchnorm/add_1^0_1" [label="[]", style=solid]; +"675 bert/encoder/layer_2/output/LayerNorm/batchnorm/add_1" -> "718 bert/encoder/layer_3/attention/output/add" [label="[]", style=solid]; +"676 QuantizeLinear_bert/encoder/layer_2/output/LayerNorm/batchnorm/add_1^0_3" -> "677 DequantizeLinear_bert/encoder/layer_2/output/LayerNorm/batchnorm/add_1^0_3" [label="[]", style=dashed]; +"677 DequantizeLinear_bert/encoder/layer_2/output/LayerNorm/batchnorm/add_1^0_3" -> "698 bert/encoder/layer_3/attention/self/key/MatMul" [label="[]", style=solid]; +"678 QuantizeLinear_bert/encoder/layer_2/output/LayerNorm/batchnorm/add_1^0_2" -> "679 DequantizeLinear_bert/encoder/layer_2/output/LayerNorm/batchnorm/add_1^0_2" [label="[]", style=dashed]; +"679 DequantizeLinear_bert/encoder/layer_2/output/LayerNorm/batchnorm/add_1^0_2" -> "690 bert/encoder/layer_3/attention/self/query/MatMul" [label="[]", style=solid]; +"680 QuantizeLinear_bert/encoder/layer_2/output/LayerNorm/batchnorm/add_1^0_1" -> "681 DequantizeLinear_bert/encoder/layer_2/output/LayerNorm/batchnorm/add_1^0_1" [label="[]", style=dashed]; +"681 DequantizeLinear_bert/encoder/layer_2/output/LayerNorm/batchnorm/add_1^0_1" -> "684 bert/encoder/layer_3/attention/self/value/MatMul" [label="[]", style=solid]; +"682 QuantizeLinear_bert/encoder/layer_3/attention/self/value/kernel^0_1" -> "683 DequantizeLinear_bert/encoder/layer_3/attention/self/value/kernel^0_1" [label="[768, 768]", style=dashed]; +"683 DequantizeLinear_bert/encoder/layer_3/attention/self/value/kernel^0_1" -> "684 bert/encoder/layer_3/attention/self/value/MatMul" [label="[768, 768]", style=solid]; +"684 bert/encoder/layer_3/attention/self/value/MatMul" -> "685 bert/encoder/layer_3/attention/self/value/BiasAdd" [label="[]", style=solid]; +"685 bert/encoder/layer_3/attention/self/value/BiasAdd" -> "686 bert/encoder/layer_3/attention/self/Reshape_2" 
[label="[]", style=solid]; +"686 bert/encoder/layer_3/attention/self/Reshape_2" -> "687 bert/encoder/layer_3/attention/self/transpose_2" [label="[]", style=solid]; +"687 bert/encoder/layer_3/attention/self/transpose_2" -> "709 bert/encoder/layer_3/attention/self/MatMul_1" [label="[]", style=solid]; +"688 QuantizeLinear_bert/encoder/layer_3/attention/self/query/kernel^0_1" -> "689 DequantizeLinear_bert/encoder/layer_3/attention/self/query/kernel^0_1" [label="[768, 768]", style=dashed]; +"689 DequantizeLinear_bert/encoder/layer_3/attention/self/query/kernel^0_1" -> "690 bert/encoder/layer_3/attention/self/query/MatMul" [label="[768, 768]", style=solid]; +"690 bert/encoder/layer_3/attention/self/query/MatMul" -> "691 bert/encoder/layer_3/attention/self/query/BiasAdd" [label="[]", style=solid]; +"691 bert/encoder/layer_3/attention/self/query/BiasAdd" -> "692 QuantizeLinear_bert/encoder/layer_3/attention/self/query/BiasAdd^0_1" [label="[]", style=solid]; +"692 QuantizeLinear_bert/encoder/layer_3/attention/self/query/BiasAdd^0_1" -> "693 DequantizeLinear_bert/encoder/layer_3/attention/self/query/BiasAdd^0_1" [label="[]", style=dashed]; +"693 DequantizeLinear_bert/encoder/layer_3/attention/self/query/BiasAdd^0_1" -> "694 bert/encoder/layer_3/attention/self/Reshape" [label="[]", style=solid]; +"694 bert/encoder/layer_3/attention/self/Reshape" -> "695 bert/encoder/layer_3/attention/self/transpose" [label="[]", style=solid]; +"695 bert/encoder/layer_3/attention/self/transpose" -> "705 bert/encoder/layer_3/attention/self/MatMul" [label="[]", style=solid]; +"696 QuantizeLinear_bert/encoder/layer_3/attention/self/key/kernel^0_1" -> "697 DequantizeLinear_bert/encoder/layer_3/attention/self/key/kernel^0_1" [label="[768, 768]", style=dashed]; +"697 DequantizeLinear_bert/encoder/layer_3/attention/self/key/kernel^0_1" -> "698 bert/encoder/layer_3/attention/self/key/MatMul" [label="[768, 768]", style=solid]; +"698 bert/encoder/layer_3/attention/self/key/MatMul" -> "699 bert/encoder/layer_3/attention/self/key/BiasAdd" [label="[]", style=solid]; +"699 bert/encoder/layer_3/attention/self/key/BiasAdd" -> "700 QuantizeLinear_bert/encoder/layer_3/attention/self/key/BiasAdd^0_1" [label="[]", style=solid]; +"700 QuantizeLinear_bert/encoder/layer_3/attention/self/key/BiasAdd^0_1" -> "701 DequantizeLinear_bert/encoder/layer_3/attention/self/key/BiasAdd^0_1" [label="[]", style=dashed]; +"701 DequantizeLinear_bert/encoder/layer_3/attention/self/key/BiasAdd^0_1" -> "702 bert/encoder/layer_3/attention/self/Reshape_1" [label="[]", style=solid]; +"702 bert/encoder/layer_3/attention/self/Reshape_1" -> "703 bert/encoder/layer_3/attention/self/transpose_1" [label="[]", style=solid]; +"703 bert/encoder/layer_3/attention/self/transpose_1" -> "704 bert/encoder/layer_3/attention/self/MatMul__348" [label="[]", style=solid]; +"704 bert/encoder/layer_3/attention/self/MatMul__348" -> "705 bert/encoder/layer_3/attention/self/MatMul" [label="[]", style=solid]; +"705 bert/encoder/layer_3/attention/self/MatMul" -> "706 bert/encoder/layer_3/attention/self/Mul" [label="[]", style=solid]; +"706 bert/encoder/layer_3/attention/self/Mul" -> "707 bert/encoder/layer_3/attention/self/add" [label="[]", style=solid]; +"707 bert/encoder/layer_3/attention/self/add" -> "708 bert/encoder/layer_3/attention/self/Softmax" [label="[]", style=solid]; +"708 bert/encoder/layer_3/attention/self/Softmax" -> "709 bert/encoder/layer_3/attention/self/MatMul_1" [label="[]", style=solid]; +"709 bert/encoder/layer_3/attention/self/MatMul_1" -> "710 
QuantizeLinear_bert/encoder/layer_3/attention/self/MatMul_1^0_1" [label="[]", style=solid]; +"710 QuantizeLinear_bert/encoder/layer_3/attention/self/MatMul_1^0_1" -> "711 DequantizeLinear_bert/encoder/layer_3/attention/self/MatMul_1^0_1" [label="[]", style=dashed]; +"711 DequantizeLinear_bert/encoder/layer_3/attention/self/MatMul_1^0_1" -> "712 bert/encoder/layer_3/attention/self/transpose_3" [label="[]", style=solid]; +"712 bert/encoder/layer_3/attention/self/transpose_3" -> "713 bert/encoder/layer_3/attention/self/Reshape_3" [label="[]", style=solid]; +"713 bert/encoder/layer_3/attention/self/Reshape_3" -> "716 bert/encoder/layer_3/attention/output/dense/MatMul" [label="[]", style=solid]; +"714 QuantizeLinear_bert/encoder/layer_3/attention/output/dense/kernel^0_1" -> "715 DequantizeLinear_bert/encoder/layer_3/attention/output/dense/kernel^0_1" [label="[768, 768]", style=dashed]; +"715 DequantizeLinear_bert/encoder/layer_3/attention/output/dense/kernel^0_1" -> "716 bert/encoder/layer_3/attention/output/dense/MatMul" [label="[768, 768]", style=solid]; +"716 bert/encoder/layer_3/attention/output/dense/MatMul" -> "717 bert/encoder/layer_3/attention/output/dense/BiasAdd" [label="[]", style=solid]; +"717 bert/encoder/layer_3/attention/output/dense/BiasAdd" -> "718 bert/encoder/layer_3/attention/output/add" [label="[]", style=solid]; +"718 bert/encoder/layer_3/attention/output/add" -> "719 bert/encoder/layer_3/attention/output/LayerNorm/moments/mean" [label="[]", style=solid]; +"718 bert/encoder/layer_3/attention/output/add" -> "721 bert/encoder/layer_3/attention/output/LayerNorm/moments/SquaredDifference" [label="[]", style=solid]; +"718 bert/encoder/layer_3/attention/output/add" -> "730 bert/encoder/layer_3/attention/output/LayerNorm/batchnorm/mul_1" [label="[]", style=solid]; +"719 bert/encoder/layer_3/attention/output/LayerNorm/moments/mean" -> "720 bert/encoder/layer_3/attention/output/LayerNorm/moments/StopGradient" [label="[]", style=solid]; +"719 bert/encoder/layer_3/attention/output/LayerNorm/moments/mean" -> "728 bert/encoder/layer_3/attention/output/LayerNorm/batchnorm/mul_2" [label="[]", style=solid]; +"720 bert/encoder/layer_3/attention/output/LayerNorm/moments/StopGradient" -> "721 bert/encoder/layer_3/attention/output/LayerNorm/moments/SquaredDifference" [label="[]", style=solid]; +"721 bert/encoder/layer_3/attention/output/LayerNorm/moments/SquaredDifference" -> "722 bert/encoder/layer_3/attention/output/LayerNorm/moments/SquaredDifference__351" [label="[]", style=solid]; +"722 bert/encoder/layer_3/attention/output/LayerNorm/moments/SquaredDifference__351" -> "723 bert/encoder/layer_3/attention/output/LayerNorm/moments/variance" [label="[]", style=solid]; +"723 bert/encoder/layer_3/attention/output/LayerNorm/moments/variance" -> "724 bert/encoder/layer_3/attention/output/LayerNorm/batchnorm/add" [label="[]", style=solid]; +"724 bert/encoder/layer_3/attention/output/LayerNorm/batchnorm/add" -> "725 bert/encoder/layer_3/attention/output/LayerNorm/batchnorm/Rsqrt" [label="[]", style=solid]; +"725 bert/encoder/layer_3/attention/output/LayerNorm/batchnorm/Rsqrt" -> "726 bert/encoder/layer_3/attention/output/LayerNorm/batchnorm/Rsqrt__353" [label="[]", style=solid]; +"726 bert/encoder/layer_3/attention/output/LayerNorm/batchnorm/Rsqrt__353" -> "727 bert/encoder/layer_3/attention/output/LayerNorm/batchnorm/mul" [label="[]", style=solid]; +"727 bert/encoder/layer_3/attention/output/LayerNorm/batchnorm/mul" -> "728 bert/encoder/layer_3/attention/output/LayerNorm/batchnorm/mul_2" 
[label="[]", style=solid]; +"727 bert/encoder/layer_3/attention/output/LayerNorm/batchnorm/mul" -> "730 bert/encoder/layer_3/attention/output/LayerNorm/batchnorm/mul_1" [label="[]", style=solid]; +"728 bert/encoder/layer_3/attention/output/LayerNorm/batchnorm/mul_2" -> "729 bert/encoder/layer_3/attention/output/LayerNorm/batchnorm/sub" [label="[]", style=solid]; +"729 bert/encoder/layer_3/attention/output/LayerNorm/batchnorm/sub" -> "731 bert/encoder/layer_3/attention/output/LayerNorm/batchnorm/add_1" [label="[]", style=solid]; +"730 bert/encoder/layer_3/attention/output/LayerNorm/batchnorm/mul_1" -> "731 bert/encoder/layer_3/attention/output/LayerNorm/batchnorm/add_1" [label="[]", style=solid]; +"731 bert/encoder/layer_3/attention/output/LayerNorm/batchnorm/add_1" -> "732 QuantizeLinear_bert/encoder/layer_3/attention/output/LayerNorm/batchnorm/add_1^0_1" [label="[]", style=solid]; +"731 bert/encoder/layer_3/attention/output/LayerNorm/batchnorm/add_1" -> "752 bert/encoder/layer_3/output/add" [label="[]", style=solid]; +"732 QuantizeLinear_bert/encoder/layer_3/attention/output/LayerNorm/batchnorm/add_1^0_1" -> "733 DequantizeLinear_bert/encoder/layer_3/attention/output/LayerNorm/batchnorm/add_1^0_1" [label="[]", style=dashed]; +"733 DequantizeLinear_bert/encoder/layer_3/attention/output/LayerNorm/batchnorm/add_1^0_1" -> "736 bert/encoder/layer_3/intermediate/dense/MatMul" [label="[]", style=solid]; +"734 QuantizeLinear_bert/encoder/layer_3/intermediate/dense/kernel^0_1" -> "735 DequantizeLinear_bert/encoder/layer_3/intermediate/dense/kernel^0_1" [label="[768, 3072]", style=dashed]; +"735 DequantizeLinear_bert/encoder/layer_3/intermediate/dense/kernel^0_1" -> "736 bert/encoder/layer_3/intermediate/dense/MatMul" [label="[768, 3072]", style=solid]; +"736 bert/encoder/layer_3/intermediate/dense/MatMul" -> "737 bert/encoder/layer_3/intermediate/dense/BiasAdd" [label="[]", style=solid]; +"737 bert/encoder/layer_3/intermediate/dense/BiasAdd" -> "738 bert/encoder/layer_3/intermediate/dense/Pow" [label="[]", style=solid]; +"737 bert/encoder/layer_3/intermediate/dense/BiasAdd" -> "740 bert/encoder/layer_3/intermediate/dense/add" [label="[]", style=solid]; +"737 bert/encoder/layer_3/intermediate/dense/BiasAdd" -> "745 bert/encoder/layer_3/intermediate/dense/mul_3" [label="[]", style=solid]; +"738 bert/encoder/layer_3/intermediate/dense/Pow" -> "739 bert/encoder/layer_3/intermediate/dense/mul" [label="[]", style=solid]; +"739 bert/encoder/layer_3/intermediate/dense/mul" -> "740 bert/encoder/layer_3/intermediate/dense/add" [label="[]", style=solid]; +"740 bert/encoder/layer_3/intermediate/dense/add" -> "741 bert/encoder/layer_3/intermediate/dense/mul_1" [label="[]", style=solid]; +"741 bert/encoder/layer_3/intermediate/dense/mul_1" -> "742 bert/encoder/layer_3/intermediate/dense/Tanh" [label="[]", style=solid]; +"742 bert/encoder/layer_3/intermediate/dense/Tanh" -> "743 bert/encoder/layer_3/intermediate/dense/add_1" [label="[]", style=solid]; +"743 bert/encoder/layer_3/intermediate/dense/add_1" -> "744 bert/encoder/layer_3/intermediate/dense/mul_2" [label="[]", style=solid]; +"744 bert/encoder/layer_3/intermediate/dense/mul_2" -> "745 bert/encoder/layer_3/intermediate/dense/mul_3" [label="[]", style=solid]; +"745 bert/encoder/layer_3/intermediate/dense/mul_3" -> "746 QuantizeLinear_bert/encoder/layer_3/intermediate/dense/mul_3^0_1" [label="[]", style=solid]; +"746 QuantizeLinear_bert/encoder/layer_3/intermediate/dense/mul_3^0_1" -> "747 DequantizeLinear_bert/encoder/layer_3/intermediate/dense/mul_3^0_1" 
[label="[]", style=dashed]; +"747 DequantizeLinear_bert/encoder/layer_3/intermediate/dense/mul_3^0_1" -> "750 bert/encoder/layer_3/output/dense/MatMul" [label="[]", style=solid]; +"748 QuantizeLinear_bert/encoder/layer_3/output/dense/kernel^0_1" -> "749 DequantizeLinear_bert/encoder/layer_3/output/dense/kernel^0_1" [label="[3072, 768]", style=dashed]; +"749 DequantizeLinear_bert/encoder/layer_3/output/dense/kernel^0_1" -> "750 bert/encoder/layer_3/output/dense/MatMul" [label="[3072, 768]", style=solid]; +"750 bert/encoder/layer_3/output/dense/MatMul" -> "751 bert/encoder/layer_3/output/dense/BiasAdd" [label="[]", style=solid]; +"751 bert/encoder/layer_3/output/dense/BiasAdd" -> "752 bert/encoder/layer_3/output/add" [label="[]", style=solid]; +"752 bert/encoder/layer_3/output/add" -> "753 bert/encoder/layer_3/output/LayerNorm/moments/mean" [label="[]", style=solid]; +"752 bert/encoder/layer_3/output/add" -> "755 bert/encoder/layer_3/output/LayerNorm/moments/SquaredDifference" [label="[]", style=solid]; +"752 bert/encoder/layer_3/output/add" -> "764 bert/encoder/layer_3/output/LayerNorm/batchnorm/mul_1" [label="[]", style=solid]; +"753 bert/encoder/layer_3/output/LayerNorm/moments/mean" -> "754 bert/encoder/layer_3/output/LayerNorm/moments/StopGradient" [label="[]", style=solid]; +"753 bert/encoder/layer_3/output/LayerNorm/moments/mean" -> "762 bert/encoder/layer_3/output/LayerNorm/batchnorm/mul_2" [label="[]", style=solid]; +"754 bert/encoder/layer_3/output/LayerNorm/moments/StopGradient" -> "755 bert/encoder/layer_3/output/LayerNorm/moments/SquaredDifference" [label="[]", style=solid]; +"755 bert/encoder/layer_3/output/LayerNorm/moments/SquaredDifference" -> "756 bert/encoder/layer_3/output/LayerNorm/moments/SquaredDifference__355" [label="[]", style=solid]; +"756 bert/encoder/layer_3/output/LayerNorm/moments/SquaredDifference__355" -> "757 bert/encoder/layer_3/output/LayerNorm/moments/variance" [label="[]", style=solid]; +"757 bert/encoder/layer_3/output/LayerNorm/moments/variance" -> "758 bert/encoder/layer_3/output/LayerNorm/batchnorm/add" [label="[]", style=solid]; +"758 bert/encoder/layer_3/output/LayerNorm/batchnorm/add" -> "759 bert/encoder/layer_3/output/LayerNorm/batchnorm/Rsqrt" [label="[]", style=solid]; +"759 bert/encoder/layer_3/output/LayerNorm/batchnorm/Rsqrt" -> "760 bert/encoder/layer_3/output/LayerNorm/batchnorm/Rsqrt__357" [label="[]", style=solid]; +"760 bert/encoder/layer_3/output/LayerNorm/batchnorm/Rsqrt__357" -> "761 bert/encoder/layer_3/output/LayerNorm/batchnorm/mul" [label="[]", style=solid]; +"761 bert/encoder/layer_3/output/LayerNorm/batchnorm/mul" -> "762 bert/encoder/layer_3/output/LayerNorm/batchnorm/mul_2" [label="[]", style=solid]; +"761 bert/encoder/layer_3/output/LayerNorm/batchnorm/mul" -> "764 bert/encoder/layer_3/output/LayerNorm/batchnorm/mul_1" [label="[]", style=solid]; +"762 bert/encoder/layer_3/output/LayerNorm/batchnorm/mul_2" -> "763 bert/encoder/layer_3/output/LayerNorm/batchnorm/sub" [label="[]", style=solid]; +"763 bert/encoder/layer_3/output/LayerNorm/batchnorm/sub" -> "765 bert/encoder/layer_3/output/LayerNorm/batchnorm/add_1" [label="[]", style=solid]; +"764 bert/encoder/layer_3/output/LayerNorm/batchnorm/mul_1" -> "765 bert/encoder/layer_3/output/LayerNorm/batchnorm/add_1" [label="[]", style=solid]; +"765 bert/encoder/layer_3/output/LayerNorm/batchnorm/add_1" -> "766 QuantizeLinear_bert/encoder/layer_3/output/LayerNorm/batchnorm/add_1^0_3" [label="[]", style=solid]; +"765 bert/encoder/layer_3/output/LayerNorm/batchnorm/add_1" -> "768 
QuantizeLinear_bert/encoder/layer_3/output/LayerNorm/batchnorm/add_1^0_2" [label="[]", style=solid]; +"765 bert/encoder/layer_3/output/LayerNorm/batchnorm/add_1" -> "770 QuantizeLinear_bert/encoder/layer_3/output/LayerNorm/batchnorm/add_1^0_1" [label="[]", style=solid]; +"765 bert/encoder/layer_3/output/LayerNorm/batchnorm/add_1" -> "808 bert/encoder/layer_4/attention/output/add" [label="[]", style=solid]; +"766 QuantizeLinear_bert/encoder/layer_3/output/LayerNorm/batchnorm/add_1^0_3" -> "767 DequantizeLinear_bert/encoder/layer_3/output/LayerNorm/batchnorm/add_1^0_3" [label="[]", style=dashed]; +"767 DequantizeLinear_bert/encoder/layer_3/output/LayerNorm/batchnorm/add_1^0_3" -> "788 bert/encoder/layer_4/attention/self/key/MatMul" [label="[]", style=solid]; +"768 QuantizeLinear_bert/encoder/layer_3/output/LayerNorm/batchnorm/add_1^0_2" -> "769 DequantizeLinear_bert/encoder/layer_3/output/LayerNorm/batchnorm/add_1^0_2" [label="[]", style=dashed]; +"769 DequantizeLinear_bert/encoder/layer_3/output/LayerNorm/batchnorm/add_1^0_2" -> "780 bert/encoder/layer_4/attention/self/query/MatMul" [label="[]", style=solid]; +"770 QuantizeLinear_bert/encoder/layer_3/output/LayerNorm/batchnorm/add_1^0_1" -> "771 DequantizeLinear_bert/encoder/layer_3/output/LayerNorm/batchnorm/add_1^0_1" [label="[]", style=dashed]; +"771 DequantizeLinear_bert/encoder/layer_3/output/LayerNorm/batchnorm/add_1^0_1" -> "774 bert/encoder/layer_4/attention/self/value/MatMul" [label="[]", style=solid]; +"772 QuantizeLinear_bert/encoder/layer_4/attention/self/value/kernel^0_1" -> "773 DequantizeLinear_bert/encoder/layer_4/attention/self/value/kernel^0_1" [label="[768, 768]", style=dashed]; +"773 DequantizeLinear_bert/encoder/layer_4/attention/self/value/kernel^0_1" -> "774 bert/encoder/layer_4/attention/self/value/MatMul" [label="[768, 768]", style=solid]; +"774 bert/encoder/layer_4/attention/self/value/MatMul" -> "775 bert/encoder/layer_4/attention/self/value/BiasAdd" [label="[]", style=solid]; +"775 bert/encoder/layer_4/attention/self/value/BiasAdd" -> "776 bert/encoder/layer_4/attention/self/Reshape_2" [label="[]", style=solid]; +"776 bert/encoder/layer_4/attention/self/Reshape_2" -> "777 bert/encoder/layer_4/attention/self/transpose_2" [label="[]", style=solid]; +"777 bert/encoder/layer_4/attention/self/transpose_2" -> "799 bert/encoder/layer_4/attention/self/MatMul_1" [label="[]", style=solid]; +"778 QuantizeLinear_bert/encoder/layer_4/attention/self/query/kernel^0_1" -> "779 DequantizeLinear_bert/encoder/layer_4/attention/self/query/kernel^0_1" [label="[768, 768]", style=dashed]; +"779 DequantizeLinear_bert/encoder/layer_4/attention/self/query/kernel^0_1" -> "780 bert/encoder/layer_4/attention/self/query/MatMul" [label="[768, 768]", style=solid]; +"780 bert/encoder/layer_4/attention/self/query/MatMul" -> "781 bert/encoder/layer_4/attention/self/query/BiasAdd" [label="[]", style=solid]; +"781 bert/encoder/layer_4/attention/self/query/BiasAdd" -> "782 QuantizeLinear_bert/encoder/layer_4/attention/self/query/BiasAdd^0_1" [label="[]", style=solid]; +"782 QuantizeLinear_bert/encoder/layer_4/attention/self/query/BiasAdd^0_1" -> "783 DequantizeLinear_bert/encoder/layer_4/attention/self/query/BiasAdd^0_1" [label="[]", style=dashed]; +"783 DequantizeLinear_bert/encoder/layer_4/attention/self/query/BiasAdd^0_1" -> "784 bert/encoder/layer_4/attention/self/Reshape" [label="[]", style=solid]; +"784 bert/encoder/layer_4/attention/self/Reshape" -> "785 bert/encoder/layer_4/attention/self/transpose" [label="[]", style=solid]; +"785 
bert/encoder/layer_4/attention/self/transpose" -> "795 bert/encoder/layer_4/attention/self/MatMul" [label="[]", style=solid]; +"786 QuantizeLinear_bert/encoder/layer_4/attention/self/key/kernel^0_1" -> "787 DequantizeLinear_bert/encoder/layer_4/attention/self/key/kernel^0_1" [label="[768, 768]", style=dashed]; +"787 DequantizeLinear_bert/encoder/layer_4/attention/self/key/kernel^0_1" -> "788 bert/encoder/layer_4/attention/self/key/MatMul" [label="[768, 768]", style=solid]; +"788 bert/encoder/layer_4/attention/self/key/MatMul" -> "789 bert/encoder/layer_4/attention/self/key/BiasAdd" [label="[]", style=solid]; +"789 bert/encoder/layer_4/attention/self/key/BiasAdd" -> "790 QuantizeLinear_bert/encoder/layer_4/attention/self/key/BiasAdd^0_1" [label="[]", style=solid]; +"790 QuantizeLinear_bert/encoder/layer_4/attention/self/key/BiasAdd^0_1" -> "791 DequantizeLinear_bert/encoder/layer_4/attention/self/key/BiasAdd^0_1" [label="[]", style=dashed]; +"791 DequantizeLinear_bert/encoder/layer_4/attention/self/key/BiasAdd^0_1" -> "792 bert/encoder/layer_4/attention/self/Reshape_1" [label="[]", style=solid]; +"792 bert/encoder/layer_4/attention/self/Reshape_1" -> "793 bert/encoder/layer_4/attention/self/transpose_1" [label="[]", style=solid]; +"793 bert/encoder/layer_4/attention/self/transpose_1" -> "794 bert/encoder/layer_4/attention/self/MatMul__362" [label="[]", style=solid]; +"794 bert/encoder/layer_4/attention/self/MatMul__362" -> "795 bert/encoder/layer_4/attention/self/MatMul" [label="[]", style=solid]; +"795 bert/encoder/layer_4/attention/self/MatMul" -> "796 bert/encoder/layer_4/attention/self/Mul" [label="[]", style=solid]; +"796 bert/encoder/layer_4/attention/self/Mul" -> "797 bert/encoder/layer_4/attention/self/add" [label="[]", style=solid]; +"797 bert/encoder/layer_4/attention/self/add" -> "798 bert/encoder/layer_4/attention/self/Softmax" [label="[]", style=solid]; +"798 bert/encoder/layer_4/attention/self/Softmax" -> "799 bert/encoder/layer_4/attention/self/MatMul_1" [label="[]", style=solid]; +"799 bert/encoder/layer_4/attention/self/MatMul_1" -> "800 QuantizeLinear_bert/encoder/layer_4/attention/self/MatMul_1^0_1" [label="[]", style=solid]; +"800 QuantizeLinear_bert/encoder/layer_4/attention/self/MatMul_1^0_1" -> "801 DequantizeLinear_bert/encoder/layer_4/attention/self/MatMul_1^0_1" [label="[]", style=dashed]; +"801 DequantizeLinear_bert/encoder/layer_4/attention/self/MatMul_1^0_1" -> "802 bert/encoder/layer_4/attention/self/transpose_3" [label="[]", style=solid]; +"802 bert/encoder/layer_4/attention/self/transpose_3" -> "803 bert/encoder/layer_4/attention/self/Reshape_3" [label="[]", style=solid]; +"803 bert/encoder/layer_4/attention/self/Reshape_3" -> "806 bert/encoder/layer_4/attention/output/dense/MatMul" [label="[]", style=solid]; +"804 QuantizeLinear_bert/encoder/layer_4/attention/output/dense/kernel^0_1" -> "805 DequantizeLinear_bert/encoder/layer_4/attention/output/dense/kernel^0_1" [label="[768, 768]", style=dashed]; +"805 DequantizeLinear_bert/encoder/layer_4/attention/output/dense/kernel^0_1" -> "806 bert/encoder/layer_4/attention/output/dense/MatMul" [label="[768, 768]", style=solid]; +"806 bert/encoder/layer_4/attention/output/dense/MatMul" -> "807 bert/encoder/layer_4/attention/output/dense/BiasAdd" [label="[]", style=solid]; +"807 bert/encoder/layer_4/attention/output/dense/BiasAdd" -> "808 bert/encoder/layer_4/attention/output/add" [label="[]", style=solid]; +"808 bert/encoder/layer_4/attention/output/add" -> "809 
bert/encoder/layer_4/attention/output/LayerNorm/moments/mean" [label="[]", style=solid]; +"808 bert/encoder/layer_4/attention/output/add" -> "811 bert/encoder/layer_4/attention/output/LayerNorm/moments/SquaredDifference" [label="[]", style=solid]; +"808 bert/encoder/layer_4/attention/output/add" -> "820 bert/encoder/layer_4/attention/output/LayerNorm/batchnorm/mul_1" [label="[]", style=solid]; +"809 bert/encoder/layer_4/attention/output/LayerNorm/moments/mean" -> "810 bert/encoder/layer_4/attention/output/LayerNorm/moments/StopGradient" [label="[]", style=solid]; +"809 bert/encoder/layer_4/attention/output/LayerNorm/moments/mean" -> "818 bert/encoder/layer_4/attention/output/LayerNorm/batchnorm/mul_2" [label="[]", style=solid]; +"810 bert/encoder/layer_4/attention/output/LayerNorm/moments/StopGradient" -> "811 bert/encoder/layer_4/attention/output/LayerNorm/moments/SquaredDifference" [label="[]", style=solid]; +"811 bert/encoder/layer_4/attention/output/LayerNorm/moments/SquaredDifference" -> "812 bert/encoder/layer_4/attention/output/LayerNorm/moments/SquaredDifference__365" [label="[]", style=solid]; +"812 bert/encoder/layer_4/attention/output/LayerNorm/moments/SquaredDifference__365" -> "813 bert/encoder/layer_4/attention/output/LayerNorm/moments/variance" [label="[]", style=solid]; +"813 bert/encoder/layer_4/attention/output/LayerNorm/moments/variance" -> "814 bert/encoder/layer_4/attention/output/LayerNorm/batchnorm/add" [label="[]", style=solid]; +"814 bert/encoder/layer_4/attention/output/LayerNorm/batchnorm/add" -> "815 bert/encoder/layer_4/attention/output/LayerNorm/batchnorm/Rsqrt" [label="[]", style=solid]; +"815 bert/encoder/layer_4/attention/output/LayerNorm/batchnorm/Rsqrt" -> "816 bert/encoder/layer_4/attention/output/LayerNorm/batchnorm/Rsqrt__367" [label="[]", style=solid]; +"816 bert/encoder/layer_4/attention/output/LayerNorm/batchnorm/Rsqrt__367" -> "817 bert/encoder/layer_4/attention/output/LayerNorm/batchnorm/mul" [label="[]", style=solid]; +"817 bert/encoder/layer_4/attention/output/LayerNorm/batchnorm/mul" -> "818 bert/encoder/layer_4/attention/output/LayerNorm/batchnorm/mul_2" [label="[]", style=solid]; +"817 bert/encoder/layer_4/attention/output/LayerNorm/batchnorm/mul" -> "820 bert/encoder/layer_4/attention/output/LayerNorm/batchnorm/mul_1" [label="[]", style=solid]; +"818 bert/encoder/layer_4/attention/output/LayerNorm/batchnorm/mul_2" -> "819 bert/encoder/layer_4/attention/output/LayerNorm/batchnorm/sub" [label="[]", style=solid]; +"819 bert/encoder/layer_4/attention/output/LayerNorm/batchnorm/sub" -> "821 bert/encoder/layer_4/attention/output/LayerNorm/batchnorm/add_1" [label="[]", style=solid]; +"820 bert/encoder/layer_4/attention/output/LayerNorm/batchnorm/mul_1" -> "821 bert/encoder/layer_4/attention/output/LayerNorm/batchnorm/add_1" [label="[]", style=solid]; +"821 bert/encoder/layer_4/attention/output/LayerNorm/batchnorm/add_1" -> "822 QuantizeLinear_bert/encoder/layer_4/attention/output/LayerNorm/batchnorm/add_1^0_1" [label="[]", style=solid]; +"821 bert/encoder/layer_4/attention/output/LayerNorm/batchnorm/add_1" -> "842 bert/encoder/layer_4/output/add" [label="[]", style=solid]; +"822 QuantizeLinear_bert/encoder/layer_4/attention/output/LayerNorm/batchnorm/add_1^0_1" -> "823 DequantizeLinear_bert/encoder/layer_4/attention/output/LayerNorm/batchnorm/add_1^0_1" [label="[]", style=dashed]; +"823 DequantizeLinear_bert/encoder/layer_4/attention/output/LayerNorm/batchnorm/add_1^0_1" -> "826 bert/encoder/layer_4/intermediate/dense/MatMul" [label="[]", 
style=solid]; +"824 QuantizeLinear_bert/encoder/layer_4/intermediate/dense/kernel^0_1" -> "825 DequantizeLinear_bert/encoder/layer_4/intermediate/dense/kernel^0_1" [label="[768, 3072]", style=dashed]; +"825 DequantizeLinear_bert/encoder/layer_4/intermediate/dense/kernel^0_1" -> "826 bert/encoder/layer_4/intermediate/dense/MatMul" [label="[768, 3072]", style=solid]; +"826 bert/encoder/layer_4/intermediate/dense/MatMul" -> "827 bert/encoder/layer_4/intermediate/dense/BiasAdd" [label="[]", style=solid]; +"827 bert/encoder/layer_4/intermediate/dense/BiasAdd" -> "828 bert/encoder/layer_4/intermediate/dense/Pow" [label="[]", style=solid]; +"827 bert/encoder/layer_4/intermediate/dense/BiasAdd" -> "830 bert/encoder/layer_4/intermediate/dense/add" [label="[]", style=solid]; +"827 bert/encoder/layer_4/intermediate/dense/BiasAdd" -> "835 bert/encoder/layer_4/intermediate/dense/mul_3" [label="[]", style=solid]; +"828 bert/encoder/layer_4/intermediate/dense/Pow" -> "829 bert/encoder/layer_4/intermediate/dense/mul" [label="[]", style=solid]; +"829 bert/encoder/layer_4/intermediate/dense/mul" -> "830 bert/encoder/layer_4/intermediate/dense/add" [label="[]", style=solid]; +"830 bert/encoder/layer_4/intermediate/dense/add" -> "831 bert/encoder/layer_4/intermediate/dense/mul_1" [label="[]", style=solid]; +"831 bert/encoder/layer_4/intermediate/dense/mul_1" -> "832 bert/encoder/layer_4/intermediate/dense/Tanh" [label="[]", style=solid]; +"832 bert/encoder/layer_4/intermediate/dense/Tanh" -> "833 bert/encoder/layer_4/intermediate/dense/add_1" [label="[]", style=solid]; +"833 bert/encoder/layer_4/intermediate/dense/add_1" -> "834 bert/encoder/layer_4/intermediate/dense/mul_2" [label="[]", style=solid]; +"834 bert/encoder/layer_4/intermediate/dense/mul_2" -> "835 bert/encoder/layer_4/intermediate/dense/mul_3" [label="[]", style=solid]; +"835 bert/encoder/layer_4/intermediate/dense/mul_3" -> "836 QuantizeLinear_bert/encoder/layer_4/intermediate/dense/mul_3^0_1" [label="[]", style=solid]; +"836 QuantizeLinear_bert/encoder/layer_4/intermediate/dense/mul_3^0_1" -> "837 DequantizeLinear_bert/encoder/layer_4/intermediate/dense/mul_3^0_1" [label="[]", style=dashed]; +"837 DequantizeLinear_bert/encoder/layer_4/intermediate/dense/mul_3^0_1" -> "840 bert/encoder/layer_4/output/dense/MatMul" [label="[]", style=solid]; +"838 QuantizeLinear_bert/encoder/layer_4/output/dense/kernel^0_1" -> "839 DequantizeLinear_bert/encoder/layer_4/output/dense/kernel^0_1" [label="[3072, 768]", style=dashed]; +"839 DequantizeLinear_bert/encoder/layer_4/output/dense/kernel^0_1" -> "840 bert/encoder/layer_4/output/dense/MatMul" [label="[3072, 768]", style=solid]; +"840 bert/encoder/layer_4/output/dense/MatMul" -> "841 bert/encoder/layer_4/output/dense/BiasAdd" [label="[]", style=solid]; +"841 bert/encoder/layer_4/output/dense/BiasAdd" -> "842 bert/encoder/layer_4/output/add" [label="[]", style=solid]; +"842 bert/encoder/layer_4/output/add" -> "843 bert/encoder/layer_4/output/LayerNorm/moments/mean" [label="[]", style=solid]; +"842 bert/encoder/layer_4/output/add" -> "845 bert/encoder/layer_4/output/LayerNorm/moments/SquaredDifference" [label="[]", style=solid]; +"842 bert/encoder/layer_4/output/add" -> "854 bert/encoder/layer_4/output/LayerNorm/batchnorm/mul_1" [label="[]", style=solid]; +"843 bert/encoder/layer_4/output/LayerNorm/moments/mean" -> "844 bert/encoder/layer_4/output/LayerNorm/moments/StopGradient" [label="[]", style=solid]; +"843 bert/encoder/layer_4/output/LayerNorm/moments/mean" -> "852 
bert/encoder/layer_4/output/LayerNorm/batchnorm/mul_2" [label="[]", style=solid]; +"844 bert/encoder/layer_4/output/LayerNorm/moments/StopGradient" -> "845 bert/encoder/layer_4/output/LayerNorm/moments/SquaredDifference" [label="[]", style=solid]; +"845 bert/encoder/layer_4/output/LayerNorm/moments/SquaredDifference" -> "846 bert/encoder/layer_4/output/LayerNorm/moments/SquaredDifference__369" [label="[]", style=solid]; +"846 bert/encoder/layer_4/output/LayerNorm/moments/SquaredDifference__369" -> "847 bert/encoder/layer_4/output/LayerNorm/moments/variance" [label="[]", style=solid]; +"847 bert/encoder/layer_4/output/LayerNorm/moments/variance" -> "848 bert/encoder/layer_4/output/LayerNorm/batchnorm/add" [label="[]", style=solid]; +"848 bert/encoder/layer_4/output/LayerNorm/batchnorm/add" -> "849 bert/encoder/layer_4/output/LayerNorm/batchnorm/Rsqrt" [label="[]", style=solid]; +"849 bert/encoder/layer_4/output/LayerNorm/batchnorm/Rsqrt" -> "850 bert/encoder/layer_4/output/LayerNorm/batchnorm/Rsqrt__371" [label="[]", style=solid]; +"850 bert/encoder/layer_4/output/LayerNorm/batchnorm/Rsqrt__371" -> "851 bert/encoder/layer_4/output/LayerNorm/batchnorm/mul" [label="[]", style=solid]; +"851 bert/encoder/layer_4/output/LayerNorm/batchnorm/mul" -> "852 bert/encoder/layer_4/output/LayerNorm/batchnorm/mul_2" [label="[]", style=solid]; +"851 bert/encoder/layer_4/output/LayerNorm/batchnorm/mul" -> "854 bert/encoder/layer_4/output/LayerNorm/batchnorm/mul_1" [label="[]", style=solid]; +"852 bert/encoder/layer_4/output/LayerNorm/batchnorm/mul_2" -> "853 bert/encoder/layer_4/output/LayerNorm/batchnorm/sub" [label="[]", style=solid]; +"853 bert/encoder/layer_4/output/LayerNorm/batchnorm/sub" -> "855 bert/encoder/layer_4/output/LayerNorm/batchnorm/add_1" [label="[]", style=solid]; +"854 bert/encoder/layer_4/output/LayerNorm/batchnorm/mul_1" -> "855 bert/encoder/layer_4/output/LayerNorm/batchnorm/add_1" [label="[]", style=solid]; +"855 bert/encoder/layer_4/output/LayerNorm/batchnorm/add_1" -> "856 QuantizeLinear_bert/encoder/layer_4/output/LayerNorm/batchnorm/add_1^0_3" [label="[]", style=solid]; +"855 bert/encoder/layer_4/output/LayerNorm/batchnorm/add_1" -> "858 QuantizeLinear_bert/encoder/layer_4/output/LayerNorm/batchnorm/add_1^0_2" [label="[]", style=solid]; +"855 bert/encoder/layer_4/output/LayerNorm/batchnorm/add_1" -> "860 QuantizeLinear_bert/encoder/layer_4/output/LayerNorm/batchnorm/add_1^0_1" [label="[]", style=solid]; +"855 bert/encoder/layer_4/output/LayerNorm/batchnorm/add_1" -> "898 bert/encoder/layer_5/attention/output/add" [label="[]", style=solid]; +"856 QuantizeLinear_bert/encoder/layer_4/output/LayerNorm/batchnorm/add_1^0_3" -> "857 DequantizeLinear_bert/encoder/layer_4/output/LayerNorm/batchnorm/add_1^0_3" [label="[]", style=dashed]; +"857 DequantizeLinear_bert/encoder/layer_4/output/LayerNorm/batchnorm/add_1^0_3" -> "878 bert/encoder/layer_5/attention/self/key/MatMul" [label="[]", style=solid]; +"858 QuantizeLinear_bert/encoder/layer_4/output/LayerNorm/batchnorm/add_1^0_2" -> "859 DequantizeLinear_bert/encoder/layer_4/output/LayerNorm/batchnorm/add_1^0_2" [label="[]", style=dashed]; +"859 DequantizeLinear_bert/encoder/layer_4/output/LayerNorm/batchnorm/add_1^0_2" -> "870 bert/encoder/layer_5/attention/self/query/MatMul" [label="[]", style=solid]; +"860 QuantizeLinear_bert/encoder/layer_4/output/LayerNorm/batchnorm/add_1^0_1" -> "861 DequantizeLinear_bert/encoder/layer_4/output/LayerNorm/batchnorm/add_1^0_1" [label="[]", style=dashed]; +"861 
DequantizeLinear_bert/encoder/layer_4/output/LayerNorm/batchnorm/add_1^0_1" -> "864 bert/encoder/layer_5/attention/self/value/MatMul" [label="[]", style=solid]; +"862 QuantizeLinear_bert/encoder/layer_5/attention/self/value/kernel^0_1" -> "863 DequantizeLinear_bert/encoder/layer_5/attention/self/value/kernel^0_1" [label="[768, 768]", style=dashed]; +"863 DequantizeLinear_bert/encoder/layer_5/attention/self/value/kernel^0_1" -> "864 bert/encoder/layer_5/attention/self/value/MatMul" [label="[768, 768]", style=solid]; +"864 bert/encoder/layer_5/attention/self/value/MatMul" -> "865 bert/encoder/layer_5/attention/self/value/BiasAdd" [label="[]", style=solid]; +"865 bert/encoder/layer_5/attention/self/value/BiasAdd" -> "866 bert/encoder/layer_5/attention/self/Reshape_2" [label="[]", style=solid]; +"866 bert/encoder/layer_5/attention/self/Reshape_2" -> "867 bert/encoder/layer_5/attention/self/transpose_2" [label="[]", style=solid]; +"867 bert/encoder/layer_5/attention/self/transpose_2" -> "889 bert/encoder/layer_5/attention/self/MatMul_1" [label="[]", style=solid]; +"868 QuantizeLinear_bert/encoder/layer_5/attention/self/query/kernel^0_1" -> "869 DequantizeLinear_bert/encoder/layer_5/attention/self/query/kernel^0_1" [label="[768, 768]", style=dashed]; +"869 DequantizeLinear_bert/encoder/layer_5/attention/self/query/kernel^0_1" -> "870 bert/encoder/layer_5/attention/self/query/MatMul" [label="[768, 768]", style=solid]; +"870 bert/encoder/layer_5/attention/self/query/MatMul" -> "871 bert/encoder/layer_5/attention/self/query/BiasAdd" [label="[]", style=solid]; +"871 bert/encoder/layer_5/attention/self/query/BiasAdd" -> "872 QuantizeLinear_bert/encoder/layer_5/attention/self/query/BiasAdd^0_1" [label="[]", style=solid]; +"872 QuantizeLinear_bert/encoder/layer_5/attention/self/query/BiasAdd^0_1" -> "873 DequantizeLinear_bert/encoder/layer_5/attention/self/query/BiasAdd^0_1" [label="[]", style=dashed]; +"873 DequantizeLinear_bert/encoder/layer_5/attention/self/query/BiasAdd^0_1" -> "874 bert/encoder/layer_5/attention/self/Reshape" [label="[]", style=solid]; +"874 bert/encoder/layer_5/attention/self/Reshape" -> "875 bert/encoder/layer_5/attention/self/transpose" [label="[]", style=solid]; +"875 bert/encoder/layer_5/attention/self/transpose" -> "885 bert/encoder/layer_5/attention/self/MatMul" [label="[]", style=solid]; +"876 QuantizeLinear_bert/encoder/layer_5/attention/self/key/kernel^0_1" -> "877 DequantizeLinear_bert/encoder/layer_5/attention/self/key/kernel^0_1" [label="[768, 768]", style=dashed]; +"877 DequantizeLinear_bert/encoder/layer_5/attention/self/key/kernel^0_1" -> "878 bert/encoder/layer_5/attention/self/key/MatMul" [label="[768, 768]", style=solid]; +"878 bert/encoder/layer_5/attention/self/key/MatMul" -> "879 bert/encoder/layer_5/attention/self/key/BiasAdd" [label="[]", style=solid]; +"879 bert/encoder/layer_5/attention/self/key/BiasAdd" -> "880 QuantizeLinear_bert/encoder/layer_5/attention/self/key/BiasAdd^0_1" [label="[]", style=solid]; +"880 QuantizeLinear_bert/encoder/layer_5/attention/self/key/BiasAdd^0_1" -> "881 DequantizeLinear_bert/encoder/layer_5/attention/self/key/BiasAdd^0_1" [label="[]", style=dashed]; +"881 DequantizeLinear_bert/encoder/layer_5/attention/self/key/BiasAdd^0_1" -> "882 bert/encoder/layer_5/attention/self/Reshape_1" [label="[]", style=solid]; +"882 bert/encoder/layer_5/attention/self/Reshape_1" -> "883 bert/encoder/layer_5/attention/self/transpose_1" [label="[]", style=solid]; +"883 bert/encoder/layer_5/attention/self/transpose_1" -> "884 
bert/encoder/layer_5/attention/self/MatMul__376" [label="[]", style=solid]; +"884 bert/encoder/layer_5/attention/self/MatMul__376" -> "885 bert/encoder/layer_5/attention/self/MatMul" [label="[]", style=solid]; +"885 bert/encoder/layer_5/attention/self/MatMul" -> "886 bert/encoder/layer_5/attention/self/Mul" [label="[]", style=solid]; +"886 bert/encoder/layer_5/attention/self/Mul" -> "887 bert/encoder/layer_5/attention/self/add" [label="[]", style=solid]; +"887 bert/encoder/layer_5/attention/self/add" -> "888 bert/encoder/layer_5/attention/self/Softmax" [label="[]", style=solid]; +"888 bert/encoder/layer_5/attention/self/Softmax" -> "889 bert/encoder/layer_5/attention/self/MatMul_1" [label="[]", style=solid]; +"889 bert/encoder/layer_5/attention/self/MatMul_1" -> "890 QuantizeLinear_bert/encoder/layer_5/attention/self/MatMul_1^0_1" [label="[]", style=solid]; +"890 QuantizeLinear_bert/encoder/layer_5/attention/self/MatMul_1^0_1" -> "891 DequantizeLinear_bert/encoder/layer_5/attention/self/MatMul_1^0_1" [label="[]", style=dashed]; +"891 DequantizeLinear_bert/encoder/layer_5/attention/self/MatMul_1^0_1" -> "892 bert/encoder/layer_5/attention/self/transpose_3" [label="[]", style=solid]; +"892 bert/encoder/layer_5/attention/self/transpose_3" -> "893 bert/encoder/layer_5/attention/self/Reshape_3" [label="[]", style=solid]; +"893 bert/encoder/layer_5/attention/self/Reshape_3" -> "896 bert/encoder/layer_5/attention/output/dense/MatMul" [label="[]", style=solid]; +"894 QuantizeLinear_bert/encoder/layer_5/attention/output/dense/kernel^0_1" -> "895 DequantizeLinear_bert/encoder/layer_5/attention/output/dense/kernel^0_1" [label="[768, 768]", style=dashed]; +"895 DequantizeLinear_bert/encoder/layer_5/attention/output/dense/kernel^0_1" -> "896 bert/encoder/layer_5/attention/output/dense/MatMul" [label="[768, 768]", style=solid]; +"896 bert/encoder/layer_5/attention/output/dense/MatMul" -> "897 bert/encoder/layer_5/attention/output/dense/BiasAdd" [label="[]", style=solid]; +"897 bert/encoder/layer_5/attention/output/dense/BiasAdd" -> "898 bert/encoder/layer_5/attention/output/add" [label="[]", style=solid]; +"898 bert/encoder/layer_5/attention/output/add" -> "899 bert/encoder/layer_5/attention/output/LayerNorm/moments/mean" [label="[]", style=solid]; +"898 bert/encoder/layer_5/attention/output/add" -> "901 bert/encoder/layer_5/attention/output/LayerNorm/moments/SquaredDifference" [label="[]", style=solid]; +"898 bert/encoder/layer_5/attention/output/add" -> "910 bert/encoder/layer_5/attention/output/LayerNorm/batchnorm/mul_1" [label="[]", style=solid]; +"899 bert/encoder/layer_5/attention/output/LayerNorm/moments/mean" -> "900 bert/encoder/layer_5/attention/output/LayerNorm/moments/StopGradient" [label="[]", style=solid]; +"899 bert/encoder/layer_5/attention/output/LayerNorm/moments/mean" -> "908 bert/encoder/layer_5/attention/output/LayerNorm/batchnorm/mul_2" [label="[]", style=solid]; +"900 bert/encoder/layer_5/attention/output/LayerNorm/moments/StopGradient" -> "901 bert/encoder/layer_5/attention/output/LayerNorm/moments/SquaredDifference" [label="[]", style=solid]; +"901 bert/encoder/layer_5/attention/output/LayerNorm/moments/SquaredDifference" -> "902 bert/encoder/layer_5/attention/output/LayerNorm/moments/SquaredDifference__379" [label="[]", style=solid]; +"902 bert/encoder/layer_5/attention/output/LayerNorm/moments/SquaredDifference__379" -> "903 bert/encoder/layer_5/attention/output/LayerNorm/moments/variance" [label="[]", style=solid]; +"903 
bert/encoder/layer_5/attention/output/LayerNorm/moments/variance" -> "904 bert/encoder/layer_5/attention/output/LayerNorm/batchnorm/add" [label="[]", style=solid]; +"904 bert/encoder/layer_5/attention/output/LayerNorm/batchnorm/add" -> "905 bert/encoder/layer_5/attention/output/LayerNorm/batchnorm/Rsqrt" [label="[]", style=solid]; +"905 bert/encoder/layer_5/attention/output/LayerNorm/batchnorm/Rsqrt" -> "906 bert/encoder/layer_5/attention/output/LayerNorm/batchnorm/Rsqrt__381" [label="[]", style=solid]; +"906 bert/encoder/layer_5/attention/output/LayerNorm/batchnorm/Rsqrt__381" -> "907 bert/encoder/layer_5/attention/output/LayerNorm/batchnorm/mul" [label="[]", style=solid]; +"907 bert/encoder/layer_5/attention/output/LayerNorm/batchnorm/mul" -> "908 bert/encoder/layer_5/attention/output/LayerNorm/batchnorm/mul_2" [label="[]", style=solid]; +"907 bert/encoder/layer_5/attention/output/LayerNorm/batchnorm/mul" -> "910 bert/encoder/layer_5/attention/output/LayerNorm/batchnorm/mul_1" [label="[]", style=solid]; +"908 bert/encoder/layer_5/attention/output/LayerNorm/batchnorm/mul_2" -> "909 bert/encoder/layer_5/attention/output/LayerNorm/batchnorm/sub" [label="[]", style=solid]; +"909 bert/encoder/layer_5/attention/output/LayerNorm/batchnorm/sub" -> "911 bert/encoder/layer_5/attention/output/LayerNorm/batchnorm/add_1" [label="[]", style=solid]; +"910 bert/encoder/layer_5/attention/output/LayerNorm/batchnorm/mul_1" -> "911 bert/encoder/layer_5/attention/output/LayerNorm/batchnorm/add_1" [label="[]", style=solid]; +"911 bert/encoder/layer_5/attention/output/LayerNorm/batchnorm/add_1" -> "912 QuantizeLinear_bert/encoder/layer_5/attention/output/LayerNorm/batchnorm/add_1^0_1" [label="[]", style=solid]; +"911 bert/encoder/layer_5/attention/output/LayerNorm/batchnorm/add_1" -> "932 bert/encoder/layer_5/output/add" [label="[]", style=solid]; +"912 QuantizeLinear_bert/encoder/layer_5/attention/output/LayerNorm/batchnorm/add_1^0_1" -> "913 DequantizeLinear_bert/encoder/layer_5/attention/output/LayerNorm/batchnorm/add_1^0_1" [label="[]", style=dashed]; +"913 DequantizeLinear_bert/encoder/layer_5/attention/output/LayerNorm/batchnorm/add_1^0_1" -> "916 bert/encoder/layer_5/intermediate/dense/MatMul" [label="[]", style=solid]; +"914 QuantizeLinear_bert/encoder/layer_5/intermediate/dense/kernel^0_1" -> "915 DequantizeLinear_bert/encoder/layer_5/intermediate/dense/kernel^0_1" [label="[768, 3072]", style=dashed]; +"915 DequantizeLinear_bert/encoder/layer_5/intermediate/dense/kernel^0_1" -> "916 bert/encoder/layer_5/intermediate/dense/MatMul" [label="[768, 3072]", style=solid]; +"916 bert/encoder/layer_5/intermediate/dense/MatMul" -> "917 bert/encoder/layer_5/intermediate/dense/BiasAdd" [label="[]", style=solid]; +"917 bert/encoder/layer_5/intermediate/dense/BiasAdd" -> "918 bert/encoder/layer_5/intermediate/dense/Pow" [label="[]", style=solid]; +"917 bert/encoder/layer_5/intermediate/dense/BiasAdd" -> "920 bert/encoder/layer_5/intermediate/dense/add" [label="[]", style=solid]; +"917 bert/encoder/layer_5/intermediate/dense/BiasAdd" -> "925 bert/encoder/layer_5/intermediate/dense/mul_3" [label="[]", style=solid]; +"918 bert/encoder/layer_5/intermediate/dense/Pow" -> "919 bert/encoder/layer_5/intermediate/dense/mul" [label="[]", style=solid]; +"919 bert/encoder/layer_5/intermediate/dense/mul" -> "920 bert/encoder/layer_5/intermediate/dense/add" [label="[]", style=solid]; +"920 bert/encoder/layer_5/intermediate/dense/add" -> "921 bert/encoder/layer_5/intermediate/dense/mul_1" [label="[]", style=solid]; +"921 
bert/encoder/layer_5/intermediate/dense/mul_1" -> "922 bert/encoder/layer_5/intermediate/dense/Tanh" [label="[]", style=solid]; +"922 bert/encoder/layer_5/intermediate/dense/Tanh" -> "923 bert/encoder/layer_5/intermediate/dense/add_1" [label="[]", style=solid]; +"923 bert/encoder/layer_5/intermediate/dense/add_1" -> "924 bert/encoder/layer_5/intermediate/dense/mul_2" [label="[]", style=solid]; +"924 bert/encoder/layer_5/intermediate/dense/mul_2" -> "925 bert/encoder/layer_5/intermediate/dense/mul_3" [label="[]", style=solid]; +"925 bert/encoder/layer_5/intermediate/dense/mul_3" -> "926 QuantizeLinear_bert/encoder/layer_5/intermediate/dense/mul_3^0_1" [label="[]", style=solid]; +"926 QuantizeLinear_bert/encoder/layer_5/intermediate/dense/mul_3^0_1" -> "927 DequantizeLinear_bert/encoder/layer_5/intermediate/dense/mul_3^0_1" [label="[]", style=dashed]; +"927 DequantizeLinear_bert/encoder/layer_5/intermediate/dense/mul_3^0_1" -> "930 bert/encoder/layer_5/output/dense/MatMul" [label="[]", style=solid]; +"928 QuantizeLinear_bert/encoder/layer_5/output/dense/kernel^0_1" -> "929 DequantizeLinear_bert/encoder/layer_5/output/dense/kernel^0_1" [label="[3072, 768]", style=dashed]; +"929 DequantizeLinear_bert/encoder/layer_5/output/dense/kernel^0_1" -> "930 bert/encoder/layer_5/output/dense/MatMul" [label="[3072, 768]", style=solid]; +"930 bert/encoder/layer_5/output/dense/MatMul" -> "931 bert/encoder/layer_5/output/dense/BiasAdd" [label="[]", style=solid]; +"931 bert/encoder/layer_5/output/dense/BiasAdd" -> "932 bert/encoder/layer_5/output/add" [label="[]", style=solid]; +"932 bert/encoder/layer_5/output/add" -> "933 bert/encoder/layer_5/output/LayerNorm/moments/mean" [label="[]", style=solid]; +"932 bert/encoder/layer_5/output/add" -> "935 bert/encoder/layer_5/output/LayerNorm/moments/SquaredDifference" [label="[]", style=solid]; +"932 bert/encoder/layer_5/output/add" -> "944 bert/encoder/layer_5/output/LayerNorm/batchnorm/mul_1" [label="[]", style=solid]; +"933 bert/encoder/layer_5/output/LayerNorm/moments/mean" -> "934 bert/encoder/layer_5/output/LayerNorm/moments/StopGradient" [label="[]", style=solid]; +"933 bert/encoder/layer_5/output/LayerNorm/moments/mean" -> "942 bert/encoder/layer_5/output/LayerNorm/batchnorm/mul_2" [label="[]", style=solid]; +"934 bert/encoder/layer_5/output/LayerNorm/moments/StopGradient" -> "935 bert/encoder/layer_5/output/LayerNorm/moments/SquaredDifference" [label="[]", style=solid]; +"935 bert/encoder/layer_5/output/LayerNorm/moments/SquaredDifference" -> "936 bert/encoder/layer_5/output/LayerNorm/moments/SquaredDifference__383" [label="[]", style=solid]; +"936 bert/encoder/layer_5/output/LayerNorm/moments/SquaredDifference__383" -> "937 bert/encoder/layer_5/output/LayerNorm/moments/variance" [label="[]", style=solid]; +"937 bert/encoder/layer_5/output/LayerNorm/moments/variance" -> "938 bert/encoder/layer_5/output/LayerNorm/batchnorm/add" [label="[]", style=solid]; +"938 bert/encoder/layer_5/output/LayerNorm/batchnorm/add" -> "939 bert/encoder/layer_5/output/LayerNorm/batchnorm/Rsqrt" [label="[]", style=solid]; +"939 bert/encoder/layer_5/output/LayerNorm/batchnorm/Rsqrt" -> "940 bert/encoder/layer_5/output/LayerNorm/batchnorm/Rsqrt__385" [label="[]", style=solid]; +"940 bert/encoder/layer_5/output/LayerNorm/batchnorm/Rsqrt__385" -> "941 bert/encoder/layer_5/output/LayerNorm/batchnorm/mul" [label="[]", style=solid]; +"941 bert/encoder/layer_5/output/LayerNorm/batchnorm/mul" -> "942 bert/encoder/layer_5/output/LayerNorm/batchnorm/mul_2" [label="[]", style=solid]; +"941 
bert/encoder/layer_5/output/LayerNorm/batchnorm/mul" -> "944 bert/encoder/layer_5/output/LayerNorm/batchnorm/mul_1" [label="[]", style=solid]; +"942 bert/encoder/layer_5/output/LayerNorm/batchnorm/mul_2" -> "943 bert/encoder/layer_5/output/LayerNorm/batchnorm/sub" [label="[]", style=solid]; +"943 bert/encoder/layer_5/output/LayerNorm/batchnorm/sub" -> "945 bert/encoder/layer_5/output/LayerNorm/batchnorm/add_1" [label="[]", style=solid]; +"944 bert/encoder/layer_5/output/LayerNorm/batchnorm/mul_1" -> "945 bert/encoder/layer_5/output/LayerNorm/batchnorm/add_1" [label="[]", style=solid]; +"945 bert/encoder/layer_5/output/LayerNorm/batchnorm/add_1" -> "946 QuantizeLinear_bert/encoder/layer_5/output/LayerNorm/batchnorm/add_1^0_3" [label="[]", style=solid]; +"945 bert/encoder/layer_5/output/LayerNorm/batchnorm/add_1" -> "948 QuantizeLinear_bert/encoder/layer_5/output/LayerNorm/batchnorm/add_1^0_2" [label="[]", style=solid]; +"945 bert/encoder/layer_5/output/LayerNorm/batchnorm/add_1" -> "950 QuantizeLinear_bert/encoder/layer_5/output/LayerNorm/batchnorm/add_1^0_1" [label="[]", style=solid]; +"945 bert/encoder/layer_5/output/LayerNorm/batchnorm/add_1" -> "988 bert/encoder/layer_6/attention/output/add" [label="[]", style=solid]; +"946 QuantizeLinear_bert/encoder/layer_5/output/LayerNorm/batchnorm/add_1^0_3" -> "947 DequantizeLinear_bert/encoder/layer_5/output/LayerNorm/batchnorm/add_1^0_3" [label="[]", style=dashed]; +"947 DequantizeLinear_bert/encoder/layer_5/output/LayerNorm/batchnorm/add_1^0_3" -> "968 bert/encoder/layer_6/attention/self/key/MatMul" [label="[]", style=solid]; +"948 QuantizeLinear_bert/encoder/layer_5/output/LayerNorm/batchnorm/add_1^0_2" -> "949 DequantizeLinear_bert/encoder/layer_5/output/LayerNorm/batchnorm/add_1^0_2" [label="[]", style=dashed]; +"949 DequantizeLinear_bert/encoder/layer_5/output/LayerNorm/batchnorm/add_1^0_2" -> "960 bert/encoder/layer_6/attention/self/query/MatMul" [label="[]", style=solid]; +"950 QuantizeLinear_bert/encoder/layer_5/output/LayerNorm/batchnorm/add_1^0_1" -> "951 DequantizeLinear_bert/encoder/layer_5/output/LayerNorm/batchnorm/add_1^0_1" [label="[]", style=dashed]; +"951 DequantizeLinear_bert/encoder/layer_5/output/LayerNorm/batchnorm/add_1^0_1" -> "954 bert/encoder/layer_6/attention/self/value/MatMul" [label="[]", style=solid]; +"952 QuantizeLinear_bert/encoder/layer_6/attention/self/value/kernel^0_1" -> "953 DequantizeLinear_bert/encoder/layer_6/attention/self/value/kernel^0_1" [label="[768, 768]", style=dashed]; +"953 DequantizeLinear_bert/encoder/layer_6/attention/self/value/kernel^0_1" -> "954 bert/encoder/layer_6/attention/self/value/MatMul" [label="[768, 768]", style=solid]; +"954 bert/encoder/layer_6/attention/self/value/MatMul" -> "955 bert/encoder/layer_6/attention/self/value/BiasAdd" [label="[]", style=solid]; +"955 bert/encoder/layer_6/attention/self/value/BiasAdd" -> "956 bert/encoder/layer_6/attention/self/Reshape_2" [label="[]", style=solid]; +"956 bert/encoder/layer_6/attention/self/Reshape_2" -> "957 bert/encoder/layer_6/attention/self/transpose_2" [label="[]", style=solid]; +"957 bert/encoder/layer_6/attention/self/transpose_2" -> "979 bert/encoder/layer_6/attention/self/MatMul_1" [label="[]", style=solid]; +"958 QuantizeLinear_bert/encoder/layer_6/attention/self/query/kernel^0_1" -> "959 DequantizeLinear_bert/encoder/layer_6/attention/self/query/kernel^0_1" [label="[768, 768]", style=dashed]; +"959 DequantizeLinear_bert/encoder/layer_6/attention/self/query/kernel^0_1" -> "960 bert/encoder/layer_6/attention/self/query/MatMul" 
[label="[768, 768]", style=solid]; +"960 bert/encoder/layer_6/attention/self/query/MatMul" -> "961 bert/encoder/layer_6/attention/self/query/BiasAdd" [label="[]", style=solid]; +"961 bert/encoder/layer_6/attention/self/query/BiasAdd" -> "962 QuantizeLinear_bert/encoder/layer_6/attention/self/query/BiasAdd^0_1" [label="[]", style=solid]; +"962 QuantizeLinear_bert/encoder/layer_6/attention/self/query/BiasAdd^0_1" -> "963 DequantizeLinear_bert/encoder/layer_6/attention/self/query/BiasAdd^0_1" [label="[]", style=dashed]; +"963 DequantizeLinear_bert/encoder/layer_6/attention/self/query/BiasAdd^0_1" -> "964 bert/encoder/layer_6/attention/self/Reshape" [label="[]", style=solid]; +"964 bert/encoder/layer_6/attention/self/Reshape" -> "965 bert/encoder/layer_6/attention/self/transpose" [label="[]", style=solid]; +"965 bert/encoder/layer_6/attention/self/transpose" -> "975 bert/encoder/layer_6/attention/self/MatMul" [label="[]", style=solid]; +"966 QuantizeLinear_bert/encoder/layer_6/attention/self/key/kernel^0_1" -> "967 DequantizeLinear_bert/encoder/layer_6/attention/self/key/kernel^0_1" [label="[768, 768]", style=dashed]; +"967 DequantizeLinear_bert/encoder/layer_6/attention/self/key/kernel^0_1" -> "968 bert/encoder/layer_6/attention/self/key/MatMul" [label="[768, 768]", style=solid]; +"968 bert/encoder/layer_6/attention/self/key/MatMul" -> "969 bert/encoder/layer_6/attention/self/key/BiasAdd" [label="[]", style=solid]; +"969 bert/encoder/layer_6/attention/self/key/BiasAdd" -> "970 QuantizeLinear_bert/encoder/layer_6/attention/self/key/BiasAdd^0_1" [label="[]", style=solid]; +"970 QuantizeLinear_bert/encoder/layer_6/attention/self/key/BiasAdd^0_1" -> "971 DequantizeLinear_bert/encoder/layer_6/attention/self/key/BiasAdd^0_1" [label="[]", style=dashed]; +"971 DequantizeLinear_bert/encoder/layer_6/attention/self/key/BiasAdd^0_1" -> "972 bert/encoder/layer_6/attention/self/Reshape_1" [label="[]", style=solid]; +"972 bert/encoder/layer_6/attention/self/Reshape_1" -> "973 bert/encoder/layer_6/attention/self/transpose_1" [label="[]", style=solid]; +"973 bert/encoder/layer_6/attention/self/transpose_1" -> "974 bert/encoder/layer_6/attention/self/MatMul__390" [label="[]", style=solid]; +"974 bert/encoder/layer_6/attention/self/MatMul__390" -> "975 bert/encoder/layer_6/attention/self/MatMul" [label="[]", style=solid]; +"975 bert/encoder/layer_6/attention/self/MatMul" -> "976 bert/encoder/layer_6/attention/self/Mul" [label="[]", style=solid]; +"976 bert/encoder/layer_6/attention/self/Mul" -> "977 bert/encoder/layer_6/attention/self/add" [label="[]", style=solid]; +"977 bert/encoder/layer_6/attention/self/add" -> "978 bert/encoder/layer_6/attention/self/Softmax" [label="[]", style=solid]; +"978 bert/encoder/layer_6/attention/self/Softmax" -> "979 bert/encoder/layer_6/attention/self/MatMul_1" [label="[]", style=solid]; +"979 bert/encoder/layer_6/attention/self/MatMul_1" -> "980 QuantizeLinear_bert/encoder/layer_6/attention/self/MatMul_1^0_1" [label="[]", style=solid]; +"980 QuantizeLinear_bert/encoder/layer_6/attention/self/MatMul_1^0_1" -> "981 DequantizeLinear_bert/encoder/layer_6/attention/self/MatMul_1^0_1" [label="[]", style=dashed]; +"981 DequantizeLinear_bert/encoder/layer_6/attention/self/MatMul_1^0_1" -> "982 bert/encoder/layer_6/attention/self/transpose_3" [label="[]", style=solid]; +"982 bert/encoder/layer_6/attention/self/transpose_3" -> "983 bert/encoder/layer_6/attention/self/Reshape_3" [label="[]", style=solid]; +"983 bert/encoder/layer_6/attention/self/Reshape_3" -> "986 
bert/encoder/layer_6/attention/output/dense/MatMul" [label="[]", style=solid]; +"984 QuantizeLinear_bert/encoder/layer_6/attention/output/dense/kernel^0_1" -> "985 DequantizeLinear_bert/encoder/layer_6/attention/output/dense/kernel^0_1" [label="[768, 768]", style=dashed]; +"985 DequantizeLinear_bert/encoder/layer_6/attention/output/dense/kernel^0_1" -> "986 bert/encoder/layer_6/attention/output/dense/MatMul" [label="[768, 768]", style=solid]; +"986 bert/encoder/layer_6/attention/output/dense/MatMul" -> "987 bert/encoder/layer_6/attention/output/dense/BiasAdd" [label="[]", style=solid]; +"987 bert/encoder/layer_6/attention/output/dense/BiasAdd" -> "988 bert/encoder/layer_6/attention/output/add" [label="[]", style=solid]; +"988 bert/encoder/layer_6/attention/output/add" -> "989 bert/encoder/layer_6/attention/output/LayerNorm/moments/mean" [label="[]", style=solid]; +"988 bert/encoder/layer_6/attention/output/add" -> "991 bert/encoder/layer_6/attention/output/LayerNorm/moments/SquaredDifference" [label="[]", style=solid]; +"988 bert/encoder/layer_6/attention/output/add" -> "1000 bert/encoder/layer_6/attention/output/LayerNorm/batchnorm/mul_1" [label="[]", style=solid]; +"989 bert/encoder/layer_6/attention/output/LayerNorm/moments/mean" -> "990 bert/encoder/layer_6/attention/output/LayerNorm/moments/StopGradient" [label="[]", style=solid]; +"989 bert/encoder/layer_6/attention/output/LayerNorm/moments/mean" -> "998 bert/encoder/layer_6/attention/output/LayerNorm/batchnorm/mul_2" [label="[]", style=solid]; +"990 bert/encoder/layer_6/attention/output/LayerNorm/moments/StopGradient" -> "991 bert/encoder/layer_6/attention/output/LayerNorm/moments/SquaredDifference" [label="[]", style=solid]; +"991 bert/encoder/layer_6/attention/output/LayerNorm/moments/SquaredDifference" -> "992 bert/encoder/layer_6/attention/output/LayerNorm/moments/SquaredDifference__393" [label="[]", style=solid]; +"992 bert/encoder/layer_6/attention/output/LayerNorm/moments/SquaredDifference__393" -> "993 bert/encoder/layer_6/attention/output/LayerNorm/moments/variance" [label="[]", style=solid]; +"993 bert/encoder/layer_6/attention/output/LayerNorm/moments/variance" -> "994 bert/encoder/layer_6/attention/output/LayerNorm/batchnorm/add" [label="[]", style=solid]; +"994 bert/encoder/layer_6/attention/output/LayerNorm/batchnorm/add" -> "995 bert/encoder/layer_6/attention/output/LayerNorm/batchnorm/Rsqrt" [label="[]", style=solid]; +"995 bert/encoder/layer_6/attention/output/LayerNorm/batchnorm/Rsqrt" -> "996 bert/encoder/layer_6/attention/output/LayerNorm/batchnorm/Rsqrt__395" [label="[]", style=solid]; +"996 bert/encoder/layer_6/attention/output/LayerNorm/batchnorm/Rsqrt__395" -> "997 bert/encoder/layer_6/attention/output/LayerNorm/batchnorm/mul" [label="[]", style=solid]; +"997 bert/encoder/layer_6/attention/output/LayerNorm/batchnorm/mul" -> "998 bert/encoder/layer_6/attention/output/LayerNorm/batchnorm/mul_2" [label="[]", style=solid]; +"997 bert/encoder/layer_6/attention/output/LayerNorm/batchnorm/mul" -> "1000 bert/encoder/layer_6/attention/output/LayerNorm/batchnorm/mul_1" [label="[]", style=solid]; +"998 bert/encoder/layer_6/attention/output/LayerNorm/batchnorm/mul_2" -> "999 bert/encoder/layer_6/attention/output/LayerNorm/batchnorm/sub" [label="[]", style=solid]; +"999 bert/encoder/layer_6/attention/output/LayerNorm/batchnorm/sub" -> "1001 bert/encoder/layer_6/attention/output/LayerNorm/batchnorm/add_1" [label="[]", style=solid]; +"1000 bert/encoder/layer_6/attention/output/LayerNorm/batchnorm/mul_1" -> "1001 
bert/encoder/layer_6/attention/output/LayerNorm/batchnorm/add_1" [label="[]", style=solid]; +"1001 bert/encoder/layer_6/attention/output/LayerNorm/batchnorm/add_1" -> "1002 QuantizeLinear_bert/encoder/layer_6/attention/output/LayerNorm/batchnorm/add_1^0_1" [label="[]", style=solid]; +"1001 bert/encoder/layer_6/attention/output/LayerNorm/batchnorm/add_1" -> "1022 bert/encoder/layer_6/output/add" [label="[]", style=solid]; +"1002 QuantizeLinear_bert/encoder/layer_6/attention/output/LayerNorm/batchnorm/add_1^0_1" -> "1003 DequantizeLinear_bert/encoder/layer_6/attention/output/LayerNorm/batchnorm/add_1^0_1" [label="[]", style=dashed]; +"1003 DequantizeLinear_bert/encoder/layer_6/attention/output/LayerNorm/batchnorm/add_1^0_1" -> "1006 bert/encoder/layer_6/intermediate/dense/MatMul" [label="[]", style=solid]; +"1004 QuantizeLinear_bert/encoder/layer_6/intermediate/dense/kernel^0_1" -> "1005 DequantizeLinear_bert/encoder/layer_6/intermediate/dense/kernel^0_1" [label="[768, 3072]", style=dashed]; +"1005 DequantizeLinear_bert/encoder/layer_6/intermediate/dense/kernel^0_1" -> "1006 bert/encoder/layer_6/intermediate/dense/MatMul" [label="[768, 3072]", style=solid]; +"1006 bert/encoder/layer_6/intermediate/dense/MatMul" -> "1007 bert/encoder/layer_6/intermediate/dense/BiasAdd" [label="[]", style=solid]; +"1007 bert/encoder/layer_6/intermediate/dense/BiasAdd" -> "1008 bert/encoder/layer_6/intermediate/dense/Pow" [label="[]", style=solid]; +"1007 bert/encoder/layer_6/intermediate/dense/BiasAdd" -> "1010 bert/encoder/layer_6/intermediate/dense/add" [label="[]", style=solid]; +"1007 bert/encoder/layer_6/intermediate/dense/BiasAdd" -> "1015 bert/encoder/layer_6/intermediate/dense/mul_3" [label="[]", style=solid]; +"1008 bert/encoder/layer_6/intermediate/dense/Pow" -> "1009 bert/encoder/layer_6/intermediate/dense/mul" [label="[]", style=solid]; +"1009 bert/encoder/layer_6/intermediate/dense/mul" -> "1010 bert/encoder/layer_6/intermediate/dense/add" [label="[]", style=solid]; +"1010 bert/encoder/layer_6/intermediate/dense/add" -> "1011 bert/encoder/layer_6/intermediate/dense/mul_1" [label="[]", style=solid]; +"1011 bert/encoder/layer_6/intermediate/dense/mul_1" -> "1012 bert/encoder/layer_6/intermediate/dense/Tanh" [label="[]", style=solid]; +"1012 bert/encoder/layer_6/intermediate/dense/Tanh" -> "1013 bert/encoder/layer_6/intermediate/dense/add_1" [label="[]", style=solid]; +"1013 bert/encoder/layer_6/intermediate/dense/add_1" -> "1014 bert/encoder/layer_6/intermediate/dense/mul_2" [label="[]", style=solid]; +"1014 bert/encoder/layer_6/intermediate/dense/mul_2" -> "1015 bert/encoder/layer_6/intermediate/dense/mul_3" [label="[]", style=solid]; +"1015 bert/encoder/layer_6/intermediate/dense/mul_3" -> "1016 QuantizeLinear_bert/encoder/layer_6/intermediate/dense/mul_3^0_1" [label="[]", style=solid]; +"1016 QuantizeLinear_bert/encoder/layer_6/intermediate/dense/mul_3^0_1" -> "1017 DequantizeLinear_bert/encoder/layer_6/intermediate/dense/mul_3^0_1" [label="[]", style=dashed]; +"1017 DequantizeLinear_bert/encoder/layer_6/intermediate/dense/mul_3^0_1" -> "1020 bert/encoder/layer_6/output/dense/MatMul" [label="[]", style=solid]; +"1018 QuantizeLinear_bert/encoder/layer_6/output/dense/kernel^0_1" -> "1019 DequantizeLinear_bert/encoder/layer_6/output/dense/kernel^0_1" [label="[3072, 768]", style=dashed]; +"1019 DequantizeLinear_bert/encoder/layer_6/output/dense/kernel^0_1" -> "1020 bert/encoder/layer_6/output/dense/MatMul" [label="[3072, 768]", style=solid]; +"1020 bert/encoder/layer_6/output/dense/MatMul" -> "1021 
bert/encoder/layer_6/output/dense/BiasAdd" [label="[]", style=solid]; +"1021 bert/encoder/layer_6/output/dense/BiasAdd" -> "1022 bert/encoder/layer_6/output/add" [label="[]", style=solid]; +"1022 bert/encoder/layer_6/output/add" -> "1023 bert/encoder/layer_6/output/LayerNorm/moments/mean" [label="[]", style=solid]; +"1022 bert/encoder/layer_6/output/add" -> "1025 bert/encoder/layer_6/output/LayerNorm/moments/SquaredDifference" [label="[]", style=solid]; +"1022 bert/encoder/layer_6/output/add" -> "1034 bert/encoder/layer_6/output/LayerNorm/batchnorm/mul_1" [label="[]", style=solid]; +"1023 bert/encoder/layer_6/output/LayerNorm/moments/mean" -> "1024 bert/encoder/layer_6/output/LayerNorm/moments/StopGradient" [label="[]", style=solid]; +"1023 bert/encoder/layer_6/output/LayerNorm/moments/mean" -> "1032 bert/encoder/layer_6/output/LayerNorm/batchnorm/mul_2" [label="[]", style=solid]; +"1024 bert/encoder/layer_6/output/LayerNorm/moments/StopGradient" -> "1025 bert/encoder/layer_6/output/LayerNorm/moments/SquaredDifference" [label="[]", style=solid]; +"1025 bert/encoder/layer_6/output/LayerNorm/moments/SquaredDifference" -> "1026 bert/encoder/layer_6/output/LayerNorm/moments/SquaredDifference__397" [label="[]", style=solid]; +"1026 bert/encoder/layer_6/output/LayerNorm/moments/SquaredDifference__397" -> "1027 bert/encoder/layer_6/output/LayerNorm/moments/variance" [label="[]", style=solid]; +"1027 bert/encoder/layer_6/output/LayerNorm/moments/variance" -> "1028 bert/encoder/layer_6/output/LayerNorm/batchnorm/add" [label="[]", style=solid]; +"1028 bert/encoder/layer_6/output/LayerNorm/batchnorm/add" -> "1029 bert/encoder/layer_6/output/LayerNorm/batchnorm/Rsqrt" [label="[]", style=solid]; +"1029 bert/encoder/layer_6/output/LayerNorm/batchnorm/Rsqrt" -> "1030 bert/encoder/layer_6/output/LayerNorm/batchnorm/Rsqrt__399" [label="[]", style=solid]; +"1030 bert/encoder/layer_6/output/LayerNorm/batchnorm/Rsqrt__399" -> "1031 bert/encoder/layer_6/output/LayerNorm/batchnorm/mul" [label="[]", style=solid]; +"1031 bert/encoder/layer_6/output/LayerNorm/batchnorm/mul" -> "1032 bert/encoder/layer_6/output/LayerNorm/batchnorm/mul_2" [label="[]", style=solid]; +"1031 bert/encoder/layer_6/output/LayerNorm/batchnorm/mul" -> "1034 bert/encoder/layer_6/output/LayerNorm/batchnorm/mul_1" [label="[]", style=solid]; +"1032 bert/encoder/layer_6/output/LayerNorm/batchnorm/mul_2" -> "1033 bert/encoder/layer_6/output/LayerNorm/batchnorm/sub" [label="[]", style=solid]; +"1033 bert/encoder/layer_6/output/LayerNorm/batchnorm/sub" -> "1035 bert/encoder/layer_6/output/LayerNorm/batchnorm/add_1" [label="[]", style=solid]; +"1034 bert/encoder/layer_6/output/LayerNorm/batchnorm/mul_1" -> "1035 bert/encoder/layer_6/output/LayerNorm/batchnorm/add_1" [label="[]", style=solid]; +"1035 bert/encoder/layer_6/output/LayerNorm/batchnorm/add_1" -> "1036 QuantizeLinear_bert/encoder/layer_6/output/LayerNorm/batchnorm/add_1^0_3" [label="[]", style=solid]; +"1035 bert/encoder/layer_6/output/LayerNorm/batchnorm/add_1" -> "1038 QuantizeLinear_bert/encoder/layer_6/output/LayerNorm/batchnorm/add_1^0_2" [label="[]", style=solid]; +"1035 bert/encoder/layer_6/output/LayerNorm/batchnorm/add_1" -> "1040 QuantizeLinear_bert/encoder/layer_6/output/LayerNorm/batchnorm/add_1^0_1" [label="[]", style=solid]; +"1035 bert/encoder/layer_6/output/LayerNorm/batchnorm/add_1" -> "1078 bert/encoder/layer_7/attention/output/add" [label="[]", style=solid]; +"1036 QuantizeLinear_bert/encoder/layer_6/output/LayerNorm/batchnorm/add_1^0_3" -> "1037 
DequantizeLinear_bert/encoder/layer_6/output/LayerNorm/batchnorm/add_1^0_3" [label="[]", style=dashed]; +"1037 DequantizeLinear_bert/encoder/layer_6/output/LayerNorm/batchnorm/add_1^0_3" -> "1058 bert/encoder/layer_7/attention/self/key/MatMul" [label="[]", style=solid]; +"1038 QuantizeLinear_bert/encoder/layer_6/output/LayerNorm/batchnorm/add_1^0_2" -> "1039 DequantizeLinear_bert/encoder/layer_6/output/LayerNorm/batchnorm/add_1^0_2" [label="[]", style=dashed]; +"1039 DequantizeLinear_bert/encoder/layer_6/output/LayerNorm/batchnorm/add_1^0_2" -> "1050 bert/encoder/layer_7/attention/self/query/MatMul" [label="[]", style=solid]; +"1040 QuantizeLinear_bert/encoder/layer_6/output/LayerNorm/batchnorm/add_1^0_1" -> "1041 DequantizeLinear_bert/encoder/layer_6/output/LayerNorm/batchnorm/add_1^0_1" [label="[]", style=dashed]; +"1041 DequantizeLinear_bert/encoder/layer_6/output/LayerNorm/batchnorm/add_1^0_1" -> "1044 bert/encoder/layer_7/attention/self/value/MatMul" [label="[]", style=solid]; +"1042 QuantizeLinear_bert/encoder/layer_7/attention/self/value/kernel^0_1" -> "1043 DequantizeLinear_bert/encoder/layer_7/attention/self/value/kernel^0_1" [label="[768, 768]", style=dashed]; +"1043 DequantizeLinear_bert/encoder/layer_7/attention/self/value/kernel^0_1" -> "1044 bert/encoder/layer_7/attention/self/value/MatMul" [label="[768, 768]", style=solid]; +"1044 bert/encoder/layer_7/attention/self/value/MatMul" -> "1045 bert/encoder/layer_7/attention/self/value/BiasAdd" [label="[]", style=solid]; +"1045 bert/encoder/layer_7/attention/self/value/BiasAdd" -> "1046 bert/encoder/layer_7/attention/self/Reshape_2" [label="[]", style=solid]; +"1046 bert/encoder/layer_7/attention/self/Reshape_2" -> "1047 bert/encoder/layer_7/attention/self/transpose_2" [label="[]", style=solid]; +"1047 bert/encoder/layer_7/attention/self/transpose_2" -> "1069 bert/encoder/layer_7/attention/self/MatMul_1" [label="[]", style=solid]; +"1048 QuantizeLinear_bert/encoder/layer_7/attention/self/query/kernel^0_1" -> "1049 DequantizeLinear_bert/encoder/layer_7/attention/self/query/kernel^0_1" [label="[768, 768]", style=dashed]; +"1049 DequantizeLinear_bert/encoder/layer_7/attention/self/query/kernel^0_1" -> "1050 bert/encoder/layer_7/attention/self/query/MatMul" [label="[768, 768]", style=solid]; +"1050 bert/encoder/layer_7/attention/self/query/MatMul" -> "1051 bert/encoder/layer_7/attention/self/query/BiasAdd" [label="[]", style=solid]; +"1051 bert/encoder/layer_7/attention/self/query/BiasAdd" -> "1052 QuantizeLinear_bert/encoder/layer_7/attention/self/query/BiasAdd^0_1" [label="[]", style=solid]; +"1052 QuantizeLinear_bert/encoder/layer_7/attention/self/query/BiasAdd^0_1" -> "1053 DequantizeLinear_bert/encoder/layer_7/attention/self/query/BiasAdd^0_1" [label="[]", style=dashed]; +"1053 DequantizeLinear_bert/encoder/layer_7/attention/self/query/BiasAdd^0_1" -> "1054 bert/encoder/layer_7/attention/self/Reshape" [label="[]", style=solid]; +"1054 bert/encoder/layer_7/attention/self/Reshape" -> "1055 bert/encoder/layer_7/attention/self/transpose" [label="[]", style=solid]; +"1055 bert/encoder/layer_7/attention/self/transpose" -> "1065 bert/encoder/layer_7/attention/self/MatMul" [label="[]", style=solid]; +"1056 QuantizeLinear_bert/encoder/layer_7/attention/self/key/kernel^0_1" -> "1057 DequantizeLinear_bert/encoder/layer_7/attention/self/key/kernel^0_1" [label="[768, 768]", style=dashed]; +"1057 DequantizeLinear_bert/encoder/layer_7/attention/self/key/kernel^0_1" -> "1058 bert/encoder/layer_7/attention/self/key/MatMul" [label="[768, 768]", 
style=solid]; +"1058 bert/encoder/layer_7/attention/self/key/MatMul" -> "1059 bert/encoder/layer_7/attention/self/key/BiasAdd" [label="[]", style=solid]; +"1059 bert/encoder/layer_7/attention/self/key/BiasAdd" -> "1060 QuantizeLinear_bert/encoder/layer_7/attention/self/key/BiasAdd^0_1" [label="[]", style=solid]; +"1060 QuantizeLinear_bert/encoder/layer_7/attention/self/key/BiasAdd^0_1" -> "1061 DequantizeLinear_bert/encoder/layer_7/attention/self/key/BiasAdd^0_1" [label="[]", style=dashed]; +"1061 DequantizeLinear_bert/encoder/layer_7/attention/self/key/BiasAdd^0_1" -> "1062 bert/encoder/layer_7/attention/self/Reshape_1" [label="[]", style=solid]; +"1062 bert/encoder/layer_7/attention/self/Reshape_1" -> "1063 bert/encoder/layer_7/attention/self/transpose_1" [label="[]", style=solid]; +"1063 bert/encoder/layer_7/attention/self/transpose_1" -> "1064 bert/encoder/layer_7/attention/self/MatMul__404" [label="[]", style=solid]; +"1064 bert/encoder/layer_7/attention/self/MatMul__404" -> "1065 bert/encoder/layer_7/attention/self/MatMul" [label="[]", style=solid]; +"1065 bert/encoder/layer_7/attention/self/MatMul" -> "1066 bert/encoder/layer_7/attention/self/Mul" [label="[]", style=solid]; +"1066 bert/encoder/layer_7/attention/self/Mul" -> "1067 bert/encoder/layer_7/attention/self/add" [label="[]", style=solid]; +"1067 bert/encoder/layer_7/attention/self/add" -> "1068 bert/encoder/layer_7/attention/self/Softmax" [label="[]", style=solid]; +"1068 bert/encoder/layer_7/attention/self/Softmax" -> "1069 bert/encoder/layer_7/attention/self/MatMul_1" [label="[]", style=solid]; +"1069 bert/encoder/layer_7/attention/self/MatMul_1" -> "1070 QuantizeLinear_bert/encoder/layer_7/attention/self/MatMul_1^0_1" [label="[]", style=solid]; +"1070 QuantizeLinear_bert/encoder/layer_7/attention/self/MatMul_1^0_1" -> "1071 DequantizeLinear_bert/encoder/layer_7/attention/self/MatMul_1^0_1" [label="[]", style=dashed]; +"1071 DequantizeLinear_bert/encoder/layer_7/attention/self/MatMul_1^0_1" -> "1072 bert/encoder/layer_7/attention/self/transpose_3" [label="[]", style=solid]; +"1072 bert/encoder/layer_7/attention/self/transpose_3" -> "1073 bert/encoder/layer_7/attention/self/Reshape_3" [label="[]", style=solid]; +"1073 bert/encoder/layer_7/attention/self/Reshape_3" -> "1076 bert/encoder/layer_7/attention/output/dense/MatMul" [label="[]", style=solid]; +"1074 QuantizeLinear_bert/encoder/layer_7/attention/output/dense/kernel^0_1" -> "1075 DequantizeLinear_bert/encoder/layer_7/attention/output/dense/kernel^0_1" [label="[768, 768]", style=dashed]; +"1075 DequantizeLinear_bert/encoder/layer_7/attention/output/dense/kernel^0_1" -> "1076 bert/encoder/layer_7/attention/output/dense/MatMul" [label="[768, 768]", style=solid]; +"1076 bert/encoder/layer_7/attention/output/dense/MatMul" -> "1077 bert/encoder/layer_7/attention/output/dense/BiasAdd" [label="[]", style=solid]; +"1077 bert/encoder/layer_7/attention/output/dense/BiasAdd" -> "1078 bert/encoder/layer_7/attention/output/add" [label="[]", style=solid]; +"1078 bert/encoder/layer_7/attention/output/add" -> "1079 bert/encoder/layer_7/attention/output/LayerNorm/moments/mean" [label="[]", style=solid]; +"1078 bert/encoder/layer_7/attention/output/add" -> "1081 bert/encoder/layer_7/attention/output/LayerNorm/moments/SquaredDifference" [label="[]", style=solid]; +"1078 bert/encoder/layer_7/attention/output/add" -> "1090 bert/encoder/layer_7/attention/output/LayerNorm/batchnorm/mul_1" [label="[]", style=solid]; +"1079 bert/encoder/layer_7/attention/output/LayerNorm/moments/mean" -> "1080 
bert/encoder/layer_7/attention/output/LayerNorm/moments/StopGradient" [label="[]", style=solid]; +"1079 bert/encoder/layer_7/attention/output/LayerNorm/moments/mean" -> "1088 bert/encoder/layer_7/attention/output/LayerNorm/batchnorm/mul_2" [label="[]", style=solid]; +"1080 bert/encoder/layer_7/attention/output/LayerNorm/moments/StopGradient" -> "1081 bert/encoder/layer_7/attention/output/LayerNorm/moments/SquaredDifference" [label="[]", style=solid]; +"1081 bert/encoder/layer_7/attention/output/LayerNorm/moments/SquaredDifference" -> "1082 bert/encoder/layer_7/attention/output/LayerNorm/moments/SquaredDifference__407" [label="[]", style=solid]; +"1082 bert/encoder/layer_7/attention/output/LayerNorm/moments/SquaredDifference__407" -> "1083 bert/encoder/layer_7/attention/output/LayerNorm/moments/variance" [label="[]", style=solid]; +"1083 bert/encoder/layer_7/attention/output/LayerNorm/moments/variance" -> "1084 bert/encoder/layer_7/attention/output/LayerNorm/batchnorm/add" [label="[]", style=solid]; +"1084 bert/encoder/layer_7/attention/output/LayerNorm/batchnorm/add" -> "1085 bert/encoder/layer_7/attention/output/LayerNorm/batchnorm/Rsqrt" [label="[]", style=solid]; +"1085 bert/encoder/layer_7/attention/output/LayerNorm/batchnorm/Rsqrt" -> "1086 bert/encoder/layer_7/attention/output/LayerNorm/batchnorm/Rsqrt__409" [label="[]", style=solid]; +"1086 bert/encoder/layer_7/attention/output/LayerNorm/batchnorm/Rsqrt__409" -> "1087 bert/encoder/layer_7/attention/output/LayerNorm/batchnorm/mul" [label="[]", style=solid]; +"1087 bert/encoder/layer_7/attention/output/LayerNorm/batchnorm/mul" -> "1088 bert/encoder/layer_7/attention/output/LayerNorm/batchnorm/mul_2" [label="[]", style=solid]; +"1087 bert/encoder/layer_7/attention/output/LayerNorm/batchnorm/mul" -> "1090 bert/encoder/layer_7/attention/output/LayerNorm/batchnorm/mul_1" [label="[]", style=solid]; +"1088 bert/encoder/layer_7/attention/output/LayerNorm/batchnorm/mul_2" -> "1089 bert/encoder/layer_7/attention/output/LayerNorm/batchnorm/sub" [label="[]", style=solid]; +"1089 bert/encoder/layer_7/attention/output/LayerNorm/batchnorm/sub" -> "1091 bert/encoder/layer_7/attention/output/LayerNorm/batchnorm/add_1" [label="[]", style=solid]; +"1090 bert/encoder/layer_7/attention/output/LayerNorm/batchnorm/mul_1" -> "1091 bert/encoder/layer_7/attention/output/LayerNorm/batchnorm/add_1" [label="[]", style=solid]; +"1091 bert/encoder/layer_7/attention/output/LayerNorm/batchnorm/add_1" -> "1092 QuantizeLinear_bert/encoder/layer_7/attention/output/LayerNorm/batchnorm/add_1^0_1" [label="[]", style=solid]; +"1091 bert/encoder/layer_7/attention/output/LayerNorm/batchnorm/add_1" -> "1112 bert/encoder/layer_7/output/add" [label="[]", style=solid]; +"1092 QuantizeLinear_bert/encoder/layer_7/attention/output/LayerNorm/batchnorm/add_1^0_1" -> "1093 DequantizeLinear_bert/encoder/layer_7/attention/output/LayerNorm/batchnorm/add_1^0_1" [label="[]", style=dashed]; +"1093 DequantizeLinear_bert/encoder/layer_7/attention/output/LayerNorm/batchnorm/add_1^0_1" -> "1096 bert/encoder/layer_7/intermediate/dense/MatMul" [label="[]", style=solid]; +"1094 QuantizeLinear_bert/encoder/layer_7/intermediate/dense/kernel^0_1" -> "1095 DequantizeLinear_bert/encoder/layer_7/intermediate/dense/kernel^0_1" [label="[768, 3072]", style=dashed]; +"1095 DequantizeLinear_bert/encoder/layer_7/intermediate/dense/kernel^0_1" -> "1096 bert/encoder/layer_7/intermediate/dense/MatMul" [label="[768, 3072]", style=solid]; +"1096 bert/encoder/layer_7/intermediate/dense/MatMul" -> "1097 
bert/encoder/layer_7/intermediate/dense/BiasAdd" [label="[]", style=solid]; +"1097 bert/encoder/layer_7/intermediate/dense/BiasAdd" -> "1098 bert/encoder/layer_7/intermediate/dense/Pow" [label="[]", style=solid]; +"1097 bert/encoder/layer_7/intermediate/dense/BiasAdd" -> "1100 bert/encoder/layer_7/intermediate/dense/add" [label="[]", style=solid]; +"1097 bert/encoder/layer_7/intermediate/dense/BiasAdd" -> "1105 bert/encoder/layer_7/intermediate/dense/mul_3" [label="[]", style=solid]; +"1098 bert/encoder/layer_7/intermediate/dense/Pow" -> "1099 bert/encoder/layer_7/intermediate/dense/mul" [label="[]", style=solid]; +"1099 bert/encoder/layer_7/intermediate/dense/mul" -> "1100 bert/encoder/layer_7/intermediate/dense/add" [label="[]", style=solid]; +"1100 bert/encoder/layer_7/intermediate/dense/add" -> "1101 bert/encoder/layer_7/intermediate/dense/mul_1" [label="[]", style=solid]; +"1101 bert/encoder/layer_7/intermediate/dense/mul_1" -> "1102 bert/encoder/layer_7/intermediate/dense/Tanh" [label="[]", style=solid]; +"1102 bert/encoder/layer_7/intermediate/dense/Tanh" -> "1103 bert/encoder/layer_7/intermediate/dense/add_1" [label="[]", style=solid]; +"1103 bert/encoder/layer_7/intermediate/dense/add_1" -> "1104 bert/encoder/layer_7/intermediate/dense/mul_2" [label="[]", style=solid]; +"1104 bert/encoder/layer_7/intermediate/dense/mul_2" -> "1105 bert/encoder/layer_7/intermediate/dense/mul_3" [label="[]", style=solid]; +"1105 bert/encoder/layer_7/intermediate/dense/mul_3" -> "1106 QuantizeLinear_bert/encoder/layer_7/intermediate/dense/mul_3^0_1" [label="[]", style=solid]; +"1106 QuantizeLinear_bert/encoder/layer_7/intermediate/dense/mul_3^0_1" -> "1107 DequantizeLinear_bert/encoder/layer_7/intermediate/dense/mul_3^0_1" [label="[]", style=dashed]; +"1107 DequantizeLinear_bert/encoder/layer_7/intermediate/dense/mul_3^0_1" -> "1110 bert/encoder/layer_7/output/dense/MatMul" [label="[]", style=solid]; +"1108 QuantizeLinear_bert/encoder/layer_7/output/dense/kernel^0_1" -> "1109 DequantizeLinear_bert/encoder/layer_7/output/dense/kernel^0_1" [label="[3072, 768]", style=dashed]; +"1109 DequantizeLinear_bert/encoder/layer_7/output/dense/kernel^0_1" -> "1110 bert/encoder/layer_7/output/dense/MatMul" [label="[3072, 768]", style=solid]; +"1110 bert/encoder/layer_7/output/dense/MatMul" -> "1111 bert/encoder/layer_7/output/dense/BiasAdd" [label="[]", style=solid]; +"1111 bert/encoder/layer_7/output/dense/BiasAdd" -> "1112 bert/encoder/layer_7/output/add" [label="[]", style=solid]; +"1112 bert/encoder/layer_7/output/add" -> "1113 bert/encoder/layer_7/output/LayerNorm/moments/mean" [label="[]", style=solid]; +"1112 bert/encoder/layer_7/output/add" -> "1115 bert/encoder/layer_7/output/LayerNorm/moments/SquaredDifference" [label="[]", style=solid]; +"1112 bert/encoder/layer_7/output/add" -> "1124 bert/encoder/layer_7/output/LayerNorm/batchnorm/mul_1" [label="[]", style=solid]; +"1113 bert/encoder/layer_7/output/LayerNorm/moments/mean" -> "1114 bert/encoder/layer_7/output/LayerNorm/moments/StopGradient" [label="[]", style=solid]; +"1113 bert/encoder/layer_7/output/LayerNorm/moments/mean" -> "1122 bert/encoder/layer_7/output/LayerNorm/batchnorm/mul_2" [label="[]", style=solid]; +"1114 bert/encoder/layer_7/output/LayerNorm/moments/StopGradient" -> "1115 bert/encoder/layer_7/output/LayerNorm/moments/SquaredDifference" [label="[]", style=solid]; +"1115 bert/encoder/layer_7/output/LayerNorm/moments/SquaredDifference" -> "1116 bert/encoder/layer_7/output/LayerNorm/moments/SquaredDifference__411" [label="[]", style=solid]; 
+"1116 bert/encoder/layer_7/output/LayerNorm/moments/SquaredDifference__411" -> "1117 bert/encoder/layer_7/output/LayerNorm/moments/variance" [label="[]", style=solid]; +"1117 bert/encoder/layer_7/output/LayerNorm/moments/variance" -> "1118 bert/encoder/layer_7/output/LayerNorm/batchnorm/add" [label="[]", style=solid]; +"1118 bert/encoder/layer_7/output/LayerNorm/batchnorm/add" -> "1119 bert/encoder/layer_7/output/LayerNorm/batchnorm/Rsqrt" [label="[]", style=solid]; +"1119 bert/encoder/layer_7/output/LayerNorm/batchnorm/Rsqrt" -> "1120 bert/encoder/layer_7/output/LayerNorm/batchnorm/Rsqrt__413" [label="[]", style=solid]; +"1120 bert/encoder/layer_7/output/LayerNorm/batchnorm/Rsqrt__413" -> "1121 bert/encoder/layer_7/output/LayerNorm/batchnorm/mul" [label="[]", style=solid]; +"1121 bert/encoder/layer_7/output/LayerNorm/batchnorm/mul" -> "1122 bert/encoder/layer_7/output/LayerNorm/batchnorm/mul_2" [label="[]", style=solid]; +"1121 bert/encoder/layer_7/output/LayerNorm/batchnorm/mul" -> "1124 bert/encoder/layer_7/output/LayerNorm/batchnorm/mul_1" [label="[]", style=solid]; +"1122 bert/encoder/layer_7/output/LayerNorm/batchnorm/mul_2" -> "1123 bert/encoder/layer_7/output/LayerNorm/batchnorm/sub" [label="[]", style=solid]; +"1123 bert/encoder/layer_7/output/LayerNorm/batchnorm/sub" -> "1125 bert/encoder/layer_7/output/LayerNorm/batchnorm/add_1" [label="[]", style=solid]; +"1124 bert/encoder/layer_7/output/LayerNorm/batchnorm/mul_1" -> "1125 bert/encoder/layer_7/output/LayerNorm/batchnorm/add_1" [label="[]", style=solid]; +"1125 bert/encoder/layer_7/output/LayerNorm/batchnorm/add_1" -> "1126 QuantizeLinear_bert/encoder/layer_7/output/LayerNorm/batchnorm/add_1^0_3" [label="[]", style=solid]; +"1125 bert/encoder/layer_7/output/LayerNorm/batchnorm/add_1" -> "1128 QuantizeLinear_bert/encoder/layer_7/output/LayerNorm/batchnorm/add_1^0_2" [label="[]", style=solid]; +"1125 bert/encoder/layer_7/output/LayerNorm/batchnorm/add_1" -> "1130 QuantizeLinear_bert/encoder/layer_7/output/LayerNorm/batchnorm/add_1^0_1" [label="[]", style=solid]; +"1125 bert/encoder/layer_7/output/LayerNorm/batchnorm/add_1" -> "1168 bert/encoder/layer_8/attention/output/add" [label="[]", style=solid]; +"1126 QuantizeLinear_bert/encoder/layer_7/output/LayerNorm/batchnorm/add_1^0_3" -> "1127 DequantizeLinear_bert/encoder/layer_7/output/LayerNorm/batchnorm/add_1^0_3" [label="[]", style=dashed]; +"1127 DequantizeLinear_bert/encoder/layer_7/output/LayerNorm/batchnorm/add_1^0_3" -> "1148 bert/encoder/layer_8/attention/self/key/MatMul" [label="[]", style=solid]; +"1128 QuantizeLinear_bert/encoder/layer_7/output/LayerNorm/batchnorm/add_1^0_2" -> "1129 DequantizeLinear_bert/encoder/layer_7/output/LayerNorm/batchnorm/add_1^0_2" [label="[]", style=dashed]; +"1129 DequantizeLinear_bert/encoder/layer_7/output/LayerNorm/batchnorm/add_1^0_2" -> "1140 bert/encoder/layer_8/attention/self/query/MatMul" [label="[]", style=solid]; +"1130 QuantizeLinear_bert/encoder/layer_7/output/LayerNorm/batchnorm/add_1^0_1" -> "1131 DequantizeLinear_bert/encoder/layer_7/output/LayerNorm/batchnorm/add_1^0_1" [label="[]", style=dashed]; +"1131 DequantizeLinear_bert/encoder/layer_7/output/LayerNorm/batchnorm/add_1^0_1" -> "1134 bert/encoder/layer_8/attention/self/value/MatMul" [label="[]", style=solid]; +"1132 QuantizeLinear_bert/encoder/layer_8/attention/self/value/kernel^0_1" -> "1133 DequantizeLinear_bert/encoder/layer_8/attention/self/value/kernel^0_1" [label="[768, 768]", style=dashed]; +"1133 
DequantizeLinear_bert/encoder/layer_8/attention/self/value/kernel^0_1" -> "1134 bert/encoder/layer_8/attention/self/value/MatMul" [label="[768, 768]", style=solid]; +"1134 bert/encoder/layer_8/attention/self/value/MatMul" -> "1135 bert/encoder/layer_8/attention/self/value/BiasAdd" [label="[]", style=solid]; +"1135 bert/encoder/layer_8/attention/self/value/BiasAdd" -> "1136 bert/encoder/layer_8/attention/self/Reshape_2" [label="[]", style=solid]; +"1136 bert/encoder/layer_8/attention/self/Reshape_2" -> "1137 bert/encoder/layer_8/attention/self/transpose_2" [label="[]", style=solid]; +"1137 bert/encoder/layer_8/attention/self/transpose_2" -> "1159 bert/encoder/layer_8/attention/self/MatMul_1" [label="[]", style=solid]; +"1138 QuantizeLinear_bert/encoder/layer_8/attention/self/query/kernel^0_1" -> "1139 DequantizeLinear_bert/encoder/layer_8/attention/self/query/kernel^0_1" [label="[768, 768]", style=dashed]; +"1139 DequantizeLinear_bert/encoder/layer_8/attention/self/query/kernel^0_1" -> "1140 bert/encoder/layer_8/attention/self/query/MatMul" [label="[768, 768]", style=solid]; +"1140 bert/encoder/layer_8/attention/self/query/MatMul" -> "1141 bert/encoder/layer_8/attention/self/query/BiasAdd" [label="[]", style=solid]; +"1141 bert/encoder/layer_8/attention/self/query/BiasAdd" -> "1142 QuantizeLinear_bert/encoder/layer_8/attention/self/query/BiasAdd^0_1" [label="[]", style=solid]; +"1142 QuantizeLinear_bert/encoder/layer_8/attention/self/query/BiasAdd^0_1" -> "1143 DequantizeLinear_bert/encoder/layer_8/attention/self/query/BiasAdd^0_1" [label="[]", style=dashed]; +"1143 DequantizeLinear_bert/encoder/layer_8/attention/self/query/BiasAdd^0_1" -> "1144 bert/encoder/layer_8/attention/self/Reshape" [label="[]", style=solid]; +"1144 bert/encoder/layer_8/attention/self/Reshape" -> "1145 bert/encoder/layer_8/attention/self/transpose" [label="[]", style=solid]; +"1145 bert/encoder/layer_8/attention/self/transpose" -> "1155 bert/encoder/layer_8/attention/self/MatMul" [label="[]", style=solid]; +"1146 QuantizeLinear_bert/encoder/layer_8/attention/self/key/kernel^0_1" -> "1147 DequantizeLinear_bert/encoder/layer_8/attention/self/key/kernel^0_1" [label="[768, 768]", style=dashed]; +"1147 DequantizeLinear_bert/encoder/layer_8/attention/self/key/kernel^0_1" -> "1148 bert/encoder/layer_8/attention/self/key/MatMul" [label="[768, 768]", style=solid]; +"1148 bert/encoder/layer_8/attention/self/key/MatMul" -> "1149 bert/encoder/layer_8/attention/self/key/BiasAdd" [label="[]", style=solid]; +"1149 bert/encoder/layer_8/attention/self/key/BiasAdd" -> "1150 QuantizeLinear_bert/encoder/layer_8/attention/self/key/BiasAdd^0_1" [label="[]", style=solid]; +"1150 QuantizeLinear_bert/encoder/layer_8/attention/self/key/BiasAdd^0_1" -> "1151 DequantizeLinear_bert/encoder/layer_8/attention/self/key/BiasAdd^0_1" [label="[]", style=dashed]; +"1151 DequantizeLinear_bert/encoder/layer_8/attention/self/key/BiasAdd^0_1" -> "1152 bert/encoder/layer_8/attention/self/Reshape_1" [label="[]", style=solid]; +"1152 bert/encoder/layer_8/attention/self/Reshape_1" -> "1153 bert/encoder/layer_8/attention/self/transpose_1" [label="[]", style=solid]; +"1153 bert/encoder/layer_8/attention/self/transpose_1" -> "1154 bert/encoder/layer_8/attention/self/MatMul__418" [label="[]", style=solid]; +"1154 bert/encoder/layer_8/attention/self/MatMul__418" -> "1155 bert/encoder/layer_8/attention/self/MatMul" [label="[]", style=solid]; +"1155 bert/encoder/layer_8/attention/self/MatMul" -> "1156 bert/encoder/layer_8/attention/self/Mul" [label="[]", 
style=solid]; +"1156 bert/encoder/layer_8/attention/self/Mul" -> "1157 bert/encoder/layer_8/attention/self/add" [label="[]", style=solid]; +"1157 bert/encoder/layer_8/attention/self/add" -> "1158 bert/encoder/layer_8/attention/self/Softmax" [label="[]", style=solid]; +"1158 bert/encoder/layer_8/attention/self/Softmax" -> "1159 bert/encoder/layer_8/attention/self/MatMul_1" [label="[]", style=solid]; +"1159 bert/encoder/layer_8/attention/self/MatMul_1" -> "1160 QuantizeLinear_bert/encoder/layer_8/attention/self/MatMul_1^0_1" [label="[]", style=solid]; +"1160 QuantizeLinear_bert/encoder/layer_8/attention/self/MatMul_1^0_1" -> "1161 DequantizeLinear_bert/encoder/layer_8/attention/self/MatMul_1^0_1" [label="[]", style=dashed]; +"1161 DequantizeLinear_bert/encoder/layer_8/attention/self/MatMul_1^0_1" -> "1162 bert/encoder/layer_8/attention/self/transpose_3" [label="[]", style=solid]; +"1162 bert/encoder/layer_8/attention/self/transpose_3" -> "1163 bert/encoder/layer_8/attention/self/Reshape_3" [label="[]", style=solid]; +"1163 bert/encoder/layer_8/attention/self/Reshape_3" -> "1166 bert/encoder/layer_8/attention/output/dense/MatMul" [label="[]", style=solid]; +"1164 QuantizeLinear_bert/encoder/layer_8/attention/output/dense/kernel^0_1" -> "1165 DequantizeLinear_bert/encoder/layer_8/attention/output/dense/kernel^0_1" [label="[768, 768]", style=dashed]; +"1165 DequantizeLinear_bert/encoder/layer_8/attention/output/dense/kernel^0_1" -> "1166 bert/encoder/layer_8/attention/output/dense/MatMul" [label="[768, 768]", style=solid]; +"1166 bert/encoder/layer_8/attention/output/dense/MatMul" -> "1167 bert/encoder/layer_8/attention/output/dense/BiasAdd" [label="[]", style=solid]; +"1167 bert/encoder/layer_8/attention/output/dense/BiasAdd" -> "1168 bert/encoder/layer_8/attention/output/add" [label="[]", style=solid]; +"1168 bert/encoder/layer_8/attention/output/add" -> "1169 bert/encoder/layer_8/attention/output/LayerNorm/moments/mean" [label="[]", style=solid]; +"1168 bert/encoder/layer_8/attention/output/add" -> "1171 bert/encoder/layer_8/attention/output/LayerNorm/moments/SquaredDifference" [label="[]", style=solid]; +"1168 bert/encoder/layer_8/attention/output/add" -> "1180 bert/encoder/layer_8/attention/output/LayerNorm/batchnorm/mul_1" [label="[]", style=solid]; +"1169 bert/encoder/layer_8/attention/output/LayerNorm/moments/mean" -> "1170 bert/encoder/layer_8/attention/output/LayerNorm/moments/StopGradient" [label="[]", style=solid]; +"1169 bert/encoder/layer_8/attention/output/LayerNorm/moments/mean" -> "1178 bert/encoder/layer_8/attention/output/LayerNorm/batchnorm/mul_2" [label="[]", style=solid]; +"1170 bert/encoder/layer_8/attention/output/LayerNorm/moments/StopGradient" -> "1171 bert/encoder/layer_8/attention/output/LayerNorm/moments/SquaredDifference" [label="[]", style=solid]; +"1171 bert/encoder/layer_8/attention/output/LayerNorm/moments/SquaredDifference" -> "1172 bert/encoder/layer_8/attention/output/LayerNorm/moments/SquaredDifference__421" [label="[]", style=solid]; +"1172 bert/encoder/layer_8/attention/output/LayerNorm/moments/SquaredDifference__421" -> "1173 bert/encoder/layer_8/attention/output/LayerNorm/moments/variance" [label="[]", style=solid]; +"1173 bert/encoder/layer_8/attention/output/LayerNorm/moments/variance" -> "1174 bert/encoder/layer_8/attention/output/LayerNorm/batchnorm/add" [label="[]", style=solid]; +"1174 bert/encoder/layer_8/attention/output/LayerNorm/batchnorm/add" -> "1175 bert/encoder/layer_8/attention/output/LayerNorm/batchnorm/Rsqrt" [label="[]", style=solid]; 
+"1175 bert/encoder/layer_8/attention/output/LayerNorm/batchnorm/Rsqrt" -> "1176 bert/encoder/layer_8/attention/output/LayerNorm/batchnorm/Rsqrt__423" [label="[]", style=solid]; +"1176 bert/encoder/layer_8/attention/output/LayerNorm/batchnorm/Rsqrt__423" -> "1177 bert/encoder/layer_8/attention/output/LayerNorm/batchnorm/mul" [label="[]", style=solid]; +"1177 bert/encoder/layer_8/attention/output/LayerNorm/batchnorm/mul" -> "1178 bert/encoder/layer_8/attention/output/LayerNorm/batchnorm/mul_2" [label="[]", style=solid]; +"1177 bert/encoder/layer_8/attention/output/LayerNorm/batchnorm/mul" -> "1180 bert/encoder/layer_8/attention/output/LayerNorm/batchnorm/mul_1" [label="[]", style=solid]; +"1178 bert/encoder/layer_8/attention/output/LayerNorm/batchnorm/mul_2" -> "1179 bert/encoder/layer_8/attention/output/LayerNorm/batchnorm/sub" [label="[]", style=solid]; +"1179 bert/encoder/layer_8/attention/output/LayerNorm/batchnorm/sub" -> "1181 bert/encoder/layer_8/attention/output/LayerNorm/batchnorm/add_1" [label="[]", style=solid]; +"1180 bert/encoder/layer_8/attention/output/LayerNorm/batchnorm/mul_1" -> "1181 bert/encoder/layer_8/attention/output/LayerNorm/batchnorm/add_1" [label="[]", style=solid]; +"1181 bert/encoder/layer_8/attention/output/LayerNorm/batchnorm/add_1" -> "1182 QuantizeLinear_bert/encoder/layer_8/attention/output/LayerNorm/batchnorm/add_1^0_1" [label="[]", style=solid]; +"1181 bert/encoder/layer_8/attention/output/LayerNorm/batchnorm/add_1" -> "1202 bert/encoder/layer_8/output/add" [label="[]", style=solid]; +"1182 QuantizeLinear_bert/encoder/layer_8/attention/output/LayerNorm/batchnorm/add_1^0_1" -> "1183 DequantizeLinear_bert/encoder/layer_8/attention/output/LayerNorm/batchnorm/add_1^0_1" [label="[]", style=dashed]; +"1183 DequantizeLinear_bert/encoder/layer_8/attention/output/LayerNorm/batchnorm/add_1^0_1" -> "1186 bert/encoder/layer_8/intermediate/dense/MatMul" [label="[]", style=solid]; +"1184 QuantizeLinear_bert/encoder/layer_8/intermediate/dense/kernel^0_1" -> "1185 DequantizeLinear_bert/encoder/layer_8/intermediate/dense/kernel^0_1" [label="[768, 3072]", style=dashed]; +"1185 DequantizeLinear_bert/encoder/layer_8/intermediate/dense/kernel^0_1" -> "1186 bert/encoder/layer_8/intermediate/dense/MatMul" [label="[768, 3072]", style=solid]; +"1186 bert/encoder/layer_8/intermediate/dense/MatMul" -> "1187 bert/encoder/layer_8/intermediate/dense/BiasAdd" [label="[]", style=solid]; +"1187 bert/encoder/layer_8/intermediate/dense/BiasAdd" -> "1188 bert/encoder/layer_8/intermediate/dense/Pow" [label="[]", style=solid]; +"1187 bert/encoder/layer_8/intermediate/dense/BiasAdd" -> "1190 bert/encoder/layer_8/intermediate/dense/add" [label="[]", style=solid]; +"1187 bert/encoder/layer_8/intermediate/dense/BiasAdd" -> "1195 bert/encoder/layer_8/intermediate/dense/mul_3" [label="[]", style=solid]; +"1188 bert/encoder/layer_8/intermediate/dense/Pow" -> "1189 bert/encoder/layer_8/intermediate/dense/mul" [label="[]", style=solid]; +"1189 bert/encoder/layer_8/intermediate/dense/mul" -> "1190 bert/encoder/layer_8/intermediate/dense/add" [label="[]", style=solid]; +"1190 bert/encoder/layer_8/intermediate/dense/add" -> "1191 bert/encoder/layer_8/intermediate/dense/mul_1" [label="[]", style=solid]; +"1191 bert/encoder/layer_8/intermediate/dense/mul_1" -> "1192 bert/encoder/layer_8/intermediate/dense/Tanh" [label="[]", style=solid]; +"1192 bert/encoder/layer_8/intermediate/dense/Tanh" -> "1193 bert/encoder/layer_8/intermediate/dense/add_1" [label="[]", style=solid]; +"1193 
bert/encoder/layer_8/intermediate/dense/add_1" -> "1194 bert/encoder/layer_8/intermediate/dense/mul_2" [label="[]", style=solid]; +"1194 bert/encoder/layer_8/intermediate/dense/mul_2" -> "1195 bert/encoder/layer_8/intermediate/dense/mul_3" [label="[]", style=solid]; +"1195 bert/encoder/layer_8/intermediate/dense/mul_3" -> "1196 QuantizeLinear_bert/encoder/layer_8/intermediate/dense/mul_3^0_1" [label="[]", style=solid]; +"1196 QuantizeLinear_bert/encoder/layer_8/intermediate/dense/mul_3^0_1" -> "1197 DequantizeLinear_bert/encoder/layer_8/intermediate/dense/mul_3^0_1" [label="[]", style=dashed]; +"1197 DequantizeLinear_bert/encoder/layer_8/intermediate/dense/mul_3^0_1" -> "1200 bert/encoder/layer_8/output/dense/MatMul" [label="[]", style=solid]; +"1198 QuantizeLinear_bert/encoder/layer_8/output/dense/kernel^0_1" -> "1199 DequantizeLinear_bert/encoder/layer_8/output/dense/kernel^0_1" [label="[3072, 768]", style=dashed]; +"1199 DequantizeLinear_bert/encoder/layer_8/output/dense/kernel^0_1" -> "1200 bert/encoder/layer_8/output/dense/MatMul" [label="[3072, 768]", style=solid]; +"1200 bert/encoder/layer_8/output/dense/MatMul" -> "1201 bert/encoder/layer_8/output/dense/BiasAdd" [label="[]", style=solid]; +"1201 bert/encoder/layer_8/output/dense/BiasAdd" -> "1202 bert/encoder/layer_8/output/add" [label="[]", style=solid]; +"1202 bert/encoder/layer_8/output/add" -> "1203 bert/encoder/layer_8/output/LayerNorm/moments/mean" [label="[]", style=solid]; +"1202 bert/encoder/layer_8/output/add" -> "1205 bert/encoder/layer_8/output/LayerNorm/moments/SquaredDifference" [label="[]", style=solid]; +"1202 bert/encoder/layer_8/output/add" -> "1214 bert/encoder/layer_8/output/LayerNorm/batchnorm/mul_1" [label="[]", style=solid]; +"1203 bert/encoder/layer_8/output/LayerNorm/moments/mean" -> "1204 bert/encoder/layer_8/output/LayerNorm/moments/StopGradient" [label="[]", style=solid]; +"1203 bert/encoder/layer_8/output/LayerNorm/moments/mean" -> "1212 bert/encoder/layer_8/output/LayerNorm/batchnorm/mul_2" [label="[]", style=solid]; +"1204 bert/encoder/layer_8/output/LayerNorm/moments/StopGradient" -> "1205 bert/encoder/layer_8/output/LayerNorm/moments/SquaredDifference" [label="[]", style=solid]; +"1205 bert/encoder/layer_8/output/LayerNorm/moments/SquaredDifference" -> "1206 bert/encoder/layer_8/output/LayerNorm/moments/SquaredDifference__425" [label="[]", style=solid]; +"1206 bert/encoder/layer_8/output/LayerNorm/moments/SquaredDifference__425" -> "1207 bert/encoder/layer_8/output/LayerNorm/moments/variance" [label="[]", style=solid]; +"1207 bert/encoder/layer_8/output/LayerNorm/moments/variance" -> "1208 bert/encoder/layer_8/output/LayerNorm/batchnorm/add" [label="[]", style=solid]; +"1208 bert/encoder/layer_8/output/LayerNorm/batchnorm/add" -> "1209 bert/encoder/layer_8/output/LayerNorm/batchnorm/Rsqrt" [label="[]", style=solid]; +"1209 bert/encoder/layer_8/output/LayerNorm/batchnorm/Rsqrt" -> "1210 bert/encoder/layer_8/output/LayerNorm/batchnorm/Rsqrt__427" [label="[]", style=solid]; +"1210 bert/encoder/layer_8/output/LayerNorm/batchnorm/Rsqrt__427" -> "1211 bert/encoder/layer_8/output/LayerNorm/batchnorm/mul" [label="[]", style=solid]; +"1211 bert/encoder/layer_8/output/LayerNorm/batchnorm/mul" -> "1212 bert/encoder/layer_8/output/LayerNorm/batchnorm/mul_2" [label="[]", style=solid]; +"1211 bert/encoder/layer_8/output/LayerNorm/batchnorm/mul" -> "1214 bert/encoder/layer_8/output/LayerNorm/batchnorm/mul_1" [label="[]", style=solid]; +"1212 bert/encoder/layer_8/output/LayerNorm/batchnorm/mul_2" -> "1213 
bert/encoder/layer_8/output/LayerNorm/batchnorm/sub" [label="[]", style=solid]; +"1213 bert/encoder/layer_8/output/LayerNorm/batchnorm/sub" -> "1215 bert/encoder/layer_8/output/LayerNorm/batchnorm/add_1" [label="[]", style=solid]; +"1214 bert/encoder/layer_8/output/LayerNorm/batchnorm/mul_1" -> "1215 bert/encoder/layer_8/output/LayerNorm/batchnorm/add_1" [label="[]", style=solid]; +"1215 bert/encoder/layer_8/output/LayerNorm/batchnorm/add_1" -> "1216 QuantizeLinear_bert/encoder/layer_8/output/LayerNorm/batchnorm/add_1^0_3" [label="[]", style=solid]; +"1215 bert/encoder/layer_8/output/LayerNorm/batchnorm/add_1" -> "1218 QuantizeLinear_bert/encoder/layer_8/output/LayerNorm/batchnorm/add_1^0_2" [label="[]", style=solid]; +"1215 bert/encoder/layer_8/output/LayerNorm/batchnorm/add_1" -> "1220 QuantizeLinear_bert/encoder/layer_8/output/LayerNorm/batchnorm/add_1^0_1" [label="[]", style=solid]; +"1215 bert/encoder/layer_8/output/LayerNorm/batchnorm/add_1" -> "1258 bert/encoder/layer_9/attention/output/add" [label="[]", style=solid]; +"1216 QuantizeLinear_bert/encoder/layer_8/output/LayerNorm/batchnorm/add_1^0_3" -> "1217 DequantizeLinear_bert/encoder/layer_8/output/LayerNorm/batchnorm/add_1^0_3" [label="[]", style=dashed]; +"1217 DequantizeLinear_bert/encoder/layer_8/output/LayerNorm/batchnorm/add_1^0_3" -> "1238 bert/encoder/layer_9/attention/self/key/MatMul" [label="[]", style=solid]; +"1218 QuantizeLinear_bert/encoder/layer_8/output/LayerNorm/batchnorm/add_1^0_2" -> "1219 DequantizeLinear_bert/encoder/layer_8/output/LayerNorm/batchnorm/add_1^0_2" [label="[]", style=dashed]; +"1219 DequantizeLinear_bert/encoder/layer_8/output/LayerNorm/batchnorm/add_1^0_2" -> "1230 bert/encoder/layer_9/attention/self/query/MatMul" [label="[]", style=solid]; +"1220 QuantizeLinear_bert/encoder/layer_8/output/LayerNorm/batchnorm/add_1^0_1" -> "1221 DequantizeLinear_bert/encoder/layer_8/output/LayerNorm/batchnorm/add_1^0_1" [label="[]", style=dashed]; +"1221 DequantizeLinear_bert/encoder/layer_8/output/LayerNorm/batchnorm/add_1^0_1" -> "1224 bert/encoder/layer_9/attention/self/value/MatMul" [label="[]", style=solid]; +"1222 QuantizeLinear_bert/encoder/layer_9/attention/self/value/kernel^0_1" -> "1223 DequantizeLinear_bert/encoder/layer_9/attention/self/value/kernel^0_1" [label="[768, 768]", style=dashed]; +"1223 DequantizeLinear_bert/encoder/layer_9/attention/self/value/kernel^0_1" -> "1224 bert/encoder/layer_9/attention/self/value/MatMul" [label="[768, 768]", style=solid]; +"1224 bert/encoder/layer_9/attention/self/value/MatMul" -> "1225 bert/encoder/layer_9/attention/self/value/BiasAdd" [label="[]", style=solid]; +"1225 bert/encoder/layer_9/attention/self/value/BiasAdd" -> "1226 bert/encoder/layer_9/attention/self/Reshape_2" [label="[]", style=solid]; +"1226 bert/encoder/layer_9/attention/self/Reshape_2" -> "1227 bert/encoder/layer_9/attention/self/transpose_2" [label="[]", style=solid]; +"1227 bert/encoder/layer_9/attention/self/transpose_2" -> "1249 bert/encoder/layer_9/attention/self/MatMul_1" [label="[]", style=solid]; +"1228 QuantizeLinear_bert/encoder/layer_9/attention/self/query/kernel^0_1" -> "1229 DequantizeLinear_bert/encoder/layer_9/attention/self/query/kernel^0_1" [label="[768, 768]", style=dashed]; +"1229 DequantizeLinear_bert/encoder/layer_9/attention/self/query/kernel^0_1" -> "1230 bert/encoder/layer_9/attention/self/query/MatMul" [label="[768, 768]", style=solid]; +"1230 bert/encoder/layer_9/attention/self/query/MatMul" -> "1231 bert/encoder/layer_9/attention/self/query/BiasAdd" [label="[]", 
style=solid]; +"1231 bert/encoder/layer_9/attention/self/query/BiasAdd" -> "1232 QuantizeLinear_bert/encoder/layer_9/attention/self/query/BiasAdd^0_1" [label="[]", style=solid]; +"1232 QuantizeLinear_bert/encoder/layer_9/attention/self/query/BiasAdd^0_1" -> "1233 DequantizeLinear_bert/encoder/layer_9/attention/self/query/BiasAdd^0_1" [label="[]", style=dashed]; +"1233 DequantizeLinear_bert/encoder/layer_9/attention/self/query/BiasAdd^0_1" -> "1234 bert/encoder/layer_9/attention/self/Reshape" [label="[]", style=solid]; +"1234 bert/encoder/layer_9/attention/self/Reshape" -> "1235 bert/encoder/layer_9/attention/self/transpose" [label="[]", style=solid]; +"1235 bert/encoder/layer_9/attention/self/transpose" -> "1245 bert/encoder/layer_9/attention/self/MatMul" [label="[]", style=solid]; +"1236 QuantizeLinear_bert/encoder/layer_9/attention/self/key/kernel^0_1" -> "1237 DequantizeLinear_bert/encoder/layer_9/attention/self/key/kernel^0_1" [label="[768, 768]", style=dashed]; +"1237 DequantizeLinear_bert/encoder/layer_9/attention/self/key/kernel^0_1" -> "1238 bert/encoder/layer_9/attention/self/key/MatMul" [label="[768, 768]", style=solid]; +"1238 bert/encoder/layer_9/attention/self/key/MatMul" -> "1239 bert/encoder/layer_9/attention/self/key/BiasAdd" [label="[]", style=solid]; +"1239 bert/encoder/layer_9/attention/self/key/BiasAdd" -> "1240 QuantizeLinear_bert/encoder/layer_9/attention/self/key/BiasAdd^0_1" [label="[]", style=solid]; +"1240 QuantizeLinear_bert/encoder/layer_9/attention/self/key/BiasAdd^0_1" -> "1241 DequantizeLinear_bert/encoder/layer_9/attention/self/key/BiasAdd^0_1" [label="[]", style=dashed]; +"1241 DequantizeLinear_bert/encoder/layer_9/attention/self/key/BiasAdd^0_1" -> "1242 bert/encoder/layer_9/attention/self/Reshape_1" [label="[]", style=solid]; +"1242 bert/encoder/layer_9/attention/self/Reshape_1" -> "1243 bert/encoder/layer_9/attention/self/transpose_1" [label="[]", style=solid]; +"1243 bert/encoder/layer_9/attention/self/transpose_1" -> "1244 bert/encoder/layer_9/attention/self/MatMul__432" [label="[]", style=solid]; +"1244 bert/encoder/layer_9/attention/self/MatMul__432" -> "1245 bert/encoder/layer_9/attention/self/MatMul" [label="[]", style=solid]; +"1245 bert/encoder/layer_9/attention/self/MatMul" -> "1246 bert/encoder/layer_9/attention/self/Mul" [label="[]", style=solid]; +"1246 bert/encoder/layer_9/attention/self/Mul" -> "1247 bert/encoder/layer_9/attention/self/add" [label="[]", style=solid]; +"1247 bert/encoder/layer_9/attention/self/add" -> "1248 bert/encoder/layer_9/attention/self/Softmax" [label="[]", style=solid]; +"1248 bert/encoder/layer_9/attention/self/Softmax" -> "1249 bert/encoder/layer_9/attention/self/MatMul_1" [label="[]", style=solid]; +"1249 bert/encoder/layer_9/attention/self/MatMul_1" -> "1250 QuantizeLinear_bert/encoder/layer_9/attention/self/MatMul_1^0_1" [label="[]", style=solid]; +"1250 QuantizeLinear_bert/encoder/layer_9/attention/self/MatMul_1^0_1" -> "1251 DequantizeLinear_bert/encoder/layer_9/attention/self/MatMul_1^0_1" [label="[]", style=dashed]; +"1251 DequantizeLinear_bert/encoder/layer_9/attention/self/MatMul_1^0_1" -> "1252 bert/encoder/layer_9/attention/self/transpose_3" [label="[]", style=solid]; +"1252 bert/encoder/layer_9/attention/self/transpose_3" -> "1253 bert/encoder/layer_9/attention/self/Reshape_3" [label="[]", style=solid]; +"1253 bert/encoder/layer_9/attention/self/Reshape_3" -> "1256 bert/encoder/layer_9/attention/output/dense/MatMul" [label="[]", style=solid]; +"1254 
QuantizeLinear_bert/encoder/layer_9/attention/output/dense/kernel^0_1" -> "1255 DequantizeLinear_bert/encoder/layer_9/attention/output/dense/kernel^0_1" [label="[768, 768]", style=dashed]; +"1255 DequantizeLinear_bert/encoder/layer_9/attention/output/dense/kernel^0_1" -> "1256 bert/encoder/layer_9/attention/output/dense/MatMul" [label="[768, 768]", style=solid]; +"1256 bert/encoder/layer_9/attention/output/dense/MatMul" -> "1257 bert/encoder/layer_9/attention/output/dense/BiasAdd" [label="[]", style=solid]; +"1257 bert/encoder/layer_9/attention/output/dense/BiasAdd" -> "1258 bert/encoder/layer_9/attention/output/add" [label="[]", style=solid]; +"1258 bert/encoder/layer_9/attention/output/add" -> "1259 bert/encoder/layer_9/attention/output/LayerNorm/moments/mean" [label="[]", style=solid]; +"1258 bert/encoder/layer_9/attention/output/add" -> "1261 bert/encoder/layer_9/attention/output/LayerNorm/moments/SquaredDifference" [label="[]", style=solid]; +"1258 bert/encoder/layer_9/attention/output/add" -> "1270 bert/encoder/layer_9/attention/output/LayerNorm/batchnorm/mul_1" [label="[]", style=solid]; +"1259 bert/encoder/layer_9/attention/output/LayerNorm/moments/mean" -> "1260 bert/encoder/layer_9/attention/output/LayerNorm/moments/StopGradient" [label="[]", style=solid]; +"1259 bert/encoder/layer_9/attention/output/LayerNorm/moments/mean" -> "1268 bert/encoder/layer_9/attention/output/LayerNorm/batchnorm/mul_2" [label="[]", style=solid]; +"1260 bert/encoder/layer_9/attention/output/LayerNorm/moments/StopGradient" -> "1261 bert/encoder/layer_9/attention/output/LayerNorm/moments/SquaredDifference" [label="[]", style=solid]; +"1261 bert/encoder/layer_9/attention/output/LayerNorm/moments/SquaredDifference" -> "1262 bert/encoder/layer_9/attention/output/LayerNorm/moments/SquaredDifference__435" [label="[]", style=solid]; +"1262 bert/encoder/layer_9/attention/output/LayerNorm/moments/SquaredDifference__435" -> "1263 bert/encoder/layer_9/attention/output/LayerNorm/moments/variance" [label="[]", style=solid]; +"1263 bert/encoder/layer_9/attention/output/LayerNorm/moments/variance" -> "1264 bert/encoder/layer_9/attention/output/LayerNorm/batchnorm/add" [label="[]", style=solid]; +"1264 bert/encoder/layer_9/attention/output/LayerNorm/batchnorm/add" -> "1265 bert/encoder/layer_9/attention/output/LayerNorm/batchnorm/Rsqrt" [label="[]", style=solid]; +"1265 bert/encoder/layer_9/attention/output/LayerNorm/batchnorm/Rsqrt" -> "1266 bert/encoder/layer_9/attention/output/LayerNorm/batchnorm/Rsqrt__437" [label="[]", style=solid]; +"1266 bert/encoder/layer_9/attention/output/LayerNorm/batchnorm/Rsqrt__437" -> "1267 bert/encoder/layer_9/attention/output/LayerNorm/batchnorm/mul" [label="[]", style=solid]; +"1267 bert/encoder/layer_9/attention/output/LayerNorm/batchnorm/mul" -> "1268 bert/encoder/layer_9/attention/output/LayerNorm/batchnorm/mul_2" [label="[]", style=solid]; +"1267 bert/encoder/layer_9/attention/output/LayerNorm/batchnorm/mul" -> "1270 bert/encoder/layer_9/attention/output/LayerNorm/batchnorm/mul_1" [label="[]", style=solid]; +"1268 bert/encoder/layer_9/attention/output/LayerNorm/batchnorm/mul_2" -> "1269 bert/encoder/layer_9/attention/output/LayerNorm/batchnorm/sub" [label="[]", style=solid]; +"1269 bert/encoder/layer_9/attention/output/LayerNorm/batchnorm/sub" -> "1271 bert/encoder/layer_9/attention/output/LayerNorm/batchnorm/add_1" [label="[]", style=solid]; +"1270 bert/encoder/layer_9/attention/output/LayerNorm/batchnorm/mul_1" -> "1271 
bert/encoder/layer_9/attention/output/LayerNorm/batchnorm/add_1" [label="[]", style=solid]; +"1271 bert/encoder/layer_9/attention/output/LayerNorm/batchnorm/add_1" -> "1272 QuantizeLinear_bert/encoder/layer_9/attention/output/LayerNorm/batchnorm/add_1^0_1" [label="[]", style=solid]; +"1271 bert/encoder/layer_9/attention/output/LayerNorm/batchnorm/add_1" -> "1292 bert/encoder/layer_9/output/add" [label="[]", style=solid]; +"1272 QuantizeLinear_bert/encoder/layer_9/attention/output/LayerNorm/batchnorm/add_1^0_1" -> "1273 DequantizeLinear_bert/encoder/layer_9/attention/output/LayerNorm/batchnorm/add_1^0_1" [label="[]", style=dashed]; +"1273 DequantizeLinear_bert/encoder/layer_9/attention/output/LayerNorm/batchnorm/add_1^0_1" -> "1276 bert/encoder/layer_9/intermediate/dense/MatMul" [label="[]", style=solid]; +"1274 QuantizeLinear_bert/encoder/layer_9/intermediate/dense/kernel^0_1" -> "1275 DequantizeLinear_bert/encoder/layer_9/intermediate/dense/kernel^0_1" [label="[768, 3072]", style=dashed]; +"1275 DequantizeLinear_bert/encoder/layer_9/intermediate/dense/kernel^0_1" -> "1276 bert/encoder/layer_9/intermediate/dense/MatMul" [label="[768, 3072]", style=solid]; +"1276 bert/encoder/layer_9/intermediate/dense/MatMul" -> "1277 bert/encoder/layer_9/intermediate/dense/BiasAdd" [label="[]", style=solid]; +"1277 bert/encoder/layer_9/intermediate/dense/BiasAdd" -> "1278 bert/encoder/layer_9/intermediate/dense/Pow" [label="[]", style=solid]; +"1277 bert/encoder/layer_9/intermediate/dense/BiasAdd" -> "1280 bert/encoder/layer_9/intermediate/dense/add" [label="[]", style=solid]; +"1277 bert/encoder/layer_9/intermediate/dense/BiasAdd" -> "1285 bert/encoder/layer_9/intermediate/dense/mul_3" [label="[]", style=solid]; +"1278 bert/encoder/layer_9/intermediate/dense/Pow" -> "1279 bert/encoder/layer_9/intermediate/dense/mul" [label="[]", style=solid]; +"1279 bert/encoder/layer_9/intermediate/dense/mul" -> "1280 bert/encoder/layer_9/intermediate/dense/add" [label="[]", style=solid]; +"1280 bert/encoder/layer_9/intermediate/dense/add" -> "1281 bert/encoder/layer_9/intermediate/dense/mul_1" [label="[]", style=solid]; +"1281 bert/encoder/layer_9/intermediate/dense/mul_1" -> "1282 bert/encoder/layer_9/intermediate/dense/Tanh" [label="[]", style=solid]; +"1282 bert/encoder/layer_9/intermediate/dense/Tanh" -> "1283 bert/encoder/layer_9/intermediate/dense/add_1" [label="[]", style=solid]; +"1283 bert/encoder/layer_9/intermediate/dense/add_1" -> "1284 bert/encoder/layer_9/intermediate/dense/mul_2" [label="[]", style=solid]; +"1284 bert/encoder/layer_9/intermediate/dense/mul_2" -> "1285 bert/encoder/layer_9/intermediate/dense/mul_3" [label="[]", style=solid]; +"1285 bert/encoder/layer_9/intermediate/dense/mul_3" -> "1286 QuantizeLinear_bert/encoder/layer_9/intermediate/dense/mul_3^0_1" [label="[]", style=solid]; +"1286 QuantizeLinear_bert/encoder/layer_9/intermediate/dense/mul_3^0_1" -> "1287 DequantizeLinear_bert/encoder/layer_9/intermediate/dense/mul_3^0_1" [label="[]", style=dashed]; +"1287 DequantizeLinear_bert/encoder/layer_9/intermediate/dense/mul_3^0_1" -> "1290 bert/encoder/layer_9/output/dense/MatMul" [label="[]", style=solid]; +"1288 QuantizeLinear_bert/encoder/layer_9/output/dense/kernel^0_1" -> "1289 DequantizeLinear_bert/encoder/layer_9/output/dense/kernel^0_1" [label="[3072, 768]", style=dashed]; +"1289 DequantizeLinear_bert/encoder/layer_9/output/dense/kernel^0_1" -> "1290 bert/encoder/layer_9/output/dense/MatMul" [label="[3072, 768]", style=solid]; +"1290 bert/encoder/layer_9/output/dense/MatMul" -> "1291 
bert/encoder/layer_9/output/dense/BiasAdd" [label="[]", style=solid]; +"1291 bert/encoder/layer_9/output/dense/BiasAdd" -> "1292 bert/encoder/layer_9/output/add" [label="[]", style=solid]; +"1292 bert/encoder/layer_9/output/add" -> "1293 bert/encoder/layer_9/output/LayerNorm/moments/mean" [label="[]", style=solid]; +"1292 bert/encoder/layer_9/output/add" -> "1295 bert/encoder/layer_9/output/LayerNorm/moments/SquaredDifference" [label="[]", style=solid]; +"1292 bert/encoder/layer_9/output/add" -> "1304 bert/encoder/layer_9/output/LayerNorm/batchnorm/mul_1" [label="[]", style=solid]; +"1293 bert/encoder/layer_9/output/LayerNorm/moments/mean" -> "1294 bert/encoder/layer_9/output/LayerNorm/moments/StopGradient" [label="[]", style=solid]; +"1293 bert/encoder/layer_9/output/LayerNorm/moments/mean" -> "1302 bert/encoder/layer_9/output/LayerNorm/batchnorm/mul_2" [label="[]", style=solid]; +"1294 bert/encoder/layer_9/output/LayerNorm/moments/StopGradient" -> "1295 bert/encoder/layer_9/output/LayerNorm/moments/SquaredDifference" [label="[]", style=solid]; +"1295 bert/encoder/layer_9/output/LayerNorm/moments/SquaredDifference" -> "1296 bert/encoder/layer_9/output/LayerNorm/moments/SquaredDifference__439" [label="[]", style=solid]; +"1296 bert/encoder/layer_9/output/LayerNorm/moments/SquaredDifference__439" -> "1297 bert/encoder/layer_9/output/LayerNorm/moments/variance" [label="[]", style=solid]; +"1297 bert/encoder/layer_9/output/LayerNorm/moments/variance" -> "1298 bert/encoder/layer_9/output/LayerNorm/batchnorm/add" [label="[]", style=solid]; +"1298 bert/encoder/layer_9/output/LayerNorm/batchnorm/add" -> "1299 bert/encoder/layer_9/output/LayerNorm/batchnorm/Rsqrt" [label="[]", style=solid]; +"1299 bert/encoder/layer_9/output/LayerNorm/batchnorm/Rsqrt" -> "1300 bert/encoder/layer_9/output/LayerNorm/batchnorm/Rsqrt__441" [label="[]", style=solid]; +"1300 bert/encoder/layer_9/output/LayerNorm/batchnorm/Rsqrt__441" -> "1301 bert/encoder/layer_9/output/LayerNorm/batchnorm/mul" [label="[]", style=solid]; +"1301 bert/encoder/layer_9/output/LayerNorm/batchnorm/mul" -> "1302 bert/encoder/layer_9/output/LayerNorm/batchnorm/mul_2" [label="[]", style=solid]; +"1301 bert/encoder/layer_9/output/LayerNorm/batchnorm/mul" -> "1304 bert/encoder/layer_9/output/LayerNorm/batchnorm/mul_1" [label="[]", style=solid]; +"1302 bert/encoder/layer_9/output/LayerNorm/batchnorm/mul_2" -> "1303 bert/encoder/layer_9/output/LayerNorm/batchnorm/sub" [label="[]", style=solid]; +"1303 bert/encoder/layer_9/output/LayerNorm/batchnorm/sub" -> "1305 bert/encoder/layer_9/output/LayerNorm/batchnorm/add_1" [label="[]", style=solid]; +"1304 bert/encoder/layer_9/output/LayerNorm/batchnorm/mul_1" -> "1305 bert/encoder/layer_9/output/LayerNorm/batchnorm/add_1" [label="[]", style=solid]; +"1305 bert/encoder/layer_9/output/LayerNorm/batchnorm/add_1" -> "1306 QuantizeLinear_bert/encoder/layer_9/output/LayerNorm/batchnorm/add_1^0_3" [label="[]", style=solid]; +"1305 bert/encoder/layer_9/output/LayerNorm/batchnorm/add_1" -> "1308 QuantizeLinear_bert/encoder/layer_9/output/LayerNorm/batchnorm/add_1^0_2" [label="[]", style=solid]; +"1305 bert/encoder/layer_9/output/LayerNorm/batchnorm/add_1" -> "1310 QuantizeLinear_bert/encoder/layer_9/output/LayerNorm/batchnorm/add_1^0_1" [label="[]", style=solid]; +"1305 bert/encoder/layer_9/output/LayerNorm/batchnorm/add_1" -> "1348 bert/encoder/layer_10/attention/output/add" [label="[]", style=solid]; +"1306 QuantizeLinear_bert/encoder/layer_9/output/LayerNorm/batchnorm/add_1^0_3" -> "1307 
DequantizeLinear_bert/encoder/layer_9/output/LayerNorm/batchnorm/add_1^0_3" [label="[]", style=dashed]; +"1307 DequantizeLinear_bert/encoder/layer_9/output/LayerNorm/batchnorm/add_1^0_3" -> "1328 bert/encoder/layer_10/attention/self/key/MatMul" [label="[]", style=solid]; +"1308 QuantizeLinear_bert/encoder/layer_9/output/LayerNorm/batchnorm/add_1^0_2" -> "1309 DequantizeLinear_bert/encoder/layer_9/output/LayerNorm/batchnorm/add_1^0_2" [label="[]", style=dashed]; +"1309 DequantizeLinear_bert/encoder/layer_9/output/LayerNorm/batchnorm/add_1^0_2" -> "1320 bert/encoder/layer_10/attention/self/query/MatMul" [label="[]", style=solid]; +"1310 QuantizeLinear_bert/encoder/layer_9/output/LayerNorm/batchnorm/add_1^0_1" -> "1311 DequantizeLinear_bert/encoder/layer_9/output/LayerNorm/batchnorm/add_1^0_1" [label="[]", style=dashed]; +"1311 DequantizeLinear_bert/encoder/layer_9/output/LayerNorm/batchnorm/add_1^0_1" -> "1314 bert/encoder/layer_10/attention/self/value/MatMul" [label="[]", style=solid]; +"1312 QuantizeLinear_bert/encoder/layer_10/attention/self/value/kernel^0_1" -> "1313 DequantizeLinear_bert/encoder/layer_10/attention/self/value/kernel^0_1" [label="[768, 768]", style=dashed]; +"1313 DequantizeLinear_bert/encoder/layer_10/attention/self/value/kernel^0_1" -> "1314 bert/encoder/layer_10/attention/self/value/MatMul" [label="[768, 768]", style=solid]; +"1314 bert/encoder/layer_10/attention/self/value/MatMul" -> "1315 bert/encoder/layer_10/attention/self/value/BiasAdd" [label="[]", style=solid]; +"1315 bert/encoder/layer_10/attention/self/value/BiasAdd" -> "1316 bert/encoder/layer_10/attention/self/Reshape_2" [label="[]", style=solid]; +"1316 bert/encoder/layer_10/attention/self/Reshape_2" -> "1317 bert/encoder/layer_10/attention/self/transpose_2" [label="[]", style=solid]; +"1317 bert/encoder/layer_10/attention/self/transpose_2" -> "1339 bert/encoder/layer_10/attention/self/MatMul_1" [label="[]", style=solid]; +"1318 QuantizeLinear_bert/encoder/layer_10/attention/self/query/kernel^0_1" -> "1319 DequantizeLinear_bert/encoder/layer_10/attention/self/query/kernel^0_1" [label="[768, 768]", style=dashed]; +"1319 DequantizeLinear_bert/encoder/layer_10/attention/self/query/kernel^0_1" -> "1320 bert/encoder/layer_10/attention/self/query/MatMul" [label="[768, 768]", style=solid]; +"1320 bert/encoder/layer_10/attention/self/query/MatMul" -> "1321 bert/encoder/layer_10/attention/self/query/BiasAdd" [label="[]", style=solid]; +"1321 bert/encoder/layer_10/attention/self/query/BiasAdd" -> "1322 QuantizeLinear_bert/encoder/layer_10/attention/self/query/BiasAdd^0_1" [label="[]", style=solid]; +"1322 QuantizeLinear_bert/encoder/layer_10/attention/self/query/BiasAdd^0_1" -> "1323 DequantizeLinear_bert/encoder/layer_10/attention/self/query/BiasAdd^0_1" [label="[]", style=dashed]; +"1323 DequantizeLinear_bert/encoder/layer_10/attention/self/query/BiasAdd^0_1" -> "1324 bert/encoder/layer_10/attention/self/Reshape" [label="[]", style=solid]; +"1324 bert/encoder/layer_10/attention/self/Reshape" -> "1325 bert/encoder/layer_10/attention/self/transpose" [label="[]", style=solid]; +"1325 bert/encoder/layer_10/attention/self/transpose" -> "1335 bert/encoder/layer_10/attention/self/MatMul" [label="[]", style=solid]; +"1326 QuantizeLinear_bert/encoder/layer_10/attention/self/key/kernel^0_1" -> "1327 DequantizeLinear_bert/encoder/layer_10/attention/self/key/kernel^0_1" [label="[768, 768]", style=dashed]; +"1327 DequantizeLinear_bert/encoder/layer_10/attention/self/key/kernel^0_1" -> "1328 
bert/encoder/layer_10/attention/self/key/MatMul" [label="[768, 768]", style=solid]; +"1328 bert/encoder/layer_10/attention/self/key/MatMul" -> "1329 bert/encoder/layer_10/attention/self/key/BiasAdd" [label="[]", style=solid]; +"1329 bert/encoder/layer_10/attention/self/key/BiasAdd" -> "1330 QuantizeLinear_bert/encoder/layer_10/attention/self/key/BiasAdd^0_1" [label="[]", style=solid]; +"1330 QuantizeLinear_bert/encoder/layer_10/attention/self/key/BiasAdd^0_1" -> "1331 DequantizeLinear_bert/encoder/layer_10/attention/self/key/BiasAdd^0_1" [label="[]", style=dashed]; +"1331 DequantizeLinear_bert/encoder/layer_10/attention/self/key/BiasAdd^0_1" -> "1332 bert/encoder/layer_10/attention/self/Reshape_1" [label="[]", style=solid]; +"1332 bert/encoder/layer_10/attention/self/Reshape_1" -> "1333 bert/encoder/layer_10/attention/self/transpose_1" [label="[]", style=solid]; +"1333 bert/encoder/layer_10/attention/self/transpose_1" -> "1334 bert/encoder/layer_10/attention/self/MatMul__446" [label="[]", style=solid]; +"1334 bert/encoder/layer_10/attention/self/MatMul__446" -> "1335 bert/encoder/layer_10/attention/self/MatMul" [label="[]", style=solid]; +"1335 bert/encoder/layer_10/attention/self/MatMul" -> "1336 bert/encoder/layer_10/attention/self/Mul" [label="[]", style=solid]; +"1336 bert/encoder/layer_10/attention/self/Mul" -> "1337 bert/encoder/layer_10/attention/self/add" [label="[]", style=solid]; +"1337 bert/encoder/layer_10/attention/self/add" -> "1338 bert/encoder/layer_10/attention/self/Softmax" [label="[]", style=solid]; +"1338 bert/encoder/layer_10/attention/self/Softmax" -> "1339 bert/encoder/layer_10/attention/self/MatMul_1" [label="[]", style=solid]; +"1339 bert/encoder/layer_10/attention/self/MatMul_1" -> "1340 QuantizeLinear_bert/encoder/layer_10/attention/self/MatMul_1^0_1" [label="[]", style=solid]; +"1340 QuantizeLinear_bert/encoder/layer_10/attention/self/MatMul_1^0_1" -> "1341 DequantizeLinear_bert/encoder/layer_10/attention/self/MatMul_1^0_1" [label="[]", style=dashed]; +"1341 DequantizeLinear_bert/encoder/layer_10/attention/self/MatMul_1^0_1" -> "1342 bert/encoder/layer_10/attention/self/transpose_3" [label="[]", style=solid]; +"1342 bert/encoder/layer_10/attention/self/transpose_3" -> "1343 bert/encoder/layer_10/attention/self/Reshape_3" [label="[]", style=solid]; +"1343 bert/encoder/layer_10/attention/self/Reshape_3" -> "1346 bert/encoder/layer_10/attention/output/dense/MatMul" [label="[]", style=solid]; +"1344 QuantizeLinear_bert/encoder/layer_10/attention/output/dense/kernel^0_1" -> "1345 DequantizeLinear_bert/encoder/layer_10/attention/output/dense/kernel^0_1" [label="[768, 768]", style=dashed]; +"1345 DequantizeLinear_bert/encoder/layer_10/attention/output/dense/kernel^0_1" -> "1346 bert/encoder/layer_10/attention/output/dense/MatMul" [label="[768, 768]", style=solid]; +"1346 bert/encoder/layer_10/attention/output/dense/MatMul" -> "1347 bert/encoder/layer_10/attention/output/dense/BiasAdd" [label="[]", style=solid]; +"1347 bert/encoder/layer_10/attention/output/dense/BiasAdd" -> "1348 bert/encoder/layer_10/attention/output/add" [label="[]", style=solid]; +"1348 bert/encoder/layer_10/attention/output/add" -> "1349 bert/encoder/layer_10/attention/output/LayerNorm/moments/mean" [label="[]", style=solid]; +"1348 bert/encoder/layer_10/attention/output/add" -> "1351 bert/encoder/layer_10/attention/output/LayerNorm/moments/SquaredDifference" [label="[]", style=solid]; +"1348 bert/encoder/layer_10/attention/output/add" -> "1360 
bert/encoder/layer_10/attention/output/LayerNorm/batchnorm/mul_1" [label="[]", style=solid]; +"1349 bert/encoder/layer_10/attention/output/LayerNorm/moments/mean" -> "1350 bert/encoder/layer_10/attention/output/LayerNorm/moments/StopGradient" [label="[]", style=solid]; +"1349 bert/encoder/layer_10/attention/output/LayerNorm/moments/mean" -> "1358 bert/encoder/layer_10/attention/output/LayerNorm/batchnorm/mul_2" [label="[]", style=solid]; +"1350 bert/encoder/layer_10/attention/output/LayerNorm/moments/StopGradient" -> "1351 bert/encoder/layer_10/attention/output/LayerNorm/moments/SquaredDifference" [label="[]", style=solid]; +"1351 bert/encoder/layer_10/attention/output/LayerNorm/moments/SquaredDifference" -> "1352 bert/encoder/layer_10/attention/output/LayerNorm/moments/SquaredDifference__449" [label="[]", style=solid]; +"1352 bert/encoder/layer_10/attention/output/LayerNorm/moments/SquaredDifference__449" -> "1353 bert/encoder/layer_10/attention/output/LayerNorm/moments/variance" [label="[]", style=solid]; +"1353 bert/encoder/layer_10/attention/output/LayerNorm/moments/variance" -> "1354 bert/encoder/layer_10/attention/output/LayerNorm/batchnorm/add" [label="[]", style=solid]; +"1354 bert/encoder/layer_10/attention/output/LayerNorm/batchnorm/add" -> "1355 bert/encoder/layer_10/attention/output/LayerNorm/batchnorm/Rsqrt" [label="[]", style=solid]; +"1355 bert/encoder/layer_10/attention/output/LayerNorm/batchnorm/Rsqrt" -> "1356 bert/encoder/layer_10/attention/output/LayerNorm/batchnorm/Rsqrt__451" [label="[]", style=solid]; +"1356 bert/encoder/layer_10/attention/output/LayerNorm/batchnorm/Rsqrt__451" -> "1357 bert/encoder/layer_10/attention/output/LayerNorm/batchnorm/mul" [label="[]", style=solid]; +"1357 bert/encoder/layer_10/attention/output/LayerNorm/batchnorm/mul" -> "1358 bert/encoder/layer_10/attention/output/LayerNorm/batchnorm/mul_2" [label="[]", style=solid]; +"1357 bert/encoder/layer_10/attention/output/LayerNorm/batchnorm/mul" -> "1360 bert/encoder/layer_10/attention/output/LayerNorm/batchnorm/mul_1" [label="[]", style=solid]; +"1358 bert/encoder/layer_10/attention/output/LayerNorm/batchnorm/mul_2" -> "1359 bert/encoder/layer_10/attention/output/LayerNorm/batchnorm/sub" [label="[]", style=solid]; +"1359 bert/encoder/layer_10/attention/output/LayerNorm/batchnorm/sub" -> "1361 bert/encoder/layer_10/attention/output/LayerNorm/batchnorm/add_1" [label="[]", style=solid]; +"1360 bert/encoder/layer_10/attention/output/LayerNorm/batchnorm/mul_1" -> "1361 bert/encoder/layer_10/attention/output/LayerNorm/batchnorm/add_1" [label="[]", style=solid]; +"1361 bert/encoder/layer_10/attention/output/LayerNorm/batchnorm/add_1" -> "1362 QuantizeLinear_bert/encoder/layer_10/attention/output/LayerNorm/batchnorm/add_1^0_1" [label="[]", style=solid]; +"1361 bert/encoder/layer_10/attention/output/LayerNorm/batchnorm/add_1" -> "1382 bert/encoder/layer_10/output/add" [label="[]", style=solid]; +"1362 QuantizeLinear_bert/encoder/layer_10/attention/output/LayerNorm/batchnorm/add_1^0_1" -> "1363 DequantizeLinear_bert/encoder/layer_10/attention/output/LayerNorm/batchnorm/add_1^0_1" [label="[]", style=dashed]; +"1363 DequantizeLinear_bert/encoder/layer_10/attention/output/LayerNorm/batchnorm/add_1^0_1" -> "1366 bert/encoder/layer_10/intermediate/dense/MatMul" [label="[]", style=solid]; +"1364 QuantizeLinear_bert/encoder/layer_10/intermediate/dense/kernel^0_1" -> "1365 DequantizeLinear_bert/encoder/layer_10/intermediate/dense/kernel^0_1" [label="[768, 3072]", style=dashed]; +"1365 
DequantizeLinear_bert/encoder/layer_10/intermediate/dense/kernel^0_1" -> "1366 bert/encoder/layer_10/intermediate/dense/MatMul" [label="[768, 3072]", style=solid]; +"1366 bert/encoder/layer_10/intermediate/dense/MatMul" -> "1367 bert/encoder/layer_10/intermediate/dense/BiasAdd" [label="[]", style=solid]; +"1367 bert/encoder/layer_10/intermediate/dense/BiasAdd" -> "1368 bert/encoder/layer_10/intermediate/dense/Pow" [label="[]", style=solid]; +"1367 bert/encoder/layer_10/intermediate/dense/BiasAdd" -> "1370 bert/encoder/layer_10/intermediate/dense/add" [label="[]", style=solid]; +"1367 bert/encoder/layer_10/intermediate/dense/BiasAdd" -> "1375 bert/encoder/layer_10/intermediate/dense/mul_3" [label="[]", style=solid]; +"1368 bert/encoder/layer_10/intermediate/dense/Pow" -> "1369 bert/encoder/layer_10/intermediate/dense/mul" [label="[]", style=solid]; +"1369 bert/encoder/layer_10/intermediate/dense/mul" -> "1370 bert/encoder/layer_10/intermediate/dense/add" [label="[]", style=solid]; +"1370 bert/encoder/layer_10/intermediate/dense/add" -> "1371 bert/encoder/layer_10/intermediate/dense/mul_1" [label="[]", style=solid]; +"1371 bert/encoder/layer_10/intermediate/dense/mul_1" -> "1372 bert/encoder/layer_10/intermediate/dense/Tanh" [label="[]", style=solid]; +"1372 bert/encoder/layer_10/intermediate/dense/Tanh" -> "1373 bert/encoder/layer_10/intermediate/dense/add_1" [label="[]", style=solid]; +"1373 bert/encoder/layer_10/intermediate/dense/add_1" -> "1374 bert/encoder/layer_10/intermediate/dense/mul_2" [label="[]", style=solid]; +"1374 bert/encoder/layer_10/intermediate/dense/mul_2" -> "1375 bert/encoder/layer_10/intermediate/dense/mul_3" [label="[]", style=solid]; +"1375 bert/encoder/layer_10/intermediate/dense/mul_3" -> "1376 QuantizeLinear_bert/encoder/layer_10/intermediate/dense/mul_3^0_1" [label="[]", style=solid]; +"1376 QuantizeLinear_bert/encoder/layer_10/intermediate/dense/mul_3^0_1" -> "1377 DequantizeLinear_bert/encoder/layer_10/intermediate/dense/mul_3^0_1" [label="[]", style=dashed]; +"1377 DequantizeLinear_bert/encoder/layer_10/intermediate/dense/mul_3^0_1" -> "1380 bert/encoder/layer_10/output/dense/MatMul" [label="[]", style=solid]; +"1378 QuantizeLinear_bert/encoder/layer_10/output/dense/kernel^0_1" -> "1379 DequantizeLinear_bert/encoder/layer_10/output/dense/kernel^0_1" [label="[3072, 768]", style=dashed]; +"1379 DequantizeLinear_bert/encoder/layer_10/output/dense/kernel^0_1" -> "1380 bert/encoder/layer_10/output/dense/MatMul" [label="[3072, 768]", style=solid]; +"1380 bert/encoder/layer_10/output/dense/MatMul" -> "1381 bert/encoder/layer_10/output/dense/BiasAdd" [label="[]", style=solid]; +"1381 bert/encoder/layer_10/output/dense/BiasAdd" -> "1382 bert/encoder/layer_10/output/add" [label="[]", style=solid]; +"1382 bert/encoder/layer_10/output/add" -> "1383 bert/encoder/layer_10/output/LayerNorm/moments/mean" [label="[]", style=solid]; +"1382 bert/encoder/layer_10/output/add" -> "1385 bert/encoder/layer_10/output/LayerNorm/moments/SquaredDifference" [label="[]", style=solid]; +"1382 bert/encoder/layer_10/output/add" -> "1394 bert/encoder/layer_10/output/LayerNorm/batchnorm/mul_1" [label="[]", style=solid]; +"1383 bert/encoder/layer_10/output/LayerNorm/moments/mean" -> "1384 bert/encoder/layer_10/output/LayerNorm/moments/StopGradient" [label="[]", style=solid]; +"1383 bert/encoder/layer_10/output/LayerNorm/moments/mean" -> "1392 bert/encoder/layer_10/output/LayerNorm/batchnorm/mul_2" [label="[]", style=solid]; +"1384 bert/encoder/layer_10/output/LayerNorm/moments/StopGradient" -> 
"1385 bert/encoder/layer_10/output/LayerNorm/moments/SquaredDifference" [label="[]", style=solid]; +"1385 bert/encoder/layer_10/output/LayerNorm/moments/SquaredDifference" -> "1386 bert/encoder/layer_10/output/LayerNorm/moments/SquaredDifference__453" [label="[]", style=solid]; +"1386 bert/encoder/layer_10/output/LayerNorm/moments/SquaredDifference__453" -> "1387 bert/encoder/layer_10/output/LayerNorm/moments/variance" [label="[]", style=solid]; +"1387 bert/encoder/layer_10/output/LayerNorm/moments/variance" -> "1388 bert/encoder/layer_10/output/LayerNorm/batchnorm/add" [label="[]", style=solid]; +"1388 bert/encoder/layer_10/output/LayerNorm/batchnorm/add" -> "1389 bert/encoder/layer_10/output/LayerNorm/batchnorm/Rsqrt" [label="[]", style=solid]; +"1389 bert/encoder/layer_10/output/LayerNorm/batchnorm/Rsqrt" -> "1390 bert/encoder/layer_10/output/LayerNorm/batchnorm/Rsqrt__455" [label="[]", style=solid]; +"1390 bert/encoder/layer_10/output/LayerNorm/batchnorm/Rsqrt__455" -> "1391 bert/encoder/layer_10/output/LayerNorm/batchnorm/mul" [label="[]", style=solid]; +"1391 bert/encoder/layer_10/output/LayerNorm/batchnorm/mul" -> "1392 bert/encoder/layer_10/output/LayerNorm/batchnorm/mul_2" [label="[]", style=solid]; +"1391 bert/encoder/layer_10/output/LayerNorm/batchnorm/mul" -> "1394 bert/encoder/layer_10/output/LayerNorm/batchnorm/mul_1" [label="[]", style=solid]; +"1392 bert/encoder/layer_10/output/LayerNorm/batchnorm/mul_2" -> "1393 bert/encoder/layer_10/output/LayerNorm/batchnorm/sub" [label="[]", style=solid]; +"1393 bert/encoder/layer_10/output/LayerNorm/batchnorm/sub" -> "1395 bert/encoder/layer_10/output/LayerNorm/batchnorm/add_1" [label="[]", style=solid]; +"1394 bert/encoder/layer_10/output/LayerNorm/batchnorm/mul_1" -> "1395 bert/encoder/layer_10/output/LayerNorm/batchnorm/add_1" [label="[]", style=solid]; +"1395 bert/encoder/layer_10/output/LayerNorm/batchnorm/add_1" -> "1396 QuantizeLinear_bert/encoder/layer_10/output/LayerNorm/batchnorm/add_1^0_3" [label="[]", style=solid]; +"1395 bert/encoder/layer_10/output/LayerNorm/batchnorm/add_1" -> "1398 QuantizeLinear_bert/encoder/layer_10/output/LayerNorm/batchnorm/add_1^0_2" [label="[]", style=solid]; +"1395 bert/encoder/layer_10/output/LayerNorm/batchnorm/add_1" -> "1400 QuantizeLinear_bert/encoder/layer_10/output/LayerNorm/batchnorm/add_1^0_1" [label="[]", style=solid]; +"1395 bert/encoder/layer_10/output/LayerNorm/batchnorm/add_1" -> "1438 bert/encoder/layer_11/attention/output/add" [label="[]", style=solid]; +"1396 QuantizeLinear_bert/encoder/layer_10/output/LayerNorm/batchnorm/add_1^0_3" -> "1397 DequantizeLinear_bert/encoder/layer_10/output/LayerNorm/batchnorm/add_1^0_3" [label="[]", style=dashed]; +"1397 DequantizeLinear_bert/encoder/layer_10/output/LayerNorm/batchnorm/add_1^0_3" -> "1418 bert/encoder/layer_11/attention/self/key/MatMul" [label="[]", style=solid]; +"1398 QuantizeLinear_bert/encoder/layer_10/output/LayerNorm/batchnorm/add_1^0_2" -> "1399 DequantizeLinear_bert/encoder/layer_10/output/LayerNorm/batchnorm/add_1^0_2" [label="[]", style=dashed]; +"1399 DequantizeLinear_bert/encoder/layer_10/output/LayerNorm/batchnorm/add_1^0_2" -> "1410 bert/encoder/layer_11/attention/self/query/MatMul" [label="[]", style=solid]; +"1400 QuantizeLinear_bert/encoder/layer_10/output/LayerNorm/batchnorm/add_1^0_1" -> "1401 DequantizeLinear_bert/encoder/layer_10/output/LayerNorm/batchnorm/add_1^0_1" [label="[]", style=dashed]; +"1401 DequantizeLinear_bert/encoder/layer_10/output/LayerNorm/batchnorm/add_1^0_1" -> "1404 
bert/encoder/layer_11/attention/self/value/MatMul" [label="[]", style=solid]; +"1402 QuantizeLinear_bert/encoder/layer_11/attention/self/value/kernel^0_1" -> "1403 DequantizeLinear_bert/encoder/layer_11/attention/self/value/kernel^0_1" [label="[768, 768]", style=dashed]; +"1403 DequantizeLinear_bert/encoder/layer_11/attention/self/value/kernel^0_1" -> "1404 bert/encoder/layer_11/attention/self/value/MatMul" [label="[768, 768]", style=solid]; +"1404 bert/encoder/layer_11/attention/self/value/MatMul" -> "1405 bert/encoder/layer_11/attention/self/value/BiasAdd" [label="[]", style=solid]; +"1405 bert/encoder/layer_11/attention/self/value/BiasAdd" -> "1406 bert/encoder/layer_11/attention/self/Reshape_2" [label="[]", style=solid]; +"1406 bert/encoder/layer_11/attention/self/Reshape_2" -> "1407 bert/encoder/layer_11/attention/self/transpose_2" [label="[]", style=solid]; +"1407 bert/encoder/layer_11/attention/self/transpose_2" -> "1429 bert/encoder/layer_11/attention/self/MatMul_1" [label="[]", style=solid]; +"1408 QuantizeLinear_bert/encoder/layer_11/attention/self/query/kernel^0_1" -> "1409 DequantizeLinear_bert/encoder/layer_11/attention/self/query/kernel^0_1" [label="[768, 768]", style=dashed]; +"1409 DequantizeLinear_bert/encoder/layer_11/attention/self/query/kernel^0_1" -> "1410 bert/encoder/layer_11/attention/self/query/MatMul" [label="[768, 768]", style=solid]; +"1410 bert/encoder/layer_11/attention/self/query/MatMul" -> "1411 bert/encoder/layer_11/attention/self/query/BiasAdd" [label="[]", style=solid]; +"1411 bert/encoder/layer_11/attention/self/query/BiasAdd" -> "1412 QuantizeLinear_bert/encoder/layer_11/attention/self/query/BiasAdd^0_1" [label="[]", style=solid]; +"1412 QuantizeLinear_bert/encoder/layer_11/attention/self/query/BiasAdd^0_1" -> "1413 DequantizeLinear_bert/encoder/layer_11/attention/self/query/BiasAdd^0_1" [label="[]", style=dashed]; +"1413 DequantizeLinear_bert/encoder/layer_11/attention/self/query/BiasAdd^0_1" -> "1414 bert/encoder/layer_11/attention/self/Reshape" [label="[]", style=solid]; +"1414 bert/encoder/layer_11/attention/self/Reshape" -> "1415 bert/encoder/layer_11/attention/self/transpose" [label="[]", style=solid]; +"1415 bert/encoder/layer_11/attention/self/transpose" -> "1425 bert/encoder/layer_11/attention/self/MatMul" [label="[]", style=solid]; +"1416 QuantizeLinear_bert/encoder/layer_11/attention/self/key/kernel^0_1" -> "1417 DequantizeLinear_bert/encoder/layer_11/attention/self/key/kernel^0_1" [label="[768, 768]", style=dashed]; +"1417 DequantizeLinear_bert/encoder/layer_11/attention/self/key/kernel^0_1" -> "1418 bert/encoder/layer_11/attention/self/key/MatMul" [label="[768, 768]", style=solid]; +"1418 bert/encoder/layer_11/attention/self/key/MatMul" -> "1419 bert/encoder/layer_11/attention/self/key/BiasAdd" [label="[]", style=solid]; +"1419 bert/encoder/layer_11/attention/self/key/BiasAdd" -> "1420 QuantizeLinear_bert/encoder/layer_11/attention/self/key/BiasAdd^0_1" [label="[]", style=solid]; +"1420 QuantizeLinear_bert/encoder/layer_11/attention/self/key/BiasAdd^0_1" -> "1421 DequantizeLinear_bert/encoder/layer_11/attention/self/key/BiasAdd^0_1" [label="[]", style=dashed]; +"1421 DequantizeLinear_bert/encoder/layer_11/attention/self/key/BiasAdd^0_1" -> "1422 bert/encoder/layer_11/attention/self/Reshape_1" [label="[]", style=solid]; +"1422 bert/encoder/layer_11/attention/self/Reshape_1" -> "1423 bert/encoder/layer_11/attention/self/transpose_1" [label="[]", style=solid]; +"1423 bert/encoder/layer_11/attention/self/transpose_1" -> "1424 
bert/encoder/layer_11/attention/self/MatMul__460" [label="[]", style=solid]; +"1424 bert/encoder/layer_11/attention/self/MatMul__460" -> "1425 bert/encoder/layer_11/attention/self/MatMul" [label="[]", style=solid]; +"1425 bert/encoder/layer_11/attention/self/MatMul" -> "1426 bert/encoder/layer_11/attention/self/Mul" [label="[]", style=solid]; +"1426 bert/encoder/layer_11/attention/self/Mul" -> "1427 bert/encoder/layer_11/attention/self/add" [label="[]", style=solid]; +"1427 bert/encoder/layer_11/attention/self/add" -> "1428 bert/encoder/layer_11/attention/self/Softmax" [label="[]", style=solid]; +"1428 bert/encoder/layer_11/attention/self/Softmax" -> "1429 bert/encoder/layer_11/attention/self/MatMul_1" [label="[]", style=solid]; +"1429 bert/encoder/layer_11/attention/self/MatMul_1" -> "1430 QuantizeLinear_bert/encoder/layer_11/attention/self/MatMul_1^0_1" [label="[]", style=solid]; +"1430 QuantizeLinear_bert/encoder/layer_11/attention/self/MatMul_1^0_1" -> "1431 DequantizeLinear_bert/encoder/layer_11/attention/self/MatMul_1^0_1" [label="[]", style=dashed]; +"1431 DequantizeLinear_bert/encoder/layer_11/attention/self/MatMul_1^0_1" -> "1432 bert/encoder/layer_11/attention/self/transpose_3" [label="[]", style=solid]; +"1432 bert/encoder/layer_11/attention/self/transpose_3" -> "1433 bert/encoder/layer_11/attention/self/Reshape_3" [label="[]", style=solid]; +"1433 bert/encoder/layer_11/attention/self/Reshape_3" -> "1436 bert/encoder/layer_11/attention/output/dense/MatMul" [label="[]", style=solid]; +"1434 QuantizeLinear_bert/encoder/layer_11/attention/output/dense/kernel^0_1" -> "1435 DequantizeLinear_bert/encoder/layer_11/attention/output/dense/kernel^0_1" [label="[768, 768]", style=dashed]; +"1435 DequantizeLinear_bert/encoder/layer_11/attention/output/dense/kernel^0_1" -> "1436 bert/encoder/layer_11/attention/output/dense/MatMul" [label="[768, 768]", style=solid]; +"1436 bert/encoder/layer_11/attention/output/dense/MatMul" -> "1437 bert/encoder/layer_11/attention/output/dense/BiasAdd" [label="[]", style=solid]; +"1437 bert/encoder/layer_11/attention/output/dense/BiasAdd" -> "1438 bert/encoder/layer_11/attention/output/add" [label="[]", style=solid]; +"1438 bert/encoder/layer_11/attention/output/add" -> "1439 bert/encoder/layer_11/attention/output/LayerNorm/moments/mean" [label="[]", style=solid]; +"1438 bert/encoder/layer_11/attention/output/add" -> "1441 bert/encoder/layer_11/attention/output/LayerNorm/moments/SquaredDifference" [label="[]", style=solid]; +"1438 bert/encoder/layer_11/attention/output/add" -> "1450 bert/encoder/layer_11/attention/output/LayerNorm/batchnorm/mul_1" [label="[]", style=solid]; +"1439 bert/encoder/layer_11/attention/output/LayerNorm/moments/mean" -> "1440 bert/encoder/layer_11/attention/output/LayerNorm/moments/StopGradient" [label="[]", style=solid]; +"1439 bert/encoder/layer_11/attention/output/LayerNorm/moments/mean" -> "1448 bert/encoder/layer_11/attention/output/LayerNorm/batchnorm/mul_2" [label="[]", style=solid]; +"1440 bert/encoder/layer_11/attention/output/LayerNorm/moments/StopGradient" -> "1441 bert/encoder/layer_11/attention/output/LayerNorm/moments/SquaredDifference" [label="[]", style=solid]; +"1441 bert/encoder/layer_11/attention/output/LayerNorm/moments/SquaredDifference" -> "1442 bert/encoder/layer_11/attention/output/LayerNorm/moments/SquaredDifference__463" [label="[]", style=solid]; +"1442 bert/encoder/layer_11/attention/output/LayerNorm/moments/SquaredDifference__463" -> "1443 bert/encoder/layer_11/attention/output/LayerNorm/moments/variance" 
[label="[]", style=solid]; +"1443 bert/encoder/layer_11/attention/output/LayerNorm/moments/variance" -> "1444 bert/encoder/layer_11/attention/output/LayerNorm/batchnorm/add" [label="[]", style=solid]; +"1444 bert/encoder/layer_11/attention/output/LayerNorm/batchnorm/add" -> "1445 bert/encoder/layer_11/attention/output/LayerNorm/batchnorm/Rsqrt" [label="[]", style=solid]; +"1445 bert/encoder/layer_11/attention/output/LayerNorm/batchnorm/Rsqrt" -> "1446 bert/encoder/layer_11/attention/output/LayerNorm/batchnorm/Rsqrt__465" [label="[]", style=solid]; +"1446 bert/encoder/layer_11/attention/output/LayerNorm/batchnorm/Rsqrt__465" -> "1447 bert/encoder/layer_11/attention/output/LayerNorm/batchnorm/mul" [label="[]", style=solid]; +"1447 bert/encoder/layer_11/attention/output/LayerNorm/batchnorm/mul" -> "1448 bert/encoder/layer_11/attention/output/LayerNorm/batchnorm/mul_2" [label="[]", style=solid]; +"1447 bert/encoder/layer_11/attention/output/LayerNorm/batchnorm/mul" -> "1450 bert/encoder/layer_11/attention/output/LayerNorm/batchnorm/mul_1" [label="[]", style=solid]; +"1448 bert/encoder/layer_11/attention/output/LayerNorm/batchnorm/mul_2" -> "1449 bert/encoder/layer_11/attention/output/LayerNorm/batchnorm/sub" [label="[]", style=solid]; +"1449 bert/encoder/layer_11/attention/output/LayerNorm/batchnorm/sub" -> "1451 bert/encoder/layer_11/attention/output/LayerNorm/batchnorm/add_1" [label="[]", style=solid]; +"1450 bert/encoder/layer_11/attention/output/LayerNorm/batchnorm/mul_1" -> "1451 bert/encoder/layer_11/attention/output/LayerNorm/batchnorm/add_1" [label="[]", style=solid]; +"1451 bert/encoder/layer_11/attention/output/LayerNorm/batchnorm/add_1" -> "1452 QuantizeLinear_bert/encoder/layer_11/attention/output/LayerNorm/batchnorm/add_1^0_1" [label="[]", style=solid]; +"1451 bert/encoder/layer_11/attention/output/LayerNorm/batchnorm/add_1" -> "1472 bert/encoder/layer_11/output/add" [label="[]", style=solid]; +"1452 QuantizeLinear_bert/encoder/layer_11/attention/output/LayerNorm/batchnorm/add_1^0_1" -> "1453 DequantizeLinear_bert/encoder/layer_11/attention/output/LayerNorm/batchnorm/add_1^0_1" [label="[]", style=dashed]; +"1453 DequantizeLinear_bert/encoder/layer_11/attention/output/LayerNorm/batchnorm/add_1^0_1" -> "1456 bert/encoder/layer_11/intermediate/dense/MatMul" [label="[]", style=solid]; +"1454 QuantizeLinear_bert/encoder/layer_11/intermediate/dense/kernel^0_1" -> "1455 DequantizeLinear_bert/encoder/layer_11/intermediate/dense/kernel^0_1" [label="[768, 3072]", style=dashed]; +"1455 DequantizeLinear_bert/encoder/layer_11/intermediate/dense/kernel^0_1" -> "1456 bert/encoder/layer_11/intermediate/dense/MatMul" [label="[768, 3072]", style=solid]; +"1456 bert/encoder/layer_11/intermediate/dense/MatMul" -> "1457 bert/encoder/layer_11/intermediate/dense/BiasAdd" [label="[]", style=solid]; +"1457 bert/encoder/layer_11/intermediate/dense/BiasAdd" -> "1458 bert/encoder/layer_11/intermediate/dense/Pow" [label="[]", style=solid]; +"1457 bert/encoder/layer_11/intermediate/dense/BiasAdd" -> "1460 bert/encoder/layer_11/intermediate/dense/add" [label="[]", style=solid]; +"1457 bert/encoder/layer_11/intermediate/dense/BiasAdd" -> "1465 bert/encoder/layer_11/intermediate/dense/mul_3" [label="[]", style=solid]; +"1458 bert/encoder/layer_11/intermediate/dense/Pow" -> "1459 bert/encoder/layer_11/intermediate/dense/mul" [label="[]", style=solid]; +"1459 bert/encoder/layer_11/intermediate/dense/mul" -> "1460 bert/encoder/layer_11/intermediate/dense/add" [label="[]", style=solid]; +"1460 
bert/encoder/layer_11/intermediate/dense/add" -> "1461 bert/encoder/layer_11/intermediate/dense/mul_1" [label="[]", style=solid]; +"1461 bert/encoder/layer_11/intermediate/dense/mul_1" -> "1462 bert/encoder/layer_11/intermediate/dense/Tanh" [label="[]", style=solid]; +"1462 bert/encoder/layer_11/intermediate/dense/Tanh" -> "1463 bert/encoder/layer_11/intermediate/dense/add_1" [label="[]", style=solid]; +"1463 bert/encoder/layer_11/intermediate/dense/add_1" -> "1464 bert/encoder/layer_11/intermediate/dense/mul_2" [label="[]", style=solid]; +"1464 bert/encoder/layer_11/intermediate/dense/mul_2" -> "1465 bert/encoder/layer_11/intermediate/dense/mul_3" [label="[]", style=solid]; +"1465 bert/encoder/layer_11/intermediate/dense/mul_3" -> "1466 QuantizeLinear_bert/encoder/layer_11/intermediate/dense/mul_3^0_1" [label="[]", style=solid]; +"1466 QuantizeLinear_bert/encoder/layer_11/intermediate/dense/mul_3^0_1" -> "1467 DequantizeLinear_bert/encoder/layer_11/intermediate/dense/mul_3^0_1" [label="[]", style=dashed]; +"1467 DequantizeLinear_bert/encoder/layer_11/intermediate/dense/mul_3^0_1" -> "1470 bert/encoder/layer_11/output/dense/MatMul" [label="[]", style=solid]; +"1468 QuantizeLinear_bert/encoder/layer_11/output/dense/kernel^0_1" -> "1469 DequantizeLinear_bert/encoder/layer_11/output/dense/kernel^0_1" [label="[3072, 768]", style=dashed]; +"1469 DequantizeLinear_bert/encoder/layer_11/output/dense/kernel^0_1" -> "1470 bert/encoder/layer_11/output/dense/MatMul" [label="[3072, 768]", style=solid]; +"1470 bert/encoder/layer_11/output/dense/MatMul" -> "1471 bert/encoder/layer_11/output/dense/BiasAdd" [label="[]", style=solid]; +"1471 bert/encoder/layer_11/output/dense/BiasAdd" -> "1472 bert/encoder/layer_11/output/add" [label="[]", style=solid]; +"1472 bert/encoder/layer_11/output/add" -> "1473 bert/encoder/layer_11/output/LayerNorm/moments/mean" [label="[]", style=solid]; +"1472 bert/encoder/layer_11/output/add" -> "1475 bert/encoder/layer_11/output/LayerNorm/moments/SquaredDifference" [label="[]", style=solid]; +"1472 bert/encoder/layer_11/output/add" -> "1484 bert/encoder/layer_11/output/LayerNorm/batchnorm/mul_1" [label="[]", style=solid]; +"1473 bert/encoder/layer_11/output/LayerNorm/moments/mean" -> "1474 bert/encoder/layer_11/output/LayerNorm/moments/StopGradient" [label="[]", style=solid]; +"1473 bert/encoder/layer_11/output/LayerNorm/moments/mean" -> "1482 bert/encoder/layer_11/output/LayerNorm/batchnorm/mul_2" [label="[]", style=solid]; +"1474 bert/encoder/layer_11/output/LayerNorm/moments/StopGradient" -> "1475 bert/encoder/layer_11/output/LayerNorm/moments/SquaredDifference" [label="[]", style=solid]; +"1475 bert/encoder/layer_11/output/LayerNorm/moments/SquaredDifference" -> "1476 bert/encoder/layer_11/output/LayerNorm/moments/SquaredDifference__467" [label="[]", style=solid]; +"1476 bert/encoder/layer_11/output/LayerNorm/moments/SquaredDifference__467" -> "1477 bert/encoder/layer_11/output/LayerNorm/moments/variance" [label="[]", style=solid]; +"1477 bert/encoder/layer_11/output/LayerNorm/moments/variance" -> "1478 bert/encoder/layer_11/output/LayerNorm/batchnorm/add" [label="[]", style=solid]; +"1478 bert/encoder/layer_11/output/LayerNorm/batchnorm/add" -> "1479 bert/encoder/layer_11/output/LayerNorm/batchnorm/Rsqrt" [label="[]", style=solid]; +"1479 bert/encoder/layer_11/output/LayerNorm/batchnorm/Rsqrt" -> "1480 bert/encoder/layer_11/output/LayerNorm/batchnorm/Rsqrt__469" [label="[]", style=solid]; +"1480 bert/encoder/layer_11/output/LayerNorm/batchnorm/Rsqrt__469" -> "1481 
bert/encoder/layer_11/output/LayerNorm/batchnorm/mul" [label="[]", style=solid]; +"1481 bert/encoder/layer_11/output/LayerNorm/batchnorm/mul" -> "1482 bert/encoder/layer_11/output/LayerNorm/batchnorm/mul_2" [label="[]", style=solid]; +"1481 bert/encoder/layer_11/output/LayerNorm/batchnorm/mul" -> "1484 bert/encoder/layer_11/output/LayerNorm/batchnorm/mul_1" [label="[]", style=solid]; +"1482 bert/encoder/layer_11/output/LayerNorm/batchnorm/mul_2" -> "1483 bert/encoder/layer_11/output/LayerNorm/batchnorm/sub" [label="[]", style=solid]; +"1483 bert/encoder/layer_11/output/LayerNorm/batchnorm/sub" -> "1485 bert/encoder/layer_11/output/LayerNorm/batchnorm/add_1" [label="[]", style=solid]; +"1484 bert/encoder/layer_11/output/LayerNorm/batchnorm/mul_1" -> "1485 bert/encoder/layer_11/output/LayerNorm/batchnorm/add_1" [label="[]", style=solid]; +"1485 bert/encoder/layer_11/output/LayerNorm/batchnorm/add_1" -> "1486 QuantizeLinear_bert/encoder/layer_11/output/LayerNorm/batchnorm/add_1^0_1" [label="[]", style=solid]; +"1486 QuantizeLinear_bert/encoder/layer_11/output/LayerNorm/batchnorm/add_1^0_1" -> "1487 DequantizeLinear_bert/encoder/layer_11/output/LayerNorm/batchnorm/add_1^0_1" [label="[]", style=dashed]; +"1487 DequantizeLinear_bert/encoder/layer_11/output/LayerNorm/batchnorm/add_1^0_1" -> "1488 bert/encoder/Reshape_13" [label="[]", style=solid]; +"1488 bert/encoder/Reshape_13" -> "1489 Shape_1" [label="[]", style=solid]; +"1488 bert/encoder/Reshape_13" -> "1501 Reshape" [label="[]", style=solid]; +"1489 Shape_1" -> "1490 Shape_1__472" [label="[-1]", style=dashed]; +"1490 Shape_1__472" -> "1491 strided_slice_1" [label="[-1]", style=solid]; +"1491 strided_slice_1" -> "1492 strided_slice_1__476" [label="[-1]", style=solid]; +"1492 strided_slice_1__476" -> "1493 strided_slice_1__477" [label="[]", style=solid]; +"1493 strided_slice_1__477" -> "1494 mul" [label="[]", style=dashed]; +"1493 strided_slice_1__477" -> "1498 Reshape_1/shape_Unsqueeze__478" [label="[]", style=dashed]; +"1494 mul" -> "1495 Reshape/shape_Unsqueeze__482" [label="[]", style=dashed]; +"1495 Reshape/shape_Unsqueeze__482" -> "1496 Reshape/shape_Concat__484" [label="[1]", style=dashed]; +"1496 Reshape/shape_Concat__484" -> "1497 Reshape__485" [label="[2]", style=dashed]; +"1497 Reshape__485" -> "1501 Reshape" [label="[2]", style=dashed]; +"1498 Reshape_1/shape_Unsqueeze__478" -> "1499 Reshape_1/shape_Concat__481" [label="[1]", style=dashed]; +"1499 Reshape_1/shape_Concat__481" -> "1500 Reshape_1__487" [label="[3]", style=dashed]; +"1500 Reshape_1__487" -> "1506 Reshape_1" [label="[3]", style=dashed]; +"1501 Reshape" -> "1504 MatMul" [label="[]", style=solid]; +"1502 QuantizeLinear_MatMul__486^0_1" -> "1503 DequantizeLinear_MatMul__486^0_1" [label="[768, 2]", style=dashed]; +"1503 DequantizeLinear_MatMul__486^0_1" -> "1504 MatMul" [label="[768, 2]", style=solid]; +"1504 MatMul" -> "1505 BiasAdd" [label="[]", style=solid]; +"1505 BiasAdd" -> "1506 Reshape_1" [label="[]", style=solid]; +"1506 Reshape_1" -> "1507 transpose" [label="[]", style=solid]; +"1507 transpose" -> "1508 unstack" [label="[]", style=solid]; +"1508 unstack" -> "1509 unstack__490" [label="[]", style=solid]; +"1508 unstack" -> "1511 unstack__488" [label="[]", style=solid]; +"1509 unstack__490" -> "1510 unstack_graph_outputs_Identity__4" [label="[]", style=solid]; +"1510 unstack_graph_outputs_Identity__4" -> "1517 nncf_model_output_0" [label="[-1, 256]", style=solid]; +"1511 unstack__488" -> "1512 unstack_graph_outputs_Identity__7" [label="[]", style=solid]; +"1512 
unstack_graph_outputs_Identity__7" -> "1518 nncf_model_output_1" [label="[-1, 256]", style=solid]; +"1513 nncf_model_input_0" -> "0 unique_ids_graph_outputs_Identity__10" [label="[-1]", style=dashed]; +"1514 nncf_model_input_1" -> "185 bert/embeddings/Reshape_2" [label="[-1, 256]", style=dashed]; +"1515 nncf_model_input_2" -> "140 bert/encoder/Reshape" [label="[-1, 256]", style=dashed]; +"1516 nncf_model_input_3" -> "123 bert/encoder/Shape" [label="[-1, 256]", style=dashed]; +"1516 nncf_model_input_3" -> "189 bert/embeddings/ExpandDims" [label="[-1, 256]", style=dashed]; +} diff --git a/tests/onnx/data/reference_graphs/quantization/googlenet.dot b/tests/onnx/data/reference_graphs/quantization/googlenet.dot index a8b7346b20d..bd5fcdf922a 100644 --- a/tests/onnx/data/reference_graphs/quantization/googlenet.dot +++ b/tests/onnx/data/reference_graphs/quantization/googlenet.dot @@ -387,14 +387,14 @@ strict digraph { "385 DequantizeLinear_onnx^^Conv_735_1" [id=385, label="385 DequantizeLinear_onnx::Conv_735_1", type=DequantizeLinear]; "386 /inception5b/branch4/branch4.1/conv/Conv" [id=386, type=Conv]; "387 /inception5b/branch4/branch4.1/Relu" [id=387, type=Relu]; -"388 QuantizeLinear_/inception5b/branch2/branch2.1/Relu_output_0_1" [id=388, type=QuantizeLinear]; -"389 DequantizeLinear_/inception5b/branch2/branch2.1/Relu_output_0_1" [id=389, type=DequantizeLinear]; -"390 QuantizeLinear_/inception5b/branch3/branch3.1/Relu_output_0_1" [id=390, type=QuantizeLinear]; -"391 DequantizeLinear_/inception5b/branch3/branch3.1/Relu_output_0_1" [id=391, type=DequantizeLinear]; -"392 QuantizeLinear_/inception5b/branch4/branch4.1/Relu_output_0_1" [id=392, type=QuantizeLinear]; -"393 DequantizeLinear_/inception5b/branch4/branch4.1/Relu_output_0_1" [id=393, type=DequantizeLinear]; -"394 QuantizeLinear_/inception5b/branch1/Relu_output_0_1" [id=394, type=QuantizeLinear]; -"395 DequantizeLinear_/inception5b/branch1/Relu_output_0_1" [id=395, type=DequantizeLinear]; +"388 QuantizeLinear_/inception5b/branch1/Relu_output_0_1" [id=388, type=QuantizeLinear]; +"389 DequantizeLinear_/inception5b/branch1/Relu_output_0_1" [id=389, type=DequantizeLinear]; +"390 QuantizeLinear_/inception5b/branch2/branch2.1/Relu_output_0_1" [id=390, type=QuantizeLinear]; +"391 DequantizeLinear_/inception5b/branch2/branch2.1/Relu_output_0_1" [id=391, type=DequantizeLinear]; +"392 QuantizeLinear_/inception5b/branch3/branch3.1/Relu_output_0_1" [id=392, type=QuantizeLinear]; +"393 DequantizeLinear_/inception5b/branch3/branch3.1/Relu_output_0_1" [id=393, type=DequantizeLinear]; +"394 QuantizeLinear_/inception5b/branch4/branch4.1/Relu_output_0_1" [id=394, type=QuantizeLinear]; +"395 DequantizeLinear_/inception5b/branch4/branch4.1/Relu_output_0_1" [id=395, type=DequantizeLinear]; "396 /inception5b/Concat" [id=396, type=Concat]; "397 /avgpool/GlobalAveragePool" [id=397, type=GlobalAveragePool]; "398 QuantizeLinear_/avgpool/GlobalAveragePool_output_0_1" [id=398, type=QuantizeLinear]; @@ -405,438 +405,438 @@ strict digraph { "403 /fc/Gemm" [id=403, type=Gemm]; "404 nncf_model_input_0" [id=404, type=nncf_model_input]; "405 nncf_model_output_0" [id=405, type=nncf_model_output]; -"0 /Constant" -> "4 /Gather" [label="()", style=dashed]; -"1 /Constant_1" -> "11 /Gather_1" [label="()", style=dashed]; -"2 QuantizeLinear_x.1_1" -> "3 DequantizeLinear_x.1_1" [label="(1, 3, 224, 224)", style=dashed]; -"3 DequantizeLinear_x.1_1" -> "4 /Gather" [label="(1, 3, 224, 224)", style=solid]; -"3 DequantizeLinear_x.1_1" -> "11 /Gather_1" [label="(1, 3, 224, 224)", 
style=solid]; -"3 DequantizeLinear_x.1_1" -> "19 /Gather_2" [label="(1, 3, 224, 224)", style=solid]; -"4 /Gather" -> "6 /Unsqueeze" [label="(1, 224, 224)", style=solid]; -"5 /Constant_2" -> "6 /Unsqueeze" [label="(1,)", style=dashed]; -"6 /Unsqueeze" -> "8 /Mul" [label="(1, 1, 224, 224)", style=solid]; -"7 /Constant_3" -> "8 /Mul" [label="()", style=solid]; -"8 /Mul" -> "10 /Add" [label="(1, 1, 224, 224)", style=solid]; -"9 /Constant_4" -> "10 /Add" [label="()", style=solid]; -"10 /Add" -> "28 QuantizeLinear_/Add_output_0_1" [label="(1, 1, 224, 224)", style=solid]; -"11 /Gather_1" -> "13 /Unsqueeze_1" [label="(1, 224, 224)", style=solid]; -"12 /Constant_5" -> "13 /Unsqueeze_1" [label="(1,)", style=dashed]; -"13 /Unsqueeze_1" -> "15 /Mul_1" [label="(1, 1, 224, 224)", style=solid]; -"14 /Constant_6" -> "15 /Mul_1" [label="()", style=solid]; -"15 /Mul_1" -> "17 /Add_1" [label="(1, 1, 224, 224)", style=solid]; -"16 /Constant_7" -> "17 /Add_1" [label="()", style=solid]; -"17 /Add_1" -> "30 QuantizeLinear_/Add_1_output_0_1" [label="(1, 1, 224, 224)", style=solid]; -"18 /Constant_8" -> "19 /Gather_2" [label="()", style=dashed]; -"19 /Gather_2" -> "21 /Unsqueeze_2" [label="(1, 224, 224)", style=solid]; -"20 /Constant_9" -> "21 /Unsqueeze_2" [label="(1,)", style=dashed]; -"21 /Unsqueeze_2" -> "23 /Mul_2" [label="(1, 1, 224, 224)", style=solid]; -"22 /Constant_10" -> "23 /Mul_2" [label="()", style=solid]; -"23 /Mul_2" -> "25 /Add_2" [label="(1, 1, 224, 224)", style=solid]; -"24 /Constant_11" -> "25 /Add_2" [label="()", style=solid]; -"25 /Add_2" -> "26 QuantizeLinear_/Add_2_output_0_1" [label="(1, 1, 224, 224)", style=solid]; -"26 QuantizeLinear_/Add_2_output_0_1" -> "27 DequantizeLinear_/Add_2_output_0_1" [label="(1, 1, 224, 224)", style=dashed]; -"27 DequantizeLinear_/Add_2_output_0_1" -> "32 /Concat" [label="(1, 1, 224, 224)", style=solid]; -"28 QuantizeLinear_/Add_output_0_1" -> "29 DequantizeLinear_/Add_output_0_1" [label="(1, 1, 224, 224)", style=dashed]; -"29 DequantizeLinear_/Add_output_0_1" -> "32 /Concat" [label="(1, 1, 224, 224)", style=solid]; -"30 QuantizeLinear_/Add_1_output_0_1" -> "31 DequantizeLinear_/Add_1_output_0_1" [label="(1, 1, 224, 224)", style=dashed]; -"31 DequantizeLinear_/Add_1_output_0_1" -> "32 /Concat" [label="(1, 1, 224, 224)", style=solid]; -"32 /Concat" -> "35 /conv1/conv/Conv" [label="(1, 3, 224, 224)", style=solid]; -"33 QuantizeLinear_onnx^^Conv_567_1" -> "34 DequantizeLinear_onnx^^Conv_567_1" [label="(64, 3, 7, 7)", style=dashed]; -"34 DequantizeLinear_onnx^^Conv_567_1" -> "35 /conv1/conv/Conv" [label="(64, 3, 7, 7)", style=solid]; -"35 /conv1/conv/Conv" -> "36 /conv1/Relu" [label="(1, 64, 112, 112)", style=solid]; -"36 /conv1/Relu" -> "37 QuantizeLinear_/conv1/Relu_output_0_1" [label="(1, 64, 112, 112)", style=solid]; -"37 QuantizeLinear_/conv1/Relu_output_0_1" -> "38 DequantizeLinear_/conv1/Relu_output_0_1" [label="(1, 64, 112, 112)", style=dashed]; -"38 DequantizeLinear_/conv1/Relu_output_0_1" -> "39 /maxpool1/MaxPool" [label="(1, 64, 112, 112)", style=solid]; -"39 /maxpool1/MaxPool" -> "42 /conv2/conv/Conv" [label="(1, 64, 56, 56)", style=solid]; -"40 QuantizeLinear_onnx^^Conv_570_1" -> "41 DequantizeLinear_onnx^^Conv_570_1" [label="(64, 64, 1, 1)", style=dashed]; -"41 DequantizeLinear_onnx^^Conv_570_1" -> "42 /conv2/conv/Conv" [label="(64, 64, 1, 1)", style=solid]; -"42 /conv2/conv/Conv" -> "43 /conv2/Relu" [label="(1, 64, 56, 56)", style=solid]; -"43 /conv2/Relu" -> "44 QuantizeLinear_/conv2/Relu_output_0_1" [label="(1, 64, 56, 56)", style=solid]; -"44 
QuantizeLinear_/conv2/Relu_output_0_1" -> "45 DequantizeLinear_/conv2/Relu_output_0_1" [label="(1, 64, 56, 56)", style=dashed]; -"45 DequantizeLinear_/conv2/Relu_output_0_1" -> "48 /conv3/conv/Conv" [label="(1, 64, 56, 56)", style=solid]; -"46 QuantizeLinear_onnx^^Conv_573_1" -> "47 DequantizeLinear_onnx^^Conv_573_1" [label="(192, 64, 3, 3)", style=dashed]; -"47 DequantizeLinear_onnx^^Conv_573_1" -> "48 /conv3/conv/Conv" [label="(192, 64, 3, 3)", style=solid]; -"48 /conv3/conv/Conv" -> "49 /conv3/Relu" [label="(1, 192, 56, 56)", style=solid]; -"49 /conv3/Relu" -> "50 QuantizeLinear_/conv3/Relu_output_0_1" [label="(1, 192, 56, 56)", style=solid]; -"50 QuantizeLinear_/conv3/Relu_output_0_1" -> "51 DequantizeLinear_/conv3/Relu_output_0_1" [label="(1, 192, 56, 56)", style=dashed]; -"51 DequantizeLinear_/conv3/Relu_output_0_1" -> "52 /maxpool2/MaxPool" [label="(1, 192, 56, 56)", style=solid]; -"52 /maxpool2/MaxPool" -> "55 /inception3a/branch1/conv/Conv" [label="(1, 192, 28, 28)", style=solid]; -"52 /maxpool2/MaxPool" -> "59 /inception3a/branch2/branch2.0/conv/Conv" [label="(1, 192, 28, 28)", style=solid]; -"52 /maxpool2/MaxPool" -> "69 /inception3a/branch3/branch3.0/conv/Conv" [label="(1, 192, 28, 28)", style=solid]; -"52 /maxpool2/MaxPool" -> "77 /inception3a/branch4/branch4.0/MaxPool" [label="(1, 192, 28, 28)", style=solid]; -"53 QuantizeLinear_onnx^^Conv_576_1" -> "54 DequantizeLinear_onnx^^Conv_576_1" [label="(64, 192, 1, 1)", style=dashed]; -"54 DequantizeLinear_onnx^^Conv_576_1" -> "55 /inception3a/branch1/conv/Conv" [label="(64, 192, 1, 1)", style=solid]; -"55 /inception3a/branch1/conv/Conv" -> "56 /inception3a/branch1/Relu" [label="(1, 64, 28, 28)", style=solid]; -"56 /inception3a/branch1/Relu" -> "88 QuantizeLinear_/inception3a/branch1/Relu_output_0_1" [label="(1, 64, 28, 28)", style=solid]; -"57 QuantizeLinear_onnx^^Conv_579_1" -> "58 DequantizeLinear_onnx^^Conv_579_1" [label="(96, 192, 1, 1)", style=dashed]; -"58 DequantizeLinear_onnx^^Conv_579_1" -> "59 /inception3a/branch2/branch2.0/conv/Conv" [label="(96, 192, 1, 1)", style=solid]; -"59 /inception3a/branch2/branch2.0/conv/Conv" -> "60 /inception3a/branch2/branch2.0/Relu" [label="(1, 96, 28, 28)", style=solid]; -"60 /inception3a/branch2/branch2.0/Relu" -> "61 QuantizeLinear_/inception3a/branch2/branch2.0/Relu_output_0_1" [label="(1, 96, 28, 28)", style=solid]; -"61 QuantizeLinear_/inception3a/branch2/branch2.0/Relu_output_0_1" -> "62 DequantizeLinear_/inception3a/branch2/branch2.0/Relu_output_0_1" [label="(1, 96, 28, 28)", style=dashed]; -"62 DequantizeLinear_/inception3a/branch2/branch2.0/Relu_output_0_1" -> "65 /inception3a/branch2/branch2.1/conv/Conv" [label="(1, 96, 28, 28)", style=solid]; -"63 QuantizeLinear_onnx^^Conv_582_1" -> "64 DequantizeLinear_onnx^^Conv_582_1" [label="(128, 96, 3, 3)", style=dashed]; -"64 DequantizeLinear_onnx^^Conv_582_1" -> "65 /inception3a/branch2/branch2.1/conv/Conv" [label="(128, 96, 3, 3)", style=solid]; -"65 /inception3a/branch2/branch2.1/conv/Conv" -> "66 /inception3a/branch2/branch2.1/Relu" [label="(1, 128, 28, 28)", style=solid]; -"66 /inception3a/branch2/branch2.1/Relu" -> "82 QuantizeLinear_/inception3a/branch2/branch2.1/Relu_output_0_1" [label="(1, 128, 28, 28)", style=solid]; -"67 QuantizeLinear_onnx^^Conv_585_1" -> "68 DequantizeLinear_onnx^^Conv_585_1" [label="(16, 192, 1, 1)", style=dashed]; -"68 DequantizeLinear_onnx^^Conv_585_1" -> "69 /inception3a/branch3/branch3.0/conv/Conv" [label="(16, 192, 1, 1)", style=solid]; -"69 /inception3a/branch3/branch3.0/conv/Conv" -> "70 
/inception3a/branch3/branch3.0/Relu" [label="(1, 16, 28, 28)", style=solid]; -"70 /inception3a/branch3/branch3.0/Relu" -> "71 QuantizeLinear_/inception3a/branch3/branch3.0/Relu_output_0_1" [label="(1, 16, 28, 28)", style=solid]; -"71 QuantizeLinear_/inception3a/branch3/branch3.0/Relu_output_0_1" -> "72 DequantizeLinear_/inception3a/branch3/branch3.0/Relu_output_0_1" [label="(1, 16, 28, 28)", style=dashed]; -"72 DequantizeLinear_/inception3a/branch3/branch3.0/Relu_output_0_1" -> "75 /inception3a/branch3/branch3.1/conv/Conv" [label="(1, 16, 28, 28)", style=solid]; -"73 QuantizeLinear_onnx^^Conv_588_1" -> "74 DequantizeLinear_onnx^^Conv_588_1" [label="(32, 16, 3, 3)", style=dashed]; -"74 DequantizeLinear_onnx^^Conv_588_1" -> "75 /inception3a/branch3/branch3.1/conv/Conv" [label="(32, 16, 3, 3)", style=solid]; -"75 /inception3a/branch3/branch3.1/conv/Conv" -> "76 /inception3a/branch3/branch3.1/Relu" [label="(1, 32, 28, 28)", style=solid]; -"76 /inception3a/branch3/branch3.1/Relu" -> "84 QuantizeLinear_/inception3a/branch3/branch3.1/Relu_output_0_1" [label="(1, 32, 28, 28)", style=solid]; -"77 /inception3a/branch4/branch4.0/MaxPool" -> "80 /inception3a/branch4/branch4.1/conv/Conv" [label="(1, 192, 28, 28)", style=solid]; -"78 QuantizeLinear_onnx^^Conv_591_1" -> "79 DequantizeLinear_onnx^^Conv_591_1" [label="(32, 192, 1, 1)", style=dashed]; -"79 DequantizeLinear_onnx^^Conv_591_1" -> "80 /inception3a/branch4/branch4.1/conv/Conv" [label="(32, 192, 1, 1)", style=solid]; -"80 /inception3a/branch4/branch4.1/conv/Conv" -> "81 /inception3a/branch4/branch4.1/Relu" [label="(1, 32, 28, 28)", style=solid]; -"81 /inception3a/branch4/branch4.1/Relu" -> "86 QuantizeLinear_/inception3a/branch4/branch4.1/Relu_output_0_1" [label="(1, 32, 28, 28)", style=solid]; -"82 QuantizeLinear_/inception3a/branch2/branch2.1/Relu_output_0_1" -> "83 DequantizeLinear_/inception3a/branch2/branch2.1/Relu_output_0_1" [label="(1, 128, 28, 28)", style=dashed]; -"83 DequantizeLinear_/inception3a/branch2/branch2.1/Relu_output_0_1" -> "90 /inception3a/Concat" [label="(1, 128, 28, 28)", style=solid]; -"84 QuantizeLinear_/inception3a/branch3/branch3.1/Relu_output_0_1" -> "85 DequantizeLinear_/inception3a/branch3/branch3.1/Relu_output_0_1" [label="(1, 32, 28, 28)", style=dashed]; -"85 DequantizeLinear_/inception3a/branch3/branch3.1/Relu_output_0_1" -> "90 /inception3a/Concat" [label="(1, 32, 28, 28)", style=solid]; -"86 QuantizeLinear_/inception3a/branch4/branch4.1/Relu_output_0_1" -> "87 DequantizeLinear_/inception3a/branch4/branch4.1/Relu_output_0_1" [label="(1, 32, 28, 28)", style=dashed]; -"87 DequantizeLinear_/inception3a/branch4/branch4.1/Relu_output_0_1" -> "90 /inception3a/Concat" [label="(1, 32, 28, 28)", style=solid]; -"88 QuantizeLinear_/inception3a/branch1/Relu_output_0_1" -> "89 DequantizeLinear_/inception3a/branch1/Relu_output_0_1" [label="(1, 64, 28, 28)", style=dashed]; -"89 DequantizeLinear_/inception3a/branch1/Relu_output_0_1" -> "90 /inception3a/Concat" [label="(1, 64, 28, 28)", style=solid]; -"90 /inception3a/Concat" -> "93 /inception3b/branch1/conv/Conv" [label="(1, 256, 28, 28)", style=solid]; -"90 /inception3a/Concat" -> "97 /inception3b/branch2/branch2.0/conv/Conv" [label="(1, 256, 28, 28)", style=solid]; -"90 /inception3a/Concat" -> "107 /inception3b/branch3/branch3.0/conv/Conv" [label="(1, 256, 28, 28)", style=solid]; -"90 /inception3a/Concat" -> "115 /inception3b/branch4/branch4.0/MaxPool" [label="(1, 256, 28, 28)", style=solid]; -"91 QuantizeLinear_onnx^^Conv_594_1" -> "92 DequantizeLinear_onnx^^Conv_594_1" 
[label="(128, 256, 1, 1)", style=dashed]; -"92 DequantizeLinear_onnx^^Conv_594_1" -> "93 /inception3b/branch1/conv/Conv" [label="(128, 256, 1, 1)", style=solid]; -"93 /inception3b/branch1/conv/Conv" -> "94 /inception3b/branch1/Relu" [label="(1, 128, 28, 28)", style=solid]; -"94 /inception3b/branch1/Relu" -> "126 QuantizeLinear_/inception3b/branch1/Relu_output_0_1" [label="(1, 128, 28, 28)", style=solid]; -"95 QuantizeLinear_onnx^^Conv_597_1" -> "96 DequantizeLinear_onnx^^Conv_597_1" [label="(128, 256, 1, 1)", style=dashed]; -"96 DequantizeLinear_onnx^^Conv_597_1" -> "97 /inception3b/branch2/branch2.0/conv/Conv" [label="(128, 256, 1, 1)", style=solid]; -"97 /inception3b/branch2/branch2.0/conv/Conv" -> "98 /inception3b/branch2/branch2.0/Relu" [label="(1, 128, 28, 28)", style=solid]; -"98 /inception3b/branch2/branch2.0/Relu" -> "99 QuantizeLinear_/inception3b/branch2/branch2.0/Relu_output_0_1" [label="(1, 128, 28, 28)", style=solid]; -"99 QuantizeLinear_/inception3b/branch2/branch2.0/Relu_output_0_1" -> "100 DequantizeLinear_/inception3b/branch2/branch2.0/Relu_output_0_1" [label="(1, 128, 28, 28)", style=dashed]; -"100 DequantizeLinear_/inception3b/branch2/branch2.0/Relu_output_0_1" -> "103 /inception3b/branch2/branch2.1/conv/Conv" [label="(1, 128, 28, 28)", style=solid]; -"101 QuantizeLinear_onnx^^Conv_600_1" -> "102 DequantizeLinear_onnx^^Conv_600_1" [label="(192, 128, 3, 3)", style=dashed]; -"102 DequantizeLinear_onnx^^Conv_600_1" -> "103 /inception3b/branch2/branch2.1/conv/Conv" [label="(192, 128, 3, 3)", style=solid]; -"103 /inception3b/branch2/branch2.1/conv/Conv" -> "104 /inception3b/branch2/branch2.1/Relu" [label="(1, 192, 28, 28)", style=solid]; -"104 /inception3b/branch2/branch2.1/Relu" -> "120 QuantizeLinear_/inception3b/branch2/branch2.1/Relu_output_0_1" [label="(1, 192, 28, 28)", style=solid]; -"105 QuantizeLinear_onnx^^Conv_603_1" -> "106 DequantizeLinear_onnx^^Conv_603_1" [label="(32, 256, 1, 1)", style=dashed]; -"106 DequantizeLinear_onnx^^Conv_603_1" -> "107 /inception3b/branch3/branch3.0/conv/Conv" [label="(32, 256, 1, 1)", style=solid]; -"107 /inception3b/branch3/branch3.0/conv/Conv" -> "108 /inception3b/branch3/branch3.0/Relu" [label="(1, 32, 28, 28)", style=solid]; -"108 /inception3b/branch3/branch3.0/Relu" -> "109 QuantizeLinear_/inception3b/branch3/branch3.0/Relu_output_0_1" [label="(1, 32, 28, 28)", style=solid]; -"109 QuantizeLinear_/inception3b/branch3/branch3.0/Relu_output_0_1" -> "110 DequantizeLinear_/inception3b/branch3/branch3.0/Relu_output_0_1" [label="(1, 32, 28, 28)", style=dashed]; -"110 DequantizeLinear_/inception3b/branch3/branch3.0/Relu_output_0_1" -> "113 /inception3b/branch3/branch3.1/conv/Conv" [label="(1, 32, 28, 28)", style=solid]; -"111 QuantizeLinear_onnx^^Conv_606_1" -> "112 DequantizeLinear_onnx^^Conv_606_1" [label="(96, 32, 3, 3)", style=dashed]; -"112 DequantizeLinear_onnx^^Conv_606_1" -> "113 /inception3b/branch3/branch3.1/conv/Conv" [label="(96, 32, 3, 3)", style=solid]; -"113 /inception3b/branch3/branch3.1/conv/Conv" -> "114 /inception3b/branch3/branch3.1/Relu" [label="(1, 96, 28, 28)", style=solid]; -"114 /inception3b/branch3/branch3.1/Relu" -> "122 QuantizeLinear_/inception3b/branch3/branch3.1/Relu_output_0_1" [label="(1, 96, 28, 28)", style=solid]; -"115 /inception3b/branch4/branch4.0/MaxPool" -> "118 /inception3b/branch4/branch4.1/conv/Conv" [label="(1, 256, 28, 28)", style=solid]; -"116 QuantizeLinear_onnx^^Conv_609_1" -> "117 DequantizeLinear_onnx^^Conv_609_1" [label="(64, 256, 1, 1)", style=dashed]; -"117 
DequantizeLinear_onnx^^Conv_609_1" -> "118 /inception3b/branch4/branch4.1/conv/Conv" [label="(64, 256, 1, 1)", style=solid]; -"118 /inception3b/branch4/branch4.1/conv/Conv" -> "119 /inception3b/branch4/branch4.1/Relu" [label="(1, 64, 28, 28)", style=solid]; -"119 /inception3b/branch4/branch4.1/Relu" -> "124 QuantizeLinear_/inception3b/branch4/branch4.1/Relu_output_0_1" [label="(1, 64, 28, 28)", style=solid]; -"120 QuantizeLinear_/inception3b/branch2/branch2.1/Relu_output_0_1" -> "121 DequantizeLinear_/inception3b/branch2/branch2.1/Relu_output_0_1" [label="(1, 192, 28, 28)", style=dashed]; -"121 DequantizeLinear_/inception3b/branch2/branch2.1/Relu_output_0_1" -> "128 /inception3b/Concat" [label="(1, 192, 28, 28)", style=solid]; -"122 QuantizeLinear_/inception3b/branch3/branch3.1/Relu_output_0_1" -> "123 DequantizeLinear_/inception3b/branch3/branch3.1/Relu_output_0_1" [label="(1, 96, 28, 28)", style=dashed]; -"123 DequantizeLinear_/inception3b/branch3/branch3.1/Relu_output_0_1" -> "128 /inception3b/Concat" [label="(1, 96, 28, 28)", style=solid]; -"124 QuantizeLinear_/inception3b/branch4/branch4.1/Relu_output_0_1" -> "125 DequantizeLinear_/inception3b/branch4/branch4.1/Relu_output_0_1" [label="(1, 64, 28, 28)", style=dashed]; -"125 DequantizeLinear_/inception3b/branch4/branch4.1/Relu_output_0_1" -> "128 /inception3b/Concat" [label="(1, 64, 28, 28)", style=solid]; -"126 QuantizeLinear_/inception3b/branch1/Relu_output_0_1" -> "127 DequantizeLinear_/inception3b/branch1/Relu_output_0_1" [label="(1, 128, 28, 28)", style=dashed]; -"127 DequantizeLinear_/inception3b/branch1/Relu_output_0_1" -> "128 /inception3b/Concat" [label="(1, 128, 28, 28)", style=solid]; -"128 /inception3b/Concat" -> "129 /maxpool3/MaxPool" [label="(1, 480, 28, 28)", style=solid]; -"129 /maxpool3/MaxPool" -> "132 /inception4a/branch1/conv/Conv" [label="(1, 480, 14, 14)", style=solid]; -"129 /maxpool3/MaxPool" -> "136 /inception4a/branch2/branch2.0/conv/Conv" [label="(1, 480, 14, 14)", style=solid]; -"129 /maxpool3/MaxPool" -> "146 /inception4a/branch3/branch3.0/conv/Conv" [label="(1, 480, 14, 14)", style=solid]; -"129 /maxpool3/MaxPool" -> "154 /inception4a/branch4/branch4.0/MaxPool" [label="(1, 480, 14, 14)", style=solid]; -"130 QuantizeLinear_onnx^^Conv_612_1" -> "131 DequantizeLinear_onnx^^Conv_612_1" [label="(192, 480, 1, 1)", style=dashed]; -"131 DequantizeLinear_onnx^^Conv_612_1" -> "132 /inception4a/branch1/conv/Conv" [label="(192, 480, 1, 1)", style=solid]; -"132 /inception4a/branch1/conv/Conv" -> "133 /inception4a/branch1/Relu" [label="(1, 192, 14, 14)", style=solid]; -"133 /inception4a/branch1/Relu" -> "159 QuantizeLinear_/inception4a/branch1/Relu_output_0_1" [label="(1, 192, 14, 14)", style=solid]; -"134 QuantizeLinear_onnx^^Conv_615_1" -> "135 DequantizeLinear_onnx^^Conv_615_1" [label="(96, 480, 1, 1)", style=dashed]; -"135 DequantizeLinear_onnx^^Conv_615_1" -> "136 /inception4a/branch2/branch2.0/conv/Conv" [label="(96, 480, 1, 1)", style=solid]; -"136 /inception4a/branch2/branch2.0/conv/Conv" -> "137 /inception4a/branch2/branch2.0/Relu" [label="(1, 96, 14, 14)", style=solid]; -"137 /inception4a/branch2/branch2.0/Relu" -> "138 QuantizeLinear_/inception4a/branch2/branch2.0/Relu_output_0_1" [label="(1, 96, 14, 14)", style=solid]; -"138 QuantizeLinear_/inception4a/branch2/branch2.0/Relu_output_0_1" -> "139 DequantizeLinear_/inception4a/branch2/branch2.0/Relu_output_0_1" [label="(1, 96, 14, 14)", style=dashed]; -"139 DequantizeLinear_/inception4a/branch2/branch2.0/Relu_output_0_1" -> "142 
/inception4a/branch2/branch2.1/conv/Conv" [label="(1, 96, 14, 14)", style=solid]; -"140 QuantizeLinear_onnx^^Conv_618_1" -> "141 DequantizeLinear_onnx^^Conv_618_1" [label="(208, 96, 3, 3)", style=dashed]; -"141 DequantizeLinear_onnx^^Conv_618_1" -> "142 /inception4a/branch2/branch2.1/conv/Conv" [label="(208, 96, 3, 3)", style=solid]; -"142 /inception4a/branch2/branch2.1/conv/Conv" -> "143 /inception4a/branch2/branch2.1/Relu" [label="(1, 208, 14, 14)", style=solid]; -"143 /inception4a/branch2/branch2.1/Relu" -> "161 QuantizeLinear_/inception4a/branch2/branch2.1/Relu_output_0_1" [label="(1, 208, 14, 14)", style=solid]; -"144 QuantizeLinear_onnx^^Conv_621_1" -> "145 DequantizeLinear_onnx^^Conv_621_1" [label="(16, 480, 1, 1)", style=dashed]; -"145 DequantizeLinear_onnx^^Conv_621_1" -> "146 /inception4a/branch3/branch3.0/conv/Conv" [label="(16, 480, 1, 1)", style=solid]; -"146 /inception4a/branch3/branch3.0/conv/Conv" -> "147 /inception4a/branch3/branch3.0/Relu" [label="(1, 16, 14, 14)", style=solid]; -"147 /inception4a/branch3/branch3.0/Relu" -> "148 QuantizeLinear_/inception4a/branch3/branch3.0/Relu_output_0_1" [label="(1, 16, 14, 14)", style=solid]; -"148 QuantizeLinear_/inception4a/branch3/branch3.0/Relu_output_0_1" -> "149 DequantizeLinear_/inception4a/branch3/branch3.0/Relu_output_0_1" [label="(1, 16, 14, 14)", style=dashed]; -"149 DequantizeLinear_/inception4a/branch3/branch3.0/Relu_output_0_1" -> "152 /inception4a/branch3/branch3.1/conv/Conv" [label="(1, 16, 14, 14)", style=solid]; -"150 QuantizeLinear_onnx^^Conv_624_1" -> "151 DequantizeLinear_onnx^^Conv_624_1" [label="(48, 16, 3, 3)", style=dashed]; -"151 DequantizeLinear_onnx^^Conv_624_1" -> "152 /inception4a/branch3/branch3.1/conv/Conv" [label="(48, 16, 3, 3)", style=solid]; -"152 /inception4a/branch3/branch3.1/conv/Conv" -> "153 /inception4a/branch3/branch3.1/Relu" [label="(1, 48, 14, 14)", style=solid]; -"153 /inception4a/branch3/branch3.1/Relu" -> "163 QuantizeLinear_/inception4a/branch3/branch3.1/Relu_output_0_1" [label="(1, 48, 14, 14)", style=solid]; -"154 /inception4a/branch4/branch4.0/MaxPool" -> "157 /inception4a/branch4/branch4.1/conv/Conv" [label="(1, 480, 14, 14)", style=solid]; -"155 QuantizeLinear_onnx^^Conv_627_1" -> "156 DequantizeLinear_onnx^^Conv_627_1" [label="(64, 480, 1, 1)", style=dashed]; -"156 DequantizeLinear_onnx^^Conv_627_1" -> "157 /inception4a/branch4/branch4.1/conv/Conv" [label="(64, 480, 1, 1)", style=solid]; -"157 /inception4a/branch4/branch4.1/conv/Conv" -> "158 /inception4a/branch4/branch4.1/Relu" [label="(1, 64, 14, 14)", style=solid]; -"158 /inception4a/branch4/branch4.1/Relu" -> "165 QuantizeLinear_/inception4a/branch4/branch4.1/Relu_output_0_1" [label="(1, 64, 14, 14)", style=solid]; -"159 QuantizeLinear_/inception4a/branch1/Relu_output_0_1" -> "160 DequantizeLinear_/inception4a/branch1/Relu_output_0_1" [label="(1, 192, 14, 14)", style=dashed]; -"160 DequantizeLinear_/inception4a/branch1/Relu_output_0_1" -> "167 /inception4a/Concat" [label="(1, 192, 14, 14)", style=solid]; -"161 QuantizeLinear_/inception4a/branch2/branch2.1/Relu_output_0_1" -> "162 DequantizeLinear_/inception4a/branch2/branch2.1/Relu_output_0_1" [label="(1, 208, 14, 14)", style=dashed]; -"162 DequantizeLinear_/inception4a/branch2/branch2.1/Relu_output_0_1" -> "167 /inception4a/Concat" [label="(1, 208, 14, 14)", style=solid]; -"163 QuantizeLinear_/inception4a/branch3/branch3.1/Relu_output_0_1" -> "164 DequantizeLinear_/inception4a/branch3/branch3.1/Relu_output_0_1" [label="(1, 48, 14, 14)", style=dashed]; -"164 
DequantizeLinear_/inception4a/branch3/branch3.1/Relu_output_0_1" -> "167 /inception4a/Concat" [label="(1, 48, 14, 14)", style=solid]; -"165 QuantizeLinear_/inception4a/branch4/branch4.1/Relu_output_0_1" -> "166 DequantizeLinear_/inception4a/branch4/branch4.1/Relu_output_0_1" [label="(1, 64, 14, 14)", style=dashed]; -"166 DequantizeLinear_/inception4a/branch4/branch4.1/Relu_output_0_1" -> "167 /inception4a/Concat" [label="(1, 64, 14, 14)", style=solid]; -"167 /inception4a/Concat" -> "170 /inception4b/branch1/conv/Conv" [label="(1, 512, 14, 14)", style=solid]; -"167 /inception4a/Concat" -> "174 /inception4b/branch2/branch2.0/conv/Conv" [label="(1, 512, 14, 14)", style=solid]; -"167 /inception4a/Concat" -> "184 /inception4b/branch3/branch3.0/conv/Conv" [label="(1, 512, 14, 14)", style=solid]; -"167 /inception4a/Concat" -> "192 /inception4b/branch4/branch4.0/MaxPool" [label="(1, 512, 14, 14)", style=solid]; -"168 QuantizeLinear_onnx^^Conv_630_1" -> "169 DequantizeLinear_onnx^^Conv_630_1" [label="(160, 512, 1, 1)", style=dashed]; -"169 DequantizeLinear_onnx^^Conv_630_1" -> "170 /inception4b/branch1/conv/Conv" [label="(160, 512, 1, 1)", style=solid]; -"170 /inception4b/branch1/conv/Conv" -> "171 /inception4b/branch1/Relu" [label="(1, 160, 14, 14)", style=solid]; -"171 /inception4b/branch1/Relu" -> "203 QuantizeLinear_/inception4b/branch1/Relu_output_0_1" [label="(1, 160, 14, 14)", style=solid]; -"172 QuantizeLinear_onnx^^Conv_633_1" -> "173 DequantizeLinear_onnx^^Conv_633_1" [label="(112, 512, 1, 1)", style=dashed]; -"173 DequantizeLinear_onnx^^Conv_633_1" -> "174 /inception4b/branch2/branch2.0/conv/Conv" [label="(112, 512, 1, 1)", style=solid]; -"174 /inception4b/branch2/branch2.0/conv/Conv" -> "175 /inception4b/branch2/branch2.0/Relu" [label="(1, 112, 14, 14)", style=solid]; -"175 /inception4b/branch2/branch2.0/Relu" -> "176 QuantizeLinear_/inception4b/branch2/branch2.0/Relu_output_0_1" [label="(1, 112, 14, 14)", style=solid]; -"176 QuantizeLinear_/inception4b/branch2/branch2.0/Relu_output_0_1" -> "177 DequantizeLinear_/inception4b/branch2/branch2.0/Relu_output_0_1" [label="(1, 112, 14, 14)", style=dashed]; -"177 DequantizeLinear_/inception4b/branch2/branch2.0/Relu_output_0_1" -> "180 /inception4b/branch2/branch2.1/conv/Conv" [label="(1, 112, 14, 14)", style=solid]; -"178 QuantizeLinear_onnx^^Conv_636_1" -> "179 DequantizeLinear_onnx^^Conv_636_1" [label="(224, 112, 3, 3)", style=dashed]; -"179 DequantizeLinear_onnx^^Conv_636_1" -> "180 /inception4b/branch2/branch2.1/conv/Conv" [label="(224, 112, 3, 3)", style=solid]; -"180 /inception4b/branch2/branch2.1/conv/Conv" -> "181 /inception4b/branch2/branch2.1/Relu" [label="(1, 224, 14, 14)", style=solid]; -"181 /inception4b/branch2/branch2.1/Relu" -> "197 QuantizeLinear_/inception4b/branch2/branch2.1/Relu_output_0_1" [label="(1, 224, 14, 14)", style=solid]; -"182 QuantizeLinear_onnx^^Conv_639_1" -> "183 DequantizeLinear_onnx^^Conv_639_1" [label="(24, 512, 1, 1)", style=dashed]; -"183 DequantizeLinear_onnx^^Conv_639_1" -> "184 /inception4b/branch3/branch3.0/conv/Conv" [label="(24, 512, 1, 1)", style=solid]; -"184 /inception4b/branch3/branch3.0/conv/Conv" -> "185 /inception4b/branch3/branch3.0/Relu" [label="(1, 24, 14, 14)", style=solid]; -"185 /inception4b/branch3/branch3.0/Relu" -> "186 QuantizeLinear_/inception4b/branch3/branch3.0/Relu_output_0_1" [label="(1, 24, 14, 14)", style=solid]; -"186 QuantizeLinear_/inception4b/branch3/branch3.0/Relu_output_0_1" -> "187 DequantizeLinear_/inception4b/branch3/branch3.0/Relu_output_0_1" [label="(1, 24, 14, 
14)", style=dashed]; -"187 DequantizeLinear_/inception4b/branch3/branch3.0/Relu_output_0_1" -> "190 /inception4b/branch3/branch3.1/conv/Conv" [label="(1, 24, 14, 14)", style=solid]; -"188 QuantizeLinear_onnx^^Conv_642_1" -> "189 DequantizeLinear_onnx^^Conv_642_1" [label="(64, 24, 3, 3)", style=dashed]; -"189 DequantizeLinear_onnx^^Conv_642_1" -> "190 /inception4b/branch3/branch3.1/conv/Conv" [label="(64, 24, 3, 3)", style=solid]; -"190 /inception4b/branch3/branch3.1/conv/Conv" -> "191 /inception4b/branch3/branch3.1/Relu" [label="(1, 64, 14, 14)", style=solid]; -"191 /inception4b/branch3/branch3.1/Relu" -> "199 QuantizeLinear_/inception4b/branch3/branch3.1/Relu_output_0_1" [label="(1, 64, 14, 14)", style=solid]; -"192 /inception4b/branch4/branch4.0/MaxPool" -> "195 /inception4b/branch4/branch4.1/conv/Conv" [label="(1, 512, 14, 14)", style=solid]; -"193 QuantizeLinear_onnx^^Conv_645_1" -> "194 DequantizeLinear_onnx^^Conv_645_1" [label="(64, 512, 1, 1)", style=dashed]; -"194 DequantizeLinear_onnx^^Conv_645_1" -> "195 /inception4b/branch4/branch4.1/conv/Conv" [label="(64, 512, 1, 1)", style=solid]; -"195 /inception4b/branch4/branch4.1/conv/Conv" -> "196 /inception4b/branch4/branch4.1/Relu" [label="(1, 64, 14, 14)", style=solid]; -"196 /inception4b/branch4/branch4.1/Relu" -> "201 QuantizeLinear_/inception4b/branch4/branch4.1/Relu_output_0_1" [label="(1, 64, 14, 14)", style=solid]; -"197 QuantizeLinear_/inception4b/branch2/branch2.1/Relu_output_0_1" -> "198 DequantizeLinear_/inception4b/branch2/branch2.1/Relu_output_0_1" [label="(1, 224, 14, 14)", style=dashed]; -"198 DequantizeLinear_/inception4b/branch2/branch2.1/Relu_output_0_1" -> "205 /inception4b/Concat" [label="(1, 224, 14, 14)", style=solid]; -"199 QuantizeLinear_/inception4b/branch3/branch3.1/Relu_output_0_1" -> "200 DequantizeLinear_/inception4b/branch3/branch3.1/Relu_output_0_1" [label="(1, 64, 14, 14)", style=dashed]; -"200 DequantizeLinear_/inception4b/branch3/branch3.1/Relu_output_0_1" -> "205 /inception4b/Concat" [label="(1, 64, 14, 14)", style=solid]; -"201 QuantizeLinear_/inception4b/branch4/branch4.1/Relu_output_0_1" -> "202 DequantizeLinear_/inception4b/branch4/branch4.1/Relu_output_0_1" [label="(1, 64, 14, 14)", style=dashed]; -"202 DequantizeLinear_/inception4b/branch4/branch4.1/Relu_output_0_1" -> "205 /inception4b/Concat" [label="(1, 64, 14, 14)", style=solid]; -"203 QuantizeLinear_/inception4b/branch1/Relu_output_0_1" -> "204 DequantizeLinear_/inception4b/branch1/Relu_output_0_1" [label="(1, 160, 14, 14)", style=dashed]; -"204 DequantizeLinear_/inception4b/branch1/Relu_output_0_1" -> "205 /inception4b/Concat" [label="(1, 160, 14, 14)", style=solid]; -"205 /inception4b/Concat" -> "208 /inception4c/branch1/conv/Conv" [label="(1, 512, 14, 14)", style=solid]; -"205 /inception4b/Concat" -> "212 /inception4c/branch2/branch2.0/conv/Conv" [label="(1, 512, 14, 14)", style=solid]; -"205 /inception4b/Concat" -> "222 /inception4c/branch3/branch3.0/conv/Conv" [label="(1, 512, 14, 14)", style=solid]; -"205 /inception4b/Concat" -> "230 /inception4c/branch4/branch4.0/MaxPool" [label="(1, 512, 14, 14)", style=solid]; -"206 QuantizeLinear_onnx^^Conv_648_1" -> "207 DequantizeLinear_onnx^^Conv_648_1" [label="(128, 512, 1, 1)", style=dashed]; -"207 DequantizeLinear_onnx^^Conv_648_1" -> "208 /inception4c/branch1/conv/Conv" [label="(128, 512, 1, 1)", style=solid]; -"208 /inception4c/branch1/conv/Conv" -> "209 /inception4c/branch1/Relu" [label="(1, 128, 14, 14)", style=solid]; -"209 /inception4c/branch1/Relu" -> "237 
QuantizeLinear_/inception4c/branch1/Relu_output_0_1" [label="(1, 128, 14, 14)", style=solid]; -"210 QuantizeLinear_onnx^^Conv_651_1" -> "211 DequantizeLinear_onnx^^Conv_651_1" [label="(128, 512, 1, 1)", style=dashed]; -"211 DequantizeLinear_onnx^^Conv_651_1" -> "212 /inception4c/branch2/branch2.0/conv/Conv" [label="(128, 512, 1, 1)", style=solid]; -"212 /inception4c/branch2/branch2.0/conv/Conv" -> "213 /inception4c/branch2/branch2.0/Relu" [label="(1, 128, 14, 14)", style=solid]; -"213 /inception4c/branch2/branch2.0/Relu" -> "214 QuantizeLinear_/inception4c/branch2/branch2.0/Relu_output_0_1" [label="(1, 128, 14, 14)", style=solid]; -"214 QuantizeLinear_/inception4c/branch2/branch2.0/Relu_output_0_1" -> "215 DequantizeLinear_/inception4c/branch2/branch2.0/Relu_output_0_1" [label="(1, 128, 14, 14)", style=dashed]; -"215 DequantizeLinear_/inception4c/branch2/branch2.0/Relu_output_0_1" -> "218 /inception4c/branch2/branch2.1/conv/Conv" [label="(1, 128, 14, 14)", style=solid]; -"216 QuantizeLinear_onnx^^Conv_654_1" -> "217 DequantizeLinear_onnx^^Conv_654_1" [label="(256, 128, 3, 3)", style=dashed]; -"217 DequantizeLinear_onnx^^Conv_654_1" -> "218 /inception4c/branch2/branch2.1/conv/Conv" [label="(256, 128, 3, 3)", style=solid]; -"218 /inception4c/branch2/branch2.1/conv/Conv" -> "219 /inception4c/branch2/branch2.1/Relu" [label="(1, 256, 14, 14)", style=solid]; -"219 /inception4c/branch2/branch2.1/Relu" -> "239 QuantizeLinear_/inception4c/branch2/branch2.1/Relu_output_0_1" [label="(1, 256, 14, 14)", style=solid]; -"220 QuantizeLinear_onnx^^Conv_657_1" -> "221 DequantizeLinear_onnx^^Conv_657_1" [label="(24, 512, 1, 1)", style=dashed]; -"221 DequantizeLinear_onnx^^Conv_657_1" -> "222 /inception4c/branch3/branch3.0/conv/Conv" [label="(24, 512, 1, 1)", style=solid]; -"222 /inception4c/branch3/branch3.0/conv/Conv" -> "223 /inception4c/branch3/branch3.0/Relu" [label="(1, 24, 14, 14)", style=solid]; -"223 /inception4c/branch3/branch3.0/Relu" -> "224 QuantizeLinear_/inception4c/branch3/branch3.0/Relu_output_0_1" [label="(1, 24, 14, 14)", style=solid]; -"224 QuantizeLinear_/inception4c/branch3/branch3.0/Relu_output_0_1" -> "225 DequantizeLinear_/inception4c/branch3/branch3.0/Relu_output_0_1" [label="(1, 24, 14, 14)", style=dashed]; -"225 DequantizeLinear_/inception4c/branch3/branch3.0/Relu_output_0_1" -> "228 /inception4c/branch3/branch3.1/conv/Conv" [label="(1, 24, 14, 14)", style=solid]; -"226 QuantizeLinear_onnx^^Conv_660_1" -> "227 DequantizeLinear_onnx^^Conv_660_1" [label="(64, 24, 3, 3)", style=dashed]; -"227 DequantizeLinear_onnx^^Conv_660_1" -> "228 /inception4c/branch3/branch3.1/conv/Conv" [label="(64, 24, 3, 3)", style=solid]; -"228 /inception4c/branch3/branch3.1/conv/Conv" -> "229 /inception4c/branch3/branch3.1/Relu" [label="(1, 64, 14, 14)", style=solid]; -"229 /inception4c/branch3/branch3.1/Relu" -> "241 QuantizeLinear_/inception4c/branch3/branch3.1/Relu_output_0_1" [label="(1, 64, 14, 14)", style=solid]; -"230 /inception4c/branch4/branch4.0/MaxPool" -> "233 /inception4c/branch4/branch4.1/conv/Conv" [label="(1, 512, 14, 14)", style=solid]; -"231 QuantizeLinear_onnx^^Conv_663_1" -> "232 DequantizeLinear_onnx^^Conv_663_1" [label="(64, 512, 1, 1)", style=dashed]; -"232 DequantizeLinear_onnx^^Conv_663_1" -> "233 /inception4c/branch4/branch4.1/conv/Conv" [label="(64, 512, 1, 1)", style=solid]; -"233 /inception4c/branch4/branch4.1/conv/Conv" -> "234 /inception4c/branch4/branch4.1/Relu" [label="(1, 64, 14, 14)", style=solid]; -"234 /inception4c/branch4/branch4.1/Relu" -> "235 
QuantizeLinear_/inception4c/branch4/branch4.1/Relu_output_0_1" [label="(1, 64, 14, 14)", style=solid]; -"235 QuantizeLinear_/inception4c/branch4/branch4.1/Relu_output_0_1" -> "236 DequantizeLinear_/inception4c/branch4/branch4.1/Relu_output_0_1" [label="(1, 64, 14, 14)", style=dashed]; -"236 DequantizeLinear_/inception4c/branch4/branch4.1/Relu_output_0_1" -> "243 /inception4c/Concat" [label="(1, 64, 14, 14)", style=solid]; -"237 QuantizeLinear_/inception4c/branch1/Relu_output_0_1" -> "238 DequantizeLinear_/inception4c/branch1/Relu_output_0_1" [label="(1, 128, 14, 14)", style=dashed]; -"238 DequantizeLinear_/inception4c/branch1/Relu_output_0_1" -> "243 /inception4c/Concat" [label="(1, 128, 14, 14)", style=solid]; -"239 QuantizeLinear_/inception4c/branch2/branch2.1/Relu_output_0_1" -> "240 DequantizeLinear_/inception4c/branch2/branch2.1/Relu_output_0_1" [label="(1, 256, 14, 14)", style=dashed]; -"240 DequantizeLinear_/inception4c/branch2/branch2.1/Relu_output_0_1" -> "243 /inception4c/Concat" [label="(1, 256, 14, 14)", style=solid]; -"241 QuantizeLinear_/inception4c/branch3/branch3.1/Relu_output_0_1" -> "242 DequantizeLinear_/inception4c/branch3/branch3.1/Relu_output_0_1" [label="(1, 64, 14, 14)", style=dashed]; -"242 DequantizeLinear_/inception4c/branch3/branch3.1/Relu_output_0_1" -> "243 /inception4c/Concat" [label="(1, 64, 14, 14)", style=solid]; -"243 /inception4c/Concat" -> "246 /inception4d/branch1/conv/Conv" [label="(1, 512, 14, 14)", style=solid]; -"243 /inception4c/Concat" -> "250 /inception4d/branch2/branch2.0/conv/Conv" [label="(1, 512, 14, 14)", style=solid]; -"243 /inception4c/Concat" -> "260 /inception4d/branch3/branch3.0/conv/Conv" [label="(1, 512, 14, 14)", style=solid]; -"243 /inception4c/Concat" -> "268 /inception4d/branch4/branch4.0/MaxPool" [label="(1, 512, 14, 14)", style=solid]; -"244 QuantizeLinear_onnx^^Conv_666_1" -> "245 DequantizeLinear_onnx^^Conv_666_1" [label="(112, 512, 1, 1)", style=dashed]; -"245 DequantizeLinear_onnx^^Conv_666_1" -> "246 /inception4d/branch1/conv/Conv" [label="(112, 512, 1, 1)", style=solid]; -"246 /inception4d/branch1/conv/Conv" -> "247 /inception4d/branch1/Relu" [label="(1, 112, 14, 14)", style=solid]; -"247 /inception4d/branch1/Relu" -> "279 QuantizeLinear_/inception4d/branch1/Relu_output_0_1" [label="(1, 112, 14, 14)", style=solid]; -"248 QuantizeLinear_onnx^^Conv_669_1" -> "249 DequantizeLinear_onnx^^Conv_669_1" [label="(144, 512, 1, 1)", style=dashed]; -"249 DequantizeLinear_onnx^^Conv_669_1" -> "250 /inception4d/branch2/branch2.0/conv/Conv" [label="(144, 512, 1, 1)", style=solid]; -"250 /inception4d/branch2/branch2.0/conv/Conv" -> "251 /inception4d/branch2/branch2.0/Relu" [label="(1, 144, 14, 14)", style=solid]; -"251 /inception4d/branch2/branch2.0/Relu" -> "252 QuantizeLinear_/inception4d/branch2/branch2.0/Relu_output_0_1" [label="(1, 144, 14, 14)", style=solid]; -"252 QuantizeLinear_/inception4d/branch2/branch2.0/Relu_output_0_1" -> "253 DequantizeLinear_/inception4d/branch2/branch2.0/Relu_output_0_1" [label="(1, 144, 14, 14)", style=dashed]; -"253 DequantizeLinear_/inception4d/branch2/branch2.0/Relu_output_0_1" -> "256 /inception4d/branch2/branch2.1/conv/Conv" [label="(1, 144, 14, 14)", style=solid]; -"254 QuantizeLinear_onnx^^Conv_672_1" -> "255 DequantizeLinear_onnx^^Conv_672_1" [label="(288, 144, 3, 3)", style=dashed]; -"255 DequantizeLinear_onnx^^Conv_672_1" -> "256 /inception4d/branch2/branch2.1/conv/Conv" [label="(288, 144, 3, 3)", style=solid]; -"256 /inception4d/branch2/branch2.1/conv/Conv" -> "257 
/inception4d/branch2/branch2.1/Relu" [label="(1, 288, 14, 14)", style=solid]; -"257 /inception4d/branch2/branch2.1/Relu" -> "273 QuantizeLinear_/inception4d/branch2/branch2.1/Relu_output_0_1" [label="(1, 288, 14, 14)", style=solid]; -"258 QuantizeLinear_onnx^^Conv_675_1" -> "259 DequantizeLinear_onnx^^Conv_675_1" [label="(32, 512, 1, 1)", style=dashed]; -"259 DequantizeLinear_onnx^^Conv_675_1" -> "260 /inception4d/branch3/branch3.0/conv/Conv" [label="(32, 512, 1, 1)", style=solid]; -"260 /inception4d/branch3/branch3.0/conv/Conv" -> "261 /inception4d/branch3/branch3.0/Relu" [label="(1, 32, 14, 14)", style=solid]; -"261 /inception4d/branch3/branch3.0/Relu" -> "262 QuantizeLinear_/inception4d/branch3/branch3.0/Relu_output_0_1" [label="(1, 32, 14, 14)", style=solid]; -"262 QuantizeLinear_/inception4d/branch3/branch3.0/Relu_output_0_1" -> "263 DequantizeLinear_/inception4d/branch3/branch3.0/Relu_output_0_1" [label="(1, 32, 14, 14)", style=dashed]; -"263 DequantizeLinear_/inception4d/branch3/branch3.0/Relu_output_0_1" -> "266 /inception4d/branch3/branch3.1/conv/Conv" [label="(1, 32, 14, 14)", style=solid]; -"264 QuantizeLinear_onnx^^Conv_678_1" -> "265 DequantizeLinear_onnx^^Conv_678_1" [label="(64, 32, 3, 3)", style=dashed]; -"265 DequantizeLinear_onnx^^Conv_678_1" -> "266 /inception4d/branch3/branch3.1/conv/Conv" [label="(64, 32, 3, 3)", style=solid]; -"266 /inception4d/branch3/branch3.1/conv/Conv" -> "267 /inception4d/branch3/branch3.1/Relu" [label="(1, 64, 14, 14)", style=solid]; -"267 /inception4d/branch3/branch3.1/Relu" -> "275 QuantizeLinear_/inception4d/branch3/branch3.1/Relu_output_0_1" [label="(1, 64, 14, 14)", style=solid]; -"268 /inception4d/branch4/branch4.0/MaxPool" -> "271 /inception4d/branch4/branch4.1/conv/Conv" [label="(1, 512, 14, 14)", style=solid]; -"269 QuantizeLinear_onnx^^Conv_681_1" -> "270 DequantizeLinear_onnx^^Conv_681_1" [label="(64, 512, 1, 1)", style=dashed]; -"270 DequantizeLinear_onnx^^Conv_681_1" -> "271 /inception4d/branch4/branch4.1/conv/Conv" [label="(64, 512, 1, 1)", style=solid]; -"271 /inception4d/branch4/branch4.1/conv/Conv" -> "272 /inception4d/branch4/branch4.1/Relu" [label="(1, 64, 14, 14)", style=solid]; -"272 /inception4d/branch4/branch4.1/Relu" -> "277 QuantizeLinear_/inception4d/branch4/branch4.1/Relu_output_0_1" [label="(1, 64, 14, 14)", style=solid]; -"273 QuantizeLinear_/inception4d/branch2/branch2.1/Relu_output_0_1" -> "274 DequantizeLinear_/inception4d/branch2/branch2.1/Relu_output_0_1" [label="(1, 288, 14, 14)", style=dashed]; -"274 DequantizeLinear_/inception4d/branch2/branch2.1/Relu_output_0_1" -> "281 /inception4d/Concat" [label="(1, 288, 14, 14)", style=solid]; -"275 QuantizeLinear_/inception4d/branch3/branch3.1/Relu_output_0_1" -> "276 DequantizeLinear_/inception4d/branch3/branch3.1/Relu_output_0_1" [label="(1, 64, 14, 14)", style=dashed]; -"276 DequantizeLinear_/inception4d/branch3/branch3.1/Relu_output_0_1" -> "281 /inception4d/Concat" [label="(1, 64, 14, 14)", style=solid]; -"277 QuantizeLinear_/inception4d/branch4/branch4.1/Relu_output_0_1" -> "278 DequantizeLinear_/inception4d/branch4/branch4.1/Relu_output_0_1" [label="(1, 64, 14, 14)", style=dashed]; -"278 DequantizeLinear_/inception4d/branch4/branch4.1/Relu_output_0_1" -> "281 /inception4d/Concat" [label="(1, 64, 14, 14)", style=solid]; -"279 QuantizeLinear_/inception4d/branch1/Relu_output_0_1" -> "280 DequantizeLinear_/inception4d/branch1/Relu_output_0_1" [label="(1, 112, 14, 14)", style=dashed]; -"280 DequantizeLinear_/inception4d/branch1/Relu_output_0_1" -> "281 
/inception4d/Concat" [label="(1, 112, 14, 14)", style=solid]; -"281 /inception4d/Concat" -> "284 /inception4e/branch1/conv/Conv" [label="(1, 528, 14, 14)", style=solid]; -"281 /inception4d/Concat" -> "288 /inception4e/branch2/branch2.0/conv/Conv" [label="(1, 528, 14, 14)", style=solid]; -"281 /inception4d/Concat" -> "298 /inception4e/branch3/branch3.0/conv/Conv" [label="(1, 528, 14, 14)", style=solid]; -"281 /inception4d/Concat" -> "306 /inception4e/branch4/branch4.0/MaxPool" [label="(1, 528, 14, 14)", style=solid]; -"282 QuantizeLinear_onnx^^Conv_684_1" -> "283 DequantizeLinear_onnx^^Conv_684_1" [label="(256, 528, 1, 1)", style=dashed]; -"283 DequantizeLinear_onnx^^Conv_684_1" -> "284 /inception4e/branch1/conv/Conv" [label="(256, 528, 1, 1)", style=solid]; -"284 /inception4e/branch1/conv/Conv" -> "285 /inception4e/branch1/Relu" [label="(1, 256, 14, 14)", style=solid]; -"285 /inception4e/branch1/Relu" -> "317 QuantizeLinear_/inception4e/branch1/Relu_output_0_1" [label="(1, 256, 14, 14)", style=solid]; -"286 QuantizeLinear_onnx^^Conv_687_1" -> "287 DequantizeLinear_onnx^^Conv_687_1" [label="(160, 528, 1, 1)", style=dashed]; -"287 DequantizeLinear_onnx^^Conv_687_1" -> "288 /inception4e/branch2/branch2.0/conv/Conv" [label="(160, 528, 1, 1)", style=solid]; -"288 /inception4e/branch2/branch2.0/conv/Conv" -> "289 /inception4e/branch2/branch2.0/Relu" [label="(1, 160, 14, 14)", style=solid]; -"289 /inception4e/branch2/branch2.0/Relu" -> "290 QuantizeLinear_/inception4e/branch2/branch2.0/Relu_output_0_1" [label="(1, 160, 14, 14)", style=solid]; -"290 QuantizeLinear_/inception4e/branch2/branch2.0/Relu_output_0_1" -> "291 DequantizeLinear_/inception4e/branch2/branch2.0/Relu_output_0_1" [label="(1, 160, 14, 14)", style=dashed]; -"291 DequantizeLinear_/inception4e/branch2/branch2.0/Relu_output_0_1" -> "294 /inception4e/branch2/branch2.1/conv/Conv" [label="(1, 160, 14, 14)", style=solid]; -"292 QuantizeLinear_onnx^^Conv_690_1" -> "293 DequantizeLinear_onnx^^Conv_690_1" [label="(320, 160, 3, 3)", style=dashed]; -"293 DequantizeLinear_onnx^^Conv_690_1" -> "294 /inception4e/branch2/branch2.1/conv/Conv" [label="(320, 160, 3, 3)", style=solid]; -"294 /inception4e/branch2/branch2.1/conv/Conv" -> "295 /inception4e/branch2/branch2.1/Relu" [label="(1, 320, 14, 14)", style=solid]; -"295 /inception4e/branch2/branch2.1/Relu" -> "311 QuantizeLinear_/inception4e/branch2/branch2.1/Relu_output_0_1" [label="(1, 320, 14, 14)", style=solid]; -"296 QuantizeLinear_onnx^^Conv_693_1" -> "297 DequantizeLinear_onnx^^Conv_693_1" [label="(32, 528, 1, 1)", style=dashed]; -"297 DequantizeLinear_onnx^^Conv_693_1" -> "298 /inception4e/branch3/branch3.0/conv/Conv" [label="(32, 528, 1, 1)", style=solid]; -"298 /inception4e/branch3/branch3.0/conv/Conv" -> "299 /inception4e/branch3/branch3.0/Relu" [label="(1, 32, 14, 14)", style=solid]; -"299 /inception4e/branch3/branch3.0/Relu" -> "300 QuantizeLinear_/inception4e/branch3/branch3.0/Relu_output_0_1" [label="(1, 32, 14, 14)", style=solid]; -"300 QuantizeLinear_/inception4e/branch3/branch3.0/Relu_output_0_1" -> "301 DequantizeLinear_/inception4e/branch3/branch3.0/Relu_output_0_1" [label="(1, 32, 14, 14)", style=dashed]; -"301 DequantizeLinear_/inception4e/branch3/branch3.0/Relu_output_0_1" -> "304 /inception4e/branch3/branch3.1/conv/Conv" [label="(1, 32, 14, 14)", style=solid]; -"302 QuantizeLinear_onnx^^Conv_696_1" -> "303 DequantizeLinear_onnx^^Conv_696_1" [label="(128, 32, 3, 3)", style=dashed]; -"303 DequantizeLinear_onnx^^Conv_696_1" -> "304 /inception4e/branch3/branch3.1/conv/Conv" 
[label="(128, 32, 3, 3)", style=solid]; -"304 /inception4e/branch3/branch3.1/conv/Conv" -> "305 /inception4e/branch3/branch3.1/Relu" [label="(1, 128, 14, 14)", style=solid]; -"305 /inception4e/branch3/branch3.1/Relu" -> "313 QuantizeLinear_/inception4e/branch3/branch3.1/Relu_output_0_1" [label="(1, 128, 14, 14)", style=solid]; -"306 /inception4e/branch4/branch4.0/MaxPool" -> "309 /inception4e/branch4/branch4.1/conv/Conv" [label="(1, 528, 14, 14)", style=solid]; -"307 QuantizeLinear_onnx^^Conv_699_1" -> "308 DequantizeLinear_onnx^^Conv_699_1" [label="(128, 528, 1, 1)", style=dashed]; -"308 DequantizeLinear_onnx^^Conv_699_1" -> "309 /inception4e/branch4/branch4.1/conv/Conv" [label="(128, 528, 1, 1)", style=solid]; -"309 /inception4e/branch4/branch4.1/conv/Conv" -> "310 /inception4e/branch4/branch4.1/Relu" [label="(1, 128, 14, 14)", style=solid]; -"310 /inception4e/branch4/branch4.1/Relu" -> "315 QuantizeLinear_/inception4e/branch4/branch4.1/Relu_output_0_1" [label="(1, 128, 14, 14)", style=solid]; -"311 QuantizeLinear_/inception4e/branch2/branch2.1/Relu_output_0_1" -> "312 DequantizeLinear_/inception4e/branch2/branch2.1/Relu_output_0_1" [label="(1, 320, 14, 14)", style=dashed]; -"312 DequantizeLinear_/inception4e/branch2/branch2.1/Relu_output_0_1" -> "319 /inception4e/Concat" [label="(1, 320, 14, 14)", style=solid]; -"313 QuantizeLinear_/inception4e/branch3/branch3.1/Relu_output_0_1" -> "314 DequantizeLinear_/inception4e/branch3/branch3.1/Relu_output_0_1" [label="(1, 128, 14, 14)", style=dashed]; -"314 DequantizeLinear_/inception4e/branch3/branch3.1/Relu_output_0_1" -> "319 /inception4e/Concat" [label="(1, 128, 14, 14)", style=solid]; -"315 QuantizeLinear_/inception4e/branch4/branch4.1/Relu_output_0_1" -> "316 DequantizeLinear_/inception4e/branch4/branch4.1/Relu_output_0_1" [label="(1, 128, 14, 14)", style=dashed]; -"316 DequantizeLinear_/inception4e/branch4/branch4.1/Relu_output_0_1" -> "319 /inception4e/Concat" [label="(1, 128, 14, 14)", style=solid]; -"317 QuantizeLinear_/inception4e/branch1/Relu_output_0_1" -> "318 DequantizeLinear_/inception4e/branch1/Relu_output_0_1" [label="(1, 256, 14, 14)", style=dashed]; -"318 DequantizeLinear_/inception4e/branch1/Relu_output_0_1" -> "319 /inception4e/Concat" [label="(1, 256, 14, 14)", style=solid]; -"319 /inception4e/Concat" -> "320 /maxpool4/MaxPool" [label="(1, 832, 14, 14)", style=solid]; -"320 /maxpool4/MaxPool" -> "323 /inception5a/branch1/conv/Conv" [label="(1, 832, 7, 7)", style=solid]; -"320 /maxpool4/MaxPool" -> "327 /inception5a/branch2/branch2.0/conv/Conv" [label="(1, 832, 7, 7)", style=solid]; -"320 /maxpool4/MaxPool" -> "337 /inception5a/branch3/branch3.0/conv/Conv" [label="(1, 832, 7, 7)", style=solid]; -"320 /maxpool4/MaxPool" -> "345 /inception5a/branch4/branch4.0/MaxPool" [label="(1, 832, 7, 7)", style=solid]; -"321 QuantizeLinear_onnx^^Conv_702_1" -> "322 DequantizeLinear_onnx^^Conv_702_1" [label="(256, 832, 1, 1)", style=dashed]; -"322 DequantizeLinear_onnx^^Conv_702_1" -> "323 /inception5a/branch1/conv/Conv" [label="(256, 832, 1, 1)", style=solid]; -"323 /inception5a/branch1/conv/Conv" -> "324 /inception5a/branch1/Relu" [label="(1, 256, 7, 7)", style=solid]; -"324 /inception5a/branch1/Relu" -> "356 QuantizeLinear_/inception5a/branch1/Relu_output_0_1" [label="(1, 256, 7, 7)", style=solid]; -"325 QuantizeLinear_onnx^^Conv_705_1" -> "326 DequantizeLinear_onnx^^Conv_705_1" [label="(160, 832, 1, 1)", style=dashed]; -"326 DequantizeLinear_onnx^^Conv_705_1" -> "327 /inception5a/branch2/branch2.0/conv/Conv" [label="(160, 832, 1, 1)", 
style=solid]; -"327 /inception5a/branch2/branch2.0/conv/Conv" -> "328 /inception5a/branch2/branch2.0/Relu" [label="(1, 160, 7, 7)", style=solid]; -"328 /inception5a/branch2/branch2.0/Relu" -> "329 QuantizeLinear_/inception5a/branch2/branch2.0/Relu_output_0_1" [label="(1, 160, 7, 7)", style=solid]; -"329 QuantizeLinear_/inception5a/branch2/branch2.0/Relu_output_0_1" -> "330 DequantizeLinear_/inception5a/branch2/branch2.0/Relu_output_0_1" [label="(1, 160, 7, 7)", style=dashed]; -"330 DequantizeLinear_/inception5a/branch2/branch2.0/Relu_output_0_1" -> "333 /inception5a/branch2/branch2.1/conv/Conv" [label="(1, 160, 7, 7)", style=solid]; -"331 QuantizeLinear_onnx^^Conv_708_1" -> "332 DequantizeLinear_onnx^^Conv_708_1" [label="(320, 160, 3, 3)", style=dashed]; -"332 DequantizeLinear_onnx^^Conv_708_1" -> "333 /inception5a/branch2/branch2.1/conv/Conv" [label="(320, 160, 3, 3)", style=solid]; -"333 /inception5a/branch2/branch2.1/conv/Conv" -> "334 /inception5a/branch2/branch2.1/Relu" [label="(1, 320, 7, 7)", style=solid]; -"334 /inception5a/branch2/branch2.1/Relu" -> "350 QuantizeLinear_/inception5a/branch2/branch2.1/Relu_output_0_1" [label="(1, 320, 7, 7)", style=solid]; -"335 QuantizeLinear_onnx^^Conv_711_1" -> "336 DequantizeLinear_onnx^^Conv_711_1" [label="(32, 832, 1, 1)", style=dashed]; -"336 DequantizeLinear_onnx^^Conv_711_1" -> "337 /inception5a/branch3/branch3.0/conv/Conv" [label="(32, 832, 1, 1)", style=solid]; -"337 /inception5a/branch3/branch3.0/conv/Conv" -> "338 /inception5a/branch3/branch3.0/Relu" [label="(1, 32, 7, 7)", style=solid]; -"338 /inception5a/branch3/branch3.0/Relu" -> "339 QuantizeLinear_/inception5a/branch3/branch3.0/Relu_output_0_1" [label="(1, 32, 7, 7)", style=solid]; -"339 QuantizeLinear_/inception5a/branch3/branch3.0/Relu_output_0_1" -> "340 DequantizeLinear_/inception5a/branch3/branch3.0/Relu_output_0_1" [label="(1, 32, 7, 7)", style=dashed]; -"340 DequantizeLinear_/inception5a/branch3/branch3.0/Relu_output_0_1" -> "343 /inception5a/branch3/branch3.1/conv/Conv" [label="(1, 32, 7, 7)", style=solid]; -"341 QuantizeLinear_onnx^^Conv_714_1" -> "342 DequantizeLinear_onnx^^Conv_714_1" [label="(128, 32, 3, 3)", style=dashed]; -"342 DequantizeLinear_onnx^^Conv_714_1" -> "343 /inception5a/branch3/branch3.1/conv/Conv" [label="(128, 32, 3, 3)", style=solid]; -"343 /inception5a/branch3/branch3.1/conv/Conv" -> "344 /inception5a/branch3/branch3.1/Relu" [label="(1, 128, 7, 7)", style=solid]; -"344 /inception5a/branch3/branch3.1/Relu" -> "352 QuantizeLinear_/inception5a/branch3/branch3.1/Relu_output_0_1" [label="(1, 128, 7, 7)", style=solid]; -"345 /inception5a/branch4/branch4.0/MaxPool" -> "348 /inception5a/branch4/branch4.1/conv/Conv" [label="(1, 832, 7, 7)", style=solid]; -"346 QuantizeLinear_onnx^^Conv_717_1" -> "347 DequantizeLinear_onnx^^Conv_717_1" [label="(128, 832, 1, 1)", style=dashed]; -"347 DequantizeLinear_onnx^^Conv_717_1" -> "348 /inception5a/branch4/branch4.1/conv/Conv" [label="(128, 832, 1, 1)", style=solid]; -"348 /inception5a/branch4/branch4.1/conv/Conv" -> "349 /inception5a/branch4/branch4.1/Relu" [label="(1, 128, 7, 7)", style=solid]; -"349 /inception5a/branch4/branch4.1/Relu" -> "354 QuantizeLinear_/inception5a/branch4/branch4.1/Relu_output_0_1" [label="(1, 128, 7, 7)", style=solid]; -"350 QuantizeLinear_/inception5a/branch2/branch2.1/Relu_output_0_1" -> "351 DequantizeLinear_/inception5a/branch2/branch2.1/Relu_output_0_1" [label="(1, 320, 7, 7)", style=dashed]; -"351 DequantizeLinear_/inception5a/branch2/branch2.1/Relu_output_0_1" -> "358 
/inception5a/Concat" [label="(1, 320, 7, 7)", style=solid]; -"352 QuantizeLinear_/inception5a/branch3/branch3.1/Relu_output_0_1" -> "353 DequantizeLinear_/inception5a/branch3/branch3.1/Relu_output_0_1" [label="(1, 128, 7, 7)", style=dashed]; -"353 DequantizeLinear_/inception5a/branch3/branch3.1/Relu_output_0_1" -> "358 /inception5a/Concat" [label="(1, 128, 7, 7)", style=solid]; -"354 QuantizeLinear_/inception5a/branch4/branch4.1/Relu_output_0_1" -> "355 DequantizeLinear_/inception5a/branch4/branch4.1/Relu_output_0_1" [label="(1, 128, 7, 7)", style=dashed]; -"355 DequantizeLinear_/inception5a/branch4/branch4.1/Relu_output_0_1" -> "358 /inception5a/Concat" [label="(1, 128, 7, 7)", style=solid]; -"356 QuantizeLinear_/inception5a/branch1/Relu_output_0_1" -> "357 DequantizeLinear_/inception5a/branch1/Relu_output_0_1" [label="(1, 256, 7, 7)", style=dashed]; -"357 DequantizeLinear_/inception5a/branch1/Relu_output_0_1" -> "358 /inception5a/Concat" [label="(1, 256, 7, 7)", style=solid]; -"358 /inception5a/Concat" -> "361 /inception5b/branch1/conv/Conv" [label="(1, 832, 7, 7)", style=solid]; -"358 /inception5a/Concat" -> "365 /inception5b/branch2/branch2.0/conv/Conv" [label="(1, 832, 7, 7)", style=solid]; -"358 /inception5a/Concat" -> "375 /inception5b/branch3/branch3.0/conv/Conv" [label="(1, 832, 7, 7)", style=solid]; -"358 /inception5a/Concat" -> "383 /inception5b/branch4/branch4.0/MaxPool" [label="(1, 832, 7, 7)", style=solid]; -"359 QuantizeLinear_onnx^^Conv_720_1" -> "360 DequantizeLinear_onnx^^Conv_720_1" [label="(384, 832, 1, 1)", style=dashed]; -"360 DequantizeLinear_onnx^^Conv_720_1" -> "361 /inception5b/branch1/conv/Conv" [label="(384, 832, 1, 1)", style=solid]; -"361 /inception5b/branch1/conv/Conv" -> "362 /inception5b/branch1/Relu" [label="(1, 384, 7, 7)", style=solid]; -"362 /inception5b/branch1/Relu" -> "394 QuantizeLinear_/inception5b/branch1/Relu_output_0_1" [label="(1, 384, 7, 7)", style=solid]; -"363 QuantizeLinear_onnx^^Conv_723_1" -> "364 DequantizeLinear_onnx^^Conv_723_1" [label="(192, 832, 1, 1)", style=dashed]; -"364 DequantizeLinear_onnx^^Conv_723_1" -> "365 /inception5b/branch2/branch2.0/conv/Conv" [label="(192, 832, 1, 1)", style=solid]; -"365 /inception5b/branch2/branch2.0/conv/Conv" -> "366 /inception5b/branch2/branch2.0/Relu" [label="(1, 192, 7, 7)", style=solid]; -"366 /inception5b/branch2/branch2.0/Relu" -> "367 QuantizeLinear_/inception5b/branch2/branch2.0/Relu_output_0_1" [label="(1, 192, 7, 7)", style=solid]; -"367 QuantizeLinear_/inception5b/branch2/branch2.0/Relu_output_0_1" -> "368 DequantizeLinear_/inception5b/branch2/branch2.0/Relu_output_0_1" [label="(1, 192, 7, 7)", style=dashed]; -"368 DequantizeLinear_/inception5b/branch2/branch2.0/Relu_output_0_1" -> "371 /inception5b/branch2/branch2.1/conv/Conv" [label="(1, 192, 7, 7)", style=solid]; -"369 QuantizeLinear_onnx^^Conv_726_1" -> "370 DequantizeLinear_onnx^^Conv_726_1" [label="(384, 192, 3, 3)", style=dashed]; -"370 DequantizeLinear_onnx^^Conv_726_1" -> "371 /inception5b/branch2/branch2.1/conv/Conv" [label="(384, 192, 3, 3)", style=solid]; -"371 /inception5b/branch2/branch2.1/conv/Conv" -> "372 /inception5b/branch2/branch2.1/Relu" [label="(1, 384, 7, 7)", style=solid]; -"372 /inception5b/branch2/branch2.1/Relu" -> "388 QuantizeLinear_/inception5b/branch2/branch2.1/Relu_output_0_1" [label="(1, 384, 7, 7)", style=solid]; -"373 QuantizeLinear_onnx^^Conv_729_1" -> "374 DequantizeLinear_onnx^^Conv_729_1" [label="(48, 832, 1, 1)", style=dashed]; -"374 DequantizeLinear_onnx^^Conv_729_1" -> "375 
/inception5b/branch3/branch3.0/conv/Conv" [label="(48, 832, 1, 1)", style=solid]; -"375 /inception5b/branch3/branch3.0/conv/Conv" -> "376 /inception5b/branch3/branch3.0/Relu" [label="(1, 48, 7, 7)", style=solid]; -"376 /inception5b/branch3/branch3.0/Relu" -> "377 QuantizeLinear_/inception5b/branch3/branch3.0/Relu_output_0_1" [label="(1, 48, 7, 7)", style=solid]; -"377 QuantizeLinear_/inception5b/branch3/branch3.0/Relu_output_0_1" -> "378 DequantizeLinear_/inception5b/branch3/branch3.0/Relu_output_0_1" [label="(1, 48, 7, 7)", style=dashed]; -"378 DequantizeLinear_/inception5b/branch3/branch3.0/Relu_output_0_1" -> "381 /inception5b/branch3/branch3.1/conv/Conv" [label="(1, 48, 7, 7)", style=solid]; -"379 QuantizeLinear_onnx^^Conv_732_1" -> "380 DequantizeLinear_onnx^^Conv_732_1" [label="(128, 48, 3, 3)", style=dashed]; -"380 DequantizeLinear_onnx^^Conv_732_1" -> "381 /inception5b/branch3/branch3.1/conv/Conv" [label="(128, 48, 3, 3)", style=solid]; -"381 /inception5b/branch3/branch3.1/conv/Conv" -> "382 /inception5b/branch3/branch3.1/Relu" [label="(1, 128, 7, 7)", style=solid]; -"382 /inception5b/branch3/branch3.1/Relu" -> "390 QuantizeLinear_/inception5b/branch3/branch3.1/Relu_output_0_1" [label="(1, 128, 7, 7)", style=solid]; -"383 /inception5b/branch4/branch4.0/MaxPool" -> "386 /inception5b/branch4/branch4.1/conv/Conv" [label="(1, 832, 7, 7)", style=solid]; -"384 QuantizeLinear_onnx^^Conv_735_1" -> "385 DequantizeLinear_onnx^^Conv_735_1" [label="(128, 832, 1, 1)", style=dashed]; -"385 DequantizeLinear_onnx^^Conv_735_1" -> "386 /inception5b/branch4/branch4.1/conv/Conv" [label="(128, 832, 1, 1)", style=solid]; -"386 /inception5b/branch4/branch4.1/conv/Conv" -> "387 /inception5b/branch4/branch4.1/Relu" [label="(1, 128, 7, 7)", style=solid]; -"387 /inception5b/branch4/branch4.1/Relu" -> "392 QuantizeLinear_/inception5b/branch4/branch4.1/Relu_output_0_1" [label="(1, 128, 7, 7)", style=solid]; -"388 QuantizeLinear_/inception5b/branch2/branch2.1/Relu_output_0_1" -> "389 DequantizeLinear_/inception5b/branch2/branch2.1/Relu_output_0_1" [label="(1, 384, 7, 7)", style=dashed]; -"389 DequantizeLinear_/inception5b/branch2/branch2.1/Relu_output_0_1" -> "396 /inception5b/Concat" [label="(1, 384, 7, 7)", style=solid]; -"390 QuantizeLinear_/inception5b/branch3/branch3.1/Relu_output_0_1" -> "391 DequantizeLinear_/inception5b/branch3/branch3.1/Relu_output_0_1" [label="(1, 128, 7, 7)", style=dashed]; -"391 DequantizeLinear_/inception5b/branch3/branch3.1/Relu_output_0_1" -> "396 /inception5b/Concat" [label="(1, 128, 7, 7)", style=solid]; -"392 QuantizeLinear_/inception5b/branch4/branch4.1/Relu_output_0_1" -> "393 DequantizeLinear_/inception5b/branch4/branch4.1/Relu_output_0_1" [label="(1, 128, 7, 7)", style=dashed]; -"393 DequantizeLinear_/inception5b/branch4/branch4.1/Relu_output_0_1" -> "396 /inception5b/Concat" [label="(1, 128, 7, 7)", style=solid]; -"394 QuantizeLinear_/inception5b/branch1/Relu_output_0_1" -> "395 DequantizeLinear_/inception5b/branch1/Relu_output_0_1" [label="(1, 384, 7, 7)", style=dashed]; -"395 DequantizeLinear_/inception5b/branch1/Relu_output_0_1" -> "396 /inception5b/Concat" [label="(1, 384, 7, 7)", style=solid]; -"396 /inception5b/Concat" -> "397 /avgpool/GlobalAveragePool" [label="(1, 1024, 7, 7)", style=solid]; -"397 /avgpool/GlobalAveragePool" -> "398 QuantizeLinear_/avgpool/GlobalAveragePool_output_0_1" [label="(1, 1024, 1, 1)", style=solid]; -"398 QuantizeLinear_/avgpool/GlobalAveragePool_output_0_1" -> "399 DequantizeLinear_/avgpool/GlobalAveragePool_output_0_1" [label="(1, 1024, 
1, 1)", style=dashed]; -"399 DequantizeLinear_/avgpool/GlobalAveragePool_output_0_1" -> "400 /Flatten" [label="(1, 1024, 1, 1)", style=solid]; -"400 /Flatten" -> "403 /fc/Gemm" [label="(1, 1024)", style=solid]; -"401 QuantizeLinear_fc.weight_1" -> "402 DequantizeLinear_fc.weight_1" [label="(1000, 1024)", style=dashed]; -"402 DequantizeLinear_fc.weight_1" -> "403 /fc/Gemm" [label="(1000, 1024)", style=solid]; -"403 /fc/Gemm" -> "405 nncf_model_output_0" [label="(1, 1000)", style=solid]; -"404 nncf_model_input_0" -> "2 QuantizeLinear_x.1_1" [label="(1, 3, 224, 224)", style=solid]; +"0 /Constant" -> "4 /Gather" [label="[]", style=dashed]; +"1 /Constant_1" -> "11 /Gather_1" [label="[]", style=dashed]; +"2 QuantizeLinear_x.1_1" -> "3 DequantizeLinear_x.1_1" [label="[1, 3, 224, 224]", style=dashed]; +"3 DequantizeLinear_x.1_1" -> "4 /Gather" [label="[1, 3, 224, 224]", style=solid]; +"3 DequantizeLinear_x.1_1" -> "11 /Gather_1" [label="[1, 3, 224, 224]", style=solid]; +"3 DequantizeLinear_x.1_1" -> "19 /Gather_2" [label="[1, 3, 224, 224]", style=solid]; +"4 /Gather" -> "6 /Unsqueeze" [label="[1, 224, 224]", style=solid]; +"5 /Constant_2" -> "6 /Unsqueeze" [label="[1]", style=dashed]; +"6 /Unsqueeze" -> "8 /Mul" [label="[1, 1, 224, 224]", style=solid]; +"7 /Constant_3" -> "8 /Mul" [label="[]", style=solid]; +"8 /Mul" -> "10 /Add" [label="[1, 1, 224, 224]", style=solid]; +"9 /Constant_4" -> "10 /Add" [label="[]", style=solid]; +"10 /Add" -> "28 QuantizeLinear_/Add_output_0_1" [label="[1, 1, 224, 224]", style=solid]; +"11 /Gather_1" -> "13 /Unsqueeze_1" [label="[1, 224, 224]", style=solid]; +"12 /Constant_5" -> "13 /Unsqueeze_1" [label="[1]", style=dashed]; +"13 /Unsqueeze_1" -> "15 /Mul_1" [label="[1, 1, 224, 224]", style=solid]; +"14 /Constant_6" -> "15 /Mul_1" [label="[]", style=solid]; +"15 /Mul_1" -> "17 /Add_1" [label="[1, 1, 224, 224]", style=solid]; +"16 /Constant_7" -> "17 /Add_1" [label="[]", style=solid]; +"17 /Add_1" -> "30 QuantizeLinear_/Add_1_output_0_1" [label="[1, 1, 224, 224]", style=solid]; +"18 /Constant_8" -> "19 /Gather_2" [label="[]", style=dashed]; +"19 /Gather_2" -> "21 /Unsqueeze_2" [label="[1, 224, 224]", style=solid]; +"20 /Constant_9" -> "21 /Unsqueeze_2" [label="[1]", style=dashed]; +"21 /Unsqueeze_2" -> "23 /Mul_2" [label="[1, 1, 224, 224]", style=solid]; +"22 /Constant_10" -> "23 /Mul_2" [label="[]", style=solid]; +"23 /Mul_2" -> "25 /Add_2" [label="[1, 1, 224, 224]", style=solid]; +"24 /Constant_11" -> "25 /Add_2" [label="[]", style=solid]; +"25 /Add_2" -> "26 QuantizeLinear_/Add_2_output_0_1" [label="[1, 1, 224, 224]", style=solid]; +"26 QuantizeLinear_/Add_2_output_0_1" -> "27 DequantizeLinear_/Add_2_output_0_1" [label="[1, 1, 224, 224]", style=dashed]; +"27 DequantizeLinear_/Add_2_output_0_1" -> "32 /Concat" [label="[1, 1, 224, 224]", style=solid]; +"28 QuantizeLinear_/Add_output_0_1" -> "29 DequantizeLinear_/Add_output_0_1" [label="[1, 1, 224, 224]", style=dashed]; +"29 DequantizeLinear_/Add_output_0_1" -> "32 /Concat" [label="[1, 1, 224, 224]", style=solid]; +"30 QuantizeLinear_/Add_1_output_0_1" -> "31 DequantizeLinear_/Add_1_output_0_1" [label="[1, 1, 224, 224]", style=dashed]; +"31 DequantizeLinear_/Add_1_output_0_1" -> "32 /Concat" [label="[1, 1, 224, 224]", style=solid]; +"32 /Concat" -> "35 /conv1/conv/Conv" [label="[1, 3, 224, 224]", style=solid]; +"33 QuantizeLinear_onnx^^Conv_567_1" -> "34 DequantizeLinear_onnx^^Conv_567_1" [label="[64, 3, 7, 7]", style=dashed]; +"34 DequantizeLinear_onnx^^Conv_567_1" -> "35 /conv1/conv/Conv" [label="[64, 3, 7, 7]", 
style=solid]; +"35 /conv1/conv/Conv" -> "36 /conv1/Relu" [label="[1, 64, 112, 112]", style=solid]; +"36 /conv1/Relu" -> "37 QuantizeLinear_/conv1/Relu_output_0_1" [label="[1, 64, 112, 112]", style=solid]; +"37 QuantizeLinear_/conv1/Relu_output_0_1" -> "38 DequantizeLinear_/conv1/Relu_output_0_1" [label="[1, 64, 112, 112]", style=dashed]; +"38 DequantizeLinear_/conv1/Relu_output_0_1" -> "39 /maxpool1/MaxPool" [label="[1, 64, 112, 112]", style=solid]; +"39 /maxpool1/MaxPool" -> "42 /conv2/conv/Conv" [label="[1, 64, 56, 56]", style=solid]; +"40 QuantizeLinear_onnx^^Conv_570_1" -> "41 DequantizeLinear_onnx^^Conv_570_1" [label="[64, 64, 1, 1]", style=dashed]; +"41 DequantizeLinear_onnx^^Conv_570_1" -> "42 /conv2/conv/Conv" [label="[64, 64, 1, 1]", style=solid]; +"42 /conv2/conv/Conv" -> "43 /conv2/Relu" [label="[1, 64, 56, 56]", style=solid]; +"43 /conv2/Relu" -> "44 QuantizeLinear_/conv2/Relu_output_0_1" [label="[1, 64, 56, 56]", style=solid]; +"44 QuantizeLinear_/conv2/Relu_output_0_1" -> "45 DequantizeLinear_/conv2/Relu_output_0_1" [label="[1, 64, 56, 56]", style=dashed]; +"45 DequantizeLinear_/conv2/Relu_output_0_1" -> "48 /conv3/conv/Conv" [label="[1, 64, 56, 56]", style=solid]; +"46 QuantizeLinear_onnx^^Conv_573_1" -> "47 DequantizeLinear_onnx^^Conv_573_1" [label="[192, 64, 3, 3]", style=dashed]; +"47 DequantizeLinear_onnx^^Conv_573_1" -> "48 /conv3/conv/Conv" [label="[192, 64, 3, 3]", style=solid]; +"48 /conv3/conv/Conv" -> "49 /conv3/Relu" [label="[1, 192, 56, 56]", style=solid]; +"49 /conv3/Relu" -> "50 QuantizeLinear_/conv3/Relu_output_0_1" [label="[1, 192, 56, 56]", style=solid]; +"50 QuantizeLinear_/conv3/Relu_output_0_1" -> "51 DequantizeLinear_/conv3/Relu_output_0_1" [label="[1, 192, 56, 56]", style=dashed]; +"51 DequantizeLinear_/conv3/Relu_output_0_1" -> "52 /maxpool2/MaxPool" [label="[1, 192, 56, 56]", style=solid]; +"52 /maxpool2/MaxPool" -> "55 /inception3a/branch1/conv/Conv" [label="[1, 192, 28, 28]", style=solid]; +"52 /maxpool2/MaxPool" -> "59 /inception3a/branch2/branch2.0/conv/Conv" [label="[1, 192, 28, 28]", style=solid]; +"52 /maxpool2/MaxPool" -> "69 /inception3a/branch3/branch3.0/conv/Conv" [label="[1, 192, 28, 28]", style=solid]; +"52 /maxpool2/MaxPool" -> "77 /inception3a/branch4/branch4.0/MaxPool" [label="[1, 192, 28, 28]", style=solid]; +"53 QuantizeLinear_onnx^^Conv_576_1" -> "54 DequantizeLinear_onnx^^Conv_576_1" [label="[64, 192, 1, 1]", style=dashed]; +"54 DequantizeLinear_onnx^^Conv_576_1" -> "55 /inception3a/branch1/conv/Conv" [label="[64, 192, 1, 1]", style=solid]; +"55 /inception3a/branch1/conv/Conv" -> "56 /inception3a/branch1/Relu" [label="[1, 64, 28, 28]", style=solid]; +"56 /inception3a/branch1/Relu" -> "88 QuantizeLinear_/inception3a/branch1/Relu_output_0_1" [label="[1, 64, 28, 28]", style=solid]; +"57 QuantizeLinear_onnx^^Conv_579_1" -> "58 DequantizeLinear_onnx^^Conv_579_1" [label="[96, 192, 1, 1]", style=dashed]; +"58 DequantizeLinear_onnx^^Conv_579_1" -> "59 /inception3a/branch2/branch2.0/conv/Conv" [label="[96, 192, 1, 1]", style=solid]; +"59 /inception3a/branch2/branch2.0/conv/Conv" -> "60 /inception3a/branch2/branch2.0/Relu" [label="[1, 96, 28, 28]", style=solid]; +"60 /inception3a/branch2/branch2.0/Relu" -> "61 QuantizeLinear_/inception3a/branch2/branch2.0/Relu_output_0_1" [label="[1, 96, 28, 28]", style=solid]; +"61 QuantizeLinear_/inception3a/branch2/branch2.0/Relu_output_0_1" -> "62 DequantizeLinear_/inception3a/branch2/branch2.0/Relu_output_0_1" [label="[1, 96, 28, 28]", style=dashed]; +"62 
DequantizeLinear_/inception3a/branch2/branch2.0/Relu_output_0_1" -> "65 /inception3a/branch2/branch2.1/conv/Conv" [label="[1, 96, 28, 28]", style=solid]; +"63 QuantizeLinear_onnx^^Conv_582_1" -> "64 DequantizeLinear_onnx^^Conv_582_1" [label="[128, 96, 3, 3]", style=dashed]; +"64 DequantizeLinear_onnx^^Conv_582_1" -> "65 /inception3a/branch2/branch2.1/conv/Conv" [label="[128, 96, 3, 3]", style=solid]; +"65 /inception3a/branch2/branch2.1/conv/Conv" -> "66 /inception3a/branch2/branch2.1/Relu" [label="[1, 128, 28, 28]", style=solid]; +"66 /inception3a/branch2/branch2.1/Relu" -> "82 QuantizeLinear_/inception3a/branch2/branch2.1/Relu_output_0_1" [label="[1, 128, 28, 28]", style=solid]; +"67 QuantizeLinear_onnx^^Conv_585_1" -> "68 DequantizeLinear_onnx^^Conv_585_1" [label="[16, 192, 1, 1]", style=dashed]; +"68 DequantizeLinear_onnx^^Conv_585_1" -> "69 /inception3a/branch3/branch3.0/conv/Conv" [label="[16, 192, 1, 1]", style=solid]; +"69 /inception3a/branch3/branch3.0/conv/Conv" -> "70 /inception3a/branch3/branch3.0/Relu" [label="[1, 16, 28, 28]", style=solid]; +"70 /inception3a/branch3/branch3.0/Relu" -> "71 QuantizeLinear_/inception3a/branch3/branch3.0/Relu_output_0_1" [label="[1, 16, 28, 28]", style=solid]; +"71 QuantizeLinear_/inception3a/branch3/branch3.0/Relu_output_0_1" -> "72 DequantizeLinear_/inception3a/branch3/branch3.0/Relu_output_0_1" [label="[1, 16, 28, 28]", style=dashed]; +"72 DequantizeLinear_/inception3a/branch3/branch3.0/Relu_output_0_1" -> "75 /inception3a/branch3/branch3.1/conv/Conv" [label="[1, 16, 28, 28]", style=solid]; +"73 QuantizeLinear_onnx^^Conv_588_1" -> "74 DequantizeLinear_onnx^^Conv_588_1" [label="[32, 16, 3, 3]", style=dashed]; +"74 DequantizeLinear_onnx^^Conv_588_1" -> "75 /inception3a/branch3/branch3.1/conv/Conv" [label="[32, 16, 3, 3]", style=solid]; +"75 /inception3a/branch3/branch3.1/conv/Conv" -> "76 /inception3a/branch3/branch3.1/Relu" [label="[1, 32, 28, 28]", style=solid]; +"76 /inception3a/branch3/branch3.1/Relu" -> "84 QuantizeLinear_/inception3a/branch3/branch3.1/Relu_output_0_1" [label="[1, 32, 28, 28]", style=solid]; +"77 /inception3a/branch4/branch4.0/MaxPool" -> "80 /inception3a/branch4/branch4.1/conv/Conv" [label="[1, 192, 28, 28]", style=solid]; +"78 QuantizeLinear_onnx^^Conv_591_1" -> "79 DequantizeLinear_onnx^^Conv_591_1" [label="[32, 192, 1, 1]", style=dashed]; +"79 DequantizeLinear_onnx^^Conv_591_1" -> "80 /inception3a/branch4/branch4.1/conv/Conv" [label="[32, 192, 1, 1]", style=solid]; +"80 /inception3a/branch4/branch4.1/conv/Conv" -> "81 /inception3a/branch4/branch4.1/Relu" [label="[1, 32, 28, 28]", style=solid]; +"81 /inception3a/branch4/branch4.1/Relu" -> "86 QuantizeLinear_/inception3a/branch4/branch4.1/Relu_output_0_1" [label="[1, 32, 28, 28]", style=solid]; +"82 QuantizeLinear_/inception3a/branch2/branch2.1/Relu_output_0_1" -> "83 DequantizeLinear_/inception3a/branch2/branch2.1/Relu_output_0_1" [label="[1, 128, 28, 28]", style=dashed]; +"83 DequantizeLinear_/inception3a/branch2/branch2.1/Relu_output_0_1" -> "90 /inception3a/Concat" [label="[1, 128, 28, 28]", style=solid]; +"84 QuantizeLinear_/inception3a/branch3/branch3.1/Relu_output_0_1" -> "85 DequantizeLinear_/inception3a/branch3/branch3.1/Relu_output_0_1" [label="[1, 32, 28, 28]", style=dashed]; +"85 DequantizeLinear_/inception3a/branch3/branch3.1/Relu_output_0_1" -> "90 /inception3a/Concat" [label="[1, 32, 28, 28]", style=solid]; +"86 QuantizeLinear_/inception3a/branch4/branch4.1/Relu_output_0_1" -> "87 DequantizeLinear_/inception3a/branch4/branch4.1/Relu_output_0_1" [label="[1, 
32, 28, 28]", style=dashed]; +"87 DequantizeLinear_/inception3a/branch4/branch4.1/Relu_output_0_1" -> "90 /inception3a/Concat" [label="[1, 32, 28, 28]", style=solid]; +"88 QuantizeLinear_/inception3a/branch1/Relu_output_0_1" -> "89 DequantizeLinear_/inception3a/branch1/Relu_output_0_1" [label="[1, 64, 28, 28]", style=dashed]; +"89 DequantizeLinear_/inception3a/branch1/Relu_output_0_1" -> "90 /inception3a/Concat" [label="[1, 64, 28, 28]", style=solid]; +"90 /inception3a/Concat" -> "93 /inception3b/branch1/conv/Conv" [label="[1, 256, 28, 28]", style=solid]; +"90 /inception3a/Concat" -> "97 /inception3b/branch2/branch2.0/conv/Conv" [label="[1, 256, 28, 28]", style=solid]; +"90 /inception3a/Concat" -> "107 /inception3b/branch3/branch3.0/conv/Conv" [label="[1, 256, 28, 28]", style=solid]; +"90 /inception3a/Concat" -> "115 /inception3b/branch4/branch4.0/MaxPool" [label="[1, 256, 28, 28]", style=solid]; +"91 QuantizeLinear_onnx^^Conv_594_1" -> "92 DequantizeLinear_onnx^^Conv_594_1" [label="[128, 256, 1, 1]", style=dashed]; +"92 DequantizeLinear_onnx^^Conv_594_1" -> "93 /inception3b/branch1/conv/Conv" [label="[128, 256, 1, 1]", style=solid]; +"93 /inception3b/branch1/conv/Conv" -> "94 /inception3b/branch1/Relu" [label="[1, 128, 28, 28]", style=solid]; +"94 /inception3b/branch1/Relu" -> "126 QuantizeLinear_/inception3b/branch1/Relu_output_0_1" [label="[1, 128, 28, 28]", style=solid]; +"95 QuantizeLinear_onnx^^Conv_597_1" -> "96 DequantizeLinear_onnx^^Conv_597_1" [label="[128, 256, 1, 1]", style=dashed]; +"96 DequantizeLinear_onnx^^Conv_597_1" -> "97 /inception3b/branch2/branch2.0/conv/Conv" [label="[128, 256, 1, 1]", style=solid]; +"97 /inception3b/branch2/branch2.0/conv/Conv" -> "98 /inception3b/branch2/branch2.0/Relu" [label="[1, 128, 28, 28]", style=solid]; +"98 /inception3b/branch2/branch2.0/Relu" -> "99 QuantizeLinear_/inception3b/branch2/branch2.0/Relu_output_0_1" [label="[1, 128, 28, 28]", style=solid]; +"99 QuantizeLinear_/inception3b/branch2/branch2.0/Relu_output_0_1" -> "100 DequantizeLinear_/inception3b/branch2/branch2.0/Relu_output_0_1" [label="[1, 128, 28, 28]", style=dashed]; +"100 DequantizeLinear_/inception3b/branch2/branch2.0/Relu_output_0_1" -> "103 /inception3b/branch2/branch2.1/conv/Conv" [label="[1, 128, 28, 28]", style=solid]; +"101 QuantizeLinear_onnx^^Conv_600_1" -> "102 DequantizeLinear_onnx^^Conv_600_1" [label="[192, 128, 3, 3]", style=dashed]; +"102 DequantizeLinear_onnx^^Conv_600_1" -> "103 /inception3b/branch2/branch2.1/conv/Conv" [label="[192, 128, 3, 3]", style=solid]; +"103 /inception3b/branch2/branch2.1/conv/Conv" -> "104 /inception3b/branch2/branch2.1/Relu" [label="[1, 192, 28, 28]", style=solid]; +"104 /inception3b/branch2/branch2.1/Relu" -> "120 QuantizeLinear_/inception3b/branch2/branch2.1/Relu_output_0_1" [label="[1, 192, 28, 28]", style=solid]; +"105 QuantizeLinear_onnx^^Conv_603_1" -> "106 DequantizeLinear_onnx^^Conv_603_1" [label="[32, 256, 1, 1]", style=dashed]; +"106 DequantizeLinear_onnx^^Conv_603_1" -> "107 /inception3b/branch3/branch3.0/conv/Conv" [label="[32, 256, 1, 1]", style=solid]; +"107 /inception3b/branch3/branch3.0/conv/Conv" -> "108 /inception3b/branch3/branch3.0/Relu" [label="[1, 32, 28, 28]", style=solid]; +"108 /inception3b/branch3/branch3.0/Relu" -> "109 QuantizeLinear_/inception3b/branch3/branch3.0/Relu_output_0_1" [label="[1, 32, 28, 28]", style=solid]; +"109 QuantizeLinear_/inception3b/branch3/branch3.0/Relu_output_0_1" -> "110 DequantizeLinear_/inception3b/branch3/branch3.0/Relu_output_0_1" [label="[1, 32, 28, 28]", style=dashed]; +"110 
DequantizeLinear_/inception3b/branch3/branch3.0/Relu_output_0_1" -> "113 /inception3b/branch3/branch3.1/conv/Conv" [label="[1, 32, 28, 28]", style=solid]; +"111 QuantizeLinear_onnx^^Conv_606_1" -> "112 DequantizeLinear_onnx^^Conv_606_1" [label="[96, 32, 3, 3]", style=dashed]; +"112 DequantizeLinear_onnx^^Conv_606_1" -> "113 /inception3b/branch3/branch3.1/conv/Conv" [label="[96, 32, 3, 3]", style=solid]; +"113 /inception3b/branch3/branch3.1/conv/Conv" -> "114 /inception3b/branch3/branch3.1/Relu" [label="[1, 96, 28, 28]", style=solid]; +"114 /inception3b/branch3/branch3.1/Relu" -> "122 QuantizeLinear_/inception3b/branch3/branch3.1/Relu_output_0_1" [label="[1, 96, 28, 28]", style=solid]; +"115 /inception3b/branch4/branch4.0/MaxPool" -> "118 /inception3b/branch4/branch4.1/conv/Conv" [label="[1, 256, 28, 28]", style=solid]; +"116 QuantizeLinear_onnx^^Conv_609_1" -> "117 DequantizeLinear_onnx^^Conv_609_1" [label="[64, 256, 1, 1]", style=dashed]; +"117 DequantizeLinear_onnx^^Conv_609_1" -> "118 /inception3b/branch4/branch4.1/conv/Conv" [label="[64, 256, 1, 1]", style=solid]; +"118 /inception3b/branch4/branch4.1/conv/Conv" -> "119 /inception3b/branch4/branch4.1/Relu" [label="[1, 64, 28, 28]", style=solid]; +"119 /inception3b/branch4/branch4.1/Relu" -> "124 QuantizeLinear_/inception3b/branch4/branch4.1/Relu_output_0_1" [label="[1, 64, 28, 28]", style=solid]; +"120 QuantizeLinear_/inception3b/branch2/branch2.1/Relu_output_0_1" -> "121 DequantizeLinear_/inception3b/branch2/branch2.1/Relu_output_0_1" [label="[1, 192, 28, 28]", style=dashed]; +"121 DequantizeLinear_/inception3b/branch2/branch2.1/Relu_output_0_1" -> "128 /inception3b/Concat" [label="[1, 192, 28, 28]", style=solid]; +"122 QuantizeLinear_/inception3b/branch3/branch3.1/Relu_output_0_1" -> "123 DequantizeLinear_/inception3b/branch3/branch3.1/Relu_output_0_1" [label="[1, 96, 28, 28]", style=dashed]; +"123 DequantizeLinear_/inception3b/branch3/branch3.1/Relu_output_0_1" -> "128 /inception3b/Concat" [label="[1, 96, 28, 28]", style=solid]; +"124 QuantizeLinear_/inception3b/branch4/branch4.1/Relu_output_0_1" -> "125 DequantizeLinear_/inception3b/branch4/branch4.1/Relu_output_0_1" [label="[1, 64, 28, 28]", style=dashed]; +"125 DequantizeLinear_/inception3b/branch4/branch4.1/Relu_output_0_1" -> "128 /inception3b/Concat" [label="[1, 64, 28, 28]", style=solid]; +"126 QuantizeLinear_/inception3b/branch1/Relu_output_0_1" -> "127 DequantizeLinear_/inception3b/branch1/Relu_output_0_1" [label="[1, 128, 28, 28]", style=dashed]; +"127 DequantizeLinear_/inception3b/branch1/Relu_output_0_1" -> "128 /inception3b/Concat" [label="[1, 128, 28, 28]", style=solid]; +"128 /inception3b/Concat" -> "129 /maxpool3/MaxPool" [label="[1, 480, 28, 28]", style=solid]; +"129 /maxpool3/MaxPool" -> "132 /inception4a/branch1/conv/Conv" [label="[1, 480, 14, 14]", style=solid]; +"129 /maxpool3/MaxPool" -> "136 /inception4a/branch2/branch2.0/conv/Conv" [label="[1, 480, 14, 14]", style=solid]; +"129 /maxpool3/MaxPool" -> "146 /inception4a/branch3/branch3.0/conv/Conv" [label="[1, 480, 14, 14]", style=solid]; +"129 /maxpool3/MaxPool" -> "154 /inception4a/branch4/branch4.0/MaxPool" [label="[1, 480, 14, 14]", style=solid]; +"130 QuantizeLinear_onnx^^Conv_612_1" -> "131 DequantizeLinear_onnx^^Conv_612_1" [label="[192, 480, 1, 1]", style=dashed]; +"131 DequantizeLinear_onnx^^Conv_612_1" -> "132 /inception4a/branch1/conv/Conv" [label="[192, 480, 1, 1]", style=solid]; +"132 /inception4a/branch1/conv/Conv" -> "133 /inception4a/branch1/Relu" [label="[1, 192, 14, 14]", style=solid]; +"133 
/inception4a/branch1/Relu" -> "159 QuantizeLinear_/inception4a/branch1/Relu_output_0_1" [label="[1, 192, 14, 14]", style=solid]; +"134 QuantizeLinear_onnx^^Conv_615_1" -> "135 DequantizeLinear_onnx^^Conv_615_1" [label="[96, 480, 1, 1]", style=dashed]; +"135 DequantizeLinear_onnx^^Conv_615_1" -> "136 /inception4a/branch2/branch2.0/conv/Conv" [label="[96, 480, 1, 1]", style=solid]; +"136 /inception4a/branch2/branch2.0/conv/Conv" -> "137 /inception4a/branch2/branch2.0/Relu" [label="[1, 96, 14, 14]", style=solid]; +"137 /inception4a/branch2/branch2.0/Relu" -> "138 QuantizeLinear_/inception4a/branch2/branch2.0/Relu_output_0_1" [label="[1, 96, 14, 14]", style=solid]; +"138 QuantizeLinear_/inception4a/branch2/branch2.0/Relu_output_0_1" -> "139 DequantizeLinear_/inception4a/branch2/branch2.0/Relu_output_0_1" [label="[1, 96, 14, 14]", style=dashed]; +"139 DequantizeLinear_/inception4a/branch2/branch2.0/Relu_output_0_1" -> "142 /inception4a/branch2/branch2.1/conv/Conv" [label="[1, 96, 14, 14]", style=solid]; +"140 QuantizeLinear_onnx^^Conv_618_1" -> "141 DequantizeLinear_onnx^^Conv_618_1" [label="[208, 96, 3, 3]", style=dashed]; +"141 DequantizeLinear_onnx^^Conv_618_1" -> "142 /inception4a/branch2/branch2.1/conv/Conv" [label="[208, 96, 3, 3]", style=solid]; +"142 /inception4a/branch2/branch2.1/conv/Conv" -> "143 /inception4a/branch2/branch2.1/Relu" [label="[1, 208, 14, 14]", style=solid]; +"143 /inception4a/branch2/branch2.1/Relu" -> "161 QuantizeLinear_/inception4a/branch2/branch2.1/Relu_output_0_1" [label="[1, 208, 14, 14]", style=solid]; +"144 QuantizeLinear_onnx^^Conv_621_1" -> "145 DequantizeLinear_onnx^^Conv_621_1" [label="[16, 480, 1, 1]", style=dashed]; +"145 DequantizeLinear_onnx^^Conv_621_1" -> "146 /inception4a/branch3/branch3.0/conv/Conv" [label="[16, 480, 1, 1]", style=solid]; +"146 /inception4a/branch3/branch3.0/conv/Conv" -> "147 /inception4a/branch3/branch3.0/Relu" [label="[1, 16, 14, 14]", style=solid]; +"147 /inception4a/branch3/branch3.0/Relu" -> "148 QuantizeLinear_/inception4a/branch3/branch3.0/Relu_output_0_1" [label="[1, 16, 14, 14]", style=solid]; +"148 QuantizeLinear_/inception4a/branch3/branch3.0/Relu_output_0_1" -> "149 DequantizeLinear_/inception4a/branch3/branch3.0/Relu_output_0_1" [label="[1, 16, 14, 14]", style=dashed]; +"149 DequantizeLinear_/inception4a/branch3/branch3.0/Relu_output_0_1" -> "152 /inception4a/branch3/branch3.1/conv/Conv" [label="[1, 16, 14, 14]", style=solid]; +"150 QuantizeLinear_onnx^^Conv_624_1" -> "151 DequantizeLinear_onnx^^Conv_624_1" [label="[48, 16, 3, 3]", style=dashed]; +"151 DequantizeLinear_onnx^^Conv_624_1" -> "152 /inception4a/branch3/branch3.1/conv/Conv" [label="[48, 16, 3, 3]", style=solid]; +"152 /inception4a/branch3/branch3.1/conv/Conv" -> "153 /inception4a/branch3/branch3.1/Relu" [label="[1, 48, 14, 14]", style=solid]; +"153 /inception4a/branch3/branch3.1/Relu" -> "163 QuantizeLinear_/inception4a/branch3/branch3.1/Relu_output_0_1" [label="[1, 48, 14, 14]", style=solid]; +"154 /inception4a/branch4/branch4.0/MaxPool" -> "157 /inception4a/branch4/branch4.1/conv/Conv" [label="[1, 480, 14, 14]", style=solid]; +"155 QuantizeLinear_onnx^^Conv_627_1" -> "156 DequantizeLinear_onnx^^Conv_627_1" [label="[64, 480, 1, 1]", style=dashed]; +"156 DequantizeLinear_onnx^^Conv_627_1" -> "157 /inception4a/branch4/branch4.1/conv/Conv" [label="[64, 480, 1, 1]", style=solid]; +"157 /inception4a/branch4/branch4.1/conv/Conv" -> "158 /inception4a/branch4/branch4.1/Relu" [label="[1, 64, 14, 14]", style=solid]; +"158 /inception4a/branch4/branch4.1/Relu" -> 
"165 QuantizeLinear_/inception4a/branch4/branch4.1/Relu_output_0_1" [label="[1, 64, 14, 14]", style=solid]; +"159 QuantizeLinear_/inception4a/branch1/Relu_output_0_1" -> "160 DequantizeLinear_/inception4a/branch1/Relu_output_0_1" [label="[1, 192, 14, 14]", style=dashed]; +"160 DequantizeLinear_/inception4a/branch1/Relu_output_0_1" -> "167 /inception4a/Concat" [label="[1, 192, 14, 14]", style=solid]; +"161 QuantizeLinear_/inception4a/branch2/branch2.1/Relu_output_0_1" -> "162 DequantizeLinear_/inception4a/branch2/branch2.1/Relu_output_0_1" [label="[1, 208, 14, 14]", style=dashed]; +"162 DequantizeLinear_/inception4a/branch2/branch2.1/Relu_output_0_1" -> "167 /inception4a/Concat" [label="[1, 208, 14, 14]", style=solid]; +"163 QuantizeLinear_/inception4a/branch3/branch3.1/Relu_output_0_1" -> "164 DequantizeLinear_/inception4a/branch3/branch3.1/Relu_output_0_1" [label="[1, 48, 14, 14]", style=dashed]; +"164 DequantizeLinear_/inception4a/branch3/branch3.1/Relu_output_0_1" -> "167 /inception4a/Concat" [label="[1, 48, 14, 14]", style=solid]; +"165 QuantizeLinear_/inception4a/branch4/branch4.1/Relu_output_0_1" -> "166 DequantizeLinear_/inception4a/branch4/branch4.1/Relu_output_0_1" [label="[1, 64, 14, 14]", style=dashed]; +"166 DequantizeLinear_/inception4a/branch4/branch4.1/Relu_output_0_1" -> "167 /inception4a/Concat" [label="[1, 64, 14, 14]", style=solid]; +"167 /inception4a/Concat" -> "170 /inception4b/branch1/conv/Conv" [label="[1, 512, 14, 14]", style=solid]; +"167 /inception4a/Concat" -> "174 /inception4b/branch2/branch2.0/conv/Conv" [label="[1, 512, 14, 14]", style=solid]; +"167 /inception4a/Concat" -> "184 /inception4b/branch3/branch3.0/conv/Conv" [label="[1, 512, 14, 14]", style=solid]; +"167 /inception4a/Concat" -> "192 /inception4b/branch4/branch4.0/MaxPool" [label="[1, 512, 14, 14]", style=solid]; +"168 QuantizeLinear_onnx^^Conv_630_1" -> "169 DequantizeLinear_onnx^^Conv_630_1" [label="[160, 512, 1, 1]", style=dashed]; +"169 DequantizeLinear_onnx^^Conv_630_1" -> "170 /inception4b/branch1/conv/Conv" [label="[160, 512, 1, 1]", style=solid]; +"170 /inception4b/branch1/conv/Conv" -> "171 /inception4b/branch1/Relu" [label="[1, 160, 14, 14]", style=solid]; +"171 /inception4b/branch1/Relu" -> "203 QuantizeLinear_/inception4b/branch1/Relu_output_0_1" [label="[1, 160, 14, 14]", style=solid]; +"172 QuantizeLinear_onnx^^Conv_633_1" -> "173 DequantizeLinear_onnx^^Conv_633_1" [label="[112, 512, 1, 1]", style=dashed]; +"173 DequantizeLinear_onnx^^Conv_633_1" -> "174 /inception4b/branch2/branch2.0/conv/Conv" [label="[112, 512, 1, 1]", style=solid]; +"174 /inception4b/branch2/branch2.0/conv/Conv" -> "175 /inception4b/branch2/branch2.0/Relu" [label="[1, 112, 14, 14]", style=solid]; +"175 /inception4b/branch2/branch2.0/Relu" -> "176 QuantizeLinear_/inception4b/branch2/branch2.0/Relu_output_0_1" [label="[1, 112, 14, 14]", style=solid]; +"176 QuantizeLinear_/inception4b/branch2/branch2.0/Relu_output_0_1" -> "177 DequantizeLinear_/inception4b/branch2/branch2.0/Relu_output_0_1" [label="[1, 112, 14, 14]", style=dashed]; +"177 DequantizeLinear_/inception4b/branch2/branch2.0/Relu_output_0_1" -> "180 /inception4b/branch2/branch2.1/conv/Conv" [label="[1, 112, 14, 14]", style=solid]; +"178 QuantizeLinear_onnx^^Conv_636_1" -> "179 DequantizeLinear_onnx^^Conv_636_1" [label="[224, 112, 3, 3]", style=dashed]; +"179 DequantizeLinear_onnx^^Conv_636_1" -> "180 /inception4b/branch2/branch2.1/conv/Conv" [label="[224, 112, 3, 3]", style=solid]; +"180 /inception4b/branch2/branch2.1/conv/Conv" -> "181 
/inception4b/branch2/branch2.1/Relu" [label="[1, 224, 14, 14]", style=solid]; +"181 /inception4b/branch2/branch2.1/Relu" -> "197 QuantizeLinear_/inception4b/branch2/branch2.1/Relu_output_0_1" [label="[1, 224, 14, 14]", style=solid]; +"182 QuantizeLinear_onnx^^Conv_639_1" -> "183 DequantizeLinear_onnx^^Conv_639_1" [label="[24, 512, 1, 1]", style=dashed]; +"183 DequantizeLinear_onnx^^Conv_639_1" -> "184 /inception4b/branch3/branch3.0/conv/Conv" [label="[24, 512, 1, 1]", style=solid]; +"184 /inception4b/branch3/branch3.0/conv/Conv" -> "185 /inception4b/branch3/branch3.0/Relu" [label="[1, 24, 14, 14]", style=solid]; +"185 /inception4b/branch3/branch3.0/Relu" -> "186 QuantizeLinear_/inception4b/branch3/branch3.0/Relu_output_0_1" [label="[1, 24, 14, 14]", style=solid]; +"186 QuantizeLinear_/inception4b/branch3/branch3.0/Relu_output_0_1" -> "187 DequantizeLinear_/inception4b/branch3/branch3.0/Relu_output_0_1" [label="[1, 24, 14, 14]", style=dashed]; +"187 DequantizeLinear_/inception4b/branch3/branch3.0/Relu_output_0_1" -> "190 /inception4b/branch3/branch3.1/conv/Conv" [label="[1, 24, 14, 14]", style=solid]; +"188 QuantizeLinear_onnx^^Conv_642_1" -> "189 DequantizeLinear_onnx^^Conv_642_1" [label="[64, 24, 3, 3]", style=dashed]; +"189 DequantizeLinear_onnx^^Conv_642_1" -> "190 /inception4b/branch3/branch3.1/conv/Conv" [label="[64, 24, 3, 3]", style=solid]; +"190 /inception4b/branch3/branch3.1/conv/Conv" -> "191 /inception4b/branch3/branch3.1/Relu" [label="[1, 64, 14, 14]", style=solid]; +"191 /inception4b/branch3/branch3.1/Relu" -> "199 QuantizeLinear_/inception4b/branch3/branch3.1/Relu_output_0_1" [label="[1, 64, 14, 14]", style=solid]; +"192 /inception4b/branch4/branch4.0/MaxPool" -> "195 /inception4b/branch4/branch4.1/conv/Conv" [label="[1, 512, 14, 14]", style=solid]; +"193 QuantizeLinear_onnx^^Conv_645_1" -> "194 DequantizeLinear_onnx^^Conv_645_1" [label="[64, 512, 1, 1]", style=dashed]; +"194 DequantizeLinear_onnx^^Conv_645_1" -> "195 /inception4b/branch4/branch4.1/conv/Conv" [label="[64, 512, 1, 1]", style=solid]; +"195 /inception4b/branch4/branch4.1/conv/Conv" -> "196 /inception4b/branch4/branch4.1/Relu" [label="[1, 64, 14, 14]", style=solid]; +"196 /inception4b/branch4/branch4.1/Relu" -> "201 QuantizeLinear_/inception4b/branch4/branch4.1/Relu_output_0_1" [label="[1, 64, 14, 14]", style=solid]; +"197 QuantizeLinear_/inception4b/branch2/branch2.1/Relu_output_0_1" -> "198 DequantizeLinear_/inception4b/branch2/branch2.1/Relu_output_0_1" [label="[1, 224, 14, 14]", style=dashed]; +"198 DequantizeLinear_/inception4b/branch2/branch2.1/Relu_output_0_1" -> "205 /inception4b/Concat" [label="[1, 224, 14, 14]", style=solid]; +"199 QuantizeLinear_/inception4b/branch3/branch3.1/Relu_output_0_1" -> "200 DequantizeLinear_/inception4b/branch3/branch3.1/Relu_output_0_1" [label="[1, 64, 14, 14]", style=dashed]; +"200 DequantizeLinear_/inception4b/branch3/branch3.1/Relu_output_0_1" -> "205 /inception4b/Concat" [label="[1, 64, 14, 14]", style=solid]; +"201 QuantizeLinear_/inception4b/branch4/branch4.1/Relu_output_0_1" -> "202 DequantizeLinear_/inception4b/branch4/branch4.1/Relu_output_0_1" [label="[1, 64, 14, 14]", style=dashed]; +"202 DequantizeLinear_/inception4b/branch4/branch4.1/Relu_output_0_1" -> "205 /inception4b/Concat" [label="[1, 64, 14, 14]", style=solid]; +"203 QuantizeLinear_/inception4b/branch1/Relu_output_0_1" -> "204 DequantizeLinear_/inception4b/branch1/Relu_output_0_1" [label="[1, 160, 14, 14]", style=dashed]; +"204 DequantizeLinear_/inception4b/branch1/Relu_output_0_1" -> "205 
/inception4b/Concat" [label="[1, 160, 14, 14]", style=solid]; +"205 /inception4b/Concat" -> "208 /inception4c/branch1/conv/Conv" [label="[1, 512, 14, 14]", style=solid]; +"205 /inception4b/Concat" -> "212 /inception4c/branch2/branch2.0/conv/Conv" [label="[1, 512, 14, 14]", style=solid]; +"205 /inception4b/Concat" -> "222 /inception4c/branch3/branch3.0/conv/Conv" [label="[1, 512, 14, 14]", style=solid]; +"205 /inception4b/Concat" -> "230 /inception4c/branch4/branch4.0/MaxPool" [label="[1, 512, 14, 14]", style=solid]; +"206 QuantizeLinear_onnx^^Conv_648_1" -> "207 DequantizeLinear_onnx^^Conv_648_1" [label="[128, 512, 1, 1]", style=dashed]; +"207 DequantizeLinear_onnx^^Conv_648_1" -> "208 /inception4c/branch1/conv/Conv" [label="[128, 512, 1, 1]", style=solid]; +"208 /inception4c/branch1/conv/Conv" -> "209 /inception4c/branch1/Relu" [label="[1, 128, 14, 14]", style=solid]; +"209 /inception4c/branch1/Relu" -> "237 QuantizeLinear_/inception4c/branch1/Relu_output_0_1" [label="[1, 128, 14, 14]", style=solid]; +"210 QuantizeLinear_onnx^^Conv_651_1" -> "211 DequantizeLinear_onnx^^Conv_651_1" [label="[128, 512, 1, 1]", style=dashed]; +"211 DequantizeLinear_onnx^^Conv_651_1" -> "212 /inception4c/branch2/branch2.0/conv/Conv" [label="[128, 512, 1, 1]", style=solid]; +"212 /inception4c/branch2/branch2.0/conv/Conv" -> "213 /inception4c/branch2/branch2.0/Relu" [label="[1, 128, 14, 14]", style=solid]; +"213 /inception4c/branch2/branch2.0/Relu" -> "214 QuantizeLinear_/inception4c/branch2/branch2.0/Relu_output_0_1" [label="[1, 128, 14, 14]", style=solid]; +"214 QuantizeLinear_/inception4c/branch2/branch2.0/Relu_output_0_1" -> "215 DequantizeLinear_/inception4c/branch2/branch2.0/Relu_output_0_1" [label="[1, 128, 14, 14]", style=dashed]; +"215 DequantizeLinear_/inception4c/branch2/branch2.0/Relu_output_0_1" -> "218 /inception4c/branch2/branch2.1/conv/Conv" [label="[1, 128, 14, 14]", style=solid]; +"216 QuantizeLinear_onnx^^Conv_654_1" -> "217 DequantizeLinear_onnx^^Conv_654_1" [label="[256, 128, 3, 3]", style=dashed]; +"217 DequantizeLinear_onnx^^Conv_654_1" -> "218 /inception4c/branch2/branch2.1/conv/Conv" [label="[256, 128, 3, 3]", style=solid]; +"218 /inception4c/branch2/branch2.1/conv/Conv" -> "219 /inception4c/branch2/branch2.1/Relu" [label="[1, 256, 14, 14]", style=solid]; +"219 /inception4c/branch2/branch2.1/Relu" -> "239 QuantizeLinear_/inception4c/branch2/branch2.1/Relu_output_0_1" [label="[1, 256, 14, 14]", style=solid]; +"220 QuantizeLinear_onnx^^Conv_657_1" -> "221 DequantizeLinear_onnx^^Conv_657_1" [label="[24, 512, 1, 1]", style=dashed]; +"221 DequantizeLinear_onnx^^Conv_657_1" -> "222 /inception4c/branch3/branch3.0/conv/Conv" [label="[24, 512, 1, 1]", style=solid]; +"222 /inception4c/branch3/branch3.0/conv/Conv" -> "223 /inception4c/branch3/branch3.0/Relu" [label="[1, 24, 14, 14]", style=solid]; +"223 /inception4c/branch3/branch3.0/Relu" -> "224 QuantizeLinear_/inception4c/branch3/branch3.0/Relu_output_0_1" [label="[1, 24, 14, 14]", style=solid]; +"224 QuantizeLinear_/inception4c/branch3/branch3.0/Relu_output_0_1" -> "225 DequantizeLinear_/inception4c/branch3/branch3.0/Relu_output_0_1" [label="[1, 24, 14, 14]", style=dashed]; +"225 DequantizeLinear_/inception4c/branch3/branch3.0/Relu_output_0_1" -> "228 /inception4c/branch3/branch3.1/conv/Conv" [label="[1, 24, 14, 14]", style=solid]; +"226 QuantizeLinear_onnx^^Conv_660_1" -> "227 DequantizeLinear_onnx^^Conv_660_1" [label="[64, 24, 3, 3]", style=dashed]; +"227 DequantizeLinear_onnx^^Conv_660_1" -> "228 /inception4c/branch3/branch3.1/conv/Conv" 
[label="[64, 24, 3, 3]", style=solid]; +"228 /inception4c/branch3/branch3.1/conv/Conv" -> "229 /inception4c/branch3/branch3.1/Relu" [label="[1, 64, 14, 14]", style=solid]; +"229 /inception4c/branch3/branch3.1/Relu" -> "241 QuantizeLinear_/inception4c/branch3/branch3.1/Relu_output_0_1" [label="[1, 64, 14, 14]", style=solid]; +"230 /inception4c/branch4/branch4.0/MaxPool" -> "233 /inception4c/branch4/branch4.1/conv/Conv" [label="[1, 512, 14, 14]", style=solid]; +"231 QuantizeLinear_onnx^^Conv_663_1" -> "232 DequantizeLinear_onnx^^Conv_663_1" [label="[64, 512, 1, 1]", style=dashed]; +"232 DequantizeLinear_onnx^^Conv_663_1" -> "233 /inception4c/branch4/branch4.1/conv/Conv" [label="[64, 512, 1, 1]", style=solid]; +"233 /inception4c/branch4/branch4.1/conv/Conv" -> "234 /inception4c/branch4/branch4.1/Relu" [label="[1, 64, 14, 14]", style=solid]; +"234 /inception4c/branch4/branch4.1/Relu" -> "235 QuantizeLinear_/inception4c/branch4/branch4.1/Relu_output_0_1" [label="[1, 64, 14, 14]", style=solid]; +"235 QuantizeLinear_/inception4c/branch4/branch4.1/Relu_output_0_1" -> "236 DequantizeLinear_/inception4c/branch4/branch4.1/Relu_output_0_1" [label="[1, 64, 14, 14]", style=dashed]; +"236 DequantizeLinear_/inception4c/branch4/branch4.1/Relu_output_0_1" -> "243 /inception4c/Concat" [label="[1, 64, 14, 14]", style=solid]; +"237 QuantizeLinear_/inception4c/branch1/Relu_output_0_1" -> "238 DequantizeLinear_/inception4c/branch1/Relu_output_0_1" [label="[1, 128, 14, 14]", style=dashed]; +"238 DequantizeLinear_/inception4c/branch1/Relu_output_0_1" -> "243 /inception4c/Concat" [label="[1, 128, 14, 14]", style=solid]; +"239 QuantizeLinear_/inception4c/branch2/branch2.1/Relu_output_0_1" -> "240 DequantizeLinear_/inception4c/branch2/branch2.1/Relu_output_0_1" [label="[1, 256, 14, 14]", style=dashed]; +"240 DequantizeLinear_/inception4c/branch2/branch2.1/Relu_output_0_1" -> "243 /inception4c/Concat" [label="[1, 256, 14, 14]", style=solid]; +"241 QuantizeLinear_/inception4c/branch3/branch3.1/Relu_output_0_1" -> "242 DequantizeLinear_/inception4c/branch3/branch3.1/Relu_output_0_1" [label="[1, 64, 14, 14]", style=dashed]; +"242 DequantizeLinear_/inception4c/branch3/branch3.1/Relu_output_0_1" -> "243 /inception4c/Concat" [label="[1, 64, 14, 14]", style=solid]; +"243 /inception4c/Concat" -> "246 /inception4d/branch1/conv/Conv" [label="[1, 512, 14, 14]", style=solid]; +"243 /inception4c/Concat" -> "250 /inception4d/branch2/branch2.0/conv/Conv" [label="[1, 512, 14, 14]", style=solid]; +"243 /inception4c/Concat" -> "260 /inception4d/branch3/branch3.0/conv/Conv" [label="[1, 512, 14, 14]", style=solid]; +"243 /inception4c/Concat" -> "268 /inception4d/branch4/branch4.0/MaxPool" [label="[1, 512, 14, 14]", style=solid]; +"244 QuantizeLinear_onnx^^Conv_666_1" -> "245 DequantizeLinear_onnx^^Conv_666_1" [label="[112, 512, 1, 1]", style=dashed]; +"245 DequantizeLinear_onnx^^Conv_666_1" -> "246 /inception4d/branch1/conv/Conv" [label="[112, 512, 1, 1]", style=solid]; +"246 /inception4d/branch1/conv/Conv" -> "247 /inception4d/branch1/Relu" [label="[1, 112, 14, 14]", style=solid]; +"247 /inception4d/branch1/Relu" -> "279 QuantizeLinear_/inception4d/branch1/Relu_output_0_1" [label="[1, 112, 14, 14]", style=solid]; +"248 QuantizeLinear_onnx^^Conv_669_1" -> "249 DequantizeLinear_onnx^^Conv_669_1" [label="[144, 512, 1, 1]", style=dashed]; +"249 DequantizeLinear_onnx^^Conv_669_1" -> "250 /inception4d/branch2/branch2.0/conv/Conv" [label="[144, 512, 1, 1]", style=solid]; +"250 /inception4d/branch2/branch2.0/conv/Conv" -> "251 
/inception4d/branch2/branch2.0/Relu" [label="[1, 144, 14, 14]", style=solid]; +"251 /inception4d/branch2/branch2.0/Relu" -> "252 QuantizeLinear_/inception4d/branch2/branch2.0/Relu_output_0_1" [label="[1, 144, 14, 14]", style=solid]; +"252 QuantizeLinear_/inception4d/branch2/branch2.0/Relu_output_0_1" -> "253 DequantizeLinear_/inception4d/branch2/branch2.0/Relu_output_0_1" [label="[1, 144, 14, 14]", style=dashed]; +"253 DequantizeLinear_/inception4d/branch2/branch2.0/Relu_output_0_1" -> "256 /inception4d/branch2/branch2.1/conv/Conv" [label="[1, 144, 14, 14]", style=solid]; +"254 QuantizeLinear_onnx^^Conv_672_1" -> "255 DequantizeLinear_onnx^^Conv_672_1" [label="[288, 144, 3, 3]", style=dashed]; +"255 DequantizeLinear_onnx^^Conv_672_1" -> "256 /inception4d/branch2/branch2.1/conv/Conv" [label="[288, 144, 3, 3]", style=solid]; +"256 /inception4d/branch2/branch2.1/conv/Conv" -> "257 /inception4d/branch2/branch2.1/Relu" [label="[1, 288, 14, 14]", style=solid]; +"257 /inception4d/branch2/branch2.1/Relu" -> "273 QuantizeLinear_/inception4d/branch2/branch2.1/Relu_output_0_1" [label="[1, 288, 14, 14]", style=solid]; +"258 QuantizeLinear_onnx^^Conv_675_1" -> "259 DequantizeLinear_onnx^^Conv_675_1" [label="[32, 512, 1, 1]", style=dashed]; +"259 DequantizeLinear_onnx^^Conv_675_1" -> "260 /inception4d/branch3/branch3.0/conv/Conv" [label="[32, 512, 1, 1]", style=solid]; +"260 /inception4d/branch3/branch3.0/conv/Conv" -> "261 /inception4d/branch3/branch3.0/Relu" [label="[1, 32, 14, 14]", style=solid]; +"261 /inception4d/branch3/branch3.0/Relu" -> "262 QuantizeLinear_/inception4d/branch3/branch3.0/Relu_output_0_1" [label="[1, 32, 14, 14]", style=solid]; +"262 QuantizeLinear_/inception4d/branch3/branch3.0/Relu_output_0_1" -> "263 DequantizeLinear_/inception4d/branch3/branch3.0/Relu_output_0_1" [label="[1, 32, 14, 14]", style=dashed]; +"263 DequantizeLinear_/inception4d/branch3/branch3.0/Relu_output_0_1" -> "266 /inception4d/branch3/branch3.1/conv/Conv" [label="[1, 32, 14, 14]", style=solid]; +"264 QuantizeLinear_onnx^^Conv_678_1" -> "265 DequantizeLinear_onnx^^Conv_678_1" [label="[64, 32, 3, 3]", style=dashed]; +"265 DequantizeLinear_onnx^^Conv_678_1" -> "266 /inception4d/branch3/branch3.1/conv/Conv" [label="[64, 32, 3, 3]", style=solid]; +"266 /inception4d/branch3/branch3.1/conv/Conv" -> "267 /inception4d/branch3/branch3.1/Relu" [label="[1, 64, 14, 14]", style=solid]; +"267 /inception4d/branch3/branch3.1/Relu" -> "275 QuantizeLinear_/inception4d/branch3/branch3.1/Relu_output_0_1" [label="[1, 64, 14, 14]", style=solid]; +"268 /inception4d/branch4/branch4.0/MaxPool" -> "271 /inception4d/branch4/branch4.1/conv/Conv" [label="[1, 512, 14, 14]", style=solid]; +"269 QuantizeLinear_onnx^^Conv_681_1" -> "270 DequantizeLinear_onnx^^Conv_681_1" [label="[64, 512, 1, 1]", style=dashed]; +"270 DequantizeLinear_onnx^^Conv_681_1" -> "271 /inception4d/branch4/branch4.1/conv/Conv" [label="[64, 512, 1, 1]", style=solid]; +"271 /inception4d/branch4/branch4.1/conv/Conv" -> "272 /inception4d/branch4/branch4.1/Relu" [label="[1, 64, 14, 14]", style=solid]; +"272 /inception4d/branch4/branch4.1/Relu" -> "277 QuantizeLinear_/inception4d/branch4/branch4.1/Relu_output_0_1" [label="[1, 64, 14, 14]", style=solid]; +"273 QuantizeLinear_/inception4d/branch2/branch2.1/Relu_output_0_1" -> "274 DequantizeLinear_/inception4d/branch2/branch2.1/Relu_output_0_1" [label="[1, 288, 14, 14]", style=dashed]; +"274 DequantizeLinear_/inception4d/branch2/branch2.1/Relu_output_0_1" -> "281 /inception4d/Concat" [label="[1, 288, 14, 14]", style=solid]; 
+"275 QuantizeLinear_/inception4d/branch3/branch3.1/Relu_output_0_1" -> "276 DequantizeLinear_/inception4d/branch3/branch3.1/Relu_output_0_1" [label="[1, 64, 14, 14]", style=dashed]; +"276 DequantizeLinear_/inception4d/branch3/branch3.1/Relu_output_0_1" -> "281 /inception4d/Concat" [label="[1, 64, 14, 14]", style=solid]; +"277 QuantizeLinear_/inception4d/branch4/branch4.1/Relu_output_0_1" -> "278 DequantizeLinear_/inception4d/branch4/branch4.1/Relu_output_0_1" [label="[1, 64, 14, 14]", style=dashed]; +"278 DequantizeLinear_/inception4d/branch4/branch4.1/Relu_output_0_1" -> "281 /inception4d/Concat" [label="[1, 64, 14, 14]", style=solid]; +"279 QuantizeLinear_/inception4d/branch1/Relu_output_0_1" -> "280 DequantizeLinear_/inception4d/branch1/Relu_output_0_1" [label="[1, 112, 14, 14]", style=dashed]; +"280 DequantizeLinear_/inception4d/branch1/Relu_output_0_1" -> "281 /inception4d/Concat" [label="[1, 112, 14, 14]", style=solid]; +"281 /inception4d/Concat" -> "284 /inception4e/branch1/conv/Conv" [label="[1, 528, 14, 14]", style=solid]; +"281 /inception4d/Concat" -> "288 /inception4e/branch2/branch2.0/conv/Conv" [label="[1, 528, 14, 14]", style=solid]; +"281 /inception4d/Concat" -> "298 /inception4e/branch3/branch3.0/conv/Conv" [label="[1, 528, 14, 14]", style=solid]; +"281 /inception4d/Concat" -> "306 /inception4e/branch4/branch4.0/MaxPool" [label="[1, 528, 14, 14]", style=solid]; +"282 QuantizeLinear_onnx^^Conv_684_1" -> "283 DequantizeLinear_onnx^^Conv_684_1" [label="[256, 528, 1, 1]", style=dashed]; +"283 DequantizeLinear_onnx^^Conv_684_1" -> "284 /inception4e/branch1/conv/Conv" [label="[256, 528, 1, 1]", style=solid]; +"284 /inception4e/branch1/conv/Conv" -> "285 /inception4e/branch1/Relu" [label="[1, 256, 14, 14]", style=solid]; +"285 /inception4e/branch1/Relu" -> "317 QuantizeLinear_/inception4e/branch1/Relu_output_0_1" [label="[1, 256, 14, 14]", style=solid]; +"286 QuantizeLinear_onnx^^Conv_687_1" -> "287 DequantizeLinear_onnx^^Conv_687_1" [label="[160, 528, 1, 1]", style=dashed]; +"287 DequantizeLinear_onnx^^Conv_687_1" -> "288 /inception4e/branch2/branch2.0/conv/Conv" [label="[160, 528, 1, 1]", style=solid]; +"288 /inception4e/branch2/branch2.0/conv/Conv" -> "289 /inception4e/branch2/branch2.0/Relu" [label="[1, 160, 14, 14]", style=solid]; +"289 /inception4e/branch2/branch2.0/Relu" -> "290 QuantizeLinear_/inception4e/branch2/branch2.0/Relu_output_0_1" [label="[1, 160, 14, 14]", style=solid]; +"290 QuantizeLinear_/inception4e/branch2/branch2.0/Relu_output_0_1" -> "291 DequantizeLinear_/inception4e/branch2/branch2.0/Relu_output_0_1" [label="[1, 160, 14, 14]", style=dashed]; +"291 DequantizeLinear_/inception4e/branch2/branch2.0/Relu_output_0_1" -> "294 /inception4e/branch2/branch2.1/conv/Conv" [label="[1, 160, 14, 14]", style=solid]; +"292 QuantizeLinear_onnx^^Conv_690_1" -> "293 DequantizeLinear_onnx^^Conv_690_1" [label="[320, 160, 3, 3]", style=dashed]; +"293 DequantizeLinear_onnx^^Conv_690_1" -> "294 /inception4e/branch2/branch2.1/conv/Conv" [label="[320, 160, 3, 3]", style=solid]; +"294 /inception4e/branch2/branch2.1/conv/Conv" -> "295 /inception4e/branch2/branch2.1/Relu" [label="[1, 320, 14, 14]", style=solid]; +"295 /inception4e/branch2/branch2.1/Relu" -> "311 QuantizeLinear_/inception4e/branch2/branch2.1/Relu_output_0_1" [label="[1, 320, 14, 14]", style=solid]; +"296 QuantizeLinear_onnx^^Conv_693_1" -> "297 DequantizeLinear_onnx^^Conv_693_1" [label="[32, 528, 1, 1]", style=dashed]; +"297 DequantizeLinear_onnx^^Conv_693_1" -> "298 /inception4e/branch3/branch3.0/conv/Conv" 
[label="[32, 528, 1, 1]", style=solid]; +"298 /inception4e/branch3/branch3.0/conv/Conv" -> "299 /inception4e/branch3/branch3.0/Relu" [label="[1, 32, 14, 14]", style=solid]; +"299 /inception4e/branch3/branch3.0/Relu" -> "300 QuantizeLinear_/inception4e/branch3/branch3.0/Relu_output_0_1" [label="[1, 32, 14, 14]", style=solid]; +"300 QuantizeLinear_/inception4e/branch3/branch3.0/Relu_output_0_1" -> "301 DequantizeLinear_/inception4e/branch3/branch3.0/Relu_output_0_1" [label="[1, 32, 14, 14]", style=dashed]; +"301 DequantizeLinear_/inception4e/branch3/branch3.0/Relu_output_0_1" -> "304 /inception4e/branch3/branch3.1/conv/Conv" [label="[1, 32, 14, 14]", style=solid]; +"302 QuantizeLinear_onnx^^Conv_696_1" -> "303 DequantizeLinear_onnx^^Conv_696_1" [label="[128, 32, 3, 3]", style=dashed]; +"303 DequantizeLinear_onnx^^Conv_696_1" -> "304 /inception4e/branch3/branch3.1/conv/Conv" [label="[128, 32, 3, 3]", style=solid]; +"304 /inception4e/branch3/branch3.1/conv/Conv" -> "305 /inception4e/branch3/branch3.1/Relu" [label="[1, 128, 14, 14]", style=solid]; +"305 /inception4e/branch3/branch3.1/Relu" -> "313 QuantizeLinear_/inception4e/branch3/branch3.1/Relu_output_0_1" [label="[1, 128, 14, 14]", style=solid]; +"306 /inception4e/branch4/branch4.0/MaxPool" -> "309 /inception4e/branch4/branch4.1/conv/Conv" [label="[1, 528, 14, 14]", style=solid]; +"307 QuantizeLinear_onnx^^Conv_699_1" -> "308 DequantizeLinear_onnx^^Conv_699_1" [label="[128, 528, 1, 1]", style=dashed]; +"308 DequantizeLinear_onnx^^Conv_699_1" -> "309 /inception4e/branch4/branch4.1/conv/Conv" [label="[128, 528, 1, 1]", style=solid]; +"309 /inception4e/branch4/branch4.1/conv/Conv" -> "310 /inception4e/branch4/branch4.1/Relu" [label="[1, 128, 14, 14]", style=solid]; +"310 /inception4e/branch4/branch4.1/Relu" -> "315 QuantizeLinear_/inception4e/branch4/branch4.1/Relu_output_0_1" [label="[1, 128, 14, 14]", style=solid]; +"311 QuantizeLinear_/inception4e/branch2/branch2.1/Relu_output_0_1" -> "312 DequantizeLinear_/inception4e/branch2/branch2.1/Relu_output_0_1" [label="[1, 320, 14, 14]", style=dashed]; +"312 DequantizeLinear_/inception4e/branch2/branch2.1/Relu_output_0_1" -> "319 /inception4e/Concat" [label="[1, 320, 14, 14]", style=solid]; +"313 QuantizeLinear_/inception4e/branch3/branch3.1/Relu_output_0_1" -> "314 DequantizeLinear_/inception4e/branch3/branch3.1/Relu_output_0_1" [label="[1, 128, 14, 14]", style=dashed]; +"314 DequantizeLinear_/inception4e/branch3/branch3.1/Relu_output_0_1" -> "319 /inception4e/Concat" [label="[1, 128, 14, 14]", style=solid]; +"315 QuantizeLinear_/inception4e/branch4/branch4.1/Relu_output_0_1" -> "316 DequantizeLinear_/inception4e/branch4/branch4.1/Relu_output_0_1" [label="[1, 128, 14, 14]", style=dashed]; +"316 DequantizeLinear_/inception4e/branch4/branch4.1/Relu_output_0_1" -> "319 /inception4e/Concat" [label="[1, 128, 14, 14]", style=solid]; +"317 QuantizeLinear_/inception4e/branch1/Relu_output_0_1" -> "318 DequantizeLinear_/inception4e/branch1/Relu_output_0_1" [label="[1, 256, 14, 14]", style=dashed]; +"318 DequantizeLinear_/inception4e/branch1/Relu_output_0_1" -> "319 /inception4e/Concat" [label="[1, 256, 14, 14]", style=solid]; +"319 /inception4e/Concat" -> "320 /maxpool4/MaxPool" [label="[1, 832, 14, 14]", style=solid]; +"320 /maxpool4/MaxPool" -> "323 /inception5a/branch1/conv/Conv" [label="[1, 832, 7, 7]", style=solid]; +"320 /maxpool4/MaxPool" -> "327 /inception5a/branch2/branch2.0/conv/Conv" [label="[1, 832, 7, 7]", style=solid]; +"320 /maxpool4/MaxPool" -> "337 /inception5a/branch3/branch3.0/conv/Conv" 
[label="[1, 832, 7, 7]", style=solid]; +"320 /maxpool4/MaxPool" -> "345 /inception5a/branch4/branch4.0/MaxPool" [label="[1, 832, 7, 7]", style=solid]; +"321 QuantizeLinear_onnx^^Conv_702_1" -> "322 DequantizeLinear_onnx^^Conv_702_1" [label="[256, 832, 1, 1]", style=dashed]; +"322 DequantizeLinear_onnx^^Conv_702_1" -> "323 /inception5a/branch1/conv/Conv" [label="[256, 832, 1, 1]", style=solid]; +"323 /inception5a/branch1/conv/Conv" -> "324 /inception5a/branch1/Relu" [label="[1, 256, 7, 7]", style=solid]; +"324 /inception5a/branch1/Relu" -> "356 QuantizeLinear_/inception5a/branch1/Relu_output_0_1" [label="[1, 256, 7, 7]", style=solid]; +"325 QuantizeLinear_onnx^^Conv_705_1" -> "326 DequantizeLinear_onnx^^Conv_705_1" [label="[160, 832, 1, 1]", style=dashed]; +"326 DequantizeLinear_onnx^^Conv_705_1" -> "327 /inception5a/branch2/branch2.0/conv/Conv" [label="[160, 832, 1, 1]", style=solid]; +"327 /inception5a/branch2/branch2.0/conv/Conv" -> "328 /inception5a/branch2/branch2.0/Relu" [label="[1, 160, 7, 7]", style=solid]; +"328 /inception5a/branch2/branch2.0/Relu" -> "329 QuantizeLinear_/inception5a/branch2/branch2.0/Relu_output_0_1" [label="[1, 160, 7, 7]", style=solid]; +"329 QuantizeLinear_/inception5a/branch2/branch2.0/Relu_output_0_1" -> "330 DequantizeLinear_/inception5a/branch2/branch2.0/Relu_output_0_1" [label="[1, 160, 7, 7]", style=dashed]; +"330 DequantizeLinear_/inception5a/branch2/branch2.0/Relu_output_0_1" -> "333 /inception5a/branch2/branch2.1/conv/Conv" [label="[1, 160, 7, 7]", style=solid]; +"331 QuantizeLinear_onnx^^Conv_708_1" -> "332 DequantizeLinear_onnx^^Conv_708_1" [label="[320, 160, 3, 3]", style=dashed]; +"332 DequantizeLinear_onnx^^Conv_708_1" -> "333 /inception5a/branch2/branch2.1/conv/Conv" [label="[320, 160, 3, 3]", style=solid]; +"333 /inception5a/branch2/branch2.1/conv/Conv" -> "334 /inception5a/branch2/branch2.1/Relu" [label="[1, 320, 7, 7]", style=solid]; +"334 /inception5a/branch2/branch2.1/Relu" -> "350 QuantizeLinear_/inception5a/branch2/branch2.1/Relu_output_0_1" [label="[1, 320, 7, 7]", style=solid]; +"335 QuantizeLinear_onnx^^Conv_711_1" -> "336 DequantizeLinear_onnx^^Conv_711_1" [label="[32, 832, 1, 1]", style=dashed]; +"336 DequantizeLinear_onnx^^Conv_711_1" -> "337 /inception5a/branch3/branch3.0/conv/Conv" [label="[32, 832, 1, 1]", style=solid]; +"337 /inception5a/branch3/branch3.0/conv/Conv" -> "338 /inception5a/branch3/branch3.0/Relu" [label="[1, 32, 7, 7]", style=solid]; +"338 /inception5a/branch3/branch3.0/Relu" -> "339 QuantizeLinear_/inception5a/branch3/branch3.0/Relu_output_0_1" [label="[1, 32, 7, 7]", style=solid]; +"339 QuantizeLinear_/inception5a/branch3/branch3.0/Relu_output_0_1" -> "340 DequantizeLinear_/inception5a/branch3/branch3.0/Relu_output_0_1" [label="[1, 32, 7, 7]", style=dashed]; +"340 DequantizeLinear_/inception5a/branch3/branch3.0/Relu_output_0_1" -> "343 /inception5a/branch3/branch3.1/conv/Conv" [label="[1, 32, 7, 7]", style=solid]; +"341 QuantizeLinear_onnx^^Conv_714_1" -> "342 DequantizeLinear_onnx^^Conv_714_1" [label="[128, 32, 3, 3]", style=dashed]; +"342 DequantizeLinear_onnx^^Conv_714_1" -> "343 /inception5a/branch3/branch3.1/conv/Conv" [label="[128, 32, 3, 3]", style=solid]; +"343 /inception5a/branch3/branch3.1/conv/Conv" -> "344 /inception5a/branch3/branch3.1/Relu" [label="[1, 128, 7, 7]", style=solid]; +"344 /inception5a/branch3/branch3.1/Relu" -> "352 QuantizeLinear_/inception5a/branch3/branch3.1/Relu_output_0_1" [label="[1, 128, 7, 7]", style=solid]; +"345 /inception5a/branch4/branch4.0/MaxPool" -> "348 
/inception5a/branch4/branch4.1/conv/Conv" [label="[1, 832, 7, 7]", style=solid]; +"346 QuantizeLinear_onnx^^Conv_717_1" -> "347 DequantizeLinear_onnx^^Conv_717_1" [label="[128, 832, 1, 1]", style=dashed]; +"347 DequantizeLinear_onnx^^Conv_717_1" -> "348 /inception5a/branch4/branch4.1/conv/Conv" [label="[128, 832, 1, 1]", style=solid]; +"348 /inception5a/branch4/branch4.1/conv/Conv" -> "349 /inception5a/branch4/branch4.1/Relu" [label="[1, 128, 7, 7]", style=solid]; +"349 /inception5a/branch4/branch4.1/Relu" -> "354 QuantizeLinear_/inception5a/branch4/branch4.1/Relu_output_0_1" [label="[1, 128, 7, 7]", style=solid]; +"350 QuantizeLinear_/inception5a/branch2/branch2.1/Relu_output_0_1" -> "351 DequantizeLinear_/inception5a/branch2/branch2.1/Relu_output_0_1" [label="[1, 320, 7, 7]", style=dashed]; +"351 DequantizeLinear_/inception5a/branch2/branch2.1/Relu_output_0_1" -> "358 /inception5a/Concat" [label="[1, 320, 7, 7]", style=solid]; +"352 QuantizeLinear_/inception5a/branch3/branch3.1/Relu_output_0_1" -> "353 DequantizeLinear_/inception5a/branch3/branch3.1/Relu_output_0_1" [label="[1, 128, 7, 7]", style=dashed]; +"353 DequantizeLinear_/inception5a/branch3/branch3.1/Relu_output_0_1" -> "358 /inception5a/Concat" [label="[1, 128, 7, 7]", style=solid]; +"354 QuantizeLinear_/inception5a/branch4/branch4.1/Relu_output_0_1" -> "355 DequantizeLinear_/inception5a/branch4/branch4.1/Relu_output_0_1" [label="[1, 128, 7, 7]", style=dashed]; +"355 DequantizeLinear_/inception5a/branch4/branch4.1/Relu_output_0_1" -> "358 /inception5a/Concat" [label="[1, 128, 7, 7]", style=solid]; +"356 QuantizeLinear_/inception5a/branch1/Relu_output_0_1" -> "357 DequantizeLinear_/inception5a/branch1/Relu_output_0_1" [label="[1, 256, 7, 7]", style=dashed]; +"357 DequantizeLinear_/inception5a/branch1/Relu_output_0_1" -> "358 /inception5a/Concat" [label="[1, 256, 7, 7]", style=solid]; +"358 /inception5a/Concat" -> "361 /inception5b/branch1/conv/Conv" [label="[1, 832, 7, 7]", style=solid]; +"358 /inception5a/Concat" -> "365 /inception5b/branch2/branch2.0/conv/Conv" [label="[1, 832, 7, 7]", style=solid]; +"358 /inception5a/Concat" -> "375 /inception5b/branch3/branch3.0/conv/Conv" [label="[1, 832, 7, 7]", style=solid]; +"358 /inception5a/Concat" -> "383 /inception5b/branch4/branch4.0/MaxPool" [label="[1, 832, 7, 7]", style=solid]; +"359 QuantizeLinear_onnx^^Conv_720_1" -> "360 DequantizeLinear_onnx^^Conv_720_1" [label="[384, 832, 1, 1]", style=dashed]; +"360 DequantizeLinear_onnx^^Conv_720_1" -> "361 /inception5b/branch1/conv/Conv" [label="[384, 832, 1, 1]", style=solid]; +"361 /inception5b/branch1/conv/Conv" -> "362 /inception5b/branch1/Relu" [label="[1, 384, 7, 7]", style=solid]; +"362 /inception5b/branch1/Relu" -> "388 QuantizeLinear_/inception5b/branch1/Relu_output_0_1" [label="[1, 384, 7, 7]", style=solid]; +"363 QuantizeLinear_onnx^^Conv_723_1" -> "364 DequantizeLinear_onnx^^Conv_723_1" [label="[192, 832, 1, 1]", style=dashed]; +"364 DequantizeLinear_onnx^^Conv_723_1" -> "365 /inception5b/branch2/branch2.0/conv/Conv" [label="[192, 832, 1, 1]", style=solid]; +"365 /inception5b/branch2/branch2.0/conv/Conv" -> "366 /inception5b/branch2/branch2.0/Relu" [label="[1, 192, 7, 7]", style=solid]; +"366 /inception5b/branch2/branch2.0/Relu" -> "367 QuantizeLinear_/inception5b/branch2/branch2.0/Relu_output_0_1" [label="[1, 192, 7, 7]", style=solid]; +"367 QuantizeLinear_/inception5b/branch2/branch2.0/Relu_output_0_1" -> "368 DequantizeLinear_/inception5b/branch2/branch2.0/Relu_output_0_1" [label="[1, 192, 7, 7]", style=dashed]; +"368 
DequantizeLinear_/inception5b/branch2/branch2.0/Relu_output_0_1" -> "371 /inception5b/branch2/branch2.1/conv/Conv" [label="[1, 192, 7, 7]", style=solid]; +"369 QuantizeLinear_onnx^^Conv_726_1" -> "370 DequantizeLinear_onnx^^Conv_726_1" [label="[384, 192, 3, 3]", style=dashed]; +"370 DequantizeLinear_onnx^^Conv_726_1" -> "371 /inception5b/branch2/branch2.1/conv/Conv" [label="[384, 192, 3, 3]", style=solid]; +"371 /inception5b/branch2/branch2.1/conv/Conv" -> "372 /inception5b/branch2/branch2.1/Relu" [label="[1, 384, 7, 7]", style=solid]; +"372 /inception5b/branch2/branch2.1/Relu" -> "390 QuantizeLinear_/inception5b/branch2/branch2.1/Relu_output_0_1" [label="[1, 384, 7, 7]", style=solid]; +"373 QuantizeLinear_onnx^^Conv_729_1" -> "374 DequantizeLinear_onnx^^Conv_729_1" [label="[48, 832, 1, 1]", style=dashed]; +"374 DequantizeLinear_onnx^^Conv_729_1" -> "375 /inception5b/branch3/branch3.0/conv/Conv" [label="[48, 832, 1, 1]", style=solid]; +"375 /inception5b/branch3/branch3.0/conv/Conv" -> "376 /inception5b/branch3/branch3.0/Relu" [label="[1, 48, 7, 7]", style=solid]; +"376 /inception5b/branch3/branch3.0/Relu" -> "377 QuantizeLinear_/inception5b/branch3/branch3.0/Relu_output_0_1" [label="[1, 48, 7, 7]", style=solid]; +"377 QuantizeLinear_/inception5b/branch3/branch3.0/Relu_output_0_1" -> "378 DequantizeLinear_/inception5b/branch3/branch3.0/Relu_output_0_1" [label="[1, 48, 7, 7]", style=dashed]; +"378 DequantizeLinear_/inception5b/branch3/branch3.0/Relu_output_0_1" -> "381 /inception5b/branch3/branch3.1/conv/Conv" [label="[1, 48, 7, 7]", style=solid]; +"379 QuantizeLinear_onnx^^Conv_732_1" -> "380 DequantizeLinear_onnx^^Conv_732_1" [label="[128, 48, 3, 3]", style=dashed]; +"380 DequantizeLinear_onnx^^Conv_732_1" -> "381 /inception5b/branch3/branch3.1/conv/Conv" [label="[128, 48, 3, 3]", style=solid]; +"381 /inception5b/branch3/branch3.1/conv/Conv" -> "382 /inception5b/branch3/branch3.1/Relu" [label="[1, 128, 7, 7]", style=solid]; +"382 /inception5b/branch3/branch3.1/Relu" -> "392 QuantizeLinear_/inception5b/branch3/branch3.1/Relu_output_0_1" [label="[1, 128, 7, 7]", style=solid]; +"383 /inception5b/branch4/branch4.0/MaxPool" -> "386 /inception5b/branch4/branch4.1/conv/Conv" [label="[1, 832, 7, 7]", style=solid]; +"384 QuantizeLinear_onnx^^Conv_735_1" -> "385 DequantizeLinear_onnx^^Conv_735_1" [label="[128, 832, 1, 1]", style=dashed]; +"385 DequantizeLinear_onnx^^Conv_735_1" -> "386 /inception5b/branch4/branch4.1/conv/Conv" [label="[128, 832, 1, 1]", style=solid]; +"386 /inception5b/branch4/branch4.1/conv/Conv" -> "387 /inception5b/branch4/branch4.1/Relu" [label="[1, 128, 7, 7]", style=solid]; +"387 /inception5b/branch4/branch4.1/Relu" -> "394 QuantizeLinear_/inception5b/branch4/branch4.1/Relu_output_0_1" [label="[1, 128, 7, 7]", style=solid]; +"388 QuantizeLinear_/inception5b/branch1/Relu_output_0_1" -> "389 DequantizeLinear_/inception5b/branch1/Relu_output_0_1" [label="[1, 384, 7, 7]", style=dashed]; +"389 DequantizeLinear_/inception5b/branch1/Relu_output_0_1" -> "396 /inception5b/Concat" [label="[1, 384, 7, 7]", style=solid]; +"390 QuantizeLinear_/inception5b/branch2/branch2.1/Relu_output_0_1" -> "391 DequantizeLinear_/inception5b/branch2/branch2.1/Relu_output_0_1" [label="[1, 384, 7, 7]", style=dashed]; +"391 DequantizeLinear_/inception5b/branch2/branch2.1/Relu_output_0_1" -> "396 /inception5b/Concat" [label="[1, 384, 7, 7]", style=solid]; +"392 QuantizeLinear_/inception5b/branch3/branch3.1/Relu_output_0_1" -> "393 DequantizeLinear_/inception5b/branch3/branch3.1/Relu_output_0_1" [label="[1, 
128, 7, 7]", style=dashed]; +"393 DequantizeLinear_/inception5b/branch3/branch3.1/Relu_output_0_1" -> "396 /inception5b/Concat" [label="[1, 128, 7, 7]", style=solid]; +"394 QuantizeLinear_/inception5b/branch4/branch4.1/Relu_output_0_1" -> "395 DequantizeLinear_/inception5b/branch4/branch4.1/Relu_output_0_1" [label="[1, 128, 7, 7]", style=dashed]; +"395 DequantizeLinear_/inception5b/branch4/branch4.1/Relu_output_0_1" -> "396 /inception5b/Concat" [label="[1, 128, 7, 7]", style=solid]; +"396 /inception5b/Concat" -> "397 /avgpool/GlobalAveragePool" [label="[1, 1024, 7, 7]", style=solid]; +"397 /avgpool/GlobalAveragePool" -> "398 QuantizeLinear_/avgpool/GlobalAveragePool_output_0_1" [label="[1, 1024, 1, 1]", style=solid]; +"398 QuantizeLinear_/avgpool/GlobalAveragePool_output_0_1" -> "399 DequantizeLinear_/avgpool/GlobalAveragePool_output_0_1" [label="[1, 1024, 1, 1]", style=dashed]; +"399 DequantizeLinear_/avgpool/GlobalAveragePool_output_0_1" -> "400 /Flatten" [label="[1, 1024, 1, 1]", style=solid]; +"400 /Flatten" -> "403 /fc/Gemm" [label="[1, 1024]", style=solid]; +"401 QuantizeLinear_fc.weight_1" -> "402 DequantizeLinear_fc.weight_1" [label="[1000, 1024]", style=dashed]; +"402 DequantizeLinear_fc.weight_1" -> "403 /fc/Gemm" [label="[1000, 1024]", style=solid]; +"403 /fc/Gemm" -> "405 nncf_model_output_0" [label="[1, 1000]", style=solid]; +"404 nncf_model_input_0" -> "2 QuantizeLinear_x.1_1" [label="[1, 3, 224, 224]", style=solid]; } diff --git a/tests/onnx/data/reference_graphs/quantization/gpt2-10.dot b/tests/onnx/data/reference_graphs/quantization/gpt2-10.dot new file mode 100644 index 00000000000..a9338508e43 --- /dev/null +++ b/tests/onnx/data/reference_graphs/quantization/gpt2-10.dot @@ -0,0 +1,6180 @@ +strict digraph { +"0 Shape_0" [id=0, type=Shape]; +"1 Constant_1" [id=1, type=Constant]; +"2 Gather_2" [id=2, type=Gather]; +"3 Shape_3" [id=3, type=Shape]; +"4 Constant_4" [id=4, type=Constant]; +"5 Gather_5" [id=5, type=Gather]; +"6 Shape_6" [id=6, type=Shape]; +"7 Constant_7" [id=7, type=Constant]; +"8 Gather_8" [id=8, type=Gather]; +"9 Unsqueeze_9" [id=9, type=Unsqueeze]; +"10 Concat_10" [id=10, type=Concat]; +"11 Reshape_11" [id=11, type=Reshape]; +"12 Unsqueeze_12" [id=12, type=Unsqueeze]; +"13 Sub_13" [id=13, type=Sub]; +"14 Div_14" [id=14, type=Div]; +"15 ConstantOfShape_15" [id=15, type=ConstantOfShape]; +"16 NonZero_16" [id=16, type=NonZero]; +"17 Transpose_17" [id=17, type=Transpose]; +"18 Squeeze_18" [id=18, type=Squeeze]; +"19 Mul_19" [id=19, type=Mul]; +"20 Add_20" [id=20, type=Add]; +"21 Cast_21" [id=21, type=Cast]; +"22 Unsqueeze_22" [id=22, type=Unsqueeze]; +"23 Unsqueeze_23" [id=23, type=Unsqueeze]; +"24 Concat_24" [id=24, type=Concat]; +"25 Reshape_25" [id=25, type=Reshape]; +"26 QuantizeLinear_wte.weight_1" [id=26, type=QuantizeLinear]; +"27 DequantizeLinear_wte.weight_1" [id=27, type=DequantizeLinear]; +"28 Gather_26" [id=28, type=Gather]; +"29 QuantizeLinear_wpe.weight_1" [id=29, type=QuantizeLinear]; +"30 DequantizeLinear_wpe.weight_1" [id=30, type=DequantizeLinear]; +"31 Gather_27" [id=31, type=Gather]; +"32 Add_28" [id=32, type=Add]; +"33 Shape_29" [id=33, type=Shape]; +"34 Constant_30" [id=34, type=Constant]; +"35 Gather_31" [id=35, type=Gather]; +"36 ReduceMean_32" [id=36, type=ReduceMean]; +"37 Sub_33" [id=37, type=Sub]; +"38 Constant_34" [id=38, type=Constant]; +"39 Pow_35" [id=39, type=Pow]; +"40 ReduceMean_36" [id=40, type=ReduceMean]; +"41 Constant_37" [id=41, type=Constant]; +"42 Add_38" [id=42, type=Add]; +"43 Sqrt_39" [id=43, type=Sqrt]; +"44 Div_40" 
[id=44, type=Div]; +"45 Mul_41" [id=45, type=Mul]; +"46 Add_42" [id=46, type=Add]; +"47 QuantizeLinear_211_1" [id=47, type=QuantizeLinear]; +"48 DequantizeLinear_211_1" [id=48, type=DequantizeLinear]; +"49 Shape_43" [id=49, type=Shape]; +"50 Constant_44" [id=50, type=Constant]; +"51 Gather_45" [id=51, type=Gather]; +"52 Shape_46" [id=52, type=Shape]; +"53 Constant_47" [id=53, type=Constant]; +"54 Gather_48" [id=54, type=Gather]; +"55 Shape_49" [id=55, type=Shape]; +"56 Constant_50" [id=56, type=Constant]; +"57 Gather_51" [id=57, type=Gather]; +"58 Unsqueeze_52" [id=58, type=Unsqueeze]; +"59 Concat_53" [id=59, type=Concat]; +"60 Reshape_54" [id=60, type=Reshape]; +"61 QuantizeLinear_h.0.attn.c_attn.weight_1" [id=61, type=QuantizeLinear]; +"62 DequantizeLinear_h.0.attn.c_attn.weight_1" [id=62, type=DequantizeLinear]; +"63 Gemm_55" [id=63, type=Gemm]; +"64 Unsqueeze_56" [id=64, type=Unsqueeze]; +"65 Unsqueeze_57" [id=65, type=Unsqueeze]; +"66 Concat_58" [id=66, type=Concat]; +"67 Reshape_59" [id=67, type=Reshape]; +"68 Split_60" [id=68, type=Split]; +"69 QuantizeLinear_query.1_1" [id=69, type=QuantizeLinear]; +"70 DequantizeLinear_query.1_1" [id=70, type=DequantizeLinear]; +"71 Shape_61" [id=71, type=Shape]; +"72 Constant_62" [id=72, type=Constant]; +"73 Gather_63" [id=73, type=Gather]; +"74 Shape_64" [id=74, type=Shape]; +"75 Constant_65" [id=75, type=Constant]; +"76 Gather_66" [id=76, type=Gather]; +"77 Shape_67" [id=77, type=Shape]; +"78 Constant_68" [id=78, type=Constant]; +"79 Gather_69" [id=79, type=Gather]; +"80 Constant_70" [id=80, type=Constant]; +"81 Div_71" [id=81, type=Div]; +"82 Cast_72" [id=82, type=Cast]; +"83 Cast_73" [id=83, type=Cast]; +"84 Unsqueeze_74" [id=84, type=Unsqueeze]; +"85 Unsqueeze_75" [id=85, type=Unsqueeze]; +"86 Unsqueeze_76" [id=86, type=Unsqueeze]; +"87 Concat_77" [id=87, type=Concat]; +"88 Reshape_78" [id=88, type=Reshape]; +"89 Transpose_79" [id=89, type=Transpose]; +"90 Shape_80" [id=90, type=Shape]; +"91 Constant_81" [id=91, type=Constant]; +"92 Gather_82" [id=92, type=Gather]; +"93 Shape_83" [id=93, type=Shape]; +"94 Constant_84" [id=94, type=Constant]; +"95 Gather_85" [id=95, type=Gather]; +"96 Shape_86" [id=96, type=Shape]; +"97 Constant_87" [id=97, type=Constant]; +"98 Gather_88" [id=98, type=Gather]; +"99 Constant_89" [id=99, type=Constant]; +"100 Div_90" [id=100, type=Div]; +"101 Cast_91" [id=101, type=Cast]; +"102 Cast_92" [id=102, type=Cast]; +"103 Unsqueeze_93" [id=103, type=Unsqueeze]; +"104 Unsqueeze_94" [id=104, type=Unsqueeze]; +"105 Unsqueeze_95" [id=105, type=Unsqueeze]; +"106 Concat_96" [id=106, type=Concat]; +"107 Reshape_97" [id=107, type=Reshape]; +"108 QuantizeLinear_276_1" [id=108, type=QuantizeLinear]; +"109 DequantizeLinear_276_1" [id=109, type=DequantizeLinear]; +"110 Transpose_98" [id=110, type=Transpose]; +"111 Shape_99" [id=111, type=Shape]; +"112 Constant_100" [id=112, type=Constant]; +"113 Gather_101" [id=113, type=Gather]; +"114 Shape_102" [id=114, type=Shape]; +"115 Constant_103" [id=115, type=Constant]; +"116 Gather_104" [id=116, type=Gather]; +"117 Shape_105" [id=117, type=Shape]; +"118 Constant_106" [id=118, type=Constant]; +"119 Gather_107" [id=119, type=Gather]; +"120 Constant_108" [id=120, type=Constant]; +"121 Div_109" [id=121, type=Div]; +"122 Cast_110" [id=122, type=Cast]; +"123 Cast_111" [id=123, type=Cast]; +"124 Unsqueeze_112" [id=124, type=Unsqueeze]; +"125 Unsqueeze_113" [id=125, type=Unsqueeze]; +"126 Unsqueeze_114" [id=126, type=Unsqueeze]; +"127 Concat_115" [id=127, type=Concat]; +"128 Reshape_116" [id=128, 
type=Reshape]; +"129 Transpose_117" [id=129, type=Transpose]; +"130 Transpose_118" [id=130, type=Transpose]; +"131 Unsqueeze_119" [id=131, type=Unsqueeze]; +"132 Unsqueeze_120" [id=132, type=Unsqueeze]; +"133 Concat_121" [id=133, type=Concat]; +"134 MatMul_122" [id=134, type=MatMul]; +"135 Constant_123" [id=135, type=Constant]; +"136 Div_124" [id=136, type=Div]; +"137 Shape_125" [id=137, type=Shape]; +"138 Constant_126" [id=138, type=Constant]; +"139 Gather_127" [id=139, type=Gather]; +"140 Shape_128" [id=140, type=Shape]; +"141 Constant_129" [id=141, type=Constant]; +"142 Gather_130" [id=142, type=Gather]; +"143 Sub_131" [id=143, type=Sub]; +"144 Unsqueeze_132" [id=144, type=Unsqueeze]; +"145 Unsqueeze_133" [id=145, type=Unsqueeze]; +"146 Constant_134" [id=146, type=Constant]; +"147 Slice_135" [id=147, type=Slice]; +"148 Unsqueeze_136" [id=148, type=Unsqueeze]; +"149 Constant_137" [id=149, type=Constant]; +"150 Slice_138" [id=150, type=Slice]; +"151 Mul_139" [id=151, type=Mul]; +"152 Constant_140" [id=152, type=Constant]; +"153 Sub_141" [id=153, type=Sub]; +"154 Constant_142" [id=154, type=Constant]; +"155 Mul_143" [id=155, type=Mul]; +"156 Sub_144" [id=156, type=Sub]; +"157 Softmax_145" [id=157, type=Softmax]; +"158 MatMul_146" [id=158, type=MatMul]; +"159 QuantizeLinear_333_1" [id=159, type=QuantizeLinear]; +"160 DequantizeLinear_333_1" [id=160, type=DequantizeLinear]; +"161 Transpose_147" [id=161, type=Transpose]; +"162 Shape_148" [id=162, type=Shape]; +"163 Constant_149" [id=163, type=Constant]; +"164 Gather_150" [id=164, type=Gather]; +"165 Shape_151" [id=165, type=Shape]; +"166 Constant_152" [id=166, type=Constant]; +"167 Gather_153" [id=167, type=Gather]; +"168 Shape_154" [id=168, type=Shape]; +"169 Constant_155" [id=169, type=Constant]; +"170 Gather_156" [id=170, type=Gather]; +"171 Shape_157" [id=171, type=Shape]; +"172 Constant_158" [id=172, type=Constant]; +"173 Gather_159" [id=173, type=Gather]; +"174 Mul_160" [id=174, type=Mul]; +"175 Unsqueeze_161" [id=175, type=Unsqueeze]; +"176 Unsqueeze_162" [id=176, type=Unsqueeze]; +"177 Unsqueeze_163" [id=177, type=Unsqueeze]; +"178 Concat_164" [id=178, type=Concat]; +"179 Reshape_165" [id=179, type=Reshape]; +"180 Shape_166" [id=180, type=Shape]; +"181 Constant_167" [id=181, type=Constant]; +"182 Gather_168" [id=182, type=Gather]; +"183 Shape_169" [id=183, type=Shape]; +"184 Constant_170" [id=184, type=Constant]; +"185 Gather_171" [id=185, type=Gather]; +"186 Shape_172" [id=186, type=Shape]; +"187 Constant_173" [id=187, type=Constant]; +"188 Gather_174" [id=188, type=Gather]; +"189 Unsqueeze_175" [id=189, type=Unsqueeze]; +"190 Concat_176" [id=190, type=Concat]; +"191 Reshape_177" [id=191, type=Reshape]; +"192 QuantizeLinear_h.0.attn.c_proj.weight_1" [id=192, type=QuantizeLinear]; +"193 DequantizeLinear_h.0.attn.c_proj.weight_1" [id=193, type=DequantizeLinear]; +"194 Gemm_178" [id=194, type=Gemm]; +"195 Unsqueeze_179" [id=195, type=Unsqueeze]; +"196 Unsqueeze_180" [id=196, type=Unsqueeze]; +"197 Concat_181" [id=197, type=Concat]; +"198 Reshape_182" [id=198, type=Reshape]; +"199 Add_183" [id=199, type=Add]; +"200 ReduceMean_184" [id=200, type=ReduceMean]; +"201 Sub_185" [id=201, type=Sub]; +"202 Constant_186" [id=202, type=Constant]; +"203 Pow_187" [id=203, type=Pow]; +"204 ReduceMean_188" [id=204, type=ReduceMean]; +"205 Constant_189" [id=205, type=Constant]; +"206 Add_190" [id=206, type=Add]; +"207 Sqrt_191" [id=207, type=Sqrt]; +"208 Div_192" [id=208, type=Div]; +"209 Mul_193" [id=209, type=Mul]; +"210 Add_194" [id=210, type=Add]; 
+"211 QuantizeLinear_385_1" [id=211, type=QuantizeLinear]; +"212 DequantizeLinear_385_1" [id=212, type=DequantizeLinear]; +"213 Shape_195" [id=213, type=Shape]; +"214 Constant_196" [id=214, type=Constant]; +"215 Gather_197" [id=215, type=Gather]; +"216 Shape_198" [id=216, type=Shape]; +"217 Constant_199" [id=217, type=Constant]; +"218 Gather_200" [id=218, type=Gather]; +"219 Shape_201" [id=219, type=Shape]; +"220 Constant_202" [id=220, type=Constant]; +"221 Gather_203" [id=221, type=Gather]; +"222 Unsqueeze_204" [id=222, type=Unsqueeze]; +"223 Concat_205" [id=223, type=Concat]; +"224 Reshape_206" [id=224, type=Reshape]; +"225 QuantizeLinear_h.0.mlp.c_fc.weight_1" [id=225, type=QuantizeLinear]; +"226 DequantizeLinear_h.0.mlp.c_fc.weight_1" [id=226, type=DequantizeLinear]; +"227 Gemm_207" [id=227, type=Gemm]; +"228 Unsqueeze_208" [id=228, type=Unsqueeze]; +"229 Unsqueeze_209" [id=229, type=Unsqueeze]; +"230 Concat_210" [id=230, type=Concat]; +"231 Reshape_211" [id=231, type=Reshape]; +"232 Constant_212" [id=232, type=Constant]; +"233 Mul_213" [id=233, type=Mul]; +"234 Constant_214" [id=234, type=Constant]; +"235 Pow_215" [id=235, type=Pow]; +"236 Constant_216" [id=236, type=Constant]; +"237 Mul_217" [id=237, type=Mul]; +"238 Add_218" [id=238, type=Add]; +"239 Constant_219" [id=239, type=Constant]; +"240 Mul_220" [id=240, type=Mul]; +"241 Tanh_221" [id=241, type=Tanh]; +"242 Constant_222" [id=242, type=Constant]; +"243 Add_223" [id=243, type=Add]; +"244 Mul_224" [id=244, type=Mul]; +"245 QuantizeLinear_419_1" [id=245, type=QuantizeLinear]; +"246 DequantizeLinear_419_1" [id=246, type=DequantizeLinear]; +"247 Shape_225" [id=247, type=Shape]; +"248 Constant_226" [id=248, type=Constant]; +"249 Gather_227" [id=249, type=Gather]; +"250 Shape_228" [id=250, type=Shape]; +"251 Constant_229" [id=251, type=Constant]; +"252 Gather_230" [id=252, type=Gather]; +"253 Shape_231" [id=253, type=Shape]; +"254 Constant_232" [id=254, type=Constant]; +"255 Gather_233" [id=255, type=Gather]; +"256 Unsqueeze_234" [id=256, type=Unsqueeze]; +"257 Concat_235" [id=257, type=Concat]; +"258 Reshape_236" [id=258, type=Reshape]; +"259 QuantizeLinear_h.0.mlp.c_proj.weight_1" [id=259, type=QuantizeLinear]; +"260 DequantizeLinear_h.0.mlp.c_proj.weight_1" [id=260, type=DequantizeLinear]; +"261 Gemm_237" [id=261, type=Gemm]; +"262 Unsqueeze_238" [id=262, type=Unsqueeze]; +"263 Unsqueeze_239" [id=263, type=Unsqueeze]; +"264 Concat_240" [id=264, type=Concat]; +"265 Reshape_241" [id=265, type=Reshape]; +"266 Add_242" [id=266, type=Add]; +"267 ReduceMean_243" [id=267, type=ReduceMean]; +"268 Sub_244" [id=268, type=Sub]; +"269 Constant_245" [id=269, type=Constant]; +"270 Pow_246" [id=270, type=Pow]; +"271 ReduceMean_247" [id=271, type=ReduceMean]; +"272 Constant_248" [id=272, type=Constant]; +"273 Add_249" [id=273, type=Add]; +"274 Sqrt_250" [id=274, type=Sqrt]; +"275 Div_251" [id=275, type=Div]; +"276 Mul_252" [id=276, type=Mul]; +"277 Add_253" [id=277, type=Add]; +"278 QuantizeLinear_452_1" [id=278, type=QuantizeLinear]; +"279 DequantizeLinear_452_1" [id=279, type=DequantizeLinear]; +"280 Shape_254" [id=280, type=Shape]; +"281 Constant_255" [id=281, type=Constant]; +"282 Gather_256" [id=282, type=Gather]; +"283 Shape_257" [id=283, type=Shape]; +"284 Constant_258" [id=284, type=Constant]; +"285 Gather_259" [id=285, type=Gather]; +"286 Shape_260" [id=286, type=Shape]; +"287 Constant_261" [id=287, type=Constant]; +"288 Gather_262" [id=288, type=Gather]; +"289 Unsqueeze_263" [id=289, type=Unsqueeze]; +"290 Concat_264" [id=290, 
type=Concat]; +"291 Reshape_265" [id=291, type=Reshape]; +"292 QuantizeLinear_h.1.attn.c_attn.weight_1" [id=292, type=QuantizeLinear]; +"293 DequantizeLinear_h.1.attn.c_attn.weight_1" [id=293, type=DequantizeLinear]; +"294 Gemm_266" [id=294, type=Gemm]; +"295 Unsqueeze_267" [id=295, type=Unsqueeze]; +"296 Unsqueeze_268" [id=296, type=Unsqueeze]; +"297 Concat_269" [id=297, type=Concat]; +"298 Reshape_270" [id=298, type=Reshape]; +"299 Split_271" [id=299, type=Split]; +"300 QuantizeLinear_query.3_1" [id=300, type=QuantizeLinear]; +"301 DequantizeLinear_query.3_1" [id=301, type=DequantizeLinear]; +"302 Shape_272" [id=302, type=Shape]; +"303 Constant_273" [id=303, type=Constant]; +"304 Gather_274" [id=304, type=Gather]; +"305 Shape_275" [id=305, type=Shape]; +"306 Constant_276" [id=306, type=Constant]; +"307 Gather_277" [id=307, type=Gather]; +"308 Shape_278" [id=308, type=Shape]; +"309 Constant_279" [id=309, type=Constant]; +"310 Gather_280" [id=310, type=Gather]; +"311 Constant_281" [id=311, type=Constant]; +"312 Div_282" [id=312, type=Div]; +"313 Cast_283" [id=313, type=Cast]; +"314 Cast_284" [id=314, type=Cast]; +"315 Unsqueeze_285" [id=315, type=Unsqueeze]; +"316 Unsqueeze_286" [id=316, type=Unsqueeze]; +"317 Unsqueeze_287" [id=317, type=Unsqueeze]; +"318 Concat_288" [id=318, type=Concat]; +"319 Reshape_289" [id=319, type=Reshape]; +"320 Transpose_290" [id=320, type=Transpose]; +"321 Shape_291" [id=321, type=Shape]; +"322 Constant_292" [id=322, type=Constant]; +"323 Gather_293" [id=323, type=Gather]; +"324 Shape_294" [id=324, type=Shape]; +"325 Constant_295" [id=325, type=Constant]; +"326 Gather_296" [id=326, type=Gather]; +"327 Shape_297" [id=327, type=Shape]; +"328 Constant_298" [id=328, type=Constant]; +"329 Gather_299" [id=329, type=Gather]; +"330 Constant_300" [id=330, type=Constant]; +"331 Div_301" [id=331, type=Div]; +"332 Cast_302" [id=332, type=Cast]; +"333 Cast_303" [id=333, type=Cast]; +"334 Unsqueeze_304" [id=334, type=Unsqueeze]; +"335 Unsqueeze_305" [id=335, type=Unsqueeze]; +"336 Unsqueeze_306" [id=336, type=Unsqueeze]; +"337 Concat_307" [id=337, type=Concat]; +"338 Reshape_308" [id=338, type=Reshape]; +"339 QuantizeLinear_517_1" [id=339, type=QuantizeLinear]; +"340 DequantizeLinear_517_1" [id=340, type=DequantizeLinear]; +"341 Transpose_309" [id=341, type=Transpose]; +"342 Shape_310" [id=342, type=Shape]; +"343 Constant_311" [id=343, type=Constant]; +"344 Gather_312" [id=344, type=Gather]; +"345 Shape_313" [id=345, type=Shape]; +"346 Constant_314" [id=346, type=Constant]; +"347 Gather_315" [id=347, type=Gather]; +"348 Shape_316" [id=348, type=Shape]; +"349 Constant_317" [id=349, type=Constant]; +"350 Gather_318" [id=350, type=Gather]; +"351 Constant_319" [id=351, type=Constant]; +"352 Div_320" [id=352, type=Div]; +"353 Cast_321" [id=353, type=Cast]; +"354 Cast_322" [id=354, type=Cast]; +"355 Unsqueeze_323" [id=355, type=Unsqueeze]; +"356 Unsqueeze_324" [id=356, type=Unsqueeze]; +"357 Unsqueeze_325" [id=357, type=Unsqueeze]; +"358 Concat_326" [id=358, type=Concat]; +"359 Reshape_327" [id=359, type=Reshape]; +"360 Transpose_328" [id=360, type=Transpose]; +"361 Transpose_329" [id=361, type=Transpose]; +"362 Unsqueeze_330" [id=362, type=Unsqueeze]; +"363 Unsqueeze_331" [id=363, type=Unsqueeze]; +"364 Concat_332" [id=364, type=Concat]; +"365 MatMul_333" [id=365, type=MatMul]; +"366 Constant_334" [id=366, type=Constant]; +"367 Div_335" [id=367, type=Div]; +"368 Shape_336" [id=368, type=Shape]; +"369 Constant_337" [id=369, type=Constant]; +"370 Gather_338" [id=370, type=Gather]; 
+"371 Shape_339" [id=371, type=Shape]; +"372 Constant_340" [id=372, type=Constant]; +"373 Gather_341" [id=373, type=Gather]; +"374 Sub_342" [id=374, type=Sub]; +"375 Unsqueeze_343" [id=375, type=Unsqueeze]; +"376 Unsqueeze_344" [id=376, type=Unsqueeze]; +"377 Constant_345" [id=377, type=Constant]; +"378 Slice_346" [id=378, type=Slice]; +"379 Unsqueeze_347" [id=379, type=Unsqueeze]; +"380 Constant_348" [id=380, type=Constant]; +"381 Slice_349" [id=381, type=Slice]; +"382 Mul_350" [id=382, type=Mul]; +"383 Constant_351" [id=383, type=Constant]; +"384 Sub_352" [id=384, type=Sub]; +"385 Constant_353" [id=385, type=Constant]; +"386 Mul_354" [id=386, type=Mul]; +"387 Sub_355" [id=387, type=Sub]; +"388 Softmax_356" [id=388, type=Softmax]; +"389 MatMul_357" [id=389, type=MatMul]; +"390 QuantizeLinear_574_1" [id=390, type=QuantizeLinear]; +"391 DequantizeLinear_574_1" [id=391, type=DequantizeLinear]; +"392 Transpose_358" [id=392, type=Transpose]; +"393 Shape_359" [id=393, type=Shape]; +"394 Constant_360" [id=394, type=Constant]; +"395 Gather_361" [id=395, type=Gather]; +"396 Shape_362" [id=396, type=Shape]; +"397 Constant_363" [id=397, type=Constant]; +"398 Gather_364" [id=398, type=Gather]; +"399 Shape_365" [id=399, type=Shape]; +"400 Constant_366" [id=400, type=Constant]; +"401 Gather_367" [id=401, type=Gather]; +"402 Shape_368" [id=402, type=Shape]; +"403 Constant_369" [id=403, type=Constant]; +"404 Gather_370" [id=404, type=Gather]; +"405 Mul_371" [id=405, type=Mul]; +"406 Unsqueeze_372" [id=406, type=Unsqueeze]; +"407 Unsqueeze_373" [id=407, type=Unsqueeze]; +"408 Unsqueeze_374" [id=408, type=Unsqueeze]; +"409 Concat_375" [id=409, type=Concat]; +"410 Reshape_376" [id=410, type=Reshape]; +"411 Shape_377" [id=411, type=Shape]; +"412 Constant_378" [id=412, type=Constant]; +"413 Gather_379" [id=413, type=Gather]; +"414 Shape_380" [id=414, type=Shape]; +"415 Constant_381" [id=415, type=Constant]; +"416 Gather_382" [id=416, type=Gather]; +"417 Shape_383" [id=417, type=Shape]; +"418 Constant_384" [id=418, type=Constant]; +"419 Gather_385" [id=419, type=Gather]; +"420 Unsqueeze_386" [id=420, type=Unsqueeze]; +"421 Concat_387" [id=421, type=Concat]; +"422 Reshape_388" [id=422, type=Reshape]; +"423 QuantizeLinear_h.1.attn.c_proj.weight_1" [id=423, type=QuantizeLinear]; +"424 DequantizeLinear_h.1.attn.c_proj.weight_1" [id=424, type=DequantizeLinear]; +"425 Gemm_389" [id=425, type=Gemm]; +"426 Unsqueeze_390" [id=426, type=Unsqueeze]; +"427 Unsqueeze_391" [id=427, type=Unsqueeze]; +"428 Concat_392" [id=428, type=Concat]; +"429 Reshape_393" [id=429, type=Reshape]; +"430 Add_394" [id=430, type=Add]; +"431 ReduceMean_395" [id=431, type=ReduceMean]; +"432 Sub_396" [id=432, type=Sub]; +"433 Constant_397" [id=433, type=Constant]; +"434 Pow_398" [id=434, type=Pow]; +"435 ReduceMean_399" [id=435, type=ReduceMean]; +"436 Constant_400" [id=436, type=Constant]; +"437 Add_401" [id=437, type=Add]; +"438 Sqrt_402" [id=438, type=Sqrt]; +"439 Div_403" [id=439, type=Div]; +"440 Mul_404" [id=440, type=Mul]; +"441 Add_405" [id=441, type=Add]; +"442 QuantizeLinear_626_1" [id=442, type=QuantizeLinear]; +"443 DequantizeLinear_626_1" [id=443, type=DequantizeLinear]; +"444 Shape_406" [id=444, type=Shape]; +"445 Constant_407" [id=445, type=Constant]; +"446 Gather_408" [id=446, type=Gather]; +"447 Shape_409" [id=447, type=Shape]; +"448 Constant_410" [id=448, type=Constant]; +"449 Gather_411" [id=449, type=Gather]; +"450 Shape_412" [id=450, type=Shape]; +"451 Constant_413" [id=451, type=Constant]; +"452 Gather_414" [id=452, 
type=Gather]; +"453 Unsqueeze_415" [id=453, type=Unsqueeze]; +"454 Concat_416" [id=454, type=Concat]; +"455 Reshape_417" [id=455, type=Reshape]; +"456 QuantizeLinear_h.1.mlp.c_fc.weight_1" [id=456, type=QuantizeLinear]; +"457 DequantizeLinear_h.1.mlp.c_fc.weight_1" [id=457, type=DequantizeLinear]; +"458 Gemm_418" [id=458, type=Gemm]; +"459 Unsqueeze_419" [id=459, type=Unsqueeze]; +"460 Unsqueeze_420" [id=460, type=Unsqueeze]; +"461 Concat_421" [id=461, type=Concat]; +"462 Reshape_422" [id=462, type=Reshape]; +"463 Constant_423" [id=463, type=Constant]; +"464 Mul_424" [id=464, type=Mul]; +"465 Constant_425" [id=465, type=Constant]; +"466 Pow_426" [id=466, type=Pow]; +"467 Constant_427" [id=467, type=Constant]; +"468 Mul_428" [id=468, type=Mul]; +"469 Add_429" [id=469, type=Add]; +"470 Constant_430" [id=470, type=Constant]; +"471 Mul_431" [id=471, type=Mul]; +"472 Tanh_432" [id=472, type=Tanh]; +"473 Constant_433" [id=473, type=Constant]; +"474 Add_434" [id=474, type=Add]; +"475 Mul_435" [id=475, type=Mul]; +"476 QuantizeLinear_660_1" [id=476, type=QuantizeLinear]; +"477 DequantizeLinear_660_1" [id=477, type=DequantizeLinear]; +"478 Shape_436" [id=478, type=Shape]; +"479 Constant_437" [id=479, type=Constant]; +"480 Gather_438" [id=480, type=Gather]; +"481 Shape_439" [id=481, type=Shape]; +"482 Constant_440" [id=482, type=Constant]; +"483 Gather_441" [id=483, type=Gather]; +"484 Shape_442" [id=484, type=Shape]; +"485 Constant_443" [id=485, type=Constant]; +"486 Gather_444" [id=486, type=Gather]; +"487 Unsqueeze_445" [id=487, type=Unsqueeze]; +"488 Concat_446" [id=488, type=Concat]; +"489 Reshape_447" [id=489, type=Reshape]; +"490 QuantizeLinear_h.1.mlp.c_proj.weight_1" [id=490, type=QuantizeLinear]; +"491 DequantizeLinear_h.1.mlp.c_proj.weight_1" [id=491, type=DequantizeLinear]; +"492 Gemm_448" [id=492, type=Gemm]; +"493 Unsqueeze_449" [id=493, type=Unsqueeze]; +"494 Unsqueeze_450" [id=494, type=Unsqueeze]; +"495 Concat_451" [id=495, type=Concat]; +"496 Reshape_452" [id=496, type=Reshape]; +"497 Add_453" [id=497, type=Add]; +"498 ReduceMean_454" [id=498, type=ReduceMean]; +"499 Sub_455" [id=499, type=Sub]; +"500 Constant_456" [id=500, type=Constant]; +"501 Pow_457" [id=501, type=Pow]; +"502 ReduceMean_458" [id=502, type=ReduceMean]; +"503 Constant_459" [id=503, type=Constant]; +"504 Add_460" [id=504, type=Add]; +"505 Sqrt_461" [id=505, type=Sqrt]; +"506 Div_462" [id=506, type=Div]; +"507 Mul_463" [id=507, type=Mul]; +"508 Add_464" [id=508, type=Add]; +"509 QuantizeLinear_693_1" [id=509, type=QuantizeLinear]; +"510 DequantizeLinear_693_1" [id=510, type=DequantizeLinear]; +"511 Shape_465" [id=511, type=Shape]; +"512 Constant_466" [id=512, type=Constant]; +"513 Gather_467" [id=513, type=Gather]; +"514 Shape_468" [id=514, type=Shape]; +"515 Constant_469" [id=515, type=Constant]; +"516 Gather_470" [id=516, type=Gather]; +"517 Shape_471" [id=517, type=Shape]; +"518 Constant_472" [id=518, type=Constant]; +"519 Gather_473" [id=519, type=Gather]; +"520 Unsqueeze_474" [id=520, type=Unsqueeze]; +"521 Concat_475" [id=521, type=Concat]; +"522 Reshape_476" [id=522, type=Reshape]; +"523 QuantizeLinear_h.2.attn.c_attn.weight_1" [id=523, type=QuantizeLinear]; +"524 DequantizeLinear_h.2.attn.c_attn.weight_1" [id=524, type=DequantizeLinear]; +"525 Gemm_477" [id=525, type=Gemm]; +"526 Unsqueeze_478" [id=526, type=Unsqueeze]; +"527 Unsqueeze_479" [id=527, type=Unsqueeze]; +"528 Concat_480" [id=528, type=Concat]; +"529 Reshape_481" [id=529, type=Reshape]; +"530 Split_482" [id=530, type=Split]; +"531 
QuantizeLinear_query.5_1" [id=531, type=QuantizeLinear]; +"532 DequantizeLinear_query.5_1" [id=532, type=DequantizeLinear]; +"533 Shape_483" [id=533, type=Shape]; +"534 Constant_484" [id=534, type=Constant]; +"535 Gather_485" [id=535, type=Gather]; +"536 Shape_486" [id=536, type=Shape]; +"537 Constant_487" [id=537, type=Constant]; +"538 Gather_488" [id=538, type=Gather]; +"539 Shape_489" [id=539, type=Shape]; +"540 Constant_490" [id=540, type=Constant]; +"541 Gather_491" [id=541, type=Gather]; +"542 Constant_492" [id=542, type=Constant]; +"543 Div_493" [id=543, type=Div]; +"544 Cast_494" [id=544, type=Cast]; +"545 Cast_495" [id=545, type=Cast]; +"546 Unsqueeze_496" [id=546, type=Unsqueeze]; +"547 Unsqueeze_497" [id=547, type=Unsqueeze]; +"548 Unsqueeze_498" [id=548, type=Unsqueeze]; +"549 Concat_499" [id=549, type=Concat]; +"550 Reshape_500" [id=550, type=Reshape]; +"551 Transpose_501" [id=551, type=Transpose]; +"552 Shape_502" [id=552, type=Shape]; +"553 Constant_503" [id=553, type=Constant]; +"554 Gather_504" [id=554, type=Gather]; +"555 Shape_505" [id=555, type=Shape]; +"556 Constant_506" [id=556, type=Constant]; +"557 Gather_507" [id=557, type=Gather]; +"558 Shape_508" [id=558, type=Shape]; +"559 Constant_509" [id=559, type=Constant]; +"560 Gather_510" [id=560, type=Gather]; +"561 Constant_511" [id=561, type=Constant]; +"562 Div_512" [id=562, type=Div]; +"563 Cast_513" [id=563, type=Cast]; +"564 Cast_514" [id=564, type=Cast]; +"565 Unsqueeze_515" [id=565, type=Unsqueeze]; +"566 Unsqueeze_516" [id=566, type=Unsqueeze]; +"567 Unsqueeze_517" [id=567, type=Unsqueeze]; +"568 Concat_518" [id=568, type=Concat]; +"569 Reshape_519" [id=569, type=Reshape]; +"570 QuantizeLinear_758_1" [id=570, type=QuantizeLinear]; +"571 DequantizeLinear_758_1" [id=571, type=DequantizeLinear]; +"572 Transpose_520" [id=572, type=Transpose]; +"573 Shape_521" [id=573, type=Shape]; +"574 Constant_522" [id=574, type=Constant]; +"575 Gather_523" [id=575, type=Gather]; +"576 Shape_524" [id=576, type=Shape]; +"577 Constant_525" [id=577, type=Constant]; +"578 Gather_526" [id=578, type=Gather]; +"579 Shape_527" [id=579, type=Shape]; +"580 Constant_528" [id=580, type=Constant]; +"581 Gather_529" [id=581, type=Gather]; +"582 Constant_530" [id=582, type=Constant]; +"583 Div_531" [id=583, type=Div]; +"584 Cast_532" [id=584, type=Cast]; +"585 Cast_533" [id=585, type=Cast]; +"586 Unsqueeze_534" [id=586, type=Unsqueeze]; +"587 Unsqueeze_535" [id=587, type=Unsqueeze]; +"588 Unsqueeze_536" [id=588, type=Unsqueeze]; +"589 Concat_537" [id=589, type=Concat]; +"590 Reshape_538" [id=590, type=Reshape]; +"591 Transpose_539" [id=591, type=Transpose]; +"592 Transpose_540" [id=592, type=Transpose]; +"593 Unsqueeze_541" [id=593, type=Unsqueeze]; +"594 Unsqueeze_542" [id=594, type=Unsqueeze]; +"595 Concat_543" [id=595, type=Concat]; +"596 MatMul_544" [id=596, type=MatMul]; +"597 Constant_545" [id=597, type=Constant]; +"598 Div_546" [id=598, type=Div]; +"599 Shape_547" [id=599, type=Shape]; +"600 Constant_548" [id=600, type=Constant]; +"601 Gather_549" [id=601, type=Gather]; +"602 Shape_550" [id=602, type=Shape]; +"603 Constant_551" [id=603, type=Constant]; +"604 Gather_552" [id=604, type=Gather]; +"605 Sub_553" [id=605, type=Sub]; +"606 Unsqueeze_554" [id=606, type=Unsqueeze]; +"607 Unsqueeze_555" [id=607, type=Unsqueeze]; +"608 Constant_556" [id=608, type=Constant]; +"609 Slice_557" [id=609, type=Slice]; +"610 Unsqueeze_558" [id=610, type=Unsqueeze]; +"611 Constant_559" [id=611, type=Constant]; +"612 Slice_560" [id=612, type=Slice]; +"613 
Mul_561" [id=613, type=Mul]; +"614 Constant_562" [id=614, type=Constant]; +"615 Sub_563" [id=615, type=Sub]; +"616 Constant_564" [id=616, type=Constant]; +"617 Mul_565" [id=617, type=Mul]; +"618 Sub_566" [id=618, type=Sub]; +"619 Softmax_567" [id=619, type=Softmax]; +"620 MatMul_568" [id=620, type=MatMul]; +"621 QuantizeLinear_815_1" [id=621, type=QuantizeLinear]; +"622 DequantizeLinear_815_1" [id=622, type=DequantizeLinear]; +"623 Transpose_569" [id=623, type=Transpose]; +"624 Shape_570" [id=624, type=Shape]; +"625 Constant_571" [id=625, type=Constant]; +"626 Gather_572" [id=626, type=Gather]; +"627 Shape_573" [id=627, type=Shape]; +"628 Constant_574" [id=628, type=Constant]; +"629 Gather_575" [id=629, type=Gather]; +"630 Shape_576" [id=630, type=Shape]; +"631 Constant_577" [id=631, type=Constant]; +"632 Gather_578" [id=632, type=Gather]; +"633 Shape_579" [id=633, type=Shape]; +"634 Constant_580" [id=634, type=Constant]; +"635 Gather_581" [id=635, type=Gather]; +"636 Mul_582" [id=636, type=Mul]; +"637 Unsqueeze_583" [id=637, type=Unsqueeze]; +"638 Unsqueeze_584" [id=638, type=Unsqueeze]; +"639 Unsqueeze_585" [id=639, type=Unsqueeze]; +"640 Concat_586" [id=640, type=Concat]; +"641 Reshape_587" [id=641, type=Reshape]; +"642 Shape_588" [id=642, type=Shape]; +"643 Constant_589" [id=643, type=Constant]; +"644 Gather_590" [id=644, type=Gather]; +"645 Shape_591" [id=645, type=Shape]; +"646 Constant_592" [id=646, type=Constant]; +"647 Gather_593" [id=647, type=Gather]; +"648 Shape_594" [id=648, type=Shape]; +"649 Constant_595" [id=649, type=Constant]; +"650 Gather_596" [id=650, type=Gather]; +"651 Unsqueeze_597" [id=651, type=Unsqueeze]; +"652 Concat_598" [id=652, type=Concat]; +"653 Reshape_599" [id=653, type=Reshape]; +"654 QuantizeLinear_h.2.attn.c_proj.weight_1" [id=654, type=QuantizeLinear]; +"655 DequantizeLinear_h.2.attn.c_proj.weight_1" [id=655, type=DequantizeLinear]; +"656 Gemm_600" [id=656, type=Gemm]; +"657 Unsqueeze_601" [id=657, type=Unsqueeze]; +"658 Unsqueeze_602" [id=658, type=Unsqueeze]; +"659 Concat_603" [id=659, type=Concat]; +"660 Reshape_604" [id=660, type=Reshape]; +"661 Add_605" [id=661, type=Add]; +"662 ReduceMean_606" [id=662, type=ReduceMean]; +"663 Sub_607" [id=663, type=Sub]; +"664 Constant_608" [id=664, type=Constant]; +"665 Pow_609" [id=665, type=Pow]; +"666 ReduceMean_610" [id=666, type=ReduceMean]; +"667 Constant_611" [id=667, type=Constant]; +"668 Add_612" [id=668, type=Add]; +"669 Sqrt_613" [id=669, type=Sqrt]; +"670 Div_614" [id=670, type=Div]; +"671 Mul_615" [id=671, type=Mul]; +"672 Add_616" [id=672, type=Add]; +"673 QuantizeLinear_867_1" [id=673, type=QuantizeLinear]; +"674 DequantizeLinear_867_1" [id=674, type=DequantizeLinear]; +"675 Shape_617" [id=675, type=Shape]; +"676 Constant_618" [id=676, type=Constant]; +"677 Gather_619" [id=677, type=Gather]; +"678 Shape_620" [id=678, type=Shape]; +"679 Constant_621" [id=679, type=Constant]; +"680 Gather_622" [id=680, type=Gather]; +"681 Shape_623" [id=681, type=Shape]; +"682 Constant_624" [id=682, type=Constant]; +"683 Gather_625" [id=683, type=Gather]; +"684 Unsqueeze_626" [id=684, type=Unsqueeze]; +"685 Concat_627" [id=685, type=Concat]; +"686 Reshape_628" [id=686, type=Reshape]; +"687 QuantizeLinear_h.2.mlp.c_fc.weight_1" [id=687, type=QuantizeLinear]; +"688 DequantizeLinear_h.2.mlp.c_fc.weight_1" [id=688, type=DequantizeLinear]; +"689 Gemm_629" [id=689, type=Gemm]; +"690 Unsqueeze_630" [id=690, type=Unsqueeze]; +"691 Unsqueeze_631" [id=691, type=Unsqueeze]; +"692 Concat_632" [id=692, type=Concat]; +"693 
Reshape_633" [id=693, type=Reshape]; +"694 Constant_634" [id=694, type=Constant]; +"695 Mul_635" [id=695, type=Mul]; +"696 Constant_636" [id=696, type=Constant]; +"697 Pow_637" [id=697, type=Pow]; +"698 Constant_638" [id=698, type=Constant]; +"699 Mul_639" [id=699, type=Mul]; +"700 Add_640" [id=700, type=Add]; +"701 Constant_641" [id=701, type=Constant]; +"702 Mul_642" [id=702, type=Mul]; +"703 Tanh_643" [id=703, type=Tanh]; +"704 Constant_644" [id=704, type=Constant]; +"705 Add_645" [id=705, type=Add]; +"706 Mul_646" [id=706, type=Mul]; +"707 QuantizeLinear_901_1" [id=707, type=QuantizeLinear]; +"708 DequantizeLinear_901_1" [id=708, type=DequantizeLinear]; +"709 Shape_647" [id=709, type=Shape]; +"710 Constant_648" [id=710, type=Constant]; +"711 Gather_649" [id=711, type=Gather]; +"712 Shape_650" [id=712, type=Shape]; +"713 Constant_651" [id=713, type=Constant]; +"714 Gather_652" [id=714, type=Gather]; +"715 Shape_653" [id=715, type=Shape]; +"716 Constant_654" [id=716, type=Constant]; +"717 Gather_655" [id=717, type=Gather]; +"718 Unsqueeze_656" [id=718, type=Unsqueeze]; +"719 Concat_657" [id=719, type=Concat]; +"720 Reshape_658" [id=720, type=Reshape]; +"721 QuantizeLinear_h.2.mlp.c_proj.weight_1" [id=721, type=QuantizeLinear]; +"722 DequantizeLinear_h.2.mlp.c_proj.weight_1" [id=722, type=DequantizeLinear]; +"723 Gemm_659" [id=723, type=Gemm]; +"724 Unsqueeze_660" [id=724, type=Unsqueeze]; +"725 Unsqueeze_661" [id=725, type=Unsqueeze]; +"726 Concat_662" [id=726, type=Concat]; +"727 Reshape_663" [id=727, type=Reshape]; +"728 Add_664" [id=728, type=Add]; +"729 ReduceMean_665" [id=729, type=ReduceMean]; +"730 Sub_666" [id=730, type=Sub]; +"731 Constant_667" [id=731, type=Constant]; +"732 Pow_668" [id=732, type=Pow]; +"733 ReduceMean_669" [id=733, type=ReduceMean]; +"734 Constant_670" [id=734, type=Constant]; +"735 Add_671" [id=735, type=Add]; +"736 Sqrt_672" [id=736, type=Sqrt]; +"737 Div_673" [id=737, type=Div]; +"738 Mul_674" [id=738, type=Mul]; +"739 Add_675" [id=739, type=Add]; +"740 QuantizeLinear_934_1" [id=740, type=QuantizeLinear]; +"741 DequantizeLinear_934_1" [id=741, type=DequantizeLinear]; +"742 Shape_676" [id=742, type=Shape]; +"743 Constant_677" [id=743, type=Constant]; +"744 Gather_678" [id=744, type=Gather]; +"745 Shape_679" [id=745, type=Shape]; +"746 Constant_680" [id=746, type=Constant]; +"747 Gather_681" [id=747, type=Gather]; +"748 Shape_682" [id=748, type=Shape]; +"749 Constant_683" [id=749, type=Constant]; +"750 Gather_684" [id=750, type=Gather]; +"751 Unsqueeze_685" [id=751, type=Unsqueeze]; +"752 Concat_686" [id=752, type=Concat]; +"753 Reshape_687" [id=753, type=Reshape]; +"754 QuantizeLinear_h.3.attn.c_attn.weight_1" [id=754, type=QuantizeLinear]; +"755 DequantizeLinear_h.3.attn.c_attn.weight_1" [id=755, type=DequantizeLinear]; +"756 Gemm_688" [id=756, type=Gemm]; +"757 Unsqueeze_689" [id=757, type=Unsqueeze]; +"758 Unsqueeze_690" [id=758, type=Unsqueeze]; +"759 Concat_691" [id=759, type=Concat]; +"760 Reshape_692" [id=760, type=Reshape]; +"761 Split_693" [id=761, type=Split]; +"762 QuantizeLinear_query.7_1" [id=762, type=QuantizeLinear]; +"763 DequantizeLinear_query.7_1" [id=763, type=DequantizeLinear]; +"764 Shape_694" [id=764, type=Shape]; +"765 Constant_695" [id=765, type=Constant]; +"766 Gather_696" [id=766, type=Gather]; +"767 Shape_697" [id=767, type=Shape]; +"768 Constant_698" [id=768, type=Constant]; +"769 Gather_699" [id=769, type=Gather]; +"770 Shape_700" [id=770, type=Shape]; +"771 Constant_701" [id=771, type=Constant]; +"772 Gather_702" [id=772, 
type=Gather]; +"773 Constant_703" [id=773, type=Constant]; +"774 Div_704" [id=774, type=Div]; +"775 Cast_705" [id=775, type=Cast]; +"776 Cast_706" [id=776, type=Cast]; +"777 Unsqueeze_707" [id=777, type=Unsqueeze]; +"778 Unsqueeze_708" [id=778, type=Unsqueeze]; +"779 Unsqueeze_709" [id=779, type=Unsqueeze]; +"780 Concat_710" [id=780, type=Concat]; +"781 Reshape_711" [id=781, type=Reshape]; +"782 Transpose_712" [id=782, type=Transpose]; +"783 Shape_713" [id=783, type=Shape]; +"784 Constant_714" [id=784, type=Constant]; +"785 Gather_715" [id=785, type=Gather]; +"786 Shape_716" [id=786, type=Shape]; +"787 Constant_717" [id=787, type=Constant]; +"788 Gather_718" [id=788, type=Gather]; +"789 Shape_719" [id=789, type=Shape]; +"790 Constant_720" [id=790, type=Constant]; +"791 Gather_721" [id=791, type=Gather]; +"792 Constant_722" [id=792, type=Constant]; +"793 Div_723" [id=793, type=Div]; +"794 Cast_724" [id=794, type=Cast]; +"795 Cast_725" [id=795, type=Cast]; +"796 Unsqueeze_726" [id=796, type=Unsqueeze]; +"797 Unsqueeze_727" [id=797, type=Unsqueeze]; +"798 Unsqueeze_728" [id=798, type=Unsqueeze]; +"799 Concat_729" [id=799, type=Concat]; +"800 Reshape_730" [id=800, type=Reshape]; +"801 QuantizeLinear_999_1" [id=801, type=QuantizeLinear]; +"802 DequantizeLinear_999_1" [id=802, type=DequantizeLinear]; +"803 Transpose_731" [id=803, type=Transpose]; +"804 Shape_732" [id=804, type=Shape]; +"805 Constant_733" [id=805, type=Constant]; +"806 Gather_734" [id=806, type=Gather]; +"807 Shape_735" [id=807, type=Shape]; +"808 Constant_736" [id=808, type=Constant]; +"809 Gather_737" [id=809, type=Gather]; +"810 Shape_738" [id=810, type=Shape]; +"811 Constant_739" [id=811, type=Constant]; +"812 Gather_740" [id=812, type=Gather]; +"813 Constant_741" [id=813, type=Constant]; +"814 Div_742" [id=814, type=Div]; +"815 Cast_743" [id=815, type=Cast]; +"816 Cast_744" [id=816, type=Cast]; +"817 Unsqueeze_745" [id=817, type=Unsqueeze]; +"818 Unsqueeze_746" [id=818, type=Unsqueeze]; +"819 Unsqueeze_747" [id=819, type=Unsqueeze]; +"820 Concat_748" [id=820, type=Concat]; +"821 Reshape_749" [id=821, type=Reshape]; +"822 Transpose_750" [id=822, type=Transpose]; +"823 Transpose_751" [id=823, type=Transpose]; +"824 Unsqueeze_752" [id=824, type=Unsqueeze]; +"825 Unsqueeze_753" [id=825, type=Unsqueeze]; +"826 Concat_754" [id=826, type=Concat]; +"827 MatMul_755" [id=827, type=MatMul]; +"828 Constant_756" [id=828, type=Constant]; +"829 Div_757" [id=829, type=Div]; +"830 Shape_758" [id=830, type=Shape]; +"831 Constant_759" [id=831, type=Constant]; +"832 Gather_760" [id=832, type=Gather]; +"833 Shape_761" [id=833, type=Shape]; +"834 Constant_762" [id=834, type=Constant]; +"835 Gather_763" [id=835, type=Gather]; +"836 Sub_764" [id=836, type=Sub]; +"837 Unsqueeze_765" [id=837, type=Unsqueeze]; +"838 Unsqueeze_766" [id=838, type=Unsqueeze]; +"839 Constant_767" [id=839, type=Constant]; +"840 Slice_768" [id=840, type=Slice]; +"841 Unsqueeze_769" [id=841, type=Unsqueeze]; +"842 Constant_770" [id=842, type=Constant]; +"843 Slice_771" [id=843, type=Slice]; +"844 Mul_772" [id=844, type=Mul]; +"845 Constant_773" [id=845, type=Constant]; +"846 Sub_774" [id=846, type=Sub]; +"847 Constant_775" [id=847, type=Constant]; +"848 Mul_776" [id=848, type=Mul]; +"849 Sub_777" [id=849, type=Sub]; +"850 Softmax_778" [id=850, type=Softmax]; +"851 MatMul_779" [id=851, type=MatMul]; +"852 QuantizeLinear_1056_1" [id=852, type=QuantizeLinear]; +"853 DequantizeLinear_1056_1" [id=853, type=DequantizeLinear]; +"854 Transpose_780" [id=854, type=Transpose]; +"855 
Shape_781" [id=855, type=Shape]; +"856 Constant_782" [id=856, type=Constant]; +"857 Gather_783" [id=857, type=Gather]; +"858 Shape_784" [id=858, type=Shape]; +"859 Constant_785" [id=859, type=Constant]; +"860 Gather_786" [id=860, type=Gather]; +"861 Shape_787" [id=861, type=Shape]; +"862 Constant_788" [id=862, type=Constant]; +"863 Gather_789" [id=863, type=Gather]; +"864 Shape_790" [id=864, type=Shape]; +"865 Constant_791" [id=865, type=Constant]; +"866 Gather_792" [id=866, type=Gather]; +"867 Mul_793" [id=867, type=Mul]; +"868 Unsqueeze_794" [id=868, type=Unsqueeze]; +"869 Unsqueeze_795" [id=869, type=Unsqueeze]; +"870 Unsqueeze_796" [id=870, type=Unsqueeze]; +"871 Concat_797" [id=871, type=Concat]; +"872 Reshape_798" [id=872, type=Reshape]; +"873 Shape_799" [id=873, type=Shape]; +"874 Constant_800" [id=874, type=Constant]; +"875 Gather_801" [id=875, type=Gather]; +"876 Shape_802" [id=876, type=Shape]; +"877 Constant_803" [id=877, type=Constant]; +"878 Gather_804" [id=878, type=Gather]; +"879 Shape_805" [id=879, type=Shape]; +"880 Constant_806" [id=880, type=Constant]; +"881 Gather_807" [id=881, type=Gather]; +"882 Unsqueeze_808" [id=882, type=Unsqueeze]; +"883 Concat_809" [id=883, type=Concat]; +"884 Reshape_810" [id=884, type=Reshape]; +"885 QuantizeLinear_h.3.attn.c_proj.weight_1" [id=885, type=QuantizeLinear]; +"886 DequantizeLinear_h.3.attn.c_proj.weight_1" [id=886, type=DequantizeLinear]; +"887 Gemm_811" [id=887, type=Gemm]; +"888 Unsqueeze_812" [id=888, type=Unsqueeze]; +"889 Unsqueeze_813" [id=889, type=Unsqueeze]; +"890 Concat_814" [id=890, type=Concat]; +"891 Reshape_815" [id=891, type=Reshape]; +"892 Add_816" [id=892, type=Add]; +"893 ReduceMean_817" [id=893, type=ReduceMean]; +"894 Sub_818" [id=894, type=Sub]; +"895 Constant_819" [id=895, type=Constant]; +"896 Pow_820" [id=896, type=Pow]; +"897 ReduceMean_821" [id=897, type=ReduceMean]; +"898 Constant_822" [id=898, type=Constant]; +"899 Add_823" [id=899, type=Add]; +"900 Sqrt_824" [id=900, type=Sqrt]; +"901 Div_825" [id=901, type=Div]; +"902 Mul_826" [id=902, type=Mul]; +"903 Add_827" [id=903, type=Add]; +"904 QuantizeLinear_1108_1" [id=904, type=QuantizeLinear]; +"905 DequantizeLinear_1108_1" [id=905, type=DequantizeLinear]; +"906 Shape_828" [id=906, type=Shape]; +"907 Constant_829" [id=907, type=Constant]; +"908 Gather_830" [id=908, type=Gather]; +"909 Shape_831" [id=909, type=Shape]; +"910 Constant_832" [id=910, type=Constant]; +"911 Gather_833" [id=911, type=Gather]; +"912 Shape_834" [id=912, type=Shape]; +"913 Constant_835" [id=913, type=Constant]; +"914 Gather_836" [id=914, type=Gather]; +"915 Unsqueeze_837" [id=915, type=Unsqueeze]; +"916 Concat_838" [id=916, type=Concat]; +"917 Reshape_839" [id=917, type=Reshape]; +"918 QuantizeLinear_h.3.mlp.c_fc.weight_1" [id=918, type=QuantizeLinear]; +"919 DequantizeLinear_h.3.mlp.c_fc.weight_1" [id=919, type=DequantizeLinear]; +"920 Gemm_840" [id=920, type=Gemm]; +"921 Unsqueeze_841" [id=921, type=Unsqueeze]; +"922 Unsqueeze_842" [id=922, type=Unsqueeze]; +"923 Concat_843" [id=923, type=Concat]; +"924 Reshape_844" [id=924, type=Reshape]; +"925 Constant_845" [id=925, type=Constant]; +"926 Mul_846" [id=926, type=Mul]; +"927 Constant_847" [id=927, type=Constant]; +"928 Pow_848" [id=928, type=Pow]; +"929 Constant_849" [id=929, type=Constant]; +"930 Mul_850" [id=930, type=Mul]; +"931 Add_851" [id=931, type=Add]; +"932 Constant_852" [id=932, type=Constant]; +"933 Mul_853" [id=933, type=Mul]; +"934 Tanh_854" [id=934, type=Tanh]; +"935 Constant_855" [id=935, type=Constant]; +"936 
Add_856" [id=936, type=Add]; +"937 Mul_857" [id=937, type=Mul]; +"938 QuantizeLinear_1142_1" [id=938, type=QuantizeLinear]; +"939 DequantizeLinear_1142_1" [id=939, type=DequantizeLinear]; +"940 Shape_858" [id=940, type=Shape]; +"941 Constant_859" [id=941, type=Constant]; +"942 Gather_860" [id=942, type=Gather]; +"943 Shape_861" [id=943, type=Shape]; +"944 Constant_862" [id=944, type=Constant]; +"945 Gather_863" [id=945, type=Gather]; +"946 Shape_864" [id=946, type=Shape]; +"947 Constant_865" [id=947, type=Constant]; +"948 Gather_866" [id=948, type=Gather]; +"949 Unsqueeze_867" [id=949, type=Unsqueeze]; +"950 Concat_868" [id=950, type=Concat]; +"951 Reshape_869" [id=951, type=Reshape]; +"952 QuantizeLinear_h.3.mlp.c_proj.weight_1" [id=952, type=QuantizeLinear]; +"953 DequantizeLinear_h.3.mlp.c_proj.weight_1" [id=953, type=DequantizeLinear]; +"954 Gemm_870" [id=954, type=Gemm]; +"955 Unsqueeze_871" [id=955, type=Unsqueeze]; +"956 Unsqueeze_872" [id=956, type=Unsqueeze]; +"957 Concat_873" [id=957, type=Concat]; +"958 Reshape_874" [id=958, type=Reshape]; +"959 Add_875" [id=959, type=Add]; +"960 ReduceMean_876" [id=960, type=ReduceMean]; +"961 Sub_877" [id=961, type=Sub]; +"962 Constant_878" [id=962, type=Constant]; +"963 Pow_879" [id=963, type=Pow]; +"964 ReduceMean_880" [id=964, type=ReduceMean]; +"965 Constant_881" [id=965, type=Constant]; +"966 Add_882" [id=966, type=Add]; +"967 Sqrt_883" [id=967, type=Sqrt]; +"968 Div_884" [id=968, type=Div]; +"969 Mul_885" [id=969, type=Mul]; +"970 Add_886" [id=970, type=Add]; +"971 QuantizeLinear_1175_1" [id=971, type=QuantizeLinear]; +"972 DequantizeLinear_1175_1" [id=972, type=DequantizeLinear]; +"973 Shape_887" [id=973, type=Shape]; +"974 Constant_888" [id=974, type=Constant]; +"975 Gather_889" [id=975, type=Gather]; +"976 Shape_890" [id=976, type=Shape]; +"977 Constant_891" [id=977, type=Constant]; +"978 Gather_892" [id=978, type=Gather]; +"979 Shape_893" [id=979, type=Shape]; +"980 Constant_894" [id=980, type=Constant]; +"981 Gather_895" [id=981, type=Gather]; +"982 Unsqueeze_896" [id=982, type=Unsqueeze]; +"983 Concat_897" [id=983, type=Concat]; +"984 Reshape_898" [id=984, type=Reshape]; +"985 QuantizeLinear_h.4.attn.c_attn.weight_1" [id=985, type=QuantizeLinear]; +"986 DequantizeLinear_h.4.attn.c_attn.weight_1" [id=986, type=DequantizeLinear]; +"987 Gemm_899" [id=987, type=Gemm]; +"988 Unsqueeze_900" [id=988, type=Unsqueeze]; +"989 Unsqueeze_901" [id=989, type=Unsqueeze]; +"990 Concat_902" [id=990, type=Concat]; +"991 Reshape_903" [id=991, type=Reshape]; +"992 Split_904" [id=992, type=Split]; +"993 QuantizeLinear_query.9_1" [id=993, type=QuantizeLinear]; +"994 DequantizeLinear_query.9_1" [id=994, type=DequantizeLinear]; +"995 Shape_905" [id=995, type=Shape]; +"996 Constant_906" [id=996, type=Constant]; +"997 Gather_907" [id=997, type=Gather]; +"998 Shape_908" [id=998, type=Shape]; +"999 Constant_909" [id=999, type=Constant]; +"1000 Gather_910" [id=1000, type=Gather]; +"1001 Shape_911" [id=1001, type=Shape]; +"1002 Constant_912" [id=1002, type=Constant]; +"1003 Gather_913" [id=1003, type=Gather]; +"1004 Constant_914" [id=1004, type=Constant]; +"1005 Div_915" [id=1005, type=Div]; +"1006 Cast_916" [id=1006, type=Cast]; +"1007 Cast_917" [id=1007, type=Cast]; +"1008 Unsqueeze_918" [id=1008, type=Unsqueeze]; +"1009 Unsqueeze_919" [id=1009, type=Unsqueeze]; +"1010 Unsqueeze_920" [id=1010, type=Unsqueeze]; +"1011 Concat_921" [id=1011, type=Concat]; +"1012 Reshape_922" [id=1012, type=Reshape]; +"1013 Transpose_923" [id=1013, type=Transpose]; +"1014 
Shape_924" [id=1014, type=Shape]; +"1015 Constant_925" [id=1015, type=Constant]; +"1016 Gather_926" [id=1016, type=Gather]; +"1017 Shape_927" [id=1017, type=Shape]; +"1018 Constant_928" [id=1018, type=Constant]; +"1019 Gather_929" [id=1019, type=Gather]; +"1020 Shape_930" [id=1020, type=Shape]; +"1021 Constant_931" [id=1021, type=Constant]; +"1022 Gather_932" [id=1022, type=Gather]; +"1023 Constant_933" [id=1023, type=Constant]; +"1024 Div_934" [id=1024, type=Div]; +"1025 Cast_935" [id=1025, type=Cast]; +"1026 Cast_936" [id=1026, type=Cast]; +"1027 Unsqueeze_937" [id=1027, type=Unsqueeze]; +"1028 Unsqueeze_938" [id=1028, type=Unsqueeze]; +"1029 Unsqueeze_939" [id=1029, type=Unsqueeze]; +"1030 Concat_940" [id=1030, type=Concat]; +"1031 Reshape_941" [id=1031, type=Reshape]; +"1032 QuantizeLinear_1240_1" [id=1032, type=QuantizeLinear]; +"1033 DequantizeLinear_1240_1" [id=1033, type=DequantizeLinear]; +"1034 Transpose_942" [id=1034, type=Transpose]; +"1035 Shape_943" [id=1035, type=Shape]; +"1036 Constant_944" [id=1036, type=Constant]; +"1037 Gather_945" [id=1037, type=Gather]; +"1038 Shape_946" [id=1038, type=Shape]; +"1039 Constant_947" [id=1039, type=Constant]; +"1040 Gather_948" [id=1040, type=Gather]; +"1041 Shape_949" [id=1041, type=Shape]; +"1042 Constant_950" [id=1042, type=Constant]; +"1043 Gather_951" [id=1043, type=Gather]; +"1044 Constant_952" [id=1044, type=Constant]; +"1045 Div_953" [id=1045, type=Div]; +"1046 Cast_954" [id=1046, type=Cast]; +"1047 Cast_955" [id=1047, type=Cast]; +"1048 Unsqueeze_956" [id=1048, type=Unsqueeze]; +"1049 Unsqueeze_957" [id=1049, type=Unsqueeze]; +"1050 Unsqueeze_958" [id=1050, type=Unsqueeze]; +"1051 Concat_959" [id=1051, type=Concat]; +"1052 Reshape_960" [id=1052, type=Reshape]; +"1053 Transpose_961" [id=1053, type=Transpose]; +"1054 Transpose_962" [id=1054, type=Transpose]; +"1055 Unsqueeze_963" [id=1055, type=Unsqueeze]; +"1056 Unsqueeze_964" [id=1056, type=Unsqueeze]; +"1057 Concat_965" [id=1057, type=Concat]; +"1058 MatMul_966" [id=1058, type=MatMul]; +"1059 Constant_967" [id=1059, type=Constant]; +"1060 Div_968" [id=1060, type=Div]; +"1061 Shape_969" [id=1061, type=Shape]; +"1062 Constant_970" [id=1062, type=Constant]; +"1063 Gather_971" [id=1063, type=Gather]; +"1064 Shape_972" [id=1064, type=Shape]; +"1065 Constant_973" [id=1065, type=Constant]; +"1066 Gather_974" [id=1066, type=Gather]; +"1067 Sub_975" [id=1067, type=Sub]; +"1068 Unsqueeze_976" [id=1068, type=Unsqueeze]; +"1069 Unsqueeze_977" [id=1069, type=Unsqueeze]; +"1070 Constant_978" [id=1070, type=Constant]; +"1071 Slice_979" [id=1071, type=Slice]; +"1072 Unsqueeze_980" [id=1072, type=Unsqueeze]; +"1073 Constant_981" [id=1073, type=Constant]; +"1074 Slice_982" [id=1074, type=Slice]; +"1075 Mul_983" [id=1075, type=Mul]; +"1076 Constant_984" [id=1076, type=Constant]; +"1077 Sub_985" [id=1077, type=Sub]; +"1078 Constant_986" [id=1078, type=Constant]; +"1079 Mul_987" [id=1079, type=Mul]; +"1080 Sub_988" [id=1080, type=Sub]; +"1081 Softmax_989" [id=1081, type=Softmax]; +"1082 MatMul_990" [id=1082, type=MatMul]; +"1083 QuantizeLinear_1297_1" [id=1083, type=QuantizeLinear]; +"1084 DequantizeLinear_1297_1" [id=1084, type=DequantizeLinear]; +"1085 Transpose_991" [id=1085, type=Transpose]; +"1086 Shape_992" [id=1086, type=Shape]; +"1087 Constant_993" [id=1087, type=Constant]; +"1088 Gather_994" [id=1088, type=Gather]; +"1089 Shape_995" [id=1089, type=Shape]; +"1090 Constant_996" [id=1090, type=Constant]; +"1091 Gather_997" [id=1091, type=Gather]; +"1092 Shape_998" [id=1092, type=Shape]; +"1093 
Constant_999" [id=1093, type=Constant]; +"1094 Gather_1000" [id=1094, type=Gather]; +"1095 Shape_1001" [id=1095, type=Shape]; +"1096 Constant_1002" [id=1096, type=Constant]; +"1097 Gather_1003" [id=1097, type=Gather]; +"1098 Mul_1004" [id=1098, type=Mul]; +"1099 Unsqueeze_1005" [id=1099, type=Unsqueeze]; +"1100 Unsqueeze_1006" [id=1100, type=Unsqueeze]; +"1101 Unsqueeze_1007" [id=1101, type=Unsqueeze]; +"1102 Concat_1008" [id=1102, type=Concat]; +"1103 Reshape_1009" [id=1103, type=Reshape]; +"1104 Shape_1010" [id=1104, type=Shape]; +"1105 Constant_1011" [id=1105, type=Constant]; +"1106 Gather_1012" [id=1106, type=Gather]; +"1107 Shape_1013" [id=1107, type=Shape]; +"1108 Constant_1014" [id=1108, type=Constant]; +"1109 Gather_1015" [id=1109, type=Gather]; +"1110 Shape_1016" [id=1110, type=Shape]; +"1111 Constant_1017" [id=1111, type=Constant]; +"1112 Gather_1018" [id=1112, type=Gather]; +"1113 Unsqueeze_1019" [id=1113, type=Unsqueeze]; +"1114 Concat_1020" [id=1114, type=Concat]; +"1115 Reshape_1021" [id=1115, type=Reshape]; +"1116 QuantizeLinear_h.4.attn.c_proj.weight_1" [id=1116, type=QuantizeLinear]; +"1117 DequantizeLinear_h.4.attn.c_proj.weight_1" [id=1117, type=DequantizeLinear]; +"1118 Gemm_1022" [id=1118, type=Gemm]; +"1119 Unsqueeze_1023" [id=1119, type=Unsqueeze]; +"1120 Unsqueeze_1024" [id=1120, type=Unsqueeze]; +"1121 Concat_1025" [id=1121, type=Concat]; +"1122 Reshape_1026" [id=1122, type=Reshape]; +"1123 Add_1027" [id=1123, type=Add]; +"1124 ReduceMean_1028" [id=1124, type=ReduceMean]; +"1125 Sub_1029" [id=1125, type=Sub]; +"1126 Constant_1030" [id=1126, type=Constant]; +"1127 Pow_1031" [id=1127, type=Pow]; +"1128 ReduceMean_1032" [id=1128, type=ReduceMean]; +"1129 Constant_1033" [id=1129, type=Constant]; +"1130 Add_1034" [id=1130, type=Add]; +"1131 Sqrt_1035" [id=1131, type=Sqrt]; +"1132 Div_1036" [id=1132, type=Div]; +"1133 Mul_1037" [id=1133, type=Mul]; +"1134 Add_1038" [id=1134, type=Add]; +"1135 QuantizeLinear_1349_1" [id=1135, type=QuantizeLinear]; +"1136 DequantizeLinear_1349_1" [id=1136, type=DequantizeLinear]; +"1137 Shape_1039" [id=1137, type=Shape]; +"1138 Constant_1040" [id=1138, type=Constant]; +"1139 Gather_1041" [id=1139, type=Gather]; +"1140 Shape_1042" [id=1140, type=Shape]; +"1141 Constant_1043" [id=1141, type=Constant]; +"1142 Gather_1044" [id=1142, type=Gather]; +"1143 Shape_1045" [id=1143, type=Shape]; +"1144 Constant_1046" [id=1144, type=Constant]; +"1145 Gather_1047" [id=1145, type=Gather]; +"1146 Unsqueeze_1048" [id=1146, type=Unsqueeze]; +"1147 Concat_1049" [id=1147, type=Concat]; +"1148 Reshape_1050" [id=1148, type=Reshape]; +"1149 QuantizeLinear_h.4.mlp.c_fc.weight_1" [id=1149, type=QuantizeLinear]; +"1150 DequantizeLinear_h.4.mlp.c_fc.weight_1" [id=1150, type=DequantizeLinear]; +"1151 Gemm_1051" [id=1151, type=Gemm]; +"1152 Unsqueeze_1052" [id=1152, type=Unsqueeze]; +"1153 Unsqueeze_1053" [id=1153, type=Unsqueeze]; +"1154 Concat_1054" [id=1154, type=Concat]; +"1155 Reshape_1055" [id=1155, type=Reshape]; +"1156 Constant_1056" [id=1156, type=Constant]; +"1157 Mul_1057" [id=1157, type=Mul]; +"1158 Constant_1058" [id=1158, type=Constant]; +"1159 Pow_1059" [id=1159, type=Pow]; +"1160 Constant_1060" [id=1160, type=Constant]; +"1161 Mul_1061" [id=1161, type=Mul]; +"1162 Add_1062" [id=1162, type=Add]; +"1163 Constant_1063" [id=1163, type=Constant]; +"1164 Mul_1064" [id=1164, type=Mul]; +"1165 Tanh_1065" [id=1165, type=Tanh]; +"1166 Constant_1066" [id=1166, type=Constant]; +"1167 Add_1067" [id=1167, type=Add]; +"1168 Mul_1068" [id=1168, type=Mul]; +"1169 
QuantizeLinear_1383_1" [id=1169, type=QuantizeLinear]; +"1170 DequantizeLinear_1383_1" [id=1170, type=DequantizeLinear]; +"1171 Shape_1069" [id=1171, type=Shape]; +"1172 Constant_1070" [id=1172, type=Constant]; +"1173 Gather_1071" [id=1173, type=Gather]; +"1174 Shape_1072" [id=1174, type=Shape]; +"1175 Constant_1073" [id=1175, type=Constant]; +"1176 Gather_1074" [id=1176, type=Gather]; +"1177 Shape_1075" [id=1177, type=Shape]; +"1178 Constant_1076" [id=1178, type=Constant]; +"1179 Gather_1077" [id=1179, type=Gather]; +"1180 Unsqueeze_1078" [id=1180, type=Unsqueeze]; +"1181 Concat_1079" [id=1181, type=Concat]; +"1182 Reshape_1080" [id=1182, type=Reshape]; +"1183 QuantizeLinear_h.4.mlp.c_proj.weight_1" [id=1183, type=QuantizeLinear]; +"1184 DequantizeLinear_h.4.mlp.c_proj.weight_1" [id=1184, type=DequantizeLinear]; +"1185 Gemm_1081" [id=1185, type=Gemm]; +"1186 Unsqueeze_1082" [id=1186, type=Unsqueeze]; +"1187 Unsqueeze_1083" [id=1187, type=Unsqueeze]; +"1188 Concat_1084" [id=1188, type=Concat]; +"1189 Reshape_1085" [id=1189, type=Reshape]; +"1190 Add_1086" [id=1190, type=Add]; +"1191 ReduceMean_1087" [id=1191, type=ReduceMean]; +"1192 Sub_1088" [id=1192, type=Sub]; +"1193 Constant_1089" [id=1193, type=Constant]; +"1194 Pow_1090" [id=1194, type=Pow]; +"1195 ReduceMean_1091" [id=1195, type=ReduceMean]; +"1196 Constant_1092" [id=1196, type=Constant]; +"1197 Add_1093" [id=1197, type=Add]; +"1198 Sqrt_1094" [id=1198, type=Sqrt]; +"1199 Div_1095" [id=1199, type=Div]; +"1200 Mul_1096" [id=1200, type=Mul]; +"1201 Add_1097" [id=1201, type=Add]; +"1202 QuantizeLinear_1416_1" [id=1202, type=QuantizeLinear]; +"1203 DequantizeLinear_1416_1" [id=1203, type=DequantizeLinear]; +"1204 Shape_1098" [id=1204, type=Shape]; +"1205 Constant_1099" [id=1205, type=Constant]; +"1206 Gather_1100" [id=1206, type=Gather]; +"1207 Shape_1101" [id=1207, type=Shape]; +"1208 Constant_1102" [id=1208, type=Constant]; +"1209 Gather_1103" [id=1209, type=Gather]; +"1210 Shape_1104" [id=1210, type=Shape]; +"1211 Constant_1105" [id=1211, type=Constant]; +"1212 Gather_1106" [id=1212, type=Gather]; +"1213 Unsqueeze_1107" [id=1213, type=Unsqueeze]; +"1214 Concat_1108" [id=1214, type=Concat]; +"1215 Reshape_1109" [id=1215, type=Reshape]; +"1216 QuantizeLinear_h.5.attn.c_attn.weight_1" [id=1216, type=QuantizeLinear]; +"1217 DequantizeLinear_h.5.attn.c_attn.weight_1" [id=1217, type=DequantizeLinear]; +"1218 Gemm_1110" [id=1218, type=Gemm]; +"1219 Unsqueeze_1111" [id=1219, type=Unsqueeze]; +"1220 Unsqueeze_1112" [id=1220, type=Unsqueeze]; +"1221 Concat_1113" [id=1221, type=Concat]; +"1222 Reshape_1114" [id=1222, type=Reshape]; +"1223 Split_1115" [id=1223, type=Split]; +"1224 QuantizeLinear_query.11_1" [id=1224, type=QuantizeLinear]; +"1225 DequantizeLinear_query.11_1" [id=1225, type=DequantizeLinear]; +"1226 Shape_1116" [id=1226, type=Shape]; +"1227 Constant_1117" [id=1227, type=Constant]; +"1228 Gather_1118" [id=1228, type=Gather]; +"1229 Shape_1119" [id=1229, type=Shape]; +"1230 Constant_1120" [id=1230, type=Constant]; +"1231 Gather_1121" [id=1231, type=Gather]; +"1232 Shape_1122" [id=1232, type=Shape]; +"1233 Constant_1123" [id=1233, type=Constant]; +"1234 Gather_1124" [id=1234, type=Gather]; +"1235 Constant_1125" [id=1235, type=Constant]; +"1236 Div_1126" [id=1236, type=Div]; +"1237 Cast_1127" [id=1237, type=Cast]; +"1238 Cast_1128" [id=1238, type=Cast]; +"1239 Unsqueeze_1129" [id=1239, type=Unsqueeze]; +"1240 Unsqueeze_1130" [id=1240, type=Unsqueeze]; +"1241 Unsqueeze_1131" [id=1241, type=Unsqueeze]; +"1242 Concat_1132" [id=1242, 
type=Concat]; +"1243 Reshape_1133" [id=1243, type=Reshape]; +"1244 Transpose_1134" [id=1244, type=Transpose]; +"1245 Shape_1135" [id=1245, type=Shape]; +"1246 Constant_1136" [id=1246, type=Constant]; +"1247 Gather_1137" [id=1247, type=Gather]; +"1248 Shape_1138" [id=1248, type=Shape]; +"1249 Constant_1139" [id=1249, type=Constant]; +"1250 Gather_1140" [id=1250, type=Gather]; +"1251 Shape_1141" [id=1251, type=Shape]; +"1252 Constant_1142" [id=1252, type=Constant]; +"1253 Gather_1143" [id=1253, type=Gather]; +"1254 Constant_1144" [id=1254, type=Constant]; +"1255 Div_1145" [id=1255, type=Div]; +"1256 Cast_1146" [id=1256, type=Cast]; +"1257 Cast_1147" [id=1257, type=Cast]; +"1258 Unsqueeze_1148" [id=1258, type=Unsqueeze]; +"1259 Unsqueeze_1149" [id=1259, type=Unsqueeze]; +"1260 Unsqueeze_1150" [id=1260, type=Unsqueeze]; +"1261 Concat_1151" [id=1261, type=Concat]; +"1262 Reshape_1152" [id=1262, type=Reshape]; +"1263 QuantizeLinear_1481_1" [id=1263, type=QuantizeLinear]; +"1264 DequantizeLinear_1481_1" [id=1264, type=DequantizeLinear]; +"1265 Transpose_1153" [id=1265, type=Transpose]; +"1266 Shape_1154" [id=1266, type=Shape]; +"1267 Constant_1155" [id=1267, type=Constant]; +"1268 Gather_1156" [id=1268, type=Gather]; +"1269 Shape_1157" [id=1269, type=Shape]; +"1270 Constant_1158" [id=1270, type=Constant]; +"1271 Gather_1159" [id=1271, type=Gather]; +"1272 Shape_1160" [id=1272, type=Shape]; +"1273 Constant_1161" [id=1273, type=Constant]; +"1274 Gather_1162" [id=1274, type=Gather]; +"1275 Constant_1163" [id=1275, type=Constant]; +"1276 Div_1164" [id=1276, type=Div]; +"1277 Cast_1165" [id=1277, type=Cast]; +"1278 Cast_1166" [id=1278, type=Cast]; +"1279 Unsqueeze_1167" [id=1279, type=Unsqueeze]; +"1280 Unsqueeze_1168" [id=1280, type=Unsqueeze]; +"1281 Unsqueeze_1169" [id=1281, type=Unsqueeze]; +"1282 Concat_1170" [id=1282, type=Concat]; +"1283 Reshape_1171" [id=1283, type=Reshape]; +"1284 Transpose_1172" [id=1284, type=Transpose]; +"1285 Transpose_1173" [id=1285, type=Transpose]; +"1286 Unsqueeze_1174" [id=1286, type=Unsqueeze]; +"1287 Unsqueeze_1175" [id=1287, type=Unsqueeze]; +"1288 Concat_1176" [id=1288, type=Concat]; +"1289 MatMul_1177" [id=1289, type=MatMul]; +"1290 Constant_1178" [id=1290, type=Constant]; +"1291 Div_1179" [id=1291, type=Div]; +"1292 Shape_1180" [id=1292, type=Shape]; +"1293 Constant_1181" [id=1293, type=Constant]; +"1294 Gather_1182" [id=1294, type=Gather]; +"1295 Shape_1183" [id=1295, type=Shape]; +"1296 Constant_1184" [id=1296, type=Constant]; +"1297 Gather_1185" [id=1297, type=Gather]; +"1298 Sub_1186" [id=1298, type=Sub]; +"1299 Unsqueeze_1187" [id=1299, type=Unsqueeze]; +"1300 Unsqueeze_1188" [id=1300, type=Unsqueeze]; +"1301 Constant_1189" [id=1301, type=Constant]; +"1302 Slice_1190" [id=1302, type=Slice]; +"1303 Unsqueeze_1191" [id=1303, type=Unsqueeze]; +"1304 Constant_1192" [id=1304, type=Constant]; +"1305 Slice_1193" [id=1305, type=Slice]; +"1306 Mul_1194" [id=1306, type=Mul]; +"1307 Constant_1195" [id=1307, type=Constant]; +"1308 Sub_1196" [id=1308, type=Sub]; +"1309 Constant_1197" [id=1309, type=Constant]; +"1310 Mul_1198" [id=1310, type=Mul]; +"1311 Sub_1199" [id=1311, type=Sub]; +"1312 Softmax_1200" [id=1312, type=Softmax]; +"1313 MatMul_1201" [id=1313, type=MatMul]; +"1314 QuantizeLinear_1538_1" [id=1314, type=QuantizeLinear]; +"1315 DequantizeLinear_1538_1" [id=1315, type=DequantizeLinear]; +"1316 Transpose_1202" [id=1316, type=Transpose]; +"1317 Shape_1203" [id=1317, type=Shape]; +"1318 Constant_1204" [id=1318, type=Constant]; +"1319 Gather_1205" [id=1319, 
type=Gather]; +"1320 Shape_1206" [id=1320, type=Shape]; +"1321 Constant_1207" [id=1321, type=Constant]; +"1322 Gather_1208" [id=1322, type=Gather]; +"1323 Shape_1209" [id=1323, type=Shape]; +"1324 Constant_1210" [id=1324, type=Constant]; +"1325 Gather_1211" [id=1325, type=Gather]; +"1326 Shape_1212" [id=1326, type=Shape]; +"1327 Constant_1213" [id=1327, type=Constant]; +"1328 Gather_1214" [id=1328, type=Gather]; +"1329 Mul_1215" [id=1329, type=Mul]; +"1330 Unsqueeze_1216" [id=1330, type=Unsqueeze]; +"1331 Unsqueeze_1217" [id=1331, type=Unsqueeze]; +"1332 Unsqueeze_1218" [id=1332, type=Unsqueeze]; +"1333 Concat_1219" [id=1333, type=Concat]; +"1334 Reshape_1220" [id=1334, type=Reshape]; +"1335 Shape_1221" [id=1335, type=Shape]; +"1336 Constant_1222" [id=1336, type=Constant]; +"1337 Gather_1223" [id=1337, type=Gather]; +"1338 Shape_1224" [id=1338, type=Shape]; +"1339 Constant_1225" [id=1339, type=Constant]; +"1340 Gather_1226" [id=1340, type=Gather]; +"1341 Shape_1227" [id=1341, type=Shape]; +"1342 Constant_1228" [id=1342, type=Constant]; +"1343 Gather_1229" [id=1343, type=Gather]; +"1344 Unsqueeze_1230" [id=1344, type=Unsqueeze]; +"1345 Concat_1231" [id=1345, type=Concat]; +"1346 Reshape_1232" [id=1346, type=Reshape]; +"1347 QuantizeLinear_h.5.attn.c_proj.weight_1" [id=1347, type=QuantizeLinear]; +"1348 DequantizeLinear_h.5.attn.c_proj.weight_1" [id=1348, type=DequantizeLinear]; +"1349 Gemm_1233" [id=1349, type=Gemm]; +"1350 Unsqueeze_1234" [id=1350, type=Unsqueeze]; +"1351 Unsqueeze_1235" [id=1351, type=Unsqueeze]; +"1352 Concat_1236" [id=1352, type=Concat]; +"1353 Reshape_1237" [id=1353, type=Reshape]; +"1354 Add_1238" [id=1354, type=Add]; +"1355 ReduceMean_1239" [id=1355, type=ReduceMean]; +"1356 Sub_1240" [id=1356, type=Sub]; +"1357 Constant_1241" [id=1357, type=Constant]; +"1358 Pow_1242" [id=1358, type=Pow]; +"1359 ReduceMean_1243" [id=1359, type=ReduceMean]; +"1360 Constant_1244" [id=1360, type=Constant]; +"1361 Add_1245" [id=1361, type=Add]; +"1362 Sqrt_1246" [id=1362, type=Sqrt]; +"1363 Div_1247" [id=1363, type=Div]; +"1364 Mul_1248" [id=1364, type=Mul]; +"1365 Add_1249" [id=1365, type=Add]; +"1366 QuantizeLinear_1590_1" [id=1366, type=QuantizeLinear]; +"1367 DequantizeLinear_1590_1" [id=1367, type=DequantizeLinear]; +"1368 Shape_1250" [id=1368, type=Shape]; +"1369 Constant_1251" [id=1369, type=Constant]; +"1370 Gather_1252" [id=1370, type=Gather]; +"1371 Shape_1253" [id=1371, type=Shape]; +"1372 Constant_1254" [id=1372, type=Constant]; +"1373 Gather_1255" [id=1373, type=Gather]; +"1374 Shape_1256" [id=1374, type=Shape]; +"1375 Constant_1257" [id=1375, type=Constant]; +"1376 Gather_1258" [id=1376, type=Gather]; +"1377 Unsqueeze_1259" [id=1377, type=Unsqueeze]; +"1378 Concat_1260" [id=1378, type=Concat]; +"1379 Reshape_1261" [id=1379, type=Reshape]; +"1380 QuantizeLinear_h.5.mlp.c_fc.weight_1" [id=1380, type=QuantizeLinear]; +"1381 DequantizeLinear_h.5.mlp.c_fc.weight_1" [id=1381, type=DequantizeLinear]; +"1382 Gemm_1262" [id=1382, type=Gemm]; +"1383 Unsqueeze_1263" [id=1383, type=Unsqueeze]; +"1384 Unsqueeze_1264" [id=1384, type=Unsqueeze]; +"1385 Concat_1265" [id=1385, type=Concat]; +"1386 Reshape_1266" [id=1386, type=Reshape]; +"1387 Constant_1267" [id=1387, type=Constant]; +"1388 Mul_1268" [id=1388, type=Mul]; +"1389 Constant_1269" [id=1389, type=Constant]; +"1390 Pow_1270" [id=1390, type=Pow]; +"1391 Constant_1271" [id=1391, type=Constant]; +"1392 Mul_1272" [id=1392, type=Mul]; +"1393 Add_1273" [id=1393, type=Add]; +"1394 Constant_1274" [id=1394, type=Constant]; +"1395 Mul_1275" 
[id=1395, type=Mul]; +"1396 Tanh_1276" [id=1396, type=Tanh]; +"1397 Constant_1277" [id=1397, type=Constant]; +"1398 Add_1278" [id=1398, type=Add]; +"1399 Mul_1279" [id=1399, type=Mul]; +"1400 QuantizeLinear_1624_1" [id=1400, type=QuantizeLinear]; +"1401 DequantizeLinear_1624_1" [id=1401, type=DequantizeLinear]; +"1402 Shape_1280" [id=1402, type=Shape]; +"1403 Constant_1281" [id=1403, type=Constant]; +"1404 Gather_1282" [id=1404, type=Gather]; +"1405 Shape_1283" [id=1405, type=Shape]; +"1406 Constant_1284" [id=1406, type=Constant]; +"1407 Gather_1285" [id=1407, type=Gather]; +"1408 Shape_1286" [id=1408, type=Shape]; +"1409 Constant_1287" [id=1409, type=Constant]; +"1410 Gather_1288" [id=1410, type=Gather]; +"1411 Unsqueeze_1289" [id=1411, type=Unsqueeze]; +"1412 Concat_1290" [id=1412, type=Concat]; +"1413 Reshape_1291" [id=1413, type=Reshape]; +"1414 QuantizeLinear_h.5.mlp.c_proj.weight_1" [id=1414, type=QuantizeLinear]; +"1415 DequantizeLinear_h.5.mlp.c_proj.weight_1" [id=1415, type=DequantizeLinear]; +"1416 Gemm_1292" [id=1416, type=Gemm]; +"1417 Unsqueeze_1293" [id=1417, type=Unsqueeze]; +"1418 Unsqueeze_1294" [id=1418, type=Unsqueeze]; +"1419 Concat_1295" [id=1419, type=Concat]; +"1420 Reshape_1296" [id=1420, type=Reshape]; +"1421 Add_1297" [id=1421, type=Add]; +"1422 ReduceMean_1298" [id=1422, type=ReduceMean]; +"1423 Sub_1299" [id=1423, type=Sub]; +"1424 Constant_1300" [id=1424, type=Constant]; +"1425 Pow_1301" [id=1425, type=Pow]; +"1426 ReduceMean_1302" [id=1426, type=ReduceMean]; +"1427 Constant_1303" [id=1427, type=Constant]; +"1428 Add_1304" [id=1428, type=Add]; +"1429 Sqrt_1305" [id=1429, type=Sqrt]; +"1430 Div_1306" [id=1430, type=Div]; +"1431 Mul_1307" [id=1431, type=Mul]; +"1432 Add_1308" [id=1432, type=Add]; +"1433 QuantizeLinear_1657_1" [id=1433, type=QuantizeLinear]; +"1434 DequantizeLinear_1657_1" [id=1434, type=DequantizeLinear]; +"1435 Shape_1309" [id=1435, type=Shape]; +"1436 Constant_1310" [id=1436, type=Constant]; +"1437 Gather_1311" [id=1437, type=Gather]; +"1438 Shape_1312" [id=1438, type=Shape]; +"1439 Constant_1313" [id=1439, type=Constant]; +"1440 Gather_1314" [id=1440, type=Gather]; +"1441 Shape_1315" [id=1441, type=Shape]; +"1442 Constant_1316" [id=1442, type=Constant]; +"1443 Gather_1317" [id=1443, type=Gather]; +"1444 Unsqueeze_1318" [id=1444, type=Unsqueeze]; +"1445 Concat_1319" [id=1445, type=Concat]; +"1446 Reshape_1320" [id=1446, type=Reshape]; +"1447 QuantizeLinear_h.6.attn.c_attn.weight_1" [id=1447, type=QuantizeLinear]; +"1448 DequantizeLinear_h.6.attn.c_attn.weight_1" [id=1448, type=DequantizeLinear]; +"1449 Gemm_1321" [id=1449, type=Gemm]; +"1450 Unsqueeze_1322" [id=1450, type=Unsqueeze]; +"1451 Unsqueeze_1323" [id=1451, type=Unsqueeze]; +"1452 Concat_1324" [id=1452, type=Concat]; +"1453 Reshape_1325" [id=1453, type=Reshape]; +"1454 Split_1326" [id=1454, type=Split]; +"1455 QuantizeLinear_query.13_1" [id=1455, type=QuantizeLinear]; +"1456 DequantizeLinear_query.13_1" [id=1456, type=DequantizeLinear]; +"1457 Shape_1327" [id=1457, type=Shape]; +"1458 Constant_1328" [id=1458, type=Constant]; +"1459 Gather_1329" [id=1459, type=Gather]; +"1460 Shape_1330" [id=1460, type=Shape]; +"1461 Constant_1331" [id=1461, type=Constant]; +"1462 Gather_1332" [id=1462, type=Gather]; +"1463 Shape_1333" [id=1463, type=Shape]; +"1464 Constant_1334" [id=1464, type=Constant]; +"1465 Gather_1335" [id=1465, type=Gather]; +"1466 Constant_1336" [id=1466, type=Constant]; +"1467 Div_1337" [id=1467, type=Div]; +"1468 Cast_1338" [id=1468, type=Cast]; +"1469 Cast_1339" [id=1469, 
type=Cast]; +"1470 Unsqueeze_1340" [id=1470, type=Unsqueeze]; +"1471 Unsqueeze_1341" [id=1471, type=Unsqueeze]; +"1472 Unsqueeze_1342" [id=1472, type=Unsqueeze]; +"1473 Concat_1343" [id=1473, type=Concat]; +"1474 Reshape_1344" [id=1474, type=Reshape]; +"1475 Transpose_1345" [id=1475, type=Transpose]; +"1476 Shape_1346" [id=1476, type=Shape]; +"1477 Constant_1347" [id=1477, type=Constant]; +"1478 Gather_1348" [id=1478, type=Gather]; +"1479 Shape_1349" [id=1479, type=Shape]; +"1480 Constant_1350" [id=1480, type=Constant]; +"1481 Gather_1351" [id=1481, type=Gather]; +"1482 Shape_1352" [id=1482, type=Shape]; +"1483 Constant_1353" [id=1483, type=Constant]; +"1484 Gather_1354" [id=1484, type=Gather]; +"1485 Constant_1355" [id=1485, type=Constant]; +"1486 Div_1356" [id=1486, type=Div]; +"1487 Cast_1357" [id=1487, type=Cast]; +"1488 Cast_1358" [id=1488, type=Cast]; +"1489 Unsqueeze_1359" [id=1489, type=Unsqueeze]; +"1490 Unsqueeze_1360" [id=1490, type=Unsqueeze]; +"1491 Unsqueeze_1361" [id=1491, type=Unsqueeze]; +"1492 Concat_1362" [id=1492, type=Concat]; +"1493 Reshape_1363" [id=1493, type=Reshape]; +"1494 QuantizeLinear_1722_1" [id=1494, type=QuantizeLinear]; +"1495 DequantizeLinear_1722_1" [id=1495, type=DequantizeLinear]; +"1496 Transpose_1364" [id=1496, type=Transpose]; +"1497 Shape_1365" [id=1497, type=Shape]; +"1498 Constant_1366" [id=1498, type=Constant]; +"1499 Gather_1367" [id=1499, type=Gather]; +"1500 Shape_1368" [id=1500, type=Shape]; +"1501 Constant_1369" [id=1501, type=Constant]; +"1502 Gather_1370" [id=1502, type=Gather]; +"1503 Shape_1371" [id=1503, type=Shape]; +"1504 Constant_1372" [id=1504, type=Constant]; +"1505 Gather_1373" [id=1505, type=Gather]; +"1506 Constant_1374" [id=1506, type=Constant]; +"1507 Div_1375" [id=1507, type=Div]; +"1508 Cast_1376" [id=1508, type=Cast]; +"1509 Cast_1377" [id=1509, type=Cast]; +"1510 Unsqueeze_1378" [id=1510, type=Unsqueeze]; +"1511 Unsqueeze_1379" [id=1511, type=Unsqueeze]; +"1512 Unsqueeze_1380" [id=1512, type=Unsqueeze]; +"1513 Concat_1381" [id=1513, type=Concat]; +"1514 Reshape_1382" [id=1514, type=Reshape]; +"1515 Transpose_1383" [id=1515, type=Transpose]; +"1516 Transpose_1384" [id=1516, type=Transpose]; +"1517 Unsqueeze_1385" [id=1517, type=Unsqueeze]; +"1518 Unsqueeze_1386" [id=1518, type=Unsqueeze]; +"1519 Concat_1387" [id=1519, type=Concat]; +"1520 MatMul_1388" [id=1520, type=MatMul]; +"1521 Constant_1389" [id=1521, type=Constant]; +"1522 Div_1390" [id=1522, type=Div]; +"1523 Shape_1391" [id=1523, type=Shape]; +"1524 Constant_1392" [id=1524, type=Constant]; +"1525 Gather_1393" [id=1525, type=Gather]; +"1526 Shape_1394" [id=1526, type=Shape]; +"1527 Constant_1395" [id=1527, type=Constant]; +"1528 Gather_1396" [id=1528, type=Gather]; +"1529 Sub_1397" [id=1529, type=Sub]; +"1530 Unsqueeze_1398" [id=1530, type=Unsqueeze]; +"1531 Unsqueeze_1399" [id=1531, type=Unsqueeze]; +"1532 Constant_1400" [id=1532, type=Constant]; +"1533 Slice_1401" [id=1533, type=Slice]; +"1534 Unsqueeze_1402" [id=1534, type=Unsqueeze]; +"1535 Constant_1403" [id=1535, type=Constant]; +"1536 Slice_1404" [id=1536, type=Slice]; +"1537 Mul_1405" [id=1537, type=Mul]; +"1538 Constant_1406" [id=1538, type=Constant]; +"1539 Sub_1407" [id=1539, type=Sub]; +"1540 Constant_1408" [id=1540, type=Constant]; +"1541 Mul_1409" [id=1541, type=Mul]; +"1542 Sub_1410" [id=1542, type=Sub]; +"1543 Softmax_1411" [id=1543, type=Softmax]; +"1544 MatMul_1412" [id=1544, type=MatMul]; +"1545 QuantizeLinear_1779_1" [id=1545, type=QuantizeLinear]; +"1546 DequantizeLinear_1779_1" [id=1546, 
type=DequantizeLinear]; +"1547 Transpose_1413" [id=1547, type=Transpose]; +"1548 Shape_1414" [id=1548, type=Shape]; +"1549 Constant_1415" [id=1549, type=Constant]; +"1550 Gather_1416" [id=1550, type=Gather]; +"1551 Shape_1417" [id=1551, type=Shape]; +"1552 Constant_1418" [id=1552, type=Constant]; +"1553 Gather_1419" [id=1553, type=Gather]; +"1554 Shape_1420" [id=1554, type=Shape]; +"1555 Constant_1421" [id=1555, type=Constant]; +"1556 Gather_1422" [id=1556, type=Gather]; +"1557 Shape_1423" [id=1557, type=Shape]; +"1558 Constant_1424" [id=1558, type=Constant]; +"1559 Gather_1425" [id=1559, type=Gather]; +"1560 Mul_1426" [id=1560, type=Mul]; +"1561 Unsqueeze_1427" [id=1561, type=Unsqueeze]; +"1562 Unsqueeze_1428" [id=1562, type=Unsqueeze]; +"1563 Unsqueeze_1429" [id=1563, type=Unsqueeze]; +"1564 Concat_1430" [id=1564, type=Concat]; +"1565 Reshape_1431" [id=1565, type=Reshape]; +"1566 Shape_1432" [id=1566, type=Shape]; +"1567 Constant_1433" [id=1567, type=Constant]; +"1568 Gather_1434" [id=1568, type=Gather]; +"1569 Shape_1435" [id=1569, type=Shape]; +"1570 Constant_1436" [id=1570, type=Constant]; +"1571 Gather_1437" [id=1571, type=Gather]; +"1572 Shape_1438" [id=1572, type=Shape]; +"1573 Constant_1439" [id=1573, type=Constant]; +"1574 Gather_1440" [id=1574, type=Gather]; +"1575 Unsqueeze_1441" [id=1575, type=Unsqueeze]; +"1576 Concat_1442" [id=1576, type=Concat]; +"1577 Reshape_1443" [id=1577, type=Reshape]; +"1578 QuantizeLinear_h.6.attn.c_proj.weight_1" [id=1578, type=QuantizeLinear]; +"1579 DequantizeLinear_h.6.attn.c_proj.weight_1" [id=1579, type=DequantizeLinear]; +"1580 Gemm_1444" [id=1580, type=Gemm]; +"1581 Unsqueeze_1445" [id=1581, type=Unsqueeze]; +"1582 Unsqueeze_1446" [id=1582, type=Unsqueeze]; +"1583 Concat_1447" [id=1583, type=Concat]; +"1584 Reshape_1448" [id=1584, type=Reshape]; +"1585 Add_1449" [id=1585, type=Add]; +"1586 ReduceMean_1450" [id=1586, type=ReduceMean]; +"1587 Sub_1451" [id=1587, type=Sub]; +"1588 Constant_1452" [id=1588, type=Constant]; +"1589 Pow_1453" [id=1589, type=Pow]; +"1590 ReduceMean_1454" [id=1590, type=ReduceMean]; +"1591 Constant_1455" [id=1591, type=Constant]; +"1592 Add_1456" [id=1592, type=Add]; +"1593 Sqrt_1457" [id=1593, type=Sqrt]; +"1594 Div_1458" [id=1594, type=Div]; +"1595 Mul_1459" [id=1595, type=Mul]; +"1596 Add_1460" [id=1596, type=Add]; +"1597 QuantizeLinear_1831_1" [id=1597, type=QuantizeLinear]; +"1598 DequantizeLinear_1831_1" [id=1598, type=DequantizeLinear]; +"1599 Shape_1461" [id=1599, type=Shape]; +"1600 Constant_1462" [id=1600, type=Constant]; +"1601 Gather_1463" [id=1601, type=Gather]; +"1602 Shape_1464" [id=1602, type=Shape]; +"1603 Constant_1465" [id=1603, type=Constant]; +"1604 Gather_1466" [id=1604, type=Gather]; +"1605 Shape_1467" [id=1605, type=Shape]; +"1606 Constant_1468" [id=1606, type=Constant]; +"1607 Gather_1469" [id=1607, type=Gather]; +"1608 Unsqueeze_1470" [id=1608, type=Unsqueeze]; +"1609 Concat_1471" [id=1609, type=Concat]; +"1610 Reshape_1472" [id=1610, type=Reshape]; +"1611 QuantizeLinear_h.6.mlp.c_fc.weight_1" [id=1611, type=QuantizeLinear]; +"1612 DequantizeLinear_h.6.mlp.c_fc.weight_1" [id=1612, type=DequantizeLinear]; +"1613 Gemm_1473" [id=1613, type=Gemm]; +"1614 Unsqueeze_1474" [id=1614, type=Unsqueeze]; +"1615 Unsqueeze_1475" [id=1615, type=Unsqueeze]; +"1616 Concat_1476" [id=1616, type=Concat]; +"1617 Reshape_1477" [id=1617, type=Reshape]; +"1618 Constant_1478" [id=1618, type=Constant]; +"1619 Mul_1479" [id=1619, type=Mul]; +"1620 Constant_1480" [id=1620, type=Constant]; +"1621 Pow_1481" [id=1621, 
type=Pow]; +"1622 Constant_1482" [id=1622, type=Constant]; +"1623 Mul_1483" [id=1623, type=Mul]; +"1624 Add_1484" [id=1624, type=Add]; +"1625 Constant_1485" [id=1625, type=Constant]; +"1626 Mul_1486" [id=1626, type=Mul]; +"1627 Tanh_1487" [id=1627, type=Tanh]; +"1628 Constant_1488" [id=1628, type=Constant]; +"1629 Add_1489" [id=1629, type=Add]; +"1630 Mul_1490" [id=1630, type=Mul]; +"1631 QuantizeLinear_1865_1" [id=1631, type=QuantizeLinear]; +"1632 DequantizeLinear_1865_1" [id=1632, type=DequantizeLinear]; +"1633 Shape_1491" [id=1633, type=Shape]; +"1634 Constant_1492" [id=1634, type=Constant]; +"1635 Gather_1493" [id=1635, type=Gather]; +"1636 Shape_1494" [id=1636, type=Shape]; +"1637 Constant_1495" [id=1637, type=Constant]; +"1638 Gather_1496" [id=1638, type=Gather]; +"1639 Shape_1497" [id=1639, type=Shape]; +"1640 Constant_1498" [id=1640, type=Constant]; +"1641 Gather_1499" [id=1641, type=Gather]; +"1642 Unsqueeze_1500" [id=1642, type=Unsqueeze]; +"1643 Concat_1501" [id=1643, type=Concat]; +"1644 Reshape_1502" [id=1644, type=Reshape]; +"1645 QuantizeLinear_h.6.mlp.c_proj.weight_1" [id=1645, type=QuantizeLinear]; +"1646 DequantizeLinear_h.6.mlp.c_proj.weight_1" [id=1646, type=DequantizeLinear]; +"1647 Gemm_1503" [id=1647, type=Gemm]; +"1648 Unsqueeze_1504" [id=1648, type=Unsqueeze]; +"1649 Unsqueeze_1505" [id=1649, type=Unsqueeze]; +"1650 Concat_1506" [id=1650, type=Concat]; +"1651 Reshape_1507" [id=1651, type=Reshape]; +"1652 Add_1508" [id=1652, type=Add]; +"1653 ReduceMean_1509" [id=1653, type=ReduceMean]; +"1654 Sub_1510" [id=1654, type=Sub]; +"1655 Constant_1511" [id=1655, type=Constant]; +"1656 Pow_1512" [id=1656, type=Pow]; +"1657 ReduceMean_1513" [id=1657, type=ReduceMean]; +"1658 Constant_1514" [id=1658, type=Constant]; +"1659 Add_1515" [id=1659, type=Add]; +"1660 Sqrt_1516" [id=1660, type=Sqrt]; +"1661 Div_1517" [id=1661, type=Div]; +"1662 Mul_1518" [id=1662, type=Mul]; +"1663 Add_1519" [id=1663, type=Add]; +"1664 QuantizeLinear_1898_1" [id=1664, type=QuantizeLinear]; +"1665 DequantizeLinear_1898_1" [id=1665, type=DequantizeLinear]; +"1666 Shape_1520" [id=1666, type=Shape]; +"1667 Constant_1521" [id=1667, type=Constant]; +"1668 Gather_1522" [id=1668, type=Gather]; +"1669 Shape_1523" [id=1669, type=Shape]; +"1670 Constant_1524" [id=1670, type=Constant]; +"1671 Gather_1525" [id=1671, type=Gather]; +"1672 Shape_1526" [id=1672, type=Shape]; +"1673 Constant_1527" [id=1673, type=Constant]; +"1674 Gather_1528" [id=1674, type=Gather]; +"1675 Unsqueeze_1529" [id=1675, type=Unsqueeze]; +"1676 Concat_1530" [id=1676, type=Concat]; +"1677 Reshape_1531" [id=1677, type=Reshape]; +"1678 QuantizeLinear_h.7.attn.c_attn.weight_1" [id=1678, type=QuantizeLinear]; +"1679 DequantizeLinear_h.7.attn.c_attn.weight_1" [id=1679, type=DequantizeLinear]; +"1680 Gemm_1532" [id=1680, type=Gemm]; +"1681 Unsqueeze_1533" [id=1681, type=Unsqueeze]; +"1682 Unsqueeze_1534" [id=1682, type=Unsqueeze]; +"1683 Concat_1535" [id=1683, type=Concat]; +"1684 Reshape_1536" [id=1684, type=Reshape]; +"1685 Split_1537" [id=1685, type=Split]; +"1686 QuantizeLinear_query.15_1" [id=1686, type=QuantizeLinear]; +"1687 DequantizeLinear_query.15_1" [id=1687, type=DequantizeLinear]; +"1688 Shape_1538" [id=1688, type=Shape]; +"1689 Constant_1539" [id=1689, type=Constant]; +"1690 Gather_1540" [id=1690, type=Gather]; +"1691 Shape_1541" [id=1691, type=Shape]; +"1692 Constant_1542" [id=1692, type=Constant]; +"1693 Gather_1543" [id=1693, type=Gather]; +"1694 Shape_1544" [id=1694, type=Shape]; +"1695 Constant_1545" [id=1695, type=Constant]; 
+"1696 Gather_1546" [id=1696, type=Gather]; +"1697 Constant_1547" [id=1697, type=Constant]; +"1698 Div_1548" [id=1698, type=Div]; +"1699 Cast_1549" [id=1699, type=Cast]; +"1700 Cast_1550" [id=1700, type=Cast]; +"1701 Unsqueeze_1551" [id=1701, type=Unsqueeze]; +"1702 Unsqueeze_1552" [id=1702, type=Unsqueeze]; +"1703 Unsqueeze_1553" [id=1703, type=Unsqueeze]; +"1704 Concat_1554" [id=1704, type=Concat]; +"1705 Reshape_1555" [id=1705, type=Reshape]; +"1706 Transpose_1556" [id=1706, type=Transpose]; +"1707 Shape_1557" [id=1707, type=Shape]; +"1708 Constant_1558" [id=1708, type=Constant]; +"1709 Gather_1559" [id=1709, type=Gather]; +"1710 Shape_1560" [id=1710, type=Shape]; +"1711 Constant_1561" [id=1711, type=Constant]; +"1712 Gather_1562" [id=1712, type=Gather]; +"1713 Shape_1563" [id=1713, type=Shape]; +"1714 Constant_1564" [id=1714, type=Constant]; +"1715 Gather_1565" [id=1715, type=Gather]; +"1716 Constant_1566" [id=1716, type=Constant]; +"1717 Div_1567" [id=1717, type=Div]; +"1718 Cast_1568" [id=1718, type=Cast]; +"1719 Cast_1569" [id=1719, type=Cast]; +"1720 Unsqueeze_1570" [id=1720, type=Unsqueeze]; +"1721 Unsqueeze_1571" [id=1721, type=Unsqueeze]; +"1722 Unsqueeze_1572" [id=1722, type=Unsqueeze]; +"1723 Concat_1573" [id=1723, type=Concat]; +"1724 Reshape_1574" [id=1724, type=Reshape]; +"1725 QuantizeLinear_1963_1" [id=1725, type=QuantizeLinear]; +"1726 DequantizeLinear_1963_1" [id=1726, type=DequantizeLinear]; +"1727 Transpose_1575" [id=1727, type=Transpose]; +"1728 Shape_1576" [id=1728, type=Shape]; +"1729 Constant_1577" [id=1729, type=Constant]; +"1730 Gather_1578" [id=1730, type=Gather]; +"1731 Shape_1579" [id=1731, type=Shape]; +"1732 Constant_1580" [id=1732, type=Constant]; +"1733 Gather_1581" [id=1733, type=Gather]; +"1734 Shape_1582" [id=1734, type=Shape]; +"1735 Constant_1583" [id=1735, type=Constant]; +"1736 Gather_1584" [id=1736, type=Gather]; +"1737 Constant_1585" [id=1737, type=Constant]; +"1738 Div_1586" [id=1738, type=Div]; +"1739 Cast_1587" [id=1739, type=Cast]; +"1740 Cast_1588" [id=1740, type=Cast]; +"1741 Unsqueeze_1589" [id=1741, type=Unsqueeze]; +"1742 Unsqueeze_1590" [id=1742, type=Unsqueeze]; +"1743 Unsqueeze_1591" [id=1743, type=Unsqueeze]; +"1744 Concat_1592" [id=1744, type=Concat]; +"1745 Reshape_1593" [id=1745, type=Reshape]; +"1746 Transpose_1594" [id=1746, type=Transpose]; +"1747 Transpose_1595" [id=1747, type=Transpose]; +"1748 Unsqueeze_1596" [id=1748, type=Unsqueeze]; +"1749 Unsqueeze_1597" [id=1749, type=Unsqueeze]; +"1750 Concat_1598" [id=1750, type=Concat]; +"1751 MatMul_1599" [id=1751, type=MatMul]; +"1752 Constant_1600" [id=1752, type=Constant]; +"1753 Div_1601" [id=1753, type=Div]; +"1754 Shape_1602" [id=1754, type=Shape]; +"1755 Constant_1603" [id=1755, type=Constant]; +"1756 Gather_1604" [id=1756, type=Gather]; +"1757 Shape_1605" [id=1757, type=Shape]; +"1758 Constant_1606" [id=1758, type=Constant]; +"1759 Gather_1607" [id=1759, type=Gather]; +"1760 Sub_1608" [id=1760, type=Sub]; +"1761 Unsqueeze_1609" [id=1761, type=Unsqueeze]; +"1762 Unsqueeze_1610" [id=1762, type=Unsqueeze]; +"1763 Constant_1611" [id=1763, type=Constant]; +"1764 Slice_1612" [id=1764, type=Slice]; +"1765 Unsqueeze_1613" [id=1765, type=Unsqueeze]; +"1766 Constant_1614" [id=1766, type=Constant]; +"1767 Slice_1615" [id=1767, type=Slice]; +"1768 Mul_1616" [id=1768, type=Mul]; +"1769 Constant_1617" [id=1769, type=Constant]; +"1770 Sub_1618" [id=1770, type=Sub]; +"1771 Constant_1619" [id=1771, type=Constant]; +"1772 Mul_1620" [id=1772, type=Mul]; +"1773 Sub_1621" [id=1773, type=Sub]; 
+"1774 Softmax_1622" [id=1774, type=Softmax]; +"1775 MatMul_1623" [id=1775, type=MatMul]; +"1776 QuantizeLinear_2020_1" [id=1776, type=QuantizeLinear]; +"1777 DequantizeLinear_2020_1" [id=1777, type=DequantizeLinear]; +"1778 Transpose_1624" [id=1778, type=Transpose]; +"1779 Shape_1625" [id=1779, type=Shape]; +"1780 Constant_1626" [id=1780, type=Constant]; +"1781 Gather_1627" [id=1781, type=Gather]; +"1782 Shape_1628" [id=1782, type=Shape]; +"1783 Constant_1629" [id=1783, type=Constant]; +"1784 Gather_1630" [id=1784, type=Gather]; +"1785 Shape_1631" [id=1785, type=Shape]; +"1786 Constant_1632" [id=1786, type=Constant]; +"1787 Gather_1633" [id=1787, type=Gather]; +"1788 Shape_1634" [id=1788, type=Shape]; +"1789 Constant_1635" [id=1789, type=Constant]; +"1790 Gather_1636" [id=1790, type=Gather]; +"1791 Mul_1637" [id=1791, type=Mul]; +"1792 Unsqueeze_1638" [id=1792, type=Unsqueeze]; +"1793 Unsqueeze_1639" [id=1793, type=Unsqueeze]; +"1794 Unsqueeze_1640" [id=1794, type=Unsqueeze]; +"1795 Concat_1641" [id=1795, type=Concat]; +"1796 Reshape_1642" [id=1796, type=Reshape]; +"1797 Shape_1643" [id=1797, type=Shape]; +"1798 Constant_1644" [id=1798, type=Constant]; +"1799 Gather_1645" [id=1799, type=Gather]; +"1800 Shape_1646" [id=1800, type=Shape]; +"1801 Constant_1647" [id=1801, type=Constant]; +"1802 Gather_1648" [id=1802, type=Gather]; +"1803 Shape_1649" [id=1803, type=Shape]; +"1804 Constant_1650" [id=1804, type=Constant]; +"1805 Gather_1651" [id=1805, type=Gather]; +"1806 Unsqueeze_1652" [id=1806, type=Unsqueeze]; +"1807 Concat_1653" [id=1807, type=Concat]; +"1808 Reshape_1654" [id=1808, type=Reshape]; +"1809 QuantizeLinear_h.7.attn.c_proj.weight_1" [id=1809, type=QuantizeLinear]; +"1810 DequantizeLinear_h.7.attn.c_proj.weight_1" [id=1810, type=DequantizeLinear]; +"1811 Gemm_1655" [id=1811, type=Gemm]; +"1812 Unsqueeze_1656" [id=1812, type=Unsqueeze]; +"1813 Unsqueeze_1657" [id=1813, type=Unsqueeze]; +"1814 Concat_1658" [id=1814, type=Concat]; +"1815 Reshape_1659" [id=1815, type=Reshape]; +"1816 Add_1660" [id=1816, type=Add]; +"1817 ReduceMean_1661" [id=1817, type=ReduceMean]; +"1818 Sub_1662" [id=1818, type=Sub]; +"1819 Constant_1663" [id=1819, type=Constant]; +"1820 Pow_1664" [id=1820, type=Pow]; +"1821 ReduceMean_1665" [id=1821, type=ReduceMean]; +"1822 Constant_1666" [id=1822, type=Constant]; +"1823 Add_1667" [id=1823, type=Add]; +"1824 Sqrt_1668" [id=1824, type=Sqrt]; +"1825 Div_1669" [id=1825, type=Div]; +"1826 Mul_1670" [id=1826, type=Mul]; +"1827 Add_1671" [id=1827, type=Add]; +"1828 QuantizeLinear_2072_1" [id=1828, type=QuantizeLinear]; +"1829 DequantizeLinear_2072_1" [id=1829, type=DequantizeLinear]; +"1830 Shape_1672" [id=1830, type=Shape]; +"1831 Constant_1673" [id=1831, type=Constant]; +"1832 Gather_1674" [id=1832, type=Gather]; +"1833 Shape_1675" [id=1833, type=Shape]; +"1834 Constant_1676" [id=1834, type=Constant]; +"1835 Gather_1677" [id=1835, type=Gather]; +"1836 Shape_1678" [id=1836, type=Shape]; +"1837 Constant_1679" [id=1837, type=Constant]; +"1838 Gather_1680" [id=1838, type=Gather]; +"1839 Unsqueeze_1681" [id=1839, type=Unsqueeze]; +"1840 Concat_1682" [id=1840, type=Concat]; +"1841 Reshape_1683" [id=1841, type=Reshape]; +"1842 QuantizeLinear_h.7.mlp.c_fc.weight_1" [id=1842, type=QuantizeLinear]; +"1843 DequantizeLinear_h.7.mlp.c_fc.weight_1" [id=1843, type=DequantizeLinear]; +"1844 Gemm_1684" [id=1844, type=Gemm]; +"1845 Unsqueeze_1685" [id=1845, type=Unsqueeze]; +"1846 Unsqueeze_1686" [id=1846, type=Unsqueeze]; +"1847 Concat_1687" [id=1847, type=Concat]; +"1848 
Reshape_1688" [id=1848, type=Reshape]; +"1849 Constant_1689" [id=1849, type=Constant]; +"1850 Mul_1690" [id=1850, type=Mul]; +"1851 Constant_1691" [id=1851, type=Constant]; +"1852 Pow_1692" [id=1852, type=Pow]; +"1853 Constant_1693" [id=1853, type=Constant]; +"1854 Mul_1694" [id=1854, type=Mul]; +"1855 Add_1695" [id=1855, type=Add]; +"1856 Constant_1696" [id=1856, type=Constant]; +"1857 Mul_1697" [id=1857, type=Mul]; +"1858 Tanh_1698" [id=1858, type=Tanh]; +"1859 Constant_1699" [id=1859, type=Constant]; +"1860 Add_1700" [id=1860, type=Add]; +"1861 Mul_1701" [id=1861, type=Mul]; +"1862 QuantizeLinear_2106_1" [id=1862, type=QuantizeLinear]; +"1863 DequantizeLinear_2106_1" [id=1863, type=DequantizeLinear]; +"1864 Shape_1702" [id=1864, type=Shape]; +"1865 Constant_1703" [id=1865, type=Constant]; +"1866 Gather_1704" [id=1866, type=Gather]; +"1867 Shape_1705" [id=1867, type=Shape]; +"1868 Constant_1706" [id=1868, type=Constant]; +"1869 Gather_1707" [id=1869, type=Gather]; +"1870 Shape_1708" [id=1870, type=Shape]; +"1871 Constant_1709" [id=1871, type=Constant]; +"1872 Gather_1710" [id=1872, type=Gather]; +"1873 Unsqueeze_1711" [id=1873, type=Unsqueeze]; +"1874 Concat_1712" [id=1874, type=Concat]; +"1875 Reshape_1713" [id=1875, type=Reshape]; +"1876 QuantizeLinear_h.7.mlp.c_proj.weight_1" [id=1876, type=QuantizeLinear]; +"1877 DequantizeLinear_h.7.mlp.c_proj.weight_1" [id=1877, type=DequantizeLinear]; +"1878 Gemm_1714" [id=1878, type=Gemm]; +"1879 Unsqueeze_1715" [id=1879, type=Unsqueeze]; +"1880 Unsqueeze_1716" [id=1880, type=Unsqueeze]; +"1881 Concat_1717" [id=1881, type=Concat]; +"1882 Reshape_1718" [id=1882, type=Reshape]; +"1883 Add_1719" [id=1883, type=Add]; +"1884 ReduceMean_1720" [id=1884, type=ReduceMean]; +"1885 Sub_1721" [id=1885, type=Sub]; +"1886 Constant_1722" [id=1886, type=Constant]; +"1887 Pow_1723" [id=1887, type=Pow]; +"1888 ReduceMean_1724" [id=1888, type=ReduceMean]; +"1889 Constant_1725" [id=1889, type=Constant]; +"1890 Add_1726" [id=1890, type=Add]; +"1891 Sqrt_1727" [id=1891, type=Sqrt]; +"1892 Div_1728" [id=1892, type=Div]; +"1893 Mul_1729" [id=1893, type=Mul]; +"1894 Add_1730" [id=1894, type=Add]; +"1895 QuantizeLinear_2139_1" [id=1895, type=QuantizeLinear]; +"1896 DequantizeLinear_2139_1" [id=1896, type=DequantizeLinear]; +"1897 Shape_1731" [id=1897, type=Shape]; +"1898 Constant_1732" [id=1898, type=Constant]; +"1899 Gather_1733" [id=1899, type=Gather]; +"1900 Shape_1734" [id=1900, type=Shape]; +"1901 Constant_1735" [id=1901, type=Constant]; +"1902 Gather_1736" [id=1902, type=Gather]; +"1903 Shape_1737" [id=1903, type=Shape]; +"1904 Constant_1738" [id=1904, type=Constant]; +"1905 Gather_1739" [id=1905, type=Gather]; +"1906 Unsqueeze_1740" [id=1906, type=Unsqueeze]; +"1907 Concat_1741" [id=1907, type=Concat]; +"1908 Reshape_1742" [id=1908, type=Reshape]; +"1909 QuantizeLinear_h.8.attn.c_attn.weight_1" [id=1909, type=QuantizeLinear]; +"1910 DequantizeLinear_h.8.attn.c_attn.weight_1" [id=1910, type=DequantizeLinear]; +"1911 Gemm_1743" [id=1911, type=Gemm]; +"1912 Unsqueeze_1744" [id=1912, type=Unsqueeze]; +"1913 Unsqueeze_1745" [id=1913, type=Unsqueeze]; +"1914 Concat_1746" [id=1914, type=Concat]; +"1915 Reshape_1747" [id=1915, type=Reshape]; +"1916 Split_1748" [id=1916, type=Split]; +"1917 QuantizeLinear_query.17_1" [id=1917, type=QuantizeLinear]; +"1918 DequantizeLinear_query.17_1" [id=1918, type=DequantizeLinear]; +"1919 Shape_1749" [id=1919, type=Shape]; +"1920 Constant_1750" [id=1920, type=Constant]; +"1921 Gather_1751" [id=1921, type=Gather]; +"1922 Shape_1752" 
[id=1922, type=Shape]; +"1923 Constant_1753" [id=1923, type=Constant]; +"1924 Gather_1754" [id=1924, type=Gather]; +"1925 Shape_1755" [id=1925, type=Shape]; +"1926 Constant_1756" [id=1926, type=Constant]; +"1927 Gather_1757" [id=1927, type=Gather]; +"1928 Constant_1758" [id=1928, type=Constant]; +"1929 Div_1759" [id=1929, type=Div]; +"1930 Cast_1760" [id=1930, type=Cast]; +"1931 Cast_1761" [id=1931, type=Cast]; +"1932 Unsqueeze_1762" [id=1932, type=Unsqueeze]; +"1933 Unsqueeze_1763" [id=1933, type=Unsqueeze]; +"1934 Unsqueeze_1764" [id=1934, type=Unsqueeze]; +"1935 Concat_1765" [id=1935, type=Concat]; +"1936 Reshape_1766" [id=1936, type=Reshape]; +"1937 Transpose_1767" [id=1937, type=Transpose]; +"1938 Shape_1768" [id=1938, type=Shape]; +"1939 Constant_1769" [id=1939, type=Constant]; +"1940 Gather_1770" [id=1940, type=Gather]; +"1941 Shape_1771" [id=1941, type=Shape]; +"1942 Constant_1772" [id=1942, type=Constant]; +"1943 Gather_1773" [id=1943, type=Gather]; +"1944 Shape_1774" [id=1944, type=Shape]; +"1945 Constant_1775" [id=1945, type=Constant]; +"1946 Gather_1776" [id=1946, type=Gather]; +"1947 Constant_1777" [id=1947, type=Constant]; +"1948 Div_1778" [id=1948, type=Div]; +"1949 Cast_1779" [id=1949, type=Cast]; +"1950 Cast_1780" [id=1950, type=Cast]; +"1951 Unsqueeze_1781" [id=1951, type=Unsqueeze]; +"1952 Unsqueeze_1782" [id=1952, type=Unsqueeze]; +"1953 Unsqueeze_1783" [id=1953, type=Unsqueeze]; +"1954 Concat_1784" [id=1954, type=Concat]; +"1955 Reshape_1785" [id=1955, type=Reshape]; +"1956 QuantizeLinear_2204_1" [id=1956, type=QuantizeLinear]; +"1957 DequantizeLinear_2204_1" [id=1957, type=DequantizeLinear]; +"1958 Transpose_1786" [id=1958, type=Transpose]; +"1959 Shape_1787" [id=1959, type=Shape]; +"1960 Constant_1788" [id=1960, type=Constant]; +"1961 Gather_1789" [id=1961, type=Gather]; +"1962 Shape_1790" [id=1962, type=Shape]; +"1963 Constant_1791" [id=1963, type=Constant]; +"1964 Gather_1792" [id=1964, type=Gather]; +"1965 Shape_1793" [id=1965, type=Shape]; +"1966 Constant_1794" [id=1966, type=Constant]; +"1967 Gather_1795" [id=1967, type=Gather]; +"1968 Constant_1796" [id=1968, type=Constant]; +"1969 Div_1797" [id=1969, type=Div]; +"1970 Cast_1798" [id=1970, type=Cast]; +"1971 Cast_1799" [id=1971, type=Cast]; +"1972 Unsqueeze_1800" [id=1972, type=Unsqueeze]; +"1973 Unsqueeze_1801" [id=1973, type=Unsqueeze]; +"1974 Unsqueeze_1802" [id=1974, type=Unsqueeze]; +"1975 Concat_1803" [id=1975, type=Concat]; +"1976 Reshape_1804" [id=1976, type=Reshape]; +"1977 Transpose_1805" [id=1977, type=Transpose]; +"1978 Transpose_1806" [id=1978, type=Transpose]; +"1979 Unsqueeze_1807" [id=1979, type=Unsqueeze]; +"1980 Unsqueeze_1808" [id=1980, type=Unsqueeze]; +"1981 Concat_1809" [id=1981, type=Concat]; +"1982 MatMul_1810" [id=1982, type=MatMul]; +"1983 Constant_1811" [id=1983, type=Constant]; +"1984 Div_1812" [id=1984, type=Div]; +"1985 Shape_1813" [id=1985, type=Shape]; +"1986 Constant_1814" [id=1986, type=Constant]; +"1987 Gather_1815" [id=1987, type=Gather]; +"1988 Shape_1816" [id=1988, type=Shape]; +"1989 Constant_1817" [id=1989, type=Constant]; +"1990 Gather_1818" [id=1990, type=Gather]; +"1991 Sub_1819" [id=1991, type=Sub]; +"1992 Unsqueeze_1820" [id=1992, type=Unsqueeze]; +"1993 Unsqueeze_1821" [id=1993, type=Unsqueeze]; +"1994 Constant_1822" [id=1994, type=Constant]; +"1995 Slice_1823" [id=1995, type=Slice]; +"1996 Unsqueeze_1824" [id=1996, type=Unsqueeze]; +"1997 Constant_1825" [id=1997, type=Constant]; +"1998 Slice_1826" [id=1998, type=Slice]; +"1999 Mul_1827" [id=1999, type=Mul]; +"2000 
Constant_1828" [id=2000, type=Constant]; +"2001 Sub_1829" [id=2001, type=Sub]; +"2002 Constant_1830" [id=2002, type=Constant]; +"2003 Mul_1831" [id=2003, type=Mul]; +"2004 Sub_1832" [id=2004, type=Sub]; +"2005 Softmax_1833" [id=2005, type=Softmax]; +"2006 MatMul_1834" [id=2006, type=MatMul]; +"2007 QuantizeLinear_2261_1" [id=2007, type=QuantizeLinear]; +"2008 DequantizeLinear_2261_1" [id=2008, type=DequantizeLinear]; +"2009 Transpose_1835" [id=2009, type=Transpose]; +"2010 Shape_1836" [id=2010, type=Shape]; +"2011 Constant_1837" [id=2011, type=Constant]; +"2012 Gather_1838" [id=2012, type=Gather]; +"2013 Shape_1839" [id=2013, type=Shape]; +"2014 Constant_1840" [id=2014, type=Constant]; +"2015 Gather_1841" [id=2015, type=Gather]; +"2016 Shape_1842" [id=2016, type=Shape]; +"2017 Constant_1843" [id=2017, type=Constant]; +"2018 Gather_1844" [id=2018, type=Gather]; +"2019 Shape_1845" [id=2019, type=Shape]; +"2020 Constant_1846" [id=2020, type=Constant]; +"2021 Gather_1847" [id=2021, type=Gather]; +"2022 Mul_1848" [id=2022, type=Mul]; +"2023 Unsqueeze_1849" [id=2023, type=Unsqueeze]; +"2024 Unsqueeze_1850" [id=2024, type=Unsqueeze]; +"2025 Unsqueeze_1851" [id=2025, type=Unsqueeze]; +"2026 Concat_1852" [id=2026, type=Concat]; +"2027 Reshape_1853" [id=2027, type=Reshape]; +"2028 Shape_1854" [id=2028, type=Shape]; +"2029 Constant_1855" [id=2029, type=Constant]; +"2030 Gather_1856" [id=2030, type=Gather]; +"2031 Shape_1857" [id=2031, type=Shape]; +"2032 Constant_1858" [id=2032, type=Constant]; +"2033 Gather_1859" [id=2033, type=Gather]; +"2034 Shape_1860" [id=2034, type=Shape]; +"2035 Constant_1861" [id=2035, type=Constant]; +"2036 Gather_1862" [id=2036, type=Gather]; +"2037 Unsqueeze_1863" [id=2037, type=Unsqueeze]; +"2038 Concat_1864" [id=2038, type=Concat]; +"2039 Reshape_1865" [id=2039, type=Reshape]; +"2040 QuantizeLinear_h.8.attn.c_proj.weight_1" [id=2040, type=QuantizeLinear]; +"2041 DequantizeLinear_h.8.attn.c_proj.weight_1" [id=2041, type=DequantizeLinear]; +"2042 Gemm_1866" [id=2042, type=Gemm]; +"2043 Unsqueeze_1867" [id=2043, type=Unsqueeze]; +"2044 Unsqueeze_1868" [id=2044, type=Unsqueeze]; +"2045 Concat_1869" [id=2045, type=Concat]; +"2046 Reshape_1870" [id=2046, type=Reshape]; +"2047 Add_1871" [id=2047, type=Add]; +"2048 ReduceMean_1872" [id=2048, type=ReduceMean]; +"2049 Sub_1873" [id=2049, type=Sub]; +"2050 Constant_1874" [id=2050, type=Constant]; +"2051 Pow_1875" [id=2051, type=Pow]; +"2052 ReduceMean_1876" [id=2052, type=ReduceMean]; +"2053 Constant_1877" [id=2053, type=Constant]; +"2054 Add_1878" [id=2054, type=Add]; +"2055 Sqrt_1879" [id=2055, type=Sqrt]; +"2056 Div_1880" [id=2056, type=Div]; +"2057 Mul_1881" [id=2057, type=Mul]; +"2058 Add_1882" [id=2058, type=Add]; +"2059 QuantizeLinear_2313_1" [id=2059, type=QuantizeLinear]; +"2060 DequantizeLinear_2313_1" [id=2060, type=DequantizeLinear]; +"2061 Shape_1883" [id=2061, type=Shape]; +"2062 Constant_1884" [id=2062, type=Constant]; +"2063 Gather_1885" [id=2063, type=Gather]; +"2064 Shape_1886" [id=2064, type=Shape]; +"2065 Constant_1887" [id=2065, type=Constant]; +"2066 Gather_1888" [id=2066, type=Gather]; +"2067 Shape_1889" [id=2067, type=Shape]; +"2068 Constant_1890" [id=2068, type=Constant]; +"2069 Gather_1891" [id=2069, type=Gather]; +"2070 Unsqueeze_1892" [id=2070, type=Unsqueeze]; +"2071 Concat_1893" [id=2071, type=Concat]; +"2072 Reshape_1894" [id=2072, type=Reshape]; +"2073 QuantizeLinear_h.8.mlp.c_fc.weight_1" [id=2073, type=QuantizeLinear]; +"2074 DequantizeLinear_h.8.mlp.c_fc.weight_1" [id=2074, type=DequantizeLinear]; 
+"2075 Gemm_1895" [id=2075, type=Gemm]; +"2076 Unsqueeze_1896" [id=2076, type=Unsqueeze]; +"2077 Unsqueeze_1897" [id=2077, type=Unsqueeze]; +"2078 Concat_1898" [id=2078, type=Concat]; +"2079 Reshape_1899" [id=2079, type=Reshape]; +"2080 Constant_1900" [id=2080, type=Constant]; +"2081 Mul_1901" [id=2081, type=Mul]; +"2082 Constant_1902" [id=2082, type=Constant]; +"2083 Pow_1903" [id=2083, type=Pow]; +"2084 Constant_1904" [id=2084, type=Constant]; +"2085 Mul_1905" [id=2085, type=Mul]; +"2086 Add_1906" [id=2086, type=Add]; +"2087 Constant_1907" [id=2087, type=Constant]; +"2088 Mul_1908" [id=2088, type=Mul]; +"2089 Tanh_1909" [id=2089, type=Tanh]; +"2090 Constant_1910" [id=2090, type=Constant]; +"2091 Add_1911" [id=2091, type=Add]; +"2092 Mul_1912" [id=2092, type=Mul]; +"2093 QuantizeLinear_2347_1" [id=2093, type=QuantizeLinear]; +"2094 DequantizeLinear_2347_1" [id=2094, type=DequantizeLinear]; +"2095 Shape_1913" [id=2095, type=Shape]; +"2096 Constant_1914" [id=2096, type=Constant]; +"2097 Gather_1915" [id=2097, type=Gather]; +"2098 Shape_1916" [id=2098, type=Shape]; +"2099 Constant_1917" [id=2099, type=Constant]; +"2100 Gather_1918" [id=2100, type=Gather]; +"2101 Shape_1919" [id=2101, type=Shape]; +"2102 Constant_1920" [id=2102, type=Constant]; +"2103 Gather_1921" [id=2103, type=Gather]; +"2104 Unsqueeze_1922" [id=2104, type=Unsqueeze]; +"2105 Concat_1923" [id=2105, type=Concat]; +"2106 Reshape_1924" [id=2106, type=Reshape]; +"2107 QuantizeLinear_h.8.mlp.c_proj.weight_1" [id=2107, type=QuantizeLinear]; +"2108 DequantizeLinear_h.8.mlp.c_proj.weight_1" [id=2108, type=DequantizeLinear]; +"2109 Gemm_1925" [id=2109, type=Gemm]; +"2110 Unsqueeze_1926" [id=2110, type=Unsqueeze]; +"2111 Unsqueeze_1927" [id=2111, type=Unsqueeze]; +"2112 Concat_1928" [id=2112, type=Concat]; +"2113 Reshape_1929" [id=2113, type=Reshape]; +"2114 Add_1930" [id=2114, type=Add]; +"2115 ReduceMean_1931" [id=2115, type=ReduceMean]; +"2116 Sub_1932" [id=2116, type=Sub]; +"2117 Constant_1933" [id=2117, type=Constant]; +"2118 Pow_1934" [id=2118, type=Pow]; +"2119 ReduceMean_1935" [id=2119, type=ReduceMean]; +"2120 Constant_1936" [id=2120, type=Constant]; +"2121 Add_1937" [id=2121, type=Add]; +"2122 Sqrt_1938" [id=2122, type=Sqrt]; +"2123 Div_1939" [id=2123, type=Div]; +"2124 Mul_1940" [id=2124, type=Mul]; +"2125 Add_1941" [id=2125, type=Add]; +"2126 QuantizeLinear_2380_1" [id=2126, type=QuantizeLinear]; +"2127 DequantizeLinear_2380_1" [id=2127, type=DequantizeLinear]; +"2128 Shape_1942" [id=2128, type=Shape]; +"2129 Constant_1943" [id=2129, type=Constant]; +"2130 Gather_1944" [id=2130, type=Gather]; +"2131 Shape_1945" [id=2131, type=Shape]; +"2132 Constant_1946" [id=2132, type=Constant]; +"2133 Gather_1947" [id=2133, type=Gather]; +"2134 Shape_1948" [id=2134, type=Shape]; +"2135 Constant_1949" [id=2135, type=Constant]; +"2136 Gather_1950" [id=2136, type=Gather]; +"2137 Unsqueeze_1951" [id=2137, type=Unsqueeze]; +"2138 Concat_1952" [id=2138, type=Concat]; +"2139 Reshape_1953" [id=2139, type=Reshape]; +"2140 QuantizeLinear_h.9.attn.c_attn.weight_1" [id=2140, type=QuantizeLinear]; +"2141 DequantizeLinear_h.9.attn.c_attn.weight_1" [id=2141, type=DequantizeLinear]; +"2142 Gemm_1954" [id=2142, type=Gemm]; +"2143 Unsqueeze_1955" [id=2143, type=Unsqueeze]; +"2144 Unsqueeze_1956" [id=2144, type=Unsqueeze]; +"2145 Concat_1957" [id=2145, type=Concat]; +"2146 Reshape_1958" [id=2146, type=Reshape]; +"2147 Split_1959" [id=2147, type=Split]; +"2148 QuantizeLinear_query.19_1" [id=2148, type=QuantizeLinear]; +"2149 DequantizeLinear_query.19_1" 
[id=2149, type=DequantizeLinear]; +"2150 Shape_1960" [id=2150, type=Shape]; +"2151 Constant_1961" [id=2151, type=Constant]; +"2152 Gather_1962" [id=2152, type=Gather]; +"2153 Shape_1963" [id=2153, type=Shape]; +"2154 Constant_1964" [id=2154, type=Constant]; +"2155 Gather_1965" [id=2155, type=Gather]; +"2156 Shape_1966" [id=2156, type=Shape]; +"2157 Constant_1967" [id=2157, type=Constant]; +"2158 Gather_1968" [id=2158, type=Gather]; +"2159 Constant_1969" [id=2159, type=Constant]; +"2160 Div_1970" [id=2160, type=Div]; +"2161 Cast_1971" [id=2161, type=Cast]; +"2162 Cast_1972" [id=2162, type=Cast]; +"2163 Unsqueeze_1973" [id=2163, type=Unsqueeze]; +"2164 Unsqueeze_1974" [id=2164, type=Unsqueeze]; +"2165 Unsqueeze_1975" [id=2165, type=Unsqueeze]; +"2166 Concat_1976" [id=2166, type=Concat]; +"2167 Reshape_1977" [id=2167, type=Reshape]; +"2168 Transpose_1978" [id=2168, type=Transpose]; +"2169 Shape_1979" [id=2169, type=Shape]; +"2170 Constant_1980" [id=2170, type=Constant]; +"2171 Gather_1981" [id=2171, type=Gather]; +"2172 Shape_1982" [id=2172, type=Shape]; +"2173 Constant_1983" [id=2173, type=Constant]; +"2174 Gather_1984" [id=2174, type=Gather]; +"2175 Shape_1985" [id=2175, type=Shape]; +"2176 Constant_1986" [id=2176, type=Constant]; +"2177 Gather_1987" [id=2177, type=Gather]; +"2178 Constant_1988" [id=2178, type=Constant]; +"2179 Div_1989" [id=2179, type=Div]; +"2180 Cast_1990" [id=2180, type=Cast]; +"2181 Cast_1991" [id=2181, type=Cast]; +"2182 Unsqueeze_1992" [id=2182, type=Unsqueeze]; +"2183 Unsqueeze_1993" [id=2183, type=Unsqueeze]; +"2184 Unsqueeze_1994" [id=2184, type=Unsqueeze]; +"2185 Concat_1995" [id=2185, type=Concat]; +"2186 Reshape_1996" [id=2186, type=Reshape]; +"2187 QuantizeLinear_2445_1" [id=2187, type=QuantizeLinear]; +"2188 DequantizeLinear_2445_1" [id=2188, type=DequantizeLinear]; +"2189 Transpose_1997" [id=2189, type=Transpose]; +"2190 Shape_1998" [id=2190, type=Shape]; +"2191 Constant_1999" [id=2191, type=Constant]; +"2192 Gather_2000" [id=2192, type=Gather]; +"2193 Shape_2001" [id=2193, type=Shape]; +"2194 Constant_2002" [id=2194, type=Constant]; +"2195 Gather_2003" [id=2195, type=Gather]; +"2196 Shape_2004" [id=2196, type=Shape]; +"2197 Constant_2005" [id=2197, type=Constant]; +"2198 Gather_2006" [id=2198, type=Gather]; +"2199 Constant_2007" [id=2199, type=Constant]; +"2200 Div_2008" [id=2200, type=Div]; +"2201 Cast_2009" [id=2201, type=Cast]; +"2202 Cast_2010" [id=2202, type=Cast]; +"2203 Unsqueeze_2011" [id=2203, type=Unsqueeze]; +"2204 Unsqueeze_2012" [id=2204, type=Unsqueeze]; +"2205 Unsqueeze_2013" [id=2205, type=Unsqueeze]; +"2206 Concat_2014" [id=2206, type=Concat]; +"2207 Reshape_2015" [id=2207, type=Reshape]; +"2208 Transpose_2016" [id=2208, type=Transpose]; +"2209 Transpose_2017" [id=2209, type=Transpose]; +"2210 Unsqueeze_2018" [id=2210, type=Unsqueeze]; +"2211 Unsqueeze_2019" [id=2211, type=Unsqueeze]; +"2212 Concat_2020" [id=2212, type=Concat]; +"2213 MatMul_2021" [id=2213, type=MatMul]; +"2214 Constant_2022" [id=2214, type=Constant]; +"2215 Div_2023" [id=2215, type=Div]; +"2216 Shape_2024" [id=2216, type=Shape]; +"2217 Constant_2025" [id=2217, type=Constant]; +"2218 Gather_2026" [id=2218, type=Gather]; +"2219 Shape_2027" [id=2219, type=Shape]; +"2220 Constant_2028" [id=2220, type=Constant]; +"2221 Gather_2029" [id=2221, type=Gather]; +"2222 Sub_2030" [id=2222, type=Sub]; +"2223 Unsqueeze_2031" [id=2223, type=Unsqueeze]; +"2224 Unsqueeze_2032" [id=2224, type=Unsqueeze]; +"2225 Constant_2033" [id=2225, type=Constant]; +"2226 Slice_2034" [id=2226, 
type=Slice]; +"2227 Unsqueeze_2035" [id=2227, type=Unsqueeze]; +"2228 Constant_2036" [id=2228, type=Constant]; +"2229 Slice_2037" [id=2229, type=Slice]; +"2230 Mul_2038" [id=2230, type=Mul]; +"2231 Constant_2039" [id=2231, type=Constant]; +"2232 Sub_2040" [id=2232, type=Sub]; +"2233 Constant_2041" [id=2233, type=Constant]; +"2234 Mul_2042" [id=2234, type=Mul]; +"2235 Sub_2043" [id=2235, type=Sub]; +"2236 Softmax_2044" [id=2236, type=Softmax]; +"2237 MatMul_2045" [id=2237, type=MatMul]; +"2238 QuantizeLinear_2502_1" [id=2238, type=QuantizeLinear]; +"2239 DequantizeLinear_2502_1" [id=2239, type=DequantizeLinear]; +"2240 Transpose_2046" [id=2240, type=Transpose]; +"2241 Shape_2047" [id=2241, type=Shape]; +"2242 Constant_2048" [id=2242, type=Constant]; +"2243 Gather_2049" [id=2243, type=Gather]; +"2244 Shape_2050" [id=2244, type=Shape]; +"2245 Constant_2051" [id=2245, type=Constant]; +"2246 Gather_2052" [id=2246, type=Gather]; +"2247 Shape_2053" [id=2247, type=Shape]; +"2248 Constant_2054" [id=2248, type=Constant]; +"2249 Gather_2055" [id=2249, type=Gather]; +"2250 Shape_2056" [id=2250, type=Shape]; +"2251 Constant_2057" [id=2251, type=Constant]; +"2252 Gather_2058" [id=2252, type=Gather]; +"2253 Mul_2059" [id=2253, type=Mul]; +"2254 Unsqueeze_2060" [id=2254, type=Unsqueeze]; +"2255 Unsqueeze_2061" [id=2255, type=Unsqueeze]; +"2256 Unsqueeze_2062" [id=2256, type=Unsqueeze]; +"2257 Concat_2063" [id=2257, type=Concat]; +"2258 Reshape_2064" [id=2258, type=Reshape]; +"2259 Shape_2065" [id=2259, type=Shape]; +"2260 Constant_2066" [id=2260, type=Constant]; +"2261 Gather_2067" [id=2261, type=Gather]; +"2262 Shape_2068" [id=2262, type=Shape]; +"2263 Constant_2069" [id=2263, type=Constant]; +"2264 Gather_2070" [id=2264, type=Gather]; +"2265 Shape_2071" [id=2265, type=Shape]; +"2266 Constant_2072" [id=2266, type=Constant]; +"2267 Gather_2073" [id=2267, type=Gather]; +"2268 Unsqueeze_2074" [id=2268, type=Unsqueeze]; +"2269 Concat_2075" [id=2269, type=Concat]; +"2270 Reshape_2076" [id=2270, type=Reshape]; +"2271 QuantizeLinear_h.9.attn.c_proj.weight_1" [id=2271, type=QuantizeLinear]; +"2272 DequantizeLinear_h.9.attn.c_proj.weight_1" [id=2272, type=DequantizeLinear]; +"2273 Gemm_2077" [id=2273, type=Gemm]; +"2274 Unsqueeze_2078" [id=2274, type=Unsqueeze]; +"2275 Unsqueeze_2079" [id=2275, type=Unsqueeze]; +"2276 Concat_2080" [id=2276, type=Concat]; +"2277 Reshape_2081" [id=2277, type=Reshape]; +"2278 Add_2082" [id=2278, type=Add]; +"2279 ReduceMean_2083" [id=2279, type=ReduceMean]; +"2280 Sub_2084" [id=2280, type=Sub]; +"2281 Constant_2085" [id=2281, type=Constant]; +"2282 Pow_2086" [id=2282, type=Pow]; +"2283 ReduceMean_2087" [id=2283, type=ReduceMean]; +"2284 Constant_2088" [id=2284, type=Constant]; +"2285 Add_2089" [id=2285, type=Add]; +"2286 Sqrt_2090" [id=2286, type=Sqrt]; +"2287 Div_2091" [id=2287, type=Div]; +"2288 Mul_2092" [id=2288, type=Mul]; +"2289 Add_2093" [id=2289, type=Add]; +"2290 QuantizeLinear_2554_1" [id=2290, type=QuantizeLinear]; +"2291 DequantizeLinear_2554_1" [id=2291, type=DequantizeLinear]; +"2292 Shape_2094" [id=2292, type=Shape]; +"2293 Constant_2095" [id=2293, type=Constant]; +"2294 Gather_2096" [id=2294, type=Gather]; +"2295 Shape_2097" [id=2295, type=Shape]; +"2296 Constant_2098" [id=2296, type=Constant]; +"2297 Gather_2099" [id=2297, type=Gather]; +"2298 Shape_2100" [id=2298, type=Shape]; +"2299 Constant_2101" [id=2299, type=Constant]; +"2300 Gather_2102" [id=2300, type=Gather]; +"2301 Unsqueeze_2103" [id=2301, type=Unsqueeze]; +"2302 Concat_2104" [id=2302, type=Concat]; 
+"2303 Reshape_2105" [id=2303, type=Reshape]; +"2304 QuantizeLinear_h.9.mlp.c_fc.weight_1" [id=2304, type=QuantizeLinear]; +"2305 DequantizeLinear_h.9.mlp.c_fc.weight_1" [id=2305, type=DequantizeLinear]; +"2306 Gemm_2106" [id=2306, type=Gemm]; +"2307 Unsqueeze_2107" [id=2307, type=Unsqueeze]; +"2308 Unsqueeze_2108" [id=2308, type=Unsqueeze]; +"2309 Concat_2109" [id=2309, type=Concat]; +"2310 Reshape_2110" [id=2310, type=Reshape]; +"2311 Constant_2111" [id=2311, type=Constant]; +"2312 Mul_2112" [id=2312, type=Mul]; +"2313 Constant_2113" [id=2313, type=Constant]; +"2314 Pow_2114" [id=2314, type=Pow]; +"2315 Constant_2115" [id=2315, type=Constant]; +"2316 Mul_2116" [id=2316, type=Mul]; +"2317 Add_2117" [id=2317, type=Add]; +"2318 Constant_2118" [id=2318, type=Constant]; +"2319 Mul_2119" [id=2319, type=Mul]; +"2320 Tanh_2120" [id=2320, type=Tanh]; +"2321 Constant_2121" [id=2321, type=Constant]; +"2322 Add_2122" [id=2322, type=Add]; +"2323 Mul_2123" [id=2323, type=Mul]; +"2324 QuantizeLinear_2588_1" [id=2324, type=QuantizeLinear]; +"2325 DequantizeLinear_2588_1" [id=2325, type=DequantizeLinear]; +"2326 Shape_2124" [id=2326, type=Shape]; +"2327 Constant_2125" [id=2327, type=Constant]; +"2328 Gather_2126" [id=2328, type=Gather]; +"2329 Shape_2127" [id=2329, type=Shape]; +"2330 Constant_2128" [id=2330, type=Constant]; +"2331 Gather_2129" [id=2331, type=Gather]; +"2332 Shape_2130" [id=2332, type=Shape]; +"2333 Constant_2131" [id=2333, type=Constant]; +"2334 Gather_2132" [id=2334, type=Gather]; +"2335 Unsqueeze_2133" [id=2335, type=Unsqueeze]; +"2336 Concat_2134" [id=2336, type=Concat]; +"2337 Reshape_2135" [id=2337, type=Reshape]; +"2338 QuantizeLinear_h.9.mlp.c_proj.weight_1" [id=2338, type=QuantizeLinear]; +"2339 DequantizeLinear_h.9.mlp.c_proj.weight_1" [id=2339, type=DequantizeLinear]; +"2340 Gemm_2136" [id=2340, type=Gemm]; +"2341 Unsqueeze_2137" [id=2341, type=Unsqueeze]; +"2342 Unsqueeze_2138" [id=2342, type=Unsqueeze]; +"2343 Concat_2139" [id=2343, type=Concat]; +"2344 Reshape_2140" [id=2344, type=Reshape]; +"2345 Add_2141" [id=2345, type=Add]; +"2346 ReduceMean_2142" [id=2346, type=ReduceMean]; +"2347 Sub_2143" [id=2347, type=Sub]; +"2348 Constant_2144" [id=2348, type=Constant]; +"2349 Pow_2145" [id=2349, type=Pow]; +"2350 ReduceMean_2146" [id=2350, type=ReduceMean]; +"2351 Constant_2147" [id=2351, type=Constant]; +"2352 Add_2148" [id=2352, type=Add]; +"2353 Sqrt_2149" [id=2353, type=Sqrt]; +"2354 Div_2150" [id=2354, type=Div]; +"2355 Mul_2151" [id=2355, type=Mul]; +"2356 Add_2152" [id=2356, type=Add]; +"2357 QuantizeLinear_2621_1" [id=2357, type=QuantizeLinear]; +"2358 DequantizeLinear_2621_1" [id=2358, type=DequantizeLinear]; +"2359 Shape_2153" [id=2359, type=Shape]; +"2360 Constant_2154" [id=2360, type=Constant]; +"2361 Gather_2155" [id=2361, type=Gather]; +"2362 Shape_2156" [id=2362, type=Shape]; +"2363 Constant_2157" [id=2363, type=Constant]; +"2364 Gather_2158" [id=2364, type=Gather]; +"2365 Shape_2159" [id=2365, type=Shape]; +"2366 Constant_2160" [id=2366, type=Constant]; +"2367 Gather_2161" [id=2367, type=Gather]; +"2368 Unsqueeze_2162" [id=2368, type=Unsqueeze]; +"2369 Concat_2163" [id=2369, type=Concat]; +"2370 Reshape_2164" [id=2370, type=Reshape]; +"2371 QuantizeLinear_h.10.attn.c_attn.weight_1" [id=2371, type=QuantizeLinear]; +"2372 DequantizeLinear_h.10.attn.c_attn.weight_1" [id=2372, type=DequantizeLinear]; +"2373 Gemm_2165" [id=2373, type=Gemm]; +"2374 Unsqueeze_2166" [id=2374, type=Unsqueeze]; +"2375 Unsqueeze_2167" [id=2375, type=Unsqueeze]; +"2376 Concat_2168" [id=2376, 
type=Concat]; +"2377 Reshape_2169" [id=2377, type=Reshape]; +"2378 Split_2170" [id=2378, type=Split]; +"2379 QuantizeLinear_query.21_1" [id=2379, type=QuantizeLinear]; +"2380 DequantizeLinear_query.21_1" [id=2380, type=DequantizeLinear]; +"2381 Shape_2171" [id=2381, type=Shape]; +"2382 Constant_2172" [id=2382, type=Constant]; +"2383 Gather_2173" [id=2383, type=Gather]; +"2384 Shape_2174" [id=2384, type=Shape]; +"2385 Constant_2175" [id=2385, type=Constant]; +"2386 Gather_2176" [id=2386, type=Gather]; +"2387 Shape_2177" [id=2387, type=Shape]; +"2388 Constant_2178" [id=2388, type=Constant]; +"2389 Gather_2179" [id=2389, type=Gather]; +"2390 Constant_2180" [id=2390, type=Constant]; +"2391 Div_2181" [id=2391, type=Div]; +"2392 Cast_2182" [id=2392, type=Cast]; +"2393 Cast_2183" [id=2393, type=Cast]; +"2394 Unsqueeze_2184" [id=2394, type=Unsqueeze]; +"2395 Unsqueeze_2185" [id=2395, type=Unsqueeze]; +"2396 Unsqueeze_2186" [id=2396, type=Unsqueeze]; +"2397 Concat_2187" [id=2397, type=Concat]; +"2398 Reshape_2188" [id=2398, type=Reshape]; +"2399 Transpose_2189" [id=2399, type=Transpose]; +"2400 Shape_2190" [id=2400, type=Shape]; +"2401 Constant_2191" [id=2401, type=Constant]; +"2402 Gather_2192" [id=2402, type=Gather]; +"2403 Shape_2193" [id=2403, type=Shape]; +"2404 Constant_2194" [id=2404, type=Constant]; +"2405 Gather_2195" [id=2405, type=Gather]; +"2406 Shape_2196" [id=2406, type=Shape]; +"2407 Constant_2197" [id=2407, type=Constant]; +"2408 Gather_2198" [id=2408, type=Gather]; +"2409 Constant_2199" [id=2409, type=Constant]; +"2410 Div_2200" [id=2410, type=Div]; +"2411 Cast_2201" [id=2411, type=Cast]; +"2412 Cast_2202" [id=2412, type=Cast]; +"2413 Unsqueeze_2203" [id=2413, type=Unsqueeze]; +"2414 Unsqueeze_2204" [id=2414, type=Unsqueeze]; +"2415 Unsqueeze_2205" [id=2415, type=Unsqueeze]; +"2416 Concat_2206" [id=2416, type=Concat]; +"2417 Reshape_2207" [id=2417, type=Reshape]; +"2418 QuantizeLinear_2686_1" [id=2418, type=QuantizeLinear]; +"2419 DequantizeLinear_2686_1" [id=2419, type=DequantizeLinear]; +"2420 Transpose_2208" [id=2420, type=Transpose]; +"2421 Shape_2209" [id=2421, type=Shape]; +"2422 Constant_2210" [id=2422, type=Constant]; +"2423 Gather_2211" [id=2423, type=Gather]; +"2424 Shape_2212" [id=2424, type=Shape]; +"2425 Constant_2213" [id=2425, type=Constant]; +"2426 Gather_2214" [id=2426, type=Gather]; +"2427 Shape_2215" [id=2427, type=Shape]; +"2428 Constant_2216" [id=2428, type=Constant]; +"2429 Gather_2217" [id=2429, type=Gather]; +"2430 Constant_2218" [id=2430, type=Constant]; +"2431 Div_2219" [id=2431, type=Div]; +"2432 Cast_2220" [id=2432, type=Cast]; +"2433 Cast_2221" [id=2433, type=Cast]; +"2434 Unsqueeze_2222" [id=2434, type=Unsqueeze]; +"2435 Unsqueeze_2223" [id=2435, type=Unsqueeze]; +"2436 Unsqueeze_2224" [id=2436, type=Unsqueeze]; +"2437 Concat_2225" [id=2437, type=Concat]; +"2438 Reshape_2226" [id=2438, type=Reshape]; +"2439 Transpose_2227" [id=2439, type=Transpose]; +"2440 Transpose_2228" [id=2440, type=Transpose]; +"2441 Unsqueeze_2229" [id=2441, type=Unsqueeze]; +"2442 Unsqueeze_2230" [id=2442, type=Unsqueeze]; +"2443 Concat_2231" [id=2443, type=Concat]; +"2444 MatMul_2232" [id=2444, type=MatMul]; +"2445 Constant_2233" [id=2445, type=Constant]; +"2446 Div_2234" [id=2446, type=Div]; +"2447 Shape_2235" [id=2447, type=Shape]; +"2448 Constant_2236" [id=2448, type=Constant]; +"2449 Gather_2237" [id=2449, type=Gather]; +"2450 Shape_2238" [id=2450, type=Shape]; +"2451 Constant_2239" [id=2451, type=Constant]; +"2452 Gather_2240" [id=2452, type=Gather]; +"2453 Sub_2241" 
[id=2453, type=Sub]; +"2454 Unsqueeze_2242" [id=2454, type=Unsqueeze]; +"2455 Unsqueeze_2243" [id=2455, type=Unsqueeze]; +"2456 Constant_2244" [id=2456, type=Constant]; +"2457 Slice_2245" [id=2457, type=Slice]; +"2458 Unsqueeze_2246" [id=2458, type=Unsqueeze]; +"2459 Constant_2247" [id=2459, type=Constant]; +"2460 Slice_2248" [id=2460, type=Slice]; +"2461 Mul_2249" [id=2461, type=Mul]; +"2462 Constant_2250" [id=2462, type=Constant]; +"2463 Sub_2251" [id=2463, type=Sub]; +"2464 Constant_2252" [id=2464, type=Constant]; +"2465 Mul_2253" [id=2465, type=Mul]; +"2466 Sub_2254" [id=2466, type=Sub]; +"2467 Softmax_2255" [id=2467, type=Softmax]; +"2468 MatMul_2256" [id=2468, type=MatMul]; +"2469 QuantizeLinear_2743_1" [id=2469, type=QuantizeLinear]; +"2470 DequantizeLinear_2743_1" [id=2470, type=DequantizeLinear]; +"2471 Transpose_2257" [id=2471, type=Transpose]; +"2472 Shape_2258" [id=2472, type=Shape]; +"2473 Constant_2259" [id=2473, type=Constant]; +"2474 Gather_2260" [id=2474, type=Gather]; +"2475 Shape_2261" [id=2475, type=Shape]; +"2476 Constant_2262" [id=2476, type=Constant]; +"2477 Gather_2263" [id=2477, type=Gather]; +"2478 Shape_2264" [id=2478, type=Shape]; +"2479 Constant_2265" [id=2479, type=Constant]; +"2480 Gather_2266" [id=2480, type=Gather]; +"2481 Shape_2267" [id=2481, type=Shape]; +"2482 Constant_2268" [id=2482, type=Constant]; +"2483 Gather_2269" [id=2483, type=Gather]; +"2484 Mul_2270" [id=2484, type=Mul]; +"2485 Unsqueeze_2271" [id=2485, type=Unsqueeze]; +"2486 Unsqueeze_2272" [id=2486, type=Unsqueeze]; +"2487 Unsqueeze_2273" [id=2487, type=Unsqueeze]; +"2488 Concat_2274" [id=2488, type=Concat]; +"2489 Reshape_2275" [id=2489, type=Reshape]; +"2490 Shape_2276" [id=2490, type=Shape]; +"2491 Constant_2277" [id=2491, type=Constant]; +"2492 Gather_2278" [id=2492, type=Gather]; +"2493 Shape_2279" [id=2493, type=Shape]; +"2494 Constant_2280" [id=2494, type=Constant]; +"2495 Gather_2281" [id=2495, type=Gather]; +"2496 Shape_2282" [id=2496, type=Shape]; +"2497 Constant_2283" [id=2497, type=Constant]; +"2498 Gather_2284" [id=2498, type=Gather]; +"2499 Unsqueeze_2285" [id=2499, type=Unsqueeze]; +"2500 Concat_2286" [id=2500, type=Concat]; +"2501 Reshape_2287" [id=2501, type=Reshape]; +"2502 QuantizeLinear_h.10.attn.c_proj.weight_1" [id=2502, type=QuantizeLinear]; +"2503 DequantizeLinear_h.10.attn.c_proj.weight_1" [id=2503, type=DequantizeLinear]; +"2504 Gemm_2288" [id=2504, type=Gemm]; +"2505 Unsqueeze_2289" [id=2505, type=Unsqueeze]; +"2506 Unsqueeze_2290" [id=2506, type=Unsqueeze]; +"2507 Concat_2291" [id=2507, type=Concat]; +"2508 Reshape_2292" [id=2508, type=Reshape]; +"2509 Add_2293" [id=2509, type=Add]; +"2510 ReduceMean_2294" [id=2510, type=ReduceMean]; +"2511 Sub_2295" [id=2511, type=Sub]; +"2512 Constant_2296" [id=2512, type=Constant]; +"2513 Pow_2297" [id=2513, type=Pow]; +"2514 ReduceMean_2298" [id=2514, type=ReduceMean]; +"2515 Constant_2299" [id=2515, type=Constant]; +"2516 Add_2300" [id=2516, type=Add]; +"2517 Sqrt_2301" [id=2517, type=Sqrt]; +"2518 Div_2302" [id=2518, type=Div]; +"2519 Mul_2303" [id=2519, type=Mul]; +"2520 Add_2304" [id=2520, type=Add]; +"2521 QuantizeLinear_2795_1" [id=2521, type=QuantizeLinear]; +"2522 DequantizeLinear_2795_1" [id=2522, type=DequantizeLinear]; +"2523 Shape_2305" [id=2523, type=Shape]; +"2524 Constant_2306" [id=2524, type=Constant]; +"2525 Gather_2307" [id=2525, type=Gather]; +"2526 Shape_2308" [id=2526, type=Shape]; +"2527 Constant_2309" [id=2527, type=Constant]; +"2528 Gather_2310" [id=2528, type=Gather]; +"2529 Shape_2311" [id=2529, 
type=Shape]; +"2530 Constant_2312" [id=2530, type=Constant]; +"2531 Gather_2313" [id=2531, type=Gather]; +"2532 Unsqueeze_2314" [id=2532, type=Unsqueeze]; +"2533 Concat_2315" [id=2533, type=Concat]; +"2534 Reshape_2316" [id=2534, type=Reshape]; +"2535 QuantizeLinear_h.10.mlp.c_fc.weight_1" [id=2535, type=QuantizeLinear]; +"2536 DequantizeLinear_h.10.mlp.c_fc.weight_1" [id=2536, type=DequantizeLinear]; +"2537 Gemm_2317" [id=2537, type=Gemm]; +"2538 Unsqueeze_2318" [id=2538, type=Unsqueeze]; +"2539 Unsqueeze_2319" [id=2539, type=Unsqueeze]; +"2540 Concat_2320" [id=2540, type=Concat]; +"2541 Reshape_2321" [id=2541, type=Reshape]; +"2542 Constant_2322" [id=2542, type=Constant]; +"2543 Mul_2323" [id=2543, type=Mul]; +"2544 Constant_2324" [id=2544, type=Constant]; +"2545 Pow_2325" [id=2545, type=Pow]; +"2546 Constant_2326" [id=2546, type=Constant]; +"2547 Mul_2327" [id=2547, type=Mul]; +"2548 Add_2328" [id=2548, type=Add]; +"2549 Constant_2329" [id=2549, type=Constant]; +"2550 Mul_2330" [id=2550, type=Mul]; +"2551 Tanh_2331" [id=2551, type=Tanh]; +"2552 Constant_2332" [id=2552, type=Constant]; +"2553 Add_2333" [id=2553, type=Add]; +"2554 Mul_2334" [id=2554, type=Mul]; +"2555 QuantizeLinear_2829_1" [id=2555, type=QuantizeLinear]; +"2556 DequantizeLinear_2829_1" [id=2556, type=DequantizeLinear]; +"2557 Shape_2335" [id=2557, type=Shape]; +"2558 Constant_2336" [id=2558, type=Constant]; +"2559 Gather_2337" [id=2559, type=Gather]; +"2560 Shape_2338" [id=2560, type=Shape]; +"2561 Constant_2339" [id=2561, type=Constant]; +"2562 Gather_2340" [id=2562, type=Gather]; +"2563 Shape_2341" [id=2563, type=Shape]; +"2564 Constant_2342" [id=2564, type=Constant]; +"2565 Gather_2343" [id=2565, type=Gather]; +"2566 Unsqueeze_2344" [id=2566, type=Unsqueeze]; +"2567 Concat_2345" [id=2567, type=Concat]; +"2568 Reshape_2346" [id=2568, type=Reshape]; +"2569 QuantizeLinear_h.10.mlp.c_proj.weight_1" [id=2569, type=QuantizeLinear]; +"2570 DequantizeLinear_h.10.mlp.c_proj.weight_1" [id=2570, type=DequantizeLinear]; +"2571 Gemm_2347" [id=2571, type=Gemm]; +"2572 Unsqueeze_2348" [id=2572, type=Unsqueeze]; +"2573 Unsqueeze_2349" [id=2573, type=Unsqueeze]; +"2574 Concat_2350" [id=2574, type=Concat]; +"2575 Reshape_2351" [id=2575, type=Reshape]; +"2576 Add_2352" [id=2576, type=Add]; +"2577 ReduceMean_2353" [id=2577, type=ReduceMean]; +"2578 Sub_2354" [id=2578, type=Sub]; +"2579 Constant_2355" [id=2579, type=Constant]; +"2580 Pow_2356" [id=2580, type=Pow]; +"2581 ReduceMean_2357" [id=2581, type=ReduceMean]; +"2582 Constant_2358" [id=2582, type=Constant]; +"2583 Add_2359" [id=2583, type=Add]; +"2584 Sqrt_2360" [id=2584, type=Sqrt]; +"2585 Div_2361" [id=2585, type=Div]; +"2586 Mul_2362" [id=2586, type=Mul]; +"2587 Add_2363" [id=2587, type=Add]; +"2588 QuantizeLinear_2862_1" [id=2588, type=QuantizeLinear]; +"2589 DequantizeLinear_2862_1" [id=2589, type=DequantizeLinear]; +"2590 Shape_2364" [id=2590, type=Shape]; +"2591 Constant_2365" [id=2591, type=Constant]; +"2592 Gather_2366" [id=2592, type=Gather]; +"2593 Shape_2367" [id=2593, type=Shape]; +"2594 Constant_2368" [id=2594, type=Constant]; +"2595 Gather_2369" [id=2595, type=Gather]; +"2596 Shape_2370" [id=2596, type=Shape]; +"2597 Constant_2371" [id=2597, type=Constant]; +"2598 Gather_2372" [id=2598, type=Gather]; +"2599 Unsqueeze_2373" [id=2599, type=Unsqueeze]; +"2600 Concat_2374" [id=2600, type=Concat]; +"2601 Reshape_2375" [id=2601, type=Reshape]; +"2602 QuantizeLinear_h.11.attn.c_attn.weight_1" [id=2602, type=QuantizeLinear]; +"2603 DequantizeLinear_h.11.attn.c_attn.weight_1" 
[id=2603, type=DequantizeLinear]; +"2604 Gemm_2376" [id=2604, type=Gemm]; +"2605 Unsqueeze_2377" [id=2605, type=Unsqueeze]; +"2606 Unsqueeze_2378" [id=2606, type=Unsqueeze]; +"2607 Concat_2379" [id=2607, type=Concat]; +"2608 Reshape_2380" [id=2608, type=Reshape]; +"2609 Split_2381" [id=2609, type=Split]; +"2610 QuantizeLinear_query.23_1" [id=2610, type=QuantizeLinear]; +"2611 DequantizeLinear_query.23_1" [id=2611, type=DequantizeLinear]; +"2612 Shape_2382" [id=2612, type=Shape]; +"2613 Constant_2383" [id=2613, type=Constant]; +"2614 Gather_2384" [id=2614, type=Gather]; +"2615 Shape_2385" [id=2615, type=Shape]; +"2616 Constant_2386" [id=2616, type=Constant]; +"2617 Gather_2387" [id=2617, type=Gather]; +"2618 Shape_2388" [id=2618, type=Shape]; +"2619 Constant_2389" [id=2619, type=Constant]; +"2620 Gather_2390" [id=2620, type=Gather]; +"2621 Constant_2391" [id=2621, type=Constant]; +"2622 Div_2392" [id=2622, type=Div]; +"2623 Cast_2393" [id=2623, type=Cast]; +"2624 Cast_2394" [id=2624, type=Cast]; +"2625 Unsqueeze_2395" [id=2625, type=Unsqueeze]; +"2626 Unsqueeze_2396" [id=2626, type=Unsqueeze]; +"2627 Unsqueeze_2397" [id=2627, type=Unsqueeze]; +"2628 Concat_2398" [id=2628, type=Concat]; +"2629 Reshape_2399" [id=2629, type=Reshape]; +"2630 Transpose_2400" [id=2630, type=Transpose]; +"2631 Shape_2401" [id=2631, type=Shape]; +"2632 Constant_2402" [id=2632, type=Constant]; +"2633 Gather_2403" [id=2633, type=Gather]; +"2634 Shape_2404" [id=2634, type=Shape]; +"2635 Constant_2405" [id=2635, type=Constant]; +"2636 Gather_2406" [id=2636, type=Gather]; +"2637 Shape_2407" [id=2637, type=Shape]; +"2638 Constant_2408" [id=2638, type=Constant]; +"2639 Gather_2409" [id=2639, type=Gather]; +"2640 Constant_2410" [id=2640, type=Constant]; +"2641 Div_2411" [id=2641, type=Div]; +"2642 Cast_2412" [id=2642, type=Cast]; +"2643 Cast_2413" [id=2643, type=Cast]; +"2644 Unsqueeze_2414" [id=2644, type=Unsqueeze]; +"2645 Unsqueeze_2415" [id=2645, type=Unsqueeze]; +"2646 Unsqueeze_2416" [id=2646, type=Unsqueeze]; +"2647 Concat_2417" [id=2647, type=Concat]; +"2648 Reshape_2418" [id=2648, type=Reshape]; +"2649 QuantizeLinear_2927_1" [id=2649, type=QuantizeLinear]; +"2650 DequantizeLinear_2927_1" [id=2650, type=DequantizeLinear]; +"2651 Transpose_2419" [id=2651, type=Transpose]; +"2652 Shape_2420" [id=2652, type=Shape]; +"2653 Constant_2421" [id=2653, type=Constant]; +"2654 Gather_2422" [id=2654, type=Gather]; +"2655 Shape_2423" [id=2655, type=Shape]; +"2656 Constant_2424" [id=2656, type=Constant]; +"2657 Gather_2425" [id=2657, type=Gather]; +"2658 Shape_2426" [id=2658, type=Shape]; +"2659 Constant_2427" [id=2659, type=Constant]; +"2660 Gather_2428" [id=2660, type=Gather]; +"2661 Constant_2429" [id=2661, type=Constant]; +"2662 Div_2430" [id=2662, type=Div]; +"2663 Cast_2431" [id=2663, type=Cast]; +"2664 Cast_2432" [id=2664, type=Cast]; +"2665 Unsqueeze_2433" [id=2665, type=Unsqueeze]; +"2666 Unsqueeze_2434" [id=2666, type=Unsqueeze]; +"2667 Unsqueeze_2435" [id=2667, type=Unsqueeze]; +"2668 Concat_2436" [id=2668, type=Concat]; +"2669 Reshape_2437" [id=2669, type=Reshape]; +"2670 Transpose_2438" [id=2670, type=Transpose]; +"2671 Transpose_2439" [id=2671, type=Transpose]; +"2672 Unsqueeze_2440" [id=2672, type=Unsqueeze]; +"2673 Unsqueeze_2441" [id=2673, type=Unsqueeze]; +"2674 Concat_2442" [id=2674, type=Concat]; +"2675 MatMul_2443" [id=2675, type=MatMul]; +"2676 Constant_2444" [id=2676, type=Constant]; +"2677 Div_2445" [id=2677, type=Div]; +"2678 Shape_2446" [id=2678, type=Shape]; +"2679 Constant_2447" [id=2679, 
type=Constant]; +"2680 Gather_2448" [id=2680, type=Gather]; +"2681 Shape_2449" [id=2681, type=Shape]; +"2682 Constant_2450" [id=2682, type=Constant]; +"2683 Gather_2451" [id=2683, type=Gather]; +"2684 Sub_2452" [id=2684, type=Sub]; +"2685 Unsqueeze_2453" [id=2685, type=Unsqueeze]; +"2686 Unsqueeze_2454" [id=2686, type=Unsqueeze]; +"2687 Constant_2455" [id=2687, type=Constant]; +"2688 Slice_2456" [id=2688, type=Slice]; +"2689 Unsqueeze_2457" [id=2689, type=Unsqueeze]; +"2690 Constant_2458" [id=2690, type=Constant]; +"2691 Slice_2459" [id=2691, type=Slice]; +"2692 Mul_2460" [id=2692, type=Mul]; +"2693 Constant_2461" [id=2693, type=Constant]; +"2694 Sub_2462" [id=2694, type=Sub]; +"2695 Constant_2463" [id=2695, type=Constant]; +"2696 Mul_2464" [id=2696, type=Mul]; +"2697 Sub_2465" [id=2697, type=Sub]; +"2698 Softmax_2466" [id=2698, type=Softmax]; +"2699 MatMul_2467" [id=2699, type=MatMul]; +"2700 QuantizeLinear_2984_1" [id=2700, type=QuantizeLinear]; +"2701 DequantizeLinear_2984_1" [id=2701, type=DequantizeLinear]; +"2702 Transpose_2468" [id=2702, type=Transpose]; +"2703 Shape_2469" [id=2703, type=Shape]; +"2704 Constant_2470" [id=2704, type=Constant]; +"2705 Gather_2471" [id=2705, type=Gather]; +"2706 Shape_2472" [id=2706, type=Shape]; +"2707 Constant_2473" [id=2707, type=Constant]; +"2708 Gather_2474" [id=2708, type=Gather]; +"2709 Shape_2475" [id=2709, type=Shape]; +"2710 Constant_2476" [id=2710, type=Constant]; +"2711 Gather_2477" [id=2711, type=Gather]; +"2712 Shape_2478" [id=2712, type=Shape]; +"2713 Constant_2479" [id=2713, type=Constant]; +"2714 Gather_2480" [id=2714, type=Gather]; +"2715 Mul_2481" [id=2715, type=Mul]; +"2716 Unsqueeze_2482" [id=2716, type=Unsqueeze]; +"2717 Unsqueeze_2483" [id=2717, type=Unsqueeze]; +"2718 Unsqueeze_2484" [id=2718, type=Unsqueeze]; +"2719 Concat_2485" [id=2719, type=Concat]; +"2720 Reshape_2486" [id=2720, type=Reshape]; +"2721 Shape_2487" [id=2721, type=Shape]; +"2722 Constant_2488" [id=2722, type=Constant]; +"2723 Gather_2489" [id=2723, type=Gather]; +"2724 Shape_2490" [id=2724, type=Shape]; +"2725 Constant_2491" [id=2725, type=Constant]; +"2726 Gather_2492" [id=2726, type=Gather]; +"2727 Shape_2493" [id=2727, type=Shape]; +"2728 Constant_2494" [id=2728, type=Constant]; +"2729 Gather_2495" [id=2729, type=Gather]; +"2730 Unsqueeze_2496" [id=2730, type=Unsqueeze]; +"2731 Concat_2497" [id=2731, type=Concat]; +"2732 Reshape_2498" [id=2732, type=Reshape]; +"2733 QuantizeLinear_h.11.attn.c_proj.weight_1" [id=2733, type=QuantizeLinear]; +"2734 DequantizeLinear_h.11.attn.c_proj.weight_1" [id=2734, type=DequantizeLinear]; +"2735 Gemm_2499" [id=2735, type=Gemm]; +"2736 Unsqueeze_2500" [id=2736, type=Unsqueeze]; +"2737 Unsqueeze_2501" [id=2737, type=Unsqueeze]; +"2738 Concat_2502" [id=2738, type=Concat]; +"2739 Reshape_2503" [id=2739, type=Reshape]; +"2740 Add_2504" [id=2740, type=Add]; +"2741 ReduceMean_2505" [id=2741, type=ReduceMean]; +"2742 Sub_2506" [id=2742, type=Sub]; +"2743 Constant_2507" [id=2743, type=Constant]; +"2744 Pow_2508" [id=2744, type=Pow]; +"2745 ReduceMean_2509" [id=2745, type=ReduceMean]; +"2746 Constant_2510" [id=2746, type=Constant]; +"2747 Add_2511" [id=2747, type=Add]; +"2748 Sqrt_2512" [id=2748, type=Sqrt]; +"2749 Div_2513" [id=2749, type=Div]; +"2750 Mul_2514" [id=2750, type=Mul]; +"2751 Add_2515" [id=2751, type=Add]; +"2752 QuantizeLinear_3036_1" [id=2752, type=QuantizeLinear]; +"2753 DequantizeLinear_3036_1" [id=2753, type=DequantizeLinear]; +"2754 Shape_2516" [id=2754, type=Shape]; +"2755 Constant_2517" [id=2755, type=Constant]; 
+"2756 Gather_2518" [id=2756, type=Gather]; +"2757 Shape_2519" [id=2757, type=Shape]; +"2758 Constant_2520" [id=2758, type=Constant]; +"2759 Gather_2521" [id=2759, type=Gather]; +"2760 Shape_2522" [id=2760, type=Shape]; +"2761 Constant_2523" [id=2761, type=Constant]; +"2762 Gather_2524" [id=2762, type=Gather]; +"2763 Unsqueeze_2525" [id=2763, type=Unsqueeze]; +"2764 Concat_2526" [id=2764, type=Concat]; +"2765 Reshape_2527" [id=2765, type=Reshape]; +"2766 QuantizeLinear_h.11.mlp.c_fc.weight_1" [id=2766, type=QuantizeLinear]; +"2767 DequantizeLinear_h.11.mlp.c_fc.weight_1" [id=2767, type=DequantizeLinear]; +"2768 Gemm_2528" [id=2768, type=Gemm]; +"2769 Unsqueeze_2529" [id=2769, type=Unsqueeze]; +"2770 Unsqueeze_2530" [id=2770, type=Unsqueeze]; +"2771 Concat_2531" [id=2771, type=Concat]; +"2772 Reshape_2532" [id=2772, type=Reshape]; +"2773 Constant_2533" [id=2773, type=Constant]; +"2774 Mul_2534" [id=2774, type=Mul]; +"2775 Constant_2535" [id=2775, type=Constant]; +"2776 Pow_2536" [id=2776, type=Pow]; +"2777 Constant_2537" [id=2777, type=Constant]; +"2778 Mul_2538" [id=2778, type=Mul]; +"2779 Add_2539" [id=2779, type=Add]; +"2780 Constant_2540" [id=2780, type=Constant]; +"2781 Mul_2541" [id=2781, type=Mul]; +"2782 Tanh_2542" [id=2782, type=Tanh]; +"2783 Constant_2543" [id=2783, type=Constant]; +"2784 Add_2544" [id=2784, type=Add]; +"2785 Mul_2545" [id=2785, type=Mul]; +"2786 QuantizeLinear_3070_1" [id=2786, type=QuantizeLinear]; +"2787 DequantizeLinear_3070_1" [id=2787, type=DequantizeLinear]; +"2788 Shape_2546" [id=2788, type=Shape]; +"2789 Constant_2547" [id=2789, type=Constant]; +"2790 Gather_2548" [id=2790, type=Gather]; +"2791 Shape_2549" [id=2791, type=Shape]; +"2792 Constant_2550" [id=2792, type=Constant]; +"2793 Gather_2551" [id=2793, type=Gather]; +"2794 Shape_2552" [id=2794, type=Shape]; +"2795 Constant_2553" [id=2795, type=Constant]; +"2796 Gather_2554" [id=2796, type=Gather]; +"2797 Unsqueeze_2555" [id=2797, type=Unsqueeze]; +"2798 Concat_2556" [id=2798, type=Concat]; +"2799 Reshape_2557" [id=2799, type=Reshape]; +"2800 QuantizeLinear_h.11.mlp.c_proj.weight_1" [id=2800, type=QuantizeLinear]; +"2801 DequantizeLinear_h.11.mlp.c_proj.weight_1" [id=2801, type=DequantizeLinear]; +"2802 Gemm_2558" [id=2802, type=Gemm]; +"2803 Unsqueeze_2559" [id=2803, type=Unsqueeze]; +"2804 Unsqueeze_2560" [id=2804, type=Unsqueeze]; +"2805 Concat_2561" [id=2805, type=Concat]; +"2806 Reshape_2562" [id=2806, type=Reshape]; +"2807 Add_2563" [id=2807, type=Add]; +"2808 ReduceMean_2564" [id=2808, type=ReduceMean]; +"2809 Sub_2565" [id=2809, type=Sub]; +"2810 Constant_2566" [id=2810, type=Constant]; +"2811 Pow_2567" [id=2811, type=Pow]; +"2812 ReduceMean_2568" [id=2812, type=ReduceMean]; +"2813 Constant_2569" [id=2813, type=Constant]; +"2814 Add_2570" [id=2814, type=Add]; +"2815 Sqrt_2571" [id=2815, type=Sqrt]; +"2816 Div_2572" [id=2816, type=Div]; +"2817 Mul_2573" [id=2817, type=Mul]; +"2818 Add_2574" [id=2818, type=Add]; +"2819 Unsqueeze_2575" [id=2819, type=Unsqueeze]; +"2820 Unsqueeze_2576" [id=2820, type=Unsqueeze]; +"2821 Unsqueeze_2577" [id=2821, type=Unsqueeze]; +"2822 Unsqueeze_2578" [id=2822, type=Unsqueeze]; +"2823 Concat_2579" [id=2823, type=Concat]; +"2824 Reshape_2580" [id=2824, type=Reshape]; +"2825 nncf_model_input_0" [id=2825, type=nncf_model_input]; +"2826 nncf_model_output_0" [id=2826, type=nncf_model_output]; +"2827 nncf_model_output_1" [id=2827, type=nncf_model_output]; +"2828 nncf_model_output_2" [id=2828, type=nncf_model_output]; +"2829 nncf_model_output_3" [id=2829, 
type=nncf_model_output]; +"2830 nncf_model_output_4" [id=2830, type=nncf_model_output]; +"2831 nncf_model_output_5" [id=2831, type=nncf_model_output]; +"2832 nncf_model_output_6" [id=2832, type=nncf_model_output]; +"2833 nncf_model_output_7" [id=2833, type=nncf_model_output]; +"2834 nncf_model_output_8" [id=2834, type=nncf_model_output]; +"2835 nncf_model_output_9" [id=2835, type=nncf_model_output]; +"2836 nncf_model_output_10" [id=2836, type=nncf_model_output]; +"2837 nncf_model_output_11" [id=2837, type=nncf_model_output]; +"2838 nncf_model_output_12" [id=2838, type=nncf_model_output]; +"0 Shape_0" -> "2 Gather_2" [label="[3]", style=dashed]; +"1 Constant_1" -> "2 Gather_2" [label="[]", style=dashed]; +"2 Gather_2" -> "2819 Unsqueeze_2575" [label="[]", style=dashed]; +"3 Shape_3" -> "5 Gather_5" [label="[3]", style=dashed]; +"4 Constant_4" -> "5 Gather_5" [label="[]", style=dashed]; +"5 Gather_5" -> "2820 Unsqueeze_2576" [label="[]", style=dashed]; +"6 Shape_6" -> "8 Gather_8" [label="[3]", style=dashed]; +"7 Constant_7" -> "8 Gather_8" [label="[]", style=dashed]; +"8 Gather_8" -> "9 Unsqueeze_9" [label="[]", style=dashed]; +"8 Gather_8" -> "12 Unsqueeze_12" [label="[]", style=dashed]; +"8 Gather_8" -> "23 Unsqueeze_23" [label="[]", style=dashed]; +"8 Gather_8" -> "2821 Unsqueeze_2577" [label="[]", style=dashed]; +"9 Unsqueeze_9" -> "10 Concat_10" [label="[1]", style=dashed]; +"10 Concat_10" -> "11 Reshape_11" [label="[2]", style=dashed]; +"11 Reshape_11" -> "28 Gather_26" [label="[]", style=dashed]; +"12 Unsqueeze_12" -> "13 Sub_13" [label="[1]", style=dashed]; +"13 Sub_13" -> "14 Div_14" [label="[1]", style=dashed]; +"14 Div_14" -> "15 ConstantOfShape_15" [label="[1]", style=dashed]; +"15 ConstantOfShape_15" -> "16 NonZero_16" [label="[-1]", style=dashed]; +"16 NonZero_16" -> "17 Transpose_17" [label="[1, -1]", style=dashed]; +"17 Transpose_17" -> "18 Squeeze_18" [label="[-1, 1]", style=dashed]; +"18 Squeeze_18" -> "19 Mul_19" [label="[-1]", style=dashed]; +"19 Mul_19" -> "20 Add_20" [label="[-1]", style=dashed]; +"20 Add_20" -> "21 Cast_21" [label="[-1]", style=dashed]; +"21 Cast_21" -> "22 Unsqueeze_22" [label="[-1]", style=dashed]; +"22 Unsqueeze_22" -> "25 Reshape_25" [label="[1, -1]", style=dashed]; +"23 Unsqueeze_23" -> "24 Concat_24" [label="[1]", style=dashed]; +"24 Concat_24" -> "25 Reshape_25" [label="[2]", style=dashed]; +"25 Reshape_25" -> "31 Gather_27" [label="[]", style=dashed]; +"26 QuantizeLinear_wte.weight_1" -> "27 DequantizeLinear_wte.weight_1" [label="[50257, 768]", style=dashed]; +"27 DequantizeLinear_wte.weight_1" -> "28 Gather_26" [label="[50257, 768]", style=solid]; +"28 Gather_26" -> "32 Add_28" [label="[]", style=solid]; +"29 QuantizeLinear_wpe.weight_1" -> "30 DequantizeLinear_wpe.weight_1" [label="[1024, 768]", style=dashed]; +"30 DequantizeLinear_wpe.weight_1" -> "31 Gather_27" [label="[1024, 768]", style=solid]; +"31 Gather_27" -> "32 Add_28" [label="[]", style=solid]; +"32 Add_28" -> "33 Shape_29" [label="[]", style=solid]; +"32 Add_28" -> "36 ReduceMean_32" [label="[]", style=solid]; +"32 Add_28" -> "37 Sub_33" [label="[]", style=solid]; +"32 Add_28" -> "199 Add_183" [label="[]", style=solid]; +"33 Shape_29" -> "35 Gather_31" [label="[-1]", style=dashed]; +"34 Constant_30" -> "35 Gather_31" [label="[]", style=dashed]; +"35 Gather_31" -> "2822 Unsqueeze_2578" [label="[]", style=dashed]; +"36 ReduceMean_32" -> "37 Sub_33" [label="[]", style=solid]; +"37 Sub_33" -> "39 Pow_35" [label="[]", style=solid]; +"37 Sub_33" -> "44 Div_40" [label="[]", 
style=solid]; +"38 Constant_34" -> "39 Pow_35" [label="[]", style=solid]; +"39 Pow_35" -> "40 ReduceMean_36" [label="[]", style=solid]; +"40 ReduceMean_36" -> "42 Add_38" [label="[]", style=solid]; +"41 Constant_37" -> "42 Add_38" [label="[]", style=solid]; +"42 Add_38" -> "43 Sqrt_39" [label="[]", style=solid]; +"43 Sqrt_39" -> "44 Div_40" [label="[]", style=solid]; +"44 Div_40" -> "45 Mul_41" [label="[]", style=solid]; +"45 Mul_41" -> "46 Add_42" [label="[]", style=solid]; +"46 Add_42" -> "47 QuantizeLinear_211_1" [label="[]", style=solid]; +"47 QuantizeLinear_211_1" -> "48 DequantizeLinear_211_1" [label="[]", style=dashed]; +"48 DequantizeLinear_211_1" -> "49 Shape_43" [label="[]", style=solid]; +"48 DequantizeLinear_211_1" -> "52 Shape_46" [label="[]", style=solid]; +"48 DequantizeLinear_211_1" -> "55 Shape_49" [label="[]", style=solid]; +"48 DequantizeLinear_211_1" -> "60 Reshape_54" [label="[]", style=solid]; +"49 Shape_43" -> "51 Gather_45" [label="[-1]", style=dashed]; +"50 Constant_44" -> "51 Gather_45" [label="[]", style=dashed]; +"51 Gather_45" -> "64 Unsqueeze_56" [label="[]", style=dashed]; +"52 Shape_46" -> "54 Gather_48" [label="[-1]", style=dashed]; +"53 Constant_47" -> "54 Gather_48" [label="[]", style=dashed]; +"54 Gather_48" -> "65 Unsqueeze_57" [label="[]", style=dashed]; +"55 Shape_49" -> "57 Gather_51" [label="[-1]", style=dashed]; +"56 Constant_50" -> "57 Gather_51" [label="[]", style=dashed]; +"57 Gather_51" -> "58 Unsqueeze_52" [label="[]", style=dashed]; +"58 Unsqueeze_52" -> "59 Concat_53" [label="[1]", style=dashed]; +"59 Concat_53" -> "60 Reshape_54" [label="[2]", style=dashed]; +"60 Reshape_54" -> "63 Gemm_55" [label="[]", style=solid]; +"61 QuantizeLinear_h.0.attn.c_attn.weight_1" -> "62 DequantizeLinear_h.0.attn.c_attn.weight_1" [label="[768, 2304]", style=dashed]; +"62 DequantizeLinear_h.0.attn.c_attn.weight_1" -> "63 Gemm_55" [label="[768, 2304]", style=solid]; +"63 Gemm_55" -> "67 Reshape_59" [label="[]", style=solid]; +"64 Unsqueeze_56" -> "66 Concat_58" [label="[1]", style=dashed]; +"65 Unsqueeze_57" -> "66 Concat_58" [label="[1]", style=dashed]; +"66 Concat_58" -> "67 Reshape_59" [label="[3]", style=dashed]; +"67 Reshape_59" -> "68 Split_60" [label="[]", style=solid]; +"68 Split_60" -> "69 QuantizeLinear_query.1_1" [label="[]", style=solid]; +"68 Split_60" -> "71 Shape_61" [label="[]", style=solid]; +"68 Split_60" -> "74 Shape_64" [label="[]", style=solid]; +"68 Split_60" -> "77 Shape_67" [label="[]", style=solid]; +"68 Split_60" -> "90 Shape_80" [label="[]", style=solid]; +"68 Split_60" -> "93 Shape_83" [label="[]", style=solid]; +"68 Split_60" -> "96 Shape_86" [label="[]", style=solid]; +"68 Split_60" -> "107 Reshape_97" [label="[]", style=solid]; +"68 Split_60" -> "111 Shape_99" [label="[]", style=solid]; +"68 Split_60" -> "114 Shape_102" [label="[]", style=solid]; +"68 Split_60" -> "117 Shape_105" [label="[]", style=solid]; +"68 Split_60" -> "128 Reshape_116" [label="[]", style=solid]; +"69 QuantizeLinear_query.1_1" -> "70 DequantizeLinear_query.1_1" [label="[]", style=dashed]; +"70 DequantizeLinear_query.1_1" -> "88 Reshape_78" [label="[]", style=solid]; +"71 Shape_61" -> "73 Gather_63" [label="[-1]", style=dashed]; +"72 Constant_62" -> "73 Gather_63" [label="[]", style=dashed]; +"73 Gather_63" -> "84 Unsqueeze_74" [label="[]", style=dashed]; +"74 Shape_64" -> "76 Gather_66" [label="[-1]", style=dashed]; +"75 Constant_65" -> "76 Gather_66" [label="[]", style=dashed]; +"76 Gather_66" -> "85 Unsqueeze_75" [label="[]", style=dashed]; +"77 Shape_67" 
-> "79 Gather_69" [label="[-1]", style=dashed]; +"78 Constant_68" -> "79 Gather_69" [label="[]", style=dashed]; +"79 Gather_69" -> "81 Div_71" [label="[]", style=dashed]; +"80 Constant_70" -> "81 Div_71" [label="[]", style=dashed]; +"81 Div_71" -> "82 Cast_72" [label="[]", style=dashed]; +"82 Cast_72" -> "83 Cast_73" [label="[]", style=dashed]; +"83 Cast_73" -> "86 Unsqueeze_76" [label="[]", style=dashed]; +"84 Unsqueeze_74" -> "87 Concat_77" [label="[1]", style=dashed]; +"85 Unsqueeze_75" -> "87 Concat_77" [label="[1]", style=dashed]; +"86 Unsqueeze_76" -> "87 Concat_77" [label="[1]", style=dashed]; +"87 Concat_77" -> "88 Reshape_78" [label="[4]", style=dashed]; +"88 Reshape_78" -> "89 Transpose_79" [label="[]", style=solid]; +"89 Transpose_79" -> "134 MatMul_122" [label="[]", style=solid]; +"90 Shape_80" -> "92 Gather_82" [label="[-1]", style=dashed]; +"91 Constant_81" -> "92 Gather_82" [label="[]", style=dashed]; +"92 Gather_82" -> "103 Unsqueeze_93" [label="[]", style=dashed]; +"93 Shape_83" -> "95 Gather_85" [label="[-1]", style=dashed]; +"94 Constant_84" -> "95 Gather_85" [label="[]", style=dashed]; +"95 Gather_85" -> "104 Unsqueeze_94" [label="[]", style=dashed]; +"96 Shape_86" -> "98 Gather_88" [label="[-1]", style=dashed]; +"97 Constant_87" -> "98 Gather_88" [label="[]", style=dashed]; +"98 Gather_88" -> "100 Div_90" [label="[]", style=dashed]; +"99 Constant_89" -> "100 Div_90" [label="[]", style=dashed]; +"100 Div_90" -> "101 Cast_91" [label="[]", style=dashed]; +"101 Cast_91" -> "102 Cast_92" [label="[]", style=dashed]; +"102 Cast_92" -> "105 Unsqueeze_95" [label="[]", style=dashed]; +"103 Unsqueeze_93" -> "106 Concat_96" [label="[1]", style=dashed]; +"104 Unsqueeze_94" -> "106 Concat_96" [label="[1]", style=dashed]; +"105 Unsqueeze_95" -> "106 Concat_96" [label="[1]", style=dashed]; +"106 Concat_96" -> "107 Reshape_97" [label="[4]", style=dashed]; +"107 Reshape_97" -> "108 QuantizeLinear_276_1" [label="[]", style=solid]; +"107 Reshape_97" -> "130 Transpose_118" [label="[]", style=solid]; +"108 QuantizeLinear_276_1" -> "109 DequantizeLinear_276_1" [label="[]", style=dashed]; +"109 DequantizeLinear_276_1" -> "110 Transpose_98" [label="[]", style=solid]; +"110 Transpose_98" -> "134 MatMul_122" [label="[]", style=solid]; +"111 Shape_99" -> "113 Gather_101" [label="[-1]", style=dashed]; +"112 Constant_100" -> "113 Gather_101" [label="[]", style=dashed]; +"113 Gather_101" -> "124 Unsqueeze_112" [label="[]", style=dashed]; +"114 Shape_102" -> "116 Gather_104" [label="[-1]", style=dashed]; +"115 Constant_103" -> "116 Gather_104" [label="[]", style=dashed]; +"116 Gather_104" -> "125 Unsqueeze_113" [label="[]", style=dashed]; +"117 Shape_105" -> "119 Gather_107" [label="[-1]", style=dashed]; +"118 Constant_106" -> "119 Gather_107" [label="[]", style=dashed]; +"119 Gather_107" -> "121 Div_109" [label="[]", style=dashed]; +"120 Constant_108" -> "121 Div_109" [label="[]", style=dashed]; +"121 Div_109" -> "122 Cast_110" [label="[]", style=dashed]; +"122 Cast_110" -> "123 Cast_111" [label="[]", style=dashed]; +"123 Cast_111" -> "126 Unsqueeze_114" [label="[]", style=dashed]; +"124 Unsqueeze_112" -> "127 Concat_115" [label="[1]", style=dashed]; +"125 Unsqueeze_113" -> "127 Concat_115" [label="[1]", style=dashed]; +"126 Unsqueeze_114" -> "127 Concat_115" [label="[1]", style=dashed]; +"127 Concat_115" -> "128 Reshape_116" [label="[4]", style=dashed]; +"128 Reshape_116" -> "129 Transpose_117" [label="[]", style=solid]; +"129 Transpose_117" -> "132 Unsqueeze_120" [label="[]", style=solid]; +"129 
Transpose_117" -> "158 MatMul_146" [label="[]", style=solid]; +"130 Transpose_118" -> "131 Unsqueeze_119" [label="[]", style=solid]; +"131 Unsqueeze_119" -> "133 Concat_121" [label="[]", style=solid]; +"132 Unsqueeze_120" -> "133 Concat_121" [label="[]", style=solid]; +"133 Concat_121" -> "2827 nncf_model_output_1" [label="[2, 1, 12, 8, 64]", style=solid]; +"134 MatMul_122" -> "136 Div_124" [label="[]", style=solid]; +"135 Constant_123" -> "136 Div_124" [label="[]", style=solid]; +"136 Div_124" -> "137 Shape_125" [label="[]", style=solid]; +"136 Div_124" -> "140 Shape_128" [label="[]", style=solid]; +"136 Div_124" -> "151 Mul_139" [label="[]", style=solid]; +"137 Shape_125" -> "139 Gather_127" [label="[-1]", style=dashed]; +"138 Constant_126" -> "139 Gather_127" [label="[]", style=dashed]; +"139 Gather_127" -> "143 Sub_131" [label="[]", style=dashed]; +"140 Shape_128" -> "142 Gather_130" [label="[-1]", style=dashed]; +"141 Constant_129" -> "142 Gather_130" [label="[]", style=dashed]; +"142 Gather_130" -> "143 Sub_131" [label="[]", style=dashed]; +"142 Gather_130" -> "145 Unsqueeze_133" [label="[]", style=dashed]; +"142 Gather_130" -> "148 Unsqueeze_136" [label="[]", style=dashed]; +"143 Sub_131" -> "144 Unsqueeze_132" [label="[]", style=dashed]; +"144 Unsqueeze_132" -> "147 Slice_135" [label="[1]", style=dashed]; +"145 Unsqueeze_133" -> "147 Slice_135" [label="[1]", style=dashed]; +"146 Constant_134" -> "147 Slice_135" [label="[1]", style=dashed]; +"147 Slice_135" -> "150 Slice_138" [label="[]", style=solid]; +"148 Unsqueeze_136" -> "150 Slice_138" [label="[1]", style=dashed]; +"149 Constant_137" -> "150 Slice_138" [label="[1]", style=dashed]; +"150 Slice_138" -> "151 Mul_139" [label="[]", style=solid]; +"150 Slice_138" -> "153 Sub_141" [label="[]", style=solid]; +"151 Mul_139" -> "156 Sub_144" [label="[]", style=solid]; +"152 Constant_140" -> "153 Sub_141" [label="[]", style=solid]; +"153 Sub_141" -> "155 Mul_143" [label="[]", style=solid]; +"154 Constant_142" -> "155 Mul_143" [label="[]", style=solid]; +"155 Mul_143" -> "156 Sub_144" [label="[]", style=solid]; +"156 Sub_144" -> "157 Softmax_145" [label="[]", style=solid]; +"157 Softmax_145" -> "158 MatMul_146" [label="[]", style=solid]; +"158 MatMul_146" -> "159 QuantizeLinear_333_1" [label="[]", style=solid]; +"159 QuantizeLinear_333_1" -> "160 DequantizeLinear_333_1" [label="[]", style=dashed]; +"160 DequantizeLinear_333_1" -> "161 Transpose_147" [label="[]", style=solid]; +"161 Transpose_147" -> "162 Shape_148" [label="[]", style=solid]; +"161 Transpose_147" -> "165 Shape_151" [label="[]", style=solid]; +"161 Transpose_147" -> "168 Shape_154" [label="[]", style=solid]; +"161 Transpose_147" -> "171 Shape_157" [label="[]", style=solid]; +"161 Transpose_147" -> "179 Reshape_165" [label="[]", style=solid]; +"162 Shape_148" -> "164 Gather_150" [label="[-1]", style=dashed]; +"163 Constant_149" -> "164 Gather_150" [label="[]", style=dashed]; +"164 Gather_150" -> "175 Unsqueeze_161" [label="[]", style=dashed]; +"165 Shape_151" -> "167 Gather_153" [label="[-1]", style=dashed]; +"166 Constant_152" -> "167 Gather_153" [label="[]", style=dashed]; +"167 Gather_153" -> "176 Unsqueeze_162" [label="[]", style=dashed]; +"168 Shape_154" -> "170 Gather_156" [label="[-1]", style=dashed]; +"169 Constant_155" -> "170 Gather_156" [label="[]", style=dashed]; +"170 Gather_156" -> "174 Mul_160" [label="[]", style=dashed]; +"171 Shape_157" -> "173 Gather_159" [label="[-1]", style=dashed]; +"172 Constant_158" -> "173 Gather_159" [label="[]", style=dashed]; +"173 
Gather_159" -> "174 Mul_160" [label="[]", style=dashed]; +"174 Mul_160" -> "177 Unsqueeze_163" [label="[]", style=dashed]; +"175 Unsqueeze_161" -> "178 Concat_164" [label="[1]", style=dashed]; +"176 Unsqueeze_162" -> "178 Concat_164" [label="[1]", style=dashed]; +"177 Unsqueeze_163" -> "178 Concat_164" [label="[1]", style=dashed]; +"178 Concat_164" -> "179 Reshape_165" [label="[3]", style=dashed]; +"179 Reshape_165" -> "180 Shape_166" [label="[]", style=solid]; +"179 Reshape_165" -> "183 Shape_169" [label="[]", style=solid]; +"179 Reshape_165" -> "186 Shape_172" [label="[]", style=solid]; +"179 Reshape_165" -> "191 Reshape_177" [label="[]", style=solid]; +"180 Shape_166" -> "182 Gather_168" [label="[-1]", style=dashed]; +"181 Constant_167" -> "182 Gather_168" [label="[]", style=dashed]; +"182 Gather_168" -> "195 Unsqueeze_179" [label="[]", style=dashed]; +"183 Shape_169" -> "185 Gather_171" [label="[-1]", style=dashed]; +"184 Constant_170" -> "185 Gather_171" [label="[]", style=dashed]; +"185 Gather_171" -> "196 Unsqueeze_180" [label="[]", style=dashed]; +"186 Shape_172" -> "188 Gather_174" [label="[-1]", style=dashed]; +"187 Constant_173" -> "188 Gather_174" [label="[]", style=dashed]; +"188 Gather_174" -> "189 Unsqueeze_175" [label="[]", style=dashed]; +"189 Unsqueeze_175" -> "190 Concat_176" [label="[1]", style=dashed]; +"190 Concat_176" -> "191 Reshape_177" [label="[2]", style=dashed]; +"191 Reshape_177" -> "194 Gemm_178" [label="[]", style=solid]; +"192 QuantizeLinear_h.0.attn.c_proj.weight_1" -> "193 DequantizeLinear_h.0.attn.c_proj.weight_1" [label="[768, 768]", style=dashed]; +"193 DequantizeLinear_h.0.attn.c_proj.weight_1" -> "194 Gemm_178" [label="[768, 768]", style=solid]; +"194 Gemm_178" -> "198 Reshape_182" [label="[]", style=solid]; +"195 Unsqueeze_179" -> "197 Concat_181" [label="[1]", style=dashed]; +"196 Unsqueeze_180" -> "197 Concat_181" [label="[1]", style=dashed]; +"197 Concat_181" -> "198 Reshape_182" [label="[3]", style=dashed]; +"198 Reshape_182" -> "199 Add_183" [label="[]", style=solid]; +"199 Add_183" -> "200 ReduceMean_184" [label="[]", style=solid]; +"199 Add_183" -> "201 Sub_185" [label="[]", style=solid]; +"199 Add_183" -> "266 Add_242" [label="[]", style=solid]; +"200 ReduceMean_184" -> "201 Sub_185" [label="[]", style=solid]; +"201 Sub_185" -> "203 Pow_187" [label="[]", style=solid]; +"201 Sub_185" -> "208 Div_192" [label="[]", style=solid]; +"202 Constant_186" -> "203 Pow_187" [label="[]", style=solid]; +"203 Pow_187" -> "204 ReduceMean_188" [label="[]", style=solid]; +"204 ReduceMean_188" -> "206 Add_190" [label="[]", style=solid]; +"205 Constant_189" -> "206 Add_190" [label="[]", style=solid]; +"206 Add_190" -> "207 Sqrt_191" [label="[]", style=solid]; +"207 Sqrt_191" -> "208 Div_192" [label="[]", style=solid]; +"208 Div_192" -> "209 Mul_193" [label="[]", style=solid]; +"209 Mul_193" -> "210 Add_194" [label="[]", style=solid]; +"210 Add_194" -> "211 QuantizeLinear_385_1" [label="[]", style=solid]; +"211 QuantizeLinear_385_1" -> "212 DequantizeLinear_385_1" [label="[]", style=dashed]; +"212 DequantizeLinear_385_1" -> "213 Shape_195" [label="[]", style=solid]; +"212 DequantizeLinear_385_1" -> "216 Shape_198" [label="[]", style=solid]; +"212 DequantizeLinear_385_1" -> "219 Shape_201" [label="[]", style=solid]; +"212 DequantizeLinear_385_1" -> "224 Reshape_206" [label="[]", style=solid]; +"213 Shape_195" -> "215 Gather_197" [label="[-1]", style=dashed]; +"214 Constant_196" -> "215 Gather_197" [label="[]", style=dashed]; +"215 Gather_197" -> "228 
Unsqueeze_208" [label="[]", style=dashed]; +"216 Shape_198" -> "218 Gather_200" [label="[-1]", style=dashed]; +"217 Constant_199" -> "218 Gather_200" [label="[]", style=dashed]; +"218 Gather_200" -> "229 Unsqueeze_209" [label="[]", style=dashed]; +"219 Shape_201" -> "221 Gather_203" [label="[-1]", style=dashed]; +"220 Constant_202" -> "221 Gather_203" [label="[]", style=dashed]; +"221 Gather_203" -> "222 Unsqueeze_204" [label="[]", style=dashed]; +"222 Unsqueeze_204" -> "223 Concat_205" [label="[1]", style=dashed]; +"223 Concat_205" -> "224 Reshape_206" [label="[2]", style=dashed]; +"224 Reshape_206" -> "227 Gemm_207" [label="[]", style=solid]; +"225 QuantizeLinear_h.0.mlp.c_fc.weight_1" -> "226 DequantizeLinear_h.0.mlp.c_fc.weight_1" [label="[768, 3072]", style=dashed]; +"226 DequantizeLinear_h.0.mlp.c_fc.weight_1" -> "227 Gemm_207" [label="[768, 3072]", style=solid]; +"227 Gemm_207" -> "231 Reshape_211" [label="[]", style=solid]; +"228 Unsqueeze_208" -> "230 Concat_210" [label="[1]", style=dashed]; +"229 Unsqueeze_209" -> "230 Concat_210" [label="[1]", style=dashed]; +"230 Concat_210" -> "231 Reshape_211" [label="[3]", style=dashed]; +"231 Reshape_211" -> "233 Mul_213" [label="[]", style=solid]; +"231 Reshape_211" -> "235 Pow_215" [label="[]", style=solid]; +"231 Reshape_211" -> "238 Add_218" [label="[]", style=solid]; +"232 Constant_212" -> "233 Mul_213" [label="[]", style=solid]; +"233 Mul_213" -> "244 Mul_224" [label="[]", style=solid]; +"234 Constant_214" -> "235 Pow_215" [label="[]", style=solid]; +"235 Pow_215" -> "237 Mul_217" [label="[]", style=solid]; +"236 Constant_216" -> "237 Mul_217" [label="[]", style=solid]; +"237 Mul_217" -> "238 Add_218" [label="[]", style=solid]; +"238 Add_218" -> "240 Mul_220" [label="[]", style=solid]; +"239 Constant_219" -> "240 Mul_220" [label="[]", style=solid]; +"240 Mul_220" -> "241 Tanh_221" [label="[]", style=solid]; +"241 Tanh_221" -> "243 Add_223" [label="[]", style=solid]; +"242 Constant_222" -> "243 Add_223" [label="[]", style=solid]; +"243 Add_223" -> "244 Mul_224" [label="[]", style=solid]; +"244 Mul_224" -> "245 QuantizeLinear_419_1" [label="[]", style=solid]; +"245 QuantizeLinear_419_1" -> "246 DequantizeLinear_419_1" [label="[]", style=dashed]; +"246 DequantizeLinear_419_1" -> "247 Shape_225" [label="[]", style=solid]; +"246 DequantizeLinear_419_1" -> "250 Shape_228" [label="[]", style=solid]; +"246 DequantizeLinear_419_1" -> "253 Shape_231" [label="[]", style=solid]; +"246 DequantizeLinear_419_1" -> "258 Reshape_236" [label="[]", style=solid]; +"247 Shape_225" -> "249 Gather_227" [label="[-1]", style=dashed]; +"248 Constant_226" -> "249 Gather_227" [label="[]", style=dashed]; +"249 Gather_227" -> "262 Unsqueeze_238" [label="[]", style=dashed]; +"250 Shape_228" -> "252 Gather_230" [label="[-1]", style=dashed]; +"251 Constant_229" -> "252 Gather_230" [label="[]", style=dashed]; +"252 Gather_230" -> "263 Unsqueeze_239" [label="[]", style=dashed]; +"253 Shape_231" -> "255 Gather_233" [label="[-1]", style=dashed]; +"254 Constant_232" -> "255 Gather_233" [label="[]", style=dashed]; +"255 Gather_233" -> "256 Unsqueeze_234" [label="[]", style=dashed]; +"256 Unsqueeze_234" -> "257 Concat_235" [label="[1]", style=dashed]; +"257 Concat_235" -> "258 Reshape_236" [label="[2]", style=dashed]; +"258 Reshape_236" -> "261 Gemm_237" [label="[]", style=solid]; +"259 QuantizeLinear_h.0.mlp.c_proj.weight_1" -> "260 DequantizeLinear_h.0.mlp.c_proj.weight_1" [label="[3072, 768]", style=dashed]; +"260 DequantizeLinear_h.0.mlp.c_proj.weight_1" -> "261 
Gemm_237" [label="[3072, 768]", style=solid]; +"261 Gemm_237" -> "265 Reshape_241" [label="[]", style=solid]; +"262 Unsqueeze_238" -> "264 Concat_240" [label="[1]", style=dashed]; +"263 Unsqueeze_239" -> "264 Concat_240" [label="[1]", style=dashed]; +"264 Concat_240" -> "265 Reshape_241" [label="[3]", style=dashed]; +"265 Reshape_241" -> "266 Add_242" [label="[]", style=solid]; +"266 Add_242" -> "267 ReduceMean_243" [label="[]", style=solid]; +"266 Add_242" -> "268 Sub_244" [label="[]", style=solid]; +"266 Add_242" -> "430 Add_394" [label="[]", style=solid]; +"267 ReduceMean_243" -> "268 Sub_244" [label="[]", style=solid]; +"268 Sub_244" -> "270 Pow_246" [label="[]", style=solid]; +"268 Sub_244" -> "275 Div_251" [label="[]", style=solid]; +"269 Constant_245" -> "270 Pow_246" [label="[]", style=solid]; +"270 Pow_246" -> "271 ReduceMean_247" [label="[]", style=solid]; +"271 ReduceMean_247" -> "273 Add_249" [label="[]", style=solid]; +"272 Constant_248" -> "273 Add_249" [label="[]", style=solid]; +"273 Add_249" -> "274 Sqrt_250" [label="[]", style=solid]; +"274 Sqrt_250" -> "275 Div_251" [label="[]", style=solid]; +"275 Div_251" -> "276 Mul_252" [label="[]", style=solid]; +"276 Mul_252" -> "277 Add_253" [label="[]", style=solid]; +"277 Add_253" -> "278 QuantizeLinear_452_1" [label="[]", style=solid]; +"278 QuantizeLinear_452_1" -> "279 DequantizeLinear_452_1" [label="[]", style=dashed]; +"279 DequantizeLinear_452_1" -> "280 Shape_254" [label="[]", style=solid]; +"279 DequantizeLinear_452_1" -> "283 Shape_257" [label="[]", style=solid]; +"279 DequantizeLinear_452_1" -> "286 Shape_260" [label="[]", style=solid]; +"279 DequantizeLinear_452_1" -> "291 Reshape_265" [label="[]", style=solid]; +"280 Shape_254" -> "282 Gather_256" [label="[-1]", style=dashed]; +"281 Constant_255" -> "282 Gather_256" [label="[]", style=dashed]; +"282 Gather_256" -> "295 Unsqueeze_267" [label="[]", style=dashed]; +"283 Shape_257" -> "285 Gather_259" [label="[-1]", style=dashed]; +"284 Constant_258" -> "285 Gather_259" [label="[]", style=dashed]; +"285 Gather_259" -> "296 Unsqueeze_268" [label="[]", style=dashed]; +"286 Shape_260" -> "288 Gather_262" [label="[-1]", style=dashed]; +"287 Constant_261" -> "288 Gather_262" [label="[]", style=dashed]; +"288 Gather_262" -> "289 Unsqueeze_263" [label="[]", style=dashed]; +"289 Unsqueeze_263" -> "290 Concat_264" [label="[1]", style=dashed]; +"290 Concat_264" -> "291 Reshape_265" [label="[2]", style=dashed]; +"291 Reshape_265" -> "294 Gemm_266" [label="[]", style=solid]; +"292 QuantizeLinear_h.1.attn.c_attn.weight_1" -> "293 DequantizeLinear_h.1.attn.c_attn.weight_1" [label="[768, 2304]", style=dashed]; +"293 DequantizeLinear_h.1.attn.c_attn.weight_1" -> "294 Gemm_266" [label="[768, 2304]", style=solid]; +"294 Gemm_266" -> "298 Reshape_270" [label="[]", style=solid]; +"295 Unsqueeze_267" -> "297 Concat_269" [label="[1]", style=dashed]; +"296 Unsqueeze_268" -> "297 Concat_269" [label="[1]", style=dashed]; +"297 Concat_269" -> "298 Reshape_270" [label="[3]", style=dashed]; +"298 Reshape_270" -> "299 Split_271" [label="[]", style=solid]; +"299 Split_271" -> "300 QuantizeLinear_query.3_1" [label="[]", style=solid]; +"299 Split_271" -> "302 Shape_272" [label="[]", style=solid]; +"299 Split_271" -> "305 Shape_275" [label="[]", style=solid]; +"299 Split_271" -> "308 Shape_278" [label="[]", style=solid]; +"299 Split_271" -> "321 Shape_291" [label="[]", style=solid]; +"299 Split_271" -> "324 Shape_294" [label="[]", style=solid]; +"299 Split_271" -> "327 Shape_297" [label="[]", 
style=solid]; +"299 Split_271" -> "338 Reshape_308" [label="[]", style=solid]; +"299 Split_271" -> "342 Shape_310" [label="[]", style=solid]; +"299 Split_271" -> "345 Shape_313" [label="[]", style=solid]; +"299 Split_271" -> "348 Shape_316" [label="[]", style=solid]; +"299 Split_271" -> "359 Reshape_327" [label="[]", style=solid]; +"300 QuantizeLinear_query.3_1" -> "301 DequantizeLinear_query.3_1" [label="[]", style=dashed]; +"301 DequantizeLinear_query.3_1" -> "319 Reshape_289" [label="[]", style=solid]; +"302 Shape_272" -> "304 Gather_274" [label="[-1]", style=dashed]; +"303 Constant_273" -> "304 Gather_274" [label="[]", style=dashed]; +"304 Gather_274" -> "315 Unsqueeze_285" [label="[]", style=dashed]; +"305 Shape_275" -> "307 Gather_277" [label="[-1]", style=dashed]; +"306 Constant_276" -> "307 Gather_277" [label="[]", style=dashed]; +"307 Gather_277" -> "316 Unsqueeze_286" [label="[]", style=dashed]; +"308 Shape_278" -> "310 Gather_280" [label="[-1]", style=dashed]; +"309 Constant_279" -> "310 Gather_280" [label="[]", style=dashed]; +"310 Gather_280" -> "312 Div_282" [label="[]", style=dashed]; +"311 Constant_281" -> "312 Div_282" [label="[]", style=dashed]; +"312 Div_282" -> "313 Cast_283" [label="[]", style=dashed]; +"313 Cast_283" -> "314 Cast_284" [label="[]", style=dashed]; +"314 Cast_284" -> "317 Unsqueeze_287" [label="[]", style=dashed]; +"315 Unsqueeze_285" -> "318 Concat_288" [label="[1]", style=dashed]; +"316 Unsqueeze_286" -> "318 Concat_288" [label="[1]", style=dashed]; +"317 Unsqueeze_287" -> "318 Concat_288" [label="[1]", style=dashed]; +"318 Concat_288" -> "319 Reshape_289" [label="[4]", style=dashed]; +"319 Reshape_289" -> "320 Transpose_290" [label="[]", style=solid]; +"320 Transpose_290" -> "365 MatMul_333" [label="[]", style=solid]; +"321 Shape_291" -> "323 Gather_293" [label="[-1]", style=dashed]; +"322 Constant_292" -> "323 Gather_293" [label="[]", style=dashed]; +"323 Gather_293" -> "334 Unsqueeze_304" [label="[]", style=dashed]; +"324 Shape_294" -> "326 Gather_296" [label="[-1]", style=dashed]; +"325 Constant_295" -> "326 Gather_296" [label="[]", style=dashed]; +"326 Gather_296" -> "335 Unsqueeze_305" [label="[]", style=dashed]; +"327 Shape_297" -> "329 Gather_299" [label="[-1]", style=dashed]; +"328 Constant_298" -> "329 Gather_299" [label="[]", style=dashed]; +"329 Gather_299" -> "331 Div_301" [label="[]", style=dashed]; +"330 Constant_300" -> "331 Div_301" [label="[]", style=dashed]; +"331 Div_301" -> "332 Cast_302" [label="[]", style=dashed]; +"332 Cast_302" -> "333 Cast_303" [label="[]", style=dashed]; +"333 Cast_303" -> "336 Unsqueeze_306" [label="[]", style=dashed]; +"334 Unsqueeze_304" -> "337 Concat_307" [label="[1]", style=dashed]; +"335 Unsqueeze_305" -> "337 Concat_307" [label="[1]", style=dashed]; +"336 Unsqueeze_306" -> "337 Concat_307" [label="[1]", style=dashed]; +"337 Concat_307" -> "338 Reshape_308" [label="[4]", style=dashed]; +"338 Reshape_308" -> "339 QuantizeLinear_517_1" [label="[]", style=solid]; +"338 Reshape_308" -> "361 Transpose_329" [label="[]", style=solid]; +"339 QuantizeLinear_517_1" -> "340 DequantizeLinear_517_1" [label="[]", style=dashed]; +"340 DequantizeLinear_517_1" -> "341 Transpose_309" [label="[]", style=solid]; +"341 Transpose_309" -> "365 MatMul_333" [label="[]", style=solid]; +"342 Shape_310" -> "344 Gather_312" [label="[-1]", style=dashed]; +"343 Constant_311" -> "344 Gather_312" [label="[]", style=dashed]; +"344 Gather_312" -> "355 Unsqueeze_323" [label="[]", style=dashed]; +"345 Shape_313" -> "347 Gather_315" 
[label="[-1]", style=dashed]; +"346 Constant_314" -> "347 Gather_315" [label="[]", style=dashed]; +"347 Gather_315" -> "356 Unsqueeze_324" [label="[]", style=dashed]; +"348 Shape_316" -> "350 Gather_318" [label="[-1]", style=dashed]; +"349 Constant_317" -> "350 Gather_318" [label="[]", style=dashed]; +"350 Gather_318" -> "352 Div_320" [label="[]", style=dashed]; +"351 Constant_319" -> "352 Div_320" [label="[]", style=dashed]; +"352 Div_320" -> "353 Cast_321" [label="[]", style=dashed]; +"353 Cast_321" -> "354 Cast_322" [label="[]", style=dashed]; +"354 Cast_322" -> "357 Unsqueeze_325" [label="[]", style=dashed]; +"355 Unsqueeze_323" -> "358 Concat_326" [label="[1]", style=dashed]; +"356 Unsqueeze_324" -> "358 Concat_326" [label="[1]", style=dashed]; +"357 Unsqueeze_325" -> "358 Concat_326" [label="[1]", style=dashed]; +"358 Concat_326" -> "359 Reshape_327" [label="[4]", style=dashed]; +"359 Reshape_327" -> "360 Transpose_328" [label="[]", style=solid]; +"360 Transpose_328" -> "363 Unsqueeze_331" [label="[]", style=solid]; +"360 Transpose_328" -> "389 MatMul_357" [label="[]", style=solid]; +"361 Transpose_329" -> "362 Unsqueeze_330" [label="[]", style=solid]; +"362 Unsqueeze_330" -> "364 Concat_332" [label="[]", style=solid]; +"363 Unsqueeze_331" -> "364 Concat_332" [label="[]", style=solid]; +"364 Concat_332" -> "2828 nncf_model_output_2" [label="[2, 1, 12, 8, 64]", style=solid]; +"365 MatMul_333" -> "367 Div_335" [label="[]", style=solid]; +"366 Constant_334" -> "367 Div_335" [label="[]", style=solid]; +"367 Div_335" -> "368 Shape_336" [label="[]", style=solid]; +"367 Div_335" -> "371 Shape_339" [label="[]", style=solid]; +"367 Div_335" -> "382 Mul_350" [label="[]", style=solid]; +"368 Shape_336" -> "370 Gather_338" [label="[-1]", style=dashed]; +"369 Constant_337" -> "370 Gather_338" [label="[]", style=dashed]; +"370 Gather_338" -> "374 Sub_342" [label="[]", style=dashed]; +"371 Shape_339" -> "373 Gather_341" [label="[-1]", style=dashed]; +"372 Constant_340" -> "373 Gather_341" [label="[]", style=dashed]; +"373 Gather_341" -> "374 Sub_342" [label="[]", style=dashed]; +"373 Gather_341" -> "376 Unsqueeze_344" [label="[]", style=dashed]; +"373 Gather_341" -> "379 Unsqueeze_347" [label="[]", style=dashed]; +"374 Sub_342" -> "375 Unsqueeze_343" [label="[]", style=dashed]; +"375 Unsqueeze_343" -> "378 Slice_346" [label="[1]", style=dashed]; +"376 Unsqueeze_344" -> "378 Slice_346" [label="[1]", style=dashed]; +"377 Constant_345" -> "378 Slice_346" [label="[1]", style=dashed]; +"378 Slice_346" -> "381 Slice_349" [label="[]", style=solid]; +"379 Unsqueeze_347" -> "381 Slice_349" [label="[1]", style=dashed]; +"380 Constant_348" -> "381 Slice_349" [label="[1]", style=dashed]; +"381 Slice_349" -> "382 Mul_350" [label="[]", style=solid]; +"381 Slice_349" -> "384 Sub_352" [label="[]", style=solid]; +"382 Mul_350" -> "387 Sub_355" [label="[]", style=solid]; +"383 Constant_351" -> "384 Sub_352" [label="[]", style=solid]; +"384 Sub_352" -> "386 Mul_354" [label="[]", style=solid]; +"385 Constant_353" -> "386 Mul_354" [label="[]", style=solid]; +"386 Mul_354" -> "387 Sub_355" [label="[]", style=solid]; +"387 Sub_355" -> "388 Softmax_356" [label="[]", style=solid]; +"388 Softmax_356" -> "389 MatMul_357" [label="[]", style=solid]; +"389 MatMul_357" -> "390 QuantizeLinear_574_1" [label="[]", style=solid]; +"390 QuantizeLinear_574_1" -> "391 DequantizeLinear_574_1" [label="[]", style=dashed]; +"391 DequantizeLinear_574_1" -> "392 Transpose_358" [label="[]", style=solid]; +"392 Transpose_358" -> "393 Shape_359" 
[label="[]", style=solid]; +"392 Transpose_358" -> "396 Shape_362" [label="[]", style=solid]; +"392 Transpose_358" -> "399 Shape_365" [label="[]", style=solid]; +"392 Transpose_358" -> "402 Shape_368" [label="[]", style=solid]; +"392 Transpose_358" -> "410 Reshape_376" [label="[]", style=solid]; +"393 Shape_359" -> "395 Gather_361" [label="[-1]", style=dashed]; +"394 Constant_360" -> "395 Gather_361" [label="[]", style=dashed]; +"395 Gather_361" -> "406 Unsqueeze_372" [label="[]", style=dashed]; +"396 Shape_362" -> "398 Gather_364" [label="[-1]", style=dashed]; +"397 Constant_363" -> "398 Gather_364" [label="[]", style=dashed]; +"398 Gather_364" -> "407 Unsqueeze_373" [label="[]", style=dashed]; +"399 Shape_365" -> "401 Gather_367" [label="[-1]", style=dashed]; +"400 Constant_366" -> "401 Gather_367" [label="[]", style=dashed]; +"401 Gather_367" -> "405 Mul_371" [label="[]", style=dashed]; +"402 Shape_368" -> "404 Gather_370" [label="[-1]", style=dashed]; +"403 Constant_369" -> "404 Gather_370" [label="[]", style=dashed]; +"404 Gather_370" -> "405 Mul_371" [label="[]", style=dashed]; +"405 Mul_371" -> "408 Unsqueeze_374" [label="[]", style=dashed]; +"406 Unsqueeze_372" -> "409 Concat_375" [label="[1]", style=dashed]; +"407 Unsqueeze_373" -> "409 Concat_375" [label="[1]", style=dashed]; +"408 Unsqueeze_374" -> "409 Concat_375" [label="[1]", style=dashed]; +"409 Concat_375" -> "410 Reshape_376" [label="[3]", style=dashed]; +"410 Reshape_376" -> "411 Shape_377" [label="[]", style=solid]; +"410 Reshape_376" -> "414 Shape_380" [label="[]", style=solid]; +"410 Reshape_376" -> "417 Shape_383" [label="[]", style=solid]; +"410 Reshape_376" -> "422 Reshape_388" [label="[]", style=solid]; +"411 Shape_377" -> "413 Gather_379" [label="[-1]", style=dashed]; +"412 Constant_378" -> "413 Gather_379" [label="[]", style=dashed]; +"413 Gather_379" -> "426 Unsqueeze_390" [label="[]", style=dashed]; +"414 Shape_380" -> "416 Gather_382" [label="[-1]", style=dashed]; +"415 Constant_381" -> "416 Gather_382" [label="[]", style=dashed]; +"416 Gather_382" -> "427 Unsqueeze_391" [label="[]", style=dashed]; +"417 Shape_383" -> "419 Gather_385" [label="[-1]", style=dashed]; +"418 Constant_384" -> "419 Gather_385" [label="[]", style=dashed]; +"419 Gather_385" -> "420 Unsqueeze_386" [label="[]", style=dashed]; +"420 Unsqueeze_386" -> "421 Concat_387" [label="[1]", style=dashed]; +"421 Concat_387" -> "422 Reshape_388" [label="[2]", style=dashed]; +"422 Reshape_388" -> "425 Gemm_389" [label="[]", style=solid]; +"423 QuantizeLinear_h.1.attn.c_proj.weight_1" -> "424 DequantizeLinear_h.1.attn.c_proj.weight_1" [label="[768, 768]", style=dashed]; +"424 DequantizeLinear_h.1.attn.c_proj.weight_1" -> "425 Gemm_389" [label="[768, 768]", style=solid]; +"425 Gemm_389" -> "429 Reshape_393" [label="[]", style=solid]; +"426 Unsqueeze_390" -> "428 Concat_392" [label="[1]", style=dashed]; +"427 Unsqueeze_391" -> "428 Concat_392" [label="[1]", style=dashed]; +"428 Concat_392" -> "429 Reshape_393" [label="[3]", style=dashed]; +"429 Reshape_393" -> "430 Add_394" [label="[]", style=solid]; +"430 Add_394" -> "431 ReduceMean_395" [label="[]", style=solid]; +"430 Add_394" -> "432 Sub_396" [label="[]", style=solid]; +"430 Add_394" -> "497 Add_453" [label="[]", style=solid]; +"431 ReduceMean_395" -> "432 Sub_396" [label="[]", style=solid]; +"432 Sub_396" -> "434 Pow_398" [label="[]", style=solid]; +"432 Sub_396" -> "439 Div_403" [label="[]", style=solid]; +"433 Constant_397" -> "434 Pow_398" [label="[]", style=solid]; +"434 Pow_398" -> "435 
ReduceMean_399" [label="[]", style=solid]; +"435 ReduceMean_399" -> "437 Add_401" [label="[]", style=solid]; +"436 Constant_400" -> "437 Add_401" [label="[]", style=solid]; +"437 Add_401" -> "438 Sqrt_402" [label="[]", style=solid]; +"438 Sqrt_402" -> "439 Div_403" [label="[]", style=solid]; +"439 Div_403" -> "440 Mul_404" [label="[]", style=solid]; +"440 Mul_404" -> "441 Add_405" [label="[]", style=solid]; +"441 Add_405" -> "442 QuantizeLinear_626_1" [label="[]", style=solid]; +"442 QuantizeLinear_626_1" -> "443 DequantizeLinear_626_1" [label="[]", style=dashed]; +"443 DequantizeLinear_626_1" -> "444 Shape_406" [label="[]", style=solid]; +"443 DequantizeLinear_626_1" -> "447 Shape_409" [label="[]", style=solid]; +"443 DequantizeLinear_626_1" -> "450 Shape_412" [label="[]", style=solid]; +"443 DequantizeLinear_626_1" -> "455 Reshape_417" [label="[]", style=solid]; +"444 Shape_406" -> "446 Gather_408" [label="[-1]", style=dashed]; +"445 Constant_407" -> "446 Gather_408" [label="[]", style=dashed]; +"446 Gather_408" -> "459 Unsqueeze_419" [label="[]", style=dashed]; +"447 Shape_409" -> "449 Gather_411" [label="[-1]", style=dashed]; +"448 Constant_410" -> "449 Gather_411" [label="[]", style=dashed]; +"449 Gather_411" -> "460 Unsqueeze_420" [label="[]", style=dashed]; +"450 Shape_412" -> "452 Gather_414" [label="[-1]", style=dashed]; +"451 Constant_413" -> "452 Gather_414" [label="[]", style=dashed]; +"452 Gather_414" -> "453 Unsqueeze_415" [label="[]", style=dashed]; +"453 Unsqueeze_415" -> "454 Concat_416" [label="[1]", style=dashed]; +"454 Concat_416" -> "455 Reshape_417" [label="[2]", style=dashed]; +"455 Reshape_417" -> "458 Gemm_418" [label="[]", style=solid]; +"456 QuantizeLinear_h.1.mlp.c_fc.weight_1" -> "457 DequantizeLinear_h.1.mlp.c_fc.weight_1" [label="[768, 3072]", style=dashed]; +"457 DequantizeLinear_h.1.mlp.c_fc.weight_1" -> "458 Gemm_418" [label="[768, 3072]", style=solid]; +"458 Gemm_418" -> "462 Reshape_422" [label="[]", style=solid]; +"459 Unsqueeze_419" -> "461 Concat_421" [label="[1]", style=dashed]; +"460 Unsqueeze_420" -> "461 Concat_421" [label="[1]", style=dashed]; +"461 Concat_421" -> "462 Reshape_422" [label="[3]", style=dashed]; +"462 Reshape_422" -> "464 Mul_424" [label="[]", style=solid]; +"462 Reshape_422" -> "466 Pow_426" [label="[]", style=solid]; +"462 Reshape_422" -> "469 Add_429" [label="[]", style=solid]; +"463 Constant_423" -> "464 Mul_424" [label="[]", style=solid]; +"464 Mul_424" -> "475 Mul_435" [label="[]", style=solid]; +"465 Constant_425" -> "466 Pow_426" [label="[]", style=solid]; +"466 Pow_426" -> "468 Mul_428" [label="[]", style=solid]; +"467 Constant_427" -> "468 Mul_428" [label="[]", style=solid]; +"468 Mul_428" -> "469 Add_429" [label="[]", style=solid]; +"469 Add_429" -> "471 Mul_431" [label="[]", style=solid]; +"470 Constant_430" -> "471 Mul_431" [label="[]", style=solid]; +"471 Mul_431" -> "472 Tanh_432" [label="[]", style=solid]; +"472 Tanh_432" -> "474 Add_434" [label="[]", style=solid]; +"473 Constant_433" -> "474 Add_434" [label="[]", style=solid]; +"474 Add_434" -> "475 Mul_435" [label="[]", style=solid]; +"475 Mul_435" -> "476 QuantizeLinear_660_1" [label="[]", style=solid]; +"476 QuantizeLinear_660_1" -> "477 DequantizeLinear_660_1" [label="[]", style=dashed]; +"477 DequantizeLinear_660_1" -> "478 Shape_436" [label="[]", style=solid]; +"477 DequantizeLinear_660_1" -> "481 Shape_439" [label="[]", style=solid]; +"477 DequantizeLinear_660_1" -> "484 Shape_442" [label="[]", style=solid]; +"477 DequantizeLinear_660_1" -> "489 Reshape_447" 
[label="[]", style=solid]; +"478 Shape_436" -> "480 Gather_438" [label="[-1]", style=dashed]; +"479 Constant_437" -> "480 Gather_438" [label="[]", style=dashed]; +"480 Gather_438" -> "493 Unsqueeze_449" [label="[]", style=dashed]; +"481 Shape_439" -> "483 Gather_441" [label="[-1]", style=dashed]; +"482 Constant_440" -> "483 Gather_441" [label="[]", style=dashed]; +"483 Gather_441" -> "494 Unsqueeze_450" [label="[]", style=dashed]; +"484 Shape_442" -> "486 Gather_444" [label="[-1]", style=dashed]; +"485 Constant_443" -> "486 Gather_444" [label="[]", style=dashed]; +"486 Gather_444" -> "487 Unsqueeze_445" [label="[]", style=dashed]; +"487 Unsqueeze_445" -> "488 Concat_446" [label="[1]", style=dashed]; +"488 Concat_446" -> "489 Reshape_447" [label="[2]", style=dashed]; +"489 Reshape_447" -> "492 Gemm_448" [label="[]", style=solid]; +"490 QuantizeLinear_h.1.mlp.c_proj.weight_1" -> "491 DequantizeLinear_h.1.mlp.c_proj.weight_1" [label="[3072, 768]", style=dashed]; +"491 DequantizeLinear_h.1.mlp.c_proj.weight_1" -> "492 Gemm_448" [label="[3072, 768]", style=solid]; +"492 Gemm_448" -> "496 Reshape_452" [label="[]", style=solid]; +"493 Unsqueeze_449" -> "495 Concat_451" [label="[1]", style=dashed]; +"494 Unsqueeze_450" -> "495 Concat_451" [label="[1]", style=dashed]; +"495 Concat_451" -> "496 Reshape_452" [label="[3]", style=dashed]; +"496 Reshape_452" -> "497 Add_453" [label="[]", style=solid]; +"497 Add_453" -> "498 ReduceMean_454" [label="[]", style=solid]; +"497 Add_453" -> "499 Sub_455" [label="[]", style=solid]; +"497 Add_453" -> "661 Add_605" [label="[]", style=solid]; +"498 ReduceMean_454" -> "499 Sub_455" [label="[]", style=solid]; +"499 Sub_455" -> "501 Pow_457" [label="[]", style=solid]; +"499 Sub_455" -> "506 Div_462" [label="[]", style=solid]; +"500 Constant_456" -> "501 Pow_457" [label="[]", style=solid]; +"501 Pow_457" -> "502 ReduceMean_458" [label="[]", style=solid]; +"502 ReduceMean_458" -> "504 Add_460" [label="[]", style=solid]; +"503 Constant_459" -> "504 Add_460" [label="[]", style=solid]; +"504 Add_460" -> "505 Sqrt_461" [label="[]", style=solid]; +"505 Sqrt_461" -> "506 Div_462" [label="[]", style=solid]; +"506 Div_462" -> "507 Mul_463" [label="[]", style=solid]; +"507 Mul_463" -> "508 Add_464" [label="[]", style=solid]; +"508 Add_464" -> "509 QuantizeLinear_693_1" [label="[]", style=solid]; +"509 QuantizeLinear_693_1" -> "510 DequantizeLinear_693_1" [label="[]", style=dashed]; +"510 DequantizeLinear_693_1" -> "511 Shape_465" [label="[]", style=solid]; +"510 DequantizeLinear_693_1" -> "514 Shape_468" [label="[]", style=solid]; +"510 DequantizeLinear_693_1" -> "517 Shape_471" [label="[]", style=solid]; +"510 DequantizeLinear_693_1" -> "522 Reshape_476" [label="[]", style=solid]; +"511 Shape_465" -> "513 Gather_467" [label="[-1]", style=dashed]; +"512 Constant_466" -> "513 Gather_467" [label="[]", style=dashed]; +"513 Gather_467" -> "526 Unsqueeze_478" [label="[]", style=dashed]; +"514 Shape_468" -> "516 Gather_470" [label="[-1]", style=dashed]; +"515 Constant_469" -> "516 Gather_470" [label="[]", style=dashed]; +"516 Gather_470" -> "527 Unsqueeze_479" [label="[]", style=dashed]; +"517 Shape_471" -> "519 Gather_473" [label="[-1]", style=dashed]; +"518 Constant_472" -> "519 Gather_473" [label="[]", style=dashed]; +"519 Gather_473" -> "520 Unsqueeze_474" [label="[]", style=dashed]; +"520 Unsqueeze_474" -> "521 Concat_475" [label="[1]", style=dashed]; +"521 Concat_475" -> "522 Reshape_476" [label="[2]", style=dashed]; +"522 Reshape_476" -> "525 Gemm_477" [label="[]", 
style=solid]; +"523 QuantizeLinear_h.2.attn.c_attn.weight_1" -> "524 DequantizeLinear_h.2.attn.c_attn.weight_1" [label="[768, 2304]", style=dashed]; +"524 DequantizeLinear_h.2.attn.c_attn.weight_1" -> "525 Gemm_477" [label="[768, 2304]", style=solid]; +"525 Gemm_477" -> "529 Reshape_481" [label="[]", style=solid]; +"526 Unsqueeze_478" -> "528 Concat_480" [label="[1]", style=dashed]; +"527 Unsqueeze_479" -> "528 Concat_480" [label="[1]", style=dashed]; +"528 Concat_480" -> "529 Reshape_481" [label="[3]", style=dashed]; +"529 Reshape_481" -> "530 Split_482" [label="[]", style=solid]; +"530 Split_482" -> "531 QuantizeLinear_query.5_1" [label="[]", style=solid]; +"530 Split_482" -> "533 Shape_483" [label="[]", style=solid]; +"530 Split_482" -> "536 Shape_486" [label="[]", style=solid]; +"530 Split_482" -> "539 Shape_489" [label="[]", style=solid]; +"530 Split_482" -> "552 Shape_502" [label="[]", style=solid]; +"530 Split_482" -> "555 Shape_505" [label="[]", style=solid]; +"530 Split_482" -> "558 Shape_508" [label="[]", style=solid]; +"530 Split_482" -> "569 Reshape_519" [label="[]", style=solid]; +"530 Split_482" -> "573 Shape_521" [label="[]", style=solid]; +"530 Split_482" -> "576 Shape_524" [label="[]", style=solid]; +"530 Split_482" -> "579 Shape_527" [label="[]", style=solid]; +"530 Split_482" -> "590 Reshape_538" [label="[]", style=solid]; +"531 QuantizeLinear_query.5_1" -> "532 DequantizeLinear_query.5_1" [label="[]", style=dashed]; +"532 DequantizeLinear_query.5_1" -> "550 Reshape_500" [label="[]", style=solid]; +"533 Shape_483" -> "535 Gather_485" [label="[-1]", style=dashed]; +"534 Constant_484" -> "535 Gather_485" [label="[]", style=dashed]; +"535 Gather_485" -> "546 Unsqueeze_496" [label="[]", style=dashed]; +"536 Shape_486" -> "538 Gather_488" [label="[-1]", style=dashed]; +"537 Constant_487" -> "538 Gather_488" [label="[]", style=dashed]; +"538 Gather_488" -> "547 Unsqueeze_497" [label="[]", style=dashed]; +"539 Shape_489" -> "541 Gather_491" [label="[-1]", style=dashed]; +"540 Constant_490" -> "541 Gather_491" [label="[]", style=dashed]; +"541 Gather_491" -> "543 Div_493" [label="[]", style=dashed]; +"542 Constant_492" -> "543 Div_493" [label="[]", style=dashed]; +"543 Div_493" -> "544 Cast_494" [label="[]", style=dashed]; +"544 Cast_494" -> "545 Cast_495" [label="[]", style=dashed]; +"545 Cast_495" -> "548 Unsqueeze_498" [label="[]", style=dashed]; +"546 Unsqueeze_496" -> "549 Concat_499" [label="[1]", style=dashed]; +"547 Unsqueeze_497" -> "549 Concat_499" [label="[1]", style=dashed]; +"548 Unsqueeze_498" -> "549 Concat_499" [label="[1]", style=dashed]; +"549 Concat_499" -> "550 Reshape_500" [label="[4]", style=dashed]; +"550 Reshape_500" -> "551 Transpose_501" [label="[]", style=solid]; +"551 Transpose_501" -> "596 MatMul_544" [label="[]", style=solid]; +"552 Shape_502" -> "554 Gather_504" [label="[-1]", style=dashed]; +"553 Constant_503" -> "554 Gather_504" [label="[]", style=dashed]; +"554 Gather_504" -> "565 Unsqueeze_515" [label="[]", style=dashed]; +"555 Shape_505" -> "557 Gather_507" [label="[-1]", style=dashed]; +"556 Constant_506" -> "557 Gather_507" [label="[]", style=dashed]; +"557 Gather_507" -> "566 Unsqueeze_516" [label="[]", style=dashed]; +"558 Shape_508" -> "560 Gather_510" [label="[-1]", style=dashed]; +"559 Constant_509" -> "560 Gather_510" [label="[]", style=dashed]; +"560 Gather_510" -> "562 Div_512" [label="[]", style=dashed]; +"561 Constant_511" -> "562 Div_512" [label="[]", style=dashed]; +"562 Div_512" -> "563 Cast_513" [label="[]", style=dashed]; 
+"563 Cast_513" -> "564 Cast_514" [label="[]", style=dashed]; +"564 Cast_514" -> "567 Unsqueeze_517" [label="[]", style=dashed]; +"565 Unsqueeze_515" -> "568 Concat_518" [label="[1]", style=dashed]; +"566 Unsqueeze_516" -> "568 Concat_518" [label="[1]", style=dashed]; +"567 Unsqueeze_517" -> "568 Concat_518" [label="[1]", style=dashed]; +"568 Concat_518" -> "569 Reshape_519" [label="[4]", style=dashed]; +"569 Reshape_519" -> "570 QuantizeLinear_758_1" [label="[]", style=solid]; +"569 Reshape_519" -> "592 Transpose_540" [label="[]", style=solid]; +"570 QuantizeLinear_758_1" -> "571 DequantizeLinear_758_1" [label="[]", style=dashed]; +"571 DequantizeLinear_758_1" -> "572 Transpose_520" [label="[]", style=solid]; +"572 Transpose_520" -> "596 MatMul_544" [label="[]", style=solid]; +"573 Shape_521" -> "575 Gather_523" [label="[-1]", style=dashed]; +"574 Constant_522" -> "575 Gather_523" [label="[]", style=dashed]; +"575 Gather_523" -> "586 Unsqueeze_534" [label="[]", style=dashed]; +"576 Shape_524" -> "578 Gather_526" [label="[-1]", style=dashed]; +"577 Constant_525" -> "578 Gather_526" [label="[]", style=dashed]; +"578 Gather_526" -> "587 Unsqueeze_535" [label="[]", style=dashed]; +"579 Shape_527" -> "581 Gather_529" [label="[-1]", style=dashed]; +"580 Constant_528" -> "581 Gather_529" [label="[]", style=dashed]; +"581 Gather_529" -> "583 Div_531" [label="[]", style=dashed]; +"582 Constant_530" -> "583 Div_531" [label="[]", style=dashed]; +"583 Div_531" -> "584 Cast_532" [label="[]", style=dashed]; +"584 Cast_532" -> "585 Cast_533" [label="[]", style=dashed]; +"585 Cast_533" -> "588 Unsqueeze_536" [label="[]", style=dashed]; +"586 Unsqueeze_534" -> "589 Concat_537" [label="[1]", style=dashed]; +"587 Unsqueeze_535" -> "589 Concat_537" [label="[1]", style=dashed]; +"588 Unsqueeze_536" -> "589 Concat_537" [label="[1]", style=dashed]; +"589 Concat_537" -> "590 Reshape_538" [label="[4]", style=dashed]; +"590 Reshape_538" -> "591 Transpose_539" [label="[]", style=solid]; +"591 Transpose_539" -> "594 Unsqueeze_542" [label="[]", style=solid]; +"591 Transpose_539" -> "620 MatMul_568" [label="[]", style=solid]; +"592 Transpose_540" -> "593 Unsqueeze_541" [label="[]", style=solid]; +"593 Unsqueeze_541" -> "595 Concat_543" [label="[]", style=solid]; +"594 Unsqueeze_542" -> "595 Concat_543" [label="[]", style=solid]; +"595 Concat_543" -> "2829 nncf_model_output_3" [label="[2, 1, 12, 8, 64]", style=solid]; +"596 MatMul_544" -> "598 Div_546" [label="[]", style=solid]; +"597 Constant_545" -> "598 Div_546" [label="[]", style=solid]; +"598 Div_546" -> "599 Shape_547" [label="[]", style=solid]; +"598 Div_546" -> "602 Shape_550" [label="[]", style=solid]; +"598 Div_546" -> "613 Mul_561" [label="[]", style=solid]; +"599 Shape_547" -> "601 Gather_549" [label="[-1]", style=dashed]; +"600 Constant_548" -> "601 Gather_549" [label="[]", style=dashed]; +"601 Gather_549" -> "605 Sub_553" [label="[]", style=dashed]; +"602 Shape_550" -> "604 Gather_552" [label="[-1]", style=dashed]; +"603 Constant_551" -> "604 Gather_552" [label="[]", style=dashed]; +"604 Gather_552" -> "605 Sub_553" [label="[]", style=dashed]; +"604 Gather_552" -> "607 Unsqueeze_555" [label="[]", style=dashed]; +"604 Gather_552" -> "610 Unsqueeze_558" [label="[]", style=dashed]; +"605 Sub_553" -> "606 Unsqueeze_554" [label="[]", style=dashed]; +"606 Unsqueeze_554" -> "609 Slice_557" [label="[1]", style=dashed]; +"607 Unsqueeze_555" -> "609 Slice_557" [label="[1]", style=dashed]; +"608 Constant_556" -> "609 Slice_557" [label="[1]", style=dashed]; +"609 
Slice_557" -> "612 Slice_560" [label="[]", style=solid]; +"610 Unsqueeze_558" -> "612 Slice_560" [label="[1]", style=dashed]; +"611 Constant_559" -> "612 Slice_560" [label="[1]", style=dashed]; +"612 Slice_560" -> "613 Mul_561" [label="[]", style=solid]; +"612 Slice_560" -> "615 Sub_563" [label="[]", style=solid]; +"613 Mul_561" -> "618 Sub_566" [label="[]", style=solid]; +"614 Constant_562" -> "615 Sub_563" [label="[]", style=solid]; +"615 Sub_563" -> "617 Mul_565" [label="[]", style=solid]; +"616 Constant_564" -> "617 Mul_565" [label="[]", style=solid]; +"617 Mul_565" -> "618 Sub_566" [label="[]", style=solid]; +"618 Sub_566" -> "619 Softmax_567" [label="[]", style=solid]; +"619 Softmax_567" -> "620 MatMul_568" [label="[]", style=solid]; +"620 MatMul_568" -> "621 QuantizeLinear_815_1" [label="[]", style=solid]; +"621 QuantizeLinear_815_1" -> "622 DequantizeLinear_815_1" [label="[]", style=dashed]; +"622 DequantizeLinear_815_1" -> "623 Transpose_569" [label="[]", style=solid]; +"623 Transpose_569" -> "624 Shape_570" [label="[]", style=solid]; +"623 Transpose_569" -> "627 Shape_573" [label="[]", style=solid]; +"623 Transpose_569" -> "630 Shape_576" [label="[]", style=solid]; +"623 Transpose_569" -> "633 Shape_579" [label="[]", style=solid]; +"623 Transpose_569" -> "641 Reshape_587" [label="[]", style=solid]; +"624 Shape_570" -> "626 Gather_572" [label="[-1]", style=dashed]; +"625 Constant_571" -> "626 Gather_572" [label="[]", style=dashed]; +"626 Gather_572" -> "637 Unsqueeze_583" [label="[]", style=dashed]; +"627 Shape_573" -> "629 Gather_575" [label="[-1]", style=dashed]; +"628 Constant_574" -> "629 Gather_575" [label="[]", style=dashed]; +"629 Gather_575" -> "638 Unsqueeze_584" [label="[]", style=dashed]; +"630 Shape_576" -> "632 Gather_578" [label="[-1]", style=dashed]; +"631 Constant_577" -> "632 Gather_578" [label="[]", style=dashed]; +"632 Gather_578" -> "636 Mul_582" [label="[]", style=dashed]; +"633 Shape_579" -> "635 Gather_581" [label="[-1]", style=dashed]; +"634 Constant_580" -> "635 Gather_581" [label="[]", style=dashed]; +"635 Gather_581" -> "636 Mul_582" [label="[]", style=dashed]; +"636 Mul_582" -> "639 Unsqueeze_585" [label="[]", style=dashed]; +"637 Unsqueeze_583" -> "640 Concat_586" [label="[1]", style=dashed]; +"638 Unsqueeze_584" -> "640 Concat_586" [label="[1]", style=dashed]; +"639 Unsqueeze_585" -> "640 Concat_586" [label="[1]", style=dashed]; +"640 Concat_586" -> "641 Reshape_587" [label="[3]", style=dashed]; +"641 Reshape_587" -> "642 Shape_588" [label="[]", style=solid]; +"641 Reshape_587" -> "645 Shape_591" [label="[]", style=solid]; +"641 Reshape_587" -> "648 Shape_594" [label="[]", style=solid]; +"641 Reshape_587" -> "653 Reshape_599" [label="[]", style=solid]; +"642 Shape_588" -> "644 Gather_590" [label="[-1]", style=dashed]; +"643 Constant_589" -> "644 Gather_590" [label="[]", style=dashed]; +"644 Gather_590" -> "657 Unsqueeze_601" [label="[]", style=dashed]; +"645 Shape_591" -> "647 Gather_593" [label="[-1]", style=dashed]; +"646 Constant_592" -> "647 Gather_593" [label="[]", style=dashed]; +"647 Gather_593" -> "658 Unsqueeze_602" [label="[]", style=dashed]; +"648 Shape_594" -> "650 Gather_596" [label="[-1]", style=dashed]; +"649 Constant_595" -> "650 Gather_596" [label="[]", style=dashed]; +"650 Gather_596" -> "651 Unsqueeze_597" [label="[]", style=dashed]; +"651 Unsqueeze_597" -> "652 Concat_598" [label="[1]", style=dashed]; +"652 Concat_598" -> "653 Reshape_599" [label="[2]", style=dashed]; +"653 Reshape_599" -> "656 Gemm_600" [label="[]", style=solid]; 
+"654 QuantizeLinear_h.2.attn.c_proj.weight_1" -> "655 DequantizeLinear_h.2.attn.c_proj.weight_1" [label="[768, 768]", style=dashed]; +"655 DequantizeLinear_h.2.attn.c_proj.weight_1" -> "656 Gemm_600" [label="[768, 768]", style=solid]; +"656 Gemm_600" -> "660 Reshape_604" [label="[]", style=solid]; +"657 Unsqueeze_601" -> "659 Concat_603" [label="[1]", style=dashed]; +"658 Unsqueeze_602" -> "659 Concat_603" [label="[1]", style=dashed]; +"659 Concat_603" -> "660 Reshape_604" [label="[3]", style=dashed]; +"660 Reshape_604" -> "661 Add_605" [label="[]", style=solid]; +"661 Add_605" -> "662 ReduceMean_606" [label="[]", style=solid]; +"661 Add_605" -> "663 Sub_607" [label="[]", style=solid]; +"661 Add_605" -> "728 Add_664" [label="[]", style=solid]; +"662 ReduceMean_606" -> "663 Sub_607" [label="[]", style=solid]; +"663 Sub_607" -> "665 Pow_609" [label="[]", style=solid]; +"663 Sub_607" -> "670 Div_614" [label="[]", style=solid]; +"664 Constant_608" -> "665 Pow_609" [label="[]", style=solid]; +"665 Pow_609" -> "666 ReduceMean_610" [label="[]", style=solid]; +"666 ReduceMean_610" -> "668 Add_612" [label="[]", style=solid]; +"667 Constant_611" -> "668 Add_612" [label="[]", style=solid]; +"668 Add_612" -> "669 Sqrt_613" [label="[]", style=solid]; +"669 Sqrt_613" -> "670 Div_614" [label="[]", style=solid]; +"670 Div_614" -> "671 Mul_615" [label="[]", style=solid]; +"671 Mul_615" -> "672 Add_616" [label="[]", style=solid]; +"672 Add_616" -> "673 QuantizeLinear_867_1" [label="[]", style=solid]; +"673 QuantizeLinear_867_1" -> "674 DequantizeLinear_867_1" [label="[]", style=dashed]; +"674 DequantizeLinear_867_1" -> "675 Shape_617" [label="[]", style=solid]; +"674 DequantizeLinear_867_1" -> "678 Shape_620" [label="[]", style=solid]; +"674 DequantizeLinear_867_1" -> "681 Shape_623" [label="[]", style=solid]; +"674 DequantizeLinear_867_1" -> "686 Reshape_628" [label="[]", style=solid]; +"675 Shape_617" -> "677 Gather_619" [label="[-1]", style=dashed]; +"676 Constant_618" -> "677 Gather_619" [label="[]", style=dashed]; +"677 Gather_619" -> "690 Unsqueeze_630" [label="[]", style=dashed]; +"678 Shape_620" -> "680 Gather_622" [label="[-1]", style=dashed]; +"679 Constant_621" -> "680 Gather_622" [label="[]", style=dashed]; +"680 Gather_622" -> "691 Unsqueeze_631" [label="[]", style=dashed]; +"681 Shape_623" -> "683 Gather_625" [label="[-1]", style=dashed]; +"682 Constant_624" -> "683 Gather_625" [label="[]", style=dashed]; +"683 Gather_625" -> "684 Unsqueeze_626" [label="[]", style=dashed]; +"684 Unsqueeze_626" -> "685 Concat_627" [label="[1]", style=dashed]; +"685 Concat_627" -> "686 Reshape_628" [label="[2]", style=dashed]; +"686 Reshape_628" -> "689 Gemm_629" [label="[]", style=solid]; +"687 QuantizeLinear_h.2.mlp.c_fc.weight_1" -> "688 DequantizeLinear_h.2.mlp.c_fc.weight_1" [label="[768, 3072]", style=dashed]; +"688 DequantizeLinear_h.2.mlp.c_fc.weight_1" -> "689 Gemm_629" [label="[768, 3072]", style=solid]; +"689 Gemm_629" -> "693 Reshape_633" [label="[]", style=solid]; +"690 Unsqueeze_630" -> "692 Concat_632" [label="[1]", style=dashed]; +"691 Unsqueeze_631" -> "692 Concat_632" [label="[1]", style=dashed]; +"692 Concat_632" -> "693 Reshape_633" [label="[3]", style=dashed]; +"693 Reshape_633" -> "695 Mul_635" [label="[]", style=solid]; +"693 Reshape_633" -> "697 Pow_637" [label="[]", style=solid]; +"693 Reshape_633" -> "700 Add_640" [label="[]", style=solid]; +"694 Constant_634" -> "695 Mul_635" [label="[]", style=solid]; +"695 Mul_635" -> "706 Mul_646" [label="[]", style=solid]; +"696 Constant_636" -> 
"697 Pow_637" [label="[]", style=solid]; +"697 Pow_637" -> "699 Mul_639" [label="[]", style=solid]; +"698 Constant_638" -> "699 Mul_639" [label="[]", style=solid]; +"699 Mul_639" -> "700 Add_640" [label="[]", style=solid]; +"700 Add_640" -> "702 Mul_642" [label="[]", style=solid]; +"701 Constant_641" -> "702 Mul_642" [label="[]", style=solid]; +"702 Mul_642" -> "703 Tanh_643" [label="[]", style=solid]; +"703 Tanh_643" -> "705 Add_645" [label="[]", style=solid]; +"704 Constant_644" -> "705 Add_645" [label="[]", style=solid]; +"705 Add_645" -> "706 Mul_646" [label="[]", style=solid]; +"706 Mul_646" -> "707 QuantizeLinear_901_1" [label="[]", style=solid]; +"707 QuantizeLinear_901_1" -> "708 DequantizeLinear_901_1" [label="[]", style=dashed]; +"708 DequantizeLinear_901_1" -> "709 Shape_647" [label="[]", style=solid]; +"708 DequantizeLinear_901_1" -> "712 Shape_650" [label="[]", style=solid]; +"708 DequantizeLinear_901_1" -> "715 Shape_653" [label="[]", style=solid]; +"708 DequantizeLinear_901_1" -> "720 Reshape_658" [label="[]", style=solid]; +"709 Shape_647" -> "711 Gather_649" [label="[-1]", style=dashed]; +"710 Constant_648" -> "711 Gather_649" [label="[]", style=dashed]; +"711 Gather_649" -> "724 Unsqueeze_660" [label="[]", style=dashed]; +"712 Shape_650" -> "714 Gather_652" [label="[-1]", style=dashed]; +"713 Constant_651" -> "714 Gather_652" [label="[]", style=dashed]; +"714 Gather_652" -> "725 Unsqueeze_661" [label="[]", style=dashed]; +"715 Shape_653" -> "717 Gather_655" [label="[-1]", style=dashed]; +"716 Constant_654" -> "717 Gather_655" [label="[]", style=dashed]; +"717 Gather_655" -> "718 Unsqueeze_656" [label="[]", style=dashed]; +"718 Unsqueeze_656" -> "719 Concat_657" [label="[1]", style=dashed]; +"719 Concat_657" -> "720 Reshape_658" [label="[2]", style=dashed]; +"720 Reshape_658" -> "723 Gemm_659" [label="[]", style=solid]; +"721 QuantizeLinear_h.2.mlp.c_proj.weight_1" -> "722 DequantizeLinear_h.2.mlp.c_proj.weight_1" [label="[3072, 768]", style=dashed]; +"722 DequantizeLinear_h.2.mlp.c_proj.weight_1" -> "723 Gemm_659" [label="[3072, 768]", style=solid]; +"723 Gemm_659" -> "727 Reshape_663" [label="[]", style=solid]; +"724 Unsqueeze_660" -> "726 Concat_662" [label="[1]", style=dashed]; +"725 Unsqueeze_661" -> "726 Concat_662" [label="[1]", style=dashed]; +"726 Concat_662" -> "727 Reshape_663" [label="[3]", style=dashed]; +"727 Reshape_663" -> "728 Add_664" [label="[]", style=solid]; +"728 Add_664" -> "729 ReduceMean_665" [label="[]", style=solid]; +"728 Add_664" -> "730 Sub_666" [label="[]", style=solid]; +"728 Add_664" -> "892 Add_816" [label="[]", style=solid]; +"729 ReduceMean_665" -> "730 Sub_666" [label="[]", style=solid]; +"730 Sub_666" -> "732 Pow_668" [label="[]", style=solid]; +"730 Sub_666" -> "737 Div_673" [label="[]", style=solid]; +"731 Constant_667" -> "732 Pow_668" [label="[]", style=solid]; +"732 Pow_668" -> "733 ReduceMean_669" [label="[]", style=solid]; +"733 ReduceMean_669" -> "735 Add_671" [label="[]", style=solid]; +"734 Constant_670" -> "735 Add_671" [label="[]", style=solid]; +"735 Add_671" -> "736 Sqrt_672" [label="[]", style=solid]; +"736 Sqrt_672" -> "737 Div_673" [label="[]", style=solid]; +"737 Div_673" -> "738 Mul_674" [label="[]", style=solid]; +"738 Mul_674" -> "739 Add_675" [label="[]", style=solid]; +"739 Add_675" -> "740 QuantizeLinear_934_1" [label="[]", style=solid]; +"740 QuantizeLinear_934_1" -> "741 DequantizeLinear_934_1" [label="[]", style=dashed]; +"741 DequantizeLinear_934_1" -> "742 Shape_676" [label="[]", style=solid]; +"741 
DequantizeLinear_934_1" -> "745 Shape_679" [label="[]", style=solid]; +"741 DequantizeLinear_934_1" -> "748 Shape_682" [label="[]", style=solid]; +"741 DequantizeLinear_934_1" -> "753 Reshape_687" [label="[]", style=solid]; +"742 Shape_676" -> "744 Gather_678" [label="[-1]", style=dashed]; +"743 Constant_677" -> "744 Gather_678" [label="[]", style=dashed]; +"744 Gather_678" -> "757 Unsqueeze_689" [label="[]", style=dashed]; +"745 Shape_679" -> "747 Gather_681" [label="[-1]", style=dashed]; +"746 Constant_680" -> "747 Gather_681" [label="[]", style=dashed]; +"747 Gather_681" -> "758 Unsqueeze_690" [label="[]", style=dashed]; +"748 Shape_682" -> "750 Gather_684" [label="[-1]", style=dashed]; +"749 Constant_683" -> "750 Gather_684" [label="[]", style=dashed]; +"750 Gather_684" -> "751 Unsqueeze_685" [label="[]", style=dashed]; +"751 Unsqueeze_685" -> "752 Concat_686" [label="[1]", style=dashed]; +"752 Concat_686" -> "753 Reshape_687" [label="[2]", style=dashed]; +"753 Reshape_687" -> "756 Gemm_688" [label="[]", style=solid]; +"754 QuantizeLinear_h.3.attn.c_attn.weight_1" -> "755 DequantizeLinear_h.3.attn.c_attn.weight_1" [label="[768, 2304]", style=dashed]; +"755 DequantizeLinear_h.3.attn.c_attn.weight_1" -> "756 Gemm_688" [label="[768, 2304]", style=solid]; +"756 Gemm_688" -> "760 Reshape_692" [label="[]", style=solid]; +"757 Unsqueeze_689" -> "759 Concat_691" [label="[1]", style=dashed]; +"758 Unsqueeze_690" -> "759 Concat_691" [label="[1]", style=dashed]; +"759 Concat_691" -> "760 Reshape_692" [label="[3]", style=dashed]; +"760 Reshape_692" -> "761 Split_693" [label="[]", style=solid]; +"761 Split_693" -> "762 QuantizeLinear_query.7_1" [label="[]", style=solid]; +"761 Split_693" -> "764 Shape_694" [label="[]", style=solid]; +"761 Split_693" -> "767 Shape_697" [label="[]", style=solid]; +"761 Split_693" -> "770 Shape_700" [label="[]", style=solid]; +"761 Split_693" -> "783 Shape_713" [label="[]", style=solid]; +"761 Split_693" -> "786 Shape_716" [label="[]", style=solid]; +"761 Split_693" -> "789 Shape_719" [label="[]", style=solid]; +"761 Split_693" -> "800 Reshape_730" [label="[]", style=solid]; +"761 Split_693" -> "804 Shape_732" [label="[]", style=solid]; +"761 Split_693" -> "807 Shape_735" [label="[]", style=solid]; +"761 Split_693" -> "810 Shape_738" [label="[]", style=solid]; +"761 Split_693" -> "821 Reshape_749" [label="[]", style=solid]; +"762 QuantizeLinear_query.7_1" -> "763 DequantizeLinear_query.7_1" [label="[]", style=dashed]; +"763 DequantizeLinear_query.7_1" -> "781 Reshape_711" [label="[]", style=solid]; +"764 Shape_694" -> "766 Gather_696" [label="[-1]", style=dashed]; +"765 Constant_695" -> "766 Gather_696" [label="[]", style=dashed]; +"766 Gather_696" -> "777 Unsqueeze_707" [label="[]", style=dashed]; +"767 Shape_697" -> "769 Gather_699" [label="[-1]", style=dashed]; +"768 Constant_698" -> "769 Gather_699" [label="[]", style=dashed]; +"769 Gather_699" -> "778 Unsqueeze_708" [label="[]", style=dashed]; +"770 Shape_700" -> "772 Gather_702" [label="[-1]", style=dashed]; +"771 Constant_701" -> "772 Gather_702" [label="[]", style=dashed]; +"772 Gather_702" -> "774 Div_704" [label="[]", style=dashed]; +"773 Constant_703" -> "774 Div_704" [label="[]", style=dashed]; +"774 Div_704" -> "775 Cast_705" [label="[]", style=dashed]; +"775 Cast_705" -> "776 Cast_706" [label="[]", style=dashed]; +"776 Cast_706" -> "779 Unsqueeze_709" [label="[]", style=dashed]; +"777 Unsqueeze_707" -> "780 Concat_710" [label="[1]", style=dashed]; +"778 Unsqueeze_708" -> "780 Concat_710" [label="[1]", 
style=dashed]; +"779 Unsqueeze_709" -> "780 Concat_710" [label="[1]", style=dashed]; +"780 Concat_710" -> "781 Reshape_711" [label="[4]", style=dashed]; +"781 Reshape_711" -> "782 Transpose_712" [label="[]", style=solid]; +"782 Transpose_712" -> "827 MatMul_755" [label="[]", style=solid]; +"783 Shape_713" -> "785 Gather_715" [label="[-1]", style=dashed]; +"784 Constant_714" -> "785 Gather_715" [label="[]", style=dashed]; +"785 Gather_715" -> "796 Unsqueeze_726" [label="[]", style=dashed]; +"786 Shape_716" -> "788 Gather_718" [label="[-1]", style=dashed]; +"787 Constant_717" -> "788 Gather_718" [label="[]", style=dashed]; +"788 Gather_718" -> "797 Unsqueeze_727" [label="[]", style=dashed]; +"789 Shape_719" -> "791 Gather_721" [label="[-1]", style=dashed]; +"790 Constant_720" -> "791 Gather_721" [label="[]", style=dashed]; +"791 Gather_721" -> "793 Div_723" [label="[]", style=dashed]; +"792 Constant_722" -> "793 Div_723" [label="[]", style=dashed]; +"793 Div_723" -> "794 Cast_724" [label="[]", style=dashed]; +"794 Cast_724" -> "795 Cast_725" [label="[]", style=dashed]; +"795 Cast_725" -> "798 Unsqueeze_728" [label="[]", style=dashed]; +"796 Unsqueeze_726" -> "799 Concat_729" [label="[1]", style=dashed]; +"797 Unsqueeze_727" -> "799 Concat_729" [label="[1]", style=dashed]; +"798 Unsqueeze_728" -> "799 Concat_729" [label="[1]", style=dashed]; +"799 Concat_729" -> "800 Reshape_730" [label="[4]", style=dashed]; +"800 Reshape_730" -> "801 QuantizeLinear_999_1" [label="[]", style=solid]; +"800 Reshape_730" -> "823 Transpose_751" [label="[]", style=solid]; +"801 QuantizeLinear_999_1" -> "802 DequantizeLinear_999_1" [label="[]", style=dashed]; +"802 DequantizeLinear_999_1" -> "803 Transpose_731" [label="[]", style=solid]; +"803 Transpose_731" -> "827 MatMul_755" [label="[]", style=solid]; +"804 Shape_732" -> "806 Gather_734" [label="[-1]", style=dashed]; +"805 Constant_733" -> "806 Gather_734" [label="[]", style=dashed]; +"806 Gather_734" -> "817 Unsqueeze_745" [label="[]", style=dashed]; +"807 Shape_735" -> "809 Gather_737" [label="[-1]", style=dashed]; +"808 Constant_736" -> "809 Gather_737" [label="[]", style=dashed]; +"809 Gather_737" -> "818 Unsqueeze_746" [label="[]", style=dashed]; +"810 Shape_738" -> "812 Gather_740" [label="[-1]", style=dashed]; +"811 Constant_739" -> "812 Gather_740" [label="[]", style=dashed]; +"812 Gather_740" -> "814 Div_742" [label="[]", style=dashed]; +"813 Constant_741" -> "814 Div_742" [label="[]", style=dashed]; +"814 Div_742" -> "815 Cast_743" [label="[]", style=dashed]; +"815 Cast_743" -> "816 Cast_744" [label="[]", style=dashed]; +"816 Cast_744" -> "819 Unsqueeze_747" [label="[]", style=dashed]; +"817 Unsqueeze_745" -> "820 Concat_748" [label="[1]", style=dashed]; +"818 Unsqueeze_746" -> "820 Concat_748" [label="[1]", style=dashed]; +"819 Unsqueeze_747" -> "820 Concat_748" [label="[1]", style=dashed]; +"820 Concat_748" -> "821 Reshape_749" [label="[4]", style=dashed]; +"821 Reshape_749" -> "822 Transpose_750" [label="[]", style=solid]; +"822 Transpose_750" -> "825 Unsqueeze_753" [label="[]", style=solid]; +"822 Transpose_750" -> "851 MatMul_779" [label="[]", style=solid]; +"823 Transpose_751" -> "824 Unsqueeze_752" [label="[]", style=solid]; +"824 Unsqueeze_752" -> "826 Concat_754" [label="[]", style=solid]; +"825 Unsqueeze_753" -> "826 Concat_754" [label="[]", style=solid]; +"826 Concat_754" -> "2830 nncf_model_output_4" [label="[2, 1, 12, 8, 64]", style=solid]; +"827 MatMul_755" -> "829 Div_757" [label="[]", style=solid]; +"828 Constant_756" -> "829 Div_757" 
[label="[]", style=solid]; +"829 Div_757" -> "830 Shape_758" [label="[]", style=solid]; +"829 Div_757" -> "833 Shape_761" [label="[]", style=solid]; +"829 Div_757" -> "844 Mul_772" [label="[]", style=solid]; +"830 Shape_758" -> "832 Gather_760" [label="[-1]", style=dashed]; +"831 Constant_759" -> "832 Gather_760" [label="[]", style=dashed]; +"832 Gather_760" -> "836 Sub_764" [label="[]", style=dashed]; +"833 Shape_761" -> "835 Gather_763" [label="[-1]", style=dashed]; +"834 Constant_762" -> "835 Gather_763" [label="[]", style=dashed]; +"835 Gather_763" -> "836 Sub_764" [label="[]", style=dashed]; +"835 Gather_763" -> "838 Unsqueeze_766" [label="[]", style=dashed]; +"835 Gather_763" -> "841 Unsqueeze_769" [label="[]", style=dashed]; +"836 Sub_764" -> "837 Unsqueeze_765" [label="[]", style=dashed]; +"837 Unsqueeze_765" -> "840 Slice_768" [label="[1]", style=dashed]; +"838 Unsqueeze_766" -> "840 Slice_768" [label="[1]", style=dashed]; +"839 Constant_767" -> "840 Slice_768" [label="[1]", style=dashed]; +"840 Slice_768" -> "843 Slice_771" [label="[]", style=solid]; +"841 Unsqueeze_769" -> "843 Slice_771" [label="[1]", style=dashed]; +"842 Constant_770" -> "843 Slice_771" [label="[1]", style=dashed]; +"843 Slice_771" -> "844 Mul_772" [label="[]", style=solid]; +"843 Slice_771" -> "846 Sub_774" [label="[]", style=solid]; +"844 Mul_772" -> "849 Sub_777" [label="[]", style=solid]; +"845 Constant_773" -> "846 Sub_774" [label="[]", style=solid]; +"846 Sub_774" -> "848 Mul_776" [label="[]", style=solid]; +"847 Constant_775" -> "848 Mul_776" [label="[]", style=solid]; +"848 Mul_776" -> "849 Sub_777" [label="[]", style=solid]; +"849 Sub_777" -> "850 Softmax_778" [label="[]", style=solid]; +"850 Softmax_778" -> "851 MatMul_779" [label="[]", style=solid]; +"851 MatMul_779" -> "852 QuantizeLinear_1056_1" [label="[]", style=solid]; +"852 QuantizeLinear_1056_1" -> "853 DequantizeLinear_1056_1" [label="[]", style=dashed]; +"853 DequantizeLinear_1056_1" -> "854 Transpose_780" [label="[]", style=solid]; +"854 Transpose_780" -> "855 Shape_781" [label="[]", style=solid]; +"854 Transpose_780" -> "858 Shape_784" [label="[]", style=solid]; +"854 Transpose_780" -> "861 Shape_787" [label="[]", style=solid]; +"854 Transpose_780" -> "864 Shape_790" [label="[]", style=solid]; +"854 Transpose_780" -> "872 Reshape_798" [label="[]", style=solid]; +"855 Shape_781" -> "857 Gather_783" [label="[-1]", style=dashed]; +"856 Constant_782" -> "857 Gather_783" [label="[]", style=dashed]; +"857 Gather_783" -> "868 Unsqueeze_794" [label="[]", style=dashed]; +"858 Shape_784" -> "860 Gather_786" [label="[-1]", style=dashed]; +"859 Constant_785" -> "860 Gather_786" [label="[]", style=dashed]; +"860 Gather_786" -> "869 Unsqueeze_795" [label="[]", style=dashed]; +"861 Shape_787" -> "863 Gather_789" [label="[-1]", style=dashed]; +"862 Constant_788" -> "863 Gather_789" [label="[]", style=dashed]; +"863 Gather_789" -> "867 Mul_793" [label="[]", style=dashed]; +"864 Shape_790" -> "866 Gather_792" [label="[-1]", style=dashed]; +"865 Constant_791" -> "866 Gather_792" [label="[]", style=dashed]; +"866 Gather_792" -> "867 Mul_793" [label="[]", style=dashed]; +"867 Mul_793" -> "870 Unsqueeze_796" [label="[]", style=dashed]; +"868 Unsqueeze_794" -> "871 Concat_797" [label="[1]", style=dashed]; +"869 Unsqueeze_795" -> "871 Concat_797" [label="[1]", style=dashed]; +"870 Unsqueeze_796" -> "871 Concat_797" [label="[1]", style=dashed]; +"871 Concat_797" -> "872 Reshape_798" [label="[3]", style=dashed]; +"872 Reshape_798" -> "873 Shape_799" [label="[]", 
style=solid]; +"872 Reshape_798" -> "876 Shape_802" [label="[]", style=solid]; +"872 Reshape_798" -> "879 Shape_805" [label="[]", style=solid]; +"872 Reshape_798" -> "884 Reshape_810" [label="[]", style=solid]; +"873 Shape_799" -> "875 Gather_801" [label="[-1]", style=dashed]; +"874 Constant_800" -> "875 Gather_801" [label="[]", style=dashed]; +"875 Gather_801" -> "888 Unsqueeze_812" [label="[]", style=dashed]; +"876 Shape_802" -> "878 Gather_804" [label="[-1]", style=dashed]; +"877 Constant_803" -> "878 Gather_804" [label="[]", style=dashed]; +"878 Gather_804" -> "889 Unsqueeze_813" [label="[]", style=dashed]; +"879 Shape_805" -> "881 Gather_807" [label="[-1]", style=dashed]; +"880 Constant_806" -> "881 Gather_807" [label="[]", style=dashed]; +"881 Gather_807" -> "882 Unsqueeze_808" [label="[]", style=dashed]; +"882 Unsqueeze_808" -> "883 Concat_809" [label="[1]", style=dashed]; +"883 Concat_809" -> "884 Reshape_810" [label="[2]", style=dashed]; +"884 Reshape_810" -> "887 Gemm_811" [label="[]", style=solid]; +"885 QuantizeLinear_h.3.attn.c_proj.weight_1" -> "886 DequantizeLinear_h.3.attn.c_proj.weight_1" [label="[768, 768]", style=dashed]; +"886 DequantizeLinear_h.3.attn.c_proj.weight_1" -> "887 Gemm_811" [label="[768, 768]", style=solid]; +"887 Gemm_811" -> "891 Reshape_815" [label="[]", style=solid]; +"888 Unsqueeze_812" -> "890 Concat_814" [label="[1]", style=dashed]; +"889 Unsqueeze_813" -> "890 Concat_814" [label="[1]", style=dashed]; +"890 Concat_814" -> "891 Reshape_815" [label="[3]", style=dashed]; +"891 Reshape_815" -> "892 Add_816" [label="[]", style=solid]; +"892 Add_816" -> "893 ReduceMean_817" [label="[]", style=solid]; +"892 Add_816" -> "894 Sub_818" [label="[]", style=solid]; +"892 Add_816" -> "959 Add_875" [label="[]", style=solid]; +"893 ReduceMean_817" -> "894 Sub_818" [label="[]", style=solid]; +"894 Sub_818" -> "896 Pow_820" [label="[]", style=solid]; +"894 Sub_818" -> "901 Div_825" [label="[]", style=solid]; +"895 Constant_819" -> "896 Pow_820" [label="[]", style=solid]; +"896 Pow_820" -> "897 ReduceMean_821" [label="[]", style=solid]; +"897 ReduceMean_821" -> "899 Add_823" [label="[]", style=solid]; +"898 Constant_822" -> "899 Add_823" [label="[]", style=solid]; +"899 Add_823" -> "900 Sqrt_824" [label="[]", style=solid]; +"900 Sqrt_824" -> "901 Div_825" [label="[]", style=solid]; +"901 Div_825" -> "902 Mul_826" [label="[]", style=solid]; +"902 Mul_826" -> "903 Add_827" [label="[]", style=solid]; +"903 Add_827" -> "904 QuantizeLinear_1108_1" [label="[]", style=solid]; +"904 QuantizeLinear_1108_1" -> "905 DequantizeLinear_1108_1" [label="[]", style=dashed]; +"905 DequantizeLinear_1108_1" -> "906 Shape_828" [label="[]", style=solid]; +"905 DequantizeLinear_1108_1" -> "909 Shape_831" [label="[]", style=solid]; +"905 DequantizeLinear_1108_1" -> "912 Shape_834" [label="[]", style=solid]; +"905 DequantizeLinear_1108_1" -> "917 Reshape_839" [label="[]", style=solid]; +"906 Shape_828" -> "908 Gather_830" [label="[-1]", style=dashed]; +"907 Constant_829" -> "908 Gather_830" [label="[]", style=dashed]; +"908 Gather_830" -> "921 Unsqueeze_841" [label="[]", style=dashed]; +"909 Shape_831" -> "911 Gather_833" [label="[-1]", style=dashed]; +"910 Constant_832" -> "911 Gather_833" [label="[]", style=dashed]; +"911 Gather_833" -> "922 Unsqueeze_842" [label="[]", style=dashed]; +"912 Shape_834" -> "914 Gather_836" [label="[-1]", style=dashed]; +"913 Constant_835" -> "914 Gather_836" [label="[]", style=dashed]; +"914 Gather_836" -> "915 Unsqueeze_837" [label="[]", style=dashed]; +"915 
Unsqueeze_837" -> "916 Concat_838" [label="[1]", style=dashed]; +"916 Concat_838" -> "917 Reshape_839" [label="[2]", style=dashed]; +"917 Reshape_839" -> "920 Gemm_840" [label="[]", style=solid]; +"918 QuantizeLinear_h.3.mlp.c_fc.weight_1" -> "919 DequantizeLinear_h.3.mlp.c_fc.weight_1" [label="[768, 3072]", style=dashed]; +"919 DequantizeLinear_h.3.mlp.c_fc.weight_1" -> "920 Gemm_840" [label="[768, 3072]", style=solid]; +"920 Gemm_840" -> "924 Reshape_844" [label="[]", style=solid]; +"921 Unsqueeze_841" -> "923 Concat_843" [label="[1]", style=dashed]; +"922 Unsqueeze_842" -> "923 Concat_843" [label="[1]", style=dashed]; +"923 Concat_843" -> "924 Reshape_844" [label="[3]", style=dashed]; +"924 Reshape_844" -> "926 Mul_846" [label="[]", style=solid]; +"924 Reshape_844" -> "928 Pow_848" [label="[]", style=solid]; +"924 Reshape_844" -> "931 Add_851" [label="[]", style=solid]; +"925 Constant_845" -> "926 Mul_846" [label="[]", style=solid]; +"926 Mul_846" -> "937 Mul_857" [label="[]", style=solid]; +"927 Constant_847" -> "928 Pow_848" [label="[]", style=solid]; +"928 Pow_848" -> "930 Mul_850" [label="[]", style=solid]; +"929 Constant_849" -> "930 Mul_850" [label="[]", style=solid]; +"930 Mul_850" -> "931 Add_851" [label="[]", style=solid]; +"931 Add_851" -> "933 Mul_853" [label="[]", style=solid]; +"932 Constant_852" -> "933 Mul_853" [label="[]", style=solid]; +"933 Mul_853" -> "934 Tanh_854" [label="[]", style=solid]; +"934 Tanh_854" -> "936 Add_856" [label="[]", style=solid]; +"935 Constant_855" -> "936 Add_856" [label="[]", style=solid]; +"936 Add_856" -> "937 Mul_857" [label="[]", style=solid]; +"937 Mul_857" -> "938 QuantizeLinear_1142_1" [label="[]", style=solid]; +"938 QuantizeLinear_1142_1" -> "939 DequantizeLinear_1142_1" [label="[]", style=dashed]; +"939 DequantizeLinear_1142_1" -> "940 Shape_858" [label="[]", style=solid]; +"939 DequantizeLinear_1142_1" -> "943 Shape_861" [label="[]", style=solid]; +"939 DequantizeLinear_1142_1" -> "946 Shape_864" [label="[]", style=solid]; +"939 DequantizeLinear_1142_1" -> "951 Reshape_869" [label="[]", style=solid]; +"940 Shape_858" -> "942 Gather_860" [label="[-1]", style=dashed]; +"941 Constant_859" -> "942 Gather_860" [label="[]", style=dashed]; +"942 Gather_860" -> "955 Unsqueeze_871" [label="[]", style=dashed]; +"943 Shape_861" -> "945 Gather_863" [label="[-1]", style=dashed]; +"944 Constant_862" -> "945 Gather_863" [label="[]", style=dashed]; +"945 Gather_863" -> "956 Unsqueeze_872" [label="[]", style=dashed]; +"946 Shape_864" -> "948 Gather_866" [label="[-1]", style=dashed]; +"947 Constant_865" -> "948 Gather_866" [label="[]", style=dashed]; +"948 Gather_866" -> "949 Unsqueeze_867" [label="[]", style=dashed]; +"949 Unsqueeze_867" -> "950 Concat_868" [label="[1]", style=dashed]; +"950 Concat_868" -> "951 Reshape_869" [label="[2]", style=dashed]; +"951 Reshape_869" -> "954 Gemm_870" [label="[]", style=solid]; +"952 QuantizeLinear_h.3.mlp.c_proj.weight_1" -> "953 DequantizeLinear_h.3.mlp.c_proj.weight_1" [label="[3072, 768]", style=dashed]; +"953 DequantizeLinear_h.3.mlp.c_proj.weight_1" -> "954 Gemm_870" [label="[3072, 768]", style=solid]; +"954 Gemm_870" -> "958 Reshape_874" [label="[]", style=solid]; +"955 Unsqueeze_871" -> "957 Concat_873" [label="[1]", style=dashed]; +"956 Unsqueeze_872" -> "957 Concat_873" [label="[1]", style=dashed]; +"957 Concat_873" -> "958 Reshape_874" [label="[3]", style=dashed]; +"958 Reshape_874" -> "959 Add_875" [label="[]", style=solid]; +"959 Add_875" -> "960 ReduceMean_876" [label="[]", style=solid]; +"959 
Add_875" -> "961 Sub_877" [label="[]", style=solid]; +"959 Add_875" -> "1123 Add_1027" [label="[]", style=solid]; +"960 ReduceMean_876" -> "961 Sub_877" [label="[]", style=solid]; +"961 Sub_877" -> "963 Pow_879" [label="[]", style=solid]; +"961 Sub_877" -> "968 Div_884" [label="[]", style=solid]; +"962 Constant_878" -> "963 Pow_879" [label="[]", style=solid]; +"963 Pow_879" -> "964 ReduceMean_880" [label="[]", style=solid]; +"964 ReduceMean_880" -> "966 Add_882" [label="[]", style=solid]; +"965 Constant_881" -> "966 Add_882" [label="[]", style=solid]; +"966 Add_882" -> "967 Sqrt_883" [label="[]", style=solid]; +"967 Sqrt_883" -> "968 Div_884" [label="[]", style=solid]; +"968 Div_884" -> "969 Mul_885" [label="[]", style=solid]; +"969 Mul_885" -> "970 Add_886" [label="[]", style=solid]; +"970 Add_886" -> "971 QuantizeLinear_1175_1" [label="[]", style=solid]; +"971 QuantizeLinear_1175_1" -> "972 DequantizeLinear_1175_1" [label="[]", style=dashed]; +"972 DequantizeLinear_1175_1" -> "973 Shape_887" [label="[]", style=solid]; +"972 DequantizeLinear_1175_1" -> "976 Shape_890" [label="[]", style=solid]; +"972 DequantizeLinear_1175_1" -> "979 Shape_893" [label="[]", style=solid]; +"972 DequantizeLinear_1175_1" -> "984 Reshape_898" [label="[]", style=solid]; +"973 Shape_887" -> "975 Gather_889" [label="[-1]", style=dashed]; +"974 Constant_888" -> "975 Gather_889" [label="[]", style=dashed]; +"975 Gather_889" -> "988 Unsqueeze_900" [label="[]", style=dashed]; +"976 Shape_890" -> "978 Gather_892" [label="[-1]", style=dashed]; +"977 Constant_891" -> "978 Gather_892" [label="[]", style=dashed]; +"978 Gather_892" -> "989 Unsqueeze_901" [label="[]", style=dashed]; +"979 Shape_893" -> "981 Gather_895" [label="[-1]", style=dashed]; +"980 Constant_894" -> "981 Gather_895" [label="[]", style=dashed]; +"981 Gather_895" -> "982 Unsqueeze_896" [label="[]", style=dashed]; +"982 Unsqueeze_896" -> "983 Concat_897" [label="[1]", style=dashed]; +"983 Concat_897" -> "984 Reshape_898" [label="[2]", style=dashed]; +"984 Reshape_898" -> "987 Gemm_899" [label="[]", style=solid]; +"985 QuantizeLinear_h.4.attn.c_attn.weight_1" -> "986 DequantizeLinear_h.4.attn.c_attn.weight_1" [label="[768, 2304]", style=dashed]; +"986 DequantizeLinear_h.4.attn.c_attn.weight_1" -> "987 Gemm_899" [label="[768, 2304]", style=solid]; +"987 Gemm_899" -> "991 Reshape_903" [label="[]", style=solid]; +"988 Unsqueeze_900" -> "990 Concat_902" [label="[1]", style=dashed]; +"989 Unsqueeze_901" -> "990 Concat_902" [label="[1]", style=dashed]; +"990 Concat_902" -> "991 Reshape_903" [label="[3]", style=dashed]; +"991 Reshape_903" -> "992 Split_904" [label="[]", style=solid]; +"992 Split_904" -> "993 QuantizeLinear_query.9_1" [label="[]", style=solid]; +"992 Split_904" -> "995 Shape_905" [label="[]", style=solid]; +"992 Split_904" -> "998 Shape_908" [label="[]", style=solid]; +"992 Split_904" -> "1001 Shape_911" [label="[]", style=solid]; +"992 Split_904" -> "1014 Shape_924" [label="[]", style=solid]; +"992 Split_904" -> "1017 Shape_927" [label="[]", style=solid]; +"992 Split_904" -> "1020 Shape_930" [label="[]", style=solid]; +"992 Split_904" -> "1031 Reshape_941" [label="[]", style=solid]; +"992 Split_904" -> "1035 Shape_943" [label="[]", style=solid]; +"992 Split_904" -> "1038 Shape_946" [label="[]", style=solid]; +"992 Split_904" -> "1041 Shape_949" [label="[]", style=solid]; +"992 Split_904" -> "1052 Reshape_960" [label="[]", style=solid]; +"993 QuantizeLinear_query.9_1" -> "994 DequantizeLinear_query.9_1" [label="[]", style=dashed]; +"994 
DequantizeLinear_query.9_1" -> "1012 Reshape_922" [label="[]", style=solid]; +"995 Shape_905" -> "997 Gather_907" [label="[-1]", style=dashed]; +"996 Constant_906" -> "997 Gather_907" [label="[]", style=dashed]; +"997 Gather_907" -> "1008 Unsqueeze_918" [label="[]", style=dashed]; +"998 Shape_908" -> "1000 Gather_910" [label="[-1]", style=dashed]; +"999 Constant_909" -> "1000 Gather_910" [label="[]", style=dashed]; +"1000 Gather_910" -> "1009 Unsqueeze_919" [label="[]", style=dashed]; +"1001 Shape_911" -> "1003 Gather_913" [label="[-1]", style=dashed]; +"1002 Constant_912" -> "1003 Gather_913" [label="[]", style=dashed]; +"1003 Gather_913" -> "1005 Div_915" [label="[]", style=dashed]; +"1004 Constant_914" -> "1005 Div_915" [label="[]", style=dashed]; +"1005 Div_915" -> "1006 Cast_916" [label="[]", style=dashed]; +"1006 Cast_916" -> "1007 Cast_917" [label="[]", style=dashed]; +"1007 Cast_917" -> "1010 Unsqueeze_920" [label="[]", style=dashed]; +"1008 Unsqueeze_918" -> "1011 Concat_921" [label="[1]", style=dashed]; +"1009 Unsqueeze_919" -> "1011 Concat_921" [label="[1]", style=dashed]; +"1010 Unsqueeze_920" -> "1011 Concat_921" [label="[1]", style=dashed]; +"1011 Concat_921" -> "1012 Reshape_922" [label="[4]", style=dashed]; +"1012 Reshape_922" -> "1013 Transpose_923" [label="[]", style=solid]; +"1013 Transpose_923" -> "1058 MatMul_966" [label="[]", style=solid]; +"1014 Shape_924" -> "1016 Gather_926" [label="[-1]", style=dashed]; +"1015 Constant_925" -> "1016 Gather_926" [label="[]", style=dashed]; +"1016 Gather_926" -> "1027 Unsqueeze_937" [label="[]", style=dashed]; +"1017 Shape_927" -> "1019 Gather_929" [label="[-1]", style=dashed]; +"1018 Constant_928" -> "1019 Gather_929" [label="[]", style=dashed]; +"1019 Gather_929" -> "1028 Unsqueeze_938" [label="[]", style=dashed]; +"1020 Shape_930" -> "1022 Gather_932" [label="[-1]", style=dashed]; +"1021 Constant_931" -> "1022 Gather_932" [label="[]", style=dashed]; +"1022 Gather_932" -> "1024 Div_934" [label="[]", style=dashed]; +"1023 Constant_933" -> "1024 Div_934" [label="[]", style=dashed]; +"1024 Div_934" -> "1025 Cast_935" [label="[]", style=dashed]; +"1025 Cast_935" -> "1026 Cast_936" [label="[]", style=dashed]; +"1026 Cast_936" -> "1029 Unsqueeze_939" [label="[]", style=dashed]; +"1027 Unsqueeze_937" -> "1030 Concat_940" [label="[1]", style=dashed]; +"1028 Unsqueeze_938" -> "1030 Concat_940" [label="[1]", style=dashed]; +"1029 Unsqueeze_939" -> "1030 Concat_940" [label="[1]", style=dashed]; +"1030 Concat_940" -> "1031 Reshape_941" [label="[4]", style=dashed]; +"1031 Reshape_941" -> "1032 QuantizeLinear_1240_1" [label="[]", style=solid]; +"1031 Reshape_941" -> "1054 Transpose_962" [label="[]", style=solid]; +"1032 QuantizeLinear_1240_1" -> "1033 DequantizeLinear_1240_1" [label="[]", style=dashed]; +"1033 DequantizeLinear_1240_1" -> "1034 Transpose_942" [label="[]", style=solid]; +"1034 Transpose_942" -> "1058 MatMul_966" [label="[]", style=solid]; +"1035 Shape_943" -> "1037 Gather_945" [label="[-1]", style=dashed]; +"1036 Constant_944" -> "1037 Gather_945" [label="[]", style=dashed]; +"1037 Gather_945" -> "1048 Unsqueeze_956" [label="[]", style=dashed]; +"1038 Shape_946" -> "1040 Gather_948" [label="[-1]", style=dashed]; +"1039 Constant_947" -> "1040 Gather_948" [label="[]", style=dashed]; +"1040 Gather_948" -> "1049 Unsqueeze_957" [label="[]", style=dashed]; +"1041 Shape_949" -> "1043 Gather_951" [label="[-1]", style=dashed]; +"1042 Constant_950" -> "1043 Gather_951" [label="[]", style=dashed]; +"1043 Gather_951" -> "1045 Div_953" 
[label="[]", style=dashed]; +"1044 Constant_952" -> "1045 Div_953" [label="[]", style=dashed]; +"1045 Div_953" -> "1046 Cast_954" [label="[]", style=dashed]; +"1046 Cast_954" -> "1047 Cast_955" [label="[]", style=dashed]; +"1047 Cast_955" -> "1050 Unsqueeze_958" [label="[]", style=dashed]; +"1048 Unsqueeze_956" -> "1051 Concat_959" [label="[1]", style=dashed]; +"1049 Unsqueeze_957" -> "1051 Concat_959" [label="[1]", style=dashed]; +"1050 Unsqueeze_958" -> "1051 Concat_959" [label="[1]", style=dashed]; +"1051 Concat_959" -> "1052 Reshape_960" [label="[4]", style=dashed]; +"1052 Reshape_960" -> "1053 Transpose_961" [label="[]", style=solid]; +"1053 Transpose_961" -> "1056 Unsqueeze_964" [label="[]", style=solid]; +"1053 Transpose_961" -> "1082 MatMul_990" [label="[]", style=solid]; +"1054 Transpose_962" -> "1055 Unsqueeze_963" [label="[]", style=solid]; +"1055 Unsqueeze_963" -> "1057 Concat_965" [label="[]", style=solid]; +"1056 Unsqueeze_964" -> "1057 Concat_965" [label="[]", style=solid]; +"1057 Concat_965" -> "2831 nncf_model_output_5" [label="[2, 1, 12, 8, 64]", style=solid]; +"1058 MatMul_966" -> "1060 Div_968" [label="[]", style=solid]; +"1059 Constant_967" -> "1060 Div_968" [label="[]", style=solid]; +"1060 Div_968" -> "1061 Shape_969" [label="[]", style=solid]; +"1060 Div_968" -> "1064 Shape_972" [label="[]", style=solid]; +"1060 Div_968" -> "1075 Mul_983" [label="[]", style=solid]; +"1061 Shape_969" -> "1063 Gather_971" [label="[-1]", style=dashed]; +"1062 Constant_970" -> "1063 Gather_971" [label="[]", style=dashed]; +"1063 Gather_971" -> "1067 Sub_975" [label="[]", style=dashed]; +"1064 Shape_972" -> "1066 Gather_974" [label="[-1]", style=dashed]; +"1065 Constant_973" -> "1066 Gather_974" [label="[]", style=dashed]; +"1066 Gather_974" -> "1067 Sub_975" [label="[]", style=dashed]; +"1066 Gather_974" -> "1069 Unsqueeze_977" [label="[]", style=dashed]; +"1066 Gather_974" -> "1072 Unsqueeze_980" [label="[]", style=dashed]; +"1067 Sub_975" -> "1068 Unsqueeze_976" [label="[]", style=dashed]; +"1068 Unsqueeze_976" -> "1071 Slice_979" [label="[1]", style=dashed]; +"1069 Unsqueeze_977" -> "1071 Slice_979" [label="[1]", style=dashed]; +"1070 Constant_978" -> "1071 Slice_979" [label="[1]", style=dashed]; +"1071 Slice_979" -> "1074 Slice_982" [label="[]", style=solid]; +"1072 Unsqueeze_980" -> "1074 Slice_982" [label="[1]", style=dashed]; +"1073 Constant_981" -> "1074 Slice_982" [label="[1]", style=dashed]; +"1074 Slice_982" -> "1075 Mul_983" [label="[]", style=solid]; +"1074 Slice_982" -> "1077 Sub_985" [label="[]", style=solid]; +"1075 Mul_983" -> "1080 Sub_988" [label="[]", style=solid]; +"1076 Constant_984" -> "1077 Sub_985" [label="[]", style=solid]; +"1077 Sub_985" -> "1079 Mul_987" [label="[]", style=solid]; +"1078 Constant_986" -> "1079 Mul_987" [label="[]", style=solid]; +"1079 Mul_987" -> "1080 Sub_988" [label="[]", style=solid]; +"1080 Sub_988" -> "1081 Softmax_989" [label="[]", style=solid]; +"1081 Softmax_989" -> "1082 MatMul_990" [label="[]", style=solid]; +"1082 MatMul_990" -> "1083 QuantizeLinear_1297_1" [label="[]", style=solid]; +"1083 QuantizeLinear_1297_1" -> "1084 DequantizeLinear_1297_1" [label="[]", style=dashed]; +"1084 DequantizeLinear_1297_1" -> "1085 Transpose_991" [label="[]", style=solid]; +"1085 Transpose_991" -> "1086 Shape_992" [label="[]", style=solid]; +"1085 Transpose_991" -> "1089 Shape_995" [label="[]", style=solid]; +"1085 Transpose_991" -> "1092 Shape_998" [label="[]", style=solid]; +"1085 Transpose_991" -> "1095 Shape_1001" [label="[]", style=solid]; 
+"1085 Transpose_991" -> "1103 Reshape_1009" [label="[]", style=solid]; +"1086 Shape_992" -> "1088 Gather_994" [label="[-1]", style=dashed]; +"1087 Constant_993" -> "1088 Gather_994" [label="[]", style=dashed]; +"1088 Gather_994" -> "1099 Unsqueeze_1005" [label="[]", style=dashed]; +"1089 Shape_995" -> "1091 Gather_997" [label="[-1]", style=dashed]; +"1090 Constant_996" -> "1091 Gather_997" [label="[]", style=dashed]; +"1091 Gather_997" -> "1100 Unsqueeze_1006" [label="[]", style=dashed]; +"1092 Shape_998" -> "1094 Gather_1000" [label="[-1]", style=dashed]; +"1093 Constant_999" -> "1094 Gather_1000" [label="[]", style=dashed]; +"1094 Gather_1000" -> "1098 Mul_1004" [label="[]", style=dashed]; +"1095 Shape_1001" -> "1097 Gather_1003" [label="[-1]", style=dashed]; +"1096 Constant_1002" -> "1097 Gather_1003" [label="[]", style=dashed]; +"1097 Gather_1003" -> "1098 Mul_1004" [label="[]", style=dashed]; +"1098 Mul_1004" -> "1101 Unsqueeze_1007" [label="[]", style=dashed]; +"1099 Unsqueeze_1005" -> "1102 Concat_1008" [label="[1]", style=dashed]; +"1100 Unsqueeze_1006" -> "1102 Concat_1008" [label="[1]", style=dashed]; +"1101 Unsqueeze_1007" -> "1102 Concat_1008" [label="[1]", style=dashed]; +"1102 Concat_1008" -> "1103 Reshape_1009" [label="[3]", style=dashed]; +"1103 Reshape_1009" -> "1104 Shape_1010" [label="[]", style=solid]; +"1103 Reshape_1009" -> "1107 Shape_1013" [label="[]", style=solid]; +"1103 Reshape_1009" -> "1110 Shape_1016" [label="[]", style=solid]; +"1103 Reshape_1009" -> "1115 Reshape_1021" [label="[]", style=solid]; +"1104 Shape_1010" -> "1106 Gather_1012" [label="[-1]", style=dashed]; +"1105 Constant_1011" -> "1106 Gather_1012" [label="[]", style=dashed]; +"1106 Gather_1012" -> "1119 Unsqueeze_1023" [label="[]", style=dashed]; +"1107 Shape_1013" -> "1109 Gather_1015" [label="[-1]", style=dashed]; +"1108 Constant_1014" -> "1109 Gather_1015" [label="[]", style=dashed]; +"1109 Gather_1015" -> "1120 Unsqueeze_1024" [label="[]", style=dashed]; +"1110 Shape_1016" -> "1112 Gather_1018" [label="[-1]", style=dashed]; +"1111 Constant_1017" -> "1112 Gather_1018" [label="[]", style=dashed]; +"1112 Gather_1018" -> "1113 Unsqueeze_1019" [label="[]", style=dashed]; +"1113 Unsqueeze_1019" -> "1114 Concat_1020" [label="[1]", style=dashed]; +"1114 Concat_1020" -> "1115 Reshape_1021" [label="[2]", style=dashed]; +"1115 Reshape_1021" -> "1118 Gemm_1022" [label="[]", style=solid]; +"1116 QuantizeLinear_h.4.attn.c_proj.weight_1" -> "1117 DequantizeLinear_h.4.attn.c_proj.weight_1" [label="[768, 768]", style=dashed]; +"1117 DequantizeLinear_h.4.attn.c_proj.weight_1" -> "1118 Gemm_1022" [label="[768, 768]", style=solid]; +"1118 Gemm_1022" -> "1122 Reshape_1026" [label="[]", style=solid]; +"1119 Unsqueeze_1023" -> "1121 Concat_1025" [label="[1]", style=dashed]; +"1120 Unsqueeze_1024" -> "1121 Concat_1025" [label="[1]", style=dashed]; +"1121 Concat_1025" -> "1122 Reshape_1026" [label="[3]", style=dashed]; +"1122 Reshape_1026" -> "1123 Add_1027" [label="[]", style=solid]; +"1123 Add_1027" -> "1124 ReduceMean_1028" [label="[]", style=solid]; +"1123 Add_1027" -> "1125 Sub_1029" [label="[]", style=solid]; +"1123 Add_1027" -> "1190 Add_1086" [label="[]", style=solid]; +"1124 ReduceMean_1028" -> "1125 Sub_1029" [label="[]", style=solid]; +"1125 Sub_1029" -> "1127 Pow_1031" [label="[]", style=solid]; +"1125 Sub_1029" -> "1132 Div_1036" [label="[]", style=solid]; +"1126 Constant_1030" -> "1127 Pow_1031" [label="[]", style=solid]; +"1127 Pow_1031" -> "1128 ReduceMean_1032" [label="[]", style=solid]; +"1128 
ReduceMean_1032" -> "1130 Add_1034" [label="[]", style=solid]; +"1129 Constant_1033" -> "1130 Add_1034" [label="[]", style=solid]; +"1130 Add_1034" -> "1131 Sqrt_1035" [label="[]", style=solid]; +"1131 Sqrt_1035" -> "1132 Div_1036" [label="[]", style=solid]; +"1132 Div_1036" -> "1133 Mul_1037" [label="[]", style=solid]; +"1133 Mul_1037" -> "1134 Add_1038" [label="[]", style=solid]; +"1134 Add_1038" -> "1135 QuantizeLinear_1349_1" [label="[]", style=solid]; +"1135 QuantizeLinear_1349_1" -> "1136 DequantizeLinear_1349_1" [label="[]", style=dashed]; +"1136 DequantizeLinear_1349_1" -> "1137 Shape_1039" [label="[]", style=solid]; +"1136 DequantizeLinear_1349_1" -> "1140 Shape_1042" [label="[]", style=solid]; +"1136 DequantizeLinear_1349_1" -> "1143 Shape_1045" [label="[]", style=solid]; +"1136 DequantizeLinear_1349_1" -> "1148 Reshape_1050" [label="[]", style=solid]; +"1137 Shape_1039" -> "1139 Gather_1041" [label="[-1]", style=dashed]; +"1138 Constant_1040" -> "1139 Gather_1041" [label="[]", style=dashed]; +"1139 Gather_1041" -> "1152 Unsqueeze_1052" [label="[]", style=dashed]; +"1140 Shape_1042" -> "1142 Gather_1044" [label="[-1]", style=dashed]; +"1141 Constant_1043" -> "1142 Gather_1044" [label="[]", style=dashed]; +"1142 Gather_1044" -> "1153 Unsqueeze_1053" [label="[]", style=dashed]; +"1143 Shape_1045" -> "1145 Gather_1047" [label="[-1]", style=dashed]; +"1144 Constant_1046" -> "1145 Gather_1047" [label="[]", style=dashed]; +"1145 Gather_1047" -> "1146 Unsqueeze_1048" [label="[]", style=dashed]; +"1146 Unsqueeze_1048" -> "1147 Concat_1049" [label="[1]", style=dashed]; +"1147 Concat_1049" -> "1148 Reshape_1050" [label="[2]", style=dashed]; +"1148 Reshape_1050" -> "1151 Gemm_1051" [label="[]", style=solid]; +"1149 QuantizeLinear_h.4.mlp.c_fc.weight_1" -> "1150 DequantizeLinear_h.4.mlp.c_fc.weight_1" [label="[768, 3072]", style=dashed]; +"1150 DequantizeLinear_h.4.mlp.c_fc.weight_1" -> "1151 Gemm_1051" [label="[768, 3072]", style=solid]; +"1151 Gemm_1051" -> "1155 Reshape_1055" [label="[]", style=solid]; +"1152 Unsqueeze_1052" -> "1154 Concat_1054" [label="[1]", style=dashed]; +"1153 Unsqueeze_1053" -> "1154 Concat_1054" [label="[1]", style=dashed]; +"1154 Concat_1054" -> "1155 Reshape_1055" [label="[3]", style=dashed]; +"1155 Reshape_1055" -> "1157 Mul_1057" [label="[]", style=solid]; +"1155 Reshape_1055" -> "1159 Pow_1059" [label="[]", style=solid]; +"1155 Reshape_1055" -> "1162 Add_1062" [label="[]", style=solid]; +"1156 Constant_1056" -> "1157 Mul_1057" [label="[]", style=solid]; +"1157 Mul_1057" -> "1168 Mul_1068" [label="[]", style=solid]; +"1158 Constant_1058" -> "1159 Pow_1059" [label="[]", style=solid]; +"1159 Pow_1059" -> "1161 Mul_1061" [label="[]", style=solid]; +"1160 Constant_1060" -> "1161 Mul_1061" [label="[]", style=solid]; +"1161 Mul_1061" -> "1162 Add_1062" [label="[]", style=solid]; +"1162 Add_1062" -> "1164 Mul_1064" [label="[]", style=solid]; +"1163 Constant_1063" -> "1164 Mul_1064" [label="[]", style=solid]; +"1164 Mul_1064" -> "1165 Tanh_1065" [label="[]", style=solid]; +"1165 Tanh_1065" -> "1167 Add_1067" [label="[]", style=solid]; +"1166 Constant_1066" -> "1167 Add_1067" [label="[]", style=solid]; +"1167 Add_1067" -> "1168 Mul_1068" [label="[]", style=solid]; +"1168 Mul_1068" -> "1169 QuantizeLinear_1383_1" [label="[]", style=solid]; +"1169 QuantizeLinear_1383_1" -> "1170 DequantizeLinear_1383_1" [label="[]", style=dashed]; +"1170 DequantizeLinear_1383_1" -> "1171 Shape_1069" [label="[]", style=solid]; +"1170 DequantizeLinear_1383_1" -> "1174 Shape_1072" 
[label="[]", style=solid]; +"1170 DequantizeLinear_1383_1" -> "1177 Shape_1075" [label="[]", style=solid]; +"1170 DequantizeLinear_1383_1" -> "1182 Reshape_1080" [label="[]", style=solid]; +"1171 Shape_1069" -> "1173 Gather_1071" [label="[-1]", style=dashed]; +"1172 Constant_1070" -> "1173 Gather_1071" [label="[]", style=dashed]; +"1173 Gather_1071" -> "1186 Unsqueeze_1082" [label="[]", style=dashed]; +"1174 Shape_1072" -> "1176 Gather_1074" [label="[-1]", style=dashed]; +"1175 Constant_1073" -> "1176 Gather_1074" [label="[]", style=dashed]; +"1176 Gather_1074" -> "1187 Unsqueeze_1083" [label="[]", style=dashed]; +"1177 Shape_1075" -> "1179 Gather_1077" [label="[-1]", style=dashed]; +"1178 Constant_1076" -> "1179 Gather_1077" [label="[]", style=dashed]; +"1179 Gather_1077" -> "1180 Unsqueeze_1078" [label="[]", style=dashed]; +"1180 Unsqueeze_1078" -> "1181 Concat_1079" [label="[1]", style=dashed]; +"1181 Concat_1079" -> "1182 Reshape_1080" [label="[2]", style=dashed]; +"1182 Reshape_1080" -> "1185 Gemm_1081" [label="[]", style=solid]; +"1183 QuantizeLinear_h.4.mlp.c_proj.weight_1" -> "1184 DequantizeLinear_h.4.mlp.c_proj.weight_1" [label="[3072, 768]", style=dashed]; +"1184 DequantizeLinear_h.4.mlp.c_proj.weight_1" -> "1185 Gemm_1081" [label="[3072, 768]", style=solid]; +"1185 Gemm_1081" -> "1189 Reshape_1085" [label="[]", style=solid]; +"1186 Unsqueeze_1082" -> "1188 Concat_1084" [label="[1]", style=dashed]; +"1187 Unsqueeze_1083" -> "1188 Concat_1084" [label="[1]", style=dashed]; +"1188 Concat_1084" -> "1189 Reshape_1085" [label="[3]", style=dashed]; +"1189 Reshape_1085" -> "1190 Add_1086" [label="[]", style=solid]; +"1190 Add_1086" -> "1191 ReduceMean_1087" [label="[]", style=solid]; +"1190 Add_1086" -> "1192 Sub_1088" [label="[]", style=solid]; +"1190 Add_1086" -> "1354 Add_1238" [label="[]", style=solid]; +"1191 ReduceMean_1087" -> "1192 Sub_1088" [label="[]", style=solid]; +"1192 Sub_1088" -> "1194 Pow_1090" [label="[]", style=solid]; +"1192 Sub_1088" -> "1199 Div_1095" [label="[]", style=solid]; +"1193 Constant_1089" -> "1194 Pow_1090" [label="[]", style=solid]; +"1194 Pow_1090" -> "1195 ReduceMean_1091" [label="[]", style=solid]; +"1195 ReduceMean_1091" -> "1197 Add_1093" [label="[]", style=solid]; +"1196 Constant_1092" -> "1197 Add_1093" [label="[]", style=solid]; +"1197 Add_1093" -> "1198 Sqrt_1094" [label="[]", style=solid]; +"1198 Sqrt_1094" -> "1199 Div_1095" [label="[]", style=solid]; +"1199 Div_1095" -> "1200 Mul_1096" [label="[]", style=solid]; +"1200 Mul_1096" -> "1201 Add_1097" [label="[]", style=solid]; +"1201 Add_1097" -> "1202 QuantizeLinear_1416_1" [label="[]", style=solid]; +"1202 QuantizeLinear_1416_1" -> "1203 DequantizeLinear_1416_1" [label="[]", style=dashed]; +"1203 DequantizeLinear_1416_1" -> "1204 Shape_1098" [label="[]", style=solid]; +"1203 DequantizeLinear_1416_1" -> "1207 Shape_1101" [label="[]", style=solid]; +"1203 DequantizeLinear_1416_1" -> "1210 Shape_1104" [label="[]", style=solid]; +"1203 DequantizeLinear_1416_1" -> "1215 Reshape_1109" [label="[]", style=solid]; +"1204 Shape_1098" -> "1206 Gather_1100" [label="[-1]", style=dashed]; +"1205 Constant_1099" -> "1206 Gather_1100" [label="[]", style=dashed]; +"1206 Gather_1100" -> "1219 Unsqueeze_1111" [label="[]", style=dashed]; +"1207 Shape_1101" -> "1209 Gather_1103" [label="[-1]", style=dashed]; +"1208 Constant_1102" -> "1209 Gather_1103" [label="[]", style=dashed]; +"1209 Gather_1103" -> "1220 Unsqueeze_1112" [label="[]", style=dashed]; +"1210 Shape_1104" -> "1212 Gather_1106" [label="[-1]", 
style=dashed]; +"1211 Constant_1105" -> "1212 Gather_1106" [label="[]", style=dashed]; +"1212 Gather_1106" -> "1213 Unsqueeze_1107" [label="[]", style=dashed]; +"1213 Unsqueeze_1107" -> "1214 Concat_1108" [label="[1]", style=dashed]; +"1214 Concat_1108" -> "1215 Reshape_1109" [label="[2]", style=dashed]; +"1215 Reshape_1109" -> "1218 Gemm_1110" [label="[]", style=solid]; +"1216 QuantizeLinear_h.5.attn.c_attn.weight_1" -> "1217 DequantizeLinear_h.5.attn.c_attn.weight_1" [label="[768, 2304]", style=dashed]; +"1217 DequantizeLinear_h.5.attn.c_attn.weight_1" -> "1218 Gemm_1110" [label="[768, 2304]", style=solid]; +"1218 Gemm_1110" -> "1222 Reshape_1114" [label="[]", style=solid]; +"1219 Unsqueeze_1111" -> "1221 Concat_1113" [label="[1]", style=dashed]; +"1220 Unsqueeze_1112" -> "1221 Concat_1113" [label="[1]", style=dashed]; +"1221 Concat_1113" -> "1222 Reshape_1114" [label="[3]", style=dashed]; +"1222 Reshape_1114" -> "1223 Split_1115" [label="[]", style=solid]; +"1223 Split_1115" -> "1224 QuantizeLinear_query.11_1" [label="[]", style=solid]; +"1223 Split_1115" -> "1226 Shape_1116" [label="[]", style=solid]; +"1223 Split_1115" -> "1229 Shape_1119" [label="[]", style=solid]; +"1223 Split_1115" -> "1232 Shape_1122" [label="[]", style=solid]; +"1223 Split_1115" -> "1245 Shape_1135" [label="[]", style=solid]; +"1223 Split_1115" -> "1248 Shape_1138" [label="[]", style=solid]; +"1223 Split_1115" -> "1251 Shape_1141" [label="[]", style=solid]; +"1223 Split_1115" -> "1262 Reshape_1152" [label="[]", style=solid]; +"1223 Split_1115" -> "1266 Shape_1154" [label="[]", style=solid]; +"1223 Split_1115" -> "1269 Shape_1157" [label="[]", style=solid]; +"1223 Split_1115" -> "1272 Shape_1160" [label="[]", style=solid]; +"1223 Split_1115" -> "1283 Reshape_1171" [label="[]", style=solid]; +"1224 QuantizeLinear_query.11_1" -> "1225 DequantizeLinear_query.11_1" [label="[]", style=dashed]; +"1225 DequantizeLinear_query.11_1" -> "1243 Reshape_1133" [label="[]", style=solid]; +"1226 Shape_1116" -> "1228 Gather_1118" [label="[-1]", style=dashed]; +"1227 Constant_1117" -> "1228 Gather_1118" [label="[]", style=dashed]; +"1228 Gather_1118" -> "1239 Unsqueeze_1129" [label="[]", style=dashed]; +"1229 Shape_1119" -> "1231 Gather_1121" [label="[-1]", style=dashed]; +"1230 Constant_1120" -> "1231 Gather_1121" [label="[]", style=dashed]; +"1231 Gather_1121" -> "1240 Unsqueeze_1130" [label="[]", style=dashed]; +"1232 Shape_1122" -> "1234 Gather_1124" [label="[-1]", style=dashed]; +"1233 Constant_1123" -> "1234 Gather_1124" [label="[]", style=dashed]; +"1234 Gather_1124" -> "1236 Div_1126" [label="[]", style=dashed]; +"1235 Constant_1125" -> "1236 Div_1126" [label="[]", style=dashed]; +"1236 Div_1126" -> "1237 Cast_1127" [label="[]", style=dashed]; +"1237 Cast_1127" -> "1238 Cast_1128" [label="[]", style=dashed]; +"1238 Cast_1128" -> "1241 Unsqueeze_1131" [label="[]", style=dashed]; +"1239 Unsqueeze_1129" -> "1242 Concat_1132" [label="[1]", style=dashed]; +"1240 Unsqueeze_1130" -> "1242 Concat_1132" [label="[1]", style=dashed]; +"1241 Unsqueeze_1131" -> "1242 Concat_1132" [label="[1]", style=dashed]; +"1242 Concat_1132" -> "1243 Reshape_1133" [label="[4]", style=dashed]; +"1243 Reshape_1133" -> "1244 Transpose_1134" [label="[]", style=solid]; +"1244 Transpose_1134" -> "1289 MatMul_1177" [label="[]", style=solid]; +"1245 Shape_1135" -> "1247 Gather_1137" [label="[-1]", style=dashed]; +"1246 Constant_1136" -> "1247 Gather_1137" [label="[]", style=dashed]; +"1247 Gather_1137" -> "1258 Unsqueeze_1148" [label="[]", style=dashed]; 
+"1248 Shape_1138" -> "1250 Gather_1140" [label="[-1]", style=dashed]; +"1249 Constant_1139" -> "1250 Gather_1140" [label="[]", style=dashed]; +"1250 Gather_1140" -> "1259 Unsqueeze_1149" [label="[]", style=dashed]; +"1251 Shape_1141" -> "1253 Gather_1143" [label="[-1]", style=dashed]; +"1252 Constant_1142" -> "1253 Gather_1143" [label="[]", style=dashed]; +"1253 Gather_1143" -> "1255 Div_1145" [label="[]", style=dashed]; +"1254 Constant_1144" -> "1255 Div_1145" [label="[]", style=dashed]; +"1255 Div_1145" -> "1256 Cast_1146" [label="[]", style=dashed]; +"1256 Cast_1146" -> "1257 Cast_1147" [label="[]", style=dashed]; +"1257 Cast_1147" -> "1260 Unsqueeze_1150" [label="[]", style=dashed]; +"1258 Unsqueeze_1148" -> "1261 Concat_1151" [label="[1]", style=dashed]; +"1259 Unsqueeze_1149" -> "1261 Concat_1151" [label="[1]", style=dashed]; +"1260 Unsqueeze_1150" -> "1261 Concat_1151" [label="[1]", style=dashed]; +"1261 Concat_1151" -> "1262 Reshape_1152" [label="[4]", style=dashed]; +"1262 Reshape_1152" -> "1263 QuantizeLinear_1481_1" [label="[]", style=solid]; +"1262 Reshape_1152" -> "1285 Transpose_1173" [label="[]", style=solid]; +"1263 QuantizeLinear_1481_1" -> "1264 DequantizeLinear_1481_1" [label="[]", style=dashed]; +"1264 DequantizeLinear_1481_1" -> "1265 Transpose_1153" [label="[]", style=solid]; +"1265 Transpose_1153" -> "1289 MatMul_1177" [label="[]", style=solid]; +"1266 Shape_1154" -> "1268 Gather_1156" [label="[-1]", style=dashed]; +"1267 Constant_1155" -> "1268 Gather_1156" [label="[]", style=dashed]; +"1268 Gather_1156" -> "1279 Unsqueeze_1167" [label="[]", style=dashed]; +"1269 Shape_1157" -> "1271 Gather_1159" [label="[-1]", style=dashed]; +"1270 Constant_1158" -> "1271 Gather_1159" [label="[]", style=dashed]; +"1271 Gather_1159" -> "1280 Unsqueeze_1168" [label="[]", style=dashed]; +"1272 Shape_1160" -> "1274 Gather_1162" [label="[-1]", style=dashed]; +"1273 Constant_1161" -> "1274 Gather_1162" [label="[]", style=dashed]; +"1274 Gather_1162" -> "1276 Div_1164" [label="[]", style=dashed]; +"1275 Constant_1163" -> "1276 Div_1164" [label="[]", style=dashed]; +"1276 Div_1164" -> "1277 Cast_1165" [label="[]", style=dashed]; +"1277 Cast_1165" -> "1278 Cast_1166" [label="[]", style=dashed]; +"1278 Cast_1166" -> "1281 Unsqueeze_1169" [label="[]", style=dashed]; +"1279 Unsqueeze_1167" -> "1282 Concat_1170" [label="[1]", style=dashed]; +"1280 Unsqueeze_1168" -> "1282 Concat_1170" [label="[1]", style=dashed]; +"1281 Unsqueeze_1169" -> "1282 Concat_1170" [label="[1]", style=dashed]; +"1282 Concat_1170" -> "1283 Reshape_1171" [label="[4]", style=dashed]; +"1283 Reshape_1171" -> "1284 Transpose_1172" [label="[]", style=solid]; +"1284 Transpose_1172" -> "1287 Unsqueeze_1175" [label="[]", style=solid]; +"1284 Transpose_1172" -> "1313 MatMul_1201" [label="[]", style=solid]; +"1285 Transpose_1173" -> "1286 Unsqueeze_1174" [label="[]", style=solid]; +"1286 Unsqueeze_1174" -> "1288 Concat_1176" [label="[]", style=solid]; +"1287 Unsqueeze_1175" -> "1288 Concat_1176" [label="[]", style=solid]; +"1288 Concat_1176" -> "2832 nncf_model_output_6" [label="[2, 1, 12, 8, 64]", style=solid]; +"1289 MatMul_1177" -> "1291 Div_1179" [label="[]", style=solid]; +"1290 Constant_1178" -> "1291 Div_1179" [label="[]", style=solid]; +"1291 Div_1179" -> "1292 Shape_1180" [label="[]", style=solid]; +"1291 Div_1179" -> "1295 Shape_1183" [label="[]", style=solid]; +"1291 Div_1179" -> "1306 Mul_1194" [label="[]", style=solid]; +"1292 Shape_1180" -> "1294 Gather_1182" [label="[-1]", style=dashed]; +"1293 Constant_1181" -> 
"1294 Gather_1182" [label="[]", style=dashed]; +"1294 Gather_1182" -> "1298 Sub_1186" [label="[]", style=dashed]; +"1295 Shape_1183" -> "1297 Gather_1185" [label="[-1]", style=dashed]; +"1296 Constant_1184" -> "1297 Gather_1185" [label="[]", style=dashed]; +"1297 Gather_1185" -> "1298 Sub_1186" [label="[]", style=dashed]; +"1297 Gather_1185" -> "1300 Unsqueeze_1188" [label="[]", style=dashed]; +"1297 Gather_1185" -> "1303 Unsqueeze_1191" [label="[]", style=dashed]; +"1298 Sub_1186" -> "1299 Unsqueeze_1187" [label="[]", style=dashed]; +"1299 Unsqueeze_1187" -> "1302 Slice_1190" [label="[1]", style=dashed]; +"1300 Unsqueeze_1188" -> "1302 Slice_1190" [label="[1]", style=dashed]; +"1301 Constant_1189" -> "1302 Slice_1190" [label="[1]", style=dashed]; +"1302 Slice_1190" -> "1305 Slice_1193" [label="[]", style=solid]; +"1303 Unsqueeze_1191" -> "1305 Slice_1193" [label="[1]", style=dashed]; +"1304 Constant_1192" -> "1305 Slice_1193" [label="[1]", style=dashed]; +"1305 Slice_1193" -> "1306 Mul_1194" [label="[]", style=solid]; +"1305 Slice_1193" -> "1308 Sub_1196" [label="[]", style=solid]; +"1306 Mul_1194" -> "1311 Sub_1199" [label="[]", style=solid]; +"1307 Constant_1195" -> "1308 Sub_1196" [label="[]", style=solid]; +"1308 Sub_1196" -> "1310 Mul_1198" [label="[]", style=solid]; +"1309 Constant_1197" -> "1310 Mul_1198" [label="[]", style=solid]; +"1310 Mul_1198" -> "1311 Sub_1199" [label="[]", style=solid]; +"1311 Sub_1199" -> "1312 Softmax_1200" [label="[]", style=solid]; +"1312 Softmax_1200" -> "1313 MatMul_1201" [label="[]", style=solid]; +"1313 MatMul_1201" -> "1314 QuantizeLinear_1538_1" [label="[]", style=solid]; +"1314 QuantizeLinear_1538_1" -> "1315 DequantizeLinear_1538_1" [label="[]", style=dashed]; +"1315 DequantizeLinear_1538_1" -> "1316 Transpose_1202" [label="[]", style=solid]; +"1316 Transpose_1202" -> "1317 Shape_1203" [label="[]", style=solid]; +"1316 Transpose_1202" -> "1320 Shape_1206" [label="[]", style=solid]; +"1316 Transpose_1202" -> "1323 Shape_1209" [label="[]", style=solid]; +"1316 Transpose_1202" -> "1326 Shape_1212" [label="[]", style=solid]; +"1316 Transpose_1202" -> "1334 Reshape_1220" [label="[]", style=solid]; +"1317 Shape_1203" -> "1319 Gather_1205" [label="[-1]", style=dashed]; +"1318 Constant_1204" -> "1319 Gather_1205" [label="[]", style=dashed]; +"1319 Gather_1205" -> "1330 Unsqueeze_1216" [label="[]", style=dashed]; +"1320 Shape_1206" -> "1322 Gather_1208" [label="[-1]", style=dashed]; +"1321 Constant_1207" -> "1322 Gather_1208" [label="[]", style=dashed]; +"1322 Gather_1208" -> "1331 Unsqueeze_1217" [label="[]", style=dashed]; +"1323 Shape_1209" -> "1325 Gather_1211" [label="[-1]", style=dashed]; +"1324 Constant_1210" -> "1325 Gather_1211" [label="[]", style=dashed]; +"1325 Gather_1211" -> "1329 Mul_1215" [label="[]", style=dashed]; +"1326 Shape_1212" -> "1328 Gather_1214" [label="[-1]", style=dashed]; +"1327 Constant_1213" -> "1328 Gather_1214" [label="[]", style=dashed]; +"1328 Gather_1214" -> "1329 Mul_1215" [label="[]", style=dashed]; +"1329 Mul_1215" -> "1332 Unsqueeze_1218" [label="[]", style=dashed]; +"1330 Unsqueeze_1216" -> "1333 Concat_1219" [label="[1]", style=dashed]; +"1331 Unsqueeze_1217" -> "1333 Concat_1219" [label="[1]", style=dashed]; +"1332 Unsqueeze_1218" -> "1333 Concat_1219" [label="[1]", style=dashed]; +"1333 Concat_1219" -> "1334 Reshape_1220" [label="[3]", style=dashed]; +"1334 Reshape_1220" -> "1335 Shape_1221" [label="[]", style=solid]; +"1334 Reshape_1220" -> "1338 Shape_1224" [label="[]", style=solid]; +"1334 Reshape_1220" -> 
"1341 Shape_1227" [label="[]", style=solid]; +"1334 Reshape_1220" -> "1346 Reshape_1232" [label="[]", style=solid]; +"1335 Shape_1221" -> "1337 Gather_1223" [label="[-1]", style=dashed]; +"1336 Constant_1222" -> "1337 Gather_1223" [label="[]", style=dashed]; +"1337 Gather_1223" -> "1350 Unsqueeze_1234" [label="[]", style=dashed]; +"1338 Shape_1224" -> "1340 Gather_1226" [label="[-1]", style=dashed]; +"1339 Constant_1225" -> "1340 Gather_1226" [label="[]", style=dashed]; +"1340 Gather_1226" -> "1351 Unsqueeze_1235" [label="[]", style=dashed]; +"1341 Shape_1227" -> "1343 Gather_1229" [label="[-1]", style=dashed]; +"1342 Constant_1228" -> "1343 Gather_1229" [label="[]", style=dashed]; +"1343 Gather_1229" -> "1344 Unsqueeze_1230" [label="[]", style=dashed]; +"1344 Unsqueeze_1230" -> "1345 Concat_1231" [label="[1]", style=dashed]; +"1345 Concat_1231" -> "1346 Reshape_1232" [label="[2]", style=dashed]; +"1346 Reshape_1232" -> "1349 Gemm_1233" [label="[]", style=solid]; +"1347 QuantizeLinear_h.5.attn.c_proj.weight_1" -> "1348 DequantizeLinear_h.5.attn.c_proj.weight_1" [label="[768, 768]", style=dashed]; +"1348 DequantizeLinear_h.5.attn.c_proj.weight_1" -> "1349 Gemm_1233" [label="[768, 768]", style=solid]; +"1349 Gemm_1233" -> "1353 Reshape_1237" [label="[]", style=solid]; +"1350 Unsqueeze_1234" -> "1352 Concat_1236" [label="[1]", style=dashed]; +"1351 Unsqueeze_1235" -> "1352 Concat_1236" [label="[1]", style=dashed]; +"1352 Concat_1236" -> "1353 Reshape_1237" [label="[3]", style=dashed]; +"1353 Reshape_1237" -> "1354 Add_1238" [label="[]", style=solid]; +"1354 Add_1238" -> "1355 ReduceMean_1239" [label="[]", style=solid]; +"1354 Add_1238" -> "1356 Sub_1240" [label="[]", style=solid]; +"1354 Add_1238" -> "1421 Add_1297" [label="[]", style=solid]; +"1355 ReduceMean_1239" -> "1356 Sub_1240" [label="[]", style=solid]; +"1356 Sub_1240" -> "1358 Pow_1242" [label="[]", style=solid]; +"1356 Sub_1240" -> "1363 Div_1247" [label="[]", style=solid]; +"1357 Constant_1241" -> "1358 Pow_1242" [label="[]", style=solid]; +"1358 Pow_1242" -> "1359 ReduceMean_1243" [label="[]", style=solid]; +"1359 ReduceMean_1243" -> "1361 Add_1245" [label="[]", style=solid]; +"1360 Constant_1244" -> "1361 Add_1245" [label="[]", style=solid]; +"1361 Add_1245" -> "1362 Sqrt_1246" [label="[]", style=solid]; +"1362 Sqrt_1246" -> "1363 Div_1247" [label="[]", style=solid]; +"1363 Div_1247" -> "1364 Mul_1248" [label="[]", style=solid]; +"1364 Mul_1248" -> "1365 Add_1249" [label="[]", style=solid]; +"1365 Add_1249" -> "1366 QuantizeLinear_1590_1" [label="[]", style=solid]; +"1366 QuantizeLinear_1590_1" -> "1367 DequantizeLinear_1590_1" [label="[]", style=dashed]; +"1367 DequantizeLinear_1590_1" -> "1368 Shape_1250" [label="[]", style=solid]; +"1367 DequantizeLinear_1590_1" -> "1371 Shape_1253" [label="[]", style=solid]; +"1367 DequantizeLinear_1590_1" -> "1374 Shape_1256" [label="[]", style=solid]; +"1367 DequantizeLinear_1590_1" -> "1379 Reshape_1261" [label="[]", style=solid]; +"1368 Shape_1250" -> "1370 Gather_1252" [label="[-1]", style=dashed]; +"1369 Constant_1251" -> "1370 Gather_1252" [label="[]", style=dashed]; +"1370 Gather_1252" -> "1383 Unsqueeze_1263" [label="[]", style=dashed]; +"1371 Shape_1253" -> "1373 Gather_1255" [label="[-1]", style=dashed]; +"1372 Constant_1254" -> "1373 Gather_1255" [label="[]", style=dashed]; +"1373 Gather_1255" -> "1384 Unsqueeze_1264" [label="[]", style=dashed]; +"1374 Shape_1256" -> "1376 Gather_1258" [label="[-1]", style=dashed]; +"1375 Constant_1257" -> "1376 Gather_1258" [label="[]", 
style=dashed]; +"1376 Gather_1258" -> "1377 Unsqueeze_1259" [label="[]", style=dashed]; +"1377 Unsqueeze_1259" -> "1378 Concat_1260" [label="[1]", style=dashed]; +"1378 Concat_1260" -> "1379 Reshape_1261" [label="[2]", style=dashed]; +"1379 Reshape_1261" -> "1382 Gemm_1262" [label="[]", style=solid]; +"1380 QuantizeLinear_h.5.mlp.c_fc.weight_1" -> "1381 DequantizeLinear_h.5.mlp.c_fc.weight_1" [label="[768, 3072]", style=dashed]; +"1381 DequantizeLinear_h.5.mlp.c_fc.weight_1" -> "1382 Gemm_1262" [label="[768, 3072]", style=solid]; +"1382 Gemm_1262" -> "1386 Reshape_1266" [label="[]", style=solid]; +"1383 Unsqueeze_1263" -> "1385 Concat_1265" [label="[1]", style=dashed]; +"1384 Unsqueeze_1264" -> "1385 Concat_1265" [label="[1]", style=dashed]; +"1385 Concat_1265" -> "1386 Reshape_1266" [label="[3]", style=dashed]; +"1386 Reshape_1266" -> "1388 Mul_1268" [label="[]", style=solid]; +"1386 Reshape_1266" -> "1390 Pow_1270" [label="[]", style=solid]; +"1386 Reshape_1266" -> "1393 Add_1273" [label="[]", style=solid]; +"1387 Constant_1267" -> "1388 Mul_1268" [label="[]", style=solid]; +"1388 Mul_1268" -> "1399 Mul_1279" [label="[]", style=solid]; +"1389 Constant_1269" -> "1390 Pow_1270" [label="[]", style=solid]; +"1390 Pow_1270" -> "1392 Mul_1272" [label="[]", style=solid]; +"1391 Constant_1271" -> "1392 Mul_1272" [label="[]", style=solid]; +"1392 Mul_1272" -> "1393 Add_1273" [label="[]", style=solid]; +"1393 Add_1273" -> "1395 Mul_1275" [label="[]", style=solid]; +"1394 Constant_1274" -> "1395 Mul_1275" [label="[]", style=solid]; +"1395 Mul_1275" -> "1396 Tanh_1276" [label="[]", style=solid]; +"1396 Tanh_1276" -> "1398 Add_1278" [label="[]", style=solid]; +"1397 Constant_1277" -> "1398 Add_1278" [label="[]", style=solid]; +"1398 Add_1278" -> "1399 Mul_1279" [label="[]", style=solid]; +"1399 Mul_1279" -> "1400 QuantizeLinear_1624_1" [label="[]", style=solid]; +"1400 QuantizeLinear_1624_1" -> "1401 DequantizeLinear_1624_1" [label="[]", style=dashed]; +"1401 DequantizeLinear_1624_1" -> "1402 Shape_1280" [label="[]", style=solid]; +"1401 DequantizeLinear_1624_1" -> "1405 Shape_1283" [label="[]", style=solid]; +"1401 DequantizeLinear_1624_1" -> "1408 Shape_1286" [label="[]", style=solid]; +"1401 DequantizeLinear_1624_1" -> "1413 Reshape_1291" [label="[]", style=solid]; +"1402 Shape_1280" -> "1404 Gather_1282" [label="[-1]", style=dashed]; +"1403 Constant_1281" -> "1404 Gather_1282" [label="[]", style=dashed]; +"1404 Gather_1282" -> "1417 Unsqueeze_1293" [label="[]", style=dashed]; +"1405 Shape_1283" -> "1407 Gather_1285" [label="[-1]", style=dashed]; +"1406 Constant_1284" -> "1407 Gather_1285" [label="[]", style=dashed]; +"1407 Gather_1285" -> "1418 Unsqueeze_1294" [label="[]", style=dashed]; +"1408 Shape_1286" -> "1410 Gather_1288" [label="[-1]", style=dashed]; +"1409 Constant_1287" -> "1410 Gather_1288" [label="[]", style=dashed]; +"1410 Gather_1288" -> "1411 Unsqueeze_1289" [label="[]", style=dashed]; +"1411 Unsqueeze_1289" -> "1412 Concat_1290" [label="[1]", style=dashed]; +"1412 Concat_1290" -> "1413 Reshape_1291" [label="[2]", style=dashed]; +"1413 Reshape_1291" -> "1416 Gemm_1292" [label="[]", style=solid]; +"1414 QuantizeLinear_h.5.mlp.c_proj.weight_1" -> "1415 DequantizeLinear_h.5.mlp.c_proj.weight_1" [label="[3072, 768]", style=dashed]; +"1415 DequantizeLinear_h.5.mlp.c_proj.weight_1" -> "1416 Gemm_1292" [label="[3072, 768]", style=solid]; +"1416 Gemm_1292" -> "1420 Reshape_1296" [label="[]", style=solid]; +"1417 Unsqueeze_1293" -> "1419 Concat_1295" [label="[1]", style=dashed]; +"1418 
Unsqueeze_1294" -> "1419 Concat_1295" [label="[1]", style=dashed]; +"1419 Concat_1295" -> "1420 Reshape_1296" [label="[3]", style=dashed]; +"1420 Reshape_1296" -> "1421 Add_1297" [label="[]", style=solid]; +"1421 Add_1297" -> "1422 ReduceMean_1298" [label="[]", style=solid]; +"1421 Add_1297" -> "1423 Sub_1299" [label="[]", style=solid]; +"1421 Add_1297" -> "1585 Add_1449" [label="[]", style=solid]; +"1422 ReduceMean_1298" -> "1423 Sub_1299" [label="[]", style=solid]; +"1423 Sub_1299" -> "1425 Pow_1301" [label="[]", style=solid]; +"1423 Sub_1299" -> "1430 Div_1306" [label="[]", style=solid]; +"1424 Constant_1300" -> "1425 Pow_1301" [label="[]", style=solid]; +"1425 Pow_1301" -> "1426 ReduceMean_1302" [label="[]", style=solid]; +"1426 ReduceMean_1302" -> "1428 Add_1304" [label="[]", style=solid]; +"1427 Constant_1303" -> "1428 Add_1304" [label="[]", style=solid]; +"1428 Add_1304" -> "1429 Sqrt_1305" [label="[]", style=solid]; +"1429 Sqrt_1305" -> "1430 Div_1306" [label="[]", style=solid]; +"1430 Div_1306" -> "1431 Mul_1307" [label="[]", style=solid]; +"1431 Mul_1307" -> "1432 Add_1308" [label="[]", style=solid]; +"1432 Add_1308" -> "1433 QuantizeLinear_1657_1" [label="[]", style=solid]; +"1433 QuantizeLinear_1657_1" -> "1434 DequantizeLinear_1657_1" [label="[]", style=dashed]; +"1434 DequantizeLinear_1657_1" -> "1435 Shape_1309" [label="[]", style=solid]; +"1434 DequantizeLinear_1657_1" -> "1438 Shape_1312" [label="[]", style=solid]; +"1434 DequantizeLinear_1657_1" -> "1441 Shape_1315" [label="[]", style=solid]; +"1434 DequantizeLinear_1657_1" -> "1446 Reshape_1320" [label="[]", style=solid]; +"1435 Shape_1309" -> "1437 Gather_1311" [label="[-1]", style=dashed]; +"1436 Constant_1310" -> "1437 Gather_1311" [label="[]", style=dashed]; +"1437 Gather_1311" -> "1450 Unsqueeze_1322" [label="[]", style=dashed]; +"1438 Shape_1312" -> "1440 Gather_1314" [label="[-1]", style=dashed]; +"1439 Constant_1313" -> "1440 Gather_1314" [label="[]", style=dashed]; +"1440 Gather_1314" -> "1451 Unsqueeze_1323" [label="[]", style=dashed]; +"1441 Shape_1315" -> "1443 Gather_1317" [label="[-1]", style=dashed]; +"1442 Constant_1316" -> "1443 Gather_1317" [label="[]", style=dashed]; +"1443 Gather_1317" -> "1444 Unsqueeze_1318" [label="[]", style=dashed]; +"1444 Unsqueeze_1318" -> "1445 Concat_1319" [label="[1]", style=dashed]; +"1445 Concat_1319" -> "1446 Reshape_1320" [label="[2]", style=dashed]; +"1446 Reshape_1320" -> "1449 Gemm_1321" [label="[]", style=solid]; +"1447 QuantizeLinear_h.6.attn.c_attn.weight_1" -> "1448 DequantizeLinear_h.6.attn.c_attn.weight_1" [label="[768, 2304]", style=dashed]; +"1448 DequantizeLinear_h.6.attn.c_attn.weight_1" -> "1449 Gemm_1321" [label="[768, 2304]", style=solid]; +"1449 Gemm_1321" -> "1453 Reshape_1325" [label="[]", style=solid]; +"1450 Unsqueeze_1322" -> "1452 Concat_1324" [label="[1]", style=dashed]; +"1451 Unsqueeze_1323" -> "1452 Concat_1324" [label="[1]", style=dashed]; +"1452 Concat_1324" -> "1453 Reshape_1325" [label="[3]", style=dashed]; +"1453 Reshape_1325" -> "1454 Split_1326" [label="[]", style=solid]; +"1454 Split_1326" -> "1455 QuantizeLinear_query.13_1" [label="[]", style=solid]; +"1454 Split_1326" -> "1457 Shape_1327" [label="[]", style=solid]; +"1454 Split_1326" -> "1460 Shape_1330" [label="[]", style=solid]; +"1454 Split_1326" -> "1463 Shape_1333" [label="[]", style=solid]; +"1454 Split_1326" -> "1476 Shape_1346" [label="[]", style=solid]; +"1454 Split_1326" -> "1479 Shape_1349" [label="[]", style=solid]; +"1454 Split_1326" -> "1482 Shape_1352" [label="[]", 
style=solid]; +"1454 Split_1326" -> "1493 Reshape_1363" [label="[]", style=solid]; +"1454 Split_1326" -> "1497 Shape_1365" [label="[]", style=solid]; +"1454 Split_1326" -> "1500 Shape_1368" [label="[]", style=solid]; +"1454 Split_1326" -> "1503 Shape_1371" [label="[]", style=solid]; +"1454 Split_1326" -> "1514 Reshape_1382" [label="[]", style=solid]; +"1455 QuantizeLinear_query.13_1" -> "1456 DequantizeLinear_query.13_1" [label="[]", style=dashed]; +"1456 DequantizeLinear_query.13_1" -> "1474 Reshape_1344" [label="[]", style=solid]; +"1457 Shape_1327" -> "1459 Gather_1329" [label="[-1]", style=dashed]; +"1458 Constant_1328" -> "1459 Gather_1329" [label="[]", style=dashed]; +"1459 Gather_1329" -> "1470 Unsqueeze_1340" [label="[]", style=dashed]; +"1460 Shape_1330" -> "1462 Gather_1332" [label="[-1]", style=dashed]; +"1461 Constant_1331" -> "1462 Gather_1332" [label="[]", style=dashed]; +"1462 Gather_1332" -> "1471 Unsqueeze_1341" [label="[]", style=dashed]; +"1463 Shape_1333" -> "1465 Gather_1335" [label="[-1]", style=dashed]; +"1464 Constant_1334" -> "1465 Gather_1335" [label="[]", style=dashed]; +"1465 Gather_1335" -> "1467 Div_1337" [label="[]", style=dashed]; +"1466 Constant_1336" -> "1467 Div_1337" [label="[]", style=dashed]; +"1467 Div_1337" -> "1468 Cast_1338" [label="[]", style=dashed]; +"1468 Cast_1338" -> "1469 Cast_1339" [label="[]", style=dashed]; +"1469 Cast_1339" -> "1472 Unsqueeze_1342" [label="[]", style=dashed]; +"1470 Unsqueeze_1340" -> "1473 Concat_1343" [label="[1]", style=dashed]; +"1471 Unsqueeze_1341" -> "1473 Concat_1343" [label="[1]", style=dashed]; +"1472 Unsqueeze_1342" -> "1473 Concat_1343" [label="[1]", style=dashed]; +"1473 Concat_1343" -> "1474 Reshape_1344" [label="[4]", style=dashed]; +"1474 Reshape_1344" -> "1475 Transpose_1345" [label="[]", style=solid]; +"1475 Transpose_1345" -> "1520 MatMul_1388" [label="[]", style=solid]; +"1476 Shape_1346" -> "1478 Gather_1348" [label="[-1]", style=dashed]; +"1477 Constant_1347" -> "1478 Gather_1348" [label="[]", style=dashed]; +"1478 Gather_1348" -> "1489 Unsqueeze_1359" [label="[]", style=dashed]; +"1479 Shape_1349" -> "1481 Gather_1351" [label="[-1]", style=dashed]; +"1480 Constant_1350" -> "1481 Gather_1351" [label="[]", style=dashed]; +"1481 Gather_1351" -> "1490 Unsqueeze_1360" [label="[]", style=dashed]; +"1482 Shape_1352" -> "1484 Gather_1354" [label="[-1]", style=dashed]; +"1483 Constant_1353" -> "1484 Gather_1354" [label="[]", style=dashed]; +"1484 Gather_1354" -> "1486 Div_1356" [label="[]", style=dashed]; +"1485 Constant_1355" -> "1486 Div_1356" [label="[]", style=dashed]; +"1486 Div_1356" -> "1487 Cast_1357" [label="[]", style=dashed]; +"1487 Cast_1357" -> "1488 Cast_1358" [label="[]", style=dashed]; +"1488 Cast_1358" -> "1491 Unsqueeze_1361" [label="[]", style=dashed]; +"1489 Unsqueeze_1359" -> "1492 Concat_1362" [label="[1]", style=dashed]; +"1490 Unsqueeze_1360" -> "1492 Concat_1362" [label="[1]", style=dashed]; +"1491 Unsqueeze_1361" -> "1492 Concat_1362" [label="[1]", style=dashed]; +"1492 Concat_1362" -> "1493 Reshape_1363" [label="[4]", style=dashed]; +"1493 Reshape_1363" -> "1494 QuantizeLinear_1722_1" [label="[]", style=solid]; +"1493 Reshape_1363" -> "1516 Transpose_1384" [label="[]", style=solid]; +"1494 QuantizeLinear_1722_1" -> "1495 DequantizeLinear_1722_1" [label="[]", style=dashed]; +"1495 DequantizeLinear_1722_1" -> "1496 Transpose_1364" [label="[]", style=solid]; +"1496 Transpose_1364" -> "1520 MatMul_1388" [label="[]", style=solid]; +"1497 Shape_1365" -> "1499 Gather_1367" [label="[-1]", 
style=dashed]; +"1498 Constant_1366" -> "1499 Gather_1367" [label="[]", style=dashed]; +"1499 Gather_1367" -> "1510 Unsqueeze_1378" [label="[]", style=dashed]; +"1500 Shape_1368" -> "1502 Gather_1370" [label="[-1]", style=dashed]; +"1501 Constant_1369" -> "1502 Gather_1370" [label="[]", style=dashed]; +"1502 Gather_1370" -> "1511 Unsqueeze_1379" [label="[]", style=dashed]; +"1503 Shape_1371" -> "1505 Gather_1373" [label="[-1]", style=dashed]; +"1504 Constant_1372" -> "1505 Gather_1373" [label="[]", style=dashed]; +"1505 Gather_1373" -> "1507 Div_1375" [label="[]", style=dashed]; +"1506 Constant_1374" -> "1507 Div_1375" [label="[]", style=dashed]; +"1507 Div_1375" -> "1508 Cast_1376" [label="[]", style=dashed]; +"1508 Cast_1376" -> "1509 Cast_1377" [label="[]", style=dashed]; +"1509 Cast_1377" -> "1512 Unsqueeze_1380" [label="[]", style=dashed]; +"1510 Unsqueeze_1378" -> "1513 Concat_1381" [label="[1]", style=dashed]; +"1511 Unsqueeze_1379" -> "1513 Concat_1381" [label="[1]", style=dashed]; +"1512 Unsqueeze_1380" -> "1513 Concat_1381" [label="[1]", style=dashed]; +"1513 Concat_1381" -> "1514 Reshape_1382" [label="[4]", style=dashed]; +"1514 Reshape_1382" -> "1515 Transpose_1383" [label="[]", style=solid]; +"1515 Transpose_1383" -> "1518 Unsqueeze_1386" [label="[]", style=solid]; +"1515 Transpose_1383" -> "1544 MatMul_1412" [label="[]", style=solid]; +"1516 Transpose_1384" -> "1517 Unsqueeze_1385" [label="[]", style=solid]; +"1517 Unsqueeze_1385" -> "1519 Concat_1387" [label="[]", style=solid]; +"1518 Unsqueeze_1386" -> "1519 Concat_1387" [label="[]", style=solid]; +"1519 Concat_1387" -> "2833 nncf_model_output_7" [label="[2, 1, 12, 8, 64]", style=solid]; +"1520 MatMul_1388" -> "1522 Div_1390" [label="[]", style=solid]; +"1521 Constant_1389" -> "1522 Div_1390" [label="[]", style=solid]; +"1522 Div_1390" -> "1523 Shape_1391" [label="[]", style=solid]; +"1522 Div_1390" -> "1526 Shape_1394" [label="[]", style=solid]; +"1522 Div_1390" -> "1537 Mul_1405" [label="[]", style=solid]; +"1523 Shape_1391" -> "1525 Gather_1393" [label="[-1]", style=dashed]; +"1524 Constant_1392" -> "1525 Gather_1393" [label="[]", style=dashed]; +"1525 Gather_1393" -> "1529 Sub_1397" [label="[]", style=dashed]; +"1526 Shape_1394" -> "1528 Gather_1396" [label="[-1]", style=dashed]; +"1527 Constant_1395" -> "1528 Gather_1396" [label="[]", style=dashed]; +"1528 Gather_1396" -> "1529 Sub_1397" [label="[]", style=dashed]; +"1528 Gather_1396" -> "1531 Unsqueeze_1399" [label="[]", style=dashed]; +"1528 Gather_1396" -> "1534 Unsqueeze_1402" [label="[]", style=dashed]; +"1529 Sub_1397" -> "1530 Unsqueeze_1398" [label="[]", style=dashed]; +"1530 Unsqueeze_1398" -> "1533 Slice_1401" [label="[1]", style=dashed]; +"1531 Unsqueeze_1399" -> "1533 Slice_1401" [label="[1]", style=dashed]; +"1532 Constant_1400" -> "1533 Slice_1401" [label="[1]", style=dashed]; +"1533 Slice_1401" -> "1536 Slice_1404" [label="[]", style=solid]; +"1534 Unsqueeze_1402" -> "1536 Slice_1404" [label="[1]", style=dashed]; +"1535 Constant_1403" -> "1536 Slice_1404" [label="[1]", style=dashed]; +"1536 Slice_1404" -> "1537 Mul_1405" [label="[]", style=solid]; +"1536 Slice_1404" -> "1539 Sub_1407" [label="[]", style=solid]; +"1537 Mul_1405" -> "1542 Sub_1410" [label="[]", style=solid]; +"1538 Constant_1406" -> "1539 Sub_1407" [label="[]", style=solid]; +"1539 Sub_1407" -> "1541 Mul_1409" [label="[]", style=solid]; +"1540 Constant_1408" -> "1541 Mul_1409" [label="[]", style=solid]; +"1541 Mul_1409" -> "1542 Sub_1410" [label="[]", style=solid]; +"1542 Sub_1410" -> 
"1543 Softmax_1411" [label="[]", style=solid]; +"1543 Softmax_1411" -> "1544 MatMul_1412" [label="[]", style=solid]; +"1544 MatMul_1412" -> "1545 QuantizeLinear_1779_1" [label="[]", style=solid]; +"1545 QuantizeLinear_1779_1" -> "1546 DequantizeLinear_1779_1" [label="[]", style=dashed]; +"1546 DequantizeLinear_1779_1" -> "1547 Transpose_1413" [label="[]", style=solid]; +"1547 Transpose_1413" -> "1548 Shape_1414" [label="[]", style=solid]; +"1547 Transpose_1413" -> "1551 Shape_1417" [label="[]", style=solid]; +"1547 Transpose_1413" -> "1554 Shape_1420" [label="[]", style=solid]; +"1547 Transpose_1413" -> "1557 Shape_1423" [label="[]", style=solid]; +"1547 Transpose_1413" -> "1565 Reshape_1431" [label="[]", style=solid]; +"1548 Shape_1414" -> "1550 Gather_1416" [label="[-1]", style=dashed]; +"1549 Constant_1415" -> "1550 Gather_1416" [label="[]", style=dashed]; +"1550 Gather_1416" -> "1561 Unsqueeze_1427" [label="[]", style=dashed]; +"1551 Shape_1417" -> "1553 Gather_1419" [label="[-1]", style=dashed]; +"1552 Constant_1418" -> "1553 Gather_1419" [label="[]", style=dashed]; +"1553 Gather_1419" -> "1562 Unsqueeze_1428" [label="[]", style=dashed]; +"1554 Shape_1420" -> "1556 Gather_1422" [label="[-1]", style=dashed]; +"1555 Constant_1421" -> "1556 Gather_1422" [label="[]", style=dashed]; +"1556 Gather_1422" -> "1560 Mul_1426" [label="[]", style=dashed]; +"1557 Shape_1423" -> "1559 Gather_1425" [label="[-1]", style=dashed]; +"1558 Constant_1424" -> "1559 Gather_1425" [label="[]", style=dashed]; +"1559 Gather_1425" -> "1560 Mul_1426" [label="[]", style=dashed]; +"1560 Mul_1426" -> "1563 Unsqueeze_1429" [label="[]", style=dashed]; +"1561 Unsqueeze_1427" -> "1564 Concat_1430" [label="[1]", style=dashed]; +"1562 Unsqueeze_1428" -> "1564 Concat_1430" [label="[1]", style=dashed]; +"1563 Unsqueeze_1429" -> "1564 Concat_1430" [label="[1]", style=dashed]; +"1564 Concat_1430" -> "1565 Reshape_1431" [label="[3]", style=dashed]; +"1565 Reshape_1431" -> "1566 Shape_1432" [label="[]", style=solid]; +"1565 Reshape_1431" -> "1569 Shape_1435" [label="[]", style=solid]; +"1565 Reshape_1431" -> "1572 Shape_1438" [label="[]", style=solid]; +"1565 Reshape_1431" -> "1577 Reshape_1443" [label="[]", style=solid]; +"1566 Shape_1432" -> "1568 Gather_1434" [label="[-1]", style=dashed]; +"1567 Constant_1433" -> "1568 Gather_1434" [label="[]", style=dashed]; +"1568 Gather_1434" -> "1581 Unsqueeze_1445" [label="[]", style=dashed]; +"1569 Shape_1435" -> "1571 Gather_1437" [label="[-1]", style=dashed]; +"1570 Constant_1436" -> "1571 Gather_1437" [label="[]", style=dashed]; +"1571 Gather_1437" -> "1582 Unsqueeze_1446" [label="[]", style=dashed]; +"1572 Shape_1438" -> "1574 Gather_1440" [label="[-1]", style=dashed]; +"1573 Constant_1439" -> "1574 Gather_1440" [label="[]", style=dashed]; +"1574 Gather_1440" -> "1575 Unsqueeze_1441" [label="[]", style=dashed]; +"1575 Unsqueeze_1441" -> "1576 Concat_1442" [label="[1]", style=dashed]; +"1576 Concat_1442" -> "1577 Reshape_1443" [label="[2]", style=dashed]; +"1577 Reshape_1443" -> "1580 Gemm_1444" [label="[]", style=solid]; +"1578 QuantizeLinear_h.6.attn.c_proj.weight_1" -> "1579 DequantizeLinear_h.6.attn.c_proj.weight_1" [label="[768, 768]", style=dashed]; +"1579 DequantizeLinear_h.6.attn.c_proj.weight_1" -> "1580 Gemm_1444" [label="[768, 768]", style=solid]; +"1580 Gemm_1444" -> "1584 Reshape_1448" [label="[]", style=solid]; +"1581 Unsqueeze_1445" -> "1583 Concat_1447" [label="[1]", style=dashed]; +"1582 Unsqueeze_1446" -> "1583 Concat_1447" [label="[1]", style=dashed]; +"1583 
Concat_1447" -> "1584 Reshape_1448" [label="[3]", style=dashed]; +"1584 Reshape_1448" -> "1585 Add_1449" [label="[]", style=solid]; +"1585 Add_1449" -> "1586 ReduceMean_1450" [label="[]", style=solid]; +"1585 Add_1449" -> "1587 Sub_1451" [label="[]", style=solid]; +"1585 Add_1449" -> "1652 Add_1508" [label="[]", style=solid]; +"1586 ReduceMean_1450" -> "1587 Sub_1451" [label="[]", style=solid]; +"1587 Sub_1451" -> "1589 Pow_1453" [label="[]", style=solid]; +"1587 Sub_1451" -> "1594 Div_1458" [label="[]", style=solid]; +"1588 Constant_1452" -> "1589 Pow_1453" [label="[]", style=solid]; +"1589 Pow_1453" -> "1590 ReduceMean_1454" [label="[]", style=solid]; +"1590 ReduceMean_1454" -> "1592 Add_1456" [label="[]", style=solid]; +"1591 Constant_1455" -> "1592 Add_1456" [label="[]", style=solid]; +"1592 Add_1456" -> "1593 Sqrt_1457" [label="[]", style=solid]; +"1593 Sqrt_1457" -> "1594 Div_1458" [label="[]", style=solid]; +"1594 Div_1458" -> "1595 Mul_1459" [label="[]", style=solid]; +"1595 Mul_1459" -> "1596 Add_1460" [label="[]", style=solid]; +"1596 Add_1460" -> "1597 QuantizeLinear_1831_1" [label="[]", style=solid]; +"1597 QuantizeLinear_1831_1" -> "1598 DequantizeLinear_1831_1" [label="[]", style=dashed]; +"1598 DequantizeLinear_1831_1" -> "1599 Shape_1461" [label="[]", style=solid]; +"1598 DequantizeLinear_1831_1" -> "1602 Shape_1464" [label="[]", style=solid]; +"1598 DequantizeLinear_1831_1" -> "1605 Shape_1467" [label="[]", style=solid]; +"1598 DequantizeLinear_1831_1" -> "1610 Reshape_1472" [label="[]", style=solid]; +"1599 Shape_1461" -> "1601 Gather_1463" [label="[-1]", style=dashed]; +"1600 Constant_1462" -> "1601 Gather_1463" [label="[]", style=dashed]; +"1601 Gather_1463" -> "1614 Unsqueeze_1474" [label="[]", style=dashed]; +"1602 Shape_1464" -> "1604 Gather_1466" [label="[-1]", style=dashed]; +"1603 Constant_1465" -> "1604 Gather_1466" [label="[]", style=dashed]; +"1604 Gather_1466" -> "1615 Unsqueeze_1475" [label="[]", style=dashed]; +"1605 Shape_1467" -> "1607 Gather_1469" [label="[-1]", style=dashed]; +"1606 Constant_1468" -> "1607 Gather_1469" [label="[]", style=dashed]; +"1607 Gather_1469" -> "1608 Unsqueeze_1470" [label="[]", style=dashed]; +"1608 Unsqueeze_1470" -> "1609 Concat_1471" [label="[1]", style=dashed]; +"1609 Concat_1471" -> "1610 Reshape_1472" [label="[2]", style=dashed]; +"1610 Reshape_1472" -> "1613 Gemm_1473" [label="[]", style=solid]; +"1611 QuantizeLinear_h.6.mlp.c_fc.weight_1" -> "1612 DequantizeLinear_h.6.mlp.c_fc.weight_1" [label="[768, 3072]", style=dashed]; +"1612 DequantizeLinear_h.6.mlp.c_fc.weight_1" -> "1613 Gemm_1473" [label="[768, 3072]", style=solid]; +"1613 Gemm_1473" -> "1617 Reshape_1477" [label="[]", style=solid]; +"1614 Unsqueeze_1474" -> "1616 Concat_1476" [label="[1]", style=dashed]; +"1615 Unsqueeze_1475" -> "1616 Concat_1476" [label="[1]", style=dashed]; +"1616 Concat_1476" -> "1617 Reshape_1477" [label="[3]", style=dashed]; +"1617 Reshape_1477" -> "1619 Mul_1479" [label="[]", style=solid]; +"1617 Reshape_1477" -> "1621 Pow_1481" [label="[]", style=solid]; +"1617 Reshape_1477" -> "1624 Add_1484" [label="[]", style=solid]; +"1618 Constant_1478" -> "1619 Mul_1479" [label="[]", style=solid]; +"1619 Mul_1479" -> "1630 Mul_1490" [label="[]", style=solid]; +"1620 Constant_1480" -> "1621 Pow_1481" [label="[]", style=solid]; +"1621 Pow_1481" -> "1623 Mul_1483" [label="[]", style=solid]; +"1622 Constant_1482" -> "1623 Mul_1483" [label="[]", style=solid]; +"1623 Mul_1483" -> "1624 Add_1484" [label="[]", style=solid]; +"1624 Add_1484" -> "1626 
Mul_1486" [label="[]", style=solid]; +"1625 Constant_1485" -> "1626 Mul_1486" [label="[]", style=solid]; +"1626 Mul_1486" -> "1627 Tanh_1487" [label="[]", style=solid]; +"1627 Tanh_1487" -> "1629 Add_1489" [label="[]", style=solid]; +"1628 Constant_1488" -> "1629 Add_1489" [label="[]", style=solid]; +"1629 Add_1489" -> "1630 Mul_1490" [label="[]", style=solid]; +"1630 Mul_1490" -> "1631 QuantizeLinear_1865_1" [label="[]", style=solid]; +"1631 QuantizeLinear_1865_1" -> "1632 DequantizeLinear_1865_1" [label="[]", style=dashed]; +"1632 DequantizeLinear_1865_1" -> "1633 Shape_1491" [label="[]", style=solid]; +"1632 DequantizeLinear_1865_1" -> "1636 Shape_1494" [label="[]", style=solid]; +"1632 DequantizeLinear_1865_1" -> "1639 Shape_1497" [label="[]", style=solid]; +"1632 DequantizeLinear_1865_1" -> "1644 Reshape_1502" [label="[]", style=solid]; +"1633 Shape_1491" -> "1635 Gather_1493" [label="[-1]", style=dashed]; +"1634 Constant_1492" -> "1635 Gather_1493" [label="[]", style=dashed]; +"1635 Gather_1493" -> "1648 Unsqueeze_1504" [label="[]", style=dashed]; +"1636 Shape_1494" -> "1638 Gather_1496" [label="[-1]", style=dashed]; +"1637 Constant_1495" -> "1638 Gather_1496" [label="[]", style=dashed]; +"1638 Gather_1496" -> "1649 Unsqueeze_1505" [label="[]", style=dashed]; +"1639 Shape_1497" -> "1641 Gather_1499" [label="[-1]", style=dashed]; +"1640 Constant_1498" -> "1641 Gather_1499" [label="[]", style=dashed]; +"1641 Gather_1499" -> "1642 Unsqueeze_1500" [label="[]", style=dashed]; +"1642 Unsqueeze_1500" -> "1643 Concat_1501" [label="[1]", style=dashed]; +"1643 Concat_1501" -> "1644 Reshape_1502" [label="[2]", style=dashed]; +"1644 Reshape_1502" -> "1647 Gemm_1503" [label="[]", style=solid]; +"1645 QuantizeLinear_h.6.mlp.c_proj.weight_1" -> "1646 DequantizeLinear_h.6.mlp.c_proj.weight_1" [label="[3072, 768]", style=dashed]; +"1646 DequantizeLinear_h.6.mlp.c_proj.weight_1" -> "1647 Gemm_1503" [label="[3072, 768]", style=solid]; +"1647 Gemm_1503" -> "1651 Reshape_1507" [label="[]", style=solid]; +"1648 Unsqueeze_1504" -> "1650 Concat_1506" [label="[1]", style=dashed]; +"1649 Unsqueeze_1505" -> "1650 Concat_1506" [label="[1]", style=dashed]; +"1650 Concat_1506" -> "1651 Reshape_1507" [label="[3]", style=dashed]; +"1651 Reshape_1507" -> "1652 Add_1508" [label="[]", style=solid]; +"1652 Add_1508" -> "1653 ReduceMean_1509" [label="[]", style=solid]; +"1652 Add_1508" -> "1654 Sub_1510" [label="[]", style=solid]; +"1652 Add_1508" -> "1816 Add_1660" [label="[]", style=solid]; +"1653 ReduceMean_1509" -> "1654 Sub_1510" [label="[]", style=solid]; +"1654 Sub_1510" -> "1656 Pow_1512" [label="[]", style=solid]; +"1654 Sub_1510" -> "1661 Div_1517" [label="[]", style=solid]; +"1655 Constant_1511" -> "1656 Pow_1512" [label="[]", style=solid]; +"1656 Pow_1512" -> "1657 ReduceMean_1513" [label="[]", style=solid]; +"1657 ReduceMean_1513" -> "1659 Add_1515" [label="[]", style=solid]; +"1658 Constant_1514" -> "1659 Add_1515" [label="[]", style=solid]; +"1659 Add_1515" -> "1660 Sqrt_1516" [label="[]", style=solid]; +"1660 Sqrt_1516" -> "1661 Div_1517" [label="[]", style=solid]; +"1661 Div_1517" -> "1662 Mul_1518" [label="[]", style=solid]; +"1662 Mul_1518" -> "1663 Add_1519" [label="[]", style=solid]; +"1663 Add_1519" -> "1664 QuantizeLinear_1898_1" [label="[]", style=solid]; +"1664 QuantizeLinear_1898_1" -> "1665 DequantizeLinear_1898_1" [label="[]", style=dashed]; +"1665 DequantizeLinear_1898_1" -> "1666 Shape_1520" [label="[]", style=solid]; +"1665 DequantizeLinear_1898_1" -> "1669 Shape_1523" [label="[]", 
style=solid]; +"1665 DequantizeLinear_1898_1" -> "1672 Shape_1526" [label="[]", style=solid]; +"1665 DequantizeLinear_1898_1" -> "1677 Reshape_1531" [label="[]", style=solid]; +"1666 Shape_1520" -> "1668 Gather_1522" [label="[-1]", style=dashed]; +"1667 Constant_1521" -> "1668 Gather_1522" [label="[]", style=dashed]; +"1668 Gather_1522" -> "1681 Unsqueeze_1533" [label="[]", style=dashed]; +"1669 Shape_1523" -> "1671 Gather_1525" [label="[-1]", style=dashed]; +"1670 Constant_1524" -> "1671 Gather_1525" [label="[]", style=dashed]; +"1671 Gather_1525" -> "1682 Unsqueeze_1534" [label="[]", style=dashed]; +"1672 Shape_1526" -> "1674 Gather_1528" [label="[-1]", style=dashed]; +"1673 Constant_1527" -> "1674 Gather_1528" [label="[]", style=dashed]; +"1674 Gather_1528" -> "1675 Unsqueeze_1529" [label="[]", style=dashed]; +"1675 Unsqueeze_1529" -> "1676 Concat_1530" [label="[1]", style=dashed]; +"1676 Concat_1530" -> "1677 Reshape_1531" [label="[2]", style=dashed]; +"1677 Reshape_1531" -> "1680 Gemm_1532" [label="[]", style=solid]; +"1678 QuantizeLinear_h.7.attn.c_attn.weight_1" -> "1679 DequantizeLinear_h.7.attn.c_attn.weight_1" [label="[768, 2304]", style=dashed]; +"1679 DequantizeLinear_h.7.attn.c_attn.weight_1" -> "1680 Gemm_1532" [label="[768, 2304]", style=solid]; +"1680 Gemm_1532" -> "1684 Reshape_1536" [label="[]", style=solid]; +"1681 Unsqueeze_1533" -> "1683 Concat_1535" [label="[1]", style=dashed]; +"1682 Unsqueeze_1534" -> "1683 Concat_1535" [label="[1]", style=dashed]; +"1683 Concat_1535" -> "1684 Reshape_1536" [label="[3]", style=dashed]; +"1684 Reshape_1536" -> "1685 Split_1537" [label="[]", style=solid]; +"1685 Split_1537" -> "1686 QuantizeLinear_query.15_1" [label="[]", style=solid]; +"1685 Split_1537" -> "1688 Shape_1538" [label="[]", style=solid]; +"1685 Split_1537" -> "1691 Shape_1541" [label="[]", style=solid]; +"1685 Split_1537" -> "1694 Shape_1544" [label="[]", style=solid]; +"1685 Split_1537" -> "1707 Shape_1557" [label="[]", style=solid]; +"1685 Split_1537" -> "1710 Shape_1560" [label="[]", style=solid]; +"1685 Split_1537" -> "1713 Shape_1563" [label="[]", style=solid]; +"1685 Split_1537" -> "1724 Reshape_1574" [label="[]", style=solid]; +"1685 Split_1537" -> "1728 Shape_1576" [label="[]", style=solid]; +"1685 Split_1537" -> "1731 Shape_1579" [label="[]", style=solid]; +"1685 Split_1537" -> "1734 Shape_1582" [label="[]", style=solid]; +"1685 Split_1537" -> "1745 Reshape_1593" [label="[]", style=solid]; +"1686 QuantizeLinear_query.15_1" -> "1687 DequantizeLinear_query.15_1" [label="[]", style=dashed]; +"1687 DequantizeLinear_query.15_1" -> "1705 Reshape_1555" [label="[]", style=solid]; +"1688 Shape_1538" -> "1690 Gather_1540" [label="[-1]", style=dashed]; +"1689 Constant_1539" -> "1690 Gather_1540" [label="[]", style=dashed]; +"1690 Gather_1540" -> "1701 Unsqueeze_1551" [label="[]", style=dashed]; +"1691 Shape_1541" -> "1693 Gather_1543" [label="[-1]", style=dashed]; +"1692 Constant_1542" -> "1693 Gather_1543" [label="[]", style=dashed]; +"1693 Gather_1543" -> "1702 Unsqueeze_1552" [label="[]", style=dashed]; +"1694 Shape_1544" -> "1696 Gather_1546" [label="[-1]", style=dashed]; +"1695 Constant_1545" -> "1696 Gather_1546" [label="[]", style=dashed]; +"1696 Gather_1546" -> "1698 Div_1548" [label="[]", style=dashed]; +"1697 Constant_1547" -> "1698 Div_1548" [label="[]", style=dashed]; +"1698 Div_1548" -> "1699 Cast_1549" [label="[]", style=dashed]; +"1699 Cast_1549" -> "1700 Cast_1550" [label="[]", style=dashed]; +"1700 Cast_1550" -> "1703 Unsqueeze_1553" [label="[]", 
style=dashed]; +"1701 Unsqueeze_1551" -> "1704 Concat_1554" [label="[1]", style=dashed]; +"1702 Unsqueeze_1552" -> "1704 Concat_1554" [label="[1]", style=dashed]; +"1703 Unsqueeze_1553" -> "1704 Concat_1554" [label="[1]", style=dashed]; +"1704 Concat_1554" -> "1705 Reshape_1555" [label="[4]", style=dashed]; +"1705 Reshape_1555" -> "1706 Transpose_1556" [label="[]", style=solid]; +"1706 Transpose_1556" -> "1751 MatMul_1599" [label="[]", style=solid]; +"1707 Shape_1557" -> "1709 Gather_1559" [label="[-1]", style=dashed]; +"1708 Constant_1558" -> "1709 Gather_1559" [label="[]", style=dashed]; +"1709 Gather_1559" -> "1720 Unsqueeze_1570" [label="[]", style=dashed]; +"1710 Shape_1560" -> "1712 Gather_1562" [label="[-1]", style=dashed]; +"1711 Constant_1561" -> "1712 Gather_1562" [label="[]", style=dashed]; +"1712 Gather_1562" -> "1721 Unsqueeze_1571" [label="[]", style=dashed]; +"1713 Shape_1563" -> "1715 Gather_1565" [label="[-1]", style=dashed]; +"1714 Constant_1564" -> "1715 Gather_1565" [label="[]", style=dashed]; +"1715 Gather_1565" -> "1717 Div_1567" [label="[]", style=dashed]; +"1716 Constant_1566" -> "1717 Div_1567" [label="[]", style=dashed]; +"1717 Div_1567" -> "1718 Cast_1568" [label="[]", style=dashed]; +"1718 Cast_1568" -> "1719 Cast_1569" [label="[]", style=dashed]; +"1719 Cast_1569" -> "1722 Unsqueeze_1572" [label="[]", style=dashed]; +"1720 Unsqueeze_1570" -> "1723 Concat_1573" [label="[1]", style=dashed]; +"1721 Unsqueeze_1571" -> "1723 Concat_1573" [label="[1]", style=dashed]; +"1722 Unsqueeze_1572" -> "1723 Concat_1573" [label="[1]", style=dashed]; +"1723 Concat_1573" -> "1724 Reshape_1574" [label="[4]", style=dashed]; +"1724 Reshape_1574" -> "1725 QuantizeLinear_1963_1" [label="[]", style=solid]; +"1724 Reshape_1574" -> "1747 Transpose_1595" [label="[]", style=solid]; +"1725 QuantizeLinear_1963_1" -> "1726 DequantizeLinear_1963_1" [label="[]", style=dashed]; +"1726 DequantizeLinear_1963_1" -> "1727 Transpose_1575" [label="[]", style=solid]; +"1727 Transpose_1575" -> "1751 MatMul_1599" [label="[]", style=solid]; +"1728 Shape_1576" -> "1730 Gather_1578" [label="[-1]", style=dashed]; +"1729 Constant_1577" -> "1730 Gather_1578" [label="[]", style=dashed]; +"1730 Gather_1578" -> "1741 Unsqueeze_1589" [label="[]", style=dashed]; +"1731 Shape_1579" -> "1733 Gather_1581" [label="[-1]", style=dashed]; +"1732 Constant_1580" -> "1733 Gather_1581" [label="[]", style=dashed]; +"1733 Gather_1581" -> "1742 Unsqueeze_1590" [label="[]", style=dashed]; +"1734 Shape_1582" -> "1736 Gather_1584" [label="[-1]", style=dashed]; +"1735 Constant_1583" -> "1736 Gather_1584" [label="[]", style=dashed]; +"1736 Gather_1584" -> "1738 Div_1586" [label="[]", style=dashed]; +"1737 Constant_1585" -> "1738 Div_1586" [label="[]", style=dashed]; +"1738 Div_1586" -> "1739 Cast_1587" [label="[]", style=dashed]; +"1739 Cast_1587" -> "1740 Cast_1588" [label="[]", style=dashed]; +"1740 Cast_1588" -> "1743 Unsqueeze_1591" [label="[]", style=dashed]; +"1741 Unsqueeze_1589" -> "1744 Concat_1592" [label="[1]", style=dashed]; +"1742 Unsqueeze_1590" -> "1744 Concat_1592" [label="[1]", style=dashed]; +"1743 Unsqueeze_1591" -> "1744 Concat_1592" [label="[1]", style=dashed]; +"1744 Concat_1592" -> "1745 Reshape_1593" [label="[4]", style=dashed]; +"1745 Reshape_1593" -> "1746 Transpose_1594" [label="[]", style=solid]; +"1746 Transpose_1594" -> "1749 Unsqueeze_1597" [label="[]", style=solid]; +"1746 Transpose_1594" -> "1775 MatMul_1623" [label="[]", style=solid]; +"1747 Transpose_1595" -> "1748 Unsqueeze_1596" [label="[]", 
style=solid]; +"1748 Unsqueeze_1596" -> "1750 Concat_1598" [label="[]", style=solid]; +"1749 Unsqueeze_1597" -> "1750 Concat_1598" [label="[]", style=solid]; +"1750 Concat_1598" -> "2834 nncf_model_output_8" [label="[2, 1, 12, 8, 64]", style=solid]; +"1751 MatMul_1599" -> "1753 Div_1601" [label="[]", style=solid]; +"1752 Constant_1600" -> "1753 Div_1601" [label="[]", style=solid]; +"1753 Div_1601" -> "1754 Shape_1602" [label="[]", style=solid]; +"1753 Div_1601" -> "1757 Shape_1605" [label="[]", style=solid]; +"1753 Div_1601" -> "1768 Mul_1616" [label="[]", style=solid]; +"1754 Shape_1602" -> "1756 Gather_1604" [label="[-1]", style=dashed]; +"1755 Constant_1603" -> "1756 Gather_1604" [label="[]", style=dashed]; +"1756 Gather_1604" -> "1760 Sub_1608" [label="[]", style=dashed]; +"1757 Shape_1605" -> "1759 Gather_1607" [label="[-1]", style=dashed]; +"1758 Constant_1606" -> "1759 Gather_1607" [label="[]", style=dashed]; +"1759 Gather_1607" -> "1760 Sub_1608" [label="[]", style=dashed]; +"1759 Gather_1607" -> "1762 Unsqueeze_1610" [label="[]", style=dashed]; +"1759 Gather_1607" -> "1765 Unsqueeze_1613" [label="[]", style=dashed]; +"1760 Sub_1608" -> "1761 Unsqueeze_1609" [label="[]", style=dashed]; +"1761 Unsqueeze_1609" -> "1764 Slice_1612" [label="[1]", style=dashed]; +"1762 Unsqueeze_1610" -> "1764 Slice_1612" [label="[1]", style=dashed]; +"1763 Constant_1611" -> "1764 Slice_1612" [label="[1]", style=dashed]; +"1764 Slice_1612" -> "1767 Slice_1615" [label="[]", style=solid]; +"1765 Unsqueeze_1613" -> "1767 Slice_1615" [label="[1]", style=dashed]; +"1766 Constant_1614" -> "1767 Slice_1615" [label="[1]", style=dashed]; +"1767 Slice_1615" -> "1768 Mul_1616" [label="[]", style=solid]; +"1767 Slice_1615" -> "1770 Sub_1618" [label="[]", style=solid]; +"1768 Mul_1616" -> "1773 Sub_1621" [label="[]", style=solid]; +"1769 Constant_1617" -> "1770 Sub_1618" [label="[]", style=solid]; +"1770 Sub_1618" -> "1772 Mul_1620" [label="[]", style=solid]; +"1771 Constant_1619" -> "1772 Mul_1620" [label="[]", style=solid]; +"1772 Mul_1620" -> "1773 Sub_1621" [label="[]", style=solid]; +"1773 Sub_1621" -> "1774 Softmax_1622" [label="[]", style=solid]; +"1774 Softmax_1622" -> "1775 MatMul_1623" [label="[]", style=solid]; +"1775 MatMul_1623" -> "1776 QuantizeLinear_2020_1" [label="[]", style=solid]; +"1776 QuantizeLinear_2020_1" -> "1777 DequantizeLinear_2020_1" [label="[]", style=dashed]; +"1777 DequantizeLinear_2020_1" -> "1778 Transpose_1624" [label="[]", style=solid]; +"1778 Transpose_1624" -> "1779 Shape_1625" [label="[]", style=solid]; +"1778 Transpose_1624" -> "1782 Shape_1628" [label="[]", style=solid]; +"1778 Transpose_1624" -> "1785 Shape_1631" [label="[]", style=solid]; +"1778 Transpose_1624" -> "1788 Shape_1634" [label="[]", style=solid]; +"1778 Transpose_1624" -> "1796 Reshape_1642" [label="[]", style=solid]; +"1779 Shape_1625" -> "1781 Gather_1627" [label="[-1]", style=dashed]; +"1780 Constant_1626" -> "1781 Gather_1627" [label="[]", style=dashed]; +"1781 Gather_1627" -> "1792 Unsqueeze_1638" [label="[]", style=dashed]; +"1782 Shape_1628" -> "1784 Gather_1630" [label="[-1]", style=dashed]; +"1783 Constant_1629" -> "1784 Gather_1630" [label="[]", style=dashed]; +"1784 Gather_1630" -> "1793 Unsqueeze_1639" [label="[]", style=dashed]; +"1785 Shape_1631" -> "1787 Gather_1633" [label="[-1]", style=dashed]; +"1786 Constant_1632" -> "1787 Gather_1633" [label="[]", style=dashed]; +"1787 Gather_1633" -> "1791 Mul_1637" [label="[]", style=dashed]; +"1788 Shape_1634" -> "1790 Gather_1636" [label="[-1]", 
style=dashed]; +"1789 Constant_1635" -> "1790 Gather_1636" [label="[]", style=dashed]; +"1790 Gather_1636" -> "1791 Mul_1637" [label="[]", style=dashed]; +"1791 Mul_1637" -> "1794 Unsqueeze_1640" [label="[]", style=dashed]; +"1792 Unsqueeze_1638" -> "1795 Concat_1641" [label="[1]", style=dashed]; +"1793 Unsqueeze_1639" -> "1795 Concat_1641" [label="[1]", style=dashed]; +"1794 Unsqueeze_1640" -> "1795 Concat_1641" [label="[1]", style=dashed]; +"1795 Concat_1641" -> "1796 Reshape_1642" [label="[3]", style=dashed]; +"1796 Reshape_1642" -> "1797 Shape_1643" [label="[]", style=solid]; +"1796 Reshape_1642" -> "1800 Shape_1646" [label="[]", style=solid]; +"1796 Reshape_1642" -> "1803 Shape_1649" [label="[]", style=solid]; +"1796 Reshape_1642" -> "1808 Reshape_1654" [label="[]", style=solid]; +"1797 Shape_1643" -> "1799 Gather_1645" [label="[-1]", style=dashed]; +"1798 Constant_1644" -> "1799 Gather_1645" [label="[]", style=dashed]; +"1799 Gather_1645" -> "1812 Unsqueeze_1656" [label="[]", style=dashed]; +"1800 Shape_1646" -> "1802 Gather_1648" [label="[-1]", style=dashed]; +"1801 Constant_1647" -> "1802 Gather_1648" [label="[]", style=dashed]; +"1802 Gather_1648" -> "1813 Unsqueeze_1657" [label="[]", style=dashed]; +"1803 Shape_1649" -> "1805 Gather_1651" [label="[-1]", style=dashed]; +"1804 Constant_1650" -> "1805 Gather_1651" [label="[]", style=dashed]; +"1805 Gather_1651" -> "1806 Unsqueeze_1652" [label="[]", style=dashed]; +"1806 Unsqueeze_1652" -> "1807 Concat_1653" [label="[1]", style=dashed]; +"1807 Concat_1653" -> "1808 Reshape_1654" [label="[2]", style=dashed]; +"1808 Reshape_1654" -> "1811 Gemm_1655" [label="[]", style=solid]; +"1809 QuantizeLinear_h.7.attn.c_proj.weight_1" -> "1810 DequantizeLinear_h.7.attn.c_proj.weight_1" [label="[768, 768]", style=dashed]; +"1810 DequantizeLinear_h.7.attn.c_proj.weight_1" -> "1811 Gemm_1655" [label="[768, 768]", style=solid]; +"1811 Gemm_1655" -> "1815 Reshape_1659" [label="[]", style=solid]; +"1812 Unsqueeze_1656" -> "1814 Concat_1658" [label="[1]", style=dashed]; +"1813 Unsqueeze_1657" -> "1814 Concat_1658" [label="[1]", style=dashed]; +"1814 Concat_1658" -> "1815 Reshape_1659" [label="[3]", style=dashed]; +"1815 Reshape_1659" -> "1816 Add_1660" [label="[]", style=solid]; +"1816 Add_1660" -> "1817 ReduceMean_1661" [label="[]", style=solid]; +"1816 Add_1660" -> "1818 Sub_1662" [label="[]", style=solid]; +"1816 Add_1660" -> "1883 Add_1719" [label="[]", style=solid]; +"1817 ReduceMean_1661" -> "1818 Sub_1662" [label="[]", style=solid]; +"1818 Sub_1662" -> "1820 Pow_1664" [label="[]", style=solid]; +"1818 Sub_1662" -> "1825 Div_1669" [label="[]", style=solid]; +"1819 Constant_1663" -> "1820 Pow_1664" [label="[]", style=solid]; +"1820 Pow_1664" -> "1821 ReduceMean_1665" [label="[]", style=solid]; +"1821 ReduceMean_1665" -> "1823 Add_1667" [label="[]", style=solid]; +"1822 Constant_1666" -> "1823 Add_1667" [label="[]", style=solid]; +"1823 Add_1667" -> "1824 Sqrt_1668" [label="[]", style=solid]; +"1824 Sqrt_1668" -> "1825 Div_1669" [label="[]", style=solid]; +"1825 Div_1669" -> "1826 Mul_1670" [label="[]", style=solid]; +"1826 Mul_1670" -> "1827 Add_1671" [label="[]", style=solid]; +"1827 Add_1671" -> "1828 QuantizeLinear_2072_1" [label="[]", style=solid]; +"1828 QuantizeLinear_2072_1" -> "1829 DequantizeLinear_2072_1" [label="[]", style=dashed]; +"1829 DequantizeLinear_2072_1" -> "1830 Shape_1672" [label="[]", style=solid]; +"1829 DequantizeLinear_2072_1" -> "1833 Shape_1675" [label="[]", style=solid]; +"1829 DequantizeLinear_2072_1" -> "1836 
Shape_1678" [label="[]", style=solid]; +"1829 DequantizeLinear_2072_1" -> "1841 Reshape_1683" [label="[]", style=solid]; +"1830 Shape_1672" -> "1832 Gather_1674" [label="[-1]", style=dashed]; +"1831 Constant_1673" -> "1832 Gather_1674" [label="[]", style=dashed]; +"1832 Gather_1674" -> "1845 Unsqueeze_1685" [label="[]", style=dashed]; +"1833 Shape_1675" -> "1835 Gather_1677" [label="[-1]", style=dashed]; +"1834 Constant_1676" -> "1835 Gather_1677" [label="[]", style=dashed]; +"1835 Gather_1677" -> "1846 Unsqueeze_1686" [label="[]", style=dashed]; +"1836 Shape_1678" -> "1838 Gather_1680" [label="[-1]", style=dashed]; +"1837 Constant_1679" -> "1838 Gather_1680" [label="[]", style=dashed]; +"1838 Gather_1680" -> "1839 Unsqueeze_1681" [label="[]", style=dashed]; +"1839 Unsqueeze_1681" -> "1840 Concat_1682" [label="[1]", style=dashed]; +"1840 Concat_1682" -> "1841 Reshape_1683" [label="[2]", style=dashed]; +"1841 Reshape_1683" -> "1844 Gemm_1684" [label="[]", style=solid]; +"1842 QuantizeLinear_h.7.mlp.c_fc.weight_1" -> "1843 DequantizeLinear_h.7.mlp.c_fc.weight_1" [label="[768, 3072]", style=dashed]; +"1843 DequantizeLinear_h.7.mlp.c_fc.weight_1" -> "1844 Gemm_1684" [label="[768, 3072]", style=solid]; +"1844 Gemm_1684" -> "1848 Reshape_1688" [label="[]", style=solid]; +"1845 Unsqueeze_1685" -> "1847 Concat_1687" [label="[1]", style=dashed]; +"1846 Unsqueeze_1686" -> "1847 Concat_1687" [label="[1]", style=dashed]; +"1847 Concat_1687" -> "1848 Reshape_1688" [label="[3]", style=dashed]; +"1848 Reshape_1688" -> "1850 Mul_1690" [label="[]", style=solid]; +"1848 Reshape_1688" -> "1852 Pow_1692" [label="[]", style=solid]; +"1848 Reshape_1688" -> "1855 Add_1695" [label="[]", style=solid]; +"1849 Constant_1689" -> "1850 Mul_1690" [label="[]", style=solid]; +"1850 Mul_1690" -> "1861 Mul_1701" [label="[]", style=solid]; +"1851 Constant_1691" -> "1852 Pow_1692" [label="[]", style=solid]; +"1852 Pow_1692" -> "1854 Mul_1694" [label="[]", style=solid]; +"1853 Constant_1693" -> "1854 Mul_1694" [label="[]", style=solid]; +"1854 Mul_1694" -> "1855 Add_1695" [label="[]", style=solid]; +"1855 Add_1695" -> "1857 Mul_1697" [label="[]", style=solid]; +"1856 Constant_1696" -> "1857 Mul_1697" [label="[]", style=solid]; +"1857 Mul_1697" -> "1858 Tanh_1698" [label="[]", style=solid]; +"1858 Tanh_1698" -> "1860 Add_1700" [label="[]", style=solid]; +"1859 Constant_1699" -> "1860 Add_1700" [label="[]", style=solid]; +"1860 Add_1700" -> "1861 Mul_1701" [label="[]", style=solid]; +"1861 Mul_1701" -> "1862 QuantizeLinear_2106_1" [label="[]", style=solid]; +"1862 QuantizeLinear_2106_1" -> "1863 DequantizeLinear_2106_1" [label="[]", style=dashed]; +"1863 DequantizeLinear_2106_1" -> "1864 Shape_1702" [label="[]", style=solid]; +"1863 DequantizeLinear_2106_1" -> "1867 Shape_1705" [label="[]", style=solid]; +"1863 DequantizeLinear_2106_1" -> "1870 Shape_1708" [label="[]", style=solid]; +"1863 DequantizeLinear_2106_1" -> "1875 Reshape_1713" [label="[]", style=solid]; +"1864 Shape_1702" -> "1866 Gather_1704" [label="[-1]", style=dashed]; +"1865 Constant_1703" -> "1866 Gather_1704" [label="[]", style=dashed]; +"1866 Gather_1704" -> "1879 Unsqueeze_1715" [label="[]", style=dashed]; +"1867 Shape_1705" -> "1869 Gather_1707" [label="[-1]", style=dashed]; +"1868 Constant_1706" -> "1869 Gather_1707" [label="[]", style=dashed]; +"1869 Gather_1707" -> "1880 Unsqueeze_1716" [label="[]", style=dashed]; +"1870 Shape_1708" -> "1872 Gather_1710" [label="[-1]", style=dashed]; +"1871 Constant_1709" -> "1872 Gather_1710" [label="[]", style=dashed]; 
+"1872 Gather_1710" -> "1873 Unsqueeze_1711" [label="[]", style=dashed]; +"1873 Unsqueeze_1711" -> "1874 Concat_1712" [label="[1]", style=dashed]; +"1874 Concat_1712" -> "1875 Reshape_1713" [label="[2]", style=dashed]; +"1875 Reshape_1713" -> "1878 Gemm_1714" [label="[]", style=solid]; +"1876 QuantizeLinear_h.7.mlp.c_proj.weight_1" -> "1877 DequantizeLinear_h.7.mlp.c_proj.weight_1" [label="[3072, 768]", style=dashed]; +"1877 DequantizeLinear_h.7.mlp.c_proj.weight_1" -> "1878 Gemm_1714" [label="[3072, 768]", style=solid]; +"1878 Gemm_1714" -> "1882 Reshape_1718" [label="[]", style=solid]; +"1879 Unsqueeze_1715" -> "1881 Concat_1717" [label="[1]", style=dashed]; +"1880 Unsqueeze_1716" -> "1881 Concat_1717" [label="[1]", style=dashed]; +"1881 Concat_1717" -> "1882 Reshape_1718" [label="[3]", style=dashed]; +"1882 Reshape_1718" -> "1883 Add_1719" [label="[]", style=solid]; +"1883 Add_1719" -> "1884 ReduceMean_1720" [label="[]", style=solid]; +"1883 Add_1719" -> "1885 Sub_1721" [label="[]", style=solid]; +"1883 Add_1719" -> "2047 Add_1871" [label="[]", style=solid]; +"1884 ReduceMean_1720" -> "1885 Sub_1721" [label="[]", style=solid]; +"1885 Sub_1721" -> "1887 Pow_1723" [label="[]", style=solid]; +"1885 Sub_1721" -> "1892 Div_1728" [label="[]", style=solid]; +"1886 Constant_1722" -> "1887 Pow_1723" [label="[]", style=solid]; +"1887 Pow_1723" -> "1888 ReduceMean_1724" [label="[]", style=solid]; +"1888 ReduceMean_1724" -> "1890 Add_1726" [label="[]", style=solid]; +"1889 Constant_1725" -> "1890 Add_1726" [label="[]", style=solid]; +"1890 Add_1726" -> "1891 Sqrt_1727" [label="[]", style=solid]; +"1891 Sqrt_1727" -> "1892 Div_1728" [label="[]", style=solid]; +"1892 Div_1728" -> "1893 Mul_1729" [label="[]", style=solid]; +"1893 Mul_1729" -> "1894 Add_1730" [label="[]", style=solid]; +"1894 Add_1730" -> "1895 QuantizeLinear_2139_1" [label="[]", style=solid]; +"1895 QuantizeLinear_2139_1" -> "1896 DequantizeLinear_2139_1" [label="[]", style=dashed]; +"1896 DequantizeLinear_2139_1" -> "1897 Shape_1731" [label="[]", style=solid]; +"1896 DequantizeLinear_2139_1" -> "1900 Shape_1734" [label="[]", style=solid]; +"1896 DequantizeLinear_2139_1" -> "1903 Shape_1737" [label="[]", style=solid]; +"1896 DequantizeLinear_2139_1" -> "1908 Reshape_1742" [label="[]", style=solid]; +"1897 Shape_1731" -> "1899 Gather_1733" [label="[-1]", style=dashed]; +"1898 Constant_1732" -> "1899 Gather_1733" [label="[]", style=dashed]; +"1899 Gather_1733" -> "1912 Unsqueeze_1744" [label="[]", style=dashed]; +"1900 Shape_1734" -> "1902 Gather_1736" [label="[-1]", style=dashed]; +"1901 Constant_1735" -> "1902 Gather_1736" [label="[]", style=dashed]; +"1902 Gather_1736" -> "1913 Unsqueeze_1745" [label="[]", style=dashed]; +"1903 Shape_1737" -> "1905 Gather_1739" [label="[-1]", style=dashed]; +"1904 Constant_1738" -> "1905 Gather_1739" [label="[]", style=dashed]; +"1905 Gather_1739" -> "1906 Unsqueeze_1740" [label="[]", style=dashed]; +"1906 Unsqueeze_1740" -> "1907 Concat_1741" [label="[1]", style=dashed]; +"1907 Concat_1741" -> "1908 Reshape_1742" [label="[2]", style=dashed]; +"1908 Reshape_1742" -> "1911 Gemm_1743" [label="[]", style=solid]; +"1909 QuantizeLinear_h.8.attn.c_attn.weight_1" -> "1910 DequantizeLinear_h.8.attn.c_attn.weight_1" [label="[768, 2304]", style=dashed]; +"1910 DequantizeLinear_h.8.attn.c_attn.weight_1" -> "1911 Gemm_1743" [label="[768, 2304]", style=solid]; +"1911 Gemm_1743" -> "1915 Reshape_1747" [label="[]", style=solid]; +"1912 Unsqueeze_1744" -> "1914 Concat_1746" [label="[1]", style=dashed]; +"1913 
Unsqueeze_1745" -> "1914 Concat_1746" [label="[1]", style=dashed]; +"1914 Concat_1746" -> "1915 Reshape_1747" [label="[3]", style=dashed]; +"1915 Reshape_1747" -> "1916 Split_1748" [label="[]", style=solid]; +"1916 Split_1748" -> "1917 QuantizeLinear_query.17_1" [label="[]", style=solid]; +"1916 Split_1748" -> "1919 Shape_1749" [label="[]", style=solid]; +"1916 Split_1748" -> "1922 Shape_1752" [label="[]", style=solid]; +"1916 Split_1748" -> "1925 Shape_1755" [label="[]", style=solid]; +"1916 Split_1748" -> "1938 Shape_1768" [label="[]", style=solid]; +"1916 Split_1748" -> "1941 Shape_1771" [label="[]", style=solid]; +"1916 Split_1748" -> "1944 Shape_1774" [label="[]", style=solid]; +"1916 Split_1748" -> "1955 Reshape_1785" [label="[]", style=solid]; +"1916 Split_1748" -> "1959 Shape_1787" [label="[]", style=solid]; +"1916 Split_1748" -> "1962 Shape_1790" [label="[]", style=solid]; +"1916 Split_1748" -> "1965 Shape_1793" [label="[]", style=solid]; +"1916 Split_1748" -> "1976 Reshape_1804" [label="[]", style=solid]; +"1917 QuantizeLinear_query.17_1" -> "1918 DequantizeLinear_query.17_1" [label="[]", style=dashed]; +"1918 DequantizeLinear_query.17_1" -> "1936 Reshape_1766" [label="[]", style=solid]; +"1919 Shape_1749" -> "1921 Gather_1751" [label="[-1]", style=dashed]; +"1920 Constant_1750" -> "1921 Gather_1751" [label="[]", style=dashed]; +"1921 Gather_1751" -> "1932 Unsqueeze_1762" [label="[]", style=dashed]; +"1922 Shape_1752" -> "1924 Gather_1754" [label="[-1]", style=dashed]; +"1923 Constant_1753" -> "1924 Gather_1754" [label="[]", style=dashed]; +"1924 Gather_1754" -> "1933 Unsqueeze_1763" [label="[]", style=dashed]; +"1925 Shape_1755" -> "1927 Gather_1757" [label="[-1]", style=dashed]; +"1926 Constant_1756" -> "1927 Gather_1757" [label="[]", style=dashed]; +"1927 Gather_1757" -> "1929 Div_1759" [label="[]", style=dashed]; +"1928 Constant_1758" -> "1929 Div_1759" [label="[]", style=dashed]; +"1929 Div_1759" -> "1930 Cast_1760" [label="[]", style=dashed]; +"1930 Cast_1760" -> "1931 Cast_1761" [label="[]", style=dashed]; +"1931 Cast_1761" -> "1934 Unsqueeze_1764" [label="[]", style=dashed]; +"1932 Unsqueeze_1762" -> "1935 Concat_1765" [label="[1]", style=dashed]; +"1933 Unsqueeze_1763" -> "1935 Concat_1765" [label="[1]", style=dashed]; +"1934 Unsqueeze_1764" -> "1935 Concat_1765" [label="[1]", style=dashed]; +"1935 Concat_1765" -> "1936 Reshape_1766" [label="[4]", style=dashed]; +"1936 Reshape_1766" -> "1937 Transpose_1767" [label="[]", style=solid]; +"1937 Transpose_1767" -> "1982 MatMul_1810" [label="[]", style=solid]; +"1938 Shape_1768" -> "1940 Gather_1770" [label="[-1]", style=dashed]; +"1939 Constant_1769" -> "1940 Gather_1770" [label="[]", style=dashed]; +"1940 Gather_1770" -> "1951 Unsqueeze_1781" [label="[]", style=dashed]; +"1941 Shape_1771" -> "1943 Gather_1773" [label="[-1]", style=dashed]; +"1942 Constant_1772" -> "1943 Gather_1773" [label="[]", style=dashed]; +"1943 Gather_1773" -> "1952 Unsqueeze_1782" [label="[]", style=dashed]; +"1944 Shape_1774" -> "1946 Gather_1776" [label="[-1]", style=dashed]; +"1945 Constant_1775" -> "1946 Gather_1776" [label="[]", style=dashed]; +"1946 Gather_1776" -> "1948 Div_1778" [label="[]", style=dashed]; +"1947 Constant_1777" -> "1948 Div_1778" [label="[]", style=dashed]; +"1948 Div_1778" -> "1949 Cast_1779" [label="[]", style=dashed]; +"1949 Cast_1779" -> "1950 Cast_1780" [label="[]", style=dashed]; +"1950 Cast_1780" -> "1953 Unsqueeze_1783" [label="[]", style=dashed]; +"1951 Unsqueeze_1781" -> "1954 Concat_1784" [label="[1]", 
style=dashed]; +"1952 Unsqueeze_1782" -> "1954 Concat_1784" [label="[1]", style=dashed]; +"1953 Unsqueeze_1783" -> "1954 Concat_1784" [label="[1]", style=dashed]; +"1954 Concat_1784" -> "1955 Reshape_1785" [label="[4]", style=dashed]; +"1955 Reshape_1785" -> "1956 QuantizeLinear_2204_1" [label="[]", style=solid]; +"1955 Reshape_1785" -> "1978 Transpose_1806" [label="[]", style=solid]; +"1956 QuantizeLinear_2204_1" -> "1957 DequantizeLinear_2204_1" [label="[]", style=dashed]; +"1957 DequantizeLinear_2204_1" -> "1958 Transpose_1786" [label="[]", style=solid]; +"1958 Transpose_1786" -> "1982 MatMul_1810" [label="[]", style=solid]; +"1959 Shape_1787" -> "1961 Gather_1789" [label="[-1]", style=dashed]; +"1960 Constant_1788" -> "1961 Gather_1789" [label="[]", style=dashed]; +"1961 Gather_1789" -> "1972 Unsqueeze_1800" [label="[]", style=dashed]; +"1962 Shape_1790" -> "1964 Gather_1792" [label="[-1]", style=dashed]; +"1963 Constant_1791" -> "1964 Gather_1792" [label="[]", style=dashed]; +"1964 Gather_1792" -> "1973 Unsqueeze_1801" [label="[]", style=dashed]; +"1965 Shape_1793" -> "1967 Gather_1795" [label="[-1]", style=dashed]; +"1966 Constant_1794" -> "1967 Gather_1795" [label="[]", style=dashed]; +"1967 Gather_1795" -> "1969 Div_1797" [label="[]", style=dashed]; +"1968 Constant_1796" -> "1969 Div_1797" [label="[]", style=dashed]; +"1969 Div_1797" -> "1970 Cast_1798" [label="[]", style=dashed]; +"1970 Cast_1798" -> "1971 Cast_1799" [label="[]", style=dashed]; +"1971 Cast_1799" -> "1974 Unsqueeze_1802" [label="[]", style=dashed]; +"1972 Unsqueeze_1800" -> "1975 Concat_1803" [label="[1]", style=dashed]; +"1973 Unsqueeze_1801" -> "1975 Concat_1803" [label="[1]", style=dashed]; +"1974 Unsqueeze_1802" -> "1975 Concat_1803" [label="[1]", style=dashed]; +"1975 Concat_1803" -> "1976 Reshape_1804" [label="[4]", style=dashed]; +"1976 Reshape_1804" -> "1977 Transpose_1805" [label="[]", style=solid]; +"1977 Transpose_1805" -> "1980 Unsqueeze_1808" [label="[]", style=solid]; +"1977 Transpose_1805" -> "2006 MatMul_1834" [label="[]", style=solid]; +"1978 Transpose_1806" -> "1979 Unsqueeze_1807" [label="[]", style=solid]; +"1979 Unsqueeze_1807" -> "1981 Concat_1809" [label="[]", style=solid]; +"1980 Unsqueeze_1808" -> "1981 Concat_1809" [label="[]", style=solid]; +"1981 Concat_1809" -> "2835 nncf_model_output_9" [label="[2, 1, 12, 8, 64]", style=solid]; +"1982 MatMul_1810" -> "1984 Div_1812" [label="[]", style=solid]; +"1983 Constant_1811" -> "1984 Div_1812" [label="[]", style=solid]; +"1984 Div_1812" -> "1985 Shape_1813" [label="[]", style=solid]; +"1984 Div_1812" -> "1988 Shape_1816" [label="[]", style=solid]; +"1984 Div_1812" -> "1999 Mul_1827" [label="[]", style=solid]; +"1985 Shape_1813" -> "1987 Gather_1815" [label="[-1]", style=dashed]; +"1986 Constant_1814" -> "1987 Gather_1815" [label="[]", style=dashed]; +"1987 Gather_1815" -> "1991 Sub_1819" [label="[]", style=dashed]; +"1988 Shape_1816" -> "1990 Gather_1818" [label="[-1]", style=dashed]; +"1989 Constant_1817" -> "1990 Gather_1818" [label="[]", style=dashed]; +"1990 Gather_1818" -> "1991 Sub_1819" [label="[]", style=dashed]; +"1990 Gather_1818" -> "1993 Unsqueeze_1821" [label="[]", style=dashed]; +"1990 Gather_1818" -> "1996 Unsqueeze_1824" [label="[]", style=dashed]; +"1991 Sub_1819" -> "1992 Unsqueeze_1820" [label="[]", style=dashed]; +"1992 Unsqueeze_1820" -> "1995 Slice_1823" [label="[1]", style=dashed]; +"1993 Unsqueeze_1821" -> "1995 Slice_1823" [label="[1]", style=dashed]; +"1994 Constant_1822" -> "1995 Slice_1823" [label="[1]", style=dashed]; 
+"1995 Slice_1823" -> "1998 Slice_1826" [label="[]", style=solid]; +"1996 Unsqueeze_1824" -> "1998 Slice_1826" [label="[1]", style=dashed]; +"1997 Constant_1825" -> "1998 Slice_1826" [label="[1]", style=dashed]; +"1998 Slice_1826" -> "1999 Mul_1827" [label="[]", style=solid]; +"1998 Slice_1826" -> "2001 Sub_1829" [label="[]", style=solid]; +"1999 Mul_1827" -> "2004 Sub_1832" [label="[]", style=solid]; +"2000 Constant_1828" -> "2001 Sub_1829" [label="[]", style=solid]; +"2001 Sub_1829" -> "2003 Mul_1831" [label="[]", style=solid]; +"2002 Constant_1830" -> "2003 Mul_1831" [label="[]", style=solid]; +"2003 Mul_1831" -> "2004 Sub_1832" [label="[]", style=solid]; +"2004 Sub_1832" -> "2005 Softmax_1833" [label="[]", style=solid]; +"2005 Softmax_1833" -> "2006 MatMul_1834" [label="[]", style=solid]; +"2006 MatMul_1834" -> "2007 QuantizeLinear_2261_1" [label="[]", style=solid]; +"2007 QuantizeLinear_2261_1" -> "2008 DequantizeLinear_2261_1" [label="[]", style=dashed]; +"2008 DequantizeLinear_2261_1" -> "2009 Transpose_1835" [label="[]", style=solid]; +"2009 Transpose_1835" -> "2010 Shape_1836" [label="[]", style=solid]; +"2009 Transpose_1835" -> "2013 Shape_1839" [label="[]", style=solid]; +"2009 Transpose_1835" -> "2016 Shape_1842" [label="[]", style=solid]; +"2009 Transpose_1835" -> "2019 Shape_1845" [label="[]", style=solid]; +"2009 Transpose_1835" -> "2027 Reshape_1853" [label="[]", style=solid]; +"2010 Shape_1836" -> "2012 Gather_1838" [label="[-1]", style=dashed]; +"2011 Constant_1837" -> "2012 Gather_1838" [label="[]", style=dashed]; +"2012 Gather_1838" -> "2023 Unsqueeze_1849" [label="[]", style=dashed]; +"2013 Shape_1839" -> "2015 Gather_1841" [label="[-1]", style=dashed]; +"2014 Constant_1840" -> "2015 Gather_1841" [label="[]", style=dashed]; +"2015 Gather_1841" -> "2024 Unsqueeze_1850" [label="[]", style=dashed]; +"2016 Shape_1842" -> "2018 Gather_1844" [label="[-1]", style=dashed]; +"2017 Constant_1843" -> "2018 Gather_1844" [label="[]", style=dashed]; +"2018 Gather_1844" -> "2022 Mul_1848" [label="[]", style=dashed]; +"2019 Shape_1845" -> "2021 Gather_1847" [label="[-1]", style=dashed]; +"2020 Constant_1846" -> "2021 Gather_1847" [label="[]", style=dashed]; +"2021 Gather_1847" -> "2022 Mul_1848" [label="[]", style=dashed]; +"2022 Mul_1848" -> "2025 Unsqueeze_1851" [label="[]", style=dashed]; +"2023 Unsqueeze_1849" -> "2026 Concat_1852" [label="[1]", style=dashed]; +"2024 Unsqueeze_1850" -> "2026 Concat_1852" [label="[1]", style=dashed]; +"2025 Unsqueeze_1851" -> "2026 Concat_1852" [label="[1]", style=dashed]; +"2026 Concat_1852" -> "2027 Reshape_1853" [label="[3]", style=dashed]; +"2027 Reshape_1853" -> "2028 Shape_1854" [label="[]", style=solid]; +"2027 Reshape_1853" -> "2031 Shape_1857" [label="[]", style=solid]; +"2027 Reshape_1853" -> "2034 Shape_1860" [label="[]", style=solid]; +"2027 Reshape_1853" -> "2039 Reshape_1865" [label="[]", style=solid]; +"2028 Shape_1854" -> "2030 Gather_1856" [label="[-1]", style=dashed]; +"2029 Constant_1855" -> "2030 Gather_1856" [label="[]", style=dashed]; +"2030 Gather_1856" -> "2043 Unsqueeze_1867" [label="[]", style=dashed]; +"2031 Shape_1857" -> "2033 Gather_1859" [label="[-1]", style=dashed]; +"2032 Constant_1858" -> "2033 Gather_1859" [label="[]", style=dashed]; +"2033 Gather_1859" -> "2044 Unsqueeze_1868" [label="[]", style=dashed]; +"2034 Shape_1860" -> "2036 Gather_1862" [label="[-1]", style=dashed]; +"2035 Constant_1861" -> "2036 Gather_1862" [label="[]", style=dashed]; +"2036 Gather_1862" -> "2037 Unsqueeze_1863" [label="[]", 
style=dashed]; +"2037 Unsqueeze_1863" -> "2038 Concat_1864" [label="[1]", style=dashed]; +"2038 Concat_1864" -> "2039 Reshape_1865" [label="[2]", style=dashed]; +"2039 Reshape_1865" -> "2042 Gemm_1866" [label="[]", style=solid]; +"2040 QuantizeLinear_h.8.attn.c_proj.weight_1" -> "2041 DequantizeLinear_h.8.attn.c_proj.weight_1" [label="[768, 768]", style=dashed]; +"2041 DequantizeLinear_h.8.attn.c_proj.weight_1" -> "2042 Gemm_1866" [label="[768, 768]", style=solid]; +"2042 Gemm_1866" -> "2046 Reshape_1870" [label="[]", style=solid]; +"2043 Unsqueeze_1867" -> "2045 Concat_1869" [label="[1]", style=dashed]; +"2044 Unsqueeze_1868" -> "2045 Concat_1869" [label="[1]", style=dashed]; +"2045 Concat_1869" -> "2046 Reshape_1870" [label="[3]", style=dashed]; +"2046 Reshape_1870" -> "2047 Add_1871" [label="[]", style=solid]; +"2047 Add_1871" -> "2048 ReduceMean_1872" [label="[]", style=solid]; +"2047 Add_1871" -> "2049 Sub_1873" [label="[]", style=solid]; +"2047 Add_1871" -> "2114 Add_1930" [label="[]", style=solid]; +"2048 ReduceMean_1872" -> "2049 Sub_1873" [label="[]", style=solid]; +"2049 Sub_1873" -> "2051 Pow_1875" [label="[]", style=solid]; +"2049 Sub_1873" -> "2056 Div_1880" [label="[]", style=solid]; +"2050 Constant_1874" -> "2051 Pow_1875" [label="[]", style=solid]; +"2051 Pow_1875" -> "2052 ReduceMean_1876" [label="[]", style=solid]; +"2052 ReduceMean_1876" -> "2054 Add_1878" [label="[]", style=solid]; +"2053 Constant_1877" -> "2054 Add_1878" [label="[]", style=solid]; +"2054 Add_1878" -> "2055 Sqrt_1879" [label="[]", style=solid]; +"2055 Sqrt_1879" -> "2056 Div_1880" [label="[]", style=solid]; +"2056 Div_1880" -> "2057 Mul_1881" [label="[]", style=solid]; +"2057 Mul_1881" -> "2058 Add_1882" [label="[]", style=solid]; +"2058 Add_1882" -> "2059 QuantizeLinear_2313_1" [label="[]", style=solid]; +"2059 QuantizeLinear_2313_1" -> "2060 DequantizeLinear_2313_1" [label="[]", style=dashed]; +"2060 DequantizeLinear_2313_1" -> "2061 Shape_1883" [label="[]", style=solid]; +"2060 DequantizeLinear_2313_1" -> "2064 Shape_1886" [label="[]", style=solid]; +"2060 DequantizeLinear_2313_1" -> "2067 Shape_1889" [label="[]", style=solid]; +"2060 DequantizeLinear_2313_1" -> "2072 Reshape_1894" [label="[]", style=solid]; +"2061 Shape_1883" -> "2063 Gather_1885" [label="[-1]", style=dashed]; +"2062 Constant_1884" -> "2063 Gather_1885" [label="[]", style=dashed]; +"2063 Gather_1885" -> "2076 Unsqueeze_1896" [label="[]", style=dashed]; +"2064 Shape_1886" -> "2066 Gather_1888" [label="[-1]", style=dashed]; +"2065 Constant_1887" -> "2066 Gather_1888" [label="[]", style=dashed]; +"2066 Gather_1888" -> "2077 Unsqueeze_1897" [label="[]", style=dashed]; +"2067 Shape_1889" -> "2069 Gather_1891" [label="[-1]", style=dashed]; +"2068 Constant_1890" -> "2069 Gather_1891" [label="[]", style=dashed]; +"2069 Gather_1891" -> "2070 Unsqueeze_1892" [label="[]", style=dashed]; +"2070 Unsqueeze_1892" -> "2071 Concat_1893" [label="[1]", style=dashed]; +"2071 Concat_1893" -> "2072 Reshape_1894" [label="[2]", style=dashed]; +"2072 Reshape_1894" -> "2075 Gemm_1895" [label="[]", style=solid]; +"2073 QuantizeLinear_h.8.mlp.c_fc.weight_1" -> "2074 DequantizeLinear_h.8.mlp.c_fc.weight_1" [label="[768, 3072]", style=dashed]; +"2074 DequantizeLinear_h.8.mlp.c_fc.weight_1" -> "2075 Gemm_1895" [label="[768, 3072]", style=solid]; +"2075 Gemm_1895" -> "2079 Reshape_1899" [label="[]", style=solid]; +"2076 Unsqueeze_1896" -> "2078 Concat_1898" [label="[1]", style=dashed]; +"2077 Unsqueeze_1897" -> "2078 Concat_1898" [label="[1]", style=dashed]; 
+"2078 Concat_1898" -> "2079 Reshape_1899" [label="[3]", style=dashed]; +"2079 Reshape_1899" -> "2081 Mul_1901" [label="[]", style=solid]; +"2079 Reshape_1899" -> "2083 Pow_1903" [label="[]", style=solid]; +"2079 Reshape_1899" -> "2086 Add_1906" [label="[]", style=solid]; +"2080 Constant_1900" -> "2081 Mul_1901" [label="[]", style=solid]; +"2081 Mul_1901" -> "2092 Mul_1912" [label="[]", style=solid]; +"2082 Constant_1902" -> "2083 Pow_1903" [label="[]", style=solid]; +"2083 Pow_1903" -> "2085 Mul_1905" [label="[]", style=solid]; +"2084 Constant_1904" -> "2085 Mul_1905" [label="[]", style=solid]; +"2085 Mul_1905" -> "2086 Add_1906" [label="[]", style=solid]; +"2086 Add_1906" -> "2088 Mul_1908" [label="[]", style=solid]; +"2087 Constant_1907" -> "2088 Mul_1908" [label="[]", style=solid]; +"2088 Mul_1908" -> "2089 Tanh_1909" [label="[]", style=solid]; +"2089 Tanh_1909" -> "2091 Add_1911" [label="[]", style=solid]; +"2090 Constant_1910" -> "2091 Add_1911" [label="[]", style=solid]; +"2091 Add_1911" -> "2092 Mul_1912" [label="[]", style=solid]; +"2092 Mul_1912" -> "2093 QuantizeLinear_2347_1" [label="[]", style=solid]; +"2093 QuantizeLinear_2347_1" -> "2094 DequantizeLinear_2347_1" [label="[]", style=dashed]; +"2094 DequantizeLinear_2347_1" -> "2095 Shape_1913" [label="[]", style=solid]; +"2094 DequantizeLinear_2347_1" -> "2098 Shape_1916" [label="[]", style=solid]; +"2094 DequantizeLinear_2347_1" -> "2101 Shape_1919" [label="[]", style=solid]; +"2094 DequantizeLinear_2347_1" -> "2106 Reshape_1924" [label="[]", style=solid]; +"2095 Shape_1913" -> "2097 Gather_1915" [label="[-1]", style=dashed]; +"2096 Constant_1914" -> "2097 Gather_1915" [label="[]", style=dashed]; +"2097 Gather_1915" -> "2110 Unsqueeze_1926" [label="[]", style=dashed]; +"2098 Shape_1916" -> "2100 Gather_1918" [label="[-1]", style=dashed]; +"2099 Constant_1917" -> "2100 Gather_1918" [label="[]", style=dashed]; +"2100 Gather_1918" -> "2111 Unsqueeze_1927" [label="[]", style=dashed]; +"2101 Shape_1919" -> "2103 Gather_1921" [label="[-1]", style=dashed]; +"2102 Constant_1920" -> "2103 Gather_1921" [label="[]", style=dashed]; +"2103 Gather_1921" -> "2104 Unsqueeze_1922" [label="[]", style=dashed]; +"2104 Unsqueeze_1922" -> "2105 Concat_1923" [label="[1]", style=dashed]; +"2105 Concat_1923" -> "2106 Reshape_1924" [label="[2]", style=dashed]; +"2106 Reshape_1924" -> "2109 Gemm_1925" [label="[]", style=solid]; +"2107 QuantizeLinear_h.8.mlp.c_proj.weight_1" -> "2108 DequantizeLinear_h.8.mlp.c_proj.weight_1" [label="[3072, 768]", style=dashed]; +"2108 DequantizeLinear_h.8.mlp.c_proj.weight_1" -> "2109 Gemm_1925" [label="[3072, 768]", style=solid]; +"2109 Gemm_1925" -> "2113 Reshape_1929" [label="[]", style=solid]; +"2110 Unsqueeze_1926" -> "2112 Concat_1928" [label="[1]", style=dashed]; +"2111 Unsqueeze_1927" -> "2112 Concat_1928" [label="[1]", style=dashed]; +"2112 Concat_1928" -> "2113 Reshape_1929" [label="[3]", style=dashed]; +"2113 Reshape_1929" -> "2114 Add_1930" [label="[]", style=solid]; +"2114 Add_1930" -> "2115 ReduceMean_1931" [label="[]", style=solid]; +"2114 Add_1930" -> "2116 Sub_1932" [label="[]", style=solid]; +"2114 Add_1930" -> "2278 Add_2082" [label="[]", style=solid]; +"2115 ReduceMean_1931" -> "2116 Sub_1932" [label="[]", style=solid]; +"2116 Sub_1932" -> "2118 Pow_1934" [label="[]", style=solid]; +"2116 Sub_1932" -> "2123 Div_1939" [label="[]", style=solid]; +"2117 Constant_1933" -> "2118 Pow_1934" [label="[]", style=solid]; +"2118 Pow_1934" -> "2119 ReduceMean_1935" [label="[]", style=solid]; +"2119 
ReduceMean_1935" -> "2121 Add_1937" [label="[]", style=solid]; +"2120 Constant_1936" -> "2121 Add_1937" [label="[]", style=solid]; +"2121 Add_1937" -> "2122 Sqrt_1938" [label="[]", style=solid]; +"2122 Sqrt_1938" -> "2123 Div_1939" [label="[]", style=solid]; +"2123 Div_1939" -> "2124 Mul_1940" [label="[]", style=solid]; +"2124 Mul_1940" -> "2125 Add_1941" [label="[]", style=solid]; +"2125 Add_1941" -> "2126 QuantizeLinear_2380_1" [label="[]", style=solid]; +"2126 QuantizeLinear_2380_1" -> "2127 DequantizeLinear_2380_1" [label="[]", style=dashed]; +"2127 DequantizeLinear_2380_1" -> "2128 Shape_1942" [label="[]", style=solid]; +"2127 DequantizeLinear_2380_1" -> "2131 Shape_1945" [label="[]", style=solid]; +"2127 DequantizeLinear_2380_1" -> "2134 Shape_1948" [label="[]", style=solid]; +"2127 DequantizeLinear_2380_1" -> "2139 Reshape_1953" [label="[]", style=solid]; +"2128 Shape_1942" -> "2130 Gather_1944" [label="[-1]", style=dashed]; +"2129 Constant_1943" -> "2130 Gather_1944" [label="[]", style=dashed]; +"2130 Gather_1944" -> "2143 Unsqueeze_1955" [label="[]", style=dashed]; +"2131 Shape_1945" -> "2133 Gather_1947" [label="[-1]", style=dashed]; +"2132 Constant_1946" -> "2133 Gather_1947" [label="[]", style=dashed]; +"2133 Gather_1947" -> "2144 Unsqueeze_1956" [label="[]", style=dashed]; +"2134 Shape_1948" -> "2136 Gather_1950" [label="[-1]", style=dashed]; +"2135 Constant_1949" -> "2136 Gather_1950" [label="[]", style=dashed]; +"2136 Gather_1950" -> "2137 Unsqueeze_1951" [label="[]", style=dashed]; +"2137 Unsqueeze_1951" -> "2138 Concat_1952" [label="[1]", style=dashed]; +"2138 Concat_1952" -> "2139 Reshape_1953" [label="[2]", style=dashed]; +"2139 Reshape_1953" -> "2142 Gemm_1954" [label="[]", style=solid]; +"2140 QuantizeLinear_h.9.attn.c_attn.weight_1" -> "2141 DequantizeLinear_h.9.attn.c_attn.weight_1" [label="[768, 2304]", style=dashed]; +"2141 DequantizeLinear_h.9.attn.c_attn.weight_1" -> "2142 Gemm_1954" [label="[768, 2304]", style=solid]; +"2142 Gemm_1954" -> "2146 Reshape_1958" [label="[]", style=solid]; +"2143 Unsqueeze_1955" -> "2145 Concat_1957" [label="[1]", style=dashed]; +"2144 Unsqueeze_1956" -> "2145 Concat_1957" [label="[1]", style=dashed]; +"2145 Concat_1957" -> "2146 Reshape_1958" [label="[3]", style=dashed]; +"2146 Reshape_1958" -> "2147 Split_1959" [label="[]", style=solid]; +"2147 Split_1959" -> "2148 QuantizeLinear_query.19_1" [label="[]", style=solid]; +"2147 Split_1959" -> "2150 Shape_1960" [label="[]", style=solid]; +"2147 Split_1959" -> "2153 Shape_1963" [label="[]", style=solid]; +"2147 Split_1959" -> "2156 Shape_1966" [label="[]", style=solid]; +"2147 Split_1959" -> "2169 Shape_1979" [label="[]", style=solid]; +"2147 Split_1959" -> "2172 Shape_1982" [label="[]", style=solid]; +"2147 Split_1959" -> "2175 Shape_1985" [label="[]", style=solid]; +"2147 Split_1959" -> "2186 Reshape_1996" [label="[]", style=solid]; +"2147 Split_1959" -> "2190 Shape_1998" [label="[]", style=solid]; +"2147 Split_1959" -> "2193 Shape_2001" [label="[]", style=solid]; +"2147 Split_1959" -> "2196 Shape_2004" [label="[]", style=solid]; +"2147 Split_1959" -> "2207 Reshape_2015" [label="[]", style=solid]; +"2148 QuantizeLinear_query.19_1" -> "2149 DequantizeLinear_query.19_1" [label="[]", style=dashed]; +"2149 DequantizeLinear_query.19_1" -> "2167 Reshape_1977" [label="[]", style=solid]; +"2150 Shape_1960" -> "2152 Gather_1962" [label="[-1]", style=dashed]; +"2151 Constant_1961" -> "2152 Gather_1962" [label="[]", style=dashed]; +"2152 Gather_1962" -> "2163 Unsqueeze_1973" [label="[]", 
style=dashed]; +"2153 Shape_1963" -> "2155 Gather_1965" [label="[-1]", style=dashed]; +"2154 Constant_1964" -> "2155 Gather_1965" [label="[]", style=dashed]; +"2155 Gather_1965" -> "2164 Unsqueeze_1974" [label="[]", style=dashed]; +"2156 Shape_1966" -> "2158 Gather_1968" [label="[-1]", style=dashed]; +"2157 Constant_1967" -> "2158 Gather_1968" [label="[]", style=dashed]; +"2158 Gather_1968" -> "2160 Div_1970" [label="[]", style=dashed]; +"2159 Constant_1969" -> "2160 Div_1970" [label="[]", style=dashed]; +"2160 Div_1970" -> "2161 Cast_1971" [label="[]", style=dashed]; +"2161 Cast_1971" -> "2162 Cast_1972" [label="[]", style=dashed]; +"2162 Cast_1972" -> "2165 Unsqueeze_1975" [label="[]", style=dashed]; +"2163 Unsqueeze_1973" -> "2166 Concat_1976" [label="[1]", style=dashed]; +"2164 Unsqueeze_1974" -> "2166 Concat_1976" [label="[1]", style=dashed]; +"2165 Unsqueeze_1975" -> "2166 Concat_1976" [label="[1]", style=dashed]; +"2166 Concat_1976" -> "2167 Reshape_1977" [label="[4]", style=dashed]; +"2167 Reshape_1977" -> "2168 Transpose_1978" [label="[]", style=solid]; +"2168 Transpose_1978" -> "2213 MatMul_2021" [label="[]", style=solid]; +"2169 Shape_1979" -> "2171 Gather_1981" [label="[-1]", style=dashed]; +"2170 Constant_1980" -> "2171 Gather_1981" [label="[]", style=dashed]; +"2171 Gather_1981" -> "2182 Unsqueeze_1992" [label="[]", style=dashed]; +"2172 Shape_1982" -> "2174 Gather_1984" [label="[-1]", style=dashed]; +"2173 Constant_1983" -> "2174 Gather_1984" [label="[]", style=dashed]; +"2174 Gather_1984" -> "2183 Unsqueeze_1993" [label="[]", style=dashed]; +"2175 Shape_1985" -> "2177 Gather_1987" [label="[-1]", style=dashed]; +"2176 Constant_1986" -> "2177 Gather_1987" [label="[]", style=dashed]; +"2177 Gather_1987" -> "2179 Div_1989" [label="[]", style=dashed]; +"2178 Constant_1988" -> "2179 Div_1989" [label="[]", style=dashed]; +"2179 Div_1989" -> "2180 Cast_1990" [label="[]", style=dashed]; +"2180 Cast_1990" -> "2181 Cast_1991" [label="[]", style=dashed]; +"2181 Cast_1991" -> "2184 Unsqueeze_1994" [label="[]", style=dashed]; +"2182 Unsqueeze_1992" -> "2185 Concat_1995" [label="[1]", style=dashed]; +"2183 Unsqueeze_1993" -> "2185 Concat_1995" [label="[1]", style=dashed]; +"2184 Unsqueeze_1994" -> "2185 Concat_1995" [label="[1]", style=dashed]; +"2185 Concat_1995" -> "2186 Reshape_1996" [label="[4]", style=dashed]; +"2186 Reshape_1996" -> "2187 QuantizeLinear_2445_1" [label="[]", style=solid]; +"2186 Reshape_1996" -> "2209 Transpose_2017" [label="[]", style=solid]; +"2187 QuantizeLinear_2445_1" -> "2188 DequantizeLinear_2445_1" [label="[]", style=dashed]; +"2188 DequantizeLinear_2445_1" -> "2189 Transpose_1997" [label="[]", style=solid]; +"2189 Transpose_1997" -> "2213 MatMul_2021" [label="[]", style=solid]; +"2190 Shape_1998" -> "2192 Gather_2000" [label="[-1]", style=dashed]; +"2191 Constant_1999" -> "2192 Gather_2000" [label="[]", style=dashed]; +"2192 Gather_2000" -> "2203 Unsqueeze_2011" [label="[]", style=dashed]; +"2193 Shape_2001" -> "2195 Gather_2003" [label="[-1]", style=dashed]; +"2194 Constant_2002" -> "2195 Gather_2003" [label="[]", style=dashed]; +"2195 Gather_2003" -> "2204 Unsqueeze_2012" [label="[]", style=dashed]; +"2196 Shape_2004" -> "2198 Gather_2006" [label="[-1]", style=dashed]; +"2197 Constant_2005" -> "2198 Gather_2006" [label="[]", style=dashed]; +"2198 Gather_2006" -> "2200 Div_2008" [label="[]", style=dashed]; +"2199 Constant_2007" -> "2200 Div_2008" [label="[]", style=dashed]; +"2200 Div_2008" -> "2201 Cast_2009" [label="[]", style=dashed]; +"2201 Cast_2009" 
-> "2202 Cast_2010" [label="[]", style=dashed]; +"2202 Cast_2010" -> "2205 Unsqueeze_2013" [label="[]", style=dashed]; +"2203 Unsqueeze_2011" -> "2206 Concat_2014" [label="[1]", style=dashed]; +"2204 Unsqueeze_2012" -> "2206 Concat_2014" [label="[1]", style=dashed]; +"2205 Unsqueeze_2013" -> "2206 Concat_2014" [label="[1]", style=dashed]; +"2206 Concat_2014" -> "2207 Reshape_2015" [label="[4]", style=dashed]; +"2207 Reshape_2015" -> "2208 Transpose_2016" [label="[]", style=solid]; +"2208 Transpose_2016" -> "2211 Unsqueeze_2019" [label="[]", style=solid]; +"2208 Transpose_2016" -> "2237 MatMul_2045" [label="[]", style=solid]; +"2209 Transpose_2017" -> "2210 Unsqueeze_2018" [label="[]", style=solid]; +"2210 Unsqueeze_2018" -> "2212 Concat_2020" [label="[]", style=solid]; +"2211 Unsqueeze_2019" -> "2212 Concat_2020" [label="[]", style=solid]; +"2212 Concat_2020" -> "2836 nncf_model_output_10" [label="[2, 1, 12, 8, 64]", style=solid]; +"2213 MatMul_2021" -> "2215 Div_2023" [label="[]", style=solid]; +"2214 Constant_2022" -> "2215 Div_2023" [label="[]", style=solid]; +"2215 Div_2023" -> "2216 Shape_2024" [label="[]", style=solid]; +"2215 Div_2023" -> "2219 Shape_2027" [label="[]", style=solid]; +"2215 Div_2023" -> "2230 Mul_2038" [label="[]", style=solid]; +"2216 Shape_2024" -> "2218 Gather_2026" [label="[-1]", style=dashed]; +"2217 Constant_2025" -> "2218 Gather_2026" [label="[]", style=dashed]; +"2218 Gather_2026" -> "2222 Sub_2030" [label="[]", style=dashed]; +"2219 Shape_2027" -> "2221 Gather_2029" [label="[-1]", style=dashed]; +"2220 Constant_2028" -> "2221 Gather_2029" [label="[]", style=dashed]; +"2221 Gather_2029" -> "2222 Sub_2030" [label="[]", style=dashed]; +"2221 Gather_2029" -> "2224 Unsqueeze_2032" [label="[]", style=dashed]; +"2221 Gather_2029" -> "2227 Unsqueeze_2035" [label="[]", style=dashed]; +"2222 Sub_2030" -> "2223 Unsqueeze_2031" [label="[]", style=dashed]; +"2223 Unsqueeze_2031" -> "2226 Slice_2034" [label="[1]", style=dashed]; +"2224 Unsqueeze_2032" -> "2226 Slice_2034" [label="[1]", style=dashed]; +"2225 Constant_2033" -> "2226 Slice_2034" [label="[1]", style=dashed]; +"2226 Slice_2034" -> "2229 Slice_2037" [label="[]", style=solid]; +"2227 Unsqueeze_2035" -> "2229 Slice_2037" [label="[1]", style=dashed]; +"2228 Constant_2036" -> "2229 Slice_2037" [label="[1]", style=dashed]; +"2229 Slice_2037" -> "2230 Mul_2038" [label="[]", style=solid]; +"2229 Slice_2037" -> "2232 Sub_2040" [label="[]", style=solid]; +"2230 Mul_2038" -> "2235 Sub_2043" [label="[]", style=solid]; +"2231 Constant_2039" -> "2232 Sub_2040" [label="[]", style=solid]; +"2232 Sub_2040" -> "2234 Mul_2042" [label="[]", style=solid]; +"2233 Constant_2041" -> "2234 Mul_2042" [label="[]", style=solid]; +"2234 Mul_2042" -> "2235 Sub_2043" [label="[]", style=solid]; +"2235 Sub_2043" -> "2236 Softmax_2044" [label="[]", style=solid]; +"2236 Softmax_2044" -> "2237 MatMul_2045" [label="[]", style=solid]; +"2237 MatMul_2045" -> "2238 QuantizeLinear_2502_1" [label="[]", style=solid]; +"2238 QuantizeLinear_2502_1" -> "2239 DequantizeLinear_2502_1" [label="[]", style=dashed]; +"2239 DequantizeLinear_2502_1" -> "2240 Transpose_2046" [label="[]", style=solid]; +"2240 Transpose_2046" -> "2241 Shape_2047" [label="[]", style=solid]; +"2240 Transpose_2046" -> "2244 Shape_2050" [label="[]", style=solid]; +"2240 Transpose_2046" -> "2247 Shape_2053" [label="[]", style=solid]; +"2240 Transpose_2046" -> "2250 Shape_2056" [label="[]", style=solid]; +"2240 Transpose_2046" -> "2258 Reshape_2064" [label="[]", style=solid]; +"2241 
Shape_2047" -> "2243 Gather_2049" [label="[-1]", style=dashed]; +"2242 Constant_2048" -> "2243 Gather_2049" [label="[]", style=dashed]; +"2243 Gather_2049" -> "2254 Unsqueeze_2060" [label="[]", style=dashed]; +"2244 Shape_2050" -> "2246 Gather_2052" [label="[-1]", style=dashed]; +"2245 Constant_2051" -> "2246 Gather_2052" [label="[]", style=dashed]; +"2246 Gather_2052" -> "2255 Unsqueeze_2061" [label="[]", style=dashed]; +"2247 Shape_2053" -> "2249 Gather_2055" [label="[-1]", style=dashed]; +"2248 Constant_2054" -> "2249 Gather_2055" [label="[]", style=dashed]; +"2249 Gather_2055" -> "2253 Mul_2059" [label="[]", style=dashed]; +"2250 Shape_2056" -> "2252 Gather_2058" [label="[-1]", style=dashed]; +"2251 Constant_2057" -> "2252 Gather_2058" [label="[]", style=dashed]; +"2252 Gather_2058" -> "2253 Mul_2059" [label="[]", style=dashed]; +"2253 Mul_2059" -> "2256 Unsqueeze_2062" [label="[]", style=dashed]; +"2254 Unsqueeze_2060" -> "2257 Concat_2063" [label="[1]", style=dashed]; +"2255 Unsqueeze_2061" -> "2257 Concat_2063" [label="[1]", style=dashed]; +"2256 Unsqueeze_2062" -> "2257 Concat_2063" [label="[1]", style=dashed]; +"2257 Concat_2063" -> "2258 Reshape_2064" [label="[3]", style=dashed]; +"2258 Reshape_2064" -> "2259 Shape_2065" [label="[]", style=solid]; +"2258 Reshape_2064" -> "2262 Shape_2068" [label="[]", style=solid]; +"2258 Reshape_2064" -> "2265 Shape_2071" [label="[]", style=solid]; +"2258 Reshape_2064" -> "2270 Reshape_2076" [label="[]", style=solid]; +"2259 Shape_2065" -> "2261 Gather_2067" [label="[-1]", style=dashed]; +"2260 Constant_2066" -> "2261 Gather_2067" [label="[]", style=dashed]; +"2261 Gather_2067" -> "2274 Unsqueeze_2078" [label="[]", style=dashed]; +"2262 Shape_2068" -> "2264 Gather_2070" [label="[-1]", style=dashed]; +"2263 Constant_2069" -> "2264 Gather_2070" [label="[]", style=dashed]; +"2264 Gather_2070" -> "2275 Unsqueeze_2079" [label="[]", style=dashed]; +"2265 Shape_2071" -> "2267 Gather_2073" [label="[-1]", style=dashed]; +"2266 Constant_2072" -> "2267 Gather_2073" [label="[]", style=dashed]; +"2267 Gather_2073" -> "2268 Unsqueeze_2074" [label="[]", style=dashed]; +"2268 Unsqueeze_2074" -> "2269 Concat_2075" [label="[1]", style=dashed]; +"2269 Concat_2075" -> "2270 Reshape_2076" [label="[2]", style=dashed]; +"2270 Reshape_2076" -> "2273 Gemm_2077" [label="[]", style=solid]; +"2271 QuantizeLinear_h.9.attn.c_proj.weight_1" -> "2272 DequantizeLinear_h.9.attn.c_proj.weight_1" [label="[768, 768]", style=dashed]; +"2272 DequantizeLinear_h.9.attn.c_proj.weight_1" -> "2273 Gemm_2077" [label="[768, 768]", style=solid]; +"2273 Gemm_2077" -> "2277 Reshape_2081" [label="[]", style=solid]; +"2274 Unsqueeze_2078" -> "2276 Concat_2080" [label="[1]", style=dashed]; +"2275 Unsqueeze_2079" -> "2276 Concat_2080" [label="[1]", style=dashed]; +"2276 Concat_2080" -> "2277 Reshape_2081" [label="[3]", style=dashed]; +"2277 Reshape_2081" -> "2278 Add_2082" [label="[]", style=solid]; +"2278 Add_2082" -> "2279 ReduceMean_2083" [label="[]", style=solid]; +"2278 Add_2082" -> "2280 Sub_2084" [label="[]", style=solid]; +"2278 Add_2082" -> "2345 Add_2141" [label="[]", style=solid]; +"2279 ReduceMean_2083" -> "2280 Sub_2084" [label="[]", style=solid]; +"2280 Sub_2084" -> "2282 Pow_2086" [label="[]", style=solid]; +"2280 Sub_2084" -> "2287 Div_2091" [label="[]", style=solid]; +"2281 Constant_2085" -> "2282 Pow_2086" [label="[]", style=solid]; +"2282 Pow_2086" -> "2283 ReduceMean_2087" [label="[]", style=solid]; +"2283 ReduceMean_2087" -> "2285 Add_2089" [label="[]", style=solid]; +"2284 
Constant_2088" -> "2285 Add_2089" [label="[]", style=solid]; +"2285 Add_2089" -> "2286 Sqrt_2090" [label="[]", style=solid]; +"2286 Sqrt_2090" -> "2287 Div_2091" [label="[]", style=solid]; +"2287 Div_2091" -> "2288 Mul_2092" [label="[]", style=solid]; +"2288 Mul_2092" -> "2289 Add_2093" [label="[]", style=solid]; +"2289 Add_2093" -> "2290 QuantizeLinear_2554_1" [label="[]", style=solid]; +"2290 QuantizeLinear_2554_1" -> "2291 DequantizeLinear_2554_1" [label="[]", style=dashed]; +"2291 DequantizeLinear_2554_1" -> "2292 Shape_2094" [label="[]", style=solid]; +"2291 DequantizeLinear_2554_1" -> "2295 Shape_2097" [label="[]", style=solid]; +"2291 DequantizeLinear_2554_1" -> "2298 Shape_2100" [label="[]", style=solid]; +"2291 DequantizeLinear_2554_1" -> "2303 Reshape_2105" [label="[]", style=solid]; +"2292 Shape_2094" -> "2294 Gather_2096" [label="[-1]", style=dashed]; +"2293 Constant_2095" -> "2294 Gather_2096" [label="[]", style=dashed]; +"2294 Gather_2096" -> "2307 Unsqueeze_2107" [label="[]", style=dashed]; +"2295 Shape_2097" -> "2297 Gather_2099" [label="[-1]", style=dashed]; +"2296 Constant_2098" -> "2297 Gather_2099" [label="[]", style=dashed]; +"2297 Gather_2099" -> "2308 Unsqueeze_2108" [label="[]", style=dashed]; +"2298 Shape_2100" -> "2300 Gather_2102" [label="[-1]", style=dashed]; +"2299 Constant_2101" -> "2300 Gather_2102" [label="[]", style=dashed]; +"2300 Gather_2102" -> "2301 Unsqueeze_2103" [label="[]", style=dashed]; +"2301 Unsqueeze_2103" -> "2302 Concat_2104" [label="[1]", style=dashed]; +"2302 Concat_2104" -> "2303 Reshape_2105" [label="[2]", style=dashed]; +"2303 Reshape_2105" -> "2306 Gemm_2106" [label="[]", style=solid]; +"2304 QuantizeLinear_h.9.mlp.c_fc.weight_1" -> "2305 DequantizeLinear_h.9.mlp.c_fc.weight_1" [label="[768, 3072]", style=dashed]; +"2305 DequantizeLinear_h.9.mlp.c_fc.weight_1" -> "2306 Gemm_2106" [label="[768, 3072]", style=solid]; +"2306 Gemm_2106" -> "2310 Reshape_2110" [label="[]", style=solid]; +"2307 Unsqueeze_2107" -> "2309 Concat_2109" [label="[1]", style=dashed]; +"2308 Unsqueeze_2108" -> "2309 Concat_2109" [label="[1]", style=dashed]; +"2309 Concat_2109" -> "2310 Reshape_2110" [label="[3]", style=dashed]; +"2310 Reshape_2110" -> "2312 Mul_2112" [label="[]", style=solid]; +"2310 Reshape_2110" -> "2314 Pow_2114" [label="[]", style=solid]; +"2310 Reshape_2110" -> "2317 Add_2117" [label="[]", style=solid]; +"2311 Constant_2111" -> "2312 Mul_2112" [label="[]", style=solid]; +"2312 Mul_2112" -> "2323 Mul_2123" [label="[]", style=solid]; +"2313 Constant_2113" -> "2314 Pow_2114" [label="[]", style=solid]; +"2314 Pow_2114" -> "2316 Mul_2116" [label="[]", style=solid]; +"2315 Constant_2115" -> "2316 Mul_2116" [label="[]", style=solid]; +"2316 Mul_2116" -> "2317 Add_2117" [label="[]", style=solid]; +"2317 Add_2117" -> "2319 Mul_2119" [label="[]", style=solid]; +"2318 Constant_2118" -> "2319 Mul_2119" [label="[]", style=solid]; +"2319 Mul_2119" -> "2320 Tanh_2120" [label="[]", style=solid]; +"2320 Tanh_2120" -> "2322 Add_2122" [label="[]", style=solid]; +"2321 Constant_2121" -> "2322 Add_2122" [label="[]", style=solid]; +"2322 Add_2122" -> "2323 Mul_2123" [label="[]", style=solid]; +"2323 Mul_2123" -> "2324 QuantizeLinear_2588_1" [label="[]", style=solid]; +"2324 QuantizeLinear_2588_1" -> "2325 DequantizeLinear_2588_1" [label="[]", style=dashed]; +"2325 DequantizeLinear_2588_1" -> "2326 Shape_2124" [label="[]", style=solid]; +"2325 DequantizeLinear_2588_1" -> "2329 Shape_2127" [label="[]", style=solid]; +"2325 DequantizeLinear_2588_1" -> "2332 Shape_2130" 
[label="[]", style=solid]; +"2325 DequantizeLinear_2588_1" -> "2337 Reshape_2135" [label="[]", style=solid]; +"2326 Shape_2124" -> "2328 Gather_2126" [label="[-1]", style=dashed]; +"2327 Constant_2125" -> "2328 Gather_2126" [label="[]", style=dashed]; +"2328 Gather_2126" -> "2341 Unsqueeze_2137" [label="[]", style=dashed]; +"2329 Shape_2127" -> "2331 Gather_2129" [label="[-1]", style=dashed]; +"2330 Constant_2128" -> "2331 Gather_2129" [label="[]", style=dashed]; +"2331 Gather_2129" -> "2342 Unsqueeze_2138" [label="[]", style=dashed]; +"2332 Shape_2130" -> "2334 Gather_2132" [label="[-1]", style=dashed]; +"2333 Constant_2131" -> "2334 Gather_2132" [label="[]", style=dashed]; +"2334 Gather_2132" -> "2335 Unsqueeze_2133" [label="[]", style=dashed]; +"2335 Unsqueeze_2133" -> "2336 Concat_2134" [label="[1]", style=dashed]; +"2336 Concat_2134" -> "2337 Reshape_2135" [label="[2]", style=dashed]; +"2337 Reshape_2135" -> "2340 Gemm_2136" [label="[]", style=solid]; +"2338 QuantizeLinear_h.9.mlp.c_proj.weight_1" -> "2339 DequantizeLinear_h.9.mlp.c_proj.weight_1" [label="[3072, 768]", style=dashed]; +"2339 DequantizeLinear_h.9.mlp.c_proj.weight_1" -> "2340 Gemm_2136" [label="[3072, 768]", style=solid]; +"2340 Gemm_2136" -> "2344 Reshape_2140" [label="[]", style=solid]; +"2341 Unsqueeze_2137" -> "2343 Concat_2139" [label="[1]", style=dashed]; +"2342 Unsqueeze_2138" -> "2343 Concat_2139" [label="[1]", style=dashed]; +"2343 Concat_2139" -> "2344 Reshape_2140" [label="[3]", style=dashed]; +"2344 Reshape_2140" -> "2345 Add_2141" [label="[]", style=solid]; +"2345 Add_2141" -> "2346 ReduceMean_2142" [label="[]", style=solid]; +"2345 Add_2141" -> "2347 Sub_2143" [label="[]", style=solid]; +"2345 Add_2141" -> "2509 Add_2293" [label="[]", style=solid]; +"2346 ReduceMean_2142" -> "2347 Sub_2143" [label="[]", style=solid]; +"2347 Sub_2143" -> "2349 Pow_2145" [label="[]", style=solid]; +"2347 Sub_2143" -> "2354 Div_2150" [label="[]", style=solid]; +"2348 Constant_2144" -> "2349 Pow_2145" [label="[]", style=solid]; +"2349 Pow_2145" -> "2350 ReduceMean_2146" [label="[]", style=solid]; +"2350 ReduceMean_2146" -> "2352 Add_2148" [label="[]", style=solid]; +"2351 Constant_2147" -> "2352 Add_2148" [label="[]", style=solid]; +"2352 Add_2148" -> "2353 Sqrt_2149" [label="[]", style=solid]; +"2353 Sqrt_2149" -> "2354 Div_2150" [label="[]", style=solid]; +"2354 Div_2150" -> "2355 Mul_2151" [label="[]", style=solid]; +"2355 Mul_2151" -> "2356 Add_2152" [label="[]", style=solid]; +"2356 Add_2152" -> "2357 QuantizeLinear_2621_1" [label="[]", style=solid]; +"2357 QuantizeLinear_2621_1" -> "2358 DequantizeLinear_2621_1" [label="[]", style=dashed]; +"2358 DequantizeLinear_2621_1" -> "2359 Shape_2153" [label="[]", style=solid]; +"2358 DequantizeLinear_2621_1" -> "2362 Shape_2156" [label="[]", style=solid]; +"2358 DequantizeLinear_2621_1" -> "2365 Shape_2159" [label="[]", style=solid]; +"2358 DequantizeLinear_2621_1" -> "2370 Reshape_2164" [label="[]", style=solid]; +"2359 Shape_2153" -> "2361 Gather_2155" [label="[-1]", style=dashed]; +"2360 Constant_2154" -> "2361 Gather_2155" [label="[]", style=dashed]; +"2361 Gather_2155" -> "2374 Unsqueeze_2166" [label="[]", style=dashed]; +"2362 Shape_2156" -> "2364 Gather_2158" [label="[-1]", style=dashed]; +"2363 Constant_2157" -> "2364 Gather_2158" [label="[]", style=dashed]; +"2364 Gather_2158" -> "2375 Unsqueeze_2167" [label="[]", style=dashed]; +"2365 Shape_2159" -> "2367 Gather_2161" [label="[-1]", style=dashed]; +"2366 Constant_2160" -> "2367 Gather_2161" [label="[]", style=dashed]; 
+"2367 Gather_2161" -> "2368 Unsqueeze_2162" [label="[]", style=dashed]; +"2368 Unsqueeze_2162" -> "2369 Concat_2163" [label="[1]", style=dashed]; +"2369 Concat_2163" -> "2370 Reshape_2164" [label="[2]", style=dashed]; +"2370 Reshape_2164" -> "2373 Gemm_2165" [label="[]", style=solid]; +"2371 QuantizeLinear_h.10.attn.c_attn.weight_1" -> "2372 DequantizeLinear_h.10.attn.c_attn.weight_1" [label="[768, 2304]", style=dashed]; +"2372 DequantizeLinear_h.10.attn.c_attn.weight_1" -> "2373 Gemm_2165" [label="[768, 2304]", style=solid]; +"2373 Gemm_2165" -> "2377 Reshape_2169" [label="[]", style=solid]; +"2374 Unsqueeze_2166" -> "2376 Concat_2168" [label="[1]", style=dashed]; +"2375 Unsqueeze_2167" -> "2376 Concat_2168" [label="[1]", style=dashed]; +"2376 Concat_2168" -> "2377 Reshape_2169" [label="[3]", style=dashed]; +"2377 Reshape_2169" -> "2378 Split_2170" [label="[]", style=solid]; +"2378 Split_2170" -> "2379 QuantizeLinear_query.21_1" [label="[]", style=solid]; +"2378 Split_2170" -> "2381 Shape_2171" [label="[]", style=solid]; +"2378 Split_2170" -> "2384 Shape_2174" [label="[]", style=solid]; +"2378 Split_2170" -> "2387 Shape_2177" [label="[]", style=solid]; +"2378 Split_2170" -> "2400 Shape_2190" [label="[]", style=solid]; +"2378 Split_2170" -> "2403 Shape_2193" [label="[]", style=solid]; +"2378 Split_2170" -> "2406 Shape_2196" [label="[]", style=solid]; +"2378 Split_2170" -> "2417 Reshape_2207" [label="[]", style=solid]; +"2378 Split_2170" -> "2421 Shape_2209" [label="[]", style=solid]; +"2378 Split_2170" -> "2424 Shape_2212" [label="[]", style=solid]; +"2378 Split_2170" -> "2427 Shape_2215" [label="[]", style=solid]; +"2378 Split_2170" -> "2438 Reshape_2226" [label="[]", style=solid]; +"2379 QuantizeLinear_query.21_1" -> "2380 DequantizeLinear_query.21_1" [label="[]", style=dashed]; +"2380 DequantizeLinear_query.21_1" -> "2398 Reshape_2188" [label="[]", style=solid]; +"2381 Shape_2171" -> "2383 Gather_2173" [label="[-1]", style=dashed]; +"2382 Constant_2172" -> "2383 Gather_2173" [label="[]", style=dashed]; +"2383 Gather_2173" -> "2394 Unsqueeze_2184" [label="[]", style=dashed]; +"2384 Shape_2174" -> "2386 Gather_2176" [label="[-1]", style=dashed]; +"2385 Constant_2175" -> "2386 Gather_2176" [label="[]", style=dashed]; +"2386 Gather_2176" -> "2395 Unsqueeze_2185" [label="[]", style=dashed]; +"2387 Shape_2177" -> "2389 Gather_2179" [label="[-1]", style=dashed]; +"2388 Constant_2178" -> "2389 Gather_2179" [label="[]", style=dashed]; +"2389 Gather_2179" -> "2391 Div_2181" [label="[]", style=dashed]; +"2390 Constant_2180" -> "2391 Div_2181" [label="[]", style=dashed]; +"2391 Div_2181" -> "2392 Cast_2182" [label="[]", style=dashed]; +"2392 Cast_2182" -> "2393 Cast_2183" [label="[]", style=dashed]; +"2393 Cast_2183" -> "2396 Unsqueeze_2186" [label="[]", style=dashed]; +"2394 Unsqueeze_2184" -> "2397 Concat_2187" [label="[1]", style=dashed]; +"2395 Unsqueeze_2185" -> "2397 Concat_2187" [label="[1]", style=dashed]; +"2396 Unsqueeze_2186" -> "2397 Concat_2187" [label="[1]", style=dashed]; +"2397 Concat_2187" -> "2398 Reshape_2188" [label="[4]", style=dashed]; +"2398 Reshape_2188" -> "2399 Transpose_2189" [label="[]", style=solid]; +"2399 Transpose_2189" -> "2444 MatMul_2232" [label="[]", style=solid]; +"2400 Shape_2190" -> "2402 Gather_2192" [label="[-1]", style=dashed]; +"2401 Constant_2191" -> "2402 Gather_2192" [label="[]", style=dashed]; +"2402 Gather_2192" -> "2413 Unsqueeze_2203" [label="[]", style=dashed]; +"2403 Shape_2193" -> "2405 Gather_2195" [label="[-1]", style=dashed]; +"2404 
Constant_2194" -> "2405 Gather_2195" [label="[]", style=dashed]; +"2405 Gather_2195" -> "2414 Unsqueeze_2204" [label="[]", style=dashed]; +"2406 Shape_2196" -> "2408 Gather_2198" [label="[-1]", style=dashed]; +"2407 Constant_2197" -> "2408 Gather_2198" [label="[]", style=dashed]; +"2408 Gather_2198" -> "2410 Div_2200" [label="[]", style=dashed]; +"2409 Constant_2199" -> "2410 Div_2200" [label="[]", style=dashed]; +"2410 Div_2200" -> "2411 Cast_2201" [label="[]", style=dashed]; +"2411 Cast_2201" -> "2412 Cast_2202" [label="[]", style=dashed]; +"2412 Cast_2202" -> "2415 Unsqueeze_2205" [label="[]", style=dashed]; +"2413 Unsqueeze_2203" -> "2416 Concat_2206" [label="[1]", style=dashed]; +"2414 Unsqueeze_2204" -> "2416 Concat_2206" [label="[1]", style=dashed]; +"2415 Unsqueeze_2205" -> "2416 Concat_2206" [label="[1]", style=dashed]; +"2416 Concat_2206" -> "2417 Reshape_2207" [label="[4]", style=dashed]; +"2417 Reshape_2207" -> "2418 QuantizeLinear_2686_1" [label="[]", style=solid]; +"2417 Reshape_2207" -> "2440 Transpose_2228" [label="[]", style=solid]; +"2418 QuantizeLinear_2686_1" -> "2419 DequantizeLinear_2686_1" [label="[]", style=dashed]; +"2419 DequantizeLinear_2686_1" -> "2420 Transpose_2208" [label="[]", style=solid]; +"2420 Transpose_2208" -> "2444 MatMul_2232" [label="[]", style=solid]; +"2421 Shape_2209" -> "2423 Gather_2211" [label="[-1]", style=dashed]; +"2422 Constant_2210" -> "2423 Gather_2211" [label="[]", style=dashed]; +"2423 Gather_2211" -> "2434 Unsqueeze_2222" [label="[]", style=dashed]; +"2424 Shape_2212" -> "2426 Gather_2214" [label="[-1]", style=dashed]; +"2425 Constant_2213" -> "2426 Gather_2214" [label="[]", style=dashed]; +"2426 Gather_2214" -> "2435 Unsqueeze_2223" [label="[]", style=dashed]; +"2427 Shape_2215" -> "2429 Gather_2217" [label="[-1]", style=dashed]; +"2428 Constant_2216" -> "2429 Gather_2217" [label="[]", style=dashed]; +"2429 Gather_2217" -> "2431 Div_2219" [label="[]", style=dashed]; +"2430 Constant_2218" -> "2431 Div_2219" [label="[]", style=dashed]; +"2431 Div_2219" -> "2432 Cast_2220" [label="[]", style=dashed]; +"2432 Cast_2220" -> "2433 Cast_2221" [label="[]", style=dashed]; +"2433 Cast_2221" -> "2436 Unsqueeze_2224" [label="[]", style=dashed]; +"2434 Unsqueeze_2222" -> "2437 Concat_2225" [label="[1]", style=dashed]; +"2435 Unsqueeze_2223" -> "2437 Concat_2225" [label="[1]", style=dashed]; +"2436 Unsqueeze_2224" -> "2437 Concat_2225" [label="[1]", style=dashed]; +"2437 Concat_2225" -> "2438 Reshape_2226" [label="[4]", style=dashed]; +"2438 Reshape_2226" -> "2439 Transpose_2227" [label="[]", style=solid]; +"2439 Transpose_2227" -> "2442 Unsqueeze_2230" [label="[]", style=solid]; +"2439 Transpose_2227" -> "2468 MatMul_2256" [label="[]", style=solid]; +"2440 Transpose_2228" -> "2441 Unsqueeze_2229" [label="[]", style=solid]; +"2441 Unsqueeze_2229" -> "2443 Concat_2231" [label="[]", style=solid]; +"2442 Unsqueeze_2230" -> "2443 Concat_2231" [label="[]", style=solid]; +"2443 Concat_2231" -> "2837 nncf_model_output_11" [label="[2, 1, 12, 8, 64]", style=solid]; +"2444 MatMul_2232" -> "2446 Div_2234" [label="[]", style=solid]; +"2445 Constant_2233" -> "2446 Div_2234" [label="[]", style=solid]; +"2446 Div_2234" -> "2447 Shape_2235" [label="[]", style=solid]; +"2446 Div_2234" -> "2450 Shape_2238" [label="[]", style=solid]; +"2446 Div_2234" -> "2461 Mul_2249" [label="[]", style=solid]; +"2447 Shape_2235" -> "2449 Gather_2237" [label="[-1]", style=dashed]; +"2448 Constant_2236" -> "2449 Gather_2237" [label="[]", style=dashed]; +"2449 Gather_2237" -> "2453 
Sub_2241" [label="[]", style=dashed]; +"2450 Shape_2238" -> "2452 Gather_2240" [label="[-1]", style=dashed]; +"2451 Constant_2239" -> "2452 Gather_2240" [label="[]", style=dashed]; +"2452 Gather_2240" -> "2453 Sub_2241" [label="[]", style=dashed]; +"2452 Gather_2240" -> "2455 Unsqueeze_2243" [label="[]", style=dashed]; +"2452 Gather_2240" -> "2458 Unsqueeze_2246" [label="[]", style=dashed]; +"2453 Sub_2241" -> "2454 Unsqueeze_2242" [label="[]", style=dashed]; +"2454 Unsqueeze_2242" -> "2457 Slice_2245" [label="[1]", style=dashed]; +"2455 Unsqueeze_2243" -> "2457 Slice_2245" [label="[1]", style=dashed]; +"2456 Constant_2244" -> "2457 Slice_2245" [label="[1]", style=dashed]; +"2457 Slice_2245" -> "2460 Slice_2248" [label="[]", style=solid]; +"2458 Unsqueeze_2246" -> "2460 Slice_2248" [label="[1]", style=dashed]; +"2459 Constant_2247" -> "2460 Slice_2248" [label="[1]", style=dashed]; +"2460 Slice_2248" -> "2461 Mul_2249" [label="[]", style=solid]; +"2460 Slice_2248" -> "2463 Sub_2251" [label="[]", style=solid]; +"2461 Mul_2249" -> "2466 Sub_2254" [label="[]", style=solid]; +"2462 Constant_2250" -> "2463 Sub_2251" [label="[]", style=solid]; +"2463 Sub_2251" -> "2465 Mul_2253" [label="[]", style=solid]; +"2464 Constant_2252" -> "2465 Mul_2253" [label="[]", style=solid]; +"2465 Mul_2253" -> "2466 Sub_2254" [label="[]", style=solid]; +"2466 Sub_2254" -> "2467 Softmax_2255" [label="[]", style=solid]; +"2467 Softmax_2255" -> "2468 MatMul_2256" [label="[]", style=solid]; +"2468 MatMul_2256" -> "2469 QuantizeLinear_2743_1" [label="[]", style=solid]; +"2469 QuantizeLinear_2743_1" -> "2470 DequantizeLinear_2743_1" [label="[]", style=dashed]; +"2470 DequantizeLinear_2743_1" -> "2471 Transpose_2257" [label="[]", style=solid]; +"2471 Transpose_2257" -> "2472 Shape_2258" [label="[]", style=solid]; +"2471 Transpose_2257" -> "2475 Shape_2261" [label="[]", style=solid]; +"2471 Transpose_2257" -> "2478 Shape_2264" [label="[]", style=solid]; +"2471 Transpose_2257" -> "2481 Shape_2267" [label="[]", style=solid]; +"2471 Transpose_2257" -> "2489 Reshape_2275" [label="[]", style=solid]; +"2472 Shape_2258" -> "2474 Gather_2260" [label="[-1]", style=dashed]; +"2473 Constant_2259" -> "2474 Gather_2260" [label="[]", style=dashed]; +"2474 Gather_2260" -> "2485 Unsqueeze_2271" [label="[]", style=dashed]; +"2475 Shape_2261" -> "2477 Gather_2263" [label="[-1]", style=dashed]; +"2476 Constant_2262" -> "2477 Gather_2263" [label="[]", style=dashed]; +"2477 Gather_2263" -> "2486 Unsqueeze_2272" [label="[]", style=dashed]; +"2478 Shape_2264" -> "2480 Gather_2266" [label="[-1]", style=dashed]; +"2479 Constant_2265" -> "2480 Gather_2266" [label="[]", style=dashed]; +"2480 Gather_2266" -> "2484 Mul_2270" [label="[]", style=dashed]; +"2481 Shape_2267" -> "2483 Gather_2269" [label="[-1]", style=dashed]; +"2482 Constant_2268" -> "2483 Gather_2269" [label="[]", style=dashed]; +"2483 Gather_2269" -> "2484 Mul_2270" [label="[]", style=dashed]; +"2484 Mul_2270" -> "2487 Unsqueeze_2273" [label="[]", style=dashed]; +"2485 Unsqueeze_2271" -> "2488 Concat_2274" [label="[1]", style=dashed]; +"2486 Unsqueeze_2272" -> "2488 Concat_2274" [label="[1]", style=dashed]; +"2487 Unsqueeze_2273" -> "2488 Concat_2274" [label="[1]", style=dashed]; +"2488 Concat_2274" -> "2489 Reshape_2275" [label="[3]", style=dashed]; +"2489 Reshape_2275" -> "2490 Shape_2276" [label="[]", style=solid]; +"2489 Reshape_2275" -> "2493 Shape_2279" [label="[]", style=solid]; +"2489 Reshape_2275" -> "2496 Shape_2282" [label="[]", style=solid]; +"2489 Reshape_2275" -> "2501 
Reshape_2287" [label="[]", style=solid]; +"2490 Shape_2276" -> "2492 Gather_2278" [label="[-1]", style=dashed]; +"2491 Constant_2277" -> "2492 Gather_2278" [label="[]", style=dashed]; +"2492 Gather_2278" -> "2505 Unsqueeze_2289" [label="[]", style=dashed]; +"2493 Shape_2279" -> "2495 Gather_2281" [label="[-1]", style=dashed]; +"2494 Constant_2280" -> "2495 Gather_2281" [label="[]", style=dashed]; +"2495 Gather_2281" -> "2506 Unsqueeze_2290" [label="[]", style=dashed]; +"2496 Shape_2282" -> "2498 Gather_2284" [label="[-1]", style=dashed]; +"2497 Constant_2283" -> "2498 Gather_2284" [label="[]", style=dashed]; +"2498 Gather_2284" -> "2499 Unsqueeze_2285" [label="[]", style=dashed]; +"2499 Unsqueeze_2285" -> "2500 Concat_2286" [label="[1]", style=dashed]; +"2500 Concat_2286" -> "2501 Reshape_2287" [label="[2]", style=dashed]; +"2501 Reshape_2287" -> "2504 Gemm_2288" [label="[]", style=solid]; +"2502 QuantizeLinear_h.10.attn.c_proj.weight_1" -> "2503 DequantizeLinear_h.10.attn.c_proj.weight_1" [label="[768, 768]", style=dashed]; +"2503 DequantizeLinear_h.10.attn.c_proj.weight_1" -> "2504 Gemm_2288" [label="[768, 768]", style=solid]; +"2504 Gemm_2288" -> "2508 Reshape_2292" [label="[]", style=solid]; +"2505 Unsqueeze_2289" -> "2507 Concat_2291" [label="[1]", style=dashed]; +"2506 Unsqueeze_2290" -> "2507 Concat_2291" [label="[1]", style=dashed]; +"2507 Concat_2291" -> "2508 Reshape_2292" [label="[3]", style=dashed]; +"2508 Reshape_2292" -> "2509 Add_2293" [label="[]", style=solid]; +"2509 Add_2293" -> "2510 ReduceMean_2294" [label="[]", style=solid]; +"2509 Add_2293" -> "2511 Sub_2295" [label="[]", style=solid]; +"2509 Add_2293" -> "2576 Add_2352" [label="[]", style=solid]; +"2510 ReduceMean_2294" -> "2511 Sub_2295" [label="[]", style=solid]; +"2511 Sub_2295" -> "2513 Pow_2297" [label="[]", style=solid]; +"2511 Sub_2295" -> "2518 Div_2302" [label="[]", style=solid]; +"2512 Constant_2296" -> "2513 Pow_2297" [label="[]", style=solid]; +"2513 Pow_2297" -> "2514 ReduceMean_2298" [label="[]", style=solid]; +"2514 ReduceMean_2298" -> "2516 Add_2300" [label="[]", style=solid]; +"2515 Constant_2299" -> "2516 Add_2300" [label="[]", style=solid]; +"2516 Add_2300" -> "2517 Sqrt_2301" [label="[]", style=solid]; +"2517 Sqrt_2301" -> "2518 Div_2302" [label="[]", style=solid]; +"2518 Div_2302" -> "2519 Mul_2303" [label="[]", style=solid]; +"2519 Mul_2303" -> "2520 Add_2304" [label="[]", style=solid]; +"2520 Add_2304" -> "2521 QuantizeLinear_2795_1" [label="[]", style=solid]; +"2521 QuantizeLinear_2795_1" -> "2522 DequantizeLinear_2795_1" [label="[]", style=dashed]; +"2522 DequantizeLinear_2795_1" -> "2523 Shape_2305" [label="[]", style=solid]; +"2522 DequantizeLinear_2795_1" -> "2526 Shape_2308" [label="[]", style=solid]; +"2522 DequantizeLinear_2795_1" -> "2529 Shape_2311" [label="[]", style=solid]; +"2522 DequantizeLinear_2795_1" -> "2534 Reshape_2316" [label="[]", style=solid]; +"2523 Shape_2305" -> "2525 Gather_2307" [label="[-1]", style=dashed]; +"2524 Constant_2306" -> "2525 Gather_2307" [label="[]", style=dashed]; +"2525 Gather_2307" -> "2538 Unsqueeze_2318" [label="[]", style=dashed]; +"2526 Shape_2308" -> "2528 Gather_2310" [label="[-1]", style=dashed]; +"2527 Constant_2309" -> "2528 Gather_2310" [label="[]", style=dashed]; +"2528 Gather_2310" -> "2539 Unsqueeze_2319" [label="[]", style=dashed]; +"2529 Shape_2311" -> "2531 Gather_2313" [label="[-1]", style=dashed]; +"2530 Constant_2312" -> "2531 Gather_2313" [label="[]", style=dashed]; +"2531 Gather_2313" -> "2532 Unsqueeze_2314" [label="[]", 
style=dashed]; +"2532 Unsqueeze_2314" -> "2533 Concat_2315" [label="[1]", style=dashed]; +"2533 Concat_2315" -> "2534 Reshape_2316" [label="[2]", style=dashed]; +"2534 Reshape_2316" -> "2537 Gemm_2317" [label="[]", style=solid]; +"2535 QuantizeLinear_h.10.mlp.c_fc.weight_1" -> "2536 DequantizeLinear_h.10.mlp.c_fc.weight_1" [label="[768, 3072]", style=dashed]; +"2536 DequantizeLinear_h.10.mlp.c_fc.weight_1" -> "2537 Gemm_2317" [label="[768, 3072]", style=solid]; +"2537 Gemm_2317" -> "2541 Reshape_2321" [label="[]", style=solid]; +"2538 Unsqueeze_2318" -> "2540 Concat_2320" [label="[1]", style=dashed]; +"2539 Unsqueeze_2319" -> "2540 Concat_2320" [label="[1]", style=dashed]; +"2540 Concat_2320" -> "2541 Reshape_2321" [label="[3]", style=dashed]; +"2541 Reshape_2321" -> "2543 Mul_2323" [label="[]", style=solid]; +"2541 Reshape_2321" -> "2545 Pow_2325" [label="[]", style=solid]; +"2541 Reshape_2321" -> "2548 Add_2328" [label="[]", style=solid]; +"2542 Constant_2322" -> "2543 Mul_2323" [label="[]", style=solid]; +"2543 Mul_2323" -> "2554 Mul_2334" [label="[]", style=solid]; +"2544 Constant_2324" -> "2545 Pow_2325" [label="[]", style=solid]; +"2545 Pow_2325" -> "2547 Mul_2327" [label="[]", style=solid]; +"2546 Constant_2326" -> "2547 Mul_2327" [label="[]", style=solid]; +"2547 Mul_2327" -> "2548 Add_2328" [label="[]", style=solid]; +"2548 Add_2328" -> "2550 Mul_2330" [label="[]", style=solid]; +"2549 Constant_2329" -> "2550 Mul_2330" [label="[]", style=solid]; +"2550 Mul_2330" -> "2551 Tanh_2331" [label="[]", style=solid]; +"2551 Tanh_2331" -> "2553 Add_2333" [label="[]", style=solid]; +"2552 Constant_2332" -> "2553 Add_2333" [label="[]", style=solid]; +"2553 Add_2333" -> "2554 Mul_2334" [label="[]", style=solid]; +"2554 Mul_2334" -> "2555 QuantizeLinear_2829_1" [label="[]", style=solid]; +"2555 QuantizeLinear_2829_1" -> "2556 DequantizeLinear_2829_1" [label="[]", style=dashed]; +"2556 DequantizeLinear_2829_1" -> "2557 Shape_2335" [label="[]", style=solid]; +"2556 DequantizeLinear_2829_1" -> "2560 Shape_2338" [label="[]", style=solid]; +"2556 DequantizeLinear_2829_1" -> "2563 Shape_2341" [label="[]", style=solid]; +"2556 DequantizeLinear_2829_1" -> "2568 Reshape_2346" [label="[]", style=solid]; +"2557 Shape_2335" -> "2559 Gather_2337" [label="[-1]", style=dashed]; +"2558 Constant_2336" -> "2559 Gather_2337" [label="[]", style=dashed]; +"2559 Gather_2337" -> "2572 Unsqueeze_2348" [label="[]", style=dashed]; +"2560 Shape_2338" -> "2562 Gather_2340" [label="[-1]", style=dashed]; +"2561 Constant_2339" -> "2562 Gather_2340" [label="[]", style=dashed]; +"2562 Gather_2340" -> "2573 Unsqueeze_2349" [label="[]", style=dashed]; +"2563 Shape_2341" -> "2565 Gather_2343" [label="[-1]", style=dashed]; +"2564 Constant_2342" -> "2565 Gather_2343" [label="[]", style=dashed]; +"2565 Gather_2343" -> "2566 Unsqueeze_2344" [label="[]", style=dashed]; +"2566 Unsqueeze_2344" -> "2567 Concat_2345" [label="[1]", style=dashed]; +"2567 Concat_2345" -> "2568 Reshape_2346" [label="[2]", style=dashed]; +"2568 Reshape_2346" -> "2571 Gemm_2347" [label="[]", style=solid]; +"2569 QuantizeLinear_h.10.mlp.c_proj.weight_1" -> "2570 DequantizeLinear_h.10.mlp.c_proj.weight_1" [label="[3072, 768]", style=dashed]; +"2570 DequantizeLinear_h.10.mlp.c_proj.weight_1" -> "2571 Gemm_2347" [label="[3072, 768]", style=solid]; +"2571 Gemm_2347" -> "2575 Reshape_2351" [label="[]", style=solid]; +"2572 Unsqueeze_2348" -> "2574 Concat_2350" [label="[1]", style=dashed]; +"2573 Unsqueeze_2349" -> "2574 Concat_2350" [label="[1]", style=dashed]; 
+"2574 Concat_2350" -> "2575 Reshape_2351" [label="[3]", style=dashed]; +"2575 Reshape_2351" -> "2576 Add_2352" [label="[]", style=solid]; +"2576 Add_2352" -> "2577 ReduceMean_2353" [label="[]", style=solid]; +"2576 Add_2352" -> "2578 Sub_2354" [label="[]", style=solid]; +"2576 Add_2352" -> "2740 Add_2504" [label="[]", style=solid]; +"2577 ReduceMean_2353" -> "2578 Sub_2354" [label="[]", style=solid]; +"2578 Sub_2354" -> "2580 Pow_2356" [label="[]", style=solid]; +"2578 Sub_2354" -> "2585 Div_2361" [label="[]", style=solid]; +"2579 Constant_2355" -> "2580 Pow_2356" [label="[]", style=solid]; +"2580 Pow_2356" -> "2581 ReduceMean_2357" [label="[]", style=solid]; +"2581 ReduceMean_2357" -> "2583 Add_2359" [label="[]", style=solid]; +"2582 Constant_2358" -> "2583 Add_2359" [label="[]", style=solid]; +"2583 Add_2359" -> "2584 Sqrt_2360" [label="[]", style=solid]; +"2584 Sqrt_2360" -> "2585 Div_2361" [label="[]", style=solid]; +"2585 Div_2361" -> "2586 Mul_2362" [label="[]", style=solid]; +"2586 Mul_2362" -> "2587 Add_2363" [label="[]", style=solid]; +"2587 Add_2363" -> "2588 QuantizeLinear_2862_1" [label="[]", style=solid]; +"2588 QuantizeLinear_2862_1" -> "2589 DequantizeLinear_2862_1" [label="[]", style=dashed]; +"2589 DequantizeLinear_2862_1" -> "2590 Shape_2364" [label="[]", style=solid]; +"2589 DequantizeLinear_2862_1" -> "2593 Shape_2367" [label="[]", style=solid]; +"2589 DequantizeLinear_2862_1" -> "2596 Shape_2370" [label="[]", style=solid]; +"2589 DequantizeLinear_2862_1" -> "2601 Reshape_2375" [label="[]", style=solid]; +"2590 Shape_2364" -> "2592 Gather_2366" [label="[-1]", style=dashed]; +"2591 Constant_2365" -> "2592 Gather_2366" [label="[]", style=dashed]; +"2592 Gather_2366" -> "2605 Unsqueeze_2377" [label="[]", style=dashed]; +"2593 Shape_2367" -> "2595 Gather_2369" [label="[-1]", style=dashed]; +"2594 Constant_2368" -> "2595 Gather_2369" [label="[]", style=dashed]; +"2595 Gather_2369" -> "2606 Unsqueeze_2378" [label="[]", style=dashed]; +"2596 Shape_2370" -> "2598 Gather_2372" [label="[-1]", style=dashed]; +"2597 Constant_2371" -> "2598 Gather_2372" [label="[]", style=dashed]; +"2598 Gather_2372" -> "2599 Unsqueeze_2373" [label="[]", style=dashed]; +"2599 Unsqueeze_2373" -> "2600 Concat_2374" [label="[1]", style=dashed]; +"2600 Concat_2374" -> "2601 Reshape_2375" [label="[2]", style=dashed]; +"2601 Reshape_2375" -> "2604 Gemm_2376" [label="[]", style=solid]; +"2602 QuantizeLinear_h.11.attn.c_attn.weight_1" -> "2603 DequantizeLinear_h.11.attn.c_attn.weight_1" [label="[768, 2304]", style=dashed]; +"2603 DequantizeLinear_h.11.attn.c_attn.weight_1" -> "2604 Gemm_2376" [label="[768, 2304]", style=solid]; +"2604 Gemm_2376" -> "2608 Reshape_2380" [label="[]", style=solid]; +"2605 Unsqueeze_2377" -> "2607 Concat_2379" [label="[1]", style=dashed]; +"2606 Unsqueeze_2378" -> "2607 Concat_2379" [label="[1]", style=dashed]; +"2607 Concat_2379" -> "2608 Reshape_2380" [label="[3]", style=dashed]; +"2608 Reshape_2380" -> "2609 Split_2381" [label="[]", style=solid]; +"2609 Split_2381" -> "2610 QuantizeLinear_query.23_1" [label="[]", style=solid]; +"2609 Split_2381" -> "2612 Shape_2382" [label="[]", style=solid]; +"2609 Split_2381" -> "2615 Shape_2385" [label="[]", style=solid]; +"2609 Split_2381" -> "2618 Shape_2388" [label="[]", style=solid]; +"2609 Split_2381" -> "2631 Shape_2401" [label="[]", style=solid]; +"2609 Split_2381" -> "2634 Shape_2404" [label="[]", style=solid]; +"2609 Split_2381" -> "2637 Shape_2407" [label="[]", style=solid]; +"2609 Split_2381" -> "2648 Reshape_2418" [label="[]", 
style=solid]; +"2609 Split_2381" -> "2652 Shape_2420" [label="[]", style=solid]; +"2609 Split_2381" -> "2655 Shape_2423" [label="[]", style=solid]; +"2609 Split_2381" -> "2658 Shape_2426" [label="[]", style=solid]; +"2609 Split_2381" -> "2669 Reshape_2437" [label="[]", style=solid]; +"2610 QuantizeLinear_query.23_1" -> "2611 DequantizeLinear_query.23_1" [label="[]", style=dashed]; +"2611 DequantizeLinear_query.23_1" -> "2629 Reshape_2399" [label="[]", style=solid]; +"2612 Shape_2382" -> "2614 Gather_2384" [label="[-1]", style=dashed]; +"2613 Constant_2383" -> "2614 Gather_2384" [label="[]", style=dashed]; +"2614 Gather_2384" -> "2625 Unsqueeze_2395" [label="[]", style=dashed]; +"2615 Shape_2385" -> "2617 Gather_2387" [label="[-1]", style=dashed]; +"2616 Constant_2386" -> "2617 Gather_2387" [label="[]", style=dashed]; +"2617 Gather_2387" -> "2626 Unsqueeze_2396" [label="[]", style=dashed]; +"2618 Shape_2388" -> "2620 Gather_2390" [label="[-1]", style=dashed]; +"2619 Constant_2389" -> "2620 Gather_2390" [label="[]", style=dashed]; +"2620 Gather_2390" -> "2622 Div_2392" [label="[]", style=dashed]; +"2621 Constant_2391" -> "2622 Div_2392" [label="[]", style=dashed]; +"2622 Div_2392" -> "2623 Cast_2393" [label="[]", style=dashed]; +"2623 Cast_2393" -> "2624 Cast_2394" [label="[]", style=dashed]; +"2624 Cast_2394" -> "2627 Unsqueeze_2397" [label="[]", style=dashed]; +"2625 Unsqueeze_2395" -> "2628 Concat_2398" [label="[1]", style=dashed]; +"2626 Unsqueeze_2396" -> "2628 Concat_2398" [label="[1]", style=dashed]; +"2627 Unsqueeze_2397" -> "2628 Concat_2398" [label="[1]", style=dashed]; +"2628 Concat_2398" -> "2629 Reshape_2399" [label="[4]", style=dashed]; +"2629 Reshape_2399" -> "2630 Transpose_2400" [label="[]", style=solid]; +"2630 Transpose_2400" -> "2675 MatMul_2443" [label="[]", style=solid]; +"2631 Shape_2401" -> "2633 Gather_2403" [label="[-1]", style=dashed]; +"2632 Constant_2402" -> "2633 Gather_2403" [label="[]", style=dashed]; +"2633 Gather_2403" -> "2644 Unsqueeze_2414" [label="[]", style=dashed]; +"2634 Shape_2404" -> "2636 Gather_2406" [label="[-1]", style=dashed]; +"2635 Constant_2405" -> "2636 Gather_2406" [label="[]", style=dashed]; +"2636 Gather_2406" -> "2645 Unsqueeze_2415" [label="[]", style=dashed]; +"2637 Shape_2407" -> "2639 Gather_2409" [label="[-1]", style=dashed]; +"2638 Constant_2408" -> "2639 Gather_2409" [label="[]", style=dashed]; +"2639 Gather_2409" -> "2641 Div_2411" [label="[]", style=dashed]; +"2640 Constant_2410" -> "2641 Div_2411" [label="[]", style=dashed]; +"2641 Div_2411" -> "2642 Cast_2412" [label="[]", style=dashed]; +"2642 Cast_2412" -> "2643 Cast_2413" [label="[]", style=dashed]; +"2643 Cast_2413" -> "2646 Unsqueeze_2416" [label="[]", style=dashed]; +"2644 Unsqueeze_2414" -> "2647 Concat_2417" [label="[1]", style=dashed]; +"2645 Unsqueeze_2415" -> "2647 Concat_2417" [label="[1]", style=dashed]; +"2646 Unsqueeze_2416" -> "2647 Concat_2417" [label="[1]", style=dashed]; +"2647 Concat_2417" -> "2648 Reshape_2418" [label="[4]", style=dashed]; +"2648 Reshape_2418" -> "2649 QuantizeLinear_2927_1" [label="[]", style=solid]; +"2648 Reshape_2418" -> "2671 Transpose_2439" [label="[]", style=solid]; +"2649 QuantizeLinear_2927_1" -> "2650 DequantizeLinear_2927_1" [label="[]", style=dashed]; +"2650 DequantizeLinear_2927_1" -> "2651 Transpose_2419" [label="[]", style=solid]; +"2651 Transpose_2419" -> "2675 MatMul_2443" [label="[]", style=solid]; +"2652 Shape_2420" -> "2654 Gather_2422" [label="[-1]", style=dashed]; +"2653 Constant_2421" -> "2654 Gather_2422" 
[label="[]", style=dashed]; +"2654 Gather_2422" -> "2665 Unsqueeze_2433" [label="[]", style=dashed]; +"2655 Shape_2423" -> "2657 Gather_2425" [label="[-1]", style=dashed]; +"2656 Constant_2424" -> "2657 Gather_2425" [label="[]", style=dashed]; +"2657 Gather_2425" -> "2666 Unsqueeze_2434" [label="[]", style=dashed]; +"2658 Shape_2426" -> "2660 Gather_2428" [label="[-1]", style=dashed]; +"2659 Constant_2427" -> "2660 Gather_2428" [label="[]", style=dashed]; +"2660 Gather_2428" -> "2662 Div_2430" [label="[]", style=dashed]; +"2661 Constant_2429" -> "2662 Div_2430" [label="[]", style=dashed]; +"2662 Div_2430" -> "2663 Cast_2431" [label="[]", style=dashed]; +"2663 Cast_2431" -> "2664 Cast_2432" [label="[]", style=dashed]; +"2664 Cast_2432" -> "2667 Unsqueeze_2435" [label="[]", style=dashed]; +"2665 Unsqueeze_2433" -> "2668 Concat_2436" [label="[1]", style=dashed]; +"2666 Unsqueeze_2434" -> "2668 Concat_2436" [label="[1]", style=dashed]; +"2667 Unsqueeze_2435" -> "2668 Concat_2436" [label="[1]", style=dashed]; +"2668 Concat_2436" -> "2669 Reshape_2437" [label="[4]", style=dashed]; +"2669 Reshape_2437" -> "2670 Transpose_2438" [label="[]", style=solid]; +"2670 Transpose_2438" -> "2673 Unsqueeze_2441" [label="[]", style=solid]; +"2670 Transpose_2438" -> "2699 MatMul_2467" [label="[]", style=solid]; +"2671 Transpose_2439" -> "2672 Unsqueeze_2440" [label="[]", style=solid]; +"2672 Unsqueeze_2440" -> "2674 Concat_2442" [label="[]", style=solid]; +"2673 Unsqueeze_2441" -> "2674 Concat_2442" [label="[]", style=solid]; +"2674 Concat_2442" -> "2838 nncf_model_output_12" [label="[2, 1, 12, 8, 64]", style=solid]; +"2675 MatMul_2443" -> "2677 Div_2445" [label="[]", style=solid]; +"2676 Constant_2444" -> "2677 Div_2445" [label="[]", style=solid]; +"2677 Div_2445" -> "2678 Shape_2446" [label="[]", style=solid]; +"2677 Div_2445" -> "2681 Shape_2449" [label="[]", style=solid]; +"2677 Div_2445" -> "2692 Mul_2460" [label="[]", style=solid]; +"2678 Shape_2446" -> "2680 Gather_2448" [label="[-1]", style=dashed]; +"2679 Constant_2447" -> "2680 Gather_2448" [label="[]", style=dashed]; +"2680 Gather_2448" -> "2684 Sub_2452" [label="[]", style=dashed]; +"2681 Shape_2449" -> "2683 Gather_2451" [label="[-1]", style=dashed]; +"2682 Constant_2450" -> "2683 Gather_2451" [label="[]", style=dashed]; +"2683 Gather_2451" -> "2684 Sub_2452" [label="[]", style=dashed]; +"2683 Gather_2451" -> "2686 Unsqueeze_2454" [label="[]", style=dashed]; +"2683 Gather_2451" -> "2689 Unsqueeze_2457" [label="[]", style=dashed]; +"2684 Sub_2452" -> "2685 Unsqueeze_2453" [label="[]", style=dashed]; +"2685 Unsqueeze_2453" -> "2688 Slice_2456" [label="[1]", style=dashed]; +"2686 Unsqueeze_2454" -> "2688 Slice_2456" [label="[1]", style=dashed]; +"2687 Constant_2455" -> "2688 Slice_2456" [label="[1]", style=dashed]; +"2688 Slice_2456" -> "2691 Slice_2459" [label="[]", style=solid]; +"2689 Unsqueeze_2457" -> "2691 Slice_2459" [label="[1]", style=dashed]; +"2690 Constant_2458" -> "2691 Slice_2459" [label="[1]", style=dashed]; +"2691 Slice_2459" -> "2692 Mul_2460" [label="[]", style=solid]; +"2691 Slice_2459" -> "2694 Sub_2462" [label="[]", style=solid]; +"2692 Mul_2460" -> "2697 Sub_2465" [label="[]", style=solid]; +"2693 Constant_2461" -> "2694 Sub_2462" [label="[]", style=solid]; +"2694 Sub_2462" -> "2696 Mul_2464" [label="[]", style=solid]; +"2695 Constant_2463" -> "2696 Mul_2464" [label="[]", style=solid]; +"2696 Mul_2464" -> "2697 Sub_2465" [label="[]", style=solid]; +"2697 Sub_2465" -> "2698 Softmax_2466" [label="[]", style=solid]; +"2698 
Softmax_2466" -> "2699 MatMul_2467" [label="[]", style=solid]; +"2699 MatMul_2467" -> "2700 QuantizeLinear_2984_1" [label="[]", style=solid]; +"2700 QuantizeLinear_2984_1" -> "2701 DequantizeLinear_2984_1" [label="[]", style=dashed]; +"2701 DequantizeLinear_2984_1" -> "2702 Transpose_2468" [label="[]", style=solid]; +"2702 Transpose_2468" -> "2703 Shape_2469" [label="[]", style=solid]; +"2702 Transpose_2468" -> "2706 Shape_2472" [label="[]", style=solid]; +"2702 Transpose_2468" -> "2709 Shape_2475" [label="[]", style=solid]; +"2702 Transpose_2468" -> "2712 Shape_2478" [label="[]", style=solid]; +"2702 Transpose_2468" -> "2720 Reshape_2486" [label="[]", style=solid]; +"2703 Shape_2469" -> "2705 Gather_2471" [label="[-1]", style=dashed]; +"2704 Constant_2470" -> "2705 Gather_2471" [label="[]", style=dashed]; +"2705 Gather_2471" -> "2716 Unsqueeze_2482" [label="[]", style=dashed]; +"2706 Shape_2472" -> "2708 Gather_2474" [label="[-1]", style=dashed]; +"2707 Constant_2473" -> "2708 Gather_2474" [label="[]", style=dashed]; +"2708 Gather_2474" -> "2717 Unsqueeze_2483" [label="[]", style=dashed]; +"2709 Shape_2475" -> "2711 Gather_2477" [label="[-1]", style=dashed]; +"2710 Constant_2476" -> "2711 Gather_2477" [label="[]", style=dashed]; +"2711 Gather_2477" -> "2715 Mul_2481" [label="[]", style=dashed]; +"2712 Shape_2478" -> "2714 Gather_2480" [label="[-1]", style=dashed]; +"2713 Constant_2479" -> "2714 Gather_2480" [label="[]", style=dashed]; +"2714 Gather_2480" -> "2715 Mul_2481" [label="[]", style=dashed]; +"2715 Mul_2481" -> "2718 Unsqueeze_2484" [label="[]", style=dashed]; +"2716 Unsqueeze_2482" -> "2719 Concat_2485" [label="[1]", style=dashed]; +"2717 Unsqueeze_2483" -> "2719 Concat_2485" [label="[1]", style=dashed]; +"2718 Unsqueeze_2484" -> "2719 Concat_2485" [label="[1]", style=dashed]; +"2719 Concat_2485" -> "2720 Reshape_2486" [label="[3]", style=dashed]; +"2720 Reshape_2486" -> "2721 Shape_2487" [label="[]", style=solid]; +"2720 Reshape_2486" -> "2724 Shape_2490" [label="[]", style=solid]; +"2720 Reshape_2486" -> "2727 Shape_2493" [label="[]", style=solid]; +"2720 Reshape_2486" -> "2732 Reshape_2498" [label="[]", style=solid]; +"2721 Shape_2487" -> "2723 Gather_2489" [label="[-1]", style=dashed]; +"2722 Constant_2488" -> "2723 Gather_2489" [label="[]", style=dashed]; +"2723 Gather_2489" -> "2736 Unsqueeze_2500" [label="[]", style=dashed]; +"2724 Shape_2490" -> "2726 Gather_2492" [label="[-1]", style=dashed]; +"2725 Constant_2491" -> "2726 Gather_2492" [label="[]", style=dashed]; +"2726 Gather_2492" -> "2737 Unsqueeze_2501" [label="[]", style=dashed]; +"2727 Shape_2493" -> "2729 Gather_2495" [label="[-1]", style=dashed]; +"2728 Constant_2494" -> "2729 Gather_2495" [label="[]", style=dashed]; +"2729 Gather_2495" -> "2730 Unsqueeze_2496" [label="[]", style=dashed]; +"2730 Unsqueeze_2496" -> "2731 Concat_2497" [label="[1]", style=dashed]; +"2731 Concat_2497" -> "2732 Reshape_2498" [label="[2]", style=dashed]; +"2732 Reshape_2498" -> "2735 Gemm_2499" [label="[]", style=solid]; +"2733 QuantizeLinear_h.11.attn.c_proj.weight_1" -> "2734 DequantizeLinear_h.11.attn.c_proj.weight_1" [label="[768, 768]", style=dashed]; +"2734 DequantizeLinear_h.11.attn.c_proj.weight_1" -> "2735 Gemm_2499" [label="[768, 768]", style=solid]; +"2735 Gemm_2499" -> "2739 Reshape_2503" [label="[]", style=solid]; +"2736 Unsqueeze_2500" -> "2738 Concat_2502" [label="[1]", style=dashed]; +"2737 Unsqueeze_2501" -> "2738 Concat_2502" [label="[1]", style=dashed]; +"2738 Concat_2502" -> "2739 Reshape_2503" [label="[3]", 
style=dashed]; +"2739 Reshape_2503" -> "2740 Add_2504" [label="[]", style=solid]; +"2740 Add_2504" -> "2741 ReduceMean_2505" [label="[]", style=solid]; +"2740 Add_2504" -> "2742 Sub_2506" [label="[]", style=solid]; +"2740 Add_2504" -> "2807 Add_2563" [label="[]", style=solid]; +"2741 ReduceMean_2505" -> "2742 Sub_2506" [label="[]", style=solid]; +"2742 Sub_2506" -> "2744 Pow_2508" [label="[]", style=solid]; +"2742 Sub_2506" -> "2749 Div_2513" [label="[]", style=solid]; +"2743 Constant_2507" -> "2744 Pow_2508" [label="[]", style=solid]; +"2744 Pow_2508" -> "2745 ReduceMean_2509" [label="[]", style=solid]; +"2745 ReduceMean_2509" -> "2747 Add_2511" [label="[]", style=solid]; +"2746 Constant_2510" -> "2747 Add_2511" [label="[]", style=solid]; +"2747 Add_2511" -> "2748 Sqrt_2512" [label="[]", style=solid]; +"2748 Sqrt_2512" -> "2749 Div_2513" [label="[]", style=solid]; +"2749 Div_2513" -> "2750 Mul_2514" [label="[]", style=solid]; +"2750 Mul_2514" -> "2751 Add_2515" [label="[]", style=solid]; +"2751 Add_2515" -> "2752 QuantizeLinear_3036_1" [label="[]", style=solid]; +"2752 QuantizeLinear_3036_1" -> "2753 DequantizeLinear_3036_1" [label="[]", style=dashed]; +"2753 DequantizeLinear_3036_1" -> "2754 Shape_2516" [label="[]", style=solid]; +"2753 DequantizeLinear_3036_1" -> "2757 Shape_2519" [label="[]", style=solid]; +"2753 DequantizeLinear_3036_1" -> "2760 Shape_2522" [label="[]", style=solid]; +"2753 DequantizeLinear_3036_1" -> "2765 Reshape_2527" [label="[]", style=solid]; +"2754 Shape_2516" -> "2756 Gather_2518" [label="[-1]", style=dashed]; +"2755 Constant_2517" -> "2756 Gather_2518" [label="[]", style=dashed]; +"2756 Gather_2518" -> "2769 Unsqueeze_2529" [label="[]", style=dashed]; +"2757 Shape_2519" -> "2759 Gather_2521" [label="[-1]", style=dashed]; +"2758 Constant_2520" -> "2759 Gather_2521" [label="[]", style=dashed]; +"2759 Gather_2521" -> "2770 Unsqueeze_2530" [label="[]", style=dashed]; +"2760 Shape_2522" -> "2762 Gather_2524" [label="[-1]", style=dashed]; +"2761 Constant_2523" -> "2762 Gather_2524" [label="[]", style=dashed]; +"2762 Gather_2524" -> "2763 Unsqueeze_2525" [label="[]", style=dashed]; +"2763 Unsqueeze_2525" -> "2764 Concat_2526" [label="[1]", style=dashed]; +"2764 Concat_2526" -> "2765 Reshape_2527" [label="[2]", style=dashed]; +"2765 Reshape_2527" -> "2768 Gemm_2528" [label="[]", style=solid]; +"2766 QuantizeLinear_h.11.mlp.c_fc.weight_1" -> "2767 DequantizeLinear_h.11.mlp.c_fc.weight_1" [label="[768, 3072]", style=dashed]; +"2767 DequantizeLinear_h.11.mlp.c_fc.weight_1" -> "2768 Gemm_2528" [label="[768, 3072]", style=solid]; +"2768 Gemm_2528" -> "2772 Reshape_2532" [label="[]", style=solid]; +"2769 Unsqueeze_2529" -> "2771 Concat_2531" [label="[1]", style=dashed]; +"2770 Unsqueeze_2530" -> "2771 Concat_2531" [label="[1]", style=dashed]; +"2771 Concat_2531" -> "2772 Reshape_2532" [label="[3]", style=dashed]; +"2772 Reshape_2532" -> "2774 Mul_2534" [label="[]", style=solid]; +"2772 Reshape_2532" -> "2776 Pow_2536" [label="[]", style=solid]; +"2772 Reshape_2532" -> "2779 Add_2539" [label="[]", style=solid]; +"2773 Constant_2533" -> "2774 Mul_2534" [label="[]", style=solid]; +"2774 Mul_2534" -> "2785 Mul_2545" [label="[]", style=solid]; +"2775 Constant_2535" -> "2776 Pow_2536" [label="[]", style=solid]; +"2776 Pow_2536" -> "2778 Mul_2538" [label="[]", style=solid]; +"2777 Constant_2537" -> "2778 Mul_2538" [label="[]", style=solid]; +"2778 Mul_2538" -> "2779 Add_2539" [label="[]", style=solid]; +"2779 Add_2539" -> "2781 Mul_2541" [label="[]", style=solid]; +"2780 
Constant_2540" -> "2781 Mul_2541" [label="[]", style=solid]; +"2781 Mul_2541" -> "2782 Tanh_2542" [label="[]", style=solid]; +"2782 Tanh_2542" -> "2784 Add_2544" [label="[]", style=solid]; +"2783 Constant_2543" -> "2784 Add_2544" [label="[]", style=solid]; +"2784 Add_2544" -> "2785 Mul_2545" [label="[]", style=solid]; +"2785 Mul_2545" -> "2786 QuantizeLinear_3070_1" [label="[]", style=solid]; +"2786 QuantizeLinear_3070_1" -> "2787 DequantizeLinear_3070_1" [label="[]", style=dashed]; +"2787 DequantizeLinear_3070_1" -> "2788 Shape_2546" [label="[]", style=solid]; +"2787 DequantizeLinear_3070_1" -> "2791 Shape_2549" [label="[]", style=solid]; +"2787 DequantizeLinear_3070_1" -> "2794 Shape_2552" [label="[]", style=solid]; +"2787 DequantizeLinear_3070_1" -> "2799 Reshape_2557" [label="[]", style=solid]; +"2788 Shape_2546" -> "2790 Gather_2548" [label="[-1]", style=dashed]; +"2789 Constant_2547" -> "2790 Gather_2548" [label="[]", style=dashed]; +"2790 Gather_2548" -> "2803 Unsqueeze_2559" [label="[]", style=dashed]; +"2791 Shape_2549" -> "2793 Gather_2551" [label="[-1]", style=dashed]; +"2792 Constant_2550" -> "2793 Gather_2551" [label="[]", style=dashed]; +"2793 Gather_2551" -> "2804 Unsqueeze_2560" [label="[]", style=dashed]; +"2794 Shape_2552" -> "2796 Gather_2554" [label="[-1]", style=dashed]; +"2795 Constant_2553" -> "2796 Gather_2554" [label="[]", style=dashed]; +"2796 Gather_2554" -> "2797 Unsqueeze_2555" [label="[]", style=dashed]; +"2797 Unsqueeze_2555" -> "2798 Concat_2556" [label="[1]", style=dashed]; +"2798 Concat_2556" -> "2799 Reshape_2557" [label="[2]", style=dashed]; +"2799 Reshape_2557" -> "2802 Gemm_2558" [label="[]", style=solid]; +"2800 QuantizeLinear_h.11.mlp.c_proj.weight_1" -> "2801 DequantizeLinear_h.11.mlp.c_proj.weight_1" [label="[3072, 768]", style=dashed]; +"2801 DequantizeLinear_h.11.mlp.c_proj.weight_1" -> "2802 Gemm_2558" [label="[3072, 768]", style=solid]; +"2802 Gemm_2558" -> "2806 Reshape_2562" [label="[]", style=solid]; +"2803 Unsqueeze_2559" -> "2805 Concat_2561" [label="[1]", style=dashed]; +"2804 Unsqueeze_2560" -> "2805 Concat_2561" [label="[1]", style=dashed]; +"2805 Concat_2561" -> "2806 Reshape_2562" [label="[3]", style=dashed]; +"2806 Reshape_2562" -> "2807 Add_2563" [label="[]", style=solid]; +"2807 Add_2563" -> "2808 ReduceMean_2564" [label="[]", style=solid]; +"2807 Add_2563" -> "2809 Sub_2565" [label="[]", style=solid]; +"2808 ReduceMean_2564" -> "2809 Sub_2565" [label="[]", style=solid]; +"2809 Sub_2565" -> "2811 Pow_2567" [label="[]", style=solid]; +"2809 Sub_2565" -> "2816 Div_2572" [label="[]", style=solid]; +"2810 Constant_2566" -> "2811 Pow_2567" [label="[]", style=solid]; +"2811 Pow_2567" -> "2812 ReduceMean_2568" [label="[]", style=solid]; +"2812 ReduceMean_2568" -> "2814 Add_2570" [label="[]", style=solid]; +"2813 Constant_2569" -> "2814 Add_2570" [label="[]", style=solid]; +"2814 Add_2570" -> "2815 Sqrt_2571" [label="[]", style=solid]; +"2815 Sqrt_2571" -> "2816 Div_2572" [label="[]", style=solid]; +"2816 Div_2572" -> "2817 Mul_2573" [label="[]", style=solid]; +"2817 Mul_2573" -> "2818 Add_2574" [label="[]", style=solid]; +"2818 Add_2574" -> "2824 Reshape_2580" [label="[]", style=solid]; +"2819 Unsqueeze_2575" -> "2823 Concat_2579" [label="[1]", style=dashed]; +"2820 Unsqueeze_2576" -> "2823 Concat_2579" [label="[1]", style=dashed]; +"2821 Unsqueeze_2577" -> "2823 Concat_2579" [label="[1]", style=dashed]; +"2822 Unsqueeze_2578" -> "2823 Concat_2579" [label="[1]", style=dashed]; +"2823 Concat_2579" -> "2824 Reshape_2580" [label="[4]", 
style=dashed]; +"2824 Reshape_2580" -> "2826 nncf_model_output_0" [label="[1, 1, 8, 768]", style=solid]; +"2825 nncf_model_input_0" -> "0 Shape_0" [label="[-1, -1, -1]", style=dashed]; +"2825 nncf_model_input_0" -> "3 Shape_3" [label="[-1, -1, -1]", style=dashed]; +"2825 nncf_model_input_0" -> "6 Shape_6" [label="[-1, -1, -1]", style=dashed]; +"2825 nncf_model_input_0" -> "11 Reshape_11" [label="[-1, -1, -1]", style=dashed]; +} diff --git a/tests/onnx/data/reference_graphs/quantization/resnet50_cpu_spr.dot b/tests/onnx/data/reference_graphs/quantization/resnet50_cpu_spr.dot new file mode 100644 index 00000000000..fac0d8c33e1 --- /dev/null +++ b/tests/onnx/data/reference_graphs/quantization/resnet50_cpu_spr.dot @@ -0,0 +1,713 @@ +strict digraph { +"0 QuantizeLinear_input.1_1" [id=0, type=QuantizeLinear]; +"1 DequantizeLinear_input.1_1" [id=1, type=DequantizeLinear]; +"2 QuantizeLinear_onnx^^Conv_497_1" [id=2, label="2 QuantizeLinear_onnx::Conv_497_1", type=QuantizeLinear]; +"3 DequantizeLinear_onnx^^Conv_497_1" [id=3, label="3 DequantizeLinear_onnx::Conv_497_1", type=DequantizeLinear]; +"4 /conv1/Conv" [id=4, type=Conv]; +"5 /relu/Relu" [id=5, type=Relu]; +"6 QuantizeLinear_/relu/Relu_output_0_1" [id=6, type=QuantizeLinear]; +"7 DequantizeLinear_/relu/Relu_output_0_1" [id=7, type=DequantizeLinear]; +"8 /maxpool/MaxPool" [id=8, type=MaxPool]; +"9 QuantizeLinear_onnx^^Conv_500_1" [id=9, label="9 QuantizeLinear_onnx::Conv_500_1", type=QuantizeLinear]; +"10 DequantizeLinear_onnx^^Conv_500_1" [id=10, label="10 DequantizeLinear_onnx::Conv_500_1", type=DequantizeLinear]; +"11 /layer1/layer1.0/conv1/Conv" [id=11, type=Conv]; +"12 /layer1/layer1.0/relu/Relu" [id=12, type=Relu]; +"13 QuantizeLinear_/layer1/layer1.0/relu/Relu_output_0_1" [id=13, type=QuantizeLinear]; +"14 DequantizeLinear_/layer1/layer1.0/relu/Relu_output_0_1" [id=14, type=DequantizeLinear]; +"15 QuantizeLinear_onnx^^Conv_503_1" [id=15, label="15 QuantizeLinear_onnx::Conv_503_1", type=QuantizeLinear]; +"16 DequantizeLinear_onnx^^Conv_503_1" [id=16, label="16 DequantizeLinear_onnx::Conv_503_1", type=DequantizeLinear]; +"17 /layer1/layer1.0/conv2/Conv" [id=17, type=Conv]; +"18 /layer1/layer1.0/relu_1/Relu" [id=18, type=Relu]; +"19 QuantizeLinear_/layer1/layer1.0/relu_1/Relu_output_0_1" [id=19, type=QuantizeLinear]; +"20 DequantizeLinear_/layer1/layer1.0/relu_1/Relu_output_0_1" [id=20, type=DequantizeLinear]; +"21 /layer1/layer1.0/conv3/Conv" [id=21, type=Conv]; +"22 QuantizeLinear_onnx^^Conv_509_1" [id=22, label="22 QuantizeLinear_onnx::Conv_509_1", type=QuantizeLinear]; +"23 DequantizeLinear_onnx^^Conv_509_1" [id=23, label="23 DequantizeLinear_onnx::Conv_509_1", type=DequantizeLinear]; +"24 /layer1/layer1.0/downsample/downsample.0/Conv" [id=24, type=Conv]; +"25 QuantizeLinear_/layer1/layer1.0/conv3/Conv_output_0_1" [id=25, type=QuantizeLinear]; +"26 DequantizeLinear_/layer1/layer1.0/conv3/Conv_output_0_1" [id=26, type=DequantizeLinear]; +"27 QuantizeLinear_/layer1/layer1.0/downsample/downsample.0/Conv_output_0_1" [id=27, type=QuantizeLinear]; +"28 DequantizeLinear_/layer1/layer1.0/downsample/downsample.0/Conv_output_0_1" [id=28, type=DequantizeLinear]; +"29 /layer1/layer1.0/Add" [id=29, type=Add]; +"30 /layer1/layer1.0/relu_2/Relu" [id=30, type=Relu]; +"31 QuantizeLinear_/layer1/layer1.0/relu_2/Relu_output_0_1" [id=31, type=QuantizeLinear]; +"32 DequantizeLinear_/layer1/layer1.0/relu_2/Relu_output_0_1" [id=32, type=DequantizeLinear]; +"33 QuantizeLinear_onnx^^Conv_512_1" [id=33, label="33 QuantizeLinear_onnx::Conv_512_1", 
type=QuantizeLinear]; +"34 DequantizeLinear_onnx^^Conv_512_1" [id=34, label="34 DequantizeLinear_onnx::Conv_512_1", type=DequantizeLinear]; +"35 /layer1/layer1.1/conv1/Conv" [id=35, type=Conv]; +"36 /layer1/layer1.1/relu/Relu" [id=36, type=Relu]; +"37 QuantizeLinear_/layer1/layer1.1/relu/Relu_output_0_1" [id=37, type=QuantizeLinear]; +"38 DequantizeLinear_/layer1/layer1.1/relu/Relu_output_0_1" [id=38, type=DequantizeLinear]; +"39 QuantizeLinear_onnx^^Conv_515_1" [id=39, label="39 QuantizeLinear_onnx::Conv_515_1", type=QuantizeLinear]; +"40 DequantizeLinear_onnx^^Conv_515_1" [id=40, label="40 DequantizeLinear_onnx::Conv_515_1", type=DequantizeLinear]; +"41 /layer1/layer1.1/conv2/Conv" [id=41, type=Conv]; +"42 /layer1/layer1.1/relu_1/Relu" [id=42, type=Relu]; +"43 QuantizeLinear_/layer1/layer1.1/relu_1/Relu_output_0_1" [id=43, type=QuantizeLinear]; +"44 DequantizeLinear_/layer1/layer1.1/relu_1/Relu_output_0_1" [id=44, type=DequantizeLinear]; +"45 /layer1/layer1.1/conv3/Conv" [id=45, type=Conv]; +"46 QuantizeLinear_/layer1/layer1.1/conv3/Conv_output_0_1" [id=46, type=QuantizeLinear]; +"47 DequantizeLinear_/layer1/layer1.1/conv3/Conv_output_0_1" [id=47, type=DequantizeLinear]; +"48 /layer1/layer1.1/Add" [id=48, type=Add]; +"49 /layer1/layer1.1/relu_2/Relu" [id=49, type=Relu]; +"50 QuantizeLinear_/layer1/layer1.1/relu_2/Relu_output_0_1" [id=50, type=QuantizeLinear]; +"51 DequantizeLinear_/layer1/layer1.1/relu_2/Relu_output_0_1" [id=51, type=DequantizeLinear]; +"52 QuantizeLinear_onnx^^Conv_521_1" [id=52, label="52 QuantizeLinear_onnx::Conv_521_1", type=QuantizeLinear]; +"53 DequantizeLinear_onnx^^Conv_521_1" [id=53, label="53 DequantizeLinear_onnx::Conv_521_1", type=DequantizeLinear]; +"54 /layer1/layer1.2/conv1/Conv" [id=54, type=Conv]; +"55 /layer1/layer1.2/relu/Relu" [id=55, type=Relu]; +"56 QuantizeLinear_/layer1/layer1.2/relu/Relu_output_0_1" [id=56, type=QuantizeLinear]; +"57 DequantizeLinear_/layer1/layer1.2/relu/Relu_output_0_1" [id=57, type=DequantizeLinear]; +"58 QuantizeLinear_onnx^^Conv_524_1" [id=58, label="58 QuantizeLinear_onnx::Conv_524_1", type=QuantizeLinear]; +"59 DequantizeLinear_onnx^^Conv_524_1" [id=59, label="59 DequantizeLinear_onnx::Conv_524_1", type=DequantizeLinear]; +"60 /layer1/layer1.2/conv2/Conv" [id=60, type=Conv]; +"61 /layer1/layer1.2/relu_1/Relu" [id=61, type=Relu]; +"62 QuantizeLinear_/layer1/layer1.2/relu_1/Relu_output_0_1" [id=62, type=QuantizeLinear]; +"63 DequantizeLinear_/layer1/layer1.2/relu_1/Relu_output_0_1" [id=63, type=DequantizeLinear]; +"64 /layer1/layer1.2/conv3/Conv" [id=64, type=Conv]; +"65 QuantizeLinear_/layer1/layer1.2/conv3/Conv_output_0_1" [id=65, type=QuantizeLinear]; +"66 DequantizeLinear_/layer1/layer1.2/conv3/Conv_output_0_1" [id=66, type=DequantizeLinear]; +"67 /layer1/layer1.2/Add" [id=67, type=Add]; +"68 /layer1/layer1.2/relu_2/Relu" [id=68, type=Relu]; +"69 QuantizeLinear_/layer1/layer1.2/relu_2/Relu_output_0_1" [id=69, type=QuantizeLinear]; +"70 DequantizeLinear_/layer1/layer1.2/relu_2/Relu_output_0_1" [id=70, type=DequantizeLinear]; +"71 QuantizeLinear_onnx^^Conv_530_1" [id=71, label="71 QuantizeLinear_onnx::Conv_530_1", type=QuantizeLinear]; +"72 DequantizeLinear_onnx^^Conv_530_1" [id=72, label="72 DequantizeLinear_onnx::Conv_530_1", type=DequantizeLinear]; +"73 /layer2/layer2.0/conv1/Conv" [id=73, type=Conv]; +"74 /layer2/layer2.0/relu/Relu" [id=74, type=Relu]; +"75 QuantizeLinear_/layer2/layer2.0/relu/Relu_output_0_1" [id=75, type=QuantizeLinear]; +"76 DequantizeLinear_/layer2/layer2.0/relu/Relu_output_0_1" [id=76, 
type=DequantizeLinear]; +"77 QuantizeLinear_onnx^^Conv_533_1" [id=77, label="77 QuantizeLinear_onnx::Conv_533_1", type=QuantizeLinear]; +"78 DequantizeLinear_onnx^^Conv_533_1" [id=78, label="78 DequantizeLinear_onnx::Conv_533_1", type=DequantizeLinear]; +"79 /layer2/layer2.0/conv2/Conv" [id=79, type=Conv]; +"80 /layer2/layer2.0/relu_1/Relu" [id=80, type=Relu]; +"81 QuantizeLinear_/layer2/layer2.0/relu_1/Relu_output_0_1" [id=81, type=QuantizeLinear]; +"82 DequantizeLinear_/layer2/layer2.0/relu_1/Relu_output_0_1" [id=82, type=DequantizeLinear]; +"83 QuantizeLinear_onnx^^Conv_536_1" [id=83, label="83 QuantizeLinear_onnx::Conv_536_1", type=QuantizeLinear]; +"84 DequantizeLinear_onnx^^Conv_536_1" [id=84, label="84 DequantizeLinear_onnx::Conv_536_1", type=DequantizeLinear]; +"85 /layer2/layer2.0/conv3/Conv" [id=85, type=Conv]; +"86 QuantizeLinear_onnx^^Conv_539_1" [id=86, label="86 QuantizeLinear_onnx::Conv_539_1", type=QuantizeLinear]; +"87 DequantizeLinear_onnx^^Conv_539_1" [id=87, label="87 DequantizeLinear_onnx::Conv_539_1", type=DequantizeLinear]; +"88 /layer2/layer2.0/downsample/downsample.0/Conv" [id=88, type=Conv]; +"89 QuantizeLinear_/layer2/layer2.0/conv3/Conv_output_0_1" [id=89, type=QuantizeLinear]; +"90 DequantizeLinear_/layer2/layer2.0/conv3/Conv_output_0_1" [id=90, type=DequantizeLinear]; +"91 QuantizeLinear_/layer2/layer2.0/downsample/downsample.0/Conv_output_0_1" [id=91, type=QuantizeLinear]; +"92 DequantizeLinear_/layer2/layer2.0/downsample/downsample.0/Conv_output_0_1" [id=92, type=DequantizeLinear]; +"93 /layer2/layer2.0/Add" [id=93, type=Add]; +"94 /layer2/layer2.0/relu_2/Relu" [id=94, type=Relu]; +"95 QuantizeLinear_/layer2/layer2.0/relu_2/Relu_output_0_1" [id=95, type=QuantizeLinear]; +"96 DequantizeLinear_/layer2/layer2.0/relu_2/Relu_output_0_1" [id=96, type=DequantizeLinear]; +"97 QuantizeLinear_onnx^^Conv_542_1" [id=97, label="97 QuantizeLinear_onnx::Conv_542_1", type=QuantizeLinear]; +"98 DequantizeLinear_onnx^^Conv_542_1" [id=98, label="98 DequantizeLinear_onnx::Conv_542_1", type=DequantizeLinear]; +"99 /layer2/layer2.1/conv1/Conv" [id=99, type=Conv]; +"100 /layer2/layer2.1/relu/Relu" [id=100, type=Relu]; +"101 QuantizeLinear_/layer2/layer2.1/relu/Relu_output_0_1" [id=101, type=QuantizeLinear]; +"102 DequantizeLinear_/layer2/layer2.1/relu/Relu_output_0_1" [id=102, type=DequantizeLinear]; +"103 QuantizeLinear_onnx^^Conv_545_1" [id=103, label="103 QuantizeLinear_onnx::Conv_545_1", type=QuantizeLinear]; +"104 DequantizeLinear_onnx^^Conv_545_1" [id=104, label="104 DequantizeLinear_onnx::Conv_545_1", type=DequantizeLinear]; +"105 /layer2/layer2.1/conv2/Conv" [id=105, type=Conv]; +"106 /layer2/layer2.1/relu_1/Relu" [id=106, type=Relu]; +"107 QuantizeLinear_/layer2/layer2.1/relu_1/Relu_output_0_1" [id=107, type=QuantizeLinear]; +"108 DequantizeLinear_/layer2/layer2.1/relu_1/Relu_output_0_1" [id=108, type=DequantizeLinear]; +"109 /layer2/layer2.1/conv3/Conv" [id=109, type=Conv]; +"110 QuantizeLinear_/layer2/layer2.1/conv3/Conv_output_0_1" [id=110, type=QuantizeLinear]; +"111 DequantizeLinear_/layer2/layer2.1/conv3/Conv_output_0_1" [id=111, type=DequantizeLinear]; +"112 /layer2/layer2.1/Add" [id=112, type=Add]; +"113 /layer2/layer2.1/relu_2/Relu" [id=113, type=Relu]; +"114 QuantizeLinear_/layer2/layer2.1/relu_2/Relu_output_0_1" [id=114, type=QuantizeLinear]; +"115 DequantizeLinear_/layer2/layer2.1/relu_2/Relu_output_0_1" [id=115, type=DequantizeLinear]; +"116 QuantizeLinear_onnx^^Conv_551_1" [id=116, label="116 QuantizeLinear_onnx::Conv_551_1", type=QuantizeLinear]; +"117 
DequantizeLinear_onnx^^Conv_551_1" [id=117, label="117 DequantizeLinear_onnx::Conv_551_1", type=DequantizeLinear]; +"118 /layer2/layer2.2/conv1/Conv" [id=118, type=Conv]; +"119 /layer2/layer2.2/relu/Relu" [id=119, type=Relu]; +"120 QuantizeLinear_/layer2/layer2.2/relu/Relu_output_0_1" [id=120, type=QuantizeLinear]; +"121 DequantizeLinear_/layer2/layer2.2/relu/Relu_output_0_1" [id=121, type=DequantizeLinear]; +"122 QuantizeLinear_onnx^^Conv_554_1" [id=122, label="122 QuantizeLinear_onnx::Conv_554_1", type=QuantizeLinear]; +"123 DequantizeLinear_onnx^^Conv_554_1" [id=123, label="123 DequantizeLinear_onnx::Conv_554_1", type=DequantizeLinear]; +"124 /layer2/layer2.2/conv2/Conv" [id=124, type=Conv]; +"125 /layer2/layer2.2/relu_1/Relu" [id=125, type=Relu]; +"126 QuantizeLinear_/layer2/layer2.2/relu_1/Relu_output_0_1" [id=126, type=QuantizeLinear]; +"127 DequantizeLinear_/layer2/layer2.2/relu_1/Relu_output_0_1" [id=127, type=DequantizeLinear]; +"128 /layer2/layer2.2/conv3/Conv" [id=128, type=Conv]; +"129 QuantizeLinear_/layer2/layer2.2/conv3/Conv_output_0_1" [id=129, type=QuantizeLinear]; +"130 DequantizeLinear_/layer2/layer2.2/conv3/Conv_output_0_1" [id=130, type=DequantizeLinear]; +"131 /layer2/layer2.2/Add" [id=131, type=Add]; +"132 /layer2/layer2.2/relu_2/Relu" [id=132, type=Relu]; +"133 QuantizeLinear_/layer2/layer2.2/relu_2/Relu_output_0_1" [id=133, type=QuantizeLinear]; +"134 DequantizeLinear_/layer2/layer2.2/relu_2/Relu_output_0_1" [id=134, type=DequantizeLinear]; +"135 QuantizeLinear_onnx^^Conv_560_1" [id=135, label="135 QuantizeLinear_onnx::Conv_560_1", type=QuantizeLinear]; +"136 DequantizeLinear_onnx^^Conv_560_1" [id=136, label="136 DequantizeLinear_onnx::Conv_560_1", type=DequantizeLinear]; +"137 /layer2/layer2.3/conv1/Conv" [id=137, type=Conv]; +"138 /layer2/layer2.3/relu/Relu" [id=138, type=Relu]; +"139 QuantizeLinear_/layer2/layer2.3/relu/Relu_output_0_1" [id=139, type=QuantizeLinear]; +"140 DequantizeLinear_/layer2/layer2.3/relu/Relu_output_0_1" [id=140, type=DequantizeLinear]; +"141 QuantizeLinear_onnx^^Conv_563_1" [id=141, label="141 QuantizeLinear_onnx::Conv_563_1", type=QuantizeLinear]; +"142 DequantizeLinear_onnx^^Conv_563_1" [id=142, label="142 DequantizeLinear_onnx::Conv_563_1", type=DequantizeLinear]; +"143 /layer2/layer2.3/conv2/Conv" [id=143, type=Conv]; +"144 /layer2/layer2.3/relu_1/Relu" [id=144, type=Relu]; +"145 QuantizeLinear_/layer2/layer2.3/relu_1/Relu_output_0_1" [id=145, type=QuantizeLinear]; +"146 DequantizeLinear_/layer2/layer2.3/relu_1/Relu_output_0_1" [id=146, type=DequantizeLinear]; +"147 /layer2/layer2.3/conv3/Conv" [id=147, type=Conv]; +"148 QuantizeLinear_/layer2/layer2.3/conv3/Conv_output_0_1" [id=148, type=QuantizeLinear]; +"149 DequantizeLinear_/layer2/layer2.3/conv3/Conv_output_0_1" [id=149, type=DequantizeLinear]; +"150 /layer2/layer2.3/Add" [id=150, type=Add]; +"151 /layer2/layer2.3/relu_2/Relu" [id=151, type=Relu]; +"152 QuantizeLinear_/layer2/layer2.3/relu_2/Relu_output_0_1" [id=152, type=QuantizeLinear]; +"153 DequantizeLinear_/layer2/layer2.3/relu_2/Relu_output_0_1" [id=153, type=DequantizeLinear]; +"154 QuantizeLinear_onnx^^Conv_569_1" [id=154, label="154 QuantizeLinear_onnx::Conv_569_1", type=QuantizeLinear]; +"155 DequantizeLinear_onnx^^Conv_569_1" [id=155, label="155 DequantizeLinear_onnx::Conv_569_1", type=DequantizeLinear]; +"156 /layer3/layer3.0/conv1/Conv" [id=156, type=Conv]; +"157 /layer3/layer3.0/relu/Relu" [id=157, type=Relu]; +"158 QuantizeLinear_/layer3/layer3.0/relu/Relu_output_0_1" [id=158, type=QuantizeLinear]; +"159 
DequantizeLinear_/layer3/layer3.0/relu/Relu_output_0_1" [id=159, type=DequantizeLinear]; +"160 QuantizeLinear_onnx^^Conv_572_1" [id=160, label="160 QuantizeLinear_onnx::Conv_572_1", type=QuantizeLinear]; +"161 DequantizeLinear_onnx^^Conv_572_1" [id=161, label="161 DequantizeLinear_onnx::Conv_572_1", type=DequantizeLinear]; +"162 /layer3/layer3.0/conv2/Conv" [id=162, type=Conv]; +"163 /layer3/layer3.0/relu_1/Relu" [id=163, type=Relu]; +"164 QuantizeLinear_/layer3/layer3.0/relu_1/Relu_output_0_1" [id=164, type=QuantizeLinear]; +"165 DequantizeLinear_/layer3/layer3.0/relu_1/Relu_output_0_1" [id=165, type=DequantizeLinear]; +"166 QuantizeLinear_onnx^^Conv_575_1" [id=166, label="166 QuantizeLinear_onnx::Conv_575_1", type=QuantizeLinear]; +"167 DequantizeLinear_onnx^^Conv_575_1" [id=167, label="167 DequantizeLinear_onnx::Conv_575_1", type=DequantizeLinear]; +"168 /layer3/layer3.0/conv3/Conv" [id=168, type=Conv]; +"169 QuantizeLinear_onnx^^Conv_578_1" [id=169, label="169 QuantizeLinear_onnx::Conv_578_1", type=QuantizeLinear]; +"170 DequantizeLinear_onnx^^Conv_578_1" [id=170, label="170 DequantizeLinear_onnx::Conv_578_1", type=DequantizeLinear]; +"171 /layer3/layer3.0/downsample/downsample.0/Conv" [id=171, type=Conv]; +"172 QuantizeLinear_/layer3/layer3.0/conv3/Conv_output_0_1" [id=172, type=QuantizeLinear]; +"173 DequantizeLinear_/layer3/layer3.0/conv3/Conv_output_0_1" [id=173, type=DequantizeLinear]; +"174 QuantizeLinear_/layer3/layer3.0/downsample/downsample.0/Conv_output_0_1" [id=174, type=QuantizeLinear]; +"175 DequantizeLinear_/layer3/layer3.0/downsample/downsample.0/Conv_output_0_1" [id=175, type=DequantizeLinear]; +"176 /layer3/layer3.0/Add" [id=176, type=Add]; +"177 /layer3/layer3.0/relu_2/Relu" [id=177, type=Relu]; +"178 QuantizeLinear_/layer3/layer3.0/relu_2/Relu_output_0_1" [id=178, type=QuantizeLinear]; +"179 DequantizeLinear_/layer3/layer3.0/relu_2/Relu_output_0_1" [id=179, type=DequantizeLinear]; +"180 QuantizeLinear_onnx^^Conv_581_1" [id=180, label="180 QuantizeLinear_onnx::Conv_581_1", type=QuantizeLinear]; +"181 DequantizeLinear_onnx^^Conv_581_1" [id=181, label="181 DequantizeLinear_onnx::Conv_581_1", type=DequantizeLinear]; +"182 /layer3/layer3.1/conv1/Conv" [id=182, type=Conv]; +"183 /layer3/layer3.1/relu/Relu" [id=183, type=Relu]; +"184 QuantizeLinear_/layer3/layer3.1/relu/Relu_output_0_1" [id=184, type=QuantizeLinear]; +"185 DequantizeLinear_/layer3/layer3.1/relu/Relu_output_0_1" [id=185, type=DequantizeLinear]; +"186 QuantizeLinear_onnx^^Conv_584_1" [id=186, label="186 QuantizeLinear_onnx::Conv_584_1", type=QuantizeLinear]; +"187 DequantizeLinear_onnx^^Conv_584_1" [id=187, label="187 DequantizeLinear_onnx::Conv_584_1", type=DequantizeLinear]; +"188 /layer3/layer3.1/conv2/Conv" [id=188, type=Conv]; +"189 /layer3/layer3.1/relu_1/Relu" [id=189, type=Relu]; +"190 QuantizeLinear_/layer3/layer3.1/relu_1/Relu_output_0_1" [id=190, type=QuantizeLinear]; +"191 DequantizeLinear_/layer3/layer3.1/relu_1/Relu_output_0_1" [id=191, type=DequantizeLinear]; +"192 /layer3/layer3.1/conv3/Conv" [id=192, type=Conv]; +"193 QuantizeLinear_/layer3/layer3.1/conv3/Conv_output_0_1" [id=193, type=QuantizeLinear]; +"194 DequantizeLinear_/layer3/layer3.1/conv3/Conv_output_0_1" [id=194, type=DequantizeLinear]; +"195 /layer3/layer3.1/Add" [id=195, type=Add]; +"196 /layer3/layer3.1/relu_2/Relu" [id=196, type=Relu]; +"197 QuantizeLinear_/layer3/layer3.1/relu_2/Relu_output_0_1" [id=197, type=QuantizeLinear]; +"198 DequantizeLinear_/layer3/layer3.1/relu_2/Relu_output_0_1" [id=198, type=DequantizeLinear]; +"199 
QuantizeLinear_onnx^^Conv_590_1" [id=199, label="199 QuantizeLinear_onnx::Conv_590_1", type=QuantizeLinear]; +"200 DequantizeLinear_onnx^^Conv_590_1" [id=200, label="200 DequantizeLinear_onnx::Conv_590_1", type=DequantizeLinear]; +"201 /layer3/layer3.2/conv1/Conv" [id=201, type=Conv]; +"202 /layer3/layer3.2/relu/Relu" [id=202, type=Relu]; +"203 QuantizeLinear_/layer3/layer3.2/relu/Relu_output_0_1" [id=203, type=QuantizeLinear]; +"204 DequantizeLinear_/layer3/layer3.2/relu/Relu_output_0_1" [id=204, type=DequantizeLinear]; +"205 QuantizeLinear_onnx^^Conv_593_1" [id=205, label="205 QuantizeLinear_onnx::Conv_593_1", type=QuantizeLinear]; +"206 DequantizeLinear_onnx^^Conv_593_1" [id=206, label="206 DequantizeLinear_onnx::Conv_593_1", type=DequantizeLinear]; +"207 /layer3/layer3.2/conv2/Conv" [id=207, type=Conv]; +"208 /layer3/layer3.2/relu_1/Relu" [id=208, type=Relu]; +"209 QuantizeLinear_/layer3/layer3.2/relu_1/Relu_output_0_1" [id=209, type=QuantizeLinear]; +"210 DequantizeLinear_/layer3/layer3.2/relu_1/Relu_output_0_1" [id=210, type=DequantizeLinear]; +"211 /layer3/layer3.2/conv3/Conv" [id=211, type=Conv]; +"212 QuantizeLinear_/layer3/layer3.2/conv3/Conv_output_0_1" [id=212, type=QuantizeLinear]; +"213 DequantizeLinear_/layer3/layer3.2/conv3/Conv_output_0_1" [id=213, type=DequantizeLinear]; +"214 /layer3/layer3.2/Add" [id=214, type=Add]; +"215 /layer3/layer3.2/relu_2/Relu" [id=215, type=Relu]; +"216 QuantizeLinear_/layer3/layer3.2/relu_2/Relu_output_0_1" [id=216, type=QuantizeLinear]; +"217 DequantizeLinear_/layer3/layer3.2/relu_2/Relu_output_0_1" [id=217, type=DequantizeLinear]; +"218 QuantizeLinear_onnx^^Conv_599_1" [id=218, label="218 QuantizeLinear_onnx::Conv_599_1", type=QuantizeLinear]; +"219 DequantizeLinear_onnx^^Conv_599_1" [id=219, label="219 DequantizeLinear_onnx::Conv_599_1", type=DequantizeLinear]; +"220 /layer3/layer3.3/conv1/Conv" [id=220, type=Conv]; +"221 /layer3/layer3.3/relu/Relu" [id=221, type=Relu]; +"222 QuantizeLinear_/layer3/layer3.3/relu/Relu_output_0_1" [id=222, type=QuantizeLinear]; +"223 DequantizeLinear_/layer3/layer3.3/relu/Relu_output_0_1" [id=223, type=DequantizeLinear]; +"224 QuantizeLinear_onnx^^Conv_602_1" [id=224, label="224 QuantizeLinear_onnx::Conv_602_1", type=QuantizeLinear]; +"225 DequantizeLinear_onnx^^Conv_602_1" [id=225, label="225 DequantizeLinear_onnx::Conv_602_1", type=DequantizeLinear]; +"226 /layer3/layer3.3/conv2/Conv" [id=226, type=Conv]; +"227 /layer3/layer3.3/relu_1/Relu" [id=227, type=Relu]; +"228 QuantizeLinear_/layer3/layer3.3/relu_1/Relu_output_0_1" [id=228, type=QuantizeLinear]; +"229 DequantizeLinear_/layer3/layer3.3/relu_1/Relu_output_0_1" [id=229, type=DequantizeLinear]; +"230 /layer3/layer3.3/conv3/Conv" [id=230, type=Conv]; +"231 QuantizeLinear_/layer3/layer3.3/conv3/Conv_output_0_1" [id=231, type=QuantizeLinear]; +"232 DequantizeLinear_/layer3/layer3.3/conv3/Conv_output_0_1" [id=232, type=DequantizeLinear]; +"233 /layer3/layer3.3/Add" [id=233, type=Add]; +"234 /layer3/layer3.3/relu_2/Relu" [id=234, type=Relu]; +"235 QuantizeLinear_/layer3/layer3.3/relu_2/Relu_output_0_1" [id=235, type=QuantizeLinear]; +"236 DequantizeLinear_/layer3/layer3.3/relu_2/Relu_output_0_1" [id=236, type=DequantizeLinear]; +"237 QuantizeLinear_onnx^^Conv_608_1" [id=237, label="237 QuantizeLinear_onnx::Conv_608_1", type=QuantizeLinear]; +"238 DequantizeLinear_onnx^^Conv_608_1" [id=238, label="238 DequantizeLinear_onnx::Conv_608_1", type=DequantizeLinear]; +"239 /layer3/layer3.4/conv1/Conv" [id=239, type=Conv]; +"240 /layer3/layer3.4/relu/Relu" [id=240, 
type=Relu]; +"241 QuantizeLinear_/layer3/layer3.4/relu/Relu_output_0_1" [id=241, type=QuantizeLinear]; +"242 DequantizeLinear_/layer3/layer3.4/relu/Relu_output_0_1" [id=242, type=DequantizeLinear]; +"243 QuantizeLinear_onnx^^Conv_611_1" [id=243, label="243 QuantizeLinear_onnx::Conv_611_1", type=QuantizeLinear]; +"244 DequantizeLinear_onnx^^Conv_611_1" [id=244, label="244 DequantizeLinear_onnx::Conv_611_1", type=DequantizeLinear]; +"245 /layer3/layer3.4/conv2/Conv" [id=245, type=Conv]; +"246 /layer3/layer3.4/relu_1/Relu" [id=246, type=Relu]; +"247 QuantizeLinear_/layer3/layer3.4/relu_1/Relu_output_0_1" [id=247, type=QuantizeLinear]; +"248 DequantizeLinear_/layer3/layer3.4/relu_1/Relu_output_0_1" [id=248, type=DequantizeLinear]; +"249 /layer3/layer3.4/conv3/Conv" [id=249, type=Conv]; +"250 QuantizeLinear_/layer3/layer3.4/conv3/Conv_output_0_1" [id=250, type=QuantizeLinear]; +"251 DequantizeLinear_/layer3/layer3.4/conv3/Conv_output_0_1" [id=251, type=DequantizeLinear]; +"252 /layer3/layer3.4/Add" [id=252, type=Add]; +"253 /layer3/layer3.4/relu_2/Relu" [id=253, type=Relu]; +"254 QuantizeLinear_/layer3/layer3.4/relu_2/Relu_output_0_1" [id=254, type=QuantizeLinear]; +"255 DequantizeLinear_/layer3/layer3.4/relu_2/Relu_output_0_1" [id=255, type=DequantizeLinear]; +"256 QuantizeLinear_onnx^^Conv_617_1" [id=256, label="256 QuantizeLinear_onnx::Conv_617_1", type=QuantizeLinear]; +"257 DequantizeLinear_onnx^^Conv_617_1" [id=257, label="257 DequantizeLinear_onnx::Conv_617_1", type=DequantizeLinear]; +"258 /layer3/layer3.5/conv1/Conv" [id=258, type=Conv]; +"259 /layer3/layer3.5/relu/Relu" [id=259, type=Relu]; +"260 QuantizeLinear_/layer3/layer3.5/relu/Relu_output_0_1" [id=260, type=QuantizeLinear]; +"261 DequantizeLinear_/layer3/layer3.5/relu/Relu_output_0_1" [id=261, type=DequantizeLinear]; +"262 QuantizeLinear_onnx^^Conv_620_1" [id=262, label="262 QuantizeLinear_onnx::Conv_620_1", type=QuantizeLinear]; +"263 DequantizeLinear_onnx^^Conv_620_1" [id=263, label="263 DequantizeLinear_onnx::Conv_620_1", type=DequantizeLinear]; +"264 /layer3/layer3.5/conv2/Conv" [id=264, type=Conv]; +"265 /layer3/layer3.5/relu_1/Relu" [id=265, type=Relu]; +"266 QuantizeLinear_/layer3/layer3.5/relu_1/Relu_output_0_1" [id=266, type=QuantizeLinear]; +"267 DequantizeLinear_/layer3/layer3.5/relu_1/Relu_output_0_1" [id=267, type=DequantizeLinear]; +"268 /layer3/layer3.5/conv3/Conv" [id=268, type=Conv]; +"269 QuantizeLinear_/layer3/layer3.5/conv3/Conv_output_0_1" [id=269, type=QuantizeLinear]; +"270 DequantizeLinear_/layer3/layer3.5/conv3/Conv_output_0_1" [id=270, type=DequantizeLinear]; +"271 /layer3/layer3.5/Add" [id=271, type=Add]; +"272 /layer3/layer3.5/relu_2/Relu" [id=272, type=Relu]; +"273 QuantizeLinear_/layer3/layer3.5/relu_2/Relu_output_0_1" [id=273, type=QuantizeLinear]; +"274 DequantizeLinear_/layer3/layer3.5/relu_2/Relu_output_0_1" [id=274, type=DequantizeLinear]; +"275 QuantizeLinear_onnx^^Conv_626_1" [id=275, label="275 QuantizeLinear_onnx::Conv_626_1", type=QuantizeLinear]; +"276 DequantizeLinear_onnx^^Conv_626_1" [id=276, label="276 DequantizeLinear_onnx::Conv_626_1", type=DequantizeLinear]; +"277 /layer4/layer4.0/conv1/Conv" [id=277, type=Conv]; +"278 /layer4/layer4.0/relu/Relu" [id=278, type=Relu]; +"279 QuantizeLinear_/layer4/layer4.0/relu/Relu_output_0_1" [id=279, type=QuantizeLinear]; +"280 DequantizeLinear_/layer4/layer4.0/relu/Relu_output_0_1" [id=280, type=DequantizeLinear]; +"281 QuantizeLinear_onnx^^Conv_629_1" [id=281, label="281 QuantizeLinear_onnx::Conv_629_1", type=QuantizeLinear]; +"282 
DequantizeLinear_onnx^^Conv_629_1" [id=282, label="282 DequantizeLinear_onnx::Conv_629_1", type=DequantizeLinear]; +"283 /layer4/layer4.0/conv2/Conv" [id=283, type=Conv]; +"284 /layer4/layer4.0/relu_1/Relu" [id=284, type=Relu]; +"285 QuantizeLinear_/layer4/layer4.0/relu_1/Relu_output_0_1" [id=285, type=QuantizeLinear]; +"286 DequantizeLinear_/layer4/layer4.0/relu_1/Relu_output_0_1" [id=286, type=DequantizeLinear]; +"287 QuantizeLinear_onnx^^Conv_632_1" [id=287, label="287 QuantizeLinear_onnx::Conv_632_1", type=QuantizeLinear]; +"288 DequantizeLinear_onnx^^Conv_632_1" [id=288, label="288 DequantizeLinear_onnx::Conv_632_1", type=DequantizeLinear]; +"289 /layer4/layer4.0/conv3/Conv" [id=289, type=Conv]; +"290 QuantizeLinear_onnx^^Conv_635_1" [id=290, label="290 QuantizeLinear_onnx::Conv_635_1", type=QuantizeLinear]; +"291 DequantizeLinear_onnx^^Conv_635_1" [id=291, label="291 DequantizeLinear_onnx::Conv_635_1", type=DequantizeLinear]; +"292 /layer4/layer4.0/downsample/downsample.0/Conv" [id=292, type=Conv]; +"293 QuantizeLinear_/layer4/layer4.0/conv3/Conv_output_0_1" [id=293, type=QuantizeLinear]; +"294 DequantizeLinear_/layer4/layer4.0/conv3/Conv_output_0_1" [id=294, type=DequantizeLinear]; +"295 QuantizeLinear_/layer4/layer4.0/downsample/downsample.0/Conv_output_0_1" [id=295, type=QuantizeLinear]; +"296 DequantizeLinear_/layer4/layer4.0/downsample/downsample.0/Conv_output_0_1" [id=296, type=DequantizeLinear]; +"297 /layer4/layer4.0/Add" [id=297, type=Add]; +"298 /layer4/layer4.0/relu_2/Relu" [id=298, type=Relu]; +"299 QuantizeLinear_/layer4/layer4.0/relu_2/Relu_output_0_1" [id=299, type=QuantizeLinear]; +"300 DequantizeLinear_/layer4/layer4.0/relu_2/Relu_output_0_1" [id=300, type=DequantizeLinear]; +"301 QuantizeLinear_onnx^^Conv_638_1" [id=301, label="301 QuantizeLinear_onnx::Conv_638_1", type=QuantizeLinear]; +"302 DequantizeLinear_onnx^^Conv_638_1" [id=302, label="302 DequantizeLinear_onnx::Conv_638_1", type=DequantizeLinear]; +"303 /layer4/layer4.1/conv1/Conv" [id=303, type=Conv]; +"304 /layer4/layer4.1/relu/Relu" [id=304, type=Relu]; +"305 QuantizeLinear_/layer4/layer4.1/relu/Relu_output_0_1" [id=305, type=QuantizeLinear]; +"306 DequantizeLinear_/layer4/layer4.1/relu/Relu_output_0_1" [id=306, type=DequantizeLinear]; +"307 QuantizeLinear_onnx^^Conv_641_1" [id=307, label="307 QuantizeLinear_onnx::Conv_641_1", type=QuantizeLinear]; +"308 DequantizeLinear_onnx^^Conv_641_1" [id=308, label="308 DequantizeLinear_onnx::Conv_641_1", type=DequantizeLinear]; +"309 /layer4/layer4.1/conv2/Conv" [id=309, type=Conv]; +"310 /layer4/layer4.1/relu_1/Relu" [id=310, type=Relu]; +"311 QuantizeLinear_/layer4/layer4.1/relu_1/Relu_output_0_1" [id=311, type=QuantizeLinear]; +"312 DequantizeLinear_/layer4/layer4.1/relu_1/Relu_output_0_1" [id=312, type=DequantizeLinear]; +"313 /layer4/layer4.1/conv3/Conv" [id=313, type=Conv]; +"314 QuantizeLinear_/layer4/layer4.1/conv3/Conv_output_0_1" [id=314, type=QuantizeLinear]; +"315 DequantizeLinear_/layer4/layer4.1/conv3/Conv_output_0_1" [id=315, type=DequantizeLinear]; +"316 /layer4/layer4.1/Add" [id=316, type=Add]; +"317 /layer4/layer4.1/relu_2/Relu" [id=317, type=Relu]; +"318 QuantizeLinear_/layer4/layer4.1/relu_2/Relu_output_0_1" [id=318, type=QuantizeLinear]; +"319 DequantizeLinear_/layer4/layer4.1/relu_2/Relu_output_0_1" [id=319, type=DequantizeLinear]; +"320 QuantizeLinear_onnx^^Conv_647_1" [id=320, label="320 QuantizeLinear_onnx::Conv_647_1", type=QuantizeLinear]; +"321 DequantizeLinear_onnx^^Conv_647_1" [id=321, label="321 DequantizeLinear_onnx::Conv_647_1", 
type=DequantizeLinear]; +"322 /layer4/layer4.2/conv1/Conv" [id=322, type=Conv]; +"323 /layer4/layer4.2/relu/Relu" [id=323, type=Relu]; +"324 QuantizeLinear_/layer4/layer4.2/relu/Relu_output_0_1" [id=324, type=QuantizeLinear]; +"325 DequantizeLinear_/layer4/layer4.2/relu/Relu_output_0_1" [id=325, type=DequantizeLinear]; +"326 QuantizeLinear_onnx^^Conv_650_1" [id=326, label="326 QuantizeLinear_onnx::Conv_650_1", type=QuantizeLinear]; +"327 DequantizeLinear_onnx^^Conv_650_1" [id=327, label="327 DequantizeLinear_onnx::Conv_650_1", type=DequantizeLinear]; +"328 /layer4/layer4.2/conv2/Conv" [id=328, type=Conv]; +"329 /layer4/layer4.2/relu_1/Relu" [id=329, type=Relu]; +"330 QuantizeLinear_/layer4/layer4.2/relu_1/Relu_output_0_1" [id=330, type=QuantizeLinear]; +"331 DequantizeLinear_/layer4/layer4.2/relu_1/Relu_output_0_1" [id=331, type=DequantizeLinear]; +"332 /layer4/layer4.2/conv3/Conv" [id=332, type=Conv]; +"333 QuantizeLinear_/layer4/layer4.2/conv3/Conv_output_0_1" [id=333, type=QuantizeLinear]; +"334 DequantizeLinear_/layer4/layer4.2/conv3/Conv_output_0_1" [id=334, type=DequantizeLinear]; +"335 /layer4/layer4.2/Add" [id=335, type=Add]; +"336 /layer4/layer4.2/relu_2/Relu" [id=336, type=Relu]; +"337 QuantizeLinear_/layer4/layer4.2/relu_2/Relu_output_0_1" [id=337, type=QuantizeLinear]; +"338 DequantizeLinear_/layer4/layer4.2/relu_2/Relu_output_0_1" [id=338, type=DequantizeLinear]; +"339 /avgpool/GlobalAveragePool" [id=339, type=GlobalAveragePool]; +"340 QuantizeLinear_/avgpool/GlobalAveragePool_output_0_1" [id=340, type=QuantizeLinear]; +"341 DequantizeLinear_/avgpool/GlobalAveragePool_output_0_1" [id=341, type=DequantizeLinear]; +"342 /Flatten" [id=342, type=Flatten]; +"343 QuantizeLinear_fc.weight_1" [id=343, type=QuantizeLinear]; +"344 DequantizeLinear_fc.weight_1" [id=344, type=DequantizeLinear]; +"345 /fc/Gemm" [id=345, type=Gemm]; +"346 nncf_model_input_0" [id=346, type=nncf_model_input]; +"347 nncf_model_output_0" [id=347, type=nncf_model_output]; +"0 QuantizeLinear_input.1_1" -> "1 DequantizeLinear_input.1_1" [label="[1, 3, 224, 224]", style=dashed]; +"1 DequantizeLinear_input.1_1" -> "4 /conv1/Conv" [label="[1, 3, 224, 224]", style=solid]; +"2 QuantizeLinear_onnx^^Conv_497_1" -> "3 DequantizeLinear_onnx^^Conv_497_1" [label="[64, 3, 7, 7]", style=dashed]; +"3 DequantizeLinear_onnx^^Conv_497_1" -> "4 /conv1/Conv" [label="[64, 3, 7, 7]", style=solid]; +"4 /conv1/Conv" -> "5 /relu/Relu" [label="[1, 64, 112, 112]", style=solid]; +"5 /relu/Relu" -> "6 QuantizeLinear_/relu/Relu_output_0_1" [label="[1, 64, 112, 112]", style=solid]; +"6 QuantizeLinear_/relu/Relu_output_0_1" -> "7 DequantizeLinear_/relu/Relu_output_0_1" [label="[1, 64, 112, 112]", style=dashed]; +"7 DequantizeLinear_/relu/Relu_output_0_1" -> "8 /maxpool/MaxPool" [label="[1, 64, 112, 112]", style=solid]; +"8 /maxpool/MaxPool" -> "11 /layer1/layer1.0/conv1/Conv" [label="[1, 64, 56, 56]", style=solid]; +"8 /maxpool/MaxPool" -> "24 /layer1/layer1.0/downsample/downsample.0/Conv" [label="[1, 64, 56, 56]", style=solid]; +"9 QuantizeLinear_onnx^^Conv_500_1" -> "10 DequantizeLinear_onnx^^Conv_500_1" [label="[64, 64, 1, 1]", style=dashed]; +"10 DequantizeLinear_onnx^^Conv_500_1" -> "11 /layer1/layer1.0/conv1/Conv" [label="[64, 64, 1, 1]", style=solid]; +"11 /layer1/layer1.0/conv1/Conv" -> "12 /layer1/layer1.0/relu/Relu" [label="[1, 64, 56, 56]", style=solid]; +"12 /layer1/layer1.0/relu/Relu" -> "13 QuantizeLinear_/layer1/layer1.0/relu/Relu_output_0_1" [label="[1, 64, 56, 56]", style=solid]; +"13 
QuantizeLinear_/layer1/layer1.0/relu/Relu_output_0_1" -> "14 DequantizeLinear_/layer1/layer1.0/relu/Relu_output_0_1" [label="[1, 64, 56, 56]", style=dashed]; +"14 DequantizeLinear_/layer1/layer1.0/relu/Relu_output_0_1" -> "17 /layer1/layer1.0/conv2/Conv" [label="[1, 64, 56, 56]", style=solid]; +"15 QuantizeLinear_onnx^^Conv_503_1" -> "16 DequantizeLinear_onnx^^Conv_503_1" [label="[64, 64, 3, 3]", style=dashed]; +"16 DequantizeLinear_onnx^^Conv_503_1" -> "17 /layer1/layer1.0/conv2/Conv" [label="[64, 64, 3, 3]", style=solid]; +"17 /layer1/layer1.0/conv2/Conv" -> "18 /layer1/layer1.0/relu_1/Relu" [label="[1, 64, 56, 56]", style=solid]; +"18 /layer1/layer1.0/relu_1/Relu" -> "19 QuantizeLinear_/layer1/layer1.0/relu_1/Relu_output_0_1" [label="[1, 64, 56, 56]", style=solid]; +"19 QuantizeLinear_/layer1/layer1.0/relu_1/Relu_output_0_1" -> "20 DequantizeLinear_/layer1/layer1.0/relu_1/Relu_output_0_1" [label="[1, 64, 56, 56]", style=dashed]; +"20 DequantizeLinear_/layer1/layer1.0/relu_1/Relu_output_0_1" -> "21 /layer1/layer1.0/conv3/Conv" [label="[1, 64, 56, 56]", style=solid]; +"21 /layer1/layer1.0/conv3/Conv" -> "25 QuantizeLinear_/layer1/layer1.0/conv3/Conv_output_0_1" [label="[1, 256, 56, 56]", style=solid]; +"22 QuantizeLinear_onnx^^Conv_509_1" -> "23 DequantizeLinear_onnx^^Conv_509_1" [label="[256, 64, 1, 1]", style=dashed]; +"23 DequantizeLinear_onnx^^Conv_509_1" -> "24 /layer1/layer1.0/downsample/downsample.0/Conv" [label="[256, 64, 1, 1]", style=solid]; +"24 /layer1/layer1.0/downsample/downsample.0/Conv" -> "27 QuantizeLinear_/layer1/layer1.0/downsample/downsample.0/Conv_output_0_1" [label="[1, 256, 56, 56]", style=solid]; +"25 QuantizeLinear_/layer1/layer1.0/conv3/Conv_output_0_1" -> "26 DequantizeLinear_/layer1/layer1.0/conv3/Conv_output_0_1" [label="[1, 256, 56, 56]", style=dashed]; +"26 DequantizeLinear_/layer1/layer1.0/conv3/Conv_output_0_1" -> "29 /layer1/layer1.0/Add" [label="[1, 256, 56, 56]", style=solid]; +"27 QuantizeLinear_/layer1/layer1.0/downsample/downsample.0/Conv_output_0_1" -> "28 DequantizeLinear_/layer1/layer1.0/downsample/downsample.0/Conv_output_0_1" [label="[1, 256, 56, 56]", style=dashed]; +"28 DequantizeLinear_/layer1/layer1.0/downsample/downsample.0/Conv_output_0_1" -> "29 /layer1/layer1.0/Add" [label="[1, 256, 56, 56]", style=solid]; +"29 /layer1/layer1.0/Add" -> "30 /layer1/layer1.0/relu_2/Relu" [label="[1, 256, 56, 56]", style=solid]; +"30 /layer1/layer1.0/relu_2/Relu" -> "31 QuantizeLinear_/layer1/layer1.0/relu_2/Relu_output_0_1" [label="[1, 256, 56, 56]", style=solid]; +"31 QuantizeLinear_/layer1/layer1.0/relu_2/Relu_output_0_1" -> "32 DequantizeLinear_/layer1/layer1.0/relu_2/Relu_output_0_1" [label="[1, 256, 56, 56]", style=dashed]; +"32 DequantizeLinear_/layer1/layer1.0/relu_2/Relu_output_0_1" -> "35 /layer1/layer1.1/conv1/Conv" [label="[1, 256, 56, 56]", style=solid]; +"32 DequantizeLinear_/layer1/layer1.0/relu_2/Relu_output_0_1" -> "48 /layer1/layer1.1/Add" [label="[1, 256, 56, 56]", style=solid]; +"33 QuantizeLinear_onnx^^Conv_512_1" -> "34 DequantizeLinear_onnx^^Conv_512_1" [label="[64, 256, 1, 1]", style=dashed]; +"34 DequantizeLinear_onnx^^Conv_512_1" -> "35 /layer1/layer1.1/conv1/Conv" [label="[64, 256, 1, 1]", style=solid]; +"35 /layer1/layer1.1/conv1/Conv" -> "36 /layer1/layer1.1/relu/Relu" [label="[1, 64, 56, 56]", style=solid]; +"36 /layer1/layer1.1/relu/Relu" -> "37 QuantizeLinear_/layer1/layer1.1/relu/Relu_output_0_1" [label="[1, 64, 56, 56]", style=solid]; +"37 QuantizeLinear_/layer1/layer1.1/relu/Relu_output_0_1" -> "38 
DequantizeLinear_/layer1/layer1.1/relu/Relu_output_0_1" [label="[1, 64, 56, 56]", style=dashed]; +"38 DequantizeLinear_/layer1/layer1.1/relu/Relu_output_0_1" -> "41 /layer1/layer1.1/conv2/Conv" [label="[1, 64, 56, 56]", style=solid]; +"39 QuantizeLinear_onnx^^Conv_515_1" -> "40 DequantizeLinear_onnx^^Conv_515_1" [label="[64, 64, 3, 3]", style=dashed]; +"40 DequantizeLinear_onnx^^Conv_515_1" -> "41 /layer1/layer1.1/conv2/Conv" [label="[64, 64, 3, 3]", style=solid]; +"41 /layer1/layer1.1/conv2/Conv" -> "42 /layer1/layer1.1/relu_1/Relu" [label="[1, 64, 56, 56]", style=solid]; +"42 /layer1/layer1.1/relu_1/Relu" -> "43 QuantizeLinear_/layer1/layer1.1/relu_1/Relu_output_0_1" [label="[1, 64, 56, 56]", style=solid]; +"43 QuantizeLinear_/layer1/layer1.1/relu_1/Relu_output_0_1" -> "44 DequantizeLinear_/layer1/layer1.1/relu_1/Relu_output_0_1" [label="[1, 64, 56, 56]", style=dashed]; +"44 DequantizeLinear_/layer1/layer1.1/relu_1/Relu_output_0_1" -> "45 /layer1/layer1.1/conv3/Conv" [label="[1, 64, 56, 56]", style=solid]; +"45 /layer1/layer1.1/conv3/Conv" -> "46 QuantizeLinear_/layer1/layer1.1/conv3/Conv_output_0_1" [label="[1, 256, 56, 56]", style=solid]; +"46 QuantizeLinear_/layer1/layer1.1/conv3/Conv_output_0_1" -> "47 DequantizeLinear_/layer1/layer1.1/conv3/Conv_output_0_1" [label="[1, 256, 56, 56]", style=dashed]; +"47 DequantizeLinear_/layer1/layer1.1/conv3/Conv_output_0_1" -> "48 /layer1/layer1.1/Add" [label="[1, 256, 56, 56]", style=solid]; +"48 /layer1/layer1.1/Add" -> "49 /layer1/layer1.1/relu_2/Relu" [label="[1, 256, 56, 56]", style=solid]; +"49 /layer1/layer1.1/relu_2/Relu" -> "50 QuantizeLinear_/layer1/layer1.1/relu_2/Relu_output_0_1" [label="[1, 256, 56, 56]", style=solid]; +"50 QuantizeLinear_/layer1/layer1.1/relu_2/Relu_output_0_1" -> "51 DequantizeLinear_/layer1/layer1.1/relu_2/Relu_output_0_1" [label="[1, 256, 56, 56]", style=dashed]; +"51 DequantizeLinear_/layer1/layer1.1/relu_2/Relu_output_0_1" -> "54 /layer1/layer1.2/conv1/Conv" [label="[1, 256, 56, 56]", style=solid]; +"51 DequantizeLinear_/layer1/layer1.1/relu_2/Relu_output_0_1" -> "67 /layer1/layer1.2/Add" [label="[1, 256, 56, 56]", style=solid]; +"52 QuantizeLinear_onnx^^Conv_521_1" -> "53 DequantizeLinear_onnx^^Conv_521_1" [label="[64, 256, 1, 1]", style=dashed]; +"53 DequantizeLinear_onnx^^Conv_521_1" -> "54 /layer1/layer1.2/conv1/Conv" [label="[64, 256, 1, 1]", style=solid]; +"54 /layer1/layer1.2/conv1/Conv" -> "55 /layer1/layer1.2/relu/Relu" [label="[1, 64, 56, 56]", style=solid]; +"55 /layer1/layer1.2/relu/Relu" -> "56 QuantizeLinear_/layer1/layer1.2/relu/Relu_output_0_1" [label="[1, 64, 56, 56]", style=solid]; +"56 QuantizeLinear_/layer1/layer1.2/relu/Relu_output_0_1" -> "57 DequantizeLinear_/layer1/layer1.2/relu/Relu_output_0_1" [label="[1, 64, 56, 56]", style=dashed]; +"57 DequantizeLinear_/layer1/layer1.2/relu/Relu_output_0_1" -> "60 /layer1/layer1.2/conv2/Conv" [label="[1, 64, 56, 56]", style=solid]; +"58 QuantizeLinear_onnx^^Conv_524_1" -> "59 DequantizeLinear_onnx^^Conv_524_1" [label="[64, 64, 3, 3]", style=dashed]; +"59 DequantizeLinear_onnx^^Conv_524_1" -> "60 /layer1/layer1.2/conv2/Conv" [label="[64, 64, 3, 3]", style=solid]; +"60 /layer1/layer1.2/conv2/Conv" -> "61 /layer1/layer1.2/relu_1/Relu" [label="[1, 64, 56, 56]", style=solid]; +"61 /layer1/layer1.2/relu_1/Relu" -> "62 QuantizeLinear_/layer1/layer1.2/relu_1/Relu_output_0_1" [label="[1, 64, 56, 56]", style=solid]; +"62 QuantizeLinear_/layer1/layer1.2/relu_1/Relu_output_0_1" -> "63 DequantizeLinear_/layer1/layer1.2/relu_1/Relu_output_0_1" [label="[1, 64, 56, 
56]", style=dashed]; +"63 DequantizeLinear_/layer1/layer1.2/relu_1/Relu_output_0_1" -> "64 /layer1/layer1.2/conv3/Conv" [label="[1, 64, 56, 56]", style=solid]; +"64 /layer1/layer1.2/conv3/Conv" -> "65 QuantizeLinear_/layer1/layer1.2/conv3/Conv_output_0_1" [label="[1, 256, 56, 56]", style=solid]; +"65 QuantizeLinear_/layer1/layer1.2/conv3/Conv_output_0_1" -> "66 DequantizeLinear_/layer1/layer1.2/conv3/Conv_output_0_1" [label="[1, 256, 56, 56]", style=dashed]; +"66 DequantizeLinear_/layer1/layer1.2/conv3/Conv_output_0_1" -> "67 /layer1/layer1.2/Add" [label="[1, 256, 56, 56]", style=solid]; +"67 /layer1/layer1.2/Add" -> "68 /layer1/layer1.2/relu_2/Relu" [label="[1, 256, 56, 56]", style=solid]; +"68 /layer1/layer1.2/relu_2/Relu" -> "69 QuantizeLinear_/layer1/layer1.2/relu_2/Relu_output_0_1" [label="[1, 256, 56, 56]", style=solid]; +"69 QuantizeLinear_/layer1/layer1.2/relu_2/Relu_output_0_1" -> "70 DequantizeLinear_/layer1/layer1.2/relu_2/Relu_output_0_1" [label="[1, 256, 56, 56]", style=dashed]; +"70 DequantizeLinear_/layer1/layer1.2/relu_2/Relu_output_0_1" -> "73 /layer2/layer2.0/conv1/Conv" [label="[1, 256, 56, 56]", style=solid]; +"70 DequantizeLinear_/layer1/layer1.2/relu_2/Relu_output_0_1" -> "88 /layer2/layer2.0/downsample/downsample.0/Conv" [label="[1, 256, 56, 56]", style=solid]; +"71 QuantizeLinear_onnx^^Conv_530_1" -> "72 DequantizeLinear_onnx^^Conv_530_1" [label="[128, 256, 1, 1]", style=dashed]; +"72 DequantizeLinear_onnx^^Conv_530_1" -> "73 /layer2/layer2.0/conv1/Conv" [label="[128, 256, 1, 1]", style=solid]; +"73 /layer2/layer2.0/conv1/Conv" -> "74 /layer2/layer2.0/relu/Relu" [label="[1, 128, 56, 56]", style=solid]; +"74 /layer2/layer2.0/relu/Relu" -> "75 QuantizeLinear_/layer2/layer2.0/relu/Relu_output_0_1" [label="[1, 128, 56, 56]", style=solid]; +"75 QuantizeLinear_/layer2/layer2.0/relu/Relu_output_0_1" -> "76 DequantizeLinear_/layer2/layer2.0/relu/Relu_output_0_1" [label="[1, 128, 56, 56]", style=dashed]; +"76 DequantizeLinear_/layer2/layer2.0/relu/Relu_output_0_1" -> "79 /layer2/layer2.0/conv2/Conv" [label="[1, 128, 56, 56]", style=solid]; +"77 QuantizeLinear_onnx^^Conv_533_1" -> "78 DequantizeLinear_onnx^^Conv_533_1" [label="[128, 128, 3, 3]", style=dashed]; +"78 DequantizeLinear_onnx^^Conv_533_1" -> "79 /layer2/layer2.0/conv2/Conv" [label="[128, 128, 3, 3]", style=solid]; +"79 /layer2/layer2.0/conv2/Conv" -> "80 /layer2/layer2.0/relu_1/Relu" [label="[1, 128, 28, 28]", style=solid]; +"80 /layer2/layer2.0/relu_1/Relu" -> "81 QuantizeLinear_/layer2/layer2.0/relu_1/Relu_output_0_1" [label="[1, 128, 28, 28]", style=solid]; +"81 QuantizeLinear_/layer2/layer2.0/relu_1/Relu_output_0_1" -> "82 DequantizeLinear_/layer2/layer2.0/relu_1/Relu_output_0_1" [label="[1, 128, 28, 28]", style=dashed]; +"82 DequantizeLinear_/layer2/layer2.0/relu_1/Relu_output_0_1" -> "85 /layer2/layer2.0/conv3/Conv" [label="[1, 128, 28, 28]", style=solid]; +"83 QuantizeLinear_onnx^^Conv_536_1" -> "84 DequantizeLinear_onnx^^Conv_536_1" [label="[512, 128, 1, 1]", style=dashed]; +"84 DequantizeLinear_onnx^^Conv_536_1" -> "85 /layer2/layer2.0/conv3/Conv" [label="[512, 128, 1, 1]", style=solid]; +"85 /layer2/layer2.0/conv3/Conv" -> "89 QuantizeLinear_/layer2/layer2.0/conv3/Conv_output_0_1" [label="[1, 512, 28, 28]", style=solid]; +"86 QuantizeLinear_onnx^^Conv_539_1" -> "87 DequantizeLinear_onnx^^Conv_539_1" [label="[512, 256, 1, 1]", style=dashed]; +"87 DequantizeLinear_onnx^^Conv_539_1" -> "88 /layer2/layer2.0/downsample/downsample.0/Conv" [label="[512, 256, 1, 1]", style=solid]; +"88 
/layer2/layer2.0/downsample/downsample.0/Conv" -> "91 QuantizeLinear_/layer2/layer2.0/downsample/downsample.0/Conv_output_0_1" [label="[1, 512, 28, 28]", style=solid]; +"89 QuantizeLinear_/layer2/layer2.0/conv3/Conv_output_0_1" -> "90 DequantizeLinear_/layer2/layer2.0/conv3/Conv_output_0_1" [label="[1, 512, 28, 28]", style=dashed]; +"90 DequantizeLinear_/layer2/layer2.0/conv3/Conv_output_0_1" -> "93 /layer2/layer2.0/Add" [label="[1, 512, 28, 28]", style=solid]; +"91 QuantizeLinear_/layer2/layer2.0/downsample/downsample.0/Conv_output_0_1" -> "92 DequantizeLinear_/layer2/layer2.0/downsample/downsample.0/Conv_output_0_1" [label="[1, 512, 28, 28]", style=dashed]; +"92 DequantizeLinear_/layer2/layer2.0/downsample/downsample.0/Conv_output_0_1" -> "93 /layer2/layer2.0/Add" [label="[1, 512, 28, 28]", style=solid]; +"93 /layer2/layer2.0/Add" -> "94 /layer2/layer2.0/relu_2/Relu" [label="[1, 512, 28, 28]", style=solid]; +"94 /layer2/layer2.0/relu_2/Relu" -> "95 QuantizeLinear_/layer2/layer2.0/relu_2/Relu_output_0_1" [label="[1, 512, 28, 28]", style=solid]; +"95 QuantizeLinear_/layer2/layer2.0/relu_2/Relu_output_0_1" -> "96 DequantizeLinear_/layer2/layer2.0/relu_2/Relu_output_0_1" [label="[1, 512, 28, 28]", style=dashed]; +"96 DequantizeLinear_/layer2/layer2.0/relu_2/Relu_output_0_1" -> "99 /layer2/layer2.1/conv1/Conv" [label="[1, 512, 28, 28]", style=solid]; +"96 DequantizeLinear_/layer2/layer2.0/relu_2/Relu_output_0_1" -> "112 /layer2/layer2.1/Add" [label="[1, 512, 28, 28]", style=solid]; +"97 QuantizeLinear_onnx^^Conv_542_1" -> "98 DequantizeLinear_onnx^^Conv_542_1" [label="[128, 512, 1, 1]", style=dashed]; +"98 DequantizeLinear_onnx^^Conv_542_1" -> "99 /layer2/layer2.1/conv1/Conv" [label="[128, 512, 1, 1]", style=solid]; +"99 /layer2/layer2.1/conv1/Conv" -> "100 /layer2/layer2.1/relu/Relu" [label="[1, 128, 28, 28]", style=solid]; +"100 /layer2/layer2.1/relu/Relu" -> "101 QuantizeLinear_/layer2/layer2.1/relu/Relu_output_0_1" [label="[1, 128, 28, 28]", style=solid]; +"101 QuantizeLinear_/layer2/layer2.1/relu/Relu_output_0_1" -> "102 DequantizeLinear_/layer2/layer2.1/relu/Relu_output_0_1" [label="[1, 128, 28, 28]", style=dashed]; +"102 DequantizeLinear_/layer2/layer2.1/relu/Relu_output_0_1" -> "105 /layer2/layer2.1/conv2/Conv" [label="[1, 128, 28, 28]", style=solid]; +"103 QuantizeLinear_onnx^^Conv_545_1" -> "104 DequantizeLinear_onnx^^Conv_545_1" [label="[128, 128, 3, 3]", style=dashed]; +"104 DequantizeLinear_onnx^^Conv_545_1" -> "105 /layer2/layer2.1/conv2/Conv" [label="[128, 128, 3, 3]", style=solid]; +"105 /layer2/layer2.1/conv2/Conv" -> "106 /layer2/layer2.1/relu_1/Relu" [label="[1, 128, 28, 28]", style=solid]; +"106 /layer2/layer2.1/relu_1/Relu" -> "107 QuantizeLinear_/layer2/layer2.1/relu_1/Relu_output_0_1" [label="[1, 128, 28, 28]", style=solid]; +"107 QuantizeLinear_/layer2/layer2.1/relu_1/Relu_output_0_1" -> "108 DequantizeLinear_/layer2/layer2.1/relu_1/Relu_output_0_1" [label="[1, 128, 28, 28]", style=dashed]; +"108 DequantizeLinear_/layer2/layer2.1/relu_1/Relu_output_0_1" -> "109 /layer2/layer2.1/conv3/Conv" [label="[1, 128, 28, 28]", style=solid]; +"109 /layer2/layer2.1/conv3/Conv" -> "110 QuantizeLinear_/layer2/layer2.1/conv3/Conv_output_0_1" [label="[1, 512, 28, 28]", style=solid]; +"110 QuantizeLinear_/layer2/layer2.1/conv3/Conv_output_0_1" -> "111 DequantizeLinear_/layer2/layer2.1/conv3/Conv_output_0_1" [label="[1, 512, 28, 28]", style=dashed]; +"111 DequantizeLinear_/layer2/layer2.1/conv3/Conv_output_0_1" -> "112 /layer2/layer2.1/Add" [label="[1, 512, 28, 28]", style=solid]; +"112 
/layer2/layer2.1/Add" -> "113 /layer2/layer2.1/relu_2/Relu" [label="[1, 512, 28, 28]", style=solid]; +"113 /layer2/layer2.1/relu_2/Relu" -> "114 QuantizeLinear_/layer2/layer2.1/relu_2/Relu_output_0_1" [label="[1, 512, 28, 28]", style=solid]; +"114 QuantizeLinear_/layer2/layer2.1/relu_2/Relu_output_0_1" -> "115 DequantizeLinear_/layer2/layer2.1/relu_2/Relu_output_0_1" [label="[1, 512, 28, 28]", style=dashed]; +"115 DequantizeLinear_/layer2/layer2.1/relu_2/Relu_output_0_1" -> "118 /layer2/layer2.2/conv1/Conv" [label="[1, 512, 28, 28]", style=solid]; +"115 DequantizeLinear_/layer2/layer2.1/relu_2/Relu_output_0_1" -> "131 /layer2/layer2.2/Add" [label="[1, 512, 28, 28]", style=solid]; +"116 QuantizeLinear_onnx^^Conv_551_1" -> "117 DequantizeLinear_onnx^^Conv_551_1" [label="[128, 512, 1, 1]", style=dashed]; +"117 DequantizeLinear_onnx^^Conv_551_1" -> "118 /layer2/layer2.2/conv1/Conv" [label="[128, 512, 1, 1]", style=solid]; +"118 /layer2/layer2.2/conv1/Conv" -> "119 /layer2/layer2.2/relu/Relu" [label="[1, 128, 28, 28]", style=solid]; +"119 /layer2/layer2.2/relu/Relu" -> "120 QuantizeLinear_/layer2/layer2.2/relu/Relu_output_0_1" [label="[1, 128, 28, 28]", style=solid]; +"120 QuantizeLinear_/layer2/layer2.2/relu/Relu_output_0_1" -> "121 DequantizeLinear_/layer2/layer2.2/relu/Relu_output_0_1" [label="[1, 128, 28, 28]", style=dashed]; +"121 DequantizeLinear_/layer2/layer2.2/relu/Relu_output_0_1" -> "124 /layer2/layer2.2/conv2/Conv" [label="[1, 128, 28, 28]", style=solid]; +"122 QuantizeLinear_onnx^^Conv_554_1" -> "123 DequantizeLinear_onnx^^Conv_554_1" [label="[128, 128, 3, 3]", style=dashed]; +"123 DequantizeLinear_onnx^^Conv_554_1" -> "124 /layer2/layer2.2/conv2/Conv" [label="[128, 128, 3, 3]", style=solid]; +"124 /layer2/layer2.2/conv2/Conv" -> "125 /layer2/layer2.2/relu_1/Relu" [label="[1, 128, 28, 28]", style=solid]; +"125 /layer2/layer2.2/relu_1/Relu" -> "126 QuantizeLinear_/layer2/layer2.2/relu_1/Relu_output_0_1" [label="[1, 128, 28, 28]", style=solid]; +"126 QuantizeLinear_/layer2/layer2.2/relu_1/Relu_output_0_1" -> "127 DequantizeLinear_/layer2/layer2.2/relu_1/Relu_output_0_1" [label="[1, 128, 28, 28]", style=dashed]; +"127 DequantizeLinear_/layer2/layer2.2/relu_1/Relu_output_0_1" -> "128 /layer2/layer2.2/conv3/Conv" [label="[1, 128, 28, 28]", style=solid]; +"128 /layer2/layer2.2/conv3/Conv" -> "129 QuantizeLinear_/layer2/layer2.2/conv3/Conv_output_0_1" [label="[1, 512, 28, 28]", style=solid]; +"129 QuantizeLinear_/layer2/layer2.2/conv3/Conv_output_0_1" -> "130 DequantizeLinear_/layer2/layer2.2/conv3/Conv_output_0_1" [label="[1, 512, 28, 28]", style=dashed]; +"130 DequantizeLinear_/layer2/layer2.2/conv3/Conv_output_0_1" -> "131 /layer2/layer2.2/Add" [label="[1, 512, 28, 28]", style=solid]; +"131 /layer2/layer2.2/Add" -> "132 /layer2/layer2.2/relu_2/Relu" [label="[1, 512, 28, 28]", style=solid]; +"132 /layer2/layer2.2/relu_2/Relu" -> "133 QuantizeLinear_/layer2/layer2.2/relu_2/Relu_output_0_1" [label="[1, 512, 28, 28]", style=solid]; +"133 QuantizeLinear_/layer2/layer2.2/relu_2/Relu_output_0_1" -> "134 DequantizeLinear_/layer2/layer2.2/relu_2/Relu_output_0_1" [label="[1, 512, 28, 28]", style=dashed]; +"134 DequantizeLinear_/layer2/layer2.2/relu_2/Relu_output_0_1" -> "137 /layer2/layer2.3/conv1/Conv" [label="[1, 512, 28, 28]", style=solid]; +"134 DequantizeLinear_/layer2/layer2.2/relu_2/Relu_output_0_1" -> "150 /layer2/layer2.3/Add" [label="[1, 512, 28, 28]", style=solid]; +"135 QuantizeLinear_onnx^^Conv_560_1" -> "136 DequantizeLinear_onnx^^Conv_560_1" [label="[128, 512, 1, 1]", 
style=dashed]; +"136 DequantizeLinear_onnx^^Conv_560_1" -> "137 /layer2/layer2.3/conv1/Conv" [label="[128, 512, 1, 1]", style=solid]; +"137 /layer2/layer2.3/conv1/Conv" -> "138 /layer2/layer2.3/relu/Relu" [label="[1, 128, 28, 28]", style=solid]; +"138 /layer2/layer2.3/relu/Relu" -> "139 QuantizeLinear_/layer2/layer2.3/relu/Relu_output_0_1" [label="[1, 128, 28, 28]", style=solid]; +"139 QuantizeLinear_/layer2/layer2.3/relu/Relu_output_0_1" -> "140 DequantizeLinear_/layer2/layer2.3/relu/Relu_output_0_1" [label="[1, 128, 28, 28]", style=dashed]; +"140 DequantizeLinear_/layer2/layer2.3/relu/Relu_output_0_1" -> "143 /layer2/layer2.3/conv2/Conv" [label="[1, 128, 28, 28]", style=solid]; +"141 QuantizeLinear_onnx^^Conv_563_1" -> "142 DequantizeLinear_onnx^^Conv_563_1" [label="[128, 128, 3, 3]", style=dashed]; +"142 DequantizeLinear_onnx^^Conv_563_1" -> "143 /layer2/layer2.3/conv2/Conv" [label="[128, 128, 3, 3]", style=solid]; +"143 /layer2/layer2.3/conv2/Conv" -> "144 /layer2/layer2.3/relu_1/Relu" [label="[1, 128, 28, 28]", style=solid]; +"144 /layer2/layer2.3/relu_1/Relu" -> "145 QuantizeLinear_/layer2/layer2.3/relu_1/Relu_output_0_1" [label="[1, 128, 28, 28]", style=solid]; +"145 QuantizeLinear_/layer2/layer2.3/relu_1/Relu_output_0_1" -> "146 DequantizeLinear_/layer2/layer2.3/relu_1/Relu_output_0_1" [label="[1, 128, 28, 28]", style=dashed]; +"146 DequantizeLinear_/layer2/layer2.3/relu_1/Relu_output_0_1" -> "147 /layer2/layer2.3/conv3/Conv" [label="[1, 128, 28, 28]", style=solid]; +"147 /layer2/layer2.3/conv3/Conv" -> "148 QuantizeLinear_/layer2/layer2.3/conv3/Conv_output_0_1" [label="[1, 512, 28, 28]", style=solid]; +"148 QuantizeLinear_/layer2/layer2.3/conv3/Conv_output_0_1" -> "149 DequantizeLinear_/layer2/layer2.3/conv3/Conv_output_0_1" [label="[1, 512, 28, 28]", style=dashed]; +"149 DequantizeLinear_/layer2/layer2.3/conv3/Conv_output_0_1" -> "150 /layer2/layer2.3/Add" [label="[1, 512, 28, 28]", style=solid]; +"150 /layer2/layer2.3/Add" -> "151 /layer2/layer2.3/relu_2/Relu" [label="[1, 512, 28, 28]", style=solid]; +"151 /layer2/layer2.3/relu_2/Relu" -> "152 QuantizeLinear_/layer2/layer2.3/relu_2/Relu_output_0_1" [label="[1, 512, 28, 28]", style=solid]; +"152 QuantizeLinear_/layer2/layer2.3/relu_2/Relu_output_0_1" -> "153 DequantizeLinear_/layer2/layer2.3/relu_2/Relu_output_0_1" [label="[1, 512, 28, 28]", style=dashed]; +"153 DequantizeLinear_/layer2/layer2.3/relu_2/Relu_output_0_1" -> "156 /layer3/layer3.0/conv1/Conv" [label="[1, 512, 28, 28]", style=solid]; +"153 DequantizeLinear_/layer2/layer2.3/relu_2/Relu_output_0_1" -> "171 /layer3/layer3.0/downsample/downsample.0/Conv" [label="[1, 512, 28, 28]", style=solid]; +"154 QuantizeLinear_onnx^^Conv_569_1" -> "155 DequantizeLinear_onnx^^Conv_569_1" [label="[256, 512, 1, 1]", style=dashed]; +"155 DequantizeLinear_onnx^^Conv_569_1" -> "156 /layer3/layer3.0/conv1/Conv" [label="[256, 512, 1, 1]", style=solid]; +"156 /layer3/layer3.0/conv1/Conv" -> "157 /layer3/layer3.0/relu/Relu" [label="[1, 256, 28, 28]", style=solid]; +"157 /layer3/layer3.0/relu/Relu" -> "158 QuantizeLinear_/layer3/layer3.0/relu/Relu_output_0_1" [label="[1, 256, 28, 28]", style=solid]; +"158 QuantizeLinear_/layer3/layer3.0/relu/Relu_output_0_1" -> "159 DequantizeLinear_/layer3/layer3.0/relu/Relu_output_0_1" [label="[1, 256, 28, 28]", style=dashed]; +"159 DequantizeLinear_/layer3/layer3.0/relu/Relu_output_0_1" -> "162 /layer3/layer3.0/conv2/Conv" [label="[1, 256, 28, 28]", style=solid]; +"160 QuantizeLinear_onnx^^Conv_572_1" -> "161 DequantizeLinear_onnx^^Conv_572_1" [label="[256, 
256, 3, 3]", style=dashed]; +"161 DequantizeLinear_onnx^^Conv_572_1" -> "162 /layer3/layer3.0/conv2/Conv" [label="[256, 256, 3, 3]", style=solid]; +"162 /layer3/layer3.0/conv2/Conv" -> "163 /layer3/layer3.0/relu_1/Relu" [label="[1, 256, 14, 14]", style=solid]; +"163 /layer3/layer3.0/relu_1/Relu" -> "164 QuantizeLinear_/layer3/layer3.0/relu_1/Relu_output_0_1" [label="[1, 256, 14, 14]", style=solid]; +"164 QuantizeLinear_/layer3/layer3.0/relu_1/Relu_output_0_1" -> "165 DequantizeLinear_/layer3/layer3.0/relu_1/Relu_output_0_1" [label="[1, 256, 14, 14]", style=dashed]; +"165 DequantizeLinear_/layer3/layer3.0/relu_1/Relu_output_0_1" -> "168 /layer3/layer3.0/conv3/Conv" [label="[1, 256, 14, 14]", style=solid]; +"166 QuantizeLinear_onnx^^Conv_575_1" -> "167 DequantizeLinear_onnx^^Conv_575_1" [label="[1024, 256, 1, 1]", style=dashed]; +"167 DequantizeLinear_onnx^^Conv_575_1" -> "168 /layer3/layer3.0/conv3/Conv" [label="[1024, 256, 1, 1]", style=solid]; +"168 /layer3/layer3.0/conv3/Conv" -> "172 QuantizeLinear_/layer3/layer3.0/conv3/Conv_output_0_1" [label="[1, 1024, 14, 14]", style=solid]; +"169 QuantizeLinear_onnx^^Conv_578_1" -> "170 DequantizeLinear_onnx^^Conv_578_1" [label="[1024, 512, 1, 1]", style=dashed]; +"170 DequantizeLinear_onnx^^Conv_578_1" -> "171 /layer3/layer3.0/downsample/downsample.0/Conv" [label="[1024, 512, 1, 1]", style=solid]; +"171 /layer3/layer3.0/downsample/downsample.0/Conv" -> "174 QuantizeLinear_/layer3/layer3.0/downsample/downsample.0/Conv_output_0_1" [label="[1, 1024, 14, 14]", style=solid]; +"172 QuantizeLinear_/layer3/layer3.0/conv3/Conv_output_0_1" -> "173 DequantizeLinear_/layer3/layer3.0/conv3/Conv_output_0_1" [label="[1, 1024, 14, 14]", style=dashed]; +"173 DequantizeLinear_/layer3/layer3.0/conv3/Conv_output_0_1" -> "176 /layer3/layer3.0/Add" [label="[1, 1024, 14, 14]", style=solid]; +"174 QuantizeLinear_/layer3/layer3.0/downsample/downsample.0/Conv_output_0_1" -> "175 DequantizeLinear_/layer3/layer3.0/downsample/downsample.0/Conv_output_0_1" [label="[1, 1024, 14, 14]", style=dashed]; +"175 DequantizeLinear_/layer3/layer3.0/downsample/downsample.0/Conv_output_0_1" -> "176 /layer3/layer3.0/Add" [label="[1, 1024, 14, 14]", style=solid]; +"176 /layer3/layer3.0/Add" -> "177 /layer3/layer3.0/relu_2/Relu" [label="[1, 1024, 14, 14]", style=solid]; +"177 /layer3/layer3.0/relu_2/Relu" -> "178 QuantizeLinear_/layer3/layer3.0/relu_2/Relu_output_0_1" [label="[1, 1024, 14, 14]", style=solid]; +"178 QuantizeLinear_/layer3/layer3.0/relu_2/Relu_output_0_1" -> "179 DequantizeLinear_/layer3/layer3.0/relu_2/Relu_output_0_1" [label="[1, 1024, 14, 14]", style=dashed]; +"179 DequantizeLinear_/layer3/layer3.0/relu_2/Relu_output_0_1" -> "182 /layer3/layer3.1/conv1/Conv" [label="[1, 1024, 14, 14]", style=solid]; +"179 DequantizeLinear_/layer3/layer3.0/relu_2/Relu_output_0_1" -> "195 /layer3/layer3.1/Add" [label="[1, 1024, 14, 14]", style=solid]; +"180 QuantizeLinear_onnx^^Conv_581_1" -> "181 DequantizeLinear_onnx^^Conv_581_1" [label="[256, 1024, 1, 1]", style=dashed]; +"181 DequantizeLinear_onnx^^Conv_581_1" -> "182 /layer3/layer3.1/conv1/Conv" [label="[256, 1024, 1, 1]", style=solid]; +"182 /layer3/layer3.1/conv1/Conv" -> "183 /layer3/layer3.1/relu/Relu" [label="[1, 256, 14, 14]", style=solid]; +"183 /layer3/layer3.1/relu/Relu" -> "184 QuantizeLinear_/layer3/layer3.1/relu/Relu_output_0_1" [label="[1, 256, 14, 14]", style=solid]; +"184 QuantizeLinear_/layer3/layer3.1/relu/Relu_output_0_1" -> "185 DequantizeLinear_/layer3/layer3.1/relu/Relu_output_0_1" [label="[1, 256, 14, 14]", 
style=dashed]; +"185 DequantizeLinear_/layer3/layer3.1/relu/Relu_output_0_1" -> "188 /layer3/layer3.1/conv2/Conv" [label="[1, 256, 14, 14]", style=solid]; +"186 QuantizeLinear_onnx^^Conv_584_1" -> "187 DequantizeLinear_onnx^^Conv_584_1" [label="[256, 256, 3, 3]", style=dashed]; +"187 DequantizeLinear_onnx^^Conv_584_1" -> "188 /layer3/layer3.1/conv2/Conv" [label="[256, 256, 3, 3]", style=solid]; +"188 /layer3/layer3.1/conv2/Conv" -> "189 /layer3/layer3.1/relu_1/Relu" [label="[1, 256, 14, 14]", style=solid]; +"189 /layer3/layer3.1/relu_1/Relu" -> "190 QuantizeLinear_/layer3/layer3.1/relu_1/Relu_output_0_1" [label="[1, 256, 14, 14]", style=solid]; +"190 QuantizeLinear_/layer3/layer3.1/relu_1/Relu_output_0_1" -> "191 DequantizeLinear_/layer3/layer3.1/relu_1/Relu_output_0_1" [label="[1, 256, 14, 14]", style=dashed]; +"191 DequantizeLinear_/layer3/layer3.1/relu_1/Relu_output_0_1" -> "192 /layer3/layer3.1/conv3/Conv" [label="[1, 256, 14, 14]", style=solid]; +"192 /layer3/layer3.1/conv3/Conv" -> "193 QuantizeLinear_/layer3/layer3.1/conv3/Conv_output_0_1" [label="[1, 1024, 14, 14]", style=solid]; +"193 QuantizeLinear_/layer3/layer3.1/conv3/Conv_output_0_1" -> "194 DequantizeLinear_/layer3/layer3.1/conv3/Conv_output_0_1" [label="[1, 1024, 14, 14]", style=dashed]; +"194 DequantizeLinear_/layer3/layer3.1/conv3/Conv_output_0_1" -> "195 /layer3/layer3.1/Add" [label="[1, 1024, 14, 14]", style=solid]; +"195 /layer3/layer3.1/Add" -> "196 /layer3/layer3.1/relu_2/Relu" [label="[1, 1024, 14, 14]", style=solid]; +"196 /layer3/layer3.1/relu_2/Relu" -> "197 QuantizeLinear_/layer3/layer3.1/relu_2/Relu_output_0_1" [label="[1, 1024, 14, 14]", style=solid]; +"197 QuantizeLinear_/layer3/layer3.1/relu_2/Relu_output_0_1" -> "198 DequantizeLinear_/layer3/layer3.1/relu_2/Relu_output_0_1" [label="[1, 1024, 14, 14]", style=dashed]; +"198 DequantizeLinear_/layer3/layer3.1/relu_2/Relu_output_0_1" -> "201 /layer3/layer3.2/conv1/Conv" [label="[1, 1024, 14, 14]", style=solid]; +"198 DequantizeLinear_/layer3/layer3.1/relu_2/Relu_output_0_1" -> "214 /layer3/layer3.2/Add" [label="[1, 1024, 14, 14]", style=solid]; +"199 QuantizeLinear_onnx^^Conv_590_1" -> "200 DequantizeLinear_onnx^^Conv_590_1" [label="[256, 1024, 1, 1]", style=dashed]; +"200 DequantizeLinear_onnx^^Conv_590_1" -> "201 /layer3/layer3.2/conv1/Conv" [label="[256, 1024, 1, 1]", style=solid]; +"201 /layer3/layer3.2/conv1/Conv" -> "202 /layer3/layer3.2/relu/Relu" [label="[1, 256, 14, 14]", style=solid]; +"202 /layer3/layer3.2/relu/Relu" -> "203 QuantizeLinear_/layer3/layer3.2/relu/Relu_output_0_1" [label="[1, 256, 14, 14]", style=solid]; +"203 QuantizeLinear_/layer3/layer3.2/relu/Relu_output_0_1" -> "204 DequantizeLinear_/layer3/layer3.2/relu/Relu_output_0_1" [label="[1, 256, 14, 14]", style=dashed]; +"204 DequantizeLinear_/layer3/layer3.2/relu/Relu_output_0_1" -> "207 /layer3/layer3.2/conv2/Conv" [label="[1, 256, 14, 14]", style=solid]; +"205 QuantizeLinear_onnx^^Conv_593_1" -> "206 DequantizeLinear_onnx^^Conv_593_1" [label="[256, 256, 3, 3]", style=dashed]; +"206 DequantizeLinear_onnx^^Conv_593_1" -> "207 /layer3/layer3.2/conv2/Conv" [label="[256, 256, 3, 3]", style=solid]; +"207 /layer3/layer3.2/conv2/Conv" -> "208 /layer3/layer3.2/relu_1/Relu" [label="[1, 256, 14, 14]", style=solid]; +"208 /layer3/layer3.2/relu_1/Relu" -> "209 QuantizeLinear_/layer3/layer3.2/relu_1/Relu_output_0_1" [label="[1, 256, 14, 14]", style=solid]; +"209 QuantizeLinear_/layer3/layer3.2/relu_1/Relu_output_0_1" -> "210 DequantizeLinear_/layer3/layer3.2/relu_1/Relu_output_0_1" [label="[1, 256, 
14, 14]", style=dashed]; +"210 DequantizeLinear_/layer3/layer3.2/relu_1/Relu_output_0_1" -> "211 /layer3/layer3.2/conv3/Conv" [label="[1, 256, 14, 14]", style=solid]; +"211 /layer3/layer3.2/conv3/Conv" -> "212 QuantizeLinear_/layer3/layer3.2/conv3/Conv_output_0_1" [label="[1, 1024, 14, 14]", style=solid]; +"212 QuantizeLinear_/layer3/layer3.2/conv3/Conv_output_0_1" -> "213 DequantizeLinear_/layer3/layer3.2/conv3/Conv_output_0_1" [label="[1, 1024, 14, 14]", style=dashed]; +"213 DequantizeLinear_/layer3/layer3.2/conv3/Conv_output_0_1" -> "214 /layer3/layer3.2/Add" [label="[1, 1024, 14, 14]", style=solid]; +"214 /layer3/layer3.2/Add" -> "215 /layer3/layer3.2/relu_2/Relu" [label="[1, 1024, 14, 14]", style=solid]; +"215 /layer3/layer3.2/relu_2/Relu" -> "216 QuantizeLinear_/layer3/layer3.2/relu_2/Relu_output_0_1" [label="[1, 1024, 14, 14]", style=solid]; +"216 QuantizeLinear_/layer3/layer3.2/relu_2/Relu_output_0_1" -> "217 DequantizeLinear_/layer3/layer3.2/relu_2/Relu_output_0_1" [label="[1, 1024, 14, 14]", style=dashed]; +"217 DequantizeLinear_/layer3/layer3.2/relu_2/Relu_output_0_1" -> "220 /layer3/layer3.3/conv1/Conv" [label="[1, 1024, 14, 14]", style=solid]; +"217 DequantizeLinear_/layer3/layer3.2/relu_2/Relu_output_0_1" -> "233 /layer3/layer3.3/Add" [label="[1, 1024, 14, 14]", style=solid]; +"218 QuantizeLinear_onnx^^Conv_599_1" -> "219 DequantizeLinear_onnx^^Conv_599_1" [label="[256, 1024, 1, 1]", style=dashed]; +"219 DequantizeLinear_onnx^^Conv_599_1" -> "220 /layer3/layer3.3/conv1/Conv" [label="[256, 1024, 1, 1]", style=solid]; +"220 /layer3/layer3.3/conv1/Conv" -> "221 /layer3/layer3.3/relu/Relu" [label="[1, 256, 14, 14]", style=solid]; +"221 /layer3/layer3.3/relu/Relu" -> "222 QuantizeLinear_/layer3/layer3.3/relu/Relu_output_0_1" [label="[1, 256, 14, 14]", style=solid]; +"222 QuantizeLinear_/layer3/layer3.3/relu/Relu_output_0_1" -> "223 DequantizeLinear_/layer3/layer3.3/relu/Relu_output_0_1" [label="[1, 256, 14, 14]", style=dashed]; +"223 DequantizeLinear_/layer3/layer3.3/relu/Relu_output_0_1" -> "226 /layer3/layer3.3/conv2/Conv" [label="[1, 256, 14, 14]", style=solid]; +"224 QuantizeLinear_onnx^^Conv_602_1" -> "225 DequantizeLinear_onnx^^Conv_602_1" [label="[256, 256, 3, 3]", style=dashed]; +"225 DequantizeLinear_onnx^^Conv_602_1" -> "226 /layer3/layer3.3/conv2/Conv" [label="[256, 256, 3, 3]", style=solid]; +"226 /layer3/layer3.3/conv2/Conv" -> "227 /layer3/layer3.3/relu_1/Relu" [label="[1, 256, 14, 14]", style=solid]; +"227 /layer3/layer3.3/relu_1/Relu" -> "228 QuantizeLinear_/layer3/layer3.3/relu_1/Relu_output_0_1" [label="[1, 256, 14, 14]", style=solid]; +"228 QuantizeLinear_/layer3/layer3.3/relu_1/Relu_output_0_1" -> "229 DequantizeLinear_/layer3/layer3.3/relu_1/Relu_output_0_1" [label="[1, 256, 14, 14]", style=dashed]; +"229 DequantizeLinear_/layer3/layer3.3/relu_1/Relu_output_0_1" -> "230 /layer3/layer3.3/conv3/Conv" [label="[1, 256, 14, 14]", style=solid]; +"230 /layer3/layer3.3/conv3/Conv" -> "231 QuantizeLinear_/layer3/layer3.3/conv3/Conv_output_0_1" [label="[1, 1024, 14, 14]", style=solid]; +"231 QuantizeLinear_/layer3/layer3.3/conv3/Conv_output_0_1" -> "232 DequantizeLinear_/layer3/layer3.3/conv3/Conv_output_0_1" [label="[1, 1024, 14, 14]", style=dashed]; +"232 DequantizeLinear_/layer3/layer3.3/conv3/Conv_output_0_1" -> "233 /layer3/layer3.3/Add" [label="[1, 1024, 14, 14]", style=solid]; +"233 /layer3/layer3.3/Add" -> "234 /layer3/layer3.3/relu_2/Relu" [label="[1, 1024, 14, 14]", style=solid]; +"234 /layer3/layer3.3/relu_2/Relu" -> "235 
QuantizeLinear_/layer3/layer3.3/relu_2/Relu_output_0_1" [label="[1, 1024, 14, 14]", style=solid]; +"235 QuantizeLinear_/layer3/layer3.3/relu_2/Relu_output_0_1" -> "236 DequantizeLinear_/layer3/layer3.3/relu_2/Relu_output_0_1" [label="[1, 1024, 14, 14]", style=dashed]; +"236 DequantizeLinear_/layer3/layer3.3/relu_2/Relu_output_0_1" -> "239 /layer3/layer3.4/conv1/Conv" [label="[1, 1024, 14, 14]", style=solid]; +"236 DequantizeLinear_/layer3/layer3.3/relu_2/Relu_output_0_1" -> "252 /layer3/layer3.4/Add" [label="[1, 1024, 14, 14]", style=solid]; +"237 QuantizeLinear_onnx^^Conv_608_1" -> "238 DequantizeLinear_onnx^^Conv_608_1" [label="[256, 1024, 1, 1]", style=dashed]; +"238 DequantizeLinear_onnx^^Conv_608_1" -> "239 /layer3/layer3.4/conv1/Conv" [label="[256, 1024, 1, 1]", style=solid]; +"239 /layer3/layer3.4/conv1/Conv" -> "240 /layer3/layer3.4/relu/Relu" [label="[1, 256, 14, 14]", style=solid]; +"240 /layer3/layer3.4/relu/Relu" -> "241 QuantizeLinear_/layer3/layer3.4/relu/Relu_output_0_1" [label="[1, 256, 14, 14]", style=solid]; +"241 QuantizeLinear_/layer3/layer3.4/relu/Relu_output_0_1" -> "242 DequantizeLinear_/layer3/layer3.4/relu/Relu_output_0_1" [label="[1, 256, 14, 14]", style=dashed]; +"242 DequantizeLinear_/layer3/layer3.4/relu/Relu_output_0_1" -> "245 /layer3/layer3.4/conv2/Conv" [label="[1, 256, 14, 14]", style=solid]; +"243 QuantizeLinear_onnx^^Conv_611_1" -> "244 DequantizeLinear_onnx^^Conv_611_1" [label="[256, 256, 3, 3]", style=dashed]; +"244 DequantizeLinear_onnx^^Conv_611_1" -> "245 /layer3/layer3.4/conv2/Conv" [label="[256, 256, 3, 3]", style=solid]; +"245 /layer3/layer3.4/conv2/Conv" -> "246 /layer3/layer3.4/relu_1/Relu" [label="[1, 256, 14, 14]", style=solid]; +"246 /layer3/layer3.4/relu_1/Relu" -> "247 QuantizeLinear_/layer3/layer3.4/relu_1/Relu_output_0_1" [label="[1, 256, 14, 14]", style=solid]; +"247 QuantizeLinear_/layer3/layer3.4/relu_1/Relu_output_0_1" -> "248 DequantizeLinear_/layer3/layer3.4/relu_1/Relu_output_0_1" [label="[1, 256, 14, 14]", style=dashed]; +"248 DequantizeLinear_/layer3/layer3.4/relu_1/Relu_output_0_1" -> "249 /layer3/layer3.4/conv3/Conv" [label="[1, 256, 14, 14]", style=solid]; +"249 /layer3/layer3.4/conv3/Conv" -> "250 QuantizeLinear_/layer3/layer3.4/conv3/Conv_output_0_1" [label="[1, 1024, 14, 14]", style=solid]; +"250 QuantizeLinear_/layer3/layer3.4/conv3/Conv_output_0_1" -> "251 DequantizeLinear_/layer3/layer3.4/conv3/Conv_output_0_1" [label="[1, 1024, 14, 14]", style=dashed]; +"251 DequantizeLinear_/layer3/layer3.4/conv3/Conv_output_0_1" -> "252 /layer3/layer3.4/Add" [label="[1, 1024, 14, 14]", style=solid]; +"252 /layer3/layer3.4/Add" -> "253 /layer3/layer3.4/relu_2/Relu" [label="[1, 1024, 14, 14]", style=solid]; +"253 /layer3/layer3.4/relu_2/Relu" -> "254 QuantizeLinear_/layer3/layer3.4/relu_2/Relu_output_0_1" [label="[1, 1024, 14, 14]", style=solid]; +"254 QuantizeLinear_/layer3/layer3.4/relu_2/Relu_output_0_1" -> "255 DequantizeLinear_/layer3/layer3.4/relu_2/Relu_output_0_1" [label="[1, 1024, 14, 14]", style=dashed]; +"255 DequantizeLinear_/layer3/layer3.4/relu_2/Relu_output_0_1" -> "258 /layer3/layer3.5/conv1/Conv" [label="[1, 1024, 14, 14]", style=solid]; +"255 DequantizeLinear_/layer3/layer3.4/relu_2/Relu_output_0_1" -> "271 /layer3/layer3.5/Add" [label="[1, 1024, 14, 14]", style=solid]; +"256 QuantizeLinear_onnx^^Conv_617_1" -> "257 DequantizeLinear_onnx^^Conv_617_1" [label="[256, 1024, 1, 1]", style=dashed]; +"257 DequantizeLinear_onnx^^Conv_617_1" -> "258 /layer3/layer3.5/conv1/Conv" [label="[256, 1024, 1, 1]", style=solid]; +"258 
/layer3/layer3.5/conv1/Conv" -> "259 /layer3/layer3.5/relu/Relu" [label="[1, 256, 14, 14]", style=solid]; +"259 /layer3/layer3.5/relu/Relu" -> "260 QuantizeLinear_/layer3/layer3.5/relu/Relu_output_0_1" [label="[1, 256, 14, 14]", style=solid]; +"260 QuantizeLinear_/layer3/layer3.5/relu/Relu_output_0_1" -> "261 DequantizeLinear_/layer3/layer3.5/relu/Relu_output_0_1" [label="[1, 256, 14, 14]", style=dashed]; +"261 DequantizeLinear_/layer3/layer3.5/relu/Relu_output_0_1" -> "264 /layer3/layer3.5/conv2/Conv" [label="[1, 256, 14, 14]", style=solid]; +"262 QuantizeLinear_onnx^^Conv_620_1" -> "263 DequantizeLinear_onnx^^Conv_620_1" [label="[256, 256, 3, 3]", style=dashed]; +"263 DequantizeLinear_onnx^^Conv_620_1" -> "264 /layer3/layer3.5/conv2/Conv" [label="[256, 256, 3, 3]", style=solid]; +"264 /layer3/layer3.5/conv2/Conv" -> "265 /layer3/layer3.5/relu_1/Relu" [label="[1, 256, 14, 14]", style=solid]; +"265 /layer3/layer3.5/relu_1/Relu" -> "266 QuantizeLinear_/layer3/layer3.5/relu_1/Relu_output_0_1" [label="[1, 256, 14, 14]", style=solid]; +"266 QuantizeLinear_/layer3/layer3.5/relu_1/Relu_output_0_1" -> "267 DequantizeLinear_/layer3/layer3.5/relu_1/Relu_output_0_1" [label="[1, 256, 14, 14]", style=dashed]; +"267 DequantizeLinear_/layer3/layer3.5/relu_1/Relu_output_0_1" -> "268 /layer3/layer3.5/conv3/Conv" [label="[1, 256, 14, 14]", style=solid]; +"268 /layer3/layer3.5/conv3/Conv" -> "269 QuantizeLinear_/layer3/layer3.5/conv3/Conv_output_0_1" [label="[1, 1024, 14, 14]", style=solid]; +"269 QuantizeLinear_/layer3/layer3.5/conv3/Conv_output_0_1" -> "270 DequantizeLinear_/layer3/layer3.5/conv3/Conv_output_0_1" [label="[1, 1024, 14, 14]", style=dashed]; +"270 DequantizeLinear_/layer3/layer3.5/conv3/Conv_output_0_1" -> "271 /layer3/layer3.5/Add" [label="[1, 1024, 14, 14]", style=solid]; +"271 /layer3/layer3.5/Add" -> "272 /layer3/layer3.5/relu_2/Relu" [label="[1, 1024, 14, 14]", style=solid]; +"272 /layer3/layer3.5/relu_2/Relu" -> "273 QuantizeLinear_/layer3/layer3.5/relu_2/Relu_output_0_1" [label="[1, 1024, 14, 14]", style=solid]; +"273 QuantizeLinear_/layer3/layer3.5/relu_2/Relu_output_0_1" -> "274 DequantizeLinear_/layer3/layer3.5/relu_2/Relu_output_0_1" [label="[1, 1024, 14, 14]", style=dashed]; +"274 DequantizeLinear_/layer3/layer3.5/relu_2/Relu_output_0_1" -> "277 /layer4/layer4.0/conv1/Conv" [label="[1, 1024, 14, 14]", style=solid]; +"274 DequantizeLinear_/layer3/layer3.5/relu_2/Relu_output_0_1" -> "292 /layer4/layer4.0/downsample/downsample.0/Conv" [label="[1, 1024, 14, 14]", style=solid]; +"275 QuantizeLinear_onnx^^Conv_626_1" -> "276 DequantizeLinear_onnx^^Conv_626_1" [label="[512, 1024, 1, 1]", style=dashed]; +"276 DequantizeLinear_onnx^^Conv_626_1" -> "277 /layer4/layer4.0/conv1/Conv" [label="[512, 1024, 1, 1]", style=solid]; +"277 /layer4/layer4.0/conv1/Conv" -> "278 /layer4/layer4.0/relu/Relu" [label="[1, 512, 14, 14]", style=solid]; +"278 /layer4/layer4.0/relu/Relu" -> "279 QuantizeLinear_/layer4/layer4.0/relu/Relu_output_0_1" [label="[1, 512, 14, 14]", style=solid]; +"279 QuantizeLinear_/layer4/layer4.0/relu/Relu_output_0_1" -> "280 DequantizeLinear_/layer4/layer4.0/relu/Relu_output_0_1" [label="[1, 512, 14, 14]", style=dashed]; +"280 DequantizeLinear_/layer4/layer4.0/relu/Relu_output_0_1" -> "283 /layer4/layer4.0/conv2/Conv" [label="[1, 512, 14, 14]", style=solid]; +"281 QuantizeLinear_onnx^^Conv_629_1" -> "282 DequantizeLinear_onnx^^Conv_629_1" [label="[512, 512, 3, 3]", style=dashed]; +"282 DequantizeLinear_onnx^^Conv_629_1" -> "283 /layer4/layer4.0/conv2/Conv" [label="[512, 512, 3, 
3]", style=solid]; +"283 /layer4/layer4.0/conv2/Conv" -> "284 /layer4/layer4.0/relu_1/Relu" [label="[1, 512, 7, 7]", style=solid]; +"284 /layer4/layer4.0/relu_1/Relu" -> "285 QuantizeLinear_/layer4/layer4.0/relu_1/Relu_output_0_1" [label="[1, 512, 7, 7]", style=solid]; +"285 QuantizeLinear_/layer4/layer4.0/relu_1/Relu_output_0_1" -> "286 DequantizeLinear_/layer4/layer4.0/relu_1/Relu_output_0_1" [label="[1, 512, 7, 7]", style=dashed]; +"286 DequantizeLinear_/layer4/layer4.0/relu_1/Relu_output_0_1" -> "289 /layer4/layer4.0/conv3/Conv" [label="[1, 512, 7, 7]", style=solid]; +"287 QuantizeLinear_onnx^^Conv_632_1" -> "288 DequantizeLinear_onnx^^Conv_632_1" [label="[2048, 512, 1, 1]", style=dashed]; +"288 DequantizeLinear_onnx^^Conv_632_1" -> "289 /layer4/layer4.0/conv3/Conv" [label="[2048, 512, 1, 1]", style=solid]; +"289 /layer4/layer4.0/conv3/Conv" -> "293 QuantizeLinear_/layer4/layer4.0/conv3/Conv_output_0_1" [label="[1, 2048, 7, 7]", style=solid]; +"290 QuantizeLinear_onnx^^Conv_635_1" -> "291 DequantizeLinear_onnx^^Conv_635_1" [label="[2048, 1024, 1, 1]", style=dashed]; +"291 DequantizeLinear_onnx^^Conv_635_1" -> "292 /layer4/layer4.0/downsample/downsample.0/Conv" [label="[2048, 1024, 1, 1]", style=solid]; +"292 /layer4/layer4.0/downsample/downsample.0/Conv" -> "295 QuantizeLinear_/layer4/layer4.0/downsample/downsample.0/Conv_output_0_1" [label="[1, 2048, 7, 7]", style=solid]; +"293 QuantizeLinear_/layer4/layer4.0/conv3/Conv_output_0_1" -> "294 DequantizeLinear_/layer4/layer4.0/conv3/Conv_output_0_1" [label="[1, 2048, 7, 7]", style=dashed]; +"294 DequantizeLinear_/layer4/layer4.0/conv3/Conv_output_0_1" -> "297 /layer4/layer4.0/Add" [label="[1, 2048, 7, 7]", style=solid]; +"295 QuantizeLinear_/layer4/layer4.0/downsample/downsample.0/Conv_output_0_1" -> "296 DequantizeLinear_/layer4/layer4.0/downsample/downsample.0/Conv_output_0_1" [label="[1, 2048, 7, 7]", style=dashed]; +"296 DequantizeLinear_/layer4/layer4.0/downsample/downsample.0/Conv_output_0_1" -> "297 /layer4/layer4.0/Add" [label="[1, 2048, 7, 7]", style=solid]; +"297 /layer4/layer4.0/Add" -> "298 /layer4/layer4.0/relu_2/Relu" [label="[1, 2048, 7, 7]", style=solid]; +"298 /layer4/layer4.0/relu_2/Relu" -> "299 QuantizeLinear_/layer4/layer4.0/relu_2/Relu_output_0_1" [label="[1, 2048, 7, 7]", style=solid]; +"299 QuantizeLinear_/layer4/layer4.0/relu_2/Relu_output_0_1" -> "300 DequantizeLinear_/layer4/layer4.0/relu_2/Relu_output_0_1" [label="[1, 2048, 7, 7]", style=dashed]; +"300 DequantizeLinear_/layer4/layer4.0/relu_2/Relu_output_0_1" -> "303 /layer4/layer4.1/conv1/Conv" [label="[1, 2048, 7, 7]", style=solid]; +"300 DequantizeLinear_/layer4/layer4.0/relu_2/Relu_output_0_1" -> "316 /layer4/layer4.1/Add" [label="[1, 2048, 7, 7]", style=solid]; +"301 QuantizeLinear_onnx^^Conv_638_1" -> "302 DequantizeLinear_onnx^^Conv_638_1" [label="[512, 2048, 1, 1]", style=dashed]; +"302 DequantizeLinear_onnx^^Conv_638_1" -> "303 /layer4/layer4.1/conv1/Conv" [label="[512, 2048, 1, 1]", style=solid]; +"303 /layer4/layer4.1/conv1/Conv" -> "304 /layer4/layer4.1/relu/Relu" [label="[1, 512, 7, 7]", style=solid]; +"304 /layer4/layer4.1/relu/Relu" -> "305 QuantizeLinear_/layer4/layer4.1/relu/Relu_output_0_1" [label="[1, 512, 7, 7]", style=solid]; +"305 QuantizeLinear_/layer4/layer4.1/relu/Relu_output_0_1" -> "306 DequantizeLinear_/layer4/layer4.1/relu/Relu_output_0_1" [label="[1, 512, 7, 7]", style=dashed]; +"306 DequantizeLinear_/layer4/layer4.1/relu/Relu_output_0_1" -> "309 /layer4/layer4.1/conv2/Conv" [label="[1, 512, 7, 7]", style=solid]; +"307 
QuantizeLinear_onnx^^Conv_641_1" -> "308 DequantizeLinear_onnx^^Conv_641_1" [label="[512, 512, 3, 3]", style=dashed]; +"308 DequantizeLinear_onnx^^Conv_641_1" -> "309 /layer4/layer4.1/conv2/Conv" [label="[512, 512, 3, 3]", style=solid]; +"309 /layer4/layer4.1/conv2/Conv" -> "310 /layer4/layer4.1/relu_1/Relu" [label="[1, 512, 7, 7]", style=solid]; +"310 /layer4/layer4.1/relu_1/Relu" -> "311 QuantizeLinear_/layer4/layer4.1/relu_1/Relu_output_0_1" [label="[1, 512, 7, 7]", style=solid]; +"311 QuantizeLinear_/layer4/layer4.1/relu_1/Relu_output_0_1" -> "312 DequantizeLinear_/layer4/layer4.1/relu_1/Relu_output_0_1" [label="[1, 512, 7, 7]", style=dashed]; +"312 DequantizeLinear_/layer4/layer4.1/relu_1/Relu_output_0_1" -> "313 /layer4/layer4.1/conv3/Conv" [label="[1, 512, 7, 7]", style=solid]; +"313 /layer4/layer4.1/conv3/Conv" -> "314 QuantizeLinear_/layer4/layer4.1/conv3/Conv_output_0_1" [label="[1, 2048, 7, 7]", style=solid]; +"314 QuantizeLinear_/layer4/layer4.1/conv3/Conv_output_0_1" -> "315 DequantizeLinear_/layer4/layer4.1/conv3/Conv_output_0_1" [label="[1, 2048, 7, 7]", style=dashed]; +"315 DequantizeLinear_/layer4/layer4.1/conv3/Conv_output_0_1" -> "316 /layer4/layer4.1/Add" [label="[1, 2048, 7, 7]", style=solid]; +"316 /layer4/layer4.1/Add" -> "317 /layer4/layer4.1/relu_2/Relu" [label="[1, 2048, 7, 7]", style=solid]; +"317 /layer4/layer4.1/relu_2/Relu" -> "318 QuantizeLinear_/layer4/layer4.1/relu_2/Relu_output_0_1" [label="[1, 2048, 7, 7]", style=solid]; +"318 QuantizeLinear_/layer4/layer4.1/relu_2/Relu_output_0_1" -> "319 DequantizeLinear_/layer4/layer4.1/relu_2/Relu_output_0_1" [label="[1, 2048, 7, 7]", style=dashed]; +"319 DequantizeLinear_/layer4/layer4.1/relu_2/Relu_output_0_1" -> "322 /layer4/layer4.2/conv1/Conv" [label="[1, 2048, 7, 7]", style=solid]; +"319 DequantizeLinear_/layer4/layer4.1/relu_2/Relu_output_0_1" -> "335 /layer4/layer4.2/Add" [label="[1, 2048, 7, 7]", style=solid]; +"320 QuantizeLinear_onnx^^Conv_647_1" -> "321 DequantizeLinear_onnx^^Conv_647_1" [label="[512, 2048, 1, 1]", style=dashed]; +"321 DequantizeLinear_onnx^^Conv_647_1" -> "322 /layer4/layer4.2/conv1/Conv" [label="[512, 2048, 1, 1]", style=solid]; +"322 /layer4/layer4.2/conv1/Conv" -> "323 /layer4/layer4.2/relu/Relu" [label="[1, 512, 7, 7]", style=solid]; +"323 /layer4/layer4.2/relu/Relu" -> "324 QuantizeLinear_/layer4/layer4.2/relu/Relu_output_0_1" [label="[1, 512, 7, 7]", style=solid]; +"324 QuantizeLinear_/layer4/layer4.2/relu/Relu_output_0_1" -> "325 DequantizeLinear_/layer4/layer4.2/relu/Relu_output_0_1" [label="[1, 512, 7, 7]", style=dashed]; +"325 DequantizeLinear_/layer4/layer4.2/relu/Relu_output_0_1" -> "328 /layer4/layer4.2/conv2/Conv" [label="[1, 512, 7, 7]", style=solid]; +"326 QuantizeLinear_onnx^^Conv_650_1" -> "327 DequantizeLinear_onnx^^Conv_650_1" [label="[512, 512, 3, 3]", style=dashed]; +"327 DequantizeLinear_onnx^^Conv_650_1" -> "328 /layer4/layer4.2/conv2/Conv" [label="[512, 512, 3, 3]", style=solid]; +"328 /layer4/layer4.2/conv2/Conv" -> "329 /layer4/layer4.2/relu_1/Relu" [label="[1, 512, 7, 7]", style=solid]; +"329 /layer4/layer4.2/relu_1/Relu" -> "330 QuantizeLinear_/layer4/layer4.2/relu_1/Relu_output_0_1" [label="[1, 512, 7, 7]", style=solid]; +"330 QuantizeLinear_/layer4/layer4.2/relu_1/Relu_output_0_1" -> "331 DequantizeLinear_/layer4/layer4.2/relu_1/Relu_output_0_1" [label="[1, 512, 7, 7]", style=dashed]; +"331 DequantizeLinear_/layer4/layer4.2/relu_1/Relu_output_0_1" -> "332 /layer4/layer4.2/conv3/Conv" [label="[1, 512, 7, 7]", style=solid]; +"332 /layer4/layer4.2/conv3/Conv" 
-> "333 QuantizeLinear_/layer4/layer4.2/conv3/Conv_output_0_1" [label="[1, 2048, 7, 7]", style=solid]; +"333 QuantizeLinear_/layer4/layer4.2/conv3/Conv_output_0_1" -> "334 DequantizeLinear_/layer4/layer4.2/conv3/Conv_output_0_1" [label="[1, 2048, 7, 7]", style=dashed]; +"334 DequantizeLinear_/layer4/layer4.2/conv3/Conv_output_0_1" -> "335 /layer4/layer4.2/Add" [label="[1, 2048, 7, 7]", style=solid]; +"335 /layer4/layer4.2/Add" -> "336 /layer4/layer4.2/relu_2/Relu" [label="[1, 2048, 7, 7]", style=solid]; +"336 /layer4/layer4.2/relu_2/Relu" -> "337 QuantizeLinear_/layer4/layer4.2/relu_2/Relu_output_0_1" [label="[1, 2048, 7, 7]", style=solid]; +"337 QuantizeLinear_/layer4/layer4.2/relu_2/Relu_output_0_1" -> "338 DequantizeLinear_/layer4/layer4.2/relu_2/Relu_output_0_1" [label="[1, 2048, 7, 7]", style=dashed]; +"338 DequantizeLinear_/layer4/layer4.2/relu_2/Relu_output_0_1" -> "339 /avgpool/GlobalAveragePool" [label="[1, 2048, 7, 7]", style=solid]; +"339 /avgpool/GlobalAveragePool" -> "340 QuantizeLinear_/avgpool/GlobalAveragePool_output_0_1" [label="[1, 2048, 1, 1]", style=solid]; +"340 QuantizeLinear_/avgpool/GlobalAveragePool_output_0_1" -> "341 DequantizeLinear_/avgpool/GlobalAveragePool_output_0_1" [label="[1, 2048, 1, 1]", style=dashed]; +"341 DequantizeLinear_/avgpool/GlobalAveragePool_output_0_1" -> "342 /Flatten" [label="[1, 2048, 1, 1]", style=solid]; +"342 /Flatten" -> "345 /fc/Gemm" [label="[1, 2048]", style=solid]; +"343 QuantizeLinear_fc.weight_1" -> "344 DequantizeLinear_fc.weight_1" [label="[1000, 2048]", style=dashed]; +"344 DequantizeLinear_fc.weight_1" -> "345 /fc/Gemm" [label="[1000, 2048]", style=solid]; +"345 /fc/Gemm" -> "347 nncf_model_output_0" [label="[1, 1000]", style=solid]; +"346 nncf_model_input_0" -> "0 QuantizeLinear_input.1_1" [label="[1, 3, 224, 224]", style=solid]; +} diff --git a/tests/onnx/data/reference_graphs/quantization/synthetic/activation_matmul_model.dot b/tests/onnx/data/reference_graphs/quantization/synthetic/activation_matmul_model.dot new file mode 100644 index 00000000000..748a689b50b --- /dev/null +++ b/tests/onnx/data/reference_graphs/quantization/synthetic/activation_matmul_model.dot @@ -0,0 +1,19 @@ +strict digraph { +"0 QuantizeLinear_X_1" [id=0, type=QuantizeLinear]; +"1 DequantizeLinear_X_1" [id=1, type=DequantizeLinear]; +"2 QuantizeLinear_Y_1" [id=2, type=QuantizeLinear]; +"3 DequantizeLinear_Y_1" [id=3, type=DequantizeLinear]; +"4 MatMul" [id=4, type=MatMul]; +"5 Softmax" [id=5, type=Softmax]; +"6 nncf_model_input_0" [id=6, type=nncf_model_input]; +"7 nncf_model_input_1" [id=7, type=nncf_model_input]; +"8 nncf_model_output_0" [id=8, type=nncf_model_output]; +"0 QuantizeLinear_X_1" -> "1 DequantizeLinear_X_1" [label="[10, 1]", style=dashed]; +"1 DequantizeLinear_X_1" -> "4 MatMul" [label="[10, 1]", style=solid]; +"2 QuantizeLinear_Y_1" -> "3 DequantizeLinear_Y_1" [label="[1, 10]", style=dashed]; +"3 DequantizeLinear_Y_1" -> "4 MatMul" [label="[1, 10]", style=solid]; +"4 MatMul" -> "5 Softmax" [label="[10, 10]", style=solid]; +"5 Softmax" -> "8 nncf_model_output_0" [label="[10, 10]", style=solid]; +"6 nncf_model_input_0" -> "0 QuantizeLinear_X_1" [label="[10, 1]", style=solid]; +"7 nncf_model_input_1" -> "2 QuantizeLinear_Y_1" [label="[1, 10]", style=solid]; +} diff --git a/tests/onnx/data/reference_graphs/quantization/synthetic/embedding_model.dot b/tests/onnx/data/reference_graphs/quantization/synthetic/embedding_model.dot new file mode 100644 index 00000000000..eca5b59e2ac --- /dev/null +++ 
b/tests/onnx/data/reference_graphs/quantization/synthetic/embedding_model.dot @@ -0,0 +1,21 @@ +strict digraph { +"0 Identity" [id=0, type=Identity]; +"1 QuantizeLinear_Identity_Y_1" [id=1, type=QuantizeLinear]; +"2 DequantizeLinear_Identity_Y_1" [id=2, type=DequantizeLinear]; +"3 Embedding" [id=3, type=Gather]; +"4 Gather" [id=4, type=Gather]; +"5 QuantizeLinear_W_1" [id=5, type=QuantizeLinear]; +"6 DequantizeLinear_W_1" [id=6, type=DequantizeLinear]; +"7 MatMul" [id=7, type=MatMul]; +"8 nncf_model_input_0" [id=8, type=nncf_model_input]; +"9 nncf_model_output_0" [id=9, type=nncf_model_output]; +"0 Identity" -> "1 QuantizeLinear_Identity_Y_1" [label="[10, 20]", style=solid]; +"1 QuantizeLinear_Identity_Y_1" -> "2 DequantizeLinear_Identity_Y_1" [label="[10, 20]", style=dashed]; +"2 DequantizeLinear_Identity_Y_1" -> "3 Embedding" [label="[10, 20]", style=solid]; +"3 Embedding" -> "4 Gather" [label="[1, 10, 20]", style=solid]; +"4 Gather" -> "7 MatMul" [label="[10, 20]", style=solid]; +"5 QuantizeLinear_W_1" -> "6 DequantizeLinear_W_1" [label="[20, 10]", style=dashed]; +"6 DequantizeLinear_W_1" -> "7 MatMul" [label="[20, 10]", style=solid]; +"7 MatMul" -> "9 nncf_model_output_0" [label="[10, 10]", style=solid]; +"8 nncf_model_input_0" -> "3 Embedding" [label="[1, 10]", style=dashed]; +} diff --git a/tests/onnx/data/reference_graphs/quantization/synthetic/gemm_weight_transpose_model.dot b/tests/onnx/data/reference_graphs/quantization/synthetic/gemm_weight_transpose_model.dot new file mode 100644 index 00000000000..374f526f8fe --- /dev/null +++ b/tests/onnx/data/reference_graphs/quantization/synthetic/gemm_weight_transpose_model.dot @@ -0,0 +1,19 @@ +strict digraph { +"0 QuantizeLinear_X_1" [id=0, type=QuantizeLinear]; +"1 DequantizeLinear_X_1" [id=1, type=DequantizeLinear]; +"2 Identity" [id=2, type=Identity]; +"3 QuantizeLinear_W_1" [id=3, type=QuantizeLinear]; +"4 DequantizeLinear_W_1" [id=4, type=DequantizeLinear]; +"5 Gemm" [id=5, type=Gemm]; +"6 Softmax" [id=6, type=Softmax]; +"7 nncf_model_input_0" [id=7, type=nncf_model_input]; +"8 nncf_model_output_0" [id=8, type=nncf_model_output]; +"0 QuantizeLinear_X_1" -> "1 DequantizeLinear_X_1" [label="[1, 10]", style=dashed]; +"1 DequantizeLinear_X_1" -> "2 Identity" [label="[1, 10]", style=solid]; +"2 Identity" -> "5 Gemm" [label="[1, 10]", style=solid]; +"3 QuantizeLinear_W_1" -> "4 DequantizeLinear_W_1" [label="[5, 10]", style=dashed]; +"4 DequantizeLinear_W_1" -> "5 Gemm" [label="[5, 10]", style=solid]; +"5 Gemm" -> "6 Softmax" [label="[1, 5]", style=solid]; +"6 Softmax" -> "8 nncf_model_output_0" [label="[1, 5]", style=solid]; +"7 nncf_model_input_0" -> "0 QuantizeLinear_X_1" [label="[1, 10]", style=solid]; +} diff --git a/tests/onnx/data/reference_graphs/quantization/synthetic/unified_embedding_model.dot b/tests/onnx/data/reference_graphs/quantization/synthetic/unified_embedding_model.dot new file mode 100644 index 00000000000..b95108093f3 --- /dev/null +++ b/tests/onnx/data/reference_graphs/quantization/synthetic/unified_embedding_model.dot @@ -0,0 +1,33 @@ +strict digraph { +"0 QuantizeLinear_X_1" [id=0, type=QuantizeLinear]; +"1 DequantizeLinear_X_1" [id=1, type=DequantizeLinear]; +"2 Cast" [id=2, type=Cast]; +"3 QuantizeLinear_Embedding_W_1" [id=3, type=QuantizeLinear]; +"4 DequantizeLinear_Embedding_W_1" [id=4, type=DequantizeLinear]; +"5 Embedding" [id=5, type=Gather]; +"6 QuantizeLinear_W_1_1" [id=6, type=QuantizeLinear]; +"7 DequantizeLinear_W_1_1" [id=7, type=DequantizeLinear]; +"8 MatMul_1" [id=8, type=MatMul]; +"9 Reshape" 
[id=9, type=Reshape]; +"10 Concat" [id=10, type=Concat]; +"11 QuantizeLinear_W_2_1" [id=11, type=QuantizeLinear]; +"12 DequantizeLinear_W_2_1" [id=12, type=DequantizeLinear]; +"13 MatMul_2" [id=13, type=MatMul]; +"14 nncf_model_input_0" [id=14, type=nncf_model_input]; +"15 nncf_model_output_0" [id=15, type=nncf_model_output]; +"0 QuantizeLinear_X_1" -> "1 DequantizeLinear_X_1" [label="[1, 3]", style=dashed]; +"1 DequantizeLinear_X_1" -> "8 MatMul_1" [label="[1, 3]", style=solid]; +"2 Cast" -> "5 Embedding" [label="[1, 3]", style=dashed]; +"3 QuantizeLinear_Embedding_W_1" -> "4 DequantizeLinear_Embedding_W_1" [label="[4, 5]", style=dashed]; +"4 DequantizeLinear_Embedding_W_1" -> "5 Embedding" [label="[4, 5]", style=solid]; +"5 Embedding" -> "10 Concat" [label="[1, 3, 5]", style=solid]; +"6 QuantizeLinear_W_1_1" -> "7 DequantizeLinear_W_1_1" [label="[3, 3, 5]", style=dashed]; +"7 DequantizeLinear_W_1_1" -> "8 MatMul_1" [label="[3, 3, 5]", style=solid]; +"8 MatMul_1" -> "9 Reshape" [label="[3, 1, 5]", style=solid]; +"10 Concat" -> "13 MatMul_2" [label="[]", style=solid]; +"11 QuantizeLinear_W_2_1" -> "12 DequantizeLinear_W_2_1" [label="[1, 5]", style=dashed]; +"12 DequantizeLinear_W_2_1" -> "13 MatMul_2" [label="[1, 5]", style=solid]; +"13 MatMul_2" -> "15 nncf_model_output_0" [label="[1, 6]", style=solid]; +"14 nncf_model_input_0" -> "0 QuantizeLinear_X_1" [label="[1, 3]", style=solid]; +"14 nncf_model_input_0" -> "2 Cast" [label="[1, 3]", style=solid]; +} diff --git a/tests/onnx/data/reference_graphs/quantization/synthetic/weight_matmul_model.dot b/tests/onnx/data/reference_graphs/quantization/synthetic/weight_matmul_model.dot new file mode 100644 index 00000000000..1ecc9d7e75d --- /dev/null +++ b/tests/onnx/data/reference_graphs/quantization/synthetic/weight_matmul_model.dot @@ -0,0 +1,17 @@ +strict digraph { +"0 QuantizeLinear_X_1" [id=0, type=QuantizeLinear]; +"1 DequantizeLinear_X_1" [id=1, type=DequantizeLinear]; +"2 QuantizeLinear_W_1" [id=2, type=QuantizeLinear]; +"3 DequantizeLinear_W_1" [id=3, type=DequantizeLinear]; +"4 MatMul" [id=4, type=MatMul]; +"5 Softmax" [id=5, type=Softmax]; +"6 nncf_model_input_0" [id=6, type=nncf_model_input]; +"7 nncf_model_output_0" [id=7, type=nncf_model_output]; +"0 QuantizeLinear_X_1" -> "1 DequantizeLinear_X_1" [label="[1, 10]", style=dashed]; +"1 DequantizeLinear_X_1" -> "4 MatMul" [label="[1, 10]", style=solid]; +"2 QuantizeLinear_W_1" -> "3 DequantizeLinear_W_1" [label="[10, 5]", style=dashed]; +"3 DequantizeLinear_W_1" -> "4 MatMul" [label="[10, 5]", style=solid]; +"4 MatMul" -> "5 Softmax" [label="[1, 5]", style=solid]; +"5 Softmax" -> "7 nncf_model_output_0" [label="[1, 5]", style=solid]; +"6 nncf_model_input_0" -> "0 QuantizeLinear_X_1" [label="[1, 10]", style=solid]; +} diff --git a/tests/onnx/data/reference_graphs/quantization/synthetic/weight_propagation_conv_model.dot b/tests/onnx/data/reference_graphs/quantization/synthetic/weight_propagation_conv_model.dot new file mode 100644 index 00000000000..bcd95132227 --- /dev/null +++ b/tests/onnx/data/reference_graphs/quantization/synthetic/weight_propagation_conv_model.dot @@ -0,0 +1,49 @@ +strict digraph { +"0 reshape" [id=0, type=Reshape]; +"1 transpose" [id=1, type=Transpose]; +"2 identity1" [id=2, type=Identity]; +"3 QuantizeLinear_input_1" [id=3, type=QuantizeLinear]; +"4 DequantizeLinear_input_1" [id=4, type=DequantizeLinear]; +"5 QuantizeLinear_identity_output1_1" [id=5, type=QuantizeLinear]; +"6 DequantizeLinear_identity_output1_1" [id=6, type=DequantizeLinear]; +"7 conv1" [id=7, 
type=Conv]; +"8 constant" [id=8, type=Constant]; +"9 reshape2" [id=9, type=Reshape]; +"10 identity2" [id=10, type=Identity]; +"11 QuantizeLinear_conv_output1_1" [id=11, type=QuantizeLinear]; +"12 DequantizeLinear_conv_output1_1" [id=12, type=DequantizeLinear]; +"13 QuantizeLinear_identity_output2_1" [id=13, type=QuantizeLinear]; +"14 DequantizeLinear_identity_output2_1" [id=14, type=DequantizeLinear]; +"15 conv2" [id=15, type=Conv]; +"16 constant2" [id=16, type=Constant]; +"17 QuantizeLinear_conv_output2_1" [id=17, type=QuantizeLinear]; +"18 DequantizeLinear_conv_output2_1" [id=18, type=DequantizeLinear]; +"19 QuantizeLinear_constant_output2_1" [id=19, type=QuantizeLinear]; +"20 DequantizeLinear_constant_output2_1" [id=20, type=DequantizeLinear]; +"21 conv4" [id=21, type=Conv]; +"22 nncf_model_input_0" [id=22, type=nncf_model_input]; +"23 nncf_model_output_0" [id=23, type=nncf_model_output]; +"0 reshape" -> "1 transpose" [label="[1, 1, 3, 3]", style=solid]; +"1 transpose" -> "2 identity1" [label="[1, 1, 3, 3]", style=solid]; +"2 identity1" -> "5 QuantizeLinear_identity_output1_1" [label="[1, 1, 3, 3]", style=solid]; +"3 QuantizeLinear_input_1" -> "4 DequantizeLinear_input_1" [label="[1, 1, 28, 28]", style=dashed]; +"4 DequantizeLinear_input_1" -> "7 conv1" [label="[1, 1, 28, 28]", style=solid]; +"5 QuantizeLinear_identity_output1_1" -> "6 DequantizeLinear_identity_output1_1" [label="[1, 1, 3, 3]", style=dashed]; +"6 DequantizeLinear_identity_output1_1" -> "7 conv1" [label="[1, 1, 3, 3]", style=solid]; +"7 conv1" -> "11 QuantizeLinear_conv_output1_1" [label="[1, 1, 28, 28]", style=solid]; +"8 constant" -> "9 reshape2" [label="[1, 1, 3, 3]", style=solid]; +"9 reshape2" -> "10 identity2" [label="[1, 1, 3, 3]", style=solid]; +"10 identity2" -> "13 QuantizeLinear_identity_output2_1" [label="[1, 1, 3, 3]", style=solid]; +"11 QuantizeLinear_conv_output1_1" -> "12 DequantizeLinear_conv_output1_1" [label="[1, 1, 28, 28]", style=dashed]; +"12 DequantizeLinear_conv_output1_1" -> "15 conv2" [label="[1, 1, 28, 28]", style=solid]; +"13 QuantizeLinear_identity_output2_1" -> "14 DequantizeLinear_identity_output2_1" [label="[1, 1, 3, 3]", style=dashed]; +"14 DequantizeLinear_identity_output2_1" -> "15 conv2" [label="[1, 1, 3, 3]", style=solid]; +"15 conv2" -> "17 QuantizeLinear_conv_output2_1" [label="[1, 1, 28, 28]", style=solid]; +"16 constant2" -> "19 QuantizeLinear_constant_output2_1" [label="[1, 1, 3, 3]", style=solid]; +"17 QuantizeLinear_conv_output2_1" -> "18 DequantizeLinear_conv_output2_1" [label="[1, 1, 28, 28]", style=dashed]; +"18 DequantizeLinear_conv_output2_1" -> "21 conv4" [label="[1, 1, 28, 28]", style=solid]; +"19 QuantizeLinear_constant_output2_1" -> "20 DequantizeLinear_constant_output2_1" [label="[1, 1, 3, 3]", style=dashed]; +"20 DequantizeLinear_constant_output2_1" -> "21 conv4" [label="[1, 1, 3, 3]", style=solid]; +"21 conv4" -> "23 nncf_model_output_0" [label="[1, 1, 28, 28]", style=solid]; +"22 nncf_model_input_0" -> "3 QuantizeLinear_input_1" [label="[1, 1, 28, 28]", style=solid]; +} diff --git a/tests/onnx/data/reference_graphs/quantization/synthetic/weight_propagation_matmul_model.dot b/tests/onnx/data/reference_graphs/quantization/synthetic/weight_propagation_matmul_model.dot new file mode 100644 index 00000000000..072444df366 --- /dev/null +++ b/tests/onnx/data/reference_graphs/quantization/synthetic/weight_propagation_matmul_model.dot @@ -0,0 +1,31 @@ +strict digraph { +"0 Identity_1" [id=0, type=Identity]; +"1 Identity_2" [id=1, type=Identity]; +"2 QuantizeLinear_X_1" 
[id=2, type=QuantizeLinear]; +"3 DequantizeLinear_X_1" [id=3, type=DequantizeLinear]; +"4 QuantizeLinear_i_2_1" [id=4, type=QuantizeLinear]; +"5 DequantizeLinear_i_2_1" [id=5, type=DequantizeLinear]; +"6 MatMul_1" [id=6, type=MatMul]; +"7 constant" [id=7, type=Constant]; +"8 QuantizeLinear_mm_1_1" [id=8, type=QuantizeLinear]; +"9 DequantizeLinear_mm_1_1" [id=9, type=DequantizeLinear]; +"10 QuantizeLinear_const_1" [id=10, type=QuantizeLinear]; +"11 DequantizeLinear_const_1" [id=11, type=DequantizeLinear]; +"12 MatMul_2" [id=12, type=MatMul]; +"13 nncf_model_input_0" [id=13, type=nncf_model_input]; +"14 nncf_model_output_0" [id=14, type=nncf_model_output]; +"0 Identity_1" -> "1 Identity_2" [label="[10, 5]", style=solid]; +"1 Identity_2" -> "4 QuantizeLinear_i_2_1" [label="[10, 5]", style=solid]; +"2 QuantizeLinear_X_1" -> "3 DequantizeLinear_X_1" [label="[1, 10]", style=dashed]; +"3 DequantizeLinear_X_1" -> "6 MatMul_1" [label="[1, 10]", style=solid]; +"4 QuantizeLinear_i_2_1" -> "5 DequantizeLinear_i_2_1" [label="[10, 5]", style=dashed]; +"5 DequantizeLinear_i_2_1" -> "6 MatMul_1" [label="[10, 5]", style=solid]; +"6 MatMul_1" -> "8 QuantizeLinear_mm_1_1" [label="[1, 5]", style=solid]; +"7 constant" -> "10 QuantizeLinear_const_1" [label="[5, 10]", style=solid]; +"8 QuantizeLinear_mm_1_1" -> "9 DequantizeLinear_mm_1_1" [label="[1, 5]", style=dashed]; +"9 DequantizeLinear_mm_1_1" -> "12 MatMul_2" [label="[1, 5]", style=solid]; +"10 QuantizeLinear_const_1" -> "11 DequantizeLinear_const_1" [label="[5, 10]", style=dashed]; +"11 DequantizeLinear_const_1" -> "12 MatMul_2" [label="[5, 10]", style=solid]; +"12 MatMul_2" -> "14 nncf_model_output_0" [label="[1, 10]", style=solid]; +"13 nncf_model_input_0" -> "2 QuantizeLinear_X_1" [label="[1, 10]", style=solid]; +} diff --git a/tests/onnx/data/reference_graphs/quantization/synthetic/weight_sharing_model.dot b/tests/onnx/data/reference_graphs/quantization/synthetic/weight_sharing_model.dot index ce748f2c568..1b98e42e486 100644 --- a/tests/onnx/data/reference_graphs/quantization/synthetic/weight_sharing_model.dot +++ b/tests/onnx/data/reference_graphs/quantization/synthetic/weight_sharing_model.dot @@ -17,15 +17,15 @@ strict digraph { "1 QuantizeLinear_relu_X_1" -> "2 DequantizeLinear_relu_X_1" [label="[1, 1, 5, 5]", style=dashed]; "2 DequantizeLinear_relu_X_1" -> "5 Conv1" [label="[1, 1, 5, 5]", style=solid]; "2 DequantizeLinear_relu_X_1" -> "6 Conv2" [label="[1, 1, 5, 5]", style=solid]; -"3 QuantizeLinear_W_1" -> "4 DequantizeLinear_W_1" [label="[1, 1, 3, 3]", style=dashed]; -"4 DequantizeLinear_W_1" -> "5 Conv1" [label="[1, 1, 3, 3]", style=solid]; -"4 DequantizeLinear_W_1" -> "6 Conv2" [label="[1, 1, 3, 3]", style=solid]; -"5 Conv1" -> "7 QuantizeLinear_conv_1_1" [label="[1, 1, 5, 5]", style=solid]; -"6 Conv2" -> "9 QuantizeLinear_conv_2_1" [label="[1, 1, 5, 5]", style=solid]; -"7 QuantizeLinear_conv_1_1" -> "8 DequantizeLinear_conv_1_1" [label="[1, 1, 5, 5]", style=dashed]; -"8 DequantizeLinear_conv_1_1" -> "11 Add" [label="[1, 1, 5, 5]", style=solid]; -"9 QuantizeLinear_conv_2_1" -> "10 DequantizeLinear_conv_2_1" [label="[1, 1, 5, 5]", style=dashed]; -"10 DequantizeLinear_conv_2_1" -> "11 Add" [label="[1, 1, 5, 5]", style=solid]; -"11 Add" -> "13 nncf_model_output_0" [label="[1, 1, 5, 5]", style=solid]; +"3 QuantizeLinear_W_1" -> "4 DequantizeLinear_W_1" [label="[5, 1, 3, 3]", style=dashed]; +"4 DequantizeLinear_W_1" -> "5 Conv1" [label="[5, 1, 3, 3]", style=solid]; +"4 DequantizeLinear_W_1" -> "6 Conv2" [label="[5, 1, 3, 3]", style=solid]; +"5 
Conv1" -> "7 QuantizeLinear_conv_1_1" [label="[1, 5, 5, 5]", style=solid]; +"6 Conv2" -> "9 QuantizeLinear_conv_2_1" [label="[1, 5, 5, 5]", style=solid]; +"7 QuantizeLinear_conv_1_1" -> "8 DequantizeLinear_conv_1_1" [label="[1, 5, 5, 5]", style=dashed]; +"8 DequantizeLinear_conv_1_1" -> "11 Add" [label="[1, 5, 5, 5]", style=solid]; +"9 QuantizeLinear_conv_2_1" -> "10 DequantizeLinear_conv_2_1" [label="[1, 5, 5, 5]", style=dashed]; +"10 DequantizeLinear_conv_2_1" -> "11 Add" [label="[1, 5, 5, 5]", style=solid]; +"11 Add" -> "13 nncf_model_output_0" [label="[1, 5, 5, 5]", style=solid]; "12 nncf_model_input_0" -> "0 Relu" [label="[1, 1, 5, 5]", style=solid]; } diff --git a/tests/onnx/data/reference_scales/activation_matmul_model_mixed.json b/tests/onnx/data/reference_scales/activation_matmul_model_mixed.json new file mode 100644 index 00000000000..7080679f2b8 --- /dev/null +++ b/tests/onnx/data/reference_scales/activation_matmul_model_mixed.json @@ -0,0 +1,10 @@ +{ + "QuantizeLinear_X_1": { + "scale": 0.006937139667570591, + "zero_point": -11 + }, + "QuantizeLinear_Y_1": { + "scale": 0.006937139667570591, + "zero_point": -11 + } +} \ No newline at end of file diff --git a/tests/onnx/data/reference_scales/activation_matmul_model_performance.json b/tests/onnx/data/reference_scales/activation_matmul_model_performance.json new file mode 100644 index 00000000000..0b82b1366b8 --- /dev/null +++ b/tests/onnx/data/reference_scales/activation_matmul_model_performance.json @@ -0,0 +1,10 @@ +{ + "QuantizeLinear_X_1": { + "scale": 0.00749011617153883, + "zero_point": 0 + }, + "QuantizeLinear_Y_1": { + "scale": 0.00749011617153883, + "zero_point": 0 + } +} \ No newline at end of file diff --git a/tests/onnx/data/reference_scales/embedding_model_mixed.json b/tests/onnx/data/reference_scales/embedding_model_mixed.json new file mode 100644 index 00000000000..42b9ba63b2e --- /dev/null +++ b/tests/onnx/data/reference_scales/embedding_model_mixed.json @@ -0,0 +1,58 @@ +{ + "QuantizeLinear_Identity_Y_1": { + "scale": [ + 0.0073627750389277935, + 0.007852046750485897, + 0.0070100342854857445, + 0.007835405878722668, + 0.007725945208221674, + 0.007330845110118389, + 0.007606788072735071, + 0.007431507110595703, + 0.007833994925022125, + 0.007731832563877106 + ], + "zero_point": [ + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0 + ] + }, + "QuantizeLinear_Embedding_Y_1": { + "scale": 0.003666950622573495, + "zero_point": 0 + }, + "QuantizeLinear_W_1": { + "scale": [ + 0.00743053387850523, + 0.007582827936857939, + 0.00782698206603527, + 0.0071887727826833725, + 0.006841372232884169, + 0.0077309319749474525, + 0.007578134536743164, + 0.007352511398494244, + 0.007850474677979946, + 0.007811206392943859 + ], + "zero_point": [ + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0 + ] + } +} \ No newline at end of file diff --git a/tests/onnx/data/reference_scales/embedding_model_performance.json b/tests/onnx/data/reference_scales/embedding_model_performance.json new file mode 100644 index 00000000000..d8b07fb14c6 --- /dev/null +++ b/tests/onnx/data/reference_scales/embedding_model_performance.json @@ -0,0 +1,32 @@ +{ + "QuantizeLinear_Identity_Y_1": { + "scale": 0.007852046750485897, + "zero_point": 0 + }, + "QuantizeLinear_W_1": { + "scale": [ + 0.00743053387850523, + 0.007582827936857939, + 0.00782698206603527, + 0.0071887727826833725, + 0.006841372232884169, + 0.0077309319749474525, + 0.007578134536743164, + 0.007352511398494244, + 0.007850474677979946, + 0.007811206392943859 + ], + "zero_point": [ + 0, + 0, + 0, + 0, + 
0, + 0, + 0, + 0, + 0, + 0 + ] + } +} \ No newline at end of file diff --git a/tests/onnx/data/reference_scales/gemm_weight_transpose_model_mixed.json b/tests/onnx/data/reference_scales/gemm_weight_transpose_model_mixed.json new file mode 100644 index 00000000000..13a7d07f893 --- /dev/null +++ b/tests/onnx/data/reference_scales/gemm_weight_transpose_model_mixed.json @@ -0,0 +1,22 @@ +{ + "QuantizeLinear_X_1": { + "scale": 0.006937139667570591, + "zero_point": -11 + }, + "QuantizeLinear_W_1": { + "scale": [ + 0.0073627750389277935, + 0.006796684116125107, + 0.007852046750485897, + 0.0073546734638512135, + 0.0070100342854857445 + ], + "zero_point": [ + 0, + 0, + 0, + 0, + 0 + ] + } +} \ No newline at end of file diff --git a/tests/onnx/data/reference_scales/gemm_weight_transpose_model_performance.json b/tests/onnx/data/reference_scales/gemm_weight_transpose_model_performance.json new file mode 100644 index 00000000000..ca5a73b9d04 --- /dev/null +++ b/tests/onnx/data/reference_scales/gemm_weight_transpose_model_performance.json @@ -0,0 +1,22 @@ +{ + "QuantizeLinear_X_1": { + "scale": 0.00749011617153883, + "zero_point": 0 + }, + "QuantizeLinear_W_1": { + "scale": [ + 0.0073627750389277935, + 0.006796684116125107, + 0.007852046750485897, + 0.0073546734638512135, + 0.0070100342854857445 + ], + "zero_point": [ + 0, + 0, + 0, + 0, + 0 + ] + } +} \ No newline at end of file diff --git a/tests/onnx/data/reference_scales/linear_model_mixed.json b/tests/onnx/data/reference_scales/linear_model_mixed.json new file mode 100644 index 00000000000..2cbfd2a51d8 --- /dev/null +++ b/tests/onnx/data/reference_scales/linear_model_mixed.json @@ -0,0 +1,110 @@ +{ + "QuantizeLinear_X_1": { + "scale": 0.00786584708839655, + "zero_point": -1 + }, + "QuantizeLinear_Conv1_W_1": { + "scale": [ + 0.015364131890237331, + 0.015236373990774155, + 0.013439418748021126, + 0.015262619592249393, + 0.014568820595741272, + 0.015095421113073826, + 0.015627961605787277, + 0.015304353088140488, + 0.015216249972581863, + 0.014503472484648228, + 0.015426309779286385, + 0.014942644163966179, + 0.015100476332008839, + 0.014742164872586727, + 0.015701333060860634, + 0.015733933076262474, + 0.015436295419931412, + 0.014898023568093777, + 0.014504837803542614, + 0.015676604583859444, + 0.015595616772770882, + 0.01460567582398653, + 0.015505166724324226, + 0.015520317479968071, + 0.015728242695331573, + 0.015105657279491425, + 0.015664594247937202, + 0.013586843386292458, + 0.014651726931333542, + 0.015618495643138885, + 0.015639128163456917, + 0.015486882999539375 + ], + "zero_point": [ + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0 + ] + }, + "QuantizeLinear_ReLU1_Y_1": { + "scale": 0.10068393498659134, + "zero_point": 0 + }, + "QuantizeLinear_Avg_Pool1_Y_1": { + "scale": 0.016155080869793892, + "zero_point": 0 + }, + "QuantizeLinear_Conv2_W_1": { + "scale": [ + 0.007460909895598888, + 0.007702397182583809, + 0.007863067090511322, + 0.007869463413953781, + 0.00677796034142375, + 0.007798334117978811, + 0.00736647192388773, + 0.007847852073609829, + 0.007445038296282291, + 0.007601700257509947 + ], + "zero_point": [ + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0 + ] + } +} \ No newline at end of file diff --git a/tests/onnx/data/reference_scales/linear_model_overflow_fix_disable.json b/tests/onnx/data/reference_scales/linear_model_overflow_fix_disable.json index bbaa738c8f8..5f9e2421c76 100644 --- 
a/tests/onnx/data/reference_scales/linear_model_overflow_fix_disable.json +++ b/tests/onnx/data/reference_scales/linear_model_overflow_fix_disable.json @@ -1,6 +1,6 @@ { "QuantizeLinear_X_1": { - "scale": 0.003920648247003555, + "scale": 0.007870321162045002, "zero_point": 0 }, "QuantizeLinear_Conv1_W_1": { @@ -74,11 +74,11 @@ ] }, "QuantizeLinear_ReLU1_Y_1": { - "scale": 0.19180020689964294, + "scale": 0.10068393498659134, "zero_point": 0 }, "QuantizeLinear_Avg_Pool1_Y_1": { - "scale": 0.13938520848751068, + "scale": 0.016155080869793892, "zero_point": 0 }, "QuantizeLinear_Conv2_W_1": { diff --git a/tests/onnx/data/reference_scales/linear_model_overflow_fix_enable.json b/tests/onnx/data/reference_scales/linear_model_overflow_fix_enable.json index 18f22c40542..76e5b32c8b3 100644 --- a/tests/onnx/data/reference_scales/linear_model_overflow_fix_enable.json +++ b/tests/onnx/data/reference_scales/linear_model_overflow_fix_enable.json @@ -1,6 +1,6 @@ { "QuantizeLinear_X_1": { - "scale": 0.003920648247003555, + "scale": 0.007870321162045002, "zero_point": 0 }, "QuantizeLinear_Conv1_W_1": { @@ -74,11 +74,11 @@ ] }, "QuantizeLinear_ReLU1_Y_1": { - "scale": 0.19180020689964294, + "scale": 0.10068393498659134, "zero_point": 0 }, "QuantizeLinear_Avg_Pool1_Y_1": { - "scale": 0.13938520848751068, + "scale": 0.016155080869793892, "zero_point": 0 }, "QuantizeLinear_Conv2_W_1": { diff --git a/tests/onnx/data/reference_scales/linear_model_overflow_fix_first_layer_only.json b/tests/onnx/data/reference_scales/linear_model_overflow_fix_first_layer_only.json index ed3e38623d1..9ec64dea4ac 100644 --- a/tests/onnx/data/reference_scales/linear_model_overflow_fix_first_layer_only.json +++ b/tests/onnx/data/reference_scales/linear_model_overflow_fix_first_layer_only.json @@ -1,6 +1,6 @@ { "QuantizeLinear_X_1": { - "scale": 0.003920648247003555, + "scale": 0.007870321162045002, "zero_point": 0 }, "QuantizeLinear_Conv1_W_1": { @@ -74,11 +74,11 @@ ] }, "QuantizeLinear_ReLU1_Y_1": { - "scale": 0.19180020689964294, + "scale": 0.10068393498659134, "zero_point": 0 }, "QuantizeLinear_Avg_Pool1_Y_1": { - "scale": 0.13938520848751068, + "scale": 0.016155080869793892, "zero_point": 0 }, "QuantizeLinear_Conv2_W_1": { diff --git a/tests/onnx/data/reference_scales/linear_model_performance.json b/tests/onnx/data/reference_scales/linear_model_performance.json new file mode 100644 index 00000000000..9ec64dea4ac --- /dev/null +++ b/tests/onnx/data/reference_scales/linear_model_performance.json @@ -0,0 +1,110 @@ +{ + "QuantizeLinear_X_1": { + "scale": 0.007870321162045002, + "zero_point": 0 + }, + "QuantizeLinear_Conv1_W_1": { + "scale": [ + 0.015364131890237331, + 0.015236373990774155, + 0.013439418748021126, + 0.015262619592249393, + 0.014568820595741272, + 0.015095421113073826, + 0.015627961605787277, + 0.015304353088140488, + 0.015216249972581863, + 0.014503472484648228, + 0.015426309779286385, + 0.014942644163966179, + 0.015100476332008839, + 0.014742164872586727, + 0.015701333060860634, + 0.015733933076262474, + 0.015436295419931412, + 0.014898023568093777, + 0.014504837803542614, + 0.015676604583859444, + 0.015595616772770882, + 0.01460567582398653, + 0.015505166724324226, + 0.015520317479968071, + 0.015728242695331573, + 0.015105657279491425, + 0.015664594247937202, + 0.013586843386292458, + 0.014651726931333542, + 0.015618495643138885, + 0.015639128163456917, + 0.015486882999539375 + ], + "zero_point": [ + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 
0, + 0, + 0, + 0, + 0, + 0, + 0, + 0 + ] + }, + "QuantizeLinear_ReLU1_Y_1": { + "scale": 0.10068393498659134, + "zero_point": 0 + }, + "QuantizeLinear_Avg_Pool1_Y_1": { + "scale": 0.016155080869793892, + "zero_point": 0 + }, + "QuantizeLinear_Conv2_W_1": { + "scale": [ + 0.007460909895598888, + 0.007702397182583809, + 0.007863067090511322, + 0.007869463413953781, + 0.00677796034142375, + 0.007798334117978811, + 0.00736647192388773, + 0.007847852073609829, + 0.007445038296282291, + 0.007601700257509947 + ], + "zero_point": [ + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0 + ] + } +} \ No newline at end of file diff --git a/tests/onnx/data/reference_scales/one_depthwise_convolutional_model_mixed.json b/tests/onnx/data/reference_scales/one_depthwise_convolutional_model_mixed.json new file mode 100644 index 00000000000..a4a2295cf2e --- /dev/null +++ b/tests/onnx/data/reference_scales/one_depthwise_convolutional_model_mixed.json @@ -0,0 +1,74 @@ +{ + "QuantizeLinear_X_1": { + "scale": [ + 0.007609957829117775, + 0.007633729372173548, + 0.007594745140522718 + ], + "zero_point": [ + 2, + -2, + 0 + ] + }, + "QuantizeLinear_Conv1_W_1": { + "scale": [ + 0.0060941423289477825, + 0.003455735743045807, + 0.006760613527148962, + 0.00549108674749732, + 0.0007415539585053921, + 0.007682065945118666, + 0.0059932260774075985, + 0.006189482752233744, + 0.0010087686823680997, + 0.003546345978975296, + 0.002919669495895505, + 0.007297361735254526, + 0.005069803912192583, + 0.006478437688201666, + 0.0034914505667984486, + 0.0017892812611535192, + 0.004366809502243996, + 0.000502498063724488, + 0.0065167807042598724, + 0.004973735194653273, + 0.005969194695353508, + 0.002791543025523424, + 0.007643291261047125, + 0.007032449822872877, + 0.006129004061222076, + 0.0015325882704928517, + 0.0036749683786183596 + ], + "zero_point": [ + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0 + ] + } +} \ No newline at end of file diff --git a/tests/onnx/data/reference_scales/one_depthwise_convolutional_model_performance.json b/tests/onnx/data/reference_scales/one_depthwise_convolutional_model_performance.json new file mode 100644 index 00000000000..45c608c0727 --- /dev/null +++ b/tests/onnx/data/reference_scales/one_depthwise_convolutional_model_performance.json @@ -0,0 +1,74 @@ +{ + "QuantizeLinear_X_1": { + "scale": [ + 0.007758073974400759, + 0.007753945887088776, + 0.00765454676002264 + ], + "zero_point": [ + 0, + 0, + 0 + ] + }, + "QuantizeLinear_Conv1_W_1": { + "scale": [ + 0.0060941423289477825, + 0.003455735743045807, + 0.006760613527148962, + 0.00549108674749732, + 0.0007415539585053921, + 0.007682065945118666, + 0.0059932260774075985, + 0.006189482752233744, + 0.0010087686823680997, + 0.003546345978975296, + 0.002919669495895505, + 0.007297361735254526, + 0.005069803912192583, + 0.006478437688201666, + 0.0034914505667984486, + 0.0017892812611535192, + 0.004366809502243996, + 0.000502498063724488, + 0.0065167807042598724, + 0.004973735194653273, + 0.005969194695353508, + 0.002791543025523424, + 0.007643291261047125, + 0.007032449822872877, + 0.006129004061222076, + 0.0015325882704928517, + 0.0036749683786183596 + ], + "zero_point": [ + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0 + ] + } +} \ No newline at end of file diff --git a/tests/onnx/data/reference_scales/reshape_weight_model_mixed.json 
b/tests/onnx/data/reference_scales/reshape_weight_model_mixed.json new file mode 100644 index 00000000000..8e0506bb0a2 --- /dev/null +++ b/tests/onnx/data/reference_scales/reshape_weight_model_mixed.json @@ -0,0 +1,22 @@ +{ + "QuantizeLinear_added_x_1": { + "scale": 0.009163882583379745, + "zero_point": -62 + }, + "QuantizeLinear_reshaped_w_1": { + "scale": [ + 0.007682065945118666, + 0.007618186995387077, + 0.007643291261047125, + 0.007032449822872877, + 0.006553226616233587 + ], + "zero_point": [ + 0, + 0, + 0, + 0, + 0 + ] + } +} \ No newline at end of file diff --git a/tests/onnx/data/reference_scales/reshape_weight_model_performance.json b/tests/onnx/data/reference_scales/reshape_weight_model_performance.json new file mode 100644 index 00000000000..71abe34a4b7 --- /dev/null +++ b/tests/onnx/data/reference_scales/reshape_weight_model_performance.json @@ -0,0 +1,22 @@ +{ + "QuantizeLinear_added_x_1": { + "scale": 0.013637588359415531, + "zero_point": 0 + }, + "QuantizeLinear_reshaped_w_1": { + "scale": [ + 0.007682065945118666, + 0.007618186995387077, + 0.007643291261047125, + 0.007032449822872877, + 0.006553226616233587 + ], + "zero_point": [ + 0, + 0, + 0, + 0, + 0 + ] + } +} \ No newline at end of file diff --git a/tests/onnx/data/reference_scales/weight_matmul_model_mixed.json b/tests/onnx/data/reference_scales/weight_matmul_model_mixed.json new file mode 100644 index 00000000000..15d398025c7 --- /dev/null +++ b/tests/onnx/data/reference_scales/weight_matmul_model_mixed.json @@ -0,0 +1,22 @@ +{ + "QuantizeLinear_X_1": { + "scale": 0.006937139667570591, + "zero_point": -11 + }, + "QuantizeLinear_W_1": { + "scale": [ + 0.007187051698565483, + 0.007852046750485897, + 0.007723112590610981, + 0.0073546734638512135, + 0.0073627750389277935 + ], + "zero_point": [ + 0, + 0, + 0, + 0, + 0 + ] + } +} \ No newline at end of file diff --git a/tests/onnx/data/reference_scales/weight_matmul_model_performance.json b/tests/onnx/data/reference_scales/weight_matmul_model_performance.json new file mode 100644 index 00000000000..b7238fb8ae5 --- /dev/null +++ b/tests/onnx/data/reference_scales/weight_matmul_model_performance.json @@ -0,0 +1,22 @@ +{ + "QuantizeLinear_X_1": { + "scale": 0.00749011617153883, + "zero_point": 0 + }, + "QuantizeLinear_W_1": { + "scale": [ + 0.007187051698565483, + 0.007852046750485897, + 0.007723112590610981, + 0.0073546734638512135, + 0.0073627750389277935 + ], + "zero_point": [ + 0, + 0, + 0, + 0, + 0 + ] + } +} \ No newline at end of file diff --git a/tests/onnx/data/reference_scales/weight_sharing_model_mixed.json b/tests/onnx/data/reference_scales/weight_sharing_model_mixed.json new file mode 100644 index 00000000000..f42274c3ae4 --- /dev/null +++ b/tests/onnx/data/reference_scales/weight_sharing_model_mixed.json @@ -0,0 +1,30 @@ +{ + "QuantizeLinear_relu_X_1": { + "scale": 0.0037303713615983725, + "zero_point": 0 + }, + "QuantizeLinear_W_1": { + "scale": [ + 0.015364131890237331, + 0.014594723470509052, + 0.01528658252209425, + 0.015236373990774155, + 0.013113042339682579 + ], + "zero_point": [ + 0, + 0, + 0, + 0, + 0 + ] + }, + "QuantizeLinear_conv_1_1": { + "scale": 0.011836746707558632, + "zero_point": 0 + }, + "QuantizeLinear_conv_2_1": { + "scale": 0.011836746707558632, + "zero_point": 0 + } +} \ No newline at end of file diff --git a/tests/onnx/data/reference_scales/weight_sharing_model_performance.json b/tests/onnx/data/reference_scales/weight_sharing_model_performance.json new file mode 100644 index 00000000000..f42274c3ae4 --- /dev/null +++ 
b/tests/onnx/data/reference_scales/weight_sharing_model_performance.json @@ -0,0 +1,30 @@ +{ + "QuantizeLinear_relu_X_1": { + "scale": 0.0037303713615983725, + "zero_point": 0 + }, + "QuantizeLinear_W_1": { + "scale": [ + 0.015364131890237331, + 0.014594723470509052, + 0.01528658252209425, + 0.015236373990774155, + 0.013113042339682579 + ], + "zero_point": [ + 0, + 0, + 0, + 0, + 0 + ] + }, + "QuantizeLinear_conv_1_1": { + "scale": 0.011836746707558632, + "zero_point": 0 + }, + "QuantizeLinear_conv_2_1": { + "scale": 0.011836746707558632, + "zero_point": 0 + } +} \ No newline at end of file diff --git a/tests/onnx/models.py b/tests/onnx/models.py index e36a7128c4d..671a4ad8d31 100644 --- a/tests/onnx/models.py +++ b/tests/onnx/models.py @@ -419,7 +419,7 @@ def __init__(self): model_output_name = "Y" model_output_channels = 5 Y = onnx.helper.make_tensor_value_info(model_output_name, onnx.TensorProto.FLOAT, [1, model_output_channels]) - rng = np.random.default_rng(seed=0) + rng = get_random_generator() shape = [1, 1, model_input_channels, model_output_channels] w_tensor = create_initializer_tensor( name="W", tensor_array=rng.uniform(0, 1, shape).astype(np.float32), data_type=onnx.TensorProto.FLOAT @@ -492,18 +492,19 @@ class WeightSharingModel(ONNXReferenceModel): # | # Y def __init__(self): - input_shape = output_shape = [1, 1, 5, 5] - + input_shape = [1, 1, 5, 5] + output_shape = [1, 5, 5, 5] + W_shape = [5, 1, 3, 3] model_input_name = "X" X = onnx.helper.make_tensor_value_info(model_input_name, onnx.TensorProto.FLOAT, input_shape) model_output_name = "Y" Y = onnx.helper.make_tensor_value_info(model_output_name, onnx.TensorProto.FLOAT, output_shape) - W = np.array( - [[[[1.0, 1.0, 1.0], [1.0, 1.0, 1.0], [1.0, 1.0, 1.0]]]] # (1, 1, 3, 3) tensor for convolution weights - ).astype(np.float32) + rng = get_random_generator() - w_tensor = create_initializer_tensor(name="W", tensor_array=W, data_type=onnx.TensorProto.FLOAT) + w_tensor = create_initializer_tensor( + name="W", tensor_array=rng.uniform(0, 1, W_shape), data_type=onnx.TensorProto.FLOAT + ) relu_x_node = onnx.helper.make_node( name="Relu", @@ -1184,3 +1185,562 @@ def __init__(self): model = onnx.helper.make_model(graph_def, opset_imports=[op]) onnx.checker.check_model(model) super().__init__(model, [input_shape], "non_shape_model.dot") + + +@ALL_SYNTHETIC_MODELS.register() +class MatMulWeightModel(ONNXReferenceModel): + # X W + # \ / + # MatMul + # | + # softmax + def __init__(self): + model_input_name, model_output_name = "X", "Y" + model_input_channels, model_output_channels = 10, 5 + input_shape = [1, model_input_channels] + + X = onnx.helper.make_tensor_value_info(model_input_name, onnx.TensorProto.FLOAT, input_shape) + Y = onnx.helper.make_tensor_value_info(model_output_name, onnx.TensorProto.FLOAT, [1, model_output_channels]) + + rng = np.random.default_rng(seed=0) + shape = [model_input_channels, model_output_channels] + w_tensor = create_initializer_tensor( + name="W", tensor_array=rng.uniform(0, 1, shape).astype(np.float32), data_type=onnx.TensorProto.FLOAT + ) + + matmul_node = onnx.helper.make_node( + name="MatMul", op_type="MatMul", inputs=[model_input_name, "W"], outputs=["logit"] + ) + + softmax_node = onnx.helper.make_node( + name="Softmax", + op_type="Softmax", + inputs=["logit"], + outputs=["Y"], + ) + + graph_def = onnx.helper.make_graph( + nodes=[matmul_node, softmax_node], + name="Net", + inputs=[X], + outputs=[Y], + initializer=[w_tensor], + ) + + op = onnx.OperatorSetIdProto() + op.version = OPSET_VERSION + model 
= onnx.helper.make_model(graph_def, opset_imports=[op]) + onnx.checker.check_model(model) + super().__init__(model, [input_shape], "weight_matmul_model.dot") + + +@ALL_SYNTHETIC_MODELS.register() +class MatMulActivationModel(ONNXReferenceModel): + # X Y + # | | + # \ / + # MatMul + # | + # softmax + def __init__(self): + model_input_name_1, model_input_name_2, model_output_name = "X", "Y", "Z" + channels = 10 + x_input_shape = [channels, 1] + y_input_shape = [1, channels] + + X = onnx.helper.make_tensor_value_info(model_input_name_1, onnx.TensorProto.FLOAT, x_input_shape) + Y = onnx.helper.make_tensor_value_info(model_input_name_2, onnx.TensorProto.FLOAT, y_input_shape) + Z = onnx.helper.make_tensor_value_info(model_output_name, onnx.TensorProto.FLOAT, [channels, channels]) + + matmul_node = onnx.helper.make_node(name="MatMul", op_type="MatMul", inputs=["X", "Y"], outputs=["logit"]) + + softmax_node = onnx.helper.make_node( + name="Softmax", + op_type="Softmax", + inputs=["logit"], + outputs=["Z"], + ) + + graph_def = onnx.helper.make_graph(nodes=[matmul_node, softmax_node], name="Net", inputs=[X, Y], outputs=[Z]) + + op = onnx.OperatorSetIdProto() + op.version = OPSET_VERSION + model = onnx.helper.make_model(graph_def, opset_imports=[op]) + onnx.checker.check_model(model) + super().__init__(model, [x_input_shape, y_input_shape], "activation_matmul_model.dot") + + +@ALL_SYNTHETIC_MODELS.register() +class GEMMTransposeWeightModel(ONNXReferenceModel): + # X W(Transposed) + # | | + # Identity | + # \ / + # Gemm + # | + # softmax + def __init__(self): + model_input_name, model_output_name = "X", "Y" + model_input_channels, model_output_channels = 10, 5 + input_shape = [1, model_input_channels] + + X = onnx.helper.make_tensor_value_info(model_input_name, onnx.TensorProto.FLOAT, input_shape) + Y = onnx.helper.make_tensor_value_info(model_output_name, onnx.TensorProto.FLOAT, [1, model_output_channels]) + + rng = np.random.default_rng(seed=0) + shape = [model_output_channels, model_input_channels] + w_tensor = create_initializer_tensor( + name="W", tensor_array=rng.uniform(0, 1, shape).astype(np.float32), data_type=onnx.TensorProto.FLOAT + ) + + identity_node = onnx.helper.make_node( + name="Identity", op_type="Identity", inputs=[model_input_name], outputs=["identity"] + ) + + gemm_node = onnx.helper.make_node( + name="Gemm", op_type="Gemm", inputs=["identity", "W"], outputs=["logit"], transB=1 + ) + + softmax_node = onnx.helper.make_node( + name="Softmax", + op_type="Softmax", + inputs=["logit"], + outputs=["Y"], + ) + + graph_def = onnx.helper.make_graph( + nodes=[identity_node, gemm_node, softmax_node], + name="Net", + inputs=[X], + outputs=[Y], + initializer=[w_tensor], + ) + + op = onnx.OperatorSetIdProto() + op.version = OPSET_VERSION + model = onnx.helper.make_model(graph_def, opset_imports=[op]) + onnx.checker.check_model(model) + super().__init__(model, [input_shape], "gemm_weight_transpose_model.dot") + + +@ALL_SYNTHETIC_MODELS.register() +class WeightPropagationMatMulModel(ONNXReferenceModel): + # Identity + # | + # X Identity + # \ / + # MatMul + # | Constant + # | / + # MatMul + # | + # Y + def __init__(self): + model_input_name, model_output_name = "X", "Y" + model_input_channels = 10 + matmul_output_channels = 5 + input_shape = [1, model_input_channels] + X = onnx.helper.make_tensor_value_info(model_input_name, onnx.TensorProto.FLOAT, input_shape) + Y = onnx.helper.make_tensor_value_info(model_output_name, onnx.TensorProto.FLOAT, [1, model_input_channels]) + + rng = 
np.random.default_rng(seed=0) + shape = [model_input_channels, matmul_output_channels] + + # Create MatMul + w_tensor = create_initializer_tensor( + name="W_tensor", tensor_array=rng.uniform(0, 1, shape).astype(np.float32), data_type=onnx.TensorProto.FLOAT + ) + identity_1 = onnx.helper.make_node(name="Identity_1", op_type="Identity", inputs=["W_tensor"], outputs=["i_1"]) + identity_2 = onnx.helper.make_node(name="Identity_2", op_type="Identity", inputs=["i_1"], outputs=["i_2"]) + matmul_1 = onnx.helper.make_node( + name="MatMul_1", op_type="MatMul", inputs=[model_input_name, "i_2"], outputs=["mm_1"] + ) + matmul_2_shape = (matmul_output_channels, model_input_channels) + constant_data = rng.uniform(0, 1, matmul_2_shape).astype(np.float32) # Randomly initialized weight tensor + constant_initializer = onnx.helper.make_tensor( + name="constant_data", + data_type=onnx.TensorProto.FLOAT, + dims=constant_data.shape, + vals=constant_data.flatten(), + ) + constant = onnx.helper.make_node("Constant", [], ["const"], name="constant", value=constant_initializer) + matmul_2 = onnx.helper.make_node( + name="MatMul_2", op_type="MatMul", inputs=["mm_1", "const"], outputs=[model_output_name] + ) + + graph_def = onnx.helper.make_graph( + nodes=[identity_1, identity_2, matmul_1, constant, matmul_2], + name="Net", + inputs=[X], + outputs=[Y], + initializer=[w_tensor, constant_initializer], + ) + + op = onnx.OperatorSetIdProto() + op.version = OPSET_VERSION + model = onnx.helper.make_model(graph_def, opset_imports=[op]) + onnx.checker.check_model(model) + super().__init__(model, [input_shape], "weight_propagation_matmul_model.dot") + + +@ALL_SYNTHETIC_MODELS.register() +class WeightPropagationConvModel(ONNXReferenceModel): + # X Reshape + # | | + # | Transpose + # | | + # \ Identiy + # \ / + # \ / + # Conv + # | + # | Constant + # | / + # | Reshape + # | / + # | Identity + # | / + # Conv + # | + # | Constant + # | / + # Conv + # | + # | + # | + def __init__(self): + input_shape = (1, 1, 28, 28) # Example shape, change as required + Y = onnx.helper.make_tensor_value_info("output", onnx.TensorProto.FLOAT, [1, 1, 28, 28]) + input_tensor = onnx.helper.make_tensor_value_info("input", onnx.TensorProto.FLOAT, input_shape) + rng = get_random_generator() + + # Layer 1: Convolution 1 + conv_output1 = "conv_output1" + conv1_shape = (1, 1, 3, 3) + conv1_weight = rng.uniform(0, 1, conv1_shape).astype(np.float32) # Randomly initialized weight tensor + + conv1_weight_initializer = onnx.helper.make_tensor( + name="conv1_weight", + data_type=onnx.TensorProto.FLOAT, + dims=conv1_weight.shape, + vals=conv1_weight.flatten(), + ) + + # Layer 1: Identity -> Transpose -> Reshape + identity_output1 = "identity_output1" + transpose_output = "transpose_output" + reshape_output = "reshape_output" + reshape_1_tensor_name = "w_r_1" + reshape_1_initializer_tensor = create_initializer_tensor( + name=reshape_1_tensor_name, + tensor_array=np.array(conv1_shape).astype(np.int64), + data_type=onnx.TensorProto.INT64, + ) + reshape_node = onnx.helper.make_node( + "Reshape", ["conv1_weight", reshape_1_tensor_name], [reshape_output], name="reshape" + ) + transpose_node = onnx.helper.make_node( + "Transpose", [reshape_output], [transpose_output], name="transpose", perm=[0, 1, 3, 2] + ) + identity_node1 = onnx.helper.make_node("Identity", [transpose_output], [identity_output1], name="identity1") + + conv1_node = onnx.helper.make_node( + "Conv", ["input", identity_output1], [conv_output1], name="conv1", kernel_shape=[3, 3], pads=[1, 1, 1, 1] + ) + + # 
Layer 3: Convolution 2 + identity_output2 = "identity_output2" + reshape_output2 = "reshape_output2" + constant_output = "constant_output" + conv_output2 = "conv_output2" + conv2_shape = (1, 1, 3, 3) + conv2_node = onnx.helper.make_node( + "Conv", + [conv_output1, identity_output2], + [conv_output2], + name="conv2", + kernel_shape=[3, 3], + pads=[1, 1, 1, 1], + ) + + # Layer 4: Identity -> Reshape -> Constant + constant_data = rng.uniform(0, 1, conv2_shape).astype(np.float32) # Randomly initialized weight tensor + reshape_2_tensor_name = "w_r_2" + + reshape_2_initializer_tensor = create_initializer_tensor( + name=reshape_2_tensor_name, + tensor_array=np.array((1, 1, 3, 3)).astype(np.int64), + data_type=onnx.TensorProto.INT64, + ) + constant_initializer = onnx.helper.make_tensor( + name="constant_data", + data_type=onnx.TensorProto.FLOAT, + dims=constant_data.shape, + vals=constant_data.flatten(), + ) + constant_node = onnx.helper.make_node( + "Constant", [], [constant_output], name="constant", value=constant_initializer + ) + reshape_node2 = onnx.helper.make_node( + "Reshape", [constant_output, reshape_2_tensor_name], [reshape_output2], name="reshape2" + ) + identity_node2 = onnx.helper.make_node("Identity", [reshape_output2], [identity_output2], name="identity2") + + # Layer 6: Convolution 3 + constant_output2 = "constant_output2" + conv4_shape = (1, 1, 3, 3) + constant_data2 = rng.uniform(0, 1, conv4_shape).astype(np.float32) # Randomly initialized weight tensor + constant_initializer2 = onnx.helper.make_tensor( + name="constant_data2", + data_type=onnx.TensorProto.FLOAT, + dims=constant_data2.shape, + vals=constant_data2.flatten(), + ) + constant_2_node = onnx.helper.make_node( + "Constant", [], [constant_output2], name="constant2", value=constant_initializer2 + ) + conv4_node = onnx.helper.make_node( + "Conv", + [conv_output2, constant_output2], + ["output"], + name="conv4", + kernel_shape=[3, 3], + pads=[1, 1, 1, 1], + ) + + # Create the graph with all the nodes + graph_def = onnx.helper.make_graph( + [ + reshape_node, + transpose_node, + identity_node1, + conv1_node, + constant_node, + reshape_node2, + identity_node2, + conv2_node, + constant_2_node, + conv4_node, + ], + "example_model", + [input_tensor], + [Y], + [ + conv1_weight_initializer, + constant_initializer, + reshape_1_initializer_tensor, + reshape_2_initializer_tensor, + constant_initializer2, + ], + ) + + # Create the model with the graph + op = onnx.OperatorSetIdProto() + op.version = OPSET_VERSION + model = onnx.helper.make_model(graph_def, opset_imports=[op]) + onnx.checker.check_model(model) + super().__init__(model, [input_shape], "weight_propagation_conv_model.dot") + + +@ALL_SYNTHETIC_MODELS.register() +class EmbeddingModel(ONNXReferenceModel): + # Constant + # | + # X Identity + # \ / + # Gather + # | + # Gather + # | + # MatMul + # | + # Y + def __init__(self): + model_input_name, model_output_name = "X", "Y" + model_input_channels = 10 + model_output_channels = 10 + input_shape = [1, model_input_channels] + X = onnx.helper.make_tensor_value_info(model_input_name, onnx.TensorProto.INT64, input_shape) + Y = onnx.helper.make_tensor_value_info(model_output_name, onnx.TensorProto.FLOAT, [10, model_output_channels]) + + rng = np.random.default_rng(seed=0) + + embedding_output_node_name = "Embedding_Y" + embedding_weights_tensor_name = "Embedding_W" + embedding_weights_tensor = create_initializer_tensor( + name=embedding_weights_tensor_name, + tensor_array=rng.uniform(0, 1, (10, 20)).astype(np.float32), + 
data_type=onnx.TensorProto.FLOAT, + ) + + identity_output_name = "Identity_Y" + identity_node = onnx.helper.make_node( + name="Identity", + op_type="Identity", + inputs=[embedding_weights_tensor_name], + outputs=[identity_output_name], + ) + + embedding_node = onnx.helper.make_node( + name="Embedding", + op_type="Gather", + axis=0, + inputs=[identity_output_name, model_input_name], + outputs=[embedding_output_node_name], + ) + + gather_output_node_name = "Gather_Y" + gather_indices_tensor_name = "Gather_I" + gather_indices_initializer_tensor = create_initializer_tensor( + name=gather_indices_tensor_name, tensor_array=np.int64(0), data_type=onnx.TensorProto.INT64 + ) + gather_node = onnx.helper.make_node( + name="Gather", + op_type="Gather", + axis=0, + inputs=[embedding_output_node_name, gather_indices_tensor_name], + outputs=[gather_output_node_name], + ) + + shape = [20, model_output_channels] + w_tensor_name = "W" + w_tensor = create_initializer_tensor( + name=w_tensor_name, + tensor_array=rng.uniform(0, 1, shape).astype(np.float32), + data_type=onnx.TensorProto.FLOAT, + ) + + matmul_node = onnx.helper.make_node( + name="MatMul", + op_type="MatMul", + inputs=[gather_output_node_name, w_tensor_name], + outputs=[model_output_name], + ) + + graph_def = onnx.helper.make_graph( + nodes=[identity_node, embedding_node, gather_node, matmul_node], + name="EmbeddingModel", + inputs=[X], + outputs=[Y], + initializer=[embedding_weights_tensor, gather_indices_initializer_tensor, w_tensor], + ) + + op = onnx.OperatorSetIdProto() + op.version = OPSET_VERSION + model = onnx.helper.make_model(graph_def, opset_imports=[op]) + onnx.checker.check_model(model) + super().__init__(model, [input_shape], "embedding_model.dot") + + +@ALL_SYNTHETIC_MODELS.register() +class UnifiedEmbeddingModel(ONNXReferenceModel): + # X + # / \ + # | Convert + # | \ + # MatMul Gather + # | | + # Reshape | + # \ / + # Concat + # | + # MatMul + # | + # Y + def __init__(self): + model_input_name, model_output_name = "X", "Y" + model_input_channels = 3 + model_output_channels = 6 + input_shape = [1, model_input_channels] + X = onnx.helper.make_tensor_value_info(model_input_name, onnx.TensorProto.FLOAT, input_shape) + Y = onnx.helper.make_tensor_value_info(model_output_name, onnx.TensorProto.FLOAT, [1, model_output_channels]) + + rng = np.random.default_rng(seed=0) + + cast_output_name = "Cast_Y" + cast_node = onnx.helper.make_node( + name="Cast", + op_type="Cast", + to=onnx.TensorProto.INT64, + inputs=[model_input_name], + outputs=[cast_output_name], + ) + + embedding_output_name = "Embedding_Y" + embedding_tensor_name = "Embedding_W" + embedding_tensor = create_initializer_tensor( + name=embedding_tensor_name, + tensor_array=rng.uniform(0, 1, (4, 5)).astype(np.float32), + data_type=onnx.TensorProto.FLOAT, + ) + embedding_node = onnx.helper.make_node( + name="Embedding", + op_type="Gather", + axis=0, + inputs=[embedding_tensor_name, cast_output_name], + outputs=[embedding_output_name], + ) + + matmul_1_tensor_name = "W_1" + matmul_1_output_name = "MatMul_1_Y" + matmul_1_tensor = create_initializer_tensor( + name=matmul_1_tensor_name, + tensor_array=rng.uniform(0, 1, (3, 3, 5)).astype(np.float32), + data_type=onnx.TensorProto.FLOAT, + ) + matmul_1_node = onnx.helper.make_node( + name="MatMul_1", + op_type="MatMul", + inputs=[model_input_name, matmul_1_tensor_name], + outputs=[matmul_1_output_name], + ) + + reshape_tensor_name = "R" + reshape_tensor = create_initializer_tensor( + name=reshape_tensor_name, + tensor_array=np.array([1, 
3, 5]).astype(np.float32), + data_type=onnx.TensorProto.FLOAT, + ) + reshape_output_name = "Reshape_Y" + reshape_node = onnx.helper.make_node( + name="Reshape", + op_type="Reshape", + inputs=[matmul_1_output_name, reshape_tensor_name], + outputs=[reshape_output_name], + ) + + concat_output_name = "Concat_Y" + concat_node = onnx.helper.make_node( + name="Concat", + op_type="Concat", + inputs=[embedding_output_name, reshape_output_name], + outputs=[concat_output_name], + axis=0, + ) + + matmul_2_tensor_name = "W_2" + matmul_2_tensor = create_initializer_tensor( + name=matmul_2_tensor_name, + tensor_array=rng.uniform(0, 1, (1, 5)).astype(np.float32), + data_type=onnx.TensorProto.FLOAT, + ) + matmul_2_node = onnx.helper.make_node( + name="MatMul_2", + op_type="MatMul", + inputs=[concat_output_name, matmul_2_tensor_name], + outputs=[model_output_name], + ) + + graph_def = onnx.helper.make_graph( + nodes=[cast_node, embedding_node, matmul_1_node, reshape_node, concat_node, matmul_2_node], + name="UnifiedEmbeddingModel", + inputs=[X], + outputs=[Y], + initializer=[embedding_tensor, matmul_1_tensor, matmul_2_tensor, reshape_tensor], + ) + + op = onnx.OperatorSetIdProto() + op.version = OPSET_VERSION + model = onnx.helper.make_model(graph_def, opset_imports=[op]) + onnx.checker.check_model(model) + super().__init__(model, [input_shape], "unified_embedding_model.dot") diff --git a/tests/onnx/quantization/common.py b/tests/onnx/quantization/common.py index c37c006ab95..1d3464882fe 100644 --- a/tests/onnx/quantization/common.py +++ b/tests/onnx/quantization/common.py @@ -63,7 +63,7 @@ def transform_fn(i): input_np_dtype = onnx.helper.tensor_dtype_to_np_dtype(input_dtype) shape = ONNXGraph.get_edge_shape(edge) rng = get_random_generator() - tensor = rng.uniform(0, 1, shape).astype(input_np_dtype) + tensor = rng.uniform(-1, 1, shape).astype(input_np_dtype) if has_batch_dim: tensor = np.squeeze(tensor, axis=0) output[key] = tensor @@ -81,7 +81,7 @@ def transform_fn(data_item): class ModelToTest: - def __init__(self, model_name: str, input_shape: List[int]): + def __init__(self, model_name: str, input_shape: Optional[List[int]] = None): self.model_name = model_name self.path_ref_graph = self.model_name + ".dot" self.input_shape = input_shape @@ -95,6 +95,7 @@ def min_max_quantize_model( ) -> onnx.ModelProto: if convert_model_opset: original_model = convert_opset_version(original_model) + graph = GraphConverter.create_nncf_graph(original_model) dataset = get_random_dataset_for_test(original_model, dataset_has_batch_size) quantization_params = {} if quantization_params is None else quantization_params @@ -104,7 +105,7 @@ def min_max_quantize_model( post_training_quantization = PostTrainingQuantization(subset_size=1, **quantization_params) - quantized_model = post_training_quantization.apply(original_model, dataset=dataset) + quantized_model = post_training_quantization.apply(original_model, graph, dataset=dataset) return quantized_model @@ -116,10 +117,11 @@ def ptq_quantize_model( ) -> onnx.ModelProto: if convert_model_opset: original_model = convert_opset_version(original_model) + graph = GraphConverter.create_nncf_graph(original_model) dataset = get_random_dataset_for_test(original_model, dataset_has_batch_size) quantization_params = {} if quantization_params is None else quantization_params post_training_quantization = PostTrainingQuantization(subset_size=1, **quantization_params) - quantized_model = post_training_quantization.apply(original_model, dataset=dataset) + quantized_model = 
post_training_quantization.apply(original_model, graph, dataset=dataset) return quantized_model diff --git a/tests/onnx/quantization/test_bias_correction.py b/tests/onnx/quantization/test_bias_correction.py new file mode 100644 index 00000000000..2e09c072868 --- /dev/null +++ b/tests/onnx/quantization/test_bias_correction.py @@ -0,0 +1,199 @@ +# Copyright (c) 2023 Intel Corporation +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from typing import Dict, List + +import numpy as np +import onnx +import pytest +import torch + +from nncf.common.factory import NNCFGraphFactory +from nncf.onnx.graph.model_utils import remove_fq_from_inputs +from nncf.onnx.graph.nncf_graph_builder import GraphConverter +from nncf.onnx.graph.node_utils import get_bias_value +from nncf.quantization.algorithms.bias_correction.onnx_backend import ONNXBiasCorrectionAlgoBackend +from tests.onnx.quantization.common import compare_nncf_graph +from tests.post_training.test_templates.test_bias_correction import TemplateTestBCAlgorithm +from tests.shared.paths import TEST_ROOT + + +def get_data_from_node(model: onnx.ModelProto, node_name: str): + data = [t for t in model.graph.initializer if t.name == node_name] + if data: + return onnx.numpy_helper.to_array(data[0]) + return None + + +class TestONNXBCAlgorithm(TemplateTestBCAlgorithm): + @staticmethod + def list_to_backend_type(data: List) -> np.ndarray: + return np.array(data) + + @staticmethod + def get_backend() -> ONNXBiasCorrectionAlgoBackend: + return ONNXBiasCorrectionAlgoBackend + + @staticmethod + def backend_specific_model(model: torch.nn.Module, tmp_dir: str) -> onnx.ModelProto: + onnx_path = f"{tmp_dir}/model.onnx" + torch.onnx.export(model, torch.rand(model.INPUT_SIZE), onnx_path, opset_version=13, input_names=["input.1"]) + onnx_model = onnx.load(onnx_path) + return onnx_model + + @staticmethod + def fn_to_type(tensor) -> np.ndarray: + return np.array(tensor) + + @staticmethod + def get_transform_fn() -> callable: + def transform_fn(data_item): + tensor, _ = data_item + return {"input.1": tensor} + + return transform_fn + + @staticmethod + def remove_fq_from_inputs(model: onnx.ModelProto) -> onnx.ModelProto: + graph = GraphConverter.create_nncf_graph(model) + return remove_fq_from_inputs(model, graph) + + @staticmethod + def get_ref_path(suffix: str) -> str: + return TEST_ROOT / "onnx" / "data" / "reference_graphs" / "quantization" / "subgraphs" / f"{suffix}.dot" + + @staticmethod + def compare_nncf_graphs(model: onnx.ModelProto, ref_path: str) -> None: + return compare_nncf_graph(model, ref_path) + + @staticmethod + def check_bias(model: onnx.ModelProto, ref_biases: Dict) -> None: + nncf_graph = NNCFGraphFactory.create(model) + for ref_name, ref_value in ref_biases.items(): + node = nncf_graph.get_node_by_name(ref_name) + ref_value = np.array(ref_value) + curr_value = get_bias_value(node, model) + # TODO(AlexanderDokuchaev): return atol=0.0001 after fix 109189 + assert np.all(np.isclose(curr_value, ref_value, atol=0.01)), f"{curr_value} != {ref_value}" + + 
@pytest.mark.parametrize( + "layer_name, ref_data", + ( + ( + "/conv_1/Conv", + { + "collected_inputs": {"/conv_1/Conv": ("input.1", 0)}, + "subgraph_data": { + "subgraph_input_names": {"/conv_1/Conv"}, + "subgraph_output_names": {"/maxpool_1/MaxPool", "/Split"}, + "subgraph_output_ids": {("/Split", 0), ("/maxpool_1/MaxPool", 0), ("/Split", 1)}, + }, + }, + ), + ( + "/conv_2/Conv", + { + "collected_inputs": { + "/conv_1/Conv": ("input.1", 0), + "/conv_2/Conv": ("/maxpool_1/MaxPool", 0), + "/conv_4/Conv": ("/Split", 0), + "/conv_6/Conv": ("/Split", 1), + }, + "subgraph_data": { + "subgraph_input_names": {"/conv_2/Conv"}, + "subgraph_output_names": {"/Relu_1"}, + "subgraph_output_ids": {("/Relu_1", 0)}, + }, + }, + ), + ( + "/conv_3/Conv", + { + "collected_inputs": { + "/conv_1/Conv": ("input.1", 0), + "/conv_2/Conv": ("/maxpool_1/MaxPool", 0), + "/conv_3/Conv": ("/Relu_1", 0), + "/conv_4/Conv": ("/Split", 0), + "/conv_6/Conv": ("/Split", 1), + }, + "subgraph_data": { + "subgraph_input_names": {"/conv_1/Conv", "/conv_3/Conv"}, + "subgraph_output_names": {"/Split"}, + "subgraph_output_ids": {("/Split", 0), ("/Split", 1)}, + }, + }, + ), + ( + "/conv_4/Conv", + { + "collected_inputs": { + "/conv_4/Conv": ("/Split", 0), + "/conv_6/Conv": ("/Split", 1), + }, + "subgraph_data": { + "subgraph_input_names": {"/conv_4/Conv"}, + "subgraph_output_names": {"/Relu_2"}, + "subgraph_output_ids": {("/Relu_2", 0)}, + }, + }, + ), + ( + "/conv_6/Conv", + { + "collected_inputs": { + "/conv_5/Conv": ("/Relu_2", 0), + "/conv_6/Conv": ("/Split", 1), + }, + "subgraph_data": { + "subgraph_input_names": {"/conv_5/Conv", "/conv_6/Conv"}, + "subgraph_output_names": {"/Add_3", "/Concat"}, + "subgraph_output_ids": {("/Add_3", 0), ("/Concat", 0)}, + }, + }, + ), + ( + "/conv_10/Conv", + { + "collected_inputs": { + "/conv_8/Conv": ("/conv_7/Conv", 0), + "/conv_9/Conv": ("/Add_3", 0), + "/conv_10/Conv": ("/Concat", 0), + }, + "subgraph_data": { + "subgraph_input_names": { + "/conv_8/Conv", + "/conv_9/Conv", + "/conv_10/Conv", + }, + "subgraph_output_names": {"/Concat_1"}, + "subgraph_output_ids": {("/Concat_1", 0)}, + }, + }, + ), + # Disabled, because ONNX backend doesn't support bias correction for MatMul + # Ticket - CVS-115696 + # ( + # "/MatMul", + # { + # "collected_inputs": { + # "/MatMul": ("/Reshape", 0), + # }, + # "subgraph_data": { + # "subgraph_input_names": {"/MatMul"}, + # "subgraph_output_names": {"/Reshape_1", "/Add_4"}, + # "subgraph_output_ids": {("/Reshape_1", 0), ("/Add_4", 0)}, + # }, + # }, + # ), + ), + ) + def test__get_subgraph_data_for_node(self, quantized_test_model, layer_name, ref_data): + return super().test__get_subgraph_data_for_node(quantized_test_model, layer_name, ref_data) diff --git a/tests/onnx/quantization/test_calculation_quantizer_params.py b/tests/onnx/quantization/test_calculation_quantizer_params.py index 2ad1c54b70d..1e02b4eb25a 100644 --- a/tests/onnx/quantization/test_calculation_quantizer_params.py +++ b/tests/onnx/quantization/test_calculation_quantizer_params.py @@ -12,10 +12,10 @@ import numpy as np import pytest -from nncf.onnx.quantization.quantizer_parameters import calculate_scale_zero_point from nncf.onnx.quantization.quantizer_parameters import get_level_low_level_high from nncf.onnx.statistics.collectors import ONNXMinMaxTensorStatistic -from tests.post_training.test_calculate_quantizer_parameters import TemplateTestFQParams +from nncf.quantization.fake_quantize import calculate_scale_zero_point +from 
tests.post_training.test_templates.test_calculate_quantizer_parameters import TemplateTestFQParams @pytest.mark.parametrize( diff --git a/tests/onnx/quantization/test_classification_models_graph.py b/tests/onnx/quantization/test_classification_models_graph.py index 860188a9199..034d0a55bc0 100644 --- a/tests/onnx/quantization/test_classification_models_graph.py +++ b/tests/onnx/quantization/test_classification_models_graph.py @@ -14,6 +14,8 @@ import torch from torchvision import models +from nncf.parameters import TargetDevice + # pylint: disable=no-member from tests.onnx.conftest import ONNX_MODEL_DIR from tests.onnx.quantization.common import ModelToTest @@ -23,32 +25,37 @@ from tests.onnx.weightless_model import load_model_topology_with_zeros_weights TORCHVISION_TEST_DATA = [ - (ModelToTest("resnet18", [1, 3, 224, 224]), models.resnet18(pretrained=True)), - (ModelToTest("mobilenet_v2", [1, 3, 224, 224]), models.mobilenet_v2(pretrained=True)), - (ModelToTest("mobilenet_v3_small", [1, 3, 224, 224]), models.mobilenet_v3_small(pretrained=True)), - (ModelToTest("inception_v3", [1, 3, 224, 224]), models.inception_v3(pretrained=True)), - (ModelToTest("googlenet", [1, 3, 224, 224]), models.googlenet(pretrained=True)), - (ModelToTest("vgg16", [1, 3, 224, 224]), models.vgg16(pretrained=True)), - (ModelToTest("shufflenet_v2_x1_0", [1, 3, 224, 224]), models.shufflenet_v2_x1_0(pretrained=True)), - (ModelToTest("squeezenet1_0", [1, 3, 224, 224]), models.squeezenet1_0(pretrained=True)), - (ModelToTest("densenet121", [1, 3, 224, 224]), models.densenet121(pretrained=True)), - (ModelToTest("mnasnet0_5", [1, 3, 224, 224]), models.mnasnet0_5(pretrained=True)), + (ModelToTest("resnet18", [1, 3, 224, 224]), models.resnet18(pretrained=True), {}), + ( + ModelToTest("resnet50_cpu_spr", [1, 3, 224, 224]), + models.resnet50(pretrained=True), + {"target_device": TargetDevice.CPU_SPR}, + ), + (ModelToTest("mobilenet_v2", [1, 3, 224, 224]), models.mobilenet_v2(pretrained=True), {}), + (ModelToTest("mobilenet_v3_small", [1, 3, 224, 224]), models.mobilenet_v3_small(pretrained=True), {}), + (ModelToTest("inception_v3", [1, 3, 224, 224]), models.inception_v3(pretrained=True), {}), + (ModelToTest("googlenet", [1, 3, 224, 224]), models.googlenet(pretrained=True), {}), + (ModelToTest("vgg16", [1, 3, 224, 224]), models.vgg16(pretrained=True), {}), + (ModelToTest("shufflenet_v2_x1_0", [1, 3, 224, 224]), models.shufflenet_v2_x1_0(pretrained=True), {}), + (ModelToTest("squeezenet1_0", [1, 3, 224, 224]), models.squeezenet1_0(pretrained=True), {}), + (ModelToTest("densenet121", [1, 3, 224, 224]), models.densenet121(pretrained=True), {}), + (ModelToTest("mnasnet0_5", [1, 3, 224, 224]), models.mnasnet0_5(pretrained=True), {}), ] @pytest.mark.parametrize( - ("model_to_test", "model"), + ("model_to_test", "model", "quantization_parameters"), TORCHVISION_TEST_DATA, ids=[model_to_test[0].model_name for model_to_test in TORCHVISION_TEST_DATA], ) -def test_min_max_quantization_graph_torchvision_models(tmp_path, mocker, model_to_test, model): +def test_min_max_quantization_graph_torchvision_models(tmp_path, mocker, model_to_test, model, quantization_parameters): mock_collect_statistics(mocker) onnx_model_path = tmp_path / (model_to_test.model_name + ".onnx") x = torch.randn(model_to_test.input_shape, requires_grad=False) torch.onnx.export(model, x, onnx_model_path, opset_version=13) original_model = onnx.load(onnx_model_path) - quantized_model = min_max_quantize_model(original_model) + quantized_model = 
min_max_quantize_model(original_model, quantization_params=quantization_parameters) compare_nncf_graph(quantized_model, model_to_test.path_ref_graph) @@ -64,4 +71,5 @@ def test_min_max_quantization_graph_onnx_model(tmp_path, mocker, model_to_test): original_model = load_model_topology_with_zeros_weights(onnx_model_path) quantized_model = min_max_quantize_model(original_model) + onnx.save_model(quantized_model, tmp_path / (model_to_test.model_name + "_int8.onnx")) compare_nncf_graph(quantized_model, model_to_test.path_ref_graph) diff --git a/tests/onnx/quantization/test_detection_models_graph.py b/tests/onnx/quantization/test_detection_models_graph.py index 0dbb949e079..d074961ac1a 100644 --- a/tests/onnx/quantization/test_detection_models_graph.py +++ b/tests/onnx/quantization/test_detection_models_graph.py @@ -9,6 +9,7 @@ # See the License for the specific language governing permissions and # limitations under the License. +import onnx import pytest from nncf.scopes import IgnoredScope @@ -59,4 +60,5 @@ def test_min_max_quantization_graph(tmp_path, mocker, model_to_test): quantized_model = min_max_quantize_model( original_model, convert_model_opset=convert_opset_version, quantization_params={"ignored_scope": ignored_scopes} ) + onnx.save_model(quantized_model, tmp_path / (model_to_test.model_name + "_int8.onnx")) compare_nncf_graph(quantized_model, model_to_test.path_ref_graph) diff --git a/tests/onnx/quantization/test_fast_bias_correction.py b/tests/onnx/quantization/test_fast_bias_correction.py new file mode 100644 index 00000000000..c294723bdbc --- /dev/null +++ b/tests/onnx/quantization/test_fast_bias_correction.py @@ -0,0 +1,71 @@ +# Copyright (c) 2023 Intel Corporation +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
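+
+# ONNX backend specialization of the shared FastBiasCorrection template tests
+# (tests/post_training/test_templates/test_fast_bias_correction.py).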
+ +from typing import List + +import numpy as np +import onnx +import torch + +from nncf.common.factory import NNCFGraphFactory +from nncf.onnx.graph.node_utils import get_bias_value +from nncf.onnx.graph.node_utils import is_node_with_bias +from nncf.quantization.algorithms.fast_bias_correction.onnx_backend import ONNXFastBiasCorrectionAlgoBackend +from tests.post_training.test_templates.test_fast_bias_correction import TemplateTestFBCAlgorithm + + +def get_data_from_node(model: onnx.ModelProto, node_name: str): + data = [t for t in model.graph.initializer if t.name == node_name] + if data: + return onnx.numpy_helper.to_array(data[0]) + return None + + +class TestONNXFBCAlgorithm(TemplateTestFBCAlgorithm): + @staticmethod + def list_to_backend_type(data: List) -> np.ndarray: + return np.array(data) + + @staticmethod + def get_backend() -> ONNXFastBiasCorrectionAlgoBackend: + return ONNXFastBiasCorrectionAlgoBackend + + @staticmethod + def backend_specific_model(model, tmp_dir: str): + onnx_path = f"{tmp_dir}/model.onnx" + torch.onnx.export(model, torch.rand(model.INPUT_SIZE), onnx_path, opset_version=13, input_names=["input.1"]) + onnx_model = onnx.load(onnx_path) + return onnx_model + + @staticmethod + def fn_to_type(tensor): + return np.array(tensor) + + @staticmethod + def get_transform_fn(): + def transform_fn(data_item): + tensor, _ = data_item + return {"input.1": tensor} + + return transform_fn + + @staticmethod + def check_bias(model: onnx.ModelProto, ref_bias: list): + ref_bias = np.array(ref_bias) + nncf_graph = NNCFGraphFactory.create(model) + for node in nncf_graph.get_all_nodes(): + if not is_node_with_bias(node): + continue + bias_value = get_bias_value(node, model) + # TODO(AlexanderDokuchaev): return atol=0.0001 after fix 109189 + assert np.all(np.isclose(bias_value, ref_bias, atol=0.01)), f"{bias_value} != {ref_bias}" + return + raise ValueError("Not found node with bias") diff --git a/tests/onnx/quantization/test_min_max.py b/tests/onnx/quantization/test_min_max.py new file mode 100644 index 00000000000..f5f1710268d --- /dev/null +++ b/tests/onnx/quantization/test_min_max.py @@ -0,0 +1,224 @@ +# Copyright (c) 2023 Intel Corporation +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
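+
+# Checks that get_quantization_axis/get_reduction_shape of the ONNX MinMax backend
+# return the expected reduction shapes for per-tensor and per-channel Conv/Gemm weights.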
+from dataclasses import dataclass +from typing import List + +import pytest + +import nncf.onnx.graph.metatypes.onnx_metatypes as om +from nncf.common.graph.graph import NNCFNode +from nncf.common.graph.transformations.commands import TargetType +from nncf.onnx.graph.nncf_graph_builder import ONNXLayerAttributes +from nncf.onnx.graph.node_utils import get_quantization_axis +from nncf.onnx.graph.node_utils import get_reduction_shape +from nncf.onnx.graph.transformations.commands import ONNXTargetPoint + +# pylint: disable=protected-access + + +@dataclass +class TestCase: + nncf_node: NNCFNode + target_point: ONNXTargetPoint + per_channel: bool + ref_reduction_shape: List[int] + + +test_cases = ( + TestCase( + nncf_node=NNCFNode( + { + NNCFNode.ID_NODE_ATTR: 0, + NNCFNode.NODE_NAME_ATTR: "conv_with_weight_per_tensor", + NNCFNode.METATYPE_ATTR: om.ONNXConvolutionMetatype, + NNCFNode.LAYER_ATTRIBUTES: ONNXLayerAttributes(weight_attrs={1: {"shape": [3, 5, 8]}}), + } + ), + target_point=ONNXTargetPoint( + target_type=TargetType.OPERATION_WITH_WEIGHTS, + target_node_name="conv_with_weight_per_tensor", + port_id=1, + ), + per_channel=False, + ref_reduction_shape=None, + ), + TestCase( + nncf_node=NNCFNode( + { + NNCFNode.ID_NODE_ATTR: 0, + NNCFNode.NODE_NAME_ATTR: "conv_with_weight_per_channel", + NNCFNode.METATYPE_ATTR: om.ONNXConvolutionMetatype, + NNCFNode.LAYER_ATTRIBUTES: ONNXLayerAttributes(weight_attrs={1: {"shape": [3, 5, 8]}}), + } + ), + target_point=ONNXTargetPoint( + target_type=TargetType.OPERATION_WITH_WEIGHTS, + target_node_name="gemm_with_weight_per_channel_0_port", + port_id=1, + ), + per_channel=True, + ref_reduction_shape=(1, 2), + ), + TestCase( + nncf_node=NNCFNode( + { + NNCFNode.ID_NODE_ATTR: 0, + NNCFNode.NODE_NAME_ATTR: "gemm_with_weight_per_tensor", + NNCFNode.METATYPE_ATTR: om.ONNXGemmMetatype, + NNCFNode.LAYER_ATTRIBUTES: ONNXLayerAttributes(weight_attrs={1: {"shape": [5, 8]}}), + } + ), + target_point=ONNXTargetPoint( + target_type=TargetType.OPERATION_WITH_WEIGHTS, + target_node_name="gemm_with_weight_per_tensor", + port_id=1, + ), + per_channel=False, + ref_reduction_shape=None, + ), + TestCase( + nncf_node=NNCFNode( + { + NNCFNode.ID_NODE_ATTR: 0, + NNCFNode.NODE_NAME_ATTR: "gemm_with_weight_per_channel", + NNCFNode.METATYPE_ATTR: om.ONNXGemmMetatype, + NNCFNode.LAYER_ATTRIBUTES: ONNXLayerAttributes(weight_attrs={1: {"shape": [5, 8]}}), + } + ), + target_point=ONNXTargetPoint( + target_type=TargetType.OPERATION_WITH_WEIGHTS, + target_node_name="gemm_with_weight_per_channel_0_port", + port_id=1, + ), + per_channel=True, + ref_reduction_shape=(0,), + ), + TestCase( + nncf_node=NNCFNode( + { + NNCFNode.ID_NODE_ATTR: 0, + NNCFNode.NODE_NAME_ATTR: "gemm_with_weight_per_channel_extra_attrs", + NNCFNode.METATYPE_ATTR: om.ONNXGemmMetatype, + NNCFNode.LAYER_ATTRIBUTES: ONNXLayerAttributes( + weight_attrs={1: {"shape": [5, 8]}}, node_attrs={"transA": 0, "transB": 0} + ), + } + ), + target_point=ONNXTargetPoint( + target_type=TargetType.OPERATION_WITH_WEIGHTS, + target_node_name="gemm_with_weight_per_channel_extra_attrs", + port_id=1, + ), + per_channel=True, + ref_reduction_shape=(0,), + ), + TestCase( + nncf_node=NNCFNode( + { + NNCFNode.ID_NODE_ATTR: 0, + NNCFNode.NODE_NAME_ATTR: "gemm_with_weight_per_channel_extra_attrs", + NNCFNode.METATYPE_ATTR: om.ONNXGemmMetatype, + NNCFNode.LAYER_ATTRIBUTES: ONNXLayerAttributes( + weight_attrs={1: {"shape": [5, 8]}}, node_attrs={"transA": 1, "transB": 0} + ), + } + ), + target_point=ONNXTargetPoint( + 
target_type=TargetType.OPERATION_WITH_WEIGHTS, + target_node_name="gemm_with_weight_per_channel_extra_attrs", + port_id=1, + ), + per_channel=True, + ref_reduction_shape=(0,), + ), + TestCase( + nncf_node=NNCFNode( + { + NNCFNode.ID_NODE_ATTR: 0, + NNCFNode.NODE_NAME_ATTR: "gemm_with_weight_per_channel_transpose", + NNCFNode.METATYPE_ATTR: om.ONNXGemmMetatype, + NNCFNode.LAYER_ATTRIBUTES: ONNXLayerAttributes( + weight_attrs={1: {"shape": [5, 8]}}, node_attrs={"transA": 0, "transB": 1} + ), + } + ), + target_point=ONNXTargetPoint( + target_type=TargetType.OPERATION_WITH_WEIGHTS, + target_node_name="gemm_with_weight_per_channel_transpose", + port_id=1, + ), + per_channel=True, + ref_reduction_shape=(1,), + ), + TestCase( + nncf_node=NNCFNode( + { + NNCFNode.ID_NODE_ATTR: 0, + NNCFNode.NODE_NAME_ATTR: "gemm_with_weight_per_channel_transpose_one_dim", + NNCFNode.METATYPE_ATTR: om.ONNXGemmMetatype, + NNCFNode.LAYER_ATTRIBUTES: ONNXLayerAttributes( + weight_attrs={1: {"shape": [5]}}, node_attrs={"transA": 0, "transB": 1} + ), + } + ), + target_point=ONNXTargetPoint( + target_type=TargetType.OPERATION_WITH_WEIGHTS, + target_node_name="gemm_with_weight_per_channel_0_port", + port_id=1, + ), + per_channel=True, + ref_reduction_shape=(0,), + ), + TestCase( + nncf_node=NNCFNode( + { + NNCFNode.ID_NODE_ATTR: 0, + NNCFNode.NODE_NAME_ATTR: "gemm_with_weight_per_channel_0_port", + NNCFNode.METATYPE_ATTR: om.ONNXGemmMetatype, + NNCFNode.LAYER_ATTRIBUTES: ONNXLayerAttributes( + weight_attrs={0: {"shape": [10, 10, 5]}}, node_attrs={"transA": 0, "transB": 1} + ), + } + ), + target_point=ONNXTargetPoint( + target_type=TargetType.OPERATION_WITH_WEIGHTS, + target_node_name="gemm_with_weight_per_channel_0_port", + port_id=0, + ), + per_channel=True, + ref_reduction_shape=(0, 1), + ), +) + + +@pytest.mark.parametrize( + "test_case", + (test_cases), + ids=[test_case.nncf_node.node_name for test_case in test_cases], +) +def test_get_reduction_shape(test_case): + """Checks the correct return reduction shape in ONNXMinMaxAlgo. + Edge cases: + 1) per-tensor. + 2) transpose axis of GEMM node. + 3) one dimensional weight tensor. 
+ """ + quantization_axis = get_quantization_axis( + is_per_channel=test_case.per_channel, node=test_case.nncf_node, target_point=test_case.target_point + ) + if quantization_axis is not None: # Per-Channel + reduction_shape = get_reduction_shape( + test_case.nncf_node.layer_attributes.weight_attrs[test_case.target_point.port_id]["shape"], + quantization_axis, + ) + assert reduction_shape == test_case.ref_reduction_shape + else: + assert not test_case.per_channel diff --git a/tests/onnx/quantization/test_ptq_params.py b/tests/onnx/quantization/test_ptq_params.py index 31f9b8d1955..9bc23b1410b 100644 --- a/tests/onnx/quantization/test_ptq_params.py +++ b/tests/onnx/quantization/test_ptq_params.py @@ -16,9 +16,10 @@ from nncf.common.graph.transformations.commands import TargetType from nncf.common.utils.backend import BackendType from nncf.onnx.graph.metatypes.onnx_metatypes import ONNXConvolutionMetatype -from nncf.onnx.graph.metatypes.onnx_metatypes import ONNXLinearMetatype +from nncf.onnx.graph.metatypes.onnx_metatypes import ONNXGemmMetatype from nncf.onnx.graph.metatypes.onnx_metatypes import ONNXSoftmaxMetatype -from nncf.onnx.graph.nncf_graph_builder import ONNXExtendedLayerAttributes +from nncf.onnx.graph.nncf_graph_builder import GraphConverter +from nncf.onnx.graph.nncf_graph_builder import ONNXLayerAttributes from nncf.onnx.graph.transformations.commands import ONNXTargetPoint from nncf.onnx.statistics.collectors import ONNXMeanMinMaxStatisticCollector from nncf.onnx.statistics.collectors import ONNXMinMaxStatisticCollector @@ -31,9 +32,9 @@ from tests.common.quantization.metatypes import SoftmaxTestMetatype from tests.onnx.models import LinearModel from tests.onnx.models import OneDepthwiseConvolutionalModel -from tests.post_training.models import NNCFGraphToTest -from tests.post_training.models import NNCFGraphToTestMatMul -from tests.post_training.test_ptq_params import TemplateTestPTQParams +from tests.post_training.test_templates.models import NNCFGraphToTest +from tests.post_training.test_templates.models import NNCFGraphToTestMatMul +from tests.post_training.test_templates.test_ptq_params import TemplateTestPTQParams # pylint: disable=protected-access @@ -78,37 +79,71 @@ def target_point(self, target_type: TargetType, target_node_name: str, port_id: def metatypes_mapping(self): return { Conv2dTestMetatype: ONNXConvolutionMetatype, - LinearTestMetatype: ONNXLinearMetatype, + LinearTestMetatype: ONNXGemmMetatype, SoftmaxTestMetatype: ONNXSoftmaxMetatype, } @pytest.fixture(scope="session") def test_params(self): + linear_model = LinearModel().onnx_model + linear_model_graph = GraphConverter.create_nncf_graph(linear_model) + depthwise_model = OneDepthwiseConvolutionalModel().onnx_model + depthwise_model_graph = GraphConverter.create_nncf_graph(depthwise_model) + return { - "test_range_estimator_per_tensor": {"model": LinearModel().onnx_model, "stat_points_num": 5}, + "test_range_estimator_per_tensor": { + "model": linear_model, + "nncf_graph": linear_model_graph, + "stat_points_num": 5, + }, "test_range_estimator_per_channel": { - "model": OneDepthwiseConvolutionalModel().onnx_model, + "model": depthwise_model, + "nncf_graph": depthwise_model_graph, "stat_points_num": 2, }, "test_quantize_outputs": { "nncf_graph": NNCFGraphToTest( - ONNXConvolutionMetatype, ONNXExtendedLayerAttributes(None, None) + conv_metatype=ONNXConvolutionMetatype, + conv_layer_attrs=ONNXLayerAttributes( + weight_attrs={1: {"name": "aaa"}}, + ), + input_layer_attrs=ONNXLayerAttributes(), + 
output_layer_attrs=ONNXLayerAttributes(), ).nncf_graph, "hw_patterns": get_hw_patterns(), "ignored_patterns": get_ignored_patterns(), }, "test_ignored_scopes": { "nncf_graph": NNCFGraphToTest( - ONNXConvolutionMetatype, ONNXExtendedLayerAttributes(None, None) + conv_metatype=ONNXConvolutionMetatype, + conv_layer_attrs=ONNXLayerAttributes( + weight_attrs={1: {"name": "aaa"}}, + ), + input_layer_attrs=ONNXLayerAttributes(), + output_layer_attrs=ONNXLayerAttributes(), ).nncf_graph, "hw_patterns": get_hw_patterns(), "ignored_patterns": get_ignored_patterns(), }, "test_model_type_pass": { - "nncf_graph": NNCFGraphToTestMatMul(ONNXLinearMetatype).nncf_graph, + "nncf_graph": NNCFGraphToTestMatMul( + ONNXGemmMetatype, + ONNXLayerAttributes(weight_attrs={1: {"name": "aaa"}}), + input_layer_attrs=ONNXLayerAttributes(), + output_layer_attrs=ONNXLayerAttributes(), + ).nncf_graph, "hw_patterns": get_hw_patterns(), "ignored_patterns": get_ignored_patterns(), }, + "test_validate_scope": { + "nncf_graph": NNCFGraphToTestMatMul( + ONNXGemmMetatype, + ONNXLayerAttributes(weight_attrs={1: {"name": "aaa"}}), + input_layer_attrs=ONNXLayerAttributes(), + output_layer_attrs=ONNXLayerAttributes(), + ).nncf_graph, + "ignored_patterns": get_ignored_patterns(), + }, } @pytest.fixture(params=[(IgnoredScope([]), 1, 1), (IgnoredScope(["/Conv_1_0"]), 0, 0)]) diff --git a/tests/onnx/quantization/test_qdq_params_calculation.py b/tests/onnx/quantization/test_qdq_params_calculation.py index 6bee381c44f..bf16eb152b2 100644 --- a/tests/onnx/quantization/test_qdq_params_calculation.py +++ b/tests/onnx/quantization/test_qdq_params_calculation.py @@ -14,11 +14,19 @@ import onnx import pytest +from nncf.common.quantization.structs import QuantizationPreset from nncf.onnx.graph.onnx_graph import ONNXGraph from nncf.quantization.advanced_parameters import AdvancedQuantizationParameters from nncf.quantization.advanced_parameters import OverflowFix from tests.onnx.conftest import ONNX_TEST_ROOT +from tests.onnx.models import EmbeddingModel +from tests.onnx.models import GEMMTransposeWeightModel from tests.onnx.models import LinearModel +from tests.onnx.models import MatMulActivationModel +from tests.onnx.models import MatMulWeightModel +from tests.onnx.models import OneDepthwiseConvolutionalModel +from tests.onnx.models import ReshapeWeightModel +from tests.onnx.models import WeightSharingModel from tests.onnx.quantization.common import min_max_quantize_model from tests.shared.helpers import compare_stats from tests.shared.helpers import load_json @@ -31,8 +39,8 @@ def get_q_nodes_params(model: onnx.ModelProto) -> Dict[str, np.ndarray]: onnx_graph = ONNXGraph(model) for node in onnx_graph.get_all_nodes(): if node.op_type == "QuantizeLinear": - scale = onnx_graph.get_initializers_value(node.input[1]) - zero_point = onnx_graph.get_initializers_value(node.input[2]) + scale = onnx_graph.get_tensor_value(node.input[1]) + zero_point = onnx_graph.get_tensor_value(node.input[2]) output[node.name] = {"scale": scale, "zero_point": zero_point} return output @@ -55,6 +63,56 @@ def test_overflow_fix_scales(overflow_fix): # Unkomment lines below to generate reference for new models. 
# from tests.shared.helpers import dump_to_json + + # dump_to_json(ref_stats_path, q_nodes_params) + + ref_nodes_params = load_json(ref_stats_path) + params = ["scale", "zero_point"] + compare_stats(ref_nodes_params, q_nodes_params, params) + + +MODELS = [ + GEMMTransposeWeightModel, + MatMulWeightModel, + MatMulActivationModel, + WeightSharingModel, + ReshapeWeightModel, + LinearModel, + OneDepthwiseConvolutionalModel, + EmbeddingModel, +] + + +@pytest.mark.parametrize( + "preset", + [QuantizationPreset.PERFORMANCE, QuantizationPreset.MIXED], + ids=[QuantizationPreset.PERFORMANCE.value, QuantizationPreset.MIXED.value], +) +@pytest.mark.parametrize( + "model", + MODELS, + ids=[ + "GEMMTransposeWeightModel", + "MatMulWeightModel", + "MatMulActivationModel", + "WeightSharingModel", + "ReshapeWeightModel", + "LinearModel", + "OneDepthwiseConvolutionalModel", + "EmbeddingModel", + ], +) +def test_scales(model, preset): + model = model() + quantized_model = min_max_quantize_model(model.onnx_model, quantization_params={"preset": preset}) + q_nodes_params = get_q_nodes_params(quantized_model) + + ref_stats_name = model.path_ref_graph.split(".")[0] + f"_{preset.value}.json" + ref_stats_path = REFERENCE_SCALES_DIR / ref_stats_name + + # Unkomment lines below to generate reference for new models. + # from tests.shared.helpers import dump_to_json + # dump_to_json(ref_stats_path, q_nodes_params) ref_nodes_params = load_json(ref_stats_path) diff --git a/tests/onnx/quantization/test_quantizer_config.py b/tests/onnx/quantization/test_quantizer_config.py index 31a5734e9b2..374ae440f13 100644 --- a/tests/onnx/quantization/test_quantizer_config.py +++ b/tests/onnx/quantization/test_quantizer_config.py @@ -15,14 +15,14 @@ from nncf.onnx.graph.metatypes.onnx_metatypes import ONNXAddLayerMetatype from nncf.onnx.graph.metatypes.onnx_metatypes import ONNXConvolutionMetatype from nncf.onnx.graph.metatypes.onnx_metatypes import ONNXDepthwiseConvolutionMetatype -from nncf.onnx.graph.nncf_graph_builder import ONNXExtendedLayerAttributes +from nncf.onnx.graph.nncf_graph_builder import ONNXLayerAttributes from nncf.onnx.statistics.collectors import ONNXMeanMinMaxStatisticCollector from nncf.onnx.statistics.collectors import ONNXMinMaxStatisticCollector from nncf.quantization.algorithms.min_max.onnx_backend import ONNXMinMaxAlgoBackend -from tests.post_training.models import NNCFGraphToTest -from tests.post_training.models import NNCFGraphToTestDepthwiseConv -from tests.post_training.models import NNCFGraphToTestSumAggregation -from tests.post_training.test_quantizer_config import TemplateTestQuantizerConfig +from tests.post_training.test_templates.models import NNCFGraphToTest +from tests.post_training.test_templates.models import NNCFGraphToTestDepthwiseConv +from tests.post_training.test_templates.models import NNCFGraphToTestSumAggregation +from tests.post_training.test_templates.test_quantizer_config import TemplateTestQuantizerConfig ParamsCls = TemplateTestQuantizerConfig.TestGetStatisticsCollectorParameters @@ -52,16 +52,31 @@ def statistic_collector_parameters(self, request) -> ParamsCls: @pytest.fixture def single_conv_nncf_graph(self) -> NNCFGraphToTest: - conv_layer_attrs = ONNXExtendedLayerAttributes("dummy", "dummy", (4, 4, 4, 4)) - return NNCFGraphToTest(ONNXConvolutionMetatype, conv_layer_attrs) + conv_layer_attrs = ONNXLayerAttributes(weight_attrs={1: {"shape": [4, 4, 4, 4]}}, bias_attrs={}) + return NNCFGraphToTest( + ONNXConvolutionMetatype, + conv_layer_attrs, + 
input_layer_attrs=ONNXLayerAttributes(), + output_layer_attrs=ONNXLayerAttributes(), + ) @pytest.fixture def depthwise_conv_nncf_graph(self) -> NNCFGraphToTestDepthwiseConv: return NNCFGraphToTestDepthwiseConv( - ONNXDepthwiseConvolutionMetatype, ONNXExtendedLayerAttributes("dummy", "dummy") + ONNXDepthwiseConvolutionMetatype, + ONNXLayerAttributes(weight_attrs={1: {"shape": [4, 4, 4, 4]}}, bias_attrs={}), + input_layer_attrs=ONNXLayerAttributes(), + output_layer_attrs=ONNXLayerAttributes(), ) @pytest.fixture def conv_sum_aggregation_nncf_graph(self) -> NNCFGraphToTestSumAggregation: - conv_layer_attrs = ONNXExtendedLayerAttributes("dummy", "dummy", (4, 4, 4, 4)) - return NNCFGraphToTestSumAggregation(ONNXConvolutionMetatype, ONNXAddLayerMetatype, conv_layer_attrs) + conv_layer_attrs = ONNXLayerAttributes(weight_attrs={1: {"shape": [4, 4, 4, 4]}}, bias_attrs={}) + return NNCFGraphToTestSumAggregation( + ONNXConvolutionMetatype, + ONNXAddLayerMetatype, + conv_layer_attrs, + sum_layer_attrs=ONNXLayerAttributes(), + input_layer_attrs=ONNXLayerAttributes(), + output_layer_attrs=ONNXLayerAttributes(), + ) diff --git a/tests/onnx/quantization/test_segmentation_models_graph.py b/tests/onnx/quantization/test_segmentation_models_graph.py index 92e6252b43b..7861de76d69 100644 --- a/tests/onnx/quantization/test_segmentation_models_graph.py +++ b/tests/onnx/quantization/test_segmentation_models_graph.py @@ -9,6 +9,7 @@ # See the License for the specific language governing permissions and # limitations under the License. +import onnx import pytest from tests.onnx.conftest import ONNX_MODEL_DIR @@ -27,4 +28,5 @@ def test_min_max_quantization_graph(tmp_path, mocker, model_to_test): onnx_model_path = ONNX_MODEL_DIR / (model_to_test.model_name + ".onnx") original_model = load_model_topology_with_zeros_weights(onnx_model_path) quantized_model = min_max_quantize_model(original_model) + onnx.save_model(quantized_model, tmp_path / (model_to_test.model_name + "_int8.onnx")) compare_nncf_graph(quantized_model, model_to_test.path_ref_graph) diff --git a/tests/onnx/quantization/test_transform_fn.py b/tests/onnx/quantization/test_transform_fn.py new file mode 100644 index 00000000000..56ab6f73971 --- /dev/null +++ b/tests/onnx/quantization/test_transform_fn.py @@ -0,0 +1,54 @@ +# Copyright (c) 2023 Intel Corporation +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import numpy as np +import onnxruntime as rt +import pytest + +import nncf +from tests.onnx.models import LinearModel as ModelWithSingleInput +from tests.onnx.models import MultiInputOutputModel as ModelWithMultipleInputs + +dataset = [ + { + "X": np.zeros((1, 3, 32, 32), dtype=np.float32), + "X_1": np.zeros((1, 6, 3, 3), dtype=np.float32), + "X_2": np.zeros((2, 6, 3, 3), dtype=np.float32), + "X_3": np.zeros((3, 6, 3, 3), dtype=np.float32), + } +] + + +def single_input_transform_fn(data_item): + return {"X": data_item["X"]} + + +def multiple_inputs_transform_fn(data_item): + return { + "X_1": data_item["X_1"], + "X_2": data_item["X_2"], + "X_3": data_item["X_3"], + } + + +@pytest.mark.parametrize( + "model,transform_fn", + [[ModelWithSingleInput(), single_input_transform_fn], [ModelWithMultipleInputs(), multiple_inputs_transform_fn]], + ids=["single_input", "multiple_inputs"], +) +def test_transform_fn(model, transform_fn): + # Check the transformation function + session = rt.InferenceSession(model.onnx_model.SerializeToString(), providers=["CPUExecutionProvider"]) + session.run([], transform_fn(next(iter(dataset)))) + + # Start quantization + calibration_dataset = nncf.Dataset(dataset, transform_fn) + _ = nncf.quantize(model.onnx_model, calibration_dataset) diff --git a/tests/onnx/quantization/test_transformer_models_graph.py b/tests/onnx/quantization/test_transformer_models_graph.py new file mode 100644 index 00000000000..f28a7d23108 --- /dev/null +++ b/tests/onnx/quantization/test_transformer_models_graph.py @@ -0,0 +1,34 @@ +# Copyright (c) 2023 Intel Corporation +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
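+
+# Quantization graph checks for transformer topologies (bertsquad-12, gpt2-10)
+# quantized with model_type=ModelType.TRANSFORMER.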
+ +import onnx +import pytest + +from nncf.parameters import ModelType +from tests.onnx.conftest import ONNX_MODEL_DIR +from tests.onnx.quantization.common import ModelToTest +from tests.onnx.quantization.common import compare_nncf_graph +from tests.onnx.quantization.common import min_max_quantize_model +from tests.onnx.quantization.common import mock_collect_statistics +from tests.onnx.weightless_model import load_model_topology_with_zeros_weights + +TEST_DATA = [ModelToTest("bertsquad-12"), ModelToTest("gpt2-10")] + + +@pytest.mark.parametrize(("model_to_test"), TEST_DATA, ids=[model_to_test.model_name for model_to_test in TEST_DATA]) +def test_min_max_quantization_transformers(tmp_path, mocker, model_to_test): + mock_collect_statistics(mocker) + onnx_model_path = ONNX_MODEL_DIR / (model_to_test.model_name + ".onnx") + original_model = load_model_topology_with_zeros_weights(onnx_model_path) + + quantized_model = min_max_quantize_model(original_model, quantization_params={"model_type": ModelType.TRANSFORMER}) + onnx.save_model(quantized_model, tmp_path / (model_to_test.model_name + "_int8.onnx")) + compare_nncf_graph(quantized_model, model_to_test.path_ref_graph) diff --git a/tests/onnx/requirements.txt b/tests/onnx/requirements.txt index 1a25d93c61e..4d21b89518c 100644 --- a/tests/onnx/requirements.txt +++ b/tests/onnx/requirements.txt @@ -1,10 +1,11 @@ pytest-mock>=3.3.1 +pytest-cov pytest-dependency>=0.5.1 pytest-ordering>=0.6 torch~=1.13.0 torchvision~=0.14.0 yattag>=1.14.0 -openvino-dev==2023.0.0 +openvino-dev==2023.0.1 fastdownload scikit-learn # Pandas requires it for styling in e2e report generation diff --git a/tests/onnx/test_e2e_ptq.py b/tests/onnx/test_e2e_ptq.py index f516253a22f..0d2f353982b 100644 --- a/tests/onnx/test_e2e_ptq.py +++ b/tests/onnx/test_e2e_ptq.py @@ -1,18 +1,15 @@ -""" -Copyright (c) 2023 Intel Corporation +# Copyright (c) 2023 Intel Corporation +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. 
-""" +# pylint: disable=redefined-outer-name import json import math @@ -21,8 +18,6 @@ import sys from pathlib import Path from tempfile import TemporaryDirectory - -# pylint: disable=redefined-outer-name from typing import Dict, List, Optional import pandas as pd diff --git a/tests/onnx/test_engine.py b/tests/onnx/test_engine.py index a7a0a649616..d11ced20f0a 100644 --- a/tests/onnx/test_engine.py +++ b/tests/onnx/test_engine.py @@ -1,15 +1,13 @@ -""" - Copyright (c) 2023 Intel Corporation - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - http://www.apache.org/licenses/LICENSE-2.0 - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. -""" +# Copyright (c) 2023 Intel Corporation +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. from typing import List diff --git a/tests/onnx/test_layer_attributes.py b/tests/onnx/test_layer_attributes.py index dc044452b0d..685507c7a18 100644 --- a/tests/onnx/test_layer_attributes.py +++ b/tests/onnx/test_layer_attributes.py @@ -13,9 +13,9 @@ import onnx import pytest -from nncf.onnx.graph.metatypes.onnx_metatypes import WEIGHT_LAYER_METATYPES +from nncf.onnx.graph.metatypes.onnx_metatypes import GENERAL_WEIGHT_LAYER_METATYPES from nncf.onnx.graph.nncf_graph_builder import GraphConverter -from nncf.onnx.graph.nncf_graph_builder import ONNXExtendedLayerAttributes +from nncf.onnx.graph.nncf_graph_builder import ONNXLayerAttributes from tests.onnx.models import OPSET_VERSION from tests.onnx.models import create_initializer_tensor @@ -97,7 +97,12 @@ def get_one_layer_model(op_name: str, node_creator: ONNXNodeCreator, input_shape "node_creator, ref_layer_attrs", [ (ONNXIdentityCreator, None), - (ONNXConvCreator, ONNXExtendedLayerAttributes(["X", "Conv1_W", "Conv1_B"], ["Y"], [3, 3, 1, 1])), + ( + ONNXConvCreator, + ONNXLayerAttributes( + weight_attrs={1: {"name": "Conv1_W", "shape": [3, 3, 1, 1]}}, bias_attrs={"name": "Conv1_B"} + ), + ), ], ) def test_layer_attributes(node_creator, ref_layer_attrs): @@ -106,7 +111,7 @@ def test_layer_attributes(node_creator, ref_layer_attrs): onnx_model = get_one_layer_model(op_name, node_creator, input_shape) nncf_graph = GraphConverter.create_nncf_graph(onnx_model) node = nncf_graph.get_node_by_name(op_name) - if node.metatype in WEIGHT_LAYER_METATYPES: + if node.metatype in GENERAL_WEIGHT_LAYER_METATYPES: assert node.layer_attributes.__dict__ == ref_layer_attrs.__dict__ else: - assert node.layer_attributes is None + assert node.layer_attributes.__dict__ == ONNXLayerAttributes().__dict__ diff --git a/tests/onnx/test_metatypes.py b/tests/onnx/test_metatypes.py index 107090de357..041b4a38e55 100644 --- a/tests/onnx/test_metatypes.py +++ 
b/tests/onnx/test_metatypes.py @@ -16,7 +16,7 @@ from nncf.common.graph.operator_metatypes import OutputNoopMetatype from nncf.onnx.graph.metatypes.onnx_metatypes import ONNXAddLayerMetatype from nncf.onnx.graph.metatypes.onnx_metatypes import ONNXBatchNormMetatype -from nncf.onnx.graph.metatypes.onnx_metatypes import ONNXConcatLayerMetatype +from nncf.onnx.graph.metatypes.onnx_metatypes import ONNXConcatMetatype from nncf.onnx.graph.metatypes.onnx_metatypes import ONNXConstantOfShapeMetatype from nncf.onnx.graph.metatypes.onnx_metatypes import ONNXConvolutionMetatype from nncf.onnx.graph.metatypes.onnx_metatypes import ONNXDepthwiseConvolutionMetatype @@ -44,7 +44,7 @@ InputNoopMetatype, InputNoopMetatype, InputNoopMetatype, - ONNXConcatLayerMetatype, + ONNXConcatMetatype, ONNXAddLayerMetatype, OutputNoopMetatype, OutputNoopMetatype, diff --git a/tests/onnx/test_model_transformer.py b/tests/onnx/test_model_transformer.py index ffd03d0b185..4cf5cb4e332 100644 --- a/tests/onnx/test_model_transformer.py +++ b/tests/onnx/test_model_transformer.py @@ -132,9 +132,9 @@ def test_inserted_quantizer_parameters(test_parameters): if op_type == "QuantizeLinear": for attr in node.attribute: assert test_parameters.onnx_attributes[attr.name] == onnx.helper.get_attribute_value(attr) - assert np.allclose(onnx_graph.get_initializers_value(node.input[1]), np.array(test_parameters.scale)) - assert np.allclose(onnx_graph.get_initializers_value(node.input[2]), np.array(test_parameters.zero_point)) - assert onnx_graph.get_initializers_value(node.input[2]).dtype == test_parameters.onnx_dtype + assert np.allclose(onnx_graph.get_tensor_value(node.input[1]), np.array(test_parameters.scale)) + assert np.allclose(onnx_graph.get_tensor_value(node.input[2]), np.array(test_parameters.zero_point)) + assert onnx_graph.get_tensor_value(node.input[2]).dtype == test_parameters.onnx_dtype TARGET_LAYERS = [["ReLU1"], ["Conv1", "BN1"], ["Conv1", "BN1", "ReLU1"]] @@ -186,7 +186,7 @@ def test_bias_correction(layers, values, refs): for conv_layer, bias_reference in zip(layers, refs): bias_tensor_name = onnx_graph.get_node_by_name(conv_layer).input[2] - bias_tensor = onnx_graph.get_initializer(bias_tensor_name) + bias_tensor = onnx_graph.get_tensor(bias_tensor_name) bias_value = onnx.numpy_helper.to_array(bias_tensor) assert np.all(bias_value == bias_reference) diff --git a/tests/onnx/test_node_utils.py b/tests/onnx/test_node_utils.py index 4b1918f36b2..2c4ec41c5d9 100644 --- a/tests/onnx/test_node_utils.py +++ b/tests/onnx/test_node_utils.py @@ -14,6 +14,7 @@ from nncf.onnx.graph.metatypes.onnx_metatypes import ONNXConvolutionMetatype from nncf.onnx.graph.nncf_graph_builder import GraphConverter from nncf.onnx.graph.node_utils import get_bias_value +from nncf.onnx.graph.node_utils import transpose_axis from tests.onnx.models import OneConvolutionalIdentityBiasModel from tests.onnx.models import OneConvolutionalModel @@ -26,3 +27,21 @@ def test_get_bias_value(model): conv_node = nncf_graph.get_nodes_by_metatypes([ONNXConvolutionMetatype])[0] bias_value = get_bias_value(conv_node, onnx_model) assert np.allclose(bias_value, model.conv_bias) + + +@pytest.mark.parametrize( + "shape, axis, expected_channel_axis", + [ + ((1, 3, 5, 5), -1, 0), + ((1, 3, 5, 5), 1, 2), + ((1, 3, 5, 5), 0, 3), + ((1, 3, 5, 5), 2, 1), + ((1, 3, 5, 5), -2, 1), + ((1,), -1, 0), + ((1, 1), -1, 0), + ((1, 1), 1, 0), + ((1, 1), 0, 1), + ], +) +def test_transpose_axis(shape, axis, expected_channel_axis): + assert expected_channel_axis == transpose_axis(shape, axis) 
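+
+
+# A minimal reference sketch of what transpose_axis is assumed to compute, kept here
+# for illustration only (not the NNCF implementation). The parametrized cases above are
+# consistent with reversing the dimension order, so axis k maps to ndim - 1 - k.
+def _transpose_axis_reference(shape, axis):
+    ndim = len(shape)
+    return ndim - 1 - (axis % ndim)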
diff --git a/tests/onnx/test_pattern_manager.py b/tests/onnx/test_pattern_manager.py index e5d2aa9aa4b..379b0a13df0 100644 --- a/tests/onnx/test_pattern_manager.py +++ b/tests/onnx/test_pattern_manager.py @@ -8,7 +8,9 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. + from nncf.common.graph.patterns import HWFusedPatternNames +from nncf.common.graph.patterns import IgnoredPatternNames from nncf.common.utils.backend import BackendType from tests.shared.patterns import check_hw_patterns from tests.shared.patterns import check_ignored_patterns @@ -23,16 +25,8 @@ HWFusedPatternNames.NORMALIZE_L2_MULTIPLY: "Not relevant for ONNX.", HWFusedPatternNames.LINEAR_WITH_BIAS: "Linear layers contains biases in ONNX.", HWFusedPatternNames.SE_BLOCK: "Not relevant for ONNX.", - HWFusedPatternNames.STABLE_DIFFUSION: "Not relevant for ONNX.", HWFusedPatternNames.SOFTMAX_DIV: "Not relevant for ONNX.", - HWFusedPatternNames.SOFTMAX_RESHAPE_MATMUL: "Not relevant for ONNX.", - HWFusedPatternNames.SOFTMAX_RESHAPE_TRANSPOSE_MATMUL: "Not relevant for ONNX.", - HWFusedPatternNames.SOFTMAX_RESHAPE_TRANSPOSE_GATHER_MATMUL: "Not relevant for ONNX.", - HWFusedPatternNames.EQUAL_LOGICALNOT: "Not relevant for ONNX.", - HWFusedPatternNames.FC_BN_HSWISH_ACTIVATION: "Not relevant for ONNX.", - HWFusedPatternNames.HSWISH_ACTIVATION: "Not relevant for ONNX.", - HWFusedPatternNames.HSWISH_ACTIVATION_V2: "Not relevant for ONNX.", - HWFusedPatternNames.HSWISH_ACTIVATION_WITHOUT_DENOMINATOR: "Not relevant for ONNX.", + HWFusedPatternNames.HSWISH_ACTIVATION_V2: "Is already covered by HSWISH_ACTIVATION for ONNX.", HWFusedPatternNames.SOFTMAX: "Not relevant for ONNX.", HWFusedPatternNames.INPUT_CONVERT_TRANSPOSE_PROCESSING: "Not relevant for ONNX.", HWFusedPatternNames.INPUT_CONVERT_TRANSPOSE_REVERSE_ADD: "Not relevant for ONNX.", @@ -43,7 +37,6 @@ HWFusedPatternNames.INPUT_TRANSPOSE_PROCESSING: "Not relevant for ONNX.", HWFusedPatternNames.INPUT_TRANSPOSE_REVERSE_ADD: "Not relevant for ONNX.", HWFusedPatternNames.INPUT_TRANSPOSE_SCALE_SHIFT: "Not relevant for ONNX.", - HWFusedPatternNames.LINEAR_ARITHMETIC_ACTIVATIONS: "Not relevant for ONNX.", HWFusedPatternNames.HSWISH_ACTIVATION_CLAMP_MULTIPLY: "Not relevant for ONNX.", HWFusedPatternNames.LINEAR_BIASED_SCALE_SHIFT: "Not relevant for ONNX.", HWFusedPatternNames.LINEAR_ACTIVATION_SCALE_SHIFT: "Not relevant for ONNX.", @@ -53,9 +46,13 @@ HWFusedPatternNames.LINEAR_ACTIVATION_ELEMENTWISE: "Not relevant for ONNX.", HWFusedPatternNames.LINEAR_BIASED_ACTIVATION_ELEMENTWISE: "Not relevant for ONNX.", HWFusedPatternNames.MVN_SCALE_SHIFT_ACTIVATIONS: "Not relevant for ONNX.", + HWFusedPatternNames.LINEAR_ACTIVATIONS_UNSQUEEZE_BN_SQUEEZE: "Not relevant for ONNX.", } -IGNORING_IGNORED_PATTERN_REASONS = {} +IGNORING_IGNORED_PATTERN_REASONS = { + IgnoredPatternNames.FC_BN_HSWISH_ACTIVATION: "Not relevant for ONNX.", + IgnoredPatternNames.EQUAL_LOGICALNOT: "Not relevant for ONNX.", +} def test_pattern_manager(): diff --git a/tests/onnx/test_statistics_aggregator.py b/tests/onnx/test_statistics_aggregator.py index b0ea08dd2ea..7cf32b28687 100644 --- a/tests/onnx/test_statistics_aggregator.py +++ b/tests/onnx/test_statistics_aggregator.py @@ -99,7 +99,7 @@ def inplace_statistics(self, request) -> bool: return request.param @pytest.mark.skip("Merging is not implemented yet") - def test_statistics_merging_simple(self, dataset_samples, inplace_statistics): + def 
test_statistics_merging_simple(self, dataset_samples, inplace_statistics, statistic_point_params): pass @pytest.mark.skip("Merging is not implemented yet") diff --git a/tests/onnx/tools/save_model_without_tensors.py b/tests/onnx/tools/save_model_without_tensors.py new file mode 100644 index 00000000000..88891d97d95 --- /dev/null +++ b/tests/onnx/tools/save_model_without_tensors.py @@ -0,0 +1,28 @@ +# Copyright (c) 2023 Intel Corporation +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +""" +This script is used to save only model topology without weights. +Usage example: to save model topology in tests/onnx/data/models for future usage in graph tests. +""" +from argparse import ArgumentParser + +import onnx + +from tests.onnx.weightless_model import save_model_without_tensors + +if __name__ == "__main__": + parser = ArgumentParser() + parser.add_argument("model") + parser.add_argument("output_model") + args = parser.parse_args() + model = onnx.load(args.model) + save_model_without_tensors(model, args.output_model) diff --git a/tests/openvino/data/ac_configs/mobilefacedet-v1-mxnet.yml b/tests/openvino/data/ac_configs/mobilefacedet-v1-mxnet.yml deleted file mode 100644 index ec2b4c167d8..00000000000 --- a/tests/openvino/data/ac_configs/mobilefacedet-v1-mxnet.yml +++ /dev/null @@ -1,47 +0,0 @@ -models: - - name: mobilefacedet-v1-mxnet - launchers: - - framework: openvino - adapter: - type: yolo_v3 - classes: 1 - num: 9 - anchors: 10,12,16,20,23,29,43,54,60,75,80,106,118,157,186,248,285,379 - outputs: - - yolov30_yolooutputv32_conv0_fwd/YoloRegion - - yolov30_yolooutputv31_conv0_fwd/YoloRegion - - yolov30_yolooutputv30_conv0_fwd/YoloRegion - anchor_masks: [[0, 1, 2], [3, 4, 5], [6, 7, 8]] - inputs: - - name: data - type: INPUT - layout: NHWC - - datasets: - - name: wider_without_bkgr - - preprocessing: - - type: resize - size: 256 - - postprocessing: - - type: resize_prediction_boxes - - type: filter - apply_to: prediction - min_confidence: 0.001 - remove_filtered: true - - type: nms - overlap: 0.45 - - type: clip_boxes - apply_to: prediction - - type: filter - apply_to: annotation - height_range: 60 - is_empty: True - - metrics: - - type: map - ignore_difficult: True - include_boundaries: False - allow_multiple_matches_per_ignored: True - use_filtered_tp: True diff --git a/tests/openvino/data/ov_dataset_definitions.yml b/tests/openvino/data/ov_dataset_definitions.yml index 7ff296e63f2..cebdbe0aaef 100644 --- a/tests/openvino/data/ov_dataset_definitions.yml +++ b/tests/openvino/data/ov_dataset_definitions.yml @@ -19,9 +19,9 @@ datasets: - name: imagenette2-320 annotation_conversion: converter: imagenet - annotation_file: imagenette2-320/imagenette2-320_val.txt + annotation_file: ac_imagenette2-320/imagenette2-320_val.txt annotation: imagenette2-320.pickle - data_source: imagenette2-320/val/ + data_source: ac_imagenette2-320/ metrics: - name: accuracy@top1 type: accuracy diff --git a/tests/openvino/datasets_helpers.py b/tests/openvino/datasets_helpers.py index edb9061dfc7..8b1044ee9d4 100644 --- 
a/tests/openvino/datasets_helpers.py +++ b/tests/openvino/datasets_helpers.py @@ -35,18 +35,17 @@ def download(url: str, path: str) -> Path: return downloader.get(url) -def preprocess_imagenette_data(dataset_path: str) -> None: - destination = os.path.join(dataset_path, "val") - for filename in os.listdir(destination): - path = os.path.join(destination, filename) - if os.path.isdir(path): - for img_name in os.listdir(path): - img_path = os.path.join(path, img_name) - shutil.move(img_path, destination) - os.rmdir(path) - - -def preprocess_imagenette_labels(dataset_path: str) -> None: +def preprocess_imagenette_data(dataset_path: Path, destination_path: Path) -> None: + val_dataset_path = dataset_path / "val" + for filename in os.listdir(val_dataset_path): + path = val_dataset_path / filename + if path.is_dir(): + for img_name in path.iterdir(): + img_path = path / img_name + shutil.copy(img_path, destination_path) + + +def preprocess_imagenette_labels(destination_path: Path) -> None: labels_map = { "n01440764": 0, # tench "n02102040": 217, # English springer @@ -59,23 +58,30 @@ def preprocess_imagenette_labels(dataset_path: str) -> None: "n03445777": 574, # golf ball "n03888257": 701, # parachute } - response = requests.get(IMAGENETTE_ANNOTATION_URL, timeout=10) - annotation_path = dataset_path / "imagenette2-320_val.txt" - with open(annotation_path, "w", encoding="utf-8") as output_file: + annotation_path = destination_path / "imagenette2-320_val.txt" + + with open(annotation_path, "w+", encoding="utf-8") as output_file: for line in response.iter_lines(): image_path = line.decode("utf-8").split("/") class_name = image_path[2] - new_path = os.path.join(image_path[0], image_path[1], image_path[3]) + new_path = os.path.join(image_path[0], image_path[1], image_path[2], image_path[3]) label = labels_map[class_name] img_path_with_labels = f"{new_path} {label}\n" output_file.write(img_path_with_labels) +def convert_dataset_to_ac_format(dataset_path: Path, destination_path: Path) -> None: + preprocess_imagenette_labels(destination_path) + preprocess_imagenette_data(dataset_path, destination_path) + + def prepare_imagenette_for_test(data_dir: Path) -> Path: dataset_path = download(IMAGENETTE_URL, data_dir) - preprocess_imagenette_labels(dataset_path) - preprocess_imagenette_data(dataset_path) + destination_path = dataset_path.parent / "ac_imagenette2-320/" + if not destination_path.exists(): + destination_path.mkdir() + convert_dataset_to_ac_format(dataset_path, destination_path) return dataset_path diff --git a/tests/openvino/native/data/reference_graphs/original_nncf_graph/UnifiedEmbeddingModel.dot b/tests/openvino/native/data/reference_graphs/original_nncf_graph/UnifiedEmbeddingModel.dot new file mode 100644 index 00000000000..b779ab79695 --- /dev/null +++ b/tests/openvino/native/data/reference_graphs/original_nncf_graph/UnifiedEmbeddingModel.dot @@ -0,0 +1,28 @@ +strict digraph { +"0 Input" [id=0, type=Parameter]; +"1 Convert_1" [id=1, type=Convert]; +"2 MatMul_1" [id=2, type=MatMul]; +"3 Gather_1" [id=3, type=Gather]; +"4 Reshape_1" [id=4, type=Reshape]; +"5 Concat_12" [id=5, type=Concat]; +"6 MatMul_2" [id=6, type=MatMul]; +"7 Result" [id=7, type=Result]; +"8 matmul_2_data" [id=8, type=Constant]; +"9 Constant_8" [id=9, type=Constant]; +"10 matmul_1_data" [id=10, type=Constant]; +"11 Constant_4" [id=11, type=Constant]; +"12 gather_1_data" [id=12, type=Constant]; +"0 Input" -> "1 Convert_1" [label="[1, 3]", style=solid]; +"0 Input" -> "2 MatMul_1" [label="[1, 3]", style=solid]; +"1 Convert_1" 
-> "3 Gather_1" [label="[1, 3]", style=dashed]; +"2 MatMul_1" -> "4 Reshape_1" [label="[3, 1, 5]", style=solid]; +"3 Gather_1" -> "5 Concat_12" [label="[1, 3, 5]", style=solid]; +"4 Reshape_1" -> "5 Concat_12" [label="[1, 3, 5]", style=solid]; +"5 Concat_12" -> "6 MatMul_2" [label="[1, 6, 5]", style=solid]; +"6 MatMul_2" -> "7 Result" [label="[1, 6, 1]", style=solid]; +"8 matmul_2_data" -> "6 MatMul_2" [label="[1, 5]", style=solid]; +"9 Constant_8" -> "4 Reshape_1" [label="[3]", style=dashed]; +"10 matmul_1_data" -> "2 MatMul_1" [label="[3, 3, 5]", style=solid]; +"11 Constant_4" -> "3 Gather_1" [label="[]", style=dashed]; +"12 gather_1_data" -> "3 Gather_1" [label="[4, 5]", style=solid]; +} diff --git a/tests/openvino/native/data/reference_graphs/original_nncf_graph/WeightsModel.dot b/tests/openvino/native/data/reference_graphs/original_nncf_graph/WeightsModel.dot index 3b2905675bc..fe530ba099a 100644 --- a/tests/openvino/native/data/reference_graphs/original_nncf_graph/WeightsModel.dot +++ b/tests/openvino/native/data/reference_graphs/original_nncf_graph/WeightsModel.dot @@ -5,30 +5,28 @@ strict digraph { "3 MatMul_1" [id=3, type=MatMul]; "4 MatMul" [id=4, type=MatMul]; "5 MatMul_0" [id=5, type=MatMul]; -"6 Add_19" [id=6, type=Add]; +"6 Add_15" [id=6, type=Add]; "7 Result" [id=7, type=Result]; -"8 Constant_11" [id=8, type=Constant]; -"9 Constant_4" [id=9, type=Constant]; -"10 Constant_3" [id=10, type=Constant]; -"11 Constant_1" [id=11, type=Constant]; -"12 Constant_13" [id=12, type=Constant]; +"8 weights_1" [id=8, type=Constant]; +"9 Constant_5" [id=9, type=Constant]; +"10 Constant_4" [id=10, type=Constant]; +"11 Constant_2" [id=11, type=Constant]; +"12 weights_0" [id=12, type=Constant]; "13 MatMul_const" [id=13, type=MatMul]; -"14 Constant_17" [id=14, type=Constant]; -"15 Constant_16" [id=15, type=Constant]; "0 Input_1" -> "1 Conv" [label="[1, 3, 5, 5]", style=solid]; "1 Conv" -> "2 Conv_backprop" [label="[1, 3, 5, 5]", style=solid]; "2 Conv_backprop" -> "3 MatMul_1" [label="[1, 3, 1, 1]", style=solid]; "3 MatMul_1" -> "4 MatMul" [label="[1, 3, 1, 4]", style=solid]; "3 MatMul_1" -> "5 MatMul_0" [label="[1, 3, 1, 4]", style=solid]; -"4 MatMul" -> "6 Add_19" [label="[1, 3, 1, 1]", style=solid]; +"4 MatMul" -> "6 Add_15" [label="[1, 3, 1, 1]", style=solid]; "5 MatMul_0" -> "4 MatMul" [label="[1, 3, 1, 4]", style=solid]; -"6 Add_19" -> "7 Result" [label="[1, 3, 4, 1]", style=solid]; -"8 Constant_11" -> "3 MatMul_1" [label="[1, 3, 1, 4]", style=solid]; -"9 Constant_4" -> "2 Conv_backprop" [label="[2]", style=dashed]; -"10 Constant_3" -> "2 Conv_backprop" [label="[3, 3, 1, 1]", style=solid]; -"11 Constant_1" -> "1 Conv" [label="[3, 3, 1, 1]", style=solid]; -"12 Constant_13" -> "5 MatMul_0" [label="[1, 3, 1, 1]", style=solid]; -"13 MatMul_const" -> "6 Add_19" [label="[1, 3, 4, 1]", style=solid]; -"14 Constant_17" -> "13 MatMul_const" [label="[1, 3, 1, 1]", style=solid]; -"15 Constant_16" -> "13 MatMul_const" [label="[1, 3, 1, 4]", style=solid]; +"6 Add_15" -> "7 Result" [label="[1, 3, 4, 1]", style=solid]; +"8 weights_1" -> "3 MatMul_1" [label="[1, 4]", style=solid]; +"8 weights_1" -> "13 MatMul_const" [label="[1, 4]", style=solid]; +"9 Constant_5" -> "2 Conv_backprop" [label="[2]", style=dashed]; +"10 Constant_4" -> "2 Conv_backprop" [label="[3, 3, 1, 1]", style=solid]; +"11 Constant_2" -> "1 Conv" [label="[3, 3, 1, 1]", style=solid]; +"12 weights_0" -> "5 MatMul_0" [label="[1, 1]", style=solid]; +"12 weights_0" -> "13 MatMul_const" [label="[1, 1]", style=solid]; +"13 MatMul_const" -> "6 
Add_15" [label="[4, 1]", style=solid]; } diff --git a/tests/openvino/native/data/reference_graphs/original_nncf_graph/exctracted_ConvModel.dot b/tests/openvino/native/data/reference_graphs/original_nncf_graph/exctracted_ConvModel.dot new file mode 100644 index 00000000000..fc05c2e496a --- /dev/null +++ b/tests/openvino/native/data/reference_graphs/original_nncf_graph/exctracted_ConvModel.dot @@ -0,0 +1,9 @@ +strict digraph { +"0 Parameter_Conv" [id=0, type=Parameter]; +"1 Conv" [id=1, type=Convolution]; +"2 Result_Conv.0" [id=2, type=Result]; +"3 Constant_58" [id=3, type=Constant]; +"0 Parameter_Conv" -> "1 Conv" [label="[1, 3, 4, 2]", style=solid]; +"1 Conv" -> "2 Result_Conv.0" [label="[1, 3, 4, 2]", style=solid]; +"3 Constant_58" -> "1 Conv" [label="[3, 3, 1, 1]", style=solid]; +} diff --git a/tests/openvino/native/data/reference_graphs/original_nncf_graph/exctracted_QuantizedModel.dot b/tests/openvino/native/data/reference_graphs/original_nncf_graph/exctracted_QuantizedModel.dot new file mode 100644 index 00000000000..cb59d32c930 --- /dev/null +++ b/tests/openvino/native/data/reference_graphs/original_nncf_graph/exctracted_QuantizedModel.dot @@ -0,0 +1,81 @@ +strict digraph { +"0 Parameter_Relu_1" [id=0, type=Parameter]; +"1 Parameter_Transpose" [id=1, type=Parameter]; +"2 Relu_1" [id=2, type=Relu]; +"3 Transpose" [id=3, type=Transpose]; +"4 Concat_1/fq_input_0" [id=4, type=FakeQuantize]; +"5 Concat_1" [id=5, type=Concat]; +"6 Add_2" [id=6, type=Add]; +"7 Conv_2" [id=7, type=Convolution]; +"8 Result_Add_2.0" [id=8, type=Result]; +"9 Relu_2" [id=9, type=Relu]; +"10 Conv_3/fq_input_0" [id=10, type=FakeQuantize]; +"11 Conv_3" [id=11, type=Convolution]; +"12 Result_Conv_3.0" [id=12, type=Result]; +"13 Add_2/fq_weights_0" [id=13, type=FakeQuantize]; +"14 Constant_128" [id=14, type=Constant]; +"15 Constant_127" [id=15, type=Constant]; +"16 Constant_126" [id=16, type=Constant]; +"17 Constant_125" [id=17, type=Constant]; +"18 Constant_124" [id=18, type=Constant]; +"19 Constant_96" [id=19, type=Constant]; +"20 Constant_101" [id=20, type=Constant]; +"21 Constant_100" [id=21, type=Constant]; +"22 Constant_99" [id=22, type=Constant]; +"23 Constant_98" [id=23, type=Constant]; +"24 Conv_3/fq_weights_0" [id=24, type=FakeQuantize]; +"25 Constant_121" [id=25, type=Constant]; +"26 Constant_120" [id=26, type=Constant]; +"27 Constant_119" [id=27, type=Constant]; +"28 Constant_118" [id=28, type=Constant]; +"29 Constant_117" [id=29, type=Constant]; +"30 Constant_115" [id=30, type=Constant]; +"31 Constant_114" [id=31, type=Constant]; +"32 Constant_113" [id=32, type=Constant]; +"33 Constant_112" [id=33, type=Constant]; +"34 Conv_2/fq_weights_0" [id=34, type=FakeQuantize]; +"35 Constant_108" [id=35, type=Constant]; +"36 Constant_107" [id=36, type=Constant]; +"37 Constant_106" [id=37, type=Constant]; +"38 Constant_105" [id=38, type=Constant]; +"39 Constant_104" [id=39, type=Constant]; +"0 Parameter_Relu_1" -> "2 Relu_1" [label="[1, 3, 14, 28]", style=solid]; +"1 Parameter_Transpose" -> "3 Transpose" [label="[1, 3, 28, 14]", style=solid]; +"2 Relu_1" -> "4 Concat_1/fq_input_0" [label="[1, 3, 14, 28]", style=solid]; +"3 Transpose" -> "5 Concat_1" [label="[1, 3, 14, 28]", style=solid]; +"4 Concat_1/fq_input_0" -> "5 Concat_1" [label="[1, 3, 14, 28]", style=solid]; +"5 Concat_1" -> "6 Add_2" [label="[1, 6, 14, 28]", style=solid]; +"5 Concat_1" -> "7 Conv_2" [label="[1, 6, 14, 28]", style=solid]; +"6 Add_2" -> "8 Result_Add_2.0" [label="[1, 6, 14, 28]", style=solid]; +"7 Conv_2" -> "9 Relu_2" [label="[1, 12, 14, 
28]", style=solid]; +"9 Relu_2" -> "10 Conv_3/fq_input_0" [label="[1, 12, 14, 28]", style=solid]; +"10 Conv_3/fq_input_0" -> "11 Conv_3" [label="[1, 12, 14, 28]", style=solid]; +"11 Conv_3" -> "12 Result_Conv_3.0" [label="[1, 6, 14, 28]", style=solid]; +"13 Add_2/fq_weights_0" -> "6 Add_2" [label="[1, 6, 1, 1]", style=solid]; +"14 Constant_128" -> "13 Add_2/fq_weights_0" [label="[]", style=solid]; +"15 Constant_127" -> "13 Add_2/fq_weights_0" [label="[]", style=solid]; +"16 Constant_126" -> "13 Add_2/fq_weights_0" [label="[]", style=solid]; +"17 Constant_125" -> "13 Add_2/fq_weights_0" [label="[]", style=solid]; +"18 Constant_124" -> "13 Add_2/fq_weights_0" [label="[1, 6, 1, 1]", style=solid]; +"19 Constant_96" -> "3 Transpose" [label="[4]", style=dashed]; +"20 Constant_101" -> "4 Concat_1/fq_input_0" [label="[]", style=solid]; +"21 Constant_100" -> "4 Concat_1/fq_input_0" [label="[]", style=solid]; +"22 Constant_99" -> "4 Concat_1/fq_input_0" [label="[]", style=solid]; +"23 Constant_98" -> "4 Concat_1/fq_input_0" [label="[]", style=solid]; +"24 Conv_3/fq_weights_0" -> "11 Conv_3" [label="[6, 12, 1, 1]", style=solid]; +"25 Constant_121" -> "24 Conv_3/fq_weights_0" [label="[]", style=solid]; +"26 Constant_120" -> "24 Conv_3/fq_weights_0" [label="[]", style=solid]; +"27 Constant_119" -> "24 Conv_3/fq_weights_0" [label="[]", style=solid]; +"28 Constant_118" -> "24 Conv_3/fq_weights_0" [label="[]", style=solid]; +"29 Constant_117" -> "24 Conv_3/fq_weights_0" [label="[6, 12, 1, 1]", style=solid]; +"30 Constant_115" -> "10 Conv_3/fq_input_0" [label="[]", style=solid]; +"31 Constant_114" -> "10 Conv_3/fq_input_0" [label="[]", style=solid]; +"32 Constant_113" -> "10 Conv_3/fq_input_0" [label="[]", style=solid]; +"33 Constant_112" -> "10 Conv_3/fq_input_0" [label="[]", style=solid]; +"34 Conv_2/fq_weights_0" -> "7 Conv_2" [label="[12, 6, 1, 1]", style=solid]; +"35 Constant_108" -> "34 Conv_2/fq_weights_0" [label="[]", style=solid]; +"36 Constant_107" -> "34 Conv_2/fq_weights_0" [label="[]", style=solid]; +"37 Constant_106" -> "34 Conv_2/fq_weights_0" [label="[]", style=solid]; +"38 Constant_105" -> "34 Conv_2/fq_weights_0" [label="[]", style=solid]; +"39 Constant_104" -> "34 Conv_2/fq_weights_0" [label="[12, 6, 1, 1]", style=solid]; +} diff --git a/tests/openvino/native/data/reference_graphs/original_nncf_graph/mobilenet-v3-small-1.0-224-tf.dot b/tests/openvino/native/data/reference_graphs/original_nncf_graph/mobilenet-v3-small-1.0-224-tf.dot index 5f9334d7821..195becdc1a1 100644 --- a/tests/openvino/native/data/reference_graphs/original_nncf_graph/mobilenet-v3-small-1.0-224-tf.dot +++ b/tests/openvino/native/data/reference_graphs/original_nncf_graph/mobilenet-v3-small-1.0-224-tf.dot @@ -1,628 +1,628 @@ strict digraph { "0 input_1" [id=0, type=Parameter]; -"1 Transpose_9545" [id=1, type=Transpose]; -"2 Transpose_9539" [id=2, type=Multiply]; -"3 Transpose_2342" [id=3, type=Add]; -"4 Multiply_11760" [id=4, type=Convolution]; -"5 Transpose_6952" [id=5, type=Add]; -"6 Transpose_6970" [id=6, type=HSwish]; -"7 Multiply_11774" [id=7, type=GroupConvolution]; -"8 Transpose_7019" [id=8, type=Add]; -"9 Relu_7020" [id=9, type=Relu]; -"10 Transpose_7025" [id=10, type=ReduceMean]; -"11 Transpose_7057" [id=11, type=Multiply]; -"12 Convolution_2431" [id=12, type=Convolution]; -"13 Multiply_11788" [id=13, type=Convolution]; -"14 Transpose_7031" [id=14, type=Add]; -"15 Transpose_7081" [id=15, type=Add]; -"16 Relu_7032" [id=16, type=Relu]; -"17 Multiply_11802" [id=17, type=Convolution]; -"18 Convolution_2440" [id=18, 
type=Convolution]; -"19 Transpose_7105" [id=19, type=Add]; -"20 Transpose_9591" [id=20, type=Add]; -"21 Relu_7106" [id=21, type=Relu]; -"22 Transpose_7055" [id=22, type=HSigmoid]; -"23 Multiply_11816" [id=23, type=GroupConvolution]; -"24 Transpose_7156" [id=24, type=Add]; -"25 Relu_7157" [id=25, type=Relu]; -"26 Multiply_11830" [id=26, type=Convolution]; -"27 Transpose_7182" [id=27, type=Add]; -"28 Multiply_11844" [id=28, type=Convolution]; -"29 Transpose_7262" [id=29, type=Add]; -"30 Transpose_7206" [id=30, type=Add]; -"31 Multiply_11886" [id=31, type=Convolution]; -"32 Relu_7207" [id=32, type=Relu]; -"33 Transpose_7286" [id=33, type=Add]; -"34 Multiply_11858" [id=34, type=GroupConvolution]; -"35 Transpose_7304" [id=35, type=HSwish]; -"36 Transpose_7232" [id=36, type=Add]; -"37 Multiply_11900" [id=37, type=GroupConvolution]; -"38 Relu_7233" [id=38, type=Relu]; -"39 Transpose_7353" [id=39, type=Add]; -"40 Multiply_11872" [id=40, type=Convolution]; -"41 Transpose_7371" [id=41, type=HSwish]; -"42 Transpose_7260" [id=42, type=Add]; -"43 Transpose_7375" [id=43, type=ReduceMean]; -"44 Transpose_7407" [id=44, type=Multiply]; -"45 Convolution_2758" [id=45, type=Convolution]; -"46 Multiply_11914" [id=46, type=Convolution]; -"47 Transpose_7381" [id=47, type=Add]; -"48 Transpose_7431" [id=48, type=Add]; -"49 Relu_7382" [id=49, type=Relu]; -"50 Multiply_11928" [id=50, type=Convolution]; -"51 Transpose_7579" [id=51, type=Add]; -"52 Convolution_2767" [id=52, type=Convolution]; -"53 Transpose_7455" [id=53, type=Add]; -"54 Multiply_11970" [id=54, type=Convolution]; -"55 Transpose_7727" [id=55, type=Add]; -"56 Transpose_9691" [id=56, type=Add]; -"57 Transpose_7473" [id=57, type=HSwish]; -"58 Transpose_7603" [id=58, type=Add]; -"59 Multiply_12012" [id=59, type=Convolution]; -"60 Transpose_7405" [id=60, type=HSigmoid]; -"61 Multiply_11942" [id=61, type=GroupConvolution]; -"62 Transpose_7621" [id=62, type=HSwish]; -"63 Transpose_7751" [id=63, type=Add]; -"64 Transpose_7497" [id=64, type=Add]; -"65 Multiply_11984" [id=65, type=GroupConvolution]; -"66 Transpose_7769" [id=66, type=HSwish]; -"67 Transpose_7515" [id=67, type=HSwish]; -"68 Transpose_7645" [id=68, type=Add]; -"69 Multiply_12026" [id=69, type=GroupConvolution]; -"70 Transpose_7519" [id=70, type=ReduceMean]; -"71 Transpose_7551" [id=71, type=Multiply]; -"72 Transpose_7663" [id=72, type=HSwish]; -"73 Transpose_7793" [id=73, type=Add]; -"74 Convolution_2868" [id=74, type=Convolution]; -"75 Multiply_11956" [id=75, type=Convolution]; -"76 Transpose_7667" [id=76, type=ReduceMean]; -"77 Transpose_7699" [id=77, type=Multiply]; -"78 Transpose_7811" [id=78, type=HSwish]; -"79 Transpose_7525" [id=79, type=Add]; -"80 Transpose_7577" [id=80, type=Add]; -"81 Convolution_2979" [id=81, type=Convolution]; -"82 Multiply_11998" [id=82, type=Convolution]; -"83 Transpose_7815" [id=83, type=ReduceMean]; -"84 Transpose_7847" [id=84, type=Multiply]; -"85 Relu_7526" [id=85, type=Relu]; -"86 Transpose_7673" [id=86, type=Add]; -"87 Transpose_7725" [id=87, type=Add]; -"88 Convolution_3090" [id=88, type=Convolution]; -"89 Multiply_12040" [id=89, type=Convolution]; -"90 Convolution_2877" [id=90, type=Convolution]; -"91 Relu_7674" [id=91, type=Relu]; -"92 Transpose_7821" [id=92, type=Add]; -"93 Transpose_7871" [id=93, type=Add]; -"94 Transpose_9759" [id=94, type=Add]; -"95 Convolution_2988" [id=95, type=Convolution]; -"96 Relu_7822" [id=96, type=Relu]; -"97 Multiply_12054" [id=97, type=Convolution]; -"98 Transpose_8019" [id=98, type=Add]; -"99 Transpose_7549" [id=99, 
type=HSigmoid]; -"100 Transpose_9859" [id=100, type=Add]; -"101 Convolution_3099" [id=101, type=Convolution]; -"102 Transpose_7895" [id=102, type=Add]; -"103 Multiply_12096" [id=103, type=Convolution]; -"104 Transpose_7697" [id=104, type=HSigmoid]; -"105 Transpose_9959" [id=105, type=Add]; -"106 Transpose_7913" [id=106, type=HSwish]; -"107 Transpose_8043" [id=107, type=Add]; -"108 Transpose_7845" [id=108, type=HSigmoid]; -"109 Multiply_12068" [id=109, type=GroupConvolution]; -"110 Transpose_8061" [id=110, type=HSwish]; -"111 Transpose_7937" [id=111, type=Add]; -"112 Multiply_12110" [id=112, type=GroupConvolution]; -"113 Transpose_7955" [id=113, type=HSwish]; -"114 Transpose_8110" [id=114, type=Add]; -"115 Transpose_7959" [id=115, type=ReduceMean]; -"116 Transpose_7991" [id=116, type=Multiply]; -"117 Transpose_8128" [id=117, type=HSwish]; -"118 Convolution_3200" [id=118, type=Convolution]; -"119 Multiply_12082" [id=119, type=Convolution]; -"120 Transpose_8132" [id=120, type=ReduceMean]; -"121 Transpose_8164" [id=121, type=Multiply]; -"122 Transpose_7965" [id=122, type=Add]; -"123 Transpose_8017" [id=123, type=Add]; -"124 Convolution_3337" [id=124, type=Convolution]; -"125 Multiply_12124" [id=125, type=Convolution]; -"126 Relu_7966" [id=126, type=Relu]; -"127 Transpose_8138" [id=127, type=Add]; -"128 Transpose_8188" [id=128, type=Add]; -"129 Convolution_3209" [id=129, type=Convolution]; -"130 Relu_8139" [id=130, type=Relu]; -"131 Multiply_12138" [id=131, type=Convolution]; -"132 Transpose_8336" [id=132, type=Add]; -"133 Transpose_10027" [id=133, type=Add]; -"134 Convolution_3346" [id=134, type=Convolution]; -"135 Transpose_8212" [id=135, type=Add]; -"136 Multiply_12180" [id=136, type=Convolution]; -"137 Transpose_8484" [id=137, type=Add]; -"138 Transpose_7989" [id=138, type=HSigmoid]; -"139 Transpose_10127" [id=139, type=Add]; -"140 Transpose_8230" [id=140, type=HSwish]; -"141 Transpose_8360" [id=141, type=Add]; -"142 Multiply_12222" [id=142, type=Convolution]; -"143 Transpose_8162" [id=143, type=HSigmoid]; -"144 Multiply_12152" [id=144, type=GroupConvolution]; -"145 Transpose_8378" [id=145, type=HSwish]; -"146 Transpose_8508" [id=146, type=Add]; -"147 Transpose_8254" [id=147, type=Add]; -"148 Multiply_12194" [id=148, type=GroupConvolution]; -"149 Transpose_8526" [id=149, type=HSwish]; -"150 Transpose_8272" [id=150, type=HSwish]; -"151 Transpose_8402" [id=151, type=Add]; -"152 Transpose_8530" [id=152, type=ReduceMean]; -"153 Transpose_8276" [id=153, type=ReduceMean]; -"154 Transpose_8308" [id=154, type=Multiply]; -"155 Transpose_8420" [id=155, type=HSwish]; -"156 Convolution_3637" [id=156, type=Convolution]; -"157 Convolution_3447" [id=157, type=Convolution]; -"158 Multiply_12166" [id=158, type=Convolution]; -"159 Transpose_8424" [id=159, type=ReduceMean]; -"160 Transpose_8456" [id=160, type=Multiply]; -"161 Transpose_8536" [id=161, type=Add]; -"162 Transpose_8282" [id=162, type=Add]; -"163 Transpose_8334" [id=163, type=Add]; -"164 Convolution_3558" [id=164, type=Convolution]; -"165 Multiply_12208" [id=165, type=Convolution]; -"166 Transpose_8554" [id=166, type=HSwish]; -"167 Relu_8283" [id=167, type=Relu]; -"168 Transpose_8430" [id=168, type=Add]; -"169 Transpose_8482" [id=169, type=Add]; -"170 Convolution_3649" [id=170, type=Convolution]; -"171 Convolution_3456" [id=171, type=Convolution]; -"172 Relu_8431" [id=172, type=Relu]; -"173 Transpose_10375" [id=173, type=Reshape]; -"174 Transpose_10195" [id=174, type=Add]; -"175 Convolution_3567" [id=175, type=Convolution]; -"176 
MobilenetV3small/Logits/BiasAdd" [id=176, type=Add]; -"177 Transpose_8306" [id=177, type=HSigmoid]; -"178 Transpose_10295" [id=178, type=Add]; +"1 Transpose_7780" [id=1, type=Transpose]; +"2 Transpose_7774" [id=2, type=Multiply]; +"3 Transpose_710" [id=3, type=Add]; +"4 Multiply_9167" [id=4, type=Convolution]; +"5 Transpose_5170" [id=5, type=Add]; +"6 Transpose_5188" [id=6, type=HSwish]; +"7 Multiply_9181" [id=7, type=GroupConvolution]; +"8 Transpose_5239" [id=8, type=Add]; +"9 Transpose_5241" [id=9, type=Relu]; +"10 Transpose_5245" [id=10, type=ReduceMean]; +"11 Transpose_5277" [id=11, type=Multiply]; +"12 Convolution_801" [id=12, type=Convolution]; +"13 Multiply_9195" [id=13, type=Convolution]; +"14 Transpose_5251" [id=14, type=Add]; +"15 Transpose_5301" [id=15, type=Add]; +"16 Transpose_5253" [id=16, type=Relu]; +"17 Multiply_9209" [id=17, type=Convolution]; +"18 Convolution_810" [id=18, type=Convolution]; +"19 Transpose_5325" [id=19, type=Add]; +"20 Transpose_5259" [id=20, type=Add]; +"21 Transpose_5327" [id=21, type=Relu]; +"22 Transpose_5273" [id=22, type=HSigmoid]; +"23 Multiply_9223" [id=23, type=GroupConvolution]; +"24 Transpose_5378" [id=24, type=Add]; +"25 Transpose_5380" [id=25, type=Relu]; +"26 Multiply_9237" [id=26, type=Convolution]; +"27 Transpose_5404" [id=27, type=Add]; +"28 Multiply_9251" [id=28, type=Convolution]; +"29 Transpose_5484" [id=29, type=Add]; +"30 Transpose_5428" [id=30, type=Add]; +"31 Multiply_9293" [id=31, type=Convolution]; +"32 Transpose_5430" [id=32, type=Relu]; +"33 Transpose_5508" [id=33, type=Add]; +"34 Multiply_9265" [id=34, type=GroupConvolution]; +"35 Transpose_5526" [id=35, type=HSwish]; +"36 Transpose_5454" [id=36, type=Add]; +"37 Multiply_9307" [id=37, type=GroupConvolution]; +"38 Transpose_5456" [id=38, type=Relu]; +"39 Transpose_5577" [id=39, type=Add]; +"40 Multiply_9279" [id=40, type=Convolution]; +"41 Transpose_5595" [id=41, type=HSwish]; +"42 Transpose_5480" [id=42, type=Add]; +"43 Transpose_5599" [id=43, type=ReduceMean]; +"44 Transpose_5631" [id=44, type=Multiply]; +"45 Convolution_1132" [id=45, type=Convolution]; +"46 Multiply_9321" [id=46, type=Convolution]; +"47 Transpose_5605" [id=47, type=Add]; +"48 Transpose_5655" [id=48, type=Add]; +"49 Transpose_5607" [id=49, type=Relu]; +"50 Multiply_9335" [id=50, type=Convolution]; +"51 Transpose_5803" [id=51, type=Add]; +"52 Convolution_1141" [id=52, type=Convolution]; +"53 Transpose_5679" [id=53, type=Add]; +"54 Multiply_9377" [id=54, type=Convolution]; +"55 Transpose_5951" [id=55, type=Add]; +"56 Transpose_5613" [id=56, type=Add]; +"57 Transpose_5697" [id=57, type=HSwish]; +"58 Transpose_5827" [id=58, type=Add]; +"59 Multiply_9419" [id=59, type=Convolution]; +"60 Transpose_5627" [id=60, type=HSigmoid]; +"61 Multiply_9349" [id=61, type=GroupConvolution]; +"62 Transpose_5845" [id=62, type=HSwish]; +"63 Transpose_5975" [id=63, type=Add]; +"64 Transpose_5721" [id=64, type=Add]; +"65 Multiply_9391" [id=65, type=GroupConvolution]; +"66 Transpose_5993" [id=66, type=HSwish]; +"67 Transpose_5739" [id=67, type=HSwish]; +"68 Transpose_5869" [id=68, type=Add]; +"69 Multiply_9433" [id=69, type=GroupConvolution]; +"70 Transpose_5743" [id=70, type=ReduceMean]; +"71 Transpose_5775" [id=71, type=Multiply]; +"72 Transpose_5887" [id=72, type=HSwish]; +"73 Transpose_6017" [id=73, type=Add]; +"74 Convolution_1242" [id=74, type=Convolution]; +"75 Multiply_9363" [id=75, type=Convolution]; +"76 Transpose_5891" [id=76, type=ReduceMean]; +"77 Transpose_5923" [id=77, type=Multiply]; +"78 Transpose_6035" [id=78, 
type=HSwish]; +"79 Transpose_5749" [id=79, type=Add]; +"80 Transpose_5799" [id=80, type=Add]; +"81 Convolution_1353" [id=81, type=Convolution]; +"82 Multiply_9405" [id=82, type=Convolution]; +"83 Transpose_6039" [id=83, type=ReduceMean]; +"84 Transpose_6071" [id=84, type=Multiply]; +"85 Transpose_5751" [id=85, type=Relu]; +"86 Transpose_5897" [id=86, type=Add]; +"87 Transpose_5947" [id=87, type=Add]; +"88 Convolution_1464" [id=88, type=Convolution]; +"89 Multiply_9447" [id=89, type=Convolution]; +"90 Convolution_1251" [id=90, type=Convolution]; +"91 Transpose_5899" [id=91, type=Relu]; +"92 Transpose_6045" [id=92, type=Add]; +"93 Transpose_6095" [id=93, type=Add]; +"94 Transpose_5757" [id=94, type=Add]; +"95 Convolution_1362" [id=95, type=Convolution]; +"96 Transpose_6047" [id=96, type=Relu]; +"97 Multiply_9461" [id=97, type=Convolution]; +"98 Transpose_6243" [id=98, type=Add]; +"99 Transpose_5771" [id=99, type=HSigmoid]; +"100 Transpose_5905" [id=100, type=Add]; +"101 Convolution_1473" [id=101, type=Convolution]; +"102 Transpose_6119" [id=102, type=Add]; +"103 Multiply_9503" [id=103, type=Convolution]; +"104 Transpose_5919" [id=104, type=HSigmoid]; +"105 Transpose_6053" [id=105, type=Add]; +"106 Transpose_6137" [id=106, type=HSwish]; +"107 Transpose_6267" [id=107, type=Add]; +"108 Transpose_6067" [id=108, type=HSigmoid]; +"109 Multiply_9475" [id=109, type=GroupConvolution]; +"110 Transpose_6285" [id=110, type=HSwish]; +"111 Transpose_6161" [id=111, type=Add]; +"112 Multiply_9517" [id=112, type=GroupConvolution]; +"113 Transpose_6179" [id=113, type=HSwish]; +"114 Transpose_6336" [id=114, type=Add]; +"115 Transpose_6183" [id=115, type=ReduceMean]; +"116 Transpose_6215" [id=116, type=Multiply]; +"117 Transpose_6354" [id=117, type=HSwish]; +"118 Convolution_1574" [id=118, type=Convolution]; +"119 Multiply_9489" [id=119, type=Convolution]; +"120 Transpose_6358" [id=120, type=ReduceMean]; +"121 Transpose_6390" [id=121, type=Multiply]; +"122 Transpose_6189" [id=122, type=Add]; +"123 Transpose_6239" [id=123, type=Add]; +"124 Convolution_1713" [id=124, type=Convolution]; +"125 Multiply_9531" [id=125, type=Convolution]; +"126 Transpose_6191" [id=126, type=Relu]; +"127 Transpose_6364" [id=127, type=Add]; +"128 Transpose_6414" [id=128, type=Add]; +"129 Convolution_1583" [id=129, type=Convolution]; +"130 Transpose_6366" [id=130, type=Relu]; +"131 Multiply_9545" [id=131, type=Convolution]; +"132 Transpose_6562" [id=132, type=Add]; +"133 Transpose_6197" [id=133, type=Add]; +"134 Convolution_1722" [id=134, type=Convolution]; +"135 Transpose_6438" [id=135, type=Add]; +"136 Multiply_9587" [id=136, type=Convolution]; +"137 Transpose_6710" [id=137, type=Add]; +"138 Transpose_6211" [id=138, type=HSigmoid]; +"139 Transpose_6372" [id=139, type=Add]; +"140 Transpose_6456" [id=140, type=HSwish]; +"141 Transpose_6586" [id=141, type=Add]; +"142 Multiply_9629" [id=142, type=Convolution]; +"143 Transpose_6386" [id=143, type=HSigmoid]; +"144 Multiply_9559" [id=144, type=GroupConvolution]; +"145 Transpose_6604" [id=145, type=HSwish]; +"146 Transpose_6734" [id=146, type=Add]; +"147 Transpose_6480" [id=147, type=Add]; +"148 Multiply_9601" [id=148, type=GroupConvolution]; +"149 Transpose_6752" [id=149, type=HSwish]; +"150 Transpose_6498" [id=150, type=HSwish]; +"151 Transpose_6628" [id=151, type=Add]; +"152 Transpose_6756" [id=152, type=ReduceMean]; +"153 Transpose_6502" [id=153, type=ReduceMean]; +"154 Transpose_6534" [id=154, type=Multiply]; +"155 Transpose_6646" [id=155, type=HSwish]; +"156 Convolution_2013" [id=156, 
type=Convolution]; +"157 Convolution_1823" [id=157, type=Convolution]; +"158 Multiply_9573" [id=158, type=Convolution]; +"159 Transpose_6650" [id=159, type=ReduceMean]; +"160 Transpose_6682" [id=160, type=Multiply]; +"161 Transpose_6762" [id=161, type=Add]; +"162 Transpose_6508" [id=162, type=Add]; +"163 Transpose_6558" [id=163, type=Add]; +"164 Convolution_1934" [id=164, type=Convolution]; +"165 Multiply_9615" [id=165, type=Convolution]; +"166 Transpose_6780" [id=166, type=HSwish]; +"167 Transpose_6510" [id=167, type=Relu]; +"168 Transpose_6656" [id=168, type=Add]; +"169 Transpose_6706" [id=169, type=Add]; +"170 Convolution_2025" [id=170, type=Convolution]; +"171 Convolution_1832" [id=171, type=Convolution]; +"172 Transpose_6658" [id=172, type=Relu]; +"173 Transpose_6786" [id=173, type=Add]; +"174 Transpose_6516" [id=174, type=Add]; +"175 Convolution_1943" [id=175, type=Convolution]; +"176 MobilenetV3small/Logits/BiasAdd" [id=176, type=Reshape]; +"177 Transpose_6530" [id=177, type=HSigmoid]; +"178 Transpose_6664" [id=178, type=Add]; "179 MobilenetV3small/flatten/Reshape" [id=179, type=Reshape]; -"180 Transpose_8454" [id=180, type=HSigmoid]; +"180 Transpose_6678" [id=180, type=HSigmoid]; "181 MobilenetV3small/Predictions/Softmax" [id=181, type=Softmax]; "182 Predictions" [id=182, type=Result]; "183 MobilenetV3small/flatten/Const" [id=183, type=Constant]; -"184 Transpose_10377" [id=184, type=Constant]; -"185 Constant_11480" [id=185, type=Constant]; -"186 Transpose_3648" [id=186, type=Constant]; -"187 Transpose_8534" [id=187, type=Constant]; -"188 Transpose_3636" [id=188, type=Constant]; -"189 Constant_8528" [id=189, type=Constant]; -"190 Constant_12230" [id=190, type=Constant]; -"191 Multiply_12423" [id=191, type=Constant]; -"192 Constant_12216" [id=192, type=Constant]; -"193 Multiply_12417" [id=193, type=Constant]; -"194 Transpose_8436" [id=194, type=Constant]; -"195 Transpose_3566" [id=195, type=Constant]; -"196 Transpose_8428" [id=196, type=Constant]; -"197 Transpose_3557" [id=197, type=Constant]; -"198 Constant_8422" [id=198, type=Constant]; -"199 Constant_12202" [id=199, type=Constant]; -"200 Multiply_12412" [id=200, type=Constant]; -"201 Constant_12188" [id=201, type=Constant]; -"202 Multiply_12406" [id=202, type=Constant]; -"203 Constant_12174" [id=203, type=Constant]; -"204 Multiply_12400" [id=204, type=Constant]; -"205 Transpose_8288" [id=205, type=Constant]; -"206 Transpose_3455" [id=206, type=Constant]; -"207 Transpose_8280" [id=207, type=Constant]; -"208 Transpose_3446" [id=208, type=Constant]; -"209 Constant_8274" [id=209, type=Constant]; -"210 Constant_12160" [id=210, type=Constant]; -"211 Multiply_12395" [id=211, type=Constant]; -"212 Constant_12146" [id=212, type=Constant]; -"213 Multiply_12389" [id=213, type=Constant]; -"214 Constant_12132" [id=214, type=Constant]; -"215 Multiply_12383" [id=215, type=Constant]; -"216 Transpose_8144" [id=216, type=Constant]; -"217 Transpose_3345" [id=217, type=Constant]; -"218 Transpose_8136" [id=218, type=Constant]; -"219 Transpose_3336" [id=219, type=Constant]; -"220 Constant_8130" [id=220, type=Constant]; -"221 Constant_12118" [id=221, type=Constant]; -"222 Multiply_12378" [id=222, type=Constant]; -"223 Constant_12104" [id=223, type=Constant]; -"224 Multiply_12372" [id=224, type=Constant]; -"225 Constant_12090" [id=225, type=Constant]; -"226 Multiply_12366" [id=226, type=Constant]; -"227 Transpose_7971" [id=227, type=Constant]; -"228 Transpose_3208" [id=228, type=Constant]; -"229 Transpose_7963" [id=229, type=Constant]; -"230 
Transpose_3199" [id=230, type=Constant]; -"231 Constant_7957" [id=231, type=Constant]; -"232 Constant_12076" [id=232, type=Constant]; -"233 Multiply_12361" [id=233, type=Constant]; -"234 Constant_12062" [id=234, type=Constant]; -"235 Multiply_12355" [id=235, type=Constant]; -"236 Constant_12048" [id=236, type=Constant]; -"237 Multiply_12349" [id=237, type=Constant]; -"238 Transpose_7827" [id=238, type=Constant]; -"239 Transpose_3098" [id=239, type=Constant]; -"240 Transpose_7819" [id=240, type=Constant]; -"241 Transpose_3089" [id=241, type=Constant]; -"242 Constant_7813" [id=242, type=Constant]; -"243 Constant_12034" [id=243, type=Constant]; -"244 Multiply_12344" [id=244, type=Constant]; -"245 Constant_12020" [id=245, type=Constant]; -"246 Multiply_12338" [id=246, type=Constant]; -"247 Constant_12006" [id=247, type=Constant]; -"248 Multiply_12332" [id=248, type=Constant]; -"249 Transpose_7679" [id=249, type=Constant]; -"250 Transpose_2987" [id=250, type=Constant]; -"251 Transpose_7671" [id=251, type=Constant]; -"252 Transpose_2978" [id=252, type=Constant]; -"253 Constant_7665" [id=253, type=Constant]; -"254 Constant_11992" [id=254, type=Constant]; -"255 Multiply_12327" [id=255, type=Constant]; -"256 Constant_11978" [id=256, type=Constant]; -"257 Multiply_12321" [id=257, type=Constant]; -"258 Constant_11964" [id=258, type=Constant]; -"259 Multiply_12315" [id=259, type=Constant]; -"260 Transpose_7531" [id=260, type=Constant]; -"261 Transpose_2876" [id=261, type=Constant]; -"262 Transpose_7523" [id=262, type=Constant]; -"263 Transpose_2867" [id=263, type=Constant]; -"264 Constant_7517" [id=264, type=Constant]; -"265 Constant_11950" [id=265, type=Constant]; -"266 Multiply_12310" [id=266, type=Constant]; -"267 Constant_11936" [id=267, type=Constant]; -"268 Multiply_12304" [id=268, type=Constant]; -"269 Constant_11922" [id=269, type=Constant]; -"270 Multiply_12298" [id=270, type=Constant]; -"271 Transpose_7387" [id=271, type=Constant]; -"272 Transpose_2766" [id=272, type=Constant]; -"273 Transpose_7379" [id=273, type=Constant]; -"274 Transpose_2757" [id=274, type=Constant]; -"275 Constant_7373" [id=275, type=Constant]; -"276 Constant_11908" [id=276, type=Constant]; -"277 Multiply_12293" [id=277, type=Constant]; -"278 Constant_11894" [id=278, type=Constant]; -"279 Multiply_12287" [id=279, type=Constant]; -"280 Constant_11880" [id=280, type=Constant]; -"281 Multiply_12281" [id=281, type=Constant]; -"282 Constant_11866" [id=282, type=Constant]; -"283 Multiply_12276" [id=283, type=Constant]; -"284 Constant_11852" [id=284, type=Constant]; -"285 Multiply_12270" [id=285, type=Constant]; -"286 Constant_11838" [id=286, type=Constant]; -"287 Multiply_12264" [id=287, type=Constant]; -"288 Constant_11824" [id=288, type=Constant]; -"289 Multiply_12259" [id=289, type=Constant]; -"290 Constant_11810" [id=290, type=Constant]; -"291 Multiply_12253" [id=291, type=Constant]; -"292 Constant_11796" [id=292, type=Constant]; -"293 Multiply_12247" [id=293, type=Constant]; -"294 Transpose_7037" [id=294, type=Constant]; -"295 Transpose_2439" [id=295, type=Constant]; -"296 Transpose_7029" [id=296, type=Constant]; -"297 Transpose_2430" [id=297, type=Constant]; -"298 Constant_7023" [id=298, type=Constant]; -"299 Constant_11782" [id=299, type=Constant]; -"300 Multiply_12242" [id=300, type=Constant]; -"301 Constant_11768" [id=301, type=Constant]; -"302 Gather_12661" [id=302, type=Constant]; -"303 Unsqueeze_9541" [id=303, type=Constant]; -"304 Unsqueeze_9547" [id=304, type=Constant]; -"305 Constant_9544" [id=305, 
type=Constant]; -"0 input_1" -> "1 Transpose_9545" [label="[1, 224, 224, 3]", style=solid]; -"1 Transpose_9545" -> "2 Transpose_9539" [label="[1, 3, 224, 224]", style=solid]; -"2 Transpose_9539" -> "3 Transpose_2342" [label="[1, 3, 224, 224]", style=solid]; -"3 Transpose_2342" -> "4 Multiply_11760" [label="[1, 3, 224, 224]", style=solid]; -"4 Multiply_11760" -> "5 Transpose_6952" [label="[1, 16, 112, 112]", style=solid]; -"5 Transpose_6952" -> "6 Transpose_6970" [label="[1, 16, 112, 112]", style=solid]; -"6 Transpose_6970" -> "7 Multiply_11774" [label="[1, 16, 112, 112]", style=solid]; -"7 Multiply_11774" -> "8 Transpose_7019" [label="[1, 16, 56, 56]", style=solid]; -"8 Transpose_7019" -> "9 Relu_7020" [label="[1, 16, 56, 56]", style=solid]; -"9 Relu_7020" -> "10 Transpose_7025" [label="[1, 16, 56, 56]", style=solid]; -"9 Relu_7020" -> "11 Transpose_7057" [label="[1, 16, 56, 56]", style=solid]; -"10 Transpose_7025" -> "12 Convolution_2431" [label="[1, 16, 1, 1]", style=solid]; -"11 Transpose_7057" -> "13 Multiply_11788" [label="[1, 16, 56, 56]", style=solid]; -"12 Convolution_2431" -> "14 Transpose_7031" [label="[1, 8, 1, 1]", style=solid]; -"13 Multiply_11788" -> "15 Transpose_7081" [label="[1, 16, 56, 56]", style=solid]; -"14 Transpose_7031" -> "16 Relu_7032" [label="[1, 8, 1, 1]", style=solid]; -"15 Transpose_7081" -> "17 Multiply_11802" [label="[1, 16, 56, 56]", style=solid]; -"16 Relu_7032" -> "18 Convolution_2440" [label="[1, 8, 1, 1]", style=solid]; -"17 Multiply_11802" -> "19 Transpose_7105" [label="[1, 72, 56, 56]", style=solid]; -"18 Convolution_2440" -> "20 Transpose_9591" [label="[1, 16, 1, 1]", style=solid]; -"19 Transpose_7105" -> "21 Relu_7106" [label="[1, 72, 56, 56]", style=solid]; -"20 Transpose_9591" -> "22 Transpose_7055" [label="[1, 16, 1, 1]", style=solid]; -"21 Relu_7106" -> "23 Multiply_11816" [label="[1, 72, 56, 56]", style=solid]; -"22 Transpose_7055" -> "11 Transpose_7057" [label="[1, 16, 1, 1]", style=solid]; -"23 Multiply_11816" -> "24 Transpose_7156" [label="[1, 72, 28, 28]", style=solid]; -"24 Transpose_7156" -> "25 Relu_7157" [label="[1, 72, 28, 28]", style=solid]; -"25 Relu_7157" -> "26 Multiply_11830" [label="[1, 72, 28, 28]", style=solid]; -"26 Multiply_11830" -> "27 Transpose_7182" [label="[1, 24, 28, 28]", style=solid]; -"27 Transpose_7182" -> "28 Multiply_11844" [label="[1, 24, 28, 28]", style=solid]; -"27 Transpose_7182" -> "29 Transpose_7262" [label="[1, 24, 28, 28]", style=solid]; -"28 Multiply_11844" -> "30 Transpose_7206" [label="[1, 88, 28, 28]", style=solid]; -"29 Transpose_7262" -> "31 Multiply_11886" [label="[1, 24, 28, 28]", style=solid]; -"30 Transpose_7206" -> "32 Relu_7207" [label="[1, 88, 28, 28]", style=solid]; -"31 Multiply_11886" -> "33 Transpose_7286" [label="[1, 96, 28, 28]", style=solid]; -"32 Relu_7207" -> "34 Multiply_11858" [label="[1, 88, 28, 28]", style=solid]; -"33 Transpose_7286" -> "35 Transpose_7304" [label="[1, 96, 28, 28]", style=solid]; -"34 Multiply_11858" -> "36 Transpose_7232" [label="[1, 88, 28, 28]", style=solid]; -"35 Transpose_7304" -> "37 Multiply_11900" [label="[1, 96, 28, 28]", style=solid]; -"36 Transpose_7232" -> "38 Relu_7233" [label="[1, 88, 28, 28]", style=solid]; -"37 Multiply_11900" -> "39 Transpose_7353" [label="[1, 96, 14, 14]", style=solid]; -"38 Relu_7233" -> "40 Multiply_11872" [label="[1, 88, 28, 28]", style=solid]; -"39 Transpose_7353" -> "41 Transpose_7371" [label="[1, 96, 14, 14]", style=solid]; -"40 Multiply_11872" -> "42 Transpose_7260" [label="[1, 24, 28, 28]", style=solid]; -"41 
Transpose_7371" -> "43 Transpose_7375" [label="[1, 96, 14, 14]", style=solid]; -"41 Transpose_7371" -> "44 Transpose_7407" [label="[1, 96, 14, 14]", style=solid]; -"42 Transpose_7260" -> "29 Transpose_7262" [label="[1, 24, 28, 28]", style=solid]; -"43 Transpose_7375" -> "45 Convolution_2758" [label="[1, 96, 1, 1]", style=solid]; -"44 Transpose_7407" -> "46 Multiply_11914" [label="[1, 96, 14, 14]", style=solid]; -"45 Convolution_2758" -> "47 Transpose_7381" [label="[1, 24, 1, 1]", style=solid]; -"46 Multiply_11914" -> "48 Transpose_7431" [label="[1, 40, 14, 14]", style=solid]; -"47 Transpose_7381" -> "49 Relu_7382" [label="[1, 24, 1, 1]", style=solid]; -"48 Transpose_7431" -> "50 Multiply_11928" [label="[1, 40, 14, 14]", style=solid]; -"48 Transpose_7431" -> "51 Transpose_7579" [label="[1, 40, 14, 14]", style=solid]; -"49 Relu_7382" -> "52 Convolution_2767" [label="[1, 24, 1, 1]", style=solid]; -"50 Multiply_11928" -> "53 Transpose_7455" [label="[1, 240, 14, 14]", style=solid]; -"51 Transpose_7579" -> "54 Multiply_11970" [label="[1, 40, 14, 14]", style=solid]; -"51 Transpose_7579" -> "55 Transpose_7727" [label="[1, 40, 14, 14]", style=solid]; -"52 Convolution_2767" -> "56 Transpose_9691" [label="[1, 96, 1, 1]", style=solid]; -"53 Transpose_7455" -> "57 Transpose_7473" [label="[1, 240, 14, 14]", style=solid]; -"54 Multiply_11970" -> "58 Transpose_7603" [label="[1, 240, 14, 14]", style=solid]; -"55 Transpose_7727" -> "59 Multiply_12012" [label="[1, 40, 14, 14]", style=solid]; -"56 Transpose_9691" -> "60 Transpose_7405" [label="[1, 96, 1, 1]", style=solid]; -"57 Transpose_7473" -> "61 Multiply_11942" [label="[1, 240, 14, 14]", style=solid]; -"58 Transpose_7603" -> "62 Transpose_7621" [label="[1, 240, 14, 14]", style=solid]; -"59 Multiply_12012" -> "63 Transpose_7751" [label="[1, 120, 14, 14]", style=solid]; -"60 Transpose_7405" -> "44 Transpose_7407" [label="[1, 96, 1, 1]", style=solid]; -"61 Multiply_11942" -> "64 Transpose_7497" [label="[1, 240, 14, 14]", style=solid]; -"62 Transpose_7621" -> "65 Multiply_11984" [label="[1, 240, 14, 14]", style=solid]; -"63 Transpose_7751" -> "66 Transpose_7769" [label="[1, 120, 14, 14]", style=solid]; -"64 Transpose_7497" -> "67 Transpose_7515" [label="[1, 240, 14, 14]", style=solid]; -"65 Multiply_11984" -> "68 Transpose_7645" [label="[1, 240, 14, 14]", style=solid]; -"66 Transpose_7769" -> "69 Multiply_12026" [label="[1, 120, 14, 14]", style=solid]; -"67 Transpose_7515" -> "70 Transpose_7519" [label="[1, 240, 14, 14]", style=solid]; -"67 Transpose_7515" -> "71 Transpose_7551" [label="[1, 240, 14, 14]", style=solid]; -"68 Transpose_7645" -> "72 Transpose_7663" [label="[1, 240, 14, 14]", style=solid]; -"69 Multiply_12026" -> "73 Transpose_7793" [label="[1, 120, 14, 14]", style=solid]; -"70 Transpose_7519" -> "74 Convolution_2868" [label="[1, 240, 1, 1]", style=solid]; -"71 Transpose_7551" -> "75 Multiply_11956" [label="[1, 240, 14, 14]", style=solid]; -"72 Transpose_7663" -> "76 Transpose_7667" [label="[1, 240, 14, 14]", style=solid]; -"72 Transpose_7663" -> "77 Transpose_7699" [label="[1, 240, 14, 14]", style=solid]; -"73 Transpose_7793" -> "78 Transpose_7811" [label="[1, 120, 14, 14]", style=solid]; -"74 Convolution_2868" -> "79 Transpose_7525" [label="[1, 64, 1, 1]", style=solid]; -"75 Multiply_11956" -> "80 Transpose_7577" [label="[1, 40, 14, 14]", style=solid]; -"76 Transpose_7667" -> "81 Convolution_2979" [label="[1, 240, 1, 1]", style=solid]; -"77 Transpose_7699" -> "82 Multiply_11998" [label="[1, 240, 14, 14]", style=solid]; -"78 Transpose_7811" -> 
"83 Transpose_7815" [label="[1, 120, 14, 14]", style=solid]; -"78 Transpose_7811" -> "84 Transpose_7847" [label="[1, 120, 14, 14]", style=solid]; -"79 Transpose_7525" -> "85 Relu_7526" [label="[1, 64, 1, 1]", style=solid]; -"80 Transpose_7577" -> "51 Transpose_7579" [label="[1, 40, 14, 14]", style=solid]; -"81 Convolution_2979" -> "86 Transpose_7673" [label="[1, 64, 1, 1]", style=solid]; -"82 Multiply_11998" -> "87 Transpose_7725" [label="[1, 40, 14, 14]", style=solid]; -"83 Transpose_7815" -> "88 Convolution_3090" [label="[1, 120, 1, 1]", style=solid]; -"84 Transpose_7847" -> "89 Multiply_12040" [label="[1, 120, 14, 14]", style=solid]; -"85 Relu_7526" -> "90 Convolution_2877" [label="[1, 64, 1, 1]", style=solid]; -"86 Transpose_7673" -> "91 Relu_7674" [label="[1, 64, 1, 1]", style=solid]; -"87 Transpose_7725" -> "55 Transpose_7727" [label="[1, 40, 14, 14]", style=solid]; -"88 Convolution_3090" -> "92 Transpose_7821" [label="[1, 32, 1, 1]", style=solid]; -"89 Multiply_12040" -> "93 Transpose_7871" [label="[1, 48, 14, 14]", style=solid]; -"90 Convolution_2877" -> "94 Transpose_9759" [label="[1, 240, 1, 1]", style=solid]; -"91 Relu_7674" -> "95 Convolution_2988" [label="[1, 64, 1, 1]", style=solid]; -"92 Transpose_7821" -> "96 Relu_7822" [label="[1, 32, 1, 1]", style=solid]; -"93 Transpose_7871" -> "97 Multiply_12054" [label="[1, 48, 14, 14]", style=solid]; -"93 Transpose_7871" -> "98 Transpose_8019" [label="[1, 48, 14, 14]", style=solid]; -"94 Transpose_9759" -> "99 Transpose_7549" [label="[1, 240, 1, 1]", style=solid]; -"95 Convolution_2988" -> "100 Transpose_9859" [label="[1, 240, 1, 1]", style=solid]; -"96 Relu_7822" -> "101 Convolution_3099" [label="[1, 32, 1, 1]", style=solid]; -"97 Multiply_12054" -> "102 Transpose_7895" [label="[1, 144, 14, 14]", style=solid]; -"98 Transpose_8019" -> "103 Multiply_12096" [label="[1, 48, 14, 14]", style=solid]; -"99 Transpose_7549" -> "71 Transpose_7551" [label="[1, 240, 1, 1]", style=solid]; -"100 Transpose_9859" -> "104 Transpose_7697" [label="[1, 240, 1, 1]", style=solid]; -"101 Convolution_3099" -> "105 Transpose_9959" [label="[1, 120, 1, 1]", style=solid]; -"102 Transpose_7895" -> "106 Transpose_7913" [label="[1, 144, 14, 14]", style=solid]; -"103 Multiply_12096" -> "107 Transpose_8043" [label="[1, 288, 14, 14]", style=solid]; -"104 Transpose_7697" -> "77 Transpose_7699" [label="[1, 240, 1, 1]", style=solid]; -"105 Transpose_9959" -> "108 Transpose_7845" [label="[1, 120, 1, 1]", style=solid]; -"106 Transpose_7913" -> "109 Multiply_12068" [label="[1, 144, 14, 14]", style=solid]; -"107 Transpose_8043" -> "110 Transpose_8061" [label="[1, 288, 14, 14]", style=solid]; -"108 Transpose_7845" -> "84 Transpose_7847" [label="[1, 120, 1, 1]", style=solid]; -"109 Multiply_12068" -> "111 Transpose_7937" [label="[1, 144, 14, 14]", style=solid]; -"110 Transpose_8061" -> "112 Multiply_12110" [label="[1, 288, 14, 14]", style=solid]; -"111 Transpose_7937" -> "113 Transpose_7955" [label="[1, 144, 14, 14]", style=solid]; -"112 Multiply_12110" -> "114 Transpose_8110" [label="[1, 288, 7, 7]", style=solid]; -"113 Transpose_7955" -> "115 Transpose_7959" [label="[1, 144, 14, 14]", style=solid]; -"113 Transpose_7955" -> "116 Transpose_7991" [label="[1, 144, 14, 14]", style=solid]; -"114 Transpose_8110" -> "117 Transpose_8128" [label="[1, 288, 7, 7]", style=solid]; -"115 Transpose_7959" -> "118 Convolution_3200" [label="[1, 144, 1, 1]", style=solid]; -"116 Transpose_7991" -> "119 Multiply_12082" [label="[1, 144, 14, 14]", style=solid]; -"117 Transpose_8128" -> "120 
Transpose_8132" [label="[1, 288, 7, 7]", style=solid]; -"117 Transpose_8128" -> "121 Transpose_8164" [label="[1, 288, 7, 7]", style=solid]; -"118 Convolution_3200" -> "122 Transpose_7965" [label="[1, 40, 1, 1]", style=solid]; -"119 Multiply_12082" -> "123 Transpose_8017" [label="[1, 48, 14, 14]", style=solid]; -"120 Transpose_8132" -> "124 Convolution_3337" [label="[1, 288, 1, 1]", style=solid]; -"121 Transpose_8164" -> "125 Multiply_12124" [label="[1, 288, 7, 7]", style=solid]; -"122 Transpose_7965" -> "126 Relu_7966" [label="[1, 40, 1, 1]", style=solid]; -"123 Transpose_8017" -> "98 Transpose_8019" [label="[1, 48, 14, 14]", style=solid]; -"124 Convolution_3337" -> "127 Transpose_8138" [label="[1, 72, 1, 1]", style=solid]; -"125 Multiply_12124" -> "128 Transpose_8188" [label="[1, 96, 7, 7]", style=solid]; -"126 Relu_7966" -> "129 Convolution_3209" [label="[1, 40, 1, 1]", style=solid]; -"127 Transpose_8138" -> "130 Relu_8139" [label="[1, 72, 1, 1]", style=solid]; -"128 Transpose_8188" -> "131 Multiply_12138" [label="[1, 96, 7, 7]", style=solid]; -"128 Transpose_8188" -> "132 Transpose_8336" [label="[1, 96, 7, 7]", style=solid]; -"129 Convolution_3209" -> "133 Transpose_10027" [label="[1, 144, 1, 1]", style=solid]; -"130 Relu_8139" -> "134 Convolution_3346" [label="[1, 72, 1, 1]", style=solid]; -"131 Multiply_12138" -> "135 Transpose_8212" [label="[1, 576, 7, 7]", style=solid]; -"132 Transpose_8336" -> "136 Multiply_12180" [label="[1, 96, 7, 7]", style=solid]; -"132 Transpose_8336" -> "137 Transpose_8484" [label="[1, 96, 7, 7]", style=solid]; -"133 Transpose_10027" -> "138 Transpose_7989" [label="[1, 144, 1, 1]", style=solid]; -"134 Convolution_3346" -> "139 Transpose_10127" [label="[1, 288, 1, 1]", style=solid]; -"135 Transpose_8212" -> "140 Transpose_8230" [label="[1, 576, 7, 7]", style=solid]; -"136 Multiply_12180" -> "141 Transpose_8360" [label="[1, 576, 7, 7]", style=solid]; -"137 Transpose_8484" -> "142 Multiply_12222" [label="[1, 96, 7, 7]", style=solid]; -"138 Transpose_7989" -> "116 Transpose_7991" [label="[1, 144, 1, 1]", style=solid]; -"139 Transpose_10127" -> "143 Transpose_8162" [label="[1, 288, 1, 1]", style=solid]; -"140 Transpose_8230" -> "144 Multiply_12152" [label="[1, 576, 7, 7]", style=solid]; -"141 Transpose_8360" -> "145 Transpose_8378" [label="[1, 576, 7, 7]", style=solid]; -"142 Multiply_12222" -> "146 Transpose_8508" [label="[1, 576, 7, 7]", style=solid]; -"143 Transpose_8162" -> "121 Transpose_8164" [label="[1, 288, 1, 1]", style=solid]; -"144 Multiply_12152" -> "147 Transpose_8254" [label="[1, 576, 7, 7]", style=solid]; -"145 Transpose_8378" -> "148 Multiply_12194" [label="[1, 576, 7, 7]", style=solid]; -"146 Transpose_8508" -> "149 Transpose_8526" [label="[1, 576, 7, 7]", style=solid]; -"147 Transpose_8254" -> "150 Transpose_8272" [label="[1, 576, 7, 7]", style=solid]; -"148 Multiply_12194" -> "151 Transpose_8402" [label="[1, 576, 7, 7]", style=solid]; -"149 Transpose_8526" -> "152 Transpose_8530" [label="[1, 576, 7, 7]", style=solid]; -"150 Transpose_8272" -> "153 Transpose_8276" [label="[1, 576, 7, 7]", style=solid]; -"150 Transpose_8272" -> "154 Transpose_8308" [label="[1, 576, 7, 7]", style=solid]; -"151 Transpose_8402" -> "155 Transpose_8420" [label="[1, 576, 7, 7]", style=solid]; -"152 Transpose_8530" -> "156 Convolution_3637" [label="[1, 576, 1, 1]", style=solid]; -"153 Transpose_8276" -> "157 Convolution_3447" [label="[1, 576, 1, 1]", style=solid]; -"154 Transpose_8308" -> "158 Multiply_12166" [label="[1, 576, 7, 7]", style=solid]; -"155 Transpose_8420" 
-> "159 Transpose_8424" [label="[1, 576, 7, 7]", style=solid]; -"155 Transpose_8420" -> "160 Transpose_8456" [label="[1, 576, 7, 7]", style=solid]; -"156 Convolution_3637" -> "161 Transpose_8536" [label="[1, 1024, 1, 1]", style=solid]; -"157 Convolution_3447" -> "162 Transpose_8282" [label="[1, 144, 1, 1]", style=solid]; -"158 Multiply_12166" -> "163 Transpose_8334" [label="[1, 96, 7, 7]", style=solid]; -"159 Transpose_8424" -> "164 Convolution_3558" [label="[1, 576, 1, 1]", style=solid]; -"160 Transpose_8456" -> "165 Multiply_12208" [label="[1, 576, 7, 7]", style=solid]; -"161 Transpose_8536" -> "166 Transpose_8554" [label="[1, 1024, 1, 1]", style=solid]; -"162 Transpose_8282" -> "167 Relu_8283" [label="[1, 144, 1, 1]", style=solid]; -"163 Transpose_8334" -> "132 Transpose_8336" [label="[1, 96, 7, 7]", style=solid]; -"164 Convolution_3558" -> "168 Transpose_8430" [label="[1, 144, 1, 1]", style=solid]; -"165 Multiply_12208" -> "169 Transpose_8482" [label="[1, 96, 7, 7]", style=solid]; -"166 Transpose_8554" -> "170 Convolution_3649" [label="[1, 1024, 1, 1]", style=solid]; -"167 Relu_8283" -> "171 Convolution_3456" [label="[1, 144, 1, 1]", style=solid]; -"168 Transpose_8430" -> "172 Relu_8431" [label="[1, 144, 1, 1]", style=solid]; -"169 Transpose_8482" -> "137 Transpose_8484" [label="[1, 96, 7, 7]", style=solid]; -"170 Convolution_3649" -> "173 Transpose_10375" [label="[1, 1000, 1, 1]", style=solid]; -"171 Convolution_3456" -> "174 Transpose_10195" [label="[1, 576, 1, 1]", style=solid]; -"172 Relu_8431" -> "175 Convolution_3567" [label="[1, 144, 1, 1]", style=solid]; -"173 Transpose_10375" -> "176 MobilenetV3small/Logits/BiasAdd" [label="[1, 1, 1, 1000]", style=solid]; -"174 Transpose_10195" -> "177 Transpose_8306" [label="[1, 576, 1, 1]", style=solid]; -"175 Convolution_3567" -> "178 Transpose_10295" [label="[1, 576, 1, 1]", style=solid]; +"184 Constant_8887" [id=184, type=Constant]; +"185 Transpose_6784" [id=185, type=Constant]; +"186 Transpose_2024" [id=186, type=Constant]; +"187 Transpose_6760" [id=187, type=Constant]; +"188 Transpose_2012" [id=188, type=Constant]; +"189 Constant_6754" [id=189, type=Constant]; +"190 Constant_9637" [id=190, type=Constant]; +"191 Multiply_9830" [id=191, type=Constant]; +"192 Constant_9623" [id=192, type=Constant]; +"193 Multiply_9824" [id=193, type=Constant]; +"194 Transpose_6662" [id=194, type=Constant]; +"195 Transpose_1942" [id=195, type=Constant]; +"196 Transpose_6654" [id=196, type=Constant]; +"197 Transpose_1933" [id=197, type=Constant]; +"198 Constant_6648" [id=198, type=Constant]; +"199 Constant_9609" [id=199, type=Constant]; +"200 Multiply_9819" [id=200, type=Constant]; +"201 Constant_9595" [id=201, type=Constant]; +"202 Multiply_9813" [id=202, type=Constant]; +"203 Constant_9581" [id=203, type=Constant]; +"204 Multiply_9807" [id=204, type=Constant]; +"205 Transpose_6514" [id=205, type=Constant]; +"206 Transpose_1831" [id=206, type=Constant]; +"207 Transpose_6506" [id=207, type=Constant]; +"208 Transpose_1822" [id=208, type=Constant]; +"209 Constant_6500" [id=209, type=Constant]; +"210 Constant_9567" [id=210, type=Constant]; +"211 Multiply_9802" [id=211, type=Constant]; +"212 Constant_9553" [id=212, type=Constant]; +"213 Multiply_9796" [id=213, type=Constant]; +"214 Constant_9539" [id=214, type=Constant]; +"215 Multiply_9790" [id=215, type=Constant]; +"216 Transpose_6370" [id=216, type=Constant]; +"217 Transpose_1721" [id=217, type=Constant]; +"218 Transpose_6362" [id=218, type=Constant]; +"219 Transpose_1712" [id=219, type=Constant]; +"220 
Constant_6356" [id=220, type=Constant]; +"221 Constant_9525" [id=221, type=Constant]; +"222 Multiply_9785" [id=222, type=Constant]; +"223 Constant_9511" [id=223, type=Constant]; +"224 Multiply_9779" [id=224, type=Constant]; +"225 Constant_9497" [id=225, type=Constant]; +"226 Multiply_9773" [id=226, type=Constant]; +"227 Transpose_6195" [id=227, type=Constant]; +"228 Transpose_1582" [id=228, type=Constant]; +"229 Transpose_6187" [id=229, type=Constant]; +"230 Transpose_1573" [id=230, type=Constant]; +"231 Constant_6181" [id=231, type=Constant]; +"232 Constant_9483" [id=232, type=Constant]; +"233 Multiply_9768" [id=233, type=Constant]; +"234 Constant_9469" [id=234, type=Constant]; +"235 Multiply_9762" [id=235, type=Constant]; +"236 Constant_9455" [id=236, type=Constant]; +"237 Multiply_9756" [id=237, type=Constant]; +"238 Transpose_6051" [id=238, type=Constant]; +"239 Transpose_1472" [id=239, type=Constant]; +"240 Transpose_6043" [id=240, type=Constant]; +"241 Transpose_1463" [id=241, type=Constant]; +"242 Constant_6037" [id=242, type=Constant]; +"243 Constant_9441" [id=243, type=Constant]; +"244 Multiply_9751" [id=244, type=Constant]; +"245 Constant_9427" [id=245, type=Constant]; +"246 Multiply_9745" [id=246, type=Constant]; +"247 Constant_9413" [id=247, type=Constant]; +"248 Multiply_9739" [id=248, type=Constant]; +"249 Transpose_5903" [id=249, type=Constant]; +"250 Transpose_1361" [id=250, type=Constant]; +"251 Transpose_5895" [id=251, type=Constant]; +"252 Transpose_1352" [id=252, type=Constant]; +"253 Constant_5889" [id=253, type=Constant]; +"254 Constant_9399" [id=254, type=Constant]; +"255 Multiply_9734" [id=255, type=Constant]; +"256 Constant_9385" [id=256, type=Constant]; +"257 Multiply_9728" [id=257, type=Constant]; +"258 Constant_9371" [id=258, type=Constant]; +"259 Multiply_9722" [id=259, type=Constant]; +"260 Transpose_5755" [id=260, type=Constant]; +"261 Transpose_1250" [id=261, type=Constant]; +"262 Transpose_5747" [id=262, type=Constant]; +"263 Transpose_1241" [id=263, type=Constant]; +"264 Constant_5741" [id=264, type=Constant]; +"265 Constant_9357" [id=265, type=Constant]; +"266 Multiply_9717" [id=266, type=Constant]; +"267 Constant_9343" [id=267, type=Constant]; +"268 Multiply_9711" [id=268, type=Constant]; +"269 Constant_9329" [id=269, type=Constant]; +"270 Multiply_9705" [id=270, type=Constant]; +"271 Transpose_5611" [id=271, type=Constant]; +"272 Transpose_1140" [id=272, type=Constant]; +"273 Transpose_5603" [id=273, type=Constant]; +"274 Transpose_1131" [id=274, type=Constant]; +"275 Constant_5597" [id=275, type=Constant]; +"276 Constant_9315" [id=276, type=Constant]; +"277 Multiply_9700" [id=277, type=Constant]; +"278 Constant_9301" [id=278, type=Constant]; +"279 Multiply_9694" [id=279, type=Constant]; +"280 Constant_9287" [id=280, type=Constant]; +"281 Multiply_9688" [id=281, type=Constant]; +"282 Constant_9273" [id=282, type=Constant]; +"283 Multiply_9683" [id=283, type=Constant]; +"284 Constant_9259" [id=284, type=Constant]; +"285 Multiply_9677" [id=285, type=Constant]; +"286 Constant_9245" [id=286, type=Constant]; +"287 Multiply_9671" [id=287, type=Constant]; +"288 Constant_9231" [id=288, type=Constant]; +"289 Multiply_9666" [id=289, type=Constant]; +"290 Constant_9217" [id=290, type=Constant]; +"291 Multiply_9660" [id=291, type=Constant]; +"292 Constant_9203" [id=292, type=Constant]; +"293 Multiply_9654" [id=293, type=Constant]; +"294 Transpose_5257" [id=294, type=Constant]; +"295 Transpose_809" [id=295, type=Constant]; +"296 Transpose_5249" [id=296, 
type=Constant]; +"297 Transpose_800" [id=297, type=Constant]; +"298 Constant_5243" [id=298, type=Constant]; +"299 Constant_9189" [id=299, type=Constant]; +"300 Multiply_9649" [id=300, type=Constant]; +"301 Constant_9175" [id=301, type=Constant]; +"302 Gather_10068" [id=302, type=Constant]; +"303 Unsqueeze_7776" [id=303, type=Constant]; +"304 Unsqueeze_7782" [id=304, type=Constant]; +"305 Constant_7779" [id=305, type=Constant]; +"0 input_1" -> "1 Transpose_7780" [label="[1, 224, 224, 3]", style=solid]; +"1 Transpose_7780" -> "2 Transpose_7774" [label="[1, 3, 224, 224]", style=solid]; +"2 Transpose_7774" -> "3 Transpose_710" [label="[1, 3, 224, 224]", style=solid]; +"3 Transpose_710" -> "4 Multiply_9167" [label="[1, 3, 224, 224]", style=solid]; +"4 Multiply_9167" -> "5 Transpose_5170" [label="[1, 16, 112, 112]", style=solid]; +"5 Transpose_5170" -> "6 Transpose_5188" [label="[1, 16, 112, 112]", style=solid]; +"6 Transpose_5188" -> "7 Multiply_9181" [label="[1, 16, 112, 112]", style=solid]; +"7 Multiply_9181" -> "8 Transpose_5239" [label="[1, 16, 56, 56]", style=solid]; +"8 Transpose_5239" -> "9 Transpose_5241" [label="[1, 16, 56, 56]", style=solid]; +"9 Transpose_5241" -> "10 Transpose_5245" [label="[1, 16, 56, 56]", style=solid]; +"9 Transpose_5241" -> "11 Transpose_5277" [label="[1, 16, 56, 56]", style=solid]; +"10 Transpose_5245" -> "12 Convolution_801" [label="[1, 16, 1, 1]", style=solid]; +"11 Transpose_5277" -> "13 Multiply_9195" [label="[1, 16, 56, 56]", style=solid]; +"12 Convolution_801" -> "14 Transpose_5251" [label="[1, 8, 1, 1]", style=solid]; +"13 Multiply_9195" -> "15 Transpose_5301" [label="[1, 16, 56, 56]", style=solid]; +"14 Transpose_5251" -> "16 Transpose_5253" [label="[1, 8, 1, 1]", style=solid]; +"15 Transpose_5301" -> "17 Multiply_9209" [label="[1, 16, 56, 56]", style=solid]; +"16 Transpose_5253" -> "18 Convolution_810" [label="[1, 8, 1, 1]", style=solid]; +"17 Multiply_9209" -> "19 Transpose_5325" [label="[1, 72, 56, 56]", style=solid]; +"18 Convolution_810" -> "20 Transpose_5259" [label="[1, 16, 1, 1]", style=solid]; +"19 Transpose_5325" -> "21 Transpose_5327" [label="[1, 72, 56, 56]", style=solid]; +"20 Transpose_5259" -> "22 Transpose_5273" [label="[1, 16, 1, 1]", style=solid]; +"21 Transpose_5327" -> "23 Multiply_9223" [label="[1, 72, 56, 56]", style=solid]; +"22 Transpose_5273" -> "11 Transpose_5277" [label="[1, 16, 1, 1]", style=solid]; +"23 Multiply_9223" -> "24 Transpose_5378" [label="[1, 72, 28, 28]", style=solid]; +"24 Transpose_5378" -> "25 Transpose_5380" [label="[1, 72, 28, 28]", style=solid]; +"25 Transpose_5380" -> "26 Multiply_9237" [label="[1, 72, 28, 28]", style=solid]; +"26 Multiply_9237" -> "27 Transpose_5404" [label="[1, 24, 28, 28]", style=solid]; +"27 Transpose_5404" -> "28 Multiply_9251" [label="[1, 24, 28, 28]", style=solid]; +"27 Transpose_5404" -> "29 Transpose_5484" [label="[1, 24, 28, 28]", style=solid]; +"28 Multiply_9251" -> "30 Transpose_5428" [label="[1, 88, 28, 28]", style=solid]; +"29 Transpose_5484" -> "31 Multiply_9293" [label="[1, 24, 28, 28]", style=solid]; +"30 Transpose_5428" -> "32 Transpose_5430" [label="[1, 88, 28, 28]", style=solid]; +"31 Multiply_9293" -> "33 Transpose_5508" [label="[1, 96, 28, 28]", style=solid]; +"32 Transpose_5430" -> "34 Multiply_9265" [label="[1, 88, 28, 28]", style=solid]; +"33 Transpose_5508" -> "35 Transpose_5526" [label="[1, 96, 28, 28]", style=solid]; +"34 Multiply_9265" -> "36 Transpose_5454" [label="[1, 88, 28, 28]", style=solid]; +"35 Transpose_5526" -> "37 Multiply_9307" [label="[1, 96, 28, 
28]", style=solid]; +"36 Transpose_5454" -> "38 Transpose_5456" [label="[1, 88, 28, 28]", style=solid]; +"37 Multiply_9307" -> "39 Transpose_5577" [label="[1, 96, 14, 14]", style=solid]; +"38 Transpose_5456" -> "40 Multiply_9279" [label="[1, 88, 28, 28]", style=solid]; +"39 Transpose_5577" -> "41 Transpose_5595" [label="[1, 96, 14, 14]", style=solid]; +"40 Multiply_9279" -> "42 Transpose_5480" [label="[1, 24, 28, 28]", style=solid]; +"41 Transpose_5595" -> "43 Transpose_5599" [label="[1, 96, 14, 14]", style=solid]; +"41 Transpose_5595" -> "44 Transpose_5631" [label="[1, 96, 14, 14]", style=solid]; +"42 Transpose_5480" -> "29 Transpose_5484" [label="[1, 24, 28, 28]", style=solid]; +"43 Transpose_5599" -> "45 Convolution_1132" [label="[1, 96, 1, 1]", style=solid]; +"44 Transpose_5631" -> "46 Multiply_9321" [label="[1, 96, 14, 14]", style=solid]; +"45 Convolution_1132" -> "47 Transpose_5605" [label="[1, 24, 1, 1]", style=solid]; +"46 Multiply_9321" -> "48 Transpose_5655" [label="[1, 40, 14, 14]", style=solid]; +"47 Transpose_5605" -> "49 Transpose_5607" [label="[1, 24, 1, 1]", style=solid]; +"48 Transpose_5655" -> "50 Multiply_9335" [label="[1, 40, 14, 14]", style=solid]; +"48 Transpose_5655" -> "51 Transpose_5803" [label="[1, 40, 14, 14]", style=solid]; +"49 Transpose_5607" -> "52 Convolution_1141" [label="[1, 24, 1, 1]", style=solid]; +"50 Multiply_9335" -> "53 Transpose_5679" [label="[1, 240, 14, 14]", style=solid]; +"51 Transpose_5803" -> "54 Multiply_9377" [label="[1, 40, 14, 14]", style=solid]; +"51 Transpose_5803" -> "55 Transpose_5951" [label="[1, 40, 14, 14]", style=solid]; +"52 Convolution_1141" -> "56 Transpose_5613" [label="[1, 96, 1, 1]", style=solid]; +"53 Transpose_5679" -> "57 Transpose_5697" [label="[1, 240, 14, 14]", style=solid]; +"54 Multiply_9377" -> "58 Transpose_5827" [label="[1, 240, 14, 14]", style=solid]; +"55 Transpose_5951" -> "59 Multiply_9419" [label="[1, 40, 14, 14]", style=solid]; +"56 Transpose_5613" -> "60 Transpose_5627" [label="[1, 96, 1, 1]", style=solid]; +"57 Transpose_5697" -> "61 Multiply_9349" [label="[1, 240, 14, 14]", style=solid]; +"58 Transpose_5827" -> "62 Transpose_5845" [label="[1, 240, 14, 14]", style=solid]; +"59 Multiply_9419" -> "63 Transpose_5975" [label="[1, 120, 14, 14]", style=solid]; +"60 Transpose_5627" -> "44 Transpose_5631" [label="[1, 96, 1, 1]", style=solid]; +"61 Multiply_9349" -> "64 Transpose_5721" [label="[1, 240, 14, 14]", style=solid]; +"62 Transpose_5845" -> "65 Multiply_9391" [label="[1, 240, 14, 14]", style=solid]; +"63 Transpose_5975" -> "66 Transpose_5993" [label="[1, 120, 14, 14]", style=solid]; +"64 Transpose_5721" -> "67 Transpose_5739" [label="[1, 240, 14, 14]", style=solid]; +"65 Multiply_9391" -> "68 Transpose_5869" [label="[1, 240, 14, 14]", style=solid]; +"66 Transpose_5993" -> "69 Multiply_9433" [label="[1, 120, 14, 14]", style=solid]; +"67 Transpose_5739" -> "70 Transpose_5743" [label="[1, 240, 14, 14]", style=solid]; +"67 Transpose_5739" -> "71 Transpose_5775" [label="[1, 240, 14, 14]", style=solid]; +"68 Transpose_5869" -> "72 Transpose_5887" [label="[1, 240, 14, 14]", style=solid]; +"69 Multiply_9433" -> "73 Transpose_6017" [label="[1, 120, 14, 14]", style=solid]; +"70 Transpose_5743" -> "74 Convolution_1242" [label="[1, 240, 1, 1]", style=solid]; +"71 Transpose_5775" -> "75 Multiply_9363" [label="[1, 240, 14, 14]", style=solid]; +"72 Transpose_5887" -> "76 Transpose_5891" [label="[1, 240, 14, 14]", style=solid]; +"72 Transpose_5887" -> "77 Transpose_5923" [label="[1, 240, 14, 14]", style=solid]; +"73 
Transpose_6017" -> "78 Transpose_6035" [label="[1, 120, 14, 14]", style=solid]; +"74 Convolution_1242" -> "79 Transpose_5749" [label="[1, 64, 1, 1]", style=solid]; +"75 Multiply_9363" -> "80 Transpose_5799" [label="[1, 40, 14, 14]", style=solid]; +"76 Transpose_5891" -> "81 Convolution_1353" [label="[1, 240, 1, 1]", style=solid]; +"77 Transpose_5923" -> "82 Multiply_9405" [label="[1, 240, 14, 14]", style=solid]; +"78 Transpose_6035" -> "83 Transpose_6039" [label="[1, 120, 14, 14]", style=solid]; +"78 Transpose_6035" -> "84 Transpose_6071" [label="[1, 120, 14, 14]", style=solid]; +"79 Transpose_5749" -> "85 Transpose_5751" [label="[1, 64, 1, 1]", style=solid]; +"80 Transpose_5799" -> "51 Transpose_5803" [label="[1, 40, 14, 14]", style=solid]; +"81 Convolution_1353" -> "86 Transpose_5897" [label="[1, 64, 1, 1]", style=solid]; +"82 Multiply_9405" -> "87 Transpose_5947" [label="[1, 40, 14, 14]", style=solid]; +"83 Transpose_6039" -> "88 Convolution_1464" [label="[1, 120, 1, 1]", style=solid]; +"84 Transpose_6071" -> "89 Multiply_9447" [label="[1, 120, 14, 14]", style=solid]; +"85 Transpose_5751" -> "90 Convolution_1251" [label="[1, 64, 1, 1]", style=solid]; +"86 Transpose_5897" -> "91 Transpose_5899" [label="[1, 64, 1, 1]", style=solid]; +"87 Transpose_5947" -> "55 Transpose_5951" [label="[1, 40, 14, 14]", style=solid]; +"88 Convolution_1464" -> "92 Transpose_6045" [label="[1, 32, 1, 1]", style=solid]; +"89 Multiply_9447" -> "93 Transpose_6095" [label="[1, 48, 14, 14]", style=solid]; +"90 Convolution_1251" -> "94 Transpose_5757" [label="[1, 240, 1, 1]", style=solid]; +"91 Transpose_5899" -> "95 Convolution_1362" [label="[1, 64, 1, 1]", style=solid]; +"92 Transpose_6045" -> "96 Transpose_6047" [label="[1, 32, 1, 1]", style=solid]; +"93 Transpose_6095" -> "97 Multiply_9461" [label="[1, 48, 14, 14]", style=solid]; +"93 Transpose_6095" -> "98 Transpose_6243" [label="[1, 48, 14, 14]", style=solid]; +"94 Transpose_5757" -> "99 Transpose_5771" [label="[1, 240, 1, 1]", style=solid]; +"95 Convolution_1362" -> "100 Transpose_5905" [label="[1, 240, 1, 1]", style=solid]; +"96 Transpose_6047" -> "101 Convolution_1473" [label="[1, 32, 1, 1]", style=solid]; +"97 Multiply_9461" -> "102 Transpose_6119" [label="[1, 144, 14, 14]", style=solid]; +"98 Transpose_6243" -> "103 Multiply_9503" [label="[1, 48, 14, 14]", style=solid]; +"99 Transpose_5771" -> "71 Transpose_5775" [label="[1, 240, 1, 1]", style=solid]; +"100 Transpose_5905" -> "104 Transpose_5919" [label="[1, 240, 1, 1]", style=solid]; +"101 Convolution_1473" -> "105 Transpose_6053" [label="[1, 120, 1, 1]", style=solid]; +"102 Transpose_6119" -> "106 Transpose_6137" [label="[1, 144, 14, 14]", style=solid]; +"103 Multiply_9503" -> "107 Transpose_6267" [label="[1, 288, 14, 14]", style=solid]; +"104 Transpose_5919" -> "77 Transpose_5923" [label="[1, 240, 1, 1]", style=solid]; +"105 Transpose_6053" -> "108 Transpose_6067" [label="[1, 120, 1, 1]", style=solid]; +"106 Transpose_6137" -> "109 Multiply_9475" [label="[1, 144, 14, 14]", style=solid]; +"107 Transpose_6267" -> "110 Transpose_6285" [label="[1, 288, 14, 14]", style=solid]; +"108 Transpose_6067" -> "84 Transpose_6071" [label="[1, 120, 1, 1]", style=solid]; +"109 Multiply_9475" -> "111 Transpose_6161" [label="[1, 144, 14, 14]", style=solid]; +"110 Transpose_6285" -> "112 Multiply_9517" [label="[1, 288, 14, 14]", style=solid]; +"111 Transpose_6161" -> "113 Transpose_6179" [label="[1, 144, 14, 14]", style=solid]; +"112 Multiply_9517" -> "114 Transpose_6336" [label="[1, 288, 7, 7]", style=solid]; +"113 
Transpose_6179" -> "115 Transpose_6183" [label="[1, 144, 14, 14]", style=solid]; +"113 Transpose_6179" -> "116 Transpose_6215" [label="[1, 144, 14, 14]", style=solid]; +"114 Transpose_6336" -> "117 Transpose_6354" [label="[1, 288, 7, 7]", style=solid]; +"115 Transpose_6183" -> "118 Convolution_1574" [label="[1, 144, 1, 1]", style=solid]; +"116 Transpose_6215" -> "119 Multiply_9489" [label="[1, 144, 14, 14]", style=solid]; +"117 Transpose_6354" -> "120 Transpose_6358" [label="[1, 288, 7, 7]", style=solid]; +"117 Transpose_6354" -> "121 Transpose_6390" [label="[1, 288, 7, 7]", style=solid]; +"118 Convolution_1574" -> "122 Transpose_6189" [label="[1, 40, 1, 1]", style=solid]; +"119 Multiply_9489" -> "123 Transpose_6239" [label="[1, 48, 14, 14]", style=solid]; +"120 Transpose_6358" -> "124 Convolution_1713" [label="[1, 288, 1, 1]", style=solid]; +"121 Transpose_6390" -> "125 Multiply_9531" [label="[1, 288, 7, 7]", style=solid]; +"122 Transpose_6189" -> "126 Transpose_6191" [label="[1, 40, 1, 1]", style=solid]; +"123 Transpose_6239" -> "98 Transpose_6243" [label="[1, 48, 14, 14]", style=solid]; +"124 Convolution_1713" -> "127 Transpose_6364" [label="[1, 72, 1, 1]", style=solid]; +"125 Multiply_9531" -> "128 Transpose_6414" [label="[1, 96, 7, 7]", style=solid]; +"126 Transpose_6191" -> "129 Convolution_1583" [label="[1, 40, 1, 1]", style=solid]; +"127 Transpose_6364" -> "130 Transpose_6366" [label="[1, 72, 1, 1]", style=solid]; +"128 Transpose_6414" -> "131 Multiply_9545" [label="[1, 96, 7, 7]", style=solid]; +"128 Transpose_6414" -> "132 Transpose_6562" [label="[1, 96, 7, 7]", style=solid]; +"129 Convolution_1583" -> "133 Transpose_6197" [label="[1, 144, 1, 1]", style=solid]; +"130 Transpose_6366" -> "134 Convolution_1722" [label="[1, 72, 1, 1]", style=solid]; +"131 Multiply_9545" -> "135 Transpose_6438" [label="[1, 576, 7, 7]", style=solid]; +"132 Transpose_6562" -> "136 Multiply_9587" [label="[1, 96, 7, 7]", style=solid]; +"132 Transpose_6562" -> "137 Transpose_6710" [label="[1, 96, 7, 7]", style=solid]; +"133 Transpose_6197" -> "138 Transpose_6211" [label="[1, 144, 1, 1]", style=solid]; +"134 Convolution_1722" -> "139 Transpose_6372" [label="[1, 288, 1, 1]", style=solid]; +"135 Transpose_6438" -> "140 Transpose_6456" [label="[1, 576, 7, 7]", style=solid]; +"136 Multiply_9587" -> "141 Transpose_6586" [label="[1, 576, 7, 7]", style=solid]; +"137 Transpose_6710" -> "142 Multiply_9629" [label="[1, 96, 7, 7]", style=solid]; +"138 Transpose_6211" -> "116 Transpose_6215" [label="[1, 144, 1, 1]", style=solid]; +"139 Transpose_6372" -> "143 Transpose_6386" [label="[1, 288, 1, 1]", style=solid]; +"140 Transpose_6456" -> "144 Multiply_9559" [label="[1, 576, 7, 7]", style=solid]; +"141 Transpose_6586" -> "145 Transpose_6604" [label="[1, 576, 7, 7]", style=solid]; +"142 Multiply_9629" -> "146 Transpose_6734" [label="[1, 576, 7, 7]", style=solid]; +"143 Transpose_6386" -> "121 Transpose_6390" [label="[1, 288, 1, 1]", style=solid]; +"144 Multiply_9559" -> "147 Transpose_6480" [label="[1, 576, 7, 7]", style=solid]; +"145 Transpose_6604" -> "148 Multiply_9601" [label="[1, 576, 7, 7]", style=solid]; +"146 Transpose_6734" -> "149 Transpose_6752" [label="[1, 576, 7, 7]", style=solid]; +"147 Transpose_6480" -> "150 Transpose_6498" [label="[1, 576, 7, 7]", style=solid]; +"148 Multiply_9601" -> "151 Transpose_6628" [label="[1, 576, 7, 7]", style=solid]; +"149 Transpose_6752" -> "152 Transpose_6756" [label="[1, 576, 7, 7]", style=solid]; +"150 Transpose_6498" -> "153 Transpose_6502" [label="[1, 576, 7, 7]", 
style=solid]; +"150 Transpose_6498" -> "154 Transpose_6534" [label="[1, 576, 7, 7]", style=solid]; +"151 Transpose_6628" -> "155 Transpose_6646" [label="[1, 576, 7, 7]", style=solid]; +"152 Transpose_6756" -> "156 Convolution_2013" [label="[1, 576, 1, 1]", style=solid]; +"153 Transpose_6502" -> "157 Convolution_1823" [label="[1, 576, 1, 1]", style=solid]; +"154 Transpose_6534" -> "158 Multiply_9573" [label="[1, 576, 7, 7]", style=solid]; +"155 Transpose_6646" -> "159 Transpose_6650" [label="[1, 576, 7, 7]", style=solid]; +"155 Transpose_6646" -> "160 Transpose_6682" [label="[1, 576, 7, 7]", style=solid]; +"156 Convolution_2013" -> "161 Transpose_6762" [label="[1, 1024, 1, 1]", style=solid]; +"157 Convolution_1823" -> "162 Transpose_6508" [label="[1, 144, 1, 1]", style=solid]; +"158 Multiply_9573" -> "163 Transpose_6558" [label="[1, 96, 7, 7]", style=solid]; +"159 Transpose_6650" -> "164 Convolution_1934" [label="[1, 576, 1, 1]", style=solid]; +"160 Transpose_6682" -> "165 Multiply_9615" [label="[1, 576, 7, 7]", style=solid]; +"161 Transpose_6762" -> "166 Transpose_6780" [label="[1, 1024, 1, 1]", style=solid]; +"162 Transpose_6508" -> "167 Transpose_6510" [label="[1, 144, 1, 1]", style=solid]; +"163 Transpose_6558" -> "132 Transpose_6562" [label="[1, 96, 7, 7]", style=solid]; +"164 Convolution_1934" -> "168 Transpose_6656" [label="[1, 144, 1, 1]", style=solid]; +"165 Multiply_9615" -> "169 Transpose_6706" [label="[1, 96, 7, 7]", style=solid]; +"166 Transpose_6780" -> "170 Convolution_2025" [label="[1, 1024, 1, 1]", style=solid]; +"167 Transpose_6510" -> "171 Convolution_1832" [label="[1, 144, 1, 1]", style=solid]; +"168 Transpose_6656" -> "172 Transpose_6658" [label="[1, 144, 1, 1]", style=solid]; +"169 Transpose_6706" -> "137 Transpose_6710" [label="[1, 96, 7, 7]", style=solid]; +"170 Convolution_2025" -> "173 Transpose_6786" [label="[1, 1000, 1, 1]", style=solid]; +"171 Convolution_1832" -> "174 Transpose_6516" [label="[1, 576, 1, 1]", style=solid]; +"172 Transpose_6658" -> "175 Convolution_1943" [label="[1, 144, 1, 1]", style=solid]; +"173 Transpose_6786" -> "176 MobilenetV3small/Logits/BiasAdd" [label="[1, 1000, 1, 1]", style=solid]; +"174 Transpose_6516" -> "177 Transpose_6530" [label="[1, 576, 1, 1]", style=solid]; +"175 Convolution_1943" -> "178 Transpose_6664" [label="[1, 576, 1, 1]", style=solid]; "176 MobilenetV3small/Logits/BiasAdd" -> "179 MobilenetV3small/flatten/Reshape" [label="[1, 1, 1, 1000]", style=solid]; -"177 Transpose_8306" -> "154 Transpose_8308" [label="[1, 576, 1, 1]", style=solid]; -"178 Transpose_10295" -> "180 Transpose_8454" [label="[1, 576, 1, 1]", style=solid]; +"177 Transpose_6530" -> "154 Transpose_6534" [label="[1, 576, 1, 1]", style=solid]; +"178 Transpose_6664" -> "180 Transpose_6678" [label="[1, 576, 1, 1]", style=solid]; "179 MobilenetV3small/flatten/Reshape" -> "181 MobilenetV3small/Predictions/Softmax" [label="[1, 1000]", style=solid]; -"180 Transpose_8454" -> "160 Transpose_8456" [label="[1, 576, 1, 1]", style=solid]; +"180 Transpose_6678" -> "160 Transpose_6682" [label="[1, 576, 1, 1]", style=solid]; "181 MobilenetV3small/Predictions/Softmax" -> "182 Predictions" [label="[1, 1000]", style=solid]; "183 MobilenetV3small/flatten/Const" -> "179 MobilenetV3small/flatten/Reshape" [label="[2]", style=dashed]; -"184 Transpose_10377" -> "176 MobilenetV3small/Logits/BiasAdd" [label="[1, 1, 1, 1000]", style=solid]; -"185 Constant_11480" -> "173 Transpose_10375" [label="[4]", style=dashed]; -"186 Transpose_3648" -> "170 Convolution_3649" [label="[1000, 1024, 1, 
1]", style=solid]; -"187 Transpose_8534" -> "161 Transpose_8536" [label="[1, 1024, 1, 1]", style=solid]; -"188 Transpose_3636" -> "156 Convolution_3637" [label="[1024, 576, 1, 1]", style=solid]; -"189 Constant_8528" -> "152 Transpose_8530" [label="[2]", style=dashed]; -"190 Constant_12230" -> "146 Transpose_8508" [label="[1, 576, 1, 1]", style=solid]; -"191 Multiply_12423" -> "142 Multiply_12222" [label="[576, 96, 1, 1]", style=solid]; -"192 Constant_12216" -> "169 Transpose_8482" [label="[1, 96, 1, 1]", style=solid]; -"193 Multiply_12417" -> "165 Multiply_12208" [label="[96, 576, 1, 1]", style=solid]; -"194 Transpose_8436" -> "178 Transpose_10295" [label="[1, 576, 1, 1]", style=solid]; -"195 Transpose_3566" -> "175 Convolution_3567" [label="[576, 144, 1, 1]", style=solid]; -"196 Transpose_8428" -> "168 Transpose_8430" [label="[1, 144, 1, 1]", style=solid]; -"197 Transpose_3557" -> "164 Convolution_3558" [label="[144, 576, 1, 1]", style=solid]; -"198 Constant_8422" -> "159 Transpose_8424" [label="[2]", style=dashed]; -"199 Constant_12202" -> "151 Transpose_8402" [label="[1, 576, 1, 1]", style=solid]; -"200 Multiply_12412" -> "148 Multiply_12194" [label="[576, 1, 1, 5, 5]", style=solid]; -"201 Constant_12188" -> "141 Transpose_8360" [label="[1, 576, 1, 1]", style=solid]; -"202 Multiply_12406" -> "136 Multiply_12180" [label="[576, 96, 1, 1]", style=solid]; -"203 Constant_12174" -> "163 Transpose_8334" [label="[1, 96, 1, 1]", style=solid]; -"204 Multiply_12400" -> "158 Multiply_12166" [label="[96, 576, 1, 1]", style=solid]; -"205 Transpose_8288" -> "174 Transpose_10195" [label="[1, 576, 1, 1]", style=solid]; -"206 Transpose_3455" -> "171 Convolution_3456" [label="[576, 144, 1, 1]", style=solid]; -"207 Transpose_8280" -> "162 Transpose_8282" [label="[1, 144, 1, 1]", style=solid]; -"208 Transpose_3446" -> "157 Convolution_3447" [label="[144, 576, 1, 1]", style=solid]; -"209 Constant_8274" -> "153 Transpose_8276" [label="[2]", style=dashed]; -"210 Constant_12160" -> "147 Transpose_8254" [label="[1, 576, 1, 1]", style=solid]; -"211 Multiply_12395" -> "144 Multiply_12152" [label="[576, 1, 1, 5, 5]", style=solid]; -"212 Constant_12146" -> "135 Transpose_8212" [label="[1, 576, 1, 1]", style=solid]; -"213 Multiply_12389" -> "131 Multiply_12138" [label="[576, 96, 1, 1]", style=solid]; -"214 Constant_12132" -> "128 Transpose_8188" [label="[1, 96, 1, 1]", style=solid]; -"215 Multiply_12383" -> "125 Multiply_12124" [label="[96, 288, 1, 1]", style=solid]; -"216 Transpose_8144" -> "139 Transpose_10127" [label="[1, 288, 1, 1]", style=solid]; -"217 Transpose_3345" -> "134 Convolution_3346" [label="[288, 72, 1, 1]", style=solid]; -"218 Transpose_8136" -> "127 Transpose_8138" [label="[1, 72, 1, 1]", style=solid]; -"219 Transpose_3336" -> "124 Convolution_3337" [label="[72, 288, 1, 1]", style=solid]; -"220 Constant_8130" -> "120 Transpose_8132" [label="[2]", style=dashed]; -"221 Constant_12118" -> "114 Transpose_8110" [label="[1, 288, 1, 1]", style=solid]; -"222 Multiply_12378" -> "112 Multiply_12110" [label="[288, 1, 1, 5, 5]", style=solid]; -"223 Constant_12104" -> "107 Transpose_8043" [label="[1, 288, 1, 1]", style=solid]; -"224 Multiply_12372" -> "103 Multiply_12096" [label="[288, 48, 1, 1]", style=solid]; -"225 Constant_12090" -> "123 Transpose_8017" [label="[1, 48, 1, 1]", style=solid]; -"226 Multiply_12366" -> "119 Multiply_12082" [label="[48, 144, 1, 1]", style=solid]; -"227 Transpose_7971" -> "133 Transpose_10027" [label="[1, 144, 1, 1]", style=solid]; -"228 Transpose_3208" -> "129 Convolution_3209" 
[label="[144, 40, 1, 1]", style=solid]; -"229 Transpose_7963" -> "122 Transpose_7965" [label="[1, 40, 1, 1]", style=solid]; -"230 Transpose_3199" -> "118 Convolution_3200" [label="[40, 144, 1, 1]", style=solid]; -"231 Constant_7957" -> "115 Transpose_7959" [label="[2]", style=dashed]; -"232 Constant_12076" -> "111 Transpose_7937" [label="[1, 144, 1, 1]", style=solid]; -"233 Multiply_12361" -> "109 Multiply_12068" [label="[144, 1, 1, 5, 5]", style=solid]; -"234 Constant_12062" -> "102 Transpose_7895" [label="[1, 144, 1, 1]", style=solid]; -"235 Multiply_12355" -> "97 Multiply_12054" [label="[144, 48, 1, 1]", style=solid]; -"236 Constant_12048" -> "93 Transpose_7871" [label="[1, 48, 1, 1]", style=solid]; -"237 Multiply_12349" -> "89 Multiply_12040" [label="[48, 120, 1, 1]", style=solid]; -"238 Transpose_7827" -> "105 Transpose_9959" [label="[1, 120, 1, 1]", style=solid]; -"239 Transpose_3098" -> "101 Convolution_3099" [label="[120, 32, 1, 1]", style=solid]; -"240 Transpose_7819" -> "92 Transpose_7821" [label="[1, 32, 1, 1]", style=solid]; -"241 Transpose_3089" -> "88 Convolution_3090" [label="[32, 120, 1, 1]", style=solid]; -"242 Constant_7813" -> "83 Transpose_7815" [label="[2]", style=dashed]; -"243 Constant_12034" -> "73 Transpose_7793" [label="[1, 120, 1, 1]", style=solid]; -"244 Multiply_12344" -> "69 Multiply_12026" [label="[120, 1, 1, 5, 5]", style=solid]; -"245 Constant_12020" -> "63 Transpose_7751" [label="[1, 120, 1, 1]", style=solid]; -"246 Multiply_12338" -> "59 Multiply_12012" [label="[120, 40, 1, 1]", style=solid]; -"247 Constant_12006" -> "87 Transpose_7725" [label="[1, 40, 1, 1]", style=solid]; -"248 Multiply_12332" -> "82 Multiply_11998" [label="[40, 240, 1, 1]", style=solid]; -"249 Transpose_7679" -> "100 Transpose_9859" [label="[1, 240, 1, 1]", style=solid]; -"250 Transpose_2987" -> "95 Convolution_2988" [label="[240, 64, 1, 1]", style=solid]; -"251 Transpose_7671" -> "86 Transpose_7673" [label="[1, 64, 1, 1]", style=solid]; -"252 Transpose_2978" -> "81 Convolution_2979" [label="[64, 240, 1, 1]", style=solid]; -"253 Constant_7665" -> "76 Transpose_7667" [label="[2]", style=dashed]; -"254 Constant_11992" -> "68 Transpose_7645" [label="[1, 240, 1, 1]", style=solid]; -"255 Multiply_12327" -> "65 Multiply_11984" [label="[240, 1, 1, 5, 5]", style=solid]; -"256 Constant_11978" -> "58 Transpose_7603" [label="[1, 240, 1, 1]", style=solid]; -"257 Multiply_12321" -> "54 Multiply_11970" [label="[240, 40, 1, 1]", style=solid]; -"258 Constant_11964" -> "80 Transpose_7577" [label="[1, 40, 1, 1]", style=solid]; -"259 Multiply_12315" -> "75 Multiply_11956" [label="[40, 240, 1, 1]", style=solid]; -"260 Transpose_7531" -> "94 Transpose_9759" [label="[1, 240, 1, 1]", style=solid]; -"261 Transpose_2876" -> "90 Convolution_2877" [label="[240, 64, 1, 1]", style=solid]; -"262 Transpose_7523" -> "79 Transpose_7525" [label="[1, 64, 1, 1]", style=solid]; -"263 Transpose_2867" -> "74 Convolution_2868" [label="[64, 240, 1, 1]", style=solid]; -"264 Constant_7517" -> "70 Transpose_7519" [label="[2]", style=dashed]; -"265 Constant_11950" -> "64 Transpose_7497" [label="[1, 240, 1, 1]", style=solid]; -"266 Multiply_12310" -> "61 Multiply_11942" [label="[240, 1, 1, 5, 5]", style=solid]; -"267 Constant_11936" -> "53 Transpose_7455" [label="[1, 240, 1, 1]", style=solid]; -"268 Multiply_12304" -> "50 Multiply_11928" [label="[240, 40, 1, 1]", style=solid]; -"269 Constant_11922" -> "48 Transpose_7431" [label="[1, 40, 1, 1]", style=solid]; -"270 Multiply_12298" -> "46 Multiply_11914" [label="[40, 96, 1, 1]", 
style=solid]; -"271 Transpose_7387" -> "56 Transpose_9691" [label="[1, 96, 1, 1]", style=solid]; -"272 Transpose_2766" -> "52 Convolution_2767" [label="[96, 24, 1, 1]", style=solid]; -"273 Transpose_7379" -> "47 Transpose_7381" [label="[1, 24, 1, 1]", style=solid]; -"274 Transpose_2757" -> "45 Convolution_2758" [label="[24, 96, 1, 1]", style=solid]; -"275 Constant_7373" -> "43 Transpose_7375" [label="[2]", style=dashed]; -"276 Constant_11908" -> "39 Transpose_7353" [label="[1, 96, 1, 1]", style=solid]; -"277 Multiply_12293" -> "37 Multiply_11900" [label="[96, 1, 1, 5, 5]", style=solid]; -"278 Constant_11894" -> "33 Transpose_7286" [label="[1, 96, 1, 1]", style=solid]; -"279 Multiply_12287" -> "31 Multiply_11886" [label="[96, 24, 1, 1]", style=solid]; -"280 Constant_11880" -> "42 Transpose_7260" [label="[1, 24, 1, 1]", style=solid]; -"281 Multiply_12281" -> "40 Multiply_11872" [label="[24, 88, 1, 1]", style=solid]; -"282 Constant_11866" -> "36 Transpose_7232" [label="[1, 88, 1, 1]", style=solid]; -"283 Multiply_12276" -> "34 Multiply_11858" [label="[88, 1, 1, 3, 3]", style=solid]; -"284 Constant_11852" -> "30 Transpose_7206" [label="[1, 88, 1, 1]", style=solid]; -"285 Multiply_12270" -> "28 Multiply_11844" [label="[88, 24, 1, 1]", style=solid]; -"286 Constant_11838" -> "27 Transpose_7182" [label="[1, 24, 1, 1]", style=solid]; -"287 Multiply_12264" -> "26 Multiply_11830" [label="[24, 72, 1, 1]", style=solid]; -"288 Constant_11824" -> "24 Transpose_7156" [label="[1, 72, 1, 1]", style=solid]; -"289 Multiply_12259" -> "23 Multiply_11816" [label="[72, 1, 1, 3, 3]", style=solid]; -"290 Constant_11810" -> "19 Transpose_7105" [label="[1, 72, 1, 1]", style=solid]; -"291 Multiply_12253" -> "17 Multiply_11802" [label="[72, 16, 1, 1]", style=solid]; -"292 Constant_11796" -> "15 Transpose_7081" [label="[1, 16, 1, 1]", style=solid]; -"293 Multiply_12247" -> "13 Multiply_11788" [label="[16, 16, 1, 1]", style=solid]; -"294 Transpose_7037" -> "20 Transpose_9591" [label="[1, 16, 1, 1]", style=solid]; -"295 Transpose_2439" -> "18 Convolution_2440" [label="[16, 8, 1, 1]", style=solid]; -"296 Transpose_7029" -> "14 Transpose_7031" [label="[1, 8, 1, 1]", style=solid]; -"297 Transpose_2430" -> "12 Convolution_2431" [label="[8, 16, 1, 1]", style=solid]; -"298 Constant_7023" -> "10 Transpose_7025" [label="[2]", style=dashed]; -"299 Constant_11782" -> "8 Transpose_7019" [label="[1, 16, 1, 1]", style=solid]; -"300 Multiply_12242" -> "7 Multiply_11774" [label="[16, 1, 1, 3, 3]", style=solid]; -"301 Constant_11768" -> "5 Transpose_6952" [label="[1, 16, 1, 1]", style=solid]; -"302 Gather_12661" -> "4 Multiply_11760" [label="[16, 3, 3, 3]", style=solid]; -"303 Unsqueeze_9541" -> "3 Transpose_2342" [label="[1, 1, 1, 1]", style=solid]; -"304 Unsqueeze_9547" -> "2 Transpose_9539" [label="[1, 1, 1, 1]", style=solid]; -"305 Constant_9544" -> "1 Transpose_9545" [label="[4]", style=dashed]; +"184 Constant_8887" -> "176 MobilenetV3small/Logits/BiasAdd" [label="[4]", style=dashed]; +"185 Transpose_6784" -> "173 Transpose_6786" [label="[1, 1000, 1, 1]", style=solid]; +"186 Transpose_2024" -> "170 Convolution_2025" [label="[1000, 1024, 1, 1]", style=solid]; +"187 Transpose_6760" -> "161 Transpose_6762" [label="[1, 1024, 1, 1]", style=solid]; +"188 Transpose_2012" -> "156 Convolution_2013" [label="[1024, 576, 1, 1]", style=solid]; +"189 Constant_6754" -> "152 Transpose_6756" [label="[2]", style=dashed]; +"190 Constant_9637" -> "146 Transpose_6734" [label="[1, 576, 1, 1]", style=solid]; +"191 Multiply_9830" -> "142 Multiply_9629" 
[label="[576, 96, 1, 1]", style=solid]; +"192 Constant_9623" -> "169 Transpose_6706" [label="[1, 96, 1, 1]", style=solid]; +"193 Multiply_9824" -> "165 Multiply_9615" [label="[96, 576, 1, 1]", style=solid]; +"194 Transpose_6662" -> "178 Transpose_6664" [label="[1, 576, 1, 1]", style=solid]; +"195 Transpose_1942" -> "175 Convolution_1943" [label="[576, 144, 1, 1]", style=solid]; +"196 Transpose_6654" -> "168 Transpose_6656" [label="[1, 144, 1, 1]", style=solid]; +"197 Transpose_1933" -> "164 Convolution_1934" [label="[144, 576, 1, 1]", style=solid]; +"198 Constant_6648" -> "159 Transpose_6650" [label="[2]", style=dashed]; +"199 Constant_9609" -> "151 Transpose_6628" [label="[1, 576, 1, 1]", style=solid]; +"200 Multiply_9819" -> "148 Multiply_9601" [label="[576, 1, 1, 5, 5]", style=solid]; +"201 Constant_9595" -> "141 Transpose_6586" [label="[1, 576, 1, 1]", style=solid]; +"202 Multiply_9813" -> "136 Multiply_9587" [label="[576, 96, 1, 1]", style=solid]; +"203 Constant_9581" -> "163 Transpose_6558" [label="[1, 96, 1, 1]", style=solid]; +"204 Multiply_9807" -> "158 Multiply_9573" [label="[96, 576, 1, 1]", style=solid]; +"205 Transpose_6514" -> "174 Transpose_6516" [label="[1, 576, 1, 1]", style=solid]; +"206 Transpose_1831" -> "171 Convolution_1832" [label="[576, 144, 1, 1]", style=solid]; +"207 Transpose_6506" -> "162 Transpose_6508" [label="[1, 144, 1, 1]", style=solid]; +"208 Transpose_1822" -> "157 Convolution_1823" [label="[144, 576, 1, 1]", style=solid]; +"209 Constant_6500" -> "153 Transpose_6502" [label="[2]", style=dashed]; +"210 Constant_9567" -> "147 Transpose_6480" [label="[1, 576, 1, 1]", style=solid]; +"211 Multiply_9802" -> "144 Multiply_9559" [label="[576, 1, 1, 5, 5]", style=solid]; +"212 Constant_9553" -> "135 Transpose_6438" [label="[1, 576, 1, 1]", style=solid]; +"213 Multiply_9796" -> "131 Multiply_9545" [label="[576, 96, 1, 1]", style=solid]; +"214 Constant_9539" -> "128 Transpose_6414" [label="[1, 96, 1, 1]", style=solid]; +"215 Multiply_9790" -> "125 Multiply_9531" [label="[96, 288, 1, 1]", style=solid]; +"216 Transpose_6370" -> "139 Transpose_6372" [label="[1, 288, 1, 1]", style=solid]; +"217 Transpose_1721" -> "134 Convolution_1722" [label="[288, 72, 1, 1]", style=solid]; +"218 Transpose_6362" -> "127 Transpose_6364" [label="[1, 72, 1, 1]", style=solid]; +"219 Transpose_1712" -> "124 Convolution_1713" [label="[72, 288, 1, 1]", style=solid]; +"220 Constant_6356" -> "120 Transpose_6358" [label="[2]", style=dashed]; +"221 Constant_9525" -> "114 Transpose_6336" [label="[1, 288, 1, 1]", style=solid]; +"222 Multiply_9785" -> "112 Multiply_9517" [label="[288, 1, 1, 5, 5]", style=solid]; +"223 Constant_9511" -> "107 Transpose_6267" [label="[1, 288, 1, 1]", style=solid]; +"224 Multiply_9779" -> "103 Multiply_9503" [label="[288, 48, 1, 1]", style=solid]; +"225 Constant_9497" -> "123 Transpose_6239" [label="[1, 48, 1, 1]", style=solid]; +"226 Multiply_9773" -> "119 Multiply_9489" [label="[48, 144, 1, 1]", style=solid]; +"227 Transpose_6195" -> "133 Transpose_6197" [label="[1, 144, 1, 1]", style=solid]; +"228 Transpose_1582" -> "129 Convolution_1583" [label="[144, 40, 1, 1]", style=solid]; +"229 Transpose_6187" -> "122 Transpose_6189" [label="[1, 40, 1, 1]", style=solid]; +"230 Transpose_1573" -> "118 Convolution_1574" [label="[40, 144, 1, 1]", style=solid]; +"231 Constant_6181" -> "115 Transpose_6183" [label="[2]", style=dashed]; +"232 Constant_9483" -> "111 Transpose_6161" [label="[1, 144, 1, 1]", style=solid]; +"233 Multiply_9768" -> "109 Multiply_9475" [label="[144, 1, 1, 5, 
5]", style=solid]; +"234 Constant_9469" -> "102 Transpose_6119" [label="[1, 144, 1, 1]", style=solid]; +"235 Multiply_9762" -> "97 Multiply_9461" [label="[144, 48, 1, 1]", style=solid]; +"236 Constant_9455" -> "93 Transpose_6095" [label="[1, 48, 1, 1]", style=solid]; +"237 Multiply_9756" -> "89 Multiply_9447" [label="[48, 120, 1, 1]", style=solid]; +"238 Transpose_6051" -> "105 Transpose_6053" [label="[1, 120, 1, 1]", style=solid]; +"239 Transpose_1472" -> "101 Convolution_1473" [label="[120, 32, 1, 1]", style=solid]; +"240 Transpose_6043" -> "92 Transpose_6045" [label="[1, 32, 1, 1]", style=solid]; +"241 Transpose_1463" -> "88 Convolution_1464" [label="[32, 120, 1, 1]", style=solid]; +"242 Constant_6037" -> "83 Transpose_6039" [label="[2]", style=dashed]; +"243 Constant_9441" -> "73 Transpose_6017" [label="[1, 120, 1, 1]", style=solid]; +"244 Multiply_9751" -> "69 Multiply_9433" [label="[120, 1, 1, 5, 5]", style=solid]; +"245 Constant_9427" -> "63 Transpose_5975" [label="[1, 120, 1, 1]", style=solid]; +"246 Multiply_9745" -> "59 Multiply_9419" [label="[120, 40, 1, 1]", style=solid]; +"247 Constant_9413" -> "87 Transpose_5947" [label="[1, 40, 1, 1]", style=solid]; +"248 Multiply_9739" -> "82 Multiply_9405" [label="[40, 240, 1, 1]", style=solid]; +"249 Transpose_5903" -> "100 Transpose_5905" [label="[1, 240, 1, 1]", style=solid]; +"250 Transpose_1361" -> "95 Convolution_1362" [label="[240, 64, 1, 1]", style=solid]; +"251 Transpose_5895" -> "86 Transpose_5897" [label="[1, 64, 1, 1]", style=solid]; +"252 Transpose_1352" -> "81 Convolution_1353" [label="[64, 240, 1, 1]", style=solid]; +"253 Constant_5889" -> "76 Transpose_5891" [label="[2]", style=dashed]; +"254 Constant_9399" -> "68 Transpose_5869" [label="[1, 240, 1, 1]", style=solid]; +"255 Multiply_9734" -> "65 Multiply_9391" [label="[240, 1, 1, 5, 5]", style=solid]; +"256 Constant_9385" -> "58 Transpose_5827" [label="[1, 240, 1, 1]", style=solid]; +"257 Multiply_9728" -> "54 Multiply_9377" [label="[240, 40, 1, 1]", style=solid]; +"258 Constant_9371" -> "80 Transpose_5799" [label="[1, 40, 1, 1]", style=solid]; +"259 Multiply_9722" -> "75 Multiply_9363" [label="[40, 240, 1, 1]", style=solid]; +"260 Transpose_5755" -> "94 Transpose_5757" [label="[1, 240, 1, 1]", style=solid]; +"261 Transpose_1250" -> "90 Convolution_1251" [label="[240, 64, 1, 1]", style=solid]; +"262 Transpose_5747" -> "79 Transpose_5749" [label="[1, 64, 1, 1]", style=solid]; +"263 Transpose_1241" -> "74 Convolution_1242" [label="[64, 240, 1, 1]", style=solid]; +"264 Constant_5741" -> "70 Transpose_5743" [label="[2]", style=dashed]; +"265 Constant_9357" -> "64 Transpose_5721" [label="[1, 240, 1, 1]", style=solid]; +"266 Multiply_9717" -> "61 Multiply_9349" [label="[240, 1, 1, 5, 5]", style=solid]; +"267 Constant_9343" -> "53 Transpose_5679" [label="[1, 240, 1, 1]", style=solid]; +"268 Multiply_9711" -> "50 Multiply_9335" [label="[240, 40, 1, 1]", style=solid]; +"269 Constant_9329" -> "48 Transpose_5655" [label="[1, 40, 1, 1]", style=solid]; +"270 Multiply_9705" -> "46 Multiply_9321" [label="[40, 96, 1, 1]", style=solid]; +"271 Transpose_5611" -> "56 Transpose_5613" [label="[1, 96, 1, 1]", style=solid]; +"272 Transpose_1140" -> "52 Convolution_1141" [label="[96, 24, 1, 1]", style=solid]; +"273 Transpose_5603" -> "47 Transpose_5605" [label="[1, 24, 1, 1]", style=solid]; +"274 Transpose_1131" -> "45 Convolution_1132" [label="[24, 96, 1, 1]", style=solid]; +"275 Constant_5597" -> "43 Transpose_5599" [label="[2]", style=dashed]; +"276 Constant_9315" -> "39 Transpose_5577" 
[label="[1, 96, 1, 1]", style=solid]; +"277 Multiply_9700" -> "37 Multiply_9307" [label="[96, 1, 1, 5, 5]", style=solid]; +"278 Constant_9301" -> "33 Transpose_5508" [label="[1, 96, 1, 1]", style=solid]; +"279 Multiply_9694" -> "31 Multiply_9293" [label="[96, 24, 1, 1]", style=solid]; +"280 Constant_9287" -> "42 Transpose_5480" [label="[1, 24, 1, 1]", style=solid]; +"281 Multiply_9688" -> "40 Multiply_9279" [label="[24, 88, 1, 1]", style=solid]; +"282 Constant_9273" -> "36 Transpose_5454" [label="[1, 88, 1, 1]", style=solid]; +"283 Multiply_9683" -> "34 Multiply_9265" [label="[88, 1, 1, 3, 3]", style=solid]; +"284 Constant_9259" -> "30 Transpose_5428" [label="[1, 88, 1, 1]", style=solid]; +"285 Multiply_9677" -> "28 Multiply_9251" [label="[88, 24, 1, 1]", style=solid]; +"286 Constant_9245" -> "27 Transpose_5404" [label="[1, 24, 1, 1]", style=solid]; +"287 Multiply_9671" -> "26 Multiply_9237" [label="[24, 72, 1, 1]", style=solid]; +"288 Constant_9231" -> "24 Transpose_5378" [label="[1, 72, 1, 1]", style=solid]; +"289 Multiply_9666" -> "23 Multiply_9223" [label="[72, 1, 1, 3, 3]", style=solid]; +"290 Constant_9217" -> "19 Transpose_5325" [label="[1, 72, 1, 1]", style=solid]; +"291 Multiply_9660" -> "17 Multiply_9209" [label="[72, 16, 1, 1]", style=solid]; +"292 Constant_9203" -> "15 Transpose_5301" [label="[1, 16, 1, 1]", style=solid]; +"293 Multiply_9654" -> "13 Multiply_9195" [label="[16, 16, 1, 1]", style=solid]; +"294 Transpose_5257" -> "20 Transpose_5259" [label="[1, 16, 1, 1]", style=solid]; +"295 Transpose_809" -> "18 Convolution_810" [label="[16, 8, 1, 1]", style=solid]; +"296 Transpose_5249" -> "14 Transpose_5251" [label="[1, 8, 1, 1]", style=solid]; +"297 Transpose_800" -> "12 Convolution_801" [label="[8, 16, 1, 1]", style=solid]; +"298 Constant_5243" -> "10 Transpose_5245" [label="[2]", style=dashed]; +"299 Constant_9189" -> "8 Transpose_5239" [label="[1, 16, 1, 1]", style=solid]; +"300 Multiply_9649" -> "7 Multiply_9181" [label="[16, 1, 1, 3, 3]", style=solid]; +"301 Constant_9175" -> "5 Transpose_5170" [label="[1, 16, 1, 1]", style=solid]; +"302 Gather_10068" -> "4 Multiply_9167" [label="[16, 3, 3, 3]", style=solid]; +"303 Unsqueeze_7776" -> "3 Transpose_710" [label="[1, 1, 1, 1]", style=solid]; +"304 Unsqueeze_7782" -> "2 Transpose_7774" [label="[1, 1, 1, 1]", style=solid]; +"305 Constant_7779" -> "1 Transpose_7780" [label="[4]", style=dashed]; } diff --git a/tests/openvino/native/data/reference_graphs/original_nncf_graph/yolo-v4-tiny-tf.dot b/tests/openvino/native/data/reference_graphs/original_nncf_graph/yolo-v4-tiny-tf.dot index 8a5baba5923..26c6d038178 100644 --- a/tests/openvino/native/data/reference_graphs/original_nncf_graph/yolo-v4-tiny-tf.dot +++ b/tests/openvino/native/data/reference_graphs/original_nncf_graph/yolo-v4-tiny-tf.dot @@ -1,329 +1,329 @@ strict digraph { "0 image_input" [id=0, type=Parameter]; -"1 Divide_2373" [id=1, type=Transpose]; -"2 Multiply_3580" [id=2, type=Convolution]; -"3 Transpose_1182" [id=3, type=Add]; -"4 Transpose_1188" [id=4, type=PRelu]; -"5 Multiply_3594" [id=5, type=Convolution]; -"6 Transpose_1237" [id=6, type=Add]; -"7 Transpose_1243" [id=7, type=PRelu]; -"8 Multiply_3608" [id=8, type=Convolution]; -"9 Transpose_1267" [id=9, type=Add]; -"10 Transpose_1273" [id=10, type=PRelu]; -"11 Transpose_1376" [id=11, type=Concat]; +"1 Divide_2366" [id=1, type=Transpose]; +"2 Multiply_3699" [id=2, type=Convolution]; +"3 Transpose_1171" [id=3, type=Add]; +"4 Transpose_1177" [id=4, type=PRelu]; +"5 Multiply_3713" [id=5, type=Convolution]; +"6 
Transpose_1228" [id=6, type=Add]; +"7 Transpose_1234" [id=7, type=PRelu]; +"8 Multiply_3727" [id=8, type=Convolution]; +"9 Transpose_1258" [id=9, type=Add]; +"10 Transpose_1264" [id=10, type=PRelu]; +"11 Transpose_1367" [id=11, type=Concat]; "12 group_route_3/split" [id=12, type=Split]; -"13 MaxPool_303" [id=13, type=MaxPool]; -"14 Multiply_3622" [id=14, type=Convolution]; -"15 Multiply_3664" [id=15, type=Convolution]; -"16 Transpose_1302" [id=16, type=Add]; -"17 Transpose_1400" [id=17, type=Add]; -"18 Transpose_1308" [id=18, type=PRelu]; -"19 Transpose_1406" [id=19, type=PRelu]; -"20 Multiply_3636" [id=20, type=Convolution]; -"21 Transpose_1342" [id=21, type=Concat]; -"22 Transpose_1509" [id=22, type=Concat]; +"13 MaxPool_307" [id=13, type=MaxPool]; +"14 Multiply_3741" [id=14, type=Convolution]; +"15 Multiply_3783" [id=15, type=Convolution]; +"16 Transpose_1293" [id=16, type=Add]; +"17 Transpose_1391" [id=17, type=Add]; +"18 Transpose_1299" [id=18, type=PRelu]; +"19 Transpose_1397" [id=19, type=PRelu]; +"20 Multiply_3755" [id=20, type=Convolution]; +"21 Transpose_1333" [id=21, type=Concat]; +"22 Transpose_1500" [id=22, type=Concat]; "23 group_route_11/split" [id=23, type=Split]; -"24 Transpose_1332" [id=24, type=Add]; -"25 Multiply_3650" [id=25, type=Convolution]; -"26 MaxPool_429" [id=26, type=MaxPool]; -"27 Multiply_3678" [id=27, type=Convolution]; -"28 Transpose_1338" [id=28, type=PRelu]; -"29 Transpose_1366" [id=29, type=Add]; -"30 Multiply_3720" [id=30, type=Convolution]; -"31 Transpose_1435" [id=31, type=Add]; -"32 Transpose_1372" [id=32, type=PRelu]; -"33 Transpose_1533" [id=33, type=Add]; -"34 Transpose_1441" [id=34, type=PRelu]; -"35 Transpose_1539" [id=35, type=PRelu]; -"36 Multiply_3692" [id=36, type=Convolution]; -"37 Transpose_1475" [id=37, type=Concat]; -"38 Transpose_1642" [id=38, type=Concat]; +"24 Transpose_1323" [id=24, type=Add]; +"25 Multiply_3769" [id=25, type=Convolution]; +"26 MaxPool_433" [id=26, type=MaxPool]; +"27 Multiply_3797" [id=27, type=Convolution]; +"28 Transpose_1329" [id=28, type=PRelu]; +"29 Transpose_1357" [id=29, type=Add]; +"30 Multiply_3839" [id=30, type=Convolution]; +"31 Transpose_1426" [id=31, type=Add]; +"32 Transpose_1363" [id=32, type=PRelu]; +"33 Transpose_1524" [id=33, type=Add]; +"34 Transpose_1432" [id=34, type=PRelu]; +"35 Transpose_1530" [id=35, type=PRelu]; +"36 Multiply_3811" [id=36, type=Convolution]; +"37 Transpose_1466" [id=37, type=Concat]; +"38 Transpose_1633" [id=38, type=Concat]; "39 group_route_19/split" [id=39, type=Split]; -"40 Transpose_1465" [id=40, type=Add]; -"41 Multiply_3706" [id=41, type=Convolution]; -"42 MaxPool_575" [id=42, type=MaxPool]; -"43 Multiply_3734" [id=43, type=Convolution]; -"44 Transpose_1471" [id=44, type=PRelu]; -"45 Transpose_1499" [id=45, type=Add]; -"46 Multiply_3776" [id=46, type=Convolution]; -"47 Transpose_1568" [id=47, type=Add]; -"48 Transpose_1505" [id=48, type=PRelu]; -"49 Transpose_1666" [id=49, type=Add]; -"50 Transpose_1574" [id=50, type=PRelu]; -"51 Transpose_1672" [id=51, type=PRelu]; -"52 Multiply_3748" [id=52, type=Convolution]; -"53 Transpose_1608" [id=53, type=Concat]; -"54 Multiply_3790" [id=54, type=Convolution]; -"55 Transpose_1598" [id=55, type=Add]; -"56 Multiply_3762" [id=56, type=Convolution]; -"57 Transpose_1696" [id=57, type=Add]; -"58 Transpose_1604" [id=58, type=PRelu]; -"59 Transpose_1632" [id=59, type=Add]; -"60 Transpose_1702" [id=60, type=PRelu]; -"61 Transpose_1638" [id=61, type=PRelu]; -"62 Multiply_3804" [id=62, type=Convolution]; -"63 Multiply_3832" [id=63, 
type=Convolution]; -"64 Transpose_1744" [id=64, type=Concat]; -"65 Transpose_1726" [id=65, type=Add]; -"66 Transpose_1804" [id=66, type=Add]; -"67 Multiply_3818" [id=67, type=Convolution]; -"68 Transpose_1732" [id=68, type=PRelu]; -"69 Transpose_1810" [id=69, type=PRelu]; -"70 Transpose_1768" [id=70, type=Add]; -"71 Transpose_1740" [id=71, type=Interpolate]; -"72 leaky_re_lu_17/LeakyRelu" [id=72, type=Transpose]; -"73 Convolution_754" [id=73, type=Convolution]; -"74 Transpose_1774" [id=74, type=PRelu]; -"75 ShapeOf_665" [id=75, type=ShapeOf]; -"76 Transpose_1816" [id=76, type=Add]; -"77 Convolution_711" [id=77, type=Convolution]; -"78 Slice_670" [id=78, type=StridedSlice]; -"79 conv2d_17/BiasAdd" [id=79, type=Transpose]; -"80 Transpose_1780" [id=80, type=Add]; -"81 Convert_671" [id=81, type=Convert]; -"82 conv2d_17/BiasAdd^0" [id=82, label="82 conv2d_17/BiasAdd:0", type=Result]; -"83 conv2d_20/BiasAdd" [id=83, type=Transpose]; -"84 Divide_673" [id=84, type=Divide]; +"40 Transpose_1456" [id=40, type=Add]; +"41 Multiply_3825" [id=41, type=Convolution]; +"42 MaxPool_579" [id=42, type=MaxPool]; +"43 Multiply_3853" [id=43, type=Convolution]; +"44 Transpose_1462" [id=44, type=PRelu]; +"45 Transpose_1490" [id=45, type=Add]; +"46 Multiply_3895" [id=46, type=Convolution]; +"47 Transpose_1559" [id=47, type=Add]; +"48 Transpose_1496" [id=48, type=PRelu]; +"49 Transpose_1657" [id=49, type=Add]; +"50 Transpose_1565" [id=50, type=PRelu]; +"51 Transpose_1663" [id=51, type=PRelu]; +"52 Multiply_3867" [id=52, type=Convolution]; +"53 Transpose_1599" [id=53, type=Concat]; +"54 Multiply_3909" [id=54, type=Convolution]; +"55 Transpose_1589" [id=55, type=Add]; +"56 Multiply_3881" [id=56, type=Convolution]; +"57 Transpose_1687" [id=57, type=Add]; +"58 Transpose_1595" [id=58, type=PRelu]; +"59 Transpose_1623" [id=59, type=Add]; +"60 Transpose_1693" [id=60, type=PRelu]; +"61 Transpose_1629" [id=61, type=PRelu]; +"62 Multiply_3923" [id=62, type=Convolution]; +"63 Multiply_3951" [id=63, type=Convolution]; +"64 Transpose_1727" [id=64, type=Concat]; +"65 Transpose_1717" [id=65, type=Add]; +"66 Transpose_1787" [id=66, type=Add]; +"67 Multiply_3937" [id=67, type=Convolution]; +"68 Transpose_1723" [id=68, type=PRelu]; +"69 Transpose_1793" [id=69, type=PRelu]; +"70 Transpose_1751" [id=70, type=Add]; +"71 leaky_re_lu_17/LeakyRelu" [id=71, type=Transpose]; +"72 Convolution_749" [id=72, type=Convolution]; +"73 Transpose_1757" [id=73, type=PRelu]; +"74 up_sampling2d/Shape" [id=74, type=ShapeOf]; +"75 up_sampling2d/resize/ResizeNearestNeighbor" [id=75, type=Interpolate]; +"76 Transpose_1799" [id=76, type=Add]; +"77 Convolution_706" [id=77, type=Convolution]; +"78 up_sampling2d/strided_slice" [id=78, type=StridedSlice]; +"79 Transpose_1725" [id=79, type=Transpose]; +"80 conv2d_17/BiasAdd" [id=80, type=Transpose]; +"81 Transpose_1763" [id=81, type=Add]; +"82 up_sampling2d/mul" [id=82, type=Multiply]; +"83 conv2d_17/BiasAdd^0" [id=83, label="83 conv2d_17/BiasAdd:0", type=Result]; +"84 conv2d_20/BiasAdd" [id=84, type=Transpose]; "85 conv2d_20/BiasAdd^0" [id=85, label="85 conv2d_20/BiasAdd:0", type=Result]; -"86 Constant_1779" [id=86, type=Constant]; -"87 Transpose_1778" [id=87, type=Constant]; -"88 Transpose_710" [id=88, type=Constant]; -"89 Transpose_1772" [id=89, type=Constant]; -"90 Constant_3826" [id=90, type=Constant]; -"91 Multiply_3951" [id=91, type=Constant]; -"92 Transpose_1636" [id=92, type=Constant]; -"93 Constant_3770" [id=93, type=Constant]; -"94 Multiply_3927" [id=94, type=Constant]; -"95 Transpose_1572" [id=95, 
type=Constant]; -"96 Constant_3742" [id=96, type=Constant]; -"97 Multiply_3915" [id=97, type=Constant]; -"98 Constant_1540" [id=98, type=Constant]; -"99 Transpose_1537" [id=99, type=Constant]; -"100 Constant_3728" [id=100, type=Constant]; -"101 Multiply_3909" [id=101, type=Constant]; -"102 Transpose_1503" [id=102, type=Constant]; -"103 Constant_3714" [id=103, type=Constant]; -"104 Multiply_3903" [id=104, type=Constant]; -"105 Transpose_1439" [id=105, type=Constant]; -"106 Constant_3686" [id=106, type=Constant]; -"107 Multiply_3891" [id=107, type=Constant]; -"108 Constant_1407" [id=108, type=Constant]; -"109 Transpose_1404" [id=109, type=Constant]; -"110 Constant_3672" [id=110, type=Constant]; -"111 Multiply_3885" [id=111, type=Constant]; -"112 Transpose_1370" [id=112, type=Constant]; -"113 Constant_3658" [id=113, type=Constant]; -"114 Multiply_3879" [id=114, type=Constant]; -"115 Transpose_1306" [id=115, type=Constant]; -"116 Constant_3630" [id=116, type=Constant]; -"117 Multiply_3867" [id=117, type=Constant]; -"118 Constant_1274" [id=118, type=Constant]; -"119 Transpose_1271" [id=119, type=Constant]; -"120 Constant_3616" [id=120, type=Constant]; -"121 Multiply_3861" [id=121, type=Constant]; -"122 Transpose_1241" [id=122, type=Constant]; -"123 Constant_3602" [id=123, type=Constant]; -"124 Multiply_3855" [id=124, type=Constant]; -"125 Transpose_1186" [id=125, type=Constant]; -"126 Constant_3588" [id=126, type=Constant]; -"127 Gather_4127" [id=127, type=Constant]; -"128 Constant_2343" [id=128, type=Constant]; -"129 Transpose_1336" [id=129, type=Constant]; -"130 Constant_3644" [id=130, type=Constant]; -"131 Multiply_3873" [id=131, type=Constant]; -"132 Transpose_1469" [id=132, type=Constant]; -"133 Constant_3700" [id=133, type=Constant]; -"134 Multiply_3897" [id=134, type=Constant]; -"135 Transpose_1602" [id=135, type=Constant]; -"136 Constant_3756" [id=136, type=Constant]; -"137 Multiply_3921" [id=137, type=Constant]; -"138 Gather_1735" [id=138, type=Constant]; -"139 Constant_667" [id=139, type=Constant]; -"140 Constant_668" [id=140, type=Constant]; -"141 Constant_669" [id=141, type=Constant]; -"142 Constant_1731" [id=142, type=Constant]; -"143 Transpose_1730" [id=143, type=Constant]; -"144 Constant_3812" [id=144, type=Constant]; -"145 Multiply_3945" [id=145, type=Constant]; -"146 Transpose_1700" [id=146, type=Constant]; -"147 Constant_3798" [id=147, type=Constant]; -"148 Multiply_3939" [id=148, type=Constant]; -"149 Transpose_1670" [id=149, type=Constant]; -"150 Constant_3784" [id=150, type=Constant]; -"151 Multiply_3933" [id=151, type=Constant]; -"152 Convert_672" [id=152, type=Constant]; -"153 up_sampling2d/mul" [id=153, type=Constant]; -"154 Constant_1815" [id=154, type=Constant]; -"155 Transpose_1814" [id=155, type=Constant]; -"156 Transpose_753" [id=156, type=Constant]; -"157 Transpose_1808" [id=157, type=Constant]; -"158 Constant_3840" [id=158, type=Constant]; -"159 Multiply_3957" [id=159, type=Constant]; -"0 image_input" -> "1 Divide_2373" [label="[1, 416, 416, 3]", style=solid]; -"1 Divide_2373" -> "2 Multiply_3580" [label="[1, 3, 416, 416]", style=solid]; -"2 Multiply_3580" -> "3 Transpose_1182" [label="[1, 32, 208, 208]", style=solid]; -"3 Transpose_1182" -> "4 Transpose_1188" [label="[1, 32, 208, 208]", style=solid]; -"4 Transpose_1188" -> "5 Multiply_3594" [label="[1, 32, 208, 208]", style=solid]; -"5 Multiply_3594" -> "6 Transpose_1237" [label="[1, 64, 104, 104]", style=solid]; -"6 Transpose_1237" -> "7 Transpose_1243" [label="[1, 64, 104, 104]", style=solid]; -"7 
Transpose_1243" -> "8 Multiply_3608" [label="[1, 64, 104, 104]", style=solid]; -"8 Multiply_3608" -> "9 Transpose_1267" [label="[1, 64, 104, 104]", style=solid]; -"9 Transpose_1267" -> "10 Transpose_1273" [label="[1, 64, 104, 104]", style=solid]; -"10 Transpose_1273" -> "11 Transpose_1376" [label="[1, 64, 104, 104]", style=solid]; -"10 Transpose_1273" -> "12 group_route_3/split" [label="[1, 64, 104, 104]", style=solid]; -"11 Transpose_1376" -> "13 MaxPool_303" [label="[1, 128, 104, 104]", style=solid]; -"12 group_route_3/split" -> "14 Multiply_3622" [label="[1, 32, 104, 104]", style=solid]; -"13 MaxPool_303" -> "15 Multiply_3664" [label="[1, 128, 52, 52]", style=solid]; -"14 Multiply_3622" -> "16 Transpose_1302" [label="[1, 32, 104, 104]", style=solid]; -"15 Multiply_3664" -> "17 Transpose_1400" [label="[1, 128, 52, 52]", style=solid]; -"16 Transpose_1302" -> "18 Transpose_1308" [label="[1, 32, 104, 104]", style=solid]; -"17 Transpose_1400" -> "19 Transpose_1406" [label="[1, 128, 52, 52]", style=solid]; -"18 Transpose_1308" -> "20 Multiply_3636" [label="[1, 32, 104, 104]", style=solid]; -"18 Transpose_1308" -> "21 Transpose_1342" [label="[1, 32, 104, 104]", style=solid]; -"19 Transpose_1406" -> "22 Transpose_1509" [label="[1, 128, 52, 52]", style=solid]; -"19 Transpose_1406" -> "23 group_route_11/split" [label="[1, 128, 52, 52]", style=solid]; -"20 Multiply_3636" -> "24 Transpose_1332" [label="[1, 32, 104, 104]", style=solid]; -"21 Transpose_1342" -> "25 Multiply_3650" [label="[1, 64, 104, 104]", style=solid]; -"22 Transpose_1509" -> "26 MaxPool_429" [label="[1, 256, 52, 52]", style=solid]; -"23 group_route_11/split" -> "27 Multiply_3678" [label="[1, 64, 52, 52]", style=solid]; -"24 Transpose_1332" -> "28 Transpose_1338" [label="[1, 32, 104, 104]", style=solid]; -"25 Multiply_3650" -> "29 Transpose_1366" [label="[1, 64, 104, 104]", style=solid]; -"26 MaxPool_429" -> "30 Multiply_3720" [label="[1, 256, 26, 26]", style=solid]; -"27 Multiply_3678" -> "31 Transpose_1435" [label="[1, 64, 52, 52]", style=solid]; -"28 Transpose_1338" -> "21 Transpose_1342" [label="[1, 32, 104, 104]", style=solid]; -"29 Transpose_1366" -> "32 Transpose_1372" [label="[1, 64, 104, 104]", style=solid]; -"30 Multiply_3720" -> "33 Transpose_1533" [label="[1, 256, 26, 26]", style=solid]; -"31 Transpose_1435" -> "34 Transpose_1441" [label="[1, 64, 52, 52]", style=solid]; -"32 Transpose_1372" -> "11 Transpose_1376" [label="[1, 64, 104, 104]", style=solid]; -"33 Transpose_1533" -> "35 Transpose_1539" [label="[1, 256, 26, 26]", style=solid]; -"34 Transpose_1441" -> "36 Multiply_3692" [label="[1, 64, 52, 52]", style=solid]; -"34 Transpose_1441" -> "37 Transpose_1475" [label="[1, 64, 52, 52]", style=solid]; -"35 Transpose_1539" -> "38 Transpose_1642" [label="[1, 256, 26, 26]", style=solid]; -"35 Transpose_1539" -> "39 group_route_19/split" [label="[1, 256, 26, 26]", style=solid]; -"36 Multiply_3692" -> "40 Transpose_1465" [label="[1, 64, 52, 52]", style=solid]; -"37 Transpose_1475" -> "41 Multiply_3706" [label="[1, 128, 52, 52]", style=solid]; -"38 Transpose_1642" -> "42 MaxPool_575" [label="[1, 512, 26, 26]", style=solid]; -"39 group_route_19/split" -> "43 Multiply_3734" [label="[1, 128, 26, 26]", style=solid]; -"40 Transpose_1465" -> "44 Transpose_1471" [label="[1, 64, 52, 52]", style=solid]; -"41 Multiply_3706" -> "45 Transpose_1499" [label="[1, 128, 52, 52]", style=solid]; -"42 MaxPool_575" -> "46 Multiply_3776" [label="[1, 512, 13, 13]", style=solid]; -"43 Multiply_3734" -> "47 Transpose_1568" [label="[1, 128, 26, 26]", 
style=solid]; -"44 Transpose_1471" -> "37 Transpose_1475" [label="[1, 64, 52, 52]", style=solid]; -"45 Transpose_1499" -> "48 Transpose_1505" [label="[1, 128, 52, 52]", style=solid]; -"46 Multiply_3776" -> "49 Transpose_1666" [label="[1, 512, 13, 13]", style=solid]; -"47 Transpose_1568" -> "50 Transpose_1574" [label="[1, 128, 26, 26]", style=solid]; -"48 Transpose_1505" -> "22 Transpose_1509" [label="[1, 128, 52, 52]", style=solid]; -"49 Transpose_1666" -> "51 Transpose_1672" [label="[1, 512, 13, 13]", style=solid]; -"50 Transpose_1574" -> "52 Multiply_3748" [label="[1, 128, 26, 26]", style=solid]; -"50 Transpose_1574" -> "53 Transpose_1608" [label="[1, 128, 26, 26]", style=solid]; -"51 Transpose_1672" -> "54 Multiply_3790" [label="[1, 512, 13, 13]", style=solid]; -"52 Multiply_3748" -> "55 Transpose_1598" [label="[1, 128, 26, 26]", style=solid]; -"53 Transpose_1608" -> "56 Multiply_3762" [label="[1, 256, 26, 26]", style=solid]; -"54 Multiply_3790" -> "57 Transpose_1696" [label="[1, 256, 13, 13]", style=solid]; -"55 Transpose_1598" -> "58 Transpose_1604" [label="[1, 128, 26, 26]", style=solid]; -"56 Multiply_3762" -> "59 Transpose_1632" [label="[1, 256, 26, 26]", style=solid]; -"57 Transpose_1696" -> "60 Transpose_1702" [label="[1, 256, 13, 13]", style=solid]; -"58 Transpose_1604" -> "53 Transpose_1608" [label="[1, 128, 26, 26]", style=solid]; -"59 Transpose_1632" -> "61 Transpose_1638" [label="[1, 256, 26, 26]", style=solid]; -"60 Transpose_1702" -> "62 Multiply_3804" [label="[1, 256, 13, 13]", style=solid]; -"60 Transpose_1702" -> "63 Multiply_3832" [label="[1, 256, 13, 13]", style=solid]; -"61 Transpose_1638" -> "38 Transpose_1642" [label="[1, 256, 26, 26]", style=solid]; -"61 Transpose_1638" -> "64 Transpose_1744" [label="[1, 256, 26, 26]", style=solid]; -"62 Multiply_3804" -> "65 Transpose_1726" [label="[1, 128, 13, 13]", style=solid]; -"63 Multiply_3832" -> "66 Transpose_1804" [label="[1, 512, 13, 13]", style=solid]; -"64 Transpose_1744" -> "67 Multiply_3818" [label="[1, 384, 26, 26]", style=solid]; -"65 Transpose_1726" -> "68 Transpose_1732" [label="[1, 128, 13, 13]", style=solid]; -"66 Transpose_1804" -> "69 Transpose_1810" [label="[1, 512, 13, 13]", style=solid]; -"67 Multiply_3818" -> "70 Transpose_1768" [label="[1, 256, 26, 26]", style=solid]; -"68 Transpose_1732" -> "71 Transpose_1740" [label="[1, 128, 13, 13]", style=solid]; -"68 Transpose_1732" -> "72 leaky_re_lu_17/LeakyRelu" [label="[1, 128, 13, 13]", style=solid]; -"69 Transpose_1810" -> "73 Convolution_754" [label="[1, 512, 13, 13]", style=solid]; -"70 Transpose_1768" -> "74 Transpose_1774" [label="[1, 256, 26, 26]", style=solid]; -"71 Transpose_1740" -> "64 Transpose_1744" [label="[1, 128, 26, 26]", style=solid]; -"72 leaky_re_lu_17/LeakyRelu" -> "75 ShapeOf_665" [label="[1, 13, 13, 128]", style=solid]; -"73 Convolution_754" -> "76 Transpose_1816" [label="[1, 255, 13, 13]", style=solid]; -"74 Transpose_1774" -> "77 Convolution_711" [label="[1, 256, 26, 26]", style=solid]; -"75 ShapeOf_665" -> "78 Slice_670" [label="[4]", style=dashed]; -"76 Transpose_1816" -> "79 conv2d_17/BiasAdd" [label="[1, 255, 13, 13]", style=solid]; -"77 Convolution_711" -> "80 Transpose_1780" [label="[1, 255, 26, 26]", style=solid]; -"78 Slice_670" -> "81 Convert_671" [label="[2]", style=dashed]; -"79 conv2d_17/BiasAdd" -> "82 conv2d_17/BiasAdd^0" [label="[1, 13, 13, 255]", style=solid]; -"80 Transpose_1780" -> "83 conv2d_20/BiasAdd" [label="[1, 255, 26, 26]", style=solid]; -"81 Convert_671" -> "84 Divide_673" [label="[2]", style=solid]; -"83 
conv2d_20/BiasAdd" -> "85 conv2d_20/BiasAdd^0" [label="[1, 26, 26, 255]", style=solid]; -"84 Divide_673" -> "71 Transpose_1740" [label="[2]", style=solid]; -"86 Constant_1779" -> "83 conv2d_20/BiasAdd" [label="[4]", style=dashed]; -"87 Transpose_1778" -> "80 Transpose_1780" [label="[1, 255, 1, 1]", style=solid]; -"88 Transpose_710" -> "77 Convolution_711" [label="[255, 256, 1, 1]", style=solid]; -"89 Transpose_1772" -> "74 Transpose_1774" [label="[1, 1, 1, 1]", style=solid]; -"90 Constant_3826" -> "70 Transpose_1768" [label="[1, 256, 1, 1]", style=solid]; -"91 Multiply_3951" -> "67 Multiply_3818" [label="[256, 384, 3, 3]", style=solid]; -"92 Transpose_1636" -> "61 Transpose_1638" [label="[1, 1, 1, 1]", style=solid]; -"93 Constant_3770" -> "59 Transpose_1632" [label="[1, 256, 1, 1]", style=solid]; -"94 Multiply_3927" -> "56 Multiply_3762" [label="[256, 256, 1, 1]", style=solid]; -"95 Transpose_1572" -> "50 Transpose_1574" [label="[1, 1, 1, 1]", style=solid]; -"96 Constant_3742" -> "47 Transpose_1568" [label="[1, 128, 1, 1]", style=solid]; -"97 Multiply_3915" -> "43 Multiply_3734" [label="[128, 128, 3, 3]", style=solid]; -"98 Constant_1540" -> "39 group_route_19/split" [label="[]", style=dashed]; -"99 Transpose_1537" -> "35 Transpose_1539" [label="[1, 1, 1, 1]", style=solid]; -"100 Constant_3728" -> "33 Transpose_1533" [label="[1, 256, 1, 1]", style=solid]; -"101 Multiply_3909" -> "30 Multiply_3720" [label="[256, 256, 3, 3]", style=solid]; -"102 Transpose_1503" -> "48 Transpose_1505" [label="[1, 1, 1, 1]", style=solid]; -"103 Constant_3714" -> "45 Transpose_1499" [label="[1, 128, 1, 1]", style=solid]; -"104 Multiply_3903" -> "41 Multiply_3706" [label="[128, 128, 1, 1]", style=solid]; -"105 Transpose_1439" -> "34 Transpose_1441" [label="[1, 1, 1, 1]", style=solid]; -"106 Constant_3686" -> "31 Transpose_1435" [label="[1, 64, 1, 1]", style=solid]; -"107 Multiply_3891" -> "27 Multiply_3678" [label="[64, 64, 3, 3]", style=solid]; -"108 Constant_1407" -> "23 group_route_11/split" [label="[]", style=dashed]; -"109 Transpose_1404" -> "19 Transpose_1406" [label="[1, 1, 1, 1]", style=solid]; -"110 Constant_3672" -> "17 Transpose_1400" [label="[1, 128, 1, 1]", style=solid]; -"111 Multiply_3885" -> "15 Multiply_3664" [label="[128, 128, 3, 3]", style=solid]; -"112 Transpose_1370" -> "32 Transpose_1372" [label="[1, 1, 1, 1]", style=solid]; -"113 Constant_3658" -> "29 Transpose_1366" [label="[1, 64, 1, 1]", style=solid]; -"114 Multiply_3879" -> "25 Multiply_3650" [label="[64, 64, 1, 1]", style=solid]; -"115 Transpose_1306" -> "18 Transpose_1308" [label="[1, 1, 1, 1]", style=solid]; -"116 Constant_3630" -> "16 Transpose_1302" [label="[1, 32, 1, 1]", style=solid]; -"117 Multiply_3867" -> "14 Multiply_3622" [label="[32, 32, 3, 3]", style=solid]; -"118 Constant_1274" -> "12 group_route_3/split" [label="[]", style=dashed]; -"119 Transpose_1271" -> "10 Transpose_1273" [label="[1, 1, 1, 1]", style=solid]; -"120 Constant_3616" -> "9 Transpose_1267" [label="[1, 64, 1, 1]", style=solid]; -"121 Multiply_3861" -> "8 Multiply_3608" [label="[64, 64, 3, 3]", style=solid]; -"122 Transpose_1241" -> "7 Transpose_1243" [label="[1, 1, 1, 1]", style=solid]; -"123 Constant_3602" -> "6 Transpose_1237" [label="[1, 64, 1, 1]", style=solid]; -"124 Multiply_3855" -> "5 Multiply_3594" [label="[64, 32, 3, 3]", style=solid]; -"125 Transpose_1186" -> "4 Transpose_1188" [label="[1, 1, 1, 1]", style=solid]; -"126 Constant_3588" -> "3 Transpose_1182" [label="[1, 32, 1, 1]", style=solid]; -"127 Gather_4127" -> "2 Multiply_3580" [label="[32, 
3, 3, 3]", style=solid]; -"128 Constant_2343" -> "1 Divide_2373" [label="[4]", style=dashed]; -"129 Transpose_1336" -> "28 Transpose_1338" [label="[1, 1, 1, 1]", style=solid]; -"130 Constant_3644" -> "24 Transpose_1332" [label="[1, 32, 1, 1]", style=solid]; -"131 Multiply_3873" -> "20 Multiply_3636" [label="[32, 32, 3, 3]", style=solid]; -"132 Transpose_1469" -> "44 Transpose_1471" [label="[1, 1, 1, 1]", style=solid]; -"133 Constant_3700" -> "40 Transpose_1465" [label="[1, 64, 1, 1]", style=solid]; -"134 Multiply_3897" -> "36 Multiply_3692" [label="[64, 64, 3, 3]", style=solid]; -"135 Transpose_1602" -> "58 Transpose_1604" [label="[1, 1, 1, 1]", style=solid]; -"136 Constant_3756" -> "55 Transpose_1598" [label="[1, 128, 1, 1]", style=solid]; -"137 Multiply_3921" -> "52 Multiply_3748" [label="[128, 128, 3, 3]", style=solid]; -"138 Gather_1735" -> "71 Transpose_1740" [label="[2]", style=dashed]; -"139 Constant_667" -> "78 Slice_670" [label="[1]", style=dashed]; -"140 Constant_668" -> "78 Slice_670" [label="[1]", style=dashed]; -"141 Constant_669" -> "78 Slice_670" [label="[1]", style=dashed]; -"142 Constant_1731" -> "72 leaky_re_lu_17/LeakyRelu" [label="[4]", style=dashed]; -"143 Transpose_1730" -> "68 Transpose_1732" [label="[1, 1, 1, 1]", style=solid]; -"144 Constant_3812" -> "65 Transpose_1726" [label="[1, 128, 1, 1]", style=solid]; -"145 Multiply_3945" -> "62 Multiply_3804" [label="[128, 256, 1, 1]", style=solid]; -"146 Transpose_1700" -> "60 Transpose_1702" [label="[1, 1, 1, 1]", style=solid]; -"147 Constant_3798" -> "57 Transpose_1696" [label="[1, 256, 1, 1]", style=solid]; -"148 Multiply_3939" -> "54 Multiply_3790" [label="[256, 512, 1, 1]", style=solid]; -"149 Transpose_1670" -> "51 Transpose_1672" [label="[1, 1, 1, 1]", style=solid]; -"150 Constant_3784" -> "49 Transpose_1666" [label="[1, 512, 1, 1]", style=solid]; -"151 Multiply_3933" -> "46 Multiply_3776" [label="[512, 512, 3, 3]", style=solid]; -"152 Convert_672" -> "84 Divide_673" [label="[2]", style=solid]; -"153 up_sampling2d/mul" -> "71 Transpose_1740" [label="[2]", style=dashed]; -"154 Constant_1815" -> "79 conv2d_17/BiasAdd" [label="[4]", style=dashed]; -"155 Transpose_1814" -> "76 Transpose_1816" [label="[1, 255, 1, 1]", style=solid]; -"156 Transpose_753" -> "73 Convolution_754" [label="[255, 512, 1, 1]", style=solid]; -"157 Transpose_1808" -> "69 Transpose_1810" [label="[1, 1, 1, 1]", style=solid]; -"158 Constant_3840" -> "66 Transpose_1804" [label="[1, 512, 1, 1]", style=solid]; -"159 Multiply_3957" -> "63 Multiply_3832" [label="[512, 256, 3, 3]", style=solid]; +"86 Constant_1762" [id=86, type=Constant]; +"87 Transpose_1761" [id=87, type=Constant]; +"88 Transpose_705" [id=88, type=Constant]; +"89 Transpose_1755" [id=89, type=Constant]; +"90 Constant_3945" [id=90, type=Constant]; +"91 Multiply_4070" [id=91, type=Constant]; +"92 Transpose_1627" [id=92, type=Constant]; +"93 Constant_3889" [id=93, type=Constant]; +"94 Multiply_4046" [id=94, type=Constant]; +"95 Transpose_1563" [id=95, type=Constant]; +"96 Constant_3861" [id=96, type=Constant]; +"97 Multiply_4034" [id=97, type=Constant]; +"98 Constant_1531" [id=98, type=Constant]; +"99 Transpose_1528" [id=99, type=Constant]; +"100 Constant_3847" [id=100, type=Constant]; +"101 Multiply_4028" [id=101, type=Constant]; +"102 Transpose_1494" [id=102, type=Constant]; +"103 Constant_3833" [id=103, type=Constant]; +"104 Multiply_4022" [id=104, type=Constant]; +"105 Transpose_1430" [id=105, type=Constant]; +"106 Constant_3805" [id=106, type=Constant]; +"107 Multiply_4010" [id=107, 
type=Constant]; +"108 Constant_1398" [id=108, type=Constant]; +"109 Transpose_1395" [id=109, type=Constant]; +"110 Constant_3791" [id=110, type=Constant]; +"111 Multiply_4004" [id=111, type=Constant]; +"112 Transpose_1361" [id=112, type=Constant]; +"113 Constant_3777" [id=113, type=Constant]; +"114 Multiply_3998" [id=114, type=Constant]; +"115 Transpose_1297" [id=115, type=Constant]; +"116 Constant_3749" [id=116, type=Constant]; +"117 Multiply_3986" [id=117, type=Constant]; +"118 Constant_1265" [id=118, type=Constant]; +"119 Transpose_1262" [id=119, type=Constant]; +"120 Constant_3735" [id=120, type=Constant]; +"121 Multiply_3980" [id=121, type=Constant]; +"122 Transpose_1232" [id=122, type=Constant]; +"123 Constant_3721" [id=123, type=Constant]; +"124 Multiply_3974" [id=124, type=Constant]; +"125 Transpose_1175" [id=125, type=Constant]; +"126 Constant_3707" [id=126, type=Constant]; +"127 Gather_4242" [id=127, type=Constant]; +"128 Constant_2326" [id=128, type=Constant]; +"129 Transpose_1327" [id=129, type=Constant]; +"130 Constant_3763" [id=130, type=Constant]; +"131 Multiply_3992" [id=131, type=Constant]; +"132 Transpose_1460" [id=132, type=Constant]; +"133 Constant_3819" [id=133, type=Constant]; +"134 Multiply_4016" [id=134, type=Constant]; +"135 Transpose_1593" [id=135, type=Constant]; +"136 Constant_3875" [id=136, type=Constant]; +"137 Multiply_4040" [id=137, type=Constant]; +"138 Constant_1724" [id=138, type=Constant]; +"139 Constant_669" [id=139, type=Constant]; +"140 up_sampling2d/Const" [id=140, type=Constant]; +"141 up_sampling2d/strided_slice/stack_2" [id=141, type=Constant]; +"142 up_sampling2d/strided_slice/stack_1" [id=142, type=Constant]; +"143 up_sampling2d/strided_slice/stack" [id=143, type=Constant]; +"144 Constant_1722" [id=144, type=Constant]; +"145 Transpose_1721" [id=145, type=Constant]; +"146 Constant_3931" [id=146, type=Constant]; +"147 Multiply_4064" [id=147, type=Constant]; +"148 Transpose_1691" [id=148, type=Constant]; +"149 Constant_3917" [id=149, type=Constant]; +"150 Multiply_4058" [id=150, type=Constant]; +"151 Transpose_1661" [id=151, type=Constant]; +"152 Constant_3903" [id=152, type=Constant]; +"153 Multiply_4052" [id=153, type=Constant]; +"154 Constant_1798" [id=154, type=Constant]; +"155 Transpose_1797" [id=155, type=Constant]; +"156 Transpose_748" [id=156, type=Constant]; +"157 Transpose_1791" [id=157, type=Constant]; +"158 Constant_3959" [id=158, type=Constant]; +"159 Multiply_4076" [id=159, type=Constant]; +"0 image_input" -> "1 Divide_2366" [label="[1, 416, 416, 3]", style=solid]; +"1 Divide_2366" -> "2 Multiply_3699" [label="[1, 3, 416, 416]", style=solid]; +"2 Multiply_3699" -> "3 Transpose_1171" [label="[1, 32, 208, 208]", style=solid]; +"3 Transpose_1171" -> "4 Transpose_1177" [label="[1, 32, 208, 208]", style=solid]; +"4 Transpose_1177" -> "5 Multiply_3713" [label="[1, 32, 208, 208]", style=solid]; +"5 Multiply_3713" -> "6 Transpose_1228" [label="[1, 64, 104, 104]", style=solid]; +"6 Transpose_1228" -> "7 Transpose_1234" [label="[1, 64, 104, 104]", style=solid]; +"7 Transpose_1234" -> "8 Multiply_3727" [label="[1, 64, 104, 104]", style=solid]; +"8 Multiply_3727" -> "9 Transpose_1258" [label="[1, 64, 104, 104]", style=solid]; +"9 Transpose_1258" -> "10 Transpose_1264" [label="[1, 64, 104, 104]", style=solid]; +"10 Transpose_1264" -> "11 Transpose_1367" [label="[1, 64, 104, 104]", style=solid]; +"10 Transpose_1264" -> "12 group_route_3/split" [label="[1, 64, 104, 104]", style=solid]; +"11 Transpose_1367" -> "13 MaxPool_307" [label="[1, 128, 104, 
104]", style=solid]; +"12 group_route_3/split" -> "14 Multiply_3741" [label="[1, 32, 104, 104]", style=solid]; +"13 MaxPool_307" -> "15 Multiply_3783" [label="[1, 128, 52, 52]", style=solid]; +"14 Multiply_3741" -> "16 Transpose_1293" [label="[1, 32, 104, 104]", style=solid]; +"15 Multiply_3783" -> "17 Transpose_1391" [label="[1, 128, 52, 52]", style=solid]; +"16 Transpose_1293" -> "18 Transpose_1299" [label="[1, 32, 104, 104]", style=solid]; +"17 Transpose_1391" -> "19 Transpose_1397" [label="[1, 128, 52, 52]", style=solid]; +"18 Transpose_1299" -> "20 Multiply_3755" [label="[1, 32, 104, 104]", style=solid]; +"18 Transpose_1299" -> "21 Transpose_1333" [label="[1, 32, 104, 104]", style=solid]; +"19 Transpose_1397" -> "22 Transpose_1500" [label="[1, 128, 52, 52]", style=solid]; +"19 Transpose_1397" -> "23 group_route_11/split" [label="[1, 128, 52, 52]", style=solid]; +"20 Multiply_3755" -> "24 Transpose_1323" [label="[1, 32, 104, 104]", style=solid]; +"21 Transpose_1333" -> "25 Multiply_3769" [label="[1, 64, 104, 104]", style=solid]; +"22 Transpose_1500" -> "26 MaxPool_433" [label="[1, 256, 52, 52]", style=solid]; +"23 group_route_11/split" -> "27 Multiply_3797" [label="[1, 64, 52, 52]", style=solid]; +"24 Transpose_1323" -> "28 Transpose_1329" [label="[1, 32, 104, 104]", style=solid]; +"25 Multiply_3769" -> "29 Transpose_1357" [label="[1, 64, 104, 104]", style=solid]; +"26 MaxPool_433" -> "30 Multiply_3839" [label="[1, 256, 26, 26]", style=solid]; +"27 Multiply_3797" -> "31 Transpose_1426" [label="[1, 64, 52, 52]", style=solid]; +"28 Transpose_1329" -> "21 Transpose_1333" [label="[1, 32, 104, 104]", style=solid]; +"29 Transpose_1357" -> "32 Transpose_1363" [label="[1, 64, 104, 104]", style=solid]; +"30 Multiply_3839" -> "33 Transpose_1524" [label="[1, 256, 26, 26]", style=solid]; +"31 Transpose_1426" -> "34 Transpose_1432" [label="[1, 64, 52, 52]", style=solid]; +"32 Transpose_1363" -> "11 Transpose_1367" [label="[1, 64, 104, 104]", style=solid]; +"33 Transpose_1524" -> "35 Transpose_1530" [label="[1, 256, 26, 26]", style=solid]; +"34 Transpose_1432" -> "36 Multiply_3811" [label="[1, 64, 52, 52]", style=solid]; +"34 Transpose_1432" -> "37 Transpose_1466" [label="[1, 64, 52, 52]", style=solid]; +"35 Transpose_1530" -> "38 Transpose_1633" [label="[1, 256, 26, 26]", style=solid]; +"35 Transpose_1530" -> "39 group_route_19/split" [label="[1, 256, 26, 26]", style=solid]; +"36 Multiply_3811" -> "40 Transpose_1456" [label="[1, 64, 52, 52]", style=solid]; +"37 Transpose_1466" -> "41 Multiply_3825" [label="[1, 128, 52, 52]", style=solid]; +"38 Transpose_1633" -> "42 MaxPool_579" [label="[1, 512, 26, 26]", style=solid]; +"39 group_route_19/split" -> "43 Multiply_3853" [label="[1, 128, 26, 26]", style=solid]; +"40 Transpose_1456" -> "44 Transpose_1462" [label="[1, 64, 52, 52]", style=solid]; +"41 Multiply_3825" -> "45 Transpose_1490" [label="[1, 128, 52, 52]", style=solid]; +"42 MaxPool_579" -> "46 Multiply_3895" [label="[1, 512, 13, 13]", style=solid]; +"43 Multiply_3853" -> "47 Transpose_1559" [label="[1, 128, 26, 26]", style=solid]; +"44 Transpose_1462" -> "37 Transpose_1466" [label="[1, 64, 52, 52]", style=solid]; +"45 Transpose_1490" -> "48 Transpose_1496" [label="[1, 128, 52, 52]", style=solid]; +"46 Multiply_3895" -> "49 Transpose_1657" [label="[1, 512, 13, 13]", style=solid]; +"47 Transpose_1559" -> "50 Transpose_1565" [label="[1, 128, 26, 26]", style=solid]; +"48 Transpose_1496" -> "22 Transpose_1500" [label="[1, 128, 52, 52]", style=solid]; +"49 Transpose_1657" -> "51 Transpose_1663" 
[label="[1, 512, 13, 13]", style=solid]; +"50 Transpose_1565" -> "52 Multiply_3867" [label="[1, 128, 26, 26]", style=solid]; +"50 Transpose_1565" -> "53 Transpose_1599" [label="[1, 128, 26, 26]", style=solid]; +"51 Transpose_1663" -> "54 Multiply_3909" [label="[1, 512, 13, 13]", style=solid]; +"52 Multiply_3867" -> "55 Transpose_1589" [label="[1, 128, 26, 26]", style=solid]; +"53 Transpose_1599" -> "56 Multiply_3881" [label="[1, 256, 26, 26]", style=solid]; +"54 Multiply_3909" -> "57 Transpose_1687" [label="[1, 256, 13, 13]", style=solid]; +"55 Transpose_1589" -> "58 Transpose_1595" [label="[1, 128, 26, 26]", style=solid]; +"56 Multiply_3881" -> "59 Transpose_1623" [label="[1, 256, 26, 26]", style=solid]; +"57 Transpose_1687" -> "60 Transpose_1693" [label="[1, 256, 13, 13]", style=solid]; +"58 Transpose_1595" -> "53 Transpose_1599" [label="[1, 128, 26, 26]", style=solid]; +"59 Transpose_1623" -> "61 Transpose_1629" [label="[1, 256, 26, 26]", style=solid]; +"60 Transpose_1693" -> "62 Multiply_3923" [label="[1, 256, 13, 13]", style=solid]; +"60 Transpose_1693" -> "63 Multiply_3951" [label="[1, 256, 13, 13]", style=solid]; +"61 Transpose_1629" -> "38 Transpose_1633" [label="[1, 256, 26, 26]", style=solid]; +"61 Transpose_1629" -> "64 Transpose_1727" [label="[1, 256, 26, 26]", style=solid]; +"62 Multiply_3923" -> "65 Transpose_1717" [label="[1, 128, 13, 13]", style=solid]; +"63 Multiply_3951" -> "66 Transpose_1787" [label="[1, 512, 13, 13]", style=solid]; +"64 Transpose_1727" -> "67 Multiply_3937" [label="[1, 384, 26, 26]", style=solid]; +"65 Transpose_1717" -> "68 Transpose_1723" [label="[1, 128, 13, 13]", style=solid]; +"66 Transpose_1787" -> "69 Transpose_1793" [label="[1, 512, 13, 13]", style=solid]; +"67 Multiply_3937" -> "70 Transpose_1751" [label="[1, 256, 26, 26]", style=solid]; +"68 Transpose_1723" -> "71 leaky_re_lu_17/LeakyRelu" [label="[1, 128, 13, 13]", style=solid]; +"69 Transpose_1793" -> "72 Convolution_749" [label="[1, 512, 13, 13]", style=solid]; +"70 Transpose_1751" -> "73 Transpose_1757" [label="[1, 256, 26, 26]", style=solid]; +"71 leaky_re_lu_17/LeakyRelu" -> "74 up_sampling2d/Shape" [label="[1, 13, 13, 128]", style=solid]; +"71 leaky_re_lu_17/LeakyRelu" -> "75 up_sampling2d/resize/ResizeNearestNeighbor" [label="[1, 13, 13, 128]", style=solid]; +"72 Convolution_749" -> "76 Transpose_1799" [label="[1, 255, 13, 13]", style=solid]; +"73 Transpose_1757" -> "77 Convolution_706" [label="[1, 256, 26, 26]", style=solid]; +"74 up_sampling2d/Shape" -> "78 up_sampling2d/strided_slice" [label="[4]", style=dashed]; +"75 up_sampling2d/resize/ResizeNearestNeighbor" -> "79 Transpose_1725" [label="[1, 26, 26, 128]", style=solid]; +"76 Transpose_1799" -> "80 conv2d_17/BiasAdd" [label="[1, 255, 13, 13]", style=solid]; +"77 Convolution_706" -> "81 Transpose_1763" [label="[1, 255, 26, 26]", style=solid]; +"78 up_sampling2d/strided_slice" -> "82 up_sampling2d/mul" [label="[2]", style=dashed]; +"79 Transpose_1725" -> "64 Transpose_1727" [label="[1, 128, 26, 26]", style=solid]; +"80 conv2d_17/BiasAdd" -> "83 conv2d_17/BiasAdd^0" [label="[1, 13, 13, 255]", style=solid]; +"81 Transpose_1763" -> "84 conv2d_20/BiasAdd" [label="[1, 255, 26, 26]", style=solid]; +"82 up_sampling2d/mul" -> "75 up_sampling2d/resize/ResizeNearestNeighbor" [label="[2]", style=dashed]; +"84 conv2d_20/BiasAdd" -> "85 conv2d_20/BiasAdd^0" [label="[1, 26, 26, 255]", style=solid]; +"86 Constant_1762" -> "84 conv2d_20/BiasAdd" [label="[4]", style=dashed]; +"87 Transpose_1761" -> "81 Transpose_1763" [label="[1, 255, 1, 1]", 
style=solid]; +"88 Transpose_705" -> "77 Convolution_706" [label="[255, 256, 1, 1]", style=solid]; +"89 Transpose_1755" -> "73 Transpose_1757" [label="[1, 1, 1, 1]", style=solid]; +"90 Constant_3945" -> "70 Transpose_1751" [label="[1, 256, 1, 1]", style=solid]; +"91 Multiply_4070" -> "67 Multiply_3937" [label="[256, 384, 3, 3]", style=solid]; +"92 Transpose_1627" -> "61 Transpose_1629" [label="[1, 1, 1, 1]", style=solid]; +"93 Constant_3889" -> "59 Transpose_1623" [label="[1, 256, 1, 1]", style=solid]; +"94 Multiply_4046" -> "56 Multiply_3881" [label="[256, 256, 1, 1]", style=solid]; +"95 Transpose_1563" -> "50 Transpose_1565" [label="[1, 1, 1, 1]", style=solid]; +"96 Constant_3861" -> "47 Transpose_1559" [label="[1, 128, 1, 1]", style=solid]; +"97 Multiply_4034" -> "43 Multiply_3853" [label="[128, 128, 3, 3]", style=solid]; +"98 Constant_1531" -> "39 group_route_19/split" [label="[]", style=dashed]; +"99 Transpose_1528" -> "35 Transpose_1530" [label="[1, 1, 1, 1]", style=solid]; +"100 Constant_3847" -> "33 Transpose_1524" [label="[1, 256, 1, 1]", style=solid]; +"101 Multiply_4028" -> "30 Multiply_3839" [label="[256, 256, 3, 3]", style=solid]; +"102 Transpose_1494" -> "48 Transpose_1496" [label="[1, 1, 1, 1]", style=solid]; +"103 Constant_3833" -> "45 Transpose_1490" [label="[1, 128, 1, 1]", style=solid]; +"104 Multiply_4022" -> "41 Multiply_3825" [label="[128, 128, 1, 1]", style=solid]; +"105 Transpose_1430" -> "34 Transpose_1432" [label="[1, 1, 1, 1]", style=solid]; +"106 Constant_3805" -> "31 Transpose_1426" [label="[1, 64, 1, 1]", style=solid]; +"107 Multiply_4010" -> "27 Multiply_3797" [label="[64, 64, 3, 3]", style=solid]; +"108 Constant_1398" -> "23 group_route_11/split" [label="[]", style=dashed]; +"109 Transpose_1395" -> "19 Transpose_1397" [label="[1, 1, 1, 1]", style=solid]; +"110 Constant_3791" -> "17 Transpose_1391" [label="[1, 128, 1, 1]", style=solid]; +"111 Multiply_4004" -> "15 Multiply_3783" [label="[128, 128, 3, 3]", style=solid]; +"112 Transpose_1361" -> "32 Transpose_1363" [label="[1, 1, 1, 1]", style=solid]; +"113 Constant_3777" -> "29 Transpose_1357" [label="[1, 64, 1, 1]", style=solid]; +"114 Multiply_3998" -> "25 Multiply_3769" [label="[64, 64, 1, 1]", style=solid]; +"115 Transpose_1297" -> "18 Transpose_1299" [label="[1, 1, 1, 1]", style=solid]; +"116 Constant_3749" -> "16 Transpose_1293" [label="[1, 32, 1, 1]", style=solid]; +"117 Multiply_3986" -> "14 Multiply_3741" [label="[32, 32, 3, 3]", style=solid]; +"118 Constant_1265" -> "12 group_route_3/split" [label="[]", style=dashed]; +"119 Transpose_1262" -> "10 Transpose_1264" [label="[1, 1, 1, 1]", style=solid]; +"120 Constant_3735" -> "9 Transpose_1258" [label="[1, 64, 1, 1]", style=solid]; +"121 Multiply_3980" -> "8 Multiply_3727" [label="[64, 64, 3, 3]", style=solid]; +"122 Transpose_1232" -> "7 Transpose_1234" [label="[1, 1, 1, 1]", style=solid]; +"123 Constant_3721" -> "6 Transpose_1228" [label="[1, 64, 1, 1]", style=solid]; +"124 Multiply_3974" -> "5 Multiply_3713" [label="[64, 32, 3, 3]", style=solid]; +"125 Transpose_1175" -> "4 Transpose_1177" [label="[1, 1, 1, 1]", style=solid]; +"126 Constant_3707" -> "3 Transpose_1171" [label="[1, 32, 1, 1]", style=solid]; +"127 Gather_4242" -> "2 Multiply_3699" [label="[32, 3, 3, 3]", style=solid]; +"128 Constant_2326" -> "1 Divide_2366" [label="[4]", style=dashed]; +"129 Transpose_1327" -> "28 Transpose_1329" [label="[1, 1, 1, 1]", style=solid]; +"130 Constant_3763" -> "24 Transpose_1323" [label="[1, 32, 1, 1]", style=solid]; +"131 Multiply_3992" -> "20 
Multiply_3755" [label="[32, 32, 3, 3]", style=solid]; +"132 Transpose_1460" -> "44 Transpose_1462" [label="[1, 1, 1, 1]", style=solid]; +"133 Constant_3819" -> "40 Transpose_1456" [label="[1, 64, 1, 1]", style=solid]; +"134 Multiply_4016" -> "36 Multiply_3811" [label="[64, 64, 3, 3]", style=solid]; +"135 Transpose_1593" -> "58 Transpose_1595" [label="[1, 1, 1, 1]", style=solid]; +"136 Constant_3875" -> "55 Transpose_1589" [label="[1, 128, 1, 1]", style=solid]; +"137 Multiply_4040" -> "52 Multiply_3867" [label="[128, 128, 3, 3]", style=solid]; +"138 Constant_1724" -> "79 Transpose_1725" [label="[4]", style=dashed]; +"139 Constant_669" -> "75 up_sampling2d/resize/ResizeNearestNeighbor" [label="[2]", style=dashed]; +"140 up_sampling2d/Const" -> "82 up_sampling2d/mul" [label="[2]", style=dashed]; +"141 up_sampling2d/strided_slice/stack_2" -> "78 up_sampling2d/strided_slice" [label="[1]", style=dashed]; +"142 up_sampling2d/strided_slice/stack_1" -> "78 up_sampling2d/strided_slice" [label="[1]", style=dashed]; +"143 up_sampling2d/strided_slice/stack" -> "78 up_sampling2d/strided_slice" [label="[1]", style=dashed]; +"144 Constant_1722" -> "71 leaky_re_lu_17/LeakyRelu" [label="[4]", style=dashed]; +"145 Transpose_1721" -> "68 Transpose_1723" [label="[1, 1, 1, 1]", style=solid]; +"146 Constant_3931" -> "65 Transpose_1717" [label="[1, 128, 1, 1]", style=solid]; +"147 Multiply_4064" -> "62 Multiply_3923" [label="[128, 256, 1, 1]", style=solid]; +"148 Transpose_1691" -> "60 Transpose_1693" [label="[1, 1, 1, 1]", style=solid]; +"149 Constant_3917" -> "57 Transpose_1687" [label="[1, 256, 1, 1]", style=solid]; +"150 Multiply_4058" -> "54 Multiply_3909" [label="[256, 512, 1, 1]", style=solid]; +"151 Transpose_1661" -> "51 Transpose_1663" [label="[1, 1, 1, 1]", style=solid]; +"152 Constant_3903" -> "49 Transpose_1657" [label="[1, 512, 1, 1]", style=solid]; +"153 Multiply_4052" -> "46 Multiply_3895" [label="[512, 512, 3, 3]", style=solid]; +"154 Constant_1798" -> "80 conv2d_17/BiasAdd" [label="[4]", style=dashed]; +"155 Transpose_1797" -> "76 Transpose_1799" [label="[1, 255, 1, 1]", style=solid]; +"156 Transpose_748" -> "72 Convolution_749" [label="[255, 512, 1, 1]", style=solid]; +"157 Transpose_1791" -> "69 Transpose_1793" [label="[1, 1, 1, 1]", style=solid]; +"158 Constant_3959" -> "66 Transpose_1787" [label="[1, 512, 1, 1]", style=solid]; +"159 Multiply_4076" -> "63 Multiply_3951" [label="[512, 256, 3, 3]", style=solid]; } diff --git a/tests/openvino/native/data/reference_graphs/quantized/GRUSequenceModel_linear_before_reset_F.dot b/tests/openvino/native/data/reference_graphs/quantized/GRUSequenceModel_linear_before_reset_F.dot new file mode 100644 index 00000000000..f97ac8a2d68 --- /dev/null +++ b/tests/openvino/native/data/reference_graphs/quantized/GRUSequenceModel_linear_before_reset_F.dot @@ -0,0 +1,81 @@ +strict digraph { +"0 X" [id=0, type=Parameter]; +"1 initial_hidden_state" [id=1, type=Parameter]; +"2 X/fq_output_0" [id=2, type=FakeQuantize]; +"3 initial_hidden_state/fq_output_0" [id=3, type=FakeQuantize]; +"4 GRUSequence" [id=4, type=GRUSequence]; +"5 GRUSequence/fq_output_0" [id=5, type=FakeQuantize]; +"6 MatMul" [id=6, type=MatMul]; +"7 Result" [id=7, type=Result]; +"8 MatMul/fq_weights_1" [id=8, type=FakeQuantize]; +"9 Constant_2541" [id=9, type=Constant]; +"10 Constant_2540" [id=10, type=Constant]; +"11 Constant_2539" [id=11, type=Constant]; +"12 Constant_2538" [id=12, type=Constant]; +"13 Constant_8" [id=13, type=Constant]; +"14 Constant_2526" [id=14, type=Constant]; +"15 Constant_2525" 
[id=15, type=Constant]; +"16 Constant_2524" [id=16, type=Constant]; +"17 Constant_2523" [id=17, type=Constant]; +"18 Constant_6" [id=18, type=Constant]; +"19 GRUSequence/fq_weights_4" [id=19, type=FakeQuantize]; +"20 Constant_2536" [id=20, type=Constant]; +"21 Constant_2535" [id=21, type=Constant]; +"22 Constant_2534" [id=22, type=Constant]; +"23 Constant_2533" [id=23, type=Constant]; +"24 Constant_5" [id=24, type=Constant]; +"25 GRUSequence/fq_weights_3" [id=25, type=FakeQuantize]; +"26 Constant_2531" [id=26, type=Constant]; +"27 Constant_2530" [id=27, type=Constant]; +"28 Constant_2529" [id=28, type=Constant]; +"29 Constant_2528" [id=29, type=Constant]; +"30 Constant_4" [id=30, type=Constant]; +"31 Constant_3" [id=31, type=Constant]; +"32 Constant_2521" [id=32, type=Constant]; +"33 Constant_2520" [id=33, type=Constant]; +"34 Constant_2519" [id=34, type=Constant]; +"35 Constant_2518" [id=35, type=Constant]; +"36 Constant_2516" [id=36, type=Constant]; +"37 Constant_2515" [id=37, type=Constant]; +"38 Constant_2514" [id=38, type=Constant]; +"39 Constant_2513" [id=39, type=Constant]; +"0 X" -> "2 X/fq_output_0" [label="[3, 2, 16]", style=solid]; +"1 initial_hidden_state" -> "3 initial_hidden_state/fq_output_0" [label="[3, 1, 128]", style=solid]; +"2 X/fq_output_0" -> "4 GRUSequence" [label="[3, 2, 16]", style=solid]; +"3 initial_hidden_state/fq_output_0" -> "4 GRUSequence" [label="[3, 1, 128]", style=solid]; +"4 GRUSequence" -> "5 GRUSequence/fq_output_0" [label="[3, 1, 2, 128]", style=solid]; +"5 GRUSequence/fq_output_0" -> "6 MatMul" [label="[3, 1, 2, 128]", style=solid]; +"6 MatMul" -> "7 Result" [label="[3, 1, 2, 3]", style=solid]; +"8 MatMul/fq_weights_1" -> "6 MatMul" [label="[3, 1, 128, 3]", style=solid]; +"9 Constant_2541" -> "8 MatMul/fq_weights_1" [label="[3, 1, 1, 3]", style=solid]; +"10 Constant_2540" -> "8 MatMul/fq_weights_1" [label="[3, 1, 1, 3]", style=solid]; +"11 Constant_2539" -> "8 MatMul/fq_weights_1" [label="[3, 1, 1, 3]", style=solid]; +"12 Constant_2538" -> "8 MatMul/fq_weights_1" [label="[3, 1, 1, 3]", style=solid]; +"13 Constant_8" -> "8 MatMul/fq_weights_1" [label="[3, 1, 128, 3]", style=solid]; +"14 Constant_2526" -> "5 GRUSequence/fq_output_0" [label="[]", style=solid]; +"15 Constant_2525" -> "5 GRUSequence/fq_output_0" [label="[]", style=solid]; +"16 Constant_2524" -> "5 GRUSequence/fq_output_0" [label="[]", style=solid]; +"17 Constant_2523" -> "5 GRUSequence/fq_output_0" [label="[]", style=solid]; +"18 Constant_6" -> "4 GRUSequence" [label="[1, 384]", style=solid]; +"19 GRUSequence/fq_weights_4" -> "4 GRUSequence" [label="[1, 384, 128]", style=solid]; +"20 Constant_2536" -> "19 GRUSequence/fq_weights_4" [label="[1, 384, 1]", style=solid]; +"21 Constant_2535" -> "19 GRUSequence/fq_weights_4" [label="[1, 384, 1]", style=solid]; +"22 Constant_2534" -> "19 GRUSequence/fq_weights_4" [label="[1, 384, 1]", style=solid]; +"23 Constant_2533" -> "19 GRUSequence/fq_weights_4" [label="[1, 384, 1]", style=solid]; +"24 Constant_5" -> "19 GRUSequence/fq_weights_4" [label="[1, 384, 128]", style=solid]; +"25 GRUSequence/fq_weights_3" -> "4 GRUSequence" [label="[1, 384, 16]", style=solid]; +"26 Constant_2531" -> "25 GRUSequence/fq_weights_3" [label="[1, 384, 1]", style=solid]; +"27 Constant_2530" -> "25 GRUSequence/fq_weights_3" [label="[1, 384, 1]", style=solid]; +"28 Constant_2529" -> "25 GRUSequence/fq_weights_3" [label="[1, 384, 1]", style=solid]; +"29 Constant_2528" -> "25 GRUSequence/fq_weights_3" [label="[1, 384, 1]", style=solid]; +"30 Constant_4" -> "25 
GRUSequence/fq_weights_3" [label="[1, 384, 16]", style=solid]; +"31 Constant_3" -> "4 GRUSequence" [label="[3]", style=dashed]; +"32 Constant_2521" -> "3 initial_hidden_state/fq_output_0" [label="[]", style=solid]; +"33 Constant_2520" -> "3 initial_hidden_state/fq_output_0" [label="[]", style=solid]; +"34 Constant_2519" -> "3 initial_hidden_state/fq_output_0" [label="[]", style=solid]; +"35 Constant_2518" -> "3 initial_hidden_state/fq_output_0" [label="[]", style=solid]; +"36 Constant_2516" -> "2 X/fq_output_0" [label="[]", style=solid]; +"37 Constant_2515" -> "2 X/fq_output_0" [label="[]", style=solid]; +"38 Constant_2514" -> "2 X/fq_output_0" [label="[]", style=solid]; +"39 Constant_2513" -> "2 X/fq_output_0" [label="[]", style=solid]; +} diff --git a/tests/openvino/native/data/reference_graphs/quantized/GRUSequenceModel_linear_before_reset_T.dot b/tests/openvino/native/data/reference_graphs/quantized/GRUSequenceModel_linear_before_reset_T.dot new file mode 100644 index 00000000000..a9819020b25 --- /dev/null +++ b/tests/openvino/native/data/reference_graphs/quantized/GRUSequenceModel_linear_before_reset_T.dot @@ -0,0 +1,41 @@ +strict digraph { +"0 X" [id=0, type=Parameter]; +"1 initial_hidden_state" [id=1, type=Parameter]; +"2 GRUSequence" [id=2, type=GRUSequence]; +"3 GRUSequence/fq_output_0" [id=3, type=FakeQuantize]; +"4 MatMul" [id=4, type=MatMul]; +"5 Result" [id=5, type=Result]; +"6 MatMul/fq_weights_1" [id=6, type=FakeQuantize]; +"7 Constant_2205" [id=7, type=Constant]; +"8 Constant_2204" [id=8, type=Constant]; +"9 Constant_2203" [id=9, type=Constant]; +"10 Constant_2202" [id=10, type=Constant]; +"11 Constant_8" [id=11, type=Constant]; +"12 Constant_2200" [id=12, type=Constant]; +"13 Constant_2199" [id=13, type=Constant]; +"14 Constant_2198" [id=14, type=Constant]; +"15 Constant_2197" [id=15, type=Constant]; +"16 Constant_6" [id=16, type=Constant]; +"17 Constant_5" [id=17, type=Constant]; +"18 Constant_4" [id=18, type=Constant]; +"19 Constant_3" [id=19, type=Constant]; +"0 X" -> "2 GRUSequence" [label="[3, 2, 16]", style=solid]; +"1 initial_hidden_state" -> "2 GRUSequence" [label="[3, 1, 128]", style=solid]; +"2 GRUSequence" -> "3 GRUSequence/fq_output_0" [label="[3, 1, 2, 128]", style=solid]; +"3 GRUSequence/fq_output_0" -> "4 MatMul" [label="[3, 1, 2, 128]", style=solid]; +"4 MatMul" -> "5 Result" [label="[3, 1, 2, 3]", style=solid]; +"6 MatMul/fq_weights_1" -> "4 MatMul" [label="[3, 1, 128, 3]", style=solid]; +"7 Constant_2205" -> "6 MatMul/fq_weights_1" [label="[3, 1, 1, 3]", style=solid]; +"8 Constant_2204" -> "6 MatMul/fq_weights_1" [label="[3, 1, 1, 3]", style=solid]; +"9 Constant_2203" -> "6 MatMul/fq_weights_1" [label="[3, 1, 1, 3]", style=solid]; +"10 Constant_2202" -> "6 MatMul/fq_weights_1" [label="[3, 1, 1, 3]", style=solid]; +"11 Constant_8" -> "6 MatMul/fq_weights_1" [label="[3, 1, 128, 3]", style=solid]; +"12 Constant_2200" -> "3 GRUSequence/fq_output_0" [label="[]", style=solid]; +"13 Constant_2199" -> "3 GRUSequence/fq_output_0" [label="[]", style=solid]; +"14 Constant_2198" -> "3 GRUSequence/fq_output_0" [label="[]", style=solid]; +"15 Constant_2197" -> "3 GRUSequence/fq_output_0" [label="[]", style=solid]; +"16 Constant_6" -> "2 GRUSequence" [label="[1, 512]", style=solid]; +"17 Constant_5" -> "2 GRUSequence" [label="[1, 384, 128]", style=solid]; +"18 Constant_4" -> "2 GRUSequence" [label="[1, 384, 16]", style=solid]; +"19 Constant_3" -> "2 GRUSequence" [label="[3]", style=dashed]; +} diff --git 
a/tests/openvino/native/data/reference_graphs/quantized/IntegerModel.dot b/tests/openvino/native/data/reference_graphs/quantized/IntegerModel.dot index 9a6417ae329..03cfb73b101 100644 --- a/tests/openvino/native/data/reference_graphs/quantized/IntegerModel.dot +++ b/tests/openvino/native/data/reference_graphs/quantized/IntegerModel.dot @@ -1,102 +1,102 @@ strict digraph { "0 Input" [id=0, type=Parameter]; -"1 Input/fq_output_0" [id=1, type=FakeQuantize]; -"2 Convert_1" [id=2, type=Convert]; -"3 Gather_135216" [id=3, type=Gather]; -"4 Gather_135205" [id=4, type=Gather]; -"5 MatMul_2" [id=5, type=MatMul]; -"6 Gather_135208" [id=6, type=Gather]; -"7 MatMul_2/fq_output_0" [id=7, type=FakeQuantize]; -"8 Gather_135208/fq_output_0" [id=8, type=FakeQuantize]; -"9 Add_1" [id=9, type=Add]; -"10 Gather_135211" [id=10, type=Gather]; -"11 Result" [id=11, type=Result]; -"12 MatMul_1" [id=12, type=MatMul]; -"13 MatMul_1/fq_output_0" [id=13, type=FakeQuantize]; -"14 Constant_137425" [id=14, type=Constant]; -"15 Constant_137424" [id=15, type=Constant]; -"16 Constant_137423" [id=16, type=Constant]; -"17 Constant_137422" [id=17, type=Constant]; -"18 MatMul_2/fq_weights_1" [id=18, type=FakeQuantize]; -"19 Constant_137435" [id=19, type=Constant]; -"20 Constant_137434" [id=20, type=Constant]; -"21 Constant_137433" [id=21, type=Constant]; -"22 Constant_137432" [id=22, type=Constant]; -"23 Constant_135217" [id=23, type=Constant]; -"24 Constant_135215" [id=24, type=Constant]; -"25 Constant_135214" [id=25, type=Constant]; -"26 Constant_137415" [id=26, type=Constant]; -"27 Constant_137414" [id=27, type=Constant]; -"28 Constant_137413" [id=28, type=Constant]; -"29 Constant_137412" [id=29, type=Constant]; -"30 Constant_137420" [id=30, type=Constant]; -"31 Constant_137419" [id=31, type=Constant]; -"32 Constant_137418" [id=32, type=Constant]; -"33 Constant_137417" [id=33, type=Constant]; -"34 MatMul_1/fq_weights_1" [id=34, type=FakeQuantize]; -"35 Constant_137430" [id=35, type=Constant]; -"36 Constant_137429" [id=36, type=Constant]; -"37 Constant_137428" [id=37, type=Constant]; -"38 Constant_137427" [id=38, type=Constant]; -"39 Constant_135212" [id=39, type=Constant]; -"40 Constant_135210" [id=40, type=Constant]; -"41 Constant_135209" [id=41, type=Constant]; -"42 Constant_137410" [id=42, type=Constant]; -"43 Constant_137409" [id=43, type=Constant]; -"44 Constant_137408" [id=44, type=Constant]; -"45 Constant_137407" [id=45, type=Constant]; -"46 Constant_135207" [id=46, type=Constant]; -"47 Constant_135204" [id=47, type=Constant]; -"48 Constant_135203" [id=48, type=Constant]; -"49 Constant_135206" [id=49, type=Constant]; -"0 Input" -> "1 Input/fq_output_0" [label="[1, 192, 1]", style=solid]; -"1 Input/fq_output_0" -> "2 Convert_1" [label="[1, 192, 1]", style=solid]; -"1 Input/fq_output_0" -> "3 Gather_135216" [label="[1, 192, 1]", style=solid]; -"2 Convert_1" -> "4 Gather_135205" [label="[1, 192, 1]", style=dashed]; -"3 Gather_135216" -> "5 MatMul_2" [label="[1, 192]", style=solid]; -"4 Gather_135205" -> "6 Gather_135208" [label="[192, 1]", style=dashed]; -"5 MatMul_2" -> "7 MatMul_2/fq_output_0" [label="[1, 160]", style=solid]; -"6 Gather_135208" -> "8 Gather_135208/fq_output_0" [label="[192, 1, 160]", style=solid]; -"7 MatMul_2/fq_output_0" -> "9 Add_1" [label="[1, 160]", style=solid]; -"8 Gather_135208/fq_output_0" -> "10 Gather_135211" [label="[192, 1, 160]", style=solid]; -"9 Add_1" -> "11 Result" [label="[1, 160]", style=solid]; -"10 Gather_135211" -> "12 MatMul_1" [label="[1, 160]", style=solid]; -"12 MatMul_1" -> 
"13 MatMul_1/fq_output_0" [label="[1, 160]", style=solid]; -"13 MatMul_1/fq_output_0" -> "9 Add_1" [label="[1, 160]", style=solid]; -"14 Constant_137425" -> "7 MatMul_2/fq_output_0" [label="[]", style=solid]; -"15 Constant_137424" -> "7 MatMul_2/fq_output_0" [label="[]", style=solid]; -"16 Constant_137423" -> "7 MatMul_2/fq_output_0" [label="[]", style=solid]; -"17 Constant_137422" -> "7 MatMul_2/fq_output_0" [label="[]", style=solid]; -"18 MatMul_2/fq_weights_1" -> "5 MatMul_2" [label="[160, 192]", style=solid]; -"19 Constant_137435" -> "18 MatMul_2/fq_weights_1" [label="[160, 1]", style=solid]; -"20 Constant_137434" -> "18 MatMul_2/fq_weights_1" [label="[160, 1]", style=solid]; -"21 Constant_137433" -> "18 MatMul_2/fq_weights_1" [label="[160, 1]", style=solid]; -"22 Constant_137432" -> "18 MatMul_2/fq_weights_1" [label="[160, 1]", style=solid]; -"23 Constant_135217" -> "18 MatMul_2/fq_weights_1" [label="[160, 192]", style=solid]; -"24 Constant_135215" -> "3 Gather_135216" [label="[]", style=dashed]; -"25 Constant_135214" -> "3 Gather_135216" [label="[]", style=dashed]; -"26 Constant_137415" -> "1 Input/fq_output_0" [label="[]", style=solid]; -"27 Constant_137414" -> "1 Input/fq_output_0" [label="[]", style=solid]; -"28 Constant_137413" -> "1 Input/fq_output_0" [label="[]", style=solid]; -"29 Constant_137412" -> "1 Input/fq_output_0" [label="[]", style=solid]; -"30 Constant_137420" -> "13 MatMul_1/fq_output_0" [label="[]", style=solid]; -"31 Constant_137419" -> "13 MatMul_1/fq_output_0" [label="[]", style=solid]; -"32 Constant_137418" -> "13 MatMul_1/fq_output_0" [label="[]", style=solid]; -"33 Constant_137417" -> "13 MatMul_1/fq_output_0" [label="[]", style=solid]; -"34 MatMul_1/fq_weights_1" -> "12 MatMul_1" [label="[160, 160]", style=solid]; -"35 Constant_137430" -> "34 MatMul_1/fq_weights_1" [label="[160, 1]", style=solid]; -"36 Constant_137429" -> "34 MatMul_1/fq_weights_1" [label="[160, 1]", style=solid]; -"37 Constant_137428" -> "34 MatMul_1/fq_weights_1" [label="[160, 1]", style=solid]; -"38 Constant_137427" -> "34 MatMul_1/fq_weights_1" [label="[160, 1]", style=solid]; -"39 Constant_135212" -> "34 MatMul_1/fq_weights_1" [label="[160, 160]", style=solid]; -"40 Constant_135210" -> "10 Gather_135211" [label="[]", style=dashed]; -"41 Constant_135209" -> "10 Gather_135211" [label="[]", style=dashed]; -"42 Constant_137410" -> "8 Gather_135208/fq_output_0" [label="[]", style=solid]; -"43 Constant_137409" -> "8 Gather_135208/fq_output_0" [label="[]", style=solid]; -"44 Constant_137408" -> "8 Gather_135208/fq_output_0" [label="[]", style=solid]; -"45 Constant_137407" -> "8 Gather_135208/fq_output_0" [label="[]", style=solid]; -"46 Constant_135207" -> "6 Gather_135208" [label="[]", style=dashed]; -"47 Constant_135204" -> "4 Gather_135205" [label="[]", style=dashed]; -"48 Constant_135203" -> "4 Gather_135205" [label="[]", style=dashed]; -"49 Constant_135206" -> "6 Gather_135208" [label="[369, 160]", style=solid]; +"1 Convert_1" [id=1, type=Convert]; +"2 Gather_4/fq_input_0" [id=2, type=FakeQuantize]; +"3 Gather_1" [id=3, type=Gather]; +"4 Gather_4" [id=4, type=Gather]; +"5 Gather_2" [id=5, type=Gather]; +"6 MatMul_2" [id=6, type=MatMul]; +"7 Gather_3" [id=7, type=Gather]; +"8 MatMul_2/fq_output_0" [id=8, type=FakeQuantize]; +"9 MatMul_1" [id=9, type=MatMul]; +"10 Add_1" [id=10, type=Add]; +"11 MatMul_1/fq_output_0" [id=11, type=FakeQuantize]; +"12 Result" [id=12, type=Result]; +"13 Constant_7164" [id=13, type=Constant]; +"14 Constant_7163" [id=14, type=Constant]; +"15 Constant_7162" [id=15, 
type=Constant]; +"16 Constant_7161" [id=16, type=Constant]; +"17 MatMul_2/fq_weights_1" [id=17, type=FakeQuantize]; +"18 Constant_7169" [id=18, type=Constant]; +"19 Constant_7168" [id=19, type=Constant]; +"20 Constant_7167" [id=20, type=Constant]; +"21 Constant_7166" [id=21, type=Constant]; +"22 Constant_4701" [id=22, type=Constant]; +"23 Constant_4699" [id=23, type=Constant]; +"24 Constant_4698" [id=24, type=Constant]; +"25 Constant_7159" [id=25, type=Constant]; +"26 Constant_7158" [id=26, type=Constant]; +"27 Constant_7157" [id=27, type=Constant]; +"28 Constant_7156" [id=28, type=Constant]; +"29 Constant_7179" [id=29, type=Constant]; +"30 Constant_7178" [id=30, type=Constant]; +"31 Constant_7177" [id=31, type=Constant]; +"32 Constant_7176" [id=32, type=Constant]; +"33 MatMul_1/fq_weights_1" [id=33, type=FakeQuantize]; +"34 Constant_7184" [id=34, type=Constant]; +"35 Constant_7183" [id=35, type=Constant]; +"36 Constant_7182" [id=36, type=Constant]; +"37 Constant_7181" [id=37, type=Constant]; +"38 Constant_4696" [id=38, type=Constant]; +"39 Constant_4694" [id=39, type=Constant]; +"40 Constant_4693" [id=40, type=Constant]; +"41 Constant_4691" [id=41, type=Constant]; +"42 Constant_4688" [id=42, type=Constant]; +"43 Constant_4687" [id=43, type=Constant]; +"44 Gather_2/fq_weights_0" [id=44, type=FakeQuantize]; +"45 Constant_7174" [id=45, type=Constant]; +"46 Constant_7173" [id=46, type=Constant]; +"47 Constant_7172" [id=47, type=Constant]; +"48 Constant_7171" [id=48, type=Constant]; +"49 Constant_4690" [id=49, type=Constant]; +"0 Input" -> "1 Convert_1" [label="[1, 192, 1]", style=solid]; +"0 Input" -> "2 Gather_4/fq_input_0" [label="[1, 192, 1]", style=solid]; +"1 Convert_1" -> "3 Gather_1" [label="[1, 192, 1]", style=dashed]; +"2 Gather_4/fq_input_0" -> "4 Gather_4" [label="[1, 192, 1]", style=solid]; +"3 Gather_1" -> "5 Gather_2" [label="[192, 1]", style=dashed]; +"4 Gather_4" -> "6 MatMul_2" [label="[1, 192]", style=solid]; +"5 Gather_2" -> "7 Gather_3" [label="[192, 1, 160]", style=solid]; +"6 MatMul_2" -> "8 MatMul_2/fq_output_0" [label="[1, 160]", style=solid]; +"7 Gather_3" -> "9 MatMul_1" [label="[1, 160]", style=solid]; +"8 MatMul_2/fq_output_0" -> "10 Add_1" [label="[1, 160]", style=solid]; +"9 MatMul_1" -> "11 MatMul_1/fq_output_0" [label="[1, 160]", style=solid]; +"10 Add_1" -> "12 Result" [label="[1, 160]", style=solid]; +"11 MatMul_1/fq_output_0" -> "10 Add_1" [label="[1, 160]", style=solid]; +"13 Constant_7164" -> "8 MatMul_2/fq_output_0" [label="[]", style=solid]; +"14 Constant_7163" -> "8 MatMul_2/fq_output_0" [label="[]", style=solid]; +"15 Constant_7162" -> "8 MatMul_2/fq_output_0" [label="[]", style=solid]; +"16 Constant_7161" -> "8 MatMul_2/fq_output_0" [label="[]", style=solid]; +"17 MatMul_2/fq_weights_1" -> "6 MatMul_2" [label="[160, 192]", style=solid]; +"18 Constant_7169" -> "17 MatMul_2/fq_weights_1" [label="[160, 1]", style=solid]; +"19 Constant_7168" -> "17 MatMul_2/fq_weights_1" [label="[160, 1]", style=solid]; +"20 Constant_7167" -> "17 MatMul_2/fq_weights_1" [label="[160, 1]", style=solid]; +"21 Constant_7166" -> "17 MatMul_2/fq_weights_1" [label="[160, 1]", style=solid]; +"22 Constant_4701" -> "17 MatMul_2/fq_weights_1" [label="[160, 192]", style=solid]; +"23 Constant_4699" -> "4 Gather_4" [label="[]", style=dashed]; +"24 Constant_4698" -> "4 Gather_4" [label="[]", style=dashed]; +"25 Constant_7159" -> "2 Gather_4/fq_input_0" [label="[]", style=solid]; +"26 Constant_7158" -> "2 Gather_4/fq_input_0" [label="[]", style=solid]; +"27 Constant_7157" -> "2 
Gather_4/fq_input_0" [label="[]", style=solid]; +"28 Constant_7156" -> "2 Gather_4/fq_input_0" [label="[]", style=solid]; +"29 Constant_7179" -> "11 MatMul_1/fq_output_0" [label="[]", style=solid]; +"30 Constant_7178" -> "11 MatMul_1/fq_output_0" [label="[]", style=solid]; +"31 Constant_7177" -> "11 MatMul_1/fq_output_0" [label="[]", style=solid]; +"32 Constant_7176" -> "11 MatMul_1/fq_output_0" [label="[]", style=solid]; +"33 MatMul_1/fq_weights_1" -> "9 MatMul_1" [label="[160, 160]", style=solid]; +"34 Constant_7184" -> "33 MatMul_1/fq_weights_1" [label="[160, 1]", style=solid]; +"35 Constant_7183" -> "33 MatMul_1/fq_weights_1" [label="[160, 1]", style=solid]; +"36 Constant_7182" -> "33 MatMul_1/fq_weights_1" [label="[160, 1]", style=solid]; +"37 Constant_7181" -> "33 MatMul_1/fq_weights_1" [label="[160, 1]", style=solid]; +"38 Constant_4696" -> "33 MatMul_1/fq_weights_1" [label="[160, 160]", style=solid]; +"39 Constant_4694" -> "7 Gather_3" [label="[]", style=dashed]; +"40 Constant_4693" -> "7 Gather_3" [label="[]", style=dashed]; +"41 Constant_4691" -> "5 Gather_2" [label="[]", style=dashed]; +"42 Constant_4688" -> "3 Gather_1" [label="[]", style=dashed]; +"43 Constant_4687" -> "3 Gather_1" [label="[]", style=dashed]; +"44 Gather_2/fq_weights_0" -> "5 Gather_2" [label="[369, 160]", style=solid]; +"45 Constant_7174" -> "44 Gather_2/fq_weights_0" [label="[]", style=solid]; +"46 Constant_7173" -> "44 Gather_2/fq_weights_0" [label="[]", style=solid]; +"47 Constant_7172" -> "44 Gather_2/fq_weights_0" [label="[]", style=solid]; +"48 Constant_7171" -> "44 Gather_2/fq_weights_0" [label="[]", style=solid]; +"49 Constant_4690" -> "44 Gather_2/fq_weights_0" [label="[369, 160]", style=solid]; } diff --git a/tests/openvino/native/data/reference_graphs/quantized/UnifiedEmbeddingModel.dot b/tests/openvino/native/data/reference_graphs/quantized/UnifiedEmbeddingModel.dot new file mode 100644 index 00000000000..24cd2d4545e --- /dev/null +++ b/tests/openvino/native/data/reference_graphs/quantized/UnifiedEmbeddingModel.dot @@ -0,0 +1,78 @@ +strict digraph { +"0 Input" [id=0, type=Parameter]; +"1 Convert_1" [id=1, type=Convert]; +"2 MatMul_1/fq_input_0" [id=2, type=FakeQuantize]; +"3 Gather_1" [id=3, type=Gather]; +"4 MatMul_1" [id=4, type=MatMul]; +"5 Concat_12" [id=5, type=Concat]; +"6 MatMul_1/fq_output_0" [id=6, type=FakeQuantize]; +"7 MatMul_2" [id=7, type=MatMul]; +"8 Reshape_1" [id=8, type=Reshape]; +"9 Result" [id=9, type=Result]; +"10 MatMul_2/fq_weights_1" [id=10, type=FakeQuantize]; +"11 Constant_2323" [id=11, type=Constant]; +"12 Constant_2322" [id=12, type=Constant]; +"13 Constant_2321" [id=13, type=Constant]; +"14 Constant_2320" [id=14, type=Constant]; +"15 matmul_2_data" [id=15, type=Constant]; +"16 Constant_8" [id=16, type=Constant]; +"17 Constant_2303" [id=17, type=Constant]; +"18 Constant_2302" [id=18, type=Constant]; +"19 Constant_2301" [id=19, type=Constant]; +"20 Constant_2300" [id=20, type=Constant]; +"21 MatMul_1/fq_weights_1" [id=21, type=FakeQuantize]; +"22 Constant_2318" [id=22, type=Constant]; +"23 Constant_2317" [id=23, type=Constant]; +"24 Constant_2316" [id=24, type=Constant]; +"25 Constant_2315" [id=25, type=Constant]; +"26 matmul_1_data" [id=26, type=Constant]; +"27 Constant_2313" [id=27, type=Constant]; +"28 Constant_2312" [id=28, type=Constant]; +"29 Constant_2311" [id=29, type=Constant]; +"30 Constant_2310" [id=30, type=Constant]; +"31 Constant_4" [id=31, type=Constant]; +"32 Gather_1/fq_weights_0" [id=32, type=FakeQuantize]; +"33 Constant_2308" [id=33, type=Constant]; 
+"34 Constant_2307" [id=34, type=Constant]; +"35 Constant_2306" [id=35, type=Constant]; +"36 Constant_2305" [id=36, type=Constant]; +"37 gather_1_data" [id=37, type=Constant]; +"0 Input" -> "1 Convert_1" [label="[1, 3]", style=solid]; +"0 Input" -> "2 MatMul_1/fq_input_0" [label="[1, 3]", style=solid]; +"1 Convert_1" -> "3 Gather_1" [label="[1, 3]", style=dashed]; +"2 MatMul_1/fq_input_0" -> "4 MatMul_1" [label="[1, 3]", style=solid]; +"3 Gather_1" -> "5 Concat_12" [label="[1, 3, 5]", style=solid]; +"4 MatMul_1" -> "6 MatMul_1/fq_output_0" [label="[3, 1, 5]", style=solid]; +"5 Concat_12" -> "7 MatMul_2" [label="[1, 6, 5]", style=solid]; +"6 MatMul_1/fq_output_0" -> "8 Reshape_1" [label="[3, 1, 5]", style=solid]; +"7 MatMul_2" -> "9 Result" [label="[1, 6, 1]", style=solid]; +"8 Reshape_1" -> "5 Concat_12" [label="[1, 3, 5]", style=solid]; +"10 MatMul_2/fq_weights_1" -> "7 MatMul_2" [label="[1, 5]", style=solid]; +"11 Constant_2323" -> "10 MatMul_2/fq_weights_1" [label="[1, 1]", style=solid]; +"12 Constant_2322" -> "10 MatMul_2/fq_weights_1" [label="[1, 1]", style=solid]; +"13 Constant_2321" -> "10 MatMul_2/fq_weights_1" [label="[1, 1]", style=solid]; +"14 Constant_2320" -> "10 MatMul_2/fq_weights_1" [label="[1, 1]", style=solid]; +"15 matmul_2_data" -> "10 MatMul_2/fq_weights_1" [label="[1, 5]", style=solid]; +"16 Constant_8" -> "8 Reshape_1" [label="[3]", style=dashed]; +"17 Constant_2303" -> "6 MatMul_1/fq_output_0" [label="[]", style=solid]; +"18 Constant_2302" -> "6 MatMul_1/fq_output_0" [label="[]", style=solid]; +"19 Constant_2301" -> "6 MatMul_1/fq_output_0" [label="[]", style=solid]; +"20 Constant_2300" -> "6 MatMul_1/fq_output_0" [label="[]", style=solid]; +"21 MatMul_1/fq_weights_1" -> "4 MatMul_1" [label="[3, 3, 5]", style=solid]; +"22 Constant_2318" -> "21 MatMul_1/fq_weights_1" [label="[3, 1, 5]", style=solid]; +"23 Constant_2317" -> "21 MatMul_1/fq_weights_1" [label="[3, 1, 5]", style=solid]; +"24 Constant_2316" -> "21 MatMul_1/fq_weights_1" [label="[3, 1, 5]", style=solid]; +"25 Constant_2315" -> "21 MatMul_1/fq_weights_1" [label="[3, 1, 5]", style=solid]; +"26 matmul_1_data" -> "21 MatMul_1/fq_weights_1" [label="[3, 3, 5]", style=solid]; +"27 Constant_2313" -> "2 MatMul_1/fq_input_0" [label="[]", style=solid]; +"28 Constant_2312" -> "2 MatMul_1/fq_input_0" [label="[]", style=solid]; +"29 Constant_2311" -> "2 MatMul_1/fq_input_0" [label="[]", style=solid]; +"30 Constant_2310" -> "2 MatMul_1/fq_input_0" [label="[]", style=solid]; +"31 Constant_4" -> "3 Gather_1" [label="[]", style=dashed]; +"32 Gather_1/fq_weights_0" -> "3 Gather_1" [label="[4, 5]", style=solid]; +"33 Constant_2308" -> "32 Gather_1/fq_weights_0" [label="[]", style=solid]; +"34 Constant_2307" -> "32 Gather_1/fq_weights_0" [label="[]", style=solid]; +"35 Constant_2306" -> "32 Gather_1/fq_weights_0" [label="[]", style=solid]; +"36 Constant_2305" -> "32 Gather_1/fq_weights_0" [label="[]", style=solid]; +"37 gather_1_data" -> "32 Gather_1/fq_weights_0" [label="[4, 5]", style=solid]; +} diff --git a/tests/openvino/native/data/reference_graphs/quantized/WeightsModel.dot b/tests/openvino/native/data/reference_graphs/quantized/WeightsModel.dot index 0abe95025aa..c61a2ce4397 100644 --- a/tests/openvino/native/data/reference_graphs/quantized/WeightsModel.dot +++ b/tests/openvino/native/data/reference_graphs/quantized/WeightsModel.dot @@ -9,57 +9,55 @@ strict digraph { "7 MatMul_1/fq_output_0" [id=7, type=FakeQuantize]; "8 MatMul" [id=8, type=MatMul]; "9 MatMul_0" [id=9, type=MatMul]; -"10 Add_19" [id=10, type=Add]; +"10 
Add_15" [id=10, type=Add]; "11 MatMul_0/fq_output_0" [id=11, type=FakeQuantize]; "12 Result" [id=12, type=Result]; -"13 Constant_2267" [id=13, type=Constant]; -"14 Constant_2266" [id=14, type=Constant]; -"15 Constant_2265" [id=15, type=Constant]; -"16 Constant_2264" [id=16, type=Constant]; +"13 Constant_2959" [id=13, type=Constant]; +"14 Constant_2958" [id=14, type=Constant]; +"15 Constant_2957" [id=15, type=Constant]; +"16 Constant_2956" [id=16, type=Constant]; "17 MatMul_1/fq_weights_1" [id=17, type=FakeQuantize]; -"18 Constant_2287" [id=18, type=Constant]; -"19 Constant_2286" [id=19, type=Constant]; -"20 Constant_2285" [id=20, type=Constant]; -"21 Constant_2284" [id=21, type=Constant]; -"22 Constant_11" [id=22, type=Constant]; -"23 Constant_2252" [id=23, type=Constant]; -"24 Constant_2251" [id=24, type=Constant]; -"25 Constant_2250" [id=25, type=Constant]; -"26 Constant_2249" [id=26, type=Constant]; -"27 Constant_4" [id=27, type=Constant]; +"18 Constant_2964" [id=18, type=Constant]; +"19 Constant_2963" [id=19, type=Constant]; +"20 Constant_2962" [id=20, type=Constant]; +"21 Constant_2961" [id=21, type=Constant]; +"22 weights_1" [id=22, type=Constant]; +"23 Constant_2949" [id=23, type=Constant]; +"24 Constant_2948" [id=24, type=Constant]; +"25 Constant_2947" [id=25, type=Constant]; +"26 Constant_2946" [id=26, type=Constant]; +"27 Constant_5" [id=27, type=Constant]; "28 Conv_backprop/fq_weights_1" [id=28, type=FakeQuantize]; -"29 Constant_2277" [id=29, type=Constant]; -"30 Constant_2276" [id=30, type=Constant]; -"31 Constant_2275" [id=31, type=Constant]; -"32 Constant_2274" [id=32, type=Constant]; -"33 Constant_3" [id=33, type=Constant]; -"34 Constant_2247" [id=34, type=Constant]; -"35 Constant_2246" [id=35, type=Constant]; -"36 Constant_2245" [id=36, type=Constant]; -"37 Constant_2244" [id=37, type=Constant]; +"29 Constant_2954" [id=29, type=Constant]; +"30 Constant_2953" [id=30, type=Constant]; +"31 Constant_2952" [id=31, type=Constant]; +"32 Constant_2951" [id=32, type=Constant]; +"33 Constant_4" [id=33, type=Constant]; +"34 Constant_2939" [id=34, type=Constant]; +"35 Constant_2938" [id=35, type=Constant]; +"36 Constant_2937" [id=36, type=Constant]; +"37 Constant_2936" [id=37, type=Constant]; "38 Conv/fq_weights_1" [id=38, type=FakeQuantize]; -"39 Constant_2272" [id=39, type=Constant]; -"40 Constant_2271" [id=40, type=Constant]; -"41 Constant_2270" [id=41, type=Constant]; -"42 Constant_2269" [id=42, type=Constant]; -"43 Constant_1" [id=43, type=Constant]; -"44 Constant_2257" [id=44, type=Constant]; -"45 Constant_2256" [id=45, type=Constant]; -"46 Constant_2255" [id=46, type=Constant]; -"47 Constant_2254" [id=47, type=Constant]; -"48 Constant_2262" [id=48, type=Constant]; -"49 Constant_2261" [id=49, type=Constant]; -"50 Constant_2260" [id=50, type=Constant]; -"51 Constant_2259" [id=51, type=Constant]; +"39 Constant_2944" [id=39, type=Constant]; +"40 Constant_2943" [id=40, type=Constant]; +"41 Constant_2942" [id=41, type=Constant]; +"42 Constant_2941" [id=42, type=Constant]; +"43 Constant_2" [id=43, type=Constant]; +"44 Constant_2934" [id=44, type=Constant]; +"45 Constant_2933" [id=45, type=Constant]; +"46 Constant_2932" [id=46, type=Constant]; +"47 Constant_2931" [id=47, type=Constant]; +"48 Constant_2969" [id=48, type=Constant]; +"49 Constant_2968" [id=49, type=Constant]; +"50 Constant_2967" [id=50, type=Constant]; +"51 Constant_2966" [id=51, type=Constant]; "52 MatMul_0/fq_weights_0" [id=52, type=FakeQuantize]; -"53 Constant_2282" [id=53, type=Constant]; -"54 Constant_2281" [id=54, 
type=Constant]; -"55 Constant_2280" [id=55, type=Constant]; -"56 Constant_2279" [id=56, type=Constant]; -"57 Constant_13" [id=57, type=Constant]; +"53 Constant_2974" [id=53, type=Constant]; +"54 Constant_2973" [id=54, type=Constant]; +"55 Constant_2972" [id=55, type=Constant]; +"56 Constant_2971" [id=56, type=Constant]; +"57 weights_0" [id=57, type=Constant]; "58 MatMul_const" [id=58, type=MatMul]; -"59 Constant_17" [id=59, type=Constant]; -"60 Constant_16" [id=60, type=Constant]; "0 Input_1" -> "1 Input_1/fq_output_0" [label="[1, 3, 5, 5]", style=solid]; "1 Input_1/fq_output_0" -> "2 Conv" [label="[1, 3, 5, 5]", style=solid]; "2 Conv" -> "3 Conv/fq_output_0" [label="[1, 3, 5, 5]", style=solid]; @@ -69,56 +67,56 @@ strict digraph { "6 MatMul_1" -> "7 MatMul_1/fq_output_0" [label="[1, 3, 1, 4]", style=solid]; "7 MatMul_1/fq_output_0" -> "8 MatMul" [label="[1, 3, 1, 4]", style=solid]; "7 MatMul_1/fq_output_0" -> "9 MatMul_0" [label="[1, 3, 1, 4]", style=solid]; -"8 MatMul" -> "10 Add_19" [label="[1, 3, 1, 1]", style=solid]; +"8 MatMul" -> "10 Add_15" [label="[1, 3, 1, 1]", style=solid]; "9 MatMul_0" -> "11 MatMul_0/fq_output_0" [label="[1, 3, 1, 4]", style=solid]; -"10 Add_19" -> "12 Result" [label="[1, 3, 4, 1]", style=solid]; +"10 Add_15" -> "12 Result" [label="[1, 3, 4, 1]", style=solid]; "11 MatMul_0/fq_output_0" -> "8 MatMul" [label="[1, 3, 1, 4]", style=solid]; -"13 Constant_2267" -> "7 MatMul_1/fq_output_0" [label="[]", style=solid]; -"14 Constant_2266" -> "7 MatMul_1/fq_output_0" [label="[]", style=solid]; -"15 Constant_2265" -> "7 MatMul_1/fq_output_0" [label="[]", style=solid]; -"16 Constant_2264" -> "7 MatMul_1/fq_output_0" [label="[]", style=solid]; -"17 MatMul_1/fq_weights_1" -> "6 MatMul_1" [label="[1, 3, 1, 4]", style=solid]; -"18 Constant_2287" -> "17 MatMul_1/fq_weights_1" [label="[1, 1, 1, 1]", style=solid]; -"19 Constant_2286" -> "17 MatMul_1/fq_weights_1" [label="[1, 1, 1, 1]", style=solid]; -"20 Constant_2285" -> "17 MatMul_1/fq_weights_1" [label="[1, 1, 1, 1]", style=solid]; -"21 Constant_2284" -> "17 MatMul_1/fq_weights_1" [label="[1, 1, 1, 1]", style=solid]; -"22 Constant_11" -> "17 MatMul_1/fq_weights_1" [label="[1, 3, 1, 4]", style=solid]; -"23 Constant_2252" -> "5 Conv_backprop/fq_output_0" [label="[]", style=solid]; -"24 Constant_2251" -> "5 Conv_backprop/fq_output_0" [label="[]", style=solid]; -"25 Constant_2250" -> "5 Conv_backprop/fq_output_0" [label="[]", style=solid]; -"26 Constant_2249" -> "5 Conv_backprop/fq_output_0" [label="[]", style=solid]; -"27 Constant_4" -> "4 Conv_backprop" [label="[2]", style=dashed]; +"13 Constant_2959" -> "7 MatMul_1/fq_output_0" [label="[]", style=solid]; +"14 Constant_2958" -> "7 MatMul_1/fq_output_0" [label="[]", style=solid]; +"15 Constant_2957" -> "7 MatMul_1/fq_output_0" [label="[]", style=solid]; +"16 Constant_2956" -> "7 MatMul_1/fq_output_0" [label="[]", style=solid]; +"17 MatMul_1/fq_weights_1" -> "6 MatMul_1" [label="[1, 4]", style=solid]; +"18 Constant_2964" -> "17 MatMul_1/fq_weights_1" [label="[1, 4]", style=solid]; +"19 Constant_2963" -> "17 MatMul_1/fq_weights_1" [label="[1, 4]", style=solid]; +"20 Constant_2962" -> "17 MatMul_1/fq_weights_1" [label="[1, 4]", style=solid]; +"21 Constant_2961" -> "17 MatMul_1/fq_weights_1" [label="[1, 4]", style=solid]; +"22 weights_1" -> "17 MatMul_1/fq_weights_1" [label="[1, 4]", style=solid]; +"22 weights_1" -> "58 MatMul_const" [label="[1, 4]", style=solid]; +"23 Constant_2949" -> "5 Conv_backprop/fq_output_0" [label="[]", style=solid]; +"24 Constant_2948" -> "5 
Conv_backprop/fq_output_0" [label="[]", style=solid]; +"25 Constant_2947" -> "5 Conv_backprop/fq_output_0" [label="[]", style=solid]; +"26 Constant_2946" -> "5 Conv_backprop/fq_output_0" [label="[]", style=solid]; +"27 Constant_5" -> "4 Conv_backprop" [label="[2]", style=dashed]; "28 Conv_backprop/fq_weights_1" -> "4 Conv_backprop" [label="[3, 3, 1, 1]", style=solid]; -"29 Constant_2277" -> "28 Conv_backprop/fq_weights_1" [label="[1, 3, 1, 1]", style=solid]; -"30 Constant_2276" -> "28 Conv_backprop/fq_weights_1" [label="[1, 3, 1, 1]", style=solid]; -"31 Constant_2275" -> "28 Conv_backprop/fq_weights_1" [label="[1, 3, 1, 1]", style=solid]; -"32 Constant_2274" -> "28 Conv_backprop/fq_weights_1" [label="[1, 3, 1, 1]", style=solid]; -"33 Constant_3" -> "28 Conv_backprop/fq_weights_1" [label="[3, 3, 1, 1]", style=solid]; -"34 Constant_2247" -> "3 Conv/fq_output_0" [label="[]", style=solid]; -"35 Constant_2246" -> "3 Conv/fq_output_0" [label="[]", style=solid]; -"36 Constant_2245" -> "3 Conv/fq_output_0" [label="[]", style=solid]; -"37 Constant_2244" -> "3 Conv/fq_output_0" [label="[]", style=solid]; +"29 Constant_2954" -> "28 Conv_backprop/fq_weights_1" [label="[1, 3, 1, 1]", style=solid]; +"30 Constant_2953" -> "28 Conv_backprop/fq_weights_1" [label="[1, 3, 1, 1]", style=solid]; +"31 Constant_2952" -> "28 Conv_backprop/fq_weights_1" [label="[1, 3, 1, 1]", style=solid]; +"32 Constant_2951" -> "28 Conv_backprop/fq_weights_1" [label="[1, 3, 1, 1]", style=solid]; +"33 Constant_4" -> "28 Conv_backprop/fq_weights_1" [label="[3, 3, 1, 1]", style=solid]; +"34 Constant_2939" -> "3 Conv/fq_output_0" [label="[]", style=solid]; +"35 Constant_2938" -> "3 Conv/fq_output_0" [label="[]", style=solid]; +"36 Constant_2937" -> "3 Conv/fq_output_0" [label="[]", style=solid]; +"37 Constant_2936" -> "3 Conv/fq_output_0" [label="[]", style=solid]; "38 Conv/fq_weights_1" -> "2 Conv" [label="[3, 3, 1, 1]", style=solid]; -"39 Constant_2272" -> "38 Conv/fq_weights_1" [label="[3, 1, 1, 1]", style=solid]; -"40 Constant_2271" -> "38 Conv/fq_weights_1" [label="[3, 1, 1, 1]", style=solid]; -"41 Constant_2270" -> "38 Conv/fq_weights_1" [label="[3, 1, 1, 1]", style=solid]; -"42 Constant_2269" -> "38 Conv/fq_weights_1" [label="[3, 1, 1, 1]", style=solid]; -"43 Constant_1" -> "38 Conv/fq_weights_1" [label="[3, 3, 1, 1]", style=solid]; -"44 Constant_2257" -> "1 Input_1/fq_output_0" [label="[]", style=solid]; -"45 Constant_2256" -> "1 Input_1/fq_output_0" [label="[]", style=solid]; -"46 Constant_2255" -> "1 Input_1/fq_output_0" [label="[]", style=solid]; -"47 Constant_2254" -> "1 Input_1/fq_output_0" [label="[]", style=solid]; -"48 Constant_2262" -> "11 MatMul_0/fq_output_0" [label="[]", style=solid]; -"49 Constant_2261" -> "11 MatMul_0/fq_output_0" [label="[]", style=solid]; -"50 Constant_2260" -> "11 MatMul_0/fq_output_0" [label="[]", style=solid]; -"51 Constant_2259" -> "11 MatMul_0/fq_output_0" [label="[]", style=solid]; -"52 MatMul_0/fq_weights_0" -> "9 MatMul_0" [label="[1, 3, 1, 1]", style=solid]; -"53 Constant_2282" -> "52 MatMul_0/fq_weights_0" [label="[1, 1, 1, 1]", style=solid]; -"54 Constant_2281" -> "52 MatMul_0/fq_weights_0" [label="[1, 1, 1, 1]", style=solid]; -"55 Constant_2280" -> "52 MatMul_0/fq_weights_0" [label="[1, 1, 1, 1]", style=solid]; -"56 Constant_2279" -> "52 MatMul_0/fq_weights_0" [label="[1, 1, 1, 1]", style=solid]; -"57 Constant_13" -> "52 MatMul_0/fq_weights_0" [label="[1, 3, 1, 1]", style=solid]; -"58 MatMul_const" -> "10 Add_19" [label="[1, 3, 4, 1]", style=solid]; -"59 Constant_17" -> "58 
MatMul_const" [label="[1, 3, 1, 1]", style=solid]; -"60 Constant_16" -> "58 MatMul_const" [label="[1, 3, 1, 4]", style=solid]; +"39 Constant_2944" -> "38 Conv/fq_weights_1" [label="[3, 1, 1, 1]", style=solid]; +"40 Constant_2943" -> "38 Conv/fq_weights_1" [label="[3, 1, 1, 1]", style=solid]; +"41 Constant_2942" -> "38 Conv/fq_weights_1" [label="[3, 1, 1, 1]", style=solid]; +"42 Constant_2941" -> "38 Conv/fq_weights_1" [label="[3, 1, 1, 1]", style=solid]; +"43 Constant_2" -> "38 Conv/fq_weights_1" [label="[3, 3, 1, 1]", style=solid]; +"44 Constant_2934" -> "1 Input_1/fq_output_0" [label="[]", style=solid]; +"45 Constant_2933" -> "1 Input_1/fq_output_0" [label="[]", style=solid]; +"46 Constant_2932" -> "1 Input_1/fq_output_0" [label="[]", style=solid]; +"47 Constant_2931" -> "1 Input_1/fq_output_0" [label="[]", style=solid]; +"48 Constant_2969" -> "11 MatMul_0/fq_output_0" [label="[]", style=solid]; +"49 Constant_2968" -> "11 MatMul_0/fq_output_0" [label="[]", style=solid]; +"50 Constant_2967" -> "11 MatMul_0/fq_output_0" [label="[]", style=solid]; +"51 Constant_2966" -> "11 MatMul_0/fq_output_0" [label="[]", style=solid]; +"52 MatMul_0/fq_weights_0" -> "9 MatMul_0" [label="[1, 1]", style=solid]; +"53 Constant_2974" -> "52 MatMul_0/fq_weights_0" [label="[1, 1]", style=solid]; +"54 Constant_2973" -> "52 MatMul_0/fq_weights_0" [label="[1, 1]", style=solid]; +"55 Constant_2972" -> "52 MatMul_0/fq_weights_0" [label="[1, 1]", style=solid]; +"56 Constant_2971" -> "52 MatMul_0/fq_weights_0" [label="[1, 1]", style=solid]; +"57 weights_0" -> "52 MatMul_0/fq_weights_0" [label="[1, 1]", style=solid]; +"57 weights_0" -> "58 MatMul_const" [label="[1, 1]", style=solid]; +"58 MatMul_const" -> "10 Add_15" [label="[4, 1]", style=solid]; } diff --git a/tests/openvino/native/data/reference_graphs/quantized/mobilenet-v2-pytorch.dot b/tests/openvino/native/data/reference_graphs/quantized/mobilenet-v2-pytorch_performance.dot similarity index 55% rename from tests/openvino/native/data/reference_graphs/quantized/mobilenet-v2-pytorch.dot rename to tests/openvino/native/data/reference_graphs/quantized/mobilenet-v2-pytorch_performance.dot index cc7aaf0098a..bff8b3058aa 100644 --- a/tests/openvino/native/data/reference_graphs/quantized/mobilenet-v2-pytorch.dot +++ b/tests/openvino/native/data/reference_graphs/quantized/mobilenet-v2-pytorch_performance.dot @@ -222,633 +222,633 @@ strict digraph { "220 prob/sink_port_0" [id=220, type=Result]; "221 Constant_5169" [id=221, type=Constant]; "222 /classifier/classifier.1/Gemm/WithoutBiases/fq_weights_1" [id=222, type=FakeQuantize]; -"223 Constant_114259" [id=223, type=Constant]; -"224 Constant_114258" [id=224, type=Constant]; -"225 Constant_114257" [id=225, type=Constant]; -"226 Constant_114256" [id=226, type=Constant]; +"223 Constant_41658" [id=223, type=Constant]; +"224 Constant_41657" [id=224, type=Constant]; +"225 Constant_41656" [id=225, type=Constant]; +"226 Constant_41655" [id=226, type=Constant]; "227 classifier.1.weight" [id=227, type=Constant]; "228 Constant_1864" [id=228, type=Constant]; -"229 Constant_114254" [id=229, type=Constant]; -"230 Constant_114253" [id=230, type=Constant]; -"231 Constant_114252" [id=231, type=Constant]; -"232 Constant_114251" [id=232, type=Constant]; +"229 Constant_41653" [id=229, type=Constant]; +"230 Constant_41652" [id=230, type=Constant]; +"231 Constant_41651" [id=231, type=Constant]; +"232 Constant_41650" [id=232, type=Constant]; "233 Range_1860" [id=233, type=Constant]; -"234 Constant_114249" [id=234, type=Constant]; -"235 
Constant_114248" [id=235, type=Constant]; -"236 Constant_114247" [id=236, type=Constant]; -"237 Constant_114246" [id=237, type=Constant]; +"234 Constant_41648" [id=234, type=Constant]; +"235 Constant_41647" [id=235, type=Constant]; +"236 Constant_41646" [id=236, type=Constant]; +"237 Constant_41645" [id=237, type=Constant]; "238 Reshape_1846" [id=238, type=Constant]; "239 /features/features.18/features.18.0/Conv/WithoutBiases/fq_weights_1" [id=239, type=FakeQuantize]; -"240 Constant_114244" [id=240, type=Constant]; -"241 Constant_114243" [id=241, type=Constant]; -"242 Constant_114242" [id=242, type=Constant]; -"243 Constant_114241" [id=243, type=Constant]; +"240 Constant_41643" [id=240, type=Constant]; +"241 Constant_41642" [id=241, type=Constant]; +"242 Constant_41641" [id=242, type=Constant]; +"243 Constant_41640" [id=243, type=Constant]; "244 onnx^^Conv_691" [id=244, label="244 onnx::Conv_691", type=Constant]; -"245 Constant_114239" [id=245, type=Constant]; -"246 Constant_114238" [id=246, type=Constant]; -"247 Constant_114237" [id=247, type=Constant]; -"248 Constant_114236" [id=248, type=Constant]; +"245 Constant_41638" [id=245, type=Constant]; +"246 Constant_41637" [id=246, type=Constant]; +"247 Constant_41636" [id=247, type=Constant]; +"248 Constant_41635" [id=248, type=Constant]; "249 Reshape_1831" [id=249, type=Constant]; "250 /features/features.17/conv/conv.2/Conv/WithoutBiases/fq_weights_1" [id=250, type=FakeQuantize]; -"251 Constant_114234" [id=251, type=Constant]; -"252 Constant_114233" [id=252, type=Constant]; -"253 Constant_114232" [id=253, type=Constant]; -"254 Constant_114231" [id=254, type=Constant]; +"251 Constant_41633" [id=251, type=Constant]; +"252 Constant_41632" [id=252, type=Constant]; +"253 Constant_41631" [id=253, type=Constant]; +"254 Constant_41630" [id=254, type=Constant]; "255 onnx^^Conv_688" [id=255, label="255 onnx::Conv_688", type=Constant]; -"256 Constant_114229" [id=256, type=Constant]; -"257 Constant_114228" [id=257, type=Constant]; -"258 Constant_114227" [id=258, type=Constant]; -"259 Constant_114226" [id=259, type=Constant]; +"256 Constant_41628" [id=256, type=Constant]; +"257 Constant_41627" [id=257, type=Constant]; +"258 Constant_41626" [id=258, type=Constant]; +"259 Constant_41625" [id=259, type=Constant]; "260 Reshape_1812" [id=260, type=Constant]; "261 /features/features.17/conv/conv.1/conv.1.0/Conv/WithoutBiases/fq_weights_1" [id=261, type=FakeQuantize]; -"262 Constant_114224" [id=262, type=Constant]; -"263 Constant_114223" [id=263, type=Constant]; -"264 Constant_114222" [id=264, type=Constant]; -"265 Constant_114221" [id=265, type=Constant]; +"262 Constant_41623" [id=262, type=Constant]; +"263 Constant_41622" [id=263, type=Constant]; +"264 Constant_41621" [id=264, type=Constant]; +"265 Constant_41620" [id=265, type=Constant]; "266 Reshape_1760" [id=266, type=Constant]; -"267 Constant_114219" [id=267, type=Constant]; -"268 Constant_114218" [id=268, type=Constant]; -"269 Constant_114217" [id=269, type=Constant]; -"270 Constant_114216" [id=270, type=Constant]; +"267 Constant_41618" [id=267, type=Constant]; +"268 Constant_41617" [id=268, type=Constant]; +"269 Constant_41616" [id=269, type=Constant]; +"270 Constant_41615" [id=270, type=Constant]; "271 Reshape_1745" [id=271, type=Constant]; "272 /features/features.17/conv/conv.0/conv.0.0/Conv/WithoutBiases/fq_weights_1" [id=272, type=FakeQuantize]; -"273 Constant_114214" [id=273, type=Constant]; -"274 Constant_114213" [id=274, type=Constant]; -"275 Constant_114212" [id=275, type=Constant]; -"276 
Constant_114211" [id=276, type=Constant]; +"273 Constant_41613" [id=273, type=Constant]; +"274 Constant_41612" [id=274, type=Constant]; +"275 Constant_41611" [id=275, type=Constant]; +"276 Constant_41610" [id=276, type=Constant]; "277 onnx^^Conv_682" [id=277, label="277 onnx::Conv_682", type=Constant]; -"278 Constant_114209" [id=278, type=Constant]; -"279 Constant_114208" [id=279, type=Constant]; -"280 Constant_114207" [id=280, type=Constant]; -"281 Constant_114206" [id=281, type=Constant]; -"282 Constant_114204" [id=282, type=Constant]; -"283 Constant_114203" [id=283, type=Constant]; -"284 Constant_114202" [id=284, type=Constant]; -"285 Constant_114201" [id=285, type=Constant]; +"278 Constant_41608" [id=278, type=Constant]; +"279 Constant_41607" [id=279, type=Constant]; +"280 Constant_41606" [id=280, type=Constant]; +"281 Constant_41605" [id=281, type=Constant]; +"282 Constant_41603" [id=282, type=Constant]; +"283 Constant_41602" [id=283, type=Constant]; +"284 Constant_41601" [id=284, type=Constant]; +"285 Constant_41600" [id=285, type=Constant]; "286 Reshape_1729" [id=286, type=Constant]; "287 /features/features.16/conv/conv.2/Conv/WithoutBiases/fq_weights_1" [id=287, type=FakeQuantize]; -"288 Constant_114199" [id=288, type=Constant]; -"289 Constant_114198" [id=289, type=Constant]; -"290 Constant_114197" [id=290, type=Constant]; -"291 Constant_114196" [id=291, type=Constant]; +"288 Constant_41598" [id=288, type=Constant]; +"289 Constant_41597" [id=289, type=Constant]; +"290 Constant_41596" [id=290, type=Constant]; +"291 Constant_41595" [id=291, type=Constant]; "292 onnx^^Conv_679" [id=292, label="292 onnx::Conv_679", type=Constant]; -"293 Constant_114194" [id=293, type=Constant]; -"294 Constant_114193" [id=294, type=Constant]; -"295 Constant_114192" [id=295, type=Constant]; -"296 Constant_114191" [id=296, type=Constant]; +"293 Constant_41593" [id=293, type=Constant]; +"294 Constant_41592" [id=294, type=Constant]; +"295 Constant_41591" [id=295, type=Constant]; +"296 Constant_41590" [id=296, type=Constant]; "297 Reshape_1710" [id=297, type=Constant]; "298 /features/features.16/conv/conv.1/conv.1.0/Conv/WithoutBiases/fq_weights_1" [id=298, type=FakeQuantize]; -"299 Constant_114189" [id=299, type=Constant]; -"300 Constant_114188" [id=300, type=Constant]; -"301 Constant_114187" [id=301, type=Constant]; -"302 Constant_114186" [id=302, type=Constant]; +"299 Constant_41588" [id=299, type=Constant]; +"300 Constant_41587" [id=300, type=Constant]; +"301 Constant_41586" [id=301, type=Constant]; +"302 Constant_41585" [id=302, type=Constant]; "303 Reshape_1658" [id=303, type=Constant]; -"304 Constant_114184" [id=304, type=Constant]; -"305 Constant_114183" [id=305, type=Constant]; -"306 Constant_114182" [id=306, type=Constant]; -"307 Constant_114181" [id=307, type=Constant]; +"304 Constant_41583" [id=304, type=Constant]; +"305 Constant_41582" [id=305, type=Constant]; +"306 Constant_41581" [id=306, type=Constant]; +"307 Constant_41580" [id=307, type=Constant]; "308 Reshape_1643" [id=308, type=Constant]; "309 /features/features.16/conv/conv.0/conv.0.0/Conv/WithoutBiases/fq_weights_1" [id=309, type=FakeQuantize]; -"310 Constant_114179" [id=310, type=Constant]; -"311 Constant_114178" [id=311, type=Constant]; -"312 Constant_114177" [id=312, type=Constant]; -"313 Constant_114176" [id=313, type=Constant]; +"310 Constant_41578" [id=310, type=Constant]; +"311 Constant_41577" [id=311, type=Constant]; +"312 Constant_41576" [id=312, type=Constant]; +"313 Constant_41575" [id=313, type=Constant]; "314 
onnx^^Conv_673" [id=314, label="314 onnx::Conv_673", type=Constant]; -"315 Constant_114174" [id=315, type=Constant]; -"316 Constant_114173" [id=316, type=Constant]; -"317 Constant_114172" [id=317, type=Constant]; -"318 Constant_114171" [id=318, type=Constant]; -"319 Constant_114169" [id=319, type=Constant]; -"320 Constant_114168" [id=320, type=Constant]; -"321 Constant_114167" [id=321, type=Constant]; -"322 Constant_114166" [id=322, type=Constant]; +"315 Constant_41573" [id=315, type=Constant]; +"316 Constant_41572" [id=316, type=Constant]; +"317 Constant_41571" [id=317, type=Constant]; +"318 Constant_41570" [id=318, type=Constant]; +"319 Constant_41568" [id=319, type=Constant]; +"320 Constant_41567" [id=320, type=Constant]; +"321 Constant_41566" [id=321, type=Constant]; +"322 Constant_41565" [id=322, type=Constant]; "323 Reshape_1627" [id=323, type=Constant]; "324 /features/features.15/conv/conv.2/Conv/WithoutBiases/fq_weights_1" [id=324, type=FakeQuantize]; -"325 Constant_114164" [id=325, type=Constant]; -"326 Constant_114163" [id=326, type=Constant]; -"327 Constant_114162" [id=327, type=Constant]; -"328 Constant_114161" [id=328, type=Constant]; +"325 Constant_41563" [id=325, type=Constant]; +"326 Constant_41562" [id=326, type=Constant]; +"327 Constant_41561" [id=327, type=Constant]; +"328 Constant_41560" [id=328, type=Constant]; "329 onnx^^Conv_670" [id=329, label="329 onnx::Conv_670", type=Constant]; -"330 Constant_114159" [id=330, type=Constant]; -"331 Constant_114158" [id=331, type=Constant]; -"332 Constant_114157" [id=332, type=Constant]; -"333 Constant_114156" [id=333, type=Constant]; +"330 Constant_41558" [id=330, type=Constant]; +"331 Constant_41557" [id=331, type=Constant]; +"332 Constant_41556" [id=332, type=Constant]; +"333 Constant_41555" [id=333, type=Constant]; "334 Reshape_1608" [id=334, type=Constant]; "335 /features/features.15/conv/conv.1/conv.1.0/Conv/WithoutBiases/fq_weights_1" [id=335, type=FakeQuantize]; -"336 Constant_114154" [id=336, type=Constant]; -"337 Constant_114153" [id=337, type=Constant]; -"338 Constant_114152" [id=338, type=Constant]; -"339 Constant_114151" [id=339, type=Constant]; +"336 Constant_41553" [id=336, type=Constant]; +"337 Constant_41552" [id=337, type=Constant]; +"338 Constant_41551" [id=338, type=Constant]; +"339 Constant_41550" [id=339, type=Constant]; "340 Reshape_1556" [id=340, type=Constant]; -"341 Constant_114149" [id=341, type=Constant]; -"342 Constant_114148" [id=342, type=Constant]; -"343 Constant_114147" [id=343, type=Constant]; -"344 Constant_114146" [id=344, type=Constant]; +"341 Constant_41548" [id=341, type=Constant]; +"342 Constant_41547" [id=342, type=Constant]; +"343 Constant_41546" [id=343, type=Constant]; +"344 Constant_41545" [id=344, type=Constant]; "345 Reshape_1541" [id=345, type=Constant]; "346 /features/features.15/conv/conv.0/conv.0.0/Conv/WithoutBiases/fq_weights_1" [id=346, type=FakeQuantize]; -"347 Constant_114144" [id=347, type=Constant]; -"348 Constant_114143" [id=348, type=Constant]; -"349 Constant_114142" [id=349, type=Constant]; -"350 Constant_114141" [id=350, type=Constant]; +"347 Constant_41543" [id=347, type=Constant]; +"348 Constant_41542" [id=348, type=Constant]; +"349 Constant_41541" [id=349, type=Constant]; +"350 Constant_41540" [id=350, type=Constant]; "351 onnx^^Conv_664" [id=351, label="351 onnx::Conv_664", type=Constant]; -"352 Constant_114139" [id=352, type=Constant]; -"353 Constant_114138" [id=353, type=Constant]; -"354 Constant_114137" [id=354, type=Constant]; -"355 Constant_114136" [id=355, 
type=Constant]; +"352 Constant_41538" [id=352, type=Constant]; +"353 Constant_41537" [id=353, type=Constant]; +"354 Constant_41536" [id=354, type=Constant]; +"355 Constant_41535" [id=355, type=Constant]; "356 Reshape_1526" [id=356, type=Constant]; "357 /features/features.14/conv/conv.2/Conv/WithoutBiases/fq_weights_1" [id=357, type=FakeQuantize]; -"358 Constant_114134" [id=358, type=Constant]; -"359 Constant_114133" [id=359, type=Constant]; -"360 Constant_114132" [id=360, type=Constant]; -"361 Constant_114131" [id=361, type=Constant]; +"358 Constant_41533" [id=358, type=Constant]; +"359 Constant_41532" [id=359, type=Constant]; +"360 Constant_41531" [id=360, type=Constant]; +"361 Constant_41530" [id=361, type=Constant]; "362 onnx^^Conv_661" [id=362, label="362 onnx::Conv_661", type=Constant]; -"363 Constant_114129" [id=363, type=Constant]; -"364 Constant_114128" [id=364, type=Constant]; -"365 Constant_114127" [id=365, type=Constant]; -"366 Constant_114126" [id=366, type=Constant]; +"363 Constant_41528" [id=363, type=Constant]; +"364 Constant_41527" [id=364, type=Constant]; +"365 Constant_41526" [id=365, type=Constant]; +"366 Constant_41525" [id=366, type=Constant]; "367 Reshape_1507" [id=367, type=Constant]; "368 /features/features.14/conv/conv.1/conv.1.0/Conv/WithoutBiases/fq_weights_1" [id=368, type=FakeQuantize]; -"369 Constant_114124" [id=369, type=Constant]; -"370 Constant_114123" [id=370, type=Constant]; -"371 Constant_114122" [id=371, type=Constant]; -"372 Constant_114121" [id=372, type=Constant]; +"369 Constant_41523" [id=369, type=Constant]; +"370 Constant_41522" [id=370, type=Constant]; +"371 Constant_41521" [id=371, type=Constant]; +"372 Constant_41520" [id=372, type=Constant]; "373 Reshape_1455" [id=373, type=Constant]; -"374 Constant_114119" [id=374, type=Constant]; -"375 Constant_114118" [id=375, type=Constant]; -"376 Constant_114117" [id=376, type=Constant]; -"377 Constant_114116" [id=377, type=Constant]; +"374 Constant_41518" [id=374, type=Constant]; +"375 Constant_41517" [id=375, type=Constant]; +"376 Constant_41516" [id=376, type=Constant]; +"377 Constant_41515" [id=377, type=Constant]; "378 Reshape_1440" [id=378, type=Constant]; "379 /features/features.14/conv/conv.0/conv.0.0/Conv/WithoutBiases/fq_weights_1" [id=379, type=FakeQuantize]; -"380 Constant_114114" [id=380, type=Constant]; -"381 Constant_114113" [id=381, type=Constant]; -"382 Constant_114112" [id=382, type=Constant]; -"383 Constant_114111" [id=383, type=Constant]; +"380 Constant_41513" [id=380, type=Constant]; +"381 Constant_41512" [id=381, type=Constant]; +"382 Constant_41511" [id=382, type=Constant]; +"383 Constant_41510" [id=383, type=Constant]; "384 onnx^^Conv_655" [id=384, label="384 onnx::Conv_655", type=Constant]; -"385 Constant_114109" [id=385, type=Constant]; -"386 Constant_114108" [id=386, type=Constant]; -"387 Constant_114107" [id=387, type=Constant]; -"388 Constant_114106" [id=388, type=Constant]; -"389 Constant_114104" [id=389, type=Constant]; -"390 Constant_114103" [id=390, type=Constant]; -"391 Constant_114102" [id=391, type=Constant]; -"392 Constant_114101" [id=392, type=Constant]; +"385 Constant_41508" [id=385, type=Constant]; +"386 Constant_41507" [id=386, type=Constant]; +"387 Constant_41506" [id=387, type=Constant]; +"388 Constant_41505" [id=388, type=Constant]; +"389 Constant_41503" [id=389, type=Constant]; +"390 Constant_41502" [id=390, type=Constant]; +"391 Constant_41501" [id=391, type=Constant]; +"392 Constant_41500" [id=392, type=Constant]; "393 Reshape_1424" [id=393, type=Constant]; 
"394 /features/features.13/conv/conv.2/Conv/WithoutBiases/fq_weights_1" [id=394, type=FakeQuantize]; -"395 Constant_114099" [id=395, type=Constant]; -"396 Constant_114098" [id=396, type=Constant]; -"397 Constant_114097" [id=397, type=Constant]; -"398 Constant_114096" [id=398, type=Constant]; +"395 Constant_41498" [id=395, type=Constant]; +"396 Constant_41497" [id=396, type=Constant]; +"397 Constant_41496" [id=397, type=Constant]; +"398 Constant_41495" [id=398, type=Constant]; "399 onnx^^Conv_652" [id=399, label="399 onnx::Conv_652", type=Constant]; -"400 Constant_114094" [id=400, type=Constant]; -"401 Constant_114093" [id=401, type=Constant]; -"402 Constant_114092" [id=402, type=Constant]; -"403 Constant_114091" [id=403, type=Constant]; +"400 Constant_41493" [id=400, type=Constant]; +"401 Constant_41492" [id=401, type=Constant]; +"402 Constant_41491" [id=402, type=Constant]; +"403 Constant_41490" [id=403, type=Constant]; "404 Reshape_1405" [id=404, type=Constant]; "405 /features/features.13/conv/conv.1/conv.1.0/Conv/WithoutBiases/fq_weights_1" [id=405, type=FakeQuantize]; -"406 Constant_114089" [id=406, type=Constant]; -"407 Constant_114088" [id=407, type=Constant]; -"408 Constant_114087" [id=408, type=Constant]; -"409 Constant_114086" [id=409, type=Constant]; +"406 Constant_41488" [id=406, type=Constant]; +"407 Constant_41487" [id=407, type=Constant]; +"408 Constant_41486" [id=408, type=Constant]; +"409 Constant_41485" [id=409, type=Constant]; "410 Reshape_1353" [id=410, type=Constant]; -"411 Constant_114084" [id=411, type=Constant]; -"412 Constant_114083" [id=412, type=Constant]; -"413 Constant_114082" [id=413, type=Constant]; -"414 Constant_114081" [id=414, type=Constant]; +"411 Constant_41483" [id=411, type=Constant]; +"412 Constant_41482" [id=412, type=Constant]; +"413 Constant_41481" [id=413, type=Constant]; +"414 Constant_41480" [id=414, type=Constant]; "415 Reshape_1338" [id=415, type=Constant]; "416 /features/features.13/conv/conv.0/conv.0.0/Conv/WithoutBiases/fq_weights_1" [id=416, type=FakeQuantize]; -"417 Constant_114079" [id=417, type=Constant]; -"418 Constant_114078" [id=418, type=Constant]; -"419 Constant_114077" [id=419, type=Constant]; -"420 Constant_114076" [id=420, type=Constant]; +"417 Constant_41478" [id=417, type=Constant]; +"418 Constant_41477" [id=418, type=Constant]; +"419 Constant_41476" [id=419, type=Constant]; +"420 Constant_41475" [id=420, type=Constant]; "421 onnx^^Conv_646" [id=421, label="421 onnx::Conv_646", type=Constant]; -"422 Constant_114074" [id=422, type=Constant]; -"423 Constant_114073" [id=423, type=Constant]; -"424 Constant_114072" [id=424, type=Constant]; -"425 Constant_114071" [id=425, type=Constant]; -"426 Constant_114069" [id=426, type=Constant]; -"427 Constant_114068" [id=427, type=Constant]; -"428 Constant_114067" [id=428, type=Constant]; -"429 Constant_114066" [id=429, type=Constant]; +"422 Constant_41473" [id=422, type=Constant]; +"423 Constant_41472" [id=423, type=Constant]; +"424 Constant_41471" [id=424, type=Constant]; +"425 Constant_41470" [id=425, type=Constant]; +"426 Constant_41468" [id=426, type=Constant]; +"427 Constant_41467" [id=427, type=Constant]; +"428 Constant_41466" [id=428, type=Constant]; +"429 Constant_41465" [id=429, type=Constant]; "430 Reshape_1322" [id=430, type=Constant]; "431 /features/features.12/conv/conv.2/Conv/WithoutBiases/fq_weights_1" [id=431, type=FakeQuantize]; -"432 Constant_114064" [id=432, type=Constant]; -"433 Constant_114063" [id=433, type=Constant]; -"434 Constant_114062" [id=434, type=Constant]; 
-"435 Constant_114061" [id=435, type=Constant]; +"432 Constant_41463" [id=432, type=Constant]; +"433 Constant_41462" [id=433, type=Constant]; +"434 Constant_41461" [id=434, type=Constant]; +"435 Constant_41460" [id=435, type=Constant]; "436 onnx^^Conv_643" [id=436, label="436 onnx::Conv_643", type=Constant]; -"437 Constant_114059" [id=437, type=Constant]; -"438 Constant_114058" [id=438, type=Constant]; -"439 Constant_114057" [id=439, type=Constant]; -"440 Constant_114056" [id=440, type=Constant]; +"437 Constant_41458" [id=437, type=Constant]; +"438 Constant_41457" [id=438, type=Constant]; +"439 Constant_41456" [id=439, type=Constant]; +"440 Constant_41455" [id=440, type=Constant]; "441 Reshape_1303" [id=441, type=Constant]; "442 /features/features.12/conv/conv.1/conv.1.0/Conv/WithoutBiases/fq_weights_1" [id=442, type=FakeQuantize]; -"443 Constant_114054" [id=443, type=Constant]; -"444 Constant_114053" [id=444, type=Constant]; -"445 Constant_114052" [id=445, type=Constant]; -"446 Constant_114051" [id=446, type=Constant]; +"443 Constant_41453" [id=443, type=Constant]; +"444 Constant_41452" [id=444, type=Constant]; +"445 Constant_41451" [id=445, type=Constant]; +"446 Constant_41450" [id=446, type=Constant]; "447 Reshape_1251" [id=447, type=Constant]; -"448 Constant_114049" [id=448, type=Constant]; -"449 Constant_114048" [id=449, type=Constant]; -"450 Constant_114047" [id=450, type=Constant]; -"451 Constant_114046" [id=451, type=Constant]; +"448 Constant_41448" [id=448, type=Constant]; +"449 Constant_41447" [id=449, type=Constant]; +"450 Constant_41446" [id=450, type=Constant]; +"451 Constant_41445" [id=451, type=Constant]; "452 Reshape_1236" [id=452, type=Constant]; "453 /features/features.12/conv/conv.0/conv.0.0/Conv/WithoutBiases/fq_weights_1" [id=453, type=FakeQuantize]; -"454 Constant_114044" [id=454, type=Constant]; -"455 Constant_114043" [id=455, type=Constant]; -"456 Constant_114042" [id=456, type=Constant]; -"457 Constant_114041" [id=457, type=Constant]; +"454 Constant_41443" [id=454, type=Constant]; +"455 Constant_41442" [id=455, type=Constant]; +"456 Constant_41441" [id=456, type=Constant]; +"457 Constant_41440" [id=457, type=Constant]; "458 onnx^^Conv_637" [id=458, label="458 onnx::Conv_637", type=Constant]; -"459 Constant_114039" [id=459, type=Constant]; -"460 Constant_114038" [id=460, type=Constant]; -"461 Constant_114037" [id=461, type=Constant]; -"462 Constant_114036" [id=462, type=Constant]; +"459 Constant_41438" [id=459, type=Constant]; +"460 Constant_41437" [id=460, type=Constant]; +"461 Constant_41436" [id=461, type=Constant]; +"462 Constant_41435" [id=462, type=Constant]; "463 Reshape_1221" [id=463, type=Constant]; "464 /features/features.11/conv/conv.2/Conv/WithoutBiases/fq_weights_1" [id=464, type=FakeQuantize]; -"465 Constant_114034" [id=465, type=Constant]; -"466 Constant_114033" [id=466, type=Constant]; -"467 Constant_114032" [id=467, type=Constant]; -"468 Constant_114031" [id=468, type=Constant]; +"465 Constant_41433" [id=465, type=Constant]; +"466 Constant_41432" [id=466, type=Constant]; +"467 Constant_41431" [id=467, type=Constant]; +"468 Constant_41430" [id=468, type=Constant]; "469 onnx^^Conv_634" [id=469, label="469 onnx::Conv_634", type=Constant]; -"470 Constant_114029" [id=470, type=Constant]; -"471 Constant_114028" [id=471, type=Constant]; -"472 Constant_114027" [id=472, type=Constant]; -"473 Constant_114026" [id=473, type=Constant]; +"470 Constant_41428" [id=470, type=Constant]; +"471 Constant_41427" [id=471, type=Constant]; +"472 Constant_41426" [id=472, 
type=Constant]; +"473 Constant_41425" [id=473, type=Constant]; "474 Reshape_1202" [id=474, type=Constant]; "475 /features/features.11/conv/conv.1/conv.1.0/Conv/WithoutBiases/fq_weights_1" [id=475, type=FakeQuantize]; -"476 Constant_114024" [id=476, type=Constant]; -"477 Constant_114023" [id=477, type=Constant]; -"478 Constant_114022" [id=478, type=Constant]; -"479 Constant_114021" [id=479, type=Constant]; +"476 Constant_41423" [id=476, type=Constant]; +"477 Constant_41422" [id=477, type=Constant]; +"478 Constant_41421" [id=478, type=Constant]; +"479 Constant_41420" [id=479, type=Constant]; "480 Reshape_1150" [id=480, type=Constant]; -"481 Constant_114019" [id=481, type=Constant]; -"482 Constant_114018" [id=482, type=Constant]; -"483 Constant_114017" [id=483, type=Constant]; -"484 Constant_114016" [id=484, type=Constant]; +"481 Constant_41418" [id=481, type=Constant]; +"482 Constant_41417" [id=482, type=Constant]; +"483 Constant_41416" [id=483, type=Constant]; +"484 Constant_41415" [id=484, type=Constant]; "485 Reshape_1135" [id=485, type=Constant]; "486 /features/features.11/conv/conv.0/conv.0.0/Conv/WithoutBiases/fq_weights_1" [id=486, type=FakeQuantize]; -"487 Constant_114014" [id=487, type=Constant]; -"488 Constant_114013" [id=488, type=Constant]; -"489 Constant_114012" [id=489, type=Constant]; -"490 Constant_114011" [id=490, type=Constant]; +"487 Constant_41413" [id=487, type=Constant]; +"488 Constant_41412" [id=488, type=Constant]; +"489 Constant_41411" [id=489, type=Constant]; +"490 Constant_41410" [id=490, type=Constant]; "491 onnx^^Conv_628" [id=491, label="491 onnx::Conv_628", type=Constant]; -"492 Constant_114009" [id=492, type=Constant]; -"493 Constant_114008" [id=493, type=Constant]; -"494 Constant_114007" [id=494, type=Constant]; -"495 Constant_114006" [id=495, type=Constant]; -"496 Constant_114004" [id=496, type=Constant]; -"497 Constant_114003" [id=497, type=Constant]; -"498 Constant_114002" [id=498, type=Constant]; -"499 Constant_114001" [id=499, type=Constant]; +"492 Constant_41408" [id=492, type=Constant]; +"493 Constant_41407" [id=493, type=Constant]; +"494 Constant_41406" [id=494, type=Constant]; +"495 Constant_41405" [id=495, type=Constant]; +"496 Constant_41403" [id=496, type=Constant]; +"497 Constant_41402" [id=497, type=Constant]; +"498 Constant_41401" [id=498, type=Constant]; +"499 Constant_41400" [id=499, type=Constant]; "500 Reshape_1119" [id=500, type=Constant]; "501 /features/features.10/conv/conv.2/Conv/WithoutBiases/fq_weights_1" [id=501, type=FakeQuantize]; -"502 Constant_113999" [id=502, type=Constant]; -"503 Constant_113998" [id=503, type=Constant]; -"504 Constant_113997" [id=504, type=Constant]; -"505 Constant_113996" [id=505, type=Constant]; +"502 Constant_41398" [id=502, type=Constant]; +"503 Constant_41397" [id=503, type=Constant]; +"504 Constant_41396" [id=504, type=Constant]; +"505 Constant_41395" [id=505, type=Constant]; "506 onnx^^Conv_625" [id=506, label="506 onnx::Conv_625", type=Constant]; -"507 Constant_113994" [id=507, type=Constant]; -"508 Constant_113993" [id=508, type=Constant]; -"509 Constant_113992" [id=509, type=Constant]; -"510 Constant_113991" [id=510, type=Constant]; +"507 Constant_41393" [id=507, type=Constant]; +"508 Constant_41392" [id=508, type=Constant]; +"509 Constant_41391" [id=509, type=Constant]; +"510 Constant_41390" [id=510, type=Constant]; "511 Reshape_1100" [id=511, type=Constant]; "512 /features/features.10/conv/conv.1/conv.1.0/Conv/WithoutBiases/fq_weights_1" [id=512, type=FakeQuantize]; -"513 Constant_113989" [id=513, 
type=Constant]; -"514 Constant_113988" [id=514, type=Constant]; -"515 Constant_113987" [id=515, type=Constant]; -"516 Constant_113986" [id=516, type=Constant]; +"513 Constant_41388" [id=513, type=Constant]; +"514 Constant_41387" [id=514, type=Constant]; +"515 Constant_41386" [id=515, type=Constant]; +"516 Constant_41385" [id=516, type=Constant]; "517 Reshape_1048" [id=517, type=Constant]; -"518 Constant_113984" [id=518, type=Constant]; -"519 Constant_113983" [id=519, type=Constant]; -"520 Constant_113982" [id=520, type=Constant]; -"521 Constant_113981" [id=521, type=Constant]; +"518 Constant_41383" [id=518, type=Constant]; +"519 Constant_41382" [id=519, type=Constant]; +"520 Constant_41381" [id=520, type=Constant]; +"521 Constant_41380" [id=521, type=Constant]; "522 Reshape_1033" [id=522, type=Constant]; "523 /features/features.10/conv/conv.0/conv.0.0/Conv/WithoutBiases/fq_weights_1" [id=523, type=FakeQuantize]; -"524 Constant_113979" [id=524, type=Constant]; -"525 Constant_113978" [id=525, type=Constant]; -"526 Constant_113977" [id=526, type=Constant]; -"527 Constant_113976" [id=527, type=Constant]; +"524 Constant_41378" [id=524, type=Constant]; +"525 Constant_41377" [id=525, type=Constant]; +"526 Constant_41376" [id=526, type=Constant]; +"527 Constant_41375" [id=527, type=Constant]; "528 onnx^^Conv_619" [id=528, label="528 onnx::Conv_619", type=Constant]; -"529 Constant_113974" [id=529, type=Constant]; -"530 Constant_113973" [id=530, type=Constant]; -"531 Constant_113972" [id=531, type=Constant]; -"532 Constant_113971" [id=532, type=Constant]; -"533 Constant_113969" [id=533, type=Constant]; -"534 Constant_113968" [id=534, type=Constant]; -"535 Constant_113967" [id=535, type=Constant]; -"536 Constant_113966" [id=536, type=Constant]; +"529 Constant_41373" [id=529, type=Constant]; +"530 Constant_41372" [id=530, type=Constant]; +"531 Constant_41371" [id=531, type=Constant]; +"532 Constant_41370" [id=532, type=Constant]; +"533 Constant_41368" [id=533, type=Constant]; +"534 Constant_41367" [id=534, type=Constant]; +"535 Constant_41366" [id=535, type=Constant]; +"536 Constant_41365" [id=536, type=Constant]; "537 Reshape_1017" [id=537, type=Constant]; "538 /features/features.9/conv/conv.2/Conv/WithoutBiases/fq_weights_1" [id=538, type=FakeQuantize]; -"539 Constant_113964" [id=539, type=Constant]; -"540 Constant_113963" [id=540, type=Constant]; -"541 Constant_113962" [id=541, type=Constant]; -"542 Constant_113961" [id=542, type=Constant]; +"539 Constant_41363" [id=539, type=Constant]; +"540 Constant_41362" [id=540, type=Constant]; +"541 Constant_41361" [id=541, type=Constant]; +"542 Constant_41360" [id=542, type=Constant]; "543 onnx^^Conv_616" [id=543, label="543 onnx::Conv_616", type=Constant]; -"544 Constant_113959" [id=544, type=Constant]; -"545 Constant_113958" [id=545, type=Constant]; -"546 Constant_113957" [id=546, type=Constant]; -"547 Constant_113956" [id=547, type=Constant]; +"544 Constant_41358" [id=544, type=Constant]; +"545 Constant_41357" [id=545, type=Constant]; +"546 Constant_41356" [id=546, type=Constant]; +"547 Constant_41355" [id=547, type=Constant]; "548 Reshape_998" [id=548, type=Constant]; "549 /features/features.9/conv/conv.1/conv.1.0/Conv/WithoutBiases/fq_weights_1" [id=549, type=FakeQuantize]; -"550 Constant_113954" [id=550, type=Constant]; -"551 Constant_113953" [id=551, type=Constant]; -"552 Constant_113952" [id=552, type=Constant]; -"553 Constant_113951" [id=553, type=Constant]; +"550 Constant_41353" [id=550, type=Constant]; +"551 Constant_41352" [id=551, 
type=Constant]; +"552 Constant_41351" [id=552, type=Constant]; +"553 Constant_41350" [id=553, type=Constant]; "554 Reshape_946" [id=554, type=Constant]; -"555 Constant_113949" [id=555, type=Constant]; -"556 Constant_113948" [id=556, type=Constant]; -"557 Constant_113947" [id=557, type=Constant]; -"558 Constant_113946" [id=558, type=Constant]; +"555 Constant_41348" [id=555, type=Constant]; +"556 Constant_41347" [id=556, type=Constant]; +"557 Constant_41346" [id=557, type=Constant]; +"558 Constant_41345" [id=558, type=Constant]; "559 Reshape_931" [id=559, type=Constant]; "560 /features/features.9/conv/conv.0/conv.0.0/Conv/WithoutBiases/fq_weights_1" [id=560, type=FakeQuantize]; -"561 Constant_113944" [id=561, type=Constant]; -"562 Constant_113943" [id=562, type=Constant]; -"563 Constant_113942" [id=563, type=Constant]; -"564 Constant_113941" [id=564, type=Constant]; +"561 Constant_41343" [id=561, type=Constant]; +"562 Constant_41342" [id=562, type=Constant]; +"563 Constant_41341" [id=563, type=Constant]; +"564 Constant_41340" [id=564, type=Constant]; "565 onnx^^Conv_610" [id=565, label="565 onnx::Conv_610", type=Constant]; -"566 Constant_113939" [id=566, type=Constant]; -"567 Constant_113938" [id=567, type=Constant]; -"568 Constant_113937" [id=568, type=Constant]; -"569 Constant_113936" [id=569, type=Constant]; -"570 Constant_113934" [id=570, type=Constant]; -"571 Constant_113933" [id=571, type=Constant]; -"572 Constant_113932" [id=572, type=Constant]; -"573 Constant_113931" [id=573, type=Constant]; +"566 Constant_41338" [id=566, type=Constant]; +"567 Constant_41337" [id=567, type=Constant]; +"568 Constant_41336" [id=568, type=Constant]; +"569 Constant_41335" [id=569, type=Constant]; +"570 Constant_41333" [id=570, type=Constant]; +"571 Constant_41332" [id=571, type=Constant]; +"572 Constant_41331" [id=572, type=Constant]; +"573 Constant_41330" [id=573, type=Constant]; "574 Reshape_915" [id=574, type=Constant]; "575 /features/features.8/conv/conv.2/Conv/WithoutBiases/fq_weights_1" [id=575, type=FakeQuantize]; -"576 Constant_113929" [id=576, type=Constant]; -"577 Constant_113928" [id=577, type=Constant]; -"578 Constant_113927" [id=578, type=Constant]; -"579 Constant_113926" [id=579, type=Constant]; +"576 Constant_41328" [id=576, type=Constant]; +"577 Constant_41327" [id=577, type=Constant]; +"578 Constant_41326" [id=578, type=Constant]; +"579 Constant_41325" [id=579, type=Constant]; "580 onnx^^Conv_607" [id=580, label="580 onnx::Conv_607", type=Constant]; -"581 Constant_113924" [id=581, type=Constant]; -"582 Constant_113923" [id=582, type=Constant]; -"583 Constant_113922" [id=583, type=Constant]; -"584 Constant_113921" [id=584, type=Constant]; +"581 Constant_41323" [id=581, type=Constant]; +"582 Constant_41322" [id=582, type=Constant]; +"583 Constant_41321" [id=583, type=Constant]; +"584 Constant_41320" [id=584, type=Constant]; "585 Reshape_896" [id=585, type=Constant]; "586 /features/features.8/conv/conv.1/conv.1.0/Conv/WithoutBiases/fq_weights_1" [id=586, type=FakeQuantize]; -"587 Constant_113919" [id=587, type=Constant]; -"588 Constant_113918" [id=588, type=Constant]; -"589 Constant_113917" [id=589, type=Constant]; -"590 Constant_113916" [id=590, type=Constant]; +"587 Constant_41318" [id=587, type=Constant]; +"588 Constant_41317" [id=588, type=Constant]; +"589 Constant_41316" [id=589, type=Constant]; +"590 Constant_41315" [id=590, type=Constant]; "591 Reshape_844" [id=591, type=Constant]; -"592 Constant_113914" [id=592, type=Constant]; -"593 Constant_113913" [id=593, type=Constant]; -"594 
Constant_113912" [id=594, type=Constant]; -"595 Constant_113911" [id=595, type=Constant]; +"592 Constant_41313" [id=592, type=Constant]; +"593 Constant_41312" [id=593, type=Constant]; +"594 Constant_41311" [id=594, type=Constant]; +"595 Constant_41310" [id=595, type=Constant]; "596 Reshape_829" [id=596, type=Constant]; "597 /features/features.8/conv/conv.0/conv.0.0/Conv/WithoutBiases/fq_weights_1" [id=597, type=FakeQuantize]; -"598 Constant_113909" [id=598, type=Constant]; -"599 Constant_113908" [id=599, type=Constant]; -"600 Constant_113907" [id=600, type=Constant]; -"601 Constant_113906" [id=601, type=Constant]; +"598 Constant_41308" [id=598, type=Constant]; +"599 Constant_41307" [id=599, type=Constant]; +"600 Constant_41306" [id=600, type=Constant]; +"601 Constant_41305" [id=601, type=Constant]; "602 onnx^^Conv_601" [id=602, label="602 onnx::Conv_601", type=Constant]; -"603 Constant_113904" [id=603, type=Constant]; -"604 Constant_113903" [id=604, type=Constant]; -"605 Constant_113902" [id=605, type=Constant]; -"606 Constant_113901" [id=606, type=Constant]; +"603 Constant_41303" [id=603, type=Constant]; +"604 Constant_41302" [id=604, type=Constant]; +"605 Constant_41301" [id=605, type=Constant]; +"606 Constant_41300" [id=606, type=Constant]; "607 Reshape_814" [id=607, type=Constant]; "608 /features/features.7/conv/conv.2/Conv/WithoutBiases/fq_weights_1" [id=608, type=FakeQuantize]; -"609 Constant_113899" [id=609, type=Constant]; -"610 Constant_113898" [id=610, type=Constant]; -"611 Constant_113897" [id=611, type=Constant]; -"612 Constant_113896" [id=612, type=Constant]; +"609 Constant_41298" [id=609, type=Constant]; +"610 Constant_41297" [id=610, type=Constant]; +"611 Constant_41296" [id=611, type=Constant]; +"612 Constant_41295" [id=612, type=Constant]; "613 onnx^^Conv_598" [id=613, label="613 onnx::Conv_598", type=Constant]; -"614 Constant_113894" [id=614, type=Constant]; -"615 Constant_113893" [id=615, type=Constant]; -"616 Constant_113892" [id=616, type=Constant]; -"617 Constant_113891" [id=617, type=Constant]; +"614 Constant_41293" [id=614, type=Constant]; +"615 Constant_41292" [id=615, type=Constant]; +"616 Constant_41291" [id=616, type=Constant]; +"617 Constant_41290" [id=617, type=Constant]; "618 Reshape_795" [id=618, type=Constant]; "619 /features/features.7/conv/conv.1/conv.1.0/Conv/WithoutBiases/fq_weights_1" [id=619, type=FakeQuantize]; -"620 Constant_113889" [id=620, type=Constant]; -"621 Constant_113888" [id=621, type=Constant]; -"622 Constant_113887" [id=622, type=Constant]; -"623 Constant_113886" [id=623, type=Constant]; +"620 Constant_41288" [id=620, type=Constant]; +"621 Constant_41287" [id=621, type=Constant]; +"622 Constant_41286" [id=622, type=Constant]; +"623 Constant_41285" [id=623, type=Constant]; "624 Reshape_743" [id=624, type=Constant]; -"625 Constant_113884" [id=625, type=Constant]; -"626 Constant_113883" [id=626, type=Constant]; -"627 Constant_113882" [id=627, type=Constant]; -"628 Constant_113881" [id=628, type=Constant]; +"625 Constant_41283" [id=625, type=Constant]; +"626 Constant_41282" [id=626, type=Constant]; +"627 Constant_41281" [id=627, type=Constant]; +"628 Constant_41280" [id=628, type=Constant]; "629 Reshape_728" [id=629, type=Constant]; "630 /features/features.7/conv/conv.0/conv.0.0/Conv/WithoutBiases/fq_weights_1" [id=630, type=FakeQuantize]; -"631 Constant_113879" [id=631, type=Constant]; -"632 Constant_113878" [id=632, type=Constant]; -"633 Constant_113877" [id=633, type=Constant]; -"634 Constant_113876" [id=634, type=Constant]; +"631 
Constant_41278" [id=631, type=Constant]; +"632 Constant_41277" [id=632, type=Constant]; +"633 Constant_41276" [id=633, type=Constant]; +"634 Constant_41275" [id=634, type=Constant]; "635 onnx^^Conv_592" [id=635, label="635 onnx::Conv_592", type=Constant]; -"636 Constant_113874" [id=636, type=Constant]; -"637 Constant_113873" [id=637, type=Constant]; -"638 Constant_113872" [id=638, type=Constant]; -"639 Constant_113871" [id=639, type=Constant]; -"640 Constant_113869" [id=640, type=Constant]; -"641 Constant_113868" [id=641, type=Constant]; -"642 Constant_113867" [id=642, type=Constant]; -"643 Constant_113866" [id=643, type=Constant]; +"636 Constant_41273" [id=636, type=Constant]; +"637 Constant_41272" [id=637, type=Constant]; +"638 Constant_41271" [id=638, type=Constant]; +"639 Constant_41270" [id=639, type=Constant]; +"640 Constant_41268" [id=640, type=Constant]; +"641 Constant_41267" [id=641, type=Constant]; +"642 Constant_41266" [id=642, type=Constant]; +"643 Constant_41265" [id=643, type=Constant]; "644 Reshape_712" [id=644, type=Constant]; "645 /features/features.6/conv/conv.2/Conv/WithoutBiases/fq_weights_1" [id=645, type=FakeQuantize]; -"646 Constant_113864" [id=646, type=Constant]; -"647 Constant_113863" [id=647, type=Constant]; -"648 Constant_113862" [id=648, type=Constant]; -"649 Constant_113861" [id=649, type=Constant]; +"646 Constant_41263" [id=646, type=Constant]; +"647 Constant_41262" [id=647, type=Constant]; +"648 Constant_41261" [id=648, type=Constant]; +"649 Constant_41260" [id=649, type=Constant]; "650 onnx^^Conv_589" [id=650, label="650 onnx::Conv_589", type=Constant]; -"651 Constant_113859" [id=651, type=Constant]; -"652 Constant_113858" [id=652, type=Constant]; -"653 Constant_113857" [id=653, type=Constant]; -"654 Constant_113856" [id=654, type=Constant]; +"651 Constant_41258" [id=651, type=Constant]; +"652 Constant_41257" [id=652, type=Constant]; +"653 Constant_41256" [id=653, type=Constant]; +"654 Constant_41255" [id=654, type=Constant]; "655 Reshape_693" [id=655, type=Constant]; "656 /features/features.6/conv/conv.1/conv.1.0/Conv/WithoutBiases/fq_weights_1" [id=656, type=FakeQuantize]; -"657 Constant_113854" [id=657, type=Constant]; -"658 Constant_113853" [id=658, type=Constant]; -"659 Constant_113852" [id=659, type=Constant]; -"660 Constant_113851" [id=660, type=Constant]; +"657 Constant_41253" [id=657, type=Constant]; +"658 Constant_41252" [id=658, type=Constant]; +"659 Constant_41251" [id=659, type=Constant]; +"660 Constant_41250" [id=660, type=Constant]; "661 Reshape_641" [id=661, type=Constant]; -"662 Constant_113849" [id=662, type=Constant]; -"663 Constant_113848" [id=663, type=Constant]; -"664 Constant_113847" [id=664, type=Constant]; -"665 Constant_113846" [id=665, type=Constant]; +"662 Constant_41248" [id=662, type=Constant]; +"663 Constant_41247" [id=663, type=Constant]; +"664 Constant_41246" [id=664, type=Constant]; +"665 Constant_41245" [id=665, type=Constant]; "666 Reshape_626" [id=666, type=Constant]; "667 /features/features.6/conv/conv.0/conv.0.0/Conv/WithoutBiases/fq_weights_1" [id=667, type=FakeQuantize]; -"668 Constant_113844" [id=668, type=Constant]; -"669 Constant_113843" [id=669, type=Constant]; -"670 Constant_113842" [id=670, type=Constant]; -"671 Constant_113841" [id=671, type=Constant]; +"668 Constant_41243" [id=668, type=Constant]; +"669 Constant_41242" [id=669, type=Constant]; +"670 Constant_41241" [id=670, type=Constant]; +"671 Constant_41240" [id=671, type=Constant]; "672 onnx^^Conv_583" [id=672, label="672 onnx::Conv_583", type=Constant]; 
-"673 Constant_113839" [id=673, type=Constant]; -"674 Constant_113838" [id=674, type=Constant]; -"675 Constant_113837" [id=675, type=Constant]; -"676 Constant_113836" [id=676, type=Constant]; -"677 Constant_113834" [id=677, type=Constant]; -"678 Constant_113833" [id=678, type=Constant]; -"679 Constant_113832" [id=679, type=Constant]; -"680 Constant_113831" [id=680, type=Constant]; +"673 Constant_41238" [id=673, type=Constant]; +"674 Constant_41237" [id=674, type=Constant]; +"675 Constant_41236" [id=675, type=Constant]; +"676 Constant_41235" [id=676, type=Constant]; +"677 Constant_41233" [id=677, type=Constant]; +"678 Constant_41232" [id=678, type=Constant]; +"679 Constant_41231" [id=679, type=Constant]; +"680 Constant_41230" [id=680, type=Constant]; "681 Reshape_610" [id=681, type=Constant]; "682 /features/features.5/conv/conv.2/Conv/WithoutBiases/fq_weights_1" [id=682, type=FakeQuantize]; -"683 Constant_113829" [id=683, type=Constant]; -"684 Constant_113828" [id=684, type=Constant]; -"685 Constant_113827" [id=685, type=Constant]; -"686 Constant_113826" [id=686, type=Constant]; +"683 Constant_41228" [id=683, type=Constant]; +"684 Constant_41227" [id=684, type=Constant]; +"685 Constant_41226" [id=685, type=Constant]; +"686 Constant_41225" [id=686, type=Constant]; "687 onnx^^Conv_580" [id=687, label="687 onnx::Conv_580", type=Constant]; -"688 Constant_113824" [id=688, type=Constant]; -"689 Constant_113823" [id=689, type=Constant]; -"690 Constant_113822" [id=690, type=Constant]; -"691 Constant_113821" [id=691, type=Constant]; +"688 Constant_41223" [id=688, type=Constant]; +"689 Constant_41222" [id=689, type=Constant]; +"690 Constant_41221" [id=690, type=Constant]; +"691 Constant_41220" [id=691, type=Constant]; "692 Reshape_591" [id=692, type=Constant]; "693 /features/features.5/conv/conv.1/conv.1.0/Conv/WithoutBiases/fq_weights_1" [id=693, type=FakeQuantize]; -"694 Constant_113819" [id=694, type=Constant]; -"695 Constant_113818" [id=695, type=Constant]; -"696 Constant_113817" [id=696, type=Constant]; -"697 Constant_113816" [id=697, type=Constant]; +"694 Constant_41218" [id=694, type=Constant]; +"695 Constant_41217" [id=695, type=Constant]; +"696 Constant_41216" [id=696, type=Constant]; +"697 Constant_41215" [id=697, type=Constant]; "698 Reshape_539" [id=698, type=Constant]; -"699 Constant_113814" [id=699, type=Constant]; -"700 Constant_113813" [id=700, type=Constant]; -"701 Constant_113812" [id=701, type=Constant]; -"702 Constant_113811" [id=702, type=Constant]; +"699 Constant_41213" [id=699, type=Constant]; +"700 Constant_41212" [id=700, type=Constant]; +"701 Constant_41211" [id=701, type=Constant]; +"702 Constant_41210" [id=702, type=Constant]; "703 Reshape_524" [id=703, type=Constant]; "704 /features/features.5/conv/conv.0/conv.0.0/Conv/WithoutBiases/fq_weights_1" [id=704, type=FakeQuantize]; -"705 Constant_113809" [id=705, type=Constant]; -"706 Constant_113808" [id=706, type=Constant]; -"707 Constant_113807" [id=707, type=Constant]; -"708 Constant_113806" [id=708, type=Constant]; +"705 Constant_41208" [id=705, type=Constant]; +"706 Constant_41207" [id=706, type=Constant]; +"707 Constant_41206" [id=707, type=Constant]; +"708 Constant_41205" [id=708, type=Constant]; "709 onnx^^Conv_574" [id=709, label="709 onnx::Conv_574", type=Constant]; -"710 Constant_113804" [id=710, type=Constant]; -"711 Constant_113803" [id=711, type=Constant]; -"712 Constant_113802" [id=712, type=Constant]; -"713 Constant_113801" [id=713, type=Constant]; +"710 Constant_41203" [id=710, type=Constant]; +"711 
Constant_41202" [id=711, type=Constant]; +"712 Constant_41201" [id=712, type=Constant]; +"713 Constant_41200" [id=713, type=Constant]; "714 Reshape_509" [id=714, type=Constant]; "715 /features/features.4/conv/conv.2/Conv/WithoutBiases/fq_weights_1" [id=715, type=FakeQuantize]; -"716 Constant_113799" [id=716, type=Constant]; -"717 Constant_113798" [id=717, type=Constant]; -"718 Constant_113797" [id=718, type=Constant]; -"719 Constant_113796" [id=719, type=Constant]; +"716 Constant_41198" [id=716, type=Constant]; +"717 Constant_41197" [id=717, type=Constant]; +"718 Constant_41196" [id=718, type=Constant]; +"719 Constant_41195" [id=719, type=Constant]; "720 onnx^^Conv_571" [id=720, label="720 onnx::Conv_571", type=Constant]; -"721 Constant_113794" [id=721, type=Constant]; -"722 Constant_113793" [id=722, type=Constant]; -"723 Constant_113792" [id=723, type=Constant]; -"724 Constant_113791" [id=724, type=Constant]; +"721 Constant_41193" [id=721, type=Constant]; +"722 Constant_41192" [id=722, type=Constant]; +"723 Constant_41191" [id=723, type=Constant]; +"724 Constant_41190" [id=724, type=Constant]; "725 Reshape_490" [id=725, type=Constant]; "726 /features/features.4/conv/conv.1/conv.1.0/Conv/WithoutBiases/fq_weights_1" [id=726, type=FakeQuantize]; -"727 Constant_113789" [id=727, type=Constant]; -"728 Constant_113788" [id=728, type=Constant]; -"729 Constant_113787" [id=729, type=Constant]; -"730 Constant_113786" [id=730, type=Constant]; +"727 Constant_41188" [id=727, type=Constant]; +"728 Constant_41187" [id=728, type=Constant]; +"729 Constant_41186" [id=729, type=Constant]; +"730 Constant_41185" [id=730, type=Constant]; "731 Reshape_438" [id=731, type=Constant]; -"732 Constant_113784" [id=732, type=Constant]; -"733 Constant_113783" [id=733, type=Constant]; -"734 Constant_113782" [id=734, type=Constant]; -"735 Constant_113781" [id=735, type=Constant]; +"732 Constant_41183" [id=732, type=Constant]; +"733 Constant_41182" [id=733, type=Constant]; +"734 Constant_41181" [id=734, type=Constant]; +"735 Constant_41180" [id=735, type=Constant]; "736 Reshape_423" [id=736, type=Constant]; "737 /features/features.4/conv/conv.0/conv.0.0/Conv/WithoutBiases/fq_weights_1" [id=737, type=FakeQuantize]; -"738 Constant_113779" [id=738, type=Constant]; -"739 Constant_113778" [id=739, type=Constant]; -"740 Constant_113777" [id=740, type=Constant]; -"741 Constant_113776" [id=741, type=Constant]; +"738 Constant_41178" [id=738, type=Constant]; +"739 Constant_41177" [id=739, type=Constant]; +"740 Constant_41176" [id=740, type=Constant]; +"741 Constant_41175" [id=741, type=Constant]; "742 onnx^^Conv_565" [id=742, label="742 onnx::Conv_565", type=Constant]; -"743 Constant_113774" [id=743, type=Constant]; -"744 Constant_113773" [id=744, type=Constant]; -"745 Constant_113772" [id=745, type=Constant]; -"746 Constant_113771" [id=746, type=Constant]; -"747 Constant_113769" [id=747, type=Constant]; -"748 Constant_113768" [id=748, type=Constant]; -"749 Constant_113767" [id=749, type=Constant]; -"750 Constant_113766" [id=750, type=Constant]; +"743 Constant_41173" [id=743, type=Constant]; +"744 Constant_41172" [id=744, type=Constant]; +"745 Constant_41171" [id=745, type=Constant]; +"746 Constant_41170" [id=746, type=Constant]; +"747 Constant_41168" [id=747, type=Constant]; +"748 Constant_41167" [id=748, type=Constant]; +"749 Constant_41166" [id=749, type=Constant]; +"750 Constant_41165" [id=750, type=Constant]; "751 Reshape_407" [id=751, type=Constant]; "752 /features/features.3/conv/conv.2/Conv/WithoutBiases/fq_weights_1" 
[id=752, type=FakeQuantize]; -"753 Constant_113764" [id=753, type=Constant]; -"754 Constant_113763" [id=754, type=Constant]; -"755 Constant_113762" [id=755, type=Constant]; -"756 Constant_113761" [id=756, type=Constant]; +"753 Constant_41163" [id=753, type=Constant]; +"754 Constant_41162" [id=754, type=Constant]; +"755 Constant_41161" [id=755, type=Constant]; +"756 Constant_41160" [id=756, type=Constant]; "757 onnx^^Conv_562" [id=757, label="757 onnx::Conv_562", type=Constant]; -"758 Constant_113759" [id=758, type=Constant]; -"759 Constant_113758" [id=759, type=Constant]; -"760 Constant_113757" [id=760, type=Constant]; -"761 Constant_113756" [id=761, type=Constant]; +"758 Constant_41158" [id=758, type=Constant]; +"759 Constant_41157" [id=759, type=Constant]; +"760 Constant_41156" [id=760, type=Constant]; +"761 Constant_41155" [id=761, type=Constant]; "762 Reshape_388" [id=762, type=Constant]; "763 /features/features.3/conv/conv.1/conv.1.0/Conv/WithoutBiases/fq_weights_1" [id=763, type=FakeQuantize]; -"764 Constant_113754" [id=764, type=Constant]; -"765 Constant_113753" [id=765, type=Constant]; -"766 Constant_113752" [id=766, type=Constant]; -"767 Constant_113751" [id=767, type=Constant]; +"764 Constant_41153" [id=764, type=Constant]; +"765 Constant_41152" [id=765, type=Constant]; +"766 Constant_41151" [id=766, type=Constant]; +"767 Constant_41150" [id=767, type=Constant]; "768 Reshape_336" [id=768, type=Constant]; -"769 Constant_113749" [id=769, type=Constant]; -"770 Constant_113748" [id=770, type=Constant]; -"771 Constant_113747" [id=771, type=Constant]; -"772 Constant_113746" [id=772, type=Constant]; +"769 Constant_41148" [id=769, type=Constant]; +"770 Constant_41147" [id=770, type=Constant]; +"771 Constant_41146" [id=771, type=Constant]; +"772 Constant_41145" [id=772, type=Constant]; "773 Reshape_321" [id=773, type=Constant]; "774 /features/features.3/conv/conv.0/conv.0.0/Conv/WithoutBiases/fq_weights_1" [id=774, type=FakeQuantize]; -"775 Constant_113744" [id=775, type=Constant]; -"776 Constant_113743" [id=776, type=Constant]; -"777 Constant_113742" [id=777, type=Constant]; -"778 Constant_113741" [id=778, type=Constant]; +"775 Constant_41143" [id=775, type=Constant]; +"776 Constant_41142" [id=776, type=Constant]; +"777 Constant_41141" [id=777, type=Constant]; +"778 Constant_41140" [id=778, type=Constant]; "779 onnx^^Conv_556" [id=779, label="779 onnx::Conv_556", type=Constant]; -"780 Constant_113739" [id=780, type=Constant]; -"781 Constant_113738" [id=781, type=Constant]; -"782 Constant_113737" [id=782, type=Constant]; -"783 Constant_113736" [id=783, type=Constant]; +"780 Constant_41138" [id=780, type=Constant]; +"781 Constant_41137" [id=781, type=Constant]; +"782 Constant_41136" [id=782, type=Constant]; +"783 Constant_41135" [id=783, type=Constant]; "784 Reshape_306" [id=784, type=Constant]; "785 /features/features.2/conv/conv.2/Conv/WithoutBiases/fq_weights_1" [id=785, type=FakeQuantize]; -"786 Constant_113734" [id=786, type=Constant]; -"787 Constant_113733" [id=787, type=Constant]; -"788 Constant_113732" [id=788, type=Constant]; -"789 Constant_113731" [id=789, type=Constant]; +"786 Constant_41133" [id=786, type=Constant]; +"787 Constant_41132" [id=787, type=Constant]; +"788 Constant_41131" [id=788, type=Constant]; +"789 Constant_41130" [id=789, type=Constant]; "790 onnx^^Conv_553" [id=790, label="790 onnx::Conv_553", type=Constant]; -"791 Constant_113729" [id=791, type=Constant]; -"792 Constant_113728" [id=792, type=Constant]; -"793 Constant_113727" [id=793, type=Constant]; -"794 
Constant_113726" [id=794, type=Constant]; +"791 Constant_41128" [id=791, type=Constant]; +"792 Constant_41127" [id=792, type=Constant]; +"793 Constant_41126" [id=793, type=Constant]; +"794 Constant_41125" [id=794, type=Constant]; "795 Reshape_287" [id=795, type=Constant]; "796 /features/features.2/conv/conv.1/conv.1.0/Conv/WithoutBiases/fq_weights_1" [id=796, type=FakeQuantize]; -"797 Constant_113724" [id=797, type=Constant]; -"798 Constant_113723" [id=798, type=Constant]; -"799 Constant_113722" [id=799, type=Constant]; -"800 Constant_113721" [id=800, type=Constant]; +"797 Constant_41123" [id=797, type=Constant]; +"798 Constant_41122" [id=798, type=Constant]; +"799 Constant_41121" [id=799, type=Constant]; +"800 Constant_41120" [id=800, type=Constant]; "801 Reshape_235" [id=801, type=Constant]; -"802 Constant_113719" [id=802, type=Constant]; -"803 Constant_113718" [id=803, type=Constant]; -"804 Constant_113717" [id=804, type=Constant]; -"805 Constant_113716" [id=805, type=Constant]; +"802 Constant_41118" [id=802, type=Constant]; +"803 Constant_41117" [id=803, type=Constant]; +"804 Constant_41116" [id=804, type=Constant]; +"805 Constant_41115" [id=805, type=Constant]; "806 Reshape_220" [id=806, type=Constant]; "807 /features/features.2/conv/conv.0/conv.0.0/Conv/WithoutBiases/fq_weights_1" [id=807, type=FakeQuantize]; -"808 Constant_113714" [id=808, type=Constant]; -"809 Constant_113713" [id=809, type=Constant]; -"810 Constant_113712" [id=810, type=Constant]; -"811 Constant_113711" [id=811, type=Constant]; +"808 Constant_41113" [id=808, type=Constant]; +"809 Constant_41112" [id=809, type=Constant]; +"810 Constant_41111" [id=810, type=Constant]; +"811 Constant_41110" [id=811, type=Constant]; "812 onnx^^Conv_547" [id=812, label="812 onnx::Conv_547", type=Constant]; -"813 Constant_113709" [id=813, type=Constant]; -"814 Constant_113708" [id=814, type=Constant]; -"815 Constant_113707" [id=815, type=Constant]; -"816 Constant_113706" [id=816, type=Constant]; +"813 Constant_41108" [id=813, type=Constant]; +"814 Constant_41107" [id=814, type=Constant]; +"815 Constant_41106" [id=815, type=Constant]; +"816 Constant_41105" [id=816, type=Constant]; "817 Reshape_205" [id=817, type=Constant]; "818 /features/features.1/conv/conv.1/Conv/WithoutBiases/fq_weights_1" [id=818, type=FakeQuantize]; -"819 Constant_113704" [id=819, type=Constant]; -"820 Constant_113703" [id=820, type=Constant]; -"821 Constant_113702" [id=821, type=Constant]; -"822 Constant_113701" [id=822, type=Constant]; +"819 Constant_41103" [id=819, type=Constant]; +"820 Constant_41102" [id=820, type=Constant]; +"821 Constant_41101" [id=821, type=Constant]; +"822 Constant_41100" [id=822, type=Constant]; "823 onnx^^Conv_544" [id=823, label="823 onnx::Conv_544", type=Constant]; -"824 Constant_113699" [id=824, type=Constant]; -"825 Constant_113698" [id=825, type=Constant]; -"826 Constant_113697" [id=826, type=Constant]; -"827 Constant_113696" [id=827, type=Constant]; +"824 Constant_41098" [id=824, type=Constant]; +"825 Constant_41097" [id=825, type=Constant]; +"826 Constant_41096" [id=826, type=Constant]; +"827 Constant_41095" [id=827, type=Constant]; "828 Reshape_186" [id=828, type=Constant]; "829 /features/features.1/conv/conv.0/conv.0.0/Conv/WithoutBiases/fq_weights_1" [id=829, type=FakeQuantize]; -"830 Constant_113694" [id=830, type=Constant]; -"831 Constant_113693" [id=831, type=Constant]; -"832 Constant_113692" [id=832, type=Constant]; -"833 Constant_113691" [id=833, type=Constant]; +"830 Constant_41093" [id=830, type=Constant]; +"831 
Constant_41092" [id=831, type=Constant]; +"832 Constant_41091" [id=832, type=Constant]; +"833 Constant_41090" [id=833, type=Constant]; "834 Reshape_134" [id=834, type=Constant]; -"835 Constant_113689" [id=835, type=Constant]; -"836 Constant_113688" [id=836, type=Constant]; -"837 Constant_113687" [id=837, type=Constant]; -"838 Constant_113686" [id=838, type=Constant]; +"835 Constant_41088" [id=835, type=Constant]; +"836 Constant_41087" [id=836, type=Constant]; +"837 Constant_41086" [id=837, type=Constant]; +"838 Constant_41085" [id=838, type=Constant]; "839 Reshape_119" [id=839, type=Constant]; "840 /features/features.0/features.0.0/Conv/WithoutBiases/fq_weights_1" [id=840, type=FakeQuantize]; -"841 Constant_113684" [id=841, type=Constant]; -"842 Constant_113683" [id=842, type=Constant]; -"843 Constant_113682" [id=843, type=Constant]; -"844 Constant_113681" [id=844, type=Constant]; +"841 Constant_41083" [id=841, type=Constant]; +"842 Constant_41082" [id=842, type=Constant]; +"843 Constant_41081" [id=843, type=Constant]; +"844 Constant_41080" [id=844, type=Constant]; "845 Gather_5168" [id=845, type=Constant]; -"846 Constant_113679" [id=846, type=Constant]; -"847 Constant_113678" [id=847, type=Constant]; -"848 Constant_113677" [id=848, type=Constant]; -"849 Constant_113676" [id=849, type=Constant]; +"846 Constant_41078" [id=846, type=Constant]; +"847 Constant_41077" [id=847, type=Constant]; +"848 Constant_41076" [id=848, type=Constant]; +"849 Constant_41075" [id=849, type=Constant]; "850 Gather_5165" [id=850, type=Constant]; "851 Gather_5162" [id=851, type=Constant]; "0 data" -> "1 Multiply_5095" [label="[1, 3, 224, 224]", style=solid]; @@ -1083,633 +1083,633 @@ strict digraph { "219 prob" -> "220 prob/sink_port_0" [label="[1, 1000]", style=solid]; "221 Constant_5169" -> "219 prob" [label="[1, 1000]", style=solid]; "222 /classifier/classifier.1/Gemm/WithoutBiases/fq_weights_1" -> "218 /classifier/classifier.1/Gemm/WithoutBiases" [label="[1000, 1280]", style=solid]; -"223 Constant_114259" -> "222 /classifier/classifier.1/Gemm/WithoutBiases/fq_weights_1" [label="[1000, 1]", style=solid]; -"224 Constant_114258" -> "222 /classifier/classifier.1/Gemm/WithoutBiases/fq_weights_1" [label="[1000, 1]", style=solid]; -"225 Constant_114257" -> "222 /classifier/classifier.1/Gemm/WithoutBiases/fq_weights_1" [label="[1000, 1]", style=solid]; -"226 Constant_114256" -> "222 /classifier/classifier.1/Gemm/WithoutBiases/fq_weights_1" [label="[1000, 1]", style=solid]; +"223 Constant_41658" -> "222 /classifier/classifier.1/Gemm/WithoutBiases/fq_weights_1" [label="[1000, 1]", style=solid]; +"224 Constant_41657" -> "222 /classifier/classifier.1/Gemm/WithoutBiases/fq_weights_1" [label="[1000, 1]", style=solid]; +"225 Constant_41656" -> "222 /classifier/classifier.1/Gemm/WithoutBiases/fq_weights_1" [label="[1000, 1]", style=solid]; +"226 Constant_41655" -> "222 /classifier/classifier.1/Gemm/WithoutBiases/fq_weights_1" [label="[1000, 1]", style=solid]; "227 classifier.1.weight" -> "222 /classifier/classifier.1/Gemm/WithoutBiases/fq_weights_1" [label="[1000, 1280]", style=solid]; "228 Constant_1864" -> "217 /Flatten" [label="[2]", style=dashed]; -"229 Constant_114254" -> "216 /GlobalAveragePool/fq_output_0" [label="[]", style=solid]; -"230 Constant_114253" -> "216 /GlobalAveragePool/fq_output_0" [label="[]", style=solid]; -"231 Constant_114252" -> "216 /GlobalAveragePool/fq_output_0" [label="[]", style=solid]; -"232 Constant_114251" -> "216 /GlobalAveragePool/fq_output_0" [label="[]", style=solid]; +"229 Constant_41653" 
-> "216 /GlobalAveragePool/fq_output_0" [label="[]", style=solid]; +"230 Constant_41652" -> "216 /GlobalAveragePool/fq_output_0" [label="[]", style=solid]; +"231 Constant_41651" -> "216 /GlobalAveragePool/fq_output_0" [label="[]", style=solid]; +"232 Constant_41650" -> "216 /GlobalAveragePool/fq_output_0" [label="[]", style=solid]; "233 Range_1860" -> "215 /GlobalAveragePool" [label="[2]", style=dashed]; -"234 Constant_114249" -> "214 /features/features.18/features.18.2/Clip/fq_output_0" [label="[]", style=solid]; -"235 Constant_114248" -> "214 /features/features.18/features.18.2/Clip/fq_output_0" [label="[]", style=solid]; -"236 Constant_114247" -> "214 /features/features.18/features.18.2/Clip/fq_output_0" [label="[]", style=solid]; -"237 Constant_114246" -> "214 /features/features.18/features.18.2/Clip/fq_output_0" [label="[]", style=solid]; +"234 Constant_41648" -> "214 /features/features.18/features.18.2/Clip/fq_output_0" [label="[]", style=solid]; +"235 Constant_41647" -> "214 /features/features.18/features.18.2/Clip/fq_output_0" [label="[]", style=solid]; +"236 Constant_41646" -> "214 /features/features.18/features.18.2/Clip/fq_output_0" [label="[]", style=solid]; +"237 Constant_41645" -> "214 /features/features.18/features.18.2/Clip/fq_output_0" [label="[]", style=solid]; "238 Reshape_1846" -> "212 /features/features.18/features.18.0/Conv" [label="[1, 1280, 1, 1]", style=solid]; "239 /features/features.18/features.18.0/Conv/WithoutBiases/fq_weights_1" -> "211 /features/features.18/features.18.0/Conv/WithoutBiases" [label="[1280, 320, 1, 1]", style=solid]; -"240 Constant_114244" -> "239 /features/features.18/features.18.0/Conv/WithoutBiases/fq_weights_1" [label="[1280, 1, 1, 1]", style=solid]; -"241 Constant_114243" -> "239 /features/features.18/features.18.0/Conv/WithoutBiases/fq_weights_1" [label="[1280, 1, 1, 1]", style=solid]; -"242 Constant_114242" -> "239 /features/features.18/features.18.0/Conv/WithoutBiases/fq_weights_1" [label="[1280, 1, 1, 1]", style=solid]; -"243 Constant_114241" -> "239 /features/features.18/features.18.0/Conv/WithoutBiases/fq_weights_1" [label="[1280, 1, 1, 1]", style=solid]; +"240 Constant_41643" -> "239 /features/features.18/features.18.0/Conv/WithoutBiases/fq_weights_1" [label="[1280, 1, 1, 1]", style=solid]; +"241 Constant_41642" -> "239 /features/features.18/features.18.0/Conv/WithoutBiases/fq_weights_1" [label="[1280, 1, 1, 1]", style=solid]; +"242 Constant_41641" -> "239 /features/features.18/features.18.0/Conv/WithoutBiases/fq_weights_1" [label="[1280, 1, 1, 1]", style=solid]; +"243 Constant_41640" -> "239 /features/features.18/features.18.0/Conv/WithoutBiases/fq_weights_1" [label="[1280, 1, 1, 1]", style=solid]; "244 onnx^^Conv_691" -> "239 /features/features.18/features.18.0/Conv/WithoutBiases/fq_weights_1" [label="[1280, 320, 1, 1]", style=solid]; -"245 Constant_114239" -> "210 /features/features.17/conv/conv.2/Conv/fq_output_0" [label="[]", style=solid]; -"246 Constant_114238" -> "210 /features/features.17/conv/conv.2/Conv/fq_output_0" [label="[]", style=solid]; -"247 Constant_114237" -> "210 /features/features.17/conv/conv.2/Conv/fq_output_0" [label="[]", style=solid]; -"248 Constant_114236" -> "210 /features/features.17/conv/conv.2/Conv/fq_output_0" [label="[]", style=solid]; +"245 Constant_41638" -> "210 /features/features.17/conv/conv.2/Conv/fq_output_0" [label="[]", style=solid]; +"246 Constant_41637" -> "210 /features/features.17/conv/conv.2/Conv/fq_output_0" [label="[]", style=solid]; +"247 Constant_41636" -> "210 
/features/features.17/conv/conv.2/Conv/fq_output_0" [label="[]", style=solid]; +"248 Constant_41635" -> "210 /features/features.17/conv/conv.2/Conv/fq_output_0" [label="[]", style=solid]; "249 Reshape_1831" -> "209 /features/features.17/conv/conv.2/Conv" [label="[1, 320, 1, 1]", style=solid]; "250 /features/features.17/conv/conv.2/Conv/WithoutBiases/fq_weights_1" -> "207 /features/features.17/conv/conv.2/Conv/WithoutBiases" [label="[320, 960, 1, 1]", style=solid]; -"251 Constant_114234" -> "250 /features/features.17/conv/conv.2/Conv/WithoutBiases/fq_weights_1" [label="[320, 1, 1, 1]", style=solid]; -"252 Constant_114233" -> "250 /features/features.17/conv/conv.2/Conv/WithoutBiases/fq_weights_1" [label="[320, 1, 1, 1]", style=solid]; -"253 Constant_114232" -> "250 /features/features.17/conv/conv.2/Conv/WithoutBiases/fq_weights_1" [label="[320, 1, 1, 1]", style=solid]; -"254 Constant_114231" -> "250 /features/features.17/conv/conv.2/Conv/WithoutBiases/fq_weights_1" [label="[320, 1, 1, 1]", style=solid]; +"251 Constant_41633" -> "250 /features/features.17/conv/conv.2/Conv/WithoutBiases/fq_weights_1" [label="[320, 1, 1, 1]", style=solid]; +"252 Constant_41632" -> "250 /features/features.17/conv/conv.2/Conv/WithoutBiases/fq_weights_1" [label="[320, 1, 1, 1]", style=solid]; +"253 Constant_41631" -> "250 /features/features.17/conv/conv.2/Conv/WithoutBiases/fq_weights_1" [label="[320, 1, 1, 1]", style=solid]; +"254 Constant_41630" -> "250 /features/features.17/conv/conv.2/Conv/WithoutBiases/fq_weights_1" [label="[320, 1, 1, 1]", style=solid]; "255 onnx^^Conv_688" -> "250 /features/features.17/conv/conv.2/Conv/WithoutBiases/fq_weights_1" [label="[320, 960, 1, 1]", style=solid]; -"256 Constant_114229" -> "205 /features/features.17/conv/conv.1/conv.1.2/Clip/fq_output_0" [label="[]", style=solid]; -"257 Constant_114228" -> "205 /features/features.17/conv/conv.1/conv.1.2/Clip/fq_output_0" [label="[]", style=solid]; -"258 Constant_114227" -> "205 /features/features.17/conv/conv.1/conv.1.2/Clip/fq_output_0" [label="[]", style=solid]; -"259 Constant_114226" -> "205 /features/features.17/conv/conv.1/conv.1.2/Clip/fq_output_0" [label="[]", style=solid]; +"256 Constant_41628" -> "205 /features/features.17/conv/conv.1/conv.1.2/Clip/fq_output_0" [label="[]", style=solid]; +"257 Constant_41627" -> "205 /features/features.17/conv/conv.1/conv.1.2/Clip/fq_output_0" [label="[]", style=solid]; +"258 Constant_41626" -> "205 /features/features.17/conv/conv.1/conv.1.2/Clip/fq_output_0" [label="[]", style=solid]; +"259 Constant_41625" -> "205 /features/features.17/conv/conv.1/conv.1.2/Clip/fq_output_0" [label="[]", style=solid]; "260 Reshape_1812" -> "199 /features/features.17/conv/conv.1/conv.1.0/Conv" [label="[1, 960, 1, 1]", style=solid]; "261 /features/features.17/conv/conv.1/conv.1.0/Conv/WithoutBiases/fq_weights_1" -> "196 /features/features.17/conv/conv.1/conv.1.0/Conv/WithoutBiases" [label="[960, 1, 1, 3, 3]", style=solid]; -"262 Constant_114224" -> "261 /features/features.17/conv/conv.1/conv.1.0/Conv/WithoutBiases/fq_weights_1" [label="[960, 1, 1, 1, 1]", style=solid]; -"263 Constant_114223" -> "261 /features/features.17/conv/conv.1/conv.1.0/Conv/WithoutBiases/fq_weights_1" [label="[960, 1, 1, 1, 1]", style=solid]; -"264 Constant_114222" -> "261 /features/features.17/conv/conv.1/conv.1.0/Conv/WithoutBiases/fq_weights_1" [label="[960, 1, 1, 1, 1]", style=solid]; -"265 Constant_114221" -> "261 /features/features.17/conv/conv.1/conv.1.0/Conv/WithoutBiases/fq_weights_1" [label="[960, 1, 1, 1, 1]", style=solid]; 
+"262 Constant_41623" -> "261 /features/features.17/conv/conv.1/conv.1.0/Conv/WithoutBiases/fq_weights_1" [label="[960, 1, 1, 1, 1]", style=solid]; +"263 Constant_41622" -> "261 /features/features.17/conv/conv.1/conv.1.0/Conv/WithoutBiases/fq_weights_1" [label="[960, 1, 1, 1, 1]", style=solid]; +"264 Constant_41621" -> "261 /features/features.17/conv/conv.1/conv.1.0/Conv/WithoutBiases/fq_weights_1" [label="[960, 1, 1, 1, 1]", style=solid]; +"265 Constant_41620" -> "261 /features/features.17/conv/conv.1/conv.1.0/Conv/WithoutBiases/fq_weights_1" [label="[960, 1, 1, 1, 1]", style=solid]; "266 Reshape_1760" -> "261 /features/features.17/conv/conv.1/conv.1.0/Conv/WithoutBiases/fq_weights_1" [label="[960, 1, 1, 3, 3]", style=solid]; -"267 Constant_114219" -> "193 /features/features.17/conv/conv.0/conv.0.2/Clip/fq_output_0" [label="[1, 960, 1, 1]", style=solid]; -"268 Constant_114218" -> "193 /features/features.17/conv/conv.0/conv.0.2/Clip/fq_output_0" [label="[1, 960, 1, 1]", style=solid]; -"269 Constant_114217" -> "193 /features/features.17/conv/conv.0/conv.0.2/Clip/fq_output_0" [label="[1, 960, 1, 1]", style=solid]; -"270 Constant_114216" -> "193 /features/features.17/conv/conv.0/conv.0.2/Clip/fq_output_0" [label="[1, 960, 1, 1]", style=solid]; +"267 Constant_41618" -> "193 /features/features.17/conv/conv.0/conv.0.2/Clip/fq_output_0" [label="[1, 960, 1, 1]", style=solid]; +"268 Constant_41617" -> "193 /features/features.17/conv/conv.0/conv.0.2/Clip/fq_output_0" [label="[1, 960, 1, 1]", style=solid]; +"269 Constant_41616" -> "193 /features/features.17/conv/conv.0/conv.0.2/Clip/fq_output_0" [label="[1, 960, 1, 1]", style=solid]; +"270 Constant_41615" -> "193 /features/features.17/conv/conv.0/conv.0.2/Clip/fq_output_0" [label="[1, 960, 1, 1]", style=solid]; "271 Reshape_1745" -> "187 /features/features.17/conv/conv.0/conv.0.0/Conv" [label="[1, 960, 1, 1]", style=solid]; "272 /features/features.17/conv/conv.0/conv.0.0/Conv/WithoutBiases/fq_weights_1" -> "184 /features/features.17/conv/conv.0/conv.0.0/Conv/WithoutBiases" [label="[960, 160, 1, 1]", style=solid]; -"273 Constant_114214" -> "272 /features/features.17/conv/conv.0/conv.0.0/Conv/WithoutBiases/fq_weights_1" [label="[960, 1, 1, 1]", style=solid]; -"274 Constant_114213" -> "272 /features/features.17/conv/conv.0/conv.0.0/Conv/WithoutBiases/fq_weights_1" [label="[960, 1, 1, 1]", style=solid]; -"275 Constant_114212" -> "272 /features/features.17/conv/conv.0/conv.0.0/Conv/WithoutBiases/fq_weights_1" [label="[960, 1, 1, 1]", style=solid]; -"276 Constant_114211" -> "272 /features/features.17/conv/conv.0/conv.0.0/Conv/WithoutBiases/fq_weights_1" [label="[960, 1, 1, 1]", style=solid]; +"273 Constant_41613" -> "272 /features/features.17/conv/conv.0/conv.0.0/Conv/WithoutBiases/fq_weights_1" [label="[960, 1, 1, 1]", style=solid]; +"274 Constant_41612" -> "272 /features/features.17/conv/conv.0/conv.0.0/Conv/WithoutBiases/fq_weights_1" [label="[960, 1, 1, 1]", style=solid]; +"275 Constant_41611" -> "272 /features/features.17/conv/conv.0/conv.0.0/Conv/WithoutBiases/fq_weights_1" [label="[960, 1, 1, 1]", style=solid]; +"276 Constant_41610" -> "272 /features/features.17/conv/conv.0/conv.0.0/Conv/WithoutBiases/fq_weights_1" [label="[960, 1, 1, 1]", style=solid]; "277 onnx^^Conv_682" -> "272 /features/features.17/conv/conv.0/conv.0.0/Conv/WithoutBiases/fq_weights_1" [label="[960, 160, 1, 1]", style=solid]; -"278 Constant_114209" -> "181 /features/features.16/Add/fq_output_0" [label="[]", style=solid]; -"279 Constant_114208" -> "181 
/features/features.16/Add/fq_output_0" [label="[]", style=solid]; -"280 Constant_114207" -> "181 /features/features.16/Add/fq_output_0" [label="[]", style=solid]; -"281 Constant_114206" -> "181 /features/features.16/Add/fq_output_0" [label="[]", style=solid]; -"282 Constant_114204" -> "208 /features/features.16/conv/conv.2/Conv/fq_output_0" [label="[]", style=solid]; -"283 Constant_114203" -> "208 /features/features.16/conv/conv.2/Conv/fq_output_0" [label="[]", style=solid]; -"284 Constant_114202" -> "208 /features/features.16/conv/conv.2/Conv/fq_output_0" [label="[]", style=solid]; -"285 Constant_114201" -> "208 /features/features.16/conv/conv.2/Conv/fq_output_0" [label="[]", style=solid]; +"278 Constant_41608" -> "181 /features/features.16/Add/fq_output_0" [label="[]", style=solid]; +"279 Constant_41607" -> "181 /features/features.16/Add/fq_output_0" [label="[]", style=solid]; +"280 Constant_41606" -> "181 /features/features.16/Add/fq_output_0" [label="[]", style=solid]; +"281 Constant_41605" -> "181 /features/features.16/Add/fq_output_0" [label="[]", style=solid]; +"282 Constant_41603" -> "208 /features/features.16/conv/conv.2/Conv/fq_output_0" [label="[]", style=solid]; +"283 Constant_41602" -> "208 /features/features.16/conv/conv.2/Conv/fq_output_0" [label="[]", style=solid]; +"284 Constant_41601" -> "208 /features/features.16/conv/conv.2/Conv/fq_output_0" [label="[]", style=solid]; +"285 Constant_41600" -> "208 /features/features.16/conv/conv.2/Conv/fq_output_0" [label="[]", style=solid]; "286 Reshape_1729" -> "206 /features/features.16/conv/conv.2/Conv" [label="[1, 160, 1, 1]", style=solid]; "287 /features/features.16/conv/conv.2/Conv/WithoutBiases/fq_weights_1" -> "203 /features/features.16/conv/conv.2/Conv/WithoutBiases" [label="[160, 960, 1, 1]", style=solid]; -"288 Constant_114199" -> "287 /features/features.16/conv/conv.2/Conv/WithoutBiases/fq_weights_1" [label="[160, 1, 1, 1]", style=solid]; -"289 Constant_114198" -> "287 /features/features.16/conv/conv.2/Conv/WithoutBiases/fq_weights_1" [label="[160, 1, 1, 1]", style=solid]; -"290 Constant_114197" -> "287 /features/features.16/conv/conv.2/Conv/WithoutBiases/fq_weights_1" [label="[160, 1, 1, 1]", style=solid]; -"291 Constant_114196" -> "287 /features/features.16/conv/conv.2/Conv/WithoutBiases/fq_weights_1" [label="[160, 1, 1, 1]", style=solid]; +"288 Constant_41598" -> "287 /features/features.16/conv/conv.2/Conv/WithoutBiases/fq_weights_1" [label="[160, 1, 1, 1]", style=solid]; +"289 Constant_41597" -> "287 /features/features.16/conv/conv.2/Conv/WithoutBiases/fq_weights_1" [label="[160, 1, 1, 1]", style=solid]; +"290 Constant_41596" -> "287 /features/features.16/conv/conv.2/Conv/WithoutBiases/fq_weights_1" [label="[160, 1, 1, 1]", style=solid]; +"291 Constant_41595" -> "287 /features/features.16/conv/conv.2/Conv/WithoutBiases/fq_weights_1" [label="[160, 1, 1, 1]", style=solid]; "292 onnx^^Conv_679" -> "287 /features/features.16/conv/conv.2/Conv/WithoutBiases/fq_weights_1" [label="[160, 960, 1, 1]", style=solid]; -"293 Constant_114194" -> "200 /features/features.16/conv/conv.1/conv.1.2/Clip/fq_output_0" [label="[]", style=solid]; -"294 Constant_114193" -> "200 /features/features.16/conv/conv.1/conv.1.2/Clip/fq_output_0" [label="[]", style=solid]; -"295 Constant_114192" -> "200 /features/features.16/conv/conv.1/conv.1.2/Clip/fq_output_0" [label="[]", style=solid]; -"296 Constant_114191" -> "200 /features/features.16/conv/conv.1/conv.1.2/Clip/fq_output_0" [label="[]", style=solid]; +"293 Constant_41593" -> "200 
/features/features.16/conv/conv.1/conv.1.2/Clip/fq_output_0" [label="[]", style=solid]; +"294 Constant_41592" -> "200 /features/features.16/conv/conv.1/conv.1.2/Clip/fq_output_0" [label="[]", style=solid]; +"295 Constant_41591" -> "200 /features/features.16/conv/conv.1/conv.1.2/Clip/fq_output_0" [label="[]", style=solid]; +"296 Constant_41590" -> "200 /features/features.16/conv/conv.1/conv.1.2/Clip/fq_output_0" [label="[]", style=solid]; "297 Reshape_1710" -> "194 /features/features.16/conv/conv.1/conv.1.0/Conv" [label="[1, 960, 1, 1]", style=solid]; "298 /features/features.16/conv/conv.1/conv.1.0/Conv/WithoutBiases/fq_weights_1" -> "191 /features/features.16/conv/conv.1/conv.1.0/Conv/WithoutBiases" [label="[960, 1, 1, 3, 3]", style=solid]; -"299 Constant_114189" -> "298 /features/features.16/conv/conv.1/conv.1.0/Conv/WithoutBiases/fq_weights_1" [label="[960, 1, 1, 1, 1]", style=solid]; -"300 Constant_114188" -> "298 /features/features.16/conv/conv.1/conv.1.0/Conv/WithoutBiases/fq_weights_1" [label="[960, 1, 1, 1, 1]", style=solid]; -"301 Constant_114187" -> "298 /features/features.16/conv/conv.1/conv.1.0/Conv/WithoutBiases/fq_weights_1" [label="[960, 1, 1, 1, 1]", style=solid]; -"302 Constant_114186" -> "298 /features/features.16/conv/conv.1/conv.1.0/Conv/WithoutBiases/fq_weights_1" [label="[960, 1, 1, 1, 1]", style=solid]; +"299 Constant_41588" -> "298 /features/features.16/conv/conv.1/conv.1.0/Conv/WithoutBiases/fq_weights_1" [label="[960, 1, 1, 1, 1]", style=solid]; +"300 Constant_41587" -> "298 /features/features.16/conv/conv.1/conv.1.0/Conv/WithoutBiases/fq_weights_1" [label="[960, 1, 1, 1, 1]", style=solid]; +"301 Constant_41586" -> "298 /features/features.16/conv/conv.1/conv.1.0/Conv/WithoutBiases/fq_weights_1" [label="[960, 1, 1, 1, 1]", style=solid]; +"302 Constant_41585" -> "298 /features/features.16/conv/conv.1/conv.1.0/Conv/WithoutBiases/fq_weights_1" [label="[960, 1, 1, 1, 1]", style=solid]; "303 Reshape_1658" -> "298 /features/features.16/conv/conv.1/conv.1.0/Conv/WithoutBiases/fq_weights_1" [label="[960, 1, 1, 3, 3]", style=solid]; -"304 Constant_114184" -> "188 /features/features.16/conv/conv.0/conv.0.2/Clip/fq_output_0" [label="[1, 960, 1, 1]", style=solid]; -"305 Constant_114183" -> "188 /features/features.16/conv/conv.0/conv.0.2/Clip/fq_output_0" [label="[1, 960, 1, 1]", style=solid]; -"306 Constant_114182" -> "188 /features/features.16/conv/conv.0/conv.0.2/Clip/fq_output_0" [label="[1, 960, 1, 1]", style=solid]; -"307 Constant_114181" -> "188 /features/features.16/conv/conv.0/conv.0.2/Clip/fq_output_0" [label="[1, 960, 1, 1]", style=solid]; +"304 Constant_41583" -> "188 /features/features.16/conv/conv.0/conv.0.2/Clip/fq_output_0" [label="[1, 960, 1, 1]", style=solid]; +"305 Constant_41582" -> "188 /features/features.16/conv/conv.0/conv.0.2/Clip/fq_output_0" [label="[1, 960, 1, 1]", style=solid]; +"306 Constant_41581" -> "188 /features/features.16/conv/conv.0/conv.0.2/Clip/fq_output_0" [label="[1, 960, 1, 1]", style=solid]; +"307 Constant_41580" -> "188 /features/features.16/conv/conv.0/conv.0.2/Clip/fq_output_0" [label="[1, 960, 1, 1]", style=solid]; "308 Reshape_1643" -> "182 /features/features.16/conv/conv.0/conv.0.0/Conv" [label="[1, 960, 1, 1]", style=solid]; "309 /features/features.16/conv/conv.0/conv.0.0/Conv/WithoutBiases/fq_weights_1" -> "179 /features/features.16/conv/conv.0/conv.0.0/Conv/WithoutBiases" [label="[960, 160, 1, 1]", style=solid]; -"310 Constant_114179" -> "309 /features/features.16/conv/conv.0/conv.0.0/Conv/WithoutBiases/fq_weights_1" 
[label="[960, 1, 1, 1]", style=solid]; -"311 Constant_114178" -> "309 /features/features.16/conv/conv.0/conv.0.0/Conv/WithoutBiases/fq_weights_1" [label="[960, 1, 1, 1]", style=solid]; -"312 Constant_114177" -> "309 /features/features.16/conv/conv.0/conv.0.0/Conv/WithoutBiases/fq_weights_1" [label="[960, 1, 1, 1]", style=solid]; -"313 Constant_114176" -> "309 /features/features.16/conv/conv.0/conv.0.0/Conv/WithoutBiases/fq_weights_1" [label="[960, 1, 1, 1]", style=solid]; +"310 Constant_41578" -> "309 /features/features.16/conv/conv.0/conv.0.0/Conv/WithoutBiases/fq_weights_1" [label="[960, 1, 1, 1]", style=solid]; +"311 Constant_41577" -> "309 /features/features.16/conv/conv.0/conv.0.0/Conv/WithoutBiases/fq_weights_1" [label="[960, 1, 1, 1]", style=solid]; +"312 Constant_41576" -> "309 /features/features.16/conv/conv.0/conv.0.0/Conv/WithoutBiases/fq_weights_1" [label="[960, 1, 1, 1]", style=solid]; +"313 Constant_41575" -> "309 /features/features.16/conv/conv.0/conv.0.0/Conv/WithoutBiases/fq_weights_1" [label="[960, 1, 1, 1]", style=solid]; "314 onnx^^Conv_673" -> "309 /features/features.16/conv/conv.0/conv.0.0/Conv/WithoutBiases/fq_weights_1" [label="[960, 160, 1, 1]", style=solid]; -"315 Constant_114174" -> "176 /features/features.15/Add/fq_output_0" [label="[]", style=solid]; -"316 Constant_114173" -> "176 /features/features.15/Add/fq_output_0" [label="[]", style=solid]; -"317 Constant_114172" -> "176 /features/features.15/Add/fq_output_0" [label="[]", style=solid]; -"318 Constant_114171" -> "176 /features/features.15/Add/fq_output_0" [label="[]", style=solid]; -"319 Constant_114169" -> "204 /features/features.15/conv/conv.2/Conv/fq_output_0" [label="[]", style=solid]; -"320 Constant_114168" -> "204 /features/features.15/conv/conv.2/Conv/fq_output_0" [label="[]", style=solid]; -"321 Constant_114167" -> "204 /features/features.15/conv/conv.2/Conv/fq_output_0" [label="[]", style=solid]; -"322 Constant_114166" -> "204 /features/features.15/conv/conv.2/Conv/fq_output_0" [label="[]", style=solid]; +"315 Constant_41573" -> "176 /features/features.15/Add/fq_output_0" [label="[]", style=solid]; +"316 Constant_41572" -> "176 /features/features.15/Add/fq_output_0" [label="[]", style=solid]; +"317 Constant_41571" -> "176 /features/features.15/Add/fq_output_0" [label="[]", style=solid]; +"318 Constant_41570" -> "176 /features/features.15/Add/fq_output_0" [label="[]", style=solid]; +"319 Constant_41568" -> "204 /features/features.15/conv/conv.2/Conv/fq_output_0" [label="[]", style=solid]; +"320 Constant_41567" -> "204 /features/features.15/conv/conv.2/Conv/fq_output_0" [label="[]", style=solid]; +"321 Constant_41566" -> "204 /features/features.15/conv/conv.2/Conv/fq_output_0" [label="[]", style=solid]; +"322 Constant_41565" -> "204 /features/features.15/conv/conv.2/Conv/fq_output_0" [label="[]", style=solid]; "323 Reshape_1627" -> "201 /features/features.15/conv/conv.2/Conv" [label="[1, 160, 1, 1]", style=solid]; "324 /features/features.15/conv/conv.2/Conv/WithoutBiases/fq_weights_1" -> "198 /features/features.15/conv/conv.2/Conv/WithoutBiases" [label="[160, 960, 1, 1]", style=solid]; -"325 Constant_114164" -> "324 /features/features.15/conv/conv.2/Conv/WithoutBiases/fq_weights_1" [label="[160, 1, 1, 1]", style=solid]; -"326 Constant_114163" -> "324 /features/features.15/conv/conv.2/Conv/WithoutBiases/fq_weights_1" [label="[160, 1, 1, 1]", style=solid]; -"327 Constant_114162" -> "324 /features/features.15/conv/conv.2/Conv/WithoutBiases/fq_weights_1" [label="[160, 1, 1, 1]", style=solid]; -"328 
Constant_114161" -> "324 /features/features.15/conv/conv.2/Conv/WithoutBiases/fq_weights_1" [label="[160, 1, 1, 1]", style=solid]; +"325 Constant_41563" -> "324 /features/features.15/conv/conv.2/Conv/WithoutBiases/fq_weights_1" [label="[160, 1, 1, 1]", style=solid]; +"326 Constant_41562" -> "324 /features/features.15/conv/conv.2/Conv/WithoutBiases/fq_weights_1" [label="[160, 1, 1, 1]", style=solid]; +"327 Constant_41561" -> "324 /features/features.15/conv/conv.2/Conv/WithoutBiases/fq_weights_1" [label="[160, 1, 1, 1]", style=solid]; +"328 Constant_41560" -> "324 /features/features.15/conv/conv.2/Conv/WithoutBiases/fq_weights_1" [label="[160, 1, 1, 1]", style=solid]; "329 onnx^^Conv_670" -> "324 /features/features.15/conv/conv.2/Conv/WithoutBiases/fq_weights_1" [label="[160, 960, 1, 1]", style=solid]; -"330 Constant_114159" -> "195 /features/features.15/conv/conv.1/conv.1.2/Clip/fq_output_0" [label="[]", style=solid]; -"331 Constant_114158" -> "195 /features/features.15/conv/conv.1/conv.1.2/Clip/fq_output_0" [label="[]", style=solid]; -"332 Constant_114157" -> "195 /features/features.15/conv/conv.1/conv.1.2/Clip/fq_output_0" [label="[]", style=solid]; -"333 Constant_114156" -> "195 /features/features.15/conv/conv.1/conv.1.2/Clip/fq_output_0" [label="[]", style=solid]; +"330 Constant_41558" -> "195 /features/features.15/conv/conv.1/conv.1.2/Clip/fq_output_0" [label="[]", style=solid]; +"331 Constant_41557" -> "195 /features/features.15/conv/conv.1/conv.1.2/Clip/fq_output_0" [label="[]", style=solid]; +"332 Constant_41556" -> "195 /features/features.15/conv/conv.1/conv.1.2/Clip/fq_output_0" [label="[]", style=solid]; +"333 Constant_41555" -> "195 /features/features.15/conv/conv.1/conv.1.2/Clip/fq_output_0" [label="[]", style=solid]; "334 Reshape_1608" -> "189 /features/features.15/conv/conv.1/conv.1.0/Conv" [label="[1, 960, 1, 1]", style=solid]; "335 /features/features.15/conv/conv.1/conv.1.0/Conv/WithoutBiases/fq_weights_1" -> "186 /features/features.15/conv/conv.1/conv.1.0/Conv/WithoutBiases" [label="[960, 1, 1, 3, 3]", style=solid]; -"336 Constant_114154" -> "335 /features/features.15/conv/conv.1/conv.1.0/Conv/WithoutBiases/fq_weights_1" [label="[960, 1, 1, 1, 1]", style=solid]; -"337 Constant_114153" -> "335 /features/features.15/conv/conv.1/conv.1.0/Conv/WithoutBiases/fq_weights_1" [label="[960, 1, 1, 1, 1]", style=solid]; -"338 Constant_114152" -> "335 /features/features.15/conv/conv.1/conv.1.0/Conv/WithoutBiases/fq_weights_1" [label="[960, 1, 1, 1, 1]", style=solid]; -"339 Constant_114151" -> "335 /features/features.15/conv/conv.1/conv.1.0/Conv/WithoutBiases/fq_weights_1" [label="[960, 1, 1, 1, 1]", style=solid]; +"336 Constant_41553" -> "335 /features/features.15/conv/conv.1/conv.1.0/Conv/WithoutBiases/fq_weights_1" [label="[960, 1, 1, 1, 1]", style=solid]; +"337 Constant_41552" -> "335 /features/features.15/conv/conv.1/conv.1.0/Conv/WithoutBiases/fq_weights_1" [label="[960, 1, 1, 1, 1]", style=solid]; +"338 Constant_41551" -> "335 /features/features.15/conv/conv.1/conv.1.0/Conv/WithoutBiases/fq_weights_1" [label="[960, 1, 1, 1, 1]", style=solid]; +"339 Constant_41550" -> "335 /features/features.15/conv/conv.1/conv.1.0/Conv/WithoutBiases/fq_weights_1" [label="[960, 1, 1, 1, 1]", style=solid]; "340 Reshape_1556" -> "335 /features/features.15/conv/conv.1/conv.1.0/Conv/WithoutBiases/fq_weights_1" [label="[960, 1, 1, 3, 3]", style=solid]; -"341 Constant_114149" -> "183 /features/features.15/conv/conv.0/conv.0.2/Clip/fq_output_0" [label="[1, 960, 1, 1]", style=solid]; -"342 
Constant_114148" -> "183 /features/features.15/conv/conv.0/conv.0.2/Clip/fq_output_0" [label="[1, 960, 1, 1]", style=solid]; -"343 Constant_114147" -> "183 /features/features.15/conv/conv.0/conv.0.2/Clip/fq_output_0" [label="[1, 960, 1, 1]", style=solid]; -"344 Constant_114146" -> "183 /features/features.15/conv/conv.0/conv.0.2/Clip/fq_output_0" [label="[1, 960, 1, 1]", style=solid]; +"341 Constant_41548" -> "183 /features/features.15/conv/conv.0/conv.0.2/Clip/fq_output_0" [label="[1, 960, 1, 1]", style=solid]; +"342 Constant_41547" -> "183 /features/features.15/conv/conv.0/conv.0.2/Clip/fq_output_0" [label="[1, 960, 1, 1]", style=solid]; +"343 Constant_41546" -> "183 /features/features.15/conv/conv.0/conv.0.2/Clip/fq_output_0" [label="[1, 960, 1, 1]", style=solid]; +"344 Constant_41545" -> "183 /features/features.15/conv/conv.0/conv.0.2/Clip/fq_output_0" [label="[1, 960, 1, 1]", style=solid]; "345 Reshape_1541" -> "177 /features/features.15/conv/conv.0/conv.0.0/Conv" [label="[1, 960, 1, 1]", style=solid]; "346 /features/features.15/conv/conv.0/conv.0.0/Conv/WithoutBiases/fq_weights_1" -> "175 /features/features.15/conv/conv.0/conv.0.0/Conv/WithoutBiases" [label="[960, 160, 1, 1]", style=solid]; -"347 Constant_114144" -> "346 /features/features.15/conv/conv.0/conv.0.0/Conv/WithoutBiases/fq_weights_1" [label="[960, 1, 1, 1]", style=solid]; -"348 Constant_114143" -> "346 /features/features.15/conv/conv.0/conv.0.0/Conv/WithoutBiases/fq_weights_1" [label="[960, 1, 1, 1]", style=solid]; -"349 Constant_114142" -> "346 /features/features.15/conv/conv.0/conv.0.0/Conv/WithoutBiases/fq_weights_1" [label="[960, 1, 1, 1]", style=solid]; -"350 Constant_114141" -> "346 /features/features.15/conv/conv.0/conv.0.0/Conv/WithoutBiases/fq_weights_1" [label="[960, 1, 1, 1]", style=solid]; +"347 Constant_41543" -> "346 /features/features.15/conv/conv.0/conv.0.0/Conv/WithoutBiases/fq_weights_1" [label="[960, 1, 1, 1]", style=solid]; +"348 Constant_41542" -> "346 /features/features.15/conv/conv.0/conv.0.0/Conv/WithoutBiases/fq_weights_1" [label="[960, 1, 1, 1]", style=solid]; +"349 Constant_41541" -> "346 /features/features.15/conv/conv.0/conv.0.0/Conv/WithoutBiases/fq_weights_1" [label="[960, 1, 1, 1]", style=solid]; +"350 Constant_41540" -> "346 /features/features.15/conv/conv.0/conv.0.0/Conv/WithoutBiases/fq_weights_1" [label="[960, 1, 1, 1]", style=solid]; "351 onnx^^Conv_664" -> "346 /features/features.15/conv/conv.0/conv.0.0/Conv/WithoutBiases/fq_weights_1" [label="[960, 160, 1, 1]", style=solid]; -"352 Constant_114139" -> "173 /features/features.14/conv/conv.2/Conv/fq_output_0" [label="[]", style=solid]; -"353 Constant_114138" -> "173 /features/features.14/conv/conv.2/Conv/fq_output_0" [label="[]", style=solid]; -"354 Constant_114137" -> "173 /features/features.14/conv/conv.2/Conv/fq_output_0" [label="[]", style=solid]; -"355 Constant_114136" -> "173 /features/features.14/conv/conv.2/Conv/fq_output_0" [label="[]", style=solid]; +"352 Constant_41538" -> "173 /features/features.14/conv/conv.2/Conv/fq_output_0" [label="[]", style=solid]; +"353 Constant_41537" -> "173 /features/features.14/conv/conv.2/Conv/fq_output_0" [label="[]", style=solid]; +"354 Constant_41536" -> "173 /features/features.14/conv/conv.2/Conv/fq_output_0" [label="[]", style=solid]; +"355 Constant_41535" -> "173 /features/features.14/conv/conv.2/Conv/fq_output_0" [label="[]", style=solid]; "356 Reshape_1526" -> "172 /features/features.14/conv/conv.2/Conv" [label="[1, 160, 1, 1]", style=solid]; "357 
/features/features.14/conv/conv.2/Conv/WithoutBiases/fq_weights_1" -> "170 /features/features.14/conv/conv.2/Conv/WithoutBiases" [label="[160, 576, 1, 1]", style=solid]; -"358 Constant_114134" -> "357 /features/features.14/conv/conv.2/Conv/WithoutBiases/fq_weights_1" [label="[160, 1, 1, 1]", style=solid]; -"359 Constant_114133" -> "357 /features/features.14/conv/conv.2/Conv/WithoutBiases/fq_weights_1" [label="[160, 1, 1, 1]", style=solid]; -"360 Constant_114132" -> "357 /features/features.14/conv/conv.2/Conv/WithoutBiases/fq_weights_1" [label="[160, 1, 1, 1]", style=solid]; -"361 Constant_114131" -> "357 /features/features.14/conv/conv.2/Conv/WithoutBiases/fq_weights_1" [label="[160, 1, 1, 1]", style=solid]; +"358 Constant_41533" -> "357 /features/features.14/conv/conv.2/Conv/WithoutBiases/fq_weights_1" [label="[160, 1, 1, 1]", style=solid]; +"359 Constant_41532" -> "357 /features/features.14/conv/conv.2/Conv/WithoutBiases/fq_weights_1" [label="[160, 1, 1, 1]", style=solid]; +"360 Constant_41531" -> "357 /features/features.14/conv/conv.2/Conv/WithoutBiases/fq_weights_1" [label="[160, 1, 1, 1]", style=solid]; +"361 Constant_41530" -> "357 /features/features.14/conv/conv.2/Conv/WithoutBiases/fq_weights_1" [label="[160, 1, 1, 1]", style=solid]; "362 onnx^^Conv_661" -> "357 /features/features.14/conv/conv.2/Conv/WithoutBiases/fq_weights_1" [label="[160, 576, 1, 1]", style=solid]; -"363 Constant_114129" -> "168 /features/features.14/conv/conv.1/conv.1.2/Clip/fq_output_0" [label="[]", style=solid]; -"364 Constant_114128" -> "168 /features/features.14/conv/conv.1/conv.1.2/Clip/fq_output_0" [label="[]", style=solid]; -"365 Constant_114127" -> "168 /features/features.14/conv/conv.1/conv.1.2/Clip/fq_output_0" [label="[]", style=solid]; -"366 Constant_114126" -> "168 /features/features.14/conv/conv.1/conv.1.2/Clip/fq_output_0" [label="[]", style=solid]; +"363 Constant_41528" -> "168 /features/features.14/conv/conv.1/conv.1.2/Clip/fq_output_0" [label="[]", style=solid]; +"364 Constant_41527" -> "168 /features/features.14/conv/conv.1/conv.1.2/Clip/fq_output_0" [label="[]", style=solid]; +"365 Constant_41526" -> "168 /features/features.14/conv/conv.1/conv.1.2/Clip/fq_output_0" [label="[]", style=solid]; +"366 Constant_41525" -> "168 /features/features.14/conv/conv.1/conv.1.2/Clip/fq_output_0" [label="[]", style=solid]; "367 Reshape_1507" -> "162 /features/features.14/conv/conv.1/conv.1.0/Conv" [label="[1, 576, 1, 1]", style=solid]; "368 /features/features.14/conv/conv.1/conv.1.0/Conv/WithoutBiases/fq_weights_1" -> "159 /features/features.14/conv/conv.1/conv.1.0/Conv/WithoutBiases" [label="[576, 1, 1, 3, 3]", style=solid]; -"369 Constant_114124" -> "368 /features/features.14/conv/conv.1/conv.1.0/Conv/WithoutBiases/fq_weights_1" [label="[576, 1, 1, 1, 1]", style=solid]; -"370 Constant_114123" -> "368 /features/features.14/conv/conv.1/conv.1.0/Conv/WithoutBiases/fq_weights_1" [label="[576, 1, 1, 1, 1]", style=solid]; -"371 Constant_114122" -> "368 /features/features.14/conv/conv.1/conv.1.0/Conv/WithoutBiases/fq_weights_1" [label="[576, 1, 1, 1, 1]", style=solid]; -"372 Constant_114121" -> "368 /features/features.14/conv/conv.1/conv.1.0/Conv/WithoutBiases/fq_weights_1" [label="[576, 1, 1, 1, 1]", style=solid]; +"369 Constant_41523" -> "368 /features/features.14/conv/conv.1/conv.1.0/Conv/WithoutBiases/fq_weights_1" [label="[576, 1, 1, 1, 1]", style=solid]; +"370 Constant_41522" -> "368 /features/features.14/conv/conv.1/conv.1.0/Conv/WithoutBiases/fq_weights_1" [label="[576, 1, 1, 1, 1]", style=solid]; +"371 
Constant_41521" -> "368 /features/features.14/conv/conv.1/conv.1.0/Conv/WithoutBiases/fq_weights_1" [label="[576, 1, 1, 1, 1]", style=solid]; +"372 Constant_41520" -> "368 /features/features.14/conv/conv.1/conv.1.0/Conv/WithoutBiases/fq_weights_1" [label="[576, 1, 1, 1, 1]", style=solid]; "373 Reshape_1455" -> "368 /features/features.14/conv/conv.1/conv.1.0/Conv/WithoutBiases/fq_weights_1" [label="[576, 1, 1, 3, 3]", style=solid]; -"374 Constant_114119" -> "156 /features/features.14/conv/conv.0/conv.0.2/Clip/fq_output_0" [label="[1, 576, 1, 1]", style=solid]; -"375 Constant_114118" -> "156 /features/features.14/conv/conv.0/conv.0.2/Clip/fq_output_0" [label="[1, 576, 1, 1]", style=solid]; -"376 Constant_114117" -> "156 /features/features.14/conv/conv.0/conv.0.2/Clip/fq_output_0" [label="[1, 576, 1, 1]", style=solid]; -"377 Constant_114116" -> "156 /features/features.14/conv/conv.0/conv.0.2/Clip/fq_output_0" [label="[1, 576, 1, 1]", style=solid]; +"374 Constant_41518" -> "156 /features/features.14/conv/conv.0/conv.0.2/Clip/fq_output_0" [label="[1, 576, 1, 1]", style=solid]; +"375 Constant_41517" -> "156 /features/features.14/conv/conv.0/conv.0.2/Clip/fq_output_0" [label="[1, 576, 1, 1]", style=solid]; +"376 Constant_41516" -> "156 /features/features.14/conv/conv.0/conv.0.2/Clip/fq_output_0" [label="[1, 576, 1, 1]", style=solid]; +"377 Constant_41515" -> "156 /features/features.14/conv/conv.0/conv.0.2/Clip/fq_output_0" [label="[1, 576, 1, 1]", style=solid]; "378 Reshape_1440" -> "150 /features/features.14/conv/conv.0/conv.0.0/Conv" [label="[1, 576, 1, 1]", style=solid]; "379 /features/features.14/conv/conv.0/conv.0.0/Conv/WithoutBiases/fq_weights_1" -> "147 /features/features.14/conv/conv.0/conv.0.0/Conv/WithoutBiases" [label="[576, 96, 1, 1]", style=solid]; -"380 Constant_114114" -> "379 /features/features.14/conv/conv.0/conv.0.0/Conv/WithoutBiases/fq_weights_1" [label="[576, 1, 1, 1]", style=solid]; -"381 Constant_114113" -> "379 /features/features.14/conv/conv.0/conv.0.0/Conv/WithoutBiases/fq_weights_1" [label="[576, 1, 1, 1]", style=solid]; -"382 Constant_114112" -> "379 /features/features.14/conv/conv.0/conv.0.0/Conv/WithoutBiases/fq_weights_1" [label="[576, 1, 1, 1]", style=solid]; -"383 Constant_114111" -> "379 /features/features.14/conv/conv.0/conv.0.0/Conv/WithoutBiases/fq_weights_1" [label="[576, 1, 1, 1]", style=solid]; +"380 Constant_41513" -> "379 /features/features.14/conv/conv.0/conv.0.0/Conv/WithoutBiases/fq_weights_1" [label="[576, 1, 1, 1]", style=solid]; +"381 Constant_41512" -> "379 /features/features.14/conv/conv.0/conv.0.0/Conv/WithoutBiases/fq_weights_1" [label="[576, 1, 1, 1]", style=solid]; +"382 Constant_41511" -> "379 /features/features.14/conv/conv.0/conv.0.0/Conv/WithoutBiases/fq_weights_1" [label="[576, 1, 1, 1]", style=solid]; +"383 Constant_41510" -> "379 /features/features.14/conv/conv.0/conv.0.0/Conv/WithoutBiases/fq_weights_1" [label="[576, 1, 1, 1]", style=solid]; "384 onnx^^Conv_655" -> "379 /features/features.14/conv/conv.0/conv.0.0/Conv/WithoutBiases/fq_weights_1" [label="[576, 96, 1, 1]", style=solid]; -"385 Constant_114109" -> "144 /features/features.13/Add/fq_output_0" [label="[]", style=solid]; -"386 Constant_114108" -> "144 /features/features.13/Add/fq_output_0" [label="[]", style=solid]; -"387 Constant_114107" -> "144 /features/features.13/Add/fq_output_0" [label="[]", style=solid]; -"388 Constant_114106" -> "144 /features/features.13/Add/fq_output_0" [label="[]", style=solid]; -"389 Constant_114104" -> "171 
/features/features.13/conv/conv.2/Conv/fq_output_0" [label="[]", style=solid]; -"390 Constant_114103" -> "171 /features/features.13/conv/conv.2/Conv/fq_output_0" [label="[]", style=solid]; -"391 Constant_114102" -> "171 /features/features.13/conv/conv.2/Conv/fq_output_0" [label="[]", style=solid]; -"392 Constant_114101" -> "171 /features/features.13/conv/conv.2/Conv/fq_output_0" [label="[]", style=solid]; +"385 Constant_41508" -> "144 /features/features.13/Add/fq_output_0" [label="[]", style=solid]; +"386 Constant_41507" -> "144 /features/features.13/Add/fq_output_0" [label="[]", style=solid]; +"387 Constant_41506" -> "144 /features/features.13/Add/fq_output_0" [label="[]", style=solid]; +"388 Constant_41505" -> "144 /features/features.13/Add/fq_output_0" [label="[]", style=solid]; +"389 Constant_41503" -> "171 /features/features.13/conv/conv.2/Conv/fq_output_0" [label="[]", style=solid]; +"390 Constant_41502" -> "171 /features/features.13/conv/conv.2/Conv/fq_output_0" [label="[]", style=solid]; +"391 Constant_41501" -> "171 /features/features.13/conv/conv.2/Conv/fq_output_0" [label="[]", style=solid]; +"392 Constant_41500" -> "171 /features/features.13/conv/conv.2/Conv/fq_output_0" [label="[]", style=solid]; "393 Reshape_1424" -> "169 /features/features.13/conv/conv.2/Conv" [label="[1, 96, 1, 1]", style=solid]; "394 /features/features.13/conv/conv.2/Conv/WithoutBiases/fq_weights_1" -> "166 /features/features.13/conv/conv.2/Conv/WithoutBiases" [label="[96, 576, 1, 1]", style=solid]; -"395 Constant_114099" -> "394 /features/features.13/conv/conv.2/Conv/WithoutBiases/fq_weights_1" [label="[96, 1, 1, 1]", style=solid]; -"396 Constant_114098" -> "394 /features/features.13/conv/conv.2/Conv/WithoutBiases/fq_weights_1" [label="[96, 1, 1, 1]", style=solid]; -"397 Constant_114097" -> "394 /features/features.13/conv/conv.2/Conv/WithoutBiases/fq_weights_1" [label="[96, 1, 1, 1]", style=solid]; -"398 Constant_114096" -> "394 /features/features.13/conv/conv.2/Conv/WithoutBiases/fq_weights_1" [label="[96, 1, 1, 1]", style=solid]; +"395 Constant_41498" -> "394 /features/features.13/conv/conv.2/Conv/WithoutBiases/fq_weights_1" [label="[96, 1, 1, 1]", style=solid]; +"396 Constant_41497" -> "394 /features/features.13/conv/conv.2/Conv/WithoutBiases/fq_weights_1" [label="[96, 1, 1, 1]", style=solid]; +"397 Constant_41496" -> "394 /features/features.13/conv/conv.2/Conv/WithoutBiases/fq_weights_1" [label="[96, 1, 1, 1]", style=solid]; +"398 Constant_41495" -> "394 /features/features.13/conv/conv.2/Conv/WithoutBiases/fq_weights_1" [label="[96, 1, 1, 1]", style=solid]; "399 onnx^^Conv_652" -> "394 /features/features.13/conv/conv.2/Conv/WithoutBiases/fq_weights_1" [label="[96, 576, 1, 1]", style=solid]; -"400 Constant_114094" -> "163 /features/features.13/conv/conv.1/conv.1.2/Clip/fq_output_0" [label="[]", style=solid]; -"401 Constant_114093" -> "163 /features/features.13/conv/conv.1/conv.1.2/Clip/fq_output_0" [label="[]", style=solid]; -"402 Constant_114092" -> "163 /features/features.13/conv/conv.1/conv.1.2/Clip/fq_output_0" [label="[]", style=solid]; -"403 Constant_114091" -> "163 /features/features.13/conv/conv.1/conv.1.2/Clip/fq_output_0" [label="[]", style=solid]; +"400 Constant_41493" -> "163 /features/features.13/conv/conv.1/conv.1.2/Clip/fq_output_0" [label="[]", style=solid]; +"401 Constant_41492" -> "163 /features/features.13/conv/conv.1/conv.1.2/Clip/fq_output_0" [label="[]", style=solid]; +"402 Constant_41491" -> "163 /features/features.13/conv/conv.1/conv.1.2/Clip/fq_output_0" [label="[]", 
style=solid]; +"403 Constant_41490" -> "163 /features/features.13/conv/conv.1/conv.1.2/Clip/fq_output_0" [label="[]", style=solid]; "404 Reshape_1405" -> "157 /features/features.13/conv/conv.1/conv.1.0/Conv" [label="[1, 576, 1, 1]", style=solid]; "405 /features/features.13/conv/conv.1/conv.1.0/Conv/WithoutBiases/fq_weights_1" -> "154 /features/features.13/conv/conv.1/conv.1.0/Conv/WithoutBiases" [label="[576, 1, 1, 3, 3]", style=solid]; -"406 Constant_114089" -> "405 /features/features.13/conv/conv.1/conv.1.0/Conv/WithoutBiases/fq_weights_1" [label="[576, 1, 1, 1, 1]", style=solid]; -"407 Constant_114088" -> "405 /features/features.13/conv/conv.1/conv.1.0/Conv/WithoutBiases/fq_weights_1" [label="[576, 1, 1, 1, 1]", style=solid]; -"408 Constant_114087" -> "405 /features/features.13/conv/conv.1/conv.1.0/Conv/WithoutBiases/fq_weights_1" [label="[576, 1, 1, 1, 1]", style=solid]; -"409 Constant_114086" -> "405 /features/features.13/conv/conv.1/conv.1.0/Conv/WithoutBiases/fq_weights_1" [label="[576, 1, 1, 1, 1]", style=solid]; +"406 Constant_41488" -> "405 /features/features.13/conv/conv.1/conv.1.0/Conv/WithoutBiases/fq_weights_1" [label="[576, 1, 1, 1, 1]", style=solid]; +"407 Constant_41487" -> "405 /features/features.13/conv/conv.1/conv.1.0/Conv/WithoutBiases/fq_weights_1" [label="[576, 1, 1, 1, 1]", style=solid]; +"408 Constant_41486" -> "405 /features/features.13/conv/conv.1/conv.1.0/Conv/WithoutBiases/fq_weights_1" [label="[576, 1, 1, 1, 1]", style=solid]; +"409 Constant_41485" -> "405 /features/features.13/conv/conv.1/conv.1.0/Conv/WithoutBiases/fq_weights_1" [label="[576, 1, 1, 1, 1]", style=solid]; "410 Reshape_1353" -> "405 /features/features.13/conv/conv.1/conv.1.0/Conv/WithoutBiases/fq_weights_1" [label="[576, 1, 1, 3, 3]", style=solid]; -"411 Constant_114084" -> "151 /features/features.13/conv/conv.0/conv.0.2/Clip/fq_output_0" [label="[1, 576, 1, 1]", style=solid]; -"412 Constant_114083" -> "151 /features/features.13/conv/conv.0/conv.0.2/Clip/fq_output_0" [label="[1, 576, 1, 1]", style=solid]; -"413 Constant_114082" -> "151 /features/features.13/conv/conv.0/conv.0.2/Clip/fq_output_0" [label="[1, 576, 1, 1]", style=solid]; -"414 Constant_114081" -> "151 /features/features.13/conv/conv.0/conv.0.2/Clip/fq_output_0" [label="[1, 576, 1, 1]", style=solid]; +"411 Constant_41483" -> "151 /features/features.13/conv/conv.0/conv.0.2/Clip/fq_output_0" [label="[1, 576, 1, 1]", style=solid]; +"412 Constant_41482" -> "151 /features/features.13/conv/conv.0/conv.0.2/Clip/fq_output_0" [label="[1, 576, 1, 1]", style=solid]; +"413 Constant_41481" -> "151 /features/features.13/conv/conv.0/conv.0.2/Clip/fq_output_0" [label="[1, 576, 1, 1]", style=solid]; +"414 Constant_41480" -> "151 /features/features.13/conv/conv.0/conv.0.2/Clip/fq_output_0" [label="[1, 576, 1, 1]", style=solid]; "415 Reshape_1338" -> "145 /features/features.13/conv/conv.0/conv.0.0/Conv" [label="[1, 576, 1, 1]", style=solid]; "416 /features/features.13/conv/conv.0/conv.0.0/Conv/WithoutBiases/fq_weights_1" -> "142 /features/features.13/conv/conv.0/conv.0.0/Conv/WithoutBiases" [label="[576, 96, 1, 1]", style=solid]; -"417 Constant_114079" -> "416 /features/features.13/conv/conv.0/conv.0.0/Conv/WithoutBiases/fq_weights_1" [label="[576, 1, 1, 1]", style=solid]; -"418 Constant_114078" -> "416 /features/features.13/conv/conv.0/conv.0.0/Conv/WithoutBiases/fq_weights_1" [label="[576, 1, 1, 1]", style=solid]; -"419 Constant_114077" -> "416 /features/features.13/conv/conv.0/conv.0.0/Conv/WithoutBiases/fq_weights_1" [label="[576, 1, 1, 1]", 
style=solid]; -"420 Constant_114076" -> "416 /features/features.13/conv/conv.0/conv.0.0/Conv/WithoutBiases/fq_weights_1" [label="[576, 1, 1, 1]", style=solid]; +"417 Constant_41478" -> "416 /features/features.13/conv/conv.0/conv.0.0/Conv/WithoutBiases/fq_weights_1" [label="[576, 1, 1, 1]", style=solid]; +"418 Constant_41477" -> "416 /features/features.13/conv/conv.0/conv.0.0/Conv/WithoutBiases/fq_weights_1" [label="[576, 1, 1, 1]", style=solid]; +"419 Constant_41476" -> "416 /features/features.13/conv/conv.0/conv.0.0/Conv/WithoutBiases/fq_weights_1" [label="[576, 1, 1, 1]", style=solid]; +"420 Constant_41475" -> "416 /features/features.13/conv/conv.0/conv.0.0/Conv/WithoutBiases/fq_weights_1" [label="[576, 1, 1, 1]", style=solid]; "421 onnx^^Conv_646" -> "416 /features/features.13/conv/conv.0/conv.0.0/Conv/WithoutBiases/fq_weights_1" [label="[576, 96, 1, 1]", style=solid]; -"422 Constant_114074" -> "139 /features/features.12/Add/fq_output_0" [label="[]", style=solid]; -"423 Constant_114073" -> "139 /features/features.12/Add/fq_output_0" [label="[]", style=solid]; -"424 Constant_114072" -> "139 /features/features.12/Add/fq_output_0" [label="[]", style=solid]; -"425 Constant_114071" -> "139 /features/features.12/Add/fq_output_0" [label="[]", style=solid]; -"426 Constant_114069" -> "167 /features/features.12/conv/conv.2/Conv/fq_output_0" [label="[]", style=solid]; -"427 Constant_114068" -> "167 /features/features.12/conv/conv.2/Conv/fq_output_0" [label="[]", style=solid]; -"428 Constant_114067" -> "167 /features/features.12/conv/conv.2/Conv/fq_output_0" [label="[]", style=solid]; -"429 Constant_114066" -> "167 /features/features.12/conv/conv.2/Conv/fq_output_0" [label="[]", style=solid]; +"422 Constant_41473" -> "139 /features/features.12/Add/fq_output_0" [label="[]", style=solid]; +"423 Constant_41472" -> "139 /features/features.12/Add/fq_output_0" [label="[]", style=solid]; +"424 Constant_41471" -> "139 /features/features.12/Add/fq_output_0" [label="[]", style=solid]; +"425 Constant_41470" -> "139 /features/features.12/Add/fq_output_0" [label="[]", style=solid]; +"426 Constant_41468" -> "167 /features/features.12/conv/conv.2/Conv/fq_output_0" [label="[]", style=solid]; +"427 Constant_41467" -> "167 /features/features.12/conv/conv.2/Conv/fq_output_0" [label="[]", style=solid]; +"428 Constant_41466" -> "167 /features/features.12/conv/conv.2/Conv/fq_output_0" [label="[]", style=solid]; +"429 Constant_41465" -> "167 /features/features.12/conv/conv.2/Conv/fq_output_0" [label="[]", style=solid]; "430 Reshape_1322" -> "164 /features/features.12/conv/conv.2/Conv" [label="[1, 96, 1, 1]", style=solid]; "431 /features/features.12/conv/conv.2/Conv/WithoutBiases/fq_weights_1" -> "161 /features/features.12/conv/conv.2/Conv/WithoutBiases" [label="[96, 576, 1, 1]", style=solid]; -"432 Constant_114064" -> "431 /features/features.12/conv/conv.2/Conv/WithoutBiases/fq_weights_1" [label="[96, 1, 1, 1]", style=solid]; -"433 Constant_114063" -> "431 /features/features.12/conv/conv.2/Conv/WithoutBiases/fq_weights_1" [label="[96, 1, 1, 1]", style=solid]; -"434 Constant_114062" -> "431 /features/features.12/conv/conv.2/Conv/WithoutBiases/fq_weights_1" [label="[96, 1, 1, 1]", style=solid]; -"435 Constant_114061" -> "431 /features/features.12/conv/conv.2/Conv/WithoutBiases/fq_weights_1" [label="[96, 1, 1, 1]", style=solid]; +"432 Constant_41463" -> "431 /features/features.12/conv/conv.2/Conv/WithoutBiases/fq_weights_1" [label="[96, 1, 1, 1]", style=solid]; +"433 Constant_41462" -> "431 
/features/features.12/conv/conv.2/Conv/WithoutBiases/fq_weights_1" [label="[96, 1, 1, 1]", style=solid]; +"434 Constant_41461" -> "431 /features/features.12/conv/conv.2/Conv/WithoutBiases/fq_weights_1" [label="[96, 1, 1, 1]", style=solid]; +"435 Constant_41460" -> "431 /features/features.12/conv/conv.2/Conv/WithoutBiases/fq_weights_1" [label="[96, 1, 1, 1]", style=solid]; "436 onnx^^Conv_643" -> "431 /features/features.12/conv/conv.2/Conv/WithoutBiases/fq_weights_1" [label="[96, 576, 1, 1]", style=solid]; -"437 Constant_114059" -> "158 /features/features.12/conv/conv.1/conv.1.2/Clip/fq_output_0" [label="[]", style=solid]; -"438 Constant_114058" -> "158 /features/features.12/conv/conv.1/conv.1.2/Clip/fq_output_0" [label="[]", style=solid]; -"439 Constant_114057" -> "158 /features/features.12/conv/conv.1/conv.1.2/Clip/fq_output_0" [label="[]", style=solid]; -"440 Constant_114056" -> "158 /features/features.12/conv/conv.1/conv.1.2/Clip/fq_output_0" [label="[]", style=solid]; +"437 Constant_41458" -> "158 /features/features.12/conv/conv.1/conv.1.2/Clip/fq_output_0" [label="[]", style=solid]; +"438 Constant_41457" -> "158 /features/features.12/conv/conv.1/conv.1.2/Clip/fq_output_0" [label="[]", style=solid]; +"439 Constant_41456" -> "158 /features/features.12/conv/conv.1/conv.1.2/Clip/fq_output_0" [label="[]", style=solid]; +"440 Constant_41455" -> "158 /features/features.12/conv/conv.1/conv.1.2/Clip/fq_output_0" [label="[]", style=solid]; "441 Reshape_1303" -> "152 /features/features.12/conv/conv.1/conv.1.0/Conv" [label="[1, 576, 1, 1]", style=solid]; "442 /features/features.12/conv/conv.1/conv.1.0/Conv/WithoutBiases/fq_weights_1" -> "149 /features/features.12/conv/conv.1/conv.1.0/Conv/WithoutBiases" [label="[576, 1, 1, 3, 3]", style=solid]; -"443 Constant_114054" -> "442 /features/features.12/conv/conv.1/conv.1.0/Conv/WithoutBiases/fq_weights_1" [label="[576, 1, 1, 1, 1]", style=solid]; -"444 Constant_114053" -> "442 /features/features.12/conv/conv.1/conv.1.0/Conv/WithoutBiases/fq_weights_1" [label="[576, 1, 1, 1, 1]", style=solid]; -"445 Constant_114052" -> "442 /features/features.12/conv/conv.1/conv.1.0/Conv/WithoutBiases/fq_weights_1" [label="[576, 1, 1, 1, 1]", style=solid]; -"446 Constant_114051" -> "442 /features/features.12/conv/conv.1/conv.1.0/Conv/WithoutBiases/fq_weights_1" [label="[576, 1, 1, 1, 1]", style=solid]; +"443 Constant_41453" -> "442 /features/features.12/conv/conv.1/conv.1.0/Conv/WithoutBiases/fq_weights_1" [label="[576, 1, 1, 1, 1]", style=solid]; +"444 Constant_41452" -> "442 /features/features.12/conv/conv.1/conv.1.0/Conv/WithoutBiases/fq_weights_1" [label="[576, 1, 1, 1, 1]", style=solid]; +"445 Constant_41451" -> "442 /features/features.12/conv/conv.1/conv.1.0/Conv/WithoutBiases/fq_weights_1" [label="[576, 1, 1, 1, 1]", style=solid]; +"446 Constant_41450" -> "442 /features/features.12/conv/conv.1/conv.1.0/Conv/WithoutBiases/fq_weights_1" [label="[576, 1, 1, 1, 1]", style=solid]; "447 Reshape_1251" -> "442 /features/features.12/conv/conv.1/conv.1.0/Conv/WithoutBiases/fq_weights_1" [label="[576, 1, 1, 3, 3]", style=solid]; -"448 Constant_114049" -> "146 /features/features.12/conv/conv.0/conv.0.2/Clip/fq_output_0" [label="[1, 576, 1, 1]", style=solid]; -"449 Constant_114048" -> "146 /features/features.12/conv/conv.0/conv.0.2/Clip/fq_output_0" [label="[1, 576, 1, 1]", style=solid]; -"450 Constant_114047" -> "146 /features/features.12/conv/conv.0/conv.0.2/Clip/fq_output_0" [label="[1, 576, 1, 1]", style=solid]; -"451 Constant_114046" -> "146 
/features/features.12/conv/conv.0/conv.0.2/Clip/fq_output_0" [label="[1, 576, 1, 1]", style=solid]; +"448 Constant_41448" -> "146 /features/features.12/conv/conv.0/conv.0.2/Clip/fq_output_0" [label="[1, 576, 1, 1]", style=solid]; +"449 Constant_41447" -> "146 /features/features.12/conv/conv.0/conv.0.2/Clip/fq_output_0" [label="[1, 576, 1, 1]", style=solid]; +"450 Constant_41446" -> "146 /features/features.12/conv/conv.0/conv.0.2/Clip/fq_output_0" [label="[1, 576, 1, 1]", style=solid]; +"451 Constant_41445" -> "146 /features/features.12/conv/conv.0/conv.0.2/Clip/fq_output_0" [label="[1, 576, 1, 1]", style=solid]; "452 Reshape_1236" -> "140 /features/features.12/conv/conv.0/conv.0.0/Conv" [label="[1, 576, 1, 1]", style=solid]; "453 /features/features.12/conv/conv.0/conv.0.0/Conv/WithoutBiases/fq_weights_1" -> "138 /features/features.12/conv/conv.0/conv.0.0/Conv/WithoutBiases" [label="[576, 96, 1, 1]", style=solid]; -"454 Constant_114044" -> "453 /features/features.12/conv/conv.0/conv.0.0/Conv/WithoutBiases/fq_weights_1" [label="[576, 1, 1, 1]", style=solid]; -"455 Constant_114043" -> "453 /features/features.12/conv/conv.0/conv.0.0/Conv/WithoutBiases/fq_weights_1" [label="[576, 1, 1, 1]", style=solid]; -"456 Constant_114042" -> "453 /features/features.12/conv/conv.0/conv.0.0/Conv/WithoutBiases/fq_weights_1" [label="[576, 1, 1, 1]", style=solid]; -"457 Constant_114041" -> "453 /features/features.12/conv/conv.0/conv.0.0/Conv/WithoutBiases/fq_weights_1" [label="[576, 1, 1, 1]", style=solid]; +"454 Constant_41443" -> "453 /features/features.12/conv/conv.0/conv.0.0/Conv/WithoutBiases/fq_weights_1" [label="[576, 1, 1, 1]", style=solid]; +"455 Constant_41442" -> "453 /features/features.12/conv/conv.0/conv.0.0/Conv/WithoutBiases/fq_weights_1" [label="[576, 1, 1, 1]", style=solid]; +"456 Constant_41441" -> "453 /features/features.12/conv/conv.0/conv.0.0/Conv/WithoutBiases/fq_weights_1" [label="[576, 1, 1, 1]", style=solid]; +"457 Constant_41440" -> "453 /features/features.12/conv/conv.0/conv.0.0/Conv/WithoutBiases/fq_weights_1" [label="[576, 1, 1, 1]", style=solid]; "458 onnx^^Conv_637" -> "453 /features/features.12/conv/conv.0/conv.0.0/Conv/WithoutBiases/fq_weights_1" [label="[576, 96, 1, 1]", style=solid]; -"459 Constant_114039" -> "136 /features/features.11/conv/conv.2/Conv/fq_output_0" [label="[]", style=solid]; -"460 Constant_114038" -> "136 /features/features.11/conv/conv.2/Conv/fq_output_0" [label="[]", style=solid]; -"461 Constant_114037" -> "136 /features/features.11/conv/conv.2/Conv/fq_output_0" [label="[]", style=solid]; -"462 Constant_114036" -> "136 /features/features.11/conv/conv.2/Conv/fq_output_0" [label="[]", style=solid]; +"459 Constant_41438" -> "136 /features/features.11/conv/conv.2/Conv/fq_output_0" [label="[]", style=solid]; +"460 Constant_41437" -> "136 /features/features.11/conv/conv.2/Conv/fq_output_0" [label="[]", style=solid]; +"461 Constant_41436" -> "136 /features/features.11/conv/conv.2/Conv/fq_output_0" [label="[]", style=solid]; +"462 Constant_41435" -> "136 /features/features.11/conv/conv.2/Conv/fq_output_0" [label="[]", style=solid]; "463 Reshape_1221" -> "135 /features/features.11/conv/conv.2/Conv" [label="[1, 96, 1, 1]", style=solid]; "464 /features/features.11/conv/conv.2/Conv/WithoutBiases/fq_weights_1" -> "133 /features/features.11/conv/conv.2/Conv/WithoutBiases" [label="[96, 384, 1, 1]", style=solid]; -"465 Constant_114034" -> "464 /features/features.11/conv/conv.2/Conv/WithoutBiases/fq_weights_1" [label="[96, 1, 1, 1]", style=solid]; -"466 Constant_114033" -> 
"464 /features/features.11/conv/conv.2/Conv/WithoutBiases/fq_weights_1" [label="[96, 1, 1, 1]", style=solid]; -"467 Constant_114032" -> "464 /features/features.11/conv/conv.2/Conv/WithoutBiases/fq_weights_1" [label="[96, 1, 1, 1]", style=solid]; -"468 Constant_114031" -> "464 /features/features.11/conv/conv.2/Conv/WithoutBiases/fq_weights_1" [label="[96, 1, 1, 1]", style=solid]; +"465 Constant_41433" -> "464 /features/features.11/conv/conv.2/Conv/WithoutBiases/fq_weights_1" [label="[96, 1, 1, 1]", style=solid]; +"466 Constant_41432" -> "464 /features/features.11/conv/conv.2/Conv/WithoutBiases/fq_weights_1" [label="[96, 1, 1, 1]", style=solid]; +"467 Constant_41431" -> "464 /features/features.11/conv/conv.2/Conv/WithoutBiases/fq_weights_1" [label="[96, 1, 1, 1]", style=solid]; +"468 Constant_41430" -> "464 /features/features.11/conv/conv.2/Conv/WithoutBiases/fq_weights_1" [label="[96, 1, 1, 1]", style=solid]; "469 onnx^^Conv_634" -> "464 /features/features.11/conv/conv.2/Conv/WithoutBiases/fq_weights_1" [label="[96, 384, 1, 1]", style=solid]; -"470 Constant_114029" -> "131 /features/features.11/conv/conv.1/conv.1.2/Clip/fq_output_0" [label="[]", style=solid]; -"471 Constant_114028" -> "131 /features/features.11/conv/conv.1/conv.1.2/Clip/fq_output_0" [label="[]", style=solid]; -"472 Constant_114027" -> "131 /features/features.11/conv/conv.1/conv.1.2/Clip/fq_output_0" [label="[]", style=solid]; -"473 Constant_114026" -> "131 /features/features.11/conv/conv.1/conv.1.2/Clip/fq_output_0" [label="[]", style=solid]; +"470 Constant_41428" -> "131 /features/features.11/conv/conv.1/conv.1.2/Clip/fq_output_0" [label="[]", style=solid]; +"471 Constant_41427" -> "131 /features/features.11/conv/conv.1/conv.1.2/Clip/fq_output_0" [label="[]", style=solid]; +"472 Constant_41426" -> "131 /features/features.11/conv/conv.1/conv.1.2/Clip/fq_output_0" [label="[]", style=solid]; +"473 Constant_41425" -> "131 /features/features.11/conv/conv.1/conv.1.2/Clip/fq_output_0" [label="[]", style=solid]; "474 Reshape_1202" -> "125 /features/features.11/conv/conv.1/conv.1.0/Conv" [label="[1, 384, 1, 1]", style=solid]; "475 /features/features.11/conv/conv.1/conv.1.0/Conv/WithoutBiases/fq_weights_1" -> "121 /features/features.11/conv/conv.1/conv.1.0/Conv/WithoutBiases" [label="[384, 1, 1, 3, 3]", style=solid]; -"476 Constant_114024" -> "475 /features/features.11/conv/conv.1/conv.1.0/Conv/WithoutBiases/fq_weights_1" [label="[384, 1, 1, 1, 1]", style=solid]; -"477 Constant_114023" -> "475 /features/features.11/conv/conv.1/conv.1.0/Conv/WithoutBiases/fq_weights_1" [label="[384, 1, 1, 1, 1]", style=solid]; -"478 Constant_114022" -> "475 /features/features.11/conv/conv.1/conv.1.0/Conv/WithoutBiases/fq_weights_1" [label="[384, 1, 1, 1, 1]", style=solid]; -"479 Constant_114021" -> "475 /features/features.11/conv/conv.1/conv.1.0/Conv/WithoutBiases/fq_weights_1" [label="[384, 1, 1, 1, 1]", style=solid]; +"476 Constant_41423" -> "475 /features/features.11/conv/conv.1/conv.1.0/Conv/WithoutBiases/fq_weights_1" [label="[384, 1, 1, 1, 1]", style=solid]; +"477 Constant_41422" -> "475 /features/features.11/conv/conv.1/conv.1.0/Conv/WithoutBiases/fq_weights_1" [label="[384, 1, 1, 1, 1]", style=solid]; +"478 Constant_41421" -> "475 /features/features.11/conv/conv.1/conv.1.0/Conv/WithoutBiases/fq_weights_1" [label="[384, 1, 1, 1, 1]", style=solid]; +"479 Constant_41420" -> "475 /features/features.11/conv/conv.1/conv.1.0/Conv/WithoutBiases/fq_weights_1" [label="[384, 1, 1, 1, 1]", style=solid]; "480 Reshape_1150" -> "475 
/features/features.11/conv/conv.1/conv.1.0/Conv/WithoutBiases/fq_weights_1" [label="[384, 1, 1, 3, 3]", style=solid]; -"481 Constant_114019" -> "117 /features/features.11/conv/conv.0/conv.0.2/Clip/fq_output_0" [label="[1, 384, 1, 1]", style=solid]; -"482 Constant_114018" -> "117 /features/features.11/conv/conv.0/conv.0.2/Clip/fq_output_0" [label="[1, 384, 1, 1]", style=solid]; -"483 Constant_114017" -> "117 /features/features.11/conv/conv.0/conv.0.2/Clip/fq_output_0" [label="[1, 384, 1, 1]", style=solid]; -"484 Constant_114016" -> "117 /features/features.11/conv/conv.0/conv.0.2/Clip/fq_output_0" [label="[1, 384, 1, 1]", style=solid]; +"481 Constant_41418" -> "117 /features/features.11/conv/conv.0/conv.0.2/Clip/fq_output_0" [label="[1, 384, 1, 1]", style=solid]; +"482 Constant_41417" -> "117 /features/features.11/conv/conv.0/conv.0.2/Clip/fq_output_0" [label="[1, 384, 1, 1]", style=solid]; +"483 Constant_41416" -> "117 /features/features.11/conv/conv.0/conv.0.2/Clip/fq_output_0" [label="[1, 384, 1, 1]", style=solid]; +"484 Constant_41415" -> "117 /features/features.11/conv/conv.0/conv.0.2/Clip/fq_output_0" [label="[1, 384, 1, 1]", style=solid]; "485 Reshape_1135" -> "109 /features/features.11/conv/conv.0/conv.0.0/Conv" [label="[1, 384, 1, 1]", style=solid]; "486 /features/features.11/conv/conv.0/conv.0.0/Conv/WithoutBiases/fq_weights_1" -> "105 /features/features.11/conv/conv.0/conv.0.0/Conv/WithoutBiases" [label="[384, 64, 1, 1]", style=solid]; -"487 Constant_114014" -> "486 /features/features.11/conv/conv.0/conv.0.0/Conv/WithoutBiases/fq_weights_1" [label="[384, 1, 1, 1]", style=solid]; -"488 Constant_114013" -> "486 /features/features.11/conv/conv.0/conv.0.0/Conv/WithoutBiases/fq_weights_1" [label="[384, 1, 1, 1]", style=solid]; -"489 Constant_114012" -> "486 /features/features.11/conv/conv.0/conv.0.0/Conv/WithoutBiases/fq_weights_1" [label="[384, 1, 1, 1]", style=solid]; -"490 Constant_114011" -> "486 /features/features.11/conv/conv.0/conv.0.0/Conv/WithoutBiases/fq_weights_1" [label="[384, 1, 1, 1]", style=solid]; +"487 Constant_41413" -> "486 /features/features.11/conv/conv.0/conv.0.0/Conv/WithoutBiases/fq_weights_1" [label="[384, 1, 1, 1]", style=solid]; +"488 Constant_41412" -> "486 /features/features.11/conv/conv.0/conv.0.0/Conv/WithoutBiases/fq_weights_1" [label="[384, 1, 1, 1]", style=solid]; +"489 Constant_41411" -> "486 /features/features.11/conv/conv.0/conv.0.0/Conv/WithoutBiases/fq_weights_1" [label="[384, 1, 1, 1]", style=solid]; +"490 Constant_41410" -> "486 /features/features.11/conv/conv.0/conv.0.0/Conv/WithoutBiases/fq_weights_1" [label="[384, 1, 1, 1]", style=solid]; "491 onnx^^Conv_628" -> "486 /features/features.11/conv/conv.0/conv.0.0/Conv/WithoutBiases/fq_weights_1" [label="[384, 64, 1, 1]", style=solid]; -"492 Constant_114009" -> "101 /features/features.10/Add/fq_output_0" [label="[]", style=solid]; -"493 Constant_114008" -> "101 /features/features.10/Add/fq_output_0" [label="[]", style=solid]; -"494 Constant_114007" -> "101 /features/features.10/Add/fq_output_0" [label="[]", style=solid]; -"495 Constant_114006" -> "101 /features/features.10/Add/fq_output_0" [label="[]", style=solid]; -"496 Constant_114004" -> "134 /features/features.10/conv/conv.2/Conv/fq_output_0" [label="[]", style=solid]; -"497 Constant_114003" -> "134 /features/features.10/conv/conv.2/Conv/fq_output_0" [label="[]", style=solid]; -"498 Constant_114002" -> "134 /features/features.10/conv/conv.2/Conv/fq_output_0" [label="[]", style=solid]; -"499 Constant_114001" -> "134 
/features/features.10/conv/conv.2/Conv/fq_output_0" [label="[]", style=solid]; +"492 Constant_41408" -> "101 /features/features.10/Add/fq_output_0" [label="[]", style=solid]; +"493 Constant_41407" -> "101 /features/features.10/Add/fq_output_0" [label="[]", style=solid]; +"494 Constant_41406" -> "101 /features/features.10/Add/fq_output_0" [label="[]", style=solid]; +"495 Constant_41405" -> "101 /features/features.10/Add/fq_output_0" [label="[]", style=solid]; +"496 Constant_41403" -> "134 /features/features.10/conv/conv.2/Conv/fq_output_0" [label="[]", style=solid]; +"497 Constant_41402" -> "134 /features/features.10/conv/conv.2/Conv/fq_output_0" [label="[]", style=solid]; +"498 Constant_41401" -> "134 /features/features.10/conv/conv.2/Conv/fq_output_0" [label="[]", style=solid]; +"499 Constant_41400" -> "134 /features/features.10/conv/conv.2/Conv/fq_output_0" [label="[]", style=solid]; "500 Reshape_1119" -> "132 /features/features.10/conv/conv.2/Conv" [label="[1, 64, 1, 1]", style=solid]; "501 /features/features.10/conv/conv.2/Conv/WithoutBiases/fq_weights_1" -> "129 /features/features.10/conv/conv.2/Conv/WithoutBiases" [label="[64, 384, 1, 1]", style=solid]; -"502 Constant_113999" -> "501 /features/features.10/conv/conv.2/Conv/WithoutBiases/fq_weights_1" [label="[64, 1, 1, 1]", style=solid]; -"503 Constant_113998" -> "501 /features/features.10/conv/conv.2/Conv/WithoutBiases/fq_weights_1" [label="[64, 1, 1, 1]", style=solid]; -"504 Constant_113997" -> "501 /features/features.10/conv/conv.2/Conv/WithoutBiases/fq_weights_1" [label="[64, 1, 1, 1]", style=solid]; -"505 Constant_113996" -> "501 /features/features.10/conv/conv.2/Conv/WithoutBiases/fq_weights_1" [label="[64, 1, 1, 1]", style=solid]; +"502 Constant_41398" -> "501 /features/features.10/conv/conv.2/Conv/WithoutBiases/fq_weights_1" [label="[64, 1, 1, 1]", style=solid]; +"503 Constant_41397" -> "501 /features/features.10/conv/conv.2/Conv/WithoutBiases/fq_weights_1" [label="[64, 1, 1, 1]", style=solid]; +"504 Constant_41396" -> "501 /features/features.10/conv/conv.2/Conv/WithoutBiases/fq_weights_1" [label="[64, 1, 1, 1]", style=solid]; +"505 Constant_41395" -> "501 /features/features.10/conv/conv.2/Conv/WithoutBiases/fq_weights_1" [label="[64, 1, 1, 1]", style=solid]; "506 onnx^^Conv_625" -> "501 /features/features.10/conv/conv.2/Conv/WithoutBiases/fq_weights_1" [label="[64, 384, 1, 1]", style=solid]; -"507 Constant_113994" -> "126 /features/features.10/conv/conv.1/conv.1.2/Clip/fq_output_0" [label="[]", style=solid]; -"508 Constant_113993" -> "126 /features/features.10/conv/conv.1/conv.1.2/Clip/fq_output_0" [label="[]", style=solid]; -"509 Constant_113992" -> "126 /features/features.10/conv/conv.1/conv.1.2/Clip/fq_output_0" [label="[]", style=solid]; -"510 Constant_113991" -> "126 /features/features.10/conv/conv.1/conv.1.2/Clip/fq_output_0" [label="[]", style=solid]; +"507 Constant_41393" -> "126 /features/features.10/conv/conv.1/conv.1.2/Clip/fq_output_0" [label="[]", style=solid]; +"508 Constant_41392" -> "126 /features/features.10/conv/conv.1/conv.1.2/Clip/fq_output_0" [label="[]", style=solid]; +"509 Constant_41391" -> "126 /features/features.10/conv/conv.1/conv.1.2/Clip/fq_output_0" [label="[]", style=solid]; +"510 Constant_41390" -> "126 /features/features.10/conv/conv.1/conv.1.2/Clip/fq_output_0" [label="[]", style=solid]; "511 Reshape_1100" -> "118 /features/features.10/conv/conv.1/conv.1.0/Conv" [label="[1, 384, 1, 1]", style=solid]; "512 /features/features.10/conv/conv.1/conv.1.0/Conv/WithoutBiases/fq_weights_1" -> "114 
/features/features.10/conv/conv.1/conv.1.0/Conv/WithoutBiases" [label="[384, 1, 1, 3, 3]", style=solid]; -"513 Constant_113989" -> "512 /features/features.10/conv/conv.1/conv.1.0/Conv/WithoutBiases/fq_weights_1" [label="[384, 1, 1, 1, 1]", style=solid]; -"514 Constant_113988" -> "512 /features/features.10/conv/conv.1/conv.1.0/Conv/WithoutBiases/fq_weights_1" [label="[384, 1, 1, 1, 1]", style=solid]; -"515 Constant_113987" -> "512 /features/features.10/conv/conv.1/conv.1.0/Conv/WithoutBiases/fq_weights_1" [label="[384, 1, 1, 1, 1]", style=solid]; -"516 Constant_113986" -> "512 /features/features.10/conv/conv.1/conv.1.0/Conv/WithoutBiases/fq_weights_1" [label="[384, 1, 1, 1, 1]", style=solid]; +"513 Constant_41388" -> "512 /features/features.10/conv/conv.1/conv.1.0/Conv/WithoutBiases/fq_weights_1" [label="[384, 1, 1, 1, 1]", style=solid]; +"514 Constant_41387" -> "512 /features/features.10/conv/conv.1/conv.1.0/Conv/WithoutBiases/fq_weights_1" [label="[384, 1, 1, 1, 1]", style=solid]; +"515 Constant_41386" -> "512 /features/features.10/conv/conv.1/conv.1.0/Conv/WithoutBiases/fq_weights_1" [label="[384, 1, 1, 1, 1]", style=solid]; +"516 Constant_41385" -> "512 /features/features.10/conv/conv.1/conv.1.0/Conv/WithoutBiases/fq_weights_1" [label="[384, 1, 1, 1, 1]", style=solid]; "517 Reshape_1048" -> "512 /features/features.10/conv/conv.1/conv.1.0/Conv/WithoutBiases/fq_weights_1" [label="[384, 1, 1, 3, 3]", style=solid]; -"518 Constant_113984" -> "110 /features/features.10/conv/conv.0/conv.0.2/Clip/fq_output_0" [label="[1, 384, 1, 1]", style=solid]; -"519 Constant_113983" -> "110 /features/features.10/conv/conv.0/conv.0.2/Clip/fq_output_0" [label="[1, 384, 1, 1]", style=solid]; -"520 Constant_113982" -> "110 /features/features.10/conv/conv.0/conv.0.2/Clip/fq_output_0" [label="[1, 384, 1, 1]", style=solid]; -"521 Constant_113981" -> "110 /features/features.10/conv/conv.0/conv.0.2/Clip/fq_output_0" [label="[1, 384, 1, 1]", style=solid]; +"518 Constant_41383" -> "110 /features/features.10/conv/conv.0/conv.0.2/Clip/fq_output_0" [label="[1, 384, 1, 1]", style=solid]; +"519 Constant_41382" -> "110 /features/features.10/conv/conv.0/conv.0.2/Clip/fq_output_0" [label="[1, 384, 1, 1]", style=solid]; +"520 Constant_41381" -> "110 /features/features.10/conv/conv.0/conv.0.2/Clip/fq_output_0" [label="[1, 384, 1, 1]", style=solid]; +"521 Constant_41380" -> "110 /features/features.10/conv/conv.0/conv.0.2/Clip/fq_output_0" [label="[1, 384, 1, 1]", style=solid]; "522 Reshape_1033" -> "102 /features/features.10/conv/conv.0/conv.0.0/Conv" [label="[1, 384, 1, 1]", style=solid]; "523 /features/features.10/conv/conv.0/conv.0.0/Conv/WithoutBiases/fq_weights_1" -> "98 /features/features.10/conv/conv.0/conv.0.0/Conv/WithoutBiases" [label="[384, 64, 1, 1]", style=solid]; -"524 Constant_113979" -> "523 /features/features.10/conv/conv.0/conv.0.0/Conv/WithoutBiases/fq_weights_1" [label="[384, 1, 1, 1]", style=solid]; -"525 Constant_113978" -> "523 /features/features.10/conv/conv.0/conv.0.0/Conv/WithoutBiases/fq_weights_1" [label="[384, 1, 1, 1]", style=solid]; -"526 Constant_113977" -> "523 /features/features.10/conv/conv.0/conv.0.0/Conv/WithoutBiases/fq_weights_1" [label="[384, 1, 1, 1]", style=solid]; -"527 Constant_113976" -> "523 /features/features.10/conv/conv.0/conv.0.0/Conv/WithoutBiases/fq_weights_1" [label="[384, 1, 1, 1]", style=solid]; +"524 Constant_41378" -> "523 /features/features.10/conv/conv.0/conv.0.0/Conv/WithoutBiases/fq_weights_1" [label="[384, 1, 1, 1]", style=solid]; +"525 Constant_41377" -> "523 
/features/features.10/conv/conv.0/conv.0.0/Conv/WithoutBiases/fq_weights_1" [label="[384, 1, 1, 1]", style=solid]; +"526 Constant_41376" -> "523 /features/features.10/conv/conv.0/conv.0.0/Conv/WithoutBiases/fq_weights_1" [label="[384, 1, 1, 1]", style=solid]; +"527 Constant_41375" -> "523 /features/features.10/conv/conv.0/conv.0.0/Conv/WithoutBiases/fq_weights_1" [label="[384, 1, 1, 1]", style=solid]; "528 onnx^^Conv_619" -> "523 /features/features.10/conv/conv.0/conv.0.0/Conv/WithoutBiases/fq_weights_1" [label="[384, 64, 1, 1]", style=solid]; -"529 Constant_113974" -> "94 /features/features.9/Add/fq_output_0" [label="[]", style=solid]; -"530 Constant_113973" -> "94 /features/features.9/Add/fq_output_0" [label="[]", style=solid]; -"531 Constant_113972" -> "94 /features/features.9/Add/fq_output_0" [label="[]", style=solid]; -"532 Constant_113971" -> "94 /features/features.9/Add/fq_output_0" [label="[]", style=solid]; -"533 Constant_113969" -> "130 /features/features.9/conv/conv.2/Conv/fq_output_0" [label="[]", style=solid]; -"534 Constant_113968" -> "130 /features/features.9/conv/conv.2/Conv/fq_output_0" [label="[]", style=solid]; -"535 Constant_113967" -> "130 /features/features.9/conv/conv.2/Conv/fq_output_0" [label="[]", style=solid]; -"536 Constant_113966" -> "130 /features/features.9/conv/conv.2/Conv/fq_output_0" [label="[]", style=solid]; +"529 Constant_41373" -> "94 /features/features.9/Add/fq_output_0" [label="[]", style=solid]; +"530 Constant_41372" -> "94 /features/features.9/Add/fq_output_0" [label="[]", style=solid]; +"531 Constant_41371" -> "94 /features/features.9/Add/fq_output_0" [label="[]", style=solid]; +"532 Constant_41370" -> "94 /features/features.9/Add/fq_output_0" [label="[]", style=solid]; +"533 Constant_41368" -> "130 /features/features.9/conv/conv.2/Conv/fq_output_0" [label="[]", style=solid]; +"534 Constant_41367" -> "130 /features/features.9/conv/conv.2/Conv/fq_output_0" [label="[]", style=solid]; +"535 Constant_41366" -> "130 /features/features.9/conv/conv.2/Conv/fq_output_0" [label="[]", style=solid]; +"536 Constant_41365" -> "130 /features/features.9/conv/conv.2/Conv/fq_output_0" [label="[]", style=solid]; "537 Reshape_1017" -> "127 /features/features.9/conv/conv.2/Conv" [label="[1, 64, 1, 1]", style=solid]; "538 /features/features.9/conv/conv.2/Conv/WithoutBiases/fq_weights_1" -> "123 /features/features.9/conv/conv.2/Conv/WithoutBiases" [label="[64, 384, 1, 1]", style=solid]; -"539 Constant_113964" -> "538 /features/features.9/conv/conv.2/Conv/WithoutBiases/fq_weights_1" [label="[64, 1, 1, 1]", style=solid]; -"540 Constant_113963" -> "538 /features/features.9/conv/conv.2/Conv/WithoutBiases/fq_weights_1" [label="[64, 1, 1, 1]", style=solid]; -"541 Constant_113962" -> "538 /features/features.9/conv/conv.2/Conv/WithoutBiases/fq_weights_1" [label="[64, 1, 1, 1]", style=solid]; -"542 Constant_113961" -> "538 /features/features.9/conv/conv.2/Conv/WithoutBiases/fq_weights_1" [label="[64, 1, 1, 1]", style=solid]; +"539 Constant_41363" -> "538 /features/features.9/conv/conv.2/Conv/WithoutBiases/fq_weights_1" [label="[64, 1, 1, 1]", style=solid]; +"540 Constant_41362" -> "538 /features/features.9/conv/conv.2/Conv/WithoutBiases/fq_weights_1" [label="[64, 1, 1, 1]", style=solid]; +"541 Constant_41361" -> "538 /features/features.9/conv/conv.2/Conv/WithoutBiases/fq_weights_1" [label="[64, 1, 1, 1]", style=solid]; +"542 Constant_41360" -> "538 /features/features.9/conv/conv.2/Conv/WithoutBiases/fq_weights_1" [label="[64, 1, 1, 1]", style=solid]; "543 onnx^^Conv_616" -> "538 
/features/features.9/conv/conv.2/Conv/WithoutBiases/fq_weights_1" [label="[64, 384, 1, 1]", style=solid]; -"544 Constant_113959" -> "119 /features/features.9/conv/conv.1/conv.1.2/Clip/fq_output_0" [label="[]", style=solid]; -"545 Constant_113958" -> "119 /features/features.9/conv/conv.1/conv.1.2/Clip/fq_output_0" [label="[]", style=solid]; -"546 Constant_113957" -> "119 /features/features.9/conv/conv.1/conv.1.2/Clip/fq_output_0" [label="[]", style=solid]; -"547 Constant_113956" -> "119 /features/features.9/conv/conv.1/conv.1.2/Clip/fq_output_0" [label="[]", style=solid]; +"544 Constant_41358" -> "119 /features/features.9/conv/conv.1/conv.1.2/Clip/fq_output_0" [label="[]", style=solid]; +"545 Constant_41357" -> "119 /features/features.9/conv/conv.1/conv.1.2/Clip/fq_output_0" [label="[]", style=solid]; +"546 Constant_41356" -> "119 /features/features.9/conv/conv.1/conv.1.2/Clip/fq_output_0" [label="[]", style=solid]; +"547 Constant_41355" -> "119 /features/features.9/conv/conv.1/conv.1.2/Clip/fq_output_0" [label="[]", style=solid]; "548 Reshape_998" -> "111 /features/features.9/conv/conv.1/conv.1.0/Conv" [label="[1, 384, 1, 1]", style=solid]; "549 /features/features.9/conv/conv.1/conv.1.0/Conv/WithoutBiases/fq_weights_1" -> "107 /features/features.9/conv/conv.1/conv.1.0/Conv/WithoutBiases" [label="[384, 1, 1, 3, 3]", style=solid]; -"550 Constant_113954" -> "549 /features/features.9/conv/conv.1/conv.1.0/Conv/WithoutBiases/fq_weights_1" [label="[384, 1, 1, 1, 1]", style=solid]; -"551 Constant_113953" -> "549 /features/features.9/conv/conv.1/conv.1.0/Conv/WithoutBiases/fq_weights_1" [label="[384, 1, 1, 1, 1]", style=solid]; -"552 Constant_113952" -> "549 /features/features.9/conv/conv.1/conv.1.0/Conv/WithoutBiases/fq_weights_1" [label="[384, 1, 1, 1, 1]", style=solid]; -"553 Constant_113951" -> "549 /features/features.9/conv/conv.1/conv.1.0/Conv/WithoutBiases/fq_weights_1" [label="[384, 1, 1, 1, 1]", style=solid]; +"550 Constant_41353" -> "549 /features/features.9/conv/conv.1/conv.1.0/Conv/WithoutBiases/fq_weights_1" [label="[384, 1, 1, 1, 1]", style=solid]; +"551 Constant_41352" -> "549 /features/features.9/conv/conv.1/conv.1.0/Conv/WithoutBiases/fq_weights_1" [label="[384, 1, 1, 1, 1]", style=solid]; +"552 Constant_41351" -> "549 /features/features.9/conv/conv.1/conv.1.0/Conv/WithoutBiases/fq_weights_1" [label="[384, 1, 1, 1, 1]", style=solid]; +"553 Constant_41350" -> "549 /features/features.9/conv/conv.1/conv.1.0/Conv/WithoutBiases/fq_weights_1" [label="[384, 1, 1, 1, 1]", style=solid]; "554 Reshape_946" -> "549 /features/features.9/conv/conv.1/conv.1.0/Conv/WithoutBiases/fq_weights_1" [label="[384, 1, 1, 3, 3]", style=solid]; -"555 Constant_113949" -> "103 /features/features.9/conv/conv.0/conv.0.2/Clip/fq_output_0" [label="[1, 384, 1, 1]", style=solid]; -"556 Constant_113948" -> "103 /features/features.9/conv/conv.0/conv.0.2/Clip/fq_output_0" [label="[1, 384, 1, 1]", style=solid]; -"557 Constant_113947" -> "103 /features/features.9/conv/conv.0/conv.0.2/Clip/fq_output_0" [label="[1, 384, 1, 1]", style=solid]; -"558 Constant_113946" -> "103 /features/features.9/conv/conv.0/conv.0.2/Clip/fq_output_0" [label="[1, 384, 1, 1]", style=solid]; +"555 Constant_41348" -> "103 /features/features.9/conv/conv.0/conv.0.2/Clip/fq_output_0" [label="[1, 384, 1, 1]", style=solid]; +"556 Constant_41347" -> "103 /features/features.9/conv/conv.0/conv.0.2/Clip/fq_output_0" [label="[1, 384, 1, 1]", style=solid]; +"557 Constant_41346" -> "103 /features/features.9/conv/conv.0/conv.0.2/Clip/fq_output_0" [label="[1, 
384, 1, 1]", style=solid]; +"558 Constant_41345" -> "103 /features/features.9/conv/conv.0/conv.0.2/Clip/fq_output_0" [label="[1, 384, 1, 1]", style=solid]; "559 Reshape_931" -> "95 /features/features.9/conv/conv.0/conv.0.0/Conv" [label="[1, 384, 1, 1]", style=solid]; "560 /features/features.9/conv/conv.0/conv.0.0/Conv/WithoutBiases/fq_weights_1" -> "92 /features/features.9/conv/conv.0/conv.0.0/Conv/WithoutBiases" [label="[384, 64, 1, 1]", style=solid]; -"561 Constant_113944" -> "560 /features/features.9/conv/conv.0/conv.0.0/Conv/WithoutBiases/fq_weights_1" [label="[384, 1, 1, 1]", style=solid]; -"562 Constant_113943" -> "560 /features/features.9/conv/conv.0/conv.0.0/Conv/WithoutBiases/fq_weights_1" [label="[384, 1, 1, 1]", style=solid]; -"563 Constant_113942" -> "560 /features/features.9/conv/conv.0/conv.0.0/Conv/WithoutBiases/fq_weights_1" [label="[384, 1, 1, 1]", style=solid]; -"564 Constant_113941" -> "560 /features/features.9/conv/conv.0/conv.0.0/Conv/WithoutBiases/fq_weights_1" [label="[384, 1, 1, 1]", style=solid]; +"561 Constant_41343" -> "560 /features/features.9/conv/conv.0/conv.0.0/Conv/WithoutBiases/fq_weights_1" [label="[384, 1, 1, 1]", style=solid]; +"562 Constant_41342" -> "560 /features/features.9/conv/conv.0/conv.0.0/Conv/WithoutBiases/fq_weights_1" [label="[384, 1, 1, 1]", style=solid]; +"563 Constant_41341" -> "560 /features/features.9/conv/conv.0/conv.0.0/Conv/WithoutBiases/fq_weights_1" [label="[384, 1, 1, 1]", style=solid]; +"564 Constant_41340" -> "560 /features/features.9/conv/conv.0/conv.0.0/Conv/WithoutBiases/fq_weights_1" [label="[384, 1, 1, 1]", style=solid]; "565 onnx^^Conv_610" -> "560 /features/features.9/conv/conv.0/conv.0.0/Conv/WithoutBiases/fq_weights_1" [label="[384, 64, 1, 1]", style=solid]; -"566 Constant_113939" -> "89 /features/features.8/Add/fq_output_0" [label="[]", style=solid]; -"567 Constant_113938" -> "89 /features/features.8/Add/fq_output_0" [label="[]", style=solid]; -"568 Constant_113937" -> "89 /features/features.8/Add/fq_output_0" [label="[]", style=solid]; -"569 Constant_113936" -> "89 /features/features.8/Add/fq_output_0" [label="[]", style=solid]; -"570 Constant_113934" -> "124 /features/features.8/conv/conv.2/Conv/fq_output_0" [label="[]", style=solid]; -"571 Constant_113933" -> "124 /features/features.8/conv/conv.2/Conv/fq_output_0" [label="[]", style=solid]; -"572 Constant_113932" -> "124 /features/features.8/conv/conv.2/Conv/fq_output_0" [label="[]", style=solid]; -"573 Constant_113931" -> "124 /features/features.8/conv/conv.2/Conv/fq_output_0" [label="[]", style=solid]; +"566 Constant_41338" -> "89 /features/features.8/Add/fq_output_0" [label="[]", style=solid]; +"567 Constant_41337" -> "89 /features/features.8/Add/fq_output_0" [label="[]", style=solid]; +"568 Constant_41336" -> "89 /features/features.8/Add/fq_output_0" [label="[]", style=solid]; +"569 Constant_41335" -> "89 /features/features.8/Add/fq_output_0" [label="[]", style=solid]; +"570 Constant_41333" -> "124 /features/features.8/conv/conv.2/Conv/fq_output_0" [label="[]", style=solid]; +"571 Constant_41332" -> "124 /features/features.8/conv/conv.2/Conv/fq_output_0" [label="[]", style=solid]; +"572 Constant_41331" -> "124 /features/features.8/conv/conv.2/Conv/fq_output_0" [label="[]", style=solid]; +"573 Constant_41330" -> "124 /features/features.8/conv/conv.2/Conv/fq_output_0" [label="[]", style=solid]; "574 Reshape_915" -> "120 /features/features.8/conv/conv.2/Conv" [label="[1, 64, 1, 1]", style=solid]; "575 /features/features.8/conv/conv.2/Conv/WithoutBiases/fq_weights_1" 
-> "116 /features/features.8/conv/conv.2/Conv/WithoutBiases" [label="[64, 384, 1, 1]", style=solid]; -"576 Constant_113929" -> "575 /features/features.8/conv/conv.2/Conv/WithoutBiases/fq_weights_1" [label="[64, 1, 1, 1]", style=solid]; -"577 Constant_113928" -> "575 /features/features.8/conv/conv.2/Conv/WithoutBiases/fq_weights_1" [label="[64, 1, 1, 1]", style=solid]; -"578 Constant_113927" -> "575 /features/features.8/conv/conv.2/Conv/WithoutBiases/fq_weights_1" [label="[64, 1, 1, 1]", style=solid]; -"579 Constant_113926" -> "575 /features/features.8/conv/conv.2/Conv/WithoutBiases/fq_weights_1" [label="[64, 1, 1, 1]", style=solid]; +"576 Constant_41328" -> "575 /features/features.8/conv/conv.2/Conv/WithoutBiases/fq_weights_1" [label="[64, 1, 1, 1]", style=solid]; +"577 Constant_41327" -> "575 /features/features.8/conv/conv.2/Conv/WithoutBiases/fq_weights_1" [label="[64, 1, 1, 1]", style=solid]; +"578 Constant_41326" -> "575 /features/features.8/conv/conv.2/Conv/WithoutBiases/fq_weights_1" [label="[64, 1, 1, 1]", style=solid]; +"579 Constant_41325" -> "575 /features/features.8/conv/conv.2/Conv/WithoutBiases/fq_weights_1" [label="[64, 1, 1, 1]", style=solid]; "580 onnx^^Conv_607" -> "575 /features/features.8/conv/conv.2/Conv/WithoutBiases/fq_weights_1" [label="[64, 384, 1, 1]", style=solid]; -"581 Constant_113924" -> "112 /features/features.8/conv/conv.1/conv.1.2/Clip/fq_output_0" [label="[]", style=solid]; -"582 Constant_113923" -> "112 /features/features.8/conv/conv.1/conv.1.2/Clip/fq_output_0" [label="[]", style=solid]; -"583 Constant_113922" -> "112 /features/features.8/conv/conv.1/conv.1.2/Clip/fq_output_0" [label="[]", style=solid]; -"584 Constant_113921" -> "112 /features/features.8/conv/conv.1/conv.1.2/Clip/fq_output_0" [label="[]", style=solid]; +"581 Constant_41323" -> "112 /features/features.8/conv/conv.1/conv.1.2/Clip/fq_output_0" [label="[]", style=solid]; +"582 Constant_41322" -> "112 /features/features.8/conv/conv.1/conv.1.2/Clip/fq_output_0" [label="[]", style=solid]; +"583 Constant_41321" -> "112 /features/features.8/conv/conv.1/conv.1.2/Clip/fq_output_0" [label="[]", style=solid]; +"584 Constant_41320" -> "112 /features/features.8/conv/conv.1/conv.1.2/Clip/fq_output_0" [label="[]", style=solid]; "585 Reshape_896" -> "104 /features/features.8/conv/conv.1/conv.1.0/Conv" [label="[1, 384, 1, 1]", style=solid]; "586 /features/features.8/conv/conv.1/conv.1.0/Conv/WithoutBiases/fq_weights_1" -> "100 /features/features.8/conv/conv.1/conv.1.0/Conv/WithoutBiases" [label="[384, 1, 1, 3, 3]", style=solid]; -"587 Constant_113919" -> "586 /features/features.8/conv/conv.1/conv.1.0/Conv/WithoutBiases/fq_weights_1" [label="[384, 1, 1, 1, 1]", style=solid]; -"588 Constant_113918" -> "586 /features/features.8/conv/conv.1/conv.1.0/Conv/WithoutBiases/fq_weights_1" [label="[384, 1, 1, 1, 1]", style=solid]; -"589 Constant_113917" -> "586 /features/features.8/conv/conv.1/conv.1.0/Conv/WithoutBiases/fq_weights_1" [label="[384, 1, 1, 1, 1]", style=solid]; -"590 Constant_113916" -> "586 /features/features.8/conv/conv.1/conv.1.0/Conv/WithoutBiases/fq_weights_1" [label="[384, 1, 1, 1, 1]", style=solid]; +"587 Constant_41318" -> "586 /features/features.8/conv/conv.1/conv.1.0/Conv/WithoutBiases/fq_weights_1" [label="[384, 1, 1, 1, 1]", style=solid]; +"588 Constant_41317" -> "586 /features/features.8/conv/conv.1/conv.1.0/Conv/WithoutBiases/fq_weights_1" [label="[384, 1, 1, 1, 1]", style=solid]; +"589 Constant_41316" -> "586 /features/features.8/conv/conv.1/conv.1.0/Conv/WithoutBiases/fq_weights_1" 
[label="[384, 1, 1, 1, 1]", style=solid]; +"590 Constant_41315" -> "586 /features/features.8/conv/conv.1/conv.1.0/Conv/WithoutBiases/fq_weights_1" [label="[384, 1, 1, 1, 1]", style=solid]; "591 Reshape_844" -> "586 /features/features.8/conv/conv.1/conv.1.0/Conv/WithoutBiases/fq_weights_1" [label="[384, 1, 1, 3, 3]", style=solid]; -"592 Constant_113914" -> "96 /features/features.8/conv/conv.0/conv.0.2/Clip/fq_output_0" [label="[1, 384, 1, 1]", style=solid]; -"593 Constant_113913" -> "96 /features/features.8/conv/conv.0/conv.0.2/Clip/fq_output_0" [label="[1, 384, 1, 1]", style=solid]; -"594 Constant_113912" -> "96 /features/features.8/conv/conv.0/conv.0.2/Clip/fq_output_0" [label="[1, 384, 1, 1]", style=solid]; -"595 Constant_113911" -> "96 /features/features.8/conv/conv.0/conv.0.2/Clip/fq_output_0" [label="[1, 384, 1, 1]", style=solid]; +"592 Constant_41313" -> "96 /features/features.8/conv/conv.0/conv.0.2/Clip/fq_output_0" [label="[1, 384, 1, 1]", style=solid]; +"593 Constant_41312" -> "96 /features/features.8/conv/conv.0/conv.0.2/Clip/fq_output_0" [label="[1, 384, 1, 1]", style=solid]; +"594 Constant_41311" -> "96 /features/features.8/conv/conv.0/conv.0.2/Clip/fq_output_0" [label="[1, 384, 1, 1]", style=solid]; +"595 Constant_41310" -> "96 /features/features.8/conv/conv.0/conv.0.2/Clip/fq_output_0" [label="[1, 384, 1, 1]", style=solid]; "596 Reshape_829" -> "90 /features/features.8/conv/conv.0/conv.0.0/Conv" [label="[1, 384, 1, 1]", style=solid]; "597 /features/features.8/conv/conv.0/conv.0.0/Conv/WithoutBiases/fq_weights_1" -> "88 /features/features.8/conv/conv.0/conv.0.0/Conv/WithoutBiases" [label="[384, 64, 1, 1]", style=solid]; -"598 Constant_113909" -> "597 /features/features.8/conv/conv.0/conv.0.0/Conv/WithoutBiases/fq_weights_1" [label="[384, 1, 1, 1]", style=solid]; -"599 Constant_113908" -> "597 /features/features.8/conv/conv.0/conv.0.0/Conv/WithoutBiases/fq_weights_1" [label="[384, 1, 1, 1]", style=solid]; -"600 Constant_113907" -> "597 /features/features.8/conv/conv.0/conv.0.0/Conv/WithoutBiases/fq_weights_1" [label="[384, 1, 1, 1]", style=solid]; -"601 Constant_113906" -> "597 /features/features.8/conv/conv.0/conv.0.0/Conv/WithoutBiases/fq_weights_1" [label="[384, 1, 1, 1]", style=solid]; +"598 Constant_41308" -> "597 /features/features.8/conv/conv.0/conv.0.0/Conv/WithoutBiases/fq_weights_1" [label="[384, 1, 1, 1]", style=solid]; +"599 Constant_41307" -> "597 /features/features.8/conv/conv.0/conv.0.0/Conv/WithoutBiases/fq_weights_1" [label="[384, 1, 1, 1]", style=solid]; +"600 Constant_41306" -> "597 /features/features.8/conv/conv.0/conv.0.0/Conv/WithoutBiases/fq_weights_1" [label="[384, 1, 1, 1]", style=solid]; +"601 Constant_41305" -> "597 /features/features.8/conv/conv.0/conv.0.0/Conv/WithoutBiases/fq_weights_1" [label="[384, 1, 1, 1]", style=solid]; "602 onnx^^Conv_601" -> "597 /features/features.8/conv/conv.0/conv.0.0/Conv/WithoutBiases/fq_weights_1" [label="[384, 64, 1, 1]", style=solid]; -"603 Constant_113904" -> "86 /features/features.7/conv/conv.2/Conv/fq_output_0" [label="[]", style=solid]; -"604 Constant_113903" -> "86 /features/features.7/conv/conv.2/Conv/fq_output_0" [label="[]", style=solid]; -"605 Constant_113902" -> "86 /features/features.7/conv/conv.2/Conv/fq_output_0" [label="[]", style=solid]; -"606 Constant_113901" -> "86 /features/features.7/conv/conv.2/Conv/fq_output_0" [label="[]", style=solid]; +"603 Constant_41303" -> "86 /features/features.7/conv/conv.2/Conv/fq_output_0" [label="[]", style=solid]; +"604 Constant_41302" -> "86 
/features/features.7/conv/conv.2/Conv/fq_output_0" [label="[]", style=solid]; +"605 Constant_41301" -> "86 /features/features.7/conv/conv.2/Conv/fq_output_0" [label="[]", style=solid]; +"606 Constant_41300" -> "86 /features/features.7/conv/conv.2/Conv/fq_output_0" [label="[]", style=solid]; "607 Reshape_814" -> "85 /features/features.7/conv/conv.2/Conv" [label="[1, 64, 1, 1]", style=solid]; "608 /features/features.7/conv/conv.2/Conv/WithoutBiases/fq_weights_1" -> "83 /features/features.7/conv/conv.2/Conv/WithoutBiases" [label="[64, 192, 1, 1]", style=solid]; -"609 Constant_113899" -> "608 /features/features.7/conv/conv.2/Conv/WithoutBiases/fq_weights_1" [label="[64, 1, 1, 1]", style=solid]; -"610 Constant_113898" -> "608 /features/features.7/conv/conv.2/Conv/WithoutBiases/fq_weights_1" [label="[64, 1, 1, 1]", style=solid]; -"611 Constant_113897" -> "608 /features/features.7/conv/conv.2/Conv/WithoutBiases/fq_weights_1" [label="[64, 1, 1, 1]", style=solid]; -"612 Constant_113896" -> "608 /features/features.7/conv/conv.2/Conv/WithoutBiases/fq_weights_1" [label="[64, 1, 1, 1]", style=solid]; +"609 Constant_41298" -> "608 /features/features.7/conv/conv.2/Conv/WithoutBiases/fq_weights_1" [label="[64, 1, 1, 1]", style=solid]; +"610 Constant_41297" -> "608 /features/features.7/conv/conv.2/Conv/WithoutBiases/fq_weights_1" [label="[64, 1, 1, 1]", style=solid]; +"611 Constant_41296" -> "608 /features/features.7/conv/conv.2/Conv/WithoutBiases/fq_weights_1" [label="[64, 1, 1, 1]", style=solid]; +"612 Constant_41295" -> "608 /features/features.7/conv/conv.2/Conv/WithoutBiases/fq_weights_1" [label="[64, 1, 1, 1]", style=solid]; "613 onnx^^Conv_598" -> "608 /features/features.7/conv/conv.2/Conv/WithoutBiases/fq_weights_1" [label="[64, 192, 1, 1]", style=solid]; -"614 Constant_113894" -> "81 /features/features.7/conv/conv.1/conv.1.2/Clip/fq_output_0" [label="[]", style=solid]; -"615 Constant_113893" -> "81 /features/features.7/conv/conv.1/conv.1.2/Clip/fq_output_0" [label="[]", style=solid]; -"616 Constant_113892" -> "81 /features/features.7/conv/conv.1/conv.1.2/Clip/fq_output_0" [label="[]", style=solid]; -"617 Constant_113891" -> "81 /features/features.7/conv/conv.1/conv.1.2/Clip/fq_output_0" [label="[]", style=solid]; +"614 Constant_41293" -> "81 /features/features.7/conv/conv.1/conv.1.2/Clip/fq_output_0" [label="[]", style=solid]; +"615 Constant_41292" -> "81 /features/features.7/conv/conv.1/conv.1.2/Clip/fq_output_0" [label="[]", style=solid]; +"616 Constant_41291" -> "81 /features/features.7/conv/conv.1/conv.1.2/Clip/fq_output_0" [label="[]", style=solid]; +"617 Constant_41290" -> "81 /features/features.7/conv/conv.1/conv.1.2/Clip/fq_output_0" [label="[]", style=solid]; "618 Reshape_795" -> "75 /features/features.7/conv/conv.1/conv.1.0/Conv" [label="[1, 192, 1, 1]", style=solid]; "619 /features/features.7/conv/conv.1/conv.1.0/Conv/WithoutBiases/fq_weights_1" -> "72 /features/features.7/conv/conv.1/conv.1.0/Conv/WithoutBiases" [label="[192, 1, 1, 3, 3]", style=solid]; -"620 Constant_113889" -> "619 /features/features.7/conv/conv.1/conv.1.0/Conv/WithoutBiases/fq_weights_1" [label="[192, 1, 1, 1, 1]", style=solid]; -"621 Constant_113888" -> "619 /features/features.7/conv/conv.1/conv.1.0/Conv/WithoutBiases/fq_weights_1" [label="[192, 1, 1, 1, 1]", style=solid]; -"622 Constant_113887" -> "619 /features/features.7/conv/conv.1/conv.1.0/Conv/WithoutBiases/fq_weights_1" [label="[192, 1, 1, 1, 1]", style=solid]; -"623 Constant_113886" -> "619 
/features/features.7/conv/conv.1/conv.1.0/Conv/WithoutBiases/fq_weights_1" [label="[192, 1, 1, 1, 1]", style=solid]; +"620 Constant_41288" -> "619 /features/features.7/conv/conv.1/conv.1.0/Conv/WithoutBiases/fq_weights_1" [label="[192, 1, 1, 1, 1]", style=solid]; +"621 Constant_41287" -> "619 /features/features.7/conv/conv.1/conv.1.0/Conv/WithoutBiases/fq_weights_1" [label="[192, 1, 1, 1, 1]", style=solid]; +"622 Constant_41286" -> "619 /features/features.7/conv/conv.1/conv.1.0/Conv/WithoutBiases/fq_weights_1" [label="[192, 1, 1, 1, 1]", style=solid]; +"623 Constant_41285" -> "619 /features/features.7/conv/conv.1/conv.1.0/Conv/WithoutBiases/fq_weights_1" [label="[192, 1, 1, 1, 1]", style=solid]; "624 Reshape_743" -> "619 /features/features.7/conv/conv.1/conv.1.0/Conv/WithoutBiases/fq_weights_1" [label="[192, 1, 1, 3, 3]", style=solid]; -"625 Constant_113884" -> "69 /features/features.7/conv/conv.0/conv.0.2/Clip/fq_output_0" [label="[1, 192, 1, 1]", style=solid]; -"626 Constant_113883" -> "69 /features/features.7/conv/conv.0/conv.0.2/Clip/fq_output_0" [label="[1, 192, 1, 1]", style=solid]; -"627 Constant_113882" -> "69 /features/features.7/conv/conv.0/conv.0.2/Clip/fq_output_0" [label="[1, 192, 1, 1]", style=solid]; -"628 Constant_113881" -> "69 /features/features.7/conv/conv.0/conv.0.2/Clip/fq_output_0" [label="[1, 192, 1, 1]", style=solid]; +"625 Constant_41283" -> "69 /features/features.7/conv/conv.0/conv.0.2/Clip/fq_output_0" [label="[1, 192, 1, 1]", style=solid]; +"626 Constant_41282" -> "69 /features/features.7/conv/conv.0/conv.0.2/Clip/fq_output_0" [label="[1, 192, 1, 1]", style=solid]; +"627 Constant_41281" -> "69 /features/features.7/conv/conv.0/conv.0.2/Clip/fq_output_0" [label="[1, 192, 1, 1]", style=solid]; +"628 Constant_41280" -> "69 /features/features.7/conv/conv.0/conv.0.2/Clip/fq_output_0" [label="[1, 192, 1, 1]", style=solid]; "629 Reshape_728" -> "63 /features/features.7/conv/conv.0/conv.0.0/Conv" [label="[1, 192, 1, 1]", style=solid]; "630 /features/features.7/conv/conv.0/conv.0.0/Conv/WithoutBiases/fq_weights_1" -> "60 /features/features.7/conv/conv.0/conv.0.0/Conv/WithoutBiases" [label="[192, 32, 1, 1]", style=solid]; -"631 Constant_113879" -> "630 /features/features.7/conv/conv.0/conv.0.0/Conv/WithoutBiases/fq_weights_1" [label="[192, 1, 1, 1]", style=solid]; -"632 Constant_113878" -> "630 /features/features.7/conv/conv.0/conv.0.0/Conv/WithoutBiases/fq_weights_1" [label="[192, 1, 1, 1]", style=solid]; -"633 Constant_113877" -> "630 /features/features.7/conv/conv.0/conv.0.0/Conv/WithoutBiases/fq_weights_1" [label="[192, 1, 1, 1]", style=solid]; -"634 Constant_113876" -> "630 /features/features.7/conv/conv.0/conv.0.0/Conv/WithoutBiases/fq_weights_1" [label="[192, 1, 1, 1]", style=solid]; +"631 Constant_41278" -> "630 /features/features.7/conv/conv.0/conv.0.0/Conv/WithoutBiases/fq_weights_1" [label="[192, 1, 1, 1]", style=solid]; +"632 Constant_41277" -> "630 /features/features.7/conv/conv.0/conv.0.0/Conv/WithoutBiases/fq_weights_1" [label="[192, 1, 1, 1]", style=solid]; +"633 Constant_41276" -> "630 /features/features.7/conv/conv.0/conv.0.0/Conv/WithoutBiases/fq_weights_1" [label="[192, 1, 1, 1]", style=solid]; +"634 Constant_41275" -> "630 /features/features.7/conv/conv.0/conv.0.0/Conv/WithoutBiases/fq_weights_1" [label="[192, 1, 1, 1]", style=solid]; "635 onnx^^Conv_592" -> "630 /features/features.7/conv/conv.0/conv.0.0/Conv/WithoutBiases/fq_weights_1" [label="[192, 32, 1, 1]", style=solid]; -"636 Constant_113874" -> "57 /features/features.6/Add/fq_output_0" 
[label="[]", style=solid]; -"637 Constant_113873" -> "57 /features/features.6/Add/fq_output_0" [label="[]", style=solid]; -"638 Constant_113872" -> "57 /features/features.6/Add/fq_output_0" [label="[]", style=solid]; -"639 Constant_113871" -> "57 /features/features.6/Add/fq_output_0" [label="[]", style=solid]; -"640 Constant_113869" -> "84 /features/features.6/conv/conv.2/Conv/fq_output_0" [label="[]", style=solid]; -"641 Constant_113868" -> "84 /features/features.6/conv/conv.2/Conv/fq_output_0" [label="[]", style=solid]; -"642 Constant_113867" -> "84 /features/features.6/conv/conv.2/Conv/fq_output_0" [label="[]", style=solid]; -"643 Constant_113866" -> "84 /features/features.6/conv/conv.2/Conv/fq_output_0" [label="[]", style=solid]; +"636 Constant_41273" -> "57 /features/features.6/Add/fq_output_0" [label="[]", style=solid]; +"637 Constant_41272" -> "57 /features/features.6/Add/fq_output_0" [label="[]", style=solid]; +"638 Constant_41271" -> "57 /features/features.6/Add/fq_output_0" [label="[]", style=solid]; +"639 Constant_41270" -> "57 /features/features.6/Add/fq_output_0" [label="[]", style=solid]; +"640 Constant_41268" -> "84 /features/features.6/conv/conv.2/Conv/fq_output_0" [label="[]", style=solid]; +"641 Constant_41267" -> "84 /features/features.6/conv/conv.2/Conv/fq_output_0" [label="[]", style=solid]; +"642 Constant_41266" -> "84 /features/features.6/conv/conv.2/Conv/fq_output_0" [label="[]", style=solid]; +"643 Constant_41265" -> "84 /features/features.6/conv/conv.2/Conv/fq_output_0" [label="[]", style=solid]; "644 Reshape_712" -> "82 /features/features.6/conv/conv.2/Conv" [label="[1, 32, 1, 1]", style=solid]; "645 /features/features.6/conv/conv.2/Conv/WithoutBiases/fq_weights_1" -> "79 /features/features.6/conv/conv.2/Conv/WithoutBiases" [label="[32, 192, 1, 1]", style=solid]; -"646 Constant_113864" -> "645 /features/features.6/conv/conv.2/Conv/WithoutBiases/fq_weights_1" [label="[32, 1, 1, 1]", style=solid]; -"647 Constant_113863" -> "645 /features/features.6/conv/conv.2/Conv/WithoutBiases/fq_weights_1" [label="[32, 1, 1, 1]", style=solid]; -"648 Constant_113862" -> "645 /features/features.6/conv/conv.2/Conv/WithoutBiases/fq_weights_1" [label="[32, 1, 1, 1]", style=solid]; -"649 Constant_113861" -> "645 /features/features.6/conv/conv.2/Conv/WithoutBiases/fq_weights_1" [label="[32, 1, 1, 1]", style=solid]; +"646 Constant_41263" -> "645 /features/features.6/conv/conv.2/Conv/WithoutBiases/fq_weights_1" [label="[32, 1, 1, 1]", style=solid]; +"647 Constant_41262" -> "645 /features/features.6/conv/conv.2/Conv/WithoutBiases/fq_weights_1" [label="[32, 1, 1, 1]", style=solid]; +"648 Constant_41261" -> "645 /features/features.6/conv/conv.2/Conv/WithoutBiases/fq_weights_1" [label="[32, 1, 1, 1]", style=solid]; +"649 Constant_41260" -> "645 /features/features.6/conv/conv.2/Conv/WithoutBiases/fq_weights_1" [label="[32, 1, 1, 1]", style=solid]; "650 onnx^^Conv_589" -> "645 /features/features.6/conv/conv.2/Conv/WithoutBiases/fq_weights_1" [label="[32, 192, 1, 1]", style=solid]; -"651 Constant_113859" -> "76 /features/features.6/conv/conv.1/conv.1.2/Clip/fq_output_0" [label="[]", style=solid]; -"652 Constant_113858" -> "76 /features/features.6/conv/conv.1/conv.1.2/Clip/fq_output_0" [label="[]", style=solid]; -"653 Constant_113857" -> "76 /features/features.6/conv/conv.1/conv.1.2/Clip/fq_output_0" [label="[]", style=solid]; -"654 Constant_113856" -> "76 /features/features.6/conv/conv.1/conv.1.2/Clip/fq_output_0" [label="[]", style=solid]; +"651 Constant_41258" -> "76 
/features/features.6/conv/conv.1/conv.1.2/Clip/fq_output_0" [label="[]", style=solid]; +"652 Constant_41257" -> "76 /features/features.6/conv/conv.1/conv.1.2/Clip/fq_output_0" [label="[]", style=solid]; +"653 Constant_41256" -> "76 /features/features.6/conv/conv.1/conv.1.2/Clip/fq_output_0" [label="[]", style=solid]; +"654 Constant_41255" -> "76 /features/features.6/conv/conv.1/conv.1.2/Clip/fq_output_0" [label="[]", style=solid]; "655 Reshape_693" -> "70 /features/features.6/conv/conv.1/conv.1.0/Conv" [label="[1, 192, 1, 1]", style=solid]; "656 /features/features.6/conv/conv.1/conv.1.0/Conv/WithoutBiases/fq_weights_1" -> "67 /features/features.6/conv/conv.1/conv.1.0/Conv/WithoutBiases" [label="[192, 1, 1, 3, 3]", style=solid]; -"657 Constant_113854" -> "656 /features/features.6/conv/conv.1/conv.1.0/Conv/WithoutBiases/fq_weights_1" [label="[192, 1, 1, 1, 1]", style=solid]; -"658 Constant_113853" -> "656 /features/features.6/conv/conv.1/conv.1.0/Conv/WithoutBiases/fq_weights_1" [label="[192, 1, 1, 1, 1]", style=solid]; -"659 Constant_113852" -> "656 /features/features.6/conv/conv.1/conv.1.0/Conv/WithoutBiases/fq_weights_1" [label="[192, 1, 1, 1, 1]", style=solid]; -"660 Constant_113851" -> "656 /features/features.6/conv/conv.1/conv.1.0/Conv/WithoutBiases/fq_weights_1" [label="[192, 1, 1, 1, 1]", style=solid]; +"657 Constant_41253" -> "656 /features/features.6/conv/conv.1/conv.1.0/Conv/WithoutBiases/fq_weights_1" [label="[192, 1, 1, 1, 1]", style=solid]; +"658 Constant_41252" -> "656 /features/features.6/conv/conv.1/conv.1.0/Conv/WithoutBiases/fq_weights_1" [label="[192, 1, 1, 1, 1]", style=solid]; +"659 Constant_41251" -> "656 /features/features.6/conv/conv.1/conv.1.0/Conv/WithoutBiases/fq_weights_1" [label="[192, 1, 1, 1, 1]", style=solid]; +"660 Constant_41250" -> "656 /features/features.6/conv/conv.1/conv.1.0/Conv/WithoutBiases/fq_weights_1" [label="[192, 1, 1, 1, 1]", style=solid]; "661 Reshape_641" -> "656 /features/features.6/conv/conv.1/conv.1.0/Conv/WithoutBiases/fq_weights_1" [label="[192, 1, 1, 3, 3]", style=solid]; -"662 Constant_113849" -> "64 /features/features.6/conv/conv.0/conv.0.2/Clip/fq_output_0" [label="[1, 192, 1, 1]", style=solid]; -"663 Constant_113848" -> "64 /features/features.6/conv/conv.0/conv.0.2/Clip/fq_output_0" [label="[1, 192, 1, 1]", style=solid]; -"664 Constant_113847" -> "64 /features/features.6/conv/conv.0/conv.0.2/Clip/fq_output_0" [label="[1, 192, 1, 1]", style=solid]; -"665 Constant_113846" -> "64 /features/features.6/conv/conv.0/conv.0.2/Clip/fq_output_0" [label="[1, 192, 1, 1]", style=solid]; +"662 Constant_41248" -> "64 /features/features.6/conv/conv.0/conv.0.2/Clip/fq_output_0" [label="[1, 192, 1, 1]", style=solid]; +"663 Constant_41247" -> "64 /features/features.6/conv/conv.0/conv.0.2/Clip/fq_output_0" [label="[1, 192, 1, 1]", style=solid]; +"664 Constant_41246" -> "64 /features/features.6/conv/conv.0/conv.0.2/Clip/fq_output_0" [label="[1, 192, 1, 1]", style=solid]; +"665 Constant_41245" -> "64 /features/features.6/conv/conv.0/conv.0.2/Clip/fq_output_0" [label="[1, 192, 1, 1]", style=solid]; "666 Reshape_626" -> "58 /features/features.6/conv/conv.0/conv.0.0/Conv" [label="[1, 192, 1, 1]", style=solid]; "667 /features/features.6/conv/conv.0/conv.0.0/Conv/WithoutBiases/fq_weights_1" -> "55 /features/features.6/conv/conv.0/conv.0.0/Conv/WithoutBiases" [label="[192, 32, 1, 1]", style=solid]; -"668 Constant_113844" -> "667 /features/features.6/conv/conv.0/conv.0.0/Conv/WithoutBiases/fq_weights_1" [label="[192, 1, 1, 1]", style=solid]; -"669 
Constant_113843" -> "667 /features/features.6/conv/conv.0/conv.0.0/Conv/WithoutBiases/fq_weights_1" [label="[192, 1, 1, 1]", style=solid]; -"670 Constant_113842" -> "667 /features/features.6/conv/conv.0/conv.0.0/Conv/WithoutBiases/fq_weights_1" [label="[192, 1, 1, 1]", style=solid]; -"671 Constant_113841" -> "667 /features/features.6/conv/conv.0/conv.0.0/Conv/WithoutBiases/fq_weights_1" [label="[192, 1, 1, 1]", style=solid]; +"668 Constant_41243" -> "667 /features/features.6/conv/conv.0/conv.0.0/Conv/WithoutBiases/fq_weights_1" [label="[192, 1, 1, 1]", style=solid]; +"669 Constant_41242" -> "667 /features/features.6/conv/conv.0/conv.0.0/Conv/WithoutBiases/fq_weights_1" [label="[192, 1, 1, 1]", style=solid]; +"670 Constant_41241" -> "667 /features/features.6/conv/conv.0/conv.0.0/Conv/WithoutBiases/fq_weights_1" [label="[192, 1, 1, 1]", style=solid]; +"671 Constant_41240" -> "667 /features/features.6/conv/conv.0/conv.0.0/Conv/WithoutBiases/fq_weights_1" [label="[192, 1, 1, 1]", style=solid]; "672 onnx^^Conv_583" -> "667 /features/features.6/conv/conv.0/conv.0.0/Conv/WithoutBiases/fq_weights_1" [label="[192, 32, 1, 1]", style=solid]; -"673 Constant_113839" -> "52 /features/features.5/Add/fq_output_0" [label="[]", style=solid]; -"674 Constant_113838" -> "52 /features/features.5/Add/fq_output_0" [label="[]", style=solid]; -"675 Constant_113837" -> "52 /features/features.5/Add/fq_output_0" [label="[]", style=solid]; -"676 Constant_113836" -> "52 /features/features.5/Add/fq_output_0" [label="[]", style=solid]; -"677 Constant_113834" -> "80 /features/features.5/conv/conv.2/Conv/fq_output_0" [label="[]", style=solid]; -"678 Constant_113833" -> "80 /features/features.5/conv/conv.2/Conv/fq_output_0" [label="[]", style=solid]; -"679 Constant_113832" -> "80 /features/features.5/conv/conv.2/Conv/fq_output_0" [label="[]", style=solid]; -"680 Constant_113831" -> "80 /features/features.5/conv/conv.2/Conv/fq_output_0" [label="[]", style=solid]; +"673 Constant_41238" -> "52 /features/features.5/Add/fq_output_0" [label="[]", style=solid]; +"674 Constant_41237" -> "52 /features/features.5/Add/fq_output_0" [label="[]", style=solid]; +"675 Constant_41236" -> "52 /features/features.5/Add/fq_output_0" [label="[]", style=solid]; +"676 Constant_41235" -> "52 /features/features.5/Add/fq_output_0" [label="[]", style=solid]; +"677 Constant_41233" -> "80 /features/features.5/conv/conv.2/Conv/fq_output_0" [label="[]", style=solid]; +"678 Constant_41232" -> "80 /features/features.5/conv/conv.2/Conv/fq_output_0" [label="[]", style=solid]; +"679 Constant_41231" -> "80 /features/features.5/conv/conv.2/Conv/fq_output_0" [label="[]", style=solid]; +"680 Constant_41230" -> "80 /features/features.5/conv/conv.2/Conv/fq_output_0" [label="[]", style=solid]; "681 Reshape_610" -> "77 /features/features.5/conv/conv.2/Conv" [label="[1, 32, 1, 1]", style=solid]; "682 /features/features.5/conv/conv.2/Conv/WithoutBiases/fq_weights_1" -> "74 /features/features.5/conv/conv.2/Conv/WithoutBiases" [label="[32, 192, 1, 1]", style=solid]; -"683 Constant_113829" -> "682 /features/features.5/conv/conv.2/Conv/WithoutBiases/fq_weights_1" [label="[32, 1, 1, 1]", style=solid]; -"684 Constant_113828" -> "682 /features/features.5/conv/conv.2/Conv/WithoutBiases/fq_weights_1" [label="[32, 1, 1, 1]", style=solid]; -"685 Constant_113827" -> "682 /features/features.5/conv/conv.2/Conv/WithoutBiases/fq_weights_1" [label="[32, 1, 1, 1]", style=solid]; -"686 Constant_113826" -> "682 /features/features.5/conv/conv.2/Conv/WithoutBiases/fq_weights_1" [label="[32, 
1, 1, 1]", style=solid]; +"683 Constant_41228" -> "682 /features/features.5/conv/conv.2/Conv/WithoutBiases/fq_weights_1" [label="[32, 1, 1, 1]", style=solid]; +"684 Constant_41227" -> "682 /features/features.5/conv/conv.2/Conv/WithoutBiases/fq_weights_1" [label="[32, 1, 1, 1]", style=solid]; +"685 Constant_41226" -> "682 /features/features.5/conv/conv.2/Conv/WithoutBiases/fq_weights_1" [label="[32, 1, 1, 1]", style=solid]; +"686 Constant_41225" -> "682 /features/features.5/conv/conv.2/Conv/WithoutBiases/fq_weights_1" [label="[32, 1, 1, 1]", style=solid]; "687 onnx^^Conv_580" -> "682 /features/features.5/conv/conv.2/Conv/WithoutBiases/fq_weights_1" [label="[32, 192, 1, 1]", style=solid]; -"688 Constant_113824" -> "71 /features/features.5/conv/conv.1/conv.1.2/Clip/fq_output_0" [label="[]", style=solid]; -"689 Constant_113823" -> "71 /features/features.5/conv/conv.1/conv.1.2/Clip/fq_output_0" [label="[]", style=solid]; -"690 Constant_113822" -> "71 /features/features.5/conv/conv.1/conv.1.2/Clip/fq_output_0" [label="[]", style=solid]; -"691 Constant_113821" -> "71 /features/features.5/conv/conv.1/conv.1.2/Clip/fq_output_0" [label="[]", style=solid]; +"688 Constant_41223" -> "71 /features/features.5/conv/conv.1/conv.1.2/Clip/fq_output_0" [label="[]", style=solid]; +"689 Constant_41222" -> "71 /features/features.5/conv/conv.1/conv.1.2/Clip/fq_output_0" [label="[]", style=solid]; +"690 Constant_41221" -> "71 /features/features.5/conv/conv.1/conv.1.2/Clip/fq_output_0" [label="[]", style=solid]; +"691 Constant_41220" -> "71 /features/features.5/conv/conv.1/conv.1.2/Clip/fq_output_0" [label="[]", style=solid]; "692 Reshape_591" -> "65 /features/features.5/conv/conv.1/conv.1.0/Conv" [label="[1, 192, 1, 1]", style=solid]; "693 /features/features.5/conv/conv.1/conv.1.0/Conv/WithoutBiases/fq_weights_1" -> "62 /features/features.5/conv/conv.1/conv.1.0/Conv/WithoutBiases" [label="[192, 1, 1, 3, 3]", style=solid]; -"694 Constant_113819" -> "693 /features/features.5/conv/conv.1/conv.1.0/Conv/WithoutBiases/fq_weights_1" [label="[192, 1, 1, 1, 1]", style=solid]; -"695 Constant_113818" -> "693 /features/features.5/conv/conv.1/conv.1.0/Conv/WithoutBiases/fq_weights_1" [label="[192, 1, 1, 1, 1]", style=solid]; -"696 Constant_113817" -> "693 /features/features.5/conv/conv.1/conv.1.0/Conv/WithoutBiases/fq_weights_1" [label="[192, 1, 1, 1, 1]", style=solid]; -"697 Constant_113816" -> "693 /features/features.5/conv/conv.1/conv.1.0/Conv/WithoutBiases/fq_weights_1" [label="[192, 1, 1, 1, 1]", style=solid]; +"694 Constant_41218" -> "693 /features/features.5/conv/conv.1/conv.1.0/Conv/WithoutBiases/fq_weights_1" [label="[192, 1, 1, 1, 1]", style=solid]; +"695 Constant_41217" -> "693 /features/features.5/conv/conv.1/conv.1.0/Conv/WithoutBiases/fq_weights_1" [label="[192, 1, 1, 1, 1]", style=solid]; +"696 Constant_41216" -> "693 /features/features.5/conv/conv.1/conv.1.0/Conv/WithoutBiases/fq_weights_1" [label="[192, 1, 1, 1, 1]", style=solid]; +"697 Constant_41215" -> "693 /features/features.5/conv/conv.1/conv.1.0/Conv/WithoutBiases/fq_weights_1" [label="[192, 1, 1, 1, 1]", style=solid]; "698 Reshape_539" -> "693 /features/features.5/conv/conv.1/conv.1.0/Conv/WithoutBiases/fq_weights_1" [label="[192, 1, 1, 3, 3]", style=solid]; -"699 Constant_113814" -> "59 /features/features.5/conv/conv.0/conv.0.2/Clip/fq_output_0" [label="[1, 192, 1, 1]", style=solid]; -"700 Constant_113813" -> "59 /features/features.5/conv/conv.0/conv.0.2/Clip/fq_output_0" [label="[1, 192, 1, 1]", style=solid]; -"701 Constant_113812" -> "59 
/features/features.5/conv/conv.0/conv.0.2/Clip/fq_output_0" [label="[1, 192, 1, 1]", style=solid]; -"702 Constant_113811" -> "59 /features/features.5/conv/conv.0/conv.0.2/Clip/fq_output_0" [label="[1, 192, 1, 1]", style=solid]; +"699 Constant_41213" -> "59 /features/features.5/conv/conv.0/conv.0.2/Clip/fq_output_0" [label="[1, 192, 1, 1]", style=solid]; +"700 Constant_41212" -> "59 /features/features.5/conv/conv.0/conv.0.2/Clip/fq_output_0" [label="[1, 192, 1, 1]", style=solid]; +"701 Constant_41211" -> "59 /features/features.5/conv/conv.0/conv.0.2/Clip/fq_output_0" [label="[1, 192, 1, 1]", style=solid]; +"702 Constant_41210" -> "59 /features/features.5/conv/conv.0/conv.0.2/Clip/fq_output_0" [label="[1, 192, 1, 1]", style=solid]; "703 Reshape_524" -> "53 /features/features.5/conv/conv.0/conv.0.0/Conv" [label="[1, 192, 1, 1]", style=solid]; "704 /features/features.5/conv/conv.0/conv.0.0/Conv/WithoutBiases/fq_weights_1" -> "51 /features/features.5/conv/conv.0/conv.0.0/Conv/WithoutBiases" [label="[192, 32, 1, 1]", style=solid]; -"705 Constant_113809" -> "704 /features/features.5/conv/conv.0/conv.0.0/Conv/WithoutBiases/fq_weights_1" [label="[192, 1, 1, 1]", style=solid]; -"706 Constant_113808" -> "704 /features/features.5/conv/conv.0/conv.0.0/Conv/WithoutBiases/fq_weights_1" [label="[192, 1, 1, 1]", style=solid]; -"707 Constant_113807" -> "704 /features/features.5/conv/conv.0/conv.0.0/Conv/WithoutBiases/fq_weights_1" [label="[192, 1, 1, 1]", style=solid]; -"708 Constant_113806" -> "704 /features/features.5/conv/conv.0/conv.0.0/Conv/WithoutBiases/fq_weights_1" [label="[192, 1, 1, 1]", style=solid]; +"705 Constant_41208" -> "704 /features/features.5/conv/conv.0/conv.0.0/Conv/WithoutBiases/fq_weights_1" [label="[192, 1, 1, 1]", style=solid]; +"706 Constant_41207" -> "704 /features/features.5/conv/conv.0/conv.0.0/Conv/WithoutBiases/fq_weights_1" [label="[192, 1, 1, 1]", style=solid]; +"707 Constant_41206" -> "704 /features/features.5/conv/conv.0/conv.0.0/Conv/WithoutBiases/fq_weights_1" [label="[192, 1, 1, 1]", style=solid]; +"708 Constant_41205" -> "704 /features/features.5/conv/conv.0/conv.0.0/Conv/WithoutBiases/fq_weights_1" [label="[192, 1, 1, 1]", style=solid]; "709 onnx^^Conv_574" -> "704 /features/features.5/conv/conv.0/conv.0.0/Conv/WithoutBiases/fq_weights_1" [label="[192, 32, 1, 1]", style=solid]; -"710 Constant_113804" -> "49 /features/features.4/conv/conv.2/Conv/fq_output_0" [label="[]", style=solid]; -"711 Constant_113803" -> "49 /features/features.4/conv/conv.2/Conv/fq_output_0" [label="[]", style=solid]; -"712 Constant_113802" -> "49 /features/features.4/conv/conv.2/Conv/fq_output_0" [label="[]", style=solid]; -"713 Constant_113801" -> "49 /features/features.4/conv/conv.2/Conv/fq_output_0" [label="[]", style=solid]; +"710 Constant_41203" -> "49 /features/features.4/conv/conv.2/Conv/fq_output_0" [label="[]", style=solid]; +"711 Constant_41202" -> "49 /features/features.4/conv/conv.2/Conv/fq_output_0" [label="[]", style=solid]; +"712 Constant_41201" -> "49 /features/features.4/conv/conv.2/Conv/fq_output_0" [label="[]", style=solid]; +"713 Constant_41200" -> "49 /features/features.4/conv/conv.2/Conv/fq_output_0" [label="[]", style=solid]; "714 Reshape_509" -> "48 /features/features.4/conv/conv.2/Conv" [label="[1, 32, 1, 1]", style=solid]; "715 /features/features.4/conv/conv.2/Conv/WithoutBiases/fq_weights_1" -> "46 /features/features.4/conv/conv.2/Conv/WithoutBiases" [label="[32, 144, 1, 1]", style=solid]; -"716 Constant_113799" -> "715 
/features/features.4/conv/conv.2/Conv/WithoutBiases/fq_weights_1" [label="[32, 1, 1, 1]", style=solid]; -"717 Constant_113798" -> "715 /features/features.4/conv/conv.2/Conv/WithoutBiases/fq_weights_1" [label="[32, 1, 1, 1]", style=solid]; -"718 Constant_113797" -> "715 /features/features.4/conv/conv.2/Conv/WithoutBiases/fq_weights_1" [label="[32, 1, 1, 1]", style=solid]; -"719 Constant_113796" -> "715 /features/features.4/conv/conv.2/Conv/WithoutBiases/fq_weights_1" [label="[32, 1, 1, 1]", style=solid]; +"716 Constant_41198" -> "715 /features/features.4/conv/conv.2/Conv/WithoutBiases/fq_weights_1" [label="[32, 1, 1, 1]", style=solid]; +"717 Constant_41197" -> "715 /features/features.4/conv/conv.2/Conv/WithoutBiases/fq_weights_1" [label="[32, 1, 1, 1]", style=solid]; +"718 Constant_41196" -> "715 /features/features.4/conv/conv.2/Conv/WithoutBiases/fq_weights_1" [label="[32, 1, 1, 1]", style=solid]; +"719 Constant_41195" -> "715 /features/features.4/conv/conv.2/Conv/WithoutBiases/fq_weights_1" [label="[32, 1, 1, 1]", style=solid]; "720 onnx^^Conv_571" -> "715 /features/features.4/conv/conv.2/Conv/WithoutBiases/fq_weights_1" [label="[32, 144, 1, 1]", style=solid]; -"721 Constant_113794" -> "44 /features/features.4/conv/conv.1/conv.1.2/Clip/fq_output_0" [label="[]", style=solid]; -"722 Constant_113793" -> "44 /features/features.4/conv/conv.1/conv.1.2/Clip/fq_output_0" [label="[]", style=solid]; -"723 Constant_113792" -> "44 /features/features.4/conv/conv.1/conv.1.2/Clip/fq_output_0" [label="[]", style=solid]; -"724 Constant_113791" -> "44 /features/features.4/conv/conv.1/conv.1.2/Clip/fq_output_0" [label="[]", style=solid]; +"721 Constant_41193" -> "44 /features/features.4/conv/conv.1/conv.1.2/Clip/fq_output_0" [label="[]", style=solid]; +"722 Constant_41192" -> "44 /features/features.4/conv/conv.1/conv.1.2/Clip/fq_output_0" [label="[]", style=solid]; +"723 Constant_41191" -> "44 /features/features.4/conv/conv.1/conv.1.2/Clip/fq_output_0" [label="[]", style=solid]; +"724 Constant_41190" -> "44 /features/features.4/conv/conv.1/conv.1.2/Clip/fq_output_0" [label="[]", style=solid]; "725 Reshape_490" -> "40 /features/features.4/conv/conv.1/conv.1.0/Conv" [label="[1, 144, 1, 1]", style=solid]; "726 /features/features.4/conv/conv.1/conv.1.0/Conv/WithoutBiases/fq_weights_1" -> "38 /features/features.4/conv/conv.1/conv.1.0/Conv/WithoutBiases" [label="[144, 1, 1, 3, 3]", style=solid]; -"727 Constant_113789" -> "726 /features/features.4/conv/conv.1/conv.1.0/Conv/WithoutBiases/fq_weights_1" [label="[144, 1, 1, 1, 1]", style=solid]; -"728 Constant_113788" -> "726 /features/features.4/conv/conv.1/conv.1.0/Conv/WithoutBiases/fq_weights_1" [label="[144, 1, 1, 1, 1]", style=solid]; -"729 Constant_113787" -> "726 /features/features.4/conv/conv.1/conv.1.0/Conv/WithoutBiases/fq_weights_1" [label="[144, 1, 1, 1, 1]", style=solid]; -"730 Constant_113786" -> "726 /features/features.4/conv/conv.1/conv.1.0/Conv/WithoutBiases/fq_weights_1" [label="[144, 1, 1, 1, 1]", style=solid]; +"727 Constant_41188" -> "726 /features/features.4/conv/conv.1/conv.1.0/Conv/WithoutBiases/fq_weights_1" [label="[144, 1, 1, 1, 1]", style=solid]; +"728 Constant_41187" -> "726 /features/features.4/conv/conv.1/conv.1.0/Conv/WithoutBiases/fq_weights_1" [label="[144, 1, 1, 1, 1]", style=solid]; +"729 Constant_41186" -> "726 /features/features.4/conv/conv.1/conv.1.0/Conv/WithoutBiases/fq_weights_1" [label="[144, 1, 1, 1, 1]", style=solid]; +"730 Constant_41185" -> "726 /features/features.4/conv/conv.1/conv.1.0/Conv/WithoutBiases/fq_weights_1" 
[label="[144, 1, 1, 1, 1]", style=solid]; "731 Reshape_438" -> "726 /features/features.4/conv/conv.1/conv.1.0/Conv/WithoutBiases/fq_weights_1" [label="[144, 1, 1, 3, 3]", style=solid]; -"732 Constant_113784" -> "36 /features/features.4/conv/conv.0/conv.0.2/Clip/fq_output_0" [label="[1, 144, 1, 1]", style=solid]; -"733 Constant_113783" -> "36 /features/features.4/conv/conv.0/conv.0.2/Clip/fq_output_0" [label="[1, 144, 1, 1]", style=solid]; -"734 Constant_113782" -> "36 /features/features.4/conv/conv.0/conv.0.2/Clip/fq_output_0" [label="[1, 144, 1, 1]", style=solid]; -"735 Constant_113781" -> "36 /features/features.4/conv/conv.0/conv.0.2/Clip/fq_output_0" [label="[1, 144, 1, 1]", style=solid]; +"732 Constant_41183" -> "36 /features/features.4/conv/conv.0/conv.0.2/Clip/fq_output_0" [label="[1, 144, 1, 1]", style=solid]; +"733 Constant_41182" -> "36 /features/features.4/conv/conv.0/conv.0.2/Clip/fq_output_0" [label="[1, 144, 1, 1]", style=solid]; +"734 Constant_41181" -> "36 /features/features.4/conv/conv.0/conv.0.2/Clip/fq_output_0" [label="[1, 144, 1, 1]", style=solid]; +"735 Constant_41180" -> "36 /features/features.4/conv/conv.0/conv.0.2/Clip/fq_output_0" [label="[1, 144, 1, 1]", style=solid]; "736 Reshape_423" -> "32 /features/features.4/conv/conv.0/conv.0.0/Conv" [label="[1, 144, 1, 1]", style=solid]; "737 /features/features.4/conv/conv.0/conv.0.0/Conv/WithoutBiases/fq_weights_1" -> "30 /features/features.4/conv/conv.0/conv.0.0/Conv/WithoutBiases" [label="[144, 24, 1, 1]", style=solid]; -"738 Constant_113779" -> "737 /features/features.4/conv/conv.0/conv.0.0/Conv/WithoutBiases/fq_weights_1" [label="[144, 1, 1, 1]", style=solid]; -"739 Constant_113778" -> "737 /features/features.4/conv/conv.0/conv.0.0/Conv/WithoutBiases/fq_weights_1" [label="[144, 1, 1, 1]", style=solid]; -"740 Constant_113777" -> "737 /features/features.4/conv/conv.0/conv.0.0/Conv/WithoutBiases/fq_weights_1" [label="[144, 1, 1, 1]", style=solid]; -"741 Constant_113776" -> "737 /features/features.4/conv/conv.0/conv.0.0/Conv/WithoutBiases/fq_weights_1" [label="[144, 1, 1, 1]", style=solid]; +"738 Constant_41178" -> "737 /features/features.4/conv/conv.0/conv.0.0/Conv/WithoutBiases/fq_weights_1" [label="[144, 1, 1, 1]", style=solid]; +"739 Constant_41177" -> "737 /features/features.4/conv/conv.0/conv.0.0/Conv/WithoutBiases/fq_weights_1" [label="[144, 1, 1, 1]", style=solid]; +"740 Constant_41176" -> "737 /features/features.4/conv/conv.0/conv.0.0/Conv/WithoutBiases/fq_weights_1" [label="[144, 1, 1, 1]", style=solid]; +"741 Constant_41175" -> "737 /features/features.4/conv/conv.0/conv.0.0/Conv/WithoutBiases/fq_weights_1" [label="[144, 1, 1, 1]", style=solid]; "742 onnx^^Conv_565" -> "737 /features/features.4/conv/conv.0/conv.0.0/Conv/WithoutBiases/fq_weights_1" [label="[144, 24, 1, 1]", style=solid]; -"743 Constant_113774" -> "28 /features/features.3/Add/fq_output_0" [label="[]", style=solid]; -"744 Constant_113773" -> "28 /features/features.3/Add/fq_output_0" [label="[]", style=solid]; -"745 Constant_113772" -> "28 /features/features.3/Add/fq_output_0" [label="[]", style=solid]; -"746 Constant_113771" -> "28 /features/features.3/Add/fq_output_0" [label="[]", style=solid]; -"747 Constant_113769" -> "47 /features/features.3/conv/conv.2/Conv/fq_output_0" [label="[]", style=solid]; -"748 Constant_113768" -> "47 /features/features.3/conv/conv.2/Conv/fq_output_0" [label="[]", style=solid]; -"749 Constant_113767" -> "47 /features/features.3/conv/conv.2/Conv/fq_output_0" [label="[]", style=solid]; -"750 Constant_113766" -> "47 
/features/features.3/conv/conv.2/Conv/fq_output_0" [label="[]", style=solid]; +"743 Constant_41173" -> "28 /features/features.3/Add/fq_output_0" [label="[]", style=solid]; +"744 Constant_41172" -> "28 /features/features.3/Add/fq_output_0" [label="[]", style=solid]; +"745 Constant_41171" -> "28 /features/features.3/Add/fq_output_0" [label="[]", style=solid]; +"746 Constant_41170" -> "28 /features/features.3/Add/fq_output_0" [label="[]", style=solid]; +"747 Constant_41168" -> "47 /features/features.3/conv/conv.2/Conv/fq_output_0" [label="[]", style=solid]; +"748 Constant_41167" -> "47 /features/features.3/conv/conv.2/Conv/fq_output_0" [label="[]", style=solid]; +"749 Constant_41166" -> "47 /features/features.3/conv/conv.2/Conv/fq_output_0" [label="[]", style=solid]; +"750 Constant_41165" -> "47 /features/features.3/conv/conv.2/Conv/fq_output_0" [label="[]", style=solid]; "751 Reshape_407" -> "45 /features/features.3/conv/conv.2/Conv" [label="[1, 24, 1, 1]", style=solid]; "752 /features/features.3/conv/conv.2/Conv/WithoutBiases/fq_weights_1" -> "43 /features/features.3/conv/conv.2/Conv/WithoutBiases" [label="[24, 144, 1, 1]", style=solid]; -"753 Constant_113764" -> "752 /features/features.3/conv/conv.2/Conv/WithoutBiases/fq_weights_1" [label="[24, 1, 1, 1]", style=solid]; -"754 Constant_113763" -> "752 /features/features.3/conv/conv.2/Conv/WithoutBiases/fq_weights_1" [label="[24, 1, 1, 1]", style=solid]; -"755 Constant_113762" -> "752 /features/features.3/conv/conv.2/Conv/WithoutBiases/fq_weights_1" [label="[24, 1, 1, 1]", style=solid]; -"756 Constant_113761" -> "752 /features/features.3/conv/conv.2/Conv/WithoutBiases/fq_weights_1" [label="[24, 1, 1, 1]", style=solid]; +"753 Constant_41163" -> "752 /features/features.3/conv/conv.2/Conv/WithoutBiases/fq_weights_1" [label="[24, 1, 1, 1]", style=solid]; +"754 Constant_41162" -> "752 /features/features.3/conv/conv.2/Conv/WithoutBiases/fq_weights_1" [label="[24, 1, 1, 1]", style=solid]; +"755 Constant_41161" -> "752 /features/features.3/conv/conv.2/Conv/WithoutBiases/fq_weights_1" [label="[24, 1, 1, 1]", style=solid]; +"756 Constant_41160" -> "752 /features/features.3/conv/conv.2/Conv/WithoutBiases/fq_weights_1" [label="[24, 1, 1, 1]", style=solid]; "757 onnx^^Conv_562" -> "752 /features/features.3/conv/conv.2/Conv/WithoutBiases/fq_weights_1" [label="[24, 144, 1, 1]", style=solid]; -"758 Constant_113759" -> "41 /features/features.3/conv/conv.1/conv.1.2/Clip/fq_output_0" [label="[]", style=solid]; -"759 Constant_113758" -> "41 /features/features.3/conv/conv.1/conv.1.2/Clip/fq_output_0" [label="[]", style=solid]; -"760 Constant_113757" -> "41 /features/features.3/conv/conv.1/conv.1.2/Clip/fq_output_0" [label="[]", style=solid]; -"761 Constant_113756" -> "41 /features/features.3/conv/conv.1/conv.1.2/Clip/fq_output_0" [label="[]", style=solid]; +"758 Constant_41158" -> "41 /features/features.3/conv/conv.1/conv.1.2/Clip/fq_output_0" [label="[]", style=solid]; +"759 Constant_41157" -> "41 /features/features.3/conv/conv.1/conv.1.2/Clip/fq_output_0" [label="[]", style=solid]; +"760 Constant_41156" -> "41 /features/features.3/conv/conv.1/conv.1.2/Clip/fq_output_0" [label="[]", style=solid]; +"761 Constant_41155" -> "41 /features/features.3/conv/conv.1/conv.1.2/Clip/fq_output_0" [label="[]", style=solid]; "762 Reshape_388" -> "37 /features/features.3/conv/conv.1/conv.1.0/Conv" [label="[1, 144, 1, 1]", style=solid]; "763 /features/features.3/conv/conv.1/conv.1.0/Conv/WithoutBiases/fq_weights_1" -> "35 
/features/features.3/conv/conv.1/conv.1.0/Conv/WithoutBiases" [label="[144, 1, 1, 3, 3]", style=solid]; -"764 Constant_113754" -> "763 /features/features.3/conv/conv.1/conv.1.0/Conv/WithoutBiases/fq_weights_1" [label="[144, 1, 1, 1, 1]", style=solid]; -"765 Constant_113753" -> "763 /features/features.3/conv/conv.1/conv.1.0/Conv/WithoutBiases/fq_weights_1" [label="[144, 1, 1, 1, 1]", style=solid]; -"766 Constant_113752" -> "763 /features/features.3/conv/conv.1/conv.1.0/Conv/WithoutBiases/fq_weights_1" [label="[144, 1, 1, 1, 1]", style=solid]; -"767 Constant_113751" -> "763 /features/features.3/conv/conv.1/conv.1.0/Conv/WithoutBiases/fq_weights_1" [label="[144, 1, 1, 1, 1]", style=solid]; +"764 Constant_41153" -> "763 /features/features.3/conv/conv.1/conv.1.0/Conv/WithoutBiases/fq_weights_1" [label="[144, 1, 1, 1, 1]", style=solid]; +"765 Constant_41152" -> "763 /features/features.3/conv/conv.1/conv.1.0/Conv/WithoutBiases/fq_weights_1" [label="[144, 1, 1, 1, 1]", style=solid]; +"766 Constant_41151" -> "763 /features/features.3/conv/conv.1/conv.1.0/Conv/WithoutBiases/fq_weights_1" [label="[144, 1, 1, 1, 1]", style=solid]; +"767 Constant_41150" -> "763 /features/features.3/conv/conv.1/conv.1.0/Conv/WithoutBiases/fq_weights_1" [label="[144, 1, 1, 1, 1]", style=solid]; "768 Reshape_336" -> "763 /features/features.3/conv/conv.1/conv.1.0/Conv/WithoutBiases/fq_weights_1" [label="[144, 1, 1, 3, 3]", style=solid]; -"769 Constant_113749" -> "33 /features/features.3/conv/conv.0/conv.0.2/Clip/fq_output_0" [label="[1, 144, 1, 1]", style=solid]; -"770 Constant_113748" -> "33 /features/features.3/conv/conv.0/conv.0.2/Clip/fq_output_0" [label="[1, 144, 1, 1]", style=solid]; -"771 Constant_113747" -> "33 /features/features.3/conv/conv.0/conv.0.2/Clip/fq_output_0" [label="[1, 144, 1, 1]", style=solid]; -"772 Constant_113746" -> "33 /features/features.3/conv/conv.0/conv.0.2/Clip/fq_output_0" [label="[1, 144, 1, 1]", style=solid]; +"769 Constant_41148" -> "33 /features/features.3/conv/conv.0/conv.0.2/Clip/fq_output_0" [label="[1, 144, 1, 1]", style=solid]; +"770 Constant_41147" -> "33 /features/features.3/conv/conv.0/conv.0.2/Clip/fq_output_0" [label="[1, 144, 1, 1]", style=solid]; +"771 Constant_41146" -> "33 /features/features.3/conv/conv.0/conv.0.2/Clip/fq_output_0" [label="[1, 144, 1, 1]", style=solid]; +"772 Constant_41145" -> "33 /features/features.3/conv/conv.0/conv.0.2/Clip/fq_output_0" [label="[1, 144, 1, 1]", style=solid]; "773 Reshape_321" -> "29 /features/features.3/conv/conv.0/conv.0.0/Conv" [label="[1, 144, 1, 1]", style=solid]; "774 /features/features.3/conv/conv.0/conv.0.0/Conv/WithoutBiases/fq_weights_1" -> "27 /features/features.3/conv/conv.0/conv.0.0/Conv/WithoutBiases" [label="[144, 24, 1, 1]", style=solid]; -"775 Constant_113744" -> "774 /features/features.3/conv/conv.0/conv.0.0/Conv/WithoutBiases/fq_weights_1" [label="[144, 1, 1, 1]", style=solid]; -"776 Constant_113743" -> "774 /features/features.3/conv/conv.0/conv.0.0/Conv/WithoutBiases/fq_weights_1" [label="[144, 1, 1, 1]", style=solid]; -"777 Constant_113742" -> "774 /features/features.3/conv/conv.0/conv.0.0/Conv/WithoutBiases/fq_weights_1" [label="[144, 1, 1, 1]", style=solid]; -"778 Constant_113741" -> "774 /features/features.3/conv/conv.0/conv.0.0/Conv/WithoutBiases/fq_weights_1" [label="[144, 1, 1, 1]", style=solid]; +"775 Constant_41143" -> "774 /features/features.3/conv/conv.0/conv.0.0/Conv/WithoutBiases/fq_weights_1" [label="[144, 1, 1, 1]", style=solid]; +"776 Constant_41142" -> "774 
/features/features.3/conv/conv.0/conv.0.0/Conv/WithoutBiases/fq_weights_1" [label="[144, 1, 1, 1]", style=solid]; +"777 Constant_41141" -> "774 /features/features.3/conv/conv.0/conv.0.0/Conv/WithoutBiases/fq_weights_1" [label="[144, 1, 1, 1]", style=solid]; +"778 Constant_41140" -> "774 /features/features.3/conv/conv.0/conv.0.0/Conv/WithoutBiases/fq_weights_1" [label="[144, 1, 1, 1]", style=solid]; "779 onnx^^Conv_556" -> "774 /features/features.3/conv/conv.0/conv.0.0/Conv/WithoutBiases/fq_weights_1" [label="[144, 24, 1, 1]", style=solid]; -"780 Constant_113739" -> "25 /features/features.2/conv/conv.2/Conv/fq_output_0" [label="[]", style=solid]; -"781 Constant_113738" -> "25 /features/features.2/conv/conv.2/Conv/fq_output_0" [label="[]", style=solid]; -"782 Constant_113737" -> "25 /features/features.2/conv/conv.2/Conv/fq_output_0" [label="[]", style=solid]; -"783 Constant_113736" -> "25 /features/features.2/conv/conv.2/Conv/fq_output_0" [label="[]", style=solid]; +"780 Constant_41138" -> "25 /features/features.2/conv/conv.2/Conv/fq_output_0" [label="[]", style=solid]; +"781 Constant_41137" -> "25 /features/features.2/conv/conv.2/Conv/fq_output_0" [label="[]", style=solid]; +"782 Constant_41136" -> "25 /features/features.2/conv/conv.2/Conv/fq_output_0" [label="[]", style=solid]; +"783 Constant_41135" -> "25 /features/features.2/conv/conv.2/Conv/fq_output_0" [label="[]", style=solid]; "784 Reshape_306" -> "24 /features/features.2/conv/conv.2/Conv" [label="[1, 24, 1, 1]", style=solid]; "785 /features/features.2/conv/conv.2/Conv/WithoutBiases/fq_weights_1" -> "23 /features/features.2/conv/conv.2/Conv/WithoutBiases" [label="[24, 96, 1, 1]", style=solid]; -"786 Constant_113734" -> "785 /features/features.2/conv/conv.2/Conv/WithoutBiases/fq_weights_1" [label="[24, 1, 1, 1]", style=solid]; -"787 Constant_113733" -> "785 /features/features.2/conv/conv.2/Conv/WithoutBiases/fq_weights_1" [label="[24, 1, 1, 1]", style=solid]; -"788 Constant_113732" -> "785 /features/features.2/conv/conv.2/Conv/WithoutBiases/fq_weights_1" [label="[24, 1, 1, 1]", style=solid]; -"789 Constant_113731" -> "785 /features/features.2/conv/conv.2/Conv/WithoutBiases/fq_weights_1" [label="[24, 1, 1, 1]", style=solid]; +"786 Constant_41133" -> "785 /features/features.2/conv/conv.2/Conv/WithoutBiases/fq_weights_1" [label="[24, 1, 1, 1]", style=solid]; +"787 Constant_41132" -> "785 /features/features.2/conv/conv.2/Conv/WithoutBiases/fq_weights_1" [label="[24, 1, 1, 1]", style=solid]; +"788 Constant_41131" -> "785 /features/features.2/conv/conv.2/Conv/WithoutBiases/fq_weights_1" [label="[24, 1, 1, 1]", style=solid]; +"789 Constant_41130" -> "785 /features/features.2/conv/conv.2/Conv/WithoutBiases/fq_weights_1" [label="[24, 1, 1, 1]", style=solid]; "790 onnx^^Conv_553" -> "785 /features/features.2/conv/conv.2/Conv/WithoutBiases/fq_weights_1" [label="[24, 96, 1, 1]", style=solid]; -"791 Constant_113729" -> "22 /features/features.2/conv/conv.1/conv.1.2/Clip/fq_output_0" [label="[]", style=solid]; -"792 Constant_113728" -> "22 /features/features.2/conv/conv.1/conv.1.2/Clip/fq_output_0" [label="[]", style=solid]; -"793 Constant_113727" -> "22 /features/features.2/conv/conv.1/conv.1.2/Clip/fq_output_0" [label="[]", style=solid]; -"794 Constant_113726" -> "22 /features/features.2/conv/conv.1/conv.1.2/Clip/fq_output_0" [label="[]", style=solid]; +"791 Constant_41128" -> "22 /features/features.2/conv/conv.1/conv.1.2/Clip/fq_output_0" [label="[]", style=solid]; +"792 Constant_41127" -> "22 
/features/features.2/conv/conv.1/conv.1.2/Clip/fq_output_0" [label="[]", style=solid]; +"793 Constant_41126" -> "22 /features/features.2/conv/conv.1/conv.1.2/Clip/fq_output_0" [label="[]", style=solid]; +"794 Constant_41125" -> "22 /features/features.2/conv/conv.1/conv.1.2/Clip/fq_output_0" [label="[]", style=solid]; "795 Reshape_287" -> "20 /features/features.2/conv/conv.1/conv.1.0/Conv" [label="[1, 96, 1, 1]", style=solid]; "796 /features/features.2/conv/conv.1/conv.1.0/Conv/WithoutBiases/fq_weights_1" -> "19 /features/features.2/conv/conv.1/conv.1.0/Conv/WithoutBiases" [label="[96, 1, 1, 3, 3]", style=solid]; -"797 Constant_113724" -> "796 /features/features.2/conv/conv.1/conv.1.0/Conv/WithoutBiases/fq_weights_1" [label="[96, 1, 1, 1, 1]", style=solid]; -"798 Constant_113723" -> "796 /features/features.2/conv/conv.1/conv.1.0/Conv/WithoutBiases/fq_weights_1" [label="[96, 1, 1, 1, 1]", style=solid]; -"799 Constant_113722" -> "796 /features/features.2/conv/conv.1/conv.1.0/Conv/WithoutBiases/fq_weights_1" [label="[96, 1, 1, 1, 1]", style=solid]; -"800 Constant_113721" -> "796 /features/features.2/conv/conv.1/conv.1.0/Conv/WithoutBiases/fq_weights_1" [label="[96, 1, 1, 1, 1]", style=solid]; +"797 Constant_41123" -> "796 /features/features.2/conv/conv.1/conv.1.0/Conv/WithoutBiases/fq_weights_1" [label="[96, 1, 1, 1, 1]", style=solid]; +"798 Constant_41122" -> "796 /features/features.2/conv/conv.1/conv.1.0/Conv/WithoutBiases/fq_weights_1" [label="[96, 1, 1, 1, 1]", style=solid]; +"799 Constant_41121" -> "796 /features/features.2/conv/conv.1/conv.1.0/Conv/WithoutBiases/fq_weights_1" [label="[96, 1, 1, 1, 1]", style=solid]; +"800 Constant_41120" -> "796 /features/features.2/conv/conv.1/conv.1.0/Conv/WithoutBiases/fq_weights_1" [label="[96, 1, 1, 1, 1]", style=solid]; "801 Reshape_235" -> "796 /features/features.2/conv/conv.1/conv.1.0/Conv/WithoutBiases/fq_weights_1" [label="[96, 1, 1, 3, 3]", style=solid]; -"802 Constant_113719" -> "18 /features/features.2/conv/conv.0/conv.0.2/Clip/fq_output_0" [label="[1, 96, 1, 1]", style=solid]; -"803 Constant_113718" -> "18 /features/features.2/conv/conv.0/conv.0.2/Clip/fq_output_0" [label="[1, 96, 1, 1]", style=solid]; -"804 Constant_113717" -> "18 /features/features.2/conv/conv.0/conv.0.2/Clip/fq_output_0" [label="[1, 96, 1, 1]", style=solid]; -"805 Constant_113716" -> "18 /features/features.2/conv/conv.0/conv.0.2/Clip/fq_output_0" [label="[1, 96, 1, 1]", style=solid]; +"802 Constant_41118" -> "18 /features/features.2/conv/conv.0/conv.0.2/Clip/fq_output_0" [label="[1, 96, 1, 1]", style=solid]; +"803 Constant_41117" -> "18 /features/features.2/conv/conv.0/conv.0.2/Clip/fq_output_0" [label="[1, 96, 1, 1]", style=solid]; +"804 Constant_41116" -> "18 /features/features.2/conv/conv.0/conv.0.2/Clip/fq_output_0" [label="[1, 96, 1, 1]", style=solid]; +"805 Constant_41115" -> "18 /features/features.2/conv/conv.0/conv.0.2/Clip/fq_output_0" [label="[1, 96, 1, 1]", style=solid]; "806 Reshape_220" -> "16 /features/features.2/conv/conv.0/conv.0.0/Conv" [label="[1, 96, 1, 1]", style=solid]; "807 /features/features.2/conv/conv.0/conv.0.0/Conv/WithoutBiases/fq_weights_1" -> "15 /features/features.2/conv/conv.0/conv.0.0/Conv/WithoutBiases" [label="[96, 16, 1, 1]", style=solid]; -"808 Constant_113714" -> "807 /features/features.2/conv/conv.0/conv.0.0/Conv/WithoutBiases/fq_weights_1" [label="[96, 1, 1, 1]", style=solid]; -"809 Constant_113713" -> "807 /features/features.2/conv/conv.0/conv.0.0/Conv/WithoutBiases/fq_weights_1" [label="[96, 1, 1, 1]", style=solid]; -"810 
Constant_113712" -> "807 /features/features.2/conv/conv.0/conv.0.0/Conv/WithoutBiases/fq_weights_1" [label="[96, 1, 1, 1]", style=solid]; -"811 Constant_113711" -> "807 /features/features.2/conv/conv.0/conv.0.0/Conv/WithoutBiases/fq_weights_1" [label="[96, 1, 1, 1]", style=solid]; +"808 Constant_41113" -> "807 /features/features.2/conv/conv.0/conv.0.0/Conv/WithoutBiases/fq_weights_1" [label="[96, 1, 1, 1]", style=solid]; +"809 Constant_41112" -> "807 /features/features.2/conv/conv.0/conv.0.0/Conv/WithoutBiases/fq_weights_1" [label="[96, 1, 1, 1]", style=solid]; +"810 Constant_41111" -> "807 /features/features.2/conv/conv.0/conv.0.0/Conv/WithoutBiases/fq_weights_1" [label="[96, 1, 1, 1]", style=solid]; +"811 Constant_41110" -> "807 /features/features.2/conv/conv.0/conv.0.0/Conv/WithoutBiases/fq_weights_1" [label="[96, 1, 1, 1]", style=solid]; "812 onnx^^Conv_547" -> "807 /features/features.2/conv/conv.0/conv.0.0/Conv/WithoutBiases/fq_weights_1" [label="[96, 16, 1, 1]", style=solid]; -"813 Constant_113709" -> "14 /features/features.1/conv/conv.1/Conv/fq_output_0" [label="[]", style=solid]; -"814 Constant_113708" -> "14 /features/features.1/conv/conv.1/Conv/fq_output_0" [label="[]", style=solid]; -"815 Constant_113707" -> "14 /features/features.1/conv/conv.1/Conv/fq_output_0" [label="[]", style=solid]; -"816 Constant_113706" -> "14 /features/features.1/conv/conv.1/Conv/fq_output_0" [label="[]", style=solid]; +"813 Constant_41108" -> "14 /features/features.1/conv/conv.1/Conv/fq_output_0" [label="[]", style=solid]; +"814 Constant_41107" -> "14 /features/features.1/conv/conv.1/Conv/fq_output_0" [label="[]", style=solid]; +"815 Constant_41106" -> "14 /features/features.1/conv/conv.1/Conv/fq_output_0" [label="[]", style=solid]; +"816 Constant_41105" -> "14 /features/features.1/conv/conv.1/Conv/fq_output_0" [label="[]", style=solid]; "817 Reshape_205" -> "13 /features/features.1/conv/conv.1/Conv" [label="[1, 16, 1, 1]", style=solid]; "818 /features/features.1/conv/conv.1/Conv/WithoutBiases/fq_weights_1" -> "12 /features/features.1/conv/conv.1/Conv/WithoutBiases" [label="[16, 32, 1, 1]", style=solid]; -"819 Constant_113704" -> "818 /features/features.1/conv/conv.1/Conv/WithoutBiases/fq_weights_1" [label="[16, 1, 1, 1]", style=solid]; -"820 Constant_113703" -> "818 /features/features.1/conv/conv.1/Conv/WithoutBiases/fq_weights_1" [label="[16, 1, 1, 1]", style=solid]; -"821 Constant_113702" -> "818 /features/features.1/conv/conv.1/Conv/WithoutBiases/fq_weights_1" [label="[16, 1, 1, 1]", style=solid]; -"822 Constant_113701" -> "818 /features/features.1/conv/conv.1/Conv/WithoutBiases/fq_weights_1" [label="[16, 1, 1, 1]", style=solid]; +"819 Constant_41103" -> "818 /features/features.1/conv/conv.1/Conv/WithoutBiases/fq_weights_1" [label="[16, 1, 1, 1]", style=solid]; +"820 Constant_41102" -> "818 /features/features.1/conv/conv.1/Conv/WithoutBiases/fq_weights_1" [label="[16, 1, 1, 1]", style=solid]; +"821 Constant_41101" -> "818 /features/features.1/conv/conv.1/Conv/WithoutBiases/fq_weights_1" [label="[16, 1, 1, 1]", style=solid]; +"822 Constant_41100" -> "818 /features/features.1/conv/conv.1/Conv/WithoutBiases/fq_weights_1" [label="[16, 1, 1, 1]", style=solid]; "823 onnx^^Conv_544" -> "818 /features/features.1/conv/conv.1/Conv/WithoutBiases/fq_weights_1" [label="[16, 32, 1, 1]", style=solid]; -"824 Constant_113699" -> "11 /features/features.1/conv/conv.0/conv.0.2/Clip/fq_output_0" [label="[]", style=solid]; -"825 Constant_113698" -> "11 /features/features.1/conv/conv.0/conv.0.2/Clip/fq_output_0" 
[label="[]", style=solid]; -"826 Constant_113697" -> "11 /features/features.1/conv/conv.0/conv.0.2/Clip/fq_output_0" [label="[]", style=solid]; -"827 Constant_113696" -> "11 /features/features.1/conv/conv.0/conv.0.2/Clip/fq_output_0" [label="[]", style=solid]; +"824 Constant_41098" -> "11 /features/features.1/conv/conv.0/conv.0.2/Clip/fq_output_0" [label="[]", style=solid]; +"825 Constant_41097" -> "11 /features/features.1/conv/conv.0/conv.0.2/Clip/fq_output_0" [label="[]", style=solid]; +"826 Constant_41096" -> "11 /features/features.1/conv/conv.0/conv.0.2/Clip/fq_output_0" [label="[]", style=solid]; +"827 Constant_41095" -> "11 /features/features.1/conv/conv.0/conv.0.2/Clip/fq_output_0" [label="[]", style=solid]; "828 Reshape_186" -> "9 /features/features.1/conv/conv.0/conv.0.0/Conv" [label="[1, 32, 1, 1]", style=solid]; "829 /features/features.1/conv/conv.0/conv.0.0/Conv/WithoutBiases/fq_weights_1" -> "8 /features/features.1/conv/conv.0/conv.0.0/Conv/WithoutBiases" [label="[32, 1, 1, 3, 3]", style=solid]; -"830 Constant_113694" -> "829 /features/features.1/conv/conv.0/conv.0.0/Conv/WithoutBiases/fq_weights_1" [label="[32, 1, 1, 1, 1]", style=solid]; -"831 Constant_113693" -> "829 /features/features.1/conv/conv.0/conv.0.0/Conv/WithoutBiases/fq_weights_1" [label="[32, 1, 1, 1, 1]", style=solid]; -"832 Constant_113692" -> "829 /features/features.1/conv/conv.0/conv.0.0/Conv/WithoutBiases/fq_weights_1" [label="[32, 1, 1, 1, 1]", style=solid]; -"833 Constant_113691" -> "829 /features/features.1/conv/conv.0/conv.0.0/Conv/WithoutBiases/fq_weights_1" [label="[32, 1, 1, 1, 1]", style=solid]; +"830 Constant_41093" -> "829 /features/features.1/conv/conv.0/conv.0.0/Conv/WithoutBiases/fq_weights_1" [label="[32, 1, 1, 1, 1]", style=solid]; +"831 Constant_41092" -> "829 /features/features.1/conv/conv.0/conv.0.0/Conv/WithoutBiases/fq_weights_1" [label="[32, 1, 1, 1, 1]", style=solid]; +"832 Constant_41091" -> "829 /features/features.1/conv/conv.0/conv.0.0/Conv/WithoutBiases/fq_weights_1" [label="[32, 1, 1, 1, 1]", style=solid]; +"833 Constant_41090" -> "829 /features/features.1/conv/conv.0/conv.0.0/Conv/WithoutBiases/fq_weights_1" [label="[32, 1, 1, 1, 1]", style=solid]; "834 Reshape_134" -> "829 /features/features.1/conv/conv.0/conv.0.0/Conv/WithoutBiases/fq_weights_1" [label="[32, 1, 1, 3, 3]", style=solid]; -"835 Constant_113689" -> "7 /features/features.0/features.0.2/Clip/fq_output_0" [label="[1, 32, 1, 1]", style=solid]; -"836 Constant_113688" -> "7 /features/features.0/features.0.2/Clip/fq_output_0" [label="[1, 32, 1, 1]", style=solid]; -"837 Constant_113687" -> "7 /features/features.0/features.0.2/Clip/fq_output_0" [label="[1, 32, 1, 1]", style=solid]; -"838 Constant_113686" -> "7 /features/features.0/features.0.2/Clip/fq_output_0" [label="[1, 32, 1, 1]", style=solid]; +"835 Constant_41088" -> "7 /features/features.0/features.0.2/Clip/fq_output_0" [label="[1, 32, 1, 1]", style=solid]; +"836 Constant_41087" -> "7 /features/features.0/features.0.2/Clip/fq_output_0" [label="[1, 32, 1, 1]", style=solid]; +"837 Constant_41086" -> "7 /features/features.0/features.0.2/Clip/fq_output_0" [label="[1, 32, 1, 1]", style=solid]; +"838 Constant_41085" -> "7 /features/features.0/features.0.2/Clip/fq_output_0" [label="[1, 32, 1, 1]", style=solid]; "839 Reshape_119" -> "5 /features/features.0/features.0.0/Conv" [label="[1, 32, 1, 1]", style=solid]; "840 /features/features.0/features.0.0/Conv/WithoutBiases/fq_weights_1" -> "4 /features/features.0/features.0.0/Conv/WithoutBiases" [label="[32, 3, 3, 3]", 
style=solid]; -"841 Constant_113684" -> "840 /features/features.0/features.0.0/Conv/WithoutBiases/fq_weights_1" [label="[32, 1, 1, 1]", style=solid]; -"842 Constant_113683" -> "840 /features/features.0/features.0.0/Conv/WithoutBiases/fq_weights_1" [label="[32, 1, 1, 1]", style=solid]; -"843 Constant_113682" -> "840 /features/features.0/features.0.0/Conv/WithoutBiases/fq_weights_1" [label="[32, 1, 1, 1]", style=solid]; -"844 Constant_113681" -> "840 /features/features.0/features.0.0/Conv/WithoutBiases/fq_weights_1" [label="[32, 1, 1, 1]", style=solid]; +"841 Constant_41083" -> "840 /features/features.0/features.0.0/Conv/WithoutBiases/fq_weights_1" [label="[32, 1, 1, 1]", style=solid]; +"842 Constant_41082" -> "840 /features/features.0/features.0.0/Conv/WithoutBiases/fq_weights_1" [label="[32, 1, 1, 1]", style=solid]; +"843 Constant_41081" -> "840 /features/features.0/features.0.0/Conv/WithoutBiases/fq_weights_1" [label="[32, 1, 1, 1]", style=solid]; +"844 Constant_41080" -> "840 /features/features.0/features.0.0/Conv/WithoutBiases/fq_weights_1" [label="[32, 1, 1, 1]", style=solid]; "845 Gather_5168" -> "840 /features/features.0/features.0.0/Conv/WithoutBiases/fq_weights_1" [label="[32, 3, 3, 3]", style=solid]; -"846 Constant_113679" -> "3 Divide_1885/fq_output_0" [label="[]", style=solid]; -"847 Constant_113678" -> "3 Divide_1885/fq_output_0" [label="[]", style=solid]; -"848 Constant_113677" -> "3 Divide_1885/fq_output_0" [label="[]", style=solid]; -"849 Constant_113676" -> "3 Divide_1885/fq_output_0" [label="[]", style=solid]; +"846 Constant_41078" -> "3 Divide_1885/fq_output_0" [label="[]", style=solid]; +"847 Constant_41077" -> "3 Divide_1885/fq_output_0" [label="[]", style=solid]; +"848 Constant_41076" -> "3 Divide_1885/fq_output_0" [label="[]", style=solid]; +"849 Constant_41075" -> "3 Divide_1885/fq_output_0" [label="[]", style=solid]; "850 Gather_5165" -> "2 Divide_1885" [label="[1, 3, 1, 1]", style=solid]; "851 Gather_5162" -> "1 Multiply_5095" [label="[1, 3, 1, 1]", style=solid]; } diff --git a/tests/openvino/native/data/reference_graphs/quantized/mobilenet-v3-small-1.0-224-tf.dot b/tests/openvino/native/data/reference_graphs/quantized/mobilenet-v3-small-1.0-224-tf.dot deleted file mode 100644 index f0f7cc2365b..00000000000 --- a/tests/openvino/native/data/reference_graphs/quantized/mobilenet-v3-small-1.0-224-tf.dot +++ /dev/null @@ -1,1968 +0,0 @@ -strict digraph { -"0 input_1" [id=0, type=Parameter]; -"1 Transpose_9545" [id=1, type=Transpose]; -"2 Transpose_9539" [id=2, type=Multiply]; -"3 Transpose_2342" [id=3, type=Add]; -"4 Transpose_2342/fq_output_0" [id=4, type=FakeQuantize]; -"5 Multiply_11760" [id=5, type=Convolution]; -"6 Transpose_6952" [id=6, type=Add]; -"7 Transpose_6970" [id=7, type=HSwish]; -"8 Transpose_6970/fq_output_0" [id=8, type=FakeQuantize]; -"9 Multiply_11774" [id=9, type=GroupConvolution]; -"10 Transpose_7019" [id=10, type=Add]; -"11 Relu_7020" [id=11, type=Relu]; -"12 Relu_7020/fq_output_0" [id=12, type=FakeQuantize]; -"13 Transpose_7025" [id=13, type=ReduceMean]; -"14 Transpose_7057" [id=14, type=Multiply]; -"15 Transpose_7025/fq_output_0" [id=15, type=FakeQuantize]; -"16 Transpose_7057/fq_output_0" [id=16, type=FakeQuantize]; -"17 Convolution_2431" [id=17, type=Convolution]; -"18 Multiply_11788" [id=18, type=Convolution]; -"19 Transpose_7031" [id=19, type=Add]; -"20 Transpose_7081" [id=20, type=Add]; -"21 Relu_7032" [id=21, type=Relu]; -"22 Transpose_7081/fq_output_0" [id=22, type=FakeQuantize]; -"23 Relu_7032/fq_output_0" [id=23, type=FakeQuantize]; -"24 
Multiply_11802" [id=24, type=Convolution]; -"25 Convolution_2440" [id=25, type=Convolution]; -"26 Transpose_7105" [id=26, type=Add]; -"27 Transpose_9591" [id=27, type=Add]; -"28 Relu_7106" [id=28, type=Relu]; -"29 Transpose_7055" [id=29, type=HSigmoid]; -"30 Relu_7106/fq_output_0" [id=30, type=FakeQuantize]; -"31 Transpose_7055/fq_output_0" [id=31, type=FakeQuantize]; -"32 Multiply_11816" [id=32, type=GroupConvolution]; -"33 Transpose_7156" [id=33, type=Add]; -"34 Relu_7157" [id=34, type=Relu]; -"35 Relu_7157/fq_output_0" [id=35, type=FakeQuantize]; -"36 Multiply_11830" [id=36, type=Convolution]; -"37 Transpose_7182" [id=37, type=Add]; -"38 Transpose_7182/fq_output_0" [id=38, type=FakeQuantize]; -"39 Multiply_11844" [id=39, type=Convolution]; -"40 Transpose_7262" [id=40, type=Add]; -"41 Transpose_7206" [id=41, type=Add]; -"42 Transpose_7262/fq_output_0" [id=42, type=FakeQuantize]; -"43 Relu_7207" [id=43, type=Relu]; -"44 Multiply_11886" [id=44, type=Convolution]; -"45 Relu_7207/fq_output_0" [id=45, type=FakeQuantize]; -"46 Transpose_7286" [id=46, type=Add]; -"47 Multiply_11858" [id=47, type=GroupConvolution]; -"48 Transpose_7304" [id=48, type=HSwish]; -"49 Transpose_7232" [id=49, type=Add]; -"50 Transpose_7304/fq_output_0" [id=50, type=FakeQuantize]; -"51 Relu_7233" [id=51, type=Relu]; -"52 Multiply_11900" [id=52, type=GroupConvolution]; -"53 Relu_7233/fq_output_0" [id=53, type=FakeQuantize]; -"54 Transpose_7353" [id=54, type=Add]; -"55 Multiply_11872" [id=55, type=Convolution]; -"56 Transpose_7371" [id=56, type=HSwish]; -"57 Transpose_7260" [id=57, type=Add]; -"58 Transpose_7371/fq_output_0" [id=58, type=FakeQuantize]; -"59 Transpose_7260/fq_output_0" [id=59, type=FakeQuantize]; -"60 Transpose_7375" [id=60, type=ReduceMean]; -"61 Transpose_7407" [id=61, type=Multiply]; -"62 Transpose_7375/fq_output_0" [id=62, type=FakeQuantize]; -"63 Transpose_7407/fq_output_0" [id=63, type=FakeQuantize]; -"64 Convolution_2758" [id=64, type=Convolution]; -"65 Multiply_11914" [id=65, type=Convolution]; -"66 Transpose_7381" [id=66, type=Add]; -"67 Transpose_7431" [id=67, type=Add]; -"68 Relu_7382" [id=68, type=Relu]; -"69 Transpose_7431/fq_output_0" [id=69, type=FakeQuantize]; -"70 Relu_7382/fq_output_0" [id=70, type=FakeQuantize]; -"71 Multiply_11928" [id=71, type=Convolution]; -"72 Transpose_7579" [id=72, type=Add]; -"73 Convolution_2767" [id=73, type=Convolution]; -"74 Transpose_7455" [id=74, type=Add]; -"75 Transpose_7579/fq_output_0" [id=75, type=FakeQuantize]; -"76 Transpose_9691" [id=76, type=Add]; -"77 Transpose_7473" [id=77, type=HSwish]; -"78 Multiply_11970" [id=78, type=Convolution]; -"79 Transpose_7727" [id=79, type=Add]; -"80 Transpose_7405" [id=80, type=HSigmoid]; -"81 Transpose_7473/fq_output_0" [id=81, type=FakeQuantize]; -"82 Transpose_7603" [id=82, type=Add]; -"83 Transpose_7727/fq_output_0" [id=83, type=FakeQuantize]; -"84 Transpose_7405/fq_output_0" [id=84, type=FakeQuantize]; -"85 Multiply_11942" [id=85, type=GroupConvolution]; -"86 Transpose_7621" [id=86, type=HSwish]; -"87 Multiply_12012" [id=87, type=Convolution]; -"88 Transpose_7497" [id=88, type=Add]; -"89 Transpose_7621/fq_output_0" [id=89, type=FakeQuantize]; -"90 Transpose_7751" [id=90, type=Add]; -"91 Transpose_7515" [id=91, type=HSwish]; -"92 Multiply_11984" [id=92, type=GroupConvolution]; -"93 Transpose_7769" [id=93, type=HSwish]; -"94 Transpose_7515/fq_output_0" [id=94, type=FakeQuantize]; -"95 Transpose_7645" [id=95, type=Add]; -"96 Transpose_7769/fq_output_0" [id=96, type=FakeQuantize]; -"97 Transpose_7519" [id=97, 
type=ReduceMean]; -"98 Transpose_7551" [id=98, type=Multiply]; -"99 Transpose_7663" [id=99, type=HSwish]; -"100 Multiply_12026" [id=100, type=GroupConvolution]; -"101 Transpose_7519/fq_output_0" [id=101, type=FakeQuantize]; -"102 Transpose_7551/fq_output_0" [id=102, type=FakeQuantize]; -"103 Transpose_7663/fq_output_0" [id=103, type=FakeQuantize]; -"104 Transpose_7793" [id=104, type=Add]; -"105 Convolution_2868" [id=105, type=Convolution]; -"106 Multiply_11956" [id=106, type=Convolution]; -"107 Transpose_7667" [id=107, type=ReduceMean]; -"108 Transpose_7699" [id=108, type=Multiply]; -"109 Transpose_7811" [id=109, type=HSwish]; -"110 Transpose_7525" [id=110, type=Add]; -"111 Transpose_7577" [id=111, type=Add]; -"112 Transpose_7667/fq_output_0" [id=112, type=FakeQuantize]; -"113 Transpose_7699/fq_output_0" [id=113, type=FakeQuantize]; -"114 Transpose_7811/fq_output_0" [id=114, type=FakeQuantize]; -"115 Relu_7526" [id=115, type=Relu]; -"116 Transpose_7577/fq_output_0" [id=116, type=FakeQuantize]; -"117 Convolution_2979" [id=117, type=Convolution]; -"118 Multiply_11998" [id=118, type=Convolution]; -"119 Transpose_7815" [id=119, type=ReduceMean]; -"120 Transpose_7847" [id=120, type=Multiply]; -"121 Relu_7526/fq_output_0" [id=121, type=FakeQuantize]; -"122 Transpose_7673" [id=122, type=Add]; -"123 Transpose_7725" [id=123, type=Add]; -"124 Transpose_7815/fq_output_0" [id=124, type=FakeQuantize]; -"125 Transpose_7847/fq_output_0" [id=125, type=FakeQuantize]; -"126 Convolution_2877" [id=126, type=Convolution]; -"127 Relu_7674" [id=127, type=Relu]; -"128 Transpose_7725/fq_output_0" [id=128, type=FakeQuantize]; -"129 Convolution_3090" [id=129, type=Convolution]; -"130 Multiply_12040" [id=130, type=Convolution]; -"131 Transpose_9759" [id=131, type=Add]; -"132 Relu_7674/fq_output_0" [id=132, type=FakeQuantize]; -"133 Transpose_7821" [id=133, type=Add]; -"134 Transpose_7871" [id=134, type=Add]; -"135 Transpose_7549" [id=135, type=HSigmoid]; -"136 Convolution_2988" [id=136, type=Convolution]; -"137 Relu_7822" [id=137, type=Relu]; -"138 Transpose_7871/fq_output_0" [id=138, type=FakeQuantize]; -"139 Transpose_7549/fq_output_0" [id=139, type=FakeQuantize]; -"140 Transpose_9859" [id=140, type=Add]; -"141 Relu_7822/fq_output_0" [id=141, type=FakeQuantize]; -"142 Multiply_12054" [id=142, type=Convolution]; -"143 Transpose_8019" [id=143, type=Add]; -"144 Transpose_7697" [id=144, type=HSigmoid]; -"145 Convolution_3099" [id=145, type=Convolution]; -"146 Transpose_7895" [id=146, type=Add]; -"147 Transpose_8019/fq_output_0" [id=147, type=FakeQuantize]; -"148 Transpose_7697/fq_output_0" [id=148, type=FakeQuantize]; -"149 Transpose_9959" [id=149, type=Add]; -"150 Transpose_7913" [id=150, type=HSwish]; -"151 Multiply_12096" [id=151, type=Convolution]; -"152 Transpose_7845" [id=152, type=HSigmoid]; -"153 Transpose_7913/fq_output_0" [id=153, type=FakeQuantize]; -"154 Transpose_8043" [id=154, type=Add]; -"155 Transpose_7845/fq_output_0" [id=155, type=FakeQuantize]; -"156 Multiply_12068" [id=156, type=GroupConvolution]; -"157 Transpose_8061" [id=157, type=HSwish]; -"158 Transpose_7937" [id=158, type=Add]; -"159 Transpose_8061/fq_output_0" [id=159, type=FakeQuantize]; -"160 Transpose_7955" [id=160, type=HSwish]; -"161 Multiply_12110" [id=161, type=GroupConvolution]; -"162 Transpose_7955/fq_output_0" [id=162, type=FakeQuantize]; -"163 Transpose_8110" [id=163, type=Add]; -"164 Transpose_7959" [id=164, type=ReduceMean]; -"165 Transpose_7991" [id=165, type=Multiply]; -"166 Transpose_8128" [id=166, type=HSwish]; -"167 
Transpose_7959/fq_output_0" [id=167, type=FakeQuantize]; -"168 Transpose_7991/fq_output_0" [id=168, type=FakeQuantize]; -"169 Transpose_8128/fq_output_0" [id=169, type=FakeQuantize]; -"170 Convolution_3200" [id=170, type=Convolution]; -"171 Multiply_12082" [id=171, type=Convolution]; -"172 Transpose_8132" [id=172, type=ReduceMean]; -"173 Transpose_8164" [id=173, type=Multiply]; -"174 Transpose_7965" [id=174, type=Add]; -"175 Transpose_8017" [id=175, type=Add]; -"176 Transpose_8132/fq_output_0" [id=176, type=FakeQuantize]; -"177 Transpose_8164/fq_output_0" [id=177, type=FakeQuantize]; -"178 Relu_7966" [id=178, type=Relu]; -"179 Transpose_8017/fq_output_0" [id=179, type=FakeQuantize]; -"180 Convolution_3337" [id=180, type=Convolution]; -"181 Multiply_12124" [id=181, type=Convolution]; -"182 Relu_7966/fq_output_0" [id=182, type=FakeQuantize]; -"183 Transpose_8138" [id=183, type=Add]; -"184 Transpose_8188" [id=184, type=Add]; -"185 Convolution_3209" [id=185, type=Convolution]; -"186 Relu_8139" [id=186, type=Relu]; -"187 Transpose_8188/fq_output_0" [id=187, type=FakeQuantize]; -"188 Transpose_10027" [id=188, type=Add]; -"189 Relu_8139/fq_output_0" [id=189, type=FakeQuantize]; -"190 Multiply_12138" [id=190, type=Convolution]; -"191 Transpose_8336" [id=191, type=Add]; -"192 Transpose_7989" [id=192, type=HSigmoid]; -"193 Convolution_3346" [id=193, type=Convolution]; -"194 Transpose_8212" [id=194, type=Add]; -"195 Transpose_8336/fq_output_0" [id=195, type=FakeQuantize]; -"196 Transpose_7989/fq_output_0" [id=196, type=FakeQuantize]; -"197 Transpose_10127" [id=197, type=Add]; -"198 Transpose_8230" [id=198, type=HSwish]; -"199 Multiply_12180" [id=199, type=Convolution]; -"200 Transpose_8484" [id=200, type=Add]; -"201 Transpose_8162" [id=201, type=HSigmoid]; -"202 Transpose_8230/fq_output_0" [id=202, type=FakeQuantize]; -"203 Transpose_8360" [id=203, type=Add]; -"204 Transpose_8484/fq_output_0" [id=204, type=FakeQuantize]; -"205 Transpose_8162/fq_output_0" [id=205, type=FakeQuantize]; -"206 Multiply_12152" [id=206, type=GroupConvolution]; -"207 Transpose_8378" [id=207, type=HSwish]; -"208 Multiply_12222" [id=208, type=Convolution]; -"209 Transpose_8254" [id=209, type=Add]; -"210 Transpose_8378/fq_output_0" [id=210, type=FakeQuantize]; -"211 Transpose_8508" [id=211, type=Add]; -"212 Transpose_8272" [id=212, type=HSwish]; -"213 Multiply_12194" [id=213, type=GroupConvolution]; -"214 Transpose_8526" [id=214, type=HSwish]; -"215 Transpose_8272/fq_output_0" [id=215, type=FakeQuantize]; -"216 Transpose_8402" [id=216, type=Add]; -"217 Transpose_8526/fq_output_0" [id=217, type=FakeQuantize]; -"218 Transpose_8276" [id=218, type=ReduceMean]; -"219 Transpose_8308" [id=219, type=Multiply]; -"220 Transpose_8420" [id=220, type=HSwish]; -"221 Transpose_8530" [id=221, type=ReduceMean]; -"222 Transpose_8276/fq_output_0" [id=222, type=FakeQuantize]; -"223 Transpose_8308/fq_output_0" [id=223, type=FakeQuantize]; -"224 Transpose_8420/fq_output_0" [id=224, type=FakeQuantize]; -"225 Transpose_8530/fq_output_0" [id=225, type=FakeQuantize]; -"226 Convolution_3447" [id=226, type=Convolution]; -"227 Multiply_12166" [id=227, type=Convolution]; -"228 Transpose_8424" [id=228, type=ReduceMean]; -"229 Transpose_8456" [id=229, type=Multiply]; -"230 Convolution_3637" [id=230, type=Convolution]; -"231 Transpose_8282" [id=231, type=Add]; -"232 Transpose_8334" [id=232, type=Add]; -"233 Transpose_8424/fq_output_0" [id=233, type=FakeQuantize]; -"234 Transpose_8456/fq_output_0" [id=234, type=FakeQuantize]; -"235 Transpose_8536" [id=235, 
type=Add]; -"236 Relu_8283" [id=236, type=Relu]; -"237 Transpose_8334/fq_output_0" [id=237, type=FakeQuantize]; -"238 Convolution_3558" [id=238, type=Convolution]; -"239 Multiply_12208" [id=239, type=Convolution]; -"240 Transpose_8554" [id=240, type=HSwish]; -"241 Relu_8283/fq_output_0" [id=241, type=FakeQuantize]; -"242 Transpose_8430" [id=242, type=Add]; -"243 Transpose_8482" [id=243, type=Add]; -"244 Transpose_8554/fq_output_0" [id=244, type=FakeQuantize]; -"245 Convolution_3456" [id=245, type=Convolution]; -"246 Relu_8431" [id=246, type=Relu]; -"247 Transpose_8482/fq_output_0" [id=247, type=FakeQuantize]; -"248 Convolution_3649" [id=248, type=Convolution]; -"249 Transpose_10195" [id=249, type=Add]; -"250 Relu_8431/fq_output_0" [id=250, type=FakeQuantize]; -"251 Convolution_3649/fq_output_0" [id=251, type=FakeQuantize]; -"252 Transpose_8306" [id=252, type=HSigmoid]; -"253 Convolution_3567" [id=253, type=Convolution]; -"254 Transpose_10375" [id=254, type=Reshape]; -"255 Transpose_8306/fq_output_0" [id=255, type=FakeQuantize]; -"256 Transpose_10295" [id=256, type=Add]; -"257 MobilenetV3small/Logits/BiasAdd" [id=257, type=Add]; -"258 Transpose_8454" [id=258, type=HSigmoid]; -"259 MobilenetV3small/flatten/Reshape" [id=259, type=Reshape]; -"260 Transpose_8454/fq_output_0" [id=260, type=FakeQuantize]; -"261 MobilenetV3small/Predictions/Softmax" [id=261, type=Softmax]; -"262 Predictions" [id=262, type=Result]; -"263 MobilenetV3small/flatten/Const" [id=263, type=Constant]; -"264 Transpose_10377" [id=264, type=Constant]; -"265 Constant_11480" [id=265, type=Constant]; -"266 Constant_15344" [id=266, type=Constant]; -"267 Constant_15343" [id=267, type=Constant]; -"268 Constant_15342" [id=268, type=Constant]; -"269 Constant_15341" [id=269, type=Constant]; -"270 Convolution_3649/fq_weights_1" [id=270, type=FakeQuantize]; -"271 Constant_15349" [id=271, type=Constant]; -"272 Constant_15348" [id=272, type=Constant]; -"273 Constant_15347" [id=273, type=Constant]; -"274 Constant_15346" [id=274, type=Constant]; -"275 Transpose_3648" [id=275, type=Constant]; -"276 Constant_15339" [id=276, type=Constant]; -"277 Constant_15338" [id=277, type=Constant]; -"278 Constant_15337" [id=278, type=Constant]; -"279 Constant_15336" [id=279, type=Constant]; -"280 Transpose_8534" [id=280, type=Constant]; -"281 Convolution_3637/fq_weights_1" [id=281, type=FakeQuantize]; -"282 Constant_15334" [id=282, type=Constant]; -"283 Constant_15333" [id=283, type=Constant]; -"284 Constant_15332" [id=284, type=Constant]; -"285 Constant_15331" [id=285, type=Constant]; -"286 Transpose_3636" [id=286, type=Constant]; -"287 Constant_15329" [id=287, type=Constant]; -"288 Constant_15328" [id=288, type=Constant]; -"289 Constant_15327" [id=289, type=Constant]; -"290 Constant_15326" [id=290, type=Constant]; -"291 Constant_8528" [id=291, type=Constant]; -"292 Constant_15324" [id=292, type=Constant]; -"293 Constant_15323" [id=293, type=Constant]; -"294 Constant_15322" [id=294, type=Constant]; -"295 Constant_15321" [id=295, type=Constant]; -"296 Constant_12230" [id=296, type=Constant]; -"297 Multiply_12222/fq_weights_1" [id=297, type=FakeQuantize]; -"298 Constant_15319" [id=298, type=Constant]; -"299 Constant_15318" [id=299, type=Constant]; -"300 Constant_15317" [id=300, type=Constant]; -"301 Constant_15316" [id=301, type=Constant]; -"302 Multiply_12423" [id=302, type=Constant]; -"303 Constant_15314" [id=303, type=Constant]; -"304 Constant_15313" [id=304, type=Constant]; -"305 Constant_15312" [id=305, type=Constant]; -"306 Constant_15311" [id=306, 
type=Constant]; -"307 Constant_15309" [id=307, type=Constant]; -"308 Constant_15308" [id=308, type=Constant]; -"309 Constant_15307" [id=309, type=Constant]; -"310 Constant_15306" [id=310, type=Constant]; -"311 Constant_12216" [id=311, type=Constant]; -"312 Multiply_12208/fq_weights_1" [id=312, type=FakeQuantize]; -"313 Constant_15304" [id=313, type=Constant]; -"314 Constant_15303" [id=314, type=Constant]; -"315 Constant_15302" [id=315, type=Constant]; -"316 Constant_15301" [id=316, type=Constant]; -"317 Multiply_12417" [id=317, type=Constant]; -"318 Constant_15299" [id=318, type=Constant]; -"319 Constant_15298" [id=319, type=Constant]; -"320 Constant_15297" [id=320, type=Constant]; -"321 Constant_15296" [id=321, type=Constant]; -"322 Constant_15294" [id=322, type=Constant]; -"323 Constant_15293" [id=323, type=Constant]; -"324 Constant_15292" [id=324, type=Constant]; -"325 Constant_15291" [id=325, type=Constant]; -"326 Transpose_8436" [id=326, type=Constant]; -"327 Convolution_3567/fq_weights_1" [id=327, type=FakeQuantize]; -"328 Constant_15289" [id=328, type=Constant]; -"329 Constant_15288" [id=329, type=Constant]; -"330 Constant_15287" [id=330, type=Constant]; -"331 Constant_15286" [id=331, type=Constant]; -"332 Transpose_3566" [id=332, type=Constant]; -"333 Constant_15284" [id=333, type=Constant]; -"334 Constant_15283" [id=334, type=Constant]; -"335 Constant_15282" [id=335, type=Constant]; -"336 Constant_15281" [id=336, type=Constant]; -"337 Transpose_8428" [id=337, type=Constant]; -"338 Convolution_3558/fq_weights_1" [id=338, type=FakeQuantize]; -"339 Constant_15279" [id=339, type=Constant]; -"340 Constant_15278" [id=340, type=Constant]; -"341 Constant_15277" [id=341, type=Constant]; -"342 Constant_15276" [id=342, type=Constant]; -"343 Transpose_3557" [id=343, type=Constant]; -"344 Constant_15274" [id=344, type=Constant]; -"345 Constant_15273" [id=345, type=Constant]; -"346 Constant_15272" [id=346, type=Constant]; -"347 Constant_15271" [id=347, type=Constant]; -"348 Constant_8422" [id=348, type=Constant]; -"349 Constant_15269" [id=349, type=Constant]; -"350 Constant_15268" [id=350, type=Constant]; -"351 Constant_15267" [id=351, type=Constant]; -"352 Constant_15266" [id=352, type=Constant]; -"353 Constant_12202" [id=353, type=Constant]; -"354 Multiply_12194/fq_weights_1" [id=354, type=FakeQuantize]; -"355 Constant_15264" [id=355, type=Constant]; -"356 Constant_15263" [id=356, type=Constant]; -"357 Constant_15262" [id=357, type=Constant]; -"358 Constant_15261" [id=358, type=Constant]; -"359 Multiply_12412" [id=359, type=Constant]; -"360 Constant_15259" [id=360, type=Constant]; -"361 Constant_15258" [id=361, type=Constant]; -"362 Constant_15257" [id=362, type=Constant]; -"363 Constant_15256" [id=363, type=Constant]; -"364 Constant_12188" [id=364, type=Constant]; -"365 Multiply_12180/fq_weights_1" [id=365, type=FakeQuantize]; -"366 Constant_15254" [id=366, type=Constant]; -"367 Constant_15253" [id=367, type=Constant]; -"368 Constant_15252" [id=368, type=Constant]; -"369 Constant_15251" [id=369, type=Constant]; -"370 Multiply_12406" [id=370, type=Constant]; -"371 Constant_15249" [id=371, type=Constant]; -"372 Constant_15248" [id=372, type=Constant]; -"373 Constant_15247" [id=373, type=Constant]; -"374 Constant_15246" [id=374, type=Constant]; -"375 Constant_15244" [id=375, type=Constant]; -"376 Constant_15243" [id=376, type=Constant]; -"377 Constant_15242" [id=377, type=Constant]; -"378 Constant_15241" [id=378, type=Constant]; -"379 Constant_12174" [id=379, type=Constant]; -"380 
Multiply_12166/fq_weights_1" [id=380, type=FakeQuantize]; -"381 Constant_15239" [id=381, type=Constant]; -"382 Constant_15238" [id=382, type=Constant]; -"383 Constant_15237" [id=383, type=Constant]; -"384 Constant_15236" [id=384, type=Constant]; -"385 Multiply_12400" [id=385, type=Constant]; -"386 Constant_15234" [id=386, type=Constant]; -"387 Constant_15233" [id=387, type=Constant]; -"388 Constant_15232" [id=388, type=Constant]; -"389 Constant_15231" [id=389, type=Constant]; -"390 Constant_15229" [id=390, type=Constant]; -"391 Constant_15228" [id=391, type=Constant]; -"392 Constant_15227" [id=392, type=Constant]; -"393 Constant_15226" [id=393, type=Constant]; -"394 Transpose_8288" [id=394, type=Constant]; -"395 Convolution_3456/fq_weights_1" [id=395, type=FakeQuantize]; -"396 Constant_15224" [id=396, type=Constant]; -"397 Constant_15223" [id=397, type=Constant]; -"398 Constant_15222" [id=398, type=Constant]; -"399 Constant_15221" [id=399, type=Constant]; -"400 Transpose_3455" [id=400, type=Constant]; -"401 Constant_15219" [id=401, type=Constant]; -"402 Constant_15218" [id=402, type=Constant]; -"403 Constant_15217" [id=403, type=Constant]; -"404 Constant_15216" [id=404, type=Constant]; -"405 Transpose_8280" [id=405, type=Constant]; -"406 Convolution_3447/fq_weights_1" [id=406, type=FakeQuantize]; -"407 Constant_15214" [id=407, type=Constant]; -"408 Constant_15213" [id=408, type=Constant]; -"409 Constant_15212" [id=409, type=Constant]; -"410 Constant_15211" [id=410, type=Constant]; -"411 Transpose_3446" [id=411, type=Constant]; -"412 Constant_15209" [id=412, type=Constant]; -"413 Constant_15208" [id=413, type=Constant]; -"414 Constant_15207" [id=414, type=Constant]; -"415 Constant_15206" [id=415, type=Constant]; -"416 Constant_8274" [id=416, type=Constant]; -"417 Constant_15204" [id=417, type=Constant]; -"418 Constant_15203" [id=418, type=Constant]; -"419 Constant_15202" [id=419, type=Constant]; -"420 Constant_15201" [id=420, type=Constant]; -"421 Constant_12160" [id=421, type=Constant]; -"422 Multiply_12152/fq_weights_1" [id=422, type=FakeQuantize]; -"423 Constant_15199" [id=423, type=Constant]; -"424 Constant_15198" [id=424, type=Constant]; -"425 Constant_15197" [id=425, type=Constant]; -"426 Constant_15196" [id=426, type=Constant]; -"427 Multiply_12395" [id=427, type=Constant]; -"428 Constant_15194" [id=428, type=Constant]; -"429 Constant_15193" [id=429, type=Constant]; -"430 Constant_15192" [id=430, type=Constant]; -"431 Constant_15191" [id=431, type=Constant]; -"432 Constant_12146" [id=432, type=Constant]; -"433 Multiply_12138/fq_weights_1" [id=433, type=FakeQuantize]; -"434 Constant_15189" [id=434, type=Constant]; -"435 Constant_15188" [id=435, type=Constant]; -"436 Constant_15187" [id=436, type=Constant]; -"437 Constant_15186" [id=437, type=Constant]; -"438 Multiply_12389" [id=438, type=Constant]; -"439 Constant_15184" [id=439, type=Constant]; -"440 Constant_15183" [id=440, type=Constant]; -"441 Constant_15182" [id=441, type=Constant]; -"442 Constant_15181" [id=442, type=Constant]; -"443 Constant_12132" [id=443, type=Constant]; -"444 Multiply_12124/fq_weights_1" [id=444, type=FakeQuantize]; -"445 Constant_15179" [id=445, type=Constant]; -"446 Constant_15178" [id=446, type=Constant]; -"447 Constant_15177" [id=447, type=Constant]; -"448 Constant_15176" [id=448, type=Constant]; -"449 Multiply_12383" [id=449, type=Constant]; -"450 Constant_15174" [id=450, type=Constant]; -"451 Constant_15173" [id=451, type=Constant]; -"452 Constant_15172" [id=452, type=Constant]; -"453 Constant_15171" 
[id=453, type=Constant]; -"454 Constant_15169" [id=454, type=Constant]; -"455 Constant_15168" [id=455, type=Constant]; -"456 Constant_15167" [id=456, type=Constant]; -"457 Constant_15166" [id=457, type=Constant]; -"458 Transpose_8144" [id=458, type=Constant]; -"459 Convolution_3346/fq_weights_1" [id=459, type=FakeQuantize]; -"460 Constant_15164" [id=460, type=Constant]; -"461 Constant_15163" [id=461, type=Constant]; -"462 Constant_15162" [id=462, type=Constant]; -"463 Constant_15161" [id=463, type=Constant]; -"464 Transpose_3345" [id=464, type=Constant]; -"465 Constant_15159" [id=465, type=Constant]; -"466 Constant_15158" [id=466, type=Constant]; -"467 Constant_15157" [id=467, type=Constant]; -"468 Constant_15156" [id=468, type=Constant]; -"469 Transpose_8136" [id=469, type=Constant]; -"470 Convolution_3337/fq_weights_1" [id=470, type=FakeQuantize]; -"471 Constant_15154" [id=471, type=Constant]; -"472 Constant_15153" [id=472, type=Constant]; -"473 Constant_15152" [id=473, type=Constant]; -"474 Constant_15151" [id=474, type=Constant]; -"475 Transpose_3336" [id=475, type=Constant]; -"476 Constant_15149" [id=476, type=Constant]; -"477 Constant_15148" [id=477, type=Constant]; -"478 Constant_15147" [id=478, type=Constant]; -"479 Constant_15146" [id=479, type=Constant]; -"480 Constant_8130" [id=480, type=Constant]; -"481 Constant_15144" [id=481, type=Constant]; -"482 Constant_15143" [id=482, type=Constant]; -"483 Constant_15142" [id=483, type=Constant]; -"484 Constant_15141" [id=484, type=Constant]; -"485 Constant_12118" [id=485, type=Constant]; -"486 Multiply_12110/fq_weights_1" [id=486, type=FakeQuantize]; -"487 Constant_15139" [id=487, type=Constant]; -"488 Constant_15138" [id=488, type=Constant]; -"489 Constant_15137" [id=489, type=Constant]; -"490 Constant_15136" [id=490, type=Constant]; -"491 Multiply_12378" [id=491, type=Constant]; -"492 Constant_15134" [id=492, type=Constant]; -"493 Constant_15133" [id=493, type=Constant]; -"494 Constant_15132" [id=494, type=Constant]; -"495 Constant_15131" [id=495, type=Constant]; -"496 Constant_12104" [id=496, type=Constant]; -"497 Multiply_12096/fq_weights_1" [id=497, type=FakeQuantize]; -"498 Constant_15129" [id=498, type=Constant]; -"499 Constant_15128" [id=499, type=Constant]; -"500 Constant_15127" [id=500, type=Constant]; -"501 Constant_15126" [id=501, type=Constant]; -"502 Multiply_12372" [id=502, type=Constant]; -"503 Constant_15124" [id=503, type=Constant]; -"504 Constant_15123" [id=504, type=Constant]; -"505 Constant_15122" [id=505, type=Constant]; -"506 Constant_15121" [id=506, type=Constant]; -"507 Constant_15119" [id=507, type=Constant]; -"508 Constant_15118" [id=508, type=Constant]; -"509 Constant_15117" [id=509, type=Constant]; -"510 Constant_15116" [id=510, type=Constant]; -"511 Constant_12090" [id=511, type=Constant]; -"512 Multiply_12082/fq_weights_1" [id=512, type=FakeQuantize]; -"513 Constant_15114" [id=513, type=Constant]; -"514 Constant_15113" [id=514, type=Constant]; -"515 Constant_15112" [id=515, type=Constant]; -"516 Constant_15111" [id=516, type=Constant]; -"517 Multiply_12366" [id=517, type=Constant]; -"518 Constant_15109" [id=518, type=Constant]; -"519 Constant_15108" [id=519, type=Constant]; -"520 Constant_15107" [id=520, type=Constant]; -"521 Constant_15106" [id=521, type=Constant]; -"522 Constant_15104" [id=522, type=Constant]; -"523 Constant_15103" [id=523, type=Constant]; -"524 Constant_15102" [id=524, type=Constant]; -"525 Constant_15101" [id=525, type=Constant]; -"526 Transpose_7971" [id=526, type=Constant]; -"527 
Convolution_3209/fq_weights_1" [id=527, type=FakeQuantize]; -"528 Constant_15099" [id=528, type=Constant]; -"529 Constant_15098" [id=529, type=Constant]; -"530 Constant_15097" [id=530, type=Constant]; -"531 Constant_15096" [id=531, type=Constant]; -"532 Transpose_3208" [id=532, type=Constant]; -"533 Constant_15094" [id=533, type=Constant]; -"534 Constant_15093" [id=534, type=Constant]; -"535 Constant_15092" [id=535, type=Constant]; -"536 Constant_15091" [id=536, type=Constant]; -"537 Transpose_7963" [id=537, type=Constant]; -"538 Convolution_3200/fq_weights_1" [id=538, type=FakeQuantize]; -"539 Constant_15089" [id=539, type=Constant]; -"540 Constant_15088" [id=540, type=Constant]; -"541 Constant_15087" [id=541, type=Constant]; -"542 Constant_15086" [id=542, type=Constant]; -"543 Transpose_3199" [id=543, type=Constant]; -"544 Constant_15084" [id=544, type=Constant]; -"545 Constant_15083" [id=545, type=Constant]; -"546 Constant_15082" [id=546, type=Constant]; -"547 Constant_15081" [id=547, type=Constant]; -"548 Constant_7957" [id=548, type=Constant]; -"549 Constant_15079" [id=549, type=Constant]; -"550 Constant_15078" [id=550, type=Constant]; -"551 Constant_15077" [id=551, type=Constant]; -"552 Constant_15076" [id=552, type=Constant]; -"553 Constant_12076" [id=553, type=Constant]; -"554 Multiply_12068/fq_weights_1" [id=554, type=FakeQuantize]; -"555 Constant_15074" [id=555, type=Constant]; -"556 Constant_15073" [id=556, type=Constant]; -"557 Constant_15072" [id=557, type=Constant]; -"558 Constant_15071" [id=558, type=Constant]; -"559 Multiply_12361" [id=559, type=Constant]; -"560 Constant_15069" [id=560, type=Constant]; -"561 Constant_15068" [id=561, type=Constant]; -"562 Constant_15067" [id=562, type=Constant]; -"563 Constant_15066" [id=563, type=Constant]; -"564 Constant_12062" [id=564, type=Constant]; -"565 Multiply_12054/fq_weights_1" [id=565, type=FakeQuantize]; -"566 Constant_15064" [id=566, type=Constant]; -"567 Constant_15063" [id=567, type=Constant]; -"568 Constant_15062" [id=568, type=Constant]; -"569 Constant_15061" [id=569, type=Constant]; -"570 Multiply_12355" [id=570, type=Constant]; -"571 Constant_15059" [id=571, type=Constant]; -"572 Constant_15058" [id=572, type=Constant]; -"573 Constant_15057" [id=573, type=Constant]; -"574 Constant_15056" [id=574, type=Constant]; -"575 Constant_12048" [id=575, type=Constant]; -"576 Multiply_12040/fq_weights_1" [id=576, type=FakeQuantize]; -"577 Constant_15054" [id=577, type=Constant]; -"578 Constant_15053" [id=578, type=Constant]; -"579 Constant_15052" [id=579, type=Constant]; -"580 Constant_15051" [id=580, type=Constant]; -"581 Multiply_12349" [id=581, type=Constant]; -"582 Constant_15049" [id=582, type=Constant]; -"583 Constant_15048" [id=583, type=Constant]; -"584 Constant_15047" [id=584, type=Constant]; -"585 Constant_15046" [id=585, type=Constant]; -"586 Constant_15044" [id=586, type=Constant]; -"587 Constant_15043" [id=587, type=Constant]; -"588 Constant_15042" [id=588, type=Constant]; -"589 Constant_15041" [id=589, type=Constant]; -"590 Transpose_7827" [id=590, type=Constant]; -"591 Convolution_3099/fq_weights_1" [id=591, type=FakeQuantize]; -"592 Constant_15039" [id=592, type=Constant]; -"593 Constant_15038" [id=593, type=Constant]; -"594 Constant_15037" [id=594, type=Constant]; -"595 Constant_15036" [id=595, type=Constant]; -"596 Transpose_3098" [id=596, type=Constant]; -"597 Constant_15034" [id=597, type=Constant]; -"598 Constant_15033" [id=598, type=Constant]; -"599 Constant_15032" [id=599, type=Constant]; -"600 Constant_15031" 
[id=600, type=Constant]; -"601 Transpose_7819" [id=601, type=Constant]; -"602 Convolution_3090/fq_weights_1" [id=602, type=FakeQuantize]; -"603 Constant_15029" [id=603, type=Constant]; -"604 Constant_15028" [id=604, type=Constant]; -"605 Constant_15027" [id=605, type=Constant]; -"606 Constant_15026" [id=606, type=Constant]; -"607 Transpose_3089" [id=607, type=Constant]; -"608 Constant_15024" [id=608, type=Constant]; -"609 Constant_15023" [id=609, type=Constant]; -"610 Constant_15022" [id=610, type=Constant]; -"611 Constant_15021" [id=611, type=Constant]; -"612 Constant_7813" [id=612, type=Constant]; -"613 Constant_15019" [id=613, type=Constant]; -"614 Constant_15018" [id=614, type=Constant]; -"615 Constant_15017" [id=615, type=Constant]; -"616 Constant_15016" [id=616, type=Constant]; -"617 Constant_12034" [id=617, type=Constant]; -"618 Multiply_12026/fq_weights_1" [id=618, type=FakeQuantize]; -"619 Constant_15014" [id=619, type=Constant]; -"620 Constant_15013" [id=620, type=Constant]; -"621 Constant_15012" [id=621, type=Constant]; -"622 Constant_15011" [id=622, type=Constant]; -"623 Multiply_12344" [id=623, type=Constant]; -"624 Constant_15009" [id=624, type=Constant]; -"625 Constant_15008" [id=625, type=Constant]; -"626 Constant_15007" [id=626, type=Constant]; -"627 Constant_15006" [id=627, type=Constant]; -"628 Constant_12020" [id=628, type=Constant]; -"629 Multiply_12012/fq_weights_1" [id=629, type=FakeQuantize]; -"630 Constant_15004" [id=630, type=Constant]; -"631 Constant_15003" [id=631, type=Constant]; -"632 Constant_15002" [id=632, type=Constant]; -"633 Constant_15001" [id=633, type=Constant]; -"634 Multiply_12338" [id=634, type=Constant]; -"635 Constant_14999" [id=635, type=Constant]; -"636 Constant_14998" [id=636, type=Constant]; -"637 Constant_14997" [id=637, type=Constant]; -"638 Constant_14996" [id=638, type=Constant]; -"639 Constant_14994" [id=639, type=Constant]; -"640 Constant_14993" [id=640, type=Constant]; -"641 Constant_14992" [id=641, type=Constant]; -"642 Constant_14991" [id=642, type=Constant]; -"643 Constant_12006" [id=643, type=Constant]; -"644 Multiply_11998/fq_weights_1" [id=644, type=FakeQuantize]; -"645 Constant_14989" [id=645, type=Constant]; -"646 Constant_14988" [id=646, type=Constant]; -"647 Constant_14987" [id=647, type=Constant]; -"648 Constant_14986" [id=648, type=Constant]; -"649 Multiply_12332" [id=649, type=Constant]; -"650 Constant_14984" [id=650, type=Constant]; -"651 Constant_14983" [id=651, type=Constant]; -"652 Constant_14982" [id=652, type=Constant]; -"653 Constant_14981" [id=653, type=Constant]; -"654 Constant_14979" [id=654, type=Constant]; -"655 Constant_14978" [id=655, type=Constant]; -"656 Constant_14977" [id=656, type=Constant]; -"657 Constant_14976" [id=657, type=Constant]; -"658 Transpose_7679" [id=658, type=Constant]; -"659 Convolution_2988/fq_weights_1" [id=659, type=FakeQuantize]; -"660 Constant_14974" [id=660, type=Constant]; -"661 Constant_14973" [id=661, type=Constant]; -"662 Constant_14972" [id=662, type=Constant]; -"663 Constant_14971" [id=663, type=Constant]; -"664 Transpose_2987" [id=664, type=Constant]; -"665 Constant_14969" [id=665, type=Constant]; -"666 Constant_14968" [id=666, type=Constant]; -"667 Constant_14967" [id=667, type=Constant]; -"668 Constant_14966" [id=668, type=Constant]; -"669 Transpose_7671" [id=669, type=Constant]; -"670 Convolution_2979/fq_weights_1" [id=670, type=FakeQuantize]; -"671 Constant_14964" [id=671, type=Constant]; -"672 Constant_14963" [id=672, type=Constant]; -"673 Constant_14962" [id=673, 
type=Constant]; -"674 Constant_14961" [id=674, type=Constant]; -"675 Transpose_2978" [id=675, type=Constant]; -"676 Constant_14959" [id=676, type=Constant]; -"677 Constant_14958" [id=677, type=Constant]; -"678 Constant_14957" [id=678, type=Constant]; -"679 Constant_14956" [id=679, type=Constant]; -"680 Constant_7665" [id=680, type=Constant]; -"681 Constant_14954" [id=681, type=Constant]; -"682 Constant_14953" [id=682, type=Constant]; -"683 Constant_14952" [id=683, type=Constant]; -"684 Constant_14951" [id=684, type=Constant]; -"685 Constant_11992" [id=685, type=Constant]; -"686 Multiply_11984/fq_weights_1" [id=686, type=FakeQuantize]; -"687 Constant_14949" [id=687, type=Constant]; -"688 Constant_14948" [id=688, type=Constant]; -"689 Constant_14947" [id=689, type=Constant]; -"690 Constant_14946" [id=690, type=Constant]; -"691 Multiply_12327" [id=691, type=Constant]; -"692 Constant_14944" [id=692, type=Constant]; -"693 Constant_14943" [id=693, type=Constant]; -"694 Constant_14942" [id=694, type=Constant]; -"695 Constant_14941" [id=695, type=Constant]; -"696 Constant_11978" [id=696, type=Constant]; -"697 Multiply_11970/fq_weights_1" [id=697, type=FakeQuantize]; -"698 Constant_14939" [id=698, type=Constant]; -"699 Constant_14938" [id=699, type=Constant]; -"700 Constant_14937" [id=700, type=Constant]; -"701 Constant_14936" [id=701, type=Constant]; -"702 Multiply_12321" [id=702, type=Constant]; -"703 Constant_14934" [id=703, type=Constant]; -"704 Constant_14933" [id=704, type=Constant]; -"705 Constant_14932" [id=705, type=Constant]; -"706 Constant_14931" [id=706, type=Constant]; -"707 Constant_14929" [id=707, type=Constant]; -"708 Constant_14928" [id=708, type=Constant]; -"709 Constant_14927" [id=709, type=Constant]; -"710 Constant_14926" [id=710, type=Constant]; -"711 Constant_11964" [id=711, type=Constant]; -"712 Multiply_11956/fq_weights_1" [id=712, type=FakeQuantize]; -"713 Constant_14924" [id=713, type=Constant]; -"714 Constant_14923" [id=714, type=Constant]; -"715 Constant_14922" [id=715, type=Constant]; -"716 Constant_14921" [id=716, type=Constant]; -"717 Multiply_12315" [id=717, type=Constant]; -"718 Constant_14919" [id=718, type=Constant]; -"719 Constant_14918" [id=719, type=Constant]; -"720 Constant_14917" [id=720, type=Constant]; -"721 Constant_14916" [id=721, type=Constant]; -"722 Constant_14914" [id=722, type=Constant]; -"723 Constant_14913" [id=723, type=Constant]; -"724 Constant_14912" [id=724, type=Constant]; -"725 Constant_14911" [id=725, type=Constant]; -"726 Transpose_7531" [id=726, type=Constant]; -"727 Convolution_2877/fq_weights_1" [id=727, type=FakeQuantize]; -"728 Constant_14909" [id=728, type=Constant]; -"729 Constant_14908" [id=729, type=Constant]; -"730 Constant_14907" [id=730, type=Constant]; -"731 Constant_14906" [id=731, type=Constant]; -"732 Transpose_2876" [id=732, type=Constant]; -"733 Constant_14904" [id=733, type=Constant]; -"734 Constant_14903" [id=734, type=Constant]; -"735 Constant_14902" [id=735, type=Constant]; -"736 Constant_14901" [id=736, type=Constant]; -"737 Transpose_7523" [id=737, type=Constant]; -"738 Convolution_2868/fq_weights_1" [id=738, type=FakeQuantize]; -"739 Constant_14899" [id=739, type=Constant]; -"740 Constant_14898" [id=740, type=Constant]; -"741 Constant_14897" [id=741, type=Constant]; -"742 Constant_14896" [id=742, type=Constant]; -"743 Transpose_2867" [id=743, type=Constant]; -"744 Constant_14894" [id=744, type=Constant]; -"745 Constant_14893" [id=745, type=Constant]; -"746 Constant_14892" [id=746, type=Constant]; -"747 
Constant_14891" [id=747, type=Constant]; -"748 Constant_7517" [id=748, type=Constant]; -"749 Constant_14889" [id=749, type=Constant]; -"750 Constant_14888" [id=750, type=Constant]; -"751 Constant_14887" [id=751, type=Constant]; -"752 Constant_14886" [id=752, type=Constant]; -"753 Constant_11950" [id=753, type=Constant]; -"754 Multiply_11942/fq_weights_1" [id=754, type=FakeQuantize]; -"755 Constant_14884" [id=755, type=Constant]; -"756 Constant_14883" [id=756, type=Constant]; -"757 Constant_14882" [id=757, type=Constant]; -"758 Constant_14881" [id=758, type=Constant]; -"759 Multiply_12310" [id=759, type=Constant]; -"760 Constant_14879" [id=760, type=Constant]; -"761 Constant_14878" [id=761, type=Constant]; -"762 Constant_14877" [id=762, type=Constant]; -"763 Constant_14876" [id=763, type=Constant]; -"764 Constant_11936" [id=764, type=Constant]; -"765 Multiply_11928/fq_weights_1" [id=765, type=FakeQuantize]; -"766 Constant_14874" [id=766, type=Constant]; -"767 Constant_14873" [id=767, type=Constant]; -"768 Constant_14872" [id=768, type=Constant]; -"769 Constant_14871" [id=769, type=Constant]; -"770 Multiply_12304" [id=770, type=Constant]; -"771 Constant_14869" [id=771, type=Constant]; -"772 Constant_14868" [id=772, type=Constant]; -"773 Constant_14867" [id=773, type=Constant]; -"774 Constant_14866" [id=774, type=Constant]; -"775 Constant_11922" [id=775, type=Constant]; -"776 Multiply_11914/fq_weights_1" [id=776, type=FakeQuantize]; -"777 Constant_14864" [id=777, type=Constant]; -"778 Constant_14863" [id=778, type=Constant]; -"779 Constant_14862" [id=779, type=Constant]; -"780 Constant_14861" [id=780, type=Constant]; -"781 Multiply_12298" [id=781, type=Constant]; -"782 Constant_14859" [id=782, type=Constant]; -"783 Constant_14858" [id=783, type=Constant]; -"784 Constant_14857" [id=784, type=Constant]; -"785 Constant_14856" [id=785, type=Constant]; -"786 Constant_14854" [id=786, type=Constant]; -"787 Constant_14853" [id=787, type=Constant]; -"788 Constant_14852" [id=788, type=Constant]; -"789 Constant_14851" [id=789, type=Constant]; -"790 Transpose_7387" [id=790, type=Constant]; -"791 Convolution_2767/fq_weights_1" [id=791, type=FakeQuantize]; -"792 Constant_14849" [id=792, type=Constant]; -"793 Constant_14848" [id=793, type=Constant]; -"794 Constant_14847" [id=794, type=Constant]; -"795 Constant_14846" [id=795, type=Constant]; -"796 Transpose_2766" [id=796, type=Constant]; -"797 Constant_14844" [id=797, type=Constant]; -"798 Constant_14843" [id=798, type=Constant]; -"799 Constant_14842" [id=799, type=Constant]; -"800 Constant_14841" [id=800, type=Constant]; -"801 Transpose_7379" [id=801, type=Constant]; -"802 Convolution_2758/fq_weights_1" [id=802, type=FakeQuantize]; -"803 Constant_14839" [id=803, type=Constant]; -"804 Constant_14838" [id=804, type=Constant]; -"805 Constant_14837" [id=805, type=Constant]; -"806 Constant_14836" [id=806, type=Constant]; -"807 Transpose_2757" [id=807, type=Constant]; -"808 Constant_14834" [id=808, type=Constant]; -"809 Constant_14833" [id=809, type=Constant]; -"810 Constant_14832" [id=810, type=Constant]; -"811 Constant_14831" [id=811, type=Constant]; -"812 Constant_7373" [id=812, type=Constant]; -"813 Constant_14829" [id=813, type=Constant]; -"814 Constant_14828" [id=814, type=Constant]; -"815 Constant_14827" [id=815, type=Constant]; -"816 Constant_14826" [id=816, type=Constant]; -"817 Constant_11908" [id=817, type=Constant]; -"818 Multiply_11900/fq_weights_1" [id=818, type=FakeQuantize]; -"819 Constant_14824" [id=819, type=Constant]; -"820 Constant_14823" 
[id=820, type=Constant]; -"821 Constant_14822" [id=821, type=Constant]; -"822 Constant_14821" [id=822, type=Constant]; -"823 Multiply_12293" [id=823, type=Constant]; -"824 Constant_14819" [id=824, type=Constant]; -"825 Constant_14818" [id=825, type=Constant]; -"826 Constant_14817" [id=826, type=Constant]; -"827 Constant_14816" [id=827, type=Constant]; -"828 Constant_11894" [id=828, type=Constant]; -"829 Multiply_11886/fq_weights_1" [id=829, type=FakeQuantize]; -"830 Constant_14814" [id=830, type=Constant]; -"831 Constant_14813" [id=831, type=Constant]; -"832 Constant_14812" [id=832, type=Constant]; -"833 Constant_14811" [id=833, type=Constant]; -"834 Multiply_12287" [id=834, type=Constant]; -"835 Constant_14809" [id=835, type=Constant]; -"836 Constant_14808" [id=836, type=Constant]; -"837 Constant_14807" [id=837, type=Constant]; -"838 Constant_14806" [id=838, type=Constant]; -"839 Constant_14804" [id=839, type=Constant]; -"840 Constant_14803" [id=840, type=Constant]; -"841 Constant_14802" [id=841, type=Constant]; -"842 Constant_14801" [id=842, type=Constant]; -"843 Constant_11880" [id=843, type=Constant]; -"844 Multiply_11872/fq_weights_1" [id=844, type=FakeQuantize]; -"845 Constant_14799" [id=845, type=Constant]; -"846 Constant_14798" [id=846, type=Constant]; -"847 Constant_14797" [id=847, type=Constant]; -"848 Constant_14796" [id=848, type=Constant]; -"849 Multiply_12281" [id=849, type=Constant]; -"850 Constant_14794" [id=850, type=Constant]; -"851 Constant_14793" [id=851, type=Constant]; -"852 Constant_14792" [id=852, type=Constant]; -"853 Constant_14791" [id=853, type=Constant]; -"854 Constant_11866" [id=854, type=Constant]; -"855 Multiply_11858/fq_weights_1" [id=855, type=FakeQuantize]; -"856 Constant_14789" [id=856, type=Constant]; -"857 Constant_14788" [id=857, type=Constant]; -"858 Constant_14787" [id=858, type=Constant]; -"859 Constant_14786" [id=859, type=Constant]; -"860 Multiply_12276" [id=860, type=Constant]; -"861 Constant_14784" [id=861, type=Constant]; -"862 Constant_14783" [id=862, type=Constant]; -"863 Constant_14782" [id=863, type=Constant]; -"864 Constant_14781" [id=864, type=Constant]; -"865 Constant_11852" [id=865, type=Constant]; -"866 Multiply_11844/fq_weights_1" [id=866, type=FakeQuantize]; -"867 Constant_14779" [id=867, type=Constant]; -"868 Constant_14778" [id=868, type=Constant]; -"869 Constant_14777" [id=869, type=Constant]; -"870 Constant_14776" [id=870, type=Constant]; -"871 Multiply_12270" [id=871, type=Constant]; -"872 Constant_14774" [id=872, type=Constant]; -"873 Constant_14773" [id=873, type=Constant]; -"874 Constant_14772" [id=874, type=Constant]; -"875 Constant_14771" [id=875, type=Constant]; -"876 Constant_11838" [id=876, type=Constant]; -"877 Multiply_11830/fq_weights_1" [id=877, type=FakeQuantize]; -"878 Constant_14769" [id=878, type=Constant]; -"879 Constant_14768" [id=879, type=Constant]; -"880 Constant_14767" [id=880, type=Constant]; -"881 Constant_14766" [id=881, type=Constant]; -"882 Multiply_12264" [id=882, type=Constant]; -"883 Constant_14764" [id=883, type=Constant]; -"884 Constant_14763" [id=884, type=Constant]; -"885 Constant_14762" [id=885, type=Constant]; -"886 Constant_14761" [id=886, type=Constant]; -"887 Constant_11824" [id=887, type=Constant]; -"888 Multiply_11816/fq_weights_1" [id=888, type=FakeQuantize]; -"889 Constant_14759" [id=889, type=Constant]; -"890 Constant_14758" [id=890, type=Constant]; -"891 Constant_14757" [id=891, type=Constant]; -"892 Constant_14756" [id=892, type=Constant]; -"893 Multiply_12259" [id=893, 
type=Constant]; -"894 Constant_14754" [id=894, type=Constant]; -"895 Constant_14753" [id=895, type=Constant]; -"896 Constant_14752" [id=896, type=Constant]; -"897 Constant_14751" [id=897, type=Constant]; -"898 Constant_11810" [id=898, type=Constant]; -"899 Multiply_11802/fq_weights_1" [id=899, type=FakeQuantize]; -"900 Constant_14749" [id=900, type=Constant]; -"901 Constant_14748" [id=901, type=Constant]; -"902 Constant_14747" [id=902, type=Constant]; -"903 Constant_14746" [id=903, type=Constant]; -"904 Multiply_12253" [id=904, type=Constant]; -"905 Constant_14744" [id=905, type=Constant]; -"906 Constant_14743" [id=906, type=Constant]; -"907 Constant_14742" [id=907, type=Constant]; -"908 Constant_14741" [id=908, type=Constant]; -"909 Constant_11796" [id=909, type=Constant]; -"910 Multiply_11788/fq_weights_1" [id=910, type=FakeQuantize]; -"911 Constant_14739" [id=911, type=Constant]; -"912 Constant_14738" [id=912, type=Constant]; -"913 Constant_14737" [id=913, type=Constant]; -"914 Constant_14736" [id=914, type=Constant]; -"915 Multiply_12247" [id=915, type=Constant]; -"916 Constant_14734" [id=916, type=Constant]; -"917 Constant_14733" [id=917, type=Constant]; -"918 Constant_14732" [id=918, type=Constant]; -"919 Constant_14731" [id=919, type=Constant]; -"920 Constant_14729" [id=920, type=Constant]; -"921 Constant_14728" [id=921, type=Constant]; -"922 Constant_14727" [id=922, type=Constant]; -"923 Constant_14726" [id=923, type=Constant]; -"924 Transpose_7037" [id=924, type=Constant]; -"925 Convolution_2440/fq_weights_1" [id=925, type=FakeQuantize]; -"926 Constant_14724" [id=926, type=Constant]; -"927 Constant_14723" [id=927, type=Constant]; -"928 Constant_14722" [id=928, type=Constant]; -"929 Constant_14721" [id=929, type=Constant]; -"930 Transpose_2439" [id=930, type=Constant]; -"931 Constant_14719" [id=931, type=Constant]; -"932 Constant_14718" [id=932, type=Constant]; -"933 Constant_14717" [id=933, type=Constant]; -"934 Constant_14716" [id=934, type=Constant]; -"935 Transpose_7029" [id=935, type=Constant]; -"936 Convolution_2431/fq_weights_1" [id=936, type=FakeQuantize]; -"937 Constant_14714" [id=937, type=Constant]; -"938 Constant_14713" [id=938, type=Constant]; -"939 Constant_14712" [id=939, type=Constant]; -"940 Constant_14711" [id=940, type=Constant]; -"941 Transpose_2430" [id=941, type=Constant]; -"942 Constant_14709" [id=942, type=Constant]; -"943 Constant_14708" [id=943, type=Constant]; -"944 Constant_14707" [id=944, type=Constant]; -"945 Constant_14706" [id=945, type=Constant]; -"946 Constant_7023" [id=946, type=Constant]; -"947 Constant_14704" [id=947, type=Constant]; -"948 Constant_14703" [id=948, type=Constant]; -"949 Constant_14702" [id=949, type=Constant]; -"950 Constant_14701" [id=950, type=Constant]; -"951 Constant_11782" [id=951, type=Constant]; -"952 Multiply_11774/fq_weights_1" [id=952, type=FakeQuantize]; -"953 Constant_14699" [id=953, type=Constant]; -"954 Constant_14698" [id=954, type=Constant]; -"955 Constant_14697" [id=955, type=Constant]; -"956 Constant_14696" [id=956, type=Constant]; -"957 Multiply_12242" [id=957, type=Constant]; -"958 Constant_14694" [id=958, type=Constant]; -"959 Constant_14693" [id=959, type=Constant]; -"960 Constant_14692" [id=960, type=Constant]; -"961 Constant_14691" [id=961, type=Constant]; -"962 Constant_11768" [id=962, type=Constant]; -"963 Multiply_11760/fq_weights_1" [id=963, type=FakeQuantize]; -"964 Constant_14689" [id=964, type=Constant]; -"965 Constant_14688" [id=965, type=Constant]; -"966 Constant_14687" [id=966, type=Constant]; 
-"967 Constant_14686" [id=967, type=Constant]; -"968 Gather_12661" [id=968, type=Constant]; -"969 Constant_14684" [id=969, type=Constant]; -"970 Constant_14683" [id=970, type=Constant]; -"971 Constant_14682" [id=971, type=Constant]; -"972 Constant_14681" [id=972, type=Constant]; -"973 Unsqueeze_9541" [id=973, type=Constant]; -"974 Unsqueeze_9547" [id=974, type=Constant]; -"975 Constant_9544" [id=975, type=Constant]; -"0 input_1" -> "1 Transpose_9545" [label="[1, 224, 224, 3]", style=solid]; -"1 Transpose_9545" -> "2 Transpose_9539" [label="[1, 3, 224, 224]", style=solid]; -"2 Transpose_9539" -> "3 Transpose_2342" [label="[1, 3, 224, 224]", style=solid]; -"3 Transpose_2342" -> "4 Transpose_2342/fq_output_0" [label="[1, 3, 224, 224]", style=solid]; -"4 Transpose_2342/fq_output_0" -> "5 Multiply_11760" [label="[1, 3, 224, 224]", style=solid]; -"5 Multiply_11760" -> "6 Transpose_6952" [label="[1, 16, 112, 112]", style=solid]; -"6 Transpose_6952" -> "7 Transpose_6970" [label="[1, 16, 112, 112]", style=solid]; -"7 Transpose_6970" -> "8 Transpose_6970/fq_output_0" [label="[1, 16, 112, 112]", style=solid]; -"8 Transpose_6970/fq_output_0" -> "9 Multiply_11774" [label="[1, 16, 112, 112]", style=solid]; -"9 Multiply_11774" -> "10 Transpose_7019" [label="[1, 16, 56, 56]", style=solid]; -"10 Transpose_7019" -> "11 Relu_7020" [label="[1, 16, 56, 56]", style=solid]; -"11 Relu_7020" -> "12 Relu_7020/fq_output_0" [label="[1, 16, 56, 56]", style=solid]; -"12 Relu_7020/fq_output_0" -> "13 Transpose_7025" [label="[1, 16, 56, 56]", style=solid]; -"12 Relu_7020/fq_output_0" -> "14 Transpose_7057" [label="[1, 16, 56, 56]", style=solid]; -"13 Transpose_7025" -> "15 Transpose_7025/fq_output_0" [label="[1, 16, 1, 1]", style=solid]; -"14 Transpose_7057" -> "16 Transpose_7057/fq_output_0" [label="[1, 16, 56, 56]", style=solid]; -"15 Transpose_7025/fq_output_0" -> "17 Convolution_2431" [label="[1, 16, 1, 1]", style=solid]; -"16 Transpose_7057/fq_output_0" -> "18 Multiply_11788" [label="[1, 16, 56, 56]", style=solid]; -"17 Convolution_2431" -> "19 Transpose_7031" [label="[1, 8, 1, 1]", style=solid]; -"18 Multiply_11788" -> "20 Transpose_7081" [label="[1, 16, 56, 56]", style=solid]; -"19 Transpose_7031" -> "21 Relu_7032" [label="[1, 8, 1, 1]", style=solid]; -"20 Transpose_7081" -> "22 Transpose_7081/fq_output_0" [label="[1, 16, 56, 56]", style=solid]; -"21 Relu_7032" -> "23 Relu_7032/fq_output_0" [label="[1, 8, 1, 1]", style=solid]; -"22 Transpose_7081/fq_output_0" -> "24 Multiply_11802" [label="[1, 16, 56, 56]", style=solid]; -"23 Relu_7032/fq_output_0" -> "25 Convolution_2440" [label="[1, 8, 1, 1]", style=solid]; -"24 Multiply_11802" -> "26 Transpose_7105" [label="[1, 72, 56, 56]", style=solid]; -"25 Convolution_2440" -> "27 Transpose_9591" [label="[1, 16, 1, 1]", style=solid]; -"26 Transpose_7105" -> "28 Relu_7106" [label="[1, 72, 56, 56]", style=solid]; -"27 Transpose_9591" -> "29 Transpose_7055" [label="[1, 16, 1, 1]", style=solid]; -"28 Relu_7106" -> "30 Relu_7106/fq_output_0" [label="[1, 72, 56, 56]", style=solid]; -"29 Transpose_7055" -> "31 Transpose_7055/fq_output_0" [label="[1, 16, 1, 1]", style=solid]; -"30 Relu_7106/fq_output_0" -> "32 Multiply_11816" [label="[1, 72, 56, 56]", style=solid]; -"31 Transpose_7055/fq_output_0" -> "14 Transpose_7057" [label="[1, 16, 1, 1]", style=solid]; -"32 Multiply_11816" -> "33 Transpose_7156" [label="[1, 72, 28, 28]", style=solid]; -"33 Transpose_7156" -> "34 Relu_7157" [label="[1, 72, 28, 28]", style=solid]; -"34 Relu_7157" -> "35 Relu_7157/fq_output_0" [label="[1, 72, 28, 
28]", style=solid]; -"35 Relu_7157/fq_output_0" -> "36 Multiply_11830" [label="[1, 72, 28, 28]", style=solid]; -"36 Multiply_11830" -> "37 Transpose_7182" [label="[1, 24, 28, 28]", style=solid]; -"37 Transpose_7182" -> "38 Transpose_7182/fq_output_0" [label="[1, 24, 28, 28]", style=solid]; -"38 Transpose_7182/fq_output_0" -> "39 Multiply_11844" [label="[1, 24, 28, 28]", style=solid]; -"38 Transpose_7182/fq_output_0" -> "40 Transpose_7262" [label="[1, 24, 28, 28]", style=solid]; -"39 Multiply_11844" -> "41 Transpose_7206" [label="[1, 88, 28, 28]", style=solid]; -"40 Transpose_7262" -> "42 Transpose_7262/fq_output_0" [label="[1, 24, 28, 28]", style=solid]; -"41 Transpose_7206" -> "43 Relu_7207" [label="[1, 88, 28, 28]", style=solid]; -"42 Transpose_7262/fq_output_0" -> "44 Multiply_11886" [label="[1, 24, 28, 28]", style=solid]; -"43 Relu_7207" -> "45 Relu_7207/fq_output_0" [label="[1, 88, 28, 28]", style=solid]; -"44 Multiply_11886" -> "46 Transpose_7286" [label="[1, 96, 28, 28]", style=solid]; -"45 Relu_7207/fq_output_0" -> "47 Multiply_11858" [label="[1, 88, 28, 28]", style=solid]; -"46 Transpose_7286" -> "48 Transpose_7304" [label="[1, 96, 28, 28]", style=solid]; -"47 Multiply_11858" -> "49 Transpose_7232" [label="[1, 88, 28, 28]", style=solid]; -"48 Transpose_7304" -> "50 Transpose_7304/fq_output_0" [label="[1, 96, 28, 28]", style=solid]; -"49 Transpose_7232" -> "51 Relu_7233" [label="[1, 88, 28, 28]", style=solid]; -"50 Transpose_7304/fq_output_0" -> "52 Multiply_11900" [label="[1, 96, 28, 28]", style=solid]; -"51 Relu_7233" -> "53 Relu_7233/fq_output_0" [label="[1, 88, 28, 28]", style=solid]; -"52 Multiply_11900" -> "54 Transpose_7353" [label="[1, 96, 14, 14]", style=solid]; -"53 Relu_7233/fq_output_0" -> "55 Multiply_11872" [label="[1, 88, 28, 28]", style=solid]; -"54 Transpose_7353" -> "56 Transpose_7371" [label="[1, 96, 14, 14]", style=solid]; -"55 Multiply_11872" -> "57 Transpose_7260" [label="[1, 24, 28, 28]", style=solid]; -"56 Transpose_7371" -> "58 Transpose_7371/fq_output_0" [label="[1, 96, 14, 14]", style=solid]; -"57 Transpose_7260" -> "59 Transpose_7260/fq_output_0" [label="[1, 24, 28, 28]", style=solid]; -"58 Transpose_7371/fq_output_0" -> "60 Transpose_7375" [label="[1, 96, 14, 14]", style=solid]; -"58 Transpose_7371/fq_output_0" -> "61 Transpose_7407" [label="[1, 96, 14, 14]", style=solid]; -"59 Transpose_7260/fq_output_0" -> "40 Transpose_7262" [label="[1, 24, 28, 28]", style=solid]; -"60 Transpose_7375" -> "62 Transpose_7375/fq_output_0" [label="[1, 96, 1, 1]", style=solid]; -"61 Transpose_7407" -> "63 Transpose_7407/fq_output_0" [label="[1, 96, 14, 14]", style=solid]; -"62 Transpose_7375/fq_output_0" -> "64 Convolution_2758" [label="[1, 96, 1, 1]", style=solid]; -"63 Transpose_7407/fq_output_0" -> "65 Multiply_11914" [label="[1, 96, 14, 14]", style=solid]; -"64 Convolution_2758" -> "66 Transpose_7381" [label="[1, 24, 1, 1]", style=solid]; -"65 Multiply_11914" -> "67 Transpose_7431" [label="[1, 40, 14, 14]", style=solid]; -"66 Transpose_7381" -> "68 Relu_7382" [label="[1, 24, 1, 1]", style=solid]; -"67 Transpose_7431" -> "69 Transpose_7431/fq_output_0" [label="[1, 40, 14, 14]", style=solid]; -"68 Relu_7382" -> "70 Relu_7382/fq_output_0" [label="[1, 24, 1, 1]", style=solid]; -"69 Transpose_7431/fq_output_0" -> "71 Multiply_11928" [label="[1, 40, 14, 14]", style=solid]; -"69 Transpose_7431/fq_output_0" -> "72 Transpose_7579" [label="[1, 40, 14, 14]", style=solid]; -"70 Relu_7382/fq_output_0" -> "73 Convolution_2767" [label="[1, 24, 1, 1]", style=solid]; -"71 
Multiply_11928" -> "74 Transpose_7455" [label="[1, 240, 14, 14]", style=solid]; -"72 Transpose_7579" -> "75 Transpose_7579/fq_output_0" [label="[1, 40, 14, 14]", style=solid]; -"73 Convolution_2767" -> "76 Transpose_9691" [label="[1, 96, 1, 1]", style=solid]; -"74 Transpose_7455" -> "77 Transpose_7473" [label="[1, 240, 14, 14]", style=solid]; -"75 Transpose_7579/fq_output_0" -> "78 Multiply_11970" [label="[1, 40, 14, 14]", style=solid]; -"75 Transpose_7579/fq_output_0" -> "79 Transpose_7727" [label="[1, 40, 14, 14]", style=solid]; -"76 Transpose_9691" -> "80 Transpose_7405" [label="[1, 96, 1, 1]", style=solid]; -"77 Transpose_7473" -> "81 Transpose_7473/fq_output_0" [label="[1, 240, 14, 14]", style=solid]; -"78 Multiply_11970" -> "82 Transpose_7603" [label="[1, 240, 14, 14]", style=solid]; -"79 Transpose_7727" -> "83 Transpose_7727/fq_output_0" [label="[1, 40, 14, 14]", style=solid]; -"80 Transpose_7405" -> "84 Transpose_7405/fq_output_0" [label="[1, 96, 1, 1]", style=solid]; -"81 Transpose_7473/fq_output_0" -> "85 Multiply_11942" [label="[1, 240, 14, 14]", style=solid]; -"82 Transpose_7603" -> "86 Transpose_7621" [label="[1, 240, 14, 14]", style=solid]; -"83 Transpose_7727/fq_output_0" -> "87 Multiply_12012" [label="[1, 40, 14, 14]", style=solid]; -"84 Transpose_7405/fq_output_0" -> "61 Transpose_7407" [label="[1, 96, 1, 1]", style=solid]; -"85 Multiply_11942" -> "88 Transpose_7497" [label="[1, 240, 14, 14]", style=solid]; -"86 Transpose_7621" -> "89 Transpose_7621/fq_output_0" [label="[1, 240, 14, 14]", style=solid]; -"87 Multiply_12012" -> "90 Transpose_7751" [label="[1, 120, 14, 14]", style=solid]; -"88 Transpose_7497" -> "91 Transpose_7515" [label="[1, 240, 14, 14]", style=solid]; -"89 Transpose_7621/fq_output_0" -> "92 Multiply_11984" [label="[1, 240, 14, 14]", style=solid]; -"90 Transpose_7751" -> "93 Transpose_7769" [label="[1, 120, 14, 14]", style=solid]; -"91 Transpose_7515" -> "94 Transpose_7515/fq_output_0" [label="[1, 240, 14, 14]", style=solid]; -"92 Multiply_11984" -> "95 Transpose_7645" [label="[1, 240, 14, 14]", style=solid]; -"93 Transpose_7769" -> "96 Transpose_7769/fq_output_0" [label="[1, 120, 14, 14]", style=solid]; -"94 Transpose_7515/fq_output_0" -> "97 Transpose_7519" [label="[1, 240, 14, 14]", style=solid]; -"94 Transpose_7515/fq_output_0" -> "98 Transpose_7551" [label="[1, 240, 14, 14]", style=solid]; -"95 Transpose_7645" -> "99 Transpose_7663" [label="[1, 240, 14, 14]", style=solid]; -"96 Transpose_7769/fq_output_0" -> "100 Multiply_12026" [label="[1, 120, 14, 14]", style=solid]; -"97 Transpose_7519" -> "101 Transpose_7519/fq_output_0" [label="[1, 240, 1, 1]", style=solid]; -"98 Transpose_7551" -> "102 Transpose_7551/fq_output_0" [label="[1, 240, 14, 14]", style=solid]; -"99 Transpose_7663" -> "103 Transpose_7663/fq_output_0" [label="[1, 240, 14, 14]", style=solid]; -"100 Multiply_12026" -> "104 Transpose_7793" [label="[1, 120, 14, 14]", style=solid]; -"101 Transpose_7519/fq_output_0" -> "105 Convolution_2868" [label="[1, 240, 1, 1]", style=solid]; -"102 Transpose_7551/fq_output_0" -> "106 Multiply_11956" [label="[1, 240, 14, 14]", style=solid]; -"103 Transpose_7663/fq_output_0" -> "107 Transpose_7667" [label="[1, 240, 14, 14]", style=solid]; -"103 Transpose_7663/fq_output_0" -> "108 Transpose_7699" [label="[1, 240, 14, 14]", style=solid]; -"104 Transpose_7793" -> "109 Transpose_7811" [label="[1, 120, 14, 14]", style=solid]; -"105 Convolution_2868" -> "110 Transpose_7525" [label="[1, 64, 1, 1]", style=solid]; -"106 Multiply_11956" -> "111 Transpose_7577" 
[label="[1, 40, 14, 14]", style=solid]; -"107 Transpose_7667" -> "112 Transpose_7667/fq_output_0" [label="[1, 240, 1, 1]", style=solid]; -"108 Transpose_7699" -> "113 Transpose_7699/fq_output_0" [label="[1, 240, 14, 14]", style=solid]; -"109 Transpose_7811" -> "114 Transpose_7811/fq_output_0" [label="[1, 120, 14, 14]", style=solid]; -"110 Transpose_7525" -> "115 Relu_7526" [label="[1, 64, 1, 1]", style=solid]; -"111 Transpose_7577" -> "116 Transpose_7577/fq_output_0" [label="[1, 40, 14, 14]", style=solid]; -"112 Transpose_7667/fq_output_0" -> "117 Convolution_2979" [label="[1, 240, 1, 1]", style=solid]; -"113 Transpose_7699/fq_output_0" -> "118 Multiply_11998" [label="[1, 240, 14, 14]", style=solid]; -"114 Transpose_7811/fq_output_0" -> "119 Transpose_7815" [label="[1, 120, 14, 14]", style=solid]; -"114 Transpose_7811/fq_output_0" -> "120 Transpose_7847" [label="[1, 120, 14, 14]", style=solid]; -"115 Relu_7526" -> "121 Relu_7526/fq_output_0" [label="[1, 64, 1, 1]", style=solid]; -"116 Transpose_7577/fq_output_0" -> "72 Transpose_7579" [label="[1, 40, 14, 14]", style=solid]; -"117 Convolution_2979" -> "122 Transpose_7673" [label="[1, 64, 1, 1]", style=solid]; -"118 Multiply_11998" -> "123 Transpose_7725" [label="[1, 40, 14, 14]", style=solid]; -"119 Transpose_7815" -> "124 Transpose_7815/fq_output_0" [label="[1, 120, 1, 1]", style=solid]; -"120 Transpose_7847" -> "125 Transpose_7847/fq_output_0" [label="[1, 120, 14, 14]", style=solid]; -"121 Relu_7526/fq_output_0" -> "126 Convolution_2877" [label="[1, 64, 1, 1]", style=solid]; -"122 Transpose_7673" -> "127 Relu_7674" [label="[1, 64, 1, 1]", style=solid]; -"123 Transpose_7725" -> "128 Transpose_7725/fq_output_0" [label="[1, 40, 14, 14]", style=solid]; -"124 Transpose_7815/fq_output_0" -> "129 Convolution_3090" [label="[1, 120, 1, 1]", style=solid]; -"125 Transpose_7847/fq_output_0" -> "130 Multiply_12040" [label="[1, 120, 14, 14]", style=solid]; -"126 Convolution_2877" -> "131 Transpose_9759" [label="[1, 240, 1, 1]", style=solid]; -"127 Relu_7674" -> "132 Relu_7674/fq_output_0" [label="[1, 64, 1, 1]", style=solid]; -"128 Transpose_7725/fq_output_0" -> "79 Transpose_7727" [label="[1, 40, 14, 14]", style=solid]; -"129 Convolution_3090" -> "133 Transpose_7821" [label="[1, 32, 1, 1]", style=solid]; -"130 Multiply_12040" -> "134 Transpose_7871" [label="[1, 48, 14, 14]", style=solid]; -"131 Transpose_9759" -> "135 Transpose_7549" [label="[1, 240, 1, 1]", style=solid]; -"132 Relu_7674/fq_output_0" -> "136 Convolution_2988" [label="[1, 64, 1, 1]", style=solid]; -"133 Transpose_7821" -> "137 Relu_7822" [label="[1, 32, 1, 1]", style=solid]; -"134 Transpose_7871" -> "138 Transpose_7871/fq_output_0" [label="[1, 48, 14, 14]", style=solid]; -"135 Transpose_7549" -> "139 Transpose_7549/fq_output_0" [label="[1, 240, 1, 1]", style=solid]; -"136 Convolution_2988" -> "140 Transpose_9859" [label="[1, 240, 1, 1]", style=solid]; -"137 Relu_7822" -> "141 Relu_7822/fq_output_0" [label="[1, 32, 1, 1]", style=solid]; -"138 Transpose_7871/fq_output_0" -> "142 Multiply_12054" [label="[1, 48, 14, 14]", style=solid]; -"138 Transpose_7871/fq_output_0" -> "143 Transpose_8019" [label="[1, 48, 14, 14]", style=solid]; -"139 Transpose_7549/fq_output_0" -> "98 Transpose_7551" [label="[1, 240, 1, 1]", style=solid]; -"140 Transpose_9859" -> "144 Transpose_7697" [label="[1, 240, 1, 1]", style=solid]; -"141 Relu_7822/fq_output_0" -> "145 Convolution_3099" [label="[1, 32, 1, 1]", style=solid]; -"142 Multiply_12054" -> "146 Transpose_7895" [label="[1, 144, 14, 14]", style=solid]; 
-"143 Transpose_8019" -> "147 Transpose_8019/fq_output_0" [label="[1, 48, 14, 14]", style=solid]; -"144 Transpose_7697" -> "148 Transpose_7697/fq_output_0" [label="[1, 240, 1, 1]", style=solid]; -"145 Convolution_3099" -> "149 Transpose_9959" [label="[1, 120, 1, 1]", style=solid]; -"146 Transpose_7895" -> "150 Transpose_7913" [label="[1, 144, 14, 14]", style=solid]; -"147 Transpose_8019/fq_output_0" -> "151 Multiply_12096" [label="[1, 48, 14, 14]", style=solid]; -"148 Transpose_7697/fq_output_0" -> "108 Transpose_7699" [label="[1, 240, 1, 1]", style=solid]; -"149 Transpose_9959" -> "152 Transpose_7845" [label="[1, 120, 1, 1]", style=solid]; -"150 Transpose_7913" -> "153 Transpose_7913/fq_output_0" [label="[1, 144, 14, 14]", style=solid]; -"151 Multiply_12096" -> "154 Transpose_8043" [label="[1, 288, 14, 14]", style=solid]; -"152 Transpose_7845" -> "155 Transpose_7845/fq_output_0" [label="[1, 120, 1, 1]", style=solid]; -"153 Transpose_7913/fq_output_0" -> "156 Multiply_12068" [label="[1, 144, 14, 14]", style=solid]; -"154 Transpose_8043" -> "157 Transpose_8061" [label="[1, 288, 14, 14]", style=solid]; -"155 Transpose_7845/fq_output_0" -> "120 Transpose_7847" [label="[1, 120, 1, 1]", style=solid]; -"156 Multiply_12068" -> "158 Transpose_7937" [label="[1, 144, 14, 14]", style=solid]; -"157 Transpose_8061" -> "159 Transpose_8061/fq_output_0" [label="[1, 288, 14, 14]", style=solid]; -"158 Transpose_7937" -> "160 Transpose_7955" [label="[1, 144, 14, 14]", style=solid]; -"159 Transpose_8061/fq_output_0" -> "161 Multiply_12110" [label="[1, 288, 14, 14]", style=solid]; -"160 Transpose_7955" -> "162 Transpose_7955/fq_output_0" [label="[1, 144, 14, 14]", style=solid]; -"161 Multiply_12110" -> "163 Transpose_8110" [label="[1, 288, 7, 7]", style=solid]; -"162 Transpose_7955/fq_output_0" -> "164 Transpose_7959" [label="[1, 144, 14, 14]", style=solid]; -"162 Transpose_7955/fq_output_0" -> "165 Transpose_7991" [label="[1, 144, 14, 14]", style=solid]; -"163 Transpose_8110" -> "166 Transpose_8128" [label="[1, 288, 7, 7]", style=solid]; -"164 Transpose_7959" -> "167 Transpose_7959/fq_output_0" [label="[1, 144, 1, 1]", style=solid]; -"165 Transpose_7991" -> "168 Transpose_7991/fq_output_0" [label="[1, 144, 14, 14]", style=solid]; -"166 Transpose_8128" -> "169 Transpose_8128/fq_output_0" [label="[1, 288, 7, 7]", style=solid]; -"167 Transpose_7959/fq_output_0" -> "170 Convolution_3200" [label="[1, 144, 1, 1]", style=solid]; -"168 Transpose_7991/fq_output_0" -> "171 Multiply_12082" [label="[1, 144, 14, 14]", style=solid]; -"169 Transpose_8128/fq_output_0" -> "172 Transpose_8132" [label="[1, 288, 7, 7]", style=solid]; -"169 Transpose_8128/fq_output_0" -> "173 Transpose_8164" [label="[1, 288, 7, 7]", style=solid]; -"170 Convolution_3200" -> "174 Transpose_7965" [label="[1, 40, 1, 1]", style=solid]; -"171 Multiply_12082" -> "175 Transpose_8017" [label="[1, 48, 14, 14]", style=solid]; -"172 Transpose_8132" -> "176 Transpose_8132/fq_output_0" [label="[1, 288, 1, 1]", style=solid]; -"173 Transpose_8164" -> "177 Transpose_8164/fq_output_0" [label="[1, 288, 7, 7]", style=solid]; -"174 Transpose_7965" -> "178 Relu_7966" [label="[1, 40, 1, 1]", style=solid]; -"175 Transpose_8017" -> "179 Transpose_8017/fq_output_0" [label="[1, 48, 14, 14]", style=solid]; -"176 Transpose_8132/fq_output_0" -> "180 Convolution_3337" [label="[1, 288, 1, 1]", style=solid]; -"177 Transpose_8164/fq_output_0" -> "181 Multiply_12124" [label="[1, 288, 7, 7]", style=solid]; -"178 Relu_7966" -> "182 Relu_7966/fq_output_0" [label="[1, 40, 1, 1]", 
style=solid]; -"179 Transpose_8017/fq_output_0" -> "143 Transpose_8019" [label="[1, 48, 14, 14]", style=solid]; -"180 Convolution_3337" -> "183 Transpose_8138" [label="[1, 72, 1, 1]", style=solid]; -"181 Multiply_12124" -> "184 Transpose_8188" [label="[1, 96, 7, 7]", style=solid]; -"182 Relu_7966/fq_output_0" -> "185 Convolution_3209" [label="[1, 40, 1, 1]", style=solid]; -"183 Transpose_8138" -> "186 Relu_8139" [label="[1, 72, 1, 1]", style=solid]; -"184 Transpose_8188" -> "187 Transpose_8188/fq_output_0" [label="[1, 96, 7, 7]", style=solid]; -"185 Convolution_3209" -> "188 Transpose_10027" [label="[1, 144, 1, 1]", style=solid]; -"186 Relu_8139" -> "189 Relu_8139/fq_output_0" [label="[1, 72, 1, 1]", style=solid]; -"187 Transpose_8188/fq_output_0" -> "190 Multiply_12138" [label="[1, 96, 7, 7]", style=solid]; -"187 Transpose_8188/fq_output_0" -> "191 Transpose_8336" [label="[1, 96, 7, 7]", style=solid]; -"188 Transpose_10027" -> "192 Transpose_7989" [label="[1, 144, 1, 1]", style=solid]; -"189 Relu_8139/fq_output_0" -> "193 Convolution_3346" [label="[1, 72, 1, 1]", style=solid]; -"190 Multiply_12138" -> "194 Transpose_8212" [label="[1, 576, 7, 7]", style=solid]; -"191 Transpose_8336" -> "195 Transpose_8336/fq_output_0" [label="[1, 96, 7, 7]", style=solid]; -"192 Transpose_7989" -> "196 Transpose_7989/fq_output_0" [label="[1, 144, 1, 1]", style=solid]; -"193 Convolution_3346" -> "197 Transpose_10127" [label="[1, 288, 1, 1]", style=solid]; -"194 Transpose_8212" -> "198 Transpose_8230" [label="[1, 576, 7, 7]", style=solid]; -"195 Transpose_8336/fq_output_0" -> "199 Multiply_12180" [label="[1, 96, 7, 7]", style=solid]; -"195 Transpose_8336/fq_output_0" -> "200 Transpose_8484" [label="[1, 96, 7, 7]", style=solid]; -"196 Transpose_7989/fq_output_0" -> "165 Transpose_7991" [label="[1, 144, 1, 1]", style=solid]; -"197 Transpose_10127" -> "201 Transpose_8162" [label="[1, 288, 1, 1]", style=solid]; -"198 Transpose_8230" -> "202 Transpose_8230/fq_output_0" [label="[1, 576, 7, 7]", style=solid]; -"199 Multiply_12180" -> "203 Transpose_8360" [label="[1, 576, 7, 7]", style=solid]; -"200 Transpose_8484" -> "204 Transpose_8484/fq_output_0" [label="[1, 96, 7, 7]", style=solid]; -"201 Transpose_8162" -> "205 Transpose_8162/fq_output_0" [label="[1, 288, 1, 1]", style=solid]; -"202 Transpose_8230/fq_output_0" -> "206 Multiply_12152" [label="[1, 576, 7, 7]", style=solid]; -"203 Transpose_8360" -> "207 Transpose_8378" [label="[1, 576, 7, 7]", style=solid]; -"204 Transpose_8484/fq_output_0" -> "208 Multiply_12222" [label="[1, 96, 7, 7]", style=solid]; -"205 Transpose_8162/fq_output_0" -> "173 Transpose_8164" [label="[1, 288, 1, 1]", style=solid]; -"206 Multiply_12152" -> "209 Transpose_8254" [label="[1, 576, 7, 7]", style=solid]; -"207 Transpose_8378" -> "210 Transpose_8378/fq_output_0" [label="[1, 576, 7, 7]", style=solid]; -"208 Multiply_12222" -> "211 Transpose_8508" [label="[1, 576, 7, 7]", style=solid]; -"209 Transpose_8254" -> "212 Transpose_8272" [label="[1, 576, 7, 7]", style=solid]; -"210 Transpose_8378/fq_output_0" -> "213 Multiply_12194" [label="[1, 576, 7, 7]", style=solid]; -"211 Transpose_8508" -> "214 Transpose_8526" [label="[1, 576, 7, 7]", style=solid]; -"212 Transpose_8272" -> "215 Transpose_8272/fq_output_0" [label="[1, 576, 7, 7]", style=solid]; -"213 Multiply_12194" -> "216 Transpose_8402" [label="[1, 576, 7, 7]", style=solid]; -"214 Transpose_8526" -> "217 Transpose_8526/fq_output_0" [label="[1, 576, 7, 7]", style=solid]; -"215 Transpose_8272/fq_output_0" -> "218 Transpose_8276" [label="[1, 
576, 7, 7]", style=solid]; -"215 Transpose_8272/fq_output_0" -> "219 Transpose_8308" [label="[1, 576, 7, 7]", style=solid]; -"216 Transpose_8402" -> "220 Transpose_8420" [label="[1, 576, 7, 7]", style=solid]; -"217 Transpose_8526/fq_output_0" -> "221 Transpose_8530" [label="[1, 576, 7, 7]", style=solid]; -"218 Transpose_8276" -> "222 Transpose_8276/fq_output_0" [label="[1, 576, 1, 1]", style=solid]; -"219 Transpose_8308" -> "223 Transpose_8308/fq_output_0" [label="[1, 576, 7, 7]", style=solid]; -"220 Transpose_8420" -> "224 Transpose_8420/fq_output_0" [label="[1, 576, 7, 7]", style=solid]; -"221 Transpose_8530" -> "225 Transpose_8530/fq_output_0" [label="[1, 576, 1, 1]", style=solid]; -"222 Transpose_8276/fq_output_0" -> "226 Convolution_3447" [label="[1, 576, 1, 1]", style=solid]; -"223 Transpose_8308/fq_output_0" -> "227 Multiply_12166" [label="[1, 576, 7, 7]", style=solid]; -"224 Transpose_8420/fq_output_0" -> "228 Transpose_8424" [label="[1, 576, 7, 7]", style=solid]; -"224 Transpose_8420/fq_output_0" -> "229 Transpose_8456" [label="[1, 576, 7, 7]", style=solid]; -"225 Transpose_8530/fq_output_0" -> "230 Convolution_3637" [label="[1, 576, 1, 1]", style=solid]; -"226 Convolution_3447" -> "231 Transpose_8282" [label="[1, 144, 1, 1]", style=solid]; -"227 Multiply_12166" -> "232 Transpose_8334" [label="[1, 96, 7, 7]", style=solid]; -"228 Transpose_8424" -> "233 Transpose_8424/fq_output_0" [label="[1, 576, 1, 1]", style=solid]; -"229 Transpose_8456" -> "234 Transpose_8456/fq_output_0" [label="[1, 576, 7, 7]", style=solid]; -"230 Convolution_3637" -> "235 Transpose_8536" [label="[1, 1024, 1, 1]", style=solid]; -"231 Transpose_8282" -> "236 Relu_8283" [label="[1, 144, 1, 1]", style=solid]; -"232 Transpose_8334" -> "237 Transpose_8334/fq_output_0" [label="[1, 96, 7, 7]", style=solid]; -"233 Transpose_8424/fq_output_0" -> "238 Convolution_3558" [label="[1, 576, 1, 1]", style=solid]; -"234 Transpose_8456/fq_output_0" -> "239 Multiply_12208" [label="[1, 576, 7, 7]", style=solid]; -"235 Transpose_8536" -> "240 Transpose_8554" [label="[1, 1024, 1, 1]", style=solid]; -"236 Relu_8283" -> "241 Relu_8283/fq_output_0" [label="[1, 144, 1, 1]", style=solid]; -"237 Transpose_8334/fq_output_0" -> "191 Transpose_8336" [label="[1, 96, 7, 7]", style=solid]; -"238 Convolution_3558" -> "242 Transpose_8430" [label="[1, 144, 1, 1]", style=solid]; -"239 Multiply_12208" -> "243 Transpose_8482" [label="[1, 96, 7, 7]", style=solid]; -"240 Transpose_8554" -> "244 Transpose_8554/fq_output_0" [label="[1, 1024, 1, 1]", style=solid]; -"241 Relu_8283/fq_output_0" -> "245 Convolution_3456" [label="[1, 144, 1, 1]", style=solid]; -"242 Transpose_8430" -> "246 Relu_8431" [label="[1, 144, 1, 1]", style=solid]; -"243 Transpose_8482" -> "247 Transpose_8482/fq_output_0" [label="[1, 96, 7, 7]", style=solid]; -"244 Transpose_8554/fq_output_0" -> "248 Convolution_3649" [label="[1, 1024, 1, 1]", style=solid]; -"245 Convolution_3456" -> "249 Transpose_10195" [label="[1, 576, 1, 1]", style=solid]; -"246 Relu_8431" -> "250 Relu_8431/fq_output_0" [label="[1, 144, 1, 1]", style=solid]; -"247 Transpose_8482/fq_output_0" -> "200 Transpose_8484" [label="[1, 96, 7, 7]", style=solid]; -"248 Convolution_3649" -> "251 Convolution_3649/fq_output_0" [label="[1, 1000, 1, 1]", style=solid]; -"249 Transpose_10195" -> "252 Transpose_8306" [label="[1, 576, 1, 1]", style=solid]; -"250 Relu_8431/fq_output_0" -> "253 Convolution_3567" [label="[1, 144, 1, 1]", style=solid]; -"251 Convolution_3649/fq_output_0" -> "254 Transpose_10375" [label="[1, 1000, 1, 
1]", style=solid]; -"252 Transpose_8306" -> "255 Transpose_8306/fq_output_0" [label="[1, 576, 1, 1]", style=solid]; -"253 Convolution_3567" -> "256 Transpose_10295" [label="[1, 576, 1, 1]", style=solid]; -"254 Transpose_10375" -> "257 MobilenetV3small/Logits/BiasAdd" [label="[1, 1, 1, 1000]", style=solid]; -"255 Transpose_8306/fq_output_0" -> "219 Transpose_8308" [label="[1, 576, 1, 1]", style=solid]; -"256 Transpose_10295" -> "258 Transpose_8454" [label="[1, 576, 1, 1]", style=solid]; -"257 MobilenetV3small/Logits/BiasAdd" -> "259 MobilenetV3small/flatten/Reshape" [label="[1, 1, 1, 1000]", style=solid]; -"258 Transpose_8454" -> "260 Transpose_8454/fq_output_0" [label="[1, 576, 1, 1]", style=solid]; -"259 MobilenetV3small/flatten/Reshape" -> "261 MobilenetV3small/Predictions/Softmax" [label="[1, 1000]", style=solid]; -"260 Transpose_8454/fq_output_0" -> "229 Transpose_8456" [label="[1, 576, 1, 1]", style=solid]; -"261 MobilenetV3small/Predictions/Softmax" -> "262 Predictions" [label="[1, 1000]", style=solid]; -"263 MobilenetV3small/flatten/Const" -> "259 MobilenetV3small/flatten/Reshape" [label="[2]", style=dashed]; -"264 Transpose_10377" -> "257 MobilenetV3small/Logits/BiasAdd" [label="[1, 1, 1, 1000]", style=solid]; -"265 Constant_11480" -> "254 Transpose_10375" [label="[4]", style=dashed]; -"266 Constant_15344" -> "251 Convolution_3649/fq_output_0" [label="[]", style=solid]; -"267 Constant_15343" -> "251 Convolution_3649/fq_output_0" [label="[]", style=solid]; -"268 Constant_15342" -> "251 Convolution_3649/fq_output_0" [label="[]", style=solid]; -"269 Constant_15341" -> "251 Convolution_3649/fq_output_0" [label="[]", style=solid]; -"270 Convolution_3649/fq_weights_1" -> "248 Convolution_3649" [label="[1000, 1024, 1, 1]", style=solid]; -"271 Constant_15349" -> "270 Convolution_3649/fq_weights_1" [label="[1000, 1, 1, 1]", style=solid]; -"272 Constant_15348" -> "270 Convolution_3649/fq_weights_1" [label="[1000, 1, 1, 1]", style=solid]; -"273 Constant_15347" -> "270 Convolution_3649/fq_weights_1" [label="[1000, 1, 1, 1]", style=solid]; -"274 Constant_15346" -> "270 Convolution_3649/fq_weights_1" [label="[1000, 1, 1, 1]", style=solid]; -"275 Transpose_3648" -> "270 Convolution_3649/fq_weights_1" [label="[1000, 1024, 1, 1]", style=solid]; -"276 Constant_15339" -> "244 Transpose_8554/fq_output_0" [label="[]", style=solid]; -"277 Constant_15338" -> "244 Transpose_8554/fq_output_0" [label="[]", style=solid]; -"278 Constant_15337" -> "244 Transpose_8554/fq_output_0" [label="[]", style=solid]; -"279 Constant_15336" -> "244 Transpose_8554/fq_output_0" [label="[]", style=solid]; -"280 Transpose_8534" -> "235 Transpose_8536" [label="[1, 1024, 1, 1]", style=solid]; -"281 Convolution_3637/fq_weights_1" -> "230 Convolution_3637" [label="[1024, 576, 1, 1]", style=solid]; -"282 Constant_15334" -> "281 Convolution_3637/fq_weights_1" [label="[1024, 1, 1, 1]", style=solid]; -"283 Constant_15333" -> "281 Convolution_3637/fq_weights_1" [label="[1024, 1, 1, 1]", style=solid]; -"284 Constant_15332" -> "281 Convolution_3637/fq_weights_1" [label="[1024, 1, 1, 1]", style=solid]; -"285 Constant_15331" -> "281 Convolution_3637/fq_weights_1" [label="[1024, 1, 1, 1]", style=solid]; -"286 Transpose_3636" -> "281 Convolution_3637/fq_weights_1" [label="[1024, 576, 1, 1]", style=solid]; -"287 Constant_15329" -> "225 Transpose_8530/fq_output_0" [label="[]", style=solid]; -"288 Constant_15328" -> "225 Transpose_8530/fq_output_0" [label="[]", style=solid]; -"289 Constant_15327" -> "225 Transpose_8530/fq_output_0" [label="[]", 
style=solid]; -"290 Constant_15326" -> "225 Transpose_8530/fq_output_0" [label="[]", style=solid]; -"291 Constant_8528" -> "221 Transpose_8530" [label="[2]", style=dashed]; -"292 Constant_15324" -> "217 Transpose_8526/fq_output_0" [label="[]", style=solid]; -"293 Constant_15323" -> "217 Transpose_8526/fq_output_0" [label="[]", style=solid]; -"294 Constant_15322" -> "217 Transpose_8526/fq_output_0" [label="[]", style=solid]; -"295 Constant_15321" -> "217 Transpose_8526/fq_output_0" [label="[]", style=solid]; -"296 Constant_12230" -> "211 Transpose_8508" [label="[1, 576, 1, 1]", style=solid]; -"297 Multiply_12222/fq_weights_1" -> "208 Multiply_12222" [label="[576, 96, 1, 1]", style=solid]; -"298 Constant_15319" -> "297 Multiply_12222/fq_weights_1" [label="[576, 1, 1, 1]", style=solid]; -"299 Constant_15318" -> "297 Multiply_12222/fq_weights_1" [label="[576, 1, 1, 1]", style=solid]; -"300 Constant_15317" -> "297 Multiply_12222/fq_weights_1" [label="[576, 1, 1, 1]", style=solid]; -"301 Constant_15316" -> "297 Multiply_12222/fq_weights_1" [label="[576, 1, 1, 1]", style=solid]; -"302 Multiply_12423" -> "297 Multiply_12222/fq_weights_1" [label="[576, 96, 1, 1]", style=solid]; -"303 Constant_15314" -> "204 Transpose_8484/fq_output_0" [label="[]", style=solid]; -"304 Constant_15313" -> "204 Transpose_8484/fq_output_0" [label="[]", style=solid]; -"305 Constant_15312" -> "204 Transpose_8484/fq_output_0" [label="[]", style=solid]; -"306 Constant_15311" -> "204 Transpose_8484/fq_output_0" [label="[]", style=solid]; -"307 Constant_15309" -> "247 Transpose_8482/fq_output_0" [label="[]", style=solid]; -"308 Constant_15308" -> "247 Transpose_8482/fq_output_0" [label="[]", style=solid]; -"309 Constant_15307" -> "247 Transpose_8482/fq_output_0" [label="[]", style=solid]; -"310 Constant_15306" -> "247 Transpose_8482/fq_output_0" [label="[]", style=solid]; -"311 Constant_12216" -> "243 Transpose_8482" [label="[1, 96, 1, 1]", style=solid]; -"312 Multiply_12208/fq_weights_1" -> "239 Multiply_12208" [label="[96, 576, 1, 1]", style=solid]; -"313 Constant_15304" -> "312 Multiply_12208/fq_weights_1" [label="[96, 1, 1, 1]", style=solid]; -"314 Constant_15303" -> "312 Multiply_12208/fq_weights_1" [label="[96, 1, 1, 1]", style=solid]; -"315 Constant_15302" -> "312 Multiply_12208/fq_weights_1" [label="[96, 1, 1, 1]", style=solid]; -"316 Constant_15301" -> "312 Multiply_12208/fq_weights_1" [label="[96, 1, 1, 1]", style=solid]; -"317 Multiply_12417" -> "312 Multiply_12208/fq_weights_1" [label="[96, 576, 1, 1]", style=solid]; -"318 Constant_15299" -> "234 Transpose_8456/fq_output_0" [label="[]", style=solid]; -"319 Constant_15298" -> "234 Transpose_8456/fq_output_0" [label="[]", style=solid]; -"320 Constant_15297" -> "234 Transpose_8456/fq_output_0" [label="[]", style=solid]; -"321 Constant_15296" -> "234 Transpose_8456/fq_output_0" [label="[]", style=solid]; -"322 Constant_15294" -> "260 Transpose_8454/fq_output_0" [label="[]", style=solid]; -"323 Constant_15293" -> "260 Transpose_8454/fq_output_0" [label="[]", style=solid]; -"324 Constant_15292" -> "260 Transpose_8454/fq_output_0" [label="[]", style=solid]; -"325 Constant_15291" -> "260 Transpose_8454/fq_output_0" [label="[]", style=solid]; -"326 Transpose_8436" -> "256 Transpose_10295" [label="[1, 576, 1, 1]", style=solid]; -"327 Convolution_3567/fq_weights_1" -> "253 Convolution_3567" [label="[576, 144, 1, 1]", style=solid]; -"328 Constant_15289" -> "327 Convolution_3567/fq_weights_1" [label="[576, 1, 1, 1]", style=solid]; -"329 Constant_15288" -> "327 
Convolution_3567/fq_weights_1" [label="[576, 1, 1, 1]", style=solid]; -"330 Constant_15287" -> "327 Convolution_3567/fq_weights_1" [label="[576, 1, 1, 1]", style=solid]; -"331 Constant_15286" -> "327 Convolution_3567/fq_weights_1" [label="[576, 1, 1, 1]", style=solid]; -"332 Transpose_3566" -> "327 Convolution_3567/fq_weights_1" [label="[576, 144, 1, 1]", style=solid]; -"333 Constant_15284" -> "250 Relu_8431/fq_output_0" [label="[]", style=solid]; -"334 Constant_15283" -> "250 Relu_8431/fq_output_0" [label="[]", style=solid]; -"335 Constant_15282" -> "250 Relu_8431/fq_output_0" [label="[]", style=solid]; -"336 Constant_15281" -> "250 Relu_8431/fq_output_0" [label="[]", style=solid]; -"337 Transpose_8428" -> "242 Transpose_8430" [label="[1, 144, 1, 1]", style=solid]; -"338 Convolution_3558/fq_weights_1" -> "238 Convolution_3558" [label="[144, 576, 1, 1]", style=solid]; -"339 Constant_15279" -> "338 Convolution_3558/fq_weights_1" [label="[144, 1, 1, 1]", style=solid]; -"340 Constant_15278" -> "338 Convolution_3558/fq_weights_1" [label="[144, 1, 1, 1]", style=solid]; -"341 Constant_15277" -> "338 Convolution_3558/fq_weights_1" [label="[144, 1, 1, 1]", style=solid]; -"342 Constant_15276" -> "338 Convolution_3558/fq_weights_1" [label="[144, 1, 1, 1]", style=solid]; -"343 Transpose_3557" -> "338 Convolution_3558/fq_weights_1" [label="[144, 576, 1, 1]", style=solid]; -"344 Constant_15274" -> "233 Transpose_8424/fq_output_0" [label="[]", style=solid]; -"345 Constant_15273" -> "233 Transpose_8424/fq_output_0" [label="[]", style=solid]; -"346 Constant_15272" -> "233 Transpose_8424/fq_output_0" [label="[]", style=solid]; -"347 Constant_15271" -> "233 Transpose_8424/fq_output_0" [label="[]", style=solid]; -"348 Constant_8422" -> "228 Transpose_8424" [label="[2]", style=dashed]; -"349 Constant_15269" -> "224 Transpose_8420/fq_output_0" [label="[]", style=solid]; -"350 Constant_15268" -> "224 Transpose_8420/fq_output_0" [label="[]", style=solid]; -"351 Constant_15267" -> "224 Transpose_8420/fq_output_0" [label="[]", style=solid]; -"352 Constant_15266" -> "224 Transpose_8420/fq_output_0" [label="[]", style=solid]; -"353 Constant_12202" -> "216 Transpose_8402" [label="[1, 576, 1, 1]", style=solid]; -"354 Multiply_12194/fq_weights_1" -> "213 Multiply_12194" [label="[576, 1, 1, 5, 5]", style=solid]; -"355 Constant_15264" -> "354 Multiply_12194/fq_weights_1" [label="[576, 1, 1, 1, 1]", style=solid]; -"356 Constant_15263" -> "354 Multiply_12194/fq_weights_1" [label="[576, 1, 1, 1, 1]", style=solid]; -"357 Constant_15262" -> "354 Multiply_12194/fq_weights_1" [label="[576, 1, 1, 1, 1]", style=solid]; -"358 Constant_15261" -> "354 Multiply_12194/fq_weights_1" [label="[576, 1, 1, 1, 1]", style=solid]; -"359 Multiply_12412" -> "354 Multiply_12194/fq_weights_1" [label="[576, 1, 1, 5, 5]", style=solid]; -"360 Constant_15259" -> "210 Transpose_8378/fq_output_0" [label="[1, 576, 1, 1]", style=solid]; -"361 Constant_15258" -> "210 Transpose_8378/fq_output_0" [label="[1, 576, 1, 1]", style=solid]; -"362 Constant_15257" -> "210 Transpose_8378/fq_output_0" [label="[1, 576, 1, 1]", style=solid]; -"363 Constant_15256" -> "210 Transpose_8378/fq_output_0" [label="[1, 576, 1, 1]", style=solid]; -"364 Constant_12188" -> "203 Transpose_8360" [label="[1, 576, 1, 1]", style=solid]; -"365 Multiply_12180/fq_weights_1" -> "199 Multiply_12180" [label="[576, 96, 1, 1]", style=solid]; -"366 Constant_15254" -> "365 Multiply_12180/fq_weights_1" [label="[576, 1, 1, 1]", style=solid]; -"367 Constant_15253" -> "365 
Multiply_12180/fq_weights_1" [label="[576, 1, 1, 1]", style=solid]; -"368 Constant_15252" -> "365 Multiply_12180/fq_weights_1" [label="[576, 1, 1, 1]", style=solid]; -"369 Constant_15251" -> "365 Multiply_12180/fq_weights_1" [label="[576, 1, 1, 1]", style=solid]; -"370 Multiply_12406" -> "365 Multiply_12180/fq_weights_1" [label="[576, 96, 1, 1]", style=solid]; -"371 Constant_15249" -> "195 Transpose_8336/fq_output_0" [label="[]", style=solid]; -"372 Constant_15248" -> "195 Transpose_8336/fq_output_0" [label="[]", style=solid]; -"373 Constant_15247" -> "195 Transpose_8336/fq_output_0" [label="[]", style=solid]; -"374 Constant_15246" -> "195 Transpose_8336/fq_output_0" [label="[]", style=solid]; -"375 Constant_15244" -> "237 Transpose_8334/fq_output_0" [label="[]", style=solid]; -"376 Constant_15243" -> "237 Transpose_8334/fq_output_0" [label="[]", style=solid]; -"377 Constant_15242" -> "237 Transpose_8334/fq_output_0" [label="[]", style=solid]; -"378 Constant_15241" -> "237 Transpose_8334/fq_output_0" [label="[]", style=solid]; -"379 Constant_12174" -> "232 Transpose_8334" [label="[1, 96, 1, 1]", style=solid]; -"380 Multiply_12166/fq_weights_1" -> "227 Multiply_12166" [label="[96, 576, 1, 1]", style=solid]; -"381 Constant_15239" -> "380 Multiply_12166/fq_weights_1" [label="[96, 1, 1, 1]", style=solid]; -"382 Constant_15238" -> "380 Multiply_12166/fq_weights_1" [label="[96, 1, 1, 1]", style=solid]; -"383 Constant_15237" -> "380 Multiply_12166/fq_weights_1" [label="[96, 1, 1, 1]", style=solid]; -"384 Constant_15236" -> "380 Multiply_12166/fq_weights_1" [label="[96, 1, 1, 1]", style=solid]; -"385 Multiply_12400" -> "380 Multiply_12166/fq_weights_1" [label="[96, 576, 1, 1]", style=solid]; -"386 Constant_15234" -> "223 Transpose_8308/fq_output_0" [label="[]", style=solid]; -"387 Constant_15233" -> "223 Transpose_8308/fq_output_0" [label="[]", style=solid]; -"388 Constant_15232" -> "223 Transpose_8308/fq_output_0" [label="[]", style=solid]; -"389 Constant_15231" -> "223 Transpose_8308/fq_output_0" [label="[]", style=solid]; -"390 Constant_15229" -> "255 Transpose_8306/fq_output_0" [label="[]", style=solid]; -"391 Constant_15228" -> "255 Transpose_8306/fq_output_0" [label="[]", style=solid]; -"392 Constant_15227" -> "255 Transpose_8306/fq_output_0" [label="[]", style=solid]; -"393 Constant_15226" -> "255 Transpose_8306/fq_output_0" [label="[]", style=solid]; -"394 Transpose_8288" -> "249 Transpose_10195" [label="[1, 576, 1, 1]", style=solid]; -"395 Convolution_3456/fq_weights_1" -> "245 Convolution_3456" [label="[576, 144, 1, 1]", style=solid]; -"396 Constant_15224" -> "395 Convolution_3456/fq_weights_1" [label="[576, 1, 1, 1]", style=solid]; -"397 Constant_15223" -> "395 Convolution_3456/fq_weights_1" [label="[576, 1, 1, 1]", style=solid]; -"398 Constant_15222" -> "395 Convolution_3456/fq_weights_1" [label="[576, 1, 1, 1]", style=solid]; -"399 Constant_15221" -> "395 Convolution_3456/fq_weights_1" [label="[576, 1, 1, 1]", style=solid]; -"400 Transpose_3455" -> "395 Convolution_3456/fq_weights_1" [label="[576, 144, 1, 1]", style=solid]; -"401 Constant_15219" -> "241 Relu_8283/fq_output_0" [label="[]", style=solid]; -"402 Constant_15218" -> "241 Relu_8283/fq_output_0" [label="[]", style=solid]; -"403 Constant_15217" -> "241 Relu_8283/fq_output_0" [label="[]", style=solid]; -"404 Constant_15216" -> "241 Relu_8283/fq_output_0" [label="[]", style=solid]; -"405 Transpose_8280" -> "231 Transpose_8282" [label="[1, 144, 1, 1]", style=solid]; -"406 Convolution_3447/fq_weights_1" -> "226 Convolution_3447" 
[label="[144, 576, 1, 1]", style=solid]; -"407 Constant_15214" -> "406 Convolution_3447/fq_weights_1" [label="[144, 1, 1, 1]", style=solid]; -"408 Constant_15213" -> "406 Convolution_3447/fq_weights_1" [label="[144, 1, 1, 1]", style=solid]; -"409 Constant_15212" -> "406 Convolution_3447/fq_weights_1" [label="[144, 1, 1, 1]", style=solid]; -"410 Constant_15211" -> "406 Convolution_3447/fq_weights_1" [label="[144, 1, 1, 1]", style=solid]; -"411 Transpose_3446" -> "406 Convolution_3447/fq_weights_1" [label="[144, 576, 1, 1]", style=solid]; -"412 Constant_15209" -> "222 Transpose_8276/fq_output_0" [label="[]", style=solid]; -"413 Constant_15208" -> "222 Transpose_8276/fq_output_0" [label="[]", style=solid]; -"414 Constant_15207" -> "222 Transpose_8276/fq_output_0" [label="[]", style=solid]; -"415 Constant_15206" -> "222 Transpose_8276/fq_output_0" [label="[]", style=solid]; -"416 Constant_8274" -> "218 Transpose_8276" [label="[2]", style=dashed]; -"417 Constant_15204" -> "215 Transpose_8272/fq_output_0" [label="[]", style=solid]; -"418 Constant_15203" -> "215 Transpose_8272/fq_output_0" [label="[]", style=solid]; -"419 Constant_15202" -> "215 Transpose_8272/fq_output_0" [label="[]", style=solid]; -"420 Constant_15201" -> "215 Transpose_8272/fq_output_0" [label="[]", style=solid]; -"421 Constant_12160" -> "209 Transpose_8254" [label="[1, 576, 1, 1]", style=solid]; -"422 Multiply_12152/fq_weights_1" -> "206 Multiply_12152" [label="[576, 1, 1, 5, 5]", style=solid]; -"423 Constant_15199" -> "422 Multiply_12152/fq_weights_1" [label="[576, 1, 1, 1, 1]", style=solid]; -"424 Constant_15198" -> "422 Multiply_12152/fq_weights_1" [label="[576, 1, 1, 1, 1]", style=solid]; -"425 Constant_15197" -> "422 Multiply_12152/fq_weights_1" [label="[576, 1, 1, 1, 1]", style=solid]; -"426 Constant_15196" -> "422 Multiply_12152/fq_weights_1" [label="[576, 1, 1, 1, 1]", style=solid]; -"427 Multiply_12395" -> "422 Multiply_12152/fq_weights_1" [label="[576, 1, 1, 5, 5]", style=solid]; -"428 Constant_15194" -> "202 Transpose_8230/fq_output_0" [label="[1, 576, 1, 1]", style=solid]; -"429 Constant_15193" -> "202 Transpose_8230/fq_output_0" [label="[1, 576, 1, 1]", style=solid]; -"430 Constant_15192" -> "202 Transpose_8230/fq_output_0" [label="[1, 576, 1, 1]", style=solid]; -"431 Constant_15191" -> "202 Transpose_8230/fq_output_0" [label="[1, 576, 1, 1]", style=solid]; -"432 Constant_12146" -> "194 Transpose_8212" [label="[1, 576, 1, 1]", style=solid]; -"433 Multiply_12138/fq_weights_1" -> "190 Multiply_12138" [label="[576, 96, 1, 1]", style=solid]; -"434 Constant_15189" -> "433 Multiply_12138/fq_weights_1" [label="[576, 1, 1, 1]", style=solid]; -"435 Constant_15188" -> "433 Multiply_12138/fq_weights_1" [label="[576, 1, 1, 1]", style=solid]; -"436 Constant_15187" -> "433 Multiply_12138/fq_weights_1" [label="[576, 1, 1, 1]", style=solid]; -"437 Constant_15186" -> "433 Multiply_12138/fq_weights_1" [label="[576, 1, 1, 1]", style=solid]; -"438 Multiply_12389" -> "433 Multiply_12138/fq_weights_1" [label="[576, 96, 1, 1]", style=solid]; -"439 Constant_15184" -> "187 Transpose_8188/fq_output_0" [label="[]", style=solid]; -"440 Constant_15183" -> "187 Transpose_8188/fq_output_0" [label="[]", style=solid]; -"441 Constant_15182" -> "187 Transpose_8188/fq_output_0" [label="[]", style=solid]; -"442 Constant_15181" -> "187 Transpose_8188/fq_output_0" [label="[]", style=solid]; -"443 Constant_12132" -> "184 Transpose_8188" [label="[1, 96, 1, 1]", style=solid]; -"444 Multiply_12124/fq_weights_1" -> "181 Multiply_12124" [label="[96, 288, 1, 
1]", style=solid]; -"445 Constant_15179" -> "444 Multiply_12124/fq_weights_1" [label="[96, 1, 1, 1]", style=solid]; -"446 Constant_15178" -> "444 Multiply_12124/fq_weights_1" [label="[96, 1, 1, 1]", style=solid]; -"447 Constant_15177" -> "444 Multiply_12124/fq_weights_1" [label="[96, 1, 1, 1]", style=solid]; -"448 Constant_15176" -> "444 Multiply_12124/fq_weights_1" [label="[96, 1, 1, 1]", style=solid]; -"449 Multiply_12383" -> "444 Multiply_12124/fq_weights_1" [label="[96, 288, 1, 1]", style=solid]; -"450 Constant_15174" -> "177 Transpose_8164/fq_output_0" [label="[]", style=solid]; -"451 Constant_15173" -> "177 Transpose_8164/fq_output_0" [label="[]", style=solid]; -"452 Constant_15172" -> "177 Transpose_8164/fq_output_0" [label="[]", style=solid]; -"453 Constant_15171" -> "177 Transpose_8164/fq_output_0" [label="[]", style=solid]; -"454 Constant_15169" -> "205 Transpose_8162/fq_output_0" [label="[]", style=solid]; -"455 Constant_15168" -> "205 Transpose_8162/fq_output_0" [label="[]", style=solid]; -"456 Constant_15167" -> "205 Transpose_8162/fq_output_0" [label="[]", style=solid]; -"457 Constant_15166" -> "205 Transpose_8162/fq_output_0" [label="[]", style=solid]; -"458 Transpose_8144" -> "197 Transpose_10127" [label="[1, 288, 1, 1]", style=solid]; -"459 Convolution_3346/fq_weights_1" -> "193 Convolution_3346" [label="[288, 72, 1, 1]", style=solid]; -"460 Constant_15164" -> "459 Convolution_3346/fq_weights_1" [label="[288, 1, 1, 1]", style=solid]; -"461 Constant_15163" -> "459 Convolution_3346/fq_weights_1" [label="[288, 1, 1, 1]", style=solid]; -"462 Constant_15162" -> "459 Convolution_3346/fq_weights_1" [label="[288, 1, 1, 1]", style=solid]; -"463 Constant_15161" -> "459 Convolution_3346/fq_weights_1" [label="[288, 1, 1, 1]", style=solid]; -"464 Transpose_3345" -> "459 Convolution_3346/fq_weights_1" [label="[288, 72, 1, 1]", style=solid]; -"465 Constant_15159" -> "189 Relu_8139/fq_output_0" [label="[]", style=solid]; -"466 Constant_15158" -> "189 Relu_8139/fq_output_0" [label="[]", style=solid]; -"467 Constant_15157" -> "189 Relu_8139/fq_output_0" [label="[]", style=solid]; -"468 Constant_15156" -> "189 Relu_8139/fq_output_0" [label="[]", style=solid]; -"469 Transpose_8136" -> "183 Transpose_8138" [label="[1, 72, 1, 1]", style=solid]; -"470 Convolution_3337/fq_weights_1" -> "180 Convolution_3337" [label="[72, 288, 1, 1]", style=solid]; -"471 Constant_15154" -> "470 Convolution_3337/fq_weights_1" [label="[72, 1, 1, 1]", style=solid]; -"472 Constant_15153" -> "470 Convolution_3337/fq_weights_1" [label="[72, 1, 1, 1]", style=solid]; -"473 Constant_15152" -> "470 Convolution_3337/fq_weights_1" [label="[72, 1, 1, 1]", style=solid]; -"474 Constant_15151" -> "470 Convolution_3337/fq_weights_1" [label="[72, 1, 1, 1]", style=solid]; -"475 Transpose_3336" -> "470 Convolution_3337/fq_weights_1" [label="[72, 288, 1, 1]", style=solid]; -"476 Constant_15149" -> "176 Transpose_8132/fq_output_0" [label="[]", style=solid]; -"477 Constant_15148" -> "176 Transpose_8132/fq_output_0" [label="[]", style=solid]; -"478 Constant_15147" -> "176 Transpose_8132/fq_output_0" [label="[]", style=solid]; -"479 Constant_15146" -> "176 Transpose_8132/fq_output_0" [label="[]", style=solid]; -"480 Constant_8130" -> "172 Transpose_8132" [label="[2]", style=dashed]; -"481 Constant_15144" -> "169 Transpose_8128/fq_output_0" [label="[]", style=solid]; -"482 Constant_15143" -> "169 Transpose_8128/fq_output_0" [label="[]", style=solid]; -"483 Constant_15142" -> "169 Transpose_8128/fq_output_0" [label="[]", style=solid]; -"484 
Constant_15141" -> "169 Transpose_8128/fq_output_0" [label="[]", style=solid]; -"485 Constant_12118" -> "163 Transpose_8110" [label="[1, 288, 1, 1]", style=solid]; -"486 Multiply_12110/fq_weights_1" -> "161 Multiply_12110" [label="[288, 1, 1, 5, 5]", style=solid]; -"487 Constant_15139" -> "486 Multiply_12110/fq_weights_1" [label="[288, 1, 1, 1, 1]", style=solid]; -"488 Constant_15138" -> "486 Multiply_12110/fq_weights_1" [label="[288, 1, 1, 1, 1]", style=solid]; -"489 Constant_15137" -> "486 Multiply_12110/fq_weights_1" [label="[288, 1, 1, 1, 1]", style=solid]; -"490 Constant_15136" -> "486 Multiply_12110/fq_weights_1" [label="[288, 1, 1, 1, 1]", style=solid]; -"491 Multiply_12378" -> "486 Multiply_12110/fq_weights_1" [label="[288, 1, 1, 5, 5]", style=solid]; -"492 Constant_15134" -> "159 Transpose_8061/fq_output_0" [label="[1, 288, 1, 1]", style=solid]; -"493 Constant_15133" -> "159 Transpose_8061/fq_output_0" [label="[1, 288, 1, 1]", style=solid]; -"494 Constant_15132" -> "159 Transpose_8061/fq_output_0" [label="[1, 288, 1, 1]", style=solid]; -"495 Constant_15131" -> "159 Transpose_8061/fq_output_0" [label="[1, 288, 1, 1]", style=solid]; -"496 Constant_12104" -> "154 Transpose_8043" [label="[1, 288, 1, 1]", style=solid]; -"497 Multiply_12096/fq_weights_1" -> "151 Multiply_12096" [label="[288, 48, 1, 1]", style=solid]; -"498 Constant_15129" -> "497 Multiply_12096/fq_weights_1" [label="[288, 1, 1, 1]", style=solid]; -"499 Constant_15128" -> "497 Multiply_12096/fq_weights_1" [label="[288, 1, 1, 1]", style=solid]; -"500 Constant_15127" -> "497 Multiply_12096/fq_weights_1" [label="[288, 1, 1, 1]", style=solid]; -"501 Constant_15126" -> "497 Multiply_12096/fq_weights_1" [label="[288, 1, 1, 1]", style=solid]; -"502 Multiply_12372" -> "497 Multiply_12096/fq_weights_1" [label="[288, 48, 1, 1]", style=solid]; -"503 Constant_15124" -> "147 Transpose_8019/fq_output_0" [label="[]", style=solid]; -"504 Constant_15123" -> "147 Transpose_8019/fq_output_0" [label="[]", style=solid]; -"505 Constant_15122" -> "147 Transpose_8019/fq_output_0" [label="[]", style=solid]; -"506 Constant_15121" -> "147 Transpose_8019/fq_output_0" [label="[]", style=solid]; -"507 Constant_15119" -> "179 Transpose_8017/fq_output_0" [label="[]", style=solid]; -"508 Constant_15118" -> "179 Transpose_8017/fq_output_0" [label="[]", style=solid]; -"509 Constant_15117" -> "179 Transpose_8017/fq_output_0" [label="[]", style=solid]; -"510 Constant_15116" -> "179 Transpose_8017/fq_output_0" [label="[]", style=solid]; -"511 Constant_12090" -> "175 Transpose_8017" [label="[1, 48, 1, 1]", style=solid]; -"512 Multiply_12082/fq_weights_1" -> "171 Multiply_12082" [label="[48, 144, 1, 1]", style=solid]; -"513 Constant_15114" -> "512 Multiply_12082/fq_weights_1" [label="[48, 1, 1, 1]", style=solid]; -"514 Constant_15113" -> "512 Multiply_12082/fq_weights_1" [label="[48, 1, 1, 1]", style=solid]; -"515 Constant_15112" -> "512 Multiply_12082/fq_weights_1" [label="[48, 1, 1, 1]", style=solid]; -"516 Constant_15111" -> "512 Multiply_12082/fq_weights_1" [label="[48, 1, 1, 1]", style=solid]; -"517 Multiply_12366" -> "512 Multiply_12082/fq_weights_1" [label="[48, 144, 1, 1]", style=solid]; -"518 Constant_15109" -> "168 Transpose_7991/fq_output_0" [label="[]", style=solid]; -"519 Constant_15108" -> "168 Transpose_7991/fq_output_0" [label="[]", style=solid]; -"520 Constant_15107" -> "168 Transpose_7991/fq_output_0" [label="[]", style=solid]; -"521 Constant_15106" -> "168 Transpose_7991/fq_output_0" [label="[]", style=solid]; -"522 Constant_15104" -> "196 
Transpose_7989/fq_output_0" [label="[]", style=solid]; -"523 Constant_15103" -> "196 Transpose_7989/fq_output_0" [label="[]", style=solid]; -"524 Constant_15102" -> "196 Transpose_7989/fq_output_0" [label="[]", style=solid]; -"525 Constant_15101" -> "196 Transpose_7989/fq_output_0" [label="[]", style=solid]; -"526 Transpose_7971" -> "188 Transpose_10027" [label="[1, 144, 1, 1]", style=solid]; -"527 Convolution_3209/fq_weights_1" -> "185 Convolution_3209" [label="[144, 40, 1, 1]", style=solid]; -"528 Constant_15099" -> "527 Convolution_3209/fq_weights_1" [label="[144, 1, 1, 1]", style=solid]; -"529 Constant_15098" -> "527 Convolution_3209/fq_weights_1" [label="[144, 1, 1, 1]", style=solid]; -"530 Constant_15097" -> "527 Convolution_3209/fq_weights_1" [label="[144, 1, 1, 1]", style=solid]; -"531 Constant_15096" -> "527 Convolution_3209/fq_weights_1" [label="[144, 1, 1, 1]", style=solid]; -"532 Transpose_3208" -> "527 Convolution_3209/fq_weights_1" [label="[144, 40, 1, 1]", style=solid]; -"533 Constant_15094" -> "182 Relu_7966/fq_output_0" [label="[]", style=solid]; -"534 Constant_15093" -> "182 Relu_7966/fq_output_0" [label="[]", style=solid]; -"535 Constant_15092" -> "182 Relu_7966/fq_output_0" [label="[]", style=solid]; -"536 Constant_15091" -> "182 Relu_7966/fq_output_0" [label="[]", style=solid]; -"537 Transpose_7963" -> "174 Transpose_7965" [label="[1, 40, 1, 1]", style=solid]; -"538 Convolution_3200/fq_weights_1" -> "170 Convolution_3200" [label="[40, 144, 1, 1]", style=solid]; -"539 Constant_15089" -> "538 Convolution_3200/fq_weights_1" [label="[40, 1, 1, 1]", style=solid]; -"540 Constant_15088" -> "538 Convolution_3200/fq_weights_1" [label="[40, 1, 1, 1]", style=solid]; -"541 Constant_15087" -> "538 Convolution_3200/fq_weights_1" [label="[40, 1, 1, 1]", style=solid]; -"542 Constant_15086" -> "538 Convolution_3200/fq_weights_1" [label="[40, 1, 1, 1]", style=solid]; -"543 Transpose_3199" -> "538 Convolution_3200/fq_weights_1" [label="[40, 144, 1, 1]", style=solid]; -"544 Constant_15084" -> "167 Transpose_7959/fq_output_0" [label="[]", style=solid]; -"545 Constant_15083" -> "167 Transpose_7959/fq_output_0" [label="[]", style=solid]; -"546 Constant_15082" -> "167 Transpose_7959/fq_output_0" [label="[]", style=solid]; -"547 Constant_15081" -> "167 Transpose_7959/fq_output_0" [label="[]", style=solid]; -"548 Constant_7957" -> "164 Transpose_7959" [label="[2]", style=dashed]; -"549 Constant_15079" -> "162 Transpose_7955/fq_output_0" [label="[]", style=solid]; -"550 Constant_15078" -> "162 Transpose_7955/fq_output_0" [label="[]", style=solid]; -"551 Constant_15077" -> "162 Transpose_7955/fq_output_0" [label="[]", style=solid]; -"552 Constant_15076" -> "162 Transpose_7955/fq_output_0" [label="[]", style=solid]; -"553 Constant_12076" -> "158 Transpose_7937" [label="[1, 144, 1, 1]", style=solid]; -"554 Multiply_12068/fq_weights_1" -> "156 Multiply_12068" [label="[144, 1, 1, 5, 5]", style=solid]; -"555 Constant_15074" -> "554 Multiply_12068/fq_weights_1" [label="[144, 1, 1, 1, 1]", style=solid]; -"556 Constant_15073" -> "554 Multiply_12068/fq_weights_1" [label="[144, 1, 1, 1, 1]", style=solid]; -"557 Constant_15072" -> "554 Multiply_12068/fq_weights_1" [label="[144, 1, 1, 1, 1]", style=solid]; -"558 Constant_15071" -> "554 Multiply_12068/fq_weights_1" [label="[144, 1, 1, 1, 1]", style=solid]; -"559 Multiply_12361" -> "554 Multiply_12068/fq_weights_1" [label="[144, 1, 1, 5, 5]", style=solid]; -"560 Constant_15069" -> "153 Transpose_7913/fq_output_0" [label="[1, 144, 1, 1]", style=solid]; -"561 
Constant_15068" -> "153 Transpose_7913/fq_output_0" [label="[1, 144, 1, 1]", style=solid]; -"562 Constant_15067" -> "153 Transpose_7913/fq_output_0" [label="[1, 144, 1, 1]", style=solid]; -"563 Constant_15066" -> "153 Transpose_7913/fq_output_0" [label="[1, 144, 1, 1]", style=solid]; -"564 Constant_12062" -> "146 Transpose_7895" [label="[1, 144, 1, 1]", style=solid]; -"565 Multiply_12054/fq_weights_1" -> "142 Multiply_12054" [label="[144, 48, 1, 1]", style=solid]; -"566 Constant_15064" -> "565 Multiply_12054/fq_weights_1" [label="[144, 1, 1, 1]", style=solid]; -"567 Constant_15063" -> "565 Multiply_12054/fq_weights_1" [label="[144, 1, 1, 1]", style=solid]; -"568 Constant_15062" -> "565 Multiply_12054/fq_weights_1" [label="[144, 1, 1, 1]", style=solid]; -"569 Constant_15061" -> "565 Multiply_12054/fq_weights_1" [label="[144, 1, 1, 1]", style=solid]; -"570 Multiply_12355" -> "565 Multiply_12054/fq_weights_1" [label="[144, 48, 1, 1]", style=solid]; -"571 Constant_15059" -> "138 Transpose_7871/fq_output_0" [label="[]", style=solid]; -"572 Constant_15058" -> "138 Transpose_7871/fq_output_0" [label="[]", style=solid]; -"573 Constant_15057" -> "138 Transpose_7871/fq_output_0" [label="[]", style=solid]; -"574 Constant_15056" -> "138 Transpose_7871/fq_output_0" [label="[]", style=solid]; -"575 Constant_12048" -> "134 Transpose_7871" [label="[1, 48, 1, 1]", style=solid]; -"576 Multiply_12040/fq_weights_1" -> "130 Multiply_12040" [label="[48, 120, 1, 1]", style=solid]; -"577 Constant_15054" -> "576 Multiply_12040/fq_weights_1" [label="[48, 1, 1, 1]", style=solid]; -"578 Constant_15053" -> "576 Multiply_12040/fq_weights_1" [label="[48, 1, 1, 1]", style=solid]; -"579 Constant_15052" -> "576 Multiply_12040/fq_weights_1" [label="[48, 1, 1, 1]", style=solid]; -"580 Constant_15051" -> "576 Multiply_12040/fq_weights_1" [label="[48, 1, 1, 1]", style=solid]; -"581 Multiply_12349" -> "576 Multiply_12040/fq_weights_1" [label="[48, 120, 1, 1]", style=solid]; -"582 Constant_15049" -> "125 Transpose_7847/fq_output_0" [label="[]", style=solid]; -"583 Constant_15048" -> "125 Transpose_7847/fq_output_0" [label="[]", style=solid]; -"584 Constant_15047" -> "125 Transpose_7847/fq_output_0" [label="[]", style=solid]; -"585 Constant_15046" -> "125 Transpose_7847/fq_output_0" [label="[]", style=solid]; -"586 Constant_15044" -> "155 Transpose_7845/fq_output_0" [label="[]", style=solid]; -"587 Constant_15043" -> "155 Transpose_7845/fq_output_0" [label="[]", style=solid]; -"588 Constant_15042" -> "155 Transpose_7845/fq_output_0" [label="[]", style=solid]; -"589 Constant_15041" -> "155 Transpose_7845/fq_output_0" [label="[]", style=solid]; -"590 Transpose_7827" -> "149 Transpose_9959" [label="[1, 120, 1, 1]", style=solid]; -"591 Convolution_3099/fq_weights_1" -> "145 Convolution_3099" [label="[120, 32, 1, 1]", style=solid]; -"592 Constant_15039" -> "591 Convolution_3099/fq_weights_1" [label="[120, 1, 1, 1]", style=solid]; -"593 Constant_15038" -> "591 Convolution_3099/fq_weights_1" [label="[120, 1, 1, 1]", style=solid]; -"594 Constant_15037" -> "591 Convolution_3099/fq_weights_1" [label="[120, 1, 1, 1]", style=solid]; -"595 Constant_15036" -> "591 Convolution_3099/fq_weights_1" [label="[120, 1, 1, 1]", style=solid]; -"596 Transpose_3098" -> "591 Convolution_3099/fq_weights_1" [label="[120, 32, 1, 1]", style=solid]; -"597 Constant_15034" -> "141 Relu_7822/fq_output_0" [label="[]", style=solid]; -"598 Constant_15033" -> "141 Relu_7822/fq_output_0" [label="[]", style=solid]; -"599 Constant_15032" -> "141 Relu_7822/fq_output_0" 
[label="[]", style=solid]; -"600 Constant_15031" -> "141 Relu_7822/fq_output_0" [label="[]", style=solid]; -"601 Transpose_7819" -> "133 Transpose_7821" [label="[1, 32, 1, 1]", style=solid]; -"602 Convolution_3090/fq_weights_1" -> "129 Convolution_3090" [label="[32, 120, 1, 1]", style=solid]; -"603 Constant_15029" -> "602 Convolution_3090/fq_weights_1" [label="[32, 1, 1, 1]", style=solid]; -"604 Constant_15028" -> "602 Convolution_3090/fq_weights_1" [label="[32, 1, 1, 1]", style=solid]; -"605 Constant_15027" -> "602 Convolution_3090/fq_weights_1" [label="[32, 1, 1, 1]", style=solid]; -"606 Constant_15026" -> "602 Convolution_3090/fq_weights_1" [label="[32, 1, 1, 1]", style=solid]; -"607 Transpose_3089" -> "602 Convolution_3090/fq_weights_1" [label="[32, 120, 1, 1]", style=solid]; -"608 Constant_15024" -> "124 Transpose_7815/fq_output_0" [label="[]", style=solid]; -"609 Constant_15023" -> "124 Transpose_7815/fq_output_0" [label="[]", style=solid]; -"610 Constant_15022" -> "124 Transpose_7815/fq_output_0" [label="[]", style=solid]; -"611 Constant_15021" -> "124 Transpose_7815/fq_output_0" [label="[]", style=solid]; -"612 Constant_7813" -> "119 Transpose_7815" [label="[2]", style=dashed]; -"613 Constant_15019" -> "114 Transpose_7811/fq_output_0" [label="[]", style=solid]; -"614 Constant_15018" -> "114 Transpose_7811/fq_output_0" [label="[]", style=solid]; -"615 Constant_15017" -> "114 Transpose_7811/fq_output_0" [label="[]", style=solid]; -"616 Constant_15016" -> "114 Transpose_7811/fq_output_0" [label="[]", style=solid]; -"617 Constant_12034" -> "104 Transpose_7793" [label="[1, 120, 1, 1]", style=solid]; -"618 Multiply_12026/fq_weights_1" -> "100 Multiply_12026" [label="[120, 1, 1, 5, 5]", style=solid]; -"619 Constant_15014" -> "618 Multiply_12026/fq_weights_1" [label="[120, 1, 1, 1, 1]", style=solid]; -"620 Constant_15013" -> "618 Multiply_12026/fq_weights_1" [label="[120, 1, 1, 1, 1]", style=solid]; -"621 Constant_15012" -> "618 Multiply_12026/fq_weights_1" [label="[120, 1, 1, 1, 1]", style=solid]; -"622 Constant_15011" -> "618 Multiply_12026/fq_weights_1" [label="[120, 1, 1, 1, 1]", style=solid]; -"623 Multiply_12344" -> "618 Multiply_12026/fq_weights_1" [label="[120, 1, 1, 5, 5]", style=solid]; -"624 Constant_15009" -> "96 Transpose_7769/fq_output_0" [label="[1, 120, 1, 1]", style=solid]; -"625 Constant_15008" -> "96 Transpose_7769/fq_output_0" [label="[1, 120, 1, 1]", style=solid]; -"626 Constant_15007" -> "96 Transpose_7769/fq_output_0" [label="[1, 120, 1, 1]", style=solid]; -"627 Constant_15006" -> "96 Transpose_7769/fq_output_0" [label="[1, 120, 1, 1]", style=solid]; -"628 Constant_12020" -> "90 Transpose_7751" [label="[1, 120, 1, 1]", style=solid]; -"629 Multiply_12012/fq_weights_1" -> "87 Multiply_12012" [label="[120, 40, 1, 1]", style=solid]; -"630 Constant_15004" -> "629 Multiply_12012/fq_weights_1" [label="[120, 1, 1, 1]", style=solid]; -"631 Constant_15003" -> "629 Multiply_12012/fq_weights_1" [label="[120, 1, 1, 1]", style=solid]; -"632 Constant_15002" -> "629 Multiply_12012/fq_weights_1" [label="[120, 1, 1, 1]", style=solid]; -"633 Constant_15001" -> "629 Multiply_12012/fq_weights_1" [label="[120, 1, 1, 1]", style=solid]; -"634 Multiply_12338" -> "629 Multiply_12012/fq_weights_1" [label="[120, 40, 1, 1]", style=solid]; -"635 Constant_14999" -> "83 Transpose_7727/fq_output_0" [label="[]", style=solid]; -"636 Constant_14998" -> "83 Transpose_7727/fq_output_0" [label="[]", style=solid]; -"637 Constant_14997" -> "83 Transpose_7727/fq_output_0" [label="[]", style=solid]; -"638 
Constant_14996" -> "83 Transpose_7727/fq_output_0" [label="[]", style=solid]; -"639 Constant_14994" -> "128 Transpose_7725/fq_output_0" [label="[]", style=solid]; -"640 Constant_14993" -> "128 Transpose_7725/fq_output_0" [label="[]", style=solid]; -"641 Constant_14992" -> "128 Transpose_7725/fq_output_0" [label="[]", style=solid]; -"642 Constant_14991" -> "128 Transpose_7725/fq_output_0" [label="[]", style=solid]; -"643 Constant_12006" -> "123 Transpose_7725" [label="[1, 40, 1, 1]", style=solid]; -"644 Multiply_11998/fq_weights_1" -> "118 Multiply_11998" [label="[40, 240, 1, 1]", style=solid]; -"645 Constant_14989" -> "644 Multiply_11998/fq_weights_1" [label="[40, 1, 1, 1]", style=solid]; -"646 Constant_14988" -> "644 Multiply_11998/fq_weights_1" [label="[40, 1, 1, 1]", style=solid]; -"647 Constant_14987" -> "644 Multiply_11998/fq_weights_1" [label="[40, 1, 1, 1]", style=solid]; -"648 Constant_14986" -> "644 Multiply_11998/fq_weights_1" [label="[40, 1, 1, 1]", style=solid]; -"649 Multiply_12332" -> "644 Multiply_11998/fq_weights_1" [label="[40, 240, 1, 1]", style=solid]; -"650 Constant_14984" -> "113 Transpose_7699/fq_output_0" [label="[]", style=solid]; -"651 Constant_14983" -> "113 Transpose_7699/fq_output_0" [label="[]", style=solid]; -"652 Constant_14982" -> "113 Transpose_7699/fq_output_0" [label="[]", style=solid]; -"653 Constant_14981" -> "113 Transpose_7699/fq_output_0" [label="[]", style=solid]; -"654 Constant_14979" -> "148 Transpose_7697/fq_output_0" [label="[]", style=solid]; -"655 Constant_14978" -> "148 Transpose_7697/fq_output_0" [label="[]", style=solid]; -"656 Constant_14977" -> "148 Transpose_7697/fq_output_0" [label="[]", style=solid]; -"657 Constant_14976" -> "148 Transpose_7697/fq_output_0" [label="[]", style=solid]; -"658 Transpose_7679" -> "140 Transpose_9859" [label="[1, 240, 1, 1]", style=solid]; -"659 Convolution_2988/fq_weights_1" -> "136 Convolution_2988" [label="[240, 64, 1, 1]", style=solid]; -"660 Constant_14974" -> "659 Convolution_2988/fq_weights_1" [label="[240, 1, 1, 1]", style=solid]; -"661 Constant_14973" -> "659 Convolution_2988/fq_weights_1" [label="[240, 1, 1, 1]", style=solid]; -"662 Constant_14972" -> "659 Convolution_2988/fq_weights_1" [label="[240, 1, 1, 1]", style=solid]; -"663 Constant_14971" -> "659 Convolution_2988/fq_weights_1" [label="[240, 1, 1, 1]", style=solid]; -"664 Transpose_2987" -> "659 Convolution_2988/fq_weights_1" [label="[240, 64, 1, 1]", style=solid]; -"665 Constant_14969" -> "132 Relu_7674/fq_output_0" [label="[]", style=solid]; -"666 Constant_14968" -> "132 Relu_7674/fq_output_0" [label="[]", style=solid]; -"667 Constant_14967" -> "132 Relu_7674/fq_output_0" [label="[]", style=solid]; -"668 Constant_14966" -> "132 Relu_7674/fq_output_0" [label="[]", style=solid]; -"669 Transpose_7671" -> "122 Transpose_7673" [label="[1, 64, 1, 1]", style=solid]; -"670 Convolution_2979/fq_weights_1" -> "117 Convolution_2979" [label="[64, 240, 1, 1]", style=solid]; -"671 Constant_14964" -> "670 Convolution_2979/fq_weights_1" [label="[64, 1, 1, 1]", style=solid]; -"672 Constant_14963" -> "670 Convolution_2979/fq_weights_1" [label="[64, 1, 1, 1]", style=solid]; -"673 Constant_14962" -> "670 Convolution_2979/fq_weights_1" [label="[64, 1, 1, 1]", style=solid]; -"674 Constant_14961" -> "670 Convolution_2979/fq_weights_1" [label="[64, 1, 1, 1]", style=solid]; -"675 Transpose_2978" -> "670 Convolution_2979/fq_weights_1" [label="[64, 240, 1, 1]", style=solid]; -"676 Constant_14959" -> "112 Transpose_7667/fq_output_0" [label="[]", style=solid]; -"677 
Constant_14958" -> "112 Transpose_7667/fq_output_0" [label="[]", style=solid]; -"678 Constant_14957" -> "112 Transpose_7667/fq_output_0" [label="[]", style=solid]; -"679 Constant_14956" -> "112 Transpose_7667/fq_output_0" [label="[]", style=solid]; -"680 Constant_7665" -> "107 Transpose_7667" [label="[2]", style=dashed]; -"681 Constant_14954" -> "103 Transpose_7663/fq_output_0" [label="[]", style=solid]; -"682 Constant_14953" -> "103 Transpose_7663/fq_output_0" [label="[]", style=solid]; -"683 Constant_14952" -> "103 Transpose_7663/fq_output_0" [label="[]", style=solid]; -"684 Constant_14951" -> "103 Transpose_7663/fq_output_0" [label="[]", style=solid]; -"685 Constant_11992" -> "95 Transpose_7645" [label="[1, 240, 1, 1]", style=solid]; -"686 Multiply_11984/fq_weights_1" -> "92 Multiply_11984" [label="[240, 1, 1, 5, 5]", style=solid]; -"687 Constant_14949" -> "686 Multiply_11984/fq_weights_1" [label="[240, 1, 1, 1, 1]", style=solid]; -"688 Constant_14948" -> "686 Multiply_11984/fq_weights_1" [label="[240, 1, 1, 1, 1]", style=solid]; -"689 Constant_14947" -> "686 Multiply_11984/fq_weights_1" [label="[240, 1, 1, 1, 1]", style=solid]; -"690 Constant_14946" -> "686 Multiply_11984/fq_weights_1" [label="[240, 1, 1, 1, 1]", style=solid]; -"691 Multiply_12327" -> "686 Multiply_11984/fq_weights_1" [label="[240, 1, 1, 5, 5]", style=solid]; -"692 Constant_14944" -> "89 Transpose_7621/fq_output_0" [label="[1, 240, 1, 1]", style=solid]; -"693 Constant_14943" -> "89 Transpose_7621/fq_output_0" [label="[1, 240, 1, 1]", style=solid]; -"694 Constant_14942" -> "89 Transpose_7621/fq_output_0" [label="[1, 240, 1, 1]", style=solid]; -"695 Constant_14941" -> "89 Transpose_7621/fq_output_0" [label="[1, 240, 1, 1]", style=solid]; -"696 Constant_11978" -> "82 Transpose_7603" [label="[1, 240, 1, 1]", style=solid]; -"697 Multiply_11970/fq_weights_1" -> "78 Multiply_11970" [label="[240, 40, 1, 1]", style=solid]; -"698 Constant_14939" -> "697 Multiply_11970/fq_weights_1" [label="[240, 1, 1, 1]", style=solid]; -"699 Constant_14938" -> "697 Multiply_11970/fq_weights_1" [label="[240, 1, 1, 1]", style=solid]; -"700 Constant_14937" -> "697 Multiply_11970/fq_weights_1" [label="[240, 1, 1, 1]", style=solid]; -"701 Constant_14936" -> "697 Multiply_11970/fq_weights_1" [label="[240, 1, 1, 1]", style=solid]; -"702 Multiply_12321" -> "697 Multiply_11970/fq_weights_1" [label="[240, 40, 1, 1]", style=solid]; -"703 Constant_14934" -> "75 Transpose_7579/fq_output_0" [label="[]", style=solid]; -"704 Constant_14933" -> "75 Transpose_7579/fq_output_0" [label="[]", style=solid]; -"705 Constant_14932" -> "75 Transpose_7579/fq_output_0" [label="[]", style=solid]; -"706 Constant_14931" -> "75 Transpose_7579/fq_output_0" [label="[]", style=solid]; -"707 Constant_14929" -> "116 Transpose_7577/fq_output_0" [label="[]", style=solid]; -"708 Constant_14928" -> "116 Transpose_7577/fq_output_0" [label="[]", style=solid]; -"709 Constant_14927" -> "116 Transpose_7577/fq_output_0" [label="[]", style=solid]; -"710 Constant_14926" -> "116 Transpose_7577/fq_output_0" [label="[]", style=solid]; -"711 Constant_11964" -> "111 Transpose_7577" [label="[1, 40, 1, 1]", style=solid]; -"712 Multiply_11956/fq_weights_1" -> "106 Multiply_11956" [label="[40, 240, 1, 1]", style=solid]; -"713 Constant_14924" -> "712 Multiply_11956/fq_weights_1" [label="[40, 1, 1, 1]", style=solid]; -"714 Constant_14923" -> "712 Multiply_11956/fq_weights_1" [label="[40, 1, 1, 1]", style=solid]; -"715 Constant_14922" -> "712 Multiply_11956/fq_weights_1" [label="[40, 1, 1, 1]", 
style=solid]; -"716 Constant_14921" -> "712 Multiply_11956/fq_weights_1" [label="[40, 1, 1, 1]", style=solid]; -"717 Multiply_12315" -> "712 Multiply_11956/fq_weights_1" [label="[40, 240, 1, 1]", style=solid]; -"718 Constant_14919" -> "102 Transpose_7551/fq_output_0" [label="[]", style=solid]; -"719 Constant_14918" -> "102 Transpose_7551/fq_output_0" [label="[]", style=solid]; -"720 Constant_14917" -> "102 Transpose_7551/fq_output_0" [label="[]", style=solid]; -"721 Constant_14916" -> "102 Transpose_7551/fq_output_0" [label="[]", style=solid]; -"722 Constant_14914" -> "139 Transpose_7549/fq_output_0" [label="[]", style=solid]; -"723 Constant_14913" -> "139 Transpose_7549/fq_output_0" [label="[]", style=solid]; -"724 Constant_14912" -> "139 Transpose_7549/fq_output_0" [label="[]", style=solid]; -"725 Constant_14911" -> "139 Transpose_7549/fq_output_0" [label="[]", style=solid]; -"726 Transpose_7531" -> "131 Transpose_9759" [label="[1, 240, 1, 1]", style=solid]; -"727 Convolution_2877/fq_weights_1" -> "126 Convolution_2877" [label="[240, 64, 1, 1]", style=solid]; -"728 Constant_14909" -> "727 Convolution_2877/fq_weights_1" [label="[240, 1, 1, 1]", style=solid]; -"729 Constant_14908" -> "727 Convolution_2877/fq_weights_1" [label="[240, 1, 1, 1]", style=solid]; -"730 Constant_14907" -> "727 Convolution_2877/fq_weights_1" [label="[240, 1, 1, 1]", style=solid]; -"731 Constant_14906" -> "727 Convolution_2877/fq_weights_1" [label="[240, 1, 1, 1]", style=solid]; -"732 Transpose_2876" -> "727 Convolution_2877/fq_weights_1" [label="[240, 64, 1, 1]", style=solid]; -"733 Constant_14904" -> "121 Relu_7526/fq_output_0" [label="[]", style=solid]; -"734 Constant_14903" -> "121 Relu_7526/fq_output_0" [label="[]", style=solid]; -"735 Constant_14902" -> "121 Relu_7526/fq_output_0" [label="[]", style=solid]; -"736 Constant_14901" -> "121 Relu_7526/fq_output_0" [label="[]", style=solid]; -"737 Transpose_7523" -> "110 Transpose_7525" [label="[1, 64, 1, 1]", style=solid]; -"738 Convolution_2868/fq_weights_1" -> "105 Convolution_2868" [label="[64, 240, 1, 1]", style=solid]; -"739 Constant_14899" -> "738 Convolution_2868/fq_weights_1" [label="[64, 1, 1, 1]", style=solid]; -"740 Constant_14898" -> "738 Convolution_2868/fq_weights_1" [label="[64, 1, 1, 1]", style=solid]; -"741 Constant_14897" -> "738 Convolution_2868/fq_weights_1" [label="[64, 1, 1, 1]", style=solid]; -"742 Constant_14896" -> "738 Convolution_2868/fq_weights_1" [label="[64, 1, 1, 1]", style=solid]; -"743 Transpose_2867" -> "738 Convolution_2868/fq_weights_1" [label="[64, 240, 1, 1]", style=solid]; -"744 Constant_14894" -> "101 Transpose_7519/fq_output_0" [label="[]", style=solid]; -"745 Constant_14893" -> "101 Transpose_7519/fq_output_0" [label="[]", style=solid]; -"746 Constant_14892" -> "101 Transpose_7519/fq_output_0" [label="[]", style=solid]; -"747 Constant_14891" -> "101 Transpose_7519/fq_output_0" [label="[]", style=solid]; -"748 Constant_7517" -> "97 Transpose_7519" [label="[2]", style=dashed]; -"749 Constant_14889" -> "94 Transpose_7515/fq_output_0" [label="[]", style=solid]; -"750 Constant_14888" -> "94 Transpose_7515/fq_output_0" [label="[]", style=solid]; -"751 Constant_14887" -> "94 Transpose_7515/fq_output_0" [label="[]", style=solid]; -"752 Constant_14886" -> "94 Transpose_7515/fq_output_0" [label="[]", style=solid]; -"753 Constant_11950" -> "88 Transpose_7497" [label="[1, 240, 1, 1]", style=solid]; -"754 Multiply_11942/fq_weights_1" -> "85 Multiply_11942" [label="[240, 1, 1, 5, 5]", style=solid]; -"755 Constant_14884" -> "754 
Multiply_11942/fq_weights_1" [label="[240, 1, 1, 1, 1]", style=solid]; -"756 Constant_14883" -> "754 Multiply_11942/fq_weights_1" [label="[240, 1, 1, 1, 1]", style=solid]; -"757 Constant_14882" -> "754 Multiply_11942/fq_weights_1" [label="[240, 1, 1, 1, 1]", style=solid]; -"758 Constant_14881" -> "754 Multiply_11942/fq_weights_1" [label="[240, 1, 1, 1, 1]", style=solid]; -"759 Multiply_12310" -> "754 Multiply_11942/fq_weights_1" [label="[240, 1, 1, 5, 5]", style=solid]; -"760 Constant_14879" -> "81 Transpose_7473/fq_output_0" [label="[1, 240, 1, 1]", style=solid]; -"761 Constant_14878" -> "81 Transpose_7473/fq_output_0" [label="[1, 240, 1, 1]", style=solid]; -"762 Constant_14877" -> "81 Transpose_7473/fq_output_0" [label="[1, 240, 1, 1]", style=solid]; -"763 Constant_14876" -> "81 Transpose_7473/fq_output_0" [label="[1, 240, 1, 1]", style=solid]; -"764 Constant_11936" -> "74 Transpose_7455" [label="[1, 240, 1, 1]", style=solid]; -"765 Multiply_11928/fq_weights_1" -> "71 Multiply_11928" [label="[240, 40, 1, 1]", style=solid]; -"766 Constant_14874" -> "765 Multiply_11928/fq_weights_1" [label="[240, 1, 1, 1]", style=solid]; -"767 Constant_14873" -> "765 Multiply_11928/fq_weights_1" [label="[240, 1, 1, 1]", style=solid]; -"768 Constant_14872" -> "765 Multiply_11928/fq_weights_1" [label="[240, 1, 1, 1]", style=solid]; -"769 Constant_14871" -> "765 Multiply_11928/fq_weights_1" [label="[240, 1, 1, 1]", style=solid]; -"770 Multiply_12304" -> "765 Multiply_11928/fq_weights_1" [label="[240, 40, 1, 1]", style=solid]; -"771 Constant_14869" -> "69 Transpose_7431/fq_output_0" [label="[]", style=solid]; -"772 Constant_14868" -> "69 Transpose_7431/fq_output_0" [label="[]", style=solid]; -"773 Constant_14867" -> "69 Transpose_7431/fq_output_0" [label="[]", style=solid]; -"774 Constant_14866" -> "69 Transpose_7431/fq_output_0" [label="[]", style=solid]; -"775 Constant_11922" -> "67 Transpose_7431" [label="[1, 40, 1, 1]", style=solid]; -"776 Multiply_11914/fq_weights_1" -> "65 Multiply_11914" [label="[40, 96, 1, 1]", style=solid]; -"777 Constant_14864" -> "776 Multiply_11914/fq_weights_1" [label="[40, 1, 1, 1]", style=solid]; -"778 Constant_14863" -> "776 Multiply_11914/fq_weights_1" [label="[40, 1, 1, 1]", style=solid]; -"779 Constant_14862" -> "776 Multiply_11914/fq_weights_1" [label="[40, 1, 1, 1]", style=solid]; -"780 Constant_14861" -> "776 Multiply_11914/fq_weights_1" [label="[40, 1, 1, 1]", style=solid]; -"781 Multiply_12298" -> "776 Multiply_11914/fq_weights_1" [label="[40, 96, 1, 1]", style=solid]; -"782 Constant_14859" -> "63 Transpose_7407/fq_output_0" [label="[]", style=solid]; -"783 Constant_14858" -> "63 Transpose_7407/fq_output_0" [label="[]", style=solid]; -"784 Constant_14857" -> "63 Transpose_7407/fq_output_0" [label="[]", style=solid]; -"785 Constant_14856" -> "63 Transpose_7407/fq_output_0" [label="[]", style=solid]; -"786 Constant_14854" -> "84 Transpose_7405/fq_output_0" [label="[]", style=solid]; -"787 Constant_14853" -> "84 Transpose_7405/fq_output_0" [label="[]", style=solid]; -"788 Constant_14852" -> "84 Transpose_7405/fq_output_0" [label="[]", style=solid]; -"789 Constant_14851" -> "84 Transpose_7405/fq_output_0" [label="[]", style=solid]; -"790 Transpose_7387" -> "76 Transpose_9691" [label="[1, 96, 1, 1]", style=solid]; -"791 Convolution_2767/fq_weights_1" -> "73 Convolution_2767" [label="[96, 24, 1, 1]", style=solid]; -"792 Constant_14849" -> "791 Convolution_2767/fq_weights_1" [label="[96, 1, 1, 1]", style=solid]; -"793 Constant_14848" -> "791 Convolution_2767/fq_weights_1" 
[label="[96, 1, 1, 1]", style=solid]; -"794 Constant_14847" -> "791 Convolution_2767/fq_weights_1" [label="[96, 1, 1, 1]", style=solid]; -"795 Constant_14846" -> "791 Convolution_2767/fq_weights_1" [label="[96, 1, 1, 1]", style=solid]; -"796 Transpose_2766" -> "791 Convolution_2767/fq_weights_1" [label="[96, 24, 1, 1]", style=solid]; -"797 Constant_14844" -> "70 Relu_7382/fq_output_0" [label="[]", style=solid]; -"798 Constant_14843" -> "70 Relu_7382/fq_output_0" [label="[]", style=solid]; -"799 Constant_14842" -> "70 Relu_7382/fq_output_0" [label="[]", style=solid]; -"800 Constant_14841" -> "70 Relu_7382/fq_output_0" [label="[]", style=solid]; -"801 Transpose_7379" -> "66 Transpose_7381" [label="[1, 24, 1, 1]", style=solid]; -"802 Convolution_2758/fq_weights_1" -> "64 Convolution_2758" [label="[24, 96, 1, 1]", style=solid]; -"803 Constant_14839" -> "802 Convolution_2758/fq_weights_1" [label="[24, 1, 1, 1]", style=solid]; -"804 Constant_14838" -> "802 Convolution_2758/fq_weights_1" [label="[24, 1, 1, 1]", style=solid]; -"805 Constant_14837" -> "802 Convolution_2758/fq_weights_1" [label="[24, 1, 1, 1]", style=solid]; -"806 Constant_14836" -> "802 Convolution_2758/fq_weights_1" [label="[24, 1, 1, 1]", style=solid]; -"807 Transpose_2757" -> "802 Convolution_2758/fq_weights_1" [label="[24, 96, 1, 1]", style=solid]; -"808 Constant_14834" -> "62 Transpose_7375/fq_output_0" [label="[]", style=solid]; -"809 Constant_14833" -> "62 Transpose_7375/fq_output_0" [label="[]", style=solid]; -"810 Constant_14832" -> "62 Transpose_7375/fq_output_0" [label="[]", style=solid]; -"811 Constant_14831" -> "62 Transpose_7375/fq_output_0" [label="[]", style=solid]; -"812 Constant_7373" -> "60 Transpose_7375" [label="[2]", style=dashed]; -"813 Constant_14829" -> "58 Transpose_7371/fq_output_0" [label="[]", style=solid]; -"814 Constant_14828" -> "58 Transpose_7371/fq_output_0" [label="[]", style=solid]; -"815 Constant_14827" -> "58 Transpose_7371/fq_output_0" [label="[]", style=solid]; -"816 Constant_14826" -> "58 Transpose_7371/fq_output_0" [label="[]", style=solid]; -"817 Constant_11908" -> "54 Transpose_7353" [label="[1, 96, 1, 1]", style=solid]; -"818 Multiply_11900/fq_weights_1" -> "52 Multiply_11900" [label="[96, 1, 1, 5, 5]", style=solid]; -"819 Constant_14824" -> "818 Multiply_11900/fq_weights_1" [label="[96, 1, 1, 1, 1]", style=solid]; -"820 Constant_14823" -> "818 Multiply_11900/fq_weights_1" [label="[96, 1, 1, 1, 1]", style=solid]; -"821 Constant_14822" -> "818 Multiply_11900/fq_weights_1" [label="[96, 1, 1, 1, 1]", style=solid]; -"822 Constant_14821" -> "818 Multiply_11900/fq_weights_1" [label="[96, 1, 1, 1, 1]", style=solid]; -"823 Multiply_12293" -> "818 Multiply_11900/fq_weights_1" [label="[96, 1, 1, 5, 5]", style=solid]; -"824 Constant_14819" -> "50 Transpose_7304/fq_output_0" [label="[1, 96, 1, 1]", style=solid]; -"825 Constant_14818" -> "50 Transpose_7304/fq_output_0" [label="[1, 96, 1, 1]", style=solid]; -"826 Constant_14817" -> "50 Transpose_7304/fq_output_0" [label="[1, 96, 1, 1]", style=solid]; -"827 Constant_14816" -> "50 Transpose_7304/fq_output_0" [label="[1, 96, 1, 1]", style=solid]; -"828 Constant_11894" -> "46 Transpose_7286" [label="[1, 96, 1, 1]", style=solid]; -"829 Multiply_11886/fq_weights_1" -> "44 Multiply_11886" [label="[96, 24, 1, 1]", style=solid]; -"830 Constant_14814" -> "829 Multiply_11886/fq_weights_1" [label="[96, 1, 1, 1]", style=solid]; -"831 Constant_14813" -> "829 Multiply_11886/fq_weights_1" [label="[96, 1, 1, 1]", style=solid]; -"832 Constant_14812" -> "829 
Multiply_11886/fq_weights_1" [label="[96, 1, 1, 1]", style=solid]; -"833 Constant_14811" -> "829 Multiply_11886/fq_weights_1" [label="[96, 1, 1, 1]", style=solid]; -"834 Multiply_12287" -> "829 Multiply_11886/fq_weights_1" [label="[96, 24, 1, 1]", style=solid]; -"835 Constant_14809" -> "42 Transpose_7262/fq_output_0" [label="[]", style=solid]; -"836 Constant_14808" -> "42 Transpose_7262/fq_output_0" [label="[]", style=solid]; -"837 Constant_14807" -> "42 Transpose_7262/fq_output_0" [label="[]", style=solid]; -"838 Constant_14806" -> "42 Transpose_7262/fq_output_0" [label="[]", style=solid]; -"839 Constant_14804" -> "59 Transpose_7260/fq_output_0" [label="[]", style=solid]; -"840 Constant_14803" -> "59 Transpose_7260/fq_output_0" [label="[]", style=solid]; -"841 Constant_14802" -> "59 Transpose_7260/fq_output_0" [label="[]", style=solid]; -"842 Constant_14801" -> "59 Transpose_7260/fq_output_0" [label="[]", style=solid]; -"843 Constant_11880" -> "57 Transpose_7260" [label="[1, 24, 1, 1]", style=solid]; -"844 Multiply_11872/fq_weights_1" -> "55 Multiply_11872" [label="[24, 88, 1, 1]", style=solid]; -"845 Constant_14799" -> "844 Multiply_11872/fq_weights_1" [label="[24, 1, 1, 1]", style=solid]; -"846 Constant_14798" -> "844 Multiply_11872/fq_weights_1" [label="[24, 1, 1, 1]", style=solid]; -"847 Constant_14797" -> "844 Multiply_11872/fq_weights_1" [label="[24, 1, 1, 1]", style=solid]; -"848 Constant_14796" -> "844 Multiply_11872/fq_weights_1" [label="[24, 1, 1, 1]", style=solid]; -"849 Multiply_12281" -> "844 Multiply_11872/fq_weights_1" [label="[24, 88, 1, 1]", style=solid]; -"850 Constant_14794" -> "53 Relu_7233/fq_output_0" [label="[]", style=solid]; -"851 Constant_14793" -> "53 Relu_7233/fq_output_0" [label="[]", style=solid]; -"852 Constant_14792" -> "53 Relu_7233/fq_output_0" [label="[]", style=solid]; -"853 Constant_14791" -> "53 Relu_7233/fq_output_0" [label="[]", style=solid]; -"854 Constant_11866" -> "49 Transpose_7232" [label="[1, 88, 1, 1]", style=solid]; -"855 Multiply_11858/fq_weights_1" -> "47 Multiply_11858" [label="[88, 1, 1, 3, 3]", style=solid]; -"856 Constant_14789" -> "855 Multiply_11858/fq_weights_1" [label="[88, 1, 1, 1, 1]", style=solid]; -"857 Constant_14788" -> "855 Multiply_11858/fq_weights_1" [label="[88, 1, 1, 1, 1]", style=solid]; -"858 Constant_14787" -> "855 Multiply_11858/fq_weights_1" [label="[88, 1, 1, 1, 1]", style=solid]; -"859 Constant_14786" -> "855 Multiply_11858/fq_weights_1" [label="[88, 1, 1, 1, 1]", style=solid]; -"860 Multiply_12276" -> "855 Multiply_11858/fq_weights_1" [label="[88, 1, 1, 3, 3]", style=solid]; -"861 Constant_14784" -> "45 Relu_7207/fq_output_0" [label="[1, 88, 1, 1]", style=solid]; -"862 Constant_14783" -> "45 Relu_7207/fq_output_0" [label="[1, 88, 1, 1]", style=solid]; -"863 Constant_14782" -> "45 Relu_7207/fq_output_0" [label="[1, 88, 1, 1]", style=solid]; -"864 Constant_14781" -> "45 Relu_7207/fq_output_0" [label="[1, 88, 1, 1]", style=solid]; -"865 Constant_11852" -> "41 Transpose_7206" [label="[1, 88, 1, 1]", style=solid]; -"866 Multiply_11844/fq_weights_1" -> "39 Multiply_11844" [label="[88, 24, 1, 1]", style=solid]; -"867 Constant_14779" -> "866 Multiply_11844/fq_weights_1" [label="[88, 1, 1, 1]", style=solid]; -"868 Constant_14778" -> "866 Multiply_11844/fq_weights_1" [label="[88, 1, 1, 1]", style=solid]; -"869 Constant_14777" -> "866 Multiply_11844/fq_weights_1" [label="[88, 1, 1, 1]", style=solid]; -"870 Constant_14776" -> "866 Multiply_11844/fq_weights_1" [label="[88, 1, 1, 1]", style=solid]; -"871 Multiply_12270" -> 
"866 Multiply_11844/fq_weights_1" [label="[88, 24, 1, 1]", style=solid]; -"872 Constant_14774" -> "38 Transpose_7182/fq_output_0" [label="[]", style=solid]; -"873 Constant_14773" -> "38 Transpose_7182/fq_output_0" [label="[]", style=solid]; -"874 Constant_14772" -> "38 Transpose_7182/fq_output_0" [label="[]", style=solid]; -"875 Constant_14771" -> "38 Transpose_7182/fq_output_0" [label="[]", style=solid]; -"876 Constant_11838" -> "37 Transpose_7182" [label="[1, 24, 1, 1]", style=solid]; -"877 Multiply_11830/fq_weights_1" -> "36 Multiply_11830" [label="[24, 72, 1, 1]", style=solid]; -"878 Constant_14769" -> "877 Multiply_11830/fq_weights_1" [label="[24, 1, 1, 1]", style=solid]; -"879 Constant_14768" -> "877 Multiply_11830/fq_weights_1" [label="[24, 1, 1, 1]", style=solid]; -"880 Constant_14767" -> "877 Multiply_11830/fq_weights_1" [label="[24, 1, 1, 1]", style=solid]; -"881 Constant_14766" -> "877 Multiply_11830/fq_weights_1" [label="[24, 1, 1, 1]", style=solid]; -"882 Multiply_12264" -> "877 Multiply_11830/fq_weights_1" [label="[24, 72, 1, 1]", style=solid]; -"883 Constant_14764" -> "35 Relu_7157/fq_output_0" [label="[]", style=solid]; -"884 Constant_14763" -> "35 Relu_7157/fq_output_0" [label="[]", style=solid]; -"885 Constant_14762" -> "35 Relu_7157/fq_output_0" [label="[]", style=solid]; -"886 Constant_14761" -> "35 Relu_7157/fq_output_0" [label="[]", style=solid]; -"887 Constant_11824" -> "33 Transpose_7156" [label="[1, 72, 1, 1]", style=solid]; -"888 Multiply_11816/fq_weights_1" -> "32 Multiply_11816" [label="[72, 1, 1, 3, 3]", style=solid]; -"889 Constant_14759" -> "888 Multiply_11816/fq_weights_1" [label="[72, 1, 1, 1, 1]", style=solid]; -"890 Constant_14758" -> "888 Multiply_11816/fq_weights_1" [label="[72, 1, 1, 1, 1]", style=solid]; -"891 Constant_14757" -> "888 Multiply_11816/fq_weights_1" [label="[72, 1, 1, 1, 1]", style=solid]; -"892 Constant_14756" -> "888 Multiply_11816/fq_weights_1" [label="[72, 1, 1, 1, 1]", style=solid]; -"893 Multiply_12259" -> "888 Multiply_11816/fq_weights_1" [label="[72, 1, 1, 3, 3]", style=solid]; -"894 Constant_14754" -> "30 Relu_7106/fq_output_0" [label="[1, 72, 1, 1]", style=solid]; -"895 Constant_14753" -> "30 Relu_7106/fq_output_0" [label="[1, 72, 1, 1]", style=solid]; -"896 Constant_14752" -> "30 Relu_7106/fq_output_0" [label="[1, 72, 1, 1]", style=solid]; -"897 Constant_14751" -> "30 Relu_7106/fq_output_0" [label="[1, 72, 1, 1]", style=solid]; -"898 Constant_11810" -> "26 Transpose_7105" [label="[1, 72, 1, 1]", style=solid]; -"899 Multiply_11802/fq_weights_1" -> "24 Multiply_11802" [label="[72, 16, 1, 1]", style=solid]; -"900 Constant_14749" -> "899 Multiply_11802/fq_weights_1" [label="[72, 1, 1, 1]", style=solid]; -"901 Constant_14748" -> "899 Multiply_11802/fq_weights_1" [label="[72, 1, 1, 1]", style=solid]; -"902 Constant_14747" -> "899 Multiply_11802/fq_weights_1" [label="[72, 1, 1, 1]", style=solid]; -"903 Constant_14746" -> "899 Multiply_11802/fq_weights_1" [label="[72, 1, 1, 1]", style=solid]; -"904 Multiply_12253" -> "899 Multiply_11802/fq_weights_1" [label="[72, 16, 1, 1]", style=solid]; -"905 Constant_14744" -> "22 Transpose_7081/fq_output_0" [label="[]", style=solid]; -"906 Constant_14743" -> "22 Transpose_7081/fq_output_0" [label="[]", style=solid]; -"907 Constant_14742" -> "22 Transpose_7081/fq_output_0" [label="[]", style=solid]; -"908 Constant_14741" -> "22 Transpose_7081/fq_output_0" [label="[]", style=solid]; -"909 Constant_11796" -> "20 Transpose_7081" [label="[1, 16, 1, 1]", style=solid]; -"910 Multiply_11788/fq_weights_1" 
-> "18 Multiply_11788" [label="[16, 16, 1, 1]", style=solid]; -"911 Constant_14739" -> "910 Multiply_11788/fq_weights_1" [label="[16, 1, 1, 1]", style=solid]; -"912 Constant_14738" -> "910 Multiply_11788/fq_weights_1" [label="[16, 1, 1, 1]", style=solid]; -"913 Constant_14737" -> "910 Multiply_11788/fq_weights_1" [label="[16, 1, 1, 1]", style=solid]; -"914 Constant_14736" -> "910 Multiply_11788/fq_weights_1" [label="[16, 1, 1, 1]", style=solid]; -"915 Multiply_12247" -> "910 Multiply_11788/fq_weights_1" [label="[16, 16, 1, 1]", style=solid]; -"916 Constant_14734" -> "16 Transpose_7057/fq_output_0" [label="[]", style=solid]; -"917 Constant_14733" -> "16 Transpose_7057/fq_output_0" [label="[]", style=solid]; -"918 Constant_14732" -> "16 Transpose_7057/fq_output_0" [label="[]", style=solid]; -"919 Constant_14731" -> "16 Transpose_7057/fq_output_0" [label="[]", style=solid]; -"920 Constant_14729" -> "31 Transpose_7055/fq_output_0" [label="[]", style=solid]; -"921 Constant_14728" -> "31 Transpose_7055/fq_output_0" [label="[]", style=solid]; -"922 Constant_14727" -> "31 Transpose_7055/fq_output_0" [label="[]", style=solid]; -"923 Constant_14726" -> "31 Transpose_7055/fq_output_0" [label="[]", style=solid]; -"924 Transpose_7037" -> "27 Transpose_9591" [label="[1, 16, 1, 1]", style=solid]; -"925 Convolution_2440/fq_weights_1" -> "25 Convolution_2440" [label="[16, 8, 1, 1]", style=solid]; -"926 Constant_14724" -> "925 Convolution_2440/fq_weights_1" [label="[16, 1, 1, 1]", style=solid]; -"927 Constant_14723" -> "925 Convolution_2440/fq_weights_1" [label="[16, 1, 1, 1]", style=solid]; -"928 Constant_14722" -> "925 Convolution_2440/fq_weights_1" [label="[16, 1, 1, 1]", style=solid]; -"929 Constant_14721" -> "925 Convolution_2440/fq_weights_1" [label="[16, 1, 1, 1]", style=solid]; -"930 Transpose_2439" -> "925 Convolution_2440/fq_weights_1" [label="[16, 8, 1, 1]", style=solid]; -"931 Constant_14719" -> "23 Relu_7032/fq_output_0" [label="[]", style=solid]; -"932 Constant_14718" -> "23 Relu_7032/fq_output_0" [label="[]", style=solid]; -"933 Constant_14717" -> "23 Relu_7032/fq_output_0" [label="[]", style=solid]; -"934 Constant_14716" -> "23 Relu_7032/fq_output_0" [label="[]", style=solid]; -"935 Transpose_7029" -> "19 Transpose_7031" [label="[1, 8, 1, 1]", style=solid]; -"936 Convolution_2431/fq_weights_1" -> "17 Convolution_2431" [label="[8, 16, 1, 1]", style=solid]; -"937 Constant_14714" -> "936 Convolution_2431/fq_weights_1" [label="[8, 1, 1, 1]", style=solid]; -"938 Constant_14713" -> "936 Convolution_2431/fq_weights_1" [label="[8, 1, 1, 1]", style=solid]; -"939 Constant_14712" -> "936 Convolution_2431/fq_weights_1" [label="[8, 1, 1, 1]", style=solid]; -"940 Constant_14711" -> "936 Convolution_2431/fq_weights_1" [label="[8, 1, 1, 1]", style=solid]; -"941 Transpose_2430" -> "936 Convolution_2431/fq_weights_1" [label="[8, 16, 1, 1]", style=solid]; -"942 Constant_14709" -> "15 Transpose_7025/fq_output_0" [label="[]", style=solid]; -"943 Constant_14708" -> "15 Transpose_7025/fq_output_0" [label="[]", style=solid]; -"944 Constant_14707" -> "15 Transpose_7025/fq_output_0" [label="[]", style=solid]; -"945 Constant_14706" -> "15 Transpose_7025/fq_output_0" [label="[]", style=solid]; -"946 Constant_7023" -> "13 Transpose_7025" [label="[2]", style=dashed]; -"947 Constant_14704" -> "12 Relu_7020/fq_output_0" [label="[]", style=solid]; -"948 Constant_14703" -> "12 Relu_7020/fq_output_0" [label="[]", style=solid]; -"949 Constant_14702" -> "12 Relu_7020/fq_output_0" [label="[]", style=solid]; -"950 Constant_14701" 
-> "12 Relu_7020/fq_output_0" [label="[]", style=solid]; -"951 Constant_11782" -> "10 Transpose_7019" [label="[1, 16, 1, 1]", style=solid]; -"952 Multiply_11774/fq_weights_1" -> "9 Multiply_11774" [label="[16, 1, 1, 3, 3]", style=solid]; -"953 Constant_14699" -> "952 Multiply_11774/fq_weights_1" [label="[16, 1, 1, 1, 1]", style=solid]; -"954 Constant_14698" -> "952 Multiply_11774/fq_weights_1" [label="[16, 1, 1, 1, 1]", style=solid]; -"955 Constant_14697" -> "952 Multiply_11774/fq_weights_1" [label="[16, 1, 1, 1, 1]", style=solid]; -"956 Constant_14696" -> "952 Multiply_11774/fq_weights_1" [label="[16, 1, 1, 1, 1]", style=solid]; -"957 Multiply_12242" -> "952 Multiply_11774/fq_weights_1" [label="[16, 1, 1, 3, 3]", style=solid]; -"958 Constant_14694" -> "8 Transpose_6970/fq_output_0" [label="[1, 16, 1, 1]", style=solid]; -"959 Constant_14693" -> "8 Transpose_6970/fq_output_0" [label="[1, 16, 1, 1]", style=solid]; -"960 Constant_14692" -> "8 Transpose_6970/fq_output_0" [label="[1, 16, 1, 1]", style=solid]; -"961 Constant_14691" -> "8 Transpose_6970/fq_output_0" [label="[1, 16, 1, 1]", style=solid]; -"962 Constant_11768" -> "6 Transpose_6952" [label="[1, 16, 1, 1]", style=solid]; -"963 Multiply_11760/fq_weights_1" -> "5 Multiply_11760" [label="[16, 3, 3, 3]", style=solid]; -"964 Constant_14689" -> "963 Multiply_11760/fq_weights_1" [label="[16, 1, 1, 1]", style=solid]; -"965 Constant_14688" -> "963 Multiply_11760/fq_weights_1" [label="[16, 1, 1, 1]", style=solid]; -"966 Constant_14687" -> "963 Multiply_11760/fq_weights_1" [label="[16, 1, 1, 1]", style=solid]; -"967 Constant_14686" -> "963 Multiply_11760/fq_weights_1" [label="[16, 1, 1, 1]", style=solid]; -"968 Gather_12661" -> "963 Multiply_11760/fq_weights_1" [label="[16, 3, 3, 3]", style=solid]; -"969 Constant_14684" -> "4 Transpose_2342/fq_output_0" [label="[]", style=solid]; -"970 Constant_14683" -> "4 Transpose_2342/fq_output_0" [label="[]", style=solid]; -"971 Constant_14682" -> "4 Transpose_2342/fq_output_0" [label="[]", style=solid]; -"972 Constant_14681" -> "4 Transpose_2342/fq_output_0" [label="[]", style=solid]; -"973 Unsqueeze_9541" -> "3 Transpose_2342" [label="[1, 1, 1, 1]", style=solid]; -"974 Unsqueeze_9547" -> "2 Transpose_9539" [label="[1, 1, 1, 1]", style=solid]; -"975 Constant_9544" -> "1 Transpose_9545" [label="[4]", style=dashed]; -} diff --git a/tests/openvino/native/data/reference_graphs/quantized/mobilenet-v3-small-1.0-224-tf_performance.dot b/tests/openvino/native/data/reference_graphs/quantized/mobilenet-v3-small-1.0-224-tf_performance.dot new file mode 100644 index 00000000000..1fe2c06c4ae --- /dev/null +++ b/tests/openvino/native/data/reference_graphs/quantized/mobilenet-v3-small-1.0-224-tf_performance.dot @@ -0,0 +1,1958 @@ +strict digraph { +"0 input_1" [id=0, type=Parameter]; +"1 Transpose_7780" [id=1, type=Transpose]; +"2 Transpose_7774" [id=2, type=Multiply]; +"3 Transpose_710" [id=3, type=Add]; +"4 Transpose_710/fq_output_0" [id=4, type=FakeQuantize]; +"5 Multiply_9167" [id=5, type=Convolution]; +"6 Transpose_5170" [id=6, type=Add]; +"7 Transpose_5188" [id=7, type=HSwish]; +"8 Transpose_5188/fq_output_0" [id=8, type=FakeQuantize]; +"9 Multiply_9181" [id=9, type=GroupConvolution]; +"10 Transpose_5239" [id=10, type=Add]; +"11 Transpose_5241" [id=11, type=Relu]; +"12 Transpose_5241/fq_output_0" [id=12, type=FakeQuantize]; +"13 Transpose_5245" [id=13, type=ReduceMean]; +"14 Transpose_5277" [id=14, type=Multiply]; +"15 Transpose_5245/fq_output_0" [id=15, type=FakeQuantize]; +"16 Transpose_5277/fq_output_0" [id=16, 
type=FakeQuantize]; +"17 Convolution_801" [id=17, type=Convolution]; +"18 Multiply_9195" [id=18, type=Convolution]; +"19 Transpose_5251" [id=19, type=Add]; +"20 Transpose_5301" [id=20, type=Add]; +"21 Transpose_5253" [id=21, type=Relu]; +"22 Transpose_5301/fq_output_0" [id=22, type=FakeQuantize]; +"23 Transpose_5253/fq_output_0" [id=23, type=FakeQuantize]; +"24 Multiply_9209" [id=24, type=Convolution]; +"25 Convolution_810" [id=25, type=Convolution]; +"26 Transpose_5325" [id=26, type=Add]; +"27 Transpose_5259" [id=27, type=Add]; +"28 Transpose_5327" [id=28, type=Relu]; +"29 Transpose_5273" [id=29, type=HSigmoid]; +"30 Transpose_5327/fq_output_0" [id=30, type=FakeQuantize]; +"31 Transpose_5273/fq_output_0" [id=31, type=FakeQuantize]; +"32 Multiply_9223" [id=32, type=GroupConvolution]; +"33 Transpose_5378" [id=33, type=Add]; +"34 Transpose_5380" [id=34, type=Relu]; +"35 Transpose_5380/fq_output_0" [id=35, type=FakeQuantize]; +"36 Multiply_9237" [id=36, type=Convolution]; +"37 Transpose_5404" [id=37, type=Add]; +"38 Transpose_5404/fq_output_0" [id=38, type=FakeQuantize]; +"39 Multiply_9251" [id=39, type=Convolution]; +"40 Transpose_5484" [id=40, type=Add]; +"41 Transpose_5428" [id=41, type=Add]; +"42 Transpose_5484/fq_output_0" [id=42, type=FakeQuantize]; +"43 Transpose_5430" [id=43, type=Relu]; +"44 Multiply_9293" [id=44, type=Convolution]; +"45 Transpose_5430/fq_output_0" [id=45, type=FakeQuantize]; +"46 Transpose_5508" [id=46, type=Add]; +"47 Multiply_9265" [id=47, type=GroupConvolution]; +"48 Transpose_5526" [id=48, type=HSwish]; +"49 Transpose_5454" [id=49, type=Add]; +"50 Transpose_5526/fq_output_0" [id=50, type=FakeQuantize]; +"51 Transpose_5456" [id=51, type=Relu]; +"52 Multiply_9307" [id=52, type=GroupConvolution]; +"53 Transpose_5456/fq_output_0" [id=53, type=FakeQuantize]; +"54 Transpose_5577" [id=54, type=Add]; +"55 Multiply_9279" [id=55, type=Convolution]; +"56 Transpose_5595" [id=56, type=HSwish]; +"57 Transpose_5480" [id=57, type=Add]; +"58 Transpose_5595/fq_output_0" [id=58, type=FakeQuantize]; +"59 Transpose_5480/fq_output_0" [id=59, type=FakeQuantize]; +"60 Transpose_5599" [id=60, type=ReduceMean]; +"61 Transpose_5631" [id=61, type=Multiply]; +"62 Transpose_5599/fq_output_0" [id=62, type=FakeQuantize]; +"63 Transpose_5631/fq_output_0" [id=63, type=FakeQuantize]; +"64 Convolution_1132" [id=64, type=Convolution]; +"65 Multiply_9321" [id=65, type=Convolution]; +"66 Transpose_5605" [id=66, type=Add]; +"67 Transpose_5655" [id=67, type=Add]; +"68 Transpose_5607" [id=68, type=Relu]; +"69 Transpose_5655/fq_output_0" [id=69, type=FakeQuantize]; +"70 Transpose_5607/fq_output_0" [id=70, type=FakeQuantize]; +"71 Multiply_9335" [id=71, type=Convolution]; +"72 Transpose_5803" [id=72, type=Add]; +"73 Convolution_1141" [id=73, type=Convolution]; +"74 Transpose_5679" [id=74, type=Add]; +"75 Transpose_5803/fq_output_0" [id=75, type=FakeQuantize]; +"76 Transpose_5613" [id=76, type=Add]; +"77 Transpose_5697" [id=77, type=HSwish]; +"78 Multiply_9377" [id=78, type=Convolution]; +"79 Transpose_5951" [id=79, type=Add]; +"80 Transpose_5627" [id=80, type=HSigmoid]; +"81 Transpose_5697/fq_output_0" [id=81, type=FakeQuantize]; +"82 Transpose_5827" [id=82, type=Add]; +"83 Transpose_5951/fq_output_0" [id=83, type=FakeQuantize]; +"84 Transpose_5627/fq_output_0" [id=84, type=FakeQuantize]; +"85 Multiply_9349" [id=85, type=GroupConvolution]; +"86 Transpose_5845" [id=86, type=HSwish]; +"87 Multiply_9419" [id=87, type=Convolution]; +"88 Transpose_5721" [id=88, type=Add]; +"89 Transpose_5845/fq_output_0" 
[id=89, type=FakeQuantize]; +"90 Transpose_5975" [id=90, type=Add]; +"91 Transpose_5739" [id=91, type=HSwish]; +"92 Multiply_9391" [id=92, type=GroupConvolution]; +"93 Transpose_5993" [id=93, type=HSwish]; +"94 Transpose_5739/fq_output_0" [id=94, type=FakeQuantize]; +"95 Transpose_5869" [id=95, type=Add]; +"96 Transpose_5993/fq_output_0" [id=96, type=FakeQuantize]; +"97 Transpose_5743" [id=97, type=ReduceMean]; +"98 Transpose_5775" [id=98, type=Multiply]; +"99 Transpose_5887" [id=99, type=HSwish]; +"100 Multiply_9433" [id=100, type=GroupConvolution]; +"101 Transpose_5743/fq_output_0" [id=101, type=FakeQuantize]; +"102 Transpose_5775/fq_output_0" [id=102, type=FakeQuantize]; +"103 Transpose_5887/fq_output_0" [id=103, type=FakeQuantize]; +"104 Transpose_6017" [id=104, type=Add]; +"105 Convolution_1242" [id=105, type=Convolution]; +"106 Multiply_9363" [id=106, type=Convolution]; +"107 Transpose_5891" [id=107, type=ReduceMean]; +"108 Transpose_5923" [id=108, type=Multiply]; +"109 Transpose_6035" [id=109, type=HSwish]; +"110 Transpose_5749" [id=110, type=Add]; +"111 Transpose_5799" [id=111, type=Add]; +"112 Transpose_5891/fq_output_0" [id=112, type=FakeQuantize]; +"113 Transpose_5923/fq_output_0" [id=113, type=FakeQuantize]; +"114 Transpose_6035/fq_output_0" [id=114, type=FakeQuantize]; +"115 Transpose_5751" [id=115, type=Relu]; +"116 Transpose_5799/fq_output_0" [id=116, type=FakeQuantize]; +"117 Convolution_1353" [id=117, type=Convolution]; +"118 Multiply_9405" [id=118, type=Convolution]; +"119 Transpose_6039" [id=119, type=ReduceMean]; +"120 Transpose_6071" [id=120, type=Multiply]; +"121 Transpose_5751/fq_output_0" [id=121, type=FakeQuantize]; +"122 Transpose_5897" [id=122, type=Add]; +"123 Transpose_5947" [id=123, type=Add]; +"124 Transpose_6039/fq_output_0" [id=124, type=FakeQuantize]; +"125 Transpose_6071/fq_output_0" [id=125, type=FakeQuantize]; +"126 Convolution_1251" [id=126, type=Convolution]; +"127 Transpose_5899" [id=127, type=Relu]; +"128 Transpose_5947/fq_output_0" [id=128, type=FakeQuantize]; +"129 Convolution_1464" [id=129, type=Convolution]; +"130 Multiply_9447" [id=130, type=Convolution]; +"131 Transpose_5757" [id=131, type=Add]; +"132 Transpose_5899/fq_output_0" [id=132, type=FakeQuantize]; +"133 Transpose_6045" [id=133, type=Add]; +"134 Transpose_6095" [id=134, type=Add]; +"135 Transpose_5771" [id=135, type=HSigmoid]; +"136 Convolution_1362" [id=136, type=Convolution]; +"137 Transpose_6047" [id=137, type=Relu]; +"138 Transpose_6095/fq_output_0" [id=138, type=FakeQuantize]; +"139 Transpose_5771/fq_output_0" [id=139, type=FakeQuantize]; +"140 Transpose_5905" [id=140, type=Add]; +"141 Transpose_6047/fq_output_0" [id=141, type=FakeQuantize]; +"142 Multiply_9461" [id=142, type=Convolution]; +"143 Transpose_6243" [id=143, type=Add]; +"144 Transpose_5919" [id=144, type=HSigmoid]; +"145 Convolution_1473" [id=145, type=Convolution]; +"146 Transpose_6119" [id=146, type=Add]; +"147 Transpose_6243/fq_output_0" [id=147, type=FakeQuantize]; +"148 Transpose_5919/fq_output_0" [id=148, type=FakeQuantize]; +"149 Transpose_6053" [id=149, type=Add]; +"150 Transpose_6137" [id=150, type=HSwish]; +"151 Multiply_9503" [id=151, type=Convolution]; +"152 Transpose_6067" [id=152, type=HSigmoid]; +"153 Transpose_6137/fq_output_0" [id=153, type=FakeQuantize]; +"154 Transpose_6267" [id=154, type=Add]; +"155 Transpose_6067/fq_output_0" [id=155, type=FakeQuantize]; +"156 Multiply_9475" [id=156, type=GroupConvolution]; +"157 Transpose_6285" [id=157, type=HSwish]; +"158 Transpose_6161" [id=158, type=Add]; 
+"159 Transpose_6285/fq_output_0" [id=159, type=FakeQuantize]; +"160 Transpose_6179" [id=160, type=HSwish]; +"161 Multiply_9517" [id=161, type=GroupConvolution]; +"162 Transpose_6179/fq_output_0" [id=162, type=FakeQuantize]; +"163 Transpose_6336" [id=163, type=Add]; +"164 Transpose_6183" [id=164, type=ReduceMean]; +"165 Transpose_6215" [id=165, type=Multiply]; +"166 Transpose_6354" [id=166, type=HSwish]; +"167 Transpose_6183/fq_output_0" [id=167, type=FakeQuantize]; +"168 Transpose_6215/fq_output_0" [id=168, type=FakeQuantize]; +"169 Transpose_6354/fq_output_0" [id=169, type=FakeQuantize]; +"170 Convolution_1574" [id=170, type=Convolution]; +"171 Multiply_9489" [id=171, type=Convolution]; +"172 Transpose_6358" [id=172, type=ReduceMean]; +"173 Transpose_6390" [id=173, type=Multiply]; +"174 Transpose_6189" [id=174, type=Add]; +"175 Transpose_6239" [id=175, type=Add]; +"176 Transpose_6358/fq_output_0" [id=176, type=FakeQuantize]; +"177 Transpose_6390/fq_output_0" [id=177, type=FakeQuantize]; +"178 Transpose_6191" [id=178, type=Relu]; +"179 Transpose_6239/fq_output_0" [id=179, type=FakeQuantize]; +"180 Convolution_1713" [id=180, type=Convolution]; +"181 Multiply_9531" [id=181, type=Convolution]; +"182 Transpose_6191/fq_output_0" [id=182, type=FakeQuantize]; +"183 Transpose_6364" [id=183, type=Add]; +"184 Transpose_6414" [id=184, type=Add]; +"185 Convolution_1583" [id=185, type=Convolution]; +"186 Transpose_6366" [id=186, type=Relu]; +"187 Transpose_6414/fq_output_0" [id=187, type=FakeQuantize]; +"188 Transpose_6197" [id=188, type=Add]; +"189 Transpose_6366/fq_output_0" [id=189, type=FakeQuantize]; +"190 Multiply_9545" [id=190, type=Convolution]; +"191 Transpose_6562" [id=191, type=Add]; +"192 Transpose_6211" [id=192, type=HSigmoid]; +"193 Convolution_1722" [id=193, type=Convolution]; +"194 Transpose_6438" [id=194, type=Add]; +"195 Transpose_6562/fq_output_0" [id=195, type=FakeQuantize]; +"196 Transpose_6211/fq_output_0" [id=196, type=FakeQuantize]; +"197 Transpose_6372" [id=197, type=Add]; +"198 Transpose_6456" [id=198, type=HSwish]; +"199 Multiply_9587" [id=199, type=Convolution]; +"200 Transpose_6710" [id=200, type=Add]; +"201 Transpose_6386" [id=201, type=HSigmoid]; +"202 Transpose_6456/fq_output_0" [id=202, type=FakeQuantize]; +"203 Transpose_6586" [id=203, type=Add]; +"204 Transpose_6710/fq_output_0" [id=204, type=FakeQuantize]; +"205 Transpose_6386/fq_output_0" [id=205, type=FakeQuantize]; +"206 Multiply_9559" [id=206, type=GroupConvolution]; +"207 Transpose_6604" [id=207, type=HSwish]; +"208 Multiply_9629" [id=208, type=Convolution]; +"209 Transpose_6480" [id=209, type=Add]; +"210 Transpose_6604/fq_output_0" [id=210, type=FakeQuantize]; +"211 Transpose_6734" [id=211, type=Add]; +"212 Transpose_6498" [id=212, type=HSwish]; +"213 Multiply_9601" [id=213, type=GroupConvolution]; +"214 Transpose_6752" [id=214, type=HSwish]; +"215 Transpose_6498/fq_output_0" [id=215, type=FakeQuantize]; +"216 Transpose_6628" [id=216, type=Add]; +"217 Transpose_6752/fq_output_0" [id=217, type=FakeQuantize]; +"218 Transpose_6502" [id=218, type=ReduceMean]; +"219 Transpose_6534" [id=219, type=Multiply]; +"220 Transpose_6646" [id=220, type=HSwish]; +"221 Transpose_6756" [id=221, type=ReduceMean]; +"222 Transpose_6502/fq_output_0" [id=222, type=FakeQuantize]; +"223 Transpose_6534/fq_output_0" [id=223, type=FakeQuantize]; +"224 Transpose_6646/fq_output_0" [id=224, type=FakeQuantize]; +"225 Transpose_6756/fq_output_0" [id=225, type=FakeQuantize]; +"226 Convolution_1823" [id=226, type=Convolution]; +"227 
Multiply_9573" [id=227, type=Convolution]; +"228 Transpose_6650" [id=228, type=ReduceMean]; +"229 Transpose_6682" [id=229, type=Multiply]; +"230 Convolution_2013" [id=230, type=Convolution]; +"231 Transpose_6508" [id=231, type=Add]; +"232 Transpose_6558" [id=232, type=Add]; +"233 Transpose_6650/fq_output_0" [id=233, type=FakeQuantize]; +"234 Transpose_6682/fq_output_0" [id=234, type=FakeQuantize]; +"235 Transpose_6762" [id=235, type=Add]; +"236 Transpose_6510" [id=236, type=Relu]; +"237 Transpose_6558/fq_output_0" [id=237, type=FakeQuantize]; +"238 Convolution_1934" [id=238, type=Convolution]; +"239 Multiply_9615" [id=239, type=Convolution]; +"240 Transpose_6780" [id=240, type=HSwish]; +"241 Transpose_6510/fq_output_0" [id=241, type=FakeQuantize]; +"242 Transpose_6656" [id=242, type=Add]; +"243 Transpose_6706" [id=243, type=Add]; +"244 Transpose_6780/fq_output_0" [id=244, type=FakeQuantize]; +"245 Convolution_1832" [id=245, type=Convolution]; +"246 Transpose_6658" [id=246, type=Relu]; +"247 Transpose_6706/fq_output_0" [id=247, type=FakeQuantize]; +"248 Convolution_2025" [id=248, type=Convolution]; +"249 Transpose_6516" [id=249, type=Add]; +"250 Transpose_6658/fq_output_0" [id=250, type=FakeQuantize]; +"251 Transpose_6786" [id=251, type=Add]; +"252 Transpose_6530" [id=252, type=HSigmoid]; +"253 Convolution_1943" [id=253, type=Convolution]; +"254 MobilenetV3small/Logits/BiasAdd" [id=254, type=Reshape]; +"255 Transpose_6530/fq_output_0" [id=255, type=FakeQuantize]; +"256 Transpose_6664" [id=256, type=Add]; +"257 MobilenetV3small/flatten/Reshape" [id=257, type=Reshape]; +"258 Transpose_6678" [id=258, type=HSigmoid]; +"259 MobilenetV3small/Predictions/Softmax" [id=259, type=Softmax]; +"260 Transpose_6678/fq_output_0" [id=260, type=FakeQuantize]; +"261 Predictions" [id=261, type=Result]; +"262 MobilenetV3small/flatten/Const" [id=262, type=Constant]; +"263 Constant_8887" [id=263, type=Constant]; +"264 Transpose_6784" [id=264, type=Constant]; +"265 Convolution_2025/fq_weights_1" [id=265, type=FakeQuantize]; +"266 Constant_13812" [id=266, type=Constant]; +"267 Constant_13811" [id=267, type=Constant]; +"268 Constant_13810" [id=268, type=Constant]; +"269 Constant_13809" [id=269, type=Constant]; +"270 Transpose_2024" [id=270, type=Constant]; +"271 Constant_13807" [id=271, type=Constant]; +"272 Constant_13806" [id=272, type=Constant]; +"273 Constant_13805" [id=273, type=Constant]; +"274 Constant_13804" [id=274, type=Constant]; +"275 Transpose_6760" [id=275, type=Constant]; +"276 Convolution_2013/fq_weights_1" [id=276, type=FakeQuantize]; +"277 Constant_13802" [id=277, type=Constant]; +"278 Constant_13801" [id=278, type=Constant]; +"279 Constant_13800" [id=279, type=Constant]; +"280 Constant_13799" [id=280, type=Constant]; +"281 Transpose_2012" [id=281, type=Constant]; +"282 Constant_13797" [id=282, type=Constant]; +"283 Constant_13796" [id=283, type=Constant]; +"284 Constant_13795" [id=284, type=Constant]; +"285 Constant_13794" [id=285, type=Constant]; +"286 Constant_6754" [id=286, type=Constant]; +"287 Constant_13792" [id=287, type=Constant]; +"288 Constant_13791" [id=288, type=Constant]; +"289 Constant_13790" [id=289, type=Constant]; +"290 Constant_13789" [id=290, type=Constant]; +"291 Constant_9637" [id=291, type=Constant]; +"292 Multiply_9629/fq_weights_1" [id=292, type=FakeQuantize]; +"293 Constant_13787" [id=293, type=Constant]; +"294 Constant_13786" [id=294, type=Constant]; +"295 Constant_13785" [id=295, type=Constant]; +"296 Constant_13784" [id=296, type=Constant]; +"297 Multiply_9830" [id=297, 
type=Constant]; +"298 Constant_13782" [id=298, type=Constant]; +"299 Constant_13781" [id=299, type=Constant]; +"300 Constant_13780" [id=300, type=Constant]; +"301 Constant_13779" [id=301, type=Constant]; +"302 Constant_13777" [id=302, type=Constant]; +"303 Constant_13776" [id=303, type=Constant]; +"304 Constant_13775" [id=304, type=Constant]; +"305 Constant_13774" [id=305, type=Constant]; +"306 Constant_9623" [id=306, type=Constant]; +"307 Multiply_9615/fq_weights_1" [id=307, type=FakeQuantize]; +"308 Constant_13772" [id=308, type=Constant]; +"309 Constant_13771" [id=309, type=Constant]; +"310 Constant_13770" [id=310, type=Constant]; +"311 Constant_13769" [id=311, type=Constant]; +"312 Multiply_9824" [id=312, type=Constant]; +"313 Constant_13767" [id=313, type=Constant]; +"314 Constant_13766" [id=314, type=Constant]; +"315 Constant_13765" [id=315, type=Constant]; +"316 Constant_13764" [id=316, type=Constant]; +"317 Constant_13762" [id=317, type=Constant]; +"318 Constant_13761" [id=318, type=Constant]; +"319 Constant_13760" [id=319, type=Constant]; +"320 Constant_13759" [id=320, type=Constant]; +"321 Transpose_6662" [id=321, type=Constant]; +"322 Convolution_1943/fq_weights_1" [id=322, type=FakeQuantize]; +"323 Constant_13757" [id=323, type=Constant]; +"324 Constant_13756" [id=324, type=Constant]; +"325 Constant_13755" [id=325, type=Constant]; +"326 Constant_13754" [id=326, type=Constant]; +"327 Transpose_1942" [id=327, type=Constant]; +"328 Constant_13752" [id=328, type=Constant]; +"329 Constant_13751" [id=329, type=Constant]; +"330 Constant_13750" [id=330, type=Constant]; +"331 Constant_13749" [id=331, type=Constant]; +"332 Transpose_6654" [id=332, type=Constant]; +"333 Convolution_1934/fq_weights_1" [id=333, type=FakeQuantize]; +"334 Constant_13747" [id=334, type=Constant]; +"335 Constant_13746" [id=335, type=Constant]; +"336 Constant_13745" [id=336, type=Constant]; +"337 Constant_13744" [id=337, type=Constant]; +"338 Transpose_1933" [id=338, type=Constant]; +"339 Constant_13742" [id=339, type=Constant]; +"340 Constant_13741" [id=340, type=Constant]; +"341 Constant_13740" [id=341, type=Constant]; +"342 Constant_13739" [id=342, type=Constant]; +"343 Constant_6648" [id=343, type=Constant]; +"344 Constant_13737" [id=344, type=Constant]; +"345 Constant_13736" [id=345, type=Constant]; +"346 Constant_13735" [id=346, type=Constant]; +"347 Constant_13734" [id=347, type=Constant]; +"348 Constant_9609" [id=348, type=Constant]; +"349 Multiply_9601/fq_weights_1" [id=349, type=FakeQuantize]; +"350 Constant_13732" [id=350, type=Constant]; +"351 Constant_13731" [id=351, type=Constant]; +"352 Constant_13730" [id=352, type=Constant]; +"353 Constant_13729" [id=353, type=Constant]; +"354 Multiply_9819" [id=354, type=Constant]; +"355 Constant_13727" [id=355, type=Constant]; +"356 Constant_13726" [id=356, type=Constant]; +"357 Constant_13725" [id=357, type=Constant]; +"358 Constant_13724" [id=358, type=Constant]; +"359 Constant_9595" [id=359, type=Constant]; +"360 Multiply_9587/fq_weights_1" [id=360, type=FakeQuantize]; +"361 Constant_13722" [id=361, type=Constant]; +"362 Constant_13721" [id=362, type=Constant]; +"363 Constant_13720" [id=363, type=Constant]; +"364 Constant_13719" [id=364, type=Constant]; +"365 Multiply_9813" [id=365, type=Constant]; +"366 Constant_13717" [id=366, type=Constant]; +"367 Constant_13716" [id=367, type=Constant]; +"368 Constant_13715" [id=368, type=Constant]; +"369 Constant_13714" [id=369, type=Constant]; +"370 Constant_13712" [id=370, type=Constant]; +"371 Constant_13711" 
[id=371, type=Constant]; +"372 Constant_13710" [id=372, type=Constant]; +"373 Constant_13709" [id=373, type=Constant]; +"374 Constant_9581" [id=374, type=Constant]; +"375 Multiply_9573/fq_weights_1" [id=375, type=FakeQuantize]; +"376 Constant_13707" [id=376, type=Constant]; +"377 Constant_13706" [id=377, type=Constant]; +"378 Constant_13705" [id=378, type=Constant]; +"379 Constant_13704" [id=379, type=Constant]; +"380 Multiply_9807" [id=380, type=Constant]; +"381 Constant_13702" [id=381, type=Constant]; +"382 Constant_13701" [id=382, type=Constant]; +"383 Constant_13700" [id=383, type=Constant]; +"384 Constant_13699" [id=384, type=Constant]; +"385 Constant_13697" [id=385, type=Constant]; +"386 Constant_13696" [id=386, type=Constant]; +"387 Constant_13695" [id=387, type=Constant]; +"388 Constant_13694" [id=388, type=Constant]; +"389 Transpose_6514" [id=389, type=Constant]; +"390 Convolution_1832/fq_weights_1" [id=390, type=FakeQuantize]; +"391 Constant_13692" [id=391, type=Constant]; +"392 Constant_13691" [id=392, type=Constant]; +"393 Constant_13690" [id=393, type=Constant]; +"394 Constant_13689" [id=394, type=Constant]; +"395 Transpose_1831" [id=395, type=Constant]; +"396 Constant_13687" [id=396, type=Constant]; +"397 Constant_13686" [id=397, type=Constant]; +"398 Constant_13685" [id=398, type=Constant]; +"399 Constant_13684" [id=399, type=Constant]; +"400 Transpose_6506" [id=400, type=Constant]; +"401 Convolution_1823/fq_weights_1" [id=401, type=FakeQuantize]; +"402 Constant_13682" [id=402, type=Constant]; +"403 Constant_13681" [id=403, type=Constant]; +"404 Constant_13680" [id=404, type=Constant]; +"405 Constant_13679" [id=405, type=Constant]; +"406 Transpose_1822" [id=406, type=Constant]; +"407 Constant_13677" [id=407, type=Constant]; +"408 Constant_13676" [id=408, type=Constant]; +"409 Constant_13675" [id=409, type=Constant]; +"410 Constant_13674" [id=410, type=Constant]; +"411 Constant_6500" [id=411, type=Constant]; +"412 Constant_13672" [id=412, type=Constant]; +"413 Constant_13671" [id=413, type=Constant]; +"414 Constant_13670" [id=414, type=Constant]; +"415 Constant_13669" [id=415, type=Constant]; +"416 Constant_9567" [id=416, type=Constant]; +"417 Multiply_9559/fq_weights_1" [id=417, type=FakeQuantize]; +"418 Constant_13667" [id=418, type=Constant]; +"419 Constant_13666" [id=419, type=Constant]; +"420 Constant_13665" [id=420, type=Constant]; +"421 Constant_13664" [id=421, type=Constant]; +"422 Multiply_9802" [id=422, type=Constant]; +"423 Constant_13662" [id=423, type=Constant]; +"424 Constant_13661" [id=424, type=Constant]; +"425 Constant_13660" [id=425, type=Constant]; +"426 Constant_13659" [id=426, type=Constant]; +"427 Constant_9553" [id=427, type=Constant]; +"428 Multiply_9545/fq_weights_1" [id=428, type=FakeQuantize]; +"429 Constant_13657" [id=429, type=Constant]; +"430 Constant_13656" [id=430, type=Constant]; +"431 Constant_13655" [id=431, type=Constant]; +"432 Constant_13654" [id=432, type=Constant]; +"433 Multiply_9796" [id=433, type=Constant]; +"434 Constant_13652" [id=434, type=Constant]; +"435 Constant_13651" [id=435, type=Constant]; +"436 Constant_13650" [id=436, type=Constant]; +"437 Constant_13649" [id=437, type=Constant]; +"438 Constant_9539" [id=438, type=Constant]; +"439 Multiply_9531/fq_weights_1" [id=439, type=FakeQuantize]; +"440 Constant_13647" [id=440, type=Constant]; +"441 Constant_13646" [id=441, type=Constant]; +"442 Constant_13645" [id=442, type=Constant]; +"443 Constant_13644" [id=443, type=Constant]; +"444 Multiply_9790" [id=444, type=Constant]; +"445 
Constant_13642" [id=445, type=Constant]; +"446 Constant_13641" [id=446, type=Constant]; +"447 Constant_13640" [id=447, type=Constant]; +"448 Constant_13639" [id=448, type=Constant]; +"449 Constant_13637" [id=449, type=Constant]; +"450 Constant_13636" [id=450, type=Constant]; +"451 Constant_13635" [id=451, type=Constant]; +"452 Constant_13634" [id=452, type=Constant]; +"453 Transpose_6370" [id=453, type=Constant]; +"454 Convolution_1722/fq_weights_1" [id=454, type=FakeQuantize]; +"455 Constant_13632" [id=455, type=Constant]; +"456 Constant_13631" [id=456, type=Constant]; +"457 Constant_13630" [id=457, type=Constant]; +"458 Constant_13629" [id=458, type=Constant]; +"459 Transpose_1721" [id=459, type=Constant]; +"460 Constant_13627" [id=460, type=Constant]; +"461 Constant_13626" [id=461, type=Constant]; +"462 Constant_13625" [id=462, type=Constant]; +"463 Constant_13624" [id=463, type=Constant]; +"464 Transpose_6362" [id=464, type=Constant]; +"465 Convolution_1713/fq_weights_1" [id=465, type=FakeQuantize]; +"466 Constant_13622" [id=466, type=Constant]; +"467 Constant_13621" [id=467, type=Constant]; +"468 Constant_13620" [id=468, type=Constant]; +"469 Constant_13619" [id=469, type=Constant]; +"470 Transpose_1712" [id=470, type=Constant]; +"471 Constant_13617" [id=471, type=Constant]; +"472 Constant_13616" [id=472, type=Constant]; +"473 Constant_13615" [id=473, type=Constant]; +"474 Constant_13614" [id=474, type=Constant]; +"475 Constant_6356" [id=475, type=Constant]; +"476 Constant_13612" [id=476, type=Constant]; +"477 Constant_13611" [id=477, type=Constant]; +"478 Constant_13610" [id=478, type=Constant]; +"479 Constant_13609" [id=479, type=Constant]; +"480 Constant_9525" [id=480, type=Constant]; +"481 Multiply_9517/fq_weights_1" [id=481, type=FakeQuantize]; +"482 Constant_13607" [id=482, type=Constant]; +"483 Constant_13606" [id=483, type=Constant]; +"484 Constant_13605" [id=484, type=Constant]; +"485 Constant_13604" [id=485, type=Constant]; +"486 Multiply_9785" [id=486, type=Constant]; +"487 Constant_13602" [id=487, type=Constant]; +"488 Constant_13601" [id=488, type=Constant]; +"489 Constant_13600" [id=489, type=Constant]; +"490 Constant_13599" [id=490, type=Constant]; +"491 Constant_9511" [id=491, type=Constant]; +"492 Multiply_9503/fq_weights_1" [id=492, type=FakeQuantize]; +"493 Constant_13597" [id=493, type=Constant]; +"494 Constant_13596" [id=494, type=Constant]; +"495 Constant_13595" [id=495, type=Constant]; +"496 Constant_13594" [id=496, type=Constant]; +"497 Multiply_9779" [id=497, type=Constant]; +"498 Constant_13592" [id=498, type=Constant]; +"499 Constant_13591" [id=499, type=Constant]; +"500 Constant_13590" [id=500, type=Constant]; +"501 Constant_13589" [id=501, type=Constant]; +"502 Constant_13587" [id=502, type=Constant]; +"503 Constant_13586" [id=503, type=Constant]; +"504 Constant_13585" [id=504, type=Constant]; +"505 Constant_13584" [id=505, type=Constant]; +"506 Constant_9497" [id=506, type=Constant]; +"507 Multiply_9489/fq_weights_1" [id=507, type=FakeQuantize]; +"508 Constant_13582" [id=508, type=Constant]; +"509 Constant_13581" [id=509, type=Constant]; +"510 Constant_13580" [id=510, type=Constant]; +"511 Constant_13579" [id=511, type=Constant]; +"512 Multiply_9773" [id=512, type=Constant]; +"513 Constant_13577" [id=513, type=Constant]; +"514 Constant_13576" [id=514, type=Constant]; +"515 Constant_13575" [id=515, type=Constant]; +"516 Constant_13574" [id=516, type=Constant]; +"517 Constant_13572" [id=517, type=Constant]; +"518 Constant_13571" [id=518, type=Constant]; 
+"519 Constant_13570" [id=519, type=Constant]; +"520 Constant_13569" [id=520, type=Constant]; +"521 Transpose_6195" [id=521, type=Constant]; +"522 Convolution_1583/fq_weights_1" [id=522, type=FakeQuantize]; +"523 Constant_13567" [id=523, type=Constant]; +"524 Constant_13566" [id=524, type=Constant]; +"525 Constant_13565" [id=525, type=Constant]; +"526 Constant_13564" [id=526, type=Constant]; +"527 Transpose_1582" [id=527, type=Constant]; +"528 Constant_13562" [id=528, type=Constant]; +"529 Constant_13561" [id=529, type=Constant]; +"530 Constant_13560" [id=530, type=Constant]; +"531 Constant_13559" [id=531, type=Constant]; +"532 Transpose_6187" [id=532, type=Constant]; +"533 Convolution_1574/fq_weights_1" [id=533, type=FakeQuantize]; +"534 Constant_13557" [id=534, type=Constant]; +"535 Constant_13556" [id=535, type=Constant]; +"536 Constant_13555" [id=536, type=Constant]; +"537 Constant_13554" [id=537, type=Constant]; +"538 Transpose_1573" [id=538, type=Constant]; +"539 Constant_13552" [id=539, type=Constant]; +"540 Constant_13551" [id=540, type=Constant]; +"541 Constant_13550" [id=541, type=Constant]; +"542 Constant_13549" [id=542, type=Constant]; +"543 Constant_6181" [id=543, type=Constant]; +"544 Constant_13547" [id=544, type=Constant]; +"545 Constant_13546" [id=545, type=Constant]; +"546 Constant_13545" [id=546, type=Constant]; +"547 Constant_13544" [id=547, type=Constant]; +"548 Constant_9483" [id=548, type=Constant]; +"549 Multiply_9475/fq_weights_1" [id=549, type=FakeQuantize]; +"550 Constant_13542" [id=550, type=Constant]; +"551 Constant_13541" [id=551, type=Constant]; +"552 Constant_13540" [id=552, type=Constant]; +"553 Constant_13539" [id=553, type=Constant]; +"554 Multiply_9768" [id=554, type=Constant]; +"555 Constant_13537" [id=555, type=Constant]; +"556 Constant_13536" [id=556, type=Constant]; +"557 Constant_13535" [id=557, type=Constant]; +"558 Constant_13534" [id=558, type=Constant]; +"559 Constant_9469" [id=559, type=Constant]; +"560 Multiply_9461/fq_weights_1" [id=560, type=FakeQuantize]; +"561 Constant_13532" [id=561, type=Constant]; +"562 Constant_13531" [id=562, type=Constant]; +"563 Constant_13530" [id=563, type=Constant]; +"564 Constant_13529" [id=564, type=Constant]; +"565 Multiply_9762" [id=565, type=Constant]; +"566 Constant_13527" [id=566, type=Constant]; +"567 Constant_13526" [id=567, type=Constant]; +"568 Constant_13525" [id=568, type=Constant]; +"569 Constant_13524" [id=569, type=Constant]; +"570 Constant_9455" [id=570, type=Constant]; +"571 Multiply_9447/fq_weights_1" [id=571, type=FakeQuantize]; +"572 Constant_13522" [id=572, type=Constant]; +"573 Constant_13521" [id=573, type=Constant]; +"574 Constant_13520" [id=574, type=Constant]; +"575 Constant_13519" [id=575, type=Constant]; +"576 Multiply_9756" [id=576, type=Constant]; +"577 Constant_13517" [id=577, type=Constant]; +"578 Constant_13516" [id=578, type=Constant]; +"579 Constant_13515" [id=579, type=Constant]; +"580 Constant_13514" [id=580, type=Constant]; +"581 Constant_13512" [id=581, type=Constant]; +"582 Constant_13511" [id=582, type=Constant]; +"583 Constant_13510" [id=583, type=Constant]; +"584 Constant_13509" [id=584, type=Constant]; +"585 Transpose_6051" [id=585, type=Constant]; +"586 Convolution_1473/fq_weights_1" [id=586, type=FakeQuantize]; +"587 Constant_13507" [id=587, type=Constant]; +"588 Constant_13506" [id=588, type=Constant]; +"589 Constant_13505" [id=589, type=Constant]; +"590 Constant_13504" [id=590, type=Constant]; +"591 Transpose_1472" [id=591, type=Constant]; +"592 Constant_13502" 
[id=592, type=Constant]; +"593 Constant_13501" [id=593, type=Constant]; +"594 Constant_13500" [id=594, type=Constant]; +"595 Constant_13499" [id=595, type=Constant]; +"596 Transpose_6043" [id=596, type=Constant]; +"597 Convolution_1464/fq_weights_1" [id=597, type=FakeQuantize]; +"598 Constant_13497" [id=598, type=Constant]; +"599 Constant_13496" [id=599, type=Constant]; +"600 Constant_13495" [id=600, type=Constant]; +"601 Constant_13494" [id=601, type=Constant]; +"602 Transpose_1463" [id=602, type=Constant]; +"603 Constant_13492" [id=603, type=Constant]; +"604 Constant_13491" [id=604, type=Constant]; +"605 Constant_13490" [id=605, type=Constant]; +"606 Constant_13489" [id=606, type=Constant]; +"607 Constant_6037" [id=607, type=Constant]; +"608 Constant_13487" [id=608, type=Constant]; +"609 Constant_13486" [id=609, type=Constant]; +"610 Constant_13485" [id=610, type=Constant]; +"611 Constant_13484" [id=611, type=Constant]; +"612 Constant_9441" [id=612, type=Constant]; +"613 Multiply_9433/fq_weights_1" [id=613, type=FakeQuantize]; +"614 Constant_13482" [id=614, type=Constant]; +"615 Constant_13481" [id=615, type=Constant]; +"616 Constant_13480" [id=616, type=Constant]; +"617 Constant_13479" [id=617, type=Constant]; +"618 Multiply_9751" [id=618, type=Constant]; +"619 Constant_13477" [id=619, type=Constant]; +"620 Constant_13476" [id=620, type=Constant]; +"621 Constant_13475" [id=621, type=Constant]; +"622 Constant_13474" [id=622, type=Constant]; +"623 Constant_9427" [id=623, type=Constant]; +"624 Multiply_9419/fq_weights_1" [id=624, type=FakeQuantize]; +"625 Constant_13472" [id=625, type=Constant]; +"626 Constant_13471" [id=626, type=Constant]; +"627 Constant_13470" [id=627, type=Constant]; +"628 Constant_13469" [id=628, type=Constant]; +"629 Multiply_9745" [id=629, type=Constant]; +"630 Constant_13467" [id=630, type=Constant]; +"631 Constant_13466" [id=631, type=Constant]; +"632 Constant_13465" [id=632, type=Constant]; +"633 Constant_13464" [id=633, type=Constant]; +"634 Constant_13462" [id=634, type=Constant]; +"635 Constant_13461" [id=635, type=Constant]; +"636 Constant_13460" [id=636, type=Constant]; +"637 Constant_13459" [id=637, type=Constant]; +"638 Constant_9413" [id=638, type=Constant]; +"639 Multiply_9405/fq_weights_1" [id=639, type=FakeQuantize]; +"640 Constant_13457" [id=640, type=Constant]; +"641 Constant_13456" [id=641, type=Constant]; +"642 Constant_13455" [id=642, type=Constant]; +"643 Constant_13454" [id=643, type=Constant]; +"644 Multiply_9739" [id=644, type=Constant]; +"645 Constant_13452" [id=645, type=Constant]; +"646 Constant_13451" [id=646, type=Constant]; +"647 Constant_13450" [id=647, type=Constant]; +"648 Constant_13449" [id=648, type=Constant]; +"649 Constant_13447" [id=649, type=Constant]; +"650 Constant_13446" [id=650, type=Constant]; +"651 Constant_13445" [id=651, type=Constant]; +"652 Constant_13444" [id=652, type=Constant]; +"653 Transpose_5903" [id=653, type=Constant]; +"654 Convolution_1362/fq_weights_1" [id=654, type=FakeQuantize]; +"655 Constant_13442" [id=655, type=Constant]; +"656 Constant_13441" [id=656, type=Constant]; +"657 Constant_13440" [id=657, type=Constant]; +"658 Constant_13439" [id=658, type=Constant]; +"659 Transpose_1361" [id=659, type=Constant]; +"660 Constant_13437" [id=660, type=Constant]; +"661 Constant_13436" [id=661, type=Constant]; +"662 Constant_13435" [id=662, type=Constant]; +"663 Constant_13434" [id=663, type=Constant]; +"664 Transpose_5895" [id=664, type=Constant]; +"665 Convolution_1353/fq_weights_1" [id=665, type=FakeQuantize]; 
+"666 Constant_13432" [id=666, type=Constant]; +"667 Constant_13431" [id=667, type=Constant]; +"668 Constant_13430" [id=668, type=Constant]; +"669 Constant_13429" [id=669, type=Constant]; +"670 Transpose_1352" [id=670, type=Constant]; +"671 Constant_13427" [id=671, type=Constant]; +"672 Constant_13426" [id=672, type=Constant]; +"673 Constant_13425" [id=673, type=Constant]; +"674 Constant_13424" [id=674, type=Constant]; +"675 Constant_5889" [id=675, type=Constant]; +"676 Constant_13422" [id=676, type=Constant]; +"677 Constant_13421" [id=677, type=Constant]; +"678 Constant_13420" [id=678, type=Constant]; +"679 Constant_13419" [id=679, type=Constant]; +"680 Constant_9399" [id=680, type=Constant]; +"681 Multiply_9391/fq_weights_1" [id=681, type=FakeQuantize]; +"682 Constant_13417" [id=682, type=Constant]; +"683 Constant_13416" [id=683, type=Constant]; +"684 Constant_13415" [id=684, type=Constant]; +"685 Constant_13414" [id=685, type=Constant]; +"686 Multiply_9734" [id=686, type=Constant]; +"687 Constant_13412" [id=687, type=Constant]; +"688 Constant_13411" [id=688, type=Constant]; +"689 Constant_13410" [id=689, type=Constant]; +"690 Constant_13409" [id=690, type=Constant]; +"691 Constant_9385" [id=691, type=Constant]; +"692 Multiply_9377/fq_weights_1" [id=692, type=FakeQuantize]; +"693 Constant_13407" [id=693, type=Constant]; +"694 Constant_13406" [id=694, type=Constant]; +"695 Constant_13405" [id=695, type=Constant]; +"696 Constant_13404" [id=696, type=Constant]; +"697 Multiply_9728" [id=697, type=Constant]; +"698 Constant_13402" [id=698, type=Constant]; +"699 Constant_13401" [id=699, type=Constant]; +"700 Constant_13400" [id=700, type=Constant]; +"701 Constant_13399" [id=701, type=Constant]; +"702 Constant_13397" [id=702, type=Constant]; +"703 Constant_13396" [id=703, type=Constant]; +"704 Constant_13395" [id=704, type=Constant]; +"705 Constant_13394" [id=705, type=Constant]; +"706 Constant_9371" [id=706, type=Constant]; +"707 Multiply_9363/fq_weights_1" [id=707, type=FakeQuantize]; +"708 Constant_13392" [id=708, type=Constant]; +"709 Constant_13391" [id=709, type=Constant]; +"710 Constant_13390" [id=710, type=Constant]; +"711 Constant_13389" [id=711, type=Constant]; +"712 Multiply_9722" [id=712, type=Constant]; +"713 Constant_13387" [id=713, type=Constant]; +"714 Constant_13386" [id=714, type=Constant]; +"715 Constant_13385" [id=715, type=Constant]; +"716 Constant_13384" [id=716, type=Constant]; +"717 Constant_13382" [id=717, type=Constant]; +"718 Constant_13381" [id=718, type=Constant]; +"719 Constant_13380" [id=719, type=Constant]; +"720 Constant_13379" [id=720, type=Constant]; +"721 Transpose_5755" [id=721, type=Constant]; +"722 Convolution_1251/fq_weights_1" [id=722, type=FakeQuantize]; +"723 Constant_13377" [id=723, type=Constant]; +"724 Constant_13376" [id=724, type=Constant]; +"725 Constant_13375" [id=725, type=Constant]; +"726 Constant_13374" [id=726, type=Constant]; +"727 Transpose_1250" [id=727, type=Constant]; +"728 Constant_13372" [id=728, type=Constant]; +"729 Constant_13371" [id=729, type=Constant]; +"730 Constant_13370" [id=730, type=Constant]; +"731 Constant_13369" [id=731, type=Constant]; +"732 Transpose_5747" [id=732, type=Constant]; +"733 Convolution_1242/fq_weights_1" [id=733, type=FakeQuantize]; +"734 Constant_13367" [id=734, type=Constant]; +"735 Constant_13366" [id=735, type=Constant]; +"736 Constant_13365" [id=736, type=Constant]; +"737 Constant_13364" [id=737, type=Constant]; +"738 Transpose_1241" [id=738, type=Constant]; +"739 Constant_13362" [id=739, 
type=Constant]; +"740 Constant_13361" [id=740, type=Constant]; +"741 Constant_13360" [id=741, type=Constant]; +"742 Constant_13359" [id=742, type=Constant]; +"743 Constant_5741" [id=743, type=Constant]; +"744 Constant_13357" [id=744, type=Constant]; +"745 Constant_13356" [id=745, type=Constant]; +"746 Constant_13355" [id=746, type=Constant]; +"747 Constant_13354" [id=747, type=Constant]; +"748 Constant_9357" [id=748, type=Constant]; +"749 Multiply_9349/fq_weights_1" [id=749, type=FakeQuantize]; +"750 Constant_13352" [id=750, type=Constant]; +"751 Constant_13351" [id=751, type=Constant]; +"752 Constant_13350" [id=752, type=Constant]; +"753 Constant_13349" [id=753, type=Constant]; +"754 Multiply_9717" [id=754, type=Constant]; +"755 Constant_13347" [id=755, type=Constant]; +"756 Constant_13346" [id=756, type=Constant]; +"757 Constant_13345" [id=757, type=Constant]; +"758 Constant_13344" [id=758, type=Constant]; +"759 Constant_9343" [id=759, type=Constant]; +"760 Multiply_9335/fq_weights_1" [id=760, type=FakeQuantize]; +"761 Constant_13342" [id=761, type=Constant]; +"762 Constant_13341" [id=762, type=Constant]; +"763 Constant_13340" [id=763, type=Constant]; +"764 Constant_13339" [id=764, type=Constant]; +"765 Multiply_9711" [id=765, type=Constant]; +"766 Constant_13337" [id=766, type=Constant]; +"767 Constant_13336" [id=767, type=Constant]; +"768 Constant_13335" [id=768, type=Constant]; +"769 Constant_13334" [id=769, type=Constant]; +"770 Constant_9329" [id=770, type=Constant]; +"771 Multiply_9321/fq_weights_1" [id=771, type=FakeQuantize]; +"772 Constant_13332" [id=772, type=Constant]; +"773 Constant_13331" [id=773, type=Constant]; +"774 Constant_13330" [id=774, type=Constant]; +"775 Constant_13329" [id=775, type=Constant]; +"776 Multiply_9705" [id=776, type=Constant]; +"777 Constant_13327" [id=777, type=Constant]; +"778 Constant_13326" [id=778, type=Constant]; +"779 Constant_13325" [id=779, type=Constant]; +"780 Constant_13324" [id=780, type=Constant]; +"781 Constant_13322" [id=781, type=Constant]; +"782 Constant_13321" [id=782, type=Constant]; +"783 Constant_13320" [id=783, type=Constant]; +"784 Constant_13319" [id=784, type=Constant]; +"785 Transpose_5611" [id=785, type=Constant]; +"786 Convolution_1141/fq_weights_1" [id=786, type=FakeQuantize]; +"787 Constant_13317" [id=787, type=Constant]; +"788 Constant_13316" [id=788, type=Constant]; +"789 Constant_13315" [id=789, type=Constant]; +"790 Constant_13314" [id=790, type=Constant]; +"791 Transpose_1140" [id=791, type=Constant]; +"792 Constant_13312" [id=792, type=Constant]; +"793 Constant_13311" [id=793, type=Constant]; +"794 Constant_13310" [id=794, type=Constant]; +"795 Constant_13309" [id=795, type=Constant]; +"796 Transpose_5603" [id=796, type=Constant]; +"797 Convolution_1132/fq_weights_1" [id=797, type=FakeQuantize]; +"798 Constant_13307" [id=798, type=Constant]; +"799 Constant_13306" [id=799, type=Constant]; +"800 Constant_13305" [id=800, type=Constant]; +"801 Constant_13304" [id=801, type=Constant]; +"802 Transpose_1131" [id=802, type=Constant]; +"803 Constant_13302" [id=803, type=Constant]; +"804 Constant_13301" [id=804, type=Constant]; +"805 Constant_13300" [id=805, type=Constant]; +"806 Constant_13299" [id=806, type=Constant]; +"807 Constant_5597" [id=807, type=Constant]; +"808 Constant_13297" [id=808, type=Constant]; +"809 Constant_13296" [id=809, type=Constant]; +"810 Constant_13295" [id=810, type=Constant]; +"811 Constant_13294" [id=811, type=Constant]; +"812 Constant_9315" [id=812, type=Constant]; +"813 
Multiply_9307/fq_weights_1" [id=813, type=FakeQuantize]; +"814 Constant_13292" [id=814, type=Constant]; +"815 Constant_13291" [id=815, type=Constant]; +"816 Constant_13290" [id=816, type=Constant]; +"817 Constant_13289" [id=817, type=Constant]; +"818 Multiply_9700" [id=818, type=Constant]; +"819 Constant_13287" [id=819, type=Constant]; +"820 Constant_13286" [id=820, type=Constant]; +"821 Constant_13285" [id=821, type=Constant]; +"822 Constant_13284" [id=822, type=Constant]; +"823 Constant_9301" [id=823, type=Constant]; +"824 Multiply_9293/fq_weights_1" [id=824, type=FakeQuantize]; +"825 Constant_13282" [id=825, type=Constant]; +"826 Constant_13281" [id=826, type=Constant]; +"827 Constant_13280" [id=827, type=Constant]; +"828 Constant_13279" [id=828, type=Constant]; +"829 Multiply_9694" [id=829, type=Constant]; +"830 Constant_13277" [id=830, type=Constant]; +"831 Constant_13276" [id=831, type=Constant]; +"832 Constant_13275" [id=832, type=Constant]; +"833 Constant_13274" [id=833, type=Constant]; +"834 Constant_13272" [id=834, type=Constant]; +"835 Constant_13271" [id=835, type=Constant]; +"836 Constant_13270" [id=836, type=Constant]; +"837 Constant_13269" [id=837, type=Constant]; +"838 Constant_9287" [id=838, type=Constant]; +"839 Multiply_9279/fq_weights_1" [id=839, type=FakeQuantize]; +"840 Constant_13267" [id=840, type=Constant]; +"841 Constant_13266" [id=841, type=Constant]; +"842 Constant_13265" [id=842, type=Constant]; +"843 Constant_13264" [id=843, type=Constant]; +"844 Multiply_9688" [id=844, type=Constant]; +"845 Constant_13262" [id=845, type=Constant]; +"846 Constant_13261" [id=846, type=Constant]; +"847 Constant_13260" [id=847, type=Constant]; +"848 Constant_13259" [id=848, type=Constant]; +"849 Constant_9273" [id=849, type=Constant]; +"850 Multiply_9265/fq_weights_1" [id=850, type=FakeQuantize]; +"851 Constant_13257" [id=851, type=Constant]; +"852 Constant_13256" [id=852, type=Constant]; +"853 Constant_13255" [id=853, type=Constant]; +"854 Constant_13254" [id=854, type=Constant]; +"855 Multiply_9683" [id=855, type=Constant]; +"856 Constant_13252" [id=856, type=Constant]; +"857 Constant_13251" [id=857, type=Constant]; +"858 Constant_13250" [id=858, type=Constant]; +"859 Constant_13249" [id=859, type=Constant]; +"860 Constant_9259" [id=860, type=Constant]; +"861 Multiply_9251/fq_weights_1" [id=861, type=FakeQuantize]; +"862 Constant_13247" [id=862, type=Constant]; +"863 Constant_13246" [id=863, type=Constant]; +"864 Constant_13245" [id=864, type=Constant]; +"865 Constant_13244" [id=865, type=Constant]; +"866 Multiply_9677" [id=866, type=Constant]; +"867 Constant_13242" [id=867, type=Constant]; +"868 Constant_13241" [id=868, type=Constant]; +"869 Constant_13240" [id=869, type=Constant]; +"870 Constant_13239" [id=870, type=Constant]; +"871 Constant_9245" [id=871, type=Constant]; +"872 Multiply_9237/fq_weights_1" [id=872, type=FakeQuantize]; +"873 Constant_13237" [id=873, type=Constant]; +"874 Constant_13236" [id=874, type=Constant]; +"875 Constant_13235" [id=875, type=Constant]; +"876 Constant_13234" [id=876, type=Constant]; +"877 Multiply_9671" [id=877, type=Constant]; +"878 Constant_13232" [id=878, type=Constant]; +"879 Constant_13231" [id=879, type=Constant]; +"880 Constant_13230" [id=880, type=Constant]; +"881 Constant_13229" [id=881, type=Constant]; +"882 Constant_9231" [id=882, type=Constant]; +"883 Multiply_9223/fq_weights_1" [id=883, type=FakeQuantize]; +"884 Constant_13227" [id=884, type=Constant]; +"885 Constant_13226" [id=885, type=Constant]; +"886 Constant_13225" 
[id=886, type=Constant]; +"887 Constant_13224" [id=887, type=Constant]; +"888 Multiply_9666" [id=888, type=Constant]; +"889 Constant_13222" [id=889, type=Constant]; +"890 Constant_13221" [id=890, type=Constant]; +"891 Constant_13220" [id=891, type=Constant]; +"892 Constant_13219" [id=892, type=Constant]; +"893 Constant_9217" [id=893, type=Constant]; +"894 Multiply_9209/fq_weights_1" [id=894, type=FakeQuantize]; +"895 Constant_13217" [id=895, type=Constant]; +"896 Constant_13216" [id=896, type=Constant]; +"897 Constant_13215" [id=897, type=Constant]; +"898 Constant_13214" [id=898, type=Constant]; +"899 Multiply_9660" [id=899, type=Constant]; +"900 Constant_13212" [id=900, type=Constant]; +"901 Constant_13211" [id=901, type=Constant]; +"902 Constant_13210" [id=902, type=Constant]; +"903 Constant_13209" [id=903, type=Constant]; +"904 Constant_9203" [id=904, type=Constant]; +"905 Multiply_9195/fq_weights_1" [id=905, type=FakeQuantize]; +"906 Constant_13207" [id=906, type=Constant]; +"907 Constant_13206" [id=907, type=Constant]; +"908 Constant_13205" [id=908, type=Constant]; +"909 Constant_13204" [id=909, type=Constant]; +"910 Multiply_9654" [id=910, type=Constant]; +"911 Constant_13202" [id=911, type=Constant]; +"912 Constant_13201" [id=912, type=Constant]; +"913 Constant_13200" [id=913, type=Constant]; +"914 Constant_13199" [id=914, type=Constant]; +"915 Constant_13197" [id=915, type=Constant]; +"916 Constant_13196" [id=916, type=Constant]; +"917 Constant_13195" [id=917, type=Constant]; +"918 Constant_13194" [id=918, type=Constant]; +"919 Transpose_5257" [id=919, type=Constant]; +"920 Convolution_810/fq_weights_1" [id=920, type=FakeQuantize]; +"921 Constant_13192" [id=921, type=Constant]; +"922 Constant_13191" [id=922, type=Constant]; +"923 Constant_13190" [id=923, type=Constant]; +"924 Constant_13189" [id=924, type=Constant]; +"925 Transpose_809" [id=925, type=Constant]; +"926 Constant_13187" [id=926, type=Constant]; +"927 Constant_13186" [id=927, type=Constant]; +"928 Constant_13185" [id=928, type=Constant]; +"929 Constant_13184" [id=929, type=Constant]; +"930 Transpose_5249" [id=930, type=Constant]; +"931 Convolution_801/fq_weights_1" [id=931, type=FakeQuantize]; +"932 Constant_13182" [id=932, type=Constant]; +"933 Constant_13181" [id=933, type=Constant]; +"934 Constant_13180" [id=934, type=Constant]; +"935 Constant_13179" [id=935, type=Constant]; +"936 Transpose_800" [id=936, type=Constant]; +"937 Constant_13177" [id=937, type=Constant]; +"938 Constant_13176" [id=938, type=Constant]; +"939 Constant_13175" [id=939, type=Constant]; +"940 Constant_13174" [id=940, type=Constant]; +"941 Constant_5243" [id=941, type=Constant]; +"942 Constant_13172" [id=942, type=Constant]; +"943 Constant_13171" [id=943, type=Constant]; +"944 Constant_13170" [id=944, type=Constant]; +"945 Constant_13169" [id=945, type=Constant]; +"946 Constant_9189" [id=946, type=Constant]; +"947 Multiply_9181/fq_weights_1" [id=947, type=FakeQuantize]; +"948 Constant_13167" [id=948, type=Constant]; +"949 Constant_13166" [id=949, type=Constant]; +"950 Constant_13165" [id=950, type=Constant]; +"951 Constant_13164" [id=951, type=Constant]; +"952 Multiply_9649" [id=952, type=Constant]; +"953 Constant_13162" [id=953, type=Constant]; +"954 Constant_13161" [id=954, type=Constant]; +"955 Constant_13160" [id=955, type=Constant]; +"956 Constant_13159" [id=956, type=Constant]; +"957 Constant_9175" [id=957, type=Constant]; +"958 Multiply_9167/fq_weights_1" [id=958, type=FakeQuantize]; +"959 Constant_13157" [id=959, type=Constant]; +"960 
Constant_13156" [id=960, type=Constant]; +"961 Constant_13155" [id=961, type=Constant]; +"962 Constant_13154" [id=962, type=Constant]; +"963 Gather_10068" [id=963, type=Constant]; +"964 Constant_13152" [id=964, type=Constant]; +"965 Constant_13151" [id=965, type=Constant]; +"966 Constant_13150" [id=966, type=Constant]; +"967 Constant_13149" [id=967, type=Constant]; +"968 Unsqueeze_7776" [id=968, type=Constant]; +"969 Unsqueeze_7782" [id=969, type=Constant]; +"970 Constant_7779" [id=970, type=Constant]; +"0 input_1" -> "1 Transpose_7780" [label="[1, 224, 224, 3]", style=solid]; +"1 Transpose_7780" -> "2 Transpose_7774" [label="[1, 3, 224, 224]", style=solid]; +"2 Transpose_7774" -> "3 Transpose_710" [label="[1, 3, 224, 224]", style=solid]; +"3 Transpose_710" -> "4 Transpose_710/fq_output_0" [label="[1, 3, 224, 224]", style=solid]; +"4 Transpose_710/fq_output_0" -> "5 Multiply_9167" [label="[1, 3, 224, 224]", style=solid]; +"5 Multiply_9167" -> "6 Transpose_5170" [label="[1, 16, 112, 112]", style=solid]; +"6 Transpose_5170" -> "7 Transpose_5188" [label="[1, 16, 112, 112]", style=solid]; +"7 Transpose_5188" -> "8 Transpose_5188/fq_output_0" [label="[1, 16, 112, 112]", style=solid]; +"8 Transpose_5188/fq_output_0" -> "9 Multiply_9181" [label="[1, 16, 112, 112]", style=solid]; +"9 Multiply_9181" -> "10 Transpose_5239" [label="[1, 16, 56, 56]", style=solid]; +"10 Transpose_5239" -> "11 Transpose_5241" [label="[1, 16, 56, 56]", style=solid]; +"11 Transpose_5241" -> "12 Transpose_5241/fq_output_0" [label="[1, 16, 56, 56]", style=solid]; +"12 Transpose_5241/fq_output_0" -> "13 Transpose_5245" [label="[1, 16, 56, 56]", style=solid]; +"12 Transpose_5241/fq_output_0" -> "14 Transpose_5277" [label="[1, 16, 56, 56]", style=solid]; +"13 Transpose_5245" -> "15 Transpose_5245/fq_output_0" [label="[1, 16, 1, 1]", style=solid]; +"14 Transpose_5277" -> "16 Transpose_5277/fq_output_0" [label="[1, 16, 56, 56]", style=solid]; +"15 Transpose_5245/fq_output_0" -> "17 Convolution_801" [label="[1, 16, 1, 1]", style=solid]; +"16 Transpose_5277/fq_output_0" -> "18 Multiply_9195" [label="[1, 16, 56, 56]", style=solid]; +"17 Convolution_801" -> "19 Transpose_5251" [label="[1, 8, 1, 1]", style=solid]; +"18 Multiply_9195" -> "20 Transpose_5301" [label="[1, 16, 56, 56]", style=solid]; +"19 Transpose_5251" -> "21 Transpose_5253" [label="[1, 8, 1, 1]", style=solid]; +"20 Transpose_5301" -> "22 Transpose_5301/fq_output_0" [label="[1, 16, 56, 56]", style=solid]; +"21 Transpose_5253" -> "23 Transpose_5253/fq_output_0" [label="[1, 8, 1, 1]", style=solid]; +"22 Transpose_5301/fq_output_0" -> "24 Multiply_9209" [label="[1, 16, 56, 56]", style=solid]; +"23 Transpose_5253/fq_output_0" -> "25 Convolution_810" [label="[1, 8, 1, 1]", style=solid]; +"24 Multiply_9209" -> "26 Transpose_5325" [label="[1, 72, 56, 56]", style=solid]; +"25 Convolution_810" -> "27 Transpose_5259" [label="[1, 16, 1, 1]", style=solid]; +"26 Transpose_5325" -> "28 Transpose_5327" [label="[1, 72, 56, 56]", style=solid]; +"27 Transpose_5259" -> "29 Transpose_5273" [label="[1, 16, 1, 1]", style=solid]; +"28 Transpose_5327" -> "30 Transpose_5327/fq_output_0" [label="[1, 72, 56, 56]", style=solid]; +"29 Transpose_5273" -> "31 Transpose_5273/fq_output_0" [label="[1, 16, 1, 1]", style=solid]; +"30 Transpose_5327/fq_output_0" -> "32 Multiply_9223" [label="[1, 72, 56, 56]", style=solid]; +"31 Transpose_5273/fq_output_0" -> "14 Transpose_5277" [label="[1, 16, 1, 1]", style=solid]; +"32 Multiply_9223" -> "33 Transpose_5378" [label="[1, 72, 28, 28]", style=solid]; +"33 
Transpose_5378" -> "34 Transpose_5380" [label="[1, 72, 28, 28]", style=solid]; +"34 Transpose_5380" -> "35 Transpose_5380/fq_output_0" [label="[1, 72, 28, 28]", style=solid]; +"35 Transpose_5380/fq_output_0" -> "36 Multiply_9237" [label="[1, 72, 28, 28]", style=solid]; +"36 Multiply_9237" -> "37 Transpose_5404" [label="[1, 24, 28, 28]", style=solid]; +"37 Transpose_5404" -> "38 Transpose_5404/fq_output_0" [label="[1, 24, 28, 28]", style=solid]; +"38 Transpose_5404/fq_output_0" -> "39 Multiply_9251" [label="[1, 24, 28, 28]", style=solid]; +"38 Transpose_5404/fq_output_0" -> "40 Transpose_5484" [label="[1, 24, 28, 28]", style=solid]; +"39 Multiply_9251" -> "41 Transpose_5428" [label="[1, 88, 28, 28]", style=solid]; +"40 Transpose_5484" -> "42 Transpose_5484/fq_output_0" [label="[1, 24, 28, 28]", style=solid]; +"41 Transpose_5428" -> "43 Transpose_5430" [label="[1, 88, 28, 28]", style=solid]; +"42 Transpose_5484/fq_output_0" -> "44 Multiply_9293" [label="[1, 24, 28, 28]", style=solid]; +"43 Transpose_5430" -> "45 Transpose_5430/fq_output_0" [label="[1, 88, 28, 28]", style=solid]; +"44 Multiply_9293" -> "46 Transpose_5508" [label="[1, 96, 28, 28]", style=solid]; +"45 Transpose_5430/fq_output_0" -> "47 Multiply_9265" [label="[1, 88, 28, 28]", style=solid]; +"46 Transpose_5508" -> "48 Transpose_5526" [label="[1, 96, 28, 28]", style=solid]; +"47 Multiply_9265" -> "49 Transpose_5454" [label="[1, 88, 28, 28]", style=solid]; +"48 Transpose_5526" -> "50 Transpose_5526/fq_output_0" [label="[1, 96, 28, 28]", style=solid]; +"49 Transpose_5454" -> "51 Transpose_5456" [label="[1, 88, 28, 28]", style=solid]; +"50 Transpose_5526/fq_output_0" -> "52 Multiply_9307" [label="[1, 96, 28, 28]", style=solid]; +"51 Transpose_5456" -> "53 Transpose_5456/fq_output_0" [label="[1, 88, 28, 28]", style=solid]; +"52 Multiply_9307" -> "54 Transpose_5577" [label="[1, 96, 14, 14]", style=solid]; +"53 Transpose_5456/fq_output_0" -> "55 Multiply_9279" [label="[1, 88, 28, 28]", style=solid]; +"54 Transpose_5577" -> "56 Transpose_5595" [label="[1, 96, 14, 14]", style=solid]; +"55 Multiply_9279" -> "57 Transpose_5480" [label="[1, 24, 28, 28]", style=solid]; +"56 Transpose_5595" -> "58 Transpose_5595/fq_output_0" [label="[1, 96, 14, 14]", style=solid]; +"57 Transpose_5480" -> "59 Transpose_5480/fq_output_0" [label="[1, 24, 28, 28]", style=solid]; +"58 Transpose_5595/fq_output_0" -> "60 Transpose_5599" [label="[1, 96, 14, 14]", style=solid]; +"58 Transpose_5595/fq_output_0" -> "61 Transpose_5631" [label="[1, 96, 14, 14]", style=solid]; +"59 Transpose_5480/fq_output_0" -> "40 Transpose_5484" [label="[1, 24, 28, 28]", style=solid]; +"60 Transpose_5599" -> "62 Transpose_5599/fq_output_0" [label="[1, 96, 1, 1]", style=solid]; +"61 Transpose_5631" -> "63 Transpose_5631/fq_output_0" [label="[1, 96, 14, 14]", style=solid]; +"62 Transpose_5599/fq_output_0" -> "64 Convolution_1132" [label="[1, 96, 1, 1]", style=solid]; +"63 Transpose_5631/fq_output_0" -> "65 Multiply_9321" [label="[1, 96, 14, 14]", style=solid]; +"64 Convolution_1132" -> "66 Transpose_5605" [label="[1, 24, 1, 1]", style=solid]; +"65 Multiply_9321" -> "67 Transpose_5655" [label="[1, 40, 14, 14]", style=solid]; +"66 Transpose_5605" -> "68 Transpose_5607" [label="[1, 24, 1, 1]", style=solid]; +"67 Transpose_5655" -> "69 Transpose_5655/fq_output_0" [label="[1, 40, 14, 14]", style=solid]; +"68 Transpose_5607" -> "70 Transpose_5607/fq_output_0" [label="[1, 24, 1, 1]", style=solid]; +"69 Transpose_5655/fq_output_0" -> "71 Multiply_9335" [label="[1, 40, 14, 14]", style=solid]; +"69 
Transpose_5655/fq_output_0" -> "72 Transpose_5803" [label="[1, 40, 14, 14]", style=solid]; +"70 Transpose_5607/fq_output_0" -> "73 Convolution_1141" [label="[1, 24, 1, 1]", style=solid]; +"71 Multiply_9335" -> "74 Transpose_5679" [label="[1, 240, 14, 14]", style=solid]; +"72 Transpose_5803" -> "75 Transpose_5803/fq_output_0" [label="[1, 40, 14, 14]", style=solid]; +"73 Convolution_1141" -> "76 Transpose_5613" [label="[1, 96, 1, 1]", style=solid]; +"74 Transpose_5679" -> "77 Transpose_5697" [label="[1, 240, 14, 14]", style=solid]; +"75 Transpose_5803/fq_output_0" -> "78 Multiply_9377" [label="[1, 40, 14, 14]", style=solid]; +"75 Transpose_5803/fq_output_0" -> "79 Transpose_5951" [label="[1, 40, 14, 14]", style=solid]; +"76 Transpose_5613" -> "80 Transpose_5627" [label="[1, 96, 1, 1]", style=solid]; +"77 Transpose_5697" -> "81 Transpose_5697/fq_output_0" [label="[1, 240, 14, 14]", style=solid]; +"78 Multiply_9377" -> "82 Transpose_5827" [label="[1, 240, 14, 14]", style=solid]; +"79 Transpose_5951" -> "83 Transpose_5951/fq_output_0" [label="[1, 40, 14, 14]", style=solid]; +"80 Transpose_5627" -> "84 Transpose_5627/fq_output_0" [label="[1, 96, 1, 1]", style=solid]; +"81 Transpose_5697/fq_output_0" -> "85 Multiply_9349" [label="[1, 240, 14, 14]", style=solid]; +"82 Transpose_5827" -> "86 Transpose_5845" [label="[1, 240, 14, 14]", style=solid]; +"83 Transpose_5951/fq_output_0" -> "87 Multiply_9419" [label="[1, 40, 14, 14]", style=solid]; +"84 Transpose_5627/fq_output_0" -> "61 Transpose_5631" [label="[1, 96, 1, 1]", style=solid]; +"85 Multiply_9349" -> "88 Transpose_5721" [label="[1, 240, 14, 14]", style=solid]; +"86 Transpose_5845" -> "89 Transpose_5845/fq_output_0" [label="[1, 240, 14, 14]", style=solid]; +"87 Multiply_9419" -> "90 Transpose_5975" [label="[1, 120, 14, 14]", style=solid]; +"88 Transpose_5721" -> "91 Transpose_5739" [label="[1, 240, 14, 14]", style=solid]; +"89 Transpose_5845/fq_output_0" -> "92 Multiply_9391" [label="[1, 240, 14, 14]", style=solid]; +"90 Transpose_5975" -> "93 Transpose_5993" [label="[1, 120, 14, 14]", style=solid]; +"91 Transpose_5739" -> "94 Transpose_5739/fq_output_0" [label="[1, 240, 14, 14]", style=solid]; +"92 Multiply_9391" -> "95 Transpose_5869" [label="[1, 240, 14, 14]", style=solid]; +"93 Transpose_5993" -> "96 Transpose_5993/fq_output_0" [label="[1, 120, 14, 14]", style=solid]; +"94 Transpose_5739/fq_output_0" -> "97 Transpose_5743" [label="[1, 240, 14, 14]", style=solid]; +"94 Transpose_5739/fq_output_0" -> "98 Transpose_5775" [label="[1, 240, 14, 14]", style=solid]; +"95 Transpose_5869" -> "99 Transpose_5887" [label="[1, 240, 14, 14]", style=solid]; +"96 Transpose_5993/fq_output_0" -> "100 Multiply_9433" [label="[1, 120, 14, 14]", style=solid]; +"97 Transpose_5743" -> "101 Transpose_5743/fq_output_0" [label="[1, 240, 1, 1]", style=solid]; +"98 Transpose_5775" -> "102 Transpose_5775/fq_output_0" [label="[1, 240, 14, 14]", style=solid]; +"99 Transpose_5887" -> "103 Transpose_5887/fq_output_0" [label="[1, 240, 14, 14]", style=solid]; +"100 Multiply_9433" -> "104 Transpose_6017" [label="[1, 120, 14, 14]", style=solid]; +"101 Transpose_5743/fq_output_0" -> "105 Convolution_1242" [label="[1, 240, 1, 1]", style=solid]; +"102 Transpose_5775/fq_output_0" -> "106 Multiply_9363" [label="[1, 240, 14, 14]", style=solid]; +"103 Transpose_5887/fq_output_0" -> "107 Transpose_5891" [label="[1, 240, 14, 14]", style=solid]; +"103 Transpose_5887/fq_output_0" -> "108 Transpose_5923" [label="[1, 240, 14, 14]", style=solid]; +"104 Transpose_6017" -> "109 Transpose_6035" 
[label="[1, 120, 14, 14]", style=solid]; +"105 Convolution_1242" -> "110 Transpose_5749" [label="[1, 64, 1, 1]", style=solid]; +"106 Multiply_9363" -> "111 Transpose_5799" [label="[1, 40, 14, 14]", style=solid]; +"107 Transpose_5891" -> "112 Transpose_5891/fq_output_0" [label="[1, 240, 1, 1]", style=solid]; +"108 Transpose_5923" -> "113 Transpose_5923/fq_output_0" [label="[1, 240, 14, 14]", style=solid]; +"109 Transpose_6035" -> "114 Transpose_6035/fq_output_0" [label="[1, 120, 14, 14]", style=solid]; +"110 Transpose_5749" -> "115 Transpose_5751" [label="[1, 64, 1, 1]", style=solid]; +"111 Transpose_5799" -> "116 Transpose_5799/fq_output_0" [label="[1, 40, 14, 14]", style=solid]; +"112 Transpose_5891/fq_output_0" -> "117 Convolution_1353" [label="[1, 240, 1, 1]", style=solid]; +"113 Transpose_5923/fq_output_0" -> "118 Multiply_9405" [label="[1, 240, 14, 14]", style=solid]; +"114 Transpose_6035/fq_output_0" -> "119 Transpose_6039" [label="[1, 120, 14, 14]", style=solid]; +"114 Transpose_6035/fq_output_0" -> "120 Transpose_6071" [label="[1, 120, 14, 14]", style=solid]; +"115 Transpose_5751" -> "121 Transpose_5751/fq_output_0" [label="[1, 64, 1, 1]", style=solid]; +"116 Transpose_5799/fq_output_0" -> "72 Transpose_5803" [label="[1, 40, 14, 14]", style=solid]; +"117 Convolution_1353" -> "122 Transpose_5897" [label="[1, 64, 1, 1]", style=solid]; +"118 Multiply_9405" -> "123 Transpose_5947" [label="[1, 40, 14, 14]", style=solid]; +"119 Transpose_6039" -> "124 Transpose_6039/fq_output_0" [label="[1, 120, 1, 1]", style=solid]; +"120 Transpose_6071" -> "125 Transpose_6071/fq_output_0" [label="[1, 120, 14, 14]", style=solid]; +"121 Transpose_5751/fq_output_0" -> "126 Convolution_1251" [label="[1, 64, 1, 1]", style=solid]; +"122 Transpose_5897" -> "127 Transpose_5899" [label="[1, 64, 1, 1]", style=solid]; +"123 Transpose_5947" -> "128 Transpose_5947/fq_output_0" [label="[1, 40, 14, 14]", style=solid]; +"124 Transpose_6039/fq_output_0" -> "129 Convolution_1464" [label="[1, 120, 1, 1]", style=solid]; +"125 Transpose_6071/fq_output_0" -> "130 Multiply_9447" [label="[1, 120, 14, 14]", style=solid]; +"126 Convolution_1251" -> "131 Transpose_5757" [label="[1, 240, 1, 1]", style=solid]; +"127 Transpose_5899" -> "132 Transpose_5899/fq_output_0" [label="[1, 64, 1, 1]", style=solid]; +"128 Transpose_5947/fq_output_0" -> "79 Transpose_5951" [label="[1, 40, 14, 14]", style=solid]; +"129 Convolution_1464" -> "133 Transpose_6045" [label="[1, 32, 1, 1]", style=solid]; +"130 Multiply_9447" -> "134 Transpose_6095" [label="[1, 48, 14, 14]", style=solid]; +"131 Transpose_5757" -> "135 Transpose_5771" [label="[1, 240, 1, 1]", style=solid]; +"132 Transpose_5899/fq_output_0" -> "136 Convolution_1362" [label="[1, 64, 1, 1]", style=solid]; +"133 Transpose_6045" -> "137 Transpose_6047" [label="[1, 32, 1, 1]", style=solid]; +"134 Transpose_6095" -> "138 Transpose_6095/fq_output_0" [label="[1, 48, 14, 14]", style=solid]; +"135 Transpose_5771" -> "139 Transpose_5771/fq_output_0" [label="[1, 240, 1, 1]", style=solid]; +"136 Convolution_1362" -> "140 Transpose_5905" [label="[1, 240, 1, 1]", style=solid]; +"137 Transpose_6047" -> "141 Transpose_6047/fq_output_0" [label="[1, 32, 1, 1]", style=solid]; +"138 Transpose_6095/fq_output_0" -> "142 Multiply_9461" [label="[1, 48, 14, 14]", style=solid]; +"138 Transpose_6095/fq_output_0" -> "143 Transpose_6243" [label="[1, 48, 14, 14]", style=solid]; +"139 Transpose_5771/fq_output_0" -> "98 Transpose_5775" [label="[1, 240, 1, 1]", style=solid]; +"140 Transpose_5905" -> "144 Transpose_5919" 
[label="[1, 240, 1, 1]", style=solid]; +"141 Transpose_6047/fq_output_0" -> "145 Convolution_1473" [label="[1, 32, 1, 1]", style=solid]; +"142 Multiply_9461" -> "146 Transpose_6119" [label="[1, 144, 14, 14]", style=solid]; +"143 Transpose_6243" -> "147 Transpose_6243/fq_output_0" [label="[1, 48, 14, 14]", style=solid]; +"144 Transpose_5919" -> "148 Transpose_5919/fq_output_0" [label="[1, 240, 1, 1]", style=solid]; +"145 Convolution_1473" -> "149 Transpose_6053" [label="[1, 120, 1, 1]", style=solid]; +"146 Transpose_6119" -> "150 Transpose_6137" [label="[1, 144, 14, 14]", style=solid]; +"147 Transpose_6243/fq_output_0" -> "151 Multiply_9503" [label="[1, 48, 14, 14]", style=solid]; +"148 Transpose_5919/fq_output_0" -> "108 Transpose_5923" [label="[1, 240, 1, 1]", style=solid]; +"149 Transpose_6053" -> "152 Transpose_6067" [label="[1, 120, 1, 1]", style=solid]; +"150 Transpose_6137" -> "153 Transpose_6137/fq_output_0" [label="[1, 144, 14, 14]", style=solid]; +"151 Multiply_9503" -> "154 Transpose_6267" [label="[1, 288, 14, 14]", style=solid]; +"152 Transpose_6067" -> "155 Transpose_6067/fq_output_0" [label="[1, 120, 1, 1]", style=solid]; +"153 Transpose_6137/fq_output_0" -> "156 Multiply_9475" [label="[1, 144, 14, 14]", style=solid]; +"154 Transpose_6267" -> "157 Transpose_6285" [label="[1, 288, 14, 14]", style=solid]; +"155 Transpose_6067/fq_output_0" -> "120 Transpose_6071" [label="[1, 120, 1, 1]", style=solid]; +"156 Multiply_9475" -> "158 Transpose_6161" [label="[1, 144, 14, 14]", style=solid]; +"157 Transpose_6285" -> "159 Transpose_6285/fq_output_0" [label="[1, 288, 14, 14]", style=solid]; +"158 Transpose_6161" -> "160 Transpose_6179" [label="[1, 144, 14, 14]", style=solid]; +"159 Transpose_6285/fq_output_0" -> "161 Multiply_9517" [label="[1, 288, 14, 14]", style=solid]; +"160 Transpose_6179" -> "162 Transpose_6179/fq_output_0" [label="[1, 144, 14, 14]", style=solid]; +"161 Multiply_9517" -> "163 Transpose_6336" [label="[1, 288, 7, 7]", style=solid]; +"162 Transpose_6179/fq_output_0" -> "164 Transpose_6183" [label="[1, 144, 14, 14]", style=solid]; +"162 Transpose_6179/fq_output_0" -> "165 Transpose_6215" [label="[1, 144, 14, 14]", style=solid]; +"163 Transpose_6336" -> "166 Transpose_6354" [label="[1, 288, 7, 7]", style=solid]; +"164 Transpose_6183" -> "167 Transpose_6183/fq_output_0" [label="[1, 144, 1, 1]", style=solid]; +"165 Transpose_6215" -> "168 Transpose_6215/fq_output_0" [label="[1, 144, 14, 14]", style=solid]; +"166 Transpose_6354" -> "169 Transpose_6354/fq_output_0" [label="[1, 288, 7, 7]", style=solid]; +"167 Transpose_6183/fq_output_0" -> "170 Convolution_1574" [label="[1, 144, 1, 1]", style=solid]; +"168 Transpose_6215/fq_output_0" -> "171 Multiply_9489" [label="[1, 144, 14, 14]", style=solid]; +"169 Transpose_6354/fq_output_0" -> "172 Transpose_6358" [label="[1, 288, 7, 7]", style=solid]; +"169 Transpose_6354/fq_output_0" -> "173 Transpose_6390" [label="[1, 288, 7, 7]", style=solid]; +"170 Convolution_1574" -> "174 Transpose_6189" [label="[1, 40, 1, 1]", style=solid]; +"171 Multiply_9489" -> "175 Transpose_6239" [label="[1, 48, 14, 14]", style=solid]; +"172 Transpose_6358" -> "176 Transpose_6358/fq_output_0" [label="[1, 288, 1, 1]", style=solid]; +"173 Transpose_6390" -> "177 Transpose_6390/fq_output_0" [label="[1, 288, 7, 7]", style=solid]; +"174 Transpose_6189" -> "178 Transpose_6191" [label="[1, 40, 1, 1]", style=solid]; +"175 Transpose_6239" -> "179 Transpose_6239/fq_output_0" [label="[1, 48, 14, 14]", style=solid]; +"176 Transpose_6358/fq_output_0" -> "180 
Convolution_1713" [label="[1, 288, 1, 1]", style=solid]; +"177 Transpose_6390/fq_output_0" -> "181 Multiply_9531" [label="[1, 288, 7, 7]", style=solid]; +"178 Transpose_6191" -> "182 Transpose_6191/fq_output_0" [label="[1, 40, 1, 1]", style=solid]; +"179 Transpose_6239/fq_output_0" -> "143 Transpose_6243" [label="[1, 48, 14, 14]", style=solid]; +"180 Convolution_1713" -> "183 Transpose_6364" [label="[1, 72, 1, 1]", style=solid]; +"181 Multiply_9531" -> "184 Transpose_6414" [label="[1, 96, 7, 7]", style=solid]; +"182 Transpose_6191/fq_output_0" -> "185 Convolution_1583" [label="[1, 40, 1, 1]", style=solid]; +"183 Transpose_6364" -> "186 Transpose_6366" [label="[1, 72, 1, 1]", style=solid]; +"184 Transpose_6414" -> "187 Transpose_6414/fq_output_0" [label="[1, 96, 7, 7]", style=solid]; +"185 Convolution_1583" -> "188 Transpose_6197" [label="[1, 144, 1, 1]", style=solid]; +"186 Transpose_6366" -> "189 Transpose_6366/fq_output_0" [label="[1, 72, 1, 1]", style=solid]; +"187 Transpose_6414/fq_output_0" -> "190 Multiply_9545" [label="[1, 96, 7, 7]", style=solid]; +"187 Transpose_6414/fq_output_0" -> "191 Transpose_6562" [label="[1, 96, 7, 7]", style=solid]; +"188 Transpose_6197" -> "192 Transpose_6211" [label="[1, 144, 1, 1]", style=solid]; +"189 Transpose_6366/fq_output_0" -> "193 Convolution_1722" [label="[1, 72, 1, 1]", style=solid]; +"190 Multiply_9545" -> "194 Transpose_6438" [label="[1, 576, 7, 7]", style=solid]; +"191 Transpose_6562" -> "195 Transpose_6562/fq_output_0" [label="[1, 96, 7, 7]", style=solid]; +"192 Transpose_6211" -> "196 Transpose_6211/fq_output_0" [label="[1, 144, 1, 1]", style=solid]; +"193 Convolution_1722" -> "197 Transpose_6372" [label="[1, 288, 1, 1]", style=solid]; +"194 Transpose_6438" -> "198 Transpose_6456" [label="[1, 576, 7, 7]", style=solid]; +"195 Transpose_6562/fq_output_0" -> "199 Multiply_9587" [label="[1, 96, 7, 7]", style=solid]; +"195 Transpose_6562/fq_output_0" -> "200 Transpose_6710" [label="[1, 96, 7, 7]", style=solid]; +"196 Transpose_6211/fq_output_0" -> "165 Transpose_6215" [label="[1, 144, 1, 1]", style=solid]; +"197 Transpose_6372" -> "201 Transpose_6386" [label="[1, 288, 1, 1]", style=solid]; +"198 Transpose_6456" -> "202 Transpose_6456/fq_output_0" [label="[1, 576, 7, 7]", style=solid]; +"199 Multiply_9587" -> "203 Transpose_6586" [label="[1, 576, 7, 7]", style=solid]; +"200 Transpose_6710" -> "204 Transpose_6710/fq_output_0" [label="[1, 96, 7, 7]", style=solid]; +"201 Transpose_6386" -> "205 Transpose_6386/fq_output_0" [label="[1, 288, 1, 1]", style=solid]; +"202 Transpose_6456/fq_output_0" -> "206 Multiply_9559" [label="[1, 576, 7, 7]", style=solid]; +"203 Transpose_6586" -> "207 Transpose_6604" [label="[1, 576, 7, 7]", style=solid]; +"204 Transpose_6710/fq_output_0" -> "208 Multiply_9629" [label="[1, 96, 7, 7]", style=solid]; +"205 Transpose_6386/fq_output_0" -> "173 Transpose_6390" [label="[1, 288, 1, 1]", style=solid]; +"206 Multiply_9559" -> "209 Transpose_6480" [label="[1, 576, 7, 7]", style=solid]; +"207 Transpose_6604" -> "210 Transpose_6604/fq_output_0" [label="[1, 576, 7, 7]", style=solid]; +"208 Multiply_9629" -> "211 Transpose_6734" [label="[1, 576, 7, 7]", style=solid]; +"209 Transpose_6480" -> "212 Transpose_6498" [label="[1, 576, 7, 7]", style=solid]; +"210 Transpose_6604/fq_output_0" -> "213 Multiply_9601" [label="[1, 576, 7, 7]", style=solid]; +"211 Transpose_6734" -> "214 Transpose_6752" [label="[1, 576, 7, 7]", style=solid]; +"212 Transpose_6498" -> "215 Transpose_6498/fq_output_0" [label="[1, 576, 7, 7]", style=solid]; +"213 
Multiply_9601" -> "216 Transpose_6628" [label="[1, 576, 7, 7]", style=solid]; +"214 Transpose_6752" -> "217 Transpose_6752/fq_output_0" [label="[1, 576, 7, 7]", style=solid]; +"215 Transpose_6498/fq_output_0" -> "218 Transpose_6502" [label="[1, 576, 7, 7]", style=solid]; +"215 Transpose_6498/fq_output_0" -> "219 Transpose_6534" [label="[1, 576, 7, 7]", style=solid]; +"216 Transpose_6628" -> "220 Transpose_6646" [label="[1, 576, 7, 7]", style=solid]; +"217 Transpose_6752/fq_output_0" -> "221 Transpose_6756" [label="[1, 576, 7, 7]", style=solid]; +"218 Transpose_6502" -> "222 Transpose_6502/fq_output_0" [label="[1, 576, 1, 1]", style=solid]; +"219 Transpose_6534" -> "223 Transpose_6534/fq_output_0" [label="[1, 576, 7, 7]", style=solid]; +"220 Transpose_6646" -> "224 Transpose_6646/fq_output_0" [label="[1, 576, 7, 7]", style=solid]; +"221 Transpose_6756" -> "225 Transpose_6756/fq_output_0" [label="[1, 576, 1, 1]", style=solid]; +"222 Transpose_6502/fq_output_0" -> "226 Convolution_1823" [label="[1, 576, 1, 1]", style=solid]; +"223 Transpose_6534/fq_output_0" -> "227 Multiply_9573" [label="[1, 576, 7, 7]", style=solid]; +"224 Transpose_6646/fq_output_0" -> "228 Transpose_6650" [label="[1, 576, 7, 7]", style=solid]; +"224 Transpose_6646/fq_output_0" -> "229 Transpose_6682" [label="[1, 576, 7, 7]", style=solid]; +"225 Transpose_6756/fq_output_0" -> "230 Convolution_2013" [label="[1, 576, 1, 1]", style=solid]; +"226 Convolution_1823" -> "231 Transpose_6508" [label="[1, 144, 1, 1]", style=solid]; +"227 Multiply_9573" -> "232 Transpose_6558" [label="[1, 96, 7, 7]", style=solid]; +"228 Transpose_6650" -> "233 Transpose_6650/fq_output_0" [label="[1, 576, 1, 1]", style=solid]; +"229 Transpose_6682" -> "234 Transpose_6682/fq_output_0" [label="[1, 576, 7, 7]", style=solid]; +"230 Convolution_2013" -> "235 Transpose_6762" [label="[1, 1024, 1, 1]", style=solid]; +"231 Transpose_6508" -> "236 Transpose_6510" [label="[1, 144, 1, 1]", style=solid]; +"232 Transpose_6558" -> "237 Transpose_6558/fq_output_0" [label="[1, 96, 7, 7]", style=solid]; +"233 Transpose_6650/fq_output_0" -> "238 Convolution_1934" [label="[1, 576, 1, 1]", style=solid]; +"234 Transpose_6682/fq_output_0" -> "239 Multiply_9615" [label="[1, 576, 7, 7]", style=solid]; +"235 Transpose_6762" -> "240 Transpose_6780" [label="[1, 1024, 1, 1]", style=solid]; +"236 Transpose_6510" -> "241 Transpose_6510/fq_output_0" [label="[1, 144, 1, 1]", style=solid]; +"237 Transpose_6558/fq_output_0" -> "191 Transpose_6562" [label="[1, 96, 7, 7]", style=solid]; +"238 Convolution_1934" -> "242 Transpose_6656" [label="[1, 144, 1, 1]", style=solid]; +"239 Multiply_9615" -> "243 Transpose_6706" [label="[1, 96, 7, 7]", style=solid]; +"240 Transpose_6780" -> "244 Transpose_6780/fq_output_0" [label="[1, 1024, 1, 1]", style=solid]; +"241 Transpose_6510/fq_output_0" -> "245 Convolution_1832" [label="[1, 144, 1, 1]", style=solid]; +"242 Transpose_6656" -> "246 Transpose_6658" [label="[1, 144, 1, 1]", style=solid]; +"243 Transpose_6706" -> "247 Transpose_6706/fq_output_0" [label="[1, 96, 7, 7]", style=solid]; +"244 Transpose_6780/fq_output_0" -> "248 Convolution_2025" [label="[1, 1024, 1, 1]", style=solid]; +"245 Convolution_1832" -> "249 Transpose_6516" [label="[1, 576, 1, 1]", style=solid]; +"246 Transpose_6658" -> "250 Transpose_6658/fq_output_0" [label="[1, 144, 1, 1]", style=solid]; +"247 Transpose_6706/fq_output_0" -> "200 Transpose_6710" [label="[1, 96, 7, 7]", style=solid]; +"248 Convolution_2025" -> "251 Transpose_6786" [label="[1, 1000, 1, 1]", style=solid]; 
+"249 Transpose_6516" -> "252 Transpose_6530" [label="[1, 576, 1, 1]", style=solid]; +"250 Transpose_6658/fq_output_0" -> "253 Convolution_1943" [label="[1, 144, 1, 1]", style=solid]; +"251 Transpose_6786" -> "254 MobilenetV3small/Logits/BiasAdd" [label="[1, 1000, 1, 1]", style=solid]; +"252 Transpose_6530" -> "255 Transpose_6530/fq_output_0" [label="[1, 576, 1, 1]", style=solid]; +"253 Convolution_1943" -> "256 Transpose_6664" [label="[1, 576, 1, 1]", style=solid]; +"254 MobilenetV3small/Logits/BiasAdd" -> "257 MobilenetV3small/flatten/Reshape" [label="[1, 1, 1, 1000]", style=solid]; +"255 Transpose_6530/fq_output_0" -> "219 Transpose_6534" [label="[1, 576, 1, 1]", style=solid]; +"256 Transpose_6664" -> "258 Transpose_6678" [label="[1, 576, 1, 1]", style=solid]; +"257 MobilenetV3small/flatten/Reshape" -> "259 MobilenetV3small/Predictions/Softmax" [label="[1, 1000]", style=solid]; +"258 Transpose_6678" -> "260 Transpose_6678/fq_output_0" [label="[1, 576, 1, 1]", style=solid]; +"259 MobilenetV3small/Predictions/Softmax" -> "261 Predictions" [label="[1, 1000]", style=solid]; +"260 Transpose_6678/fq_output_0" -> "229 Transpose_6682" [label="[1, 576, 1, 1]", style=solid]; +"262 MobilenetV3small/flatten/Const" -> "257 MobilenetV3small/flatten/Reshape" [label="[2]", style=dashed]; +"263 Constant_8887" -> "254 MobilenetV3small/Logits/BiasAdd" [label="[4]", style=dashed]; +"264 Transpose_6784" -> "251 Transpose_6786" [label="[1, 1000, 1, 1]", style=solid]; +"265 Convolution_2025/fq_weights_1" -> "248 Convolution_2025" [label="[1000, 1024, 1, 1]", style=solid]; +"266 Constant_13812" -> "265 Convolution_2025/fq_weights_1" [label="[1000, 1, 1, 1]", style=solid]; +"267 Constant_13811" -> "265 Convolution_2025/fq_weights_1" [label="[1000, 1, 1, 1]", style=solid]; +"268 Constant_13810" -> "265 Convolution_2025/fq_weights_1" [label="[1000, 1, 1, 1]", style=solid]; +"269 Constant_13809" -> "265 Convolution_2025/fq_weights_1" [label="[1000, 1, 1, 1]", style=solid]; +"270 Transpose_2024" -> "265 Convolution_2025/fq_weights_1" [label="[1000, 1024, 1, 1]", style=solid]; +"271 Constant_13807" -> "244 Transpose_6780/fq_output_0" [label="[]", style=solid]; +"272 Constant_13806" -> "244 Transpose_6780/fq_output_0" [label="[]", style=solid]; +"273 Constant_13805" -> "244 Transpose_6780/fq_output_0" [label="[]", style=solid]; +"274 Constant_13804" -> "244 Transpose_6780/fq_output_0" [label="[]", style=solid]; +"275 Transpose_6760" -> "235 Transpose_6762" [label="[1, 1024, 1, 1]", style=solid]; +"276 Convolution_2013/fq_weights_1" -> "230 Convolution_2013" [label="[1024, 576, 1, 1]", style=solid]; +"277 Constant_13802" -> "276 Convolution_2013/fq_weights_1" [label="[1024, 1, 1, 1]", style=solid]; +"278 Constant_13801" -> "276 Convolution_2013/fq_weights_1" [label="[1024, 1, 1, 1]", style=solid]; +"279 Constant_13800" -> "276 Convolution_2013/fq_weights_1" [label="[1024, 1, 1, 1]", style=solid]; +"280 Constant_13799" -> "276 Convolution_2013/fq_weights_1" [label="[1024, 1, 1, 1]", style=solid]; +"281 Transpose_2012" -> "276 Convolution_2013/fq_weights_1" [label="[1024, 576, 1, 1]", style=solid]; +"282 Constant_13797" -> "225 Transpose_6756/fq_output_0" [label="[]", style=solid]; +"283 Constant_13796" -> "225 Transpose_6756/fq_output_0" [label="[]", style=solid]; +"284 Constant_13795" -> "225 Transpose_6756/fq_output_0" [label="[]", style=solid]; +"285 Constant_13794" -> "225 Transpose_6756/fq_output_0" [label="[]", style=solid]; +"286 Constant_6754" -> "221 Transpose_6756" [label="[2]", style=dashed]; +"287 
Constant_13792" -> "217 Transpose_6752/fq_output_0" [label="[]", style=solid]; +"288 Constant_13791" -> "217 Transpose_6752/fq_output_0" [label="[]", style=solid]; +"289 Constant_13790" -> "217 Transpose_6752/fq_output_0" [label="[]", style=solid]; +"290 Constant_13789" -> "217 Transpose_6752/fq_output_0" [label="[]", style=solid]; +"291 Constant_9637" -> "211 Transpose_6734" [label="[1, 576, 1, 1]", style=solid]; +"292 Multiply_9629/fq_weights_1" -> "208 Multiply_9629" [label="[576, 96, 1, 1]", style=solid]; +"293 Constant_13787" -> "292 Multiply_9629/fq_weights_1" [label="[576, 1, 1, 1]", style=solid]; +"294 Constant_13786" -> "292 Multiply_9629/fq_weights_1" [label="[576, 1, 1, 1]", style=solid]; +"295 Constant_13785" -> "292 Multiply_9629/fq_weights_1" [label="[576, 1, 1, 1]", style=solid]; +"296 Constant_13784" -> "292 Multiply_9629/fq_weights_1" [label="[576, 1, 1, 1]", style=solid]; +"297 Multiply_9830" -> "292 Multiply_9629/fq_weights_1" [label="[576, 96, 1, 1]", style=solid]; +"298 Constant_13782" -> "204 Transpose_6710/fq_output_0" [label="[]", style=solid]; +"299 Constant_13781" -> "204 Transpose_6710/fq_output_0" [label="[]", style=solid]; +"300 Constant_13780" -> "204 Transpose_6710/fq_output_0" [label="[]", style=solid]; +"301 Constant_13779" -> "204 Transpose_6710/fq_output_0" [label="[]", style=solid]; +"302 Constant_13777" -> "247 Transpose_6706/fq_output_0" [label="[]", style=solid]; +"303 Constant_13776" -> "247 Transpose_6706/fq_output_0" [label="[]", style=solid]; +"304 Constant_13775" -> "247 Transpose_6706/fq_output_0" [label="[]", style=solid]; +"305 Constant_13774" -> "247 Transpose_6706/fq_output_0" [label="[]", style=solid]; +"306 Constant_9623" -> "243 Transpose_6706" [label="[1, 96, 1, 1]", style=solid]; +"307 Multiply_9615/fq_weights_1" -> "239 Multiply_9615" [label="[96, 576, 1, 1]", style=solid]; +"308 Constant_13772" -> "307 Multiply_9615/fq_weights_1" [label="[96, 1, 1, 1]", style=solid]; +"309 Constant_13771" -> "307 Multiply_9615/fq_weights_1" [label="[96, 1, 1, 1]", style=solid]; +"310 Constant_13770" -> "307 Multiply_9615/fq_weights_1" [label="[96, 1, 1, 1]", style=solid]; +"311 Constant_13769" -> "307 Multiply_9615/fq_weights_1" [label="[96, 1, 1, 1]", style=solid]; +"312 Multiply_9824" -> "307 Multiply_9615/fq_weights_1" [label="[96, 576, 1, 1]", style=solid]; +"313 Constant_13767" -> "234 Transpose_6682/fq_output_0" [label="[]", style=solid]; +"314 Constant_13766" -> "234 Transpose_6682/fq_output_0" [label="[]", style=solid]; +"315 Constant_13765" -> "234 Transpose_6682/fq_output_0" [label="[]", style=solid]; +"316 Constant_13764" -> "234 Transpose_6682/fq_output_0" [label="[]", style=solid]; +"317 Constant_13762" -> "260 Transpose_6678/fq_output_0" [label="[]", style=solid]; +"318 Constant_13761" -> "260 Transpose_6678/fq_output_0" [label="[]", style=solid]; +"319 Constant_13760" -> "260 Transpose_6678/fq_output_0" [label="[]", style=solid]; +"320 Constant_13759" -> "260 Transpose_6678/fq_output_0" [label="[]", style=solid]; +"321 Transpose_6662" -> "256 Transpose_6664" [label="[1, 576, 1, 1]", style=solid]; +"322 Convolution_1943/fq_weights_1" -> "253 Convolution_1943" [label="[576, 144, 1, 1]", style=solid]; +"323 Constant_13757" -> "322 Convolution_1943/fq_weights_1" [label="[576, 1, 1, 1]", style=solid]; +"324 Constant_13756" -> "322 Convolution_1943/fq_weights_1" [label="[576, 1, 1, 1]", style=solid]; +"325 Constant_13755" -> "322 Convolution_1943/fq_weights_1" [label="[576, 1, 1, 1]", style=solid]; +"326 Constant_13754" -> "322 
Convolution_1943/fq_weights_1" [label="[576, 1, 1, 1]", style=solid]; +"327 Transpose_1942" -> "322 Convolution_1943/fq_weights_1" [label="[576, 144, 1, 1]", style=solid]; +"328 Constant_13752" -> "250 Transpose_6658/fq_output_0" [label="[]", style=solid]; +"329 Constant_13751" -> "250 Transpose_6658/fq_output_0" [label="[]", style=solid]; +"330 Constant_13750" -> "250 Transpose_6658/fq_output_0" [label="[]", style=solid]; +"331 Constant_13749" -> "250 Transpose_6658/fq_output_0" [label="[]", style=solid]; +"332 Transpose_6654" -> "242 Transpose_6656" [label="[1, 144, 1, 1]", style=solid]; +"333 Convolution_1934/fq_weights_1" -> "238 Convolution_1934" [label="[144, 576, 1, 1]", style=solid]; +"334 Constant_13747" -> "333 Convolution_1934/fq_weights_1" [label="[144, 1, 1, 1]", style=solid]; +"335 Constant_13746" -> "333 Convolution_1934/fq_weights_1" [label="[144, 1, 1, 1]", style=solid]; +"336 Constant_13745" -> "333 Convolution_1934/fq_weights_1" [label="[144, 1, 1, 1]", style=solid]; +"337 Constant_13744" -> "333 Convolution_1934/fq_weights_1" [label="[144, 1, 1, 1]", style=solid]; +"338 Transpose_1933" -> "333 Convolution_1934/fq_weights_1" [label="[144, 576, 1, 1]", style=solid]; +"339 Constant_13742" -> "233 Transpose_6650/fq_output_0" [label="[]", style=solid]; +"340 Constant_13741" -> "233 Transpose_6650/fq_output_0" [label="[]", style=solid]; +"341 Constant_13740" -> "233 Transpose_6650/fq_output_0" [label="[]", style=solid]; +"342 Constant_13739" -> "233 Transpose_6650/fq_output_0" [label="[]", style=solid]; +"343 Constant_6648" -> "228 Transpose_6650" [label="[2]", style=dashed]; +"344 Constant_13737" -> "224 Transpose_6646/fq_output_0" [label="[]", style=solid]; +"345 Constant_13736" -> "224 Transpose_6646/fq_output_0" [label="[]", style=solid]; +"346 Constant_13735" -> "224 Transpose_6646/fq_output_0" [label="[]", style=solid]; +"347 Constant_13734" -> "224 Transpose_6646/fq_output_0" [label="[]", style=solid]; +"348 Constant_9609" -> "216 Transpose_6628" [label="[1, 576, 1, 1]", style=solid]; +"349 Multiply_9601/fq_weights_1" -> "213 Multiply_9601" [label="[576, 1, 1, 5, 5]", style=solid]; +"350 Constant_13732" -> "349 Multiply_9601/fq_weights_1" [label="[576, 1, 1, 1, 1]", style=solid]; +"351 Constant_13731" -> "349 Multiply_9601/fq_weights_1" [label="[576, 1, 1, 1, 1]", style=solid]; +"352 Constant_13730" -> "349 Multiply_9601/fq_weights_1" [label="[576, 1, 1, 1, 1]", style=solid]; +"353 Constant_13729" -> "349 Multiply_9601/fq_weights_1" [label="[576, 1, 1, 1, 1]", style=solid]; +"354 Multiply_9819" -> "349 Multiply_9601/fq_weights_1" [label="[576, 1, 1, 5, 5]", style=solid]; +"355 Constant_13727" -> "210 Transpose_6604/fq_output_0" [label="[1, 576, 1, 1]", style=solid]; +"356 Constant_13726" -> "210 Transpose_6604/fq_output_0" [label="[1, 576, 1, 1]", style=solid]; +"357 Constant_13725" -> "210 Transpose_6604/fq_output_0" [label="[1, 576, 1, 1]", style=solid]; +"358 Constant_13724" -> "210 Transpose_6604/fq_output_0" [label="[1, 576, 1, 1]", style=solid]; +"359 Constant_9595" -> "203 Transpose_6586" [label="[1, 576, 1, 1]", style=solid]; +"360 Multiply_9587/fq_weights_1" -> "199 Multiply_9587" [label="[576, 96, 1, 1]", style=solid]; +"361 Constant_13722" -> "360 Multiply_9587/fq_weights_1" [label="[576, 1, 1, 1]", style=solid]; +"362 Constant_13721" -> "360 Multiply_9587/fq_weights_1" [label="[576, 1, 1, 1]", style=solid]; +"363 Constant_13720" -> "360 Multiply_9587/fq_weights_1" [label="[576, 1, 1, 1]", style=solid]; +"364 Constant_13719" -> "360 
Multiply_9587/fq_weights_1" [label="[576, 1, 1, 1]", style=solid]; +"365 Multiply_9813" -> "360 Multiply_9587/fq_weights_1" [label="[576, 96, 1, 1]", style=solid]; +"366 Constant_13717" -> "195 Transpose_6562/fq_output_0" [label="[]", style=solid]; +"367 Constant_13716" -> "195 Transpose_6562/fq_output_0" [label="[]", style=solid]; +"368 Constant_13715" -> "195 Transpose_6562/fq_output_0" [label="[]", style=solid]; +"369 Constant_13714" -> "195 Transpose_6562/fq_output_0" [label="[]", style=solid]; +"370 Constant_13712" -> "237 Transpose_6558/fq_output_0" [label="[]", style=solid]; +"371 Constant_13711" -> "237 Transpose_6558/fq_output_0" [label="[]", style=solid]; +"372 Constant_13710" -> "237 Transpose_6558/fq_output_0" [label="[]", style=solid]; +"373 Constant_13709" -> "237 Transpose_6558/fq_output_0" [label="[]", style=solid]; +"374 Constant_9581" -> "232 Transpose_6558" [label="[1, 96, 1, 1]", style=solid]; +"375 Multiply_9573/fq_weights_1" -> "227 Multiply_9573" [label="[96, 576, 1, 1]", style=solid]; +"376 Constant_13707" -> "375 Multiply_9573/fq_weights_1" [label="[96, 1, 1, 1]", style=solid]; +"377 Constant_13706" -> "375 Multiply_9573/fq_weights_1" [label="[96, 1, 1, 1]", style=solid]; +"378 Constant_13705" -> "375 Multiply_9573/fq_weights_1" [label="[96, 1, 1, 1]", style=solid]; +"379 Constant_13704" -> "375 Multiply_9573/fq_weights_1" [label="[96, 1, 1, 1]", style=solid]; +"380 Multiply_9807" -> "375 Multiply_9573/fq_weights_1" [label="[96, 576, 1, 1]", style=solid]; +"381 Constant_13702" -> "223 Transpose_6534/fq_output_0" [label="[]", style=solid]; +"382 Constant_13701" -> "223 Transpose_6534/fq_output_0" [label="[]", style=solid]; +"383 Constant_13700" -> "223 Transpose_6534/fq_output_0" [label="[]", style=solid]; +"384 Constant_13699" -> "223 Transpose_6534/fq_output_0" [label="[]", style=solid]; +"385 Constant_13697" -> "255 Transpose_6530/fq_output_0" [label="[]", style=solid]; +"386 Constant_13696" -> "255 Transpose_6530/fq_output_0" [label="[]", style=solid]; +"387 Constant_13695" -> "255 Transpose_6530/fq_output_0" [label="[]", style=solid]; +"388 Constant_13694" -> "255 Transpose_6530/fq_output_0" [label="[]", style=solid]; +"389 Transpose_6514" -> "249 Transpose_6516" [label="[1, 576, 1, 1]", style=solid]; +"390 Convolution_1832/fq_weights_1" -> "245 Convolution_1832" [label="[576, 144, 1, 1]", style=solid]; +"391 Constant_13692" -> "390 Convolution_1832/fq_weights_1" [label="[576, 1, 1, 1]", style=solid]; +"392 Constant_13691" -> "390 Convolution_1832/fq_weights_1" [label="[576, 1, 1, 1]", style=solid]; +"393 Constant_13690" -> "390 Convolution_1832/fq_weights_1" [label="[576, 1, 1, 1]", style=solid]; +"394 Constant_13689" -> "390 Convolution_1832/fq_weights_1" [label="[576, 1, 1, 1]", style=solid]; +"395 Transpose_1831" -> "390 Convolution_1832/fq_weights_1" [label="[576, 144, 1, 1]", style=solid]; +"396 Constant_13687" -> "241 Transpose_6510/fq_output_0" [label="[]", style=solid]; +"397 Constant_13686" -> "241 Transpose_6510/fq_output_0" [label="[]", style=solid]; +"398 Constant_13685" -> "241 Transpose_6510/fq_output_0" [label="[]", style=solid]; +"399 Constant_13684" -> "241 Transpose_6510/fq_output_0" [label="[]", style=solid]; +"400 Transpose_6506" -> "231 Transpose_6508" [label="[1, 144, 1, 1]", style=solid]; +"401 Convolution_1823/fq_weights_1" -> "226 Convolution_1823" [label="[144, 576, 1, 1]", style=solid]; +"402 Constant_13682" -> "401 Convolution_1823/fq_weights_1" [label="[144, 1, 1, 1]", style=solid]; +"403 Constant_13681" -> "401 
Convolution_1823/fq_weights_1" [label="[144, 1, 1, 1]", style=solid]; +"404 Constant_13680" -> "401 Convolution_1823/fq_weights_1" [label="[144, 1, 1, 1]", style=solid]; +"405 Constant_13679" -> "401 Convolution_1823/fq_weights_1" [label="[144, 1, 1, 1]", style=solid]; +"406 Transpose_1822" -> "401 Convolution_1823/fq_weights_1" [label="[144, 576, 1, 1]", style=solid]; +"407 Constant_13677" -> "222 Transpose_6502/fq_output_0" [label="[]", style=solid]; +"408 Constant_13676" -> "222 Transpose_6502/fq_output_0" [label="[]", style=solid]; +"409 Constant_13675" -> "222 Transpose_6502/fq_output_0" [label="[]", style=solid]; +"410 Constant_13674" -> "222 Transpose_6502/fq_output_0" [label="[]", style=solid]; +"411 Constant_6500" -> "218 Transpose_6502" [label="[2]", style=dashed]; +"412 Constant_13672" -> "215 Transpose_6498/fq_output_0" [label="[]", style=solid]; +"413 Constant_13671" -> "215 Transpose_6498/fq_output_0" [label="[]", style=solid]; +"414 Constant_13670" -> "215 Transpose_6498/fq_output_0" [label="[]", style=solid]; +"415 Constant_13669" -> "215 Transpose_6498/fq_output_0" [label="[]", style=solid]; +"416 Constant_9567" -> "209 Transpose_6480" [label="[1, 576, 1, 1]", style=solid]; +"417 Multiply_9559/fq_weights_1" -> "206 Multiply_9559" [label="[576, 1, 1, 5, 5]", style=solid]; +"418 Constant_13667" -> "417 Multiply_9559/fq_weights_1" [label="[576, 1, 1, 1, 1]", style=solid]; +"419 Constant_13666" -> "417 Multiply_9559/fq_weights_1" [label="[576, 1, 1, 1, 1]", style=solid]; +"420 Constant_13665" -> "417 Multiply_9559/fq_weights_1" [label="[576, 1, 1, 1, 1]", style=solid]; +"421 Constant_13664" -> "417 Multiply_9559/fq_weights_1" [label="[576, 1, 1, 1, 1]", style=solid]; +"422 Multiply_9802" -> "417 Multiply_9559/fq_weights_1" [label="[576, 1, 1, 5, 5]", style=solid]; +"423 Constant_13662" -> "202 Transpose_6456/fq_output_0" [label="[1, 576, 1, 1]", style=solid]; +"424 Constant_13661" -> "202 Transpose_6456/fq_output_0" [label="[1, 576, 1, 1]", style=solid]; +"425 Constant_13660" -> "202 Transpose_6456/fq_output_0" [label="[1, 576, 1, 1]", style=solid]; +"426 Constant_13659" -> "202 Transpose_6456/fq_output_0" [label="[1, 576, 1, 1]", style=solid]; +"427 Constant_9553" -> "194 Transpose_6438" [label="[1, 576, 1, 1]", style=solid]; +"428 Multiply_9545/fq_weights_1" -> "190 Multiply_9545" [label="[576, 96, 1, 1]", style=solid]; +"429 Constant_13657" -> "428 Multiply_9545/fq_weights_1" [label="[576, 1, 1, 1]", style=solid]; +"430 Constant_13656" -> "428 Multiply_9545/fq_weights_1" [label="[576, 1, 1, 1]", style=solid]; +"431 Constant_13655" -> "428 Multiply_9545/fq_weights_1" [label="[576, 1, 1, 1]", style=solid]; +"432 Constant_13654" -> "428 Multiply_9545/fq_weights_1" [label="[576, 1, 1, 1]", style=solid]; +"433 Multiply_9796" -> "428 Multiply_9545/fq_weights_1" [label="[576, 96, 1, 1]", style=solid]; +"434 Constant_13652" -> "187 Transpose_6414/fq_output_0" [label="[]", style=solid]; +"435 Constant_13651" -> "187 Transpose_6414/fq_output_0" [label="[]", style=solid]; +"436 Constant_13650" -> "187 Transpose_6414/fq_output_0" [label="[]", style=solid]; +"437 Constant_13649" -> "187 Transpose_6414/fq_output_0" [label="[]", style=solid]; +"438 Constant_9539" -> "184 Transpose_6414" [label="[1, 96, 1, 1]", style=solid]; +"439 Multiply_9531/fq_weights_1" -> "181 Multiply_9531" [label="[96, 288, 1, 1]", style=solid]; +"440 Constant_13647" -> "439 Multiply_9531/fq_weights_1" [label="[96, 1, 1, 1]", style=solid]; +"441 Constant_13646" -> "439 Multiply_9531/fq_weights_1" [label="[96, 1, 1, 
1]", style=solid]; +"442 Constant_13645" -> "439 Multiply_9531/fq_weights_1" [label="[96, 1, 1, 1]", style=solid]; +"443 Constant_13644" -> "439 Multiply_9531/fq_weights_1" [label="[96, 1, 1, 1]", style=solid]; +"444 Multiply_9790" -> "439 Multiply_9531/fq_weights_1" [label="[96, 288, 1, 1]", style=solid]; +"445 Constant_13642" -> "177 Transpose_6390/fq_output_0" [label="[]", style=solid]; +"446 Constant_13641" -> "177 Transpose_6390/fq_output_0" [label="[]", style=solid]; +"447 Constant_13640" -> "177 Transpose_6390/fq_output_0" [label="[]", style=solid]; +"448 Constant_13639" -> "177 Transpose_6390/fq_output_0" [label="[]", style=solid]; +"449 Constant_13637" -> "205 Transpose_6386/fq_output_0" [label="[]", style=solid]; +"450 Constant_13636" -> "205 Transpose_6386/fq_output_0" [label="[]", style=solid]; +"451 Constant_13635" -> "205 Transpose_6386/fq_output_0" [label="[]", style=solid]; +"452 Constant_13634" -> "205 Transpose_6386/fq_output_0" [label="[]", style=solid]; +"453 Transpose_6370" -> "197 Transpose_6372" [label="[1, 288, 1, 1]", style=solid]; +"454 Convolution_1722/fq_weights_1" -> "193 Convolution_1722" [label="[288, 72, 1, 1]", style=solid]; +"455 Constant_13632" -> "454 Convolution_1722/fq_weights_1" [label="[288, 1, 1, 1]", style=solid]; +"456 Constant_13631" -> "454 Convolution_1722/fq_weights_1" [label="[288, 1, 1, 1]", style=solid]; +"457 Constant_13630" -> "454 Convolution_1722/fq_weights_1" [label="[288, 1, 1, 1]", style=solid]; +"458 Constant_13629" -> "454 Convolution_1722/fq_weights_1" [label="[288, 1, 1, 1]", style=solid]; +"459 Transpose_1721" -> "454 Convolution_1722/fq_weights_1" [label="[288, 72, 1, 1]", style=solid]; +"460 Constant_13627" -> "189 Transpose_6366/fq_output_0" [label="[]", style=solid]; +"461 Constant_13626" -> "189 Transpose_6366/fq_output_0" [label="[]", style=solid]; +"462 Constant_13625" -> "189 Transpose_6366/fq_output_0" [label="[]", style=solid]; +"463 Constant_13624" -> "189 Transpose_6366/fq_output_0" [label="[]", style=solid]; +"464 Transpose_6362" -> "183 Transpose_6364" [label="[1, 72, 1, 1]", style=solid]; +"465 Convolution_1713/fq_weights_1" -> "180 Convolution_1713" [label="[72, 288, 1, 1]", style=solid]; +"466 Constant_13622" -> "465 Convolution_1713/fq_weights_1" [label="[72, 1, 1, 1]", style=solid]; +"467 Constant_13621" -> "465 Convolution_1713/fq_weights_1" [label="[72, 1, 1, 1]", style=solid]; +"468 Constant_13620" -> "465 Convolution_1713/fq_weights_1" [label="[72, 1, 1, 1]", style=solid]; +"469 Constant_13619" -> "465 Convolution_1713/fq_weights_1" [label="[72, 1, 1, 1]", style=solid]; +"470 Transpose_1712" -> "465 Convolution_1713/fq_weights_1" [label="[72, 288, 1, 1]", style=solid]; +"471 Constant_13617" -> "176 Transpose_6358/fq_output_0" [label="[]", style=solid]; +"472 Constant_13616" -> "176 Transpose_6358/fq_output_0" [label="[]", style=solid]; +"473 Constant_13615" -> "176 Transpose_6358/fq_output_0" [label="[]", style=solid]; +"474 Constant_13614" -> "176 Transpose_6358/fq_output_0" [label="[]", style=solid]; +"475 Constant_6356" -> "172 Transpose_6358" [label="[2]", style=dashed]; +"476 Constant_13612" -> "169 Transpose_6354/fq_output_0" [label="[]", style=solid]; +"477 Constant_13611" -> "169 Transpose_6354/fq_output_0" [label="[]", style=solid]; +"478 Constant_13610" -> "169 Transpose_6354/fq_output_0" [label="[]", style=solid]; +"479 Constant_13609" -> "169 Transpose_6354/fq_output_0" [label="[]", style=solid]; +"480 Constant_9525" -> "163 Transpose_6336" [label="[1, 288, 1, 1]", style=solid]; +"481 
Multiply_9517/fq_weights_1" -> "161 Multiply_9517" [label="[288, 1, 1, 5, 5]", style=solid]; +"482 Constant_13607" -> "481 Multiply_9517/fq_weights_1" [label="[288, 1, 1, 1, 1]", style=solid]; +"483 Constant_13606" -> "481 Multiply_9517/fq_weights_1" [label="[288, 1, 1, 1, 1]", style=solid]; +"484 Constant_13605" -> "481 Multiply_9517/fq_weights_1" [label="[288, 1, 1, 1, 1]", style=solid]; +"485 Constant_13604" -> "481 Multiply_9517/fq_weights_1" [label="[288, 1, 1, 1, 1]", style=solid]; +"486 Multiply_9785" -> "481 Multiply_9517/fq_weights_1" [label="[288, 1, 1, 5, 5]", style=solid]; +"487 Constant_13602" -> "159 Transpose_6285/fq_output_0" [label="[1, 288, 1, 1]", style=solid]; +"488 Constant_13601" -> "159 Transpose_6285/fq_output_0" [label="[1, 288, 1, 1]", style=solid]; +"489 Constant_13600" -> "159 Transpose_6285/fq_output_0" [label="[1, 288, 1, 1]", style=solid]; +"490 Constant_13599" -> "159 Transpose_6285/fq_output_0" [label="[1, 288, 1, 1]", style=solid]; +"491 Constant_9511" -> "154 Transpose_6267" [label="[1, 288, 1, 1]", style=solid]; +"492 Multiply_9503/fq_weights_1" -> "151 Multiply_9503" [label="[288, 48, 1, 1]", style=solid]; +"493 Constant_13597" -> "492 Multiply_9503/fq_weights_1" [label="[288, 1, 1, 1]", style=solid]; +"494 Constant_13596" -> "492 Multiply_9503/fq_weights_1" [label="[288, 1, 1, 1]", style=solid]; +"495 Constant_13595" -> "492 Multiply_9503/fq_weights_1" [label="[288, 1, 1, 1]", style=solid]; +"496 Constant_13594" -> "492 Multiply_9503/fq_weights_1" [label="[288, 1, 1, 1]", style=solid]; +"497 Multiply_9779" -> "492 Multiply_9503/fq_weights_1" [label="[288, 48, 1, 1]", style=solid]; +"498 Constant_13592" -> "147 Transpose_6243/fq_output_0" [label="[]", style=solid]; +"499 Constant_13591" -> "147 Transpose_6243/fq_output_0" [label="[]", style=solid]; +"500 Constant_13590" -> "147 Transpose_6243/fq_output_0" [label="[]", style=solid]; +"501 Constant_13589" -> "147 Transpose_6243/fq_output_0" [label="[]", style=solid]; +"502 Constant_13587" -> "179 Transpose_6239/fq_output_0" [label="[]", style=solid]; +"503 Constant_13586" -> "179 Transpose_6239/fq_output_0" [label="[]", style=solid]; +"504 Constant_13585" -> "179 Transpose_6239/fq_output_0" [label="[]", style=solid]; +"505 Constant_13584" -> "179 Transpose_6239/fq_output_0" [label="[]", style=solid]; +"506 Constant_9497" -> "175 Transpose_6239" [label="[1, 48, 1, 1]", style=solid]; +"507 Multiply_9489/fq_weights_1" -> "171 Multiply_9489" [label="[48, 144, 1, 1]", style=solid]; +"508 Constant_13582" -> "507 Multiply_9489/fq_weights_1" [label="[48, 1, 1, 1]", style=solid]; +"509 Constant_13581" -> "507 Multiply_9489/fq_weights_1" [label="[48, 1, 1, 1]", style=solid]; +"510 Constant_13580" -> "507 Multiply_9489/fq_weights_1" [label="[48, 1, 1, 1]", style=solid]; +"511 Constant_13579" -> "507 Multiply_9489/fq_weights_1" [label="[48, 1, 1, 1]", style=solid]; +"512 Multiply_9773" -> "507 Multiply_9489/fq_weights_1" [label="[48, 144, 1, 1]", style=solid]; +"513 Constant_13577" -> "168 Transpose_6215/fq_output_0" [label="[]", style=solid]; +"514 Constant_13576" -> "168 Transpose_6215/fq_output_0" [label="[]", style=solid]; +"515 Constant_13575" -> "168 Transpose_6215/fq_output_0" [label="[]", style=solid]; +"516 Constant_13574" -> "168 Transpose_6215/fq_output_0" [label="[]", style=solid]; +"517 Constant_13572" -> "196 Transpose_6211/fq_output_0" [label="[]", style=solid]; +"518 Constant_13571" -> "196 Transpose_6211/fq_output_0" [label="[]", style=solid]; +"519 Constant_13570" -> "196 Transpose_6211/fq_output_0" 
[label="[]", style=solid]; +"520 Constant_13569" -> "196 Transpose_6211/fq_output_0" [label="[]", style=solid]; +"521 Transpose_6195" -> "188 Transpose_6197" [label="[1, 144, 1, 1]", style=solid]; +"522 Convolution_1583/fq_weights_1" -> "185 Convolution_1583" [label="[144, 40, 1, 1]", style=solid]; +"523 Constant_13567" -> "522 Convolution_1583/fq_weights_1" [label="[144, 1, 1, 1]", style=solid]; +"524 Constant_13566" -> "522 Convolution_1583/fq_weights_1" [label="[144, 1, 1, 1]", style=solid]; +"525 Constant_13565" -> "522 Convolution_1583/fq_weights_1" [label="[144, 1, 1, 1]", style=solid]; +"526 Constant_13564" -> "522 Convolution_1583/fq_weights_1" [label="[144, 1, 1, 1]", style=solid]; +"527 Transpose_1582" -> "522 Convolution_1583/fq_weights_1" [label="[144, 40, 1, 1]", style=solid]; +"528 Constant_13562" -> "182 Transpose_6191/fq_output_0" [label="[]", style=solid]; +"529 Constant_13561" -> "182 Transpose_6191/fq_output_0" [label="[]", style=solid]; +"530 Constant_13560" -> "182 Transpose_6191/fq_output_0" [label="[]", style=solid]; +"531 Constant_13559" -> "182 Transpose_6191/fq_output_0" [label="[]", style=solid]; +"532 Transpose_6187" -> "174 Transpose_6189" [label="[1, 40, 1, 1]", style=solid]; +"533 Convolution_1574/fq_weights_1" -> "170 Convolution_1574" [label="[40, 144, 1, 1]", style=solid]; +"534 Constant_13557" -> "533 Convolution_1574/fq_weights_1" [label="[40, 1, 1, 1]", style=solid]; +"535 Constant_13556" -> "533 Convolution_1574/fq_weights_1" [label="[40, 1, 1, 1]", style=solid]; +"536 Constant_13555" -> "533 Convolution_1574/fq_weights_1" [label="[40, 1, 1, 1]", style=solid]; +"537 Constant_13554" -> "533 Convolution_1574/fq_weights_1" [label="[40, 1, 1, 1]", style=solid]; +"538 Transpose_1573" -> "533 Convolution_1574/fq_weights_1" [label="[40, 144, 1, 1]", style=solid]; +"539 Constant_13552" -> "167 Transpose_6183/fq_output_0" [label="[]", style=solid]; +"540 Constant_13551" -> "167 Transpose_6183/fq_output_0" [label="[]", style=solid]; +"541 Constant_13550" -> "167 Transpose_6183/fq_output_0" [label="[]", style=solid]; +"542 Constant_13549" -> "167 Transpose_6183/fq_output_0" [label="[]", style=solid]; +"543 Constant_6181" -> "164 Transpose_6183" [label="[2]", style=dashed]; +"544 Constant_13547" -> "162 Transpose_6179/fq_output_0" [label="[]", style=solid]; +"545 Constant_13546" -> "162 Transpose_6179/fq_output_0" [label="[]", style=solid]; +"546 Constant_13545" -> "162 Transpose_6179/fq_output_0" [label="[]", style=solid]; +"547 Constant_13544" -> "162 Transpose_6179/fq_output_0" [label="[]", style=solid]; +"548 Constant_9483" -> "158 Transpose_6161" [label="[1, 144, 1, 1]", style=solid]; +"549 Multiply_9475/fq_weights_1" -> "156 Multiply_9475" [label="[144, 1, 1, 5, 5]", style=solid]; +"550 Constant_13542" -> "549 Multiply_9475/fq_weights_1" [label="[144, 1, 1, 1, 1]", style=solid]; +"551 Constant_13541" -> "549 Multiply_9475/fq_weights_1" [label="[144, 1, 1, 1, 1]", style=solid]; +"552 Constant_13540" -> "549 Multiply_9475/fq_weights_1" [label="[144, 1, 1, 1, 1]", style=solid]; +"553 Constant_13539" -> "549 Multiply_9475/fq_weights_1" [label="[144, 1, 1, 1, 1]", style=solid]; +"554 Multiply_9768" -> "549 Multiply_9475/fq_weights_1" [label="[144, 1, 1, 5, 5]", style=solid]; +"555 Constant_13537" -> "153 Transpose_6137/fq_output_0" [label="[1, 144, 1, 1]", style=solid]; +"556 Constant_13536" -> "153 Transpose_6137/fq_output_0" [label="[1, 144, 1, 1]", style=solid]; +"557 Constant_13535" -> "153 Transpose_6137/fq_output_0" [label="[1, 144, 1, 1]", style=solid]; 
+"558 Constant_13534" -> "153 Transpose_6137/fq_output_0" [label="[1, 144, 1, 1]", style=solid]; +"559 Constant_9469" -> "146 Transpose_6119" [label="[1, 144, 1, 1]", style=solid]; +"560 Multiply_9461/fq_weights_1" -> "142 Multiply_9461" [label="[144, 48, 1, 1]", style=solid]; +"561 Constant_13532" -> "560 Multiply_9461/fq_weights_1" [label="[144, 1, 1, 1]", style=solid]; +"562 Constant_13531" -> "560 Multiply_9461/fq_weights_1" [label="[144, 1, 1, 1]", style=solid]; +"563 Constant_13530" -> "560 Multiply_9461/fq_weights_1" [label="[144, 1, 1, 1]", style=solid]; +"564 Constant_13529" -> "560 Multiply_9461/fq_weights_1" [label="[144, 1, 1, 1]", style=solid]; +"565 Multiply_9762" -> "560 Multiply_9461/fq_weights_1" [label="[144, 48, 1, 1]", style=solid]; +"566 Constant_13527" -> "138 Transpose_6095/fq_output_0" [label="[]", style=solid]; +"567 Constant_13526" -> "138 Transpose_6095/fq_output_0" [label="[]", style=solid]; +"568 Constant_13525" -> "138 Transpose_6095/fq_output_0" [label="[]", style=solid]; +"569 Constant_13524" -> "138 Transpose_6095/fq_output_0" [label="[]", style=solid]; +"570 Constant_9455" -> "134 Transpose_6095" [label="[1, 48, 1, 1]", style=solid]; +"571 Multiply_9447/fq_weights_1" -> "130 Multiply_9447" [label="[48, 120, 1, 1]", style=solid]; +"572 Constant_13522" -> "571 Multiply_9447/fq_weights_1" [label="[48, 1, 1, 1]", style=solid]; +"573 Constant_13521" -> "571 Multiply_9447/fq_weights_1" [label="[48, 1, 1, 1]", style=solid]; +"574 Constant_13520" -> "571 Multiply_9447/fq_weights_1" [label="[48, 1, 1, 1]", style=solid]; +"575 Constant_13519" -> "571 Multiply_9447/fq_weights_1" [label="[48, 1, 1, 1]", style=solid]; +"576 Multiply_9756" -> "571 Multiply_9447/fq_weights_1" [label="[48, 120, 1, 1]", style=solid]; +"577 Constant_13517" -> "125 Transpose_6071/fq_output_0" [label="[]", style=solid]; +"578 Constant_13516" -> "125 Transpose_6071/fq_output_0" [label="[]", style=solid]; +"579 Constant_13515" -> "125 Transpose_6071/fq_output_0" [label="[]", style=solid]; +"580 Constant_13514" -> "125 Transpose_6071/fq_output_0" [label="[]", style=solid]; +"581 Constant_13512" -> "155 Transpose_6067/fq_output_0" [label="[]", style=solid]; +"582 Constant_13511" -> "155 Transpose_6067/fq_output_0" [label="[]", style=solid]; +"583 Constant_13510" -> "155 Transpose_6067/fq_output_0" [label="[]", style=solid]; +"584 Constant_13509" -> "155 Transpose_6067/fq_output_0" [label="[]", style=solid]; +"585 Transpose_6051" -> "149 Transpose_6053" [label="[1, 120, 1, 1]", style=solid]; +"586 Convolution_1473/fq_weights_1" -> "145 Convolution_1473" [label="[120, 32, 1, 1]", style=solid]; +"587 Constant_13507" -> "586 Convolution_1473/fq_weights_1" [label="[120, 1, 1, 1]", style=solid]; +"588 Constant_13506" -> "586 Convolution_1473/fq_weights_1" [label="[120, 1, 1, 1]", style=solid]; +"589 Constant_13505" -> "586 Convolution_1473/fq_weights_1" [label="[120, 1, 1, 1]", style=solid]; +"590 Constant_13504" -> "586 Convolution_1473/fq_weights_1" [label="[120, 1, 1, 1]", style=solid]; +"591 Transpose_1472" -> "586 Convolution_1473/fq_weights_1" [label="[120, 32, 1, 1]", style=solid]; +"592 Constant_13502" -> "141 Transpose_6047/fq_output_0" [label="[]", style=solid]; +"593 Constant_13501" -> "141 Transpose_6047/fq_output_0" [label="[]", style=solid]; +"594 Constant_13500" -> "141 Transpose_6047/fq_output_0" [label="[]", style=solid]; +"595 Constant_13499" -> "141 Transpose_6047/fq_output_0" [label="[]", style=solid]; +"596 Transpose_6043" -> "133 Transpose_6045" [label="[1, 32, 1, 1]", 
style=solid]; +"597 Convolution_1464/fq_weights_1" -> "129 Convolution_1464" [label="[32, 120, 1, 1]", style=solid]; +"598 Constant_13497" -> "597 Convolution_1464/fq_weights_1" [label="[32, 1, 1, 1]", style=solid]; +"599 Constant_13496" -> "597 Convolution_1464/fq_weights_1" [label="[32, 1, 1, 1]", style=solid]; +"600 Constant_13495" -> "597 Convolution_1464/fq_weights_1" [label="[32, 1, 1, 1]", style=solid]; +"601 Constant_13494" -> "597 Convolution_1464/fq_weights_1" [label="[32, 1, 1, 1]", style=solid]; +"602 Transpose_1463" -> "597 Convolution_1464/fq_weights_1" [label="[32, 120, 1, 1]", style=solid]; +"603 Constant_13492" -> "124 Transpose_6039/fq_output_0" [label="[]", style=solid]; +"604 Constant_13491" -> "124 Transpose_6039/fq_output_0" [label="[]", style=solid]; +"605 Constant_13490" -> "124 Transpose_6039/fq_output_0" [label="[]", style=solid]; +"606 Constant_13489" -> "124 Transpose_6039/fq_output_0" [label="[]", style=solid]; +"607 Constant_6037" -> "119 Transpose_6039" [label="[2]", style=dashed]; +"608 Constant_13487" -> "114 Transpose_6035/fq_output_0" [label="[]", style=solid]; +"609 Constant_13486" -> "114 Transpose_6035/fq_output_0" [label="[]", style=solid]; +"610 Constant_13485" -> "114 Transpose_6035/fq_output_0" [label="[]", style=solid]; +"611 Constant_13484" -> "114 Transpose_6035/fq_output_0" [label="[]", style=solid]; +"612 Constant_9441" -> "104 Transpose_6017" [label="[1, 120, 1, 1]", style=solid]; +"613 Multiply_9433/fq_weights_1" -> "100 Multiply_9433" [label="[120, 1, 1, 5, 5]", style=solid]; +"614 Constant_13482" -> "613 Multiply_9433/fq_weights_1" [label="[120, 1, 1, 1, 1]", style=solid]; +"615 Constant_13481" -> "613 Multiply_9433/fq_weights_1" [label="[120, 1, 1, 1, 1]", style=solid]; +"616 Constant_13480" -> "613 Multiply_9433/fq_weights_1" [label="[120, 1, 1, 1, 1]", style=solid]; +"617 Constant_13479" -> "613 Multiply_9433/fq_weights_1" [label="[120, 1, 1, 1, 1]", style=solid]; +"618 Multiply_9751" -> "613 Multiply_9433/fq_weights_1" [label="[120, 1, 1, 5, 5]", style=solid]; +"619 Constant_13477" -> "96 Transpose_5993/fq_output_0" [label="[1, 120, 1, 1]", style=solid]; +"620 Constant_13476" -> "96 Transpose_5993/fq_output_0" [label="[1, 120, 1, 1]", style=solid]; +"621 Constant_13475" -> "96 Transpose_5993/fq_output_0" [label="[1, 120, 1, 1]", style=solid]; +"622 Constant_13474" -> "96 Transpose_5993/fq_output_0" [label="[1, 120, 1, 1]", style=solid]; +"623 Constant_9427" -> "90 Transpose_5975" [label="[1, 120, 1, 1]", style=solid]; +"624 Multiply_9419/fq_weights_1" -> "87 Multiply_9419" [label="[120, 40, 1, 1]", style=solid]; +"625 Constant_13472" -> "624 Multiply_9419/fq_weights_1" [label="[120, 1, 1, 1]", style=solid]; +"626 Constant_13471" -> "624 Multiply_9419/fq_weights_1" [label="[120, 1, 1, 1]", style=solid]; +"627 Constant_13470" -> "624 Multiply_9419/fq_weights_1" [label="[120, 1, 1, 1]", style=solid]; +"628 Constant_13469" -> "624 Multiply_9419/fq_weights_1" [label="[120, 1, 1, 1]", style=solid]; +"629 Multiply_9745" -> "624 Multiply_9419/fq_weights_1" [label="[120, 40, 1, 1]", style=solid]; +"630 Constant_13467" -> "83 Transpose_5951/fq_output_0" [label="[]", style=solid]; +"631 Constant_13466" -> "83 Transpose_5951/fq_output_0" [label="[]", style=solid]; +"632 Constant_13465" -> "83 Transpose_5951/fq_output_0" [label="[]", style=solid]; +"633 Constant_13464" -> "83 Transpose_5951/fq_output_0" [label="[]", style=solid]; +"634 Constant_13462" -> "128 Transpose_5947/fq_output_0" [label="[]", style=solid]; +"635 Constant_13461" -> "128 
Transpose_5947/fq_output_0" [label="[]", style=solid]; +"636 Constant_13460" -> "128 Transpose_5947/fq_output_0" [label="[]", style=solid]; +"637 Constant_13459" -> "128 Transpose_5947/fq_output_0" [label="[]", style=solid]; +"638 Constant_9413" -> "123 Transpose_5947" [label="[1, 40, 1, 1]", style=solid]; +"639 Multiply_9405/fq_weights_1" -> "118 Multiply_9405" [label="[40, 240, 1, 1]", style=solid]; +"640 Constant_13457" -> "639 Multiply_9405/fq_weights_1" [label="[40, 1, 1, 1]", style=solid]; +"641 Constant_13456" -> "639 Multiply_9405/fq_weights_1" [label="[40, 1, 1, 1]", style=solid]; +"642 Constant_13455" -> "639 Multiply_9405/fq_weights_1" [label="[40, 1, 1, 1]", style=solid]; +"643 Constant_13454" -> "639 Multiply_9405/fq_weights_1" [label="[40, 1, 1, 1]", style=solid]; +"644 Multiply_9739" -> "639 Multiply_9405/fq_weights_1" [label="[40, 240, 1, 1]", style=solid]; +"645 Constant_13452" -> "113 Transpose_5923/fq_output_0" [label="[]", style=solid]; +"646 Constant_13451" -> "113 Transpose_5923/fq_output_0" [label="[]", style=solid]; +"647 Constant_13450" -> "113 Transpose_5923/fq_output_0" [label="[]", style=solid]; +"648 Constant_13449" -> "113 Transpose_5923/fq_output_0" [label="[]", style=solid]; +"649 Constant_13447" -> "148 Transpose_5919/fq_output_0" [label="[]", style=solid]; +"650 Constant_13446" -> "148 Transpose_5919/fq_output_0" [label="[]", style=solid]; +"651 Constant_13445" -> "148 Transpose_5919/fq_output_0" [label="[]", style=solid]; +"652 Constant_13444" -> "148 Transpose_5919/fq_output_0" [label="[]", style=solid]; +"653 Transpose_5903" -> "140 Transpose_5905" [label="[1, 240, 1, 1]", style=solid]; +"654 Convolution_1362/fq_weights_1" -> "136 Convolution_1362" [label="[240, 64, 1, 1]", style=solid]; +"655 Constant_13442" -> "654 Convolution_1362/fq_weights_1" [label="[240, 1, 1, 1]", style=solid]; +"656 Constant_13441" -> "654 Convolution_1362/fq_weights_1" [label="[240, 1, 1, 1]", style=solid]; +"657 Constant_13440" -> "654 Convolution_1362/fq_weights_1" [label="[240, 1, 1, 1]", style=solid]; +"658 Constant_13439" -> "654 Convolution_1362/fq_weights_1" [label="[240, 1, 1, 1]", style=solid]; +"659 Transpose_1361" -> "654 Convolution_1362/fq_weights_1" [label="[240, 64, 1, 1]", style=solid]; +"660 Constant_13437" -> "132 Transpose_5899/fq_output_0" [label="[]", style=solid]; +"661 Constant_13436" -> "132 Transpose_5899/fq_output_0" [label="[]", style=solid]; +"662 Constant_13435" -> "132 Transpose_5899/fq_output_0" [label="[]", style=solid]; +"663 Constant_13434" -> "132 Transpose_5899/fq_output_0" [label="[]", style=solid]; +"664 Transpose_5895" -> "122 Transpose_5897" [label="[1, 64, 1, 1]", style=solid]; +"665 Convolution_1353/fq_weights_1" -> "117 Convolution_1353" [label="[64, 240, 1, 1]", style=solid]; +"666 Constant_13432" -> "665 Convolution_1353/fq_weights_1" [label="[64, 1, 1, 1]", style=solid]; +"667 Constant_13431" -> "665 Convolution_1353/fq_weights_1" [label="[64, 1, 1, 1]", style=solid]; +"668 Constant_13430" -> "665 Convolution_1353/fq_weights_1" [label="[64, 1, 1, 1]", style=solid]; +"669 Constant_13429" -> "665 Convolution_1353/fq_weights_1" [label="[64, 1, 1, 1]", style=solid]; +"670 Transpose_1352" -> "665 Convolution_1353/fq_weights_1" [label="[64, 240, 1, 1]", style=solid]; +"671 Constant_13427" -> "112 Transpose_5891/fq_output_0" [label="[]", style=solid]; +"672 Constant_13426" -> "112 Transpose_5891/fq_output_0" [label="[]", style=solid]; +"673 Constant_13425" -> "112 Transpose_5891/fq_output_0" [label="[]", style=solid]; +"674 
Constant_13424" -> "112 Transpose_5891/fq_output_0" [label="[]", style=solid]; +"675 Constant_5889" -> "107 Transpose_5891" [label="[2]", style=dashed]; +"676 Constant_13422" -> "103 Transpose_5887/fq_output_0" [label="[]", style=solid]; +"677 Constant_13421" -> "103 Transpose_5887/fq_output_0" [label="[]", style=solid]; +"678 Constant_13420" -> "103 Transpose_5887/fq_output_0" [label="[]", style=solid]; +"679 Constant_13419" -> "103 Transpose_5887/fq_output_0" [label="[]", style=solid]; +"680 Constant_9399" -> "95 Transpose_5869" [label="[1, 240, 1, 1]", style=solid]; +"681 Multiply_9391/fq_weights_1" -> "92 Multiply_9391" [label="[240, 1, 1, 5, 5]", style=solid]; +"682 Constant_13417" -> "681 Multiply_9391/fq_weights_1" [label="[240, 1, 1, 1, 1]", style=solid]; +"683 Constant_13416" -> "681 Multiply_9391/fq_weights_1" [label="[240, 1, 1, 1, 1]", style=solid]; +"684 Constant_13415" -> "681 Multiply_9391/fq_weights_1" [label="[240, 1, 1, 1, 1]", style=solid]; +"685 Constant_13414" -> "681 Multiply_9391/fq_weights_1" [label="[240, 1, 1, 1, 1]", style=solid]; +"686 Multiply_9734" -> "681 Multiply_9391/fq_weights_1" [label="[240, 1, 1, 5, 5]", style=solid]; +"687 Constant_13412" -> "89 Transpose_5845/fq_output_0" [label="[1, 240, 1, 1]", style=solid]; +"688 Constant_13411" -> "89 Transpose_5845/fq_output_0" [label="[1, 240, 1, 1]", style=solid]; +"689 Constant_13410" -> "89 Transpose_5845/fq_output_0" [label="[1, 240, 1, 1]", style=solid]; +"690 Constant_13409" -> "89 Transpose_5845/fq_output_0" [label="[1, 240, 1, 1]", style=solid]; +"691 Constant_9385" -> "82 Transpose_5827" [label="[1, 240, 1, 1]", style=solid]; +"692 Multiply_9377/fq_weights_1" -> "78 Multiply_9377" [label="[240, 40, 1, 1]", style=solid]; +"693 Constant_13407" -> "692 Multiply_9377/fq_weights_1" [label="[240, 1, 1, 1]", style=solid]; +"694 Constant_13406" -> "692 Multiply_9377/fq_weights_1" [label="[240, 1, 1, 1]", style=solid]; +"695 Constant_13405" -> "692 Multiply_9377/fq_weights_1" [label="[240, 1, 1, 1]", style=solid]; +"696 Constant_13404" -> "692 Multiply_9377/fq_weights_1" [label="[240, 1, 1, 1]", style=solid]; +"697 Multiply_9728" -> "692 Multiply_9377/fq_weights_1" [label="[240, 40, 1, 1]", style=solid]; +"698 Constant_13402" -> "75 Transpose_5803/fq_output_0" [label="[]", style=solid]; +"699 Constant_13401" -> "75 Transpose_5803/fq_output_0" [label="[]", style=solid]; +"700 Constant_13400" -> "75 Transpose_5803/fq_output_0" [label="[]", style=solid]; +"701 Constant_13399" -> "75 Transpose_5803/fq_output_0" [label="[]", style=solid]; +"702 Constant_13397" -> "116 Transpose_5799/fq_output_0" [label="[]", style=solid]; +"703 Constant_13396" -> "116 Transpose_5799/fq_output_0" [label="[]", style=solid]; +"704 Constant_13395" -> "116 Transpose_5799/fq_output_0" [label="[]", style=solid]; +"705 Constant_13394" -> "116 Transpose_5799/fq_output_0" [label="[]", style=solid]; +"706 Constant_9371" -> "111 Transpose_5799" [label="[1, 40, 1, 1]", style=solid]; +"707 Multiply_9363/fq_weights_1" -> "106 Multiply_9363" [label="[40, 240, 1, 1]", style=solid]; +"708 Constant_13392" -> "707 Multiply_9363/fq_weights_1" [label="[40, 1, 1, 1]", style=solid]; +"709 Constant_13391" -> "707 Multiply_9363/fq_weights_1" [label="[40, 1, 1, 1]", style=solid]; +"710 Constant_13390" -> "707 Multiply_9363/fq_weights_1" [label="[40, 1, 1, 1]", style=solid]; +"711 Constant_13389" -> "707 Multiply_9363/fq_weights_1" [label="[40, 1, 1, 1]", style=solid]; +"712 Multiply_9722" -> "707 Multiply_9363/fq_weights_1" [label="[40, 240, 1, 1]", 
style=solid]; +"713 Constant_13387" -> "102 Transpose_5775/fq_output_0" [label="[]", style=solid]; +"714 Constant_13386" -> "102 Transpose_5775/fq_output_0" [label="[]", style=solid]; +"715 Constant_13385" -> "102 Transpose_5775/fq_output_0" [label="[]", style=solid]; +"716 Constant_13384" -> "102 Transpose_5775/fq_output_0" [label="[]", style=solid]; +"717 Constant_13382" -> "139 Transpose_5771/fq_output_0" [label="[]", style=solid]; +"718 Constant_13381" -> "139 Transpose_5771/fq_output_0" [label="[]", style=solid]; +"719 Constant_13380" -> "139 Transpose_5771/fq_output_0" [label="[]", style=solid]; +"720 Constant_13379" -> "139 Transpose_5771/fq_output_0" [label="[]", style=solid]; +"721 Transpose_5755" -> "131 Transpose_5757" [label="[1, 240, 1, 1]", style=solid]; +"722 Convolution_1251/fq_weights_1" -> "126 Convolution_1251" [label="[240, 64, 1, 1]", style=solid]; +"723 Constant_13377" -> "722 Convolution_1251/fq_weights_1" [label="[240, 1, 1, 1]", style=solid]; +"724 Constant_13376" -> "722 Convolution_1251/fq_weights_1" [label="[240, 1, 1, 1]", style=solid]; +"725 Constant_13375" -> "722 Convolution_1251/fq_weights_1" [label="[240, 1, 1, 1]", style=solid]; +"726 Constant_13374" -> "722 Convolution_1251/fq_weights_1" [label="[240, 1, 1, 1]", style=solid]; +"727 Transpose_1250" -> "722 Convolution_1251/fq_weights_1" [label="[240, 64, 1, 1]", style=solid]; +"728 Constant_13372" -> "121 Transpose_5751/fq_output_0" [label="[]", style=solid]; +"729 Constant_13371" -> "121 Transpose_5751/fq_output_0" [label="[]", style=solid]; +"730 Constant_13370" -> "121 Transpose_5751/fq_output_0" [label="[]", style=solid]; +"731 Constant_13369" -> "121 Transpose_5751/fq_output_0" [label="[]", style=solid]; +"732 Transpose_5747" -> "110 Transpose_5749" [label="[1, 64, 1, 1]", style=solid]; +"733 Convolution_1242/fq_weights_1" -> "105 Convolution_1242" [label="[64, 240, 1, 1]", style=solid]; +"734 Constant_13367" -> "733 Convolution_1242/fq_weights_1" [label="[64, 1, 1, 1]", style=solid]; +"735 Constant_13366" -> "733 Convolution_1242/fq_weights_1" [label="[64, 1, 1, 1]", style=solid]; +"736 Constant_13365" -> "733 Convolution_1242/fq_weights_1" [label="[64, 1, 1, 1]", style=solid]; +"737 Constant_13364" -> "733 Convolution_1242/fq_weights_1" [label="[64, 1, 1, 1]", style=solid]; +"738 Transpose_1241" -> "733 Convolution_1242/fq_weights_1" [label="[64, 240, 1, 1]", style=solid]; +"739 Constant_13362" -> "101 Transpose_5743/fq_output_0" [label="[]", style=solid]; +"740 Constant_13361" -> "101 Transpose_5743/fq_output_0" [label="[]", style=solid]; +"741 Constant_13360" -> "101 Transpose_5743/fq_output_0" [label="[]", style=solid]; +"742 Constant_13359" -> "101 Transpose_5743/fq_output_0" [label="[]", style=solid]; +"743 Constant_5741" -> "97 Transpose_5743" [label="[2]", style=dashed]; +"744 Constant_13357" -> "94 Transpose_5739/fq_output_0" [label="[]", style=solid]; +"745 Constant_13356" -> "94 Transpose_5739/fq_output_0" [label="[]", style=solid]; +"746 Constant_13355" -> "94 Transpose_5739/fq_output_0" [label="[]", style=solid]; +"747 Constant_13354" -> "94 Transpose_5739/fq_output_0" [label="[]", style=solid]; +"748 Constant_9357" -> "88 Transpose_5721" [label="[1, 240, 1, 1]", style=solid]; +"749 Multiply_9349/fq_weights_1" -> "85 Multiply_9349" [label="[240, 1, 1, 5, 5]", style=solid]; +"750 Constant_13352" -> "749 Multiply_9349/fq_weights_1" [label="[240, 1, 1, 1, 1]", style=solid]; +"751 Constant_13351" -> "749 Multiply_9349/fq_weights_1" [label="[240, 1, 1, 1, 1]", style=solid]; +"752 
Constant_13350" -> "749 Multiply_9349/fq_weights_1" [label="[240, 1, 1, 1, 1]", style=solid]; +"753 Constant_13349" -> "749 Multiply_9349/fq_weights_1" [label="[240, 1, 1, 1, 1]", style=solid]; +"754 Multiply_9717" -> "749 Multiply_9349/fq_weights_1" [label="[240, 1, 1, 5, 5]", style=solid]; +"755 Constant_13347" -> "81 Transpose_5697/fq_output_0" [label="[1, 240, 1, 1]", style=solid]; +"756 Constant_13346" -> "81 Transpose_5697/fq_output_0" [label="[1, 240, 1, 1]", style=solid]; +"757 Constant_13345" -> "81 Transpose_5697/fq_output_0" [label="[1, 240, 1, 1]", style=solid]; +"758 Constant_13344" -> "81 Transpose_5697/fq_output_0" [label="[1, 240, 1, 1]", style=solid]; +"759 Constant_9343" -> "74 Transpose_5679" [label="[1, 240, 1, 1]", style=solid]; +"760 Multiply_9335/fq_weights_1" -> "71 Multiply_9335" [label="[240, 40, 1, 1]", style=solid]; +"761 Constant_13342" -> "760 Multiply_9335/fq_weights_1" [label="[240, 1, 1, 1]", style=solid]; +"762 Constant_13341" -> "760 Multiply_9335/fq_weights_1" [label="[240, 1, 1, 1]", style=solid]; +"763 Constant_13340" -> "760 Multiply_9335/fq_weights_1" [label="[240, 1, 1, 1]", style=solid]; +"764 Constant_13339" -> "760 Multiply_9335/fq_weights_1" [label="[240, 1, 1, 1]", style=solid]; +"765 Multiply_9711" -> "760 Multiply_9335/fq_weights_1" [label="[240, 40, 1, 1]", style=solid]; +"766 Constant_13337" -> "69 Transpose_5655/fq_output_0" [label="[]", style=solid]; +"767 Constant_13336" -> "69 Transpose_5655/fq_output_0" [label="[]", style=solid]; +"768 Constant_13335" -> "69 Transpose_5655/fq_output_0" [label="[]", style=solid]; +"769 Constant_13334" -> "69 Transpose_5655/fq_output_0" [label="[]", style=solid]; +"770 Constant_9329" -> "67 Transpose_5655" [label="[1, 40, 1, 1]", style=solid]; +"771 Multiply_9321/fq_weights_1" -> "65 Multiply_9321" [label="[40, 96, 1, 1]", style=solid]; +"772 Constant_13332" -> "771 Multiply_9321/fq_weights_1" [label="[40, 1, 1, 1]", style=solid]; +"773 Constant_13331" -> "771 Multiply_9321/fq_weights_1" [label="[40, 1, 1, 1]", style=solid]; +"774 Constant_13330" -> "771 Multiply_9321/fq_weights_1" [label="[40, 1, 1, 1]", style=solid]; +"775 Constant_13329" -> "771 Multiply_9321/fq_weights_1" [label="[40, 1, 1, 1]", style=solid]; +"776 Multiply_9705" -> "771 Multiply_9321/fq_weights_1" [label="[40, 96, 1, 1]", style=solid]; +"777 Constant_13327" -> "63 Transpose_5631/fq_output_0" [label="[]", style=solid]; +"778 Constant_13326" -> "63 Transpose_5631/fq_output_0" [label="[]", style=solid]; +"779 Constant_13325" -> "63 Transpose_5631/fq_output_0" [label="[]", style=solid]; +"780 Constant_13324" -> "63 Transpose_5631/fq_output_0" [label="[]", style=solid]; +"781 Constant_13322" -> "84 Transpose_5627/fq_output_0" [label="[]", style=solid]; +"782 Constant_13321" -> "84 Transpose_5627/fq_output_0" [label="[]", style=solid]; +"783 Constant_13320" -> "84 Transpose_5627/fq_output_0" [label="[]", style=solid]; +"784 Constant_13319" -> "84 Transpose_5627/fq_output_0" [label="[]", style=solid]; +"785 Transpose_5611" -> "76 Transpose_5613" [label="[1, 96, 1, 1]", style=solid]; +"786 Convolution_1141/fq_weights_1" -> "73 Convolution_1141" [label="[96, 24, 1, 1]", style=solid]; +"787 Constant_13317" -> "786 Convolution_1141/fq_weights_1" [label="[96, 1, 1, 1]", style=solid]; +"788 Constant_13316" -> "786 Convolution_1141/fq_weights_1" [label="[96, 1, 1, 1]", style=solid]; +"789 Constant_13315" -> "786 Convolution_1141/fq_weights_1" [label="[96, 1, 1, 1]", style=solid]; +"790 Constant_13314" -> "786 Convolution_1141/fq_weights_1" 
[label="[96, 1, 1, 1]", style=solid]; +"791 Transpose_1140" -> "786 Convolution_1141/fq_weights_1" [label="[96, 24, 1, 1]", style=solid]; +"792 Constant_13312" -> "70 Transpose_5607/fq_output_0" [label="[]", style=solid]; +"793 Constant_13311" -> "70 Transpose_5607/fq_output_0" [label="[]", style=solid]; +"794 Constant_13310" -> "70 Transpose_5607/fq_output_0" [label="[]", style=solid]; +"795 Constant_13309" -> "70 Transpose_5607/fq_output_0" [label="[]", style=solid]; +"796 Transpose_5603" -> "66 Transpose_5605" [label="[1, 24, 1, 1]", style=solid]; +"797 Convolution_1132/fq_weights_1" -> "64 Convolution_1132" [label="[24, 96, 1, 1]", style=solid]; +"798 Constant_13307" -> "797 Convolution_1132/fq_weights_1" [label="[24, 1, 1, 1]", style=solid]; +"799 Constant_13306" -> "797 Convolution_1132/fq_weights_1" [label="[24, 1, 1, 1]", style=solid]; +"800 Constant_13305" -> "797 Convolution_1132/fq_weights_1" [label="[24, 1, 1, 1]", style=solid]; +"801 Constant_13304" -> "797 Convolution_1132/fq_weights_1" [label="[24, 1, 1, 1]", style=solid]; +"802 Transpose_1131" -> "797 Convolution_1132/fq_weights_1" [label="[24, 96, 1, 1]", style=solid]; +"803 Constant_13302" -> "62 Transpose_5599/fq_output_0" [label="[]", style=solid]; +"804 Constant_13301" -> "62 Transpose_5599/fq_output_0" [label="[]", style=solid]; +"805 Constant_13300" -> "62 Transpose_5599/fq_output_0" [label="[]", style=solid]; +"806 Constant_13299" -> "62 Transpose_5599/fq_output_0" [label="[]", style=solid]; +"807 Constant_5597" -> "60 Transpose_5599" [label="[2]", style=dashed]; +"808 Constant_13297" -> "58 Transpose_5595/fq_output_0" [label="[]", style=solid]; +"809 Constant_13296" -> "58 Transpose_5595/fq_output_0" [label="[]", style=solid]; +"810 Constant_13295" -> "58 Transpose_5595/fq_output_0" [label="[]", style=solid]; +"811 Constant_13294" -> "58 Transpose_5595/fq_output_0" [label="[]", style=solid]; +"812 Constant_9315" -> "54 Transpose_5577" [label="[1, 96, 1, 1]", style=solid]; +"813 Multiply_9307/fq_weights_1" -> "52 Multiply_9307" [label="[96, 1, 1, 5, 5]", style=solid]; +"814 Constant_13292" -> "813 Multiply_9307/fq_weights_1" [label="[96, 1, 1, 1, 1]", style=solid]; +"815 Constant_13291" -> "813 Multiply_9307/fq_weights_1" [label="[96, 1, 1, 1, 1]", style=solid]; +"816 Constant_13290" -> "813 Multiply_9307/fq_weights_1" [label="[96, 1, 1, 1, 1]", style=solid]; +"817 Constant_13289" -> "813 Multiply_9307/fq_weights_1" [label="[96, 1, 1, 1, 1]", style=solid]; +"818 Multiply_9700" -> "813 Multiply_9307/fq_weights_1" [label="[96, 1, 1, 5, 5]", style=solid]; +"819 Constant_13287" -> "50 Transpose_5526/fq_output_0" [label="[1, 96, 1, 1]", style=solid]; +"820 Constant_13286" -> "50 Transpose_5526/fq_output_0" [label="[1, 96, 1, 1]", style=solid]; +"821 Constant_13285" -> "50 Transpose_5526/fq_output_0" [label="[1, 96, 1, 1]", style=solid]; +"822 Constant_13284" -> "50 Transpose_5526/fq_output_0" [label="[1, 96, 1, 1]", style=solid]; +"823 Constant_9301" -> "46 Transpose_5508" [label="[1, 96, 1, 1]", style=solid]; +"824 Multiply_9293/fq_weights_1" -> "44 Multiply_9293" [label="[96, 24, 1, 1]", style=solid]; +"825 Constant_13282" -> "824 Multiply_9293/fq_weights_1" [label="[96, 1, 1, 1]", style=solid]; +"826 Constant_13281" -> "824 Multiply_9293/fq_weights_1" [label="[96, 1, 1, 1]", style=solid]; +"827 Constant_13280" -> "824 Multiply_9293/fq_weights_1" [label="[96, 1, 1, 1]", style=solid]; +"828 Constant_13279" -> "824 Multiply_9293/fq_weights_1" [label="[96, 1, 1, 1]", style=solid]; +"829 Multiply_9694" -> "824 
Multiply_9293/fq_weights_1" [label="[96, 24, 1, 1]", style=solid]; +"830 Constant_13277" -> "42 Transpose_5484/fq_output_0" [label="[]", style=solid]; +"831 Constant_13276" -> "42 Transpose_5484/fq_output_0" [label="[]", style=solid]; +"832 Constant_13275" -> "42 Transpose_5484/fq_output_0" [label="[]", style=solid]; +"833 Constant_13274" -> "42 Transpose_5484/fq_output_0" [label="[]", style=solid]; +"834 Constant_13272" -> "59 Transpose_5480/fq_output_0" [label="[]", style=solid]; +"835 Constant_13271" -> "59 Transpose_5480/fq_output_0" [label="[]", style=solid]; +"836 Constant_13270" -> "59 Transpose_5480/fq_output_0" [label="[]", style=solid]; +"837 Constant_13269" -> "59 Transpose_5480/fq_output_0" [label="[]", style=solid]; +"838 Constant_9287" -> "57 Transpose_5480" [label="[1, 24, 1, 1]", style=solid]; +"839 Multiply_9279/fq_weights_1" -> "55 Multiply_9279" [label="[24, 88, 1, 1]", style=solid]; +"840 Constant_13267" -> "839 Multiply_9279/fq_weights_1" [label="[24, 1, 1, 1]", style=solid]; +"841 Constant_13266" -> "839 Multiply_9279/fq_weights_1" [label="[24, 1, 1, 1]", style=solid]; +"842 Constant_13265" -> "839 Multiply_9279/fq_weights_1" [label="[24, 1, 1, 1]", style=solid]; +"843 Constant_13264" -> "839 Multiply_9279/fq_weights_1" [label="[24, 1, 1, 1]", style=solid]; +"844 Multiply_9688" -> "839 Multiply_9279/fq_weights_1" [label="[24, 88, 1, 1]", style=solid]; +"845 Constant_13262" -> "53 Transpose_5456/fq_output_0" [label="[]", style=solid]; +"846 Constant_13261" -> "53 Transpose_5456/fq_output_0" [label="[]", style=solid]; +"847 Constant_13260" -> "53 Transpose_5456/fq_output_0" [label="[]", style=solid]; +"848 Constant_13259" -> "53 Transpose_5456/fq_output_0" [label="[]", style=solid]; +"849 Constant_9273" -> "49 Transpose_5454" [label="[1, 88, 1, 1]", style=solid]; +"850 Multiply_9265/fq_weights_1" -> "47 Multiply_9265" [label="[88, 1, 1, 3, 3]", style=solid]; +"851 Constant_13257" -> "850 Multiply_9265/fq_weights_1" [label="[88, 1, 1, 1, 1]", style=solid]; +"852 Constant_13256" -> "850 Multiply_9265/fq_weights_1" [label="[88, 1, 1, 1, 1]", style=solid]; +"853 Constant_13255" -> "850 Multiply_9265/fq_weights_1" [label="[88, 1, 1, 1, 1]", style=solid]; +"854 Constant_13254" -> "850 Multiply_9265/fq_weights_1" [label="[88, 1, 1, 1, 1]", style=solid]; +"855 Multiply_9683" -> "850 Multiply_9265/fq_weights_1" [label="[88, 1, 1, 3, 3]", style=solid]; +"856 Constant_13252" -> "45 Transpose_5430/fq_output_0" [label="[1, 88, 1, 1]", style=solid]; +"857 Constant_13251" -> "45 Transpose_5430/fq_output_0" [label="[1, 88, 1, 1]", style=solid]; +"858 Constant_13250" -> "45 Transpose_5430/fq_output_0" [label="[1, 88, 1, 1]", style=solid]; +"859 Constant_13249" -> "45 Transpose_5430/fq_output_0" [label="[1, 88, 1, 1]", style=solid]; +"860 Constant_9259" -> "41 Transpose_5428" [label="[1, 88, 1, 1]", style=solid]; +"861 Multiply_9251/fq_weights_1" -> "39 Multiply_9251" [label="[88, 24, 1, 1]", style=solid]; +"862 Constant_13247" -> "861 Multiply_9251/fq_weights_1" [label="[88, 1, 1, 1]", style=solid]; +"863 Constant_13246" -> "861 Multiply_9251/fq_weights_1" [label="[88, 1, 1, 1]", style=solid]; +"864 Constant_13245" -> "861 Multiply_9251/fq_weights_1" [label="[88, 1, 1, 1]", style=solid]; +"865 Constant_13244" -> "861 Multiply_9251/fq_weights_1" [label="[88, 1, 1, 1]", style=solid]; +"866 Multiply_9677" -> "861 Multiply_9251/fq_weights_1" [label="[88, 24, 1, 1]", style=solid]; +"867 Constant_13242" -> "38 Transpose_5404/fq_output_0" [label="[]", style=solid]; +"868 Constant_13241" -> "38 
Transpose_5404/fq_output_0" [label="[]", style=solid]; +"869 Constant_13240" -> "38 Transpose_5404/fq_output_0" [label="[]", style=solid]; +"870 Constant_13239" -> "38 Transpose_5404/fq_output_0" [label="[]", style=solid]; +"871 Constant_9245" -> "37 Transpose_5404" [label="[1, 24, 1, 1]", style=solid]; +"872 Multiply_9237/fq_weights_1" -> "36 Multiply_9237" [label="[24, 72, 1, 1]", style=solid]; +"873 Constant_13237" -> "872 Multiply_9237/fq_weights_1" [label="[24, 1, 1, 1]", style=solid]; +"874 Constant_13236" -> "872 Multiply_9237/fq_weights_1" [label="[24, 1, 1, 1]", style=solid]; +"875 Constant_13235" -> "872 Multiply_9237/fq_weights_1" [label="[24, 1, 1, 1]", style=solid]; +"876 Constant_13234" -> "872 Multiply_9237/fq_weights_1" [label="[24, 1, 1, 1]", style=solid]; +"877 Multiply_9671" -> "872 Multiply_9237/fq_weights_1" [label="[24, 72, 1, 1]", style=solid]; +"878 Constant_13232" -> "35 Transpose_5380/fq_output_0" [label="[]", style=solid]; +"879 Constant_13231" -> "35 Transpose_5380/fq_output_0" [label="[]", style=solid]; +"880 Constant_13230" -> "35 Transpose_5380/fq_output_0" [label="[]", style=solid]; +"881 Constant_13229" -> "35 Transpose_5380/fq_output_0" [label="[]", style=solid]; +"882 Constant_9231" -> "33 Transpose_5378" [label="[1, 72, 1, 1]", style=solid]; +"883 Multiply_9223/fq_weights_1" -> "32 Multiply_9223" [label="[72, 1, 1, 3, 3]", style=solid]; +"884 Constant_13227" -> "883 Multiply_9223/fq_weights_1" [label="[72, 1, 1, 1, 1]", style=solid]; +"885 Constant_13226" -> "883 Multiply_9223/fq_weights_1" [label="[72, 1, 1, 1, 1]", style=solid]; +"886 Constant_13225" -> "883 Multiply_9223/fq_weights_1" [label="[72, 1, 1, 1, 1]", style=solid]; +"887 Constant_13224" -> "883 Multiply_9223/fq_weights_1" [label="[72, 1, 1, 1, 1]", style=solid]; +"888 Multiply_9666" -> "883 Multiply_9223/fq_weights_1" [label="[72, 1, 1, 3, 3]", style=solid]; +"889 Constant_13222" -> "30 Transpose_5327/fq_output_0" [label="[1, 72, 1, 1]", style=solid]; +"890 Constant_13221" -> "30 Transpose_5327/fq_output_0" [label="[1, 72, 1, 1]", style=solid]; +"891 Constant_13220" -> "30 Transpose_5327/fq_output_0" [label="[1, 72, 1, 1]", style=solid]; +"892 Constant_13219" -> "30 Transpose_5327/fq_output_0" [label="[1, 72, 1, 1]", style=solid]; +"893 Constant_9217" -> "26 Transpose_5325" [label="[1, 72, 1, 1]", style=solid]; +"894 Multiply_9209/fq_weights_1" -> "24 Multiply_9209" [label="[72, 16, 1, 1]", style=solid]; +"895 Constant_13217" -> "894 Multiply_9209/fq_weights_1" [label="[72, 1, 1, 1]", style=solid]; +"896 Constant_13216" -> "894 Multiply_9209/fq_weights_1" [label="[72, 1, 1, 1]", style=solid]; +"897 Constant_13215" -> "894 Multiply_9209/fq_weights_1" [label="[72, 1, 1, 1]", style=solid]; +"898 Constant_13214" -> "894 Multiply_9209/fq_weights_1" [label="[72, 1, 1, 1]", style=solid]; +"899 Multiply_9660" -> "894 Multiply_9209/fq_weights_1" [label="[72, 16, 1, 1]", style=solid]; +"900 Constant_13212" -> "22 Transpose_5301/fq_output_0" [label="[]", style=solid]; +"901 Constant_13211" -> "22 Transpose_5301/fq_output_0" [label="[]", style=solid]; +"902 Constant_13210" -> "22 Transpose_5301/fq_output_0" [label="[]", style=solid]; +"903 Constant_13209" -> "22 Transpose_5301/fq_output_0" [label="[]", style=solid]; +"904 Constant_9203" -> "20 Transpose_5301" [label="[1, 16, 1, 1]", style=solid]; +"905 Multiply_9195/fq_weights_1" -> "18 Multiply_9195" [label="[16, 16, 1, 1]", style=solid]; +"906 Constant_13207" -> "905 Multiply_9195/fq_weights_1" [label="[16, 1, 1, 1]", style=solid]; +"907 
Constant_13206" -> "905 Multiply_9195/fq_weights_1" [label="[16, 1, 1, 1]", style=solid]; +"908 Constant_13205" -> "905 Multiply_9195/fq_weights_1" [label="[16, 1, 1, 1]", style=solid]; +"909 Constant_13204" -> "905 Multiply_9195/fq_weights_1" [label="[16, 1, 1, 1]", style=solid]; +"910 Multiply_9654" -> "905 Multiply_9195/fq_weights_1" [label="[16, 16, 1, 1]", style=solid]; +"911 Constant_13202" -> "16 Transpose_5277/fq_output_0" [label="[]", style=solid]; +"912 Constant_13201" -> "16 Transpose_5277/fq_output_0" [label="[]", style=solid]; +"913 Constant_13200" -> "16 Transpose_5277/fq_output_0" [label="[]", style=solid]; +"914 Constant_13199" -> "16 Transpose_5277/fq_output_0" [label="[]", style=solid]; +"915 Constant_13197" -> "31 Transpose_5273/fq_output_0" [label="[]", style=solid]; +"916 Constant_13196" -> "31 Transpose_5273/fq_output_0" [label="[]", style=solid]; +"917 Constant_13195" -> "31 Transpose_5273/fq_output_0" [label="[]", style=solid]; +"918 Constant_13194" -> "31 Transpose_5273/fq_output_0" [label="[]", style=solid]; +"919 Transpose_5257" -> "27 Transpose_5259" [label="[1, 16, 1, 1]", style=solid]; +"920 Convolution_810/fq_weights_1" -> "25 Convolution_810" [label="[16, 8, 1, 1]", style=solid]; +"921 Constant_13192" -> "920 Convolution_810/fq_weights_1" [label="[16, 1, 1, 1]", style=solid]; +"922 Constant_13191" -> "920 Convolution_810/fq_weights_1" [label="[16, 1, 1, 1]", style=solid]; +"923 Constant_13190" -> "920 Convolution_810/fq_weights_1" [label="[16, 1, 1, 1]", style=solid]; +"924 Constant_13189" -> "920 Convolution_810/fq_weights_1" [label="[16, 1, 1, 1]", style=solid]; +"925 Transpose_809" -> "920 Convolution_810/fq_weights_1" [label="[16, 8, 1, 1]", style=solid]; +"926 Constant_13187" -> "23 Transpose_5253/fq_output_0" [label="[]", style=solid]; +"927 Constant_13186" -> "23 Transpose_5253/fq_output_0" [label="[]", style=solid]; +"928 Constant_13185" -> "23 Transpose_5253/fq_output_0" [label="[]", style=solid]; +"929 Constant_13184" -> "23 Transpose_5253/fq_output_0" [label="[]", style=solid]; +"930 Transpose_5249" -> "19 Transpose_5251" [label="[1, 8, 1, 1]", style=solid]; +"931 Convolution_801/fq_weights_1" -> "17 Convolution_801" [label="[8, 16, 1, 1]", style=solid]; +"932 Constant_13182" -> "931 Convolution_801/fq_weights_1" [label="[8, 1, 1, 1]", style=solid]; +"933 Constant_13181" -> "931 Convolution_801/fq_weights_1" [label="[8, 1, 1, 1]", style=solid]; +"934 Constant_13180" -> "931 Convolution_801/fq_weights_1" [label="[8, 1, 1, 1]", style=solid]; +"935 Constant_13179" -> "931 Convolution_801/fq_weights_1" [label="[8, 1, 1, 1]", style=solid]; +"936 Transpose_800" -> "931 Convolution_801/fq_weights_1" [label="[8, 16, 1, 1]", style=solid]; +"937 Constant_13177" -> "15 Transpose_5245/fq_output_0" [label="[]", style=solid]; +"938 Constant_13176" -> "15 Transpose_5245/fq_output_0" [label="[]", style=solid]; +"939 Constant_13175" -> "15 Transpose_5245/fq_output_0" [label="[]", style=solid]; +"940 Constant_13174" -> "15 Transpose_5245/fq_output_0" [label="[]", style=solid]; +"941 Constant_5243" -> "13 Transpose_5245" [label="[2]", style=dashed]; +"942 Constant_13172" -> "12 Transpose_5241/fq_output_0" [label="[]", style=solid]; +"943 Constant_13171" -> "12 Transpose_5241/fq_output_0" [label="[]", style=solid]; +"944 Constant_13170" -> "12 Transpose_5241/fq_output_0" [label="[]", style=solid]; +"945 Constant_13169" -> "12 Transpose_5241/fq_output_0" [label="[]", style=solid]; +"946 Constant_9189" -> "10 Transpose_5239" [label="[1, 16, 1, 1]", style=solid]; +"947 
Multiply_9181/fq_weights_1" -> "9 Multiply_9181" [label="[16, 1, 1, 3, 3]", style=solid]; +"948 Constant_13167" -> "947 Multiply_9181/fq_weights_1" [label="[16, 1, 1, 1, 1]", style=solid]; +"949 Constant_13166" -> "947 Multiply_9181/fq_weights_1" [label="[16, 1, 1, 1, 1]", style=solid]; +"950 Constant_13165" -> "947 Multiply_9181/fq_weights_1" [label="[16, 1, 1, 1, 1]", style=solid]; +"951 Constant_13164" -> "947 Multiply_9181/fq_weights_1" [label="[16, 1, 1, 1, 1]", style=solid]; +"952 Multiply_9649" -> "947 Multiply_9181/fq_weights_1" [label="[16, 1, 1, 3, 3]", style=solid]; +"953 Constant_13162" -> "8 Transpose_5188/fq_output_0" [label="[1, 16, 1, 1]", style=solid]; +"954 Constant_13161" -> "8 Transpose_5188/fq_output_0" [label="[1, 16, 1, 1]", style=solid]; +"955 Constant_13160" -> "8 Transpose_5188/fq_output_0" [label="[1, 16, 1, 1]", style=solid]; +"956 Constant_13159" -> "8 Transpose_5188/fq_output_0" [label="[1, 16, 1, 1]", style=solid]; +"957 Constant_9175" -> "6 Transpose_5170" [label="[1, 16, 1, 1]", style=solid]; +"958 Multiply_9167/fq_weights_1" -> "5 Multiply_9167" [label="[16, 3, 3, 3]", style=solid]; +"959 Constant_13157" -> "958 Multiply_9167/fq_weights_1" [label="[16, 1, 1, 1]", style=solid]; +"960 Constant_13156" -> "958 Multiply_9167/fq_weights_1" [label="[16, 1, 1, 1]", style=solid]; +"961 Constant_13155" -> "958 Multiply_9167/fq_weights_1" [label="[16, 1, 1, 1]", style=solid]; +"962 Constant_13154" -> "958 Multiply_9167/fq_weights_1" [label="[16, 1, 1, 1]", style=solid]; +"963 Gather_10068" -> "958 Multiply_9167/fq_weights_1" [label="[16, 3, 3, 3]", style=solid]; +"964 Constant_13152" -> "4 Transpose_710/fq_output_0" [label="[]", style=solid]; +"965 Constant_13151" -> "4 Transpose_710/fq_output_0" [label="[]", style=solid]; +"966 Constant_13150" -> "4 Transpose_710/fq_output_0" [label="[]", style=solid]; +"967 Constant_13149" -> "4 Transpose_710/fq_output_0" [label="[]", style=solid]; +"968 Unsqueeze_7776" -> "3 Transpose_710" [label="[1, 1, 1, 1]", style=solid]; +"969 Unsqueeze_7782" -> "2 Transpose_7774" [label="[1, 1, 1, 1]", style=solid]; +"970 Constant_7779" -> "1 Transpose_7780" [label="[4]", style=dashed]; +} diff --git a/tests/openvino/native/data/reference_graphs/quantized/resnet-18-pytorch.dot b/tests/openvino/native/data/reference_graphs/quantized/resnet-18-pytorch_performance.dot similarity index 53% rename from tests/openvino/native/data/reference_graphs/quantized/resnet-18-pytorch.dot rename to tests/openvino/native/data/reference_graphs/quantized/resnet-18-pytorch_performance.dot index 779230f0964..480119c4db2 100644 --- a/tests/openvino/native/data/reference_graphs/quantized/resnet-18-pytorch.dot +++ b/tests/openvino/native/data/reference_graphs/quantized/resnet-18-pytorch_performance.dot @@ -105,274 +105,274 @@ strict digraph { "103 prob/sink_port_0" [id=103, type=Result]; "104 Constant_1789" [id=104, type=Constant]; "105 /fc/Gemm/WithoutBiases/fq_weights_1" [id=105, type=FakeQuantize]; -"106 Constant_121242" [id=106, type=Constant]; -"107 Constant_121241" [id=107, type=Constant]; -"108 Constant_121240" [id=108, type=Constant]; -"109 Constant_121239" [id=109, type=Constant]; +"106 Constant_48641" [id=106, type=Constant]; +"107 Constant_48640" [id=107, type=Constant]; +"108 Constant_48639" [id=108, type=Constant]; +"109 Constant_48638" [id=109, type=Constant]; "110 fc.weight" [id=110, type=Constant]; "111 Constant_380" [id=111, type=Constant]; -"112 Constant_121237" [id=112, type=Constant]; -"113 Constant_121236" [id=113, type=Constant]; -"114 
Constant_121235" [id=114, type=Constant]; -"115 Constant_121234" [id=115, type=Constant]; +"112 Constant_48636" [id=112, type=Constant]; +"113 Constant_48635" [id=113, type=Constant]; +"114 Constant_48634" [id=114, type=Constant]; +"115 Constant_48633" [id=115, type=Constant]; "116 Range_376" [id=116, type=Constant]; -"117 Constant_121232" [id=117, type=Constant]; -"118 Constant_121231" [id=118, type=Constant]; -"119 Constant_121230" [id=119, type=Constant]; -"120 Constant_121229" [id=120, type=Constant]; -"121 Constant_121207" [id=121, type=Constant]; -"122 Constant_121206" [id=122, type=Constant]; -"123 Constant_121205" [id=123, type=Constant]; -"124 Constant_121204" [id=124, type=Constant]; -"125 Constant_121182" [id=125, type=Constant]; -"126 Constant_121181" [id=126, type=Constant]; -"127 Constant_121180" [id=127, type=Constant]; -"128 Constant_121179" [id=128, type=Constant]; +"117 Constant_48631" [id=117, type=Constant]; +"118 Constant_48630" [id=118, type=Constant]; +"119 Constant_48629" [id=119, type=Constant]; +"120 Constant_48628" [id=120, type=Constant]; +"121 Constant_48606" [id=121, type=Constant]; +"122 Constant_48605" [id=122, type=Constant]; +"123 Constant_48604" [id=123, type=Constant]; +"124 Constant_48603" [id=124, type=Constant]; +"125 Constant_48581" [id=125, type=Constant]; +"126 Constant_48580" [id=126, type=Constant]; +"127 Constant_48579" [id=127, type=Constant]; +"128 Constant_48578" [id=128, type=Constant]; "129 Reshape_331" [id=129, type=Constant]; "130 /layer4/layer4.0/downsample/downsample.0/Conv/WithoutBiases/fq_weights_1" [id=130, type=FakeQuantize]; -"131 Constant_121177" [id=131, type=Constant]; -"132 Constant_121176" [id=132, type=Constant]; -"133 Constant_121175" [id=133, type=Constant]; -"134 Constant_121174" [id=134, type=Constant]; +"131 Constant_48576" [id=131, type=Constant]; +"132 Constant_48575" [id=132, type=Constant]; +"133 Constant_48574" [id=133, type=Constant]; +"134 Constant_48573" [id=134, type=Constant]; "135 onnx^^Conv_244" [id=135, label="135 onnx::Conv_244", type=Constant]; -"136 Constant_121172" [id=136, type=Constant]; -"137 Constant_121171" [id=137, type=Constant]; -"138 Constant_121170" [id=138, type=Constant]; -"139 Constant_121169" [id=139, type=Constant]; -"140 Constant_121147" [id=140, type=Constant]; -"141 Constant_121146" [id=141, type=Constant]; -"142 Constant_121145" [id=142, type=Constant]; -"143 Constant_121144" [id=143, type=Constant]; -"144 Constant_121122" [id=144, type=Constant]; -"145 Constant_121121" [id=145, type=Constant]; -"146 Constant_121120" [id=146, type=Constant]; -"147 Constant_121119" [id=147, type=Constant]; +"136 Constant_48571" [id=136, type=Constant]; +"137 Constant_48570" [id=137, type=Constant]; +"138 Constant_48569" [id=138, type=Constant]; +"139 Constant_48568" [id=139, type=Constant]; +"140 Constant_48546" [id=140, type=Constant]; +"141 Constant_48545" [id=141, type=Constant]; +"142 Constant_48544" [id=142, type=Constant]; +"143 Constant_48543" [id=143, type=Constant]; +"144 Constant_48521" [id=144, type=Constant]; +"145 Constant_48520" [id=145, type=Constant]; +"146 Constant_48519" [id=146, type=Constant]; +"147 Constant_48518" [id=147, type=Constant]; "148 Reshape_250" [id=148, type=Constant]; "149 /layer3/layer3.0/downsample/downsample.0/Conv/WithoutBiases/fq_weights_1" [id=149, type=FakeQuantize]; -"150 Constant_121117" [id=150, type=Constant]; -"151 Constant_121116" [id=151, type=Constant]; -"152 Constant_121115" [id=152, type=Constant]; -"153 Constant_121114" [id=153, type=Constant]; +"150 
Constant_48516" [id=150, type=Constant]; +"151 Constant_48515" [id=151, type=Constant]; +"152 Constant_48514" [id=152, type=Constant]; +"153 Constant_48513" [id=153, type=Constant]; "154 onnx^^Conv_229" [id=154, label="154 onnx::Conv_229", type=Constant]; -"155 Constant_121112" [id=155, type=Constant]; -"156 Constant_121111" [id=156, type=Constant]; -"157 Constant_121110" [id=157, type=Constant]; -"158 Constant_121109" [id=158, type=Constant]; -"159 Constant_121087" [id=159, type=Constant]; -"160 Constant_121086" [id=160, type=Constant]; -"161 Constant_121085" [id=161, type=Constant]; -"162 Constant_121084" [id=162, type=Constant]; -"163 Constant_121062" [id=163, type=Constant]; -"164 Constant_121061" [id=164, type=Constant]; -"165 Constant_121060" [id=165, type=Constant]; -"166 Constant_121059" [id=166, type=Constant]; +"155 Constant_48511" [id=155, type=Constant]; +"156 Constant_48510" [id=156, type=Constant]; +"157 Constant_48509" [id=157, type=Constant]; +"158 Constant_48508" [id=158, type=Constant]; +"159 Constant_48486" [id=159, type=Constant]; +"160 Constant_48485" [id=160, type=Constant]; +"161 Constant_48484" [id=161, type=Constant]; +"162 Constant_48483" [id=162, type=Constant]; +"163 Constant_48461" [id=163, type=Constant]; +"164 Constant_48460" [id=164, type=Constant]; +"165 Constant_48459" [id=165, type=Constant]; +"166 Constant_48458" [id=166, type=Constant]; "167 Reshape_169" [id=167, type=Constant]; "168 /layer2/layer2.0/downsample/downsample.0/Conv/WithoutBiases/fq_weights_1" [id=168, type=FakeQuantize]; -"169 Constant_121057" [id=169, type=Constant]; -"170 Constant_121056" [id=170, type=Constant]; -"171 Constant_121055" [id=171, type=Constant]; -"172 Constant_121054" [id=172, type=Constant]; +"169 Constant_48456" [id=169, type=Constant]; +"170 Constant_48455" [id=170, type=Constant]; +"171 Constant_48454" [id=171, type=Constant]; +"172 Constant_48453" [id=172, type=Constant]; "173 onnx^^Conv_214" [id=173, label="173 onnx::Conv_214", type=Constant]; -"174 Constant_121052" [id=174, type=Constant]; -"175 Constant_121051" [id=175, type=Constant]; -"176 Constant_121050" [id=176, type=Constant]; -"177 Constant_121049" [id=177, type=Constant]; -"178 Constant_121027" [id=178, type=Constant]; -"179 Constant_121026" [id=179, type=Constant]; -"180 Constant_121025" [id=180, type=Constant]; -"181 Constant_121024" [id=181, type=Constant]; -"182 Constant_121002" [id=182, type=Constant]; -"183 Constant_121001" [id=183, type=Constant]; -"184 Constant_121000" [id=184, type=Constant]; -"185 Constant_120999" [id=185, type=Constant]; +"174 Constant_48451" [id=174, type=Constant]; +"175 Constant_48450" [id=175, type=Constant]; +"176 Constant_48449" [id=176, type=Constant]; +"177 Constant_48448" [id=177, type=Constant]; +"178 Constant_48426" [id=178, type=Constant]; +"179 Constant_48425" [id=179, type=Constant]; +"180 Constant_48424" [id=180, type=Constant]; +"181 Constant_48423" [id=181, type=Constant]; +"182 Constant_48401" [id=182, type=Constant]; +"183 Constant_48400" [id=183, type=Constant]; +"184 Constant_48399" [id=184, type=Constant]; +"185 Constant_48398" [id=185, type=Constant]; "186 Reshape_55" [id=186, type=Constant]; "187 /conv1/Conv/WithoutBiases/fq_weights_1" [id=187, type=FakeQuantize]; -"188 Constant_120997" [id=188, type=Constant]; -"189 Constant_120996" [id=189, type=Constant]; -"190 Constant_120995" [id=190, type=Constant]; -"191 Constant_120994" [id=191, type=Constant]; +"188 Constant_48396" [id=188, type=Constant]; +"189 Constant_48395" [id=189, type=Constant]; +"190 
Constant_48394" [id=190, type=Constant]; +"191 Constant_48393" [id=191, type=Constant]; "192 Gather_1788" [id=192, type=Constant]; -"193 Constant_120992" [id=193, type=Constant]; -"194 Constant_120991" [id=194, type=Constant]; -"195 Constant_120990" [id=195, type=Constant]; -"196 Constant_120989" [id=196, type=Constant]; +"193 Constant_48391" [id=193, type=Constant]; +"194 Constant_48390" [id=194, type=Constant]; +"195 Constant_48389" [id=195, type=Constant]; +"196 Constant_48388" [id=196, type=Constant]; "197 Gather_1785" [id=197, type=Constant]; "198 Gather_1782" [id=198, type=Constant]; -"199 Constant_121022" [id=199, type=Constant]; -"200 Constant_121021" [id=200, type=Constant]; -"201 Constant_121020" [id=201, type=Constant]; -"202 Constant_121019" [id=202, type=Constant]; +"199 Constant_48421" [id=199, type=Constant]; +"200 Constant_48420" [id=200, type=Constant]; +"201 Constant_48419" [id=201, type=Constant]; +"202 Constant_48418" [id=202, type=Constant]; "203 Reshape_88" [id=203, type=Constant]; "204 /layer1/layer1.0/conv2/Conv/WithoutBiases/fq_weights_1" [id=204, type=FakeQuantize]; -"205 Constant_121017" [id=205, type=Constant]; -"206 Constant_121016" [id=206, type=Constant]; -"207 Constant_121015" [id=207, type=Constant]; -"208 Constant_121014" [id=208, type=Constant]; +"205 Constant_48416" [id=205, type=Constant]; +"206 Constant_48415" [id=206, type=Constant]; +"207 Constant_48414" [id=207, type=Constant]; +"208 Constant_48413" [id=208, type=Constant]; "209 onnx^^Conv_199" [id=209, label="209 onnx::Conv_199", type=Constant]; -"210 Constant_121012" [id=210, type=Constant]; -"211 Constant_121011" [id=211, type=Constant]; -"212 Constant_121010" [id=212, type=Constant]; -"213 Constant_121009" [id=213, type=Constant]; +"210 Constant_48411" [id=210, type=Constant]; +"211 Constant_48410" [id=211, type=Constant]; +"212 Constant_48409" [id=212, type=Constant]; +"213 Constant_48408" [id=213, type=Constant]; "214 Reshape_72" [id=214, type=Constant]; "215 /layer1/layer1.0/conv1/Conv/WithoutBiases/fq_weights_1" [id=215, type=FakeQuantize]; -"216 Constant_121007" [id=216, type=Constant]; -"217 Constant_121006" [id=217, type=Constant]; -"218 Constant_121005" [id=218, type=Constant]; -"219 Constant_121004" [id=219, type=Constant]; +"216 Constant_48406" [id=216, type=Constant]; +"217 Constant_48405" [id=217, type=Constant]; +"218 Constant_48404" [id=218, type=Constant]; +"219 Constant_48403" [id=219, type=Constant]; "220 onnx^^Conv_196" [id=220, label="220 onnx::Conv_196", type=Constant]; -"221 Constant_121047" [id=221, type=Constant]; -"222 Constant_121046" [id=222, type=Constant]; -"223 Constant_121045" [id=223, type=Constant]; -"224 Constant_121044" [id=224, type=Constant]; +"221 Constant_48446" [id=221, type=Constant]; +"222 Constant_48445" [id=222, type=Constant]; +"223 Constant_48444" [id=223, type=Constant]; +"224 Constant_48443" [id=224, type=Constant]; "225 Reshape_121" [id=225, type=Constant]; "226 /layer1/layer1.1/conv2/Conv/WithoutBiases/fq_weights_1" [id=226, type=FakeQuantize]; -"227 Constant_121042" [id=227, type=Constant]; -"228 Constant_121041" [id=228, type=Constant]; -"229 Constant_121040" [id=229, type=Constant]; -"230 Constant_121039" [id=230, type=Constant]; +"227 Constant_48441" [id=227, type=Constant]; +"228 Constant_48440" [id=228, type=Constant]; +"229 Constant_48439" [id=229, type=Constant]; +"230 Constant_48438" [id=230, type=Constant]; "231 onnx^^Conv_205" [id=231, label="231 onnx::Conv_205", type=Constant]; -"232 Constant_121037" [id=232, type=Constant]; -"233 
Constant_121036" [id=233, type=Constant]; -"234 Constant_121035" [id=234, type=Constant]; -"235 Constant_121034" [id=235, type=Constant]; +"232 Constant_48436" [id=232, type=Constant]; +"233 Constant_48435" [id=233, type=Constant]; +"234 Constant_48434" [id=234, type=Constant]; +"235 Constant_48433" [id=235, type=Constant]; "236 Reshape_105" [id=236, type=Constant]; "237 /layer1/layer1.1/conv1/Conv/WithoutBiases/fq_weights_1" [id=237, type=FakeQuantize]; -"238 Constant_121032" [id=238, type=Constant]; -"239 Constant_121031" [id=239, type=Constant]; -"240 Constant_121030" [id=240, type=Constant]; -"241 Constant_121029" [id=241, type=Constant]; +"238 Constant_48431" [id=238, type=Constant]; +"239 Constant_48430" [id=239, type=Constant]; +"240 Constant_48429" [id=240, type=Constant]; +"241 Constant_48428" [id=241, type=Constant]; "242 onnx^^Conv_202" [id=242, label="242 onnx::Conv_202", type=Constant]; -"243 Constant_121082" [id=243, type=Constant]; -"244 Constant_121081" [id=244, type=Constant]; -"245 Constant_121080" [id=245, type=Constant]; -"246 Constant_121079" [id=246, type=Constant]; +"243 Constant_48481" [id=243, type=Constant]; +"244 Constant_48480" [id=244, type=Constant]; +"245 Constant_48479" [id=245, type=Constant]; +"246 Constant_48478" [id=246, type=Constant]; "247 Reshape_154" [id=247, type=Constant]; "248 /layer2/layer2.0/conv2/Conv/WithoutBiases/fq_weights_1" [id=248, type=FakeQuantize]; -"249 Constant_121077" [id=249, type=Constant]; -"250 Constant_121076" [id=250, type=Constant]; -"251 Constant_121075" [id=251, type=Constant]; -"252 Constant_121074" [id=252, type=Constant]; +"249 Constant_48476" [id=249, type=Constant]; +"250 Constant_48475" [id=250, type=Constant]; +"251 Constant_48474" [id=251, type=Constant]; +"252 Constant_48473" [id=252, type=Constant]; "253 onnx^^Conv_211" [id=253, label="253 onnx::Conv_211", type=Constant]; -"254 Constant_121072" [id=254, type=Constant]; -"255 Constant_121071" [id=255, type=Constant]; -"256 Constant_121070" [id=256, type=Constant]; -"257 Constant_121069" [id=257, type=Constant]; +"254 Constant_48471" [id=254, type=Constant]; +"255 Constant_48470" [id=255, type=Constant]; +"256 Constant_48469" [id=256, type=Constant]; +"257 Constant_48468" [id=257, type=Constant]; "258 Reshape_138" [id=258, type=Constant]; "259 /layer2/layer2.0/conv1/Conv/WithoutBiases/fq_weights_1" [id=259, type=FakeQuantize]; -"260 Constant_121067" [id=260, type=Constant]; -"261 Constant_121066" [id=261, type=Constant]; -"262 Constant_121065" [id=262, type=Constant]; -"263 Constant_121064" [id=263, type=Constant]; +"260 Constant_48466" [id=260, type=Constant]; +"261 Constant_48465" [id=261, type=Constant]; +"262 Constant_48464" [id=262, type=Constant]; +"263 Constant_48463" [id=263, type=Constant]; "264 onnx^^Conv_208" [id=264, label="264 onnx::Conv_208", type=Constant]; -"265 Constant_121107" [id=265, type=Constant]; -"266 Constant_121106" [id=266, type=Constant]; -"267 Constant_121105" [id=267, type=Constant]; -"268 Constant_121104" [id=268, type=Constant]; +"265 Constant_48506" [id=265, type=Constant]; +"266 Constant_48505" [id=266, type=Constant]; +"267 Constant_48504" [id=267, type=Constant]; +"268 Constant_48503" [id=268, type=Constant]; "269 Reshape_202" [id=269, type=Constant]; "270 /layer2/layer2.1/conv2/Conv/WithoutBiases/fq_weights_1" [id=270, type=FakeQuantize]; -"271 Constant_121102" [id=271, type=Constant]; -"272 Constant_121101" [id=272, type=Constant]; -"273 Constant_121100" [id=273, type=Constant]; -"274 Constant_121099" [id=274, type=Constant]; 
+"271 Constant_48501" [id=271, type=Constant]; +"272 Constant_48500" [id=272, type=Constant]; +"273 Constant_48499" [id=273, type=Constant]; +"274 Constant_48498" [id=274, type=Constant]; "275 onnx^^Conv_220" [id=275, label="275 onnx::Conv_220", type=Constant]; -"276 Constant_121097" [id=276, type=Constant]; -"277 Constant_121096" [id=277, type=Constant]; -"278 Constant_121095" [id=278, type=Constant]; -"279 Constant_121094" [id=279, type=Constant]; +"276 Constant_48496" [id=276, type=Constant]; +"277 Constant_48495" [id=277, type=Constant]; +"278 Constant_48494" [id=278, type=Constant]; +"279 Constant_48493" [id=279, type=Constant]; "280 Reshape_186" [id=280, type=Constant]; "281 /layer2/layer2.1/conv1/Conv/WithoutBiases/fq_weights_1" [id=281, type=FakeQuantize]; -"282 Constant_121092" [id=282, type=Constant]; -"283 Constant_121091" [id=283, type=Constant]; -"284 Constant_121090" [id=284, type=Constant]; -"285 Constant_121089" [id=285, type=Constant]; +"282 Constant_48491" [id=282, type=Constant]; +"283 Constant_48490" [id=283, type=Constant]; +"284 Constant_48489" [id=284, type=Constant]; +"285 Constant_48488" [id=285, type=Constant]; "286 onnx^^Conv_217" [id=286, label="286 onnx::Conv_217", type=Constant]; -"287 Constant_121142" [id=287, type=Constant]; -"288 Constant_121141" [id=288, type=Constant]; -"289 Constant_121140" [id=289, type=Constant]; -"290 Constant_121139" [id=290, type=Constant]; +"287 Constant_48541" [id=287, type=Constant]; +"288 Constant_48540" [id=288, type=Constant]; +"289 Constant_48539" [id=289, type=Constant]; +"290 Constant_48538" [id=290, type=Constant]; "291 Reshape_235" [id=291, type=Constant]; "292 /layer3/layer3.0/conv2/Conv/WithoutBiases/fq_weights_1" [id=292, type=FakeQuantize]; -"293 Constant_121137" [id=293, type=Constant]; -"294 Constant_121136" [id=294, type=Constant]; -"295 Constant_121135" [id=295, type=Constant]; -"296 Constant_121134" [id=296, type=Constant]; +"293 Constant_48536" [id=293, type=Constant]; +"294 Constant_48535" [id=294, type=Constant]; +"295 Constant_48534" [id=295, type=Constant]; +"296 Constant_48533" [id=296, type=Constant]; "297 onnx^^Conv_226" [id=297, label="297 onnx::Conv_226", type=Constant]; -"298 Constant_121132" [id=298, type=Constant]; -"299 Constant_121131" [id=299, type=Constant]; -"300 Constant_121130" [id=300, type=Constant]; -"301 Constant_121129" [id=301, type=Constant]; +"298 Constant_48531" [id=298, type=Constant]; +"299 Constant_48530" [id=299, type=Constant]; +"300 Constant_48529" [id=300, type=Constant]; +"301 Constant_48528" [id=301, type=Constant]; "302 Reshape_219" [id=302, type=Constant]; "303 /layer3/layer3.0/conv1/Conv/WithoutBiases/fq_weights_1" [id=303, type=FakeQuantize]; -"304 Constant_121127" [id=304, type=Constant]; -"305 Constant_121126" [id=305, type=Constant]; -"306 Constant_121125" [id=306, type=Constant]; -"307 Constant_121124" [id=307, type=Constant]; +"304 Constant_48526" [id=304, type=Constant]; +"305 Constant_48525" [id=305, type=Constant]; +"306 Constant_48524" [id=306, type=Constant]; +"307 Constant_48523" [id=307, type=Constant]; "308 onnx^^Conv_223" [id=308, label="308 onnx::Conv_223", type=Constant]; -"309 Constant_121167" [id=309, type=Constant]; -"310 Constant_121166" [id=310, type=Constant]; -"311 Constant_121165" [id=311, type=Constant]; -"312 Constant_121164" [id=312, type=Constant]; +"309 Constant_48566" [id=309, type=Constant]; +"310 Constant_48565" [id=310, type=Constant]; +"311 Constant_48564" [id=311, type=Constant]; +"312 Constant_48563" [id=312, type=Constant]; "313 
Reshape_283" [id=313, type=Constant]; "314 /layer3/layer3.1/conv2/Conv/WithoutBiases/fq_weights_1" [id=314, type=FakeQuantize]; -"315 Constant_121162" [id=315, type=Constant]; -"316 Constant_121161" [id=316, type=Constant]; -"317 Constant_121160" [id=317, type=Constant]; -"318 Constant_121159" [id=318, type=Constant]; +"315 Constant_48561" [id=315, type=Constant]; +"316 Constant_48560" [id=316, type=Constant]; +"317 Constant_48559" [id=317, type=Constant]; +"318 Constant_48558" [id=318, type=Constant]; "319 onnx^^Conv_235" [id=319, label="319 onnx::Conv_235", type=Constant]; -"320 Constant_121157" [id=320, type=Constant]; -"321 Constant_121156" [id=321, type=Constant]; -"322 Constant_121155" [id=322, type=Constant]; -"323 Constant_121154" [id=323, type=Constant]; +"320 Constant_48556" [id=320, type=Constant]; +"321 Constant_48555" [id=321, type=Constant]; +"322 Constant_48554" [id=322, type=Constant]; +"323 Constant_48553" [id=323, type=Constant]; "324 Reshape_267" [id=324, type=Constant]; "325 /layer3/layer3.1/conv1/Conv/WithoutBiases/fq_weights_1" [id=325, type=FakeQuantize]; -"326 Constant_121152" [id=326, type=Constant]; -"327 Constant_121151" [id=327, type=Constant]; -"328 Constant_121150" [id=328, type=Constant]; -"329 Constant_121149" [id=329, type=Constant]; +"326 Constant_48551" [id=326, type=Constant]; +"327 Constant_48550" [id=327, type=Constant]; +"328 Constant_48549" [id=328, type=Constant]; +"329 Constant_48548" [id=329, type=Constant]; "330 onnx^^Conv_232" [id=330, label="330 onnx::Conv_232", type=Constant]; -"331 Constant_121202" [id=331, type=Constant]; -"332 Constant_121201" [id=332, type=Constant]; -"333 Constant_121200" [id=333, type=Constant]; -"334 Constant_121199" [id=334, type=Constant]; +"331 Constant_48601" [id=331, type=Constant]; +"332 Constant_48600" [id=332, type=Constant]; +"333 Constant_48599" [id=333, type=Constant]; +"334 Constant_48598" [id=334, type=Constant]; "335 Reshape_316" [id=335, type=Constant]; "336 /layer4/layer4.0/conv2/Conv/WithoutBiases/fq_weights_1" [id=336, type=FakeQuantize]; -"337 Constant_121197" [id=337, type=Constant]; -"338 Constant_121196" [id=338, type=Constant]; -"339 Constant_121195" [id=339, type=Constant]; -"340 Constant_121194" [id=340, type=Constant]; +"337 Constant_48596" [id=337, type=Constant]; +"338 Constant_48595" [id=338, type=Constant]; +"339 Constant_48594" [id=339, type=Constant]; +"340 Constant_48593" [id=340, type=Constant]; "341 onnx^^Conv_241" [id=341, label="341 onnx::Conv_241", type=Constant]; -"342 Constant_121192" [id=342, type=Constant]; -"343 Constant_121191" [id=343, type=Constant]; -"344 Constant_121190" [id=344, type=Constant]; -"345 Constant_121189" [id=345, type=Constant]; +"342 Constant_48591" [id=342, type=Constant]; +"343 Constant_48590" [id=343, type=Constant]; +"344 Constant_48589" [id=344, type=Constant]; +"345 Constant_48588" [id=345, type=Constant]; "346 Reshape_300" [id=346, type=Constant]; "347 /layer4/layer4.0/conv1/Conv/WithoutBiases/fq_weights_1" [id=347, type=FakeQuantize]; -"348 Constant_121187" [id=348, type=Constant]; -"349 Constant_121186" [id=349, type=Constant]; -"350 Constant_121185" [id=350, type=Constant]; -"351 Constant_121184" [id=351, type=Constant]; +"348 Constant_48586" [id=348, type=Constant]; +"349 Constant_48585" [id=349, type=Constant]; +"350 Constant_48584" [id=350, type=Constant]; +"351 Constant_48583" [id=351, type=Constant]; "352 onnx^^Conv_238" [id=352, label="352 onnx::Conv_238", type=Constant]; -"353 Constant_121227" [id=353, type=Constant]; -"354 Constant_121226" 
[id=354, type=Constant]; -"355 Constant_121225" [id=355, type=Constant]; -"356 Constant_121224" [id=356, type=Constant]; +"353 Constant_48626" [id=353, type=Constant]; +"354 Constant_48625" [id=354, type=Constant]; +"355 Constant_48624" [id=355, type=Constant]; +"356 Constant_48623" [id=356, type=Constant]; "357 Reshape_364" [id=357, type=Constant]; "358 /layer4/layer4.1/conv2/Conv/WithoutBiases/fq_weights_1" [id=358, type=FakeQuantize]; -"359 Constant_121222" [id=359, type=Constant]; -"360 Constant_121221" [id=360, type=Constant]; -"361 Constant_121220" [id=361, type=Constant]; -"362 Constant_121219" [id=362, type=Constant]; +"359 Constant_48621" [id=359, type=Constant]; +"360 Constant_48620" [id=360, type=Constant]; +"361 Constant_48619" [id=361, type=Constant]; +"362 Constant_48618" [id=362, type=Constant]; "363 onnx^^Conv_250" [id=363, label="363 onnx::Conv_250", type=Constant]; -"364 Constant_121217" [id=364, type=Constant]; -"365 Constant_121216" [id=365, type=Constant]; -"366 Constant_121215" [id=366, type=Constant]; -"367 Constant_121214" [id=367, type=Constant]; +"364 Constant_48616" [id=364, type=Constant]; +"365 Constant_48615" [id=365, type=Constant]; +"366 Constant_48614" [id=366, type=Constant]; +"367 Constant_48613" [id=367, type=Constant]; "368 Reshape_348" [id=368, type=Constant]; "369 /layer4/layer4.1/conv1/Conv/WithoutBiases/fq_weights_1" [id=369, type=FakeQuantize]; -"370 Constant_121212" [id=370, type=Constant]; -"371 Constant_121211" [id=371, type=Constant]; -"372 Constant_121210" [id=372, type=Constant]; -"373 Constant_121209" [id=373, type=Constant]; +"370 Constant_48611" [id=370, type=Constant]; +"371 Constant_48610" [id=371, type=Constant]; +"372 Constant_48609" [id=372, type=Constant]; +"373 Constant_48608" [id=373, type=Constant]; "374 onnx^^Conv_247" [id=374, label="374 onnx::Conv_247", type=Constant]; "0 data" -> "1 Multiply_1715" [label="[1, 3, 224, 224]", style=solid]; "1 Multiply_1715" -> "2 Divide_401" [label="[1, 3, 224, 224]", style=solid]; @@ -487,273 +487,273 @@ strict digraph { "102 prob" -> "103 prob/sink_port_0" [label="[1, 1000]", style=solid]; "104 Constant_1789" -> "102 prob" [label="[1, 1000]", style=solid]; "105 /fc/Gemm/WithoutBiases/fq_weights_1" -> "100 /fc/Gemm/WithoutBiases" [label="[1000, 512]", style=solid]; -"106 Constant_121242" -> "105 /fc/Gemm/WithoutBiases/fq_weights_1" [label="[1000, 1]", style=solid]; -"107 Constant_121241" -> "105 /fc/Gemm/WithoutBiases/fq_weights_1" [label="[1000, 1]", style=solid]; -"108 Constant_121240" -> "105 /fc/Gemm/WithoutBiases/fq_weights_1" [label="[1000, 1]", style=solid]; -"109 Constant_121239" -> "105 /fc/Gemm/WithoutBiases/fq_weights_1" [label="[1000, 1]", style=solid]; +"106 Constant_48641" -> "105 /fc/Gemm/WithoutBiases/fq_weights_1" [label="[1000, 1]", style=solid]; +"107 Constant_48640" -> "105 /fc/Gemm/WithoutBiases/fq_weights_1" [label="[1000, 1]", style=solid]; +"108 Constant_48639" -> "105 /fc/Gemm/WithoutBiases/fq_weights_1" [label="[1000, 1]", style=solid]; +"109 Constant_48638" -> "105 /fc/Gemm/WithoutBiases/fq_weights_1" [label="[1000, 1]", style=solid]; "110 fc.weight" -> "105 /fc/Gemm/WithoutBiases/fq_weights_1" [label="[1000, 512]", style=solid]; "111 Constant_380" -> "98 /Flatten" [label="[2]", style=dashed]; -"112 Constant_121237" -> "96 /avgpool/GlobalAveragePool/fq_output_0" [label="[]", style=solid]; -"113 Constant_121236" -> "96 /avgpool/GlobalAveragePool/fq_output_0" [label="[]", style=solid]; -"114 Constant_121235" -> "96 /avgpool/GlobalAveragePool/fq_output_0" [label="[]", 
style=solid]; -"115 Constant_121234" -> "96 /avgpool/GlobalAveragePool/fq_output_0" [label="[]", style=solid]; +"112 Constant_48636" -> "96 /avgpool/GlobalAveragePool/fq_output_0" [label="[]", style=solid]; +"113 Constant_48635" -> "96 /avgpool/GlobalAveragePool/fq_output_0" [label="[]", style=solid]; +"114 Constant_48634" -> "96 /avgpool/GlobalAveragePool/fq_output_0" [label="[]", style=solid]; +"115 Constant_48633" -> "96 /avgpool/GlobalAveragePool/fq_output_0" [label="[]", style=solid]; "116 Range_376" -> "94 /avgpool/GlobalAveragePool" [label="[2]", style=dashed]; -"117 Constant_121232" -> "92 /layer4/layer4.1/relu_1/Relu/fq_output_0" [label="[]", style=solid]; -"118 Constant_121231" -> "92 /layer4/layer4.1/relu_1/Relu/fq_output_0" [label="[]", style=solid]; -"119 Constant_121230" -> "92 /layer4/layer4.1/relu_1/Relu/fq_output_0" [label="[]", style=solid]; -"120 Constant_121229" -> "92 /layer4/layer4.1/relu_1/Relu/fq_output_0" [label="[]", style=solid]; -"121 Constant_121207" -> "86 /layer4/layer4.0/relu_1/Relu/fq_output_0" [label="[]", style=solid]; -"122 Constant_121206" -> "86 /layer4/layer4.0/relu_1/Relu/fq_output_0" [label="[]", style=solid]; -"123 Constant_121205" -> "86 /layer4/layer4.0/relu_1/Relu/fq_output_0" [label="[]", style=solid]; -"124 Constant_121204" -> "86 /layer4/layer4.0/relu_1/Relu/fq_output_0" [label="[]", style=solid]; -"125 Constant_121182" -> "78 /layer4/layer4.0/downsample/downsample.0/Conv/fq_output_0" [label="[]", style=solid]; -"126 Constant_121181" -> "78 /layer4/layer4.0/downsample/downsample.0/Conv/fq_output_0" [label="[]", style=solid]; -"127 Constant_121180" -> "78 /layer4/layer4.0/downsample/downsample.0/Conv/fq_output_0" [label="[]", style=solid]; -"128 Constant_121179" -> "78 /layer4/layer4.0/downsample/downsample.0/Conv/fq_output_0" [label="[]", style=solid]; +"117 Constant_48631" -> "92 /layer4/layer4.1/relu_1/Relu/fq_output_0" [label="[]", style=solid]; +"118 Constant_48630" -> "92 /layer4/layer4.1/relu_1/Relu/fq_output_0" [label="[]", style=solid]; +"119 Constant_48629" -> "92 /layer4/layer4.1/relu_1/Relu/fq_output_0" [label="[]", style=solid]; +"120 Constant_48628" -> "92 /layer4/layer4.1/relu_1/Relu/fq_output_0" [label="[]", style=solid]; +"121 Constant_48606" -> "86 /layer4/layer4.0/relu_1/Relu/fq_output_0" [label="[]", style=solid]; +"122 Constant_48605" -> "86 /layer4/layer4.0/relu_1/Relu/fq_output_0" [label="[]", style=solid]; +"123 Constant_48604" -> "86 /layer4/layer4.0/relu_1/Relu/fq_output_0" [label="[]", style=solid]; +"124 Constant_48603" -> "86 /layer4/layer4.0/relu_1/Relu/fq_output_0" [label="[]", style=solid]; +"125 Constant_48581" -> "78 /layer4/layer4.0/downsample/downsample.0/Conv/fq_output_0" [label="[]", style=solid]; +"126 Constant_48580" -> "78 /layer4/layer4.0/downsample/downsample.0/Conv/fq_output_0" [label="[]", style=solid]; +"127 Constant_48579" -> "78 /layer4/layer4.0/downsample/downsample.0/Conv/fq_output_0" [label="[]", style=solid]; +"128 Constant_48578" -> "78 /layer4/layer4.0/downsample/downsample.0/Conv/fq_output_0" [label="[]", style=solid]; "129 Reshape_331" -> "75 /layer4/layer4.0/downsample/downsample.0/Conv" [label="[1, 512, 1, 1]", style=solid]; "130 /layer4/layer4.0/downsample/downsample.0/Conv/WithoutBiases/fq_weights_1" -> "72 /layer4/layer4.0/downsample/downsample.0/Conv/WithoutBiases" [label="[512, 256, 1, 1]", style=solid]; -"131 Constant_121177" -> "130 /layer4/layer4.0/downsample/downsample.0/Conv/WithoutBiases/fq_weights_1" [label="[512, 1, 1, 1]", style=solid]; -"132 Constant_121176" -> "130 
/layer4/layer4.0/downsample/downsample.0/Conv/WithoutBiases/fq_weights_1" [label="[512, 1, 1, 1]", style=solid]; -"133 Constant_121175" -> "130 /layer4/layer4.0/downsample/downsample.0/Conv/WithoutBiases/fq_weights_1" [label="[512, 1, 1, 1]", style=solid]; -"134 Constant_121174" -> "130 /layer4/layer4.0/downsample/downsample.0/Conv/WithoutBiases/fq_weights_1" [label="[512, 1, 1, 1]", style=solid]; +"131 Constant_48576" -> "130 /layer4/layer4.0/downsample/downsample.0/Conv/WithoutBiases/fq_weights_1" [label="[512, 1, 1, 1]", style=solid]; +"132 Constant_48575" -> "130 /layer4/layer4.0/downsample/downsample.0/Conv/WithoutBiases/fq_weights_1" [label="[512, 1, 1, 1]", style=solid]; +"133 Constant_48574" -> "130 /layer4/layer4.0/downsample/downsample.0/Conv/WithoutBiases/fq_weights_1" [label="[512, 1, 1, 1]", style=solid]; +"134 Constant_48573" -> "130 /layer4/layer4.0/downsample/downsample.0/Conv/WithoutBiases/fq_weights_1" [label="[512, 1, 1, 1]", style=solid]; "135 onnx^^Conv_244" -> "130 /layer4/layer4.0/downsample/downsample.0/Conv/WithoutBiases/fq_weights_1" [label="[512, 256, 1, 1]", style=solid]; -"136 Constant_121172" -> "69 /layer3/layer3.1/relu_1/Relu/fq_output_0" [label="[]", style=solid]; -"137 Constant_121171" -> "69 /layer3/layer3.1/relu_1/Relu/fq_output_0" [label="[]", style=solid]; -"138 Constant_121170" -> "69 /layer3/layer3.1/relu_1/Relu/fq_output_0" [label="[]", style=solid]; -"139 Constant_121169" -> "69 /layer3/layer3.1/relu_1/Relu/fq_output_0" [label="[]", style=solid]; -"140 Constant_121147" -> "63 /layer3/layer3.0/relu_1/Relu/fq_output_0" [label="[]", style=solid]; -"141 Constant_121146" -> "63 /layer3/layer3.0/relu_1/Relu/fq_output_0" [label="[]", style=solid]; -"142 Constant_121145" -> "63 /layer3/layer3.0/relu_1/Relu/fq_output_0" [label="[]", style=solid]; -"143 Constant_121144" -> "63 /layer3/layer3.0/relu_1/Relu/fq_output_0" [label="[]", style=solid]; -"144 Constant_121122" -> "55 /layer3/layer3.0/downsample/downsample.0/Conv/fq_output_0" [label="[]", style=solid]; -"145 Constant_121121" -> "55 /layer3/layer3.0/downsample/downsample.0/Conv/fq_output_0" [label="[]", style=solid]; -"146 Constant_121120" -> "55 /layer3/layer3.0/downsample/downsample.0/Conv/fq_output_0" [label="[]", style=solid]; -"147 Constant_121119" -> "55 /layer3/layer3.0/downsample/downsample.0/Conv/fq_output_0" [label="[]", style=solid]; +"136 Constant_48571" -> "69 /layer3/layer3.1/relu_1/Relu/fq_output_0" [label="[]", style=solid]; +"137 Constant_48570" -> "69 /layer3/layer3.1/relu_1/Relu/fq_output_0" [label="[]", style=solid]; +"138 Constant_48569" -> "69 /layer3/layer3.1/relu_1/Relu/fq_output_0" [label="[]", style=solid]; +"139 Constant_48568" -> "69 /layer3/layer3.1/relu_1/Relu/fq_output_0" [label="[]", style=solid]; +"140 Constant_48546" -> "63 /layer3/layer3.0/relu_1/Relu/fq_output_0" [label="[]", style=solid]; +"141 Constant_48545" -> "63 /layer3/layer3.0/relu_1/Relu/fq_output_0" [label="[]", style=solid]; +"142 Constant_48544" -> "63 /layer3/layer3.0/relu_1/Relu/fq_output_0" [label="[]", style=solid]; +"143 Constant_48543" -> "63 /layer3/layer3.0/relu_1/Relu/fq_output_0" [label="[]", style=solid]; +"144 Constant_48521" -> "55 /layer3/layer3.0/downsample/downsample.0/Conv/fq_output_0" [label="[]", style=solid]; +"145 Constant_48520" -> "55 /layer3/layer3.0/downsample/downsample.0/Conv/fq_output_0" [label="[]", style=solid]; +"146 Constant_48519" -> "55 /layer3/layer3.0/downsample/downsample.0/Conv/fq_output_0" [label="[]", style=solid]; +"147 Constant_48518" -> "55 
/layer3/layer3.0/downsample/downsample.0/Conv/fq_output_0" [label="[]", style=solid]; "148 Reshape_250" -> "52 /layer3/layer3.0/downsample/downsample.0/Conv" [label="[1, 256, 1, 1]", style=solid]; "149 /layer3/layer3.0/downsample/downsample.0/Conv/WithoutBiases/fq_weights_1" -> "49 /layer3/layer3.0/downsample/downsample.0/Conv/WithoutBiases" [label="[256, 128, 1, 1]", style=solid]; -"150 Constant_121117" -> "149 /layer3/layer3.0/downsample/downsample.0/Conv/WithoutBiases/fq_weights_1" [label="[256, 1, 1, 1]", style=solid]; -"151 Constant_121116" -> "149 /layer3/layer3.0/downsample/downsample.0/Conv/WithoutBiases/fq_weights_1" [label="[256, 1, 1, 1]", style=solid]; -"152 Constant_121115" -> "149 /layer3/layer3.0/downsample/downsample.0/Conv/WithoutBiases/fq_weights_1" [label="[256, 1, 1, 1]", style=solid]; -"153 Constant_121114" -> "149 /layer3/layer3.0/downsample/downsample.0/Conv/WithoutBiases/fq_weights_1" [label="[256, 1, 1, 1]", style=solid]; +"150 Constant_48516" -> "149 /layer3/layer3.0/downsample/downsample.0/Conv/WithoutBiases/fq_weights_1" [label="[256, 1, 1, 1]", style=solid]; +"151 Constant_48515" -> "149 /layer3/layer3.0/downsample/downsample.0/Conv/WithoutBiases/fq_weights_1" [label="[256, 1, 1, 1]", style=solid]; +"152 Constant_48514" -> "149 /layer3/layer3.0/downsample/downsample.0/Conv/WithoutBiases/fq_weights_1" [label="[256, 1, 1, 1]", style=solid]; +"153 Constant_48513" -> "149 /layer3/layer3.0/downsample/downsample.0/Conv/WithoutBiases/fq_weights_1" [label="[256, 1, 1, 1]", style=solid]; "154 onnx^^Conv_229" -> "149 /layer3/layer3.0/downsample/downsample.0/Conv/WithoutBiases/fq_weights_1" [label="[256, 128, 1, 1]", style=solid]; -"155 Constant_121112" -> "46 /layer2/layer2.1/relu_1/Relu/fq_output_0" [label="[]", style=solid]; -"156 Constant_121111" -> "46 /layer2/layer2.1/relu_1/Relu/fq_output_0" [label="[]", style=solid]; -"157 Constant_121110" -> "46 /layer2/layer2.1/relu_1/Relu/fq_output_0" [label="[]", style=solid]; -"158 Constant_121109" -> "46 /layer2/layer2.1/relu_1/Relu/fq_output_0" [label="[]", style=solid]; -"159 Constant_121087" -> "40 /layer2/layer2.0/relu_1/Relu/fq_output_0" [label="[]", style=solid]; -"160 Constant_121086" -> "40 /layer2/layer2.0/relu_1/Relu/fq_output_0" [label="[]", style=solid]; -"161 Constant_121085" -> "40 /layer2/layer2.0/relu_1/Relu/fq_output_0" [label="[]", style=solid]; -"162 Constant_121084" -> "40 /layer2/layer2.0/relu_1/Relu/fq_output_0" [label="[]", style=solid]; -"163 Constant_121062" -> "32 /layer2/layer2.0/downsample/downsample.0/Conv/fq_output_0" [label="[]", style=solid]; -"164 Constant_121061" -> "32 /layer2/layer2.0/downsample/downsample.0/Conv/fq_output_0" [label="[]", style=solid]; -"165 Constant_121060" -> "32 /layer2/layer2.0/downsample/downsample.0/Conv/fq_output_0" [label="[]", style=solid]; -"166 Constant_121059" -> "32 /layer2/layer2.0/downsample/downsample.0/Conv/fq_output_0" [label="[]", style=solid]; +"155 Constant_48511" -> "46 /layer2/layer2.1/relu_1/Relu/fq_output_0" [label="[]", style=solid]; +"156 Constant_48510" -> "46 /layer2/layer2.1/relu_1/Relu/fq_output_0" [label="[]", style=solid]; +"157 Constant_48509" -> "46 /layer2/layer2.1/relu_1/Relu/fq_output_0" [label="[]", style=solid]; +"158 Constant_48508" -> "46 /layer2/layer2.1/relu_1/Relu/fq_output_0" [label="[]", style=solid]; +"159 Constant_48486" -> "40 /layer2/layer2.0/relu_1/Relu/fq_output_0" [label="[]", style=solid]; +"160 Constant_48485" -> "40 /layer2/layer2.0/relu_1/Relu/fq_output_0" [label="[]", style=solid]; +"161 Constant_48484" -> "40 
/layer2/layer2.0/relu_1/Relu/fq_output_0" [label="[]", style=solid]; +"162 Constant_48483" -> "40 /layer2/layer2.0/relu_1/Relu/fq_output_0" [label="[]", style=solid]; +"163 Constant_48461" -> "32 /layer2/layer2.0/downsample/downsample.0/Conv/fq_output_0" [label="[]", style=solid]; +"164 Constant_48460" -> "32 /layer2/layer2.0/downsample/downsample.0/Conv/fq_output_0" [label="[]", style=solid]; +"165 Constant_48459" -> "32 /layer2/layer2.0/downsample/downsample.0/Conv/fq_output_0" [label="[]", style=solid]; +"166 Constant_48458" -> "32 /layer2/layer2.0/downsample/downsample.0/Conv/fq_output_0" [label="[]", style=solid]; "167 Reshape_169" -> "29 /layer2/layer2.0/downsample/downsample.0/Conv" [label="[1, 128, 1, 1]", style=solid]; "168 /layer2/layer2.0/downsample/downsample.0/Conv/WithoutBiases/fq_weights_1" -> "25 /layer2/layer2.0/downsample/downsample.0/Conv/WithoutBiases" [label="[128, 64, 1, 1]", style=solid]; -"169 Constant_121057" -> "168 /layer2/layer2.0/downsample/downsample.0/Conv/WithoutBiases/fq_weights_1" [label="[128, 1, 1, 1]", style=solid]; -"170 Constant_121056" -> "168 /layer2/layer2.0/downsample/downsample.0/Conv/WithoutBiases/fq_weights_1" [label="[128, 1, 1, 1]", style=solid]; -"171 Constant_121055" -> "168 /layer2/layer2.0/downsample/downsample.0/Conv/WithoutBiases/fq_weights_1" [label="[128, 1, 1, 1]", style=solid]; -"172 Constant_121054" -> "168 /layer2/layer2.0/downsample/downsample.0/Conv/WithoutBiases/fq_weights_1" [label="[128, 1, 1, 1]", style=solid]; +"169 Constant_48456" -> "168 /layer2/layer2.0/downsample/downsample.0/Conv/WithoutBiases/fq_weights_1" [label="[128, 1, 1, 1]", style=solid]; +"170 Constant_48455" -> "168 /layer2/layer2.0/downsample/downsample.0/Conv/WithoutBiases/fq_weights_1" [label="[128, 1, 1, 1]", style=solid]; +"171 Constant_48454" -> "168 /layer2/layer2.0/downsample/downsample.0/Conv/WithoutBiases/fq_weights_1" [label="[128, 1, 1, 1]", style=solid]; +"172 Constant_48453" -> "168 /layer2/layer2.0/downsample/downsample.0/Conv/WithoutBiases/fq_weights_1" [label="[128, 1, 1, 1]", style=solid]; "173 onnx^^Conv_214" -> "168 /layer2/layer2.0/downsample/downsample.0/Conv/WithoutBiases/fq_weights_1" [label="[128, 64, 1, 1]", style=solid]; -"174 Constant_121052" -> "21 /layer1/layer1.1/relu_1/Relu/fq_output_0" [label="[]", style=solid]; -"175 Constant_121051" -> "21 /layer1/layer1.1/relu_1/Relu/fq_output_0" [label="[]", style=solid]; -"176 Constant_121050" -> "21 /layer1/layer1.1/relu_1/Relu/fq_output_0" [label="[]", style=solid]; -"177 Constant_121049" -> "21 /layer1/layer1.1/relu_1/Relu/fq_output_0" [label="[]", style=solid]; -"178 Constant_121027" -> "13 /layer1/layer1.0/relu_1/Relu/fq_output_0" [label="[]", style=solid]; -"179 Constant_121026" -> "13 /layer1/layer1.0/relu_1/Relu/fq_output_0" [label="[]", style=solid]; -"180 Constant_121025" -> "13 /layer1/layer1.0/relu_1/Relu/fq_output_0" [label="[]", style=solid]; -"181 Constant_121024" -> "13 /layer1/layer1.0/relu_1/Relu/fq_output_0" [label="[]", style=solid]; -"182 Constant_121002" -> "7 /relu/Relu/fq_output_0" [label="[]", style=solid]; -"183 Constant_121001" -> "7 /relu/Relu/fq_output_0" [label="[]", style=solid]; -"184 Constant_121000" -> "7 /relu/Relu/fq_output_0" [label="[]", style=solid]; -"185 Constant_120999" -> "7 /relu/Relu/fq_output_0" [label="[]", style=solid]; +"174 Constant_48451" -> "21 /layer1/layer1.1/relu_1/Relu/fq_output_0" [label="[]", style=solid]; +"175 Constant_48450" -> "21 /layer1/layer1.1/relu_1/Relu/fq_output_0" [label="[]", style=solid]; +"176 Constant_48449" -> "21 
/layer1/layer1.1/relu_1/Relu/fq_output_0" [label="[]", style=solid]; +"177 Constant_48448" -> "21 /layer1/layer1.1/relu_1/Relu/fq_output_0" [label="[]", style=solid]; +"178 Constant_48426" -> "13 /layer1/layer1.0/relu_1/Relu/fq_output_0" [label="[]", style=solid]; +"179 Constant_48425" -> "13 /layer1/layer1.0/relu_1/Relu/fq_output_0" [label="[]", style=solid]; +"180 Constant_48424" -> "13 /layer1/layer1.0/relu_1/Relu/fq_output_0" [label="[]", style=solid]; +"181 Constant_48423" -> "13 /layer1/layer1.0/relu_1/Relu/fq_output_0" [label="[]", style=solid]; +"182 Constant_48401" -> "7 /relu/Relu/fq_output_0" [label="[]", style=solid]; +"183 Constant_48400" -> "7 /relu/Relu/fq_output_0" [label="[]", style=solid]; +"184 Constant_48399" -> "7 /relu/Relu/fq_output_0" [label="[]", style=solid]; +"185 Constant_48398" -> "7 /relu/Relu/fq_output_0" [label="[]", style=solid]; "186 Reshape_55" -> "5 /conv1/Conv" [label="[1, 64, 1, 1]", style=solid]; "187 /conv1/Conv/WithoutBiases/fq_weights_1" -> "4 /conv1/Conv/WithoutBiases" [label="[64, 3, 7, 7]", style=solid]; -"188 Constant_120997" -> "187 /conv1/Conv/WithoutBiases/fq_weights_1" [label="[64, 1, 1, 1]", style=solid]; -"189 Constant_120996" -> "187 /conv1/Conv/WithoutBiases/fq_weights_1" [label="[64, 1, 1, 1]", style=solid]; -"190 Constant_120995" -> "187 /conv1/Conv/WithoutBiases/fq_weights_1" [label="[64, 1, 1, 1]", style=solid]; -"191 Constant_120994" -> "187 /conv1/Conv/WithoutBiases/fq_weights_1" [label="[64, 1, 1, 1]", style=solid]; +"188 Constant_48396" -> "187 /conv1/Conv/WithoutBiases/fq_weights_1" [label="[64, 1, 1, 1]", style=solid]; +"189 Constant_48395" -> "187 /conv1/Conv/WithoutBiases/fq_weights_1" [label="[64, 1, 1, 1]", style=solid]; +"190 Constant_48394" -> "187 /conv1/Conv/WithoutBiases/fq_weights_1" [label="[64, 1, 1, 1]", style=solid]; +"191 Constant_48393" -> "187 /conv1/Conv/WithoutBiases/fq_weights_1" [label="[64, 1, 1, 1]", style=solid]; "192 Gather_1788" -> "187 /conv1/Conv/WithoutBiases/fq_weights_1" [label="[64, 3, 7, 7]", style=solid]; -"193 Constant_120992" -> "3 Divide_401/fq_output_0" [label="[]", style=solid]; -"194 Constant_120991" -> "3 Divide_401/fq_output_0" [label="[]", style=solid]; -"195 Constant_120990" -> "3 Divide_401/fq_output_0" [label="[]", style=solid]; -"196 Constant_120989" -> "3 Divide_401/fq_output_0" [label="[]", style=solid]; +"193 Constant_48391" -> "3 Divide_401/fq_output_0" [label="[]", style=solid]; +"194 Constant_48390" -> "3 Divide_401/fq_output_0" [label="[]", style=solid]; +"195 Constant_48389" -> "3 Divide_401/fq_output_0" [label="[]", style=solid]; +"196 Constant_48388" -> "3 Divide_401/fq_output_0" [label="[]", style=solid]; "197 Gather_1785" -> "2 Divide_401" [label="[1, 3, 1, 1]", style=solid]; "198 Gather_1782" -> "1 Multiply_1715" [label="[1, 3, 1, 1]", style=solid]; -"199 Constant_121022" -> "27 /layer1/layer1.0/conv2/Conv/fq_output_0" [label="[]", style=solid]; -"200 Constant_121021" -> "27 /layer1/layer1.0/conv2/Conv/fq_output_0" [label="[]", style=solid]; -"201 Constant_121020" -> "27 /layer1/layer1.0/conv2/Conv/fq_output_0" [label="[]", style=solid]; -"202 Constant_121019" -> "27 /layer1/layer1.0/conv2/Conv/fq_output_0" [label="[]", style=solid]; +"199 Constant_48421" -> "27 /layer1/layer1.0/conv2/Conv/fq_output_0" [label="[]", style=solid]; +"200 Constant_48420" -> "27 /layer1/layer1.0/conv2/Conv/fq_output_0" [label="[]", style=solid]; +"201 Constant_48419" -> "27 /layer1/layer1.0/conv2/Conv/fq_output_0" [label="[]", style=solid]; +"202 Constant_48418" -> "27 
/layer1/layer1.0/conv2/Conv/fq_output_0" [label="[]", style=solid]; "203 Reshape_88" -> "23 /layer1/layer1.0/conv2/Conv" [label="[1, 64, 1, 1]", style=solid]; "204 /layer1/layer1.0/conv2/Conv/WithoutBiases/fq_weights_1" -> "20 /layer1/layer1.0/conv2/Conv/WithoutBiases" [label="[64, 64, 3, 3]", style=solid]; -"205 Constant_121017" -> "204 /layer1/layer1.0/conv2/Conv/WithoutBiases/fq_weights_1" [label="[64, 1, 1, 1]", style=solid]; -"206 Constant_121016" -> "204 /layer1/layer1.0/conv2/Conv/WithoutBiases/fq_weights_1" [label="[64, 1, 1, 1]", style=solid]; -"207 Constant_121015" -> "204 /layer1/layer1.0/conv2/Conv/WithoutBiases/fq_weights_1" [label="[64, 1, 1, 1]", style=solid]; -"208 Constant_121014" -> "204 /layer1/layer1.0/conv2/Conv/WithoutBiases/fq_weights_1" [label="[64, 1, 1, 1]", style=solid]; +"205 Constant_48416" -> "204 /layer1/layer1.0/conv2/Conv/WithoutBiases/fq_weights_1" [label="[64, 1, 1, 1]", style=solid]; +"206 Constant_48415" -> "204 /layer1/layer1.0/conv2/Conv/WithoutBiases/fq_weights_1" [label="[64, 1, 1, 1]", style=solid]; +"207 Constant_48414" -> "204 /layer1/layer1.0/conv2/Conv/WithoutBiases/fq_weights_1" [label="[64, 1, 1, 1]", style=solid]; +"208 Constant_48413" -> "204 /layer1/layer1.0/conv2/Conv/WithoutBiases/fq_weights_1" [label="[64, 1, 1, 1]", style=solid]; "209 onnx^^Conv_199" -> "204 /layer1/layer1.0/conv2/Conv/WithoutBiases/fq_weights_1" [label="[64, 64, 3, 3]", style=solid]; -"210 Constant_121012" -> "17 /layer1/layer1.0/relu/Relu/fq_output_0" [label="[]", style=solid]; -"211 Constant_121011" -> "17 /layer1/layer1.0/relu/Relu/fq_output_0" [label="[]", style=solid]; -"212 Constant_121010" -> "17 /layer1/layer1.0/relu/Relu/fq_output_0" [label="[]", style=solid]; -"213 Constant_121009" -> "17 /layer1/layer1.0/relu/Relu/fq_output_0" [label="[]", style=solid]; +"210 Constant_48411" -> "17 /layer1/layer1.0/relu/Relu/fq_output_0" [label="[]", style=solid]; +"211 Constant_48410" -> "17 /layer1/layer1.0/relu/Relu/fq_output_0" [label="[]", style=solid]; +"212 Constant_48409" -> "17 /layer1/layer1.0/relu/Relu/fq_output_0" [label="[]", style=solid]; +"213 Constant_48408" -> "17 /layer1/layer1.0/relu/Relu/fq_output_0" [label="[]", style=solid]; "214 Reshape_72" -> "12 /layer1/layer1.0/conv1/Conv" [label="[1, 64, 1, 1]", style=solid]; "215 /layer1/layer1.0/conv1/Conv/WithoutBiases/fq_weights_1" -> "10 /layer1/layer1.0/conv1/Conv/WithoutBiases" [label="[64, 64, 3, 3]", style=solid]; -"216 Constant_121007" -> "215 /layer1/layer1.0/conv1/Conv/WithoutBiases/fq_weights_1" [label="[64, 1, 1, 1]", style=solid]; -"217 Constant_121006" -> "215 /layer1/layer1.0/conv1/Conv/WithoutBiases/fq_weights_1" [label="[64, 1, 1, 1]", style=solid]; -"218 Constant_121005" -> "215 /layer1/layer1.0/conv1/Conv/WithoutBiases/fq_weights_1" [label="[64, 1, 1, 1]", style=solid]; -"219 Constant_121004" -> "215 /layer1/layer1.0/conv1/Conv/WithoutBiases/fq_weights_1" [label="[64, 1, 1, 1]", style=solid]; +"216 Constant_48406" -> "215 /layer1/layer1.0/conv1/Conv/WithoutBiases/fq_weights_1" [label="[64, 1, 1, 1]", style=solid]; +"217 Constant_48405" -> "215 /layer1/layer1.0/conv1/Conv/WithoutBiases/fq_weights_1" [label="[64, 1, 1, 1]", style=solid]; +"218 Constant_48404" -> "215 /layer1/layer1.0/conv1/Conv/WithoutBiases/fq_weights_1" [label="[64, 1, 1, 1]", style=solid]; +"219 Constant_48403" -> "215 /layer1/layer1.0/conv1/Conv/WithoutBiases/fq_weights_1" [label="[64, 1, 1, 1]", style=solid]; "220 onnx^^Conv_196" -> "215 /layer1/layer1.0/conv1/Conv/WithoutBiases/fq_weights_1" [label="[64, 64, 3, 3]", 
style=solid]; -"221 Constant_121047" -> "36 /layer1/layer1.1/conv2/Conv/fq_output_0" [label="[]", style=solid]; -"222 Constant_121046" -> "36 /layer1/layer1.1/conv2/Conv/fq_output_0" [label="[]", style=solid]; -"223 Constant_121045" -> "36 /layer1/layer1.1/conv2/Conv/fq_output_0" [label="[]", style=solid]; -"224 Constant_121044" -> "36 /layer1/layer1.1/conv2/Conv/fq_output_0" [label="[]", style=solid]; +"221 Constant_48446" -> "36 /layer1/layer1.1/conv2/Conv/fq_output_0" [label="[]", style=solid]; +"222 Constant_48445" -> "36 /layer1/layer1.1/conv2/Conv/fq_output_0" [label="[]", style=solid]; +"223 Constant_48444" -> "36 /layer1/layer1.1/conv2/Conv/fq_output_0" [label="[]", style=solid]; +"224 Constant_48443" -> "36 /layer1/layer1.1/conv2/Conv/fq_output_0" [label="[]", style=solid]; "225 Reshape_121" -> "33 /layer1/layer1.1/conv2/Conv" [label="[1, 64, 1, 1]", style=solid]; "226 /layer1/layer1.1/conv2/Conv/WithoutBiases/fq_weights_1" -> "30 /layer1/layer1.1/conv2/Conv/WithoutBiases" [label="[64, 64, 3, 3]", style=solid]; -"227 Constant_121042" -> "226 /layer1/layer1.1/conv2/Conv/WithoutBiases/fq_weights_1" [label="[64, 1, 1, 1]", style=solid]; -"228 Constant_121041" -> "226 /layer1/layer1.1/conv2/Conv/WithoutBiases/fq_weights_1" [label="[64, 1, 1, 1]", style=solid]; -"229 Constant_121040" -> "226 /layer1/layer1.1/conv2/Conv/WithoutBiases/fq_weights_1" [label="[64, 1, 1, 1]", style=solid]; -"230 Constant_121039" -> "226 /layer1/layer1.1/conv2/Conv/WithoutBiases/fq_weights_1" [label="[64, 1, 1, 1]", style=solid]; +"227 Constant_48441" -> "226 /layer1/layer1.1/conv2/Conv/WithoutBiases/fq_weights_1" [label="[64, 1, 1, 1]", style=solid]; +"228 Constant_48440" -> "226 /layer1/layer1.1/conv2/Conv/WithoutBiases/fq_weights_1" [label="[64, 1, 1, 1]", style=solid]; +"229 Constant_48439" -> "226 /layer1/layer1.1/conv2/Conv/WithoutBiases/fq_weights_1" [label="[64, 1, 1, 1]", style=solid]; +"230 Constant_48438" -> "226 /layer1/layer1.1/conv2/Conv/WithoutBiases/fq_weights_1" [label="[64, 1, 1, 1]", style=solid]; "231 onnx^^Conv_205" -> "226 /layer1/layer1.1/conv2/Conv/WithoutBiases/fq_weights_1" [label="[64, 64, 3, 3]", style=solid]; -"232 Constant_121037" -> "26 /layer1/layer1.1/relu/Relu/fq_output_0" [label="[]", style=solid]; -"233 Constant_121036" -> "26 /layer1/layer1.1/relu/Relu/fq_output_0" [label="[]", style=solid]; -"234 Constant_121035" -> "26 /layer1/layer1.1/relu/Relu/fq_output_0" [label="[]", style=solid]; -"235 Constant_121034" -> "26 /layer1/layer1.1/relu/Relu/fq_output_0" [label="[]", style=solid]; +"232 Constant_48436" -> "26 /layer1/layer1.1/relu/Relu/fq_output_0" [label="[]", style=solid]; +"233 Constant_48435" -> "26 /layer1/layer1.1/relu/Relu/fq_output_0" [label="[]", style=solid]; +"234 Constant_48434" -> "26 /layer1/layer1.1/relu/Relu/fq_output_0" [label="[]", style=solid]; +"235 Constant_48433" -> "26 /layer1/layer1.1/relu/Relu/fq_output_0" [label="[]", style=solid]; "236 Reshape_105" -> "19 /layer1/layer1.1/conv1/Conv" [label="[1, 64, 1, 1]", style=solid]; "237 /layer1/layer1.1/conv1/Conv/WithoutBiases/fq_weights_1" -> "16 /layer1/layer1.1/conv1/Conv/WithoutBiases" [label="[64, 64, 3, 3]", style=solid]; -"238 Constant_121032" -> "237 /layer1/layer1.1/conv1/Conv/WithoutBiases/fq_weights_1" [label="[64, 1, 1, 1]", style=solid]; -"239 Constant_121031" -> "237 /layer1/layer1.1/conv1/Conv/WithoutBiases/fq_weights_1" [label="[64, 1, 1, 1]", style=solid]; -"240 Constant_121030" -> "237 /layer1/layer1.1/conv1/Conv/WithoutBiases/fq_weights_1" [label="[64, 1, 1, 1]", style=solid]; -"241 
Constant_121029" -> "237 /layer1/layer1.1/conv1/Conv/WithoutBiases/fq_weights_1" [label="[64, 1, 1, 1]", style=solid]; +"238 Constant_48431" -> "237 /layer1/layer1.1/conv1/Conv/WithoutBiases/fq_weights_1" [label="[64, 1, 1, 1]", style=solid]; +"239 Constant_48430" -> "237 /layer1/layer1.1/conv1/Conv/WithoutBiases/fq_weights_1" [label="[64, 1, 1, 1]", style=solid]; +"240 Constant_48429" -> "237 /layer1/layer1.1/conv1/Conv/WithoutBiases/fq_weights_1" [label="[64, 1, 1, 1]", style=solid]; +"241 Constant_48428" -> "237 /layer1/layer1.1/conv1/Conv/WithoutBiases/fq_weights_1" [label="[64, 1, 1, 1]", style=solid]; "242 onnx^^Conv_202" -> "237 /layer1/layer1.1/conv1/Conv/WithoutBiases/fq_weights_1" [label="[64, 64, 3, 3]", style=solid]; -"243 Constant_121082" -> "41 /layer2/layer2.0/conv2/Conv/fq_output_0" [label="[]", style=solid]; -"244 Constant_121081" -> "41 /layer2/layer2.0/conv2/Conv/fq_output_0" [label="[]", style=solid]; -"245 Constant_121080" -> "41 /layer2/layer2.0/conv2/Conv/fq_output_0" [label="[]", style=solid]; -"246 Constant_121079" -> "41 /layer2/layer2.0/conv2/Conv/fq_output_0" [label="[]", style=solid]; +"243 Constant_48481" -> "41 /layer2/layer2.0/conv2/Conv/fq_output_0" [label="[]", style=solid]; +"244 Constant_48480" -> "41 /layer2/layer2.0/conv2/Conv/fq_output_0" [label="[]", style=solid]; +"245 Constant_48479" -> "41 /layer2/layer2.0/conv2/Conv/fq_output_0" [label="[]", style=solid]; +"246 Constant_48478" -> "41 /layer2/layer2.0/conv2/Conv/fq_output_0" [label="[]", style=solid]; "247 Reshape_154" -> "39 /layer2/layer2.0/conv2/Conv" [label="[1, 128, 1, 1]", style=solid]; "248 /layer2/layer2.0/conv2/Conv/WithoutBiases/fq_weights_1" -> "37 /layer2/layer2.0/conv2/Conv/WithoutBiases" [label="[128, 128, 3, 3]", style=solid]; -"249 Constant_121077" -> "248 /layer2/layer2.0/conv2/Conv/WithoutBiases/fq_weights_1" [label="[128, 1, 1, 1]", style=solid]; -"250 Constant_121076" -> "248 /layer2/layer2.0/conv2/Conv/WithoutBiases/fq_weights_1" [label="[128, 1, 1, 1]", style=solid]; -"251 Constant_121075" -> "248 /layer2/layer2.0/conv2/Conv/WithoutBiases/fq_weights_1" [label="[128, 1, 1, 1]", style=solid]; -"252 Constant_121074" -> "248 /layer2/layer2.0/conv2/Conv/WithoutBiases/fq_weights_1" [label="[128, 1, 1, 1]", style=solid]; +"249 Constant_48476" -> "248 /layer2/layer2.0/conv2/Conv/WithoutBiases/fq_weights_1" [label="[128, 1, 1, 1]", style=solid]; +"250 Constant_48475" -> "248 /layer2/layer2.0/conv2/Conv/WithoutBiases/fq_weights_1" [label="[128, 1, 1, 1]", style=solid]; +"251 Constant_48474" -> "248 /layer2/layer2.0/conv2/Conv/WithoutBiases/fq_weights_1" [label="[128, 1, 1, 1]", style=solid]; +"252 Constant_48473" -> "248 /layer2/layer2.0/conv2/Conv/WithoutBiases/fq_weights_1" [label="[128, 1, 1, 1]", style=solid]; "253 onnx^^Conv_211" -> "248 /layer2/layer2.0/conv2/Conv/WithoutBiases/fq_weights_1" [label="[128, 128, 3, 3]", style=solid]; -"254 Constant_121072" -> "34 /layer2/layer2.0/relu/Relu/fq_output_0" [label="[]", style=solid]; -"255 Constant_121071" -> "34 /layer2/layer2.0/relu/Relu/fq_output_0" [label="[]", style=solid]; -"256 Constant_121070" -> "34 /layer2/layer2.0/relu/Relu/fq_output_0" [label="[]", style=solid]; -"257 Constant_121069" -> "34 /layer2/layer2.0/relu/Relu/fq_output_0" [label="[]", style=solid]; +"254 Constant_48471" -> "34 /layer2/layer2.0/relu/Relu/fq_output_0" [label="[]", style=solid]; +"255 Constant_48470" -> "34 /layer2/layer2.0/relu/Relu/fq_output_0" [label="[]", style=solid]; +"256 Constant_48469" -> "34 /layer2/layer2.0/relu/Relu/fq_output_0" 
[label="[]", style=solid]; +"257 Constant_48468" -> "34 /layer2/layer2.0/relu/Relu/fq_output_0" [label="[]", style=solid]; "258 Reshape_138" -> "28 /layer2/layer2.0/conv1/Conv" [label="[1, 128, 1, 1]", style=solid]; "259 /layer2/layer2.0/conv1/Conv/WithoutBiases/fq_weights_1" -> "24 /layer2/layer2.0/conv1/Conv/WithoutBiases" [label="[128, 64, 3, 3]", style=solid]; -"260 Constant_121067" -> "259 /layer2/layer2.0/conv1/Conv/WithoutBiases/fq_weights_1" [label="[128, 1, 1, 1]", style=solid]; -"261 Constant_121066" -> "259 /layer2/layer2.0/conv1/Conv/WithoutBiases/fq_weights_1" [label="[128, 1, 1, 1]", style=solid]; -"262 Constant_121065" -> "259 /layer2/layer2.0/conv1/Conv/WithoutBiases/fq_weights_1" [label="[128, 1, 1, 1]", style=solid]; -"263 Constant_121064" -> "259 /layer2/layer2.0/conv1/Conv/WithoutBiases/fq_weights_1" [label="[128, 1, 1, 1]", style=solid]; +"260 Constant_48466" -> "259 /layer2/layer2.0/conv1/Conv/WithoutBiases/fq_weights_1" [label="[128, 1, 1, 1]", style=solid]; +"261 Constant_48465" -> "259 /layer2/layer2.0/conv1/Conv/WithoutBiases/fq_weights_1" [label="[128, 1, 1, 1]", style=solid]; +"262 Constant_48464" -> "259 /layer2/layer2.0/conv1/Conv/WithoutBiases/fq_weights_1" [label="[128, 1, 1, 1]", style=solid]; +"263 Constant_48463" -> "259 /layer2/layer2.0/conv1/Conv/WithoutBiases/fq_weights_1" [label="[128, 1, 1, 1]", style=solid]; "264 onnx^^Conv_208" -> "259 /layer2/layer2.0/conv1/Conv/WithoutBiases/fq_weights_1" [label="[128, 64, 3, 3]", style=solid]; -"265 Constant_121107" -> "59 /layer2/layer2.1/conv2/Conv/fq_output_0" [label="[]", style=solid]; -"266 Constant_121106" -> "59 /layer2/layer2.1/conv2/Conv/fq_output_0" [label="[]", style=solid]; -"267 Constant_121105" -> "59 /layer2/layer2.1/conv2/Conv/fq_output_0" [label="[]", style=solid]; -"268 Constant_121104" -> "59 /layer2/layer2.1/conv2/Conv/fq_output_0" [label="[]", style=solid]; +"265 Constant_48506" -> "59 /layer2/layer2.1/conv2/Conv/fq_output_0" [label="[]", style=solid]; +"266 Constant_48505" -> "59 /layer2/layer2.1/conv2/Conv/fq_output_0" [label="[]", style=solid]; +"267 Constant_48504" -> "59 /layer2/layer2.1/conv2/Conv/fq_output_0" [label="[]", style=solid]; +"268 Constant_48503" -> "59 /layer2/layer2.1/conv2/Conv/fq_output_0" [label="[]", style=solid]; "269 Reshape_202" -> "56 /layer2/layer2.1/conv2/Conv" [label="[1, 128, 1, 1]", style=solid]; "270 /layer2/layer2.1/conv2/Conv/WithoutBiases/fq_weights_1" -> "53 /layer2/layer2.1/conv2/Conv/WithoutBiases" [label="[128, 128, 3, 3]", style=solid]; -"271 Constant_121102" -> "270 /layer2/layer2.1/conv2/Conv/WithoutBiases/fq_weights_1" [label="[128, 1, 1, 1]", style=solid]; -"272 Constant_121101" -> "270 /layer2/layer2.1/conv2/Conv/WithoutBiases/fq_weights_1" [label="[128, 1, 1, 1]", style=solid]; -"273 Constant_121100" -> "270 /layer2/layer2.1/conv2/Conv/WithoutBiases/fq_weights_1" [label="[128, 1, 1, 1]", style=solid]; -"274 Constant_121099" -> "270 /layer2/layer2.1/conv2/Conv/WithoutBiases/fq_weights_1" [label="[128, 1, 1, 1]", style=solid]; +"271 Constant_48501" -> "270 /layer2/layer2.1/conv2/Conv/WithoutBiases/fq_weights_1" [label="[128, 1, 1, 1]", style=solid]; +"272 Constant_48500" -> "270 /layer2/layer2.1/conv2/Conv/WithoutBiases/fq_weights_1" [label="[128, 1, 1, 1]", style=solid]; +"273 Constant_48499" -> "270 /layer2/layer2.1/conv2/Conv/WithoutBiases/fq_weights_1" [label="[128, 1, 1, 1]", style=solid]; +"274 Constant_48498" -> "270 /layer2/layer2.1/conv2/Conv/WithoutBiases/fq_weights_1" [label="[128, 1, 1, 1]", style=solid]; "275 onnx^^Conv_220" -> "270 
/layer2/layer2.1/conv2/Conv/WithoutBiases/fq_weights_1" [label="[128, 128, 3, 3]", style=solid]; -"276 Constant_121097" -> "50 /layer2/layer2.1/relu/Relu/fq_output_0" [label="[]", style=solid]; -"277 Constant_121096" -> "50 /layer2/layer2.1/relu/Relu/fq_output_0" [label="[]", style=solid]; -"278 Constant_121095" -> "50 /layer2/layer2.1/relu/Relu/fq_output_0" [label="[]", style=solid]; -"279 Constant_121094" -> "50 /layer2/layer2.1/relu/Relu/fq_output_0" [label="[]", style=solid]; +"276 Constant_48496" -> "50 /layer2/layer2.1/relu/Relu/fq_output_0" [label="[]", style=solid]; +"277 Constant_48495" -> "50 /layer2/layer2.1/relu/Relu/fq_output_0" [label="[]", style=solid]; +"278 Constant_48494" -> "50 /layer2/layer2.1/relu/Relu/fq_output_0" [label="[]", style=solid]; +"279 Constant_48493" -> "50 /layer2/layer2.1/relu/Relu/fq_output_0" [label="[]", style=solid]; "280 Reshape_186" -> "45 /layer2/layer2.1/conv1/Conv" [label="[1, 128, 1, 1]", style=solid]; "281 /layer2/layer2.1/conv1/Conv/WithoutBiases/fq_weights_1" -> "43 /layer2/layer2.1/conv1/Conv/WithoutBiases" [label="[128, 128, 3, 3]", style=solid]; -"282 Constant_121092" -> "281 /layer2/layer2.1/conv1/Conv/WithoutBiases/fq_weights_1" [label="[128, 1, 1, 1]", style=solid]; -"283 Constant_121091" -> "281 /layer2/layer2.1/conv1/Conv/WithoutBiases/fq_weights_1" [label="[128, 1, 1, 1]", style=solid]; -"284 Constant_121090" -> "281 /layer2/layer2.1/conv1/Conv/WithoutBiases/fq_weights_1" [label="[128, 1, 1, 1]", style=solid]; -"285 Constant_121089" -> "281 /layer2/layer2.1/conv1/Conv/WithoutBiases/fq_weights_1" [label="[128, 1, 1, 1]", style=solid]; +"282 Constant_48491" -> "281 /layer2/layer2.1/conv1/Conv/WithoutBiases/fq_weights_1" [label="[128, 1, 1, 1]", style=solid]; +"283 Constant_48490" -> "281 /layer2/layer2.1/conv1/Conv/WithoutBiases/fq_weights_1" [label="[128, 1, 1, 1]", style=solid]; +"284 Constant_48489" -> "281 /layer2/layer2.1/conv1/Conv/WithoutBiases/fq_weights_1" [label="[128, 1, 1, 1]", style=solid]; +"285 Constant_48488" -> "281 /layer2/layer2.1/conv1/Conv/WithoutBiases/fq_weights_1" [label="[128, 1, 1, 1]", style=solid]; "286 onnx^^Conv_217" -> "281 /layer2/layer2.1/conv1/Conv/WithoutBiases/fq_weights_1" [label="[128, 128, 3, 3]", style=solid]; -"287 Constant_121142" -> "64 /layer3/layer3.0/conv2/Conv/fq_output_0" [label="[]", style=solid]; -"288 Constant_121141" -> "64 /layer3/layer3.0/conv2/Conv/fq_output_0" [label="[]", style=solid]; -"289 Constant_121140" -> "64 /layer3/layer3.0/conv2/Conv/fq_output_0" [label="[]", style=solid]; -"290 Constant_121139" -> "64 /layer3/layer3.0/conv2/Conv/fq_output_0" [label="[]", style=solid]; +"287 Constant_48541" -> "64 /layer3/layer3.0/conv2/Conv/fq_output_0" [label="[]", style=solid]; +"288 Constant_48540" -> "64 /layer3/layer3.0/conv2/Conv/fq_output_0" [label="[]", style=solid]; +"289 Constant_48539" -> "64 /layer3/layer3.0/conv2/Conv/fq_output_0" [label="[]", style=solid]; +"290 Constant_48538" -> "64 /layer3/layer3.0/conv2/Conv/fq_output_0" [label="[]", style=solid]; "291 Reshape_235" -> "62 /layer3/layer3.0/conv2/Conv" [label="[1, 256, 1, 1]", style=solid]; "292 /layer3/layer3.0/conv2/Conv/WithoutBiases/fq_weights_1" -> "60 /layer3/layer3.0/conv2/Conv/WithoutBiases" [label="[256, 256, 3, 3]", style=solid]; -"293 Constant_121137" -> "292 /layer3/layer3.0/conv2/Conv/WithoutBiases/fq_weights_1" [label="[256, 1, 1, 1]", style=solid]; -"294 Constant_121136" -> "292 /layer3/layer3.0/conv2/Conv/WithoutBiases/fq_weights_1" [label="[256, 1, 1, 1]", style=solid]; -"295 Constant_121135" -> "292 
/layer3/layer3.0/conv2/Conv/WithoutBiases/fq_weights_1" [label="[256, 1, 1, 1]", style=solid]; -"296 Constant_121134" -> "292 /layer3/layer3.0/conv2/Conv/WithoutBiases/fq_weights_1" [label="[256, 1, 1, 1]", style=solid]; +"293 Constant_48536" -> "292 /layer3/layer3.0/conv2/Conv/WithoutBiases/fq_weights_1" [label="[256, 1, 1, 1]", style=solid]; +"294 Constant_48535" -> "292 /layer3/layer3.0/conv2/Conv/WithoutBiases/fq_weights_1" [label="[256, 1, 1, 1]", style=solid]; +"295 Constant_48534" -> "292 /layer3/layer3.0/conv2/Conv/WithoutBiases/fq_weights_1" [label="[256, 1, 1, 1]", style=solid]; +"296 Constant_48533" -> "292 /layer3/layer3.0/conv2/Conv/WithoutBiases/fq_weights_1" [label="[256, 1, 1, 1]", style=solid]; "297 onnx^^Conv_226" -> "292 /layer3/layer3.0/conv2/Conv/WithoutBiases/fq_weights_1" [label="[256, 256, 3, 3]", style=solid]; -"298 Constant_121132" -> "57 /layer3/layer3.0/relu/Relu/fq_output_0" [label="[]", style=solid]; -"299 Constant_121131" -> "57 /layer3/layer3.0/relu/Relu/fq_output_0" [label="[]", style=solid]; -"300 Constant_121130" -> "57 /layer3/layer3.0/relu/Relu/fq_output_0" [label="[]", style=solid]; -"301 Constant_121129" -> "57 /layer3/layer3.0/relu/Relu/fq_output_0" [label="[]", style=solid]; +"298 Constant_48531" -> "57 /layer3/layer3.0/relu/Relu/fq_output_0" [label="[]", style=solid]; +"299 Constant_48530" -> "57 /layer3/layer3.0/relu/Relu/fq_output_0" [label="[]", style=solid]; +"300 Constant_48529" -> "57 /layer3/layer3.0/relu/Relu/fq_output_0" [label="[]", style=solid]; +"301 Constant_48528" -> "57 /layer3/layer3.0/relu/Relu/fq_output_0" [label="[]", style=solid]; "302 Reshape_219" -> "51 /layer3/layer3.0/conv1/Conv" [label="[1, 256, 1, 1]", style=solid]; "303 /layer3/layer3.0/conv1/Conv/WithoutBiases/fq_weights_1" -> "48 /layer3/layer3.0/conv1/Conv/WithoutBiases" [label="[256, 128, 3, 3]", style=solid]; -"304 Constant_121127" -> "303 /layer3/layer3.0/conv1/Conv/WithoutBiases/fq_weights_1" [label="[256, 1, 1, 1]", style=solid]; -"305 Constant_121126" -> "303 /layer3/layer3.0/conv1/Conv/WithoutBiases/fq_weights_1" [label="[256, 1, 1, 1]", style=solid]; -"306 Constant_121125" -> "303 /layer3/layer3.0/conv1/Conv/WithoutBiases/fq_weights_1" [label="[256, 1, 1, 1]", style=solid]; -"307 Constant_121124" -> "303 /layer3/layer3.0/conv1/Conv/WithoutBiases/fq_weights_1" [label="[256, 1, 1, 1]", style=solid]; +"304 Constant_48526" -> "303 /layer3/layer3.0/conv1/Conv/WithoutBiases/fq_weights_1" [label="[256, 1, 1, 1]", style=solid]; +"305 Constant_48525" -> "303 /layer3/layer3.0/conv1/Conv/WithoutBiases/fq_weights_1" [label="[256, 1, 1, 1]", style=solid]; +"306 Constant_48524" -> "303 /layer3/layer3.0/conv1/Conv/WithoutBiases/fq_weights_1" [label="[256, 1, 1, 1]", style=solid]; +"307 Constant_48523" -> "303 /layer3/layer3.0/conv1/Conv/WithoutBiases/fq_weights_1" [label="[256, 1, 1, 1]", style=solid]; "308 onnx^^Conv_223" -> "303 /layer3/layer3.0/conv1/Conv/WithoutBiases/fq_weights_1" [label="[256, 128, 3, 3]", style=solid]; -"309 Constant_121167" -> "82 /layer3/layer3.1/conv2/Conv/fq_output_0" [label="[]", style=solid]; -"310 Constant_121166" -> "82 /layer3/layer3.1/conv2/Conv/fq_output_0" [label="[]", style=solid]; -"311 Constant_121165" -> "82 /layer3/layer3.1/conv2/Conv/fq_output_0" [label="[]", style=solid]; -"312 Constant_121164" -> "82 /layer3/layer3.1/conv2/Conv/fq_output_0" [label="[]", style=solid]; +"309 Constant_48566" -> "82 /layer3/layer3.1/conv2/Conv/fq_output_0" [label="[]", style=solid]; +"310 Constant_48565" -> "82 /layer3/layer3.1/conv2/Conv/fq_output_0" 
[label="[]", style=solid]; +"311 Constant_48564" -> "82 /layer3/layer3.1/conv2/Conv/fq_output_0" [label="[]", style=solid]; +"312 Constant_48563" -> "82 /layer3/layer3.1/conv2/Conv/fq_output_0" [label="[]", style=solid]; "313 Reshape_283" -> "79 /layer3/layer3.1/conv2/Conv" [label="[1, 256, 1, 1]", style=solid]; "314 /layer3/layer3.1/conv2/Conv/WithoutBiases/fq_weights_1" -> "76 /layer3/layer3.1/conv2/Conv/WithoutBiases" [label="[256, 256, 3, 3]", style=solid]; -"315 Constant_121162" -> "314 /layer3/layer3.1/conv2/Conv/WithoutBiases/fq_weights_1" [label="[256, 1, 1, 1]", style=solid]; -"316 Constant_121161" -> "314 /layer3/layer3.1/conv2/Conv/WithoutBiases/fq_weights_1" [label="[256, 1, 1, 1]", style=solid]; -"317 Constant_121160" -> "314 /layer3/layer3.1/conv2/Conv/WithoutBiases/fq_weights_1" [label="[256, 1, 1, 1]", style=solid]; -"318 Constant_121159" -> "314 /layer3/layer3.1/conv2/Conv/WithoutBiases/fq_weights_1" [label="[256, 1, 1, 1]", style=solid]; +"315 Constant_48561" -> "314 /layer3/layer3.1/conv2/Conv/WithoutBiases/fq_weights_1" [label="[256, 1, 1, 1]", style=solid]; +"316 Constant_48560" -> "314 /layer3/layer3.1/conv2/Conv/WithoutBiases/fq_weights_1" [label="[256, 1, 1, 1]", style=solid]; +"317 Constant_48559" -> "314 /layer3/layer3.1/conv2/Conv/WithoutBiases/fq_weights_1" [label="[256, 1, 1, 1]", style=solid]; +"318 Constant_48558" -> "314 /layer3/layer3.1/conv2/Conv/WithoutBiases/fq_weights_1" [label="[256, 1, 1, 1]", style=solid]; "319 onnx^^Conv_235" -> "314 /layer3/layer3.1/conv2/Conv/WithoutBiases/fq_weights_1" [label="[256, 256, 3, 3]", style=solid]; -"320 Constant_121157" -> "73 /layer3/layer3.1/relu/Relu/fq_output_0" [label="[]", style=solid]; -"321 Constant_121156" -> "73 /layer3/layer3.1/relu/Relu/fq_output_0" [label="[]", style=solid]; -"322 Constant_121155" -> "73 /layer3/layer3.1/relu/Relu/fq_output_0" [label="[]", style=solid]; -"323 Constant_121154" -> "73 /layer3/layer3.1/relu/Relu/fq_output_0" [label="[]", style=solid]; +"320 Constant_48556" -> "73 /layer3/layer3.1/relu/Relu/fq_output_0" [label="[]", style=solid]; +"321 Constant_48555" -> "73 /layer3/layer3.1/relu/Relu/fq_output_0" [label="[]", style=solid]; +"322 Constant_48554" -> "73 /layer3/layer3.1/relu/Relu/fq_output_0" [label="[]", style=solid]; +"323 Constant_48553" -> "73 /layer3/layer3.1/relu/Relu/fq_output_0" [label="[]", style=solid]; "324 Reshape_267" -> "68 /layer3/layer3.1/conv1/Conv" [label="[1, 256, 1, 1]", style=solid]; "325 /layer3/layer3.1/conv1/Conv/WithoutBiases/fq_weights_1" -> "66 /layer3/layer3.1/conv1/Conv/WithoutBiases" [label="[256, 256, 3, 3]", style=solid]; -"326 Constant_121152" -> "325 /layer3/layer3.1/conv1/Conv/WithoutBiases/fq_weights_1" [label="[256, 1, 1, 1]", style=solid]; -"327 Constant_121151" -> "325 /layer3/layer3.1/conv1/Conv/WithoutBiases/fq_weights_1" [label="[256, 1, 1, 1]", style=solid]; -"328 Constant_121150" -> "325 /layer3/layer3.1/conv1/Conv/WithoutBiases/fq_weights_1" [label="[256, 1, 1, 1]", style=solid]; -"329 Constant_121149" -> "325 /layer3/layer3.1/conv1/Conv/WithoutBiases/fq_weights_1" [label="[256, 1, 1, 1]", style=solid]; +"326 Constant_48551" -> "325 /layer3/layer3.1/conv1/Conv/WithoutBiases/fq_weights_1" [label="[256, 1, 1, 1]", style=solid]; +"327 Constant_48550" -> "325 /layer3/layer3.1/conv1/Conv/WithoutBiases/fq_weights_1" [label="[256, 1, 1, 1]", style=solid]; +"328 Constant_48549" -> "325 /layer3/layer3.1/conv1/Conv/WithoutBiases/fq_weights_1" [label="[256, 1, 1, 1]", style=solid]; +"329 Constant_48548" -> "325 
/layer3/layer3.1/conv1/Conv/WithoutBiases/fq_weights_1" [label="[256, 1, 1, 1]", style=solid]; "330 onnx^^Conv_232" -> "325 /layer3/layer3.1/conv1/Conv/WithoutBiases/fq_weights_1" [label="[256, 256, 3, 3]", style=solid]; -"331 Constant_121202" -> "87 /layer4/layer4.0/conv2/Conv/fq_output_0" [label="[]", style=solid]; -"332 Constant_121201" -> "87 /layer4/layer4.0/conv2/Conv/fq_output_0" [label="[]", style=solid]; -"333 Constant_121200" -> "87 /layer4/layer4.0/conv2/Conv/fq_output_0" [label="[]", style=solid]; -"334 Constant_121199" -> "87 /layer4/layer4.0/conv2/Conv/fq_output_0" [label="[]", style=solid]; +"331 Constant_48601" -> "87 /layer4/layer4.0/conv2/Conv/fq_output_0" [label="[]", style=solid]; +"332 Constant_48600" -> "87 /layer4/layer4.0/conv2/Conv/fq_output_0" [label="[]", style=solid]; +"333 Constant_48599" -> "87 /layer4/layer4.0/conv2/Conv/fq_output_0" [label="[]", style=solid]; +"334 Constant_48598" -> "87 /layer4/layer4.0/conv2/Conv/fq_output_0" [label="[]", style=solid]; "335 Reshape_316" -> "85 /layer4/layer4.0/conv2/Conv" [label="[1, 512, 1, 1]", style=solid]; "336 /layer4/layer4.0/conv2/Conv/WithoutBiases/fq_weights_1" -> "83 /layer4/layer4.0/conv2/Conv/WithoutBiases" [label="[512, 512, 3, 3]", style=solid]; -"337 Constant_121197" -> "336 /layer4/layer4.0/conv2/Conv/WithoutBiases/fq_weights_1" [label="[512, 1, 1, 1]", style=solid]; -"338 Constant_121196" -> "336 /layer4/layer4.0/conv2/Conv/WithoutBiases/fq_weights_1" [label="[512, 1, 1, 1]", style=solid]; -"339 Constant_121195" -> "336 /layer4/layer4.0/conv2/Conv/WithoutBiases/fq_weights_1" [label="[512, 1, 1, 1]", style=solid]; -"340 Constant_121194" -> "336 /layer4/layer4.0/conv2/Conv/WithoutBiases/fq_weights_1" [label="[512, 1, 1, 1]", style=solid]; +"337 Constant_48596" -> "336 /layer4/layer4.0/conv2/Conv/WithoutBiases/fq_weights_1" [label="[512, 1, 1, 1]", style=solid]; +"338 Constant_48595" -> "336 /layer4/layer4.0/conv2/Conv/WithoutBiases/fq_weights_1" [label="[512, 1, 1, 1]", style=solid]; +"339 Constant_48594" -> "336 /layer4/layer4.0/conv2/Conv/WithoutBiases/fq_weights_1" [label="[512, 1, 1, 1]", style=solid]; +"340 Constant_48593" -> "336 /layer4/layer4.0/conv2/Conv/WithoutBiases/fq_weights_1" [label="[512, 1, 1, 1]", style=solid]; "341 onnx^^Conv_241" -> "336 /layer4/layer4.0/conv2/Conv/WithoutBiases/fq_weights_1" [label="[512, 512, 3, 3]", style=solid]; -"342 Constant_121192" -> "80 /layer4/layer4.0/relu/Relu/fq_output_0" [label="[]", style=solid]; -"343 Constant_121191" -> "80 /layer4/layer4.0/relu/Relu/fq_output_0" [label="[]", style=solid]; -"344 Constant_121190" -> "80 /layer4/layer4.0/relu/Relu/fq_output_0" [label="[]", style=solid]; -"345 Constant_121189" -> "80 /layer4/layer4.0/relu/Relu/fq_output_0" [label="[]", style=solid]; +"342 Constant_48591" -> "80 /layer4/layer4.0/relu/Relu/fq_output_0" [label="[]", style=solid]; +"343 Constant_48590" -> "80 /layer4/layer4.0/relu/Relu/fq_output_0" [label="[]", style=solid]; +"344 Constant_48589" -> "80 /layer4/layer4.0/relu/Relu/fq_output_0" [label="[]", style=solid]; +"345 Constant_48588" -> "80 /layer4/layer4.0/relu/Relu/fq_output_0" [label="[]", style=solid]; "346 Reshape_300" -> "74 /layer4/layer4.0/conv1/Conv" [label="[1, 512, 1, 1]", style=solid]; "347 /layer4/layer4.0/conv1/Conv/WithoutBiases/fq_weights_1" -> "71 /layer4/layer4.0/conv1/Conv/WithoutBiases" [label="[512, 256, 3, 3]", style=solid]; -"348 Constant_121187" -> "347 /layer4/layer4.0/conv1/Conv/WithoutBiases/fq_weights_1" [label="[512, 1, 1, 1]", style=solid]; -"349 Constant_121186" -> "347 
/layer4/layer4.0/conv1/Conv/WithoutBiases/fq_weights_1" [label="[512, 1, 1, 1]", style=solid]; -"350 Constant_121185" -> "347 /layer4/layer4.0/conv1/Conv/WithoutBiases/fq_weights_1" [label="[512, 1, 1, 1]", style=solid]; -"351 Constant_121184" -> "347 /layer4/layer4.0/conv1/Conv/WithoutBiases/fq_weights_1" [label="[512, 1, 1, 1]", style=solid]; +"348 Constant_48586" -> "347 /layer4/layer4.0/conv1/Conv/WithoutBiases/fq_weights_1" [label="[512, 1, 1, 1]", style=solid]; +"349 Constant_48585" -> "347 /layer4/layer4.0/conv1/Conv/WithoutBiases/fq_weights_1" [label="[512, 1, 1, 1]", style=solid]; +"350 Constant_48584" -> "347 /layer4/layer4.0/conv1/Conv/WithoutBiases/fq_weights_1" [label="[512, 1, 1, 1]", style=solid]; +"351 Constant_48583" -> "347 /layer4/layer4.0/conv1/Conv/WithoutBiases/fq_weights_1" [label="[512, 1, 1, 1]", style=solid]; "352 onnx^^Conv_238" -> "347 /layer4/layer4.0/conv1/Conv/WithoutBiases/fq_weights_1" [label="[512, 256, 3, 3]", style=solid]; -"353 Constant_121227" -> "101 /layer4/layer4.1/conv2/Conv/fq_output_0" [label="[]", style=solid]; -"354 Constant_121226" -> "101 /layer4/layer4.1/conv2/Conv/fq_output_0" [label="[]", style=solid]; -"355 Constant_121225" -> "101 /layer4/layer4.1/conv2/Conv/fq_output_0" [label="[]", style=solid]; -"356 Constant_121224" -> "101 /layer4/layer4.1/conv2/Conv/fq_output_0" [label="[]", style=solid]; +"353 Constant_48626" -> "101 /layer4/layer4.1/conv2/Conv/fq_output_0" [label="[]", style=solid]; +"354 Constant_48625" -> "101 /layer4/layer4.1/conv2/Conv/fq_output_0" [label="[]", style=solid]; +"355 Constant_48624" -> "101 /layer4/layer4.1/conv2/Conv/fq_output_0" [label="[]", style=solid]; +"356 Constant_48623" -> "101 /layer4/layer4.1/conv2/Conv/fq_output_0" [label="[]", style=solid]; "357 Reshape_364" -> "99 /layer4/layer4.1/conv2/Conv" [label="[1, 512, 1, 1]", style=solid]; "358 /layer4/layer4.1/conv2/Conv/WithoutBiases/fq_weights_1" -> "97 /layer4/layer4.1/conv2/Conv/WithoutBiases" [label="[512, 512, 3, 3]", style=solid]; -"359 Constant_121222" -> "358 /layer4/layer4.1/conv2/Conv/WithoutBiases/fq_weights_1" [label="[512, 1, 1, 1]", style=solid]; -"360 Constant_121221" -> "358 /layer4/layer4.1/conv2/Conv/WithoutBiases/fq_weights_1" [label="[512, 1, 1, 1]", style=solid]; -"361 Constant_121220" -> "358 /layer4/layer4.1/conv2/Conv/WithoutBiases/fq_weights_1" [label="[512, 1, 1, 1]", style=solid]; -"362 Constant_121219" -> "358 /layer4/layer4.1/conv2/Conv/WithoutBiases/fq_weights_1" [label="[512, 1, 1, 1]", style=solid]; +"359 Constant_48621" -> "358 /layer4/layer4.1/conv2/Conv/WithoutBiases/fq_weights_1" [label="[512, 1, 1, 1]", style=solid]; +"360 Constant_48620" -> "358 /layer4/layer4.1/conv2/Conv/WithoutBiases/fq_weights_1" [label="[512, 1, 1, 1]", style=solid]; +"361 Constant_48619" -> "358 /layer4/layer4.1/conv2/Conv/WithoutBiases/fq_weights_1" [label="[512, 1, 1, 1]", style=solid]; +"362 Constant_48618" -> "358 /layer4/layer4.1/conv2/Conv/WithoutBiases/fq_weights_1" [label="[512, 1, 1, 1]", style=solid]; "363 onnx^^Conv_250" -> "358 /layer4/layer4.1/conv2/Conv/WithoutBiases/fq_weights_1" [label="[512, 512, 3, 3]", style=solid]; -"364 Constant_121217" -> "95 /layer4/layer4.1/relu/Relu/fq_output_0" [label="[]", style=solid]; -"365 Constant_121216" -> "95 /layer4/layer4.1/relu/Relu/fq_output_0" [label="[]", style=solid]; -"366 Constant_121215" -> "95 /layer4/layer4.1/relu/Relu/fq_output_0" [label="[]", style=solid]; -"367 Constant_121214" -> "95 /layer4/layer4.1/relu/Relu/fq_output_0" [label="[]", style=solid]; +"364 Constant_48616" -> "95 
/layer4/layer4.1/relu/Relu/fq_output_0" [label="[]", style=solid]; +"365 Constant_48615" -> "95 /layer4/layer4.1/relu/Relu/fq_output_0" [label="[]", style=solid]; +"366 Constant_48614" -> "95 /layer4/layer4.1/relu/Relu/fq_output_0" [label="[]", style=solid]; +"367 Constant_48613" -> "95 /layer4/layer4.1/relu/Relu/fq_output_0" [label="[]", style=solid]; "368 Reshape_348" -> "91 /layer4/layer4.1/conv1/Conv" [label="[1, 512, 1, 1]", style=solid]; "369 /layer4/layer4.1/conv1/Conv/WithoutBiases/fq_weights_1" -> "89 /layer4/layer4.1/conv1/Conv/WithoutBiases" [label="[512, 512, 3, 3]", style=solid]; -"370 Constant_121212" -> "369 /layer4/layer4.1/conv1/Conv/WithoutBiases/fq_weights_1" [label="[512, 1, 1, 1]", style=solid]; -"371 Constant_121211" -> "369 /layer4/layer4.1/conv1/Conv/WithoutBiases/fq_weights_1" [label="[512, 1, 1, 1]", style=solid]; -"372 Constant_121210" -> "369 /layer4/layer4.1/conv1/Conv/WithoutBiases/fq_weights_1" [label="[512, 1, 1, 1]", style=solid]; -"373 Constant_121209" -> "369 /layer4/layer4.1/conv1/Conv/WithoutBiases/fq_weights_1" [label="[512, 1, 1, 1]", style=solid]; +"370 Constant_48611" -> "369 /layer4/layer4.1/conv1/Conv/WithoutBiases/fq_weights_1" [label="[512, 1, 1, 1]", style=solid]; +"371 Constant_48610" -> "369 /layer4/layer4.1/conv1/Conv/WithoutBiases/fq_weights_1" [label="[512, 1, 1, 1]", style=solid]; +"372 Constant_48609" -> "369 /layer4/layer4.1/conv1/Conv/WithoutBiases/fq_weights_1" [label="[512, 1, 1, 1]", style=solid]; +"373 Constant_48608" -> "369 /layer4/layer4.1/conv1/Conv/WithoutBiases/fq_weights_1" [label="[512, 1, 1, 1]", style=solid]; "374 onnx^^Conv_247" -> "369 /layer4/layer4.1/conv1/Conv/WithoutBiases/fq_weights_1" [label="[512, 512, 3, 3]", style=solid]; } diff --git a/tests/openvino/native/data/reference_graphs/quantized/resnet-50-pytorch_performance_CPU_SPR.dot b/tests/openvino/native/data/reference_graphs/quantized/resnet-50-pytorch_performance_CPU_SPR.dot new file mode 100644 index 00000000000..e318d57f89b --- /dev/null +++ b/tests/openvino/native/data/reference_graphs/quantized/resnet-50-pytorch_performance_CPU_SPR.dot @@ -0,0 +1,1691 @@ +strict digraph { +"0 data" [id=0, type=Parameter]; +"1 Multiply_2745" [id=1, type=Multiply]; +"2 Divide_1002" [id=2, type=Add]; +"3 Divide_1002/fq_output_0" [id=3, type=FakeQuantize]; +"4 /conv1/Conv/WithoutBiases" [id=4, type=Convolution]; +"5 /conv1/Conv" [id=5, type=Add]; +"6 /relu/Relu" [id=6, type=Relu]; +"7 /relu/Relu/fq_output_0" [id=7, type=FakeQuantize]; +"8 /maxpool/MaxPool" [id=8, type=MaxPool]; +"9 /layer1/layer1.0/conv1/Conv/WithoutBiases" [id=9, type=Convolution]; +"10 /layer1/layer1.0/downsample/downsample.0/Conv/WithoutBiases" [id=10, type=Convolution]; +"11 /layer1/layer1.0/conv1/Conv" [id=11, type=Add]; +"12 /layer1/layer1.0/downsample/downsample.0/Conv" [id=12, type=Add]; +"13 /layer1/layer1.0/relu/Relu" [id=13, type=Relu]; +"14 /layer1/layer1.0/Add" [id=14, type=Add]; +"15 /layer1/layer1.0/relu/Relu/fq_output_0" [id=15, type=FakeQuantize]; +"16 /layer1/layer1.0/relu_2/Relu" [id=16, type=Relu]; +"17 /layer1/layer1.0/conv2/Conv/WithoutBiases" [id=17, type=Convolution]; +"18 /layer1/layer1.0/relu_2/Relu/fq_output_0" [id=18, type=FakeQuantize]; +"19 /layer1/layer1.0/conv2/Conv" [id=19, type=Add]; +"20 /layer1/layer1.1/Add" [id=20, type=Add]; +"21 /layer1/layer1.1/conv1/Conv/WithoutBiases" [id=21, type=Convolution]; +"22 /layer1/layer1.0/relu_1/Relu" [id=22, type=Relu]; +"23 /layer1/layer1.1/relu_2/Relu" [id=23, type=Relu]; +"24 /layer1/layer1.1/conv1/Conv" [id=24, type=Add]; +"25 
/layer1/layer1.0/relu_1/Relu/fq_output_0" [id=25, type=FakeQuantize]; +"26 /layer1/layer1.1/relu_2/Relu/fq_output_0" [id=26, type=FakeQuantize]; +"27 /layer1/layer1.1/relu/Relu" [id=27, type=Relu]; +"28 /layer1/layer1.0/conv3/Conv/WithoutBiases" [id=28, type=Convolution]; +"29 /layer1/layer1.2/Add" [id=29, type=Add]; +"30 /layer1/layer1.2/conv1/Conv/WithoutBiases" [id=30, type=Convolution]; +"31 /layer1/layer1.1/relu/Relu/fq_output_0" [id=31, type=FakeQuantize]; +"32 /layer1/layer1.0/conv3/Conv" [id=32, type=Add]; +"33 /layer1/layer1.2/relu_2/Relu" [id=33, type=Relu]; +"34 /layer1/layer1.2/conv1/Conv" [id=34, type=Add]; +"35 /layer1/layer1.1/conv2/Conv/WithoutBiases" [id=35, type=Convolution]; +"36 /layer1/layer1.0/conv3/Conv/fq_output_0" [id=36, type=FakeQuantize]; +"37 /layer1/layer1.2/relu_2/Relu/fq_output_0" [id=37, type=FakeQuantize]; +"38 /layer1/layer1.2/relu/Relu" [id=38, type=Relu]; +"39 /layer1/layer1.1/conv2/Conv" [id=39, type=Add]; +"40 /layer2/layer2.0/conv1/Conv/WithoutBiases" [id=40, type=Convolution]; +"41 /layer2/layer2.0/downsample/downsample.0/Conv/WithoutBiases" [id=41, type=Convolution]; +"42 /layer1/layer1.2/relu/Relu/fq_output_0" [id=42, type=FakeQuantize]; +"43 /layer1/layer1.1/relu_1/Relu" [id=43, type=Relu]; +"44 /layer2/layer2.0/conv1/Conv" [id=44, type=Add]; +"45 /layer2/layer2.0/downsample/downsample.0/Conv" [id=45, type=Add]; +"46 /layer1/layer1.2/conv2/Conv/WithoutBiases" [id=46, type=Convolution]; +"47 /layer1/layer1.1/relu_1/Relu/fq_output_0" [id=47, type=FakeQuantize]; +"48 /layer2/layer2.0/relu/Relu" [id=48, type=Relu]; +"49 /layer2/layer2.0/Add" [id=49, type=Add]; +"50 /layer1/layer1.2/conv2/Conv" [id=50, type=Add]; +"51 /layer1/layer1.1/conv3/Conv/WithoutBiases" [id=51, type=Convolution]; +"52 /layer2/layer2.0/relu/Relu/fq_output_0" [id=52, type=FakeQuantize]; +"53 /layer2/layer2.0/relu_2/Relu" [id=53, type=Relu]; +"54 /layer1/layer1.2/relu_1/Relu" [id=54, type=Relu]; +"55 /layer1/layer1.1/conv3/Conv" [id=55, type=Add]; +"56 /layer2/layer2.0/conv2/Conv/WithoutBiases" [id=56, type=Convolution]; +"57 /layer2/layer2.0/relu_2/Relu/fq_output_0" [id=57, type=FakeQuantize]; +"58 /layer1/layer1.2/relu_1/Relu/fq_output_0" [id=58, type=FakeQuantize]; +"59 /layer2/layer2.0/conv2/Conv" [id=59, type=Add]; +"60 /layer2/layer2.1/Add" [id=60, type=Add]; +"61 /layer2/layer2.1/conv1/Conv/WithoutBiases" [id=61, type=Convolution]; +"62 /layer1/layer1.2/conv3/Conv/WithoutBiases" [id=62, type=Convolution]; +"63 /layer2/layer2.0/relu_1/Relu" [id=63, type=Relu]; +"64 /layer2/layer2.1/relu_2/Relu" [id=64, type=Relu]; +"65 /layer2/layer2.1/conv1/Conv" [id=65, type=Add]; +"66 /layer1/layer1.2/conv3/Conv" [id=66, type=Add]; +"67 /layer2/layer2.0/relu_1/Relu/fq_output_0" [id=67, type=FakeQuantize]; +"68 /layer2/layer2.1/relu_2/Relu/fq_output_0" [id=68, type=FakeQuantize]; +"69 /layer2/layer2.1/relu/Relu" [id=69, type=Relu]; +"70 /layer2/layer2.0/conv3/Conv/WithoutBiases" [id=70, type=Convolution]; +"71 /layer2/layer2.2/Add" [id=71, type=Add]; +"72 /layer2/layer2.2/conv1/Conv/WithoutBiases" [id=72, type=Convolution]; +"73 /layer2/layer2.1/relu/Relu/fq_output_0" [id=73, type=FakeQuantize]; +"74 /layer2/layer2.0/conv3/Conv" [id=74, type=Add]; +"75 /layer2/layer2.2/relu_2/Relu" [id=75, type=Relu]; +"76 /layer2/layer2.2/conv1/Conv" [id=76, type=Add]; +"77 /layer2/layer2.1/conv2/Conv/WithoutBiases" [id=77, type=Convolution]; +"78 /layer2/layer2.0/conv3/Conv/fq_output_0" [id=78, type=FakeQuantize]; +"79 /layer2/layer2.2/relu_2/Relu/fq_output_0" [id=79, type=FakeQuantize]; +"80 
/layer2/layer2.2/relu/Relu" [id=80, type=Relu]; +"81 /layer2/layer2.1/conv2/Conv" [id=81, type=Add]; +"82 /layer2/layer2.3/Add" [id=82, type=Add]; +"83 /layer2/layer2.3/conv1/Conv/WithoutBiases" [id=83, type=Convolution]; +"84 /layer2/layer2.2/relu/Relu/fq_output_0" [id=84, type=FakeQuantize]; +"85 /layer2/layer2.1/relu_1/Relu" [id=85, type=Relu]; +"86 /layer2/layer2.3/relu_2/Relu" [id=86, type=Relu]; +"87 /layer2/layer2.3/conv1/Conv" [id=87, type=Add]; +"88 /layer2/layer2.2/conv2/Conv/WithoutBiases" [id=88, type=Convolution]; +"89 /layer2/layer2.1/relu_1/Relu/fq_output_0" [id=89, type=FakeQuantize]; +"90 /layer2/layer2.3/relu_2/Relu/fq_output_0" [id=90, type=FakeQuantize]; +"91 /layer2/layer2.3/relu/Relu" [id=91, type=Relu]; +"92 /layer2/layer2.2/conv2/Conv" [id=92, type=Add]; +"93 /layer2/layer2.1/conv3/Conv/WithoutBiases" [id=93, type=Convolution]; +"94 /layer3/layer3.0/conv1/Conv/WithoutBiases" [id=94, type=Convolution]; +"95 /layer3/layer3.0/downsample/downsample.0/Conv/WithoutBiases" [id=95, type=Convolution]; +"96 /layer2/layer2.3/relu/Relu/fq_output_0" [id=96, type=FakeQuantize]; +"97 /layer2/layer2.2/relu_1/Relu" [id=97, type=Relu]; +"98 /layer2/layer2.1/conv3/Conv" [id=98, type=Add]; +"99 /layer3/layer3.0/conv1/Conv" [id=99, type=Add]; +"100 /layer3/layer3.0/downsample/downsample.0/Conv" [id=100, type=Add]; +"101 /layer2/layer2.3/conv2/Conv/WithoutBiases" [id=101, type=Convolution]; +"102 /layer2/layer2.2/relu_1/Relu/fq_output_0" [id=102, type=FakeQuantize]; +"103 /layer3/layer3.0/relu/Relu" [id=103, type=Relu]; +"104 /layer3/layer3.0/Add" [id=104, type=Add]; +"105 /layer2/layer2.3/conv2/Conv" [id=105, type=Add]; +"106 /layer2/layer2.2/conv3/Conv/WithoutBiases" [id=106, type=Convolution]; +"107 /layer3/layer3.0/relu/Relu/fq_output_0" [id=107, type=FakeQuantize]; +"108 /layer3/layer3.0/relu_2/Relu" [id=108, type=Relu]; +"109 /layer2/layer2.3/relu_1/Relu" [id=109, type=Relu]; +"110 /layer2/layer2.2/conv3/Conv" [id=110, type=Add]; +"111 /layer3/layer3.0/conv2/Conv/WithoutBiases" [id=111, type=Convolution]; +"112 /layer3/layer3.0/relu_2/Relu/fq_output_0" [id=112, type=FakeQuantize]; +"113 /layer2/layer2.3/relu_1/Relu/fq_output_0" [id=113, type=FakeQuantize]; +"114 /layer3/layer3.0/conv2/Conv" [id=114, type=Add]; +"115 /layer3/layer3.1/Add" [id=115, type=Add]; +"116 /layer3/layer3.1/conv1/Conv/WithoutBiases" [id=116, type=Convolution]; +"117 /layer2/layer2.3/conv3/Conv/WithoutBiases" [id=117, type=Convolution]; +"118 /layer3/layer3.0/relu_1/Relu" [id=118, type=Relu]; +"119 /layer3/layer3.1/relu_2/Relu" [id=119, type=Relu]; +"120 /layer3/layer3.1/conv1/Conv" [id=120, type=Add]; +"121 /layer2/layer2.3/conv3/Conv" [id=121, type=Add]; +"122 /layer3/layer3.0/relu_1/Relu/fq_output_0" [id=122, type=FakeQuantize]; +"123 /layer3/layer3.1/relu_2/Relu/fq_output_0" [id=123, type=FakeQuantize]; +"124 /layer3/layer3.1/relu/Relu" [id=124, type=Relu]; +"125 /layer3/layer3.0/conv3/Conv/WithoutBiases" [id=125, type=Convolution]; +"126 /layer3/layer3.2/Add" [id=126, type=Add]; +"127 /layer3/layer3.2/conv1/Conv/WithoutBiases" [id=127, type=Convolution]; +"128 /layer3/layer3.1/relu/Relu/fq_output_0" [id=128, type=FakeQuantize]; +"129 /layer3/layer3.0/conv3/Conv" [id=129, type=Add]; +"130 /layer3/layer3.2/relu_2/Relu" [id=130, type=Relu]; +"131 /layer3/layer3.2/conv1/Conv" [id=131, type=Add]; +"132 /layer3/layer3.1/conv2/Conv/WithoutBiases" [id=132, type=Convolution]; +"133 /layer3/layer3.0/conv3/Conv/fq_output_0" [id=133, type=FakeQuantize]; +"134 /layer3/layer3.2/relu_2/Relu/fq_output_0" [id=134, 
type=FakeQuantize]; +"135 /layer3/layer3.2/relu/Relu" [id=135, type=Relu]; +"136 /layer3/layer3.1/conv2/Conv" [id=136, type=Add]; +"137 /layer3/layer3.3/Add" [id=137, type=Add]; +"138 /layer3/layer3.3/conv1/Conv/WithoutBiases" [id=138, type=Convolution]; +"139 /layer3/layer3.2/relu/Relu/fq_output_0" [id=139, type=FakeQuantize]; +"140 /layer3/layer3.1/relu_1/Relu" [id=140, type=Relu]; +"141 /layer3/layer3.3/relu_2/Relu" [id=141, type=Relu]; +"142 /layer3/layer3.3/conv1/Conv" [id=142, type=Add]; +"143 /layer3/layer3.2/conv2/Conv/WithoutBiases" [id=143, type=Convolution]; +"144 /layer3/layer3.1/relu_1/Relu/fq_output_0" [id=144, type=FakeQuantize]; +"145 /layer3/layer3.3/relu_2/Relu/fq_output_0" [id=145, type=FakeQuantize]; +"146 /layer3/layer3.3/relu/Relu" [id=146, type=Relu]; +"147 /layer3/layer3.2/conv2/Conv" [id=147, type=Add]; +"148 /layer3/layer3.1/conv3/Conv/WithoutBiases" [id=148, type=Convolution]; +"149 /layer3/layer3.4/Add" [id=149, type=Add]; +"150 /layer3/layer3.4/conv1/Conv/WithoutBiases" [id=150, type=Convolution]; +"151 /layer3/layer3.3/relu/Relu/fq_output_0" [id=151, type=FakeQuantize]; +"152 /layer3/layer3.2/relu_1/Relu" [id=152, type=Relu]; +"153 /layer3/layer3.1/conv3/Conv" [id=153, type=Add]; +"154 /layer3/layer3.4/relu_2/Relu" [id=154, type=Relu]; +"155 /layer3/layer3.4/conv1/Conv" [id=155, type=Add]; +"156 /layer3/layer3.3/conv2/Conv/WithoutBiases" [id=156, type=Convolution]; +"157 /layer3/layer3.2/relu_1/Relu/fq_output_0" [id=157, type=FakeQuantize]; +"158 /layer3/layer3.4/relu_2/Relu/fq_output_0" [id=158, type=FakeQuantize]; +"159 /layer3/layer3.4/relu/Relu" [id=159, type=Relu]; +"160 /layer3/layer3.3/conv2/Conv" [id=160, type=Add]; +"161 /layer3/layer3.2/conv3/Conv/WithoutBiases" [id=161, type=Convolution]; +"162 /layer3/layer3.5/Add" [id=162, type=Add]; +"163 /layer3/layer3.5/conv1/Conv/WithoutBiases" [id=163, type=Convolution]; +"164 /layer3/layer3.4/relu/Relu/fq_output_0" [id=164, type=FakeQuantize]; +"165 /layer3/layer3.3/relu_1/Relu" [id=165, type=Relu]; +"166 /layer3/layer3.2/conv3/Conv" [id=166, type=Add]; +"167 /layer3/layer3.5/relu_2/Relu" [id=167, type=Relu]; +"168 /layer3/layer3.5/conv1/Conv" [id=168, type=Add]; +"169 /layer3/layer3.4/conv2/Conv/WithoutBiases" [id=169, type=Convolution]; +"170 /layer3/layer3.3/relu_1/Relu/fq_output_0" [id=170, type=FakeQuantize]; +"171 /layer3/layer3.5/relu_2/Relu/fq_output_0" [id=171, type=FakeQuantize]; +"172 /layer3/layer3.5/relu/Relu" [id=172, type=Relu]; +"173 /layer3/layer3.4/conv2/Conv" [id=173, type=Add]; +"174 /layer3/layer3.3/conv3/Conv/WithoutBiases" [id=174, type=Convolution]; +"175 /layer4/layer4.0/conv1/Conv/WithoutBiases" [id=175, type=Convolution]; +"176 /layer4/layer4.0/downsample/downsample.0/Conv/WithoutBiases" [id=176, type=Convolution]; +"177 /layer3/layer3.5/relu/Relu/fq_output_0" [id=177, type=FakeQuantize]; +"178 /layer3/layer3.4/relu_1/Relu" [id=178, type=Relu]; +"179 /layer3/layer3.3/conv3/Conv" [id=179, type=Add]; +"180 /layer4/layer4.0/conv1/Conv" [id=180, type=Add]; +"181 /layer4/layer4.0/downsample/downsample.0/Conv" [id=181, type=Add]; +"182 /layer3/layer3.5/conv2/Conv/WithoutBiases" [id=182, type=Convolution]; +"183 /layer3/layer3.4/relu_1/Relu/fq_output_0" [id=183, type=FakeQuantize]; +"184 /layer4/layer4.0/relu/Relu" [id=184, type=Relu]; +"185 /layer4/layer4.0/Add" [id=185, type=Add]; +"186 /layer3/layer3.5/conv2/Conv" [id=186, type=Add]; +"187 /layer3/layer3.4/conv3/Conv/WithoutBiases" [id=187, type=Convolution]; +"188 /layer4/layer4.0/relu/Relu/fq_output_0" [id=188, type=FakeQuantize]; 
+"189 /layer4/layer4.0/relu_2/Relu" [id=189, type=Relu]; +"190 /layer3/layer3.5/relu_1/Relu" [id=190, type=Relu]; +"191 /layer3/layer3.4/conv3/Conv" [id=191, type=Add]; +"192 /layer4/layer4.0/conv2/Conv/WithoutBiases" [id=192, type=Convolution]; +"193 /layer4/layer4.0/relu_2/Relu/fq_output_0" [id=193, type=FakeQuantize]; +"194 /layer3/layer3.5/relu_1/Relu/fq_output_0" [id=194, type=FakeQuantize]; +"195 /layer4/layer4.0/conv2/Conv" [id=195, type=Add]; +"196 /layer4/layer4.1/Add" [id=196, type=Add]; +"197 /layer4/layer4.1/conv1/Conv/WithoutBiases" [id=197, type=Convolution]; +"198 /layer3/layer3.5/conv3/Conv/WithoutBiases" [id=198, type=Convolution]; +"199 /layer4/layer4.0/relu_1/Relu" [id=199, type=Relu]; +"200 /layer4/layer4.1/relu_2/Relu" [id=200, type=Relu]; +"201 /layer4/layer4.1/conv1/Conv" [id=201, type=Add]; +"202 /layer3/layer3.5/conv3/Conv" [id=202, type=Add]; +"203 /layer4/layer4.0/relu_1/Relu/fq_output_0" [id=203, type=FakeQuantize]; +"204 /layer4/layer4.1/relu_2/Relu/fq_output_0" [id=204, type=FakeQuantize]; +"205 /layer4/layer4.1/relu/Relu" [id=205, type=Relu]; +"206 /layer4/layer4.0/conv3/Conv/WithoutBiases" [id=206, type=Convolution]; +"207 /layer4/layer4.2/Add" [id=207, type=Add]; +"208 /layer4/layer4.2/conv1/Conv/WithoutBiases" [id=208, type=Convolution]; +"209 /layer4/layer4.1/relu/Relu/fq_output_0" [id=209, type=FakeQuantize]; +"210 /layer4/layer4.0/conv3/Conv" [id=210, type=Add]; +"211 /layer4/layer4.2/relu_2/Relu" [id=211, type=Relu]; +"212 /layer4/layer4.2/conv1/Conv" [id=212, type=Add]; +"213 /layer4/layer4.1/conv2/Conv/WithoutBiases" [id=213, type=Convolution]; +"214 /layer4/layer4.0/conv3/Conv/fq_output_0" [id=214, type=FakeQuantize]; +"215 /layer4/layer4.2/relu_2/Relu/fq_output_0" [id=215, type=FakeQuantize]; +"216 /layer4/layer4.2/relu/Relu" [id=216, type=Relu]; +"217 /layer4/layer4.1/conv2/Conv" [id=217, type=Add]; +"218 /avgpool/GlobalAveragePool" [id=218, type=ReduceMean]; +"219 /layer4/layer4.2/relu/Relu/fq_output_0" [id=219, type=FakeQuantize]; +"220 /layer4/layer4.1/relu_1/Relu" [id=220, type=Relu]; +"221 /avgpool/GlobalAveragePool/fq_output_0" [id=221, type=FakeQuantize]; +"222 /layer4/layer4.2/conv2/Conv/WithoutBiases" [id=222, type=Convolution]; +"223 /layer4/layer4.1/relu_1/Relu/fq_output_0" [id=223, type=FakeQuantize]; +"224 /Flatten" [id=224, type=Reshape]; +"225 /layer4/layer4.2/conv2/Conv" [id=225, type=Add]; +"226 /layer4/layer4.1/conv3/Conv/WithoutBiases" [id=226, type=Convolution]; +"227 /fc/Gemm/WithoutBiases" [id=227, type=MatMul]; +"228 /layer4/layer4.2/relu_1/Relu" [id=228, type=Relu]; +"229 /layer4/layer4.1/conv3/Conv" [id=229, type=Add]; +"230 prob" [id=230, type=Add]; +"231 /layer4/layer4.2/relu_1/Relu/fq_output_0" [id=231, type=FakeQuantize]; +"232 prob/sink_port_0" [id=232, type=Result]; +"233 /layer4/layer4.2/conv3/Conv/WithoutBiases" [id=233, type=Convolution]; +"234 /layer4/layer4.2/conv3/Conv" [id=234, type=Add]; +"235 Constant_2819" [id=235, type=Constant]; +"236 /fc/Gemm/WithoutBiases/fq_weights_1" [id=236, type=FakeQuantize]; +"237 Constant_62386" [id=237, type=Constant]; +"238 Constant_62385" [id=238, type=Constant]; +"239 Constant_62384" [id=239, type=Constant]; +"240 Constant_62383" [id=240, type=Constant]; +"241 fc.weight" [id=241, type=Constant]; +"242 Constant_981" [id=242, type=Constant]; +"243 Constant_62381" [id=243, type=Constant]; +"244 Constant_62380" [id=244, type=Constant]; +"245 Constant_62379" [id=245, type=Constant]; +"246 Constant_62378" [id=246, type=Constant]; +"247 Range_977" [id=247, type=Constant]; +"248 
Constant_62376" [id=248, type=Constant]; +"249 Constant_62375" [id=249, type=Constant]; +"250 Constant_62374" [id=250, type=Constant]; +"251 Constant_62373" [id=251, type=Constant]; +"252 Constant_62341" [id=252, type=Constant]; +"253 Constant_62340" [id=253, type=Constant]; +"254 Constant_62339" [id=254, type=Constant]; +"255 Constant_62338" [id=255, type=Constant]; +"256 Constant_62306" [id=256, type=Constant]; +"257 Constant_62305" [id=257, type=Constant]; +"258 Constant_62304" [id=258, type=Constant]; +"259 Constant_62303" [id=259, type=Constant]; +"260 Reshape_867" [id=260, type=Constant]; +"261 /layer4/layer4.0/downsample/downsample.0/Conv/WithoutBiases/fq_weights_1" [id=261, type=FakeQuantize]; +"262 Constant_62266" [id=262, type=Constant]; +"263 Constant_62265" [id=263, type=Constant]; +"264 Constant_62264" [id=264, type=Constant]; +"265 Constant_62263" [id=265, type=Constant]; +"266 onnx^^Conv_635" [id=266, label="266 onnx::Conv_635", type=Constant]; +"267 Constant_62261" [id=267, type=Constant]; +"268 Constant_62260" [id=268, type=Constant]; +"269 Constant_62259" [id=269, type=Constant]; +"270 Constant_62258" [id=270, type=Constant]; +"271 Constant_62226" [id=271, type=Constant]; +"272 Constant_62225" [id=272, type=Constant]; +"273 Constant_62224" [id=273, type=Constant]; +"274 Constant_62223" [id=274, type=Constant]; +"275 Constant_62191" [id=275, type=Constant]; +"276 Constant_62190" [id=276, type=Constant]; +"277 Constant_62189" [id=277, type=Constant]; +"278 Constant_62188" [id=278, type=Constant]; +"279 Constant_62156" [id=279, type=Constant]; +"280 Constant_62155" [id=280, type=Constant]; +"281 Constant_62154" [id=281, type=Constant]; +"282 Constant_62153" [id=282, type=Constant]; +"283 Constant_62121" [id=283, type=Constant]; +"284 Constant_62120" [id=284, type=Constant]; +"285 Constant_62119" [id=285, type=Constant]; +"286 Constant_62118" [id=286, type=Constant]; +"287 Constant_62086" [id=287, type=Constant]; +"288 Constant_62085" [id=288, type=Constant]; +"289 Constant_62084" [id=289, type=Constant]; +"290 Constant_62083" [id=290, type=Constant]; +"291 Reshape_558" [id=291, type=Constant]; +"292 /layer3/layer3.0/downsample/downsample.0/Conv/WithoutBiases/fq_weights_1" [id=292, type=FakeQuantize]; +"293 Constant_62046" [id=293, type=Constant]; +"294 Constant_62045" [id=294, type=Constant]; +"295 Constant_62044" [id=295, type=Constant]; +"296 Constant_62043" [id=296, type=Constant]; +"297 onnx^^Conv_578" [id=297, label="297 onnx::Conv_578", type=Constant]; +"298 Constant_62041" [id=298, type=Constant]; +"299 Constant_62040" [id=299, type=Constant]; +"300 Constant_62039" [id=300, type=Constant]; +"301 Constant_62038" [id=301, type=Constant]; +"302 Constant_62006" [id=302, type=Constant]; +"303 Constant_62005" [id=303, type=Constant]; +"304 Constant_62004" [id=304, type=Constant]; +"305 Constant_62003" [id=305, type=Constant]; +"306 Constant_61971" [id=306, type=Constant]; +"307 Constant_61970" [id=307, type=Constant]; +"308 Constant_61969" [id=308, type=Constant]; +"309 Constant_61968" [id=309, type=Constant]; +"310 Constant_61936" [id=310, type=Constant]; +"311 Constant_61935" [id=311, type=Constant]; +"312 Constant_61934" [id=312, type=Constant]; +"313 Constant_61933" [id=313, type=Constant]; +"314 Reshape_347" [id=314, type=Constant]; +"315 /layer2/layer2.0/downsample/downsample.0/Conv/WithoutBiases/fq_weights_1" [id=315, type=FakeQuantize]; +"316 Constant_61896" [id=316, type=Constant]; +"317 Constant_61895" [id=317, type=Constant]; +"318 Constant_61894" [id=318, 
type=Constant]; +"319 Constant_61893" [id=319, type=Constant]; +"320 onnx^^Conv_539" [id=320, label="320 onnx::Conv_539", type=Constant]; +"321 Constant_61891" [id=321, type=Constant]; +"322 Constant_61890" [id=322, type=Constant]; +"323 Constant_61889" [id=323, type=Constant]; +"324 Constant_61888" [id=324, type=Constant]; +"325 Constant_61856" [id=325, type=Constant]; +"326 Constant_61855" [id=326, type=Constant]; +"327 Constant_61854" [id=327, type=Constant]; +"328 Constant_61853" [id=328, type=Constant]; +"329 Constant_61821" [id=329, type=Constant]; +"330 Constant_61820" [id=330, type=Constant]; +"331 Constant_61819" [id=331, type=Constant]; +"332 Constant_61818" [id=332, type=Constant]; +"333 Reshape_185" [id=333, type=Constant]; +"334 /layer1/layer1.0/downsample/downsample.0/Conv/WithoutBiases/fq_weights_1" [id=334, type=FakeQuantize]; +"335 Constant_61781" [id=335, type=Constant]; +"336 Constant_61780" [id=336, type=Constant]; +"337 Constant_61779" [id=337, type=Constant]; +"338 Constant_61778" [id=338, type=Constant]; +"339 onnx^^Conv_509" [id=339, label="339 onnx::Conv_509", type=Constant]; +"340 Constant_61776" [id=340, type=Constant]; +"341 Constant_61775" [id=341, type=Constant]; +"342 Constant_61774" [id=342, type=Constant]; +"343 Constant_61773" [id=343, type=Constant]; +"344 Reshape_121" [id=344, type=Constant]; +"345 /conv1/Conv/WithoutBiases/fq_weights_1" [id=345, type=FakeQuantize]; +"346 Constant_61771" [id=346, type=Constant]; +"347 Constant_61770" [id=347, type=Constant]; +"348 Constant_61769" [id=348, type=Constant]; +"349 Constant_61768" [id=349, type=Constant]; +"350 Gather_2818" [id=350, type=Constant]; +"351 Constant_61766" [id=351, type=Constant]; +"352 Constant_61765" [id=352, type=Constant]; +"353 Constant_61764" [id=353, type=Constant]; +"354 Constant_61763" [id=354, type=Constant]; +"355 Gather_2815" [id=355, type=Constant]; +"356 Gather_2812" [id=356, type=Constant]; +"357 Constant_61816" [id=357, type=Constant]; +"358 Constant_61815" [id=358, type=Constant]; +"359 Constant_61814" [id=359, type=Constant]; +"360 Constant_61813" [id=360, type=Constant]; +"361 Reshape_170" [id=361, type=Constant]; +"362 /layer1/layer1.0/conv3/Conv/WithoutBiases/fq_weights_1" [id=362, type=FakeQuantize]; +"363 Constant_61811" [id=363, type=Constant]; +"364 Constant_61810" [id=364, type=Constant]; +"365 Constant_61809" [id=365, type=Constant]; +"366 Constant_61808" [id=366, type=Constant]; +"367 onnx^^Conv_506" [id=367, label="367 onnx::Conv_506", type=Constant]; +"368 Constant_61806" [id=368, type=Constant]; +"369 Constant_61805" [id=369, type=Constant]; +"370 Constant_61804" [id=370, type=Constant]; +"371 Constant_61803" [id=371, type=Constant]; +"372 Reshape_154" [id=372, type=Constant]; +"373 /layer1/layer1.0/conv2/Conv/WithoutBiases/fq_weights_1" [id=373, type=FakeQuantize]; +"374 Constant_61801" [id=374, type=Constant]; +"375 Constant_61800" [id=375, type=Constant]; +"376 Constant_61799" [id=376, type=Constant]; +"377 Constant_61798" [id=377, type=Constant]; +"378 onnx^^Conv_503" [id=378, label="378 onnx::Conv_503", type=Constant]; +"379 Constant_61796" [id=379, type=Constant]; +"380 Constant_61795" [id=380, type=Constant]; +"381 Constant_61794" [id=381, type=Constant]; +"382 Constant_61793" [id=382, type=Constant]; +"383 Reshape_138" [id=383, type=Constant]; +"384 /layer1/layer1.0/conv1/Conv/WithoutBiases/fq_weights_1" [id=384, type=FakeQuantize]; +"385 Constant_61791" [id=385, type=Constant]; +"386 Constant_61790" [id=386, type=Constant]; +"387 Constant_61789" [id=387, 
type=Constant]; +"388 Constant_61788" [id=388, type=Constant]; +"389 onnx^^Conv_500" [id=389, label="389 onnx::Conv_500", type=Constant]; +"390 Reshape_234" [id=390, type=Constant]; +"391 /layer1/layer1.1/conv3/Conv/WithoutBiases/fq_weights_1" [id=391, type=FakeQuantize]; +"392 Constant_61846" [id=392, type=Constant]; +"393 Constant_61845" [id=393, type=Constant]; +"394 Constant_61844" [id=394, type=Constant]; +"395 Constant_61843" [id=395, type=Constant]; +"396 onnx^^Conv_518" [id=396, label="396 onnx::Conv_518", type=Constant]; +"397 Constant_61841" [id=397, type=Constant]; +"398 Constant_61840" [id=398, type=Constant]; +"399 Constant_61839" [id=399, type=Constant]; +"400 Constant_61838" [id=400, type=Constant]; +"401 Reshape_218" [id=401, type=Constant]; +"402 /layer1/layer1.1/conv2/Conv/WithoutBiases/fq_weights_1" [id=402, type=FakeQuantize]; +"403 Constant_61836" [id=403, type=Constant]; +"404 Constant_61835" [id=404, type=Constant]; +"405 Constant_61834" [id=405, type=Constant]; +"406 Constant_61833" [id=406, type=Constant]; +"407 onnx^^Conv_515" [id=407, label="407 onnx::Conv_515", type=Constant]; +"408 Constant_61831" [id=408, type=Constant]; +"409 Constant_61830" [id=409, type=Constant]; +"410 Constant_61829" [id=410, type=Constant]; +"411 Constant_61828" [id=411, type=Constant]; +"412 Reshape_202" [id=412, type=Constant]; +"413 /layer1/layer1.1/conv1/Conv/WithoutBiases/fq_weights_1" [id=413, type=FakeQuantize]; +"414 Constant_61826" [id=414, type=Constant]; +"415 Constant_61825" [id=415, type=Constant]; +"416 Constant_61824" [id=416, type=Constant]; +"417 Constant_61823" [id=417, type=Constant]; +"418 onnx^^Conv_512" [id=418, label="418 onnx::Conv_512", type=Constant]; +"419 Reshape_283" [id=419, type=Constant]; +"420 /layer1/layer1.2/conv3/Conv/WithoutBiases/fq_weights_1" [id=420, type=FakeQuantize]; +"421 Constant_61881" [id=421, type=Constant]; +"422 Constant_61880" [id=422, type=Constant]; +"423 Constant_61879" [id=423, type=Constant]; +"424 Constant_61878" [id=424, type=Constant]; +"425 onnx^^Conv_527" [id=425, label="425 onnx::Conv_527", type=Constant]; +"426 Constant_61876" [id=426, type=Constant]; +"427 Constant_61875" [id=427, type=Constant]; +"428 Constant_61874" [id=428, type=Constant]; +"429 Constant_61873" [id=429, type=Constant]; +"430 Reshape_267" [id=430, type=Constant]; +"431 /layer1/layer1.2/conv2/Conv/WithoutBiases/fq_weights_1" [id=431, type=FakeQuantize]; +"432 Constant_61871" [id=432, type=Constant]; +"433 Constant_61870" [id=433, type=Constant]; +"434 Constant_61869" [id=434, type=Constant]; +"435 Constant_61868" [id=435, type=Constant]; +"436 onnx^^Conv_524" [id=436, label="436 onnx::Conv_524", type=Constant]; +"437 Constant_61866" [id=437, type=Constant]; +"438 Constant_61865" [id=438, type=Constant]; +"439 Constant_61864" [id=439, type=Constant]; +"440 Constant_61863" [id=440, type=Constant]; +"441 Reshape_251" [id=441, type=Constant]; +"442 /layer1/layer1.2/conv1/Conv/WithoutBiases/fq_weights_1" [id=442, type=FakeQuantize]; +"443 Constant_61861" [id=443, type=Constant]; +"444 Constant_61860" [id=444, type=Constant]; +"445 Constant_61859" [id=445, type=Constant]; +"446 Constant_61858" [id=446, type=Constant]; +"447 onnx^^Conv_521" [id=447, label="447 onnx::Conv_521", type=Constant]; +"448 Constant_61931" [id=448, type=Constant]; +"449 Constant_61930" [id=449, type=Constant]; +"450 Constant_61929" [id=450, type=Constant]; +"451 Constant_61928" [id=451, type=Constant]; +"452 Reshape_332" [id=452, type=Constant]; +"453 
/layer2/layer2.0/conv3/Conv/WithoutBiases/fq_weights_1" [id=453, type=FakeQuantize]; +"454 Constant_61926" [id=454, type=Constant]; +"455 Constant_61925" [id=455, type=Constant]; +"456 Constant_61924" [id=456, type=Constant]; +"457 Constant_61923" [id=457, type=Constant]; +"458 onnx^^Conv_536" [id=458, label="458 onnx::Conv_536", type=Constant]; +"459 Constant_61921" [id=459, type=Constant]; +"460 Constant_61920" [id=460, type=Constant]; +"461 Constant_61919" [id=461, type=Constant]; +"462 Constant_61918" [id=462, type=Constant]; +"463 Reshape_316" [id=463, type=Constant]; +"464 /layer2/layer2.0/conv2/Conv/WithoutBiases/fq_weights_1" [id=464, type=FakeQuantize]; +"465 Constant_61916" [id=465, type=Constant]; +"466 Constant_61915" [id=466, type=Constant]; +"467 Constant_61914" [id=467, type=Constant]; +"468 Constant_61913" [id=468, type=Constant]; +"469 onnx^^Conv_533" [id=469, label="469 onnx::Conv_533", type=Constant]; +"470 Constant_61911" [id=470, type=Constant]; +"471 Constant_61910" [id=471, type=Constant]; +"472 Constant_61909" [id=472, type=Constant]; +"473 Constant_61908" [id=473, type=Constant]; +"474 Reshape_300" [id=474, type=Constant]; +"475 /layer2/layer2.0/conv1/Conv/WithoutBiases/fq_weights_1" [id=475, type=FakeQuantize]; +"476 Constant_61906" [id=476, type=Constant]; +"477 Constant_61905" [id=477, type=Constant]; +"478 Constant_61904" [id=478, type=Constant]; +"479 Constant_61903" [id=479, type=Constant]; +"480 onnx^^Conv_530" [id=480, label="480 onnx::Conv_530", type=Constant]; +"481 Reshape_396" [id=481, type=Constant]; +"482 /layer2/layer2.1/conv3/Conv/WithoutBiases/fq_weights_1" [id=482, type=FakeQuantize]; +"483 Constant_61961" [id=483, type=Constant]; +"484 Constant_61960" [id=484, type=Constant]; +"485 Constant_61959" [id=485, type=Constant]; +"486 Constant_61958" [id=486, type=Constant]; +"487 onnx^^Conv_548" [id=487, label="487 onnx::Conv_548", type=Constant]; +"488 Constant_61956" [id=488, type=Constant]; +"489 Constant_61955" [id=489, type=Constant]; +"490 Constant_61954" [id=490, type=Constant]; +"491 Constant_61953" [id=491, type=Constant]; +"492 Reshape_380" [id=492, type=Constant]; +"493 /layer2/layer2.1/conv2/Conv/WithoutBiases/fq_weights_1" [id=493, type=FakeQuantize]; +"494 Constant_61951" [id=494, type=Constant]; +"495 Constant_61950" [id=495, type=Constant]; +"496 Constant_61949" [id=496, type=Constant]; +"497 Constant_61948" [id=497, type=Constant]; +"498 onnx^^Conv_545" [id=498, label="498 onnx::Conv_545", type=Constant]; +"499 Constant_61946" [id=499, type=Constant]; +"500 Constant_61945" [id=500, type=Constant]; +"501 Constant_61944" [id=501, type=Constant]; +"502 Constant_61943" [id=502, type=Constant]; +"503 Reshape_364" [id=503, type=Constant]; +"504 /layer2/layer2.1/conv1/Conv/WithoutBiases/fq_weights_1" [id=504, type=FakeQuantize]; +"505 Constant_61941" [id=505, type=Constant]; +"506 Constant_61940" [id=506, type=Constant]; +"507 Constant_61939" [id=507, type=Constant]; +"508 Constant_61938" [id=508, type=Constant]; +"509 onnx^^Conv_542" [id=509, label="509 onnx::Conv_542", type=Constant]; +"510 Reshape_445" [id=510, type=Constant]; +"511 /layer2/layer2.2/conv3/Conv/WithoutBiases/fq_weights_1" [id=511, type=FakeQuantize]; +"512 Constant_61996" [id=512, type=Constant]; +"513 Constant_61995" [id=513, type=Constant]; +"514 Constant_61994" [id=514, type=Constant]; +"515 Constant_61993" [id=515, type=Constant]; +"516 onnx^^Conv_557" [id=516, label="516 onnx::Conv_557", type=Constant]; +"517 Constant_61991" [id=517, type=Constant]; +"518 
Constant_61990" [id=518, type=Constant]; +"519 Constant_61989" [id=519, type=Constant]; +"520 Constant_61988" [id=520, type=Constant]; +"521 Reshape_429" [id=521, type=Constant]; +"522 /layer2/layer2.2/conv2/Conv/WithoutBiases/fq_weights_1" [id=522, type=FakeQuantize]; +"523 Constant_61986" [id=523, type=Constant]; +"524 Constant_61985" [id=524, type=Constant]; +"525 Constant_61984" [id=525, type=Constant]; +"526 Constant_61983" [id=526, type=Constant]; +"527 onnx^^Conv_554" [id=527, label="527 onnx::Conv_554", type=Constant]; +"528 Constant_61981" [id=528, type=Constant]; +"529 Constant_61980" [id=529, type=Constant]; +"530 Constant_61979" [id=530, type=Constant]; +"531 Constant_61978" [id=531, type=Constant]; +"532 Reshape_413" [id=532, type=Constant]; +"533 /layer2/layer2.2/conv1/Conv/WithoutBiases/fq_weights_1" [id=533, type=FakeQuantize]; +"534 Constant_61976" [id=534, type=Constant]; +"535 Constant_61975" [id=535, type=Constant]; +"536 Constant_61974" [id=536, type=Constant]; +"537 Constant_61973" [id=537, type=Constant]; +"538 onnx^^Conv_551" [id=538, label="538 onnx::Conv_551", type=Constant]; +"539 Reshape_494" [id=539, type=Constant]; +"540 /layer2/layer2.3/conv3/Conv/WithoutBiases/fq_weights_1" [id=540, type=FakeQuantize]; +"541 Constant_62031" [id=541, type=Constant]; +"542 Constant_62030" [id=542, type=Constant]; +"543 Constant_62029" [id=543, type=Constant]; +"544 Constant_62028" [id=544, type=Constant]; +"545 onnx^^Conv_566" [id=545, label="545 onnx::Conv_566", type=Constant]; +"546 Constant_62026" [id=546, type=Constant]; +"547 Constant_62025" [id=547, type=Constant]; +"548 Constant_62024" [id=548, type=Constant]; +"549 Constant_62023" [id=549, type=Constant]; +"550 Reshape_478" [id=550, type=Constant]; +"551 /layer2/layer2.3/conv2/Conv/WithoutBiases/fq_weights_1" [id=551, type=FakeQuantize]; +"552 Constant_62021" [id=552, type=Constant]; +"553 Constant_62020" [id=553, type=Constant]; +"554 Constant_62019" [id=554, type=Constant]; +"555 Constant_62018" [id=555, type=Constant]; +"556 onnx^^Conv_563" [id=556, label="556 onnx::Conv_563", type=Constant]; +"557 Constant_62016" [id=557, type=Constant]; +"558 Constant_62015" [id=558, type=Constant]; +"559 Constant_62014" [id=559, type=Constant]; +"560 Constant_62013" [id=560, type=Constant]; +"561 Reshape_462" [id=561, type=Constant]; +"562 /layer2/layer2.3/conv1/Conv/WithoutBiases/fq_weights_1" [id=562, type=FakeQuantize]; +"563 Constant_62011" [id=563, type=Constant]; +"564 Constant_62010" [id=564, type=Constant]; +"565 Constant_62009" [id=565, type=Constant]; +"566 Constant_62008" [id=566, type=Constant]; +"567 onnx^^Conv_560" [id=567, label="567 onnx::Conv_560", type=Constant]; +"568 Constant_62081" [id=568, type=Constant]; +"569 Constant_62080" [id=569, type=Constant]; +"570 Constant_62079" [id=570, type=Constant]; +"571 Constant_62078" [id=571, type=Constant]; +"572 Reshape_543" [id=572, type=Constant]; +"573 /layer3/layer3.0/conv3/Conv/WithoutBiases/fq_weights_1" [id=573, type=FakeQuantize]; +"574 Constant_62076" [id=574, type=Constant]; +"575 Constant_62075" [id=575, type=Constant]; +"576 Constant_62074" [id=576, type=Constant]; +"577 Constant_62073" [id=577, type=Constant]; +"578 onnx^^Conv_575" [id=578, label="578 onnx::Conv_575", type=Constant]; +"579 Constant_62071" [id=579, type=Constant]; +"580 Constant_62070" [id=580, type=Constant]; +"581 Constant_62069" [id=581, type=Constant]; +"582 Constant_62068" [id=582, type=Constant]; +"583 Reshape_527" [id=583, type=Constant]; +"584 
/layer3/layer3.0/conv2/Conv/WithoutBiases/fq_weights_1" [id=584, type=FakeQuantize]; +"585 Constant_62066" [id=585, type=Constant]; +"586 Constant_62065" [id=586, type=Constant]; +"587 Constant_62064" [id=587, type=Constant]; +"588 Constant_62063" [id=588, type=Constant]; +"589 onnx^^Conv_572" [id=589, label="589 onnx::Conv_572", type=Constant]; +"590 Constant_62061" [id=590, type=Constant]; +"591 Constant_62060" [id=591, type=Constant]; +"592 Constant_62059" [id=592, type=Constant]; +"593 Constant_62058" [id=593, type=Constant]; +"594 Reshape_511" [id=594, type=Constant]; +"595 /layer3/layer3.0/conv1/Conv/WithoutBiases/fq_weights_1" [id=595, type=FakeQuantize]; +"596 Constant_62056" [id=596, type=Constant]; +"597 Constant_62055" [id=597, type=Constant]; +"598 Constant_62054" [id=598, type=Constant]; +"599 Constant_62053" [id=599, type=Constant]; +"600 onnx^^Conv_569" [id=600, label="600 onnx::Conv_569", type=Constant]; +"601 Reshape_607" [id=601, type=Constant]; +"602 /layer3/layer3.1/conv3/Conv/WithoutBiases/fq_weights_1" [id=602, type=FakeQuantize]; +"603 Constant_62111" [id=603, type=Constant]; +"604 Constant_62110" [id=604, type=Constant]; +"605 Constant_62109" [id=605, type=Constant]; +"606 Constant_62108" [id=606, type=Constant]; +"607 onnx^^Conv_587" [id=607, label="607 onnx::Conv_587", type=Constant]; +"608 Constant_62106" [id=608, type=Constant]; +"609 Constant_62105" [id=609, type=Constant]; +"610 Constant_62104" [id=610, type=Constant]; +"611 Constant_62103" [id=611, type=Constant]; +"612 Reshape_591" [id=612, type=Constant]; +"613 /layer3/layer3.1/conv2/Conv/WithoutBiases/fq_weights_1" [id=613, type=FakeQuantize]; +"614 Constant_62101" [id=614, type=Constant]; +"615 Constant_62100" [id=615, type=Constant]; +"616 Constant_62099" [id=616, type=Constant]; +"617 Constant_62098" [id=617, type=Constant]; +"618 onnx^^Conv_584" [id=618, label="618 onnx::Conv_584", type=Constant]; +"619 Constant_62096" [id=619, type=Constant]; +"620 Constant_62095" [id=620, type=Constant]; +"621 Constant_62094" [id=621, type=Constant]; +"622 Constant_62093" [id=622, type=Constant]; +"623 Reshape_575" [id=623, type=Constant]; +"624 /layer3/layer3.1/conv1/Conv/WithoutBiases/fq_weights_1" [id=624, type=FakeQuantize]; +"625 Constant_62091" [id=625, type=Constant]; +"626 Constant_62090" [id=626, type=Constant]; +"627 Constant_62089" [id=627, type=Constant]; +"628 Constant_62088" [id=628, type=Constant]; +"629 onnx^^Conv_581" [id=629, label="629 onnx::Conv_581", type=Constant]; +"630 Reshape_656" [id=630, type=Constant]; +"631 /layer3/layer3.2/conv3/Conv/WithoutBiases/fq_weights_1" [id=631, type=FakeQuantize]; +"632 Constant_62146" [id=632, type=Constant]; +"633 Constant_62145" [id=633, type=Constant]; +"634 Constant_62144" [id=634, type=Constant]; +"635 Constant_62143" [id=635, type=Constant]; +"636 onnx^^Conv_596" [id=636, label="636 onnx::Conv_596", type=Constant]; +"637 Constant_62141" [id=637, type=Constant]; +"638 Constant_62140" [id=638, type=Constant]; +"639 Constant_62139" [id=639, type=Constant]; +"640 Constant_62138" [id=640, type=Constant]; +"641 Reshape_640" [id=641, type=Constant]; +"642 /layer3/layer3.2/conv2/Conv/WithoutBiases/fq_weights_1" [id=642, type=FakeQuantize]; +"643 Constant_62136" [id=643, type=Constant]; +"644 Constant_62135" [id=644, type=Constant]; +"645 Constant_62134" [id=645, type=Constant]; +"646 Constant_62133" [id=646, type=Constant]; +"647 onnx^^Conv_593" [id=647, label="647 onnx::Conv_593", type=Constant]; +"648 Constant_62131" [id=648, type=Constant]; +"649 
Constant_62130" [id=649, type=Constant]; +"650 Constant_62129" [id=650, type=Constant]; +"651 Constant_62128" [id=651, type=Constant]; +"652 Reshape_624" [id=652, type=Constant]; +"653 /layer3/layer3.2/conv1/Conv/WithoutBiases/fq_weights_1" [id=653, type=FakeQuantize]; +"654 Constant_62126" [id=654, type=Constant]; +"655 Constant_62125" [id=655, type=Constant]; +"656 Constant_62124" [id=656, type=Constant]; +"657 Constant_62123" [id=657, type=Constant]; +"658 onnx^^Conv_590" [id=658, label="658 onnx::Conv_590", type=Constant]; +"659 Reshape_705" [id=659, type=Constant]; +"660 /layer3/layer3.3/conv3/Conv/WithoutBiases/fq_weights_1" [id=660, type=FakeQuantize]; +"661 Constant_62181" [id=661, type=Constant]; +"662 Constant_62180" [id=662, type=Constant]; +"663 Constant_62179" [id=663, type=Constant]; +"664 Constant_62178" [id=664, type=Constant]; +"665 onnx^^Conv_605" [id=665, label="665 onnx::Conv_605", type=Constant]; +"666 Constant_62176" [id=666, type=Constant]; +"667 Constant_62175" [id=667, type=Constant]; +"668 Constant_62174" [id=668, type=Constant]; +"669 Constant_62173" [id=669, type=Constant]; +"670 Reshape_689" [id=670, type=Constant]; +"671 /layer3/layer3.3/conv2/Conv/WithoutBiases/fq_weights_1" [id=671, type=FakeQuantize]; +"672 Constant_62171" [id=672, type=Constant]; +"673 Constant_62170" [id=673, type=Constant]; +"674 Constant_62169" [id=674, type=Constant]; +"675 Constant_62168" [id=675, type=Constant]; +"676 onnx^^Conv_602" [id=676, label="676 onnx::Conv_602", type=Constant]; +"677 Constant_62166" [id=677, type=Constant]; +"678 Constant_62165" [id=678, type=Constant]; +"679 Constant_62164" [id=679, type=Constant]; +"680 Constant_62163" [id=680, type=Constant]; +"681 Reshape_673" [id=681, type=Constant]; +"682 /layer3/layer3.3/conv1/Conv/WithoutBiases/fq_weights_1" [id=682, type=FakeQuantize]; +"683 Constant_62161" [id=683, type=Constant]; +"684 Constant_62160" [id=684, type=Constant]; +"685 Constant_62159" [id=685, type=Constant]; +"686 Constant_62158" [id=686, type=Constant]; +"687 onnx^^Conv_599" [id=687, label="687 onnx::Conv_599", type=Constant]; +"688 Reshape_754" [id=688, type=Constant]; +"689 /layer3/layer3.4/conv3/Conv/WithoutBiases/fq_weights_1" [id=689, type=FakeQuantize]; +"690 Constant_62216" [id=690, type=Constant]; +"691 Constant_62215" [id=691, type=Constant]; +"692 Constant_62214" [id=692, type=Constant]; +"693 Constant_62213" [id=693, type=Constant]; +"694 onnx^^Conv_614" [id=694, label="694 onnx::Conv_614", type=Constant]; +"695 Constant_62211" [id=695, type=Constant]; +"696 Constant_62210" [id=696, type=Constant]; +"697 Constant_62209" [id=697, type=Constant]; +"698 Constant_62208" [id=698, type=Constant]; +"699 Reshape_738" [id=699, type=Constant]; +"700 /layer3/layer3.4/conv2/Conv/WithoutBiases/fq_weights_1" [id=700, type=FakeQuantize]; +"701 Constant_62206" [id=701, type=Constant]; +"702 Constant_62205" [id=702, type=Constant]; +"703 Constant_62204" [id=703, type=Constant]; +"704 Constant_62203" [id=704, type=Constant]; +"705 onnx^^Conv_611" [id=705, label="705 onnx::Conv_611", type=Constant]; +"706 Constant_62201" [id=706, type=Constant]; +"707 Constant_62200" [id=707, type=Constant]; +"708 Constant_62199" [id=708, type=Constant]; +"709 Constant_62198" [id=709, type=Constant]; +"710 Reshape_722" [id=710, type=Constant]; +"711 /layer3/layer3.4/conv1/Conv/WithoutBiases/fq_weights_1" [id=711, type=FakeQuantize]; +"712 Constant_62196" [id=712, type=Constant]; +"713 Constant_62195" [id=713, type=Constant]; +"714 Constant_62194" [id=714, type=Constant]; 
+"715 Constant_62193" [id=715, type=Constant]; +"716 onnx^^Conv_608" [id=716, label="716 onnx::Conv_608", type=Constant]; +"717 Reshape_803" [id=717, type=Constant]; +"718 /layer3/layer3.5/conv3/Conv/WithoutBiases/fq_weights_1" [id=718, type=FakeQuantize]; +"719 Constant_62251" [id=719, type=Constant]; +"720 Constant_62250" [id=720, type=Constant]; +"721 Constant_62249" [id=721, type=Constant]; +"722 Constant_62248" [id=722, type=Constant]; +"723 onnx^^Conv_623" [id=723, label="723 onnx::Conv_623", type=Constant]; +"724 Constant_62246" [id=724, type=Constant]; +"725 Constant_62245" [id=725, type=Constant]; +"726 Constant_62244" [id=726, type=Constant]; +"727 Constant_62243" [id=727, type=Constant]; +"728 Reshape_787" [id=728, type=Constant]; +"729 /layer3/layer3.5/conv2/Conv/WithoutBiases/fq_weights_1" [id=729, type=FakeQuantize]; +"730 Constant_62241" [id=730, type=Constant]; +"731 Constant_62240" [id=731, type=Constant]; +"732 Constant_62239" [id=732, type=Constant]; +"733 Constant_62238" [id=733, type=Constant]; +"734 onnx^^Conv_620" [id=734, label="734 onnx::Conv_620", type=Constant]; +"735 Constant_62236" [id=735, type=Constant]; +"736 Constant_62235" [id=736, type=Constant]; +"737 Constant_62234" [id=737, type=Constant]; +"738 Constant_62233" [id=738, type=Constant]; +"739 Reshape_771" [id=739, type=Constant]; +"740 /layer3/layer3.5/conv1/Conv/WithoutBiases/fq_weights_1" [id=740, type=FakeQuantize]; +"741 Constant_62231" [id=741, type=Constant]; +"742 Constant_62230" [id=742, type=Constant]; +"743 Constant_62229" [id=743, type=Constant]; +"744 Constant_62228" [id=744, type=Constant]; +"745 onnx^^Conv_617" [id=745, label="745 onnx::Conv_617", type=Constant]; +"746 Constant_62301" [id=746, type=Constant]; +"747 Constant_62300" [id=747, type=Constant]; +"748 Constant_62299" [id=748, type=Constant]; +"749 Constant_62298" [id=749, type=Constant]; +"750 Reshape_852" [id=750, type=Constant]; +"751 /layer4/layer4.0/conv3/Conv/WithoutBiases/fq_weights_1" [id=751, type=FakeQuantize]; +"752 Constant_62296" [id=752, type=Constant]; +"753 Constant_62295" [id=753, type=Constant]; +"754 Constant_62294" [id=754, type=Constant]; +"755 Constant_62293" [id=755, type=Constant]; +"756 onnx^^Conv_632" [id=756, label="756 onnx::Conv_632", type=Constant]; +"757 Constant_62291" [id=757, type=Constant]; +"758 Constant_62290" [id=758, type=Constant]; +"759 Constant_62289" [id=759, type=Constant]; +"760 Constant_62288" [id=760, type=Constant]; +"761 Reshape_836" [id=761, type=Constant]; +"762 /layer4/layer4.0/conv2/Conv/WithoutBiases/fq_weights_1" [id=762, type=FakeQuantize]; +"763 Constant_62286" [id=763, type=Constant]; +"764 Constant_62285" [id=764, type=Constant]; +"765 Constant_62284" [id=765, type=Constant]; +"766 Constant_62283" [id=766, type=Constant]; +"767 onnx^^Conv_629" [id=767, label="767 onnx::Conv_629", type=Constant]; +"768 Constant_62281" [id=768, type=Constant]; +"769 Constant_62280" [id=769, type=Constant]; +"770 Constant_62279" [id=770, type=Constant]; +"771 Constant_62278" [id=771, type=Constant]; +"772 Reshape_820" [id=772, type=Constant]; +"773 /layer4/layer4.0/conv1/Conv/WithoutBiases/fq_weights_1" [id=773, type=FakeQuantize]; +"774 Constant_62276" [id=774, type=Constant]; +"775 Constant_62275" [id=775, type=Constant]; +"776 Constant_62274" [id=776, type=Constant]; +"777 Constant_62273" [id=777, type=Constant]; +"778 onnx^^Conv_626" [id=778, label="778 onnx::Conv_626", type=Constant]; +"779 Reshape_916" [id=779, type=Constant]; +"780 
/layer4/layer4.1/conv3/Conv/WithoutBiases/fq_weights_1" [id=780, type=FakeQuantize]; +"781 Constant_62331" [id=781, type=Constant]; +"782 Constant_62330" [id=782, type=Constant]; +"783 Constant_62329" [id=783, type=Constant]; +"784 Constant_62328" [id=784, type=Constant]; +"785 onnx^^Conv_644" [id=785, label="785 onnx::Conv_644", type=Constant]; +"786 Constant_62326" [id=786, type=Constant]; +"787 Constant_62325" [id=787, type=Constant]; +"788 Constant_62324" [id=788, type=Constant]; +"789 Constant_62323" [id=789, type=Constant]; +"790 Reshape_900" [id=790, type=Constant]; +"791 /layer4/layer4.1/conv2/Conv/WithoutBiases/fq_weights_1" [id=791, type=FakeQuantize]; +"792 Constant_62321" [id=792, type=Constant]; +"793 Constant_62320" [id=793, type=Constant]; +"794 Constant_62319" [id=794, type=Constant]; +"795 Constant_62318" [id=795, type=Constant]; +"796 onnx^^Conv_641" [id=796, label="796 onnx::Conv_641", type=Constant]; +"797 Constant_62316" [id=797, type=Constant]; +"798 Constant_62315" [id=798, type=Constant]; +"799 Constant_62314" [id=799, type=Constant]; +"800 Constant_62313" [id=800, type=Constant]; +"801 Reshape_884" [id=801, type=Constant]; +"802 /layer4/layer4.1/conv1/Conv/WithoutBiases/fq_weights_1" [id=802, type=FakeQuantize]; +"803 Constant_62311" [id=803, type=Constant]; +"804 Constant_62310" [id=804, type=Constant]; +"805 Constant_62309" [id=805, type=Constant]; +"806 Constant_62308" [id=806, type=Constant]; +"807 onnx^^Conv_638" [id=807, label="807 onnx::Conv_638", type=Constant]; +"808 Reshape_965" [id=808, type=Constant]; +"809 /layer4/layer4.2/conv3/Conv/WithoutBiases/fq_weights_1" [id=809, type=FakeQuantize]; +"810 Constant_62366" [id=810, type=Constant]; +"811 Constant_62365" [id=811, type=Constant]; +"812 Constant_62364" [id=812, type=Constant]; +"813 Constant_62363" [id=813, type=Constant]; +"814 onnx^^Conv_653" [id=814, label="814 onnx::Conv_653", type=Constant]; +"815 Constant_62361" [id=815, type=Constant]; +"816 Constant_62360" [id=816, type=Constant]; +"817 Constant_62359" [id=817, type=Constant]; +"818 Constant_62358" [id=818, type=Constant]; +"819 Reshape_949" [id=819, type=Constant]; +"820 /layer4/layer4.2/conv2/Conv/WithoutBiases/fq_weights_1" [id=820, type=FakeQuantize]; +"821 Constant_62356" [id=821, type=Constant]; +"822 Constant_62355" [id=822, type=Constant]; +"823 Constant_62354" [id=823, type=Constant]; +"824 Constant_62353" [id=824, type=Constant]; +"825 onnx^^Conv_650" [id=825, label="825 onnx::Conv_650", type=Constant]; +"826 Constant_62351" [id=826, type=Constant]; +"827 Constant_62350" [id=827, type=Constant]; +"828 Constant_62349" [id=828, type=Constant]; +"829 Constant_62348" [id=829, type=Constant]; +"830 Reshape_933" [id=830, type=Constant]; +"831 /layer4/layer4.2/conv1/Conv/WithoutBiases/fq_weights_1" [id=831, type=FakeQuantize]; +"832 Constant_62346" [id=832, type=Constant]; +"833 Constant_62345" [id=833, type=Constant]; +"834 Constant_62344" [id=834, type=Constant]; +"835 Constant_62343" [id=835, type=Constant]; +"836 onnx^^Conv_647" [id=836, label="836 onnx::Conv_647", type=Constant]; +"0 data" -> "1 Multiply_2745" [label="[1, 3, 224, 224]", style=solid]; +"1 Multiply_2745" -> "2 Divide_1002" [label="[1, 3, 224, 224]", style=solid]; +"2 Divide_1002" -> "3 Divide_1002/fq_output_0" [label="[1, 3, 224, 224]", style=solid]; +"3 Divide_1002/fq_output_0" -> "4 /conv1/Conv/WithoutBiases" [label="[1, 3, 224, 224]", style=solid]; +"4 /conv1/Conv/WithoutBiases" -> "5 /conv1/Conv" [label="[1, 64, 112, 112]", style=solid]; +"5 /conv1/Conv" -> "6 
/relu/Relu" [label="[1, 64, 112, 112]", style=solid]; +"6 /relu/Relu" -> "7 /relu/Relu/fq_output_0" [label="[1, 64, 112, 112]", style=solid]; +"7 /relu/Relu/fq_output_0" -> "8 /maxpool/MaxPool" [label="[1, 64, 112, 112]", style=solid]; +"8 /maxpool/MaxPool" -> "9 /layer1/layer1.0/conv1/Conv/WithoutBiases" [label="[1, 64, 56, 56]", style=solid]; +"8 /maxpool/MaxPool" -> "10 /layer1/layer1.0/downsample/downsample.0/Conv/WithoutBiases" [label="[1, 64, 56, 56]", style=solid]; +"9 /layer1/layer1.0/conv1/Conv/WithoutBiases" -> "11 /layer1/layer1.0/conv1/Conv" [label="[1, 64, 56, 56]", style=solid]; +"10 /layer1/layer1.0/downsample/downsample.0/Conv/WithoutBiases" -> "12 /layer1/layer1.0/downsample/downsample.0/Conv" [label="[1, 256, 56, 56]", style=solid]; +"11 /layer1/layer1.0/conv1/Conv" -> "13 /layer1/layer1.0/relu/Relu" [label="[1, 64, 56, 56]", style=solid]; +"12 /layer1/layer1.0/downsample/downsample.0/Conv" -> "14 /layer1/layer1.0/Add" [label="[1, 256, 56, 56]", style=solid]; +"13 /layer1/layer1.0/relu/Relu" -> "15 /layer1/layer1.0/relu/Relu/fq_output_0" [label="[1, 64, 56, 56]", style=solid]; +"14 /layer1/layer1.0/Add" -> "16 /layer1/layer1.0/relu_2/Relu" [label="[1, 256, 56, 56]", style=solid]; +"15 /layer1/layer1.0/relu/Relu/fq_output_0" -> "17 /layer1/layer1.0/conv2/Conv/WithoutBiases" [label="[1, 64, 56, 56]", style=solid]; +"16 /layer1/layer1.0/relu_2/Relu" -> "18 /layer1/layer1.0/relu_2/Relu/fq_output_0" [label="[1, 256, 56, 56]", style=solid]; +"17 /layer1/layer1.0/conv2/Conv/WithoutBiases" -> "19 /layer1/layer1.0/conv2/Conv" [label="[1, 64, 56, 56]", style=solid]; +"18 /layer1/layer1.0/relu_2/Relu/fq_output_0" -> "20 /layer1/layer1.1/Add" [label="[1, 256, 56, 56]", style=solid]; +"18 /layer1/layer1.0/relu_2/Relu/fq_output_0" -> "21 /layer1/layer1.1/conv1/Conv/WithoutBiases" [label="[1, 256, 56, 56]", style=solid]; +"19 /layer1/layer1.0/conv2/Conv" -> "22 /layer1/layer1.0/relu_1/Relu" [label="[1, 64, 56, 56]", style=solid]; +"20 /layer1/layer1.1/Add" -> "23 /layer1/layer1.1/relu_2/Relu" [label="[1, 256, 56, 56]", style=solid]; +"21 /layer1/layer1.1/conv1/Conv/WithoutBiases" -> "24 /layer1/layer1.1/conv1/Conv" [label="[1, 64, 56, 56]", style=solid]; +"22 /layer1/layer1.0/relu_1/Relu" -> "25 /layer1/layer1.0/relu_1/Relu/fq_output_0" [label="[1, 64, 56, 56]", style=solid]; +"23 /layer1/layer1.1/relu_2/Relu" -> "26 /layer1/layer1.1/relu_2/Relu/fq_output_0" [label="[1, 256, 56, 56]", style=solid]; +"24 /layer1/layer1.1/conv1/Conv" -> "27 /layer1/layer1.1/relu/Relu" [label="[1, 64, 56, 56]", style=solid]; +"25 /layer1/layer1.0/relu_1/Relu/fq_output_0" -> "28 /layer1/layer1.0/conv3/Conv/WithoutBiases" [label="[1, 64, 56, 56]", style=solid]; +"26 /layer1/layer1.1/relu_2/Relu/fq_output_0" -> "29 /layer1/layer1.2/Add" [label="[1, 256, 56, 56]", style=solid]; +"26 /layer1/layer1.1/relu_2/Relu/fq_output_0" -> "30 /layer1/layer1.2/conv1/Conv/WithoutBiases" [label="[1, 256, 56, 56]", style=solid]; +"27 /layer1/layer1.1/relu/Relu" -> "31 /layer1/layer1.1/relu/Relu/fq_output_0" [label="[1, 64, 56, 56]", style=solid]; +"28 /layer1/layer1.0/conv3/Conv/WithoutBiases" -> "32 /layer1/layer1.0/conv3/Conv" [label="[1, 256, 56, 56]", style=solid]; +"29 /layer1/layer1.2/Add" -> "33 /layer1/layer1.2/relu_2/Relu" [label="[1, 256, 56, 56]", style=solid]; +"30 /layer1/layer1.2/conv1/Conv/WithoutBiases" -> "34 /layer1/layer1.2/conv1/Conv" [label="[1, 64, 56, 56]", style=solid]; +"31 /layer1/layer1.1/relu/Relu/fq_output_0" -> "35 /layer1/layer1.1/conv2/Conv/WithoutBiases" [label="[1, 64, 56, 56]", style=solid]; 
+"32 /layer1/layer1.0/conv3/Conv" -> "36 /layer1/layer1.0/conv3/Conv/fq_output_0" [label="[1, 256, 56, 56]", style=solid]; +"33 /layer1/layer1.2/relu_2/Relu" -> "37 /layer1/layer1.2/relu_2/Relu/fq_output_0" [label="[1, 256, 56, 56]", style=solid]; +"34 /layer1/layer1.2/conv1/Conv" -> "38 /layer1/layer1.2/relu/Relu" [label="[1, 64, 56, 56]", style=solid]; +"35 /layer1/layer1.1/conv2/Conv/WithoutBiases" -> "39 /layer1/layer1.1/conv2/Conv" [label="[1, 64, 56, 56]", style=solid]; +"36 /layer1/layer1.0/conv3/Conv/fq_output_0" -> "14 /layer1/layer1.0/Add" [label="[1, 256, 56, 56]", style=solid]; +"37 /layer1/layer1.2/relu_2/Relu/fq_output_0" -> "40 /layer2/layer2.0/conv1/Conv/WithoutBiases" [label="[1, 256, 56, 56]", style=solid]; +"37 /layer1/layer1.2/relu_2/Relu/fq_output_0" -> "41 /layer2/layer2.0/downsample/downsample.0/Conv/WithoutBiases" [label="[1, 256, 56, 56]", style=solid]; +"38 /layer1/layer1.2/relu/Relu" -> "42 /layer1/layer1.2/relu/Relu/fq_output_0" [label="[1, 64, 56, 56]", style=solid]; +"39 /layer1/layer1.1/conv2/Conv" -> "43 /layer1/layer1.1/relu_1/Relu" [label="[1, 64, 56, 56]", style=solid]; +"40 /layer2/layer2.0/conv1/Conv/WithoutBiases" -> "44 /layer2/layer2.0/conv1/Conv" [label="[1, 128, 56, 56]", style=solid]; +"41 /layer2/layer2.0/downsample/downsample.0/Conv/WithoutBiases" -> "45 /layer2/layer2.0/downsample/downsample.0/Conv" [label="[1, 512, 28, 28]", style=solid]; +"42 /layer1/layer1.2/relu/Relu/fq_output_0" -> "46 /layer1/layer1.2/conv2/Conv/WithoutBiases" [label="[1, 64, 56, 56]", style=solid]; +"43 /layer1/layer1.1/relu_1/Relu" -> "47 /layer1/layer1.1/relu_1/Relu/fq_output_0" [label="[1, 64, 56, 56]", style=solid]; +"44 /layer2/layer2.0/conv1/Conv" -> "48 /layer2/layer2.0/relu/Relu" [label="[1, 128, 56, 56]", style=solid]; +"45 /layer2/layer2.0/downsample/downsample.0/Conv" -> "49 /layer2/layer2.0/Add" [label="[1, 512, 28, 28]", style=solid]; +"46 /layer1/layer1.2/conv2/Conv/WithoutBiases" -> "50 /layer1/layer1.2/conv2/Conv" [label="[1, 64, 56, 56]", style=solid]; +"47 /layer1/layer1.1/relu_1/Relu/fq_output_0" -> "51 /layer1/layer1.1/conv3/Conv/WithoutBiases" [label="[1, 64, 56, 56]", style=solid]; +"48 /layer2/layer2.0/relu/Relu" -> "52 /layer2/layer2.0/relu/Relu/fq_output_0" [label="[1, 128, 56, 56]", style=solid]; +"49 /layer2/layer2.0/Add" -> "53 /layer2/layer2.0/relu_2/Relu" [label="[1, 512, 28, 28]", style=solid]; +"50 /layer1/layer1.2/conv2/Conv" -> "54 /layer1/layer1.2/relu_1/Relu" [label="[1, 64, 56, 56]", style=solid]; +"51 /layer1/layer1.1/conv3/Conv/WithoutBiases" -> "55 /layer1/layer1.1/conv3/Conv" [label="[1, 256, 56, 56]", style=solid]; +"52 /layer2/layer2.0/relu/Relu/fq_output_0" -> "56 /layer2/layer2.0/conv2/Conv/WithoutBiases" [label="[1, 128, 56, 56]", style=solid]; +"53 /layer2/layer2.0/relu_2/Relu" -> "57 /layer2/layer2.0/relu_2/Relu/fq_output_0" [label="[1, 512, 28, 28]", style=solid]; +"54 /layer1/layer1.2/relu_1/Relu" -> "58 /layer1/layer1.2/relu_1/Relu/fq_output_0" [label="[1, 64, 56, 56]", style=solid]; +"55 /layer1/layer1.1/conv3/Conv" -> "20 /layer1/layer1.1/Add" [label="[1, 256, 56, 56]", style=solid]; +"56 /layer2/layer2.0/conv2/Conv/WithoutBiases" -> "59 /layer2/layer2.0/conv2/Conv" [label="[1, 128, 28, 28]", style=solid]; +"57 /layer2/layer2.0/relu_2/Relu/fq_output_0" -> "60 /layer2/layer2.1/Add" [label="[1, 512, 28, 28]", style=solid]; +"57 /layer2/layer2.0/relu_2/Relu/fq_output_0" -> "61 /layer2/layer2.1/conv1/Conv/WithoutBiases" [label="[1, 512, 28, 28]", style=solid]; +"58 /layer1/layer1.2/relu_1/Relu/fq_output_0" -> "62 
/layer1/layer1.2/conv3/Conv/WithoutBiases" [label="[1, 64, 56, 56]", style=solid]; +"59 /layer2/layer2.0/conv2/Conv" -> "63 /layer2/layer2.0/relu_1/Relu" [label="[1, 128, 28, 28]", style=solid]; +"60 /layer2/layer2.1/Add" -> "64 /layer2/layer2.1/relu_2/Relu" [label="[1, 512, 28, 28]", style=solid]; +"61 /layer2/layer2.1/conv1/Conv/WithoutBiases" -> "65 /layer2/layer2.1/conv1/Conv" [label="[1, 128, 28, 28]", style=solid]; +"62 /layer1/layer1.2/conv3/Conv/WithoutBiases" -> "66 /layer1/layer1.2/conv3/Conv" [label="[1, 256, 56, 56]", style=solid]; +"63 /layer2/layer2.0/relu_1/Relu" -> "67 /layer2/layer2.0/relu_1/Relu/fq_output_0" [label="[1, 128, 28, 28]", style=solid]; +"64 /layer2/layer2.1/relu_2/Relu" -> "68 /layer2/layer2.1/relu_2/Relu/fq_output_0" [label="[1, 512, 28, 28]", style=solid]; +"65 /layer2/layer2.1/conv1/Conv" -> "69 /layer2/layer2.1/relu/Relu" [label="[1, 128, 28, 28]", style=solid]; +"66 /layer1/layer1.2/conv3/Conv" -> "29 /layer1/layer1.2/Add" [label="[1, 256, 56, 56]", style=solid]; +"67 /layer2/layer2.0/relu_1/Relu/fq_output_0" -> "70 /layer2/layer2.0/conv3/Conv/WithoutBiases" [label="[1, 128, 28, 28]", style=solid]; +"68 /layer2/layer2.1/relu_2/Relu/fq_output_0" -> "71 /layer2/layer2.2/Add" [label="[1, 512, 28, 28]", style=solid]; +"68 /layer2/layer2.1/relu_2/Relu/fq_output_0" -> "72 /layer2/layer2.2/conv1/Conv/WithoutBiases" [label="[1, 512, 28, 28]", style=solid]; +"69 /layer2/layer2.1/relu/Relu" -> "73 /layer2/layer2.1/relu/Relu/fq_output_0" [label="[1, 128, 28, 28]", style=solid]; +"70 /layer2/layer2.0/conv3/Conv/WithoutBiases" -> "74 /layer2/layer2.0/conv3/Conv" [label="[1, 512, 28, 28]", style=solid]; +"71 /layer2/layer2.2/Add" -> "75 /layer2/layer2.2/relu_2/Relu" [label="[1, 512, 28, 28]", style=solid]; +"72 /layer2/layer2.2/conv1/Conv/WithoutBiases" -> "76 /layer2/layer2.2/conv1/Conv" [label="[1, 128, 28, 28]", style=solid]; +"73 /layer2/layer2.1/relu/Relu/fq_output_0" -> "77 /layer2/layer2.1/conv2/Conv/WithoutBiases" [label="[1, 128, 28, 28]", style=solid]; +"74 /layer2/layer2.0/conv3/Conv" -> "78 /layer2/layer2.0/conv3/Conv/fq_output_0" [label="[1, 512, 28, 28]", style=solid]; +"75 /layer2/layer2.2/relu_2/Relu" -> "79 /layer2/layer2.2/relu_2/Relu/fq_output_0" [label="[1, 512, 28, 28]", style=solid]; +"76 /layer2/layer2.2/conv1/Conv" -> "80 /layer2/layer2.2/relu/Relu" [label="[1, 128, 28, 28]", style=solid]; +"77 /layer2/layer2.1/conv2/Conv/WithoutBiases" -> "81 /layer2/layer2.1/conv2/Conv" [label="[1, 128, 28, 28]", style=solid]; +"78 /layer2/layer2.0/conv3/Conv/fq_output_0" -> "49 /layer2/layer2.0/Add" [label="[1, 512, 28, 28]", style=solid]; +"79 /layer2/layer2.2/relu_2/Relu/fq_output_0" -> "82 /layer2/layer2.3/Add" [label="[1, 512, 28, 28]", style=solid]; +"79 /layer2/layer2.2/relu_2/Relu/fq_output_0" -> "83 /layer2/layer2.3/conv1/Conv/WithoutBiases" [label="[1, 512, 28, 28]", style=solid]; +"80 /layer2/layer2.2/relu/Relu" -> "84 /layer2/layer2.2/relu/Relu/fq_output_0" [label="[1, 128, 28, 28]", style=solid]; +"81 /layer2/layer2.1/conv2/Conv" -> "85 /layer2/layer2.1/relu_1/Relu" [label="[1, 128, 28, 28]", style=solid]; +"82 /layer2/layer2.3/Add" -> "86 /layer2/layer2.3/relu_2/Relu" [label="[1, 512, 28, 28]", style=solid]; +"83 /layer2/layer2.3/conv1/Conv/WithoutBiases" -> "87 /layer2/layer2.3/conv1/Conv" [label="[1, 128, 28, 28]", style=solid]; +"84 /layer2/layer2.2/relu/Relu/fq_output_0" -> "88 /layer2/layer2.2/conv2/Conv/WithoutBiases" [label="[1, 128, 28, 28]", style=solid]; +"85 /layer2/layer2.1/relu_1/Relu" -> "89 
/layer2/layer2.1/relu_1/Relu/fq_output_0" [label="[1, 128, 28, 28]", style=solid]; +"86 /layer2/layer2.3/relu_2/Relu" -> "90 /layer2/layer2.3/relu_2/Relu/fq_output_0" [label="[1, 512, 28, 28]", style=solid]; +"87 /layer2/layer2.3/conv1/Conv" -> "91 /layer2/layer2.3/relu/Relu" [label="[1, 128, 28, 28]", style=solid]; +"88 /layer2/layer2.2/conv2/Conv/WithoutBiases" -> "92 /layer2/layer2.2/conv2/Conv" [label="[1, 128, 28, 28]", style=solid]; +"89 /layer2/layer2.1/relu_1/Relu/fq_output_0" -> "93 /layer2/layer2.1/conv3/Conv/WithoutBiases" [label="[1, 128, 28, 28]", style=solid]; +"90 /layer2/layer2.3/relu_2/Relu/fq_output_0" -> "94 /layer3/layer3.0/conv1/Conv/WithoutBiases" [label="[1, 512, 28, 28]", style=solid]; +"90 /layer2/layer2.3/relu_2/Relu/fq_output_0" -> "95 /layer3/layer3.0/downsample/downsample.0/Conv/WithoutBiases" [label="[1, 512, 28, 28]", style=solid]; +"91 /layer2/layer2.3/relu/Relu" -> "96 /layer2/layer2.3/relu/Relu/fq_output_0" [label="[1, 128, 28, 28]", style=solid]; +"92 /layer2/layer2.2/conv2/Conv" -> "97 /layer2/layer2.2/relu_1/Relu" [label="[1, 128, 28, 28]", style=solid]; +"93 /layer2/layer2.1/conv3/Conv/WithoutBiases" -> "98 /layer2/layer2.1/conv3/Conv" [label="[1, 512, 28, 28]", style=solid]; +"94 /layer3/layer3.0/conv1/Conv/WithoutBiases" -> "99 /layer3/layer3.0/conv1/Conv" [label="[1, 256, 28, 28]", style=solid]; +"95 /layer3/layer3.0/downsample/downsample.0/Conv/WithoutBiases" -> "100 /layer3/layer3.0/downsample/downsample.0/Conv" [label="[1, 1024, 14, 14]", style=solid]; +"96 /layer2/layer2.3/relu/Relu/fq_output_0" -> "101 /layer2/layer2.3/conv2/Conv/WithoutBiases" [label="[1, 128, 28, 28]", style=solid]; +"97 /layer2/layer2.2/relu_1/Relu" -> "102 /layer2/layer2.2/relu_1/Relu/fq_output_0" [label="[1, 128, 28, 28]", style=solid]; +"98 /layer2/layer2.1/conv3/Conv" -> "60 /layer2/layer2.1/Add" [label="[1, 512, 28, 28]", style=solid]; +"99 /layer3/layer3.0/conv1/Conv" -> "103 /layer3/layer3.0/relu/Relu" [label="[1, 256, 28, 28]", style=solid]; +"100 /layer3/layer3.0/downsample/downsample.0/Conv" -> "104 /layer3/layer3.0/Add" [label="[1, 1024, 14, 14]", style=solid]; +"101 /layer2/layer2.3/conv2/Conv/WithoutBiases" -> "105 /layer2/layer2.3/conv2/Conv" [label="[1, 128, 28, 28]", style=solid]; +"102 /layer2/layer2.2/relu_1/Relu/fq_output_0" -> "106 /layer2/layer2.2/conv3/Conv/WithoutBiases" [label="[1, 128, 28, 28]", style=solid]; +"103 /layer3/layer3.0/relu/Relu" -> "107 /layer3/layer3.0/relu/Relu/fq_output_0" [label="[1, 256, 28, 28]", style=solid]; +"104 /layer3/layer3.0/Add" -> "108 /layer3/layer3.0/relu_2/Relu" [label="[1, 1024, 14, 14]", style=solid]; +"105 /layer2/layer2.3/conv2/Conv" -> "109 /layer2/layer2.3/relu_1/Relu" [label="[1, 128, 28, 28]", style=solid]; +"106 /layer2/layer2.2/conv3/Conv/WithoutBiases" -> "110 /layer2/layer2.2/conv3/Conv" [label="[1, 512, 28, 28]", style=solid]; +"107 /layer3/layer3.0/relu/Relu/fq_output_0" -> "111 /layer3/layer3.0/conv2/Conv/WithoutBiases" [label="[1, 256, 28, 28]", style=solid]; +"108 /layer3/layer3.0/relu_2/Relu" -> "112 /layer3/layer3.0/relu_2/Relu/fq_output_0" [label="[1, 1024, 14, 14]", style=solid]; +"109 /layer2/layer2.3/relu_1/Relu" -> "113 /layer2/layer2.3/relu_1/Relu/fq_output_0" [label="[1, 128, 28, 28]", style=solid]; +"110 /layer2/layer2.2/conv3/Conv" -> "71 /layer2/layer2.2/Add" [label="[1, 512, 28, 28]", style=solid]; +"111 /layer3/layer3.0/conv2/Conv/WithoutBiases" -> "114 /layer3/layer3.0/conv2/Conv" [label="[1, 256, 14, 14]", style=solid]; +"112 /layer3/layer3.0/relu_2/Relu/fq_output_0" -> "115 
/layer3/layer3.1/Add" [label="[1, 1024, 14, 14]", style=solid]; +"112 /layer3/layer3.0/relu_2/Relu/fq_output_0" -> "116 /layer3/layer3.1/conv1/Conv/WithoutBiases" [label="[1, 1024, 14, 14]", style=solid]; +"113 /layer2/layer2.3/relu_1/Relu/fq_output_0" -> "117 /layer2/layer2.3/conv3/Conv/WithoutBiases" [label="[1, 128, 28, 28]", style=solid]; +"114 /layer3/layer3.0/conv2/Conv" -> "118 /layer3/layer3.0/relu_1/Relu" [label="[1, 256, 14, 14]", style=solid]; +"115 /layer3/layer3.1/Add" -> "119 /layer3/layer3.1/relu_2/Relu" [label="[1, 1024, 14, 14]", style=solid]; +"116 /layer3/layer3.1/conv1/Conv/WithoutBiases" -> "120 /layer3/layer3.1/conv1/Conv" [label="[1, 256, 14, 14]", style=solid]; +"117 /layer2/layer2.3/conv3/Conv/WithoutBiases" -> "121 /layer2/layer2.3/conv3/Conv" [label="[1, 512, 28, 28]", style=solid]; +"118 /layer3/layer3.0/relu_1/Relu" -> "122 /layer3/layer3.0/relu_1/Relu/fq_output_0" [label="[1, 256, 14, 14]", style=solid]; +"119 /layer3/layer3.1/relu_2/Relu" -> "123 /layer3/layer3.1/relu_2/Relu/fq_output_0" [label="[1, 1024, 14, 14]", style=solid]; +"120 /layer3/layer3.1/conv1/Conv" -> "124 /layer3/layer3.1/relu/Relu" [label="[1, 256, 14, 14]", style=solid]; +"121 /layer2/layer2.3/conv3/Conv" -> "82 /layer2/layer2.3/Add" [label="[1, 512, 28, 28]", style=solid]; +"122 /layer3/layer3.0/relu_1/Relu/fq_output_0" -> "125 /layer3/layer3.0/conv3/Conv/WithoutBiases" [label="[1, 256, 14, 14]", style=solid]; +"123 /layer3/layer3.1/relu_2/Relu/fq_output_0" -> "126 /layer3/layer3.2/Add" [label="[1, 1024, 14, 14]", style=solid]; +"123 /layer3/layer3.1/relu_2/Relu/fq_output_0" -> "127 /layer3/layer3.2/conv1/Conv/WithoutBiases" [label="[1, 1024, 14, 14]", style=solid]; +"124 /layer3/layer3.1/relu/Relu" -> "128 /layer3/layer3.1/relu/Relu/fq_output_0" [label="[1, 256, 14, 14]", style=solid]; +"125 /layer3/layer3.0/conv3/Conv/WithoutBiases" -> "129 /layer3/layer3.0/conv3/Conv" [label="[1, 1024, 14, 14]", style=solid]; +"126 /layer3/layer3.2/Add" -> "130 /layer3/layer3.2/relu_2/Relu" [label="[1, 1024, 14, 14]", style=solid]; +"127 /layer3/layer3.2/conv1/Conv/WithoutBiases" -> "131 /layer3/layer3.2/conv1/Conv" [label="[1, 256, 14, 14]", style=solid]; +"128 /layer3/layer3.1/relu/Relu/fq_output_0" -> "132 /layer3/layer3.1/conv2/Conv/WithoutBiases" [label="[1, 256, 14, 14]", style=solid]; +"129 /layer3/layer3.0/conv3/Conv" -> "133 /layer3/layer3.0/conv3/Conv/fq_output_0" [label="[1, 1024, 14, 14]", style=solid]; +"130 /layer3/layer3.2/relu_2/Relu" -> "134 /layer3/layer3.2/relu_2/Relu/fq_output_0" [label="[1, 1024, 14, 14]", style=solid]; +"131 /layer3/layer3.2/conv1/Conv" -> "135 /layer3/layer3.2/relu/Relu" [label="[1, 256, 14, 14]", style=solid]; +"132 /layer3/layer3.1/conv2/Conv/WithoutBiases" -> "136 /layer3/layer3.1/conv2/Conv" [label="[1, 256, 14, 14]", style=solid]; +"133 /layer3/layer3.0/conv3/Conv/fq_output_0" -> "104 /layer3/layer3.0/Add" [label="[1, 1024, 14, 14]", style=solid]; +"134 /layer3/layer3.2/relu_2/Relu/fq_output_0" -> "137 /layer3/layer3.3/Add" [label="[1, 1024, 14, 14]", style=solid]; +"134 /layer3/layer3.2/relu_2/Relu/fq_output_0" -> "138 /layer3/layer3.3/conv1/Conv/WithoutBiases" [label="[1, 1024, 14, 14]", style=solid]; +"135 /layer3/layer3.2/relu/Relu" -> "139 /layer3/layer3.2/relu/Relu/fq_output_0" [label="[1, 256, 14, 14]", style=solid]; +"136 /layer3/layer3.1/conv2/Conv" -> "140 /layer3/layer3.1/relu_1/Relu" [label="[1, 256, 14, 14]", style=solid]; +"137 /layer3/layer3.3/Add" -> "141 /layer3/layer3.3/relu_2/Relu" [label="[1, 1024, 14, 14]", style=solid]; +"138 
/layer3/layer3.3/conv1/Conv/WithoutBiases" -> "142 /layer3/layer3.3/conv1/Conv" [label="[1, 256, 14, 14]", style=solid]; +"139 /layer3/layer3.2/relu/Relu/fq_output_0" -> "143 /layer3/layer3.2/conv2/Conv/WithoutBiases" [label="[1, 256, 14, 14]", style=solid]; +"140 /layer3/layer3.1/relu_1/Relu" -> "144 /layer3/layer3.1/relu_1/Relu/fq_output_0" [label="[1, 256, 14, 14]", style=solid]; +"141 /layer3/layer3.3/relu_2/Relu" -> "145 /layer3/layer3.3/relu_2/Relu/fq_output_0" [label="[1, 1024, 14, 14]", style=solid]; +"142 /layer3/layer3.3/conv1/Conv" -> "146 /layer3/layer3.3/relu/Relu" [label="[1, 256, 14, 14]", style=solid]; +"143 /layer3/layer3.2/conv2/Conv/WithoutBiases" -> "147 /layer3/layer3.2/conv2/Conv" [label="[1, 256, 14, 14]", style=solid]; +"144 /layer3/layer3.1/relu_1/Relu/fq_output_0" -> "148 /layer3/layer3.1/conv3/Conv/WithoutBiases" [label="[1, 256, 14, 14]", style=solid]; +"145 /layer3/layer3.3/relu_2/Relu/fq_output_0" -> "149 /layer3/layer3.4/Add" [label="[1, 1024, 14, 14]", style=solid]; +"145 /layer3/layer3.3/relu_2/Relu/fq_output_0" -> "150 /layer3/layer3.4/conv1/Conv/WithoutBiases" [label="[1, 1024, 14, 14]", style=solid]; +"146 /layer3/layer3.3/relu/Relu" -> "151 /layer3/layer3.3/relu/Relu/fq_output_0" [label="[1, 256, 14, 14]", style=solid]; +"147 /layer3/layer3.2/conv2/Conv" -> "152 /layer3/layer3.2/relu_1/Relu" [label="[1, 256, 14, 14]", style=solid]; +"148 /layer3/layer3.1/conv3/Conv/WithoutBiases" -> "153 /layer3/layer3.1/conv3/Conv" [label="[1, 1024, 14, 14]", style=solid]; +"149 /layer3/layer3.4/Add" -> "154 /layer3/layer3.4/relu_2/Relu" [label="[1, 1024, 14, 14]", style=solid]; +"150 /layer3/layer3.4/conv1/Conv/WithoutBiases" -> "155 /layer3/layer3.4/conv1/Conv" [label="[1, 256, 14, 14]", style=solid]; +"151 /layer3/layer3.3/relu/Relu/fq_output_0" -> "156 /layer3/layer3.3/conv2/Conv/WithoutBiases" [label="[1, 256, 14, 14]", style=solid]; +"152 /layer3/layer3.2/relu_1/Relu" -> "157 /layer3/layer3.2/relu_1/Relu/fq_output_0" [label="[1, 256, 14, 14]", style=solid]; +"153 /layer3/layer3.1/conv3/Conv" -> "115 /layer3/layer3.1/Add" [label="[1, 1024, 14, 14]", style=solid]; +"154 /layer3/layer3.4/relu_2/Relu" -> "158 /layer3/layer3.4/relu_2/Relu/fq_output_0" [label="[1, 1024, 14, 14]", style=solid]; +"155 /layer3/layer3.4/conv1/Conv" -> "159 /layer3/layer3.4/relu/Relu" [label="[1, 256, 14, 14]", style=solid]; +"156 /layer3/layer3.3/conv2/Conv/WithoutBiases" -> "160 /layer3/layer3.3/conv2/Conv" [label="[1, 256, 14, 14]", style=solid]; +"157 /layer3/layer3.2/relu_1/Relu/fq_output_0" -> "161 /layer3/layer3.2/conv3/Conv/WithoutBiases" [label="[1, 256, 14, 14]", style=solid]; +"158 /layer3/layer3.4/relu_2/Relu/fq_output_0" -> "162 /layer3/layer3.5/Add" [label="[1, 1024, 14, 14]", style=solid]; +"158 /layer3/layer3.4/relu_2/Relu/fq_output_0" -> "163 /layer3/layer3.5/conv1/Conv/WithoutBiases" [label="[1, 1024, 14, 14]", style=solid]; +"159 /layer3/layer3.4/relu/Relu" -> "164 /layer3/layer3.4/relu/Relu/fq_output_0" [label="[1, 256, 14, 14]", style=solid]; +"160 /layer3/layer3.3/conv2/Conv" -> "165 /layer3/layer3.3/relu_1/Relu" [label="[1, 256, 14, 14]", style=solid]; +"161 /layer3/layer3.2/conv3/Conv/WithoutBiases" -> "166 /layer3/layer3.2/conv3/Conv" [label="[1, 1024, 14, 14]", style=solid]; +"162 /layer3/layer3.5/Add" -> "167 /layer3/layer3.5/relu_2/Relu" [label="[1, 1024, 14, 14]", style=solid]; +"163 /layer3/layer3.5/conv1/Conv/WithoutBiases" -> "168 /layer3/layer3.5/conv1/Conv" [label="[1, 256, 14, 14]", style=solid]; +"164 /layer3/layer3.4/relu/Relu/fq_output_0" -> "169 
/layer3/layer3.4/conv2/Conv/WithoutBiases" [label="[1, 256, 14, 14]", style=solid]; +"165 /layer3/layer3.3/relu_1/Relu" -> "170 /layer3/layer3.3/relu_1/Relu/fq_output_0" [label="[1, 256, 14, 14]", style=solid]; +"166 /layer3/layer3.2/conv3/Conv" -> "126 /layer3/layer3.2/Add" [label="[1, 1024, 14, 14]", style=solid]; +"167 /layer3/layer3.5/relu_2/Relu" -> "171 /layer3/layer3.5/relu_2/Relu/fq_output_0" [label="[1, 1024, 14, 14]", style=solid]; +"168 /layer3/layer3.5/conv1/Conv" -> "172 /layer3/layer3.5/relu/Relu" [label="[1, 256, 14, 14]", style=solid]; +"169 /layer3/layer3.4/conv2/Conv/WithoutBiases" -> "173 /layer3/layer3.4/conv2/Conv" [label="[1, 256, 14, 14]", style=solid]; +"170 /layer3/layer3.3/relu_1/Relu/fq_output_0" -> "174 /layer3/layer3.3/conv3/Conv/WithoutBiases" [label="[1, 256, 14, 14]", style=solid]; +"171 /layer3/layer3.5/relu_2/Relu/fq_output_0" -> "175 /layer4/layer4.0/conv1/Conv/WithoutBiases" [label="[1, 1024, 14, 14]", style=solid]; +"171 /layer3/layer3.5/relu_2/Relu/fq_output_0" -> "176 /layer4/layer4.0/downsample/downsample.0/Conv/WithoutBiases" [label="[1, 1024, 14, 14]", style=solid]; +"172 /layer3/layer3.5/relu/Relu" -> "177 /layer3/layer3.5/relu/Relu/fq_output_0" [label="[1, 256, 14, 14]", style=solid]; +"173 /layer3/layer3.4/conv2/Conv" -> "178 /layer3/layer3.4/relu_1/Relu" [label="[1, 256, 14, 14]", style=solid]; +"174 /layer3/layer3.3/conv3/Conv/WithoutBiases" -> "179 /layer3/layer3.3/conv3/Conv" [label="[1, 1024, 14, 14]", style=solid]; +"175 /layer4/layer4.0/conv1/Conv/WithoutBiases" -> "180 /layer4/layer4.0/conv1/Conv" [label="[1, 512, 14, 14]", style=solid]; +"176 /layer4/layer4.0/downsample/downsample.0/Conv/WithoutBiases" -> "181 /layer4/layer4.0/downsample/downsample.0/Conv" [label="[1, 2048, 7, 7]", style=solid]; +"177 /layer3/layer3.5/relu/Relu/fq_output_0" -> "182 /layer3/layer3.5/conv2/Conv/WithoutBiases" [label="[1, 256, 14, 14]", style=solid]; +"178 /layer3/layer3.4/relu_1/Relu" -> "183 /layer3/layer3.4/relu_1/Relu/fq_output_0" [label="[1, 256, 14, 14]", style=solid]; +"179 /layer3/layer3.3/conv3/Conv" -> "137 /layer3/layer3.3/Add" [label="[1, 1024, 14, 14]", style=solid]; +"180 /layer4/layer4.0/conv1/Conv" -> "184 /layer4/layer4.0/relu/Relu" [label="[1, 512, 14, 14]", style=solid]; +"181 /layer4/layer4.0/downsample/downsample.0/Conv" -> "185 /layer4/layer4.0/Add" [label="[1, 2048, 7, 7]", style=solid]; +"182 /layer3/layer3.5/conv2/Conv/WithoutBiases" -> "186 /layer3/layer3.5/conv2/Conv" [label="[1, 256, 14, 14]", style=solid]; +"183 /layer3/layer3.4/relu_1/Relu/fq_output_0" -> "187 /layer3/layer3.4/conv3/Conv/WithoutBiases" [label="[1, 256, 14, 14]", style=solid]; +"184 /layer4/layer4.0/relu/Relu" -> "188 /layer4/layer4.0/relu/Relu/fq_output_0" [label="[1, 512, 14, 14]", style=solid]; +"185 /layer4/layer4.0/Add" -> "189 /layer4/layer4.0/relu_2/Relu" [label="[1, 2048, 7, 7]", style=solid]; +"186 /layer3/layer3.5/conv2/Conv" -> "190 /layer3/layer3.5/relu_1/Relu" [label="[1, 256, 14, 14]", style=solid]; +"187 /layer3/layer3.4/conv3/Conv/WithoutBiases" -> "191 /layer3/layer3.4/conv3/Conv" [label="[1, 1024, 14, 14]", style=solid]; +"188 /layer4/layer4.0/relu/Relu/fq_output_0" -> "192 /layer4/layer4.0/conv2/Conv/WithoutBiases" [label="[1, 512, 14, 14]", style=solid]; +"189 /layer4/layer4.0/relu_2/Relu" -> "193 /layer4/layer4.0/relu_2/Relu/fq_output_0" [label="[1, 2048, 7, 7]", style=solid]; +"190 /layer3/layer3.5/relu_1/Relu" -> "194 /layer3/layer3.5/relu_1/Relu/fq_output_0" [label="[1, 256, 14, 14]", style=solid]; +"191 /layer3/layer3.4/conv3/Conv" -> 
"149 /layer3/layer3.4/Add" [label="[1, 1024, 14, 14]", style=solid]; +"192 /layer4/layer4.0/conv2/Conv/WithoutBiases" -> "195 /layer4/layer4.0/conv2/Conv" [label="[1, 512, 7, 7]", style=solid]; +"193 /layer4/layer4.0/relu_2/Relu/fq_output_0" -> "196 /layer4/layer4.1/Add" [label="[1, 2048, 7, 7]", style=solid]; +"193 /layer4/layer4.0/relu_2/Relu/fq_output_0" -> "197 /layer4/layer4.1/conv1/Conv/WithoutBiases" [label="[1, 2048, 7, 7]", style=solid]; +"194 /layer3/layer3.5/relu_1/Relu/fq_output_0" -> "198 /layer3/layer3.5/conv3/Conv/WithoutBiases" [label="[1, 256, 14, 14]", style=solid]; +"195 /layer4/layer4.0/conv2/Conv" -> "199 /layer4/layer4.0/relu_1/Relu" [label="[1, 512, 7, 7]", style=solid]; +"196 /layer4/layer4.1/Add" -> "200 /layer4/layer4.1/relu_2/Relu" [label="[1, 2048, 7, 7]", style=solid]; +"197 /layer4/layer4.1/conv1/Conv/WithoutBiases" -> "201 /layer4/layer4.1/conv1/Conv" [label="[1, 512, 7, 7]", style=solid]; +"198 /layer3/layer3.5/conv3/Conv/WithoutBiases" -> "202 /layer3/layer3.5/conv3/Conv" [label="[1, 1024, 14, 14]", style=solid]; +"199 /layer4/layer4.0/relu_1/Relu" -> "203 /layer4/layer4.0/relu_1/Relu/fq_output_0" [label="[1, 512, 7, 7]", style=solid]; +"200 /layer4/layer4.1/relu_2/Relu" -> "204 /layer4/layer4.1/relu_2/Relu/fq_output_0" [label="[1, 2048, 7, 7]", style=solid]; +"201 /layer4/layer4.1/conv1/Conv" -> "205 /layer4/layer4.1/relu/Relu" [label="[1, 512, 7, 7]", style=solid]; +"202 /layer3/layer3.5/conv3/Conv" -> "162 /layer3/layer3.5/Add" [label="[1, 1024, 14, 14]", style=solid]; +"203 /layer4/layer4.0/relu_1/Relu/fq_output_0" -> "206 /layer4/layer4.0/conv3/Conv/WithoutBiases" [label="[1, 512, 7, 7]", style=solid]; +"204 /layer4/layer4.1/relu_2/Relu/fq_output_0" -> "207 /layer4/layer4.2/Add" [label="[1, 2048, 7, 7]", style=solid]; +"204 /layer4/layer4.1/relu_2/Relu/fq_output_0" -> "208 /layer4/layer4.2/conv1/Conv/WithoutBiases" [label="[1, 2048, 7, 7]", style=solid]; +"205 /layer4/layer4.1/relu/Relu" -> "209 /layer4/layer4.1/relu/Relu/fq_output_0" [label="[1, 512, 7, 7]", style=solid]; +"206 /layer4/layer4.0/conv3/Conv/WithoutBiases" -> "210 /layer4/layer4.0/conv3/Conv" [label="[1, 2048, 7, 7]", style=solid]; +"207 /layer4/layer4.2/Add" -> "211 /layer4/layer4.2/relu_2/Relu" [label="[1, 2048, 7, 7]", style=solid]; +"208 /layer4/layer4.2/conv1/Conv/WithoutBiases" -> "212 /layer4/layer4.2/conv1/Conv" [label="[1, 512, 7, 7]", style=solid]; +"209 /layer4/layer4.1/relu/Relu/fq_output_0" -> "213 /layer4/layer4.1/conv2/Conv/WithoutBiases" [label="[1, 512, 7, 7]", style=solid]; +"210 /layer4/layer4.0/conv3/Conv" -> "214 /layer4/layer4.0/conv3/Conv/fq_output_0" [label="[1, 2048, 7, 7]", style=solid]; +"211 /layer4/layer4.2/relu_2/Relu" -> "215 /layer4/layer4.2/relu_2/Relu/fq_output_0" [label="[1, 2048, 7, 7]", style=solid]; +"212 /layer4/layer4.2/conv1/Conv" -> "216 /layer4/layer4.2/relu/Relu" [label="[1, 512, 7, 7]", style=solid]; +"213 /layer4/layer4.1/conv2/Conv/WithoutBiases" -> "217 /layer4/layer4.1/conv2/Conv" [label="[1, 512, 7, 7]", style=solid]; +"214 /layer4/layer4.0/conv3/Conv/fq_output_0" -> "185 /layer4/layer4.0/Add" [label="[1, 2048, 7, 7]", style=solid]; +"215 /layer4/layer4.2/relu_2/Relu/fq_output_0" -> "218 /avgpool/GlobalAveragePool" [label="[1, 2048, 7, 7]", style=solid]; +"216 /layer4/layer4.2/relu/Relu" -> "219 /layer4/layer4.2/relu/Relu/fq_output_0" [label="[1, 512, 7, 7]", style=solid]; +"217 /layer4/layer4.1/conv2/Conv" -> "220 /layer4/layer4.1/relu_1/Relu" [label="[1, 512, 7, 7]", style=solid]; +"218 /avgpool/GlobalAveragePool" -> "221 
/avgpool/GlobalAveragePool/fq_output_0" [label="[1, 2048, 1, 1]", style=solid]; +"219 /layer4/layer4.2/relu/Relu/fq_output_0" -> "222 /layer4/layer4.2/conv2/Conv/WithoutBiases" [label="[1, 512, 7, 7]", style=solid]; +"220 /layer4/layer4.1/relu_1/Relu" -> "223 /layer4/layer4.1/relu_1/Relu/fq_output_0" [label="[1, 512, 7, 7]", style=solid]; +"221 /avgpool/GlobalAveragePool/fq_output_0" -> "224 /Flatten" [label="[1, 2048, 1, 1]", style=solid]; +"222 /layer4/layer4.2/conv2/Conv/WithoutBiases" -> "225 /layer4/layer4.2/conv2/Conv" [label="[1, 512, 7, 7]", style=solid]; +"223 /layer4/layer4.1/relu_1/Relu/fq_output_0" -> "226 /layer4/layer4.1/conv3/Conv/WithoutBiases" [label="[1, 512, 7, 7]", style=solid]; +"224 /Flatten" -> "227 /fc/Gemm/WithoutBiases" [label="[1, 2048]", style=solid]; +"225 /layer4/layer4.2/conv2/Conv" -> "228 /layer4/layer4.2/relu_1/Relu" [label="[1, 512, 7, 7]", style=solid]; +"226 /layer4/layer4.1/conv3/Conv/WithoutBiases" -> "229 /layer4/layer4.1/conv3/Conv" [label="[1, 2048, 7, 7]", style=solid]; +"227 /fc/Gemm/WithoutBiases" -> "230 prob" [label="[1, 1000]", style=solid]; +"228 /layer4/layer4.2/relu_1/Relu" -> "231 /layer4/layer4.2/relu_1/Relu/fq_output_0" [label="[1, 512, 7, 7]", style=solid]; +"229 /layer4/layer4.1/conv3/Conv" -> "196 /layer4/layer4.1/Add" [label="[1, 2048, 7, 7]", style=solid]; +"230 prob" -> "232 prob/sink_port_0" [label="[1, 1000]", style=solid]; +"231 /layer4/layer4.2/relu_1/Relu/fq_output_0" -> "233 /layer4/layer4.2/conv3/Conv/WithoutBiases" [label="[1, 512, 7, 7]", style=solid]; +"233 /layer4/layer4.2/conv3/Conv/WithoutBiases" -> "234 /layer4/layer4.2/conv3/Conv" [label="[1, 2048, 7, 7]", style=solid]; +"234 /layer4/layer4.2/conv3/Conv" -> "207 /layer4/layer4.2/Add" [label="[1, 2048, 7, 7]", style=solid]; +"235 Constant_2819" -> "230 prob" [label="[1, 1000]", style=solid]; +"236 /fc/Gemm/WithoutBiases/fq_weights_1" -> "227 /fc/Gemm/WithoutBiases" [label="[1000, 2048]", style=solid]; +"237 Constant_62386" -> "236 /fc/Gemm/WithoutBiases/fq_weights_1" [label="[1000, 1]", style=solid]; +"238 Constant_62385" -> "236 /fc/Gemm/WithoutBiases/fq_weights_1" [label="[1000, 1]", style=solid]; +"239 Constant_62384" -> "236 /fc/Gemm/WithoutBiases/fq_weights_1" [label="[1000, 1]", style=solid]; +"240 Constant_62383" -> "236 /fc/Gemm/WithoutBiases/fq_weights_1" [label="[1000, 1]", style=solid]; +"241 fc.weight" -> "236 /fc/Gemm/WithoutBiases/fq_weights_1" [label="[1000, 2048]", style=solid]; +"242 Constant_981" -> "224 /Flatten" [label="[2]", style=dashed]; +"243 Constant_62381" -> "221 /avgpool/GlobalAveragePool/fq_output_0" [label="[]", style=solid]; +"244 Constant_62380" -> "221 /avgpool/GlobalAveragePool/fq_output_0" [label="[]", style=solid]; +"245 Constant_62379" -> "221 /avgpool/GlobalAveragePool/fq_output_0" [label="[]", style=solid]; +"246 Constant_62378" -> "221 /avgpool/GlobalAveragePool/fq_output_0" [label="[]", style=solid]; +"247 Range_977" -> "218 /avgpool/GlobalAveragePool" [label="[2]", style=dashed]; +"248 Constant_62376" -> "215 /layer4/layer4.2/relu_2/Relu/fq_output_0" [label="[]", style=solid]; +"249 Constant_62375" -> "215 /layer4/layer4.2/relu_2/Relu/fq_output_0" [label="[]", style=solid]; +"250 Constant_62374" -> "215 /layer4/layer4.2/relu_2/Relu/fq_output_0" [label="[]", style=solid]; +"251 Constant_62373" -> "215 /layer4/layer4.2/relu_2/Relu/fq_output_0" [label="[]", style=solid]; +"252 Constant_62341" -> "204 /layer4/layer4.1/relu_2/Relu/fq_output_0" [label="[]", style=solid]; +"253 Constant_62340" -> "204 
/layer4/layer4.1/relu_2/Relu/fq_output_0" [label="[]", style=solid]; +"254 Constant_62339" -> "204 /layer4/layer4.1/relu_2/Relu/fq_output_0" [label="[]", style=solid]; +"255 Constant_62338" -> "204 /layer4/layer4.1/relu_2/Relu/fq_output_0" [label="[]", style=solid]; +"256 Constant_62306" -> "193 /layer4/layer4.0/relu_2/Relu/fq_output_0" [label="[]", style=solid]; +"257 Constant_62305" -> "193 /layer4/layer4.0/relu_2/Relu/fq_output_0" [label="[]", style=solid]; +"258 Constant_62304" -> "193 /layer4/layer4.0/relu_2/Relu/fq_output_0" [label="[]", style=solid]; +"259 Constant_62303" -> "193 /layer4/layer4.0/relu_2/Relu/fq_output_0" [label="[]", style=solid]; +"260 Reshape_867" -> "181 /layer4/layer4.0/downsample/downsample.0/Conv" [label="[1, 2048, 1, 1]", style=solid]; +"261 /layer4/layer4.0/downsample/downsample.0/Conv/WithoutBiases/fq_weights_1" -> "176 /layer4/layer4.0/downsample/downsample.0/Conv/WithoutBiases" [label="[2048, 1024, 1, 1]", style=solid]; +"262 Constant_62266" -> "261 /layer4/layer4.0/downsample/downsample.0/Conv/WithoutBiases/fq_weights_1" [label="[2048, 1, 1, 1]", style=solid]; +"263 Constant_62265" -> "261 /layer4/layer4.0/downsample/downsample.0/Conv/WithoutBiases/fq_weights_1" [label="[2048, 1, 1, 1]", style=solid]; +"264 Constant_62264" -> "261 /layer4/layer4.0/downsample/downsample.0/Conv/WithoutBiases/fq_weights_1" [label="[2048, 1, 1, 1]", style=solid]; +"265 Constant_62263" -> "261 /layer4/layer4.0/downsample/downsample.0/Conv/WithoutBiases/fq_weights_1" [label="[2048, 1, 1, 1]", style=solid]; +"266 onnx^^Conv_635" -> "261 /layer4/layer4.0/downsample/downsample.0/Conv/WithoutBiases/fq_weights_1" [label="[2048, 1024, 1, 1]", style=solid]; +"267 Constant_62261" -> "171 /layer3/layer3.5/relu_2/Relu/fq_output_0" [label="[]", style=solid]; +"268 Constant_62260" -> "171 /layer3/layer3.5/relu_2/Relu/fq_output_0" [label="[]", style=solid]; +"269 Constant_62259" -> "171 /layer3/layer3.5/relu_2/Relu/fq_output_0" [label="[]", style=solid]; +"270 Constant_62258" -> "171 /layer3/layer3.5/relu_2/Relu/fq_output_0" [label="[]", style=solid]; +"271 Constant_62226" -> "158 /layer3/layer3.4/relu_2/Relu/fq_output_0" [label="[]", style=solid]; +"272 Constant_62225" -> "158 /layer3/layer3.4/relu_2/Relu/fq_output_0" [label="[]", style=solid]; +"273 Constant_62224" -> "158 /layer3/layer3.4/relu_2/Relu/fq_output_0" [label="[]", style=solid]; +"274 Constant_62223" -> "158 /layer3/layer3.4/relu_2/Relu/fq_output_0" [label="[]", style=solid]; +"275 Constant_62191" -> "145 /layer3/layer3.3/relu_2/Relu/fq_output_0" [label="[]", style=solid]; +"276 Constant_62190" -> "145 /layer3/layer3.3/relu_2/Relu/fq_output_0" [label="[]", style=solid]; +"277 Constant_62189" -> "145 /layer3/layer3.3/relu_2/Relu/fq_output_0" [label="[]", style=solid]; +"278 Constant_62188" -> "145 /layer3/layer3.3/relu_2/Relu/fq_output_0" [label="[]", style=solid]; +"279 Constant_62156" -> "134 /layer3/layer3.2/relu_2/Relu/fq_output_0" [label="[]", style=solid]; +"280 Constant_62155" -> "134 /layer3/layer3.2/relu_2/Relu/fq_output_0" [label="[]", style=solid]; +"281 Constant_62154" -> "134 /layer3/layer3.2/relu_2/Relu/fq_output_0" [label="[]", style=solid]; +"282 Constant_62153" -> "134 /layer3/layer3.2/relu_2/Relu/fq_output_0" [label="[]", style=solid]; +"283 Constant_62121" -> "123 /layer3/layer3.1/relu_2/Relu/fq_output_0" [label="[]", style=solid]; +"284 Constant_62120" -> "123 /layer3/layer3.1/relu_2/Relu/fq_output_0" [label="[]", style=solid]; +"285 Constant_62119" -> "123 /layer3/layer3.1/relu_2/Relu/fq_output_0" 
[label="[]", style=solid]; +"286 Constant_62118" -> "123 /layer3/layer3.1/relu_2/Relu/fq_output_0" [label="[]", style=solid]; +"287 Constant_62086" -> "112 /layer3/layer3.0/relu_2/Relu/fq_output_0" [label="[]", style=solid]; +"288 Constant_62085" -> "112 /layer3/layer3.0/relu_2/Relu/fq_output_0" [label="[]", style=solid]; +"289 Constant_62084" -> "112 /layer3/layer3.0/relu_2/Relu/fq_output_0" [label="[]", style=solid]; +"290 Constant_62083" -> "112 /layer3/layer3.0/relu_2/Relu/fq_output_0" [label="[]", style=solid]; +"291 Reshape_558" -> "100 /layer3/layer3.0/downsample/downsample.0/Conv" [label="[1, 1024, 1, 1]", style=solid]; +"292 /layer3/layer3.0/downsample/downsample.0/Conv/WithoutBiases/fq_weights_1" -> "95 /layer3/layer3.0/downsample/downsample.0/Conv/WithoutBiases" [label="[1024, 512, 1, 1]", style=solid]; +"293 Constant_62046" -> "292 /layer3/layer3.0/downsample/downsample.0/Conv/WithoutBiases/fq_weights_1" [label="[1024, 1, 1, 1]", style=solid]; +"294 Constant_62045" -> "292 /layer3/layer3.0/downsample/downsample.0/Conv/WithoutBiases/fq_weights_1" [label="[1024, 1, 1, 1]", style=solid]; +"295 Constant_62044" -> "292 /layer3/layer3.0/downsample/downsample.0/Conv/WithoutBiases/fq_weights_1" [label="[1024, 1, 1, 1]", style=solid]; +"296 Constant_62043" -> "292 /layer3/layer3.0/downsample/downsample.0/Conv/WithoutBiases/fq_weights_1" [label="[1024, 1, 1, 1]", style=solid]; +"297 onnx^^Conv_578" -> "292 /layer3/layer3.0/downsample/downsample.0/Conv/WithoutBiases/fq_weights_1" [label="[1024, 512, 1, 1]", style=solid]; +"298 Constant_62041" -> "90 /layer2/layer2.3/relu_2/Relu/fq_output_0" [label="[]", style=solid]; +"299 Constant_62040" -> "90 /layer2/layer2.3/relu_2/Relu/fq_output_0" [label="[]", style=solid]; +"300 Constant_62039" -> "90 /layer2/layer2.3/relu_2/Relu/fq_output_0" [label="[]", style=solid]; +"301 Constant_62038" -> "90 /layer2/layer2.3/relu_2/Relu/fq_output_0" [label="[]", style=solid]; +"302 Constant_62006" -> "79 /layer2/layer2.2/relu_2/Relu/fq_output_0" [label="[]", style=solid]; +"303 Constant_62005" -> "79 /layer2/layer2.2/relu_2/Relu/fq_output_0" [label="[]", style=solid]; +"304 Constant_62004" -> "79 /layer2/layer2.2/relu_2/Relu/fq_output_0" [label="[]", style=solid]; +"305 Constant_62003" -> "79 /layer2/layer2.2/relu_2/Relu/fq_output_0" [label="[]", style=solid]; +"306 Constant_61971" -> "68 /layer2/layer2.1/relu_2/Relu/fq_output_0" [label="[]", style=solid]; +"307 Constant_61970" -> "68 /layer2/layer2.1/relu_2/Relu/fq_output_0" [label="[]", style=solid]; +"308 Constant_61969" -> "68 /layer2/layer2.1/relu_2/Relu/fq_output_0" [label="[]", style=solid]; +"309 Constant_61968" -> "68 /layer2/layer2.1/relu_2/Relu/fq_output_0" [label="[]", style=solid]; +"310 Constant_61936" -> "57 /layer2/layer2.0/relu_2/Relu/fq_output_0" [label="[]", style=solid]; +"311 Constant_61935" -> "57 /layer2/layer2.0/relu_2/Relu/fq_output_0" [label="[]", style=solid]; +"312 Constant_61934" -> "57 /layer2/layer2.0/relu_2/Relu/fq_output_0" [label="[]", style=solid]; +"313 Constant_61933" -> "57 /layer2/layer2.0/relu_2/Relu/fq_output_0" [label="[]", style=solid]; +"314 Reshape_347" -> "45 /layer2/layer2.0/downsample/downsample.0/Conv" [label="[1, 512, 1, 1]", style=solid]; +"315 /layer2/layer2.0/downsample/downsample.0/Conv/WithoutBiases/fq_weights_1" -> "41 /layer2/layer2.0/downsample/downsample.0/Conv/WithoutBiases" [label="[512, 256, 1, 1]", style=solid]; +"316 Constant_61896" -> "315 /layer2/layer2.0/downsample/downsample.0/Conv/WithoutBiases/fq_weights_1" [label="[512, 1, 1, 1]", 
style=solid]; +"317 Constant_61895" -> "315 /layer2/layer2.0/downsample/downsample.0/Conv/WithoutBiases/fq_weights_1" [label="[512, 1, 1, 1]", style=solid]; +"318 Constant_61894" -> "315 /layer2/layer2.0/downsample/downsample.0/Conv/WithoutBiases/fq_weights_1" [label="[512, 1, 1, 1]", style=solid]; +"319 Constant_61893" -> "315 /layer2/layer2.0/downsample/downsample.0/Conv/WithoutBiases/fq_weights_1" [label="[512, 1, 1, 1]", style=solid]; +"320 onnx^^Conv_539" -> "315 /layer2/layer2.0/downsample/downsample.0/Conv/WithoutBiases/fq_weights_1" [label="[512, 256, 1, 1]", style=solid]; +"321 Constant_61891" -> "37 /layer1/layer1.2/relu_2/Relu/fq_output_0" [label="[]", style=solid]; +"322 Constant_61890" -> "37 /layer1/layer1.2/relu_2/Relu/fq_output_0" [label="[]", style=solid]; +"323 Constant_61889" -> "37 /layer1/layer1.2/relu_2/Relu/fq_output_0" [label="[]", style=solid]; +"324 Constant_61888" -> "37 /layer1/layer1.2/relu_2/Relu/fq_output_0" [label="[]", style=solid]; +"325 Constant_61856" -> "26 /layer1/layer1.1/relu_2/Relu/fq_output_0" [label="[]", style=solid]; +"326 Constant_61855" -> "26 /layer1/layer1.1/relu_2/Relu/fq_output_0" [label="[]", style=solid]; +"327 Constant_61854" -> "26 /layer1/layer1.1/relu_2/Relu/fq_output_0" [label="[]", style=solid]; +"328 Constant_61853" -> "26 /layer1/layer1.1/relu_2/Relu/fq_output_0" [label="[]", style=solid]; +"329 Constant_61821" -> "18 /layer1/layer1.0/relu_2/Relu/fq_output_0" [label="[]", style=solid]; +"330 Constant_61820" -> "18 /layer1/layer1.0/relu_2/Relu/fq_output_0" [label="[]", style=solid]; +"331 Constant_61819" -> "18 /layer1/layer1.0/relu_2/Relu/fq_output_0" [label="[]", style=solid]; +"332 Constant_61818" -> "18 /layer1/layer1.0/relu_2/Relu/fq_output_0" [label="[]", style=solid]; +"333 Reshape_185" -> "12 /layer1/layer1.0/downsample/downsample.0/Conv" [label="[1, 256, 1, 1]", style=solid]; +"334 /layer1/layer1.0/downsample/downsample.0/Conv/WithoutBiases/fq_weights_1" -> "10 /layer1/layer1.0/downsample/downsample.0/Conv/WithoutBiases" [label="[256, 64, 1, 1]", style=solid]; +"335 Constant_61781" -> "334 /layer1/layer1.0/downsample/downsample.0/Conv/WithoutBiases/fq_weights_1" [label="[256, 1, 1, 1]", style=solid]; +"336 Constant_61780" -> "334 /layer1/layer1.0/downsample/downsample.0/Conv/WithoutBiases/fq_weights_1" [label="[256, 1, 1, 1]", style=solid]; +"337 Constant_61779" -> "334 /layer1/layer1.0/downsample/downsample.0/Conv/WithoutBiases/fq_weights_1" [label="[256, 1, 1, 1]", style=solid]; +"338 Constant_61778" -> "334 /layer1/layer1.0/downsample/downsample.0/Conv/WithoutBiases/fq_weights_1" [label="[256, 1, 1, 1]", style=solid]; +"339 onnx^^Conv_509" -> "334 /layer1/layer1.0/downsample/downsample.0/Conv/WithoutBiases/fq_weights_1" [label="[256, 64, 1, 1]", style=solid]; +"340 Constant_61776" -> "7 /relu/Relu/fq_output_0" [label="[]", style=solid]; +"341 Constant_61775" -> "7 /relu/Relu/fq_output_0" [label="[]", style=solid]; +"342 Constant_61774" -> "7 /relu/Relu/fq_output_0" [label="[]", style=solid]; +"343 Constant_61773" -> "7 /relu/Relu/fq_output_0" [label="[]", style=solid]; +"344 Reshape_121" -> "5 /conv1/Conv" [label="[1, 64, 1, 1]", style=solid]; +"345 /conv1/Conv/WithoutBiases/fq_weights_1" -> "4 /conv1/Conv/WithoutBiases" [label="[64, 3, 7, 7]", style=solid]; +"346 Constant_61771" -> "345 /conv1/Conv/WithoutBiases/fq_weights_1" [label="[64, 1, 1, 1]", style=solid]; +"347 Constant_61770" -> "345 /conv1/Conv/WithoutBiases/fq_weights_1" [label="[64, 1, 1, 1]", style=solid]; +"348 Constant_61769" -> "345 
/conv1/Conv/WithoutBiases/fq_weights_1" [label="[64, 1, 1, 1]", style=solid]; +"349 Constant_61768" -> "345 /conv1/Conv/WithoutBiases/fq_weights_1" [label="[64, 1, 1, 1]", style=solid]; +"350 Gather_2818" -> "345 /conv1/Conv/WithoutBiases/fq_weights_1" [label="[64, 3, 7, 7]", style=solid]; +"351 Constant_61766" -> "3 Divide_1002/fq_output_0" [label="[]", style=solid]; +"352 Constant_61765" -> "3 Divide_1002/fq_output_0" [label="[]", style=solid]; +"353 Constant_61764" -> "3 Divide_1002/fq_output_0" [label="[]", style=solid]; +"354 Constant_61763" -> "3 Divide_1002/fq_output_0" [label="[]", style=solid]; +"355 Gather_2815" -> "2 Divide_1002" [label="[1, 3, 1, 1]", style=solid]; +"356 Gather_2812" -> "1 Multiply_2745" [label="[1, 3, 1, 1]", style=solid]; +"357 Constant_61816" -> "36 /layer1/layer1.0/conv3/Conv/fq_output_0" [label="[]", style=solid]; +"358 Constant_61815" -> "36 /layer1/layer1.0/conv3/Conv/fq_output_0" [label="[]", style=solid]; +"359 Constant_61814" -> "36 /layer1/layer1.0/conv3/Conv/fq_output_0" [label="[]", style=solid]; +"360 Constant_61813" -> "36 /layer1/layer1.0/conv3/Conv/fq_output_0" [label="[]", style=solid]; +"361 Reshape_170" -> "32 /layer1/layer1.0/conv3/Conv" [label="[1, 256, 1, 1]", style=solid]; +"362 /layer1/layer1.0/conv3/Conv/WithoutBiases/fq_weights_1" -> "28 /layer1/layer1.0/conv3/Conv/WithoutBiases" [label="[256, 64, 1, 1]", style=solid]; +"363 Constant_61811" -> "362 /layer1/layer1.0/conv3/Conv/WithoutBiases/fq_weights_1" [label="[256, 1, 1, 1]", style=solid]; +"364 Constant_61810" -> "362 /layer1/layer1.0/conv3/Conv/WithoutBiases/fq_weights_1" [label="[256, 1, 1, 1]", style=solid]; +"365 Constant_61809" -> "362 /layer1/layer1.0/conv3/Conv/WithoutBiases/fq_weights_1" [label="[256, 1, 1, 1]", style=solid]; +"366 Constant_61808" -> "362 /layer1/layer1.0/conv3/Conv/WithoutBiases/fq_weights_1" [label="[256, 1, 1, 1]", style=solid]; +"367 onnx^^Conv_506" -> "362 /layer1/layer1.0/conv3/Conv/WithoutBiases/fq_weights_1" [label="[256, 64, 1, 1]", style=solid]; +"368 Constant_61806" -> "25 /layer1/layer1.0/relu_1/Relu/fq_output_0" [label="[]", style=solid]; +"369 Constant_61805" -> "25 /layer1/layer1.0/relu_1/Relu/fq_output_0" [label="[]", style=solid]; +"370 Constant_61804" -> "25 /layer1/layer1.0/relu_1/Relu/fq_output_0" [label="[]", style=solid]; +"371 Constant_61803" -> "25 /layer1/layer1.0/relu_1/Relu/fq_output_0" [label="[]", style=solid]; +"372 Reshape_154" -> "19 /layer1/layer1.0/conv2/Conv" [label="[1, 64, 1, 1]", style=solid]; +"373 /layer1/layer1.0/conv2/Conv/WithoutBiases/fq_weights_1" -> "17 /layer1/layer1.0/conv2/Conv/WithoutBiases" [label="[64, 64, 3, 3]", style=solid]; +"374 Constant_61801" -> "373 /layer1/layer1.0/conv2/Conv/WithoutBiases/fq_weights_1" [label="[64, 1, 1, 1]", style=solid]; +"375 Constant_61800" -> "373 /layer1/layer1.0/conv2/Conv/WithoutBiases/fq_weights_1" [label="[64, 1, 1, 1]", style=solid]; +"376 Constant_61799" -> "373 /layer1/layer1.0/conv2/Conv/WithoutBiases/fq_weights_1" [label="[64, 1, 1, 1]", style=solid]; +"377 Constant_61798" -> "373 /layer1/layer1.0/conv2/Conv/WithoutBiases/fq_weights_1" [label="[64, 1, 1, 1]", style=solid]; +"378 onnx^^Conv_503" -> "373 /layer1/layer1.0/conv2/Conv/WithoutBiases/fq_weights_1" [label="[64, 64, 3, 3]", style=solid]; +"379 Constant_61796" -> "15 /layer1/layer1.0/relu/Relu/fq_output_0" [label="[]", style=solid]; +"380 Constant_61795" -> "15 /layer1/layer1.0/relu/Relu/fq_output_0" [label="[]", style=solid]; +"381 Constant_61794" -> "15 /layer1/layer1.0/relu/Relu/fq_output_0" [label="[]", 
style=solid]; +"382 Constant_61793" -> "15 /layer1/layer1.0/relu/Relu/fq_output_0" [label="[]", style=solid]; +"383 Reshape_138" -> "11 /layer1/layer1.0/conv1/Conv" [label="[1, 64, 1, 1]", style=solid]; +"384 /layer1/layer1.0/conv1/Conv/WithoutBiases/fq_weights_1" -> "9 /layer1/layer1.0/conv1/Conv/WithoutBiases" [label="[64, 64, 1, 1]", style=solid]; +"385 Constant_61791" -> "384 /layer1/layer1.0/conv1/Conv/WithoutBiases/fq_weights_1" [label="[64, 1, 1, 1]", style=solid]; +"386 Constant_61790" -> "384 /layer1/layer1.0/conv1/Conv/WithoutBiases/fq_weights_1" [label="[64, 1, 1, 1]", style=solid]; +"387 Constant_61789" -> "384 /layer1/layer1.0/conv1/Conv/WithoutBiases/fq_weights_1" [label="[64, 1, 1, 1]", style=solid]; +"388 Constant_61788" -> "384 /layer1/layer1.0/conv1/Conv/WithoutBiases/fq_weights_1" [label="[64, 1, 1, 1]", style=solid]; +"389 onnx^^Conv_500" -> "384 /layer1/layer1.0/conv1/Conv/WithoutBiases/fq_weights_1" [label="[64, 64, 1, 1]", style=solid]; +"390 Reshape_234" -> "55 /layer1/layer1.1/conv3/Conv" [label="[1, 256, 1, 1]", style=solid]; +"391 /layer1/layer1.1/conv3/Conv/WithoutBiases/fq_weights_1" -> "51 /layer1/layer1.1/conv3/Conv/WithoutBiases" [label="[256, 64, 1, 1]", style=solid]; +"392 Constant_61846" -> "391 /layer1/layer1.1/conv3/Conv/WithoutBiases/fq_weights_1" [label="[256, 1, 1, 1]", style=solid]; +"393 Constant_61845" -> "391 /layer1/layer1.1/conv3/Conv/WithoutBiases/fq_weights_1" [label="[256, 1, 1, 1]", style=solid]; +"394 Constant_61844" -> "391 /layer1/layer1.1/conv3/Conv/WithoutBiases/fq_weights_1" [label="[256, 1, 1, 1]", style=solid]; +"395 Constant_61843" -> "391 /layer1/layer1.1/conv3/Conv/WithoutBiases/fq_weights_1" [label="[256, 1, 1, 1]", style=solid]; +"396 onnx^^Conv_518" -> "391 /layer1/layer1.1/conv3/Conv/WithoutBiases/fq_weights_1" [label="[256, 64, 1, 1]", style=solid]; +"397 Constant_61841" -> "47 /layer1/layer1.1/relu_1/Relu/fq_output_0" [label="[]", style=solid]; +"398 Constant_61840" -> "47 /layer1/layer1.1/relu_1/Relu/fq_output_0" [label="[]", style=solid]; +"399 Constant_61839" -> "47 /layer1/layer1.1/relu_1/Relu/fq_output_0" [label="[]", style=solid]; +"400 Constant_61838" -> "47 /layer1/layer1.1/relu_1/Relu/fq_output_0" [label="[]", style=solid]; +"401 Reshape_218" -> "39 /layer1/layer1.1/conv2/Conv" [label="[1, 64, 1, 1]", style=solid]; +"402 /layer1/layer1.1/conv2/Conv/WithoutBiases/fq_weights_1" -> "35 /layer1/layer1.1/conv2/Conv/WithoutBiases" [label="[64, 64, 3, 3]", style=solid]; +"403 Constant_61836" -> "402 /layer1/layer1.1/conv2/Conv/WithoutBiases/fq_weights_1" [label="[64, 1, 1, 1]", style=solid]; +"404 Constant_61835" -> "402 /layer1/layer1.1/conv2/Conv/WithoutBiases/fq_weights_1" [label="[64, 1, 1, 1]", style=solid]; +"405 Constant_61834" -> "402 /layer1/layer1.1/conv2/Conv/WithoutBiases/fq_weights_1" [label="[64, 1, 1, 1]", style=solid]; +"406 Constant_61833" -> "402 /layer1/layer1.1/conv2/Conv/WithoutBiases/fq_weights_1" [label="[64, 1, 1, 1]", style=solid]; +"407 onnx^^Conv_515" -> "402 /layer1/layer1.1/conv2/Conv/WithoutBiases/fq_weights_1" [label="[64, 64, 3, 3]", style=solid]; +"408 Constant_61831" -> "31 /layer1/layer1.1/relu/Relu/fq_output_0" [label="[]", style=solid]; +"409 Constant_61830" -> "31 /layer1/layer1.1/relu/Relu/fq_output_0" [label="[]", style=solid]; +"410 Constant_61829" -> "31 /layer1/layer1.1/relu/Relu/fq_output_0" [label="[]", style=solid]; +"411 Constant_61828" -> "31 /layer1/layer1.1/relu/Relu/fq_output_0" [label="[]", style=solid]; +"412 Reshape_202" -> "24 /layer1/layer1.1/conv1/Conv" [label="[1, 
64, 1, 1]", style=solid]; +"413 /layer1/layer1.1/conv1/Conv/WithoutBiases/fq_weights_1" -> "21 /layer1/layer1.1/conv1/Conv/WithoutBiases" [label="[64, 256, 1, 1]", style=solid]; +"414 Constant_61826" -> "413 /layer1/layer1.1/conv1/Conv/WithoutBiases/fq_weights_1" [label="[64, 1, 1, 1]", style=solid]; +"415 Constant_61825" -> "413 /layer1/layer1.1/conv1/Conv/WithoutBiases/fq_weights_1" [label="[64, 1, 1, 1]", style=solid]; +"416 Constant_61824" -> "413 /layer1/layer1.1/conv1/Conv/WithoutBiases/fq_weights_1" [label="[64, 1, 1, 1]", style=solid]; +"417 Constant_61823" -> "413 /layer1/layer1.1/conv1/Conv/WithoutBiases/fq_weights_1" [label="[64, 1, 1, 1]", style=solid]; +"418 onnx^^Conv_512" -> "413 /layer1/layer1.1/conv1/Conv/WithoutBiases/fq_weights_1" [label="[64, 256, 1, 1]", style=solid]; +"419 Reshape_283" -> "66 /layer1/layer1.2/conv3/Conv" [label="[1, 256, 1, 1]", style=solid]; +"420 /layer1/layer1.2/conv3/Conv/WithoutBiases/fq_weights_1" -> "62 /layer1/layer1.2/conv3/Conv/WithoutBiases" [label="[256, 64, 1, 1]", style=solid]; +"421 Constant_61881" -> "420 /layer1/layer1.2/conv3/Conv/WithoutBiases/fq_weights_1" [label="[256, 1, 1, 1]", style=solid]; +"422 Constant_61880" -> "420 /layer1/layer1.2/conv3/Conv/WithoutBiases/fq_weights_1" [label="[256, 1, 1, 1]", style=solid]; +"423 Constant_61879" -> "420 /layer1/layer1.2/conv3/Conv/WithoutBiases/fq_weights_1" [label="[256, 1, 1, 1]", style=solid]; +"424 Constant_61878" -> "420 /layer1/layer1.2/conv3/Conv/WithoutBiases/fq_weights_1" [label="[256, 1, 1, 1]", style=solid]; +"425 onnx^^Conv_527" -> "420 /layer1/layer1.2/conv3/Conv/WithoutBiases/fq_weights_1" [label="[256, 64, 1, 1]", style=solid]; +"426 Constant_61876" -> "58 /layer1/layer1.2/relu_1/Relu/fq_output_0" [label="[]", style=solid]; +"427 Constant_61875" -> "58 /layer1/layer1.2/relu_1/Relu/fq_output_0" [label="[]", style=solid]; +"428 Constant_61874" -> "58 /layer1/layer1.2/relu_1/Relu/fq_output_0" [label="[]", style=solid]; +"429 Constant_61873" -> "58 /layer1/layer1.2/relu_1/Relu/fq_output_0" [label="[]", style=solid]; +"430 Reshape_267" -> "50 /layer1/layer1.2/conv2/Conv" [label="[1, 64, 1, 1]", style=solid]; +"431 /layer1/layer1.2/conv2/Conv/WithoutBiases/fq_weights_1" -> "46 /layer1/layer1.2/conv2/Conv/WithoutBiases" [label="[64, 64, 3, 3]", style=solid]; +"432 Constant_61871" -> "431 /layer1/layer1.2/conv2/Conv/WithoutBiases/fq_weights_1" [label="[64, 1, 1, 1]", style=solid]; +"433 Constant_61870" -> "431 /layer1/layer1.2/conv2/Conv/WithoutBiases/fq_weights_1" [label="[64, 1, 1, 1]", style=solid]; +"434 Constant_61869" -> "431 /layer1/layer1.2/conv2/Conv/WithoutBiases/fq_weights_1" [label="[64, 1, 1, 1]", style=solid]; +"435 Constant_61868" -> "431 /layer1/layer1.2/conv2/Conv/WithoutBiases/fq_weights_1" [label="[64, 1, 1, 1]", style=solid]; +"436 onnx^^Conv_524" -> "431 /layer1/layer1.2/conv2/Conv/WithoutBiases/fq_weights_1" [label="[64, 64, 3, 3]", style=solid]; +"437 Constant_61866" -> "42 /layer1/layer1.2/relu/Relu/fq_output_0" [label="[]", style=solid]; +"438 Constant_61865" -> "42 /layer1/layer1.2/relu/Relu/fq_output_0" [label="[]", style=solid]; +"439 Constant_61864" -> "42 /layer1/layer1.2/relu/Relu/fq_output_0" [label="[]", style=solid]; +"440 Constant_61863" -> "42 /layer1/layer1.2/relu/Relu/fq_output_0" [label="[]", style=solid]; +"441 Reshape_251" -> "34 /layer1/layer1.2/conv1/Conv" [label="[1, 64, 1, 1]", style=solid]; +"442 /layer1/layer1.2/conv1/Conv/WithoutBiases/fq_weights_1" -> "30 /layer1/layer1.2/conv1/Conv/WithoutBiases" [label="[64, 256, 1, 1]", 
style=solid]; +"443 Constant_61861" -> "442 /layer1/layer1.2/conv1/Conv/WithoutBiases/fq_weights_1" [label="[64, 1, 1, 1]", style=solid]; +"444 Constant_61860" -> "442 /layer1/layer1.2/conv1/Conv/WithoutBiases/fq_weights_1" [label="[64, 1, 1, 1]", style=solid]; +"445 Constant_61859" -> "442 /layer1/layer1.2/conv1/Conv/WithoutBiases/fq_weights_1" [label="[64, 1, 1, 1]", style=solid]; +"446 Constant_61858" -> "442 /layer1/layer1.2/conv1/Conv/WithoutBiases/fq_weights_1" [label="[64, 1, 1, 1]", style=solid]; +"447 onnx^^Conv_521" -> "442 /layer1/layer1.2/conv1/Conv/WithoutBiases/fq_weights_1" [label="[64, 256, 1, 1]", style=solid]; +"448 Constant_61931" -> "78 /layer2/layer2.0/conv3/Conv/fq_output_0" [label="[]", style=solid]; +"449 Constant_61930" -> "78 /layer2/layer2.0/conv3/Conv/fq_output_0" [label="[]", style=solid]; +"450 Constant_61929" -> "78 /layer2/layer2.0/conv3/Conv/fq_output_0" [label="[]", style=solid]; +"451 Constant_61928" -> "78 /layer2/layer2.0/conv3/Conv/fq_output_0" [label="[]", style=solid]; +"452 Reshape_332" -> "74 /layer2/layer2.0/conv3/Conv" [label="[1, 512, 1, 1]", style=solid]; +"453 /layer2/layer2.0/conv3/Conv/WithoutBiases/fq_weights_1" -> "70 /layer2/layer2.0/conv3/Conv/WithoutBiases" [label="[512, 128, 1, 1]", style=solid]; +"454 Constant_61926" -> "453 /layer2/layer2.0/conv3/Conv/WithoutBiases/fq_weights_1" [label="[512, 1, 1, 1]", style=solid]; +"455 Constant_61925" -> "453 /layer2/layer2.0/conv3/Conv/WithoutBiases/fq_weights_1" [label="[512, 1, 1, 1]", style=solid]; +"456 Constant_61924" -> "453 /layer2/layer2.0/conv3/Conv/WithoutBiases/fq_weights_1" [label="[512, 1, 1, 1]", style=solid]; +"457 Constant_61923" -> "453 /layer2/layer2.0/conv3/Conv/WithoutBiases/fq_weights_1" [label="[512, 1, 1, 1]", style=solid]; +"458 onnx^^Conv_536" -> "453 /layer2/layer2.0/conv3/Conv/WithoutBiases/fq_weights_1" [label="[512, 128, 1, 1]", style=solid]; +"459 Constant_61921" -> "67 /layer2/layer2.0/relu_1/Relu/fq_output_0" [label="[]", style=solid]; +"460 Constant_61920" -> "67 /layer2/layer2.0/relu_1/Relu/fq_output_0" [label="[]", style=solid]; +"461 Constant_61919" -> "67 /layer2/layer2.0/relu_1/Relu/fq_output_0" [label="[]", style=solid]; +"462 Constant_61918" -> "67 /layer2/layer2.0/relu_1/Relu/fq_output_0" [label="[]", style=solid]; +"463 Reshape_316" -> "59 /layer2/layer2.0/conv2/Conv" [label="[1, 128, 1, 1]", style=solid]; +"464 /layer2/layer2.0/conv2/Conv/WithoutBiases/fq_weights_1" -> "56 /layer2/layer2.0/conv2/Conv/WithoutBiases" [label="[128, 128, 3, 3]", style=solid]; +"465 Constant_61916" -> "464 /layer2/layer2.0/conv2/Conv/WithoutBiases/fq_weights_1" [label="[128, 1, 1, 1]", style=solid]; +"466 Constant_61915" -> "464 /layer2/layer2.0/conv2/Conv/WithoutBiases/fq_weights_1" [label="[128, 1, 1, 1]", style=solid]; +"467 Constant_61914" -> "464 /layer2/layer2.0/conv2/Conv/WithoutBiases/fq_weights_1" [label="[128, 1, 1, 1]", style=solid]; +"468 Constant_61913" -> "464 /layer2/layer2.0/conv2/Conv/WithoutBiases/fq_weights_1" [label="[128, 1, 1, 1]", style=solid]; +"469 onnx^^Conv_533" -> "464 /layer2/layer2.0/conv2/Conv/WithoutBiases/fq_weights_1" [label="[128, 128, 3, 3]", style=solid]; +"470 Constant_61911" -> "52 /layer2/layer2.0/relu/Relu/fq_output_0" [label="[]", style=solid]; +"471 Constant_61910" -> "52 /layer2/layer2.0/relu/Relu/fq_output_0" [label="[]", style=solid]; +"472 Constant_61909" -> "52 /layer2/layer2.0/relu/Relu/fq_output_0" [label="[]", style=solid]; +"473 Constant_61908" -> "52 /layer2/layer2.0/relu/Relu/fq_output_0" [label="[]", style=solid]; +"474 
Reshape_300" -> "44 /layer2/layer2.0/conv1/Conv" [label="[1, 128, 1, 1]", style=solid]; +"475 /layer2/layer2.0/conv1/Conv/WithoutBiases/fq_weights_1" -> "40 /layer2/layer2.0/conv1/Conv/WithoutBiases" [label="[128, 256, 1, 1]", style=solid]; +"476 Constant_61906" -> "475 /layer2/layer2.0/conv1/Conv/WithoutBiases/fq_weights_1" [label="[128, 1, 1, 1]", style=solid]; +"477 Constant_61905" -> "475 /layer2/layer2.0/conv1/Conv/WithoutBiases/fq_weights_1" [label="[128, 1, 1, 1]", style=solid]; +"478 Constant_61904" -> "475 /layer2/layer2.0/conv1/Conv/WithoutBiases/fq_weights_1" [label="[128, 1, 1, 1]", style=solid]; +"479 Constant_61903" -> "475 /layer2/layer2.0/conv1/Conv/WithoutBiases/fq_weights_1" [label="[128, 1, 1, 1]", style=solid]; +"480 onnx^^Conv_530" -> "475 /layer2/layer2.0/conv1/Conv/WithoutBiases/fq_weights_1" [label="[128, 256, 1, 1]", style=solid]; +"481 Reshape_396" -> "98 /layer2/layer2.1/conv3/Conv" [label="[1, 512, 1, 1]", style=solid]; +"482 /layer2/layer2.1/conv3/Conv/WithoutBiases/fq_weights_1" -> "93 /layer2/layer2.1/conv3/Conv/WithoutBiases" [label="[512, 128, 1, 1]", style=solid]; +"483 Constant_61961" -> "482 /layer2/layer2.1/conv3/Conv/WithoutBiases/fq_weights_1" [label="[512, 1, 1, 1]", style=solid]; +"484 Constant_61960" -> "482 /layer2/layer2.1/conv3/Conv/WithoutBiases/fq_weights_1" [label="[512, 1, 1, 1]", style=solid]; +"485 Constant_61959" -> "482 /layer2/layer2.1/conv3/Conv/WithoutBiases/fq_weights_1" [label="[512, 1, 1, 1]", style=solid]; +"486 Constant_61958" -> "482 /layer2/layer2.1/conv3/Conv/WithoutBiases/fq_weights_1" [label="[512, 1, 1, 1]", style=solid]; +"487 onnx^^Conv_548" -> "482 /layer2/layer2.1/conv3/Conv/WithoutBiases/fq_weights_1" [label="[512, 128, 1, 1]", style=solid]; +"488 Constant_61956" -> "89 /layer2/layer2.1/relu_1/Relu/fq_output_0" [label="[]", style=solid]; +"489 Constant_61955" -> "89 /layer2/layer2.1/relu_1/Relu/fq_output_0" [label="[]", style=solid]; +"490 Constant_61954" -> "89 /layer2/layer2.1/relu_1/Relu/fq_output_0" [label="[]", style=solid]; +"491 Constant_61953" -> "89 /layer2/layer2.1/relu_1/Relu/fq_output_0" [label="[]", style=solid]; +"492 Reshape_380" -> "81 /layer2/layer2.1/conv2/Conv" [label="[1, 128, 1, 1]", style=solid]; +"493 /layer2/layer2.1/conv2/Conv/WithoutBiases/fq_weights_1" -> "77 /layer2/layer2.1/conv2/Conv/WithoutBiases" [label="[128, 128, 3, 3]", style=solid]; +"494 Constant_61951" -> "493 /layer2/layer2.1/conv2/Conv/WithoutBiases/fq_weights_1" [label="[128, 1, 1, 1]", style=solid]; +"495 Constant_61950" -> "493 /layer2/layer2.1/conv2/Conv/WithoutBiases/fq_weights_1" [label="[128, 1, 1, 1]", style=solid]; +"496 Constant_61949" -> "493 /layer2/layer2.1/conv2/Conv/WithoutBiases/fq_weights_1" [label="[128, 1, 1, 1]", style=solid]; +"497 Constant_61948" -> "493 /layer2/layer2.1/conv2/Conv/WithoutBiases/fq_weights_1" [label="[128, 1, 1, 1]", style=solid]; +"498 onnx^^Conv_545" -> "493 /layer2/layer2.1/conv2/Conv/WithoutBiases/fq_weights_1" [label="[128, 128, 3, 3]", style=solid]; +"499 Constant_61946" -> "73 /layer2/layer2.1/relu/Relu/fq_output_0" [label="[]", style=solid]; +"500 Constant_61945" -> "73 /layer2/layer2.1/relu/Relu/fq_output_0" [label="[]", style=solid]; +"501 Constant_61944" -> "73 /layer2/layer2.1/relu/Relu/fq_output_0" [label="[]", style=solid]; +"502 Constant_61943" -> "73 /layer2/layer2.1/relu/Relu/fq_output_0" [label="[]", style=solid]; +"503 Reshape_364" -> "65 /layer2/layer2.1/conv1/Conv" [label="[1, 128, 1, 1]", style=solid]; +"504 /layer2/layer2.1/conv1/Conv/WithoutBiases/fq_weights_1" -> "61 
/layer2/layer2.1/conv1/Conv/WithoutBiases" [label="[128, 512, 1, 1]", style=solid]; +"505 Constant_61941" -> "504 /layer2/layer2.1/conv1/Conv/WithoutBiases/fq_weights_1" [label="[128, 1, 1, 1]", style=solid]; +"506 Constant_61940" -> "504 /layer2/layer2.1/conv1/Conv/WithoutBiases/fq_weights_1" [label="[128, 1, 1, 1]", style=solid]; +"507 Constant_61939" -> "504 /layer2/layer2.1/conv1/Conv/WithoutBiases/fq_weights_1" [label="[128, 1, 1, 1]", style=solid]; +"508 Constant_61938" -> "504 /layer2/layer2.1/conv1/Conv/WithoutBiases/fq_weights_1" [label="[128, 1, 1, 1]", style=solid]; +"509 onnx^^Conv_542" -> "504 /layer2/layer2.1/conv1/Conv/WithoutBiases/fq_weights_1" [label="[128, 512, 1, 1]", style=solid]; +"510 Reshape_445" -> "110 /layer2/layer2.2/conv3/Conv" [label="[1, 512, 1, 1]", style=solid]; +"511 /layer2/layer2.2/conv3/Conv/WithoutBiases/fq_weights_1" -> "106 /layer2/layer2.2/conv3/Conv/WithoutBiases" [label="[512, 128, 1, 1]", style=solid]; +"512 Constant_61996" -> "511 /layer2/layer2.2/conv3/Conv/WithoutBiases/fq_weights_1" [label="[512, 1, 1, 1]", style=solid]; +"513 Constant_61995" -> "511 /layer2/layer2.2/conv3/Conv/WithoutBiases/fq_weights_1" [label="[512, 1, 1, 1]", style=solid]; +"514 Constant_61994" -> "511 /layer2/layer2.2/conv3/Conv/WithoutBiases/fq_weights_1" [label="[512, 1, 1, 1]", style=solid]; +"515 Constant_61993" -> "511 /layer2/layer2.2/conv3/Conv/WithoutBiases/fq_weights_1" [label="[512, 1, 1, 1]", style=solid]; +"516 onnx^^Conv_557" -> "511 /layer2/layer2.2/conv3/Conv/WithoutBiases/fq_weights_1" [label="[512, 128, 1, 1]", style=solid]; +"517 Constant_61991" -> "102 /layer2/layer2.2/relu_1/Relu/fq_output_0" [label="[]", style=solid]; +"518 Constant_61990" -> "102 /layer2/layer2.2/relu_1/Relu/fq_output_0" [label="[]", style=solid]; +"519 Constant_61989" -> "102 /layer2/layer2.2/relu_1/Relu/fq_output_0" [label="[]", style=solid]; +"520 Constant_61988" -> "102 /layer2/layer2.2/relu_1/Relu/fq_output_0" [label="[]", style=solid]; +"521 Reshape_429" -> "92 /layer2/layer2.2/conv2/Conv" [label="[1, 128, 1, 1]", style=solid]; +"522 /layer2/layer2.2/conv2/Conv/WithoutBiases/fq_weights_1" -> "88 /layer2/layer2.2/conv2/Conv/WithoutBiases" [label="[128, 128, 3, 3]", style=solid]; +"523 Constant_61986" -> "522 /layer2/layer2.2/conv2/Conv/WithoutBiases/fq_weights_1" [label="[128, 1, 1, 1]", style=solid]; +"524 Constant_61985" -> "522 /layer2/layer2.2/conv2/Conv/WithoutBiases/fq_weights_1" [label="[128, 1, 1, 1]", style=solid]; +"525 Constant_61984" -> "522 /layer2/layer2.2/conv2/Conv/WithoutBiases/fq_weights_1" [label="[128, 1, 1, 1]", style=solid]; +"526 Constant_61983" -> "522 /layer2/layer2.2/conv2/Conv/WithoutBiases/fq_weights_1" [label="[128, 1, 1, 1]", style=solid]; +"527 onnx^^Conv_554" -> "522 /layer2/layer2.2/conv2/Conv/WithoutBiases/fq_weights_1" [label="[128, 128, 3, 3]", style=solid]; +"528 Constant_61981" -> "84 /layer2/layer2.2/relu/Relu/fq_output_0" [label="[]", style=solid]; +"529 Constant_61980" -> "84 /layer2/layer2.2/relu/Relu/fq_output_0" [label="[]", style=solid]; +"530 Constant_61979" -> "84 /layer2/layer2.2/relu/Relu/fq_output_0" [label="[]", style=solid]; +"531 Constant_61978" -> "84 /layer2/layer2.2/relu/Relu/fq_output_0" [label="[]", style=solid]; +"532 Reshape_413" -> "76 /layer2/layer2.2/conv1/Conv" [label="[1, 128, 1, 1]", style=solid]; +"533 /layer2/layer2.2/conv1/Conv/WithoutBiases/fq_weights_1" -> "72 /layer2/layer2.2/conv1/Conv/WithoutBiases" [label="[128, 512, 1, 1]", style=solid]; +"534 Constant_61976" -> "533 
/layer2/layer2.2/conv1/Conv/WithoutBiases/fq_weights_1" [label="[128, 1, 1, 1]", style=solid]; +"535 Constant_61975" -> "533 /layer2/layer2.2/conv1/Conv/WithoutBiases/fq_weights_1" [label="[128, 1, 1, 1]", style=solid]; +"536 Constant_61974" -> "533 /layer2/layer2.2/conv1/Conv/WithoutBiases/fq_weights_1" [label="[128, 1, 1, 1]", style=solid]; +"537 Constant_61973" -> "533 /layer2/layer2.2/conv1/Conv/WithoutBiases/fq_weights_1" [label="[128, 1, 1, 1]", style=solid]; +"538 onnx^^Conv_551" -> "533 /layer2/layer2.2/conv1/Conv/WithoutBiases/fq_weights_1" [label="[128, 512, 1, 1]", style=solid]; +"539 Reshape_494" -> "121 /layer2/layer2.3/conv3/Conv" [label="[1, 512, 1, 1]", style=solid]; +"540 /layer2/layer2.3/conv3/Conv/WithoutBiases/fq_weights_1" -> "117 /layer2/layer2.3/conv3/Conv/WithoutBiases" [label="[512, 128, 1, 1]", style=solid]; +"541 Constant_62031" -> "540 /layer2/layer2.3/conv3/Conv/WithoutBiases/fq_weights_1" [label="[512, 1, 1, 1]", style=solid]; +"542 Constant_62030" -> "540 /layer2/layer2.3/conv3/Conv/WithoutBiases/fq_weights_1" [label="[512, 1, 1, 1]", style=solid]; +"543 Constant_62029" -> "540 /layer2/layer2.3/conv3/Conv/WithoutBiases/fq_weights_1" [label="[512, 1, 1, 1]", style=solid]; +"544 Constant_62028" -> "540 /layer2/layer2.3/conv3/Conv/WithoutBiases/fq_weights_1" [label="[512, 1, 1, 1]", style=solid]; +"545 onnx^^Conv_566" -> "540 /layer2/layer2.3/conv3/Conv/WithoutBiases/fq_weights_1" [label="[512, 128, 1, 1]", style=solid]; +"546 Constant_62026" -> "113 /layer2/layer2.3/relu_1/Relu/fq_output_0" [label="[]", style=solid]; +"547 Constant_62025" -> "113 /layer2/layer2.3/relu_1/Relu/fq_output_0" [label="[]", style=solid]; +"548 Constant_62024" -> "113 /layer2/layer2.3/relu_1/Relu/fq_output_0" [label="[]", style=solid]; +"549 Constant_62023" -> "113 /layer2/layer2.3/relu_1/Relu/fq_output_0" [label="[]", style=solid]; +"550 Reshape_478" -> "105 /layer2/layer2.3/conv2/Conv" [label="[1, 128, 1, 1]", style=solid]; +"551 /layer2/layer2.3/conv2/Conv/WithoutBiases/fq_weights_1" -> "101 /layer2/layer2.3/conv2/Conv/WithoutBiases" [label="[128, 128, 3, 3]", style=solid]; +"552 Constant_62021" -> "551 /layer2/layer2.3/conv2/Conv/WithoutBiases/fq_weights_1" [label="[128, 1, 1, 1]", style=solid]; +"553 Constant_62020" -> "551 /layer2/layer2.3/conv2/Conv/WithoutBiases/fq_weights_1" [label="[128, 1, 1, 1]", style=solid]; +"554 Constant_62019" -> "551 /layer2/layer2.3/conv2/Conv/WithoutBiases/fq_weights_1" [label="[128, 1, 1, 1]", style=solid]; +"555 Constant_62018" -> "551 /layer2/layer2.3/conv2/Conv/WithoutBiases/fq_weights_1" [label="[128, 1, 1, 1]", style=solid]; +"556 onnx^^Conv_563" -> "551 /layer2/layer2.3/conv2/Conv/WithoutBiases/fq_weights_1" [label="[128, 128, 3, 3]", style=solid]; +"557 Constant_62016" -> "96 /layer2/layer2.3/relu/Relu/fq_output_0" [label="[]", style=solid]; +"558 Constant_62015" -> "96 /layer2/layer2.3/relu/Relu/fq_output_0" [label="[]", style=solid]; +"559 Constant_62014" -> "96 /layer2/layer2.3/relu/Relu/fq_output_0" [label="[]", style=solid]; +"560 Constant_62013" -> "96 /layer2/layer2.3/relu/Relu/fq_output_0" [label="[]", style=solid]; +"561 Reshape_462" -> "87 /layer2/layer2.3/conv1/Conv" [label="[1, 128, 1, 1]", style=solid]; +"562 /layer2/layer2.3/conv1/Conv/WithoutBiases/fq_weights_1" -> "83 /layer2/layer2.3/conv1/Conv/WithoutBiases" [label="[128, 512, 1, 1]", style=solid]; +"563 Constant_62011" -> "562 /layer2/layer2.3/conv1/Conv/WithoutBiases/fq_weights_1" [label="[128, 1, 1, 1]", style=solid]; +"564 Constant_62010" -> "562 
/layer2/layer2.3/conv1/Conv/WithoutBiases/fq_weights_1" [label="[128, 1, 1, 1]", style=solid]; +"565 Constant_62009" -> "562 /layer2/layer2.3/conv1/Conv/WithoutBiases/fq_weights_1" [label="[128, 1, 1, 1]", style=solid]; +"566 Constant_62008" -> "562 /layer2/layer2.3/conv1/Conv/WithoutBiases/fq_weights_1" [label="[128, 1, 1, 1]", style=solid]; +"567 onnx^^Conv_560" -> "562 /layer2/layer2.3/conv1/Conv/WithoutBiases/fq_weights_1" [label="[128, 512, 1, 1]", style=solid]; +"568 Constant_62081" -> "133 /layer3/layer3.0/conv3/Conv/fq_output_0" [label="[]", style=solid]; +"569 Constant_62080" -> "133 /layer3/layer3.0/conv3/Conv/fq_output_0" [label="[]", style=solid]; +"570 Constant_62079" -> "133 /layer3/layer3.0/conv3/Conv/fq_output_0" [label="[]", style=solid]; +"571 Constant_62078" -> "133 /layer3/layer3.0/conv3/Conv/fq_output_0" [label="[]", style=solid]; +"572 Reshape_543" -> "129 /layer3/layer3.0/conv3/Conv" [label="[1, 1024, 1, 1]", style=solid]; +"573 /layer3/layer3.0/conv3/Conv/WithoutBiases/fq_weights_1" -> "125 /layer3/layer3.0/conv3/Conv/WithoutBiases" [label="[1024, 256, 1, 1]", style=solid]; +"574 Constant_62076" -> "573 /layer3/layer3.0/conv3/Conv/WithoutBiases/fq_weights_1" [label="[1024, 1, 1, 1]", style=solid]; +"575 Constant_62075" -> "573 /layer3/layer3.0/conv3/Conv/WithoutBiases/fq_weights_1" [label="[1024, 1, 1, 1]", style=solid]; +"576 Constant_62074" -> "573 /layer3/layer3.0/conv3/Conv/WithoutBiases/fq_weights_1" [label="[1024, 1, 1, 1]", style=solid]; +"577 Constant_62073" -> "573 /layer3/layer3.0/conv3/Conv/WithoutBiases/fq_weights_1" [label="[1024, 1, 1, 1]", style=solid]; +"578 onnx^^Conv_575" -> "573 /layer3/layer3.0/conv3/Conv/WithoutBiases/fq_weights_1" [label="[1024, 256, 1, 1]", style=solid]; +"579 Constant_62071" -> "122 /layer3/layer3.0/relu_1/Relu/fq_output_0" [label="[]", style=solid]; +"580 Constant_62070" -> "122 /layer3/layer3.0/relu_1/Relu/fq_output_0" [label="[]", style=solid]; +"581 Constant_62069" -> "122 /layer3/layer3.0/relu_1/Relu/fq_output_0" [label="[]", style=solid]; +"582 Constant_62068" -> "122 /layer3/layer3.0/relu_1/Relu/fq_output_0" [label="[]", style=solid]; +"583 Reshape_527" -> "114 /layer3/layer3.0/conv2/Conv" [label="[1, 256, 1, 1]", style=solid]; +"584 /layer3/layer3.0/conv2/Conv/WithoutBiases/fq_weights_1" -> "111 /layer3/layer3.0/conv2/Conv/WithoutBiases" [label="[256, 256, 3, 3]", style=solid]; +"585 Constant_62066" -> "584 /layer3/layer3.0/conv2/Conv/WithoutBiases/fq_weights_1" [label="[256, 1, 1, 1]", style=solid]; +"586 Constant_62065" -> "584 /layer3/layer3.0/conv2/Conv/WithoutBiases/fq_weights_1" [label="[256, 1, 1, 1]", style=solid]; +"587 Constant_62064" -> "584 /layer3/layer3.0/conv2/Conv/WithoutBiases/fq_weights_1" [label="[256, 1, 1, 1]", style=solid]; +"588 Constant_62063" -> "584 /layer3/layer3.0/conv2/Conv/WithoutBiases/fq_weights_1" [label="[256, 1, 1, 1]", style=solid]; +"589 onnx^^Conv_572" -> "584 /layer3/layer3.0/conv2/Conv/WithoutBiases/fq_weights_1" [label="[256, 256, 3, 3]", style=solid]; +"590 Constant_62061" -> "107 /layer3/layer3.0/relu/Relu/fq_output_0" [label="[]", style=solid]; +"591 Constant_62060" -> "107 /layer3/layer3.0/relu/Relu/fq_output_0" [label="[]", style=solid]; +"592 Constant_62059" -> "107 /layer3/layer3.0/relu/Relu/fq_output_0" [label="[]", style=solid]; +"593 Constant_62058" -> "107 /layer3/layer3.0/relu/Relu/fq_output_0" [label="[]", style=solid]; +"594 Reshape_511" -> "99 /layer3/layer3.0/conv1/Conv" [label="[1, 256, 1, 1]", style=solid]; +"595 
/layer3/layer3.0/conv1/Conv/WithoutBiases/fq_weights_1" -> "94 /layer3/layer3.0/conv1/Conv/WithoutBiases" [label="[256, 512, 1, 1]", style=solid]; +"596 Constant_62056" -> "595 /layer3/layer3.0/conv1/Conv/WithoutBiases/fq_weights_1" [label="[256, 1, 1, 1]", style=solid]; +"597 Constant_62055" -> "595 /layer3/layer3.0/conv1/Conv/WithoutBiases/fq_weights_1" [label="[256, 1, 1, 1]", style=solid]; +"598 Constant_62054" -> "595 /layer3/layer3.0/conv1/Conv/WithoutBiases/fq_weights_1" [label="[256, 1, 1, 1]", style=solid]; +"599 Constant_62053" -> "595 /layer3/layer3.0/conv1/Conv/WithoutBiases/fq_weights_1" [label="[256, 1, 1, 1]", style=solid]; +"600 onnx^^Conv_569" -> "595 /layer3/layer3.0/conv1/Conv/WithoutBiases/fq_weights_1" [label="[256, 512, 1, 1]", style=solid]; +"601 Reshape_607" -> "153 /layer3/layer3.1/conv3/Conv" [label="[1, 1024, 1, 1]", style=solid]; +"602 /layer3/layer3.1/conv3/Conv/WithoutBiases/fq_weights_1" -> "148 /layer3/layer3.1/conv3/Conv/WithoutBiases" [label="[1024, 256, 1, 1]", style=solid]; +"603 Constant_62111" -> "602 /layer3/layer3.1/conv3/Conv/WithoutBiases/fq_weights_1" [label="[1024, 1, 1, 1]", style=solid]; +"604 Constant_62110" -> "602 /layer3/layer3.1/conv3/Conv/WithoutBiases/fq_weights_1" [label="[1024, 1, 1, 1]", style=solid]; +"605 Constant_62109" -> "602 /layer3/layer3.1/conv3/Conv/WithoutBiases/fq_weights_1" [label="[1024, 1, 1, 1]", style=solid]; +"606 Constant_62108" -> "602 /layer3/layer3.1/conv3/Conv/WithoutBiases/fq_weights_1" [label="[1024, 1, 1, 1]", style=solid]; +"607 onnx^^Conv_587" -> "602 /layer3/layer3.1/conv3/Conv/WithoutBiases/fq_weights_1" [label="[1024, 256, 1, 1]", style=solid]; +"608 Constant_62106" -> "144 /layer3/layer3.1/relu_1/Relu/fq_output_0" [label="[]", style=solid]; +"609 Constant_62105" -> "144 /layer3/layer3.1/relu_1/Relu/fq_output_0" [label="[]", style=solid]; +"610 Constant_62104" -> "144 /layer3/layer3.1/relu_1/Relu/fq_output_0" [label="[]", style=solid]; +"611 Constant_62103" -> "144 /layer3/layer3.1/relu_1/Relu/fq_output_0" [label="[]", style=solid]; +"612 Reshape_591" -> "136 /layer3/layer3.1/conv2/Conv" [label="[1, 256, 1, 1]", style=solid]; +"613 /layer3/layer3.1/conv2/Conv/WithoutBiases/fq_weights_1" -> "132 /layer3/layer3.1/conv2/Conv/WithoutBiases" [label="[256, 256, 3, 3]", style=solid]; +"614 Constant_62101" -> "613 /layer3/layer3.1/conv2/Conv/WithoutBiases/fq_weights_1" [label="[256, 1, 1, 1]", style=solid]; +"615 Constant_62100" -> "613 /layer3/layer3.1/conv2/Conv/WithoutBiases/fq_weights_1" [label="[256, 1, 1, 1]", style=solid]; +"616 Constant_62099" -> "613 /layer3/layer3.1/conv2/Conv/WithoutBiases/fq_weights_1" [label="[256, 1, 1, 1]", style=solid]; +"617 Constant_62098" -> "613 /layer3/layer3.1/conv2/Conv/WithoutBiases/fq_weights_1" [label="[256, 1, 1, 1]", style=solid]; +"618 onnx^^Conv_584" -> "613 /layer3/layer3.1/conv2/Conv/WithoutBiases/fq_weights_1" [label="[256, 256, 3, 3]", style=solid]; +"619 Constant_62096" -> "128 /layer3/layer3.1/relu/Relu/fq_output_0" [label="[]", style=solid]; +"620 Constant_62095" -> "128 /layer3/layer3.1/relu/Relu/fq_output_0" [label="[]", style=solid]; +"621 Constant_62094" -> "128 /layer3/layer3.1/relu/Relu/fq_output_0" [label="[]", style=solid]; +"622 Constant_62093" -> "128 /layer3/layer3.1/relu/Relu/fq_output_0" [label="[]", style=solid]; +"623 Reshape_575" -> "120 /layer3/layer3.1/conv1/Conv" [label="[1, 256, 1, 1]", style=solid]; +"624 /layer3/layer3.1/conv1/Conv/WithoutBiases/fq_weights_1" -> "116 /layer3/layer3.1/conv1/Conv/WithoutBiases" [label="[256, 1024, 1, 1]", 
style=solid]; +"625 Constant_62091" -> "624 /layer3/layer3.1/conv1/Conv/WithoutBiases/fq_weights_1" [label="[256, 1, 1, 1]", style=solid]; +"626 Constant_62090" -> "624 /layer3/layer3.1/conv1/Conv/WithoutBiases/fq_weights_1" [label="[256, 1, 1, 1]", style=solid]; +"627 Constant_62089" -> "624 /layer3/layer3.1/conv1/Conv/WithoutBiases/fq_weights_1" [label="[256, 1, 1, 1]", style=solid]; +"628 Constant_62088" -> "624 /layer3/layer3.1/conv1/Conv/WithoutBiases/fq_weights_1" [label="[256, 1, 1, 1]", style=solid]; +"629 onnx^^Conv_581" -> "624 /layer3/layer3.1/conv1/Conv/WithoutBiases/fq_weights_1" [label="[256, 1024, 1, 1]", style=solid]; +"630 Reshape_656" -> "166 /layer3/layer3.2/conv3/Conv" [label="[1, 1024, 1, 1]", style=solid]; +"631 /layer3/layer3.2/conv3/Conv/WithoutBiases/fq_weights_1" -> "161 /layer3/layer3.2/conv3/Conv/WithoutBiases" [label="[1024, 256, 1, 1]", style=solid]; +"632 Constant_62146" -> "631 /layer3/layer3.2/conv3/Conv/WithoutBiases/fq_weights_1" [label="[1024, 1, 1, 1]", style=solid]; +"633 Constant_62145" -> "631 /layer3/layer3.2/conv3/Conv/WithoutBiases/fq_weights_1" [label="[1024, 1, 1, 1]", style=solid]; +"634 Constant_62144" -> "631 /layer3/layer3.2/conv3/Conv/WithoutBiases/fq_weights_1" [label="[1024, 1, 1, 1]", style=solid]; +"635 Constant_62143" -> "631 /layer3/layer3.2/conv3/Conv/WithoutBiases/fq_weights_1" [label="[1024, 1, 1, 1]", style=solid]; +"636 onnx^^Conv_596" -> "631 /layer3/layer3.2/conv3/Conv/WithoutBiases/fq_weights_1" [label="[1024, 256, 1, 1]", style=solid]; +"637 Constant_62141" -> "157 /layer3/layer3.2/relu_1/Relu/fq_output_0" [label="[]", style=solid]; +"638 Constant_62140" -> "157 /layer3/layer3.2/relu_1/Relu/fq_output_0" [label="[]", style=solid]; +"639 Constant_62139" -> "157 /layer3/layer3.2/relu_1/Relu/fq_output_0" [label="[]", style=solid]; +"640 Constant_62138" -> "157 /layer3/layer3.2/relu_1/Relu/fq_output_0" [label="[]", style=solid]; +"641 Reshape_640" -> "147 /layer3/layer3.2/conv2/Conv" [label="[1, 256, 1, 1]", style=solid]; +"642 /layer3/layer3.2/conv2/Conv/WithoutBiases/fq_weights_1" -> "143 /layer3/layer3.2/conv2/Conv/WithoutBiases" [label="[256, 256, 3, 3]", style=solid]; +"643 Constant_62136" -> "642 /layer3/layer3.2/conv2/Conv/WithoutBiases/fq_weights_1" [label="[256, 1, 1, 1]", style=solid]; +"644 Constant_62135" -> "642 /layer3/layer3.2/conv2/Conv/WithoutBiases/fq_weights_1" [label="[256, 1, 1, 1]", style=solid]; +"645 Constant_62134" -> "642 /layer3/layer3.2/conv2/Conv/WithoutBiases/fq_weights_1" [label="[256, 1, 1, 1]", style=solid]; +"646 Constant_62133" -> "642 /layer3/layer3.2/conv2/Conv/WithoutBiases/fq_weights_1" [label="[256, 1, 1, 1]", style=solid]; +"647 onnx^^Conv_593" -> "642 /layer3/layer3.2/conv2/Conv/WithoutBiases/fq_weights_1" [label="[256, 256, 3, 3]", style=solid]; +"648 Constant_62131" -> "139 /layer3/layer3.2/relu/Relu/fq_output_0" [label="[]", style=solid]; +"649 Constant_62130" -> "139 /layer3/layer3.2/relu/Relu/fq_output_0" [label="[]", style=solid]; +"650 Constant_62129" -> "139 /layer3/layer3.2/relu/Relu/fq_output_0" [label="[]", style=solid]; +"651 Constant_62128" -> "139 /layer3/layer3.2/relu/Relu/fq_output_0" [label="[]", style=solid]; +"652 Reshape_624" -> "131 /layer3/layer3.2/conv1/Conv" [label="[1, 256, 1, 1]", style=solid]; +"653 /layer3/layer3.2/conv1/Conv/WithoutBiases/fq_weights_1" -> "127 /layer3/layer3.2/conv1/Conv/WithoutBiases" [label="[256, 1024, 1, 1]", style=solid]; +"654 Constant_62126" -> "653 /layer3/layer3.2/conv1/Conv/WithoutBiases/fq_weights_1" [label="[256, 1, 1, 1]", 
style=solid]; +"655 Constant_62125" -> "653 /layer3/layer3.2/conv1/Conv/WithoutBiases/fq_weights_1" [label="[256, 1, 1, 1]", style=solid]; +"656 Constant_62124" -> "653 /layer3/layer3.2/conv1/Conv/WithoutBiases/fq_weights_1" [label="[256, 1, 1, 1]", style=solid]; +"657 Constant_62123" -> "653 /layer3/layer3.2/conv1/Conv/WithoutBiases/fq_weights_1" [label="[256, 1, 1, 1]", style=solid]; +"658 onnx^^Conv_590" -> "653 /layer3/layer3.2/conv1/Conv/WithoutBiases/fq_weights_1" [label="[256, 1024, 1, 1]", style=solid]; +"659 Reshape_705" -> "179 /layer3/layer3.3/conv3/Conv" [label="[1, 1024, 1, 1]", style=solid]; +"660 /layer3/layer3.3/conv3/Conv/WithoutBiases/fq_weights_1" -> "174 /layer3/layer3.3/conv3/Conv/WithoutBiases" [label="[1024, 256, 1, 1]", style=solid]; +"661 Constant_62181" -> "660 /layer3/layer3.3/conv3/Conv/WithoutBiases/fq_weights_1" [label="[1024, 1, 1, 1]", style=solid]; +"662 Constant_62180" -> "660 /layer3/layer3.3/conv3/Conv/WithoutBiases/fq_weights_1" [label="[1024, 1, 1, 1]", style=solid]; +"663 Constant_62179" -> "660 /layer3/layer3.3/conv3/Conv/WithoutBiases/fq_weights_1" [label="[1024, 1, 1, 1]", style=solid]; +"664 Constant_62178" -> "660 /layer3/layer3.3/conv3/Conv/WithoutBiases/fq_weights_1" [label="[1024, 1, 1, 1]", style=solid]; +"665 onnx^^Conv_605" -> "660 /layer3/layer3.3/conv3/Conv/WithoutBiases/fq_weights_1" [label="[1024, 256, 1, 1]", style=solid]; +"666 Constant_62176" -> "170 /layer3/layer3.3/relu_1/Relu/fq_output_0" [label="[]", style=solid]; +"667 Constant_62175" -> "170 /layer3/layer3.3/relu_1/Relu/fq_output_0" [label="[]", style=solid]; +"668 Constant_62174" -> "170 /layer3/layer3.3/relu_1/Relu/fq_output_0" [label="[]", style=solid]; +"669 Constant_62173" -> "170 /layer3/layer3.3/relu_1/Relu/fq_output_0" [label="[]", style=solid]; +"670 Reshape_689" -> "160 /layer3/layer3.3/conv2/Conv" [label="[1, 256, 1, 1]", style=solid]; +"671 /layer3/layer3.3/conv2/Conv/WithoutBiases/fq_weights_1" -> "156 /layer3/layer3.3/conv2/Conv/WithoutBiases" [label="[256, 256, 3, 3]", style=solid]; +"672 Constant_62171" -> "671 /layer3/layer3.3/conv2/Conv/WithoutBiases/fq_weights_1" [label="[256, 1, 1, 1]", style=solid]; +"673 Constant_62170" -> "671 /layer3/layer3.3/conv2/Conv/WithoutBiases/fq_weights_1" [label="[256, 1, 1, 1]", style=solid]; +"674 Constant_62169" -> "671 /layer3/layer3.3/conv2/Conv/WithoutBiases/fq_weights_1" [label="[256, 1, 1, 1]", style=solid]; +"675 Constant_62168" -> "671 /layer3/layer3.3/conv2/Conv/WithoutBiases/fq_weights_1" [label="[256, 1, 1, 1]", style=solid]; +"676 onnx^^Conv_602" -> "671 /layer3/layer3.3/conv2/Conv/WithoutBiases/fq_weights_1" [label="[256, 256, 3, 3]", style=solid]; +"677 Constant_62166" -> "151 /layer3/layer3.3/relu/Relu/fq_output_0" [label="[]", style=solid]; +"678 Constant_62165" -> "151 /layer3/layer3.3/relu/Relu/fq_output_0" [label="[]", style=solid]; +"679 Constant_62164" -> "151 /layer3/layer3.3/relu/Relu/fq_output_0" [label="[]", style=solid]; +"680 Constant_62163" -> "151 /layer3/layer3.3/relu/Relu/fq_output_0" [label="[]", style=solid]; +"681 Reshape_673" -> "142 /layer3/layer3.3/conv1/Conv" [label="[1, 256, 1, 1]", style=solid]; +"682 /layer3/layer3.3/conv1/Conv/WithoutBiases/fq_weights_1" -> "138 /layer3/layer3.3/conv1/Conv/WithoutBiases" [label="[256, 1024, 1, 1]", style=solid]; +"683 Constant_62161" -> "682 /layer3/layer3.3/conv1/Conv/WithoutBiases/fq_weights_1" [label="[256, 1, 1, 1]", style=solid]; +"684 Constant_62160" -> "682 /layer3/layer3.3/conv1/Conv/WithoutBiases/fq_weights_1" [label="[256, 1, 1, 1]", 
style=solid]; +"685 Constant_62159" -> "682 /layer3/layer3.3/conv1/Conv/WithoutBiases/fq_weights_1" [label="[256, 1, 1, 1]", style=solid]; +"686 Constant_62158" -> "682 /layer3/layer3.3/conv1/Conv/WithoutBiases/fq_weights_1" [label="[256, 1, 1, 1]", style=solid]; +"687 onnx^^Conv_599" -> "682 /layer3/layer3.3/conv1/Conv/WithoutBiases/fq_weights_1" [label="[256, 1024, 1, 1]", style=solid]; +"688 Reshape_754" -> "191 /layer3/layer3.4/conv3/Conv" [label="[1, 1024, 1, 1]", style=solid]; +"689 /layer3/layer3.4/conv3/Conv/WithoutBiases/fq_weights_1" -> "187 /layer3/layer3.4/conv3/Conv/WithoutBiases" [label="[1024, 256, 1, 1]", style=solid]; +"690 Constant_62216" -> "689 /layer3/layer3.4/conv3/Conv/WithoutBiases/fq_weights_1" [label="[1024, 1, 1, 1]", style=solid]; +"691 Constant_62215" -> "689 /layer3/layer3.4/conv3/Conv/WithoutBiases/fq_weights_1" [label="[1024, 1, 1, 1]", style=solid]; +"692 Constant_62214" -> "689 /layer3/layer3.4/conv3/Conv/WithoutBiases/fq_weights_1" [label="[1024, 1, 1, 1]", style=solid]; +"693 Constant_62213" -> "689 /layer3/layer3.4/conv3/Conv/WithoutBiases/fq_weights_1" [label="[1024, 1, 1, 1]", style=solid]; +"694 onnx^^Conv_614" -> "689 /layer3/layer3.4/conv3/Conv/WithoutBiases/fq_weights_1" [label="[1024, 256, 1, 1]", style=solid]; +"695 Constant_62211" -> "183 /layer3/layer3.4/relu_1/Relu/fq_output_0" [label="[]", style=solid]; +"696 Constant_62210" -> "183 /layer3/layer3.4/relu_1/Relu/fq_output_0" [label="[]", style=solid]; +"697 Constant_62209" -> "183 /layer3/layer3.4/relu_1/Relu/fq_output_0" [label="[]", style=solid]; +"698 Constant_62208" -> "183 /layer3/layer3.4/relu_1/Relu/fq_output_0" [label="[]", style=solid]; +"699 Reshape_738" -> "173 /layer3/layer3.4/conv2/Conv" [label="[1, 256, 1, 1]", style=solid]; +"700 /layer3/layer3.4/conv2/Conv/WithoutBiases/fq_weights_1" -> "169 /layer3/layer3.4/conv2/Conv/WithoutBiases" [label="[256, 256, 3, 3]", style=solid]; +"701 Constant_62206" -> "700 /layer3/layer3.4/conv2/Conv/WithoutBiases/fq_weights_1" [label="[256, 1, 1, 1]", style=solid]; +"702 Constant_62205" -> "700 /layer3/layer3.4/conv2/Conv/WithoutBiases/fq_weights_1" [label="[256, 1, 1, 1]", style=solid]; +"703 Constant_62204" -> "700 /layer3/layer3.4/conv2/Conv/WithoutBiases/fq_weights_1" [label="[256, 1, 1, 1]", style=solid]; +"704 Constant_62203" -> "700 /layer3/layer3.4/conv2/Conv/WithoutBiases/fq_weights_1" [label="[256, 1, 1, 1]", style=solid]; +"705 onnx^^Conv_611" -> "700 /layer3/layer3.4/conv2/Conv/WithoutBiases/fq_weights_1" [label="[256, 256, 3, 3]", style=solid]; +"706 Constant_62201" -> "164 /layer3/layer3.4/relu/Relu/fq_output_0" [label="[]", style=solid]; +"707 Constant_62200" -> "164 /layer3/layer3.4/relu/Relu/fq_output_0" [label="[]", style=solid]; +"708 Constant_62199" -> "164 /layer3/layer3.4/relu/Relu/fq_output_0" [label="[]", style=solid]; +"709 Constant_62198" -> "164 /layer3/layer3.4/relu/Relu/fq_output_0" [label="[]", style=solid]; +"710 Reshape_722" -> "155 /layer3/layer3.4/conv1/Conv" [label="[1, 256, 1, 1]", style=solid]; +"711 /layer3/layer3.4/conv1/Conv/WithoutBiases/fq_weights_1" -> "150 /layer3/layer3.4/conv1/Conv/WithoutBiases" [label="[256, 1024, 1, 1]", style=solid]; +"712 Constant_62196" -> "711 /layer3/layer3.4/conv1/Conv/WithoutBiases/fq_weights_1" [label="[256, 1, 1, 1]", style=solid]; +"713 Constant_62195" -> "711 /layer3/layer3.4/conv1/Conv/WithoutBiases/fq_weights_1" [label="[256, 1, 1, 1]", style=solid]; +"714 Constant_62194" -> "711 /layer3/layer3.4/conv1/Conv/WithoutBiases/fq_weights_1" [label="[256, 1, 1, 1]", 
style=solid]; +"715 Constant_62193" -> "711 /layer3/layer3.4/conv1/Conv/WithoutBiases/fq_weights_1" [label="[256, 1, 1, 1]", style=solid]; +"716 onnx^^Conv_608" -> "711 /layer3/layer3.4/conv1/Conv/WithoutBiases/fq_weights_1" [label="[256, 1024, 1, 1]", style=solid]; +"717 Reshape_803" -> "202 /layer3/layer3.5/conv3/Conv" [label="[1, 1024, 1, 1]", style=solid]; +"718 /layer3/layer3.5/conv3/Conv/WithoutBiases/fq_weights_1" -> "198 /layer3/layer3.5/conv3/Conv/WithoutBiases" [label="[1024, 256, 1, 1]", style=solid]; +"719 Constant_62251" -> "718 /layer3/layer3.5/conv3/Conv/WithoutBiases/fq_weights_1" [label="[1024, 1, 1, 1]", style=solid]; +"720 Constant_62250" -> "718 /layer3/layer3.5/conv3/Conv/WithoutBiases/fq_weights_1" [label="[1024, 1, 1, 1]", style=solid]; +"721 Constant_62249" -> "718 /layer3/layer3.5/conv3/Conv/WithoutBiases/fq_weights_1" [label="[1024, 1, 1, 1]", style=solid]; +"722 Constant_62248" -> "718 /layer3/layer3.5/conv3/Conv/WithoutBiases/fq_weights_1" [label="[1024, 1, 1, 1]", style=solid]; +"723 onnx^^Conv_623" -> "718 /layer3/layer3.5/conv3/Conv/WithoutBiases/fq_weights_1" [label="[1024, 256, 1, 1]", style=solid]; +"724 Constant_62246" -> "194 /layer3/layer3.5/relu_1/Relu/fq_output_0" [label="[]", style=solid]; +"725 Constant_62245" -> "194 /layer3/layer3.5/relu_1/Relu/fq_output_0" [label="[]", style=solid]; +"726 Constant_62244" -> "194 /layer3/layer3.5/relu_1/Relu/fq_output_0" [label="[]", style=solid]; +"727 Constant_62243" -> "194 /layer3/layer3.5/relu_1/Relu/fq_output_0" [label="[]", style=solid]; +"728 Reshape_787" -> "186 /layer3/layer3.5/conv2/Conv" [label="[1, 256, 1, 1]", style=solid]; +"729 /layer3/layer3.5/conv2/Conv/WithoutBiases/fq_weights_1" -> "182 /layer3/layer3.5/conv2/Conv/WithoutBiases" [label="[256, 256, 3, 3]", style=solid]; +"730 Constant_62241" -> "729 /layer3/layer3.5/conv2/Conv/WithoutBiases/fq_weights_1" [label="[256, 1, 1, 1]", style=solid]; +"731 Constant_62240" -> "729 /layer3/layer3.5/conv2/Conv/WithoutBiases/fq_weights_1" [label="[256, 1, 1, 1]", style=solid]; +"732 Constant_62239" -> "729 /layer3/layer3.5/conv2/Conv/WithoutBiases/fq_weights_1" [label="[256, 1, 1, 1]", style=solid]; +"733 Constant_62238" -> "729 /layer3/layer3.5/conv2/Conv/WithoutBiases/fq_weights_1" [label="[256, 1, 1, 1]", style=solid]; +"734 onnx^^Conv_620" -> "729 /layer3/layer3.5/conv2/Conv/WithoutBiases/fq_weights_1" [label="[256, 256, 3, 3]", style=solid]; +"735 Constant_62236" -> "177 /layer3/layer3.5/relu/Relu/fq_output_0" [label="[]", style=solid]; +"736 Constant_62235" -> "177 /layer3/layer3.5/relu/Relu/fq_output_0" [label="[]", style=solid]; +"737 Constant_62234" -> "177 /layer3/layer3.5/relu/Relu/fq_output_0" [label="[]", style=solid]; +"738 Constant_62233" -> "177 /layer3/layer3.5/relu/Relu/fq_output_0" [label="[]", style=solid]; +"739 Reshape_771" -> "168 /layer3/layer3.5/conv1/Conv" [label="[1, 256, 1, 1]", style=solid]; +"740 /layer3/layer3.5/conv1/Conv/WithoutBiases/fq_weights_1" -> "163 /layer3/layer3.5/conv1/Conv/WithoutBiases" [label="[256, 1024, 1, 1]", style=solid]; +"741 Constant_62231" -> "740 /layer3/layer3.5/conv1/Conv/WithoutBiases/fq_weights_1" [label="[256, 1, 1, 1]", style=solid]; +"742 Constant_62230" -> "740 /layer3/layer3.5/conv1/Conv/WithoutBiases/fq_weights_1" [label="[256, 1, 1, 1]", style=solid]; +"743 Constant_62229" -> "740 /layer3/layer3.5/conv1/Conv/WithoutBiases/fq_weights_1" [label="[256, 1, 1, 1]", style=solid]; +"744 Constant_62228" -> "740 /layer3/layer3.5/conv1/Conv/WithoutBiases/fq_weights_1" [label="[256, 1, 1, 1]", 
style=solid]; +"745 onnx^^Conv_617" -> "740 /layer3/layer3.5/conv1/Conv/WithoutBiases/fq_weights_1" [label="[256, 1024, 1, 1]", style=solid]; +"746 Constant_62301" -> "214 /layer4/layer4.0/conv3/Conv/fq_output_0" [label="[]", style=solid]; +"747 Constant_62300" -> "214 /layer4/layer4.0/conv3/Conv/fq_output_0" [label="[]", style=solid]; +"748 Constant_62299" -> "214 /layer4/layer4.0/conv3/Conv/fq_output_0" [label="[]", style=solid]; +"749 Constant_62298" -> "214 /layer4/layer4.0/conv3/Conv/fq_output_0" [label="[]", style=solid]; +"750 Reshape_852" -> "210 /layer4/layer4.0/conv3/Conv" [label="[1, 2048, 1, 1]", style=solid]; +"751 /layer4/layer4.0/conv3/Conv/WithoutBiases/fq_weights_1" -> "206 /layer4/layer4.0/conv3/Conv/WithoutBiases" [label="[2048, 512, 1, 1]", style=solid]; +"752 Constant_62296" -> "751 /layer4/layer4.0/conv3/Conv/WithoutBiases/fq_weights_1" [label="[2048, 1, 1, 1]", style=solid]; +"753 Constant_62295" -> "751 /layer4/layer4.0/conv3/Conv/WithoutBiases/fq_weights_1" [label="[2048, 1, 1, 1]", style=solid]; +"754 Constant_62294" -> "751 /layer4/layer4.0/conv3/Conv/WithoutBiases/fq_weights_1" [label="[2048, 1, 1, 1]", style=solid]; +"755 Constant_62293" -> "751 /layer4/layer4.0/conv3/Conv/WithoutBiases/fq_weights_1" [label="[2048, 1, 1, 1]", style=solid]; +"756 onnx^^Conv_632" -> "751 /layer4/layer4.0/conv3/Conv/WithoutBiases/fq_weights_1" [label="[2048, 512, 1, 1]", style=solid]; +"757 Constant_62291" -> "203 /layer4/layer4.0/relu_1/Relu/fq_output_0" [label="[]", style=solid]; +"758 Constant_62290" -> "203 /layer4/layer4.0/relu_1/Relu/fq_output_0" [label="[]", style=solid]; +"759 Constant_62289" -> "203 /layer4/layer4.0/relu_1/Relu/fq_output_0" [label="[]", style=solid]; +"760 Constant_62288" -> "203 /layer4/layer4.0/relu_1/Relu/fq_output_0" [label="[]", style=solid]; +"761 Reshape_836" -> "195 /layer4/layer4.0/conv2/Conv" [label="[1, 512, 1, 1]", style=solid]; +"762 /layer4/layer4.0/conv2/Conv/WithoutBiases/fq_weights_1" -> "192 /layer4/layer4.0/conv2/Conv/WithoutBiases" [label="[512, 512, 3, 3]", style=solid]; +"763 Constant_62286" -> "762 /layer4/layer4.0/conv2/Conv/WithoutBiases/fq_weights_1" [label="[512, 1, 1, 1]", style=solid]; +"764 Constant_62285" -> "762 /layer4/layer4.0/conv2/Conv/WithoutBiases/fq_weights_1" [label="[512, 1, 1, 1]", style=solid]; +"765 Constant_62284" -> "762 /layer4/layer4.0/conv2/Conv/WithoutBiases/fq_weights_1" [label="[512, 1, 1, 1]", style=solid]; +"766 Constant_62283" -> "762 /layer4/layer4.0/conv2/Conv/WithoutBiases/fq_weights_1" [label="[512, 1, 1, 1]", style=solid]; +"767 onnx^^Conv_629" -> "762 /layer4/layer4.0/conv2/Conv/WithoutBiases/fq_weights_1" [label="[512, 512, 3, 3]", style=solid]; +"768 Constant_62281" -> "188 /layer4/layer4.0/relu/Relu/fq_output_0" [label="[]", style=solid]; +"769 Constant_62280" -> "188 /layer4/layer4.0/relu/Relu/fq_output_0" [label="[]", style=solid]; +"770 Constant_62279" -> "188 /layer4/layer4.0/relu/Relu/fq_output_0" [label="[]", style=solid]; +"771 Constant_62278" -> "188 /layer4/layer4.0/relu/Relu/fq_output_0" [label="[]", style=solid]; +"772 Reshape_820" -> "180 /layer4/layer4.0/conv1/Conv" [label="[1, 512, 1, 1]", style=solid]; +"773 /layer4/layer4.0/conv1/Conv/WithoutBiases/fq_weights_1" -> "175 /layer4/layer4.0/conv1/Conv/WithoutBiases" [label="[512, 1024, 1, 1]", style=solid]; +"774 Constant_62276" -> "773 /layer4/layer4.0/conv1/Conv/WithoutBiases/fq_weights_1" [label="[512, 1, 1, 1]", style=solid]; +"775 Constant_62275" -> "773 /layer4/layer4.0/conv1/Conv/WithoutBiases/fq_weights_1" [label="[512, 1, 
1, 1]", style=solid]; +"776 Constant_62274" -> "773 /layer4/layer4.0/conv1/Conv/WithoutBiases/fq_weights_1" [label="[512, 1, 1, 1]", style=solid]; +"777 Constant_62273" -> "773 /layer4/layer4.0/conv1/Conv/WithoutBiases/fq_weights_1" [label="[512, 1, 1, 1]", style=solid]; +"778 onnx^^Conv_626" -> "773 /layer4/layer4.0/conv1/Conv/WithoutBiases/fq_weights_1" [label="[512, 1024, 1, 1]", style=solid]; +"779 Reshape_916" -> "229 /layer4/layer4.1/conv3/Conv" [label="[1, 2048, 1, 1]", style=solid]; +"780 /layer4/layer4.1/conv3/Conv/WithoutBiases/fq_weights_1" -> "226 /layer4/layer4.1/conv3/Conv/WithoutBiases" [label="[2048, 512, 1, 1]", style=solid]; +"781 Constant_62331" -> "780 /layer4/layer4.1/conv3/Conv/WithoutBiases/fq_weights_1" [label="[2048, 1, 1, 1]", style=solid]; +"782 Constant_62330" -> "780 /layer4/layer4.1/conv3/Conv/WithoutBiases/fq_weights_1" [label="[2048, 1, 1, 1]", style=solid]; +"783 Constant_62329" -> "780 /layer4/layer4.1/conv3/Conv/WithoutBiases/fq_weights_1" [label="[2048, 1, 1, 1]", style=solid]; +"784 Constant_62328" -> "780 /layer4/layer4.1/conv3/Conv/WithoutBiases/fq_weights_1" [label="[2048, 1, 1, 1]", style=solid]; +"785 onnx^^Conv_644" -> "780 /layer4/layer4.1/conv3/Conv/WithoutBiases/fq_weights_1" [label="[2048, 512, 1, 1]", style=solid]; +"786 Constant_62326" -> "223 /layer4/layer4.1/relu_1/Relu/fq_output_0" [label="[]", style=solid]; +"787 Constant_62325" -> "223 /layer4/layer4.1/relu_1/Relu/fq_output_0" [label="[]", style=solid]; +"788 Constant_62324" -> "223 /layer4/layer4.1/relu_1/Relu/fq_output_0" [label="[]", style=solid]; +"789 Constant_62323" -> "223 /layer4/layer4.1/relu_1/Relu/fq_output_0" [label="[]", style=solid]; +"790 Reshape_900" -> "217 /layer4/layer4.1/conv2/Conv" [label="[1, 512, 1, 1]", style=solid]; +"791 /layer4/layer4.1/conv2/Conv/WithoutBiases/fq_weights_1" -> "213 /layer4/layer4.1/conv2/Conv/WithoutBiases" [label="[512, 512, 3, 3]", style=solid]; +"792 Constant_62321" -> "791 /layer4/layer4.1/conv2/Conv/WithoutBiases/fq_weights_1" [label="[512, 1, 1, 1]", style=solid]; +"793 Constant_62320" -> "791 /layer4/layer4.1/conv2/Conv/WithoutBiases/fq_weights_1" [label="[512, 1, 1, 1]", style=solid]; +"794 Constant_62319" -> "791 /layer4/layer4.1/conv2/Conv/WithoutBiases/fq_weights_1" [label="[512, 1, 1, 1]", style=solid]; +"795 Constant_62318" -> "791 /layer4/layer4.1/conv2/Conv/WithoutBiases/fq_weights_1" [label="[512, 1, 1, 1]", style=solid]; +"796 onnx^^Conv_641" -> "791 /layer4/layer4.1/conv2/Conv/WithoutBiases/fq_weights_1" [label="[512, 512, 3, 3]", style=solid]; +"797 Constant_62316" -> "209 /layer4/layer4.1/relu/Relu/fq_output_0" [label="[]", style=solid]; +"798 Constant_62315" -> "209 /layer4/layer4.1/relu/Relu/fq_output_0" [label="[]", style=solid]; +"799 Constant_62314" -> "209 /layer4/layer4.1/relu/Relu/fq_output_0" [label="[]", style=solid]; +"800 Constant_62313" -> "209 /layer4/layer4.1/relu/Relu/fq_output_0" [label="[]", style=solid]; +"801 Reshape_884" -> "201 /layer4/layer4.1/conv1/Conv" [label="[1, 512, 1, 1]", style=solid]; +"802 /layer4/layer4.1/conv1/Conv/WithoutBiases/fq_weights_1" -> "197 /layer4/layer4.1/conv1/Conv/WithoutBiases" [label="[512, 2048, 1, 1]", style=solid]; +"803 Constant_62311" -> "802 /layer4/layer4.1/conv1/Conv/WithoutBiases/fq_weights_1" [label="[512, 1, 1, 1]", style=solid]; +"804 Constant_62310" -> "802 /layer4/layer4.1/conv1/Conv/WithoutBiases/fq_weights_1" [label="[512, 1, 1, 1]", style=solid]; +"805 Constant_62309" -> "802 /layer4/layer4.1/conv1/Conv/WithoutBiases/fq_weights_1" [label="[512, 1, 1, 1]", 
style=solid]; +"806 Constant_62308" -> "802 /layer4/layer4.1/conv1/Conv/WithoutBiases/fq_weights_1" [label="[512, 1, 1, 1]", style=solid]; +"807 onnx^^Conv_638" -> "802 /layer4/layer4.1/conv1/Conv/WithoutBiases/fq_weights_1" [label="[512, 2048, 1, 1]", style=solid]; +"808 Reshape_965" -> "234 /layer4/layer4.2/conv3/Conv" [label="[1, 2048, 1, 1]", style=solid]; +"809 /layer4/layer4.2/conv3/Conv/WithoutBiases/fq_weights_1" -> "233 /layer4/layer4.2/conv3/Conv/WithoutBiases" [label="[2048, 512, 1, 1]", style=solid]; +"810 Constant_62366" -> "809 /layer4/layer4.2/conv3/Conv/WithoutBiases/fq_weights_1" [label="[2048, 1, 1, 1]", style=solid]; +"811 Constant_62365" -> "809 /layer4/layer4.2/conv3/Conv/WithoutBiases/fq_weights_1" [label="[2048, 1, 1, 1]", style=solid]; +"812 Constant_62364" -> "809 /layer4/layer4.2/conv3/Conv/WithoutBiases/fq_weights_1" [label="[2048, 1, 1, 1]", style=solid]; +"813 Constant_62363" -> "809 /layer4/layer4.2/conv3/Conv/WithoutBiases/fq_weights_1" [label="[2048, 1, 1, 1]", style=solid]; +"814 onnx^^Conv_653" -> "809 /layer4/layer4.2/conv3/Conv/WithoutBiases/fq_weights_1" [label="[2048, 512, 1, 1]", style=solid]; +"815 Constant_62361" -> "231 /layer4/layer4.2/relu_1/Relu/fq_output_0" [label="[]", style=solid]; +"816 Constant_62360" -> "231 /layer4/layer4.2/relu_1/Relu/fq_output_0" [label="[]", style=solid]; +"817 Constant_62359" -> "231 /layer4/layer4.2/relu_1/Relu/fq_output_0" [label="[]", style=solid]; +"818 Constant_62358" -> "231 /layer4/layer4.2/relu_1/Relu/fq_output_0" [label="[]", style=solid]; +"819 Reshape_949" -> "225 /layer4/layer4.2/conv2/Conv" [label="[1, 512, 1, 1]", style=solid]; +"820 /layer4/layer4.2/conv2/Conv/WithoutBiases/fq_weights_1" -> "222 /layer4/layer4.2/conv2/Conv/WithoutBiases" [label="[512, 512, 3, 3]", style=solid]; +"821 Constant_62356" -> "820 /layer4/layer4.2/conv2/Conv/WithoutBiases/fq_weights_1" [label="[512, 1, 1, 1]", style=solid]; +"822 Constant_62355" -> "820 /layer4/layer4.2/conv2/Conv/WithoutBiases/fq_weights_1" [label="[512, 1, 1, 1]", style=solid]; +"823 Constant_62354" -> "820 /layer4/layer4.2/conv2/Conv/WithoutBiases/fq_weights_1" [label="[512, 1, 1, 1]", style=solid]; +"824 Constant_62353" -> "820 /layer4/layer4.2/conv2/Conv/WithoutBiases/fq_weights_1" [label="[512, 1, 1, 1]", style=solid]; +"825 onnx^^Conv_650" -> "820 /layer4/layer4.2/conv2/Conv/WithoutBiases/fq_weights_1" [label="[512, 512, 3, 3]", style=solid]; +"826 Constant_62351" -> "219 /layer4/layer4.2/relu/Relu/fq_output_0" [label="[]", style=solid]; +"827 Constant_62350" -> "219 /layer4/layer4.2/relu/Relu/fq_output_0" [label="[]", style=solid]; +"828 Constant_62349" -> "219 /layer4/layer4.2/relu/Relu/fq_output_0" [label="[]", style=solid]; +"829 Constant_62348" -> "219 /layer4/layer4.2/relu/Relu/fq_output_0" [label="[]", style=solid]; +"830 Reshape_933" -> "212 /layer4/layer4.2/conv1/Conv" [label="[1, 512, 1, 1]", style=solid]; +"831 /layer4/layer4.2/conv1/Conv/WithoutBiases/fq_weights_1" -> "208 /layer4/layer4.2/conv1/Conv/WithoutBiases" [label="[512, 2048, 1, 1]", style=solid]; +"832 Constant_62346" -> "831 /layer4/layer4.2/conv1/Conv/WithoutBiases/fq_weights_1" [label="[512, 1, 1, 1]", style=solid]; +"833 Constant_62345" -> "831 /layer4/layer4.2/conv1/Conv/WithoutBiases/fq_weights_1" [label="[512, 1, 1, 1]", style=solid]; +"834 Constant_62344" -> "831 /layer4/layer4.2/conv1/Conv/WithoutBiases/fq_weights_1" [label="[512, 1, 1, 1]", style=solid]; +"835 Constant_62343" -> "831 /layer4/layer4.2/conv1/Conv/WithoutBiases/fq_weights_1" [label="[512, 1, 1, 1]", 
style=solid]; +"836 onnx^^Conv_647" -> "831 /layer4/layer4.2/conv1/Conv/WithoutBiases/fq_weights_1" [label="[512, 2048, 1, 1]", style=solid]; +} diff --git a/tests/openvino/native/data/reference_graphs/quantized/swin-tiny-patch4-window7-224.dot b/tests/openvino/native/data/reference_graphs/quantized/swin-tiny-patch4-window7-224_performance_transformer.dot similarity index 100% rename from tests/openvino/native/data/reference_graphs/quantized/swin-tiny-patch4-window7-224.dot rename to tests/openvino/native/data/reference_graphs/quantized/swin-tiny-patch4-window7-224_performance_transformer.dot diff --git a/tests/openvino/native/data/reference_graphs/quantized/swin-tiny-patch4-window7-224_sq.dot b/tests/openvino/native/data/reference_graphs/quantized/swin-tiny-patch4-window7-224_sq.dot new file mode 100644 index 00000000000..5ee24d3048c --- /dev/null +++ b/tests/openvino/native/data/reference_graphs/quantized/swin-tiny-patch4-window7-224_sq.dot @@ -0,0 +1,2750 @@ +strict digraph { +"0 input" [id=0, type=Parameter]; +"1 Multiply_6794" [id=1, type=Multiply]; +"2 Divide_2169" [id=2, type=Add]; +"3 /patch_embed/proj/Conv/WithoutBiases" [id=3, type=Convolution]; +"4 /patch_embed/proj/Conv" [id=4, type=Add]; +"5 /patch_embed/Reshape" [id=5, type=Reshape]; +"6 /patch_embed/Shape" [id=6, type=ShapeOf]; +"7 /patch_embed/Transpose" [id=7, type=Transpose]; +"8 /patch_embed/Slice" [id=8, type=StridedSlice]; +"9 /patch_embed/norm/Div" [id=9, type=MVN]; +"10 /patch_embed/Concat" [id=10, type=Concat]; +"11 /patch_embed/norm/Mul" [id=11, type=Multiply]; +"12 /patch_embed/norm/Add_1" [id=12, type=Add]; +"13 /layers/layers.0/blocks.0/Add" [id=13, type=Add]; +"14 /layers/layers.0/blocks.0/norm1/Div" [id=14, type=MVN]; +"15 /layers/layers.0/blocks.0/Add_1" [id=15, type=Add]; +"16 /layers/layers.0/blocks.0/norm2/Div" [id=16, type=MVN]; +"17 /layers/layers.0/blocks.0/norm1/Mul" [id=17, type=Multiply]; +"18 /layers/layers.0/blocks.1/Add" [id=18, type=Add]; +"19 /layers/layers.0/blocks.1/norm1/Div" [id=19, type=MVN]; +"20 /layers/layers.0/blocks.0/norm2/Mul" [id=20, type=Multiply]; +"21 /layers/layers.0/blocks.0/norm1/Add_1" [id=21, type=Add]; +"22 /layers/layers.0/blocks.1/Add_1" [id=22, type=Add]; +"23 /layers/layers.0/blocks.1/norm2/Div" [id=23, type=MVN]; +"24 /layers/layers.0/blocks.1/norm1/Mul" [id=24, type=Multiply]; +"25 /layers/layers.0/blocks.0/norm2/Add_1" [id=25, type=Add]; +"26 /layers/layers.0/blocks.0/Reshape_1" [id=26, type=Reshape]; +"27 /layers/layers.0/downsample/Reshape" [id=27, type=Reshape]; +"28 /layers/layers.0/blocks.1/norm2/Mul" [id=28, type=Multiply]; +"29 /layers/layers.0/blocks.1/norm1/Add_1" [id=29, type=Add]; +"30 /layers/layers.0/blocks.0/norm2/Add_1/smooth_quant_multiply" [id=30, type=Multiply]; +"31 /layers/layers.0/blocks.0/Transpose" [id=31, type=Transpose]; +"32 /layers/layers.0/downsample/Slice" [id=32, type=StridedSlice]; +"33 /layers/layers.0/downsample/Slice_2" [id=33, type=StridedSlice]; +"34 /layers/layers.0/blocks.1/norm2/Add_1" [id=34, type=Add]; +"35 /layers/layers.0/blocks.1/Reshape" [id=35, type=Reshape]; +"36 /layers/layers.0/blocks.0/mlp/fc1/MatMul" [id=36, type=MatMul]; +"37 /layers/layers.0/blocks.0/Reshape_2" [id=37, type=Reshape]; +"38 /layers/layers.0/downsample/Slice_1" [id=38, type=StridedSlice]; +"39 /layers/layers.0/downsample/Slice_4" [id=39, type=StridedSlice]; +"40 /layers/layers.0/downsample/Slice_3" [id=40, type=StridedSlice]; +"41 /layers/layers.0/downsample/Slice_5" [id=41, type=StridedSlice]; +"42 
/layers/layers.0/blocks.1/norm2/Add_1/smooth_quant_multiply" [id=42, type=Multiply]; +"43 /layers/layers.0/blocks.1/Slice" [id=43, type=StridedSlice]; +"44 /layers/layers.0/blocks.1/Slice_1" [id=44, type=StridedSlice]; +"45 /layers/layers.0/blocks.0/mlp/fc1/Add" [id=45, type=Add]; +"46 /layers/layers.0/blocks.0/Reshape_3" [id=46, type=Reshape]; +"47 /layers/layers.0/downsample/Concat" [id=47, type=Concat]; +"48 /layers/layers.0/blocks.1/mlp/fc1/MatMul" [id=48, type=MatMul]; +"49 /layers/layers.0/blocks.1/Concat" [id=49, type=Concat]; +"50 /layers/layers.0/blocks.0/mlp/act/Mul_1" [id=50, type=Gelu]; +"51 /layers/layers.0/blocks.0/Reshape_3/smooth_quant_multiply" [id=51, type=Multiply]; +"52 /layers/layers.0/downsample/Reshape_1" [id=52, type=Reshape]; +"53 /layers/layers.0/blocks.1/mlp/fc1/Add" [id=53, type=Add]; +"54 /layers/layers.0/blocks.1/Slice_2" [id=54, type=StridedSlice]; +"55 /layers/layers.0/blocks.1/Slice_3" [id=55, type=StridedSlice]; +"56 /layers/layers.0/blocks.0/mlp/act/Mul_1/smooth_quant_multiply" [id=56, type=Multiply]; +"57 /layers/layers.0/blocks.0/attn/qkv/MatMul" [id=57, type=MatMul]; +"58 /layers/layers.0/downsample/norm/Div" [id=58, type=MVN]; +"59 /layers/layers.0/blocks.1/mlp/act/Mul_1" [id=59, type=Gelu]; +"60 /layers/layers.0/blocks.1/Concat_1" [id=60, type=Concat]; +"61 /layers/layers.0/blocks.0/mlp/fc2/MatMul" [id=61, type=MatMul]; +"62 /layers/layers.0/blocks.0/attn/qkv/Add" [id=62, type=Add]; +"63 /layers/layers.0/downsample/norm/Mul" [id=63, type=Multiply]; +"64 /layers/layers.0/blocks.1/mlp/act/Mul_1/smooth_quant_multiply" [id=64, type=Multiply]; +"65 /layers/layers.0/blocks.1/Reshape_1" [id=65, type=Reshape]; +"66 /layers/layers.0/blocks.0/mlp/fc2/Add" [id=66, type=Add]; +"67 /layers/layers.0/blocks.0/attn/Reshape" [id=67, type=Reshape]; +"68 /layers/layers.0/downsample/norm/Add_1" [id=68, type=Add]; +"69 /layers/layers.0/blocks.1/mlp/fc2/MatMul" [id=69, type=MatMul]; +"70 /layers/layers.0/blocks.1/Transpose" [id=70, type=Transpose]; +"71 /layers/layers.0/blocks.0/attn/Transpose" [id=71, type=Transpose]; +"72 /layers/layers.0/downsample/norm/Add_1/smooth_quant_multiply" [id=72, type=Multiply]; +"73 /layers/layers.0/blocks.1/mlp/fc2/Add" [id=73, type=Add]; +"74 /layers/layers.0/blocks.1/Reshape_2" [id=74, type=Reshape]; +"75 /layers/layers.0/blocks.0/attn/Gather" [id=75, type=Gather]; +"76 /layers/layers.0/blocks.0/attn/Gather_1" [id=76, type=Gather]; +"77 /layers/layers.0/blocks.0/attn/Gather_2" [id=77, type=Gather]; +"78 /layers/layers.0/downsample/reduction/MatMul" [id=78, type=MatMul]; +"79 /layers/layers.0/blocks.1/Reshape_3" [id=79, type=Reshape]; +"80 /layers/layers.0/blocks.0/attn/Mul" [id=80, type=Multiply]; +"81 /layers/layers.0/blocks.0/attn/MatMul" [id=81, type=MatMul]; +"82 /layers/layers.0/blocks.0/attn/MatMul_1" [id=82, type=MatMul]; +"83 /layers/layers.1/blocks.0/Add" [id=83, type=Add]; +"84 /layers/layers.1/blocks.0/norm1/Div" [id=84, type=MVN]; +"85 /layers/layers.0/blocks.1/Reshape_3/smooth_quant_multiply" [id=85, type=Multiply]; +"86 /layers/layers.0/blocks.0/attn/Add" [id=86, type=Add]; +"87 /layers/layers.0/blocks.0/attn/Transpose_2" [id=87, type=Transpose]; +"88 /layers/layers.1/blocks.0/Add_1" [id=88, type=Add]; +"89 /layers/layers.1/blocks.0/norm2/Div" [id=89, type=MVN]; +"90 /layers/layers.1/blocks.0/norm1/Mul" [id=90, type=Multiply]; +"91 /layers/layers.0/blocks.1/attn/qkv/MatMul" [id=91, type=MatMul]; +"92 /layers/layers.0/blocks.0/attn/softmax/Softmax" [id=92, type=Softmax]; +"93 /layers/layers.0/blocks.0/attn/Reshape_1" [id=93, 
type=Reshape]; +"94 /layers/layers.1/blocks.1/Add" [id=94, type=Add]; +"95 /layers/layers.1/blocks.1/norm1/Div" [id=95, type=MVN]; +"96 /layers/layers.1/blocks.0/norm2/Mul" [id=96, type=Multiply]; +"97 /layers/layers.1/blocks.0/norm1/Add_1" [id=97, type=Add]; +"98 /layers/layers.0/blocks.1/attn/qkv/Add" [id=98, type=Add]; +"99 /layers/layers.0/blocks.0/attn/Reshape_1/smooth_quant_multiply" [id=99, type=Multiply]; +"100 /layers/layers.1/blocks.1/Add_1" [id=100, type=Add]; +"101 /layers/layers.1/blocks.1/norm2/Div" [id=101, type=MVN]; +"102 /layers/layers.1/blocks.1/norm1/Mul" [id=102, type=Multiply]; +"103 /layers/layers.1/blocks.0/norm2/Add_1" [id=103, type=Add]; +"104 /layers/layers.1/blocks.0/Reshape_1" [id=104, type=Reshape]; +"105 /layers/layers.0/blocks.1/attn/Reshape" [id=105, type=Reshape]; +"106 /layers/layers.0/blocks.0/attn/proj/MatMul" [id=106, type=MatMul]; +"107 /layers/layers.1/downsample/Reshape" [id=107, type=Reshape]; +"108 /layers/layers.1/blocks.1/norm2/Mul" [id=108, type=Multiply]; +"109 /layers/layers.1/blocks.1/norm1/Add_1" [id=109, type=Add]; +"110 /layers/layers.1/blocks.0/norm2/Add_1/smooth_quant_multiply" [id=110, type=Multiply]; +"111 /layers/layers.1/blocks.0/Transpose" [id=111, type=Transpose]; +"112 /layers/layers.0/blocks.1/attn/Transpose" [id=112, type=Transpose]; +"113 /layers/layers.0/blocks.0/attn/proj/Add" [id=113, type=Add]; +"114 /layers/layers.1/downsample/Slice" [id=114, type=StridedSlice]; +"115 /layers/layers.1/downsample/Slice_2" [id=115, type=StridedSlice]; +"116 /layers/layers.1/blocks.1/norm2/Add_1" [id=116, type=Add]; +"117 /layers/layers.1/blocks.1/Reshape" [id=117, type=Reshape]; +"118 /layers/layers.1/blocks.0/mlp/fc1/MatMul" [id=118, type=MatMul]; +"119 /layers/layers.1/blocks.0/Reshape_2" [id=119, type=Reshape]; +"120 /layers/layers.0/blocks.1/attn/Gather" [id=120, type=Gather]; +"121 /layers/layers.0/blocks.1/attn/Gather_1" [id=121, type=Gather]; +"122 /layers/layers.0/blocks.1/attn/Gather_2" [id=122, type=Gather]; +"123 /layers/layers.0/blocks.0/Reshape_4" [id=123, type=Reshape]; +"124 /layers/layers.1/downsample/Slice_1" [id=124, type=StridedSlice]; +"125 /layers/layers.1/downsample/Slice_4" [id=125, type=StridedSlice]; +"126 /layers/layers.1/downsample/Slice_3" [id=126, type=StridedSlice]; +"127 /layers/layers.1/downsample/Slice_5" [id=127, type=StridedSlice]; +"128 /layers/layers.1/blocks.1/norm2/Add_1/smooth_quant_multiply" [id=128, type=Multiply]; +"129 /layers/layers.1/blocks.1/Slice" [id=129, type=StridedSlice]; +"130 /layers/layers.1/blocks.1/Slice_1" [id=130, type=StridedSlice]; +"131 /layers/layers.1/blocks.0/mlp/fc1/Add" [id=131, type=Add]; +"132 /layers/layers.1/blocks.0/Reshape_3" [id=132, type=Reshape]; +"133 /layers/layers.0/blocks.1/attn/Mul" [id=133, type=Multiply]; +"134 /layers/layers.0/blocks.1/attn/MatMul" [id=134, type=MatMul]; +"135 /layers/layers.0/blocks.1/attn/MatMul_1" [id=135, type=MatMul]; +"136 /layers/layers.0/blocks.0/Reshape_5" [id=136, type=Reshape]; +"137 /layers/layers.1/downsample/Concat" [id=137, type=Concat]; +"138 /layers/layers.1/blocks.1/mlp/fc1/MatMul" [id=138, type=MatMul]; +"139 /layers/layers.1/blocks.1/Concat" [id=139, type=Concat]; +"140 /layers/layers.1/blocks.0/mlp/act/Mul_1" [id=140, type=Gelu]; +"141 /layers/layers.1/blocks.0/Reshape_3/smooth_quant_multiply" [id=141, type=Multiply]; +"142 /layers/layers.0/blocks.1/attn/Add" [id=142, type=Add]; +"143 /layers/layers.0/blocks.1/attn/Transpose_2" [id=143, type=Transpose]; +"144 /layers/layers.0/blocks.0/Transpose_1" [id=144, 
type=Transpose]; +"145 /layers/layers.1/downsample/Reshape_1" [id=145, type=Reshape]; +"146 /layers/layers.1/blocks.1/mlp/fc1/Add" [id=146, type=Add]; +"147 /layers/layers.1/blocks.1/Slice_2" [id=147, type=StridedSlice]; +"148 /layers/layers.1/blocks.1/Slice_3" [id=148, type=StridedSlice]; +"149 /layers/layers.1/blocks.0/mlp/act/Mul_1/smooth_quant_multiply" [id=149, type=Multiply]; +"150 /layers/layers.1/blocks.0/attn/qkv/MatMul" [id=150, type=MatMul]; +"151 /layers/layers.0/blocks.1/attn/Reshape_1" [id=151, type=Reshape]; +"152 /layers/layers.0/blocks.1/attn/Reshape_3" [id=152, type=Reshape]; +"153 /layers/layers.0/blocks.0/Reshape_6" [id=153, type=Reshape]; +"154 /layers/layers.1/downsample/norm/Div" [id=154, type=MVN]; +"155 /layers/layers.1/blocks.1/mlp/act/Mul_1" [id=155, type=Gelu]; +"156 /layers/layers.1/blocks.1/Concat_1" [id=156, type=Concat]; +"157 /layers/layers.1/blocks.0/mlp/fc2/MatMul" [id=157, type=MatMul]; +"158 /layers/layers.1/blocks.0/attn/qkv/Add" [id=158, type=Add]; +"159 /layers/layers.0/blocks.1/attn/Add_1" [id=159, type=Add]; +"160 /layers/layers.0/blocks.1/attn/Reshape_3/smooth_quant_multiply" [id=160, type=Multiply]; +"161 /layers/layers.0/blocks.0/Reshape_7" [id=161, type=Reshape]; +"162 /layers/layers.1/downsample/norm/Mul" [id=162, type=Multiply]; +"163 /layers/layers.1/blocks.1/mlp/act/Mul_1/smooth_quant_multiply" [id=163, type=Multiply]; +"164 /layers/layers.1/blocks.1/Reshape_1" [id=164, type=Reshape]; +"165 /layers/layers.1/blocks.0/mlp/fc2/Add" [id=165, type=Add]; +"166 /layers/layers.1/blocks.0/attn/Reshape" [id=166, type=Reshape]; +"167 /layers/layers.0/blocks.1/attn/Reshape_2" [id=167, type=Reshape]; +"168 /layers/layers.0/blocks.1/attn/proj/MatMul" [id=168, type=MatMul]; +"169 /layers/layers.1/downsample/norm/Add_1" [id=169, type=Add]; +"170 /layers/layers.1/blocks.1/mlp/fc2/MatMul" [id=170, type=MatMul]; +"171 /layers/layers.1/blocks.1/Transpose" [id=171, type=Transpose]; +"172 /layers/layers.1/blocks.0/attn/Transpose" [id=172, type=Transpose]; +"173 /layers/layers.0/blocks.1/attn/softmax/Softmax" [id=173, type=Softmax]; +"174 /layers/layers.0/blocks.1/attn/proj/Add" [id=174, type=Add]; +"175 /layers/layers.1/downsample/norm/Add_1/smooth_quant_multiply" [id=175, type=Multiply]; +"176 /layers/layers.1/blocks.1/mlp/fc2/Add" [id=176, type=Add]; +"177 /layers/layers.1/blocks.1/Reshape_2" [id=177, type=Reshape]; +"178 /layers/layers.1/blocks.0/attn/Gather" [id=178, type=Gather]; +"179 /layers/layers.1/blocks.0/attn/Gather_1" [id=179, type=Gather]; +"180 /layers/layers.1/blocks.0/attn/Gather_2" [id=180, type=Gather]; +"181 /layers/layers.0/blocks.1/Reshape_4" [id=181, type=Reshape]; +"182 /layers/layers.1/downsample/reduction/MatMul" [id=182, type=MatMul]; +"183 /layers/layers.1/blocks.1/Reshape_3" [id=183, type=Reshape]; +"184 /layers/layers.1/blocks.0/attn/Mul" [id=184, type=Multiply]; +"185 /layers/layers.1/blocks.0/attn/MatMul" [id=185, type=MatMul]; +"186 /layers/layers.1/blocks.0/attn/MatMul_1" [id=186, type=MatMul]; +"187 /layers/layers.0/blocks.1/Reshape_5" [id=187, type=Reshape]; +"188 /layers/layers.2/blocks.0/Add" [id=188, type=Add]; +"189 /layers/layers.2/blocks.0/norm1/Div" [id=189, type=MVN]; +"190 /layers/layers.1/blocks.1/Reshape_3/smooth_quant_multiply" [id=190, type=Multiply]; +"191 /layers/layers.1/blocks.0/attn/Add" [id=191, type=Add]; +"192 /layers/layers.1/blocks.0/attn/Transpose_2" [id=192, type=Transpose]; +"193 /layers/layers.0/blocks.1/Transpose_1" [id=193, type=Transpose]; +"194 /layers/layers.2/blocks.0/Add_1" [id=194, type=Add]; 
+"195 /layers/layers.2/blocks.0/norm2/Div" [id=195, type=MVN]; +"196 /layers/layers.2/blocks.0/norm1/Mul" [id=196, type=Multiply]; +"197 /layers/layers.1/blocks.1/attn/qkv/MatMul" [id=197, type=MatMul]; +"198 /layers/layers.1/blocks.0/attn/softmax/Softmax" [id=198, type=Softmax]; +"199 /layers/layers.1/blocks.0/attn/Reshape_1" [id=199, type=Reshape]; +"200 /layers/layers.0/blocks.1/Reshape_6" [id=200, type=Reshape]; +"201 /layers/layers.2/blocks.1/Add" [id=201, type=Add]; +"202 /layers/layers.2/blocks.1/norm1/Div" [id=202, type=MVN]; +"203 /layers/layers.2/blocks.0/norm2/Mul" [id=203, type=Multiply]; +"204 /layers/layers.2/blocks.0/norm1/Add_1" [id=204, type=Add]; +"205 /layers/layers.1/blocks.1/attn/qkv/Add" [id=205, type=Add]; +"206 /layers/layers.1/blocks.0/attn/Reshape_1/smooth_quant_multiply" [id=206, type=Multiply]; +"207 /layers/layers.0/blocks.1/Slice_4" [id=207, type=StridedSlice]; +"208 /layers/layers.0/blocks.1/Slice_5" [id=208, type=StridedSlice]; +"209 /layers/layers.2/blocks.1/Add_1" [id=209, type=Add]; +"210 /layers/layers.2/blocks.1/norm2/Div" [id=210, type=MVN]; +"211 /layers/layers.2/blocks.1/norm1/Mul" [id=211, type=Multiply]; +"212 /layers/layers.2/blocks.0/norm2/Add_1" [id=212, type=Add]; +"213 /layers/layers.2/blocks.0/Reshape_1" [id=213, type=Reshape]; +"214 /layers/layers.1/blocks.1/attn/Reshape" [id=214, type=Reshape]; +"215 /layers/layers.1/blocks.0/attn/proj/MatMul" [id=215, type=MatMul]; +"216 /layers/layers.0/blocks.1/Concat_2" [id=216, type=Concat]; +"217 /layers/layers.2/blocks.2/Add" [id=217, type=Add]; +"218 /layers/layers.2/blocks.2/norm1/Div" [id=218, type=MVN]; +"219 /layers/layers.2/blocks.1/norm2/Mul" [id=219, type=Multiply]; +"220 /layers/layers.2/blocks.1/norm1/Add_1" [id=220, type=Add]; +"221 /layers/layers.2/blocks.0/norm2/Add_1/smooth_quant_multiply" [id=221, type=Multiply]; +"222 /layers/layers.2/blocks.0/Transpose" [id=222, type=Transpose]; +"223 /layers/layers.1/blocks.1/attn/Transpose" [id=223, type=Transpose]; +"224 /layers/layers.1/blocks.0/attn/proj/Add" [id=224, type=Add]; +"225 /layers/layers.0/blocks.1/Slice_6" [id=225, type=StridedSlice]; +"226 /layers/layers.0/blocks.1/Slice_7" [id=226, type=StridedSlice]; +"227 /layers/layers.2/blocks.2/Add_1" [id=227, type=Add]; +"228 /layers/layers.2/blocks.2/norm2/Div" [id=228, type=MVN]; +"229 /layers/layers.2/blocks.2/norm1/Mul" [id=229, type=Multiply]; +"230 /layers/layers.2/blocks.1/norm2/Add_1" [id=230, type=Add]; +"231 /layers/layers.2/blocks.1/Reshape" [id=231, type=Reshape]; +"232 /layers/layers.2/blocks.0/mlp/fc1/MatMul" [id=232, type=MatMul]; +"233 /layers/layers.2/blocks.0/Reshape_2" [id=233, type=Reshape]; +"234 /layers/layers.1/blocks.1/attn/Gather" [id=234, type=Gather]; +"235 /layers/layers.1/blocks.1/attn/Gather_1" [id=235, type=Gather]; +"236 /layers/layers.1/blocks.1/attn/Gather_2" [id=236, type=Gather]; +"237 /layers/layers.1/blocks.0/Reshape_4" [id=237, type=Reshape]; +"238 /layers/layers.0/blocks.1/Concat_3" [id=238, type=Concat]; +"239 /layers/layers.2/blocks.3/Add" [id=239, type=Add]; +"240 /layers/layers.2/blocks.3/norm1/Div" [id=240, type=MVN]; +"241 /layers/layers.2/blocks.2/norm2/Mul" [id=241, type=Multiply]; +"242 /layers/layers.2/blocks.2/norm1/Add_1" [id=242, type=Add]; +"243 /layers/layers.2/blocks.1/norm2/Add_1/smooth_quant_multiply" [id=243, type=Multiply]; +"244 /layers/layers.2/blocks.1/Slice" [id=244, type=StridedSlice]; +"245 /layers/layers.2/blocks.1/Slice_1" [id=245, type=StridedSlice]; +"246 /layers/layers.2/blocks.0/mlp/fc1/Add" [id=246, type=Add]; +"247 
/layers/layers.2/blocks.0/Reshape_3" [id=247, type=Reshape]; +"248 /layers/layers.1/blocks.1/attn/Mul" [id=248, type=Multiply]; +"249 /layers/layers.1/blocks.1/attn/MatMul" [id=249, type=MatMul]; +"250 /layers/layers.1/blocks.1/attn/MatMul_1" [id=250, type=MatMul]; +"251 /layers/layers.1/blocks.0/Reshape_5" [id=251, type=Reshape]; +"252 /layers/layers.0/blocks.1/Reshape_7" [id=252, type=Reshape]; +"253 /layers/layers.2/blocks.3/Add_1" [id=253, type=Add]; +"254 /layers/layers.2/blocks.3/norm2/Div" [id=254, type=MVN]; +"255 /layers/layers.2/blocks.3/norm1/Mul" [id=255, type=Multiply]; +"256 /layers/layers.2/blocks.2/norm2/Add_1" [id=256, type=Add]; +"257 /layers/layers.2/blocks.2/Reshape_1" [id=257, type=Reshape]; +"258 /layers/layers.2/blocks.1/mlp/fc1/MatMul" [id=258, type=MatMul]; +"259 /layers/layers.2/blocks.1/Concat" [id=259, type=Concat]; +"260 /layers/layers.2/blocks.0/mlp/act/Mul_1" [id=260, type=Gelu]; +"261 /layers/layers.2/blocks.0/Reshape_3/smooth_quant_multiply" [id=261, type=Multiply]; +"262 /layers/layers.1/blocks.1/attn/Add" [id=262, type=Add]; +"263 /layers/layers.1/blocks.1/attn/Transpose_2" [id=263, type=Transpose]; +"264 /layers/layers.1/blocks.0/Transpose_1" [id=264, type=Transpose]; +"265 /layers/layers.2/blocks.4/Add" [id=265, type=Add]; +"266 /layers/layers.2/blocks.4/norm1/Div" [id=266, type=MVN]; +"267 /layers/layers.2/blocks.3/norm2/Mul" [id=267, type=Multiply]; +"268 /layers/layers.2/blocks.3/norm1/Add_1" [id=268, type=Add]; +"269 /layers/layers.2/blocks.2/norm2/Add_1/smooth_quant_multiply" [id=269, type=Multiply]; +"270 /layers/layers.2/blocks.2/Transpose" [id=270, type=Transpose]; +"271 /layers/layers.2/blocks.1/mlp/fc1/Add" [id=271, type=Add]; +"272 /layers/layers.2/blocks.1/Slice_2" [id=272, type=StridedSlice]; +"273 /layers/layers.2/blocks.1/Slice_3" [id=273, type=StridedSlice]; +"274 /layers/layers.2/blocks.0/mlp/act/Mul_1/smooth_quant_multiply" [id=274, type=Multiply]; +"275 /layers/layers.2/blocks.0/attn/qkv/MatMul" [id=275, type=MatMul]; +"276 /layers/layers.1/blocks.1/attn/Reshape_1" [id=276, type=Reshape]; +"277 /layers/layers.1/blocks.1/attn/Reshape_3" [id=277, type=Reshape]; +"278 /layers/layers.1/blocks.0/Reshape_6" [id=278, type=Reshape]; +"279 /layers/layers.2/blocks.4/Add_1" [id=279, type=Add]; +"280 /layers/layers.2/blocks.4/norm2/Div" [id=280, type=MVN]; +"281 /layers/layers.2/blocks.4/norm1/Mul" [id=281, type=Multiply]; +"282 /layers/layers.2/blocks.3/norm2/Add_1" [id=282, type=Add]; +"283 /layers/layers.2/blocks.3/Reshape" [id=283, type=Reshape]; +"284 /layers/layers.2/blocks.2/mlp/fc1/MatMul" [id=284, type=MatMul]; +"285 /layers/layers.2/blocks.2/Reshape_2" [id=285, type=Reshape]; +"286 /layers/layers.2/blocks.1/mlp/act/Mul_1" [id=286, type=Gelu]; +"287 /layers/layers.2/blocks.1/Concat_1" [id=287, type=Concat]; +"288 /layers/layers.2/blocks.0/mlp/fc2/MatMul" [id=288, type=MatMul]; +"289 /layers/layers.2/blocks.0/attn/qkv/Add" [id=289, type=Add]; +"290 /layers/layers.1/blocks.1/attn/Add_1" [id=290, type=Add]; +"291 /layers/layers.1/blocks.1/attn/Reshape_3/smooth_quant_multiply" [id=291, type=Multiply]; +"292 /layers/layers.1/blocks.0/Reshape_7" [id=292, type=Reshape]; +"293 /layers/layers.2/blocks.5/Add" [id=293, type=Add]; +"294 /layers/layers.2/blocks.5/norm1/Div" [id=294, type=MVN]; +"295 /layers/layers.2/blocks.4/norm2/Mul" [id=295, type=Multiply]; +"296 /layers/layers.2/blocks.4/norm1/Add_1" [id=296, type=Add]; +"297 /layers/layers.2/blocks.3/norm2/Add_1/smooth_quant_multiply" [id=297, type=Multiply]; +"298 
/layers/layers.2/blocks.3/Slice" [id=298, type=StridedSlice]; +"299 /layers/layers.2/blocks.3/Slice_1" [id=299, type=StridedSlice]; +"300 /layers/layers.2/blocks.2/mlp/fc1/Add" [id=300, type=Add]; +"301 /layers/layers.2/blocks.2/Reshape_3" [id=301, type=Reshape]; +"302 /layers/layers.2/blocks.1/mlp/act/Mul_1/smooth_quant_multiply" [id=302, type=Multiply]; +"303 /layers/layers.2/blocks.1/Reshape_1" [id=303, type=Reshape]; +"304 /layers/layers.2/blocks.0/mlp/fc2/Add" [id=304, type=Add]; +"305 /layers/layers.2/blocks.0/attn/Reshape" [id=305, type=Reshape]; +"306 /layers/layers.1/blocks.1/attn/Reshape_2" [id=306, type=Reshape]; +"307 /layers/layers.1/blocks.1/attn/proj/MatMul" [id=307, type=MatMul]; +"308 /layers/layers.2/blocks.5/Add_1" [id=308, type=Add]; +"309 /layers/layers.2/blocks.5/norm2/Div" [id=309, type=MVN]; +"310 /layers/layers.2/blocks.5/norm1/Mul" [id=310, type=Multiply]; +"311 /layers/layers.2/blocks.4/norm2/Add_1" [id=311, type=Add]; +"312 /layers/layers.2/blocks.4/Reshape_1" [id=312, type=Reshape]; +"313 /layers/layers.2/blocks.3/mlp/fc1/MatMul" [id=313, type=MatMul]; +"314 /layers/layers.2/blocks.3/Concat" [id=314, type=Concat]; +"315 /layers/layers.2/blocks.2/mlp/act/Mul_1" [id=315, type=Gelu]; +"316 /layers/layers.2/blocks.2/Reshape_3/smooth_quant_multiply" [id=316, type=Multiply]; +"317 /layers/layers.2/blocks.1/mlp/fc2/MatMul" [id=317, type=MatMul]; +"318 /layers/layers.2/blocks.1/Transpose" [id=318, type=Transpose]; +"319 /layers/layers.2/blocks.0/attn/Transpose" [id=319, type=Transpose]; +"320 /layers/layers.1/blocks.1/attn/softmax/Softmax" [id=320, type=Softmax]; +"321 /layers/layers.1/blocks.1/attn/proj/Add" [id=321, type=Add]; +"322 /layers/layers.2/downsample/Reshape" [id=322, type=Reshape]; +"323 /layers/layers.2/blocks.5/norm2/Mul" [id=323, type=Multiply]; +"324 /layers/layers.2/blocks.5/norm1/Add_1" [id=324, type=Add]; +"325 /layers/layers.2/blocks.4/norm2/Add_1/smooth_quant_multiply" [id=325, type=Multiply]; +"326 /layers/layers.2/blocks.4/Transpose" [id=326, type=Transpose]; +"327 /layers/layers.2/blocks.3/mlp/fc1/Add" [id=327, type=Add]; +"328 /layers/layers.2/blocks.3/Slice_2" [id=328, type=StridedSlice]; +"329 /layers/layers.2/blocks.3/Slice_3" [id=329, type=StridedSlice]; +"330 /layers/layers.2/blocks.2/mlp/act/Mul_1/smooth_quant_multiply" [id=330, type=Multiply]; +"331 /layers/layers.2/blocks.2/attn/qkv/MatMul" [id=331, type=MatMul]; +"332 /layers/layers.2/blocks.1/mlp/fc2/Add" [id=332, type=Add]; +"333 /layers/layers.2/blocks.1/Reshape_2" [id=333, type=Reshape]; +"334 /layers/layers.2/blocks.0/attn/Gather" [id=334, type=Gather]; +"335 /layers/layers.2/blocks.0/attn/Gather_1" [id=335, type=Gather]; +"336 /layers/layers.2/blocks.0/attn/Gather_2" [id=336, type=Gather]; +"337 /layers/layers.1/blocks.1/Reshape_4" [id=337, type=Reshape]; +"338 /layers/layers.2/downsample/Slice" [id=338, type=StridedSlice]; +"339 /layers/layers.2/downsample/Slice_2" [id=339, type=StridedSlice]; +"340 /layers/layers.2/blocks.5/norm2/Add_1" [id=340, type=Add]; +"341 /layers/layers.2/blocks.5/Reshape" [id=341, type=Reshape]; +"342 /layers/layers.2/blocks.4/mlp/fc1/MatMul" [id=342, type=MatMul]; +"343 /layers/layers.2/blocks.4/Reshape_2" [id=343, type=Reshape]; +"344 /layers/layers.2/blocks.3/mlp/act/Mul_1" [id=344, type=Gelu]; +"345 /layers/layers.2/blocks.3/Concat_1" [id=345, type=Concat]; +"346 /layers/layers.2/blocks.2/mlp/fc2/MatMul" [id=346, type=MatMul]; +"347 /layers/layers.2/blocks.2/attn/qkv/Add" [id=347, type=Add]; +"348 /layers/layers.2/blocks.1/Reshape_3" [id=348, 
type=Reshape]; +"349 /layers/layers.2/blocks.0/attn/Mul" [id=349, type=Multiply]; +"350 /layers/layers.2/blocks.0/attn/MatMul" [id=350, type=MatMul]; +"351 /layers/layers.2/blocks.0/attn/MatMul_1" [id=351, type=MatMul]; +"352 /layers/layers.1/blocks.1/Reshape_5" [id=352, type=Reshape]; +"353 /layers/layers.2/downsample/Slice_1" [id=353, type=StridedSlice]; +"354 /layers/layers.2/downsample/Slice_4" [id=354, type=StridedSlice]; +"355 /layers/layers.2/downsample/Slice_3" [id=355, type=StridedSlice]; +"356 /layers/layers.2/downsample/Slice_5" [id=356, type=StridedSlice]; +"357 /layers/layers.2/blocks.5/norm2/Add_1/smooth_quant_multiply" [id=357, type=Multiply]; +"358 /layers/layers.2/blocks.5/Slice" [id=358, type=StridedSlice]; +"359 /layers/layers.2/blocks.5/Slice_1" [id=359, type=StridedSlice]; +"360 /layers/layers.2/blocks.4/mlp/fc1/Add" [id=360, type=Add]; +"361 /layers/layers.2/blocks.4/Reshape_3" [id=361, type=Reshape]; +"362 /layers/layers.2/blocks.3/mlp/act/Mul_1/smooth_quant_multiply" [id=362, type=Multiply]; +"363 /layers/layers.2/blocks.3/Reshape_1" [id=363, type=Reshape]; +"364 /layers/layers.2/blocks.2/mlp/fc2/Add" [id=364, type=Add]; +"365 /layers/layers.2/blocks.2/attn/Reshape" [id=365, type=Reshape]; +"366 /layers/layers.2/blocks.1/Reshape_3/smooth_quant_multiply" [id=366, type=Multiply]; +"367 /layers/layers.2/blocks.0/attn/Add" [id=367, type=Add]; +"368 /layers/layers.2/blocks.0/attn/Transpose_2" [id=368, type=Transpose]; +"369 /layers/layers.1/blocks.1/Transpose_1" [id=369, type=Transpose]; +"370 /layers/layers.2/downsample/Concat" [id=370, type=Concat]; +"371 /layers/layers.2/blocks.5/mlp/fc1/MatMul" [id=371, type=MatMul]; +"372 /layers/layers.2/blocks.5/Concat" [id=372, type=Concat]; +"373 /layers/layers.2/blocks.4/mlp/act/Mul_1" [id=373, type=Gelu]; +"374 /layers/layers.2/blocks.4/Reshape_3/smooth_quant_multiply" [id=374, type=Multiply]; +"375 /layers/layers.2/blocks.3/mlp/fc2/MatMul" [id=375, type=MatMul]; +"376 /layers/layers.2/blocks.3/Transpose" [id=376, type=Transpose]; +"377 /layers/layers.2/blocks.2/attn/Transpose" [id=377, type=Transpose]; +"378 /layers/layers.2/blocks.1/attn/qkv/MatMul" [id=378, type=MatMul]; +"379 /layers/layers.2/blocks.0/attn/softmax/Softmax" [id=379, type=Softmax]; +"380 /layers/layers.2/blocks.0/attn/Reshape_1" [id=380, type=Reshape]; +"381 /layers/layers.1/blocks.1/Reshape_6" [id=381, type=Reshape]; +"382 /layers/layers.2/downsample/Reshape_1" [id=382, type=Reshape]; +"383 /layers/layers.2/blocks.5/mlp/fc1/Add" [id=383, type=Add]; +"384 /layers/layers.2/blocks.5/Slice_2" [id=384, type=StridedSlice]; +"385 /layers/layers.2/blocks.5/Slice_3" [id=385, type=StridedSlice]; +"386 /layers/layers.2/blocks.4/mlp/act/Mul_1/smooth_quant_multiply" [id=386, type=Multiply]; +"387 /layers/layers.2/blocks.4/attn/qkv/MatMul" [id=387, type=MatMul]; +"388 /layers/layers.2/blocks.3/mlp/fc2/Add" [id=388, type=Add]; +"389 /layers/layers.2/blocks.3/Reshape_2" [id=389, type=Reshape]; +"390 /layers/layers.2/blocks.2/attn/Gather" [id=390, type=Gather]; +"391 /layers/layers.2/blocks.2/attn/Gather_1" [id=391, type=Gather]; +"392 /layers/layers.2/blocks.2/attn/Gather_2" [id=392, type=Gather]; +"393 /layers/layers.2/blocks.1/attn/qkv/Add" [id=393, type=Add]; +"394 /layers/layers.2/blocks.0/attn/Reshape_1/smooth_quant_multiply" [id=394, type=Multiply]; +"395 /layers/layers.1/blocks.1/Slice_4" [id=395, type=StridedSlice]; +"396 /layers/layers.1/blocks.1/Slice_5" [id=396, type=StridedSlice]; +"397 /layers/layers.2/downsample/norm/Div" [id=397, type=MVN]; +"398 
/layers/layers.2/blocks.5/mlp/act/Mul_1" [id=398, type=Gelu]; +"399 /layers/layers.2/blocks.5/Concat_1" [id=399, type=Concat]; +"400 /layers/layers.2/blocks.4/mlp/fc2/MatMul" [id=400, type=MatMul]; +"401 /layers/layers.2/blocks.4/attn/qkv/Add" [id=401, type=Add]; +"402 /layers/layers.2/blocks.3/Reshape_3" [id=402, type=Reshape]; +"403 /layers/layers.2/blocks.2/attn/Mul" [id=403, type=Multiply]; +"404 /layers/layers.2/blocks.2/attn/MatMul" [id=404, type=MatMul]; +"405 /layers/layers.2/blocks.2/attn/MatMul_1" [id=405, type=MatMul]; +"406 /layers/layers.2/blocks.1/attn/Reshape" [id=406, type=Reshape]; +"407 /layers/layers.2/blocks.0/attn/proj/MatMul" [id=407, type=MatMul]; +"408 /layers/layers.1/blocks.1/Concat_2" [id=408, type=Concat]; +"409 /layers/layers.2/downsample/norm/Mul" [id=409, type=Multiply]; +"410 /layers/layers.2/blocks.5/mlp/act/Mul_1/smooth_quant_multiply" [id=410, type=Multiply]; +"411 /layers/layers.2/blocks.5/Reshape_1" [id=411, type=Reshape]; +"412 /layers/layers.2/blocks.4/mlp/fc2/Add" [id=412, type=Add]; +"413 /layers/layers.2/blocks.4/attn/Reshape" [id=413, type=Reshape]; +"414 /layers/layers.2/blocks.3/Reshape_3/smooth_quant_multiply" [id=414, type=Multiply]; +"415 /layers/layers.2/blocks.2/attn/Add" [id=415, type=Add]; +"416 /layers/layers.2/blocks.2/attn/Transpose_2" [id=416, type=Transpose]; +"417 /layers/layers.2/blocks.1/attn/Transpose" [id=417, type=Transpose]; +"418 /layers/layers.2/blocks.0/attn/proj/Add" [id=418, type=Add]; +"419 /layers/layers.1/blocks.1/Slice_6" [id=419, type=StridedSlice]; +"420 /layers/layers.1/blocks.1/Slice_7" [id=420, type=StridedSlice]; +"421 /layers/layers.2/downsample/norm/Add_1" [id=421, type=Add]; +"422 /layers/layers.2/blocks.5/mlp/fc2/MatMul" [id=422, type=MatMul]; +"423 /layers/layers.2/blocks.5/Transpose" [id=423, type=Transpose]; +"424 /layers/layers.2/blocks.4/attn/Transpose" [id=424, type=Transpose]; +"425 /layers/layers.2/blocks.3/attn/qkv/MatMul" [id=425, type=MatMul]; +"426 /layers/layers.2/blocks.2/attn/softmax/Softmax" [id=426, type=Softmax]; +"427 /layers/layers.2/blocks.2/attn/Reshape_1" [id=427, type=Reshape]; +"428 /layers/layers.2/blocks.1/attn/Gather" [id=428, type=Gather]; +"429 /layers/layers.2/blocks.1/attn/Gather_1" [id=429, type=Gather]; +"430 /layers/layers.2/blocks.1/attn/Gather_2" [id=430, type=Gather]; +"431 /layers/layers.2/blocks.0/Reshape_4" [id=431, type=Reshape]; +"432 /layers/layers.1/blocks.1/Concat_3" [id=432, type=Concat]; +"433 /layers/layers.2/downsample/norm/Add_1/smooth_quant_multiply" [id=433, type=Multiply]; +"434 /layers/layers.2/blocks.5/mlp/fc2/Add" [id=434, type=Add]; +"435 /layers/layers.2/blocks.5/Reshape_2" [id=435, type=Reshape]; +"436 /layers/layers.2/blocks.4/attn/Gather" [id=436, type=Gather]; +"437 /layers/layers.2/blocks.4/attn/Gather_1" [id=437, type=Gather]; +"438 /layers/layers.2/blocks.4/attn/Gather_2" [id=438, type=Gather]; +"439 /layers/layers.2/blocks.3/attn/qkv/Add" [id=439, type=Add]; +"440 /layers/layers.2/blocks.2/attn/Reshape_1/smooth_quant_multiply" [id=440, type=Multiply]; +"441 /layers/layers.2/blocks.1/attn/Mul" [id=441, type=Multiply]; +"442 /layers/layers.2/blocks.1/attn/MatMul" [id=442, type=MatMul]; +"443 /layers/layers.2/blocks.1/attn/MatMul_1" [id=443, type=MatMul]; +"444 /layers/layers.2/blocks.0/Reshape_5" [id=444, type=Reshape]; +"445 /layers/layers.1/blocks.1/Reshape_7" [id=445, type=Reshape]; +"446 /layers/layers.2/downsample/reduction/MatMul" [id=446, type=MatMul]; +"447 /layers/layers.2/blocks.5/Reshape_3" [id=447, type=Reshape]; +"448 
/layers/layers.2/blocks.4/attn/Mul" [id=448, type=Multiply]; +"449 /layers/layers.2/blocks.4/attn/MatMul" [id=449, type=MatMul]; +"450 /layers/layers.2/blocks.4/attn/MatMul_1" [id=450, type=MatMul]; +"451 /layers/layers.2/blocks.3/attn/Reshape" [id=451, type=Reshape]; +"452 /layers/layers.2/blocks.2/attn/proj/MatMul" [id=452, type=MatMul]; +"453 /layers/layers.2/blocks.1/attn/Add" [id=453, type=Add]; +"454 /layers/layers.2/blocks.1/attn/Transpose_2" [id=454, type=Transpose]; +"455 /layers/layers.2/blocks.0/Transpose_1" [id=455, type=Transpose]; +"456 /layers/layers.3/blocks.0/Add" [id=456, type=Add]; +"457 /layers/layers.3/blocks.0/norm1/Div" [id=457, type=MVN]; +"458 /layers/layers.2/blocks.5/Reshape_3/smooth_quant_multiply" [id=458, type=Multiply]; +"459 /layers/layers.2/blocks.4/attn/Add" [id=459, type=Add]; +"460 /layers/layers.2/blocks.4/attn/Transpose_2" [id=460, type=Transpose]; +"461 /layers/layers.2/blocks.3/attn/Transpose" [id=461, type=Transpose]; +"462 /layers/layers.2/blocks.2/attn/proj/Add" [id=462, type=Add]; +"463 /layers/layers.2/blocks.1/attn/Reshape_1" [id=463, type=Reshape]; +"464 /layers/layers.2/blocks.1/attn/Reshape_3" [id=464, type=Reshape]; +"465 /layers/layers.2/blocks.0/Reshape_6" [id=465, type=Reshape]; +"466 /layers/layers.3/blocks.0/Add_1" [id=466, type=Add]; +"467 /layers/layers.3/blocks.0/norm2/Div" [id=467, type=MVN]; +"468 /layers/layers.3/blocks.0/norm1/Mul" [id=468, type=Multiply]; +"469 /layers/layers.2/blocks.5/attn/qkv/MatMul" [id=469, type=MatMul]; +"470 /layers/layers.2/blocks.4/attn/softmax/Softmax" [id=470, type=Softmax]; +"471 /layers/layers.2/blocks.4/attn/Reshape_1" [id=471, type=Reshape]; +"472 /layers/layers.2/blocks.3/attn/Gather" [id=472, type=Gather]; +"473 /layers/layers.2/blocks.3/attn/Gather_1" [id=473, type=Gather]; +"474 /layers/layers.2/blocks.3/attn/Gather_2" [id=474, type=Gather]; +"475 /layers/layers.2/blocks.2/Reshape_4" [id=475, type=Reshape]; +"476 /layers/layers.2/blocks.1/attn/Add_1" [id=476, type=Add]; +"477 /layers/layers.2/blocks.1/attn/Reshape_3/smooth_quant_multiply" [id=477, type=Multiply]; +"478 /layers/layers.2/blocks.0/Reshape_7" [id=478, type=Reshape]; +"479 /layers/layers.3/blocks.1/Add" [id=479, type=Add]; +"480 /layers/layers.3/blocks.1/norm1/Div" [id=480, type=MVN]; +"481 /layers/layers.3/blocks.0/norm2/Mul" [id=481, type=Multiply]; +"482 /layers/layers.3/blocks.0/norm1/Add_1" [id=482, type=Add]; +"483 /layers/layers.2/blocks.5/attn/qkv/Add" [id=483, type=Add]; +"484 /layers/layers.2/blocks.4/attn/Reshape_1/smooth_quant_multiply" [id=484, type=Multiply]; +"485 /layers/layers.2/blocks.3/attn/Mul" [id=485, type=Multiply]; +"486 /layers/layers.2/blocks.3/attn/MatMul" [id=486, type=MatMul]; +"487 /layers/layers.2/blocks.3/attn/MatMul_1" [id=487, type=MatMul]; +"488 /layers/layers.2/blocks.2/Reshape_5" [id=488, type=Reshape]; +"489 /layers/layers.2/blocks.1/attn/Reshape_2" [id=489, type=Reshape]; +"490 /layers/layers.2/blocks.1/attn/proj/MatMul" [id=490, type=MatMul]; +"491 /layers/layers.3/blocks.1/Add_1" [id=491, type=Add]; +"492 /layers/layers.3/blocks.1/norm2/Div" [id=492, type=MVN]; +"493 /layers/layers.3/blocks.1/norm1/Mul" [id=493, type=Multiply]; +"494 /layers/layers.3/blocks.0/norm2/Add_1" [id=494, type=Add]; +"495 /layers/layers.3/blocks.0/Reshape_1" [id=495, type=Reshape]; +"496 /layers/layers.2/blocks.5/attn/Reshape" [id=496, type=Reshape]; +"497 /layers/layers.2/blocks.4/attn/proj/MatMul" [id=497, type=MatMul]; +"498 /layers/layers.2/blocks.3/attn/Add" [id=498, type=Add]; +"499 
/layers/layers.2/blocks.3/attn/Transpose_2" [id=499, type=Transpose]; +"500 /layers/layers.2/blocks.2/Transpose_1" [id=500, type=Transpose]; +"501 /layers/layers.2/blocks.1/attn/softmax/Softmax" [id=501, type=Softmax]; +"502 /layers/layers.2/blocks.1/attn/proj/Add" [id=502, type=Add]; +"503 /norm/Div" [id=503, type=MVN]; +"504 /layers/layers.3/blocks.1/norm2/Mul" [id=504, type=Multiply]; +"505 /layers/layers.3/blocks.1/norm1/Add_1" [id=505, type=Add]; +"506 /layers/layers.3/blocks.0/norm2/Add_1/smooth_quant_multiply" [id=506, type=Multiply]; +"507 /layers/layers.3/blocks.0/Transpose" [id=507, type=Reshape]; +"508 /layers/layers.2/blocks.5/attn/Transpose" [id=508, type=Transpose]; +"509 /layers/layers.2/blocks.4/attn/proj/Add" [id=509, type=Add]; +"510 /layers/layers.2/blocks.3/attn/Reshape_1" [id=510, type=Reshape]; +"511 /layers/layers.2/blocks.3/attn/Reshape_3" [id=511, type=Reshape]; +"512 /layers/layers.2/blocks.2/Reshape_6" [id=512, type=Reshape]; +"513 /layers/layers.2/blocks.1/Reshape_4" [id=513, type=Reshape]; +"514 /norm/Mul" [id=514, type=Multiply]; +"515 /layers/layers.3/blocks.1/norm2/Add_1" [id=515, type=Add]; +"516 /layers/layers.3/blocks.1/Reshape_1" [id=516, type=Reshape]; +"517 /layers/layers.3/blocks.0/mlp/fc1/MatMul" [id=517, type=MatMul]; +"518 /layers/layers.3/blocks.0/Reshape_2" [id=518, type=Reshape]; +"519 /layers/layers.2/blocks.5/attn/Gather" [id=519, type=Gather]; +"520 /layers/layers.2/blocks.5/attn/Gather_1" [id=520, type=Gather]; +"521 /layers/layers.2/blocks.5/attn/Gather_2" [id=521, type=Gather]; +"522 /layers/layers.2/blocks.4/Reshape_4" [id=522, type=Reshape]; +"523 /layers/layers.2/blocks.3/attn/Add_1" [id=523, type=Add]; +"524 /layers/layers.2/blocks.3/attn/Reshape_3/smooth_quant_multiply" [id=524, type=Multiply]; +"525 /layers/layers.2/blocks.2/Reshape_7" [id=525, type=Reshape]; +"526 /layers/layers.2/blocks.1/Reshape_5" [id=526, type=Reshape]; +"527 /norm/Add_1" [id=527, type=Add]; +"528 /layers/layers.3/blocks.1/norm2/Add_1/smooth_quant_multiply" [id=528, type=Multiply]; +"529 /layers/layers.3/blocks.1/Transpose" [id=529, type=Reshape]; +"530 /layers/layers.3/blocks.0/mlp/fc1/Add" [id=530, type=Add]; +"531 /layers/layers.3/blocks.0/Reshape_3" [id=531, type=Reshape]; +"532 /layers/layers.2/blocks.5/attn/Mul" [id=532, type=Multiply]; +"533 /layers/layers.2/blocks.5/attn/MatMul" [id=533, type=MatMul]; +"534 /layers/layers.2/blocks.5/attn/MatMul_1" [id=534, type=MatMul]; +"535 /layers/layers.2/blocks.4/Reshape_5" [id=535, type=Reshape]; +"536 /layers/layers.2/blocks.3/attn/Reshape_2" [id=536, type=Reshape]; +"537 /layers/layers.2/blocks.3/attn/proj/MatMul" [id=537, type=MatMul]; +"538 /layers/layers.2/blocks.1/Transpose_1" [id=538, type=Transpose]; +"539 ReduceMean_6197" [id=539, type=ReduceMean]; +"540 /layers/layers.3/blocks.1/mlp/fc1/MatMul" [id=540, type=MatMul]; +"541 /layers/layers.3/blocks.1/Reshape_2" [id=541, type=Reshape]; +"542 /layers/layers.3/blocks.0/mlp/act/Mul_1" [id=542, type=Gelu]; +"543 /layers/layers.3/blocks.0/Reshape_3/smooth_quant_multiply" [id=543, type=Multiply]; +"544 /layers/layers.2/blocks.5/attn/Add" [id=544, type=Add]; +"545 /layers/layers.2/blocks.5/attn/Transpose_2" [id=545, type=Transpose]; +"546 /layers/layers.2/blocks.4/Transpose_1" [id=546, type=Transpose]; +"547 /layers/layers.2/blocks.3/attn/softmax/Softmax" [id=547, type=Softmax]; +"548 /layers/layers.2/blocks.3/attn/proj/Add" [id=548, type=Add]; +"549 /layers/layers.2/blocks.1/Reshape_6" [id=549, type=Reshape]; +"550 /avgpool/GlobalAveragePool" [id=550, 
type=Reshape]; +"551 /layers/layers.3/blocks.1/mlp/fc1/Add" [id=551, type=Add]; +"552 /layers/layers.3/blocks.1/Reshape_3" [id=552, type=Reshape]; +"553 /layers/layers.3/blocks.0/mlp/act/Mul_1/smooth_quant_multiply" [id=553, type=Multiply]; +"554 /layers/layers.3/blocks.0/attn/qkv/MatMul" [id=554, type=MatMul]; +"555 /layers/layers.2/blocks.5/attn/Reshape_1" [id=555, type=Reshape]; +"556 /layers/layers.2/blocks.5/attn/Reshape_3" [id=556, type=Reshape]; +"557 /layers/layers.2/blocks.4/Reshape_6" [id=557, type=Reshape]; +"558 /layers/layers.2/blocks.3/Reshape_4" [id=558, type=Reshape]; +"559 /layers/layers.2/blocks.1/Slice_4" [id=559, type=StridedSlice]; +"560 /layers/layers.2/blocks.1/Slice_5" [id=560, type=StridedSlice]; +"561 /Flatten" [id=561, type=Reshape]; +"562 /layers/layers.3/blocks.1/mlp/act/Mul_1" [id=562, type=Gelu]; +"563 /layers/layers.3/blocks.1/Reshape_3/smooth_quant_multiply" [id=563, type=Multiply]; +"564 /layers/layers.3/blocks.0/mlp/fc2/MatMul" [id=564, type=MatMul]; +"565 /layers/layers.3/blocks.0/attn/qkv/Add" [id=565, type=Add]; +"566 /layers/layers.2/blocks.5/attn/Add_1" [id=566, type=Add]; +"567 /layers/layers.2/blocks.5/attn/Reshape_3/smooth_quant_multiply" [id=567, type=Multiply]; +"568 /layers/layers.2/blocks.4/Reshape_7" [id=568, type=Reshape]; +"569 /layers/layers.2/blocks.3/Reshape_5" [id=569, type=Reshape]; +"570 /layers/layers.2/blocks.1/Concat_2" [id=570, type=Concat]; +"571 /Flatten/smooth_quant_multiply" [id=571, type=Multiply]; +"572 /layers/layers.3/blocks.1/mlp/act/Mul_1/smooth_quant_multiply" [id=572, type=Multiply]; +"573 /layers/layers.3/blocks.1/attn/qkv/MatMul" [id=573, type=MatMul]; +"574 /layers/layers.3/blocks.0/mlp/fc2/Add" [id=574, type=Add]; +"575 /layers/layers.3/blocks.0/attn/Reshape" [id=575, type=Reshape]; +"576 /layers/layers.2/blocks.5/attn/Reshape_2" [id=576, type=Reshape]; +"577 /layers/layers.2/blocks.5/attn/proj/MatMul" [id=577, type=MatMul]; +"578 /layers/layers.2/blocks.3/Transpose_1" [id=578, type=Transpose]; +"579 /layers/layers.2/blocks.1/Slice_6" [id=579, type=StridedSlice]; +"580 /layers/layers.2/blocks.1/Slice_7" [id=580, type=StridedSlice]; +"581 /head/Gemm/WithoutBiases" [id=581, type=MatMul]; +"582 /layers/layers.3/blocks.1/mlp/fc2/MatMul" [id=582, type=MatMul]; +"583 /layers/layers.3/blocks.1/attn/qkv/Add" [id=583, type=Add]; +"584 /layers/layers.3/blocks.0/attn/Transpose" [id=584, type=Transpose]; +"585 /layers/layers.2/blocks.5/attn/softmax/Softmax" [id=585, type=Softmax]; +"586 /layers/layers.2/blocks.5/attn/proj/Add" [id=586, type=Add]; +"587 /layers/layers.2/blocks.3/Reshape_6" [id=587, type=Reshape]; +"588 /layers/layers.2/blocks.1/Concat_3" [id=588, type=Concat]; +"589 probs" [id=589, type=Add]; +"590 /layers/layers.3/blocks.1/mlp/fc2/Add" [id=590, type=Add]; +"591 /layers/layers.3/blocks.1/attn/Reshape" [id=591, type=Reshape]; +"592 /layers/layers.3/blocks.0/attn/Gather" [id=592, type=Gather]; +"593 /layers/layers.3/blocks.0/attn/Gather_1" [id=593, type=Gather]; +"594 /layers/layers.3/blocks.0/attn/Gather_2" [id=594, type=Gather]; +"595 /layers/layers.2/blocks.5/Reshape_4" [id=595, type=Reshape]; +"596 /layers/layers.2/blocks.3/Slice_4" [id=596, type=StridedSlice]; +"597 /layers/layers.2/blocks.3/Slice_5" [id=597, type=StridedSlice]; +"598 /layers/layers.2/blocks.1/Reshape_7" [id=598, type=Reshape]; +"599 probs/sink_port_0" [id=599, type=Result]; +"600 /layers/layers.3/blocks.1/attn/Transpose" [id=600, type=Transpose]; +"601 /layers/layers.3/blocks.0/attn/Mul" [id=601, type=Multiply]; +"602 
/layers/layers.3/blocks.0/attn/MatMul" [id=602, type=MatMul]; +"603 /layers/layers.3/blocks.0/attn/MatMul_1" [id=603, type=MatMul]; +"604 /layers/layers.2/blocks.5/Reshape_5" [id=604, type=Reshape]; +"605 /layers/layers.2/blocks.3/Concat_2" [id=605, type=Concat]; +"606 /layers/layers.3/blocks.1/attn/Gather" [id=606, type=Gather]; +"607 /layers/layers.3/blocks.1/attn/Gather_1" [id=607, type=Gather]; +"608 /layers/layers.3/blocks.1/attn/Gather_2" [id=608, type=Gather]; +"609 /layers/layers.3/blocks.0/attn/Add" [id=609, type=Add]; +"610 /layers/layers.3/blocks.0/attn/Transpose_2" [id=610, type=Transpose]; +"611 /layers/layers.2/blocks.5/Transpose_1" [id=611, type=Transpose]; +"612 /layers/layers.2/blocks.3/Slice_6" [id=612, type=StridedSlice]; +"613 /layers/layers.2/blocks.3/Slice_7" [id=613, type=StridedSlice]; +"614 /layers/layers.3/blocks.1/attn/Mul" [id=614, type=Multiply]; +"615 /layers/layers.3/blocks.1/attn/MatMul" [id=615, type=MatMul]; +"616 /layers/layers.3/blocks.1/attn/MatMul_1" [id=616, type=MatMul]; +"617 /layers/layers.3/blocks.0/attn/softmax/Softmax" [id=617, type=Softmax]; +"618 /layers/layers.3/blocks.0/attn/Reshape_1" [id=618, type=Reshape]; +"619 /layers/layers.2/blocks.5/Reshape_6" [id=619, type=Reshape]; +"620 /layers/layers.2/blocks.3/Concat_3" [id=620, type=Concat]; +"621 /layers/layers.3/blocks.1/attn/Add" [id=621, type=Add]; +"622 /layers/layers.3/blocks.1/attn/Transpose_2" [id=622, type=Transpose]; +"623 /layers/layers.3/blocks.0/attn/Reshape_1/smooth_quant_multiply" [id=623, type=Multiply]; +"624 /layers/layers.2/blocks.5/Slice_4" [id=624, type=StridedSlice]; +"625 /layers/layers.2/blocks.5/Slice_5" [id=625, type=StridedSlice]; +"626 /layers/layers.2/blocks.3/Reshape_7" [id=626, type=Reshape]; +"627 /layers/layers.3/blocks.1/attn/softmax/Softmax" [id=627, type=Softmax]; +"628 /layers/layers.3/blocks.1/attn/Reshape_1" [id=628, type=Reshape]; +"629 /layers/layers.3/blocks.0/attn/proj/MatMul" [id=629, type=MatMul]; +"630 /layers/layers.2/blocks.5/Concat_2" [id=630, type=Concat]; +"631 /layers/layers.3/blocks.1/attn/Reshape_1/smooth_quant_multiply" [id=631, type=Multiply]; +"632 /layers/layers.3/blocks.0/attn/proj/Add" [id=632, type=Add]; +"633 /layers/layers.2/blocks.5/Slice_6" [id=633, type=StridedSlice]; +"634 /layers/layers.2/blocks.5/Slice_7" [id=634, type=StridedSlice]; +"635 /layers/layers.3/blocks.1/attn/proj/MatMul" [id=635, type=MatMul]; +"636 /layers/layers.3/blocks.0/Reshape_4" [id=636, type=Reshape]; +"637 /layers/layers.2/blocks.5/Concat_3" [id=637, type=Concat]; +"638 /layers/layers.3/blocks.1/attn/proj/Add" [id=638, type=Add]; +"639 /layers/layers.3/blocks.0/Reshape_5" [id=639, type=Reshape]; +"640 /layers/layers.2/blocks.5/Reshape_7" [id=640, type=Reshape]; +"641 /layers/layers.3/blocks.1/Reshape_4" [id=641, type=Reshape]; +"642 /layers/layers.3/blocks.0/Transpose_1" [id=642, type=Reshape]; +"643 /layers/layers.3/blocks.1/Reshape_5" [id=643, type=Reshape]; +"644 /layers/layers.3/blocks.0/Reshape_6" [id=644, type=Reshape]; +"645 /layers/layers.3/blocks.1/Transpose_1" [id=645, type=Reshape]; +"646 /layers/layers.3/blocks.0/Reshape_7" [id=646, type=Reshape]; +"647 /layers/layers.3/blocks.1/Reshape_6" [id=647, type=Reshape]; +"648 /layers/layers.3/blocks.1/Reshape_7" [id=648, type=Reshape]; +"649 Constant_7305" [id=649, type=Constant]; +"650 head.weight" [id=650, type=Constant]; +"651 /Flatten/smooth_quant_const" [id=651, type=Constant]; +"652 Constant_2148" [id=652, type=Constant]; +"653 Constant_6782" [id=653, type=Constant]; +"654 Constant_6196" 
[id=654, type=Constant]; +"655 Constant_7304" [id=655, type=Constant]; +"656 Constant_7303" [id=656, type=Constant]; +"657 Constant_2123" [id=657, type=Constant]; +"658 Constant_6779" [id=658, type=Constant]; +"659 /layers/layers.3/blocks.1/mlp/act/Mul_1/smooth_quant_const" [id=659, type=Constant]; +"660 Constant_6774" [id=660, type=Constant]; +"661 /layers/layers.3/blocks.1/norm2/Add_1/smooth_quant_const" [id=661, type=Constant]; +"662 Constant_7300" [id=662, type=Constant]; +"663 Constant_7299" [id=663, type=Constant]; +"664 Constant_2097" [id=664, type=Constant]; +"665 /layers/layers.3/blocks.1/Constant_7" [id=665, type=Constant]; +"666 /layers/layers.3/blocks.1/Constant_6" [id=666, type=Constant]; +"667 Constant_6767" [id=667, type=Constant]; +"668 /layers/layers.3/blocks.1/Constant_5" [id=668, type=Constant]; +"669 /layers/layers.3/blocks.1/Constant_4" [id=669, type=Constant]; +"670 Constant_6765" [id=670, type=Constant]; +"671 /layers/layers.3/blocks.1/attn/Reshape_1/smooth_quant_const" [id=671, type=Constant]; +"672 /layers/layers.3/blocks.1/attn/Constant_2" [id=672, type=Constant]; +"673 Constant_2070" [id=673, type=Constant]; +"674 Constant_2060" [id=674, type=Constant]; +"675 /patch_embed/Constant" [id=675, type=Constant]; +"676 Constant_2054" [id=676, type=Constant]; +"677 /layers/layers.3/blocks.1/attn/Constant" [id=677, type=Constant]; +"678 Constant_6761" [id=678, type=Constant]; +"679 /layers/layers.3/blocks.1/Reshape_3/smooth_quant_const" [id=679, type=Constant]; +"680 /layers/layers.3/blocks.1/Constant_3" [id=680, type=Constant]; +"681 /layers/layers.3/blocks.1/Constant_2" [id=681, type=Constant]; +"682 Constant_6755" [id=682, type=Constant]; +"683 /layers/layers.3/blocks.1/Constant_1" [id=683, type=Constant]; +"684 Constant_7295" [id=684, type=Constant]; +"685 Constant_7294" [id=685, type=Constant]; +"686 Constant_2017" [id=686, type=Constant]; +"687 Constant_6752" [id=687, type=Constant]; +"688 /layers/layers.3/blocks.0/mlp/act/Mul_1/smooth_quant_const" [id=688, type=Constant]; +"689 Constant_6747" [id=689, type=Constant]; +"690 /layers/layers.3/blocks.0/norm2/Add_1/smooth_quant_const" [id=690, type=Constant]; +"691 Constant_7291" [id=691, type=Constant]; +"692 Constant_7290" [id=692, type=Constant]; +"693 Constant_1991" [id=693, type=Constant]; +"694 /layers/layers.3/blocks.0/Constant_7" [id=694, type=Constant]; +"695 /layers/layers.3/blocks.0/Constant_6" [id=695, type=Constant]; +"696 Constant_6740" [id=696, type=Constant]; +"697 /layers/layers.3/blocks.0/Constant_5" [id=697, type=Constant]; +"698 /layers/layers.3/blocks.0/Constant_4" [id=698, type=Constant]; +"699 Constant_6738" [id=699, type=Constant]; +"700 /layers/layers.3/blocks.0/attn/Reshape_1/smooth_quant_const" [id=700, type=Constant]; +"701 /layers/layers.3/blocks.0/attn/Constant_2" [id=701, type=Constant]; +"702 Constant_1964" [id=702, type=Constant]; +"703 Constant_1954" [id=703, type=Constant]; +"704 Constant_1948" [id=704, type=Constant]; +"705 /layers/layers.3/blocks.0/attn/Constant" [id=705, type=Constant]; +"706 Constant_6734" [id=706, type=Constant]; +"707 /layers/layers.3/blocks.0/Reshape_3/smooth_quant_const" [id=707, type=Constant]; +"708 /layers/layers.3/blocks.0/Constant_3" [id=708, type=Constant]; +"709 /layers/layers.3/blocks.0/Constant_2" [id=709, type=Constant]; +"710 Constant_6728" [id=710, type=Constant]; +"711 /layers/layers.3/blocks.0/Constant_1" [id=711, type=Constant]; +"712 Constant_7286" [id=712, type=Constant]; +"713 Constant_7285" [id=713, type=Constant]; +"714 Constant_1911" 
[id=714, type=Constant]; +"715 Constant_6725" [id=715, type=Constant]; +"716 /layers/layers.2/downsample/norm/Add_1/smooth_quant_const" [id=716, type=Constant]; +"717 Constant_7284" [id=717, type=Constant]; +"718 Constant_7283" [id=718, type=Constant]; +"719 Constant_1897" [id=719, type=Constant]; +"720 /layers/layers.2/downsample/Constant_25" [id=720, type=Constant]; +"721 Constant_5911" [id=721, type=Constant]; +"722 Constant_5908" [id=722, type=Constant]; +"723 Constant_5905" [id=723, type=Constant]; +"724 Constant_5875" [id=724, type=Constant]; +"725 Constant_5872" [id=725, type=Constant]; +"726 Constant_5869" [id=726, type=Constant]; +"727 /layers/layers.2/downsample/Constant" [id=727, type=Constant]; +"728 Constant_6720" [id=728, type=Constant]; +"729 /layers/layers.2/blocks.5/mlp/act/Mul_1/smooth_quant_const" [id=729, type=Constant]; +"730 Constant_6715" [id=730, type=Constant]; +"731 /layers/layers.2/blocks.5/norm2/Add_1/smooth_quant_const" [id=731, type=Constant]; +"732 Constant_7280" [id=732, type=Constant]; +"733 Constant_7279" [id=733, type=Constant]; +"734 Constant_1832" [id=734, type=Constant]; +"735 /layers/layers.2/blocks.5/Constant_31" [id=735, type=Constant]; +"736 Constant_5839" [id=736, type=Constant]; +"737 Constant_5836" [id=737, type=Constant]; +"738 Constant_5833" [id=738, type=Constant]; +"739 Constant_5815" [id=739, type=Constant]; +"740 Constant_5812" [id=740, type=Constant]; +"741 Constant_5809" [id=741, type=Constant]; +"742 /layers/layers.2/blocks.5/Constant_18" [id=742, type=Constant]; +"743 Constant_1779" [id=743, type=Constant]; +"744 /layers/layers.2/blocks.5/Constant_17" [id=744, type=Constant]; +"745 /layers/layers.2/blocks.5/Constant_16" [id=745, type=Constant]; +"746 Constant_6710" [id=746, type=Constant]; +"747 /layers/layers.2/blocks.5/attn/Reshape_3/smooth_quant_const" [id=747, type=Constant]; +"748 /layers/layers.2/blocks.5/attn/Constant_4" [id=748, type=Constant]; +"749 Constant_1763" [id=749, type=Constant]; +"750 Constant_1744" [id=750, type=Constant]; +"751 Constant_1738" [id=751, type=Constant]; +"752 /layers/layers.2/blocks.5/attn/Constant" [id=752, type=Constant]; +"753 Constant_6706" [id=753, type=Constant]; +"754 /layers/layers.2/blocks.5/Reshape_3/smooth_quant_const" [id=754, type=Constant]; +"755 /layers/layers.2/blocks.5/Constant_15" [id=755, type=Constant]; +"756 /layers/layers.2/blocks.5/Constant_14" [id=756, type=Constant]; +"757 Constant_1722" [id=757, type=Constant]; +"758 /layers/layers.2/blocks.5/Constant_13" [id=758, type=Constant]; +"759 Constant_5791" [id=759, type=Constant]; +"760 Constant_5788" [id=760, type=Constant]; +"761 Constant_5785" [id=761, type=Constant]; +"762 Constant_5767" [id=762, type=Constant]; +"763 Constant_5764" [id=763, type=Constant]; +"764 Constant_5761" [id=764, type=Constant]; +"765 /layers/layers.2/blocks.5/Constant" [id=765, type=Constant]; +"766 Constant_7275" [id=766, type=Constant]; +"767 Constant_7274" [id=767, type=Constant]; +"768 Constant_1659" [id=768, type=Constant]; +"769 Constant_6701" [id=769, type=Constant]; +"770 /layers/layers.2/blocks.4/mlp/act/Mul_1/smooth_quant_const" [id=770, type=Constant]; +"771 Constant_6696" [id=771, type=Constant]; +"772 /layers/layers.2/blocks.4/norm2/Add_1/smooth_quant_const" [id=772, type=Constant]; +"773 Constant_7271" [id=773, type=Constant]; +"774 Constant_7270" [id=774, type=Constant]; +"775 Constant_1633" [id=775, type=Constant]; +"776 /layers/layers.2/blocks.4/Constant_7" [id=776, type=Constant]; +"777 /layers/layers.2/blocks.4/Constant_6" [id=777, 
type=Constant]; +"778 Constant_1622" [id=778, type=Constant]; +"779 /layers/layers.2/blocks.4/Constant_5" [id=779, type=Constant]; +"780 /layers/layers.2/blocks.4/Constant_4" [id=780, type=Constant]; +"781 Constant_6691" [id=781, type=Constant]; +"782 /layers/layers.2/blocks.4/attn/Reshape_1/smooth_quant_const" [id=782, type=Constant]; +"783 /layers/layers.2/blocks.4/attn/Constant_2" [id=783, type=Constant]; +"784 Constant_1606" [id=784, type=Constant]; +"785 Constant_1596" [id=785, type=Constant]; +"786 Constant_1590" [id=786, type=Constant]; +"787 /layers/layers.2/blocks.4/attn/Constant" [id=787, type=Constant]; +"788 Constant_6687" [id=788, type=Constant]; +"789 /layers/layers.2/blocks.4/Reshape_3/smooth_quant_const" [id=789, type=Constant]; +"790 /layers/layers.2/blocks.4/Constant_3" [id=790, type=Constant]; +"791 /layers/layers.2/blocks.4/Constant_2" [id=791, type=Constant]; +"792 Constant_1574" [id=792, type=Constant]; +"793 /layers/layers.2/blocks.4/Constant_1" [id=793, type=Constant]; +"794 Constant_7266" [id=794, type=Constant]; +"795 Constant_7265" [id=795, type=Constant]; +"796 Constant_1553" [id=796, type=Constant]; +"797 Constant_6682" [id=797, type=Constant]; +"798 /layers/layers.2/blocks.3/mlp/act/Mul_1/smooth_quant_const" [id=798, type=Constant]; +"799 Constant_6677" [id=799, type=Constant]; +"800 /layers/layers.2/blocks.3/norm2/Add_1/smooth_quant_const" [id=800, type=Constant]; +"801 Constant_7262" [id=801, type=Constant]; +"802 Constant_7261" [id=802, type=Constant]; +"803 Constant_1527" [id=803, type=Constant]; +"804 /layers/layers.2/blocks.3/Constant_31" [id=804, type=Constant]; +"805 Constant_5743" [id=805, type=Constant]; +"806 Constant_5740" [id=806, type=Constant]; +"807 Constant_5737" [id=807, type=Constant]; +"808 Constant_5719" [id=808, type=Constant]; +"809 Constant_5716" [id=809, type=Constant]; +"810 Constant_5713" [id=810, type=Constant]; +"811 /layers/layers.2/blocks.3/Constant_18" [id=811, type=Constant]; +"812 Constant_1474" [id=812, type=Constant]; +"813 /layers/layers.2/blocks.3/Constant_17" [id=813, type=Constant]; +"814 /layers/layers.2/blocks.3/Constant_16" [id=814, type=Constant]; +"815 Constant_6672" [id=815, type=Constant]; +"816 /layers/layers.2/blocks.3/attn/Reshape_3/smooth_quant_const" [id=816, type=Constant]; +"817 /layers/layers.2/blocks.3/attn/Constant_4" [id=817, type=Constant]; +"818 Constant_1458" [id=818, type=Constant]; +"819 Constant_1439" [id=819, type=Constant]; +"820 Constant_1433" [id=820, type=Constant]; +"821 /layers/layers.2/blocks.3/attn/Constant" [id=821, type=Constant]; +"822 Constant_6668" [id=822, type=Constant]; +"823 /layers/layers.2/blocks.3/Reshape_3/smooth_quant_const" [id=823, type=Constant]; +"824 /layers/layers.2/blocks.3/Constant_15" [id=824, type=Constant]; +"825 /layers/layers.2/blocks.3/Constant_14" [id=825, type=Constant]; +"826 Constant_1417" [id=826, type=Constant]; +"827 /layers/layers.2/blocks.3/Constant_13" [id=827, type=Constant]; +"828 Constant_5695" [id=828, type=Constant]; +"829 Constant_5692" [id=829, type=Constant]; +"830 Constant_5689" [id=830, type=Constant]; +"831 Constant_5671" [id=831, type=Constant]; +"832 Constant_5668" [id=832, type=Constant]; +"833 Constant_5665" [id=833, type=Constant]; +"834 /layers/layers.2/blocks.3/Constant" [id=834, type=Constant]; +"835 Constant_7257" [id=835, type=Constant]; +"836 Constant_7256" [id=836, type=Constant]; +"837 Constant_1354" [id=837, type=Constant]; +"838 Constant_6663" [id=838, type=Constant]; +"839 
/layers/layers.2/blocks.2/mlp/act/Mul_1/smooth_quant_const" [id=839, type=Constant]; +"840 Constant_6658" [id=840, type=Constant]; +"841 /layers/layers.2/blocks.2/norm2/Add_1/smooth_quant_const" [id=841, type=Constant]; +"842 Constant_7253" [id=842, type=Constant]; +"843 Constant_7252" [id=843, type=Constant]; +"844 Constant_1328" [id=844, type=Constant]; +"845 /layers/layers.2/blocks.2/Constant_7" [id=845, type=Constant]; +"846 /layers/layers.2/blocks.2/Constant_6" [id=846, type=Constant]; +"847 Constant_1317" [id=847, type=Constant]; +"848 /layers/layers.2/blocks.2/Constant_5" [id=848, type=Constant]; +"849 /layers/layers.2/blocks.2/Constant_4" [id=849, type=Constant]; +"850 Constant_6653" [id=850, type=Constant]; +"851 /layers/layers.2/blocks.2/attn/Reshape_1/smooth_quant_const" [id=851, type=Constant]; +"852 /layers/layers.2/blocks.2/attn/Constant_2" [id=852, type=Constant]; +"853 Constant_1301" [id=853, type=Constant]; +"854 Constant_1291" [id=854, type=Constant]; +"855 Constant_1285" [id=855, type=Constant]; +"856 /layers/layers.2/blocks.2/attn/Constant" [id=856, type=Constant]; +"857 Constant_6649" [id=857, type=Constant]; +"858 /layers/layers.2/blocks.2/Reshape_3/smooth_quant_const" [id=858, type=Constant]; +"859 /layers/layers.2/blocks.2/Constant_3" [id=859, type=Constant]; +"860 /layers/layers.2/blocks.2/Constant_2" [id=860, type=Constant]; +"861 Constant_1269" [id=861, type=Constant]; +"862 /layers/layers.2/blocks.2/Constant_1" [id=862, type=Constant]; +"863 Constant_7248" [id=863, type=Constant]; +"864 Constant_7247" [id=864, type=Constant]; +"865 Constant_1248" [id=865, type=Constant]; +"866 Constant_6644" [id=866, type=Constant]; +"867 /layers/layers.2/blocks.1/mlp/act/Mul_1/smooth_quant_const" [id=867, type=Constant]; +"868 Constant_6639" [id=868, type=Constant]; +"869 /layers/layers.2/blocks.1/norm2/Add_1/smooth_quant_const" [id=869, type=Constant]; +"870 Constant_7244" [id=870, type=Constant]; +"871 Constant_7243" [id=871, type=Constant]; +"872 Constant_1222" [id=872, type=Constant]; +"873 /layers/layers.2/blocks.1/Constant_31" [id=873, type=Constant]; +"874 Constant_5647" [id=874, type=Constant]; +"875 Constant_5644" [id=875, type=Constant]; +"876 Constant_5641" [id=876, type=Constant]; +"877 Constant_5623" [id=877, type=Constant]; +"878 Constant_5620" [id=878, type=Constant]; +"879 Constant_5617" [id=879, type=Constant]; +"880 /layers/layers.2/blocks.1/Constant_18" [id=880, type=Constant]; +"881 Constant_1169" [id=881, type=Constant]; +"882 /layers/layers.2/blocks.1/Constant_17" [id=882, type=Constant]; +"883 /layers/layers.2/blocks.1/Constant_16" [id=883, type=Constant]; +"884 Constant_6634" [id=884, type=Constant]; +"885 /layers/layers.2/blocks.1/attn/Reshape_3/smooth_quant_const" [id=885, type=Constant]; +"886 /layers/layers.2/blocks.1/attn/Constant_4" [id=886, type=Constant]; +"887 Constant_1153" [id=887, type=Constant]; +"888 Constant_1134" [id=888, type=Constant]; +"889 Constant_1128" [id=889, type=Constant]; +"890 /layers/layers.2/blocks.1/attn/Constant" [id=890, type=Constant]; +"891 Constant_6630" [id=891, type=Constant]; +"892 /layers/layers.2/blocks.1/Reshape_3/smooth_quant_const" [id=892, type=Constant]; +"893 /layers/layers.2/blocks.1/Constant_15" [id=893, type=Constant]; +"894 /layers/layers.2/blocks.1/Constant_14" [id=894, type=Constant]; +"895 Constant_1112" [id=895, type=Constant]; +"896 /layers/layers.2/blocks.1/Constant_13" [id=896, type=Constant]; +"897 Constant_5599" [id=897, type=Constant]; +"898 Constant_5596" [id=898, type=Constant]; +"899 
Constant_5593" [id=899, type=Constant]; +"900 Constant_5575" [id=900, type=Constant]; +"901 Constant_5572" [id=901, type=Constant]; +"902 Constant_5569" [id=902, type=Constant]; +"903 /layers/layers.2/blocks.1/Constant" [id=903, type=Constant]; +"904 Constant_7239" [id=904, type=Constant]; +"905 Constant_7238" [id=905, type=Constant]; +"906 Constant_1049" [id=906, type=Constant]; +"907 Constant_6625" [id=907, type=Constant]; +"908 /layers/layers.2/blocks.0/mlp/act/Mul_1/smooth_quant_const" [id=908, type=Constant]; +"909 Constant_6620" [id=909, type=Constant]; +"910 /layers/layers.2/blocks.0/norm2/Add_1/smooth_quant_const" [id=910, type=Constant]; +"911 Constant_7235" [id=911, type=Constant]; +"912 Constant_7234" [id=912, type=Constant]; +"913 Constant_1023" [id=913, type=Constant]; +"914 /layers/layers.2/blocks.0/Constant_7" [id=914, type=Constant]; +"915 /layers/layers.2/blocks.0/Constant_6" [id=915, type=Constant]; +"916 Constant_1012" [id=916, type=Constant]; +"917 /layers/layers.2/blocks.0/Constant_5" [id=917, type=Constant]; +"918 /layers/layers.2/blocks.0/Constant_4" [id=918, type=Constant]; +"919 Constant_6615" [id=919, type=Constant]; +"920 /layers/layers.2/blocks.0/attn/Reshape_1/smooth_quant_const" [id=920, type=Constant]; +"921 /layers/layers.2/blocks.0/attn/Constant_2" [id=921, type=Constant]; +"922 Constant_996" [id=922, type=Constant]; +"923 Constant_986" [id=923, type=Constant]; +"924 Constant_980" [id=924, type=Constant]; +"925 /layers/layers.2/blocks.0/attn/Constant" [id=925, type=Constant]; +"926 Constant_6611" [id=926, type=Constant]; +"927 /layers/layers.2/blocks.0/Reshape_3/smooth_quant_const" [id=927, type=Constant]; +"928 /layers/layers.2/blocks.0/Constant_3" [id=928, type=Constant]; +"929 /layers/layers.2/blocks.0/Constant_2" [id=929, type=Constant]; +"930 Constant_964" [id=930, type=Constant]; +"931 /layers/layers.2/blocks.0/Constant_1" [id=931, type=Constant]; +"932 Constant_7230" [id=932, type=Constant]; +"933 Constant_7229" [id=933, type=Constant]; +"934 Constant_943" [id=934, type=Constant]; +"935 Constant_6606" [id=935, type=Constant]; +"936 /layers/layers.1/downsample/norm/Add_1/smooth_quant_const" [id=936, type=Constant]; +"937 Constant_7228" [id=937, type=Constant]; +"938 Constant_7227" [id=938, type=Constant]; +"939 Constant_929" [id=939, type=Constant]; +"940 /layers/layers.1/downsample/Constant_25" [id=940, type=Constant]; +"941 Constant_5551" [id=941, type=Constant]; +"942 Constant_5548" [id=942, type=Constant]; +"943 Constant_5545" [id=943, type=Constant]; +"944 Constant_5515" [id=944, type=Constant]; +"945 Constant_5512" [id=945, type=Constant]; +"946 Constant_5509" [id=946, type=Constant]; +"947 /layers/layers.1/downsample/Constant" [id=947, type=Constant]; +"948 Constant_6601" [id=948, type=Constant]; +"949 /layers/layers.1/blocks.1/mlp/act/Mul_1/smooth_quant_const" [id=949, type=Constant]; +"950 Constant_6596" [id=950, type=Constant]; +"951 /layers/layers.1/blocks.1/norm2/Add_1/smooth_quant_const" [id=951, type=Constant]; +"952 Constant_7224" [id=952, type=Constant]; +"953 Constant_7223" [id=953, type=Constant]; +"954 Constant_864" [id=954, type=Constant]; +"955 /layers/layers.1/blocks.1/Constant_31" [id=955, type=Constant]; +"956 Constant_5479" [id=956, type=Constant]; +"957 Constant_5476" [id=957, type=Constant]; +"958 Constant_5473" [id=958, type=Constant]; +"959 Constant_5455" [id=959, type=Constant]; +"960 Constant_5452" [id=960, type=Constant]; +"961 Constant_5449" [id=961, type=Constant]; +"962 /layers/layers.1/blocks.1/Constant_18" [id=962, 
type=Constant]; +"963 Constant_811" [id=963, type=Constant]; +"964 /layers/layers.1/blocks.1/Constant_17" [id=964, type=Constant]; +"965 /layers/layers.1/blocks.1/Constant_16" [id=965, type=Constant]; +"966 Constant_6591" [id=966, type=Constant]; +"967 /layers/layers.1/blocks.1/attn/Reshape_3/smooth_quant_const" [id=967, type=Constant]; +"968 /layers/layers.1/blocks.1/attn/Constant_4" [id=968, type=Constant]; +"969 Constant_795" [id=969, type=Constant]; +"970 Constant_776" [id=970, type=Constant]; +"971 Constant_770" [id=971, type=Constant]; +"972 /layers/layers.1/blocks.1/attn/Constant" [id=972, type=Constant]; +"973 Constant_6587" [id=973, type=Constant]; +"974 /layers/layers.1/blocks.1/Reshape_3/smooth_quant_const" [id=974, type=Constant]; +"975 /layers/layers.1/blocks.1/Constant_15" [id=975, type=Constant]; +"976 /layers/layers.1/blocks.1/Constant_14" [id=976, type=Constant]; +"977 Constant_754" [id=977, type=Constant]; +"978 /layers/layers.1/blocks.1/Constant_13" [id=978, type=Constant]; +"979 Constant_5431" [id=979, type=Constant]; +"980 Constant_5428" [id=980, type=Constant]; +"981 Constant_5425" [id=981, type=Constant]; +"982 Constant_5407" [id=982, type=Constant]; +"983 Constant_5404" [id=983, type=Constant]; +"984 Constant_5401" [id=984, type=Constant]; +"985 /layers/layers.1/blocks.1/Constant" [id=985, type=Constant]; +"986 Constant_7219" [id=986, type=Constant]; +"987 Constant_7218" [id=987, type=Constant]; +"988 Constant_691" [id=988, type=Constant]; +"989 Constant_6582" [id=989, type=Constant]; +"990 /layers/layers.1/blocks.0/mlp/act/Mul_1/smooth_quant_const" [id=990, type=Constant]; +"991 Constant_6577" [id=991, type=Constant]; +"992 /layers/layers.1/blocks.0/norm2/Add_1/smooth_quant_const" [id=992, type=Constant]; +"993 Constant_7215" [id=993, type=Constant]; +"994 Constant_7214" [id=994, type=Constant]; +"995 Constant_665" [id=995, type=Constant]; +"996 /layers/layers.1/blocks.0/Constant_7" [id=996, type=Constant]; +"997 /layers/layers.1/blocks.0/Constant_6" [id=997, type=Constant]; +"998 Constant_654" [id=998, type=Constant]; +"999 /layers/layers.1/blocks.0/Constant_5" [id=999, type=Constant]; +"1000 /layers/layers.1/blocks.0/Constant_4" [id=1000, type=Constant]; +"1001 Constant_6572" [id=1001, type=Constant]; +"1002 /layers/layers.1/blocks.0/attn/Reshape_1/smooth_quant_const" [id=1002, type=Constant]; +"1003 /layers/layers.1/blocks.0/attn/Constant_2" [id=1003, type=Constant]; +"1004 Constant_638" [id=1004, type=Constant]; +"1005 Constant_628" [id=1005, type=Constant]; +"1006 Constant_622" [id=1006, type=Constant]; +"1007 /layers/layers.1/blocks.0/attn/Constant" [id=1007, type=Constant]; +"1008 Constant_6568" [id=1008, type=Constant]; +"1009 /layers/layers.1/blocks.0/Reshape_3/smooth_quant_const" [id=1009, type=Constant]; +"1010 /layers/layers.1/blocks.0/Constant_3" [id=1010, type=Constant]; +"1011 /layers/layers.1/blocks.0/Constant_2" [id=1011, type=Constant]; +"1012 Constant_606" [id=1012, type=Constant]; +"1013 /layers/layers.1/blocks.0/Constant_1" [id=1013, type=Constant]; +"1014 Constant_7210" [id=1014, type=Constant]; +"1015 Constant_7209" [id=1015, type=Constant]; +"1016 Constant_585" [id=1016, type=Constant]; +"1017 Constant_6563" [id=1017, type=Constant]; +"1018 /layers/layers.0/downsample/norm/Add_1/smooth_quant_const" [id=1018, type=Constant]; +"1019 Constant_7208" [id=1019, type=Constant]; +"1020 Constant_7207" [id=1020, type=Constant]; +"1021 Constant_571" [id=1021, type=Constant]; +"1022 /layers/layers.0/downsample/Constant_25" [id=1022, type=Constant]; 
+"1023 Constant_5383" [id=1023, type=Constant]; +"1024 Constant_5380" [id=1024, type=Constant]; +"1025 Constant_5377" [id=1025, type=Constant]; +"1026 Constant_5347" [id=1026, type=Constant]; +"1027 Constant_5344" [id=1027, type=Constant]; +"1028 Constant_5341" [id=1028, type=Constant]; +"1029 /layers/layers.0/downsample/Constant" [id=1029, type=Constant]; +"1030 Constant_6558" [id=1030, type=Constant]; +"1031 /layers/layers.0/blocks.1/mlp/act/Mul_1/smooth_quant_const" [id=1031, type=Constant]; +"1032 Constant_6553" [id=1032, type=Constant]; +"1033 /layers/layers.0/blocks.1/norm2/Add_1/smooth_quant_const" [id=1033, type=Constant]; +"1034 Constant_7204" [id=1034, type=Constant]; +"1035 Constant_7203" [id=1035, type=Constant]; +"1036 Constant_506" [id=1036, type=Constant]; +"1037 /layers/layers.0/blocks.1/Constant_31" [id=1037, type=Constant]; +"1038 Constant_5311" [id=1038, type=Constant]; +"1039 Constant_5308" [id=1039, type=Constant]; +"1040 Constant_5305" [id=1040, type=Constant]; +"1041 Constant_5287" [id=1041, type=Constant]; +"1042 Constant_5284" [id=1042, type=Constant]; +"1043 Constant_5281" [id=1043, type=Constant]; +"1044 /layers/layers.0/blocks.1/Constant_18" [id=1044, type=Constant]; +"1045 Constant_453" [id=1045, type=Constant]; +"1046 /layers/layers.0/blocks.1/Constant_17" [id=1046, type=Constant]; +"1047 /layers/layers.0/blocks.1/Constant_16" [id=1047, type=Constant]; +"1048 Constant_6548" [id=1048, type=Constant]; +"1049 /layers/layers.0/blocks.1/attn/Reshape_3/smooth_quant_const" [id=1049, type=Constant]; +"1050 /layers/layers.0/blocks.1/attn/Constant_4" [id=1050, type=Constant]; +"1051 Constant_437" [id=1051, type=Constant]; +"1052 Constant_418" [id=1052, type=Constant]; +"1053 Constant_412" [id=1053, type=Constant]; +"1054 /layers/layers.0/blocks.1/attn/Constant" [id=1054, type=Constant]; +"1055 Constant_6544" [id=1055, type=Constant]; +"1056 /layers/layers.0/blocks.1/Reshape_3/smooth_quant_const" [id=1056, type=Constant]; +"1057 /layers/layers.0/blocks.1/Constant_15" [id=1057, type=Constant]; +"1058 /layers/layers.0/blocks.1/Constant_14" [id=1058, type=Constant]; +"1059 Constant_396" [id=1059, type=Constant]; +"1060 /layers/layers.0/blocks.1/Constant_13" [id=1060, type=Constant]; +"1061 Constant_5263" [id=1061, type=Constant]; +"1062 Constant_5260" [id=1062, type=Constant]; +"1063 Constant_5257" [id=1063, type=Constant]; +"1064 Constant_5239" [id=1064, type=Constant]; +"1065 Constant_5236" [id=1065, type=Constant]; +"1066 Constant_5233" [id=1066, type=Constant]; +"1067 /layers/layers.0/blocks.1/Constant" [id=1067, type=Constant]; +"1068 Constant_7199" [id=1068, type=Constant]; +"1069 Constant_7198" [id=1069, type=Constant]; +"1070 Constant_333" [id=1070, type=Constant]; +"1071 Constant_6539" [id=1071, type=Constant]; +"1072 /layers/layers.0/blocks.0/mlp/act/Mul_1/smooth_quant_const" [id=1072, type=Constant]; +"1073 Constant_6534" [id=1073, type=Constant]; +"1074 /layers/layers.0/blocks.0/norm2/Add_1/smooth_quant_const" [id=1074, type=Constant]; +"1075 Constant_7195" [id=1075, type=Constant]; +"1076 Constant_7194" [id=1076, type=Constant]; +"1077 Constant_307" [id=1077, type=Constant]; +"1078 /layers/layers.0/blocks.0/Constant_8" [id=1078, type=Constant]; +"1079 /layers/layers.0/blocks.0/Constant_7" [id=1079, type=Constant]; +"1080 Constant_296" [id=1080, type=Constant]; +"1081 /layers/layers.0/blocks.0/Constant_6" [id=1081, type=Constant]; +"1082 /layers/layers.0/blocks.0/Constant_5" [id=1082, type=Constant]; +"1083 Constant_6529" [id=1083, type=Constant]; +"1084 
/layers/layers.0/blocks.0/attn/Reshape_1/smooth_quant_const" [id=1084, type=Constant]; +"1085 /layers/layers.0/blocks.0/attn/Constant_2" [id=1085, type=Constant]; +"1086 Constant_280" [id=1086, type=Constant]; +"1087 Constant_270" [id=1087, type=Constant]; +"1088 Constant_264" [id=1088, type=Constant]; +"1089 /layers/layers.0/blocks.0/attn/Constant" [id=1089, type=Constant]; +"1090 Constant_6525" [id=1090, type=Constant]; +"1091 /layers/layers.0/blocks.0/Reshape_3/smooth_quant_const" [id=1091, type=Constant]; +"1092 /layers/layers.0/blocks.0/Constant_4" [id=1092, type=Constant]; +"1093 /layers/layers.0/blocks.0/Constant_3" [id=1093, type=Constant]; +"1094 Constant_248" [id=1094, type=Constant]; +"1095 /layers/layers.0/blocks.0/Constant_2" [id=1095, type=Constant]; +"1096 Constant_7190" [id=1096, type=Constant]; +"1097 Constant_7189" [id=1097, type=Constant]; +"1098 Constant_227" [id=1098, type=Constant]; +"1099 Constant_7188" [id=1099, type=Constant]; +"1100 Constant_7187" [id=1100, type=Constant]; +"1101 Constant_213" [id=1101, type=Constant]; +"1102 Constant_211" [id=1102, type=Constant]; +"1103 /patch_embed/Constant_4" [id=1103, type=Constant]; +"1104 Broadcast_201" [id=1104, type=Constant]; +"1105 /patch_embed/Constant_3" [id=1105, type=Constant]; +"1106 /patch_embed/Constant_2" [id=1106, type=Constant]; +"1107 Reshape_190" [id=1107, type=Constant]; +"1108 Gather_7186" [id=1108, type=Constant]; +"1109 Gather_7183" [id=1109, type=Constant]; +"1110 Gather_7180" [id=1110, type=Constant]; +"1111 Constant_7191" [id=1111, type=Constant]; +"1112 onnx^^Add_2243" [id=1112, label="1112 onnx::Add_2243", type=Constant]; +"1113 Constant_268" [id=1113, type=Constant]; +"1114 /patch_embed/proj/Constant" [id=1114, type=Constant]; +"1115 Constant_7192" [id=1115, type=Constant]; +"1116 Constant_266" [id=1116, type=Constant]; +"1117 /layers/layers.0/blocks.0/Constant" [id=1117, type=Constant]; +"1118 Constant_7193" [id=1118, type=Constant]; +"1119 Constant_7196" [id=1119, type=Constant]; +"1120 Constant_7197" [id=1120, type=Constant]; +"1121 Constant_5227" [id=1121, type=Constant]; +"1122 Constant_5224" [id=1122, type=Constant]; +"1123 Constant_5221" [id=1123, type=Constant]; +"1124 Constant_5251" [id=1124, type=Constant]; +"1125 Constant_5248" [id=1125, type=Constant]; +"1126 Constant_5245" [id=1126, type=Constant]; +"1127 Constant_7200" [id=1127, type=Constant]; +"1128 /layers/layers.0/blocks.1/attn/Constant_3" [id=1128, type=Constant]; +"1129 onnx^^Add_2300" [id=1129, label="1129 onnx::Add_2300", type=Constant]; +"1130 /layers/layers.0/blocks.1/attn/Constant_2" [id=1130, type=Constant]; +"1131 onnx^^Add_2292" [id=1131, label="1131 onnx::Add_2292", type=Constant]; +"1132 Constant_416" [id=1132, type=Constant]; +"1133 Constant_7201" [id=1133, type=Constant]; +"1134 Constant_414" [id=1134, type=Constant]; +"1135 Constant_7202" [id=1135, type=Constant]; +"1136 Constant_5275" [id=1136, type=Constant]; +"1137 Constant_5272" [id=1137, type=Constant]; +"1138 Constant_5269" [id=1138, type=Constant]; +"1139 Constant_5299" [id=1139, type=Constant]; +"1140 Constant_5296" [id=1140, type=Constant]; +"1141 Constant_5293" [id=1141, type=Constant]; +"1142 Constant_7205" [id=1142, type=Constant]; +"1143 Constant_7206" [id=1143, type=Constant]; +"1144 Constant_5371" [id=1144, type=Constant]; +"1145 Constant_5368" [id=1145, type=Constant]; +"1146 Constant_5365" [id=1146, type=Constant]; +"1147 Constant_5323" [id=1147, type=Constant]; +"1148 Constant_5320" [id=1148, type=Constant]; +"1149 Constant_5317" [id=1149, 
type=Constant]; +"1150 Constant_5359" [id=1150, type=Constant]; +"1151 Constant_5356" [id=1151, type=Constant]; +"1152 Constant_5353" [id=1152, type=Constant]; +"1153 Constant_5335" [id=1153, type=Constant]; +"1154 Constant_5332" [id=1154, type=Constant]; +"1155 Constant_5329" [id=1155, type=Constant]; +"1156 Constant_7211" [id=1156, type=Constant]; +"1157 onnx^^Add_2364" [id=1157, label="1157 onnx::Add_2364", type=Constant]; +"1158 Constant_626" [id=1158, type=Constant]; +"1159 Constant_7212" [id=1159, type=Constant]; +"1160 Constant_624" [id=1160, type=Constant]; +"1161 Constant_7213" [id=1161, type=Constant]; +"1162 Constant_7216" [id=1162, type=Constant]; +"1163 Constant_7217" [id=1163, type=Constant]; +"1164 Constant_5395" [id=1164, type=Constant]; +"1165 Constant_5392" [id=1165, type=Constant]; +"1166 Constant_5389" [id=1166, type=Constant]; +"1167 Constant_5419" [id=1167, type=Constant]; +"1168 Constant_5416" [id=1168, type=Constant]; +"1169 Constant_5413" [id=1169, type=Constant]; +"1170 Constant_7220" [id=1170, type=Constant]; +"1171 /layers/layers.1/blocks.1/attn/Constant_3" [id=1171, type=Constant]; +"1172 onnx^^Add_2421" [id=1172, label="1172 onnx::Add_2421", type=Constant]; +"1173 /layers/layers.1/blocks.1/attn/Constant_2" [id=1173, type=Constant]; +"1174 onnx^^Add_2413" [id=1174, label="1174 onnx::Add_2413", type=Constant]; +"1175 Constant_774" [id=1175, type=Constant]; +"1176 Constant_7221" [id=1176, type=Constant]; +"1177 Constant_772" [id=1177, type=Constant]; +"1178 Constant_7222" [id=1178, type=Constant]; +"1179 Constant_5443" [id=1179, type=Constant]; +"1180 Constant_5440" [id=1180, type=Constant]; +"1181 Constant_5437" [id=1181, type=Constant]; +"1182 Constant_5467" [id=1182, type=Constant]; +"1183 Constant_5464" [id=1183, type=Constant]; +"1184 Constant_5461" [id=1184, type=Constant]; +"1185 Constant_7225" [id=1185, type=Constant]; +"1186 Constant_7226" [id=1186, type=Constant]; +"1187 Constant_5539" [id=1187, type=Constant]; +"1188 Constant_5536" [id=1188, type=Constant]; +"1189 Constant_5533" [id=1189, type=Constant]; +"1190 Constant_5491" [id=1190, type=Constant]; +"1191 Constant_5488" [id=1191, type=Constant]; +"1192 Constant_5485" [id=1192, type=Constant]; +"1193 Constant_5527" [id=1193, type=Constant]; +"1194 Constant_5524" [id=1194, type=Constant]; +"1195 Constant_5521" [id=1195, type=Constant]; +"1196 Constant_5503" [id=1196, type=Constant]; +"1197 Constant_5500" [id=1197, type=Constant]; +"1198 Constant_5497" [id=1198, type=Constant]; +"1199 Constant_7231" [id=1199, type=Constant]; +"1200 onnx^^Add_2485" [id=1200, label="1200 onnx::Add_2485", type=Constant]; +"1201 Constant_984" [id=1201, type=Constant]; +"1202 Constant_7232" [id=1202, type=Constant]; +"1203 Constant_982" [id=1203, type=Constant]; +"1204 Constant_7233" [id=1204, type=Constant]; +"1205 Constant_7236" [id=1205, type=Constant]; +"1206 Constant_7237" [id=1206, type=Constant]; +"1207 Constant_5563" [id=1207, type=Constant]; +"1208 Constant_5560" [id=1208, type=Constant]; +"1209 Constant_5557" [id=1209, type=Constant]; +"1210 Constant_5587" [id=1210, type=Constant]; +"1211 Constant_5584" [id=1211, type=Constant]; +"1212 Constant_5581" [id=1212, type=Constant]; +"1213 Constant_7240" [id=1213, type=Constant]; +"1214 /layers/layers.2/blocks.1/attn/Constant_3" [id=1214, type=Constant]; +"1215 onnx^^Add_2653" [id=1215, label="1215 onnx::Add_2653", type=Constant]; +"1216 /layers/layers.2/blocks.1/attn/Constant_2" [id=1216, type=Constant]; +"1217 onnx^^Add_2534" [id=1217, label="1217 onnx::Add_2534", 
type=Constant]; +"1218 Constant_1132" [id=1218, type=Constant]; +"1219 Constant_7241" [id=1219, type=Constant]; +"1220 Constant_1130" [id=1220, type=Constant]; +"1221 Constant_7242" [id=1221, type=Constant]; +"1222 Constant_5611" [id=1222, type=Constant]; +"1223 Constant_5608" [id=1223, type=Constant]; +"1224 Constant_5605" [id=1224, type=Constant]; +"1225 Constant_5635" [id=1225, type=Constant]; +"1226 Constant_5632" [id=1226, type=Constant]; +"1227 Constant_5629" [id=1227, type=Constant]; +"1228 Constant_7245" [id=1228, type=Constant]; +"1229 Constant_7246" [id=1229, type=Constant]; +"1230 Constant_7249" [id=1230, type=Constant]; +"1231 onnx^^Add_2596" [id=1231, label="1231 onnx::Add_2596", type=Constant]; +"1232 Constant_1289" [id=1232, type=Constant]; +"1233 Constant_7250" [id=1233, type=Constant]; +"1234 Constant_1287" [id=1234, type=Constant]; +"1235 Constant_7251" [id=1235, type=Constant]; +"1236 Constant_7254" [id=1236, type=Constant]; +"1237 Constant_7255" [id=1237, type=Constant]; +"1238 Constant_5659" [id=1238, type=Constant]; +"1239 Constant_5656" [id=1239, type=Constant]; +"1240 Constant_5653" [id=1240, type=Constant]; +"1241 Constant_5683" [id=1241, type=Constant]; +"1242 Constant_5680" [id=1242, type=Constant]; +"1243 Constant_5677" [id=1243, type=Constant]; +"1244 Constant_7258" [id=1244, type=Constant]; +"1245 /layers/layers.2/blocks.3/attn/Constant_3" [id=1245, type=Constant]; +"1246 /layers/layers.2/blocks.3/attn/Constant_2" [id=1246, type=Constant]; +"1247 onnx^^Add_2645" [id=1247, label="1247 onnx::Add_2645", type=Constant]; +"1248 Constant_1437" [id=1248, type=Constant]; +"1249 Constant_7259" [id=1249, type=Constant]; +"1250 Constant_1435" [id=1250, type=Constant]; +"1251 Constant_7260" [id=1251, type=Constant]; +"1252 Constant_5707" [id=1252, type=Constant]; +"1253 Constant_5704" [id=1253, type=Constant]; +"1254 Constant_5701" [id=1254, type=Constant]; +"1255 Constant_5731" [id=1255, type=Constant]; +"1256 Constant_5728" [id=1256, type=Constant]; +"1257 Constant_5725" [id=1257, type=Constant]; +"1258 Constant_7263" [id=1258, type=Constant]; +"1259 Constant_7264" [id=1259, type=Constant]; +"1260 Constant_7267" [id=1260, type=Constant]; +"1261 onnx^^Add_2707" [id=1261, label="1261 onnx::Add_2707", type=Constant]; +"1262 Constant_1594" [id=1262, type=Constant]; +"1263 Constant_7268" [id=1263, type=Constant]; +"1264 Constant_1592" [id=1264, type=Constant]; +"1265 Constant_7269" [id=1265, type=Constant]; +"1266 Constant_7272" [id=1266, type=Constant]; +"1267 Constant_7273" [id=1267, type=Constant]; +"1268 Constant_5755" [id=1268, type=Constant]; +"1269 Constant_5752" [id=1269, type=Constant]; +"1270 Constant_5749" [id=1270, type=Constant]; +"1271 Constant_5779" [id=1271, type=Constant]; +"1272 Constant_5776" [id=1272, type=Constant]; +"1273 Constant_5773" [id=1273, type=Constant]; +"1274 Constant_7276" [id=1274, type=Constant]; +"1275 /layers/layers.2/blocks.5/attn/Constant_3" [id=1275, type=Constant]; +"1276 /layers/layers.2/blocks.5/attn/Constant_2" [id=1276, type=Constant]; +"1277 onnx^^Add_2756" [id=1277, label="1277 onnx::Add_2756", type=Constant]; +"1278 Constant_1742" [id=1278, type=Constant]; +"1279 Constant_7277" [id=1279, type=Constant]; +"1280 Constant_1740" [id=1280, type=Constant]; +"1281 Constant_7278" [id=1281, type=Constant]; +"1282 Constant_5803" [id=1282, type=Constant]; +"1283 Constant_5800" [id=1283, type=Constant]; +"1284 Constant_5797" [id=1284, type=Constant]; +"1285 Constant_5827" [id=1285, type=Constant]; +"1286 Constant_5824" [id=1286, 
type=Constant]; +"1287 Constant_5821" [id=1287, type=Constant]; +"1288 Constant_7281" [id=1288, type=Constant]; +"1289 Constant_7282" [id=1289, type=Constant]; +"1290 Constant_5899" [id=1290, type=Constant]; +"1291 Constant_5896" [id=1291, type=Constant]; +"1292 Constant_5893" [id=1292, type=Constant]; +"1293 Constant_5851" [id=1293, type=Constant]; +"1294 Constant_5848" [id=1294, type=Constant]; +"1295 Constant_5845" [id=1295, type=Constant]; +"1296 Constant_5887" [id=1296, type=Constant]; +"1297 Constant_5884" [id=1297, type=Constant]; +"1298 Constant_5881" [id=1298, type=Constant]; +"1299 Constant_5863" [id=1299, type=Constant]; +"1300 Constant_5860" [id=1300, type=Constant]; +"1301 Constant_5857" [id=1301, type=Constant]; +"1302 Constant_7287" [id=1302, type=Constant]; +"1303 onnx^^Add_2828" [id=1303, label="1303 onnx::Add_2828", type=Constant]; +"1304 Constant_1952" [id=1304, type=Constant]; +"1305 Constant_7288" [id=1305, type=Constant]; +"1306 Constant_1950" [id=1306, type=Constant]; +"1307 Constant_7289" [id=1307, type=Constant]; +"1308 Constant_7292" [id=1308, type=Constant]; +"1309 Constant_7293" [id=1309, type=Constant]; +"1310 Constant_7296" [id=1310, type=Constant]; +"1311 onnx^^Add_2877" [id=1311, label="1311 onnx::Add_2877", type=Constant]; +"1312 Constant_2058" [id=1312, type=Constant]; +"1313 Constant_7297" [id=1313, type=Constant]; +"1314 Constant_2056" [id=1314, type=Constant]; +"1315 Constant_7298" [id=1315, type=Constant]; +"1316 Constant_7301" [id=1316, type=Constant]; +"1317 Constant_7302" [id=1317, type=Constant]; +"0 input" -> "1 Multiply_6794" [label="[1, 3, 224, 224]", style=solid]; +"1 Multiply_6794" -> "2 Divide_2169" [label="[1, 3, 224, 224]", style=solid]; +"2 Divide_2169" -> "3 /patch_embed/proj/Conv/WithoutBiases" [label="[1, 3, 224, 224]", style=solid]; +"3 /patch_embed/proj/Conv/WithoutBiases" -> "4 /patch_embed/proj/Conv" [label="[1, 96, 56, 56]", style=solid]; +"4 /patch_embed/proj/Conv" -> "5 /patch_embed/Reshape" [label="[1, 96, 56, 56]", style=solid]; +"4 /patch_embed/proj/Conv" -> "6 /patch_embed/Shape" [label="[1, 96, 56, 56]", style=solid]; +"5 /patch_embed/Reshape" -> "7 /patch_embed/Transpose" [label="[1, 96, 3136]", style=solid]; +"6 /patch_embed/Shape" -> "8 /patch_embed/Slice" [label="[4]", style=dashed]; +"7 /patch_embed/Transpose" -> "9 /patch_embed/norm/Div" [label="[1, 3136, 96]", style=solid]; +"8 /patch_embed/Slice" -> "10 /patch_embed/Concat" [label="[2]", style=dashed]; +"9 /patch_embed/norm/Div" -> "11 /patch_embed/norm/Mul" [label="[1, 3136, 96]", style=solid]; +"10 /patch_embed/Concat" -> "5 /patch_embed/Reshape" [label="[3]", style=dashed]; +"11 /patch_embed/norm/Mul" -> "12 /patch_embed/norm/Add_1" [label="[1, 3136, 96]", style=solid]; +"12 /patch_embed/norm/Add_1" -> "13 /layers/layers.0/blocks.0/Add" [label="[1, 3136, 96]", style=solid]; +"12 /patch_embed/norm/Add_1" -> "14 /layers/layers.0/blocks.0/norm1/Div" [label="[1, 3136, 96]", style=solid]; +"13 /layers/layers.0/blocks.0/Add" -> "15 /layers/layers.0/blocks.0/Add_1" [label="[1, 3136, 96]", style=solid]; +"13 /layers/layers.0/blocks.0/Add" -> "16 /layers/layers.0/blocks.0/norm2/Div" [label="[1, 3136, 96]", style=solid]; +"14 /layers/layers.0/blocks.0/norm1/Div" -> "17 /layers/layers.0/blocks.0/norm1/Mul" [label="[1, 3136, 96]", style=solid]; +"15 /layers/layers.0/blocks.0/Add_1" -> "18 /layers/layers.0/blocks.1/Add" [label="[1, 3136, 96]", style=solid]; +"15 /layers/layers.0/blocks.0/Add_1" -> "19 /layers/layers.0/blocks.1/norm1/Div" [label="[1, 3136, 96]", style=solid]; 
+"16 /layers/layers.0/blocks.0/norm2/Div" -> "20 /layers/layers.0/blocks.0/norm2/Mul" [label="[1, 3136, 96]", style=solid]; +"17 /layers/layers.0/blocks.0/norm1/Mul" -> "21 /layers/layers.0/blocks.0/norm1/Add_1" [label="[1, 3136, 96]", style=solid]; +"18 /layers/layers.0/blocks.1/Add" -> "22 /layers/layers.0/blocks.1/Add_1" [label="[1, 3136, 96]", style=solid]; +"18 /layers/layers.0/blocks.1/Add" -> "23 /layers/layers.0/blocks.1/norm2/Div" [label="[1, 3136, 96]", style=solid]; +"19 /layers/layers.0/blocks.1/norm1/Div" -> "24 /layers/layers.0/blocks.1/norm1/Mul" [label="[1, 3136, 96]", style=solid]; +"20 /layers/layers.0/blocks.0/norm2/Mul" -> "25 /layers/layers.0/blocks.0/norm2/Add_1" [label="[1, 3136, 96]", style=solid]; +"21 /layers/layers.0/blocks.0/norm1/Add_1" -> "26 /layers/layers.0/blocks.0/Reshape_1" [label="[1, 3136, 96]", style=solid]; +"22 /layers/layers.0/blocks.1/Add_1" -> "27 /layers/layers.0/downsample/Reshape" [label="[1, 3136, 96]", style=solid]; +"23 /layers/layers.0/blocks.1/norm2/Div" -> "28 /layers/layers.0/blocks.1/norm2/Mul" [label="[1, 3136, 96]", style=solid]; +"24 /layers/layers.0/blocks.1/norm1/Mul" -> "29 /layers/layers.0/blocks.1/norm1/Add_1" [label="[1, 3136, 96]", style=solid]; +"25 /layers/layers.0/blocks.0/norm2/Add_1" -> "30 /layers/layers.0/blocks.0/norm2/Add_1/smooth_quant_multiply" [label="[1, 3136, 96]", style=solid]; +"26 /layers/layers.0/blocks.0/Reshape_1" -> "31 /layers/layers.0/blocks.0/Transpose" [label="[1, 8, 7, 8, 7, 96]", style=solid]; +"27 /layers/layers.0/downsample/Reshape" -> "32 /layers/layers.0/downsample/Slice" [label="[1, 56, 56, 96]", style=solid]; +"27 /layers/layers.0/downsample/Reshape" -> "33 /layers/layers.0/downsample/Slice_2" [label="[1, 56, 56, 96]", style=solid]; +"28 /layers/layers.0/blocks.1/norm2/Mul" -> "34 /layers/layers.0/blocks.1/norm2/Add_1" [label="[1, 3136, 96]", style=solid]; +"29 /layers/layers.0/blocks.1/norm1/Add_1" -> "35 /layers/layers.0/blocks.1/Reshape" [label="[1, 3136, 96]", style=solid]; +"30 /layers/layers.0/blocks.0/norm2/Add_1/smooth_quant_multiply" -> "36 /layers/layers.0/blocks.0/mlp/fc1/MatMul" [label="[1, 3136, 96]", style=solid]; +"31 /layers/layers.0/blocks.0/Transpose" -> "37 /layers/layers.0/blocks.0/Reshape_2" [label="[1, 8, 8, 7, 7, 96]", style=solid]; +"32 /layers/layers.0/downsample/Slice" -> "38 /layers/layers.0/downsample/Slice_1" [label="[1, 28, 56, 96]", style=solid]; +"32 /layers/layers.0/downsample/Slice" -> "39 /layers/layers.0/downsample/Slice_4" [label="[1, 28, 56, 96]", style=solid]; +"33 /layers/layers.0/downsample/Slice_2" -> "40 /layers/layers.0/downsample/Slice_3" [label="[1, 28, 56, 96]", style=solid]; +"33 /layers/layers.0/downsample/Slice_2" -> "41 /layers/layers.0/downsample/Slice_5" [label="[1, 28, 56, 96]", style=solid]; +"34 /layers/layers.0/blocks.1/norm2/Add_1" -> "42 /layers/layers.0/blocks.1/norm2/Add_1/smooth_quant_multiply" [label="[1, 3136, 96]", style=solid]; +"35 /layers/layers.0/blocks.1/Reshape" -> "43 /layers/layers.0/blocks.1/Slice" [label="[1, 56, 56, 96]", style=solid]; +"35 /layers/layers.0/blocks.1/Reshape" -> "44 /layers/layers.0/blocks.1/Slice_1" [label="[1, 56, 56, 96]", style=solid]; +"36 /layers/layers.0/blocks.0/mlp/fc1/MatMul" -> "45 /layers/layers.0/blocks.0/mlp/fc1/Add" [label="[1, 3136, 384]", style=solid]; +"37 /layers/layers.0/blocks.0/Reshape_2" -> "46 /layers/layers.0/blocks.0/Reshape_3" [label="[64, 7, 7, 96]", style=solid]; +"38 /layers/layers.0/downsample/Slice_1" -> "47 /layers/layers.0/downsample/Concat" [label="[1, 28, 28, 96]", 
style=solid]; +"39 /layers/layers.0/downsample/Slice_4" -> "47 /layers/layers.0/downsample/Concat" [label="[1, 28, 28, 96]", style=solid]; +"40 /layers/layers.0/downsample/Slice_3" -> "47 /layers/layers.0/downsample/Concat" [label="[1, 28, 28, 96]", style=solid]; +"41 /layers/layers.0/downsample/Slice_5" -> "47 /layers/layers.0/downsample/Concat" [label="[1, 28, 28, 96]", style=solid]; +"42 /layers/layers.0/blocks.1/norm2/Add_1/smooth_quant_multiply" -> "48 /layers/layers.0/blocks.1/mlp/fc1/MatMul" [label="[1, 3136, 96]", style=solid]; +"43 /layers/layers.0/blocks.1/Slice" -> "49 /layers/layers.0/blocks.1/Concat" [label="[1, 53, 56, 96]", style=solid]; +"44 /layers/layers.0/blocks.1/Slice_1" -> "49 /layers/layers.0/blocks.1/Concat" [label="[1, 3, 56, 96]", style=solid]; +"45 /layers/layers.0/blocks.0/mlp/fc1/Add" -> "50 /layers/layers.0/blocks.0/mlp/act/Mul_1" [label="[1, 3136, 384]", style=solid]; +"46 /layers/layers.0/blocks.0/Reshape_3" -> "51 /layers/layers.0/blocks.0/Reshape_3/smooth_quant_multiply" [label="[64, 49, 96]", style=solid]; +"47 /layers/layers.0/downsample/Concat" -> "52 /layers/layers.0/downsample/Reshape_1" [label="[1, 28, 28, 384]", style=solid]; +"48 /layers/layers.0/blocks.1/mlp/fc1/MatMul" -> "53 /layers/layers.0/blocks.1/mlp/fc1/Add" [label="[1, 3136, 384]", style=solid]; +"49 /layers/layers.0/blocks.1/Concat" -> "54 /layers/layers.0/blocks.1/Slice_2" [label="[1, 56, 56, 96]", style=solid]; +"49 /layers/layers.0/blocks.1/Concat" -> "55 /layers/layers.0/blocks.1/Slice_3" [label="[1, 56, 56, 96]", style=solid]; +"50 /layers/layers.0/blocks.0/mlp/act/Mul_1" -> "56 /layers/layers.0/blocks.0/mlp/act/Mul_1/smooth_quant_multiply" [label="[1, 3136, 384]", style=solid]; +"51 /layers/layers.0/blocks.0/Reshape_3/smooth_quant_multiply" -> "57 /layers/layers.0/blocks.0/attn/qkv/MatMul" [label="[64, 49, 96]", style=solid]; +"52 /layers/layers.0/downsample/Reshape_1" -> "58 /layers/layers.0/downsample/norm/Div" [label="[1, 784, 384]", style=solid]; +"53 /layers/layers.0/blocks.1/mlp/fc1/Add" -> "59 /layers/layers.0/blocks.1/mlp/act/Mul_1" [label="[1, 3136, 384]", style=solid]; +"54 /layers/layers.0/blocks.1/Slice_2" -> "60 /layers/layers.0/blocks.1/Concat_1" [label="[1, 56, 53, 96]", style=solid]; +"55 /layers/layers.0/blocks.1/Slice_3" -> "60 /layers/layers.0/blocks.1/Concat_1" [label="[1, 56, 3, 96]", style=solid]; +"56 /layers/layers.0/blocks.0/mlp/act/Mul_1/smooth_quant_multiply" -> "61 /layers/layers.0/blocks.0/mlp/fc2/MatMul" [label="[1, 3136, 384]", style=solid]; +"57 /layers/layers.0/blocks.0/attn/qkv/MatMul" -> "62 /layers/layers.0/blocks.0/attn/qkv/Add" [label="[64, 49, 288]", style=solid]; +"58 /layers/layers.0/downsample/norm/Div" -> "63 /layers/layers.0/downsample/norm/Mul" [label="[1, 784, 384]", style=solid]; +"59 /layers/layers.0/blocks.1/mlp/act/Mul_1" -> "64 /layers/layers.0/blocks.1/mlp/act/Mul_1/smooth_quant_multiply" [label="[1, 3136, 384]", style=solid]; +"60 /layers/layers.0/blocks.1/Concat_1" -> "65 /layers/layers.0/blocks.1/Reshape_1" [label="[1, 56, 56, 96]", style=solid]; +"61 /layers/layers.0/blocks.0/mlp/fc2/MatMul" -> "66 /layers/layers.0/blocks.0/mlp/fc2/Add" [label="[1, 3136, 96]", style=solid]; +"62 /layers/layers.0/blocks.0/attn/qkv/Add" -> "67 /layers/layers.0/blocks.0/attn/Reshape" [label="[64, 49, 288]", style=solid]; +"63 /layers/layers.0/downsample/norm/Mul" -> "68 /layers/layers.0/downsample/norm/Add_1" [label="[1, 784, 384]", style=solid]; +"64 /layers/layers.0/blocks.1/mlp/act/Mul_1/smooth_quant_multiply" -> "69 
/layers/layers.0/blocks.1/mlp/fc2/MatMul" [label="[1, 3136, 384]", style=solid]; +"65 /layers/layers.0/blocks.1/Reshape_1" -> "70 /layers/layers.0/blocks.1/Transpose" [label="[1, 8, 7, 8, 7, 96]", style=solid]; +"66 /layers/layers.0/blocks.0/mlp/fc2/Add" -> "15 /layers/layers.0/blocks.0/Add_1" [label="[1, 3136, 96]", style=solid]; +"67 /layers/layers.0/blocks.0/attn/Reshape" -> "71 /layers/layers.0/blocks.0/attn/Transpose" [label="[64, 49, 3, 3, 32]", style=solid]; +"68 /layers/layers.0/downsample/norm/Add_1" -> "72 /layers/layers.0/downsample/norm/Add_1/smooth_quant_multiply" [label="[1, 784, 384]", style=solid]; +"69 /layers/layers.0/blocks.1/mlp/fc2/MatMul" -> "73 /layers/layers.0/blocks.1/mlp/fc2/Add" [label="[1, 3136, 96]", style=solid]; +"70 /layers/layers.0/blocks.1/Transpose" -> "74 /layers/layers.0/blocks.1/Reshape_2" [label="[1, 8, 8, 7, 7, 96]", style=solid]; +"71 /layers/layers.0/blocks.0/attn/Transpose" -> "75 /layers/layers.0/blocks.0/attn/Gather" [label="[3, 64, 3, 49, 32]", style=solid]; +"71 /layers/layers.0/blocks.0/attn/Transpose" -> "76 /layers/layers.0/blocks.0/attn/Gather_1" [label="[3, 64, 3, 49, 32]", style=solid]; +"71 /layers/layers.0/blocks.0/attn/Transpose" -> "77 /layers/layers.0/blocks.0/attn/Gather_2" [label="[3, 64, 3, 49, 32]", style=solid]; +"72 /layers/layers.0/downsample/norm/Add_1/smooth_quant_multiply" -> "78 /layers/layers.0/downsample/reduction/MatMul" [label="[1, 784, 384]", style=solid]; +"73 /layers/layers.0/blocks.1/mlp/fc2/Add" -> "22 /layers/layers.0/blocks.1/Add_1" [label="[1, 3136, 96]", style=solid]; +"74 /layers/layers.0/blocks.1/Reshape_2" -> "79 /layers/layers.0/blocks.1/Reshape_3" [label="[64, 7, 7, 96]", style=solid]; +"75 /layers/layers.0/blocks.0/attn/Gather" -> "80 /layers/layers.0/blocks.0/attn/Mul" [label="[64, 3, 49, 32]", style=solid]; +"76 /layers/layers.0/blocks.0/attn/Gather_1" -> "81 /layers/layers.0/blocks.0/attn/MatMul" [label="[64, 3, 49, 32]", style=solid]; +"77 /layers/layers.0/blocks.0/attn/Gather_2" -> "82 /layers/layers.0/blocks.0/attn/MatMul_1" [label="[64, 3, 49, 32]", style=solid]; +"78 /layers/layers.0/downsample/reduction/MatMul" -> "83 /layers/layers.1/blocks.0/Add" [label="[1, 784, 192]", style=solid]; +"78 /layers/layers.0/downsample/reduction/MatMul" -> "84 /layers/layers.1/blocks.0/norm1/Div" [label="[1, 784, 192]", style=solid]; +"79 /layers/layers.0/blocks.1/Reshape_3" -> "85 /layers/layers.0/blocks.1/Reshape_3/smooth_quant_multiply" [label="[64, 49, 96]", style=solid]; +"80 /layers/layers.0/blocks.0/attn/Mul" -> "81 /layers/layers.0/blocks.0/attn/MatMul" [label="[64, 3, 49, 32]", style=solid]; +"81 /layers/layers.0/blocks.0/attn/MatMul" -> "86 /layers/layers.0/blocks.0/attn/Add" [label="[64, 3, 49, 49]", style=solid]; +"82 /layers/layers.0/blocks.0/attn/MatMul_1" -> "87 /layers/layers.0/blocks.0/attn/Transpose_2" [label="[64, 3, 49, 32]", style=solid]; +"83 /layers/layers.1/blocks.0/Add" -> "88 /layers/layers.1/blocks.0/Add_1" [label="[1, 784, 192]", style=solid]; +"83 /layers/layers.1/blocks.0/Add" -> "89 /layers/layers.1/blocks.0/norm2/Div" [label="[1, 784, 192]", style=solid]; +"84 /layers/layers.1/blocks.0/norm1/Div" -> "90 /layers/layers.1/blocks.0/norm1/Mul" [label="[1, 784, 192]", style=solid]; +"85 /layers/layers.0/blocks.1/Reshape_3/smooth_quant_multiply" -> "91 /layers/layers.0/blocks.1/attn/qkv/MatMul" [label="[64, 49, 96]", style=solid]; +"86 /layers/layers.0/blocks.0/attn/Add" -> "92 /layers/layers.0/blocks.0/attn/softmax/Softmax" [label="[64, 3, 49, 49]", style=solid]; +"87 
/layers/layers.0/blocks.0/attn/Transpose_2" -> "93 /layers/layers.0/blocks.0/attn/Reshape_1" [label="[64, 49, 3, 32]", style=solid]; +"88 /layers/layers.1/blocks.0/Add_1" -> "94 /layers/layers.1/blocks.1/Add" [label="[1, 784, 192]", style=solid]; +"88 /layers/layers.1/blocks.0/Add_1" -> "95 /layers/layers.1/blocks.1/norm1/Div" [label="[1, 784, 192]", style=solid]; +"89 /layers/layers.1/blocks.0/norm2/Div" -> "96 /layers/layers.1/blocks.0/norm2/Mul" [label="[1, 784, 192]", style=solid]; +"90 /layers/layers.1/blocks.0/norm1/Mul" -> "97 /layers/layers.1/blocks.0/norm1/Add_1" [label="[1, 784, 192]", style=solid]; +"91 /layers/layers.0/blocks.1/attn/qkv/MatMul" -> "98 /layers/layers.0/blocks.1/attn/qkv/Add" [label="[64, 49, 288]", style=solid]; +"92 /layers/layers.0/blocks.0/attn/softmax/Softmax" -> "82 /layers/layers.0/blocks.0/attn/MatMul_1" [label="[64, 3, 49, 49]", style=solid]; +"93 /layers/layers.0/blocks.0/attn/Reshape_1" -> "99 /layers/layers.0/blocks.0/attn/Reshape_1/smooth_quant_multiply" [label="[64, 49, 96]", style=solid]; +"94 /layers/layers.1/blocks.1/Add" -> "100 /layers/layers.1/blocks.1/Add_1" [label="[1, 784, 192]", style=solid]; +"94 /layers/layers.1/blocks.1/Add" -> "101 /layers/layers.1/blocks.1/norm2/Div" [label="[1, 784, 192]", style=solid]; +"95 /layers/layers.1/blocks.1/norm1/Div" -> "102 /layers/layers.1/blocks.1/norm1/Mul" [label="[1, 784, 192]", style=solid]; +"96 /layers/layers.1/blocks.0/norm2/Mul" -> "103 /layers/layers.1/blocks.0/norm2/Add_1" [label="[1, 784, 192]", style=solid]; +"97 /layers/layers.1/blocks.0/norm1/Add_1" -> "104 /layers/layers.1/blocks.0/Reshape_1" [label="[1, 784, 192]", style=solid]; +"98 /layers/layers.0/blocks.1/attn/qkv/Add" -> "105 /layers/layers.0/blocks.1/attn/Reshape" [label="[64, 49, 288]", style=solid]; +"99 /layers/layers.0/blocks.0/attn/Reshape_1/smooth_quant_multiply" -> "106 /layers/layers.0/blocks.0/attn/proj/MatMul" [label="[64, 49, 96]", style=solid]; +"100 /layers/layers.1/blocks.1/Add_1" -> "107 /layers/layers.1/downsample/Reshape" [label="[1, 784, 192]", style=solid]; +"101 /layers/layers.1/blocks.1/norm2/Div" -> "108 /layers/layers.1/blocks.1/norm2/Mul" [label="[1, 784, 192]", style=solid]; +"102 /layers/layers.1/blocks.1/norm1/Mul" -> "109 /layers/layers.1/blocks.1/norm1/Add_1" [label="[1, 784, 192]", style=solid]; +"103 /layers/layers.1/blocks.0/norm2/Add_1" -> "110 /layers/layers.1/blocks.0/norm2/Add_1/smooth_quant_multiply" [label="[1, 784, 192]", style=solid]; +"104 /layers/layers.1/blocks.0/Reshape_1" -> "111 /layers/layers.1/blocks.0/Transpose" [label="[1, 4, 7, 4, 7, 192]", style=solid]; +"105 /layers/layers.0/blocks.1/attn/Reshape" -> "112 /layers/layers.0/blocks.1/attn/Transpose" [label="[64, 49, 3, 3, 32]", style=solid]; +"106 /layers/layers.0/blocks.0/attn/proj/MatMul" -> "113 /layers/layers.0/blocks.0/attn/proj/Add" [label="[64, 49, 96]", style=solid]; +"107 /layers/layers.1/downsample/Reshape" -> "114 /layers/layers.1/downsample/Slice" [label="[1, 28, 28, 192]", style=solid]; +"107 /layers/layers.1/downsample/Reshape" -> "115 /layers/layers.1/downsample/Slice_2" [label="[1, 28, 28, 192]", style=solid]; +"108 /layers/layers.1/blocks.1/norm2/Mul" -> "116 /layers/layers.1/blocks.1/norm2/Add_1" [label="[1, 784, 192]", style=solid]; +"109 /layers/layers.1/blocks.1/norm1/Add_1" -> "117 /layers/layers.1/blocks.1/Reshape" [label="[1, 784, 192]", style=solid]; +"110 /layers/layers.1/blocks.0/norm2/Add_1/smooth_quant_multiply" -> "118 /layers/layers.1/blocks.0/mlp/fc1/MatMul" [label="[1, 784, 192]", style=solid]; +"111 
/layers/layers.1/blocks.0/Transpose" -> "119 /layers/layers.1/blocks.0/Reshape_2" [label="[1, 4, 4, 7, 7, 192]", style=solid]; +"112 /layers/layers.0/blocks.1/attn/Transpose" -> "120 /layers/layers.0/blocks.1/attn/Gather" [label="[3, 64, 3, 49, 32]", style=solid]; +"112 /layers/layers.0/blocks.1/attn/Transpose" -> "121 /layers/layers.0/blocks.1/attn/Gather_1" [label="[3, 64, 3, 49, 32]", style=solid]; +"112 /layers/layers.0/blocks.1/attn/Transpose" -> "122 /layers/layers.0/blocks.1/attn/Gather_2" [label="[3, 64, 3, 49, 32]", style=solid]; +"113 /layers/layers.0/blocks.0/attn/proj/Add" -> "123 /layers/layers.0/blocks.0/Reshape_4" [label="[64, 49, 96]", style=solid]; +"114 /layers/layers.1/downsample/Slice" -> "124 /layers/layers.1/downsample/Slice_1" [label="[1, 14, 28, 192]", style=solid]; +"114 /layers/layers.1/downsample/Slice" -> "125 /layers/layers.1/downsample/Slice_4" [label="[1, 14, 28, 192]", style=solid]; +"115 /layers/layers.1/downsample/Slice_2" -> "126 /layers/layers.1/downsample/Slice_3" [label="[1, 14, 28, 192]", style=solid]; +"115 /layers/layers.1/downsample/Slice_2" -> "127 /layers/layers.1/downsample/Slice_5" [label="[1, 14, 28, 192]", style=solid]; +"116 /layers/layers.1/blocks.1/norm2/Add_1" -> "128 /layers/layers.1/blocks.1/norm2/Add_1/smooth_quant_multiply" [label="[1, 784, 192]", style=solid]; +"117 /layers/layers.1/blocks.1/Reshape" -> "129 /layers/layers.1/blocks.1/Slice" [label="[1, 28, 28, 192]", style=solid]; +"117 /layers/layers.1/blocks.1/Reshape" -> "130 /layers/layers.1/blocks.1/Slice_1" [label="[1, 28, 28, 192]", style=solid]; +"118 /layers/layers.1/blocks.0/mlp/fc1/MatMul" -> "131 /layers/layers.1/blocks.0/mlp/fc1/Add" [label="[1, 784, 768]", style=solid]; +"119 /layers/layers.1/blocks.0/Reshape_2" -> "132 /layers/layers.1/blocks.0/Reshape_3" [label="[16, 7, 7, 192]", style=solid]; +"120 /layers/layers.0/blocks.1/attn/Gather" -> "133 /layers/layers.0/blocks.1/attn/Mul" [label="[64, 3, 49, 32]", style=solid]; +"121 /layers/layers.0/blocks.1/attn/Gather_1" -> "134 /layers/layers.0/blocks.1/attn/MatMul" [label="[64, 3, 49, 32]", style=solid]; +"122 /layers/layers.0/blocks.1/attn/Gather_2" -> "135 /layers/layers.0/blocks.1/attn/MatMul_1" [label="[64, 3, 49, 32]", style=solid]; +"123 /layers/layers.0/blocks.0/Reshape_4" -> "136 /layers/layers.0/blocks.0/Reshape_5" [label="[64, 7, 7, 96]", style=solid]; +"124 /layers/layers.1/downsample/Slice_1" -> "137 /layers/layers.1/downsample/Concat" [label="[1, 14, 14, 192]", style=solid]; +"125 /layers/layers.1/downsample/Slice_4" -> "137 /layers/layers.1/downsample/Concat" [label="[1, 14, 14, 192]", style=solid]; +"126 /layers/layers.1/downsample/Slice_3" -> "137 /layers/layers.1/downsample/Concat" [label="[1, 14, 14, 192]", style=solid]; +"127 /layers/layers.1/downsample/Slice_5" -> "137 /layers/layers.1/downsample/Concat" [label="[1, 14, 14, 192]", style=solid]; +"128 /layers/layers.1/blocks.1/norm2/Add_1/smooth_quant_multiply" -> "138 /layers/layers.1/blocks.1/mlp/fc1/MatMul" [label="[1, 784, 192]", style=solid]; +"129 /layers/layers.1/blocks.1/Slice" -> "139 /layers/layers.1/blocks.1/Concat" [label="[1, 25, 28, 192]", style=solid]; +"130 /layers/layers.1/blocks.1/Slice_1" -> "139 /layers/layers.1/blocks.1/Concat" [label="[1, 3, 28, 192]", style=solid]; +"131 /layers/layers.1/blocks.0/mlp/fc1/Add" -> "140 /layers/layers.1/blocks.0/mlp/act/Mul_1" [label="[1, 784, 768]", style=solid]; +"132 /layers/layers.1/blocks.0/Reshape_3" -> "141 /layers/layers.1/blocks.0/Reshape_3/smooth_quant_multiply" [label="[16, 49, 192]", 
style=solid]; +"133 /layers/layers.0/blocks.1/attn/Mul" -> "134 /layers/layers.0/blocks.1/attn/MatMul" [label="[64, 3, 49, 32]", style=solid]; +"134 /layers/layers.0/blocks.1/attn/MatMul" -> "142 /layers/layers.0/blocks.1/attn/Add" [label="[64, 3, 49, 49]", style=solid]; +"135 /layers/layers.0/blocks.1/attn/MatMul_1" -> "143 /layers/layers.0/blocks.1/attn/Transpose_2" [label="[64, 3, 49, 32]", style=solid]; +"136 /layers/layers.0/blocks.0/Reshape_5" -> "144 /layers/layers.0/blocks.0/Transpose_1" [label="[1, 8, 8, 7, 7, 96]", style=solid]; +"137 /layers/layers.1/downsample/Concat" -> "145 /layers/layers.1/downsample/Reshape_1" [label="[1, 14, 14, 768]", style=solid]; +"138 /layers/layers.1/blocks.1/mlp/fc1/MatMul" -> "146 /layers/layers.1/blocks.1/mlp/fc1/Add" [label="[1, 784, 768]", style=solid]; +"139 /layers/layers.1/blocks.1/Concat" -> "147 /layers/layers.1/blocks.1/Slice_2" [label="[1, 28, 28, 192]", style=solid]; +"139 /layers/layers.1/blocks.1/Concat" -> "148 /layers/layers.1/blocks.1/Slice_3" [label="[1, 28, 28, 192]", style=solid]; +"140 /layers/layers.1/blocks.0/mlp/act/Mul_1" -> "149 /layers/layers.1/blocks.0/mlp/act/Mul_1/smooth_quant_multiply" [label="[1, 784, 768]", style=solid]; +"141 /layers/layers.1/blocks.0/Reshape_3/smooth_quant_multiply" -> "150 /layers/layers.1/blocks.0/attn/qkv/MatMul" [label="[16, 49, 192]", style=solid]; +"142 /layers/layers.0/blocks.1/attn/Add" -> "151 /layers/layers.0/blocks.1/attn/Reshape_1" [label="[64, 3, 49, 49]", style=solid]; +"143 /layers/layers.0/blocks.1/attn/Transpose_2" -> "152 /layers/layers.0/blocks.1/attn/Reshape_3" [label="[64, 49, 3, 32]", style=solid]; +"144 /layers/layers.0/blocks.0/Transpose_1" -> "153 /layers/layers.0/blocks.0/Reshape_6" [label="[1, 8, 7, 8, 7, 96]", style=solid]; +"145 /layers/layers.1/downsample/Reshape_1" -> "154 /layers/layers.1/downsample/norm/Div" [label="[1, 196, 768]", style=solid]; +"146 /layers/layers.1/blocks.1/mlp/fc1/Add" -> "155 /layers/layers.1/blocks.1/mlp/act/Mul_1" [label="[1, 784, 768]", style=solid]; +"147 /layers/layers.1/blocks.1/Slice_2" -> "156 /layers/layers.1/blocks.1/Concat_1" [label="[1, 28, 25, 192]", style=solid]; +"148 /layers/layers.1/blocks.1/Slice_3" -> "156 /layers/layers.1/blocks.1/Concat_1" [label="[1, 28, 3, 192]", style=solid]; +"149 /layers/layers.1/blocks.0/mlp/act/Mul_1/smooth_quant_multiply" -> "157 /layers/layers.1/blocks.0/mlp/fc2/MatMul" [label="[1, 784, 768]", style=solid]; +"150 /layers/layers.1/blocks.0/attn/qkv/MatMul" -> "158 /layers/layers.1/blocks.0/attn/qkv/Add" [label="[16, 49, 576]", style=solid]; +"151 /layers/layers.0/blocks.1/attn/Reshape_1" -> "159 /layers/layers.0/blocks.1/attn/Add_1" [label="[1, 64, 3, 49, 49]", style=solid]; +"152 /layers/layers.0/blocks.1/attn/Reshape_3" -> "160 /layers/layers.0/blocks.1/attn/Reshape_3/smooth_quant_multiply" [label="[64, 49, 96]", style=solid]; +"153 /layers/layers.0/blocks.0/Reshape_6" -> "161 /layers/layers.0/blocks.0/Reshape_7" [label="[1, 56, 56, 96]", style=solid]; +"154 /layers/layers.1/downsample/norm/Div" -> "162 /layers/layers.1/downsample/norm/Mul" [label="[1, 196, 768]", style=solid]; +"155 /layers/layers.1/blocks.1/mlp/act/Mul_1" -> "163 /layers/layers.1/blocks.1/mlp/act/Mul_1/smooth_quant_multiply" [label="[1, 784, 768]", style=solid]; +"156 /layers/layers.1/blocks.1/Concat_1" -> "164 /layers/layers.1/blocks.1/Reshape_1" [label="[1, 28, 28, 192]", style=solid]; +"157 /layers/layers.1/blocks.0/mlp/fc2/MatMul" -> "165 /layers/layers.1/blocks.0/mlp/fc2/Add" [label="[1, 784, 192]", style=solid]; +"158 
/layers/layers.1/blocks.0/attn/qkv/Add" -> "166 /layers/layers.1/blocks.0/attn/Reshape" [label="[16, 49, 576]", style=solid]; +"159 /layers/layers.0/blocks.1/attn/Add_1" -> "167 /layers/layers.0/blocks.1/attn/Reshape_2" [label="[1, 64, 3, 49, 49]", style=solid]; +"160 /layers/layers.0/blocks.1/attn/Reshape_3/smooth_quant_multiply" -> "168 /layers/layers.0/blocks.1/attn/proj/MatMul" [label="[64, 49, 96]", style=solid]; +"161 /layers/layers.0/blocks.0/Reshape_7" -> "13 /layers/layers.0/blocks.0/Add" [label="[1, 3136, 96]", style=solid]; +"162 /layers/layers.1/downsample/norm/Mul" -> "169 /layers/layers.1/downsample/norm/Add_1" [label="[1, 196, 768]", style=solid]; +"163 /layers/layers.1/blocks.1/mlp/act/Mul_1/smooth_quant_multiply" -> "170 /layers/layers.1/blocks.1/mlp/fc2/MatMul" [label="[1, 784, 768]", style=solid]; +"164 /layers/layers.1/blocks.1/Reshape_1" -> "171 /layers/layers.1/blocks.1/Transpose" [label="[1, 4, 7, 4, 7, 192]", style=solid]; +"165 /layers/layers.1/blocks.0/mlp/fc2/Add" -> "88 /layers/layers.1/blocks.0/Add_1" [label="[1, 784, 192]", style=solid]; +"166 /layers/layers.1/blocks.0/attn/Reshape" -> "172 /layers/layers.1/blocks.0/attn/Transpose" [label="[16, 49, 3, 6, 32]", style=solid]; +"167 /layers/layers.0/blocks.1/attn/Reshape_2" -> "173 /layers/layers.0/blocks.1/attn/softmax/Softmax" [label="[64, 3, 49, 49]", style=solid]; +"168 /layers/layers.0/blocks.1/attn/proj/MatMul" -> "174 /layers/layers.0/blocks.1/attn/proj/Add" [label="[64, 49, 96]", style=solid]; +"169 /layers/layers.1/downsample/norm/Add_1" -> "175 /layers/layers.1/downsample/norm/Add_1/smooth_quant_multiply" [label="[1, 196, 768]", style=solid]; +"170 /layers/layers.1/blocks.1/mlp/fc2/MatMul" -> "176 /layers/layers.1/blocks.1/mlp/fc2/Add" [label="[1, 784, 192]", style=solid]; +"171 /layers/layers.1/blocks.1/Transpose" -> "177 /layers/layers.1/blocks.1/Reshape_2" [label="[1, 4, 4, 7, 7, 192]", style=solid]; +"172 /layers/layers.1/blocks.0/attn/Transpose" -> "178 /layers/layers.1/blocks.0/attn/Gather" [label="[3, 16, 6, 49, 32]", style=solid]; +"172 /layers/layers.1/blocks.0/attn/Transpose" -> "179 /layers/layers.1/blocks.0/attn/Gather_1" [label="[3, 16, 6, 49, 32]", style=solid]; +"172 /layers/layers.1/blocks.0/attn/Transpose" -> "180 /layers/layers.1/blocks.0/attn/Gather_2" [label="[3, 16, 6, 49, 32]", style=solid]; +"173 /layers/layers.0/blocks.1/attn/softmax/Softmax" -> "135 /layers/layers.0/blocks.1/attn/MatMul_1" [label="[64, 3, 49, 49]", style=solid]; +"174 /layers/layers.0/blocks.1/attn/proj/Add" -> "181 /layers/layers.0/blocks.1/Reshape_4" [label="[64, 49, 96]", style=solid]; +"175 /layers/layers.1/downsample/norm/Add_1/smooth_quant_multiply" -> "182 /layers/layers.1/downsample/reduction/MatMul" [label="[1, 196, 768]", style=solid]; +"176 /layers/layers.1/blocks.1/mlp/fc2/Add" -> "100 /layers/layers.1/blocks.1/Add_1" [label="[1, 784, 192]", style=solid]; +"177 /layers/layers.1/blocks.1/Reshape_2" -> "183 /layers/layers.1/blocks.1/Reshape_3" [label="[16, 7, 7, 192]", style=solid]; +"178 /layers/layers.1/blocks.0/attn/Gather" -> "184 /layers/layers.1/blocks.0/attn/Mul" [label="[16, 6, 49, 32]", style=solid]; +"179 /layers/layers.1/blocks.0/attn/Gather_1" -> "185 /layers/layers.1/blocks.0/attn/MatMul" [label="[16, 6, 49, 32]", style=solid]; +"180 /layers/layers.1/blocks.0/attn/Gather_2" -> "186 /layers/layers.1/blocks.0/attn/MatMul_1" [label="[16, 6, 49, 32]", style=solid]; +"181 /layers/layers.0/blocks.1/Reshape_4" -> "187 /layers/layers.0/blocks.1/Reshape_5" [label="[64, 7, 7, 96]", style=solid]; 
+"182 /layers/layers.1/downsample/reduction/MatMul" -> "188 /layers/layers.2/blocks.0/Add" [label="[1, 196, 384]", style=solid]; +"182 /layers/layers.1/downsample/reduction/MatMul" -> "189 /layers/layers.2/blocks.0/norm1/Div" [label="[1, 196, 384]", style=solid]; +"183 /layers/layers.1/blocks.1/Reshape_3" -> "190 /layers/layers.1/blocks.1/Reshape_3/smooth_quant_multiply" [label="[16, 49, 192]", style=solid]; +"184 /layers/layers.1/blocks.0/attn/Mul" -> "185 /layers/layers.1/blocks.0/attn/MatMul" [label="[16, 6, 49, 32]", style=solid]; +"185 /layers/layers.1/blocks.0/attn/MatMul" -> "191 /layers/layers.1/blocks.0/attn/Add" [label="[16, 6, 49, 49]", style=solid]; +"186 /layers/layers.1/blocks.0/attn/MatMul_1" -> "192 /layers/layers.1/blocks.0/attn/Transpose_2" [label="[16, 6, 49, 32]", style=solid]; +"187 /layers/layers.0/blocks.1/Reshape_5" -> "193 /layers/layers.0/blocks.1/Transpose_1" [label="[1, 8, 8, 7, 7, 96]", style=solid]; +"188 /layers/layers.2/blocks.0/Add" -> "194 /layers/layers.2/blocks.0/Add_1" [label="[1, 196, 384]", style=solid]; +"188 /layers/layers.2/blocks.0/Add" -> "195 /layers/layers.2/blocks.0/norm2/Div" [label="[1, 196, 384]", style=solid]; +"189 /layers/layers.2/blocks.0/norm1/Div" -> "196 /layers/layers.2/blocks.0/norm1/Mul" [label="[1, 196, 384]", style=solid]; +"190 /layers/layers.1/blocks.1/Reshape_3/smooth_quant_multiply" -> "197 /layers/layers.1/blocks.1/attn/qkv/MatMul" [label="[16, 49, 192]", style=solid]; +"191 /layers/layers.1/blocks.0/attn/Add" -> "198 /layers/layers.1/blocks.0/attn/softmax/Softmax" [label="[16, 6, 49, 49]", style=solid]; +"192 /layers/layers.1/blocks.0/attn/Transpose_2" -> "199 /layers/layers.1/blocks.0/attn/Reshape_1" [label="[16, 49, 6, 32]", style=solid]; +"193 /layers/layers.0/blocks.1/Transpose_1" -> "200 /layers/layers.0/blocks.1/Reshape_6" [label="[1, 8, 7, 8, 7, 96]", style=solid]; +"194 /layers/layers.2/blocks.0/Add_1" -> "201 /layers/layers.2/blocks.1/Add" [label="[1, 196, 384]", style=solid]; +"194 /layers/layers.2/blocks.0/Add_1" -> "202 /layers/layers.2/blocks.1/norm1/Div" [label="[1, 196, 384]", style=solid]; +"195 /layers/layers.2/blocks.0/norm2/Div" -> "203 /layers/layers.2/blocks.0/norm2/Mul" [label="[1, 196, 384]", style=solid]; +"196 /layers/layers.2/blocks.0/norm1/Mul" -> "204 /layers/layers.2/blocks.0/norm1/Add_1" [label="[1, 196, 384]", style=solid]; +"197 /layers/layers.1/blocks.1/attn/qkv/MatMul" -> "205 /layers/layers.1/blocks.1/attn/qkv/Add" [label="[16, 49, 576]", style=solid]; +"198 /layers/layers.1/blocks.0/attn/softmax/Softmax" -> "186 /layers/layers.1/blocks.0/attn/MatMul_1" [label="[16, 6, 49, 49]", style=solid]; +"199 /layers/layers.1/blocks.0/attn/Reshape_1" -> "206 /layers/layers.1/blocks.0/attn/Reshape_1/smooth_quant_multiply" [label="[16, 49, 192]", style=solid]; +"200 /layers/layers.0/blocks.1/Reshape_6" -> "207 /layers/layers.0/blocks.1/Slice_4" [label="[1, 56, 56, 96]", style=solid]; +"200 /layers/layers.0/blocks.1/Reshape_6" -> "208 /layers/layers.0/blocks.1/Slice_5" [label="[1, 56, 56, 96]", style=solid]; +"201 /layers/layers.2/blocks.1/Add" -> "209 /layers/layers.2/blocks.1/Add_1" [label="[1, 196, 384]", style=solid]; +"201 /layers/layers.2/blocks.1/Add" -> "210 /layers/layers.2/blocks.1/norm2/Div" [label="[1, 196, 384]", style=solid]; +"202 /layers/layers.2/blocks.1/norm1/Div" -> "211 /layers/layers.2/blocks.1/norm1/Mul" [label="[1, 196, 384]", style=solid]; +"203 /layers/layers.2/blocks.0/norm2/Mul" -> "212 /layers/layers.2/blocks.0/norm2/Add_1" [label="[1, 196, 384]", style=solid]; +"204 
/layers/layers.2/blocks.0/norm1/Add_1" -> "213 /layers/layers.2/blocks.0/Reshape_1" [label="[1, 196, 384]", style=solid]; +"205 /layers/layers.1/blocks.1/attn/qkv/Add" -> "214 /layers/layers.1/blocks.1/attn/Reshape" [label="[16, 49, 576]", style=solid]; +"206 /layers/layers.1/blocks.0/attn/Reshape_1/smooth_quant_multiply" -> "215 /layers/layers.1/blocks.0/attn/proj/MatMul" [label="[16, 49, 192]", style=solid]; +"207 /layers/layers.0/blocks.1/Slice_4" -> "216 /layers/layers.0/blocks.1/Concat_2" [label="[1, 3, 56, 96]", style=solid]; +"208 /layers/layers.0/blocks.1/Slice_5" -> "216 /layers/layers.0/blocks.1/Concat_2" [label="[1, 53, 56, 96]", style=solid]; +"209 /layers/layers.2/blocks.1/Add_1" -> "217 /layers/layers.2/blocks.2/Add" [label="[1, 196, 384]", style=solid]; +"209 /layers/layers.2/blocks.1/Add_1" -> "218 /layers/layers.2/blocks.2/norm1/Div" [label="[1, 196, 384]", style=solid]; +"210 /layers/layers.2/blocks.1/norm2/Div" -> "219 /layers/layers.2/blocks.1/norm2/Mul" [label="[1, 196, 384]", style=solid]; +"211 /layers/layers.2/blocks.1/norm1/Mul" -> "220 /layers/layers.2/blocks.1/norm1/Add_1" [label="[1, 196, 384]", style=solid]; +"212 /layers/layers.2/blocks.0/norm2/Add_1" -> "221 /layers/layers.2/blocks.0/norm2/Add_1/smooth_quant_multiply" [label="[1, 196, 384]", style=solid]; +"213 /layers/layers.2/blocks.0/Reshape_1" -> "222 /layers/layers.2/blocks.0/Transpose" [label="[1, 2, 7, 2, 7, 384]", style=solid]; +"214 /layers/layers.1/blocks.1/attn/Reshape" -> "223 /layers/layers.1/blocks.1/attn/Transpose" [label="[16, 49, 3, 6, 32]", style=solid]; +"215 /layers/layers.1/blocks.0/attn/proj/MatMul" -> "224 /layers/layers.1/blocks.0/attn/proj/Add" [label="[16, 49, 192]", style=solid]; +"216 /layers/layers.0/blocks.1/Concat_2" -> "225 /layers/layers.0/blocks.1/Slice_6" [label="[1, 56, 56, 96]", style=solid]; +"216 /layers/layers.0/blocks.1/Concat_2" -> "226 /layers/layers.0/blocks.1/Slice_7" [label="[1, 56, 56, 96]", style=solid]; +"217 /layers/layers.2/blocks.2/Add" -> "227 /layers/layers.2/blocks.2/Add_1" [label="[1, 196, 384]", style=solid]; +"217 /layers/layers.2/blocks.2/Add" -> "228 /layers/layers.2/blocks.2/norm2/Div" [label="[1, 196, 384]", style=solid]; +"218 /layers/layers.2/blocks.2/norm1/Div" -> "229 /layers/layers.2/blocks.2/norm1/Mul" [label="[1, 196, 384]", style=solid]; +"219 /layers/layers.2/blocks.1/norm2/Mul" -> "230 /layers/layers.2/blocks.1/norm2/Add_1" [label="[1, 196, 384]", style=solid]; +"220 /layers/layers.2/blocks.1/norm1/Add_1" -> "231 /layers/layers.2/blocks.1/Reshape" [label="[1, 196, 384]", style=solid]; +"221 /layers/layers.2/blocks.0/norm2/Add_1/smooth_quant_multiply" -> "232 /layers/layers.2/blocks.0/mlp/fc1/MatMul" [label="[1, 196, 384]", style=solid]; +"222 /layers/layers.2/blocks.0/Transpose" -> "233 /layers/layers.2/blocks.0/Reshape_2" [label="[1, 2, 2, 7, 7, 384]", style=solid]; +"223 /layers/layers.1/blocks.1/attn/Transpose" -> "234 /layers/layers.1/blocks.1/attn/Gather" [label="[3, 16, 6, 49, 32]", style=solid]; +"223 /layers/layers.1/blocks.1/attn/Transpose" -> "235 /layers/layers.1/blocks.1/attn/Gather_1" [label="[3, 16, 6, 49, 32]", style=solid]; +"223 /layers/layers.1/blocks.1/attn/Transpose" -> "236 /layers/layers.1/blocks.1/attn/Gather_2" [label="[3, 16, 6, 49, 32]", style=solid]; +"224 /layers/layers.1/blocks.0/attn/proj/Add" -> "237 /layers/layers.1/blocks.0/Reshape_4" [label="[16, 49, 192]", style=solid]; +"225 /layers/layers.0/blocks.1/Slice_6" -> "238 /layers/layers.0/blocks.1/Concat_3" [label="[1, 56, 3, 96]", style=solid]; +"226 
/layers/layers.0/blocks.1/Slice_7" -> "238 /layers/layers.0/blocks.1/Concat_3" [label="[1, 56, 53, 96]", style=solid]; +"227 /layers/layers.2/blocks.2/Add_1" -> "239 /layers/layers.2/blocks.3/Add" [label="[1, 196, 384]", style=solid]; +"227 /layers/layers.2/blocks.2/Add_1" -> "240 /layers/layers.2/blocks.3/norm1/Div" [label="[1, 196, 384]", style=solid]; +"228 /layers/layers.2/blocks.2/norm2/Div" -> "241 /layers/layers.2/blocks.2/norm2/Mul" [label="[1, 196, 384]", style=solid]; +"229 /layers/layers.2/blocks.2/norm1/Mul" -> "242 /layers/layers.2/blocks.2/norm1/Add_1" [label="[1, 196, 384]", style=solid]; +"230 /layers/layers.2/blocks.1/norm2/Add_1" -> "243 /layers/layers.2/blocks.1/norm2/Add_1/smooth_quant_multiply" [label="[1, 196, 384]", style=solid]; +"231 /layers/layers.2/blocks.1/Reshape" -> "244 /layers/layers.2/blocks.1/Slice" [label="[1, 14, 14, 384]", style=solid]; +"231 /layers/layers.2/blocks.1/Reshape" -> "245 /layers/layers.2/blocks.1/Slice_1" [label="[1, 14, 14, 384]", style=solid]; +"232 /layers/layers.2/blocks.0/mlp/fc1/MatMul" -> "246 /layers/layers.2/blocks.0/mlp/fc1/Add" [label="[1, 196, 1536]", style=solid]; +"233 /layers/layers.2/blocks.0/Reshape_2" -> "247 /layers/layers.2/blocks.0/Reshape_3" [label="[4, 7, 7, 384]", style=solid]; +"234 /layers/layers.1/blocks.1/attn/Gather" -> "248 /layers/layers.1/blocks.1/attn/Mul" [label="[16, 6, 49, 32]", style=solid]; +"235 /layers/layers.1/blocks.1/attn/Gather_1" -> "249 /layers/layers.1/blocks.1/attn/MatMul" [label="[16, 6, 49, 32]", style=solid]; +"236 /layers/layers.1/blocks.1/attn/Gather_2" -> "250 /layers/layers.1/blocks.1/attn/MatMul_1" [label="[16, 6, 49, 32]", style=solid]; +"237 /layers/layers.1/blocks.0/Reshape_4" -> "251 /layers/layers.1/blocks.0/Reshape_5" [label="[16, 7, 7, 192]", style=solid]; +"238 /layers/layers.0/blocks.1/Concat_3" -> "252 /layers/layers.0/blocks.1/Reshape_7" [label="[1, 56, 56, 96]", style=solid]; +"239 /layers/layers.2/blocks.3/Add" -> "253 /layers/layers.2/blocks.3/Add_1" [label="[1, 196, 384]", style=solid]; +"239 /layers/layers.2/blocks.3/Add" -> "254 /layers/layers.2/blocks.3/norm2/Div" [label="[1, 196, 384]", style=solid]; +"240 /layers/layers.2/blocks.3/norm1/Div" -> "255 /layers/layers.2/blocks.3/norm1/Mul" [label="[1, 196, 384]", style=solid]; +"241 /layers/layers.2/blocks.2/norm2/Mul" -> "256 /layers/layers.2/blocks.2/norm2/Add_1" [label="[1, 196, 384]", style=solid]; +"242 /layers/layers.2/blocks.2/norm1/Add_1" -> "257 /layers/layers.2/blocks.2/Reshape_1" [label="[1, 196, 384]", style=solid]; +"243 /layers/layers.2/blocks.1/norm2/Add_1/smooth_quant_multiply" -> "258 /layers/layers.2/blocks.1/mlp/fc1/MatMul" [label="[1, 196, 384]", style=solid]; +"244 /layers/layers.2/blocks.1/Slice" -> "259 /layers/layers.2/blocks.1/Concat" [label="[1, 11, 14, 384]", style=solid]; +"245 /layers/layers.2/blocks.1/Slice_1" -> "259 /layers/layers.2/blocks.1/Concat" [label="[1, 3, 14, 384]", style=solid]; +"246 /layers/layers.2/blocks.0/mlp/fc1/Add" -> "260 /layers/layers.2/blocks.0/mlp/act/Mul_1" [label="[1, 196, 1536]", style=solid]; +"247 /layers/layers.2/blocks.0/Reshape_3" -> "261 /layers/layers.2/blocks.0/Reshape_3/smooth_quant_multiply" [label="[4, 49, 384]", style=solid]; +"248 /layers/layers.1/blocks.1/attn/Mul" -> "249 /layers/layers.1/blocks.1/attn/MatMul" [label="[16, 6, 49, 32]", style=solid]; +"249 /layers/layers.1/blocks.1/attn/MatMul" -> "262 /layers/layers.1/blocks.1/attn/Add" [label="[16, 6, 49, 49]", style=solid]; +"250 /layers/layers.1/blocks.1/attn/MatMul_1" -> "263 
/layers/layers.1/blocks.1/attn/Transpose_2" [label="[16, 6, 49, 32]", style=solid]; +"251 /layers/layers.1/blocks.0/Reshape_5" -> "264 /layers/layers.1/blocks.0/Transpose_1" [label="[1, 4, 4, 7, 7, 192]", style=solid]; +"252 /layers/layers.0/blocks.1/Reshape_7" -> "18 /layers/layers.0/blocks.1/Add" [label="[1, 3136, 96]", style=solid]; +"253 /layers/layers.2/blocks.3/Add_1" -> "265 /layers/layers.2/blocks.4/Add" [label="[1, 196, 384]", style=solid]; +"253 /layers/layers.2/blocks.3/Add_1" -> "266 /layers/layers.2/blocks.4/norm1/Div" [label="[1, 196, 384]", style=solid]; +"254 /layers/layers.2/blocks.3/norm2/Div" -> "267 /layers/layers.2/blocks.3/norm2/Mul" [label="[1, 196, 384]", style=solid]; +"255 /layers/layers.2/blocks.3/norm1/Mul" -> "268 /layers/layers.2/blocks.3/norm1/Add_1" [label="[1, 196, 384]", style=solid]; +"256 /layers/layers.2/blocks.2/norm2/Add_1" -> "269 /layers/layers.2/blocks.2/norm2/Add_1/smooth_quant_multiply" [label="[1, 196, 384]", style=solid]; +"257 /layers/layers.2/blocks.2/Reshape_1" -> "270 /layers/layers.2/blocks.2/Transpose" [label="[1, 2, 7, 2, 7, 384]", style=solid]; +"258 /layers/layers.2/blocks.1/mlp/fc1/MatMul" -> "271 /layers/layers.2/blocks.1/mlp/fc1/Add" [label="[1, 196, 1536]", style=solid]; +"259 /layers/layers.2/blocks.1/Concat" -> "272 /layers/layers.2/blocks.1/Slice_2" [label="[1, 14, 14, 384]", style=solid]; +"259 /layers/layers.2/blocks.1/Concat" -> "273 /layers/layers.2/blocks.1/Slice_3" [label="[1, 14, 14, 384]", style=solid]; +"260 /layers/layers.2/blocks.0/mlp/act/Mul_1" -> "274 /layers/layers.2/blocks.0/mlp/act/Mul_1/smooth_quant_multiply" [label="[1, 196, 1536]", style=solid]; +"261 /layers/layers.2/blocks.0/Reshape_3/smooth_quant_multiply" -> "275 /layers/layers.2/blocks.0/attn/qkv/MatMul" [label="[4, 49, 384]", style=solid]; +"262 /layers/layers.1/blocks.1/attn/Add" -> "276 /layers/layers.1/blocks.1/attn/Reshape_1" [label="[16, 6, 49, 49]", style=solid]; +"263 /layers/layers.1/blocks.1/attn/Transpose_2" -> "277 /layers/layers.1/blocks.1/attn/Reshape_3" [label="[16, 49, 6, 32]", style=solid]; +"264 /layers/layers.1/blocks.0/Transpose_1" -> "278 /layers/layers.1/blocks.0/Reshape_6" [label="[1, 4, 7, 4, 7, 192]", style=solid]; +"265 /layers/layers.2/blocks.4/Add" -> "279 /layers/layers.2/blocks.4/Add_1" [label="[1, 196, 384]", style=solid]; +"265 /layers/layers.2/blocks.4/Add" -> "280 /layers/layers.2/blocks.4/norm2/Div" [label="[1, 196, 384]", style=solid]; +"266 /layers/layers.2/blocks.4/norm1/Div" -> "281 /layers/layers.2/blocks.4/norm1/Mul" [label="[1, 196, 384]", style=solid]; +"267 /layers/layers.2/blocks.3/norm2/Mul" -> "282 /layers/layers.2/blocks.3/norm2/Add_1" [label="[1, 196, 384]", style=solid]; +"268 /layers/layers.2/blocks.3/norm1/Add_1" -> "283 /layers/layers.2/blocks.3/Reshape" [label="[1, 196, 384]", style=solid]; +"269 /layers/layers.2/blocks.2/norm2/Add_1/smooth_quant_multiply" -> "284 /layers/layers.2/blocks.2/mlp/fc1/MatMul" [label="[1, 196, 384]", style=solid]; +"270 /layers/layers.2/blocks.2/Transpose" -> "285 /layers/layers.2/blocks.2/Reshape_2" [label="[1, 2, 2, 7, 7, 384]", style=solid]; +"271 /layers/layers.2/blocks.1/mlp/fc1/Add" -> "286 /layers/layers.2/blocks.1/mlp/act/Mul_1" [label="[1, 196, 1536]", style=solid]; +"272 /layers/layers.2/blocks.1/Slice_2" -> "287 /layers/layers.2/blocks.1/Concat_1" [label="[1, 14, 11, 384]", style=solid]; +"273 /layers/layers.2/blocks.1/Slice_3" -> "287 /layers/layers.2/blocks.1/Concat_1" [label="[1, 14, 3, 384]", style=solid]; +"274 
/layers/layers.2/blocks.0/mlp/act/Mul_1/smooth_quant_multiply" -> "288 /layers/layers.2/blocks.0/mlp/fc2/MatMul" [label="[1, 196, 1536]", style=solid]; +"275 /layers/layers.2/blocks.0/attn/qkv/MatMul" -> "289 /layers/layers.2/blocks.0/attn/qkv/Add" [label="[4, 49, 1152]", style=solid]; +"276 /layers/layers.1/blocks.1/attn/Reshape_1" -> "290 /layers/layers.1/blocks.1/attn/Add_1" [label="[1, 16, 6, 49, 49]", style=solid]; +"277 /layers/layers.1/blocks.1/attn/Reshape_3" -> "291 /layers/layers.1/blocks.1/attn/Reshape_3/smooth_quant_multiply" [label="[16, 49, 192]", style=solid]; +"278 /layers/layers.1/blocks.0/Reshape_6" -> "292 /layers/layers.1/blocks.0/Reshape_7" [label="[1, 28, 28, 192]", style=solid]; +"279 /layers/layers.2/blocks.4/Add_1" -> "293 /layers/layers.2/blocks.5/Add" [label="[1, 196, 384]", style=solid]; +"279 /layers/layers.2/blocks.4/Add_1" -> "294 /layers/layers.2/blocks.5/norm1/Div" [label="[1, 196, 384]", style=solid]; +"280 /layers/layers.2/blocks.4/norm2/Div" -> "295 /layers/layers.2/blocks.4/norm2/Mul" [label="[1, 196, 384]", style=solid]; +"281 /layers/layers.2/blocks.4/norm1/Mul" -> "296 /layers/layers.2/blocks.4/norm1/Add_1" [label="[1, 196, 384]", style=solid]; +"282 /layers/layers.2/blocks.3/norm2/Add_1" -> "297 /layers/layers.2/blocks.3/norm2/Add_1/smooth_quant_multiply" [label="[1, 196, 384]", style=solid]; +"283 /layers/layers.2/blocks.3/Reshape" -> "298 /layers/layers.2/blocks.3/Slice" [label="[1, 14, 14, 384]", style=solid]; +"283 /layers/layers.2/blocks.3/Reshape" -> "299 /layers/layers.2/blocks.3/Slice_1" [label="[1, 14, 14, 384]", style=solid]; +"284 /layers/layers.2/blocks.2/mlp/fc1/MatMul" -> "300 /layers/layers.2/blocks.2/mlp/fc1/Add" [label="[1, 196, 1536]", style=solid]; +"285 /layers/layers.2/blocks.2/Reshape_2" -> "301 /layers/layers.2/blocks.2/Reshape_3" [label="[4, 7, 7, 384]", style=solid]; +"286 /layers/layers.2/blocks.1/mlp/act/Mul_1" -> "302 /layers/layers.2/blocks.1/mlp/act/Mul_1/smooth_quant_multiply" [label="[1, 196, 1536]", style=solid]; +"287 /layers/layers.2/blocks.1/Concat_1" -> "303 /layers/layers.2/blocks.1/Reshape_1" [label="[1, 14, 14, 384]", style=solid]; +"288 /layers/layers.2/blocks.0/mlp/fc2/MatMul" -> "304 /layers/layers.2/blocks.0/mlp/fc2/Add" [label="[1, 196, 384]", style=solid]; +"289 /layers/layers.2/blocks.0/attn/qkv/Add" -> "305 /layers/layers.2/blocks.0/attn/Reshape" [label="[4, 49, 1152]", style=solid]; +"290 /layers/layers.1/blocks.1/attn/Add_1" -> "306 /layers/layers.1/blocks.1/attn/Reshape_2" [label="[1, 16, 6, 49, 49]", style=solid]; +"291 /layers/layers.1/blocks.1/attn/Reshape_3/smooth_quant_multiply" -> "307 /layers/layers.1/blocks.1/attn/proj/MatMul" [label="[16, 49, 192]", style=solid]; +"292 /layers/layers.1/blocks.0/Reshape_7" -> "83 /layers/layers.1/blocks.0/Add" [label="[1, 784, 192]", style=solid]; +"293 /layers/layers.2/blocks.5/Add" -> "308 /layers/layers.2/blocks.5/Add_1" [label="[1, 196, 384]", style=solid]; +"293 /layers/layers.2/blocks.5/Add" -> "309 /layers/layers.2/blocks.5/norm2/Div" [label="[1, 196, 384]", style=solid]; +"294 /layers/layers.2/blocks.5/norm1/Div" -> "310 /layers/layers.2/blocks.5/norm1/Mul" [label="[1, 196, 384]", style=solid]; +"295 /layers/layers.2/blocks.4/norm2/Mul" -> "311 /layers/layers.2/blocks.4/norm2/Add_1" [label="[1, 196, 384]", style=solid]; +"296 /layers/layers.2/blocks.4/norm1/Add_1" -> "312 /layers/layers.2/blocks.4/Reshape_1" [label="[1, 196, 384]", style=solid]; +"297 /layers/layers.2/blocks.3/norm2/Add_1/smooth_quant_multiply" -> "313 
/layers/layers.2/blocks.3/mlp/fc1/MatMul" [label="[1, 196, 384]", style=solid]; +"298 /layers/layers.2/blocks.3/Slice" -> "314 /layers/layers.2/blocks.3/Concat" [label="[1, 11, 14, 384]", style=solid]; +"299 /layers/layers.2/blocks.3/Slice_1" -> "314 /layers/layers.2/blocks.3/Concat" [label="[1, 3, 14, 384]", style=solid]; +"300 /layers/layers.2/blocks.2/mlp/fc1/Add" -> "315 /layers/layers.2/blocks.2/mlp/act/Mul_1" [label="[1, 196, 1536]", style=solid]; +"301 /layers/layers.2/blocks.2/Reshape_3" -> "316 /layers/layers.2/blocks.2/Reshape_3/smooth_quant_multiply" [label="[4, 49, 384]", style=solid]; +"302 /layers/layers.2/blocks.1/mlp/act/Mul_1/smooth_quant_multiply" -> "317 /layers/layers.2/blocks.1/mlp/fc2/MatMul" [label="[1, 196, 1536]", style=solid]; +"303 /layers/layers.2/blocks.1/Reshape_1" -> "318 /layers/layers.2/blocks.1/Transpose" [label="[1, 2, 7, 2, 7, 384]", style=solid]; +"304 /layers/layers.2/blocks.0/mlp/fc2/Add" -> "194 /layers/layers.2/blocks.0/Add_1" [label="[1, 196, 384]", style=solid]; +"305 /layers/layers.2/blocks.0/attn/Reshape" -> "319 /layers/layers.2/blocks.0/attn/Transpose" [label="[4, 49, 3, 12, 32]", style=solid]; +"306 /layers/layers.1/blocks.1/attn/Reshape_2" -> "320 /layers/layers.1/blocks.1/attn/softmax/Softmax" [label="[16, 6, 49, 49]", style=solid]; +"307 /layers/layers.1/blocks.1/attn/proj/MatMul" -> "321 /layers/layers.1/blocks.1/attn/proj/Add" [label="[16, 49, 192]", style=solid]; +"308 /layers/layers.2/blocks.5/Add_1" -> "322 /layers/layers.2/downsample/Reshape" [label="[1, 196, 384]", style=solid]; +"309 /layers/layers.2/blocks.5/norm2/Div" -> "323 /layers/layers.2/blocks.5/norm2/Mul" [label="[1, 196, 384]", style=solid]; +"310 /layers/layers.2/blocks.5/norm1/Mul" -> "324 /layers/layers.2/blocks.5/norm1/Add_1" [label="[1, 196, 384]", style=solid]; +"311 /layers/layers.2/blocks.4/norm2/Add_1" -> "325 /layers/layers.2/blocks.4/norm2/Add_1/smooth_quant_multiply" [label="[1, 196, 384]", style=solid]; +"312 /layers/layers.2/blocks.4/Reshape_1" -> "326 /layers/layers.2/blocks.4/Transpose" [label="[1, 2, 7, 2, 7, 384]", style=solid]; +"313 /layers/layers.2/blocks.3/mlp/fc1/MatMul" -> "327 /layers/layers.2/blocks.3/mlp/fc1/Add" [label="[1, 196, 1536]", style=solid]; +"314 /layers/layers.2/blocks.3/Concat" -> "328 /layers/layers.2/blocks.3/Slice_2" [label="[1, 14, 14, 384]", style=solid]; +"314 /layers/layers.2/blocks.3/Concat" -> "329 /layers/layers.2/blocks.3/Slice_3" [label="[1, 14, 14, 384]", style=solid]; +"315 /layers/layers.2/blocks.2/mlp/act/Mul_1" -> "330 /layers/layers.2/blocks.2/mlp/act/Mul_1/smooth_quant_multiply" [label="[1, 196, 1536]", style=solid]; +"316 /layers/layers.2/blocks.2/Reshape_3/smooth_quant_multiply" -> "331 /layers/layers.2/blocks.2/attn/qkv/MatMul" [label="[4, 49, 384]", style=solid]; +"317 /layers/layers.2/blocks.1/mlp/fc2/MatMul" -> "332 /layers/layers.2/blocks.1/mlp/fc2/Add" [label="[1, 196, 384]", style=solid]; +"318 /layers/layers.2/blocks.1/Transpose" -> "333 /layers/layers.2/blocks.1/Reshape_2" [label="[1, 2, 2, 7, 7, 384]", style=solid]; +"319 /layers/layers.2/blocks.0/attn/Transpose" -> "334 /layers/layers.2/blocks.0/attn/Gather" [label="[3, 4, 12, 49, 32]", style=solid]; +"319 /layers/layers.2/blocks.0/attn/Transpose" -> "335 /layers/layers.2/blocks.0/attn/Gather_1" [label="[3, 4, 12, 49, 32]", style=solid]; +"319 /layers/layers.2/blocks.0/attn/Transpose" -> "336 /layers/layers.2/blocks.0/attn/Gather_2" [label="[3, 4, 12, 49, 32]", style=solid]; +"320 /layers/layers.1/blocks.1/attn/softmax/Softmax" -> "250 
/layers/layers.1/blocks.1/attn/MatMul_1" [label="[16, 6, 49, 49]", style=solid]; +"321 /layers/layers.1/blocks.1/attn/proj/Add" -> "337 /layers/layers.1/blocks.1/Reshape_4" [label="[16, 49, 192]", style=solid]; +"322 /layers/layers.2/downsample/Reshape" -> "338 /layers/layers.2/downsample/Slice" [label="[1, 14, 14, 384]", style=solid]; +"322 /layers/layers.2/downsample/Reshape" -> "339 /layers/layers.2/downsample/Slice_2" [label="[1, 14, 14, 384]", style=solid]; +"323 /layers/layers.2/blocks.5/norm2/Mul" -> "340 /layers/layers.2/blocks.5/norm2/Add_1" [label="[1, 196, 384]", style=solid]; +"324 /layers/layers.2/blocks.5/norm1/Add_1" -> "341 /layers/layers.2/blocks.5/Reshape" [label="[1, 196, 384]", style=solid]; +"325 /layers/layers.2/blocks.4/norm2/Add_1/smooth_quant_multiply" -> "342 /layers/layers.2/blocks.4/mlp/fc1/MatMul" [label="[1, 196, 384]", style=solid]; +"326 /layers/layers.2/blocks.4/Transpose" -> "343 /layers/layers.2/blocks.4/Reshape_2" [label="[1, 2, 2, 7, 7, 384]", style=solid]; +"327 /layers/layers.2/blocks.3/mlp/fc1/Add" -> "344 /layers/layers.2/blocks.3/mlp/act/Mul_1" [label="[1, 196, 1536]", style=solid]; +"328 /layers/layers.2/blocks.3/Slice_2" -> "345 /layers/layers.2/blocks.3/Concat_1" [label="[1, 14, 11, 384]", style=solid]; +"329 /layers/layers.2/blocks.3/Slice_3" -> "345 /layers/layers.2/blocks.3/Concat_1" [label="[1, 14, 3, 384]", style=solid]; +"330 /layers/layers.2/blocks.2/mlp/act/Mul_1/smooth_quant_multiply" -> "346 /layers/layers.2/blocks.2/mlp/fc2/MatMul" [label="[1, 196, 1536]", style=solid]; +"331 /layers/layers.2/blocks.2/attn/qkv/MatMul" -> "347 /layers/layers.2/blocks.2/attn/qkv/Add" [label="[4, 49, 1152]", style=solid]; +"332 /layers/layers.2/blocks.1/mlp/fc2/Add" -> "209 /layers/layers.2/blocks.1/Add_1" [label="[1, 196, 384]", style=solid]; +"333 /layers/layers.2/blocks.1/Reshape_2" -> "348 /layers/layers.2/blocks.1/Reshape_3" [label="[4, 7, 7, 384]", style=solid]; +"334 /layers/layers.2/blocks.0/attn/Gather" -> "349 /layers/layers.2/blocks.0/attn/Mul" [label="[4, 12, 49, 32]", style=solid]; +"335 /layers/layers.2/blocks.0/attn/Gather_1" -> "350 /layers/layers.2/blocks.0/attn/MatMul" [label="[4, 12, 49, 32]", style=solid]; +"336 /layers/layers.2/blocks.0/attn/Gather_2" -> "351 /layers/layers.2/blocks.0/attn/MatMul_1" [label="[4, 12, 49, 32]", style=solid]; +"337 /layers/layers.1/blocks.1/Reshape_4" -> "352 /layers/layers.1/blocks.1/Reshape_5" [label="[16, 7, 7, 192]", style=solid]; +"338 /layers/layers.2/downsample/Slice" -> "353 /layers/layers.2/downsample/Slice_1" [label="[1, 7, 14, 384]", style=solid]; +"338 /layers/layers.2/downsample/Slice" -> "354 /layers/layers.2/downsample/Slice_4" [label="[1, 7, 14, 384]", style=solid]; +"339 /layers/layers.2/downsample/Slice_2" -> "355 /layers/layers.2/downsample/Slice_3" [label="[1, 7, 14, 384]", style=solid]; +"339 /layers/layers.2/downsample/Slice_2" -> "356 /layers/layers.2/downsample/Slice_5" [label="[1, 7, 14, 384]", style=solid]; +"340 /layers/layers.2/blocks.5/norm2/Add_1" -> "357 /layers/layers.2/blocks.5/norm2/Add_1/smooth_quant_multiply" [label="[1, 196, 384]", style=solid]; +"341 /layers/layers.2/blocks.5/Reshape" -> "358 /layers/layers.2/blocks.5/Slice" [label="[1, 14, 14, 384]", style=solid]; +"341 /layers/layers.2/blocks.5/Reshape" -> "359 /layers/layers.2/blocks.5/Slice_1" [label="[1, 14, 14, 384]", style=solid]; +"342 /layers/layers.2/blocks.4/mlp/fc1/MatMul" -> "360 /layers/layers.2/blocks.4/mlp/fc1/Add" [label="[1, 196, 1536]", style=solid]; +"343 /layers/layers.2/blocks.4/Reshape_2" -> 
"361 /layers/layers.2/blocks.4/Reshape_3" [label="[4, 7, 7, 384]", style=solid]; +"344 /layers/layers.2/blocks.3/mlp/act/Mul_1" -> "362 /layers/layers.2/blocks.3/mlp/act/Mul_1/smooth_quant_multiply" [label="[1, 196, 1536]", style=solid]; +"345 /layers/layers.2/blocks.3/Concat_1" -> "363 /layers/layers.2/blocks.3/Reshape_1" [label="[1, 14, 14, 384]", style=solid]; +"346 /layers/layers.2/blocks.2/mlp/fc2/MatMul" -> "364 /layers/layers.2/blocks.2/mlp/fc2/Add" [label="[1, 196, 384]", style=solid]; +"347 /layers/layers.2/blocks.2/attn/qkv/Add" -> "365 /layers/layers.2/blocks.2/attn/Reshape" [label="[4, 49, 1152]", style=solid]; +"348 /layers/layers.2/blocks.1/Reshape_3" -> "366 /layers/layers.2/blocks.1/Reshape_3/smooth_quant_multiply" [label="[4, 49, 384]", style=solid]; +"349 /layers/layers.2/blocks.0/attn/Mul" -> "350 /layers/layers.2/blocks.0/attn/MatMul" [label="[4, 12, 49, 32]", style=solid]; +"350 /layers/layers.2/blocks.0/attn/MatMul" -> "367 /layers/layers.2/blocks.0/attn/Add" [label="[4, 12, 49, 49]", style=solid]; +"351 /layers/layers.2/blocks.0/attn/MatMul_1" -> "368 /layers/layers.2/blocks.0/attn/Transpose_2" [label="[4, 12, 49, 32]", style=solid]; +"352 /layers/layers.1/blocks.1/Reshape_5" -> "369 /layers/layers.1/blocks.1/Transpose_1" [label="[1, 4, 4, 7, 7, 192]", style=solid]; +"353 /layers/layers.2/downsample/Slice_1" -> "370 /layers/layers.2/downsample/Concat" [label="[1, 7, 7, 384]", style=solid]; +"354 /layers/layers.2/downsample/Slice_4" -> "370 /layers/layers.2/downsample/Concat" [label="[1, 7, 7, 384]", style=solid]; +"355 /layers/layers.2/downsample/Slice_3" -> "370 /layers/layers.2/downsample/Concat" [label="[1, 7, 7, 384]", style=solid]; +"356 /layers/layers.2/downsample/Slice_5" -> "370 /layers/layers.2/downsample/Concat" [label="[1, 7, 7, 384]", style=solid]; +"357 /layers/layers.2/blocks.5/norm2/Add_1/smooth_quant_multiply" -> "371 /layers/layers.2/blocks.5/mlp/fc1/MatMul" [label="[1, 196, 384]", style=solid]; +"358 /layers/layers.2/blocks.5/Slice" -> "372 /layers/layers.2/blocks.5/Concat" [label="[1, 11, 14, 384]", style=solid]; +"359 /layers/layers.2/blocks.5/Slice_1" -> "372 /layers/layers.2/blocks.5/Concat" [label="[1, 3, 14, 384]", style=solid]; +"360 /layers/layers.2/blocks.4/mlp/fc1/Add" -> "373 /layers/layers.2/blocks.4/mlp/act/Mul_1" [label="[1, 196, 1536]", style=solid]; +"361 /layers/layers.2/blocks.4/Reshape_3" -> "374 /layers/layers.2/blocks.4/Reshape_3/smooth_quant_multiply" [label="[4, 49, 384]", style=solid]; +"362 /layers/layers.2/blocks.3/mlp/act/Mul_1/smooth_quant_multiply" -> "375 /layers/layers.2/blocks.3/mlp/fc2/MatMul" [label="[1, 196, 1536]", style=solid]; +"363 /layers/layers.2/blocks.3/Reshape_1" -> "376 /layers/layers.2/blocks.3/Transpose" [label="[1, 2, 7, 2, 7, 384]", style=solid]; +"364 /layers/layers.2/blocks.2/mlp/fc2/Add" -> "227 /layers/layers.2/blocks.2/Add_1" [label="[1, 196, 384]", style=solid]; +"365 /layers/layers.2/blocks.2/attn/Reshape" -> "377 /layers/layers.2/blocks.2/attn/Transpose" [label="[4, 49, 3, 12, 32]", style=solid]; +"366 /layers/layers.2/blocks.1/Reshape_3/smooth_quant_multiply" -> "378 /layers/layers.2/blocks.1/attn/qkv/MatMul" [label="[4, 49, 384]", style=solid]; +"367 /layers/layers.2/blocks.0/attn/Add" -> "379 /layers/layers.2/blocks.0/attn/softmax/Softmax" [label="[4, 12, 49, 49]", style=solid]; +"368 /layers/layers.2/blocks.0/attn/Transpose_2" -> "380 /layers/layers.2/blocks.0/attn/Reshape_1" [label="[4, 49, 12, 32]", style=solid]; +"369 /layers/layers.1/blocks.1/Transpose_1" -> "381 
/layers/layers.1/blocks.1/Reshape_6" [label="[1, 4, 7, 4, 7, 192]", style=solid]; +"370 /layers/layers.2/downsample/Concat" -> "382 /layers/layers.2/downsample/Reshape_1" [label="[1, 7, 7, 1536]", style=solid]; +"371 /layers/layers.2/blocks.5/mlp/fc1/MatMul" -> "383 /layers/layers.2/blocks.5/mlp/fc1/Add" [label="[1, 196, 1536]", style=solid]; +"372 /layers/layers.2/blocks.5/Concat" -> "384 /layers/layers.2/blocks.5/Slice_2" [label="[1, 14, 14, 384]", style=solid]; +"372 /layers/layers.2/blocks.5/Concat" -> "385 /layers/layers.2/blocks.5/Slice_3" [label="[1, 14, 14, 384]", style=solid]; +"373 /layers/layers.2/blocks.4/mlp/act/Mul_1" -> "386 /layers/layers.2/blocks.4/mlp/act/Mul_1/smooth_quant_multiply" [label="[1, 196, 1536]", style=solid]; +"374 /layers/layers.2/blocks.4/Reshape_3/smooth_quant_multiply" -> "387 /layers/layers.2/blocks.4/attn/qkv/MatMul" [label="[4, 49, 384]", style=solid]; +"375 /layers/layers.2/blocks.3/mlp/fc2/MatMul" -> "388 /layers/layers.2/blocks.3/mlp/fc2/Add" [label="[1, 196, 384]", style=solid]; +"376 /layers/layers.2/blocks.3/Transpose" -> "389 /layers/layers.2/blocks.3/Reshape_2" [label="[1, 2, 2, 7, 7, 384]", style=solid]; +"377 /layers/layers.2/blocks.2/attn/Transpose" -> "390 /layers/layers.2/blocks.2/attn/Gather" [label="[3, 4, 12, 49, 32]", style=solid]; +"377 /layers/layers.2/blocks.2/attn/Transpose" -> "391 /layers/layers.2/blocks.2/attn/Gather_1" [label="[3, 4, 12, 49, 32]", style=solid]; +"377 /layers/layers.2/blocks.2/attn/Transpose" -> "392 /layers/layers.2/blocks.2/attn/Gather_2" [label="[3, 4, 12, 49, 32]", style=solid]; +"378 /layers/layers.2/blocks.1/attn/qkv/MatMul" -> "393 /layers/layers.2/blocks.1/attn/qkv/Add" [label="[4, 49, 1152]", style=solid]; +"379 /layers/layers.2/blocks.0/attn/softmax/Softmax" -> "351 /layers/layers.2/blocks.0/attn/MatMul_1" [label="[4, 12, 49, 49]", style=solid]; +"380 /layers/layers.2/blocks.0/attn/Reshape_1" -> "394 /layers/layers.2/blocks.0/attn/Reshape_1/smooth_quant_multiply" [label="[4, 49, 384]", style=solid]; +"381 /layers/layers.1/blocks.1/Reshape_6" -> "395 /layers/layers.1/blocks.1/Slice_4" [label="[1, 28, 28, 192]", style=solid]; +"381 /layers/layers.1/blocks.1/Reshape_6" -> "396 /layers/layers.1/blocks.1/Slice_5" [label="[1, 28, 28, 192]", style=solid]; +"382 /layers/layers.2/downsample/Reshape_1" -> "397 /layers/layers.2/downsample/norm/Div" [label="[1, 49, 1536]", style=solid]; +"383 /layers/layers.2/blocks.5/mlp/fc1/Add" -> "398 /layers/layers.2/blocks.5/mlp/act/Mul_1" [label="[1, 196, 1536]", style=solid]; +"384 /layers/layers.2/blocks.5/Slice_2" -> "399 /layers/layers.2/blocks.5/Concat_1" [label="[1, 14, 11, 384]", style=solid]; +"385 /layers/layers.2/blocks.5/Slice_3" -> "399 /layers/layers.2/blocks.5/Concat_1" [label="[1, 14, 3, 384]", style=solid]; +"386 /layers/layers.2/blocks.4/mlp/act/Mul_1/smooth_quant_multiply" -> "400 /layers/layers.2/blocks.4/mlp/fc2/MatMul" [label="[1, 196, 1536]", style=solid]; +"387 /layers/layers.2/blocks.4/attn/qkv/MatMul" -> "401 /layers/layers.2/blocks.4/attn/qkv/Add" [label="[4, 49, 1152]", style=solid]; +"388 /layers/layers.2/blocks.3/mlp/fc2/Add" -> "253 /layers/layers.2/blocks.3/Add_1" [label="[1, 196, 384]", style=solid]; +"389 /layers/layers.2/blocks.3/Reshape_2" -> "402 /layers/layers.2/blocks.3/Reshape_3" [label="[4, 7, 7, 384]", style=solid]; +"390 /layers/layers.2/blocks.2/attn/Gather" -> "403 /layers/layers.2/blocks.2/attn/Mul" [label="[4, 12, 49, 32]", style=solid]; +"391 /layers/layers.2/blocks.2/attn/Gather_1" -> "404 
/layers/layers.2/blocks.2/attn/MatMul" [label="[4, 12, 49, 32]", style=solid]; +"392 /layers/layers.2/blocks.2/attn/Gather_2" -> "405 /layers/layers.2/blocks.2/attn/MatMul_1" [label="[4, 12, 49, 32]", style=solid]; +"393 /layers/layers.2/blocks.1/attn/qkv/Add" -> "406 /layers/layers.2/blocks.1/attn/Reshape" [label="[4, 49, 1152]", style=solid]; +"394 /layers/layers.2/blocks.0/attn/Reshape_1/smooth_quant_multiply" -> "407 /layers/layers.2/blocks.0/attn/proj/MatMul" [label="[4, 49, 384]", style=solid]; +"395 /layers/layers.1/blocks.1/Slice_4" -> "408 /layers/layers.1/blocks.1/Concat_2" [label="[1, 3, 28, 192]", style=solid]; +"396 /layers/layers.1/blocks.1/Slice_5" -> "408 /layers/layers.1/blocks.1/Concat_2" [label="[1, 25, 28, 192]", style=solid]; +"397 /layers/layers.2/downsample/norm/Div" -> "409 /layers/layers.2/downsample/norm/Mul" [label="[1, 49, 1536]", style=solid]; +"398 /layers/layers.2/blocks.5/mlp/act/Mul_1" -> "410 /layers/layers.2/blocks.5/mlp/act/Mul_1/smooth_quant_multiply" [label="[1, 196, 1536]", style=solid]; +"399 /layers/layers.2/blocks.5/Concat_1" -> "411 /layers/layers.2/blocks.5/Reshape_1" [label="[1, 14, 14, 384]", style=solid]; +"400 /layers/layers.2/blocks.4/mlp/fc2/MatMul" -> "412 /layers/layers.2/blocks.4/mlp/fc2/Add" [label="[1, 196, 384]", style=solid]; +"401 /layers/layers.2/blocks.4/attn/qkv/Add" -> "413 /layers/layers.2/blocks.4/attn/Reshape" [label="[4, 49, 1152]", style=solid]; +"402 /layers/layers.2/blocks.3/Reshape_3" -> "414 /layers/layers.2/blocks.3/Reshape_3/smooth_quant_multiply" [label="[4, 49, 384]", style=solid]; +"403 /layers/layers.2/blocks.2/attn/Mul" -> "404 /layers/layers.2/blocks.2/attn/MatMul" [label="[4, 12, 49, 32]", style=solid]; +"404 /layers/layers.2/blocks.2/attn/MatMul" -> "415 /layers/layers.2/blocks.2/attn/Add" [label="[4, 12, 49, 49]", style=solid]; +"405 /layers/layers.2/blocks.2/attn/MatMul_1" -> "416 /layers/layers.2/blocks.2/attn/Transpose_2" [label="[4, 12, 49, 32]", style=solid]; +"406 /layers/layers.2/blocks.1/attn/Reshape" -> "417 /layers/layers.2/blocks.1/attn/Transpose" [label="[4, 49, 3, 12, 32]", style=solid]; +"407 /layers/layers.2/blocks.0/attn/proj/MatMul" -> "418 /layers/layers.2/blocks.0/attn/proj/Add" [label="[4, 49, 384]", style=solid]; +"408 /layers/layers.1/blocks.1/Concat_2" -> "419 /layers/layers.1/blocks.1/Slice_6" [label="[1, 28, 28, 192]", style=solid]; +"408 /layers/layers.1/blocks.1/Concat_2" -> "420 /layers/layers.1/blocks.1/Slice_7" [label="[1, 28, 28, 192]", style=solid]; +"409 /layers/layers.2/downsample/norm/Mul" -> "421 /layers/layers.2/downsample/norm/Add_1" [label="[1, 49, 1536]", style=solid]; +"410 /layers/layers.2/blocks.5/mlp/act/Mul_1/smooth_quant_multiply" -> "422 /layers/layers.2/blocks.5/mlp/fc2/MatMul" [label="[1, 196, 1536]", style=solid]; +"411 /layers/layers.2/blocks.5/Reshape_1" -> "423 /layers/layers.2/blocks.5/Transpose" [label="[1, 2, 7, 2, 7, 384]", style=solid]; +"412 /layers/layers.2/blocks.4/mlp/fc2/Add" -> "279 /layers/layers.2/blocks.4/Add_1" [label="[1, 196, 384]", style=solid]; +"413 /layers/layers.2/blocks.4/attn/Reshape" -> "424 /layers/layers.2/blocks.4/attn/Transpose" [label="[4, 49, 3, 12, 32]", style=solid]; +"414 /layers/layers.2/blocks.3/Reshape_3/smooth_quant_multiply" -> "425 /layers/layers.2/blocks.3/attn/qkv/MatMul" [label="[4, 49, 384]", style=solid]; +"415 /layers/layers.2/blocks.2/attn/Add" -> "426 /layers/layers.2/blocks.2/attn/softmax/Softmax" [label="[4, 12, 49, 49]", style=solid]; +"416 /layers/layers.2/blocks.2/attn/Transpose_2" -> "427 
/layers/layers.2/blocks.2/attn/Reshape_1" [label="[4, 49, 12, 32]", style=solid]; +"417 /layers/layers.2/blocks.1/attn/Transpose" -> "428 /layers/layers.2/blocks.1/attn/Gather" [label="[3, 4, 12, 49, 32]", style=solid]; +"417 /layers/layers.2/blocks.1/attn/Transpose" -> "429 /layers/layers.2/blocks.1/attn/Gather_1" [label="[3, 4, 12, 49, 32]", style=solid]; +"417 /layers/layers.2/blocks.1/attn/Transpose" -> "430 /layers/layers.2/blocks.1/attn/Gather_2" [label="[3, 4, 12, 49, 32]", style=solid]; +"418 /layers/layers.2/blocks.0/attn/proj/Add" -> "431 /layers/layers.2/blocks.0/Reshape_4" [label="[4, 49, 384]", style=solid]; +"419 /layers/layers.1/blocks.1/Slice_6" -> "432 /layers/layers.1/blocks.1/Concat_3" [label="[1, 28, 3, 192]", style=solid]; +"420 /layers/layers.1/blocks.1/Slice_7" -> "432 /layers/layers.1/blocks.1/Concat_3" [label="[1, 28, 25, 192]", style=solid]; +"421 /layers/layers.2/downsample/norm/Add_1" -> "433 /layers/layers.2/downsample/norm/Add_1/smooth_quant_multiply" [label="[1, 49, 1536]", style=solid]; +"422 /layers/layers.2/blocks.5/mlp/fc2/MatMul" -> "434 /layers/layers.2/blocks.5/mlp/fc2/Add" [label="[1, 196, 384]", style=solid]; +"423 /layers/layers.2/blocks.5/Transpose" -> "435 /layers/layers.2/blocks.5/Reshape_2" [label="[1, 2, 2, 7, 7, 384]", style=solid]; +"424 /layers/layers.2/blocks.4/attn/Transpose" -> "436 /layers/layers.2/blocks.4/attn/Gather" [label="[3, 4, 12, 49, 32]", style=solid]; +"424 /layers/layers.2/blocks.4/attn/Transpose" -> "437 /layers/layers.2/blocks.4/attn/Gather_1" [label="[3, 4, 12, 49, 32]", style=solid]; +"424 /layers/layers.2/blocks.4/attn/Transpose" -> "438 /layers/layers.2/blocks.4/attn/Gather_2" [label="[3, 4, 12, 49, 32]", style=solid]; +"425 /layers/layers.2/blocks.3/attn/qkv/MatMul" -> "439 /layers/layers.2/blocks.3/attn/qkv/Add" [label="[4, 49, 1152]", style=solid]; +"426 /layers/layers.2/blocks.2/attn/softmax/Softmax" -> "405 /layers/layers.2/blocks.2/attn/MatMul_1" [label="[4, 12, 49, 49]", style=solid]; +"427 /layers/layers.2/blocks.2/attn/Reshape_1" -> "440 /layers/layers.2/blocks.2/attn/Reshape_1/smooth_quant_multiply" [label="[4, 49, 384]", style=solid]; +"428 /layers/layers.2/blocks.1/attn/Gather" -> "441 /layers/layers.2/blocks.1/attn/Mul" [label="[4, 12, 49, 32]", style=solid]; +"429 /layers/layers.2/blocks.1/attn/Gather_1" -> "442 /layers/layers.2/blocks.1/attn/MatMul" [label="[4, 12, 49, 32]", style=solid]; +"430 /layers/layers.2/blocks.1/attn/Gather_2" -> "443 /layers/layers.2/blocks.1/attn/MatMul_1" [label="[4, 12, 49, 32]", style=solid]; +"431 /layers/layers.2/blocks.0/Reshape_4" -> "444 /layers/layers.2/blocks.0/Reshape_5" [label="[4, 7, 7, 384]", style=solid]; +"432 /layers/layers.1/blocks.1/Concat_3" -> "445 /layers/layers.1/blocks.1/Reshape_7" [label="[1, 28, 28, 192]", style=solid]; +"433 /layers/layers.2/downsample/norm/Add_1/smooth_quant_multiply" -> "446 /layers/layers.2/downsample/reduction/MatMul" [label="[1, 49, 1536]", style=solid]; +"434 /layers/layers.2/blocks.5/mlp/fc2/Add" -> "308 /layers/layers.2/blocks.5/Add_1" [label="[1, 196, 384]", style=solid]; +"435 /layers/layers.2/blocks.5/Reshape_2" -> "447 /layers/layers.2/blocks.5/Reshape_3" [label="[4, 7, 7, 384]", style=solid]; +"436 /layers/layers.2/blocks.4/attn/Gather" -> "448 /layers/layers.2/blocks.4/attn/Mul" [label="[4, 12, 49, 32]", style=solid]; +"437 /layers/layers.2/blocks.4/attn/Gather_1" -> "449 /layers/layers.2/blocks.4/attn/MatMul" [label="[4, 12, 49, 32]", style=solid]; +"438 /layers/layers.2/blocks.4/attn/Gather_2" -> "450 
/layers/layers.2/blocks.4/attn/MatMul_1" [label="[4, 12, 49, 32]", style=solid]; +"439 /layers/layers.2/blocks.3/attn/qkv/Add" -> "451 /layers/layers.2/blocks.3/attn/Reshape" [label="[4, 49, 1152]", style=solid]; +"440 /layers/layers.2/blocks.2/attn/Reshape_1/smooth_quant_multiply" -> "452 /layers/layers.2/blocks.2/attn/proj/MatMul" [label="[4, 49, 384]", style=solid]; +"441 /layers/layers.2/blocks.1/attn/Mul" -> "442 /layers/layers.2/blocks.1/attn/MatMul" [label="[4, 12, 49, 32]", style=solid]; +"442 /layers/layers.2/blocks.1/attn/MatMul" -> "453 /layers/layers.2/blocks.1/attn/Add" [label="[4, 12, 49, 49]", style=solid]; +"443 /layers/layers.2/blocks.1/attn/MatMul_1" -> "454 /layers/layers.2/blocks.1/attn/Transpose_2" [label="[4, 12, 49, 32]", style=solid]; +"444 /layers/layers.2/blocks.0/Reshape_5" -> "455 /layers/layers.2/blocks.0/Transpose_1" [label="[1, 2, 2, 7, 7, 384]", style=solid]; +"445 /layers/layers.1/blocks.1/Reshape_7" -> "94 /layers/layers.1/blocks.1/Add" [label="[1, 784, 192]", style=solid]; +"446 /layers/layers.2/downsample/reduction/MatMul" -> "456 /layers/layers.3/blocks.0/Add" [label="[1, 49, 768]", style=solid]; +"446 /layers/layers.2/downsample/reduction/MatMul" -> "457 /layers/layers.3/blocks.0/norm1/Div" [label="[1, 49, 768]", style=solid]; +"447 /layers/layers.2/blocks.5/Reshape_3" -> "458 /layers/layers.2/blocks.5/Reshape_3/smooth_quant_multiply" [label="[4, 49, 384]", style=solid]; +"448 /layers/layers.2/blocks.4/attn/Mul" -> "449 /layers/layers.2/blocks.4/attn/MatMul" [label="[4, 12, 49, 32]", style=solid]; +"449 /layers/layers.2/blocks.4/attn/MatMul" -> "459 /layers/layers.2/blocks.4/attn/Add" [label="[4, 12, 49, 49]", style=solid]; +"450 /layers/layers.2/blocks.4/attn/MatMul_1" -> "460 /layers/layers.2/blocks.4/attn/Transpose_2" [label="[4, 12, 49, 32]", style=solid]; +"451 /layers/layers.2/blocks.3/attn/Reshape" -> "461 /layers/layers.2/blocks.3/attn/Transpose" [label="[4, 49, 3, 12, 32]", style=solid]; +"452 /layers/layers.2/blocks.2/attn/proj/MatMul" -> "462 /layers/layers.2/blocks.2/attn/proj/Add" [label="[4, 49, 384]", style=solid]; +"453 /layers/layers.2/blocks.1/attn/Add" -> "463 /layers/layers.2/blocks.1/attn/Reshape_1" [label="[4, 12, 49, 49]", style=solid]; +"454 /layers/layers.2/blocks.1/attn/Transpose_2" -> "464 /layers/layers.2/blocks.1/attn/Reshape_3" [label="[4, 49, 12, 32]", style=solid]; +"455 /layers/layers.2/blocks.0/Transpose_1" -> "465 /layers/layers.2/blocks.0/Reshape_6" [label="[1, 2, 7, 2, 7, 384]", style=solid]; +"456 /layers/layers.3/blocks.0/Add" -> "466 /layers/layers.3/blocks.0/Add_1" [label="[1, 49, 768]", style=solid]; +"456 /layers/layers.3/blocks.0/Add" -> "467 /layers/layers.3/blocks.0/norm2/Div" [label="[1, 49, 768]", style=solid]; +"457 /layers/layers.3/blocks.0/norm1/Div" -> "468 /layers/layers.3/blocks.0/norm1/Mul" [label="[1, 49, 768]", style=solid]; +"458 /layers/layers.2/blocks.5/Reshape_3/smooth_quant_multiply" -> "469 /layers/layers.2/blocks.5/attn/qkv/MatMul" [label="[4, 49, 384]", style=solid]; +"459 /layers/layers.2/blocks.4/attn/Add" -> "470 /layers/layers.2/blocks.4/attn/softmax/Softmax" [label="[4, 12, 49, 49]", style=solid]; +"460 /layers/layers.2/blocks.4/attn/Transpose_2" -> "471 /layers/layers.2/blocks.4/attn/Reshape_1" [label="[4, 49, 12, 32]", style=solid]; +"461 /layers/layers.2/blocks.3/attn/Transpose" -> "472 /layers/layers.2/blocks.3/attn/Gather" [label="[3, 4, 12, 49, 32]", style=solid]; +"461 /layers/layers.2/blocks.3/attn/Transpose" -> "473 /layers/layers.2/blocks.3/attn/Gather_1" [label="[3, 4, 12, 
49, 32]", style=solid]; +"461 /layers/layers.2/blocks.3/attn/Transpose" -> "474 /layers/layers.2/blocks.3/attn/Gather_2" [label="[3, 4, 12, 49, 32]", style=solid]; +"462 /layers/layers.2/blocks.2/attn/proj/Add" -> "475 /layers/layers.2/blocks.2/Reshape_4" [label="[4, 49, 384]", style=solid]; +"463 /layers/layers.2/blocks.1/attn/Reshape_1" -> "476 /layers/layers.2/blocks.1/attn/Add_1" [label="[1, 4, 12, 49, 49]", style=solid]; +"464 /layers/layers.2/blocks.1/attn/Reshape_3" -> "477 /layers/layers.2/blocks.1/attn/Reshape_3/smooth_quant_multiply" [label="[4, 49, 384]", style=solid]; +"465 /layers/layers.2/blocks.0/Reshape_6" -> "478 /layers/layers.2/blocks.0/Reshape_7" [label="[1, 14, 14, 384]", style=solid]; +"466 /layers/layers.3/blocks.0/Add_1" -> "479 /layers/layers.3/blocks.1/Add" [label="[1, 49, 768]", style=solid]; +"466 /layers/layers.3/blocks.0/Add_1" -> "480 /layers/layers.3/blocks.1/norm1/Div" [label="[1, 49, 768]", style=solid]; +"467 /layers/layers.3/blocks.0/norm2/Div" -> "481 /layers/layers.3/blocks.0/norm2/Mul" [label="[1, 49, 768]", style=solid]; +"468 /layers/layers.3/blocks.0/norm1/Mul" -> "482 /layers/layers.3/blocks.0/norm1/Add_1" [label="[1, 49, 768]", style=solid]; +"469 /layers/layers.2/blocks.5/attn/qkv/MatMul" -> "483 /layers/layers.2/blocks.5/attn/qkv/Add" [label="[4, 49, 1152]", style=solid]; +"470 /layers/layers.2/blocks.4/attn/softmax/Softmax" -> "450 /layers/layers.2/blocks.4/attn/MatMul_1" [label="[4, 12, 49, 49]", style=solid]; +"471 /layers/layers.2/blocks.4/attn/Reshape_1" -> "484 /layers/layers.2/blocks.4/attn/Reshape_1/smooth_quant_multiply" [label="[4, 49, 384]", style=solid]; +"472 /layers/layers.2/blocks.3/attn/Gather" -> "485 /layers/layers.2/blocks.3/attn/Mul" [label="[4, 12, 49, 32]", style=solid]; +"473 /layers/layers.2/blocks.3/attn/Gather_1" -> "486 /layers/layers.2/blocks.3/attn/MatMul" [label="[4, 12, 49, 32]", style=solid]; +"474 /layers/layers.2/blocks.3/attn/Gather_2" -> "487 /layers/layers.2/blocks.3/attn/MatMul_1" [label="[4, 12, 49, 32]", style=solid]; +"475 /layers/layers.2/blocks.2/Reshape_4" -> "488 /layers/layers.2/blocks.2/Reshape_5" [label="[4, 7, 7, 384]", style=solid]; +"476 /layers/layers.2/blocks.1/attn/Add_1" -> "489 /layers/layers.2/blocks.1/attn/Reshape_2" [label="[1, 4, 12, 49, 49]", style=solid]; +"477 /layers/layers.2/blocks.1/attn/Reshape_3/smooth_quant_multiply" -> "490 /layers/layers.2/blocks.1/attn/proj/MatMul" [label="[4, 49, 384]", style=solid]; +"478 /layers/layers.2/blocks.0/Reshape_7" -> "188 /layers/layers.2/blocks.0/Add" [label="[1, 196, 384]", style=solid]; +"479 /layers/layers.3/blocks.1/Add" -> "491 /layers/layers.3/blocks.1/Add_1" [label="[1, 49, 768]", style=solid]; +"479 /layers/layers.3/blocks.1/Add" -> "492 /layers/layers.3/blocks.1/norm2/Div" [label="[1, 49, 768]", style=solid]; +"480 /layers/layers.3/blocks.1/norm1/Div" -> "493 /layers/layers.3/blocks.1/norm1/Mul" [label="[1, 49, 768]", style=solid]; +"481 /layers/layers.3/blocks.0/norm2/Mul" -> "494 /layers/layers.3/blocks.0/norm2/Add_1" [label="[1, 49, 768]", style=solid]; +"482 /layers/layers.3/blocks.0/norm1/Add_1" -> "495 /layers/layers.3/blocks.0/Reshape_1" [label="[1, 49, 768]", style=solid]; +"483 /layers/layers.2/blocks.5/attn/qkv/Add" -> "496 /layers/layers.2/blocks.5/attn/Reshape" [label="[4, 49, 1152]", style=solid]; +"484 /layers/layers.2/blocks.4/attn/Reshape_1/smooth_quant_multiply" -> "497 /layers/layers.2/blocks.4/attn/proj/MatMul" [label="[4, 49, 384]", style=solid]; +"485 /layers/layers.2/blocks.3/attn/Mul" -> "486 
/layers/layers.2/blocks.3/attn/MatMul" [label="[4, 12, 49, 32]", style=solid]; +"486 /layers/layers.2/blocks.3/attn/MatMul" -> "498 /layers/layers.2/blocks.3/attn/Add" [label="[4, 12, 49, 49]", style=solid]; +"487 /layers/layers.2/blocks.3/attn/MatMul_1" -> "499 /layers/layers.2/blocks.3/attn/Transpose_2" [label="[4, 12, 49, 32]", style=solid]; +"488 /layers/layers.2/blocks.2/Reshape_5" -> "500 /layers/layers.2/blocks.2/Transpose_1" [label="[1, 2, 2, 7, 7, 384]", style=solid]; +"489 /layers/layers.2/blocks.1/attn/Reshape_2" -> "501 /layers/layers.2/blocks.1/attn/softmax/Softmax" [label="[4, 12, 49, 49]", style=solid]; +"490 /layers/layers.2/blocks.1/attn/proj/MatMul" -> "502 /layers/layers.2/blocks.1/attn/proj/Add" [label="[4, 49, 384]", style=solid]; +"491 /layers/layers.3/blocks.1/Add_1" -> "503 /norm/Div" [label="[1, 49, 768]", style=solid]; +"492 /layers/layers.3/blocks.1/norm2/Div" -> "504 /layers/layers.3/blocks.1/norm2/Mul" [label="[1, 49, 768]", style=solid]; +"493 /layers/layers.3/blocks.1/norm1/Mul" -> "505 /layers/layers.3/blocks.1/norm1/Add_1" [label="[1, 49, 768]", style=solid]; +"494 /layers/layers.3/blocks.0/norm2/Add_1" -> "506 /layers/layers.3/blocks.0/norm2/Add_1/smooth_quant_multiply" [label="[1, 49, 768]", style=solid]; +"495 /layers/layers.3/blocks.0/Reshape_1" -> "507 /layers/layers.3/blocks.0/Transpose" [label="[1, 1, 7, 1, 7, 768]", style=solid]; +"496 /layers/layers.2/blocks.5/attn/Reshape" -> "508 /layers/layers.2/blocks.5/attn/Transpose" [label="[4, 49, 3, 12, 32]", style=solid]; +"497 /layers/layers.2/blocks.4/attn/proj/MatMul" -> "509 /layers/layers.2/blocks.4/attn/proj/Add" [label="[4, 49, 384]", style=solid]; +"498 /layers/layers.2/blocks.3/attn/Add" -> "510 /layers/layers.2/blocks.3/attn/Reshape_1" [label="[4, 12, 49, 49]", style=solid]; +"499 /layers/layers.2/blocks.3/attn/Transpose_2" -> "511 /layers/layers.2/blocks.3/attn/Reshape_3" [label="[4, 49, 12, 32]", style=solid]; +"500 /layers/layers.2/blocks.2/Transpose_1" -> "512 /layers/layers.2/blocks.2/Reshape_6" [label="[1, 2, 7, 2, 7, 384]", style=solid]; +"501 /layers/layers.2/blocks.1/attn/softmax/Softmax" -> "443 /layers/layers.2/blocks.1/attn/MatMul_1" [label="[4, 12, 49, 49]", style=solid]; +"502 /layers/layers.2/blocks.1/attn/proj/Add" -> "513 /layers/layers.2/blocks.1/Reshape_4" [label="[4, 49, 384]", style=solid]; +"503 /norm/Div" -> "514 /norm/Mul" [label="[1, 49, 768]", style=solid]; +"504 /layers/layers.3/blocks.1/norm2/Mul" -> "515 /layers/layers.3/blocks.1/norm2/Add_1" [label="[1, 49, 768]", style=solid]; +"505 /layers/layers.3/blocks.1/norm1/Add_1" -> "516 /layers/layers.3/blocks.1/Reshape_1" [label="[1, 49, 768]", style=solid]; +"506 /layers/layers.3/blocks.0/norm2/Add_1/smooth_quant_multiply" -> "517 /layers/layers.3/blocks.0/mlp/fc1/MatMul" [label="[1, 49, 768]", style=solid]; +"507 /layers/layers.3/blocks.0/Transpose" -> "518 /layers/layers.3/blocks.0/Reshape_2" [label="[1, 1, 1, 7, 7, 768]", style=solid]; +"508 /layers/layers.2/blocks.5/attn/Transpose" -> "519 /layers/layers.2/blocks.5/attn/Gather" [label="[3, 4, 12, 49, 32]", style=solid]; +"508 /layers/layers.2/blocks.5/attn/Transpose" -> "520 /layers/layers.2/blocks.5/attn/Gather_1" [label="[3, 4, 12, 49, 32]", style=solid]; +"508 /layers/layers.2/blocks.5/attn/Transpose" -> "521 /layers/layers.2/blocks.5/attn/Gather_2" [label="[3, 4, 12, 49, 32]", style=solid]; +"509 /layers/layers.2/blocks.4/attn/proj/Add" -> "522 /layers/layers.2/blocks.4/Reshape_4" [label="[4, 49, 384]", style=solid]; +"510 
/layers/layers.2/blocks.3/attn/Reshape_1" -> "523 /layers/layers.2/blocks.3/attn/Add_1" [label="[1, 4, 12, 49, 49]", style=solid]; +"511 /layers/layers.2/blocks.3/attn/Reshape_3" -> "524 /layers/layers.2/blocks.3/attn/Reshape_3/smooth_quant_multiply" [label="[4, 49, 384]", style=solid]; +"512 /layers/layers.2/blocks.2/Reshape_6" -> "525 /layers/layers.2/blocks.2/Reshape_7" [label="[1, 14, 14, 384]", style=solid]; +"513 /layers/layers.2/blocks.1/Reshape_4" -> "526 /layers/layers.2/blocks.1/Reshape_5" [label="[4, 7, 7, 384]", style=solid]; +"514 /norm/Mul" -> "527 /norm/Add_1" [label="[1, 49, 768]", style=solid]; +"515 /layers/layers.3/blocks.1/norm2/Add_1" -> "528 /layers/layers.3/blocks.1/norm2/Add_1/smooth_quant_multiply" [label="[1, 49, 768]", style=solid]; +"516 /layers/layers.3/blocks.1/Reshape_1" -> "529 /layers/layers.3/blocks.1/Transpose" [label="[1, 1, 7, 1, 7, 768]", style=solid]; +"517 /layers/layers.3/blocks.0/mlp/fc1/MatMul" -> "530 /layers/layers.3/blocks.0/mlp/fc1/Add" [label="[1, 49, 3072]", style=solid]; +"518 /layers/layers.3/blocks.0/Reshape_2" -> "531 /layers/layers.3/blocks.0/Reshape_3" [label="[1, 7, 7, 768]", style=solid]; +"519 /layers/layers.2/blocks.5/attn/Gather" -> "532 /layers/layers.2/blocks.5/attn/Mul" [label="[4, 12, 49, 32]", style=solid]; +"520 /layers/layers.2/blocks.5/attn/Gather_1" -> "533 /layers/layers.2/blocks.5/attn/MatMul" [label="[4, 12, 49, 32]", style=solid]; +"521 /layers/layers.2/blocks.5/attn/Gather_2" -> "534 /layers/layers.2/blocks.5/attn/MatMul_1" [label="[4, 12, 49, 32]", style=solid]; +"522 /layers/layers.2/blocks.4/Reshape_4" -> "535 /layers/layers.2/blocks.4/Reshape_5" [label="[4, 7, 7, 384]", style=solid]; +"523 /layers/layers.2/blocks.3/attn/Add_1" -> "536 /layers/layers.2/blocks.3/attn/Reshape_2" [label="[1, 4, 12, 49, 49]", style=solid]; +"524 /layers/layers.2/blocks.3/attn/Reshape_3/smooth_quant_multiply" -> "537 /layers/layers.2/blocks.3/attn/proj/MatMul" [label="[4, 49, 384]", style=solid]; +"525 /layers/layers.2/blocks.2/Reshape_7" -> "217 /layers/layers.2/blocks.2/Add" [label="[1, 196, 384]", style=solid]; +"526 /layers/layers.2/blocks.1/Reshape_5" -> "538 /layers/layers.2/blocks.1/Transpose_1" [label="[1, 2, 2, 7, 7, 384]", style=solid]; +"527 /norm/Add_1" -> "539 ReduceMean_6197" [label="[1, 49, 768]", style=solid]; +"528 /layers/layers.3/blocks.1/norm2/Add_1/smooth_quant_multiply" -> "540 /layers/layers.3/blocks.1/mlp/fc1/MatMul" [label="[1, 49, 768]", style=solid]; +"529 /layers/layers.3/blocks.1/Transpose" -> "541 /layers/layers.3/blocks.1/Reshape_2" [label="[1, 1, 1, 7, 7, 768]", style=solid]; +"530 /layers/layers.3/blocks.0/mlp/fc1/Add" -> "542 /layers/layers.3/blocks.0/mlp/act/Mul_1" [label="[1, 49, 3072]", style=solid]; +"531 /layers/layers.3/blocks.0/Reshape_3" -> "543 /layers/layers.3/blocks.0/Reshape_3/smooth_quant_multiply" [label="[1, 49, 768]", style=solid]; +"532 /layers/layers.2/blocks.5/attn/Mul" -> "533 /layers/layers.2/blocks.5/attn/MatMul" [label="[4, 12, 49, 32]", style=solid]; +"533 /layers/layers.2/blocks.5/attn/MatMul" -> "544 /layers/layers.2/blocks.5/attn/Add" [label="[4, 12, 49, 49]", style=solid]; +"534 /layers/layers.2/blocks.5/attn/MatMul_1" -> "545 /layers/layers.2/blocks.5/attn/Transpose_2" [label="[4, 12, 49, 32]", style=solid]; +"535 /layers/layers.2/blocks.4/Reshape_5" -> "546 /layers/layers.2/blocks.4/Transpose_1" [label="[1, 2, 2, 7, 7, 384]", style=solid]; +"536 /layers/layers.2/blocks.3/attn/Reshape_2" -> "547 /layers/layers.2/blocks.3/attn/softmax/Softmax" [label="[4, 12, 49, 49]", 
style=solid]; +"537 /layers/layers.2/blocks.3/attn/proj/MatMul" -> "548 /layers/layers.2/blocks.3/attn/proj/Add" [label="[4, 49, 384]", style=solid]; +"538 /layers/layers.2/blocks.1/Transpose_1" -> "549 /layers/layers.2/blocks.1/Reshape_6" [label="[1, 2, 7, 2, 7, 384]", style=solid]; +"539 ReduceMean_6197" -> "550 /avgpool/GlobalAveragePool" [label="[1, 1, 768]", style=solid]; +"540 /layers/layers.3/blocks.1/mlp/fc1/MatMul" -> "551 /layers/layers.3/blocks.1/mlp/fc1/Add" [label="[1, 49, 3072]", style=solid]; +"541 /layers/layers.3/blocks.1/Reshape_2" -> "552 /layers/layers.3/blocks.1/Reshape_3" [label="[1, 7, 7, 768]", style=solid]; +"542 /layers/layers.3/blocks.0/mlp/act/Mul_1" -> "553 /layers/layers.3/blocks.0/mlp/act/Mul_1/smooth_quant_multiply" [label="[1, 49, 3072]", style=solid]; +"543 /layers/layers.3/blocks.0/Reshape_3/smooth_quant_multiply" -> "554 /layers/layers.3/blocks.0/attn/qkv/MatMul" [label="[1, 49, 768]", style=solid]; +"544 /layers/layers.2/blocks.5/attn/Add" -> "555 /layers/layers.2/blocks.5/attn/Reshape_1" [label="[4, 12, 49, 49]", style=solid]; +"545 /layers/layers.2/blocks.5/attn/Transpose_2" -> "556 /layers/layers.2/blocks.5/attn/Reshape_3" [label="[4, 49, 12, 32]", style=solid]; +"546 /layers/layers.2/blocks.4/Transpose_1" -> "557 /layers/layers.2/blocks.4/Reshape_6" [label="[1, 2, 7, 2, 7, 384]", style=solid]; +"547 /layers/layers.2/blocks.3/attn/softmax/Softmax" -> "487 /layers/layers.2/blocks.3/attn/MatMul_1" [label="[4, 12, 49, 49]", style=solid]; +"548 /layers/layers.2/blocks.3/attn/proj/Add" -> "558 /layers/layers.2/blocks.3/Reshape_4" [label="[4, 49, 384]", style=solid]; +"549 /layers/layers.2/blocks.1/Reshape_6" -> "559 /layers/layers.2/blocks.1/Slice_4" [label="[1, 14, 14, 384]", style=solid]; +"549 /layers/layers.2/blocks.1/Reshape_6" -> "560 /layers/layers.2/blocks.1/Slice_5" [label="[1, 14, 14, 384]", style=solid]; +"550 /avgpool/GlobalAveragePool" -> "561 /Flatten" [label="[1, 768, 1]", style=solid]; +"551 /layers/layers.3/blocks.1/mlp/fc1/Add" -> "562 /layers/layers.3/blocks.1/mlp/act/Mul_1" [label="[1, 49, 3072]", style=solid]; +"552 /layers/layers.3/blocks.1/Reshape_3" -> "563 /layers/layers.3/blocks.1/Reshape_3/smooth_quant_multiply" [label="[1, 49, 768]", style=solid]; +"553 /layers/layers.3/blocks.0/mlp/act/Mul_1/smooth_quant_multiply" -> "564 /layers/layers.3/blocks.0/mlp/fc2/MatMul" [label="[1, 49, 3072]", style=solid]; +"554 /layers/layers.3/blocks.0/attn/qkv/MatMul" -> "565 /layers/layers.3/blocks.0/attn/qkv/Add" [label="[1, 49, 2304]", style=solid]; +"555 /layers/layers.2/blocks.5/attn/Reshape_1" -> "566 /layers/layers.2/blocks.5/attn/Add_1" [label="[1, 4, 12, 49, 49]", style=solid]; +"556 /layers/layers.2/blocks.5/attn/Reshape_3" -> "567 /layers/layers.2/blocks.5/attn/Reshape_3/smooth_quant_multiply" [label="[4, 49, 384]", style=solid]; +"557 /layers/layers.2/blocks.4/Reshape_6" -> "568 /layers/layers.2/blocks.4/Reshape_7" [label="[1, 14, 14, 384]", style=solid]; +"558 /layers/layers.2/blocks.3/Reshape_4" -> "569 /layers/layers.2/blocks.3/Reshape_5" [label="[4, 7, 7, 384]", style=solid]; +"559 /layers/layers.2/blocks.1/Slice_4" -> "570 /layers/layers.2/blocks.1/Concat_2" [label="[1, 3, 14, 384]", style=solid]; +"560 /layers/layers.2/blocks.1/Slice_5" -> "570 /layers/layers.2/blocks.1/Concat_2" [label="[1, 11, 14, 384]", style=solid]; +"561 /Flatten" -> "571 /Flatten/smooth_quant_multiply" [label="[1, 768]", style=solid]; +"562 /layers/layers.3/blocks.1/mlp/act/Mul_1" -> "572 /layers/layers.3/blocks.1/mlp/act/Mul_1/smooth_quant_multiply" 
[label="[1, 49, 3072]", style=solid]; +"563 /layers/layers.3/blocks.1/Reshape_3/smooth_quant_multiply" -> "573 /layers/layers.3/blocks.1/attn/qkv/MatMul" [label="[1, 49, 768]", style=solid]; +"564 /layers/layers.3/blocks.0/mlp/fc2/MatMul" -> "574 /layers/layers.3/blocks.0/mlp/fc2/Add" [label="[1, 49, 768]", style=solid]; +"565 /layers/layers.3/blocks.0/attn/qkv/Add" -> "575 /layers/layers.3/blocks.0/attn/Reshape" [label="[1, 49, 2304]", style=solid]; +"566 /layers/layers.2/blocks.5/attn/Add_1" -> "576 /layers/layers.2/blocks.5/attn/Reshape_2" [label="[1, 4, 12, 49, 49]", style=solid]; +"567 /layers/layers.2/blocks.5/attn/Reshape_3/smooth_quant_multiply" -> "577 /layers/layers.2/blocks.5/attn/proj/MatMul" [label="[4, 49, 384]", style=solid]; +"568 /layers/layers.2/blocks.4/Reshape_7" -> "265 /layers/layers.2/blocks.4/Add" [label="[1, 196, 384]", style=solid]; +"569 /layers/layers.2/blocks.3/Reshape_5" -> "578 /layers/layers.2/blocks.3/Transpose_1" [label="[1, 2, 2, 7, 7, 384]", style=solid]; +"570 /layers/layers.2/blocks.1/Concat_2" -> "579 /layers/layers.2/blocks.1/Slice_6" [label="[1, 14, 14, 384]", style=solid]; +"570 /layers/layers.2/blocks.1/Concat_2" -> "580 /layers/layers.2/blocks.1/Slice_7" [label="[1, 14, 14, 384]", style=solid]; +"571 /Flatten/smooth_quant_multiply" -> "581 /head/Gemm/WithoutBiases" [label="[1, 768]", style=solid]; +"572 /layers/layers.3/blocks.1/mlp/act/Mul_1/smooth_quant_multiply" -> "582 /layers/layers.3/blocks.1/mlp/fc2/MatMul" [label="[1, 49, 3072]", style=solid]; +"573 /layers/layers.3/blocks.1/attn/qkv/MatMul" -> "583 /layers/layers.3/blocks.1/attn/qkv/Add" [label="[1, 49, 2304]", style=solid]; +"574 /layers/layers.3/blocks.0/mlp/fc2/Add" -> "466 /layers/layers.3/blocks.0/Add_1" [label="[1, 49, 768]", style=solid]; +"575 /layers/layers.3/blocks.0/attn/Reshape" -> "584 /layers/layers.3/blocks.0/attn/Transpose" [label="[1, 49, 3, 24, 32]", style=solid]; +"576 /layers/layers.2/blocks.5/attn/Reshape_2" -> "585 /layers/layers.2/blocks.5/attn/softmax/Softmax" [label="[4, 12, 49, 49]", style=solid]; +"577 /layers/layers.2/blocks.5/attn/proj/MatMul" -> "586 /layers/layers.2/blocks.5/attn/proj/Add" [label="[4, 49, 384]", style=solid]; +"578 /layers/layers.2/blocks.3/Transpose_1" -> "587 /layers/layers.2/blocks.3/Reshape_6" [label="[1, 2, 7, 2, 7, 384]", style=solid]; +"579 /layers/layers.2/blocks.1/Slice_6" -> "588 /layers/layers.2/blocks.1/Concat_3" [label="[1, 14, 3, 384]", style=solid]; +"580 /layers/layers.2/blocks.1/Slice_7" -> "588 /layers/layers.2/blocks.1/Concat_3" [label="[1, 14, 11, 384]", style=solid]; +"581 /head/Gemm/WithoutBiases" -> "589 probs" [label="[1, 1000]", style=solid]; +"582 /layers/layers.3/blocks.1/mlp/fc2/MatMul" -> "590 /layers/layers.3/blocks.1/mlp/fc2/Add" [label="[1, 49, 768]", style=solid]; +"583 /layers/layers.3/blocks.1/attn/qkv/Add" -> "591 /layers/layers.3/blocks.1/attn/Reshape" [label="[1, 49, 2304]", style=solid]; +"584 /layers/layers.3/blocks.0/attn/Transpose" -> "592 /layers/layers.3/blocks.0/attn/Gather" [label="[3, 1, 24, 49, 32]", style=solid]; +"584 /layers/layers.3/blocks.0/attn/Transpose" -> "593 /layers/layers.3/blocks.0/attn/Gather_1" [label="[3, 1, 24, 49, 32]", style=solid]; +"584 /layers/layers.3/blocks.0/attn/Transpose" -> "594 /layers/layers.3/blocks.0/attn/Gather_2" [label="[3, 1, 24, 49, 32]", style=solid]; +"585 /layers/layers.2/blocks.5/attn/softmax/Softmax" -> "534 /layers/layers.2/blocks.5/attn/MatMul_1" [label="[4, 12, 49, 49]", style=solid]; +"586 /layers/layers.2/blocks.5/attn/proj/Add" -> "595 
/layers/layers.2/blocks.5/Reshape_4" [label="[4, 49, 384]", style=solid]; +"587 /layers/layers.2/blocks.3/Reshape_6" -> "596 /layers/layers.2/blocks.3/Slice_4" [label="[1, 14, 14, 384]", style=solid]; +"587 /layers/layers.2/blocks.3/Reshape_6" -> "597 /layers/layers.2/blocks.3/Slice_5" [label="[1, 14, 14, 384]", style=solid]; +"588 /layers/layers.2/blocks.1/Concat_3" -> "598 /layers/layers.2/blocks.1/Reshape_7" [label="[1, 14, 14, 384]", style=solid]; +"589 probs" -> "599 probs/sink_port_0" [label="[1, 1000]", style=solid]; +"590 /layers/layers.3/blocks.1/mlp/fc2/Add" -> "491 /layers/layers.3/blocks.1/Add_1" [label="[1, 49, 768]", style=solid]; +"591 /layers/layers.3/blocks.1/attn/Reshape" -> "600 /layers/layers.3/blocks.1/attn/Transpose" [label="[1, 49, 3, 24, 32]", style=solid]; +"592 /layers/layers.3/blocks.0/attn/Gather" -> "601 /layers/layers.3/blocks.0/attn/Mul" [label="[1, 24, 49, 32]", style=solid]; +"593 /layers/layers.3/blocks.0/attn/Gather_1" -> "602 /layers/layers.3/blocks.0/attn/MatMul" [label="[1, 24, 49, 32]", style=solid]; +"594 /layers/layers.3/blocks.0/attn/Gather_2" -> "603 /layers/layers.3/blocks.0/attn/MatMul_1" [label="[1, 24, 49, 32]", style=solid]; +"595 /layers/layers.2/blocks.5/Reshape_4" -> "604 /layers/layers.2/blocks.5/Reshape_5" [label="[4, 7, 7, 384]", style=solid]; +"596 /layers/layers.2/blocks.3/Slice_4" -> "605 /layers/layers.2/blocks.3/Concat_2" [label="[1, 3, 14, 384]", style=solid]; +"597 /layers/layers.2/blocks.3/Slice_5" -> "605 /layers/layers.2/blocks.3/Concat_2" [label="[1, 11, 14, 384]", style=solid]; +"598 /layers/layers.2/blocks.1/Reshape_7" -> "201 /layers/layers.2/blocks.1/Add" [label="[1, 196, 384]", style=solid]; +"600 /layers/layers.3/blocks.1/attn/Transpose" -> "606 /layers/layers.3/blocks.1/attn/Gather" [label="[3, 1, 24, 49, 32]", style=solid]; +"600 /layers/layers.3/blocks.1/attn/Transpose" -> "607 /layers/layers.3/blocks.1/attn/Gather_1" [label="[3, 1, 24, 49, 32]", style=solid]; +"600 /layers/layers.3/blocks.1/attn/Transpose" -> "608 /layers/layers.3/blocks.1/attn/Gather_2" [label="[3, 1, 24, 49, 32]", style=solid]; +"601 /layers/layers.3/blocks.0/attn/Mul" -> "602 /layers/layers.3/blocks.0/attn/MatMul" [label="[1, 24, 49, 32]", style=solid]; +"602 /layers/layers.3/blocks.0/attn/MatMul" -> "609 /layers/layers.3/blocks.0/attn/Add" [label="[1, 24, 49, 49]", style=solid]; +"603 /layers/layers.3/blocks.0/attn/MatMul_1" -> "610 /layers/layers.3/blocks.0/attn/Transpose_2" [label="[1, 24, 49, 32]", style=solid]; +"604 /layers/layers.2/blocks.5/Reshape_5" -> "611 /layers/layers.2/blocks.5/Transpose_1" [label="[1, 2, 2, 7, 7, 384]", style=solid]; +"605 /layers/layers.2/blocks.3/Concat_2" -> "612 /layers/layers.2/blocks.3/Slice_6" [label="[1, 14, 14, 384]", style=solid]; +"605 /layers/layers.2/blocks.3/Concat_2" -> "613 /layers/layers.2/blocks.3/Slice_7" [label="[1, 14, 14, 384]", style=solid]; +"606 /layers/layers.3/blocks.1/attn/Gather" -> "614 /layers/layers.3/blocks.1/attn/Mul" [label="[1, 24, 49, 32]", style=solid]; +"607 /layers/layers.3/blocks.1/attn/Gather_1" -> "615 /layers/layers.3/blocks.1/attn/MatMul" [label="[1, 24, 49, 32]", style=solid]; +"608 /layers/layers.3/blocks.1/attn/Gather_2" -> "616 /layers/layers.3/blocks.1/attn/MatMul_1" [label="[1, 24, 49, 32]", style=solid]; +"609 /layers/layers.3/blocks.0/attn/Add" -> "617 /layers/layers.3/blocks.0/attn/softmax/Softmax" [label="[1, 24, 49, 49]", style=solid]; +"610 /layers/layers.3/blocks.0/attn/Transpose_2" -> "618 /layers/layers.3/blocks.0/attn/Reshape_1" [label="[1, 49, 24, 32]", 
style=solid]; +"611 /layers/layers.2/blocks.5/Transpose_1" -> "619 /layers/layers.2/blocks.5/Reshape_6" [label="[1, 2, 7, 2, 7, 384]", style=solid]; +"612 /layers/layers.2/blocks.3/Slice_6" -> "620 /layers/layers.2/blocks.3/Concat_3" [label="[1, 14, 3, 384]", style=solid]; +"613 /layers/layers.2/blocks.3/Slice_7" -> "620 /layers/layers.2/blocks.3/Concat_3" [label="[1, 14, 11, 384]", style=solid]; +"614 /layers/layers.3/blocks.1/attn/Mul" -> "615 /layers/layers.3/blocks.1/attn/MatMul" [label="[1, 24, 49, 32]", style=solid]; +"615 /layers/layers.3/blocks.1/attn/MatMul" -> "621 /layers/layers.3/blocks.1/attn/Add" [label="[1, 24, 49, 49]", style=solid]; +"616 /layers/layers.3/blocks.1/attn/MatMul_1" -> "622 /layers/layers.3/blocks.1/attn/Transpose_2" [label="[1, 24, 49, 32]", style=solid]; +"617 /layers/layers.3/blocks.0/attn/softmax/Softmax" -> "603 /layers/layers.3/blocks.0/attn/MatMul_1" [label="[1, 24, 49, 49]", style=solid]; +"618 /layers/layers.3/blocks.0/attn/Reshape_1" -> "623 /layers/layers.3/blocks.0/attn/Reshape_1/smooth_quant_multiply" [label="[1, 49, 768]", style=solid]; +"619 /layers/layers.2/blocks.5/Reshape_6" -> "624 /layers/layers.2/blocks.5/Slice_4" [label="[1, 14, 14, 384]", style=solid]; +"619 /layers/layers.2/blocks.5/Reshape_6" -> "625 /layers/layers.2/blocks.5/Slice_5" [label="[1, 14, 14, 384]", style=solid]; +"620 /layers/layers.2/blocks.3/Concat_3" -> "626 /layers/layers.2/blocks.3/Reshape_7" [label="[1, 14, 14, 384]", style=solid]; +"621 /layers/layers.3/blocks.1/attn/Add" -> "627 /layers/layers.3/blocks.1/attn/softmax/Softmax" [label="[1, 24, 49, 49]", style=solid]; +"622 /layers/layers.3/blocks.1/attn/Transpose_2" -> "628 /layers/layers.3/blocks.1/attn/Reshape_1" [label="[1, 49, 24, 32]", style=solid]; +"623 /layers/layers.3/blocks.0/attn/Reshape_1/smooth_quant_multiply" -> "629 /layers/layers.3/blocks.0/attn/proj/MatMul" [label="[1, 49, 768]", style=solid]; +"624 /layers/layers.2/blocks.5/Slice_4" -> "630 /layers/layers.2/blocks.5/Concat_2" [label="[1, 3, 14, 384]", style=solid]; +"625 /layers/layers.2/blocks.5/Slice_5" -> "630 /layers/layers.2/blocks.5/Concat_2" [label="[1, 11, 14, 384]", style=solid]; +"626 /layers/layers.2/blocks.3/Reshape_7" -> "239 /layers/layers.2/blocks.3/Add" [label="[1, 196, 384]", style=solid]; +"627 /layers/layers.3/blocks.1/attn/softmax/Softmax" -> "616 /layers/layers.3/blocks.1/attn/MatMul_1" [label="[1, 24, 49, 49]", style=solid]; +"628 /layers/layers.3/blocks.1/attn/Reshape_1" -> "631 /layers/layers.3/blocks.1/attn/Reshape_1/smooth_quant_multiply" [label="[1, 49, 768]", style=solid]; +"629 /layers/layers.3/blocks.0/attn/proj/MatMul" -> "632 /layers/layers.3/blocks.0/attn/proj/Add" [label="[1, 49, 768]", style=solid]; +"630 /layers/layers.2/blocks.5/Concat_2" -> "633 /layers/layers.2/blocks.5/Slice_6" [label="[1, 14, 14, 384]", style=solid]; +"630 /layers/layers.2/blocks.5/Concat_2" -> "634 /layers/layers.2/blocks.5/Slice_7" [label="[1, 14, 14, 384]", style=solid]; +"631 /layers/layers.3/blocks.1/attn/Reshape_1/smooth_quant_multiply" -> "635 /layers/layers.3/blocks.1/attn/proj/MatMul" [label="[1, 49, 768]", style=solid]; +"632 /layers/layers.3/blocks.0/attn/proj/Add" -> "636 /layers/layers.3/blocks.0/Reshape_4" [label="[1, 49, 768]", style=solid]; +"633 /layers/layers.2/blocks.5/Slice_6" -> "637 /layers/layers.2/blocks.5/Concat_3" [label="[1, 14, 3, 384]", style=solid]; +"634 /layers/layers.2/blocks.5/Slice_7" -> "637 /layers/layers.2/blocks.5/Concat_3" [label="[1, 14, 11, 384]", style=solid]; +"635 
/layers/layers.3/blocks.1/attn/proj/MatMul" -> "638 /layers/layers.3/blocks.1/attn/proj/Add" [label="[1, 49, 768]", style=solid]; +"636 /layers/layers.3/blocks.0/Reshape_4" -> "639 /layers/layers.3/blocks.0/Reshape_5" [label="[1, 7, 7, 768]", style=solid]; +"637 /layers/layers.2/blocks.5/Concat_3" -> "640 /layers/layers.2/blocks.5/Reshape_7" [label="[1, 14, 14, 384]", style=solid]; +"638 /layers/layers.3/blocks.1/attn/proj/Add" -> "641 /layers/layers.3/blocks.1/Reshape_4" [label="[1, 49, 768]", style=solid]; +"639 /layers/layers.3/blocks.0/Reshape_5" -> "642 /layers/layers.3/blocks.0/Transpose_1" [label="[1, 1, 1, 7, 7, 768]", style=solid]; +"640 /layers/layers.2/blocks.5/Reshape_7" -> "293 /layers/layers.2/blocks.5/Add" [label="[1, 196, 384]", style=solid]; +"641 /layers/layers.3/blocks.1/Reshape_4" -> "643 /layers/layers.3/blocks.1/Reshape_5" [label="[1, 7, 7, 768]", style=solid]; +"642 /layers/layers.3/blocks.0/Transpose_1" -> "644 /layers/layers.3/blocks.0/Reshape_6" [label="[1, 1, 7, 1, 7, 768]", style=solid]; +"643 /layers/layers.3/blocks.1/Reshape_5" -> "645 /layers/layers.3/blocks.1/Transpose_1" [label="[1, 1, 1, 7, 7, 768]", style=solid]; +"644 /layers/layers.3/blocks.0/Reshape_6" -> "646 /layers/layers.3/blocks.0/Reshape_7" [label="[1, 7, 7, 768]", style=solid]; +"645 /layers/layers.3/blocks.1/Transpose_1" -> "647 /layers/layers.3/blocks.1/Reshape_6" [label="[1, 1, 7, 1, 7, 768]", style=solid]; +"646 /layers/layers.3/blocks.0/Reshape_7" -> "456 /layers/layers.3/blocks.0/Add" [label="[1, 49, 768]", style=solid]; +"647 /layers/layers.3/blocks.1/Reshape_6" -> "648 /layers/layers.3/blocks.1/Reshape_7" [label="[1, 7, 7, 768]", style=solid]; +"648 /layers/layers.3/blocks.1/Reshape_7" -> "479 /layers/layers.3/blocks.1/Add" [label="[1, 49, 768]", style=solid]; +"649 Constant_7305" -> "589 probs" [label="[1, 1000]", style=solid]; +"650 head.weight" -> "581 /head/Gemm/WithoutBiases" [label="[1000, 768]", style=solid]; +"651 /Flatten/smooth_quant_const" -> "571 /Flatten/smooth_quant_multiply" [label="[1, 768]", style=solid]; +"652 Constant_2148" -> "561 /Flatten" [label="[2]", style=dashed]; +"653 Constant_6782" -> "550 /avgpool/GlobalAveragePool" [label="[3]", style=dashed]; +"654 Constant_6196" -> "539 ReduceMean_6197" [label="[1]", style=dashed]; +"655 Constant_7304" -> "527 /norm/Add_1" [label="[1, 1, 768]", style=solid]; +"656 Constant_7303" -> "514 /norm/Mul" [label="[1, 1, 768]", style=solid]; +"657 Constant_2123" -> "503 /norm/Div" [label="[1]", style=dashed]; +"658 Constant_6779" -> "582 /layers/layers.3/blocks.1/mlp/fc2/MatMul" [label="[768, 3072]", style=solid]; +"659 /layers/layers.3/blocks.1/mlp/act/Mul_1/smooth_quant_const" -> "572 /layers/layers.3/blocks.1/mlp/act/Mul_1/smooth_quant_multiply" [label="[1, 1, 3072]", style=solid]; +"660 Constant_6774" -> "540 /layers/layers.3/blocks.1/mlp/fc1/MatMul" [label="[3072, 768]", style=solid]; +"661 /layers/layers.3/blocks.1/norm2/Add_1/smooth_quant_const" -> "528 /layers/layers.3/blocks.1/norm2/Add_1/smooth_quant_multiply" [label="[1, 1, 768]", style=solid]; +"662 Constant_7300" -> "515 /layers/layers.3/blocks.1/norm2/Add_1" [label="[1, 1, 768]", style=solid]; +"663 Constant_7299" -> "504 /layers/layers.3/blocks.1/norm2/Mul" [label="[1, 1, 768]", style=solid]; +"664 Constant_2097" -> "492 /layers/layers.3/blocks.1/norm2/Div" [label="[1]", style=dashed]; +"665 /layers/layers.3/blocks.1/Constant_7" -> "648 /layers/layers.3/blocks.1/Reshape_7" [label="[3]", style=dashed]; +"666 /layers/layers.3/blocks.1/Constant_6" -> "647 
/layers/layers.3/blocks.1/Reshape_6" [label="[4]", style=dashed]; +"667 Constant_6767" -> "645 /layers/layers.3/blocks.1/Transpose_1" [label="[6]", style=dashed]; +"668 /layers/layers.3/blocks.1/Constant_5" -> "643 /layers/layers.3/blocks.1/Reshape_5" [label="[6]", style=dashed]; +"669 /layers/layers.3/blocks.1/Constant_4" -> "641 /layers/layers.3/blocks.1/Reshape_4" [label="[4]", style=dashed]; +"670 Constant_6765" -> "635 /layers/layers.3/blocks.1/attn/proj/MatMul" [label="[768, 768]", style=solid]; +"671 /layers/layers.3/blocks.1/attn/Reshape_1/smooth_quant_const" -> "631 /layers/layers.3/blocks.1/attn/Reshape_1/smooth_quant_multiply" [label="[1, 1, 768]", style=solid]; +"672 /layers/layers.3/blocks.1/attn/Constant_2" -> "628 /layers/layers.3/blocks.1/attn/Reshape_1" [label="[3]", style=dashed]; +"673 Constant_2070" -> "622 /layers/layers.3/blocks.1/attn/Transpose_2" [label="[4]", style=dashed]; +"674 Constant_2060" -> "608 /layers/layers.3/blocks.1/attn/Gather_2" [label="[]", style=dashed]; +"675 /patch_embed/Constant" -> "77 /layers/layers.0/blocks.0/attn/Gather_2" [label="[]", style=dashed]; +"675 /patch_embed/Constant" -> "122 /layers/layers.0/blocks.1/attn/Gather_2" [label="[]", style=dashed]; +"675 /patch_embed/Constant" -> "180 /layers/layers.1/blocks.0/attn/Gather_2" [label="[]", style=dashed]; +"675 /patch_embed/Constant" -> "236 /layers/layers.1/blocks.1/attn/Gather_2" [label="[]", style=dashed]; +"675 /patch_embed/Constant" -> "336 /layers/layers.2/blocks.0/attn/Gather_2" [label="[]", style=dashed]; +"675 /patch_embed/Constant" -> "392 /layers/layers.2/blocks.2/attn/Gather_2" [label="[]", style=dashed]; +"675 /patch_embed/Constant" -> "430 /layers/layers.2/blocks.1/attn/Gather_2" [label="[]", style=dashed]; +"675 /patch_embed/Constant" -> "438 /layers/layers.2/blocks.4/attn/Gather_2" [label="[]", style=dashed]; +"675 /patch_embed/Constant" -> "474 /layers/layers.2/blocks.3/attn/Gather_2" [label="[]", style=dashed]; +"675 /patch_embed/Constant" -> "521 /layers/layers.2/blocks.5/attn/Gather_2" [label="[]", style=dashed]; +"675 /patch_embed/Constant" -> "594 /layers/layers.3/blocks.0/attn/Gather_2" [label="[]", style=dashed]; +"675 /patch_embed/Constant" -> "608 /layers/layers.3/blocks.1/attn/Gather_2" [label="[]", style=dashed]; +"676 Constant_2054" -> "600 /layers/layers.3/blocks.1/attn/Transpose" [label="[5]", style=dashed]; +"677 /layers/layers.3/blocks.1/attn/Constant" -> "591 /layers/layers.3/blocks.1/attn/Reshape" [label="[5]", style=dashed]; +"678 Constant_6761" -> "573 /layers/layers.3/blocks.1/attn/qkv/MatMul" [label="[2304, 768]", style=solid]; +"679 /layers/layers.3/blocks.1/Reshape_3/smooth_quant_const" -> "563 /layers/layers.3/blocks.1/Reshape_3/smooth_quant_multiply" [label="[1, 1, 768]", style=solid]; +"680 /layers/layers.3/blocks.1/Constant_3" -> "552 /layers/layers.3/blocks.1/Reshape_3" [label="[3]", style=dashed]; +"681 /layers/layers.3/blocks.1/Constant_2" -> "541 /layers/layers.3/blocks.1/Reshape_2" [label="[4]", style=dashed]; +"682 Constant_6755" -> "529 /layers/layers.3/blocks.1/Transpose" [label="[6]", style=dashed]; +"683 /layers/layers.3/blocks.1/Constant_1" -> "516 /layers/layers.3/blocks.1/Reshape_1" [label="[6]", style=dashed]; +"684 Constant_7295" -> "505 /layers/layers.3/blocks.1/norm1/Add_1" [label="[1, 1, 768]", style=solid]; +"685 Constant_7294" -> "493 /layers/layers.3/blocks.1/norm1/Mul" [label="[1, 1, 768]", style=solid]; +"686 Constant_2017" -> "480 /layers/layers.3/blocks.1/norm1/Div" [label="[1]", style=dashed]; +"687 Constant_6752" -> 
"564 /layers/layers.3/blocks.0/mlp/fc2/MatMul" [label="[768, 3072]", style=solid]; +"688 /layers/layers.3/blocks.0/mlp/act/Mul_1/smooth_quant_const" -> "553 /layers/layers.3/blocks.0/mlp/act/Mul_1/smooth_quant_multiply" [label="[1, 1, 3072]", style=solid]; +"689 Constant_6747" -> "517 /layers/layers.3/blocks.0/mlp/fc1/MatMul" [label="[3072, 768]", style=solid]; +"690 /layers/layers.3/blocks.0/norm2/Add_1/smooth_quant_const" -> "506 /layers/layers.3/blocks.0/norm2/Add_1/smooth_quant_multiply" [label="[1, 1, 768]", style=solid]; +"691 Constant_7291" -> "494 /layers/layers.3/blocks.0/norm2/Add_1" [label="[1, 1, 768]", style=solid]; +"692 Constant_7290" -> "481 /layers/layers.3/blocks.0/norm2/Mul" [label="[1, 1, 768]", style=solid]; +"693 Constant_1991" -> "467 /layers/layers.3/blocks.0/norm2/Div" [label="[1]", style=dashed]; +"694 /layers/layers.3/blocks.0/Constant_7" -> "646 /layers/layers.3/blocks.0/Reshape_7" [label="[3]", style=dashed]; +"695 /layers/layers.3/blocks.0/Constant_6" -> "644 /layers/layers.3/blocks.0/Reshape_6" [label="[4]", style=dashed]; +"696 Constant_6740" -> "642 /layers/layers.3/blocks.0/Transpose_1" [label="[6]", style=dashed]; +"697 /layers/layers.3/blocks.0/Constant_5" -> "639 /layers/layers.3/blocks.0/Reshape_5" [label="[6]", style=dashed]; +"698 /layers/layers.3/blocks.0/Constant_4" -> "636 /layers/layers.3/blocks.0/Reshape_4" [label="[4]", style=dashed]; +"699 Constant_6738" -> "629 /layers/layers.3/blocks.0/attn/proj/MatMul" [label="[768, 768]", style=solid]; +"700 /layers/layers.3/blocks.0/attn/Reshape_1/smooth_quant_const" -> "623 /layers/layers.3/blocks.0/attn/Reshape_1/smooth_quant_multiply" [label="[1, 1, 768]", style=solid]; +"701 /layers/layers.3/blocks.0/attn/Constant_2" -> "618 /layers/layers.3/blocks.0/attn/Reshape_1" [label="[3]", style=dashed]; +"702 Constant_1964" -> "610 /layers/layers.3/blocks.0/attn/Transpose_2" [label="[4]", style=dashed]; +"703 Constant_1954" -> "594 /layers/layers.3/blocks.0/attn/Gather_2" [label="[]", style=dashed]; +"704 Constant_1948" -> "584 /layers/layers.3/blocks.0/attn/Transpose" [label="[5]", style=dashed]; +"705 /layers/layers.3/blocks.0/attn/Constant" -> "575 /layers/layers.3/blocks.0/attn/Reshape" [label="[5]", style=dashed]; +"706 Constant_6734" -> "554 /layers/layers.3/blocks.0/attn/qkv/MatMul" [label="[2304, 768]", style=solid]; +"707 /layers/layers.3/blocks.0/Reshape_3/smooth_quant_const" -> "543 /layers/layers.3/blocks.0/Reshape_3/smooth_quant_multiply" [label="[1, 1, 768]", style=solid]; +"708 /layers/layers.3/blocks.0/Constant_3" -> "531 /layers/layers.3/blocks.0/Reshape_3" [label="[3]", style=dashed]; +"709 /layers/layers.3/blocks.0/Constant_2" -> "518 /layers/layers.3/blocks.0/Reshape_2" [label="[4]", style=dashed]; +"710 Constant_6728" -> "507 /layers/layers.3/blocks.0/Transpose" [label="[6]", style=dashed]; +"711 /layers/layers.3/blocks.0/Constant_1" -> "495 /layers/layers.3/blocks.0/Reshape_1" [label="[6]", style=dashed]; +"712 Constant_7286" -> "482 /layers/layers.3/blocks.0/norm1/Add_1" [label="[1, 1, 768]", style=solid]; +"713 Constant_7285" -> "468 /layers/layers.3/blocks.0/norm1/Mul" [label="[1, 1, 768]", style=solid]; +"714 Constant_1911" -> "457 /layers/layers.3/blocks.0/norm1/Div" [label="[1]", style=dashed]; +"715 Constant_6725" -> "446 /layers/layers.2/downsample/reduction/MatMul" [label="[768, 1536]", style=solid]; +"716 /layers/layers.2/downsample/norm/Add_1/smooth_quant_const" -> "433 /layers/layers.2/downsample/norm/Add_1/smooth_quant_multiply" [label="[1, 1, 1536]", style=solid]; +"717 
Constant_7284" -> "421 /layers/layers.2/downsample/norm/Add_1" [label="[1, 1, 1536]", style=solid]; +"718 Constant_7283" -> "409 /layers/layers.2/downsample/norm/Mul" [label="[1, 1, 1536]", style=solid]; +"719 Constant_1897" -> "397 /layers/layers.2/downsample/norm/Div" [label="[1]", style=dashed]; +"720 /layers/layers.2/downsample/Constant_25" -> "382 /layers/layers.2/downsample/Reshape_1" [label="[3]", style=dashed]; +"721 Constant_5911" -> "356 /layers/layers.2/downsample/Slice_5" [label="[3]", style=dashed]; +"722 Constant_5908" -> "356 /layers/layers.2/downsample/Slice_5" [label="[3]", style=dashed]; +"723 Constant_5905" -> "356 /layers/layers.2/downsample/Slice_5" [label="[3]", style=dashed]; +"724 Constant_5875" -> "339 /layers/layers.2/downsample/Slice_2" [label="[2]", style=dashed]; +"725 Constant_5872" -> "339 /layers/layers.2/downsample/Slice_2" [label="[2]", style=dashed]; +"726 Constant_5869" -> "339 /layers/layers.2/downsample/Slice_2" [label="[2]", style=dashed]; +"727 /layers/layers.2/downsample/Constant" -> "322 /layers/layers.2/downsample/Reshape" [label="[4]", style=dashed]; +"728 Constant_6720" -> "422 /layers/layers.2/blocks.5/mlp/fc2/MatMul" [label="[384, 1536]", style=solid]; +"729 /layers/layers.2/blocks.5/mlp/act/Mul_1/smooth_quant_const" -> "410 /layers/layers.2/blocks.5/mlp/act/Mul_1/smooth_quant_multiply" [label="[1, 1, 1536]", style=solid]; +"730 Constant_6715" -> "371 /layers/layers.2/blocks.5/mlp/fc1/MatMul" [label="[1536, 384]", style=solid]; +"731 /layers/layers.2/blocks.5/norm2/Add_1/smooth_quant_const" -> "357 /layers/layers.2/blocks.5/norm2/Add_1/smooth_quant_multiply" [label="[1, 1, 384]", style=solid]; +"732 Constant_7280" -> "340 /layers/layers.2/blocks.5/norm2/Add_1" [label="[1, 1, 384]", style=solid]; +"733 Constant_7279" -> "323 /layers/layers.2/blocks.5/norm2/Mul" [label="[1, 1, 384]", style=solid]; +"734 Constant_1832" -> "309 /layers/layers.2/blocks.5/norm2/Div" [label="[1]", style=dashed]; +"735 /layers/layers.2/blocks.5/Constant_31" -> "640 /layers/layers.2/blocks.5/Reshape_7" [label="[3]", style=dashed]; +"736 Constant_5839" -> "634 /layers/layers.2/blocks.5/Slice_7" [label="[3]", style=dashed]; +"737 Constant_5836" -> "634 /layers/layers.2/blocks.5/Slice_7" [label="[3]", style=dashed]; +"738 Constant_5833" -> "634 /layers/layers.2/blocks.5/Slice_7" [label="[3]", style=dashed]; +"739 Constant_5815" -> "625 /layers/layers.2/blocks.5/Slice_5" [label="[2]", style=dashed]; +"740 Constant_5812" -> "625 /layers/layers.2/blocks.5/Slice_5" [label="[2]", style=dashed]; +"741 Constant_5809" -> "625 /layers/layers.2/blocks.5/Slice_5" [label="[2]", style=dashed]; +"742 /layers/layers.2/blocks.5/Constant_18" -> "619 /layers/layers.2/blocks.5/Reshape_6" [label="[4]", style=dashed]; +"743 Constant_1779" -> "611 /layers/layers.2/blocks.5/Transpose_1" [label="[6]", style=dashed]; +"744 /layers/layers.2/blocks.5/Constant_17" -> "604 /layers/layers.2/blocks.5/Reshape_5" [label="[6]", style=dashed]; +"745 /layers/layers.2/blocks.5/Constant_16" -> "595 /layers/layers.2/blocks.5/Reshape_4" [label="[4]", style=dashed]; +"746 Constant_6710" -> "577 /layers/layers.2/blocks.5/attn/proj/MatMul" [label="[384, 384]", style=solid]; +"747 /layers/layers.2/blocks.5/attn/Reshape_3/smooth_quant_const" -> "567 /layers/layers.2/blocks.5/attn/Reshape_3/smooth_quant_multiply" [label="[1, 1, 384]", style=solid]; +"748 /layers/layers.2/blocks.5/attn/Constant_4" -> "556 /layers/layers.2/blocks.5/attn/Reshape_3" [label="[3]", style=dashed]; +"749 Constant_1763" -> "545 
/layers/layers.2/blocks.5/attn/Transpose_2" [label="[4]", style=dashed]; +"750 Constant_1744" -> "521 /layers/layers.2/blocks.5/attn/Gather_2" [label="[]", style=dashed]; +"751 Constant_1738" -> "508 /layers/layers.2/blocks.5/attn/Transpose" [label="[5]", style=dashed]; +"752 /layers/layers.2/blocks.5/attn/Constant" -> "496 /layers/layers.2/blocks.5/attn/Reshape" [label="[5]", style=dashed]; +"753 Constant_6706" -> "469 /layers/layers.2/blocks.5/attn/qkv/MatMul" [label="[1152, 384]", style=solid]; +"754 /layers/layers.2/blocks.5/Reshape_3/smooth_quant_const" -> "458 /layers/layers.2/blocks.5/Reshape_3/smooth_quant_multiply" [label="[1, 1, 384]", style=solid]; +"755 /layers/layers.2/blocks.5/Constant_15" -> "447 /layers/layers.2/blocks.5/Reshape_3" [label="[3]", style=dashed]; +"756 /layers/layers.2/blocks.5/Constant_14" -> "435 /layers/layers.2/blocks.5/Reshape_2" [label="[4]", style=dashed]; +"757 Constant_1722" -> "423 /layers/layers.2/blocks.5/Transpose" [label="[6]", style=dashed]; +"758 /layers/layers.2/blocks.5/Constant_13" -> "411 /layers/layers.2/blocks.5/Reshape_1" [label="[6]", style=dashed]; +"759 Constant_5791" -> "385 /layers/layers.2/blocks.5/Slice_3" [label="[3]", style=dashed]; +"760 Constant_5788" -> "385 /layers/layers.2/blocks.5/Slice_3" [label="[3]", style=dashed]; +"761 Constant_5785" -> "385 /layers/layers.2/blocks.5/Slice_3" [label="[3]", style=dashed]; +"762 Constant_5767" -> "359 /layers/layers.2/blocks.5/Slice_1" [label="[2]", style=dashed]; +"763 Constant_5764" -> "359 /layers/layers.2/blocks.5/Slice_1" [label="[2]", style=dashed]; +"764 Constant_5761" -> "359 /layers/layers.2/blocks.5/Slice_1" [label="[2]", style=dashed]; +"765 /layers/layers.2/blocks.5/Constant" -> "341 /layers/layers.2/blocks.5/Reshape" [label="[4]", style=dashed]; +"766 Constant_7275" -> "324 /layers/layers.2/blocks.5/norm1/Add_1" [label="[1, 1, 384]", style=solid]; +"767 Constant_7274" -> "310 /layers/layers.2/blocks.5/norm1/Mul" [label="[1, 1, 384]", style=solid]; +"768 Constant_1659" -> "294 /layers/layers.2/blocks.5/norm1/Div" [label="[1]", style=dashed]; +"769 Constant_6701" -> "400 /layers/layers.2/blocks.4/mlp/fc2/MatMul" [label="[384, 1536]", style=solid]; +"770 /layers/layers.2/blocks.4/mlp/act/Mul_1/smooth_quant_const" -> "386 /layers/layers.2/blocks.4/mlp/act/Mul_1/smooth_quant_multiply" [label="[1, 1, 1536]", style=solid]; +"771 Constant_6696" -> "342 /layers/layers.2/blocks.4/mlp/fc1/MatMul" [label="[1536, 384]", style=solid]; +"772 /layers/layers.2/blocks.4/norm2/Add_1/smooth_quant_const" -> "325 /layers/layers.2/blocks.4/norm2/Add_1/smooth_quant_multiply" [label="[1, 1, 384]", style=solid]; +"773 Constant_7271" -> "311 /layers/layers.2/blocks.4/norm2/Add_1" [label="[1, 1, 384]", style=solid]; +"774 Constant_7270" -> "295 /layers/layers.2/blocks.4/norm2/Mul" [label="[1, 1, 384]", style=solid]; +"775 Constant_1633" -> "280 /layers/layers.2/blocks.4/norm2/Div" [label="[1]", style=dashed]; +"776 /layers/layers.2/blocks.4/Constant_7" -> "568 /layers/layers.2/blocks.4/Reshape_7" [label="[3]", style=dashed]; +"777 /layers/layers.2/blocks.4/Constant_6" -> "557 /layers/layers.2/blocks.4/Reshape_6" [label="[4]", style=dashed]; +"778 Constant_1622" -> "546 /layers/layers.2/blocks.4/Transpose_1" [label="[6]", style=dashed]; +"779 /layers/layers.2/blocks.4/Constant_5" -> "535 /layers/layers.2/blocks.4/Reshape_5" [label="[6]", style=dashed]; +"780 /layers/layers.2/blocks.4/Constant_4" -> "522 /layers/layers.2/blocks.4/Reshape_4" [label="[4]", style=dashed]; +"781 Constant_6691" -> "497 
/layers/layers.2/blocks.4/attn/proj/MatMul" [label="[384, 384]", style=solid]; +"782 /layers/layers.2/blocks.4/attn/Reshape_1/smooth_quant_const" -> "484 /layers/layers.2/blocks.4/attn/Reshape_1/smooth_quant_multiply" [label="[1, 1, 384]", style=solid]; +"783 /layers/layers.2/blocks.4/attn/Constant_2" -> "471 /layers/layers.2/blocks.4/attn/Reshape_1" [label="[3]", style=dashed]; +"784 Constant_1606" -> "460 /layers/layers.2/blocks.4/attn/Transpose_2" [label="[4]", style=dashed]; +"785 Constant_1596" -> "438 /layers/layers.2/blocks.4/attn/Gather_2" [label="[]", style=dashed]; +"786 Constant_1590" -> "424 /layers/layers.2/blocks.4/attn/Transpose" [label="[5]", style=dashed]; +"787 /layers/layers.2/blocks.4/attn/Constant" -> "413 /layers/layers.2/blocks.4/attn/Reshape" [label="[5]", style=dashed]; +"788 Constant_6687" -> "387 /layers/layers.2/blocks.4/attn/qkv/MatMul" [label="[1152, 384]", style=solid]; +"789 /layers/layers.2/blocks.4/Reshape_3/smooth_quant_const" -> "374 /layers/layers.2/blocks.4/Reshape_3/smooth_quant_multiply" [label="[1, 1, 384]", style=solid]; +"790 /layers/layers.2/blocks.4/Constant_3" -> "361 /layers/layers.2/blocks.4/Reshape_3" [label="[3]", style=dashed]; +"791 /layers/layers.2/blocks.4/Constant_2" -> "343 /layers/layers.2/blocks.4/Reshape_2" [label="[4]", style=dashed]; +"792 Constant_1574" -> "326 /layers/layers.2/blocks.4/Transpose" [label="[6]", style=dashed]; +"793 /layers/layers.2/blocks.4/Constant_1" -> "312 /layers/layers.2/blocks.4/Reshape_1" [label="[6]", style=dashed]; +"794 Constant_7266" -> "296 /layers/layers.2/blocks.4/norm1/Add_1" [label="[1, 1, 384]", style=solid]; +"795 Constant_7265" -> "281 /layers/layers.2/blocks.4/norm1/Mul" [label="[1, 1, 384]", style=solid]; +"796 Constant_1553" -> "266 /layers/layers.2/blocks.4/norm1/Div" [label="[1]", style=dashed]; +"797 Constant_6682" -> "375 /layers/layers.2/blocks.3/mlp/fc2/MatMul" [label="[384, 1536]", style=solid]; +"798 /layers/layers.2/blocks.3/mlp/act/Mul_1/smooth_quant_const" -> "362 /layers/layers.2/blocks.3/mlp/act/Mul_1/smooth_quant_multiply" [label="[1, 1, 1536]", style=solid]; +"799 Constant_6677" -> "313 /layers/layers.2/blocks.3/mlp/fc1/MatMul" [label="[1536, 384]", style=solid]; +"800 /layers/layers.2/blocks.3/norm2/Add_1/smooth_quant_const" -> "297 /layers/layers.2/blocks.3/norm2/Add_1/smooth_quant_multiply" [label="[1, 1, 384]", style=solid]; +"801 Constant_7262" -> "282 /layers/layers.2/blocks.3/norm2/Add_1" [label="[1, 1, 384]", style=solid]; +"802 Constant_7261" -> "267 /layers/layers.2/blocks.3/norm2/Mul" [label="[1, 1, 384]", style=solid]; +"803 Constant_1527" -> "254 /layers/layers.2/blocks.3/norm2/Div" [label="[1]", style=dashed]; +"804 /layers/layers.2/blocks.3/Constant_31" -> "626 /layers/layers.2/blocks.3/Reshape_7" [label="[3]", style=dashed]; +"805 Constant_5743" -> "613 /layers/layers.2/blocks.3/Slice_7" [label="[3]", style=dashed]; +"806 Constant_5740" -> "613 /layers/layers.2/blocks.3/Slice_7" [label="[3]", style=dashed]; +"807 Constant_5737" -> "613 /layers/layers.2/blocks.3/Slice_7" [label="[3]", style=dashed]; +"808 Constant_5719" -> "597 /layers/layers.2/blocks.3/Slice_5" [label="[2]", style=dashed]; +"809 Constant_5716" -> "597 /layers/layers.2/blocks.3/Slice_5" [label="[2]", style=dashed]; +"810 Constant_5713" -> "597 /layers/layers.2/blocks.3/Slice_5" [label="[2]", style=dashed]; +"811 /layers/layers.2/blocks.3/Constant_18" -> "587 /layers/layers.2/blocks.3/Reshape_6" [label="[4]", style=dashed]; +"812 Constant_1474" -> "578 /layers/layers.2/blocks.3/Transpose_1" 
[label="[6]", style=dashed]; +"813 /layers/layers.2/blocks.3/Constant_17" -> "569 /layers/layers.2/blocks.3/Reshape_5" [label="[6]", style=dashed]; +"814 /layers/layers.2/blocks.3/Constant_16" -> "558 /layers/layers.2/blocks.3/Reshape_4" [label="[4]", style=dashed]; +"815 Constant_6672" -> "537 /layers/layers.2/blocks.3/attn/proj/MatMul" [label="[384, 384]", style=solid]; +"816 /layers/layers.2/blocks.3/attn/Reshape_3/smooth_quant_const" -> "524 /layers/layers.2/blocks.3/attn/Reshape_3/smooth_quant_multiply" [label="[1, 1, 384]", style=solid]; +"817 /layers/layers.2/blocks.3/attn/Constant_4" -> "511 /layers/layers.2/blocks.3/attn/Reshape_3" [label="[3]", style=dashed]; +"818 Constant_1458" -> "499 /layers/layers.2/blocks.3/attn/Transpose_2" [label="[4]", style=dashed]; +"819 Constant_1439" -> "474 /layers/layers.2/blocks.3/attn/Gather_2" [label="[]", style=dashed]; +"820 Constant_1433" -> "461 /layers/layers.2/blocks.3/attn/Transpose" [label="[5]", style=dashed]; +"821 /layers/layers.2/blocks.3/attn/Constant" -> "451 /layers/layers.2/blocks.3/attn/Reshape" [label="[5]", style=dashed]; +"822 Constant_6668" -> "425 /layers/layers.2/blocks.3/attn/qkv/MatMul" [label="[1152, 384]", style=solid]; +"823 /layers/layers.2/blocks.3/Reshape_3/smooth_quant_const" -> "414 /layers/layers.2/blocks.3/Reshape_3/smooth_quant_multiply" [label="[1, 1, 384]", style=solid]; +"824 /layers/layers.2/blocks.3/Constant_15" -> "402 /layers/layers.2/blocks.3/Reshape_3" [label="[3]", style=dashed]; +"825 /layers/layers.2/blocks.3/Constant_14" -> "389 /layers/layers.2/blocks.3/Reshape_2" [label="[4]", style=dashed]; +"826 Constant_1417" -> "376 /layers/layers.2/blocks.3/Transpose" [label="[6]", style=dashed]; +"827 /layers/layers.2/blocks.3/Constant_13" -> "363 /layers/layers.2/blocks.3/Reshape_1" [label="[6]", style=dashed]; +"828 Constant_5695" -> "329 /layers/layers.2/blocks.3/Slice_3" [label="[3]", style=dashed]; +"829 Constant_5692" -> "329 /layers/layers.2/blocks.3/Slice_3" [label="[3]", style=dashed]; +"830 Constant_5689" -> "329 /layers/layers.2/blocks.3/Slice_3" [label="[3]", style=dashed]; +"831 Constant_5671" -> "299 /layers/layers.2/blocks.3/Slice_1" [label="[2]", style=dashed]; +"832 Constant_5668" -> "299 /layers/layers.2/blocks.3/Slice_1" [label="[2]", style=dashed]; +"833 Constant_5665" -> "299 /layers/layers.2/blocks.3/Slice_1" [label="[2]", style=dashed]; +"834 /layers/layers.2/blocks.3/Constant" -> "283 /layers/layers.2/blocks.3/Reshape" [label="[4]", style=dashed]; +"835 Constant_7257" -> "268 /layers/layers.2/blocks.3/norm1/Add_1" [label="[1, 1, 384]", style=solid]; +"836 Constant_7256" -> "255 /layers/layers.2/blocks.3/norm1/Mul" [label="[1, 1, 384]", style=solid]; +"837 Constant_1354" -> "240 /layers/layers.2/blocks.3/norm1/Div" [label="[1]", style=dashed]; +"838 Constant_6663" -> "346 /layers/layers.2/blocks.2/mlp/fc2/MatMul" [label="[384, 1536]", style=solid]; +"839 /layers/layers.2/blocks.2/mlp/act/Mul_1/smooth_quant_const" -> "330 /layers/layers.2/blocks.2/mlp/act/Mul_1/smooth_quant_multiply" [label="[1, 1, 1536]", style=solid]; +"840 Constant_6658" -> "284 /layers/layers.2/blocks.2/mlp/fc1/MatMul" [label="[1536, 384]", style=solid]; +"841 /layers/layers.2/blocks.2/norm2/Add_1/smooth_quant_const" -> "269 /layers/layers.2/blocks.2/norm2/Add_1/smooth_quant_multiply" [label="[1, 1, 384]", style=solid]; +"842 Constant_7253" -> "256 /layers/layers.2/blocks.2/norm2/Add_1" [label="[1, 1, 384]", style=solid]; +"843 Constant_7252" -> "241 /layers/layers.2/blocks.2/norm2/Mul" [label="[1, 1, 384]", 
style=solid]; +"844 Constant_1328" -> "228 /layers/layers.2/blocks.2/norm2/Div" [label="[1]", style=dashed]; +"845 /layers/layers.2/blocks.2/Constant_7" -> "525 /layers/layers.2/blocks.2/Reshape_7" [label="[3]", style=dashed]; +"846 /layers/layers.2/blocks.2/Constant_6" -> "512 /layers/layers.2/blocks.2/Reshape_6" [label="[4]", style=dashed]; +"847 Constant_1317" -> "500 /layers/layers.2/blocks.2/Transpose_1" [label="[6]", style=dashed]; +"848 /layers/layers.2/blocks.2/Constant_5" -> "488 /layers/layers.2/blocks.2/Reshape_5" [label="[6]", style=dashed]; +"849 /layers/layers.2/blocks.2/Constant_4" -> "475 /layers/layers.2/blocks.2/Reshape_4" [label="[4]", style=dashed]; +"850 Constant_6653" -> "452 /layers/layers.2/blocks.2/attn/proj/MatMul" [label="[384, 384]", style=solid]; +"851 /layers/layers.2/blocks.2/attn/Reshape_1/smooth_quant_const" -> "440 /layers/layers.2/blocks.2/attn/Reshape_1/smooth_quant_multiply" [label="[1, 1, 384]", style=solid]; +"852 /layers/layers.2/blocks.2/attn/Constant_2" -> "427 /layers/layers.2/blocks.2/attn/Reshape_1" [label="[3]", style=dashed]; +"853 Constant_1301" -> "416 /layers/layers.2/blocks.2/attn/Transpose_2" [label="[4]", style=dashed]; +"854 Constant_1291" -> "392 /layers/layers.2/blocks.2/attn/Gather_2" [label="[]", style=dashed]; +"855 Constant_1285" -> "377 /layers/layers.2/blocks.2/attn/Transpose" [label="[5]", style=dashed]; +"856 /layers/layers.2/blocks.2/attn/Constant" -> "365 /layers/layers.2/blocks.2/attn/Reshape" [label="[5]", style=dashed]; +"857 Constant_6649" -> "331 /layers/layers.2/blocks.2/attn/qkv/MatMul" [label="[1152, 384]", style=solid]; +"858 /layers/layers.2/blocks.2/Reshape_3/smooth_quant_const" -> "316 /layers/layers.2/blocks.2/Reshape_3/smooth_quant_multiply" [label="[1, 1, 384]", style=solid]; +"859 /layers/layers.2/blocks.2/Constant_3" -> "301 /layers/layers.2/blocks.2/Reshape_3" [label="[3]", style=dashed]; +"860 /layers/layers.2/blocks.2/Constant_2" -> "285 /layers/layers.2/blocks.2/Reshape_2" [label="[4]", style=dashed]; +"861 Constant_1269" -> "270 /layers/layers.2/blocks.2/Transpose" [label="[6]", style=dashed]; +"862 /layers/layers.2/blocks.2/Constant_1" -> "257 /layers/layers.2/blocks.2/Reshape_1" [label="[6]", style=dashed]; +"863 Constant_7248" -> "242 /layers/layers.2/blocks.2/norm1/Add_1" [label="[1, 1, 384]", style=solid]; +"864 Constant_7247" -> "229 /layers/layers.2/blocks.2/norm1/Mul" [label="[1, 1, 384]", style=solid]; +"865 Constant_1248" -> "218 /layers/layers.2/blocks.2/norm1/Div" [label="[1]", style=dashed]; +"866 Constant_6644" -> "317 /layers/layers.2/blocks.1/mlp/fc2/MatMul" [label="[384, 1536]", style=solid]; +"867 /layers/layers.2/blocks.1/mlp/act/Mul_1/smooth_quant_const" -> "302 /layers/layers.2/blocks.1/mlp/act/Mul_1/smooth_quant_multiply" [label="[1, 1, 1536]", style=solid]; +"868 Constant_6639" -> "258 /layers/layers.2/blocks.1/mlp/fc1/MatMul" [label="[1536, 384]", style=solid]; +"869 /layers/layers.2/blocks.1/norm2/Add_1/smooth_quant_const" -> "243 /layers/layers.2/blocks.1/norm2/Add_1/smooth_quant_multiply" [label="[1, 1, 384]", style=solid]; +"870 Constant_7244" -> "230 /layers/layers.2/blocks.1/norm2/Add_1" [label="[1, 1, 384]", style=solid]; +"871 Constant_7243" -> "219 /layers/layers.2/blocks.1/norm2/Mul" [label="[1, 1, 384]", style=solid]; +"872 Constant_1222" -> "210 /layers/layers.2/blocks.1/norm2/Div" [label="[1]", style=dashed]; +"873 /layers/layers.2/blocks.1/Constant_31" -> "598 /layers/layers.2/blocks.1/Reshape_7" [label="[3]", style=dashed]; +"874 Constant_5647" -> "580 
/layers/layers.2/blocks.1/Slice_7" [label="[3]", style=dashed]; +"875 Constant_5644" -> "580 /layers/layers.2/blocks.1/Slice_7" [label="[3]", style=dashed]; +"876 Constant_5641" -> "580 /layers/layers.2/blocks.1/Slice_7" [label="[3]", style=dashed]; +"877 Constant_5623" -> "560 /layers/layers.2/blocks.1/Slice_5" [label="[2]", style=dashed]; +"878 Constant_5620" -> "560 /layers/layers.2/blocks.1/Slice_5" [label="[2]", style=dashed]; +"879 Constant_5617" -> "560 /layers/layers.2/blocks.1/Slice_5" [label="[2]", style=dashed]; +"880 /layers/layers.2/blocks.1/Constant_18" -> "549 /layers/layers.2/blocks.1/Reshape_6" [label="[4]", style=dashed]; +"881 Constant_1169" -> "538 /layers/layers.2/blocks.1/Transpose_1" [label="[6]", style=dashed]; +"882 /layers/layers.2/blocks.1/Constant_17" -> "526 /layers/layers.2/blocks.1/Reshape_5" [label="[6]", style=dashed]; +"883 /layers/layers.2/blocks.1/Constant_16" -> "513 /layers/layers.2/blocks.1/Reshape_4" [label="[4]", style=dashed]; +"884 Constant_6634" -> "490 /layers/layers.2/blocks.1/attn/proj/MatMul" [label="[384, 384]", style=solid]; +"885 /layers/layers.2/blocks.1/attn/Reshape_3/smooth_quant_const" -> "477 /layers/layers.2/blocks.1/attn/Reshape_3/smooth_quant_multiply" [label="[1, 1, 384]", style=solid]; +"886 /layers/layers.2/blocks.1/attn/Constant_4" -> "464 /layers/layers.2/blocks.1/attn/Reshape_3" [label="[3]", style=dashed]; +"887 Constant_1153" -> "454 /layers/layers.2/blocks.1/attn/Transpose_2" [label="[4]", style=dashed]; +"888 Constant_1134" -> "430 /layers/layers.2/blocks.1/attn/Gather_2" [label="[]", style=dashed]; +"889 Constant_1128" -> "417 /layers/layers.2/blocks.1/attn/Transpose" [label="[5]", style=dashed]; +"890 /layers/layers.2/blocks.1/attn/Constant" -> "406 /layers/layers.2/blocks.1/attn/Reshape" [label="[5]", style=dashed]; +"891 Constant_6630" -> "378 /layers/layers.2/blocks.1/attn/qkv/MatMul" [label="[1152, 384]", style=solid]; +"892 /layers/layers.2/blocks.1/Reshape_3/smooth_quant_const" -> "366 /layers/layers.2/blocks.1/Reshape_3/smooth_quant_multiply" [label="[1, 1, 384]", style=solid]; +"893 /layers/layers.2/blocks.1/Constant_15" -> "348 /layers/layers.2/blocks.1/Reshape_3" [label="[3]", style=dashed]; +"894 /layers/layers.2/blocks.1/Constant_14" -> "333 /layers/layers.2/blocks.1/Reshape_2" [label="[4]", style=dashed]; +"895 Constant_1112" -> "318 /layers/layers.2/blocks.1/Transpose" [label="[6]", style=dashed]; +"896 /layers/layers.2/blocks.1/Constant_13" -> "303 /layers/layers.2/blocks.1/Reshape_1" [label="[6]", style=dashed]; +"897 Constant_5599" -> "273 /layers/layers.2/blocks.1/Slice_3" [label="[3]", style=dashed]; +"898 Constant_5596" -> "273 /layers/layers.2/blocks.1/Slice_3" [label="[3]", style=dashed]; +"899 Constant_5593" -> "273 /layers/layers.2/blocks.1/Slice_3" [label="[3]", style=dashed]; +"900 Constant_5575" -> "245 /layers/layers.2/blocks.1/Slice_1" [label="[2]", style=dashed]; +"901 Constant_5572" -> "245 /layers/layers.2/blocks.1/Slice_1" [label="[2]", style=dashed]; +"902 Constant_5569" -> "245 /layers/layers.2/blocks.1/Slice_1" [label="[2]", style=dashed]; +"903 /layers/layers.2/blocks.1/Constant" -> "231 /layers/layers.2/blocks.1/Reshape" [label="[4]", style=dashed]; +"904 Constant_7239" -> "220 /layers/layers.2/blocks.1/norm1/Add_1" [label="[1, 1, 384]", style=solid]; +"905 Constant_7238" -> "211 /layers/layers.2/blocks.1/norm1/Mul" [label="[1, 1, 384]", style=solid]; +"906 Constant_1049" -> "202 /layers/layers.2/blocks.1/norm1/Div" [label="[1]", style=dashed]; +"907 Constant_6625" -> "288 
/layers/layers.2/blocks.0/mlp/fc2/MatMul" [label="[384, 1536]", style=solid]; +"908 /layers/layers.2/blocks.0/mlp/act/Mul_1/smooth_quant_const" -> "274 /layers/layers.2/blocks.0/mlp/act/Mul_1/smooth_quant_multiply" [label="[1, 1, 1536]", style=solid]; +"909 Constant_6620" -> "232 /layers/layers.2/blocks.0/mlp/fc1/MatMul" [label="[1536, 384]", style=solid]; +"910 /layers/layers.2/blocks.0/norm2/Add_1/smooth_quant_const" -> "221 /layers/layers.2/blocks.0/norm2/Add_1/smooth_quant_multiply" [label="[1, 1, 384]", style=solid]; +"911 Constant_7235" -> "212 /layers/layers.2/blocks.0/norm2/Add_1" [label="[1, 1, 384]", style=solid]; +"912 Constant_7234" -> "203 /layers/layers.2/blocks.0/norm2/Mul" [label="[1, 1, 384]", style=solid]; +"913 Constant_1023" -> "195 /layers/layers.2/blocks.0/norm2/Div" [label="[1]", style=dashed]; +"914 /layers/layers.2/blocks.0/Constant_7" -> "478 /layers/layers.2/blocks.0/Reshape_7" [label="[3]", style=dashed]; +"915 /layers/layers.2/blocks.0/Constant_6" -> "465 /layers/layers.2/blocks.0/Reshape_6" [label="[4]", style=dashed]; +"916 Constant_1012" -> "455 /layers/layers.2/blocks.0/Transpose_1" [label="[6]", style=dashed]; +"917 /layers/layers.2/blocks.0/Constant_5" -> "444 /layers/layers.2/blocks.0/Reshape_5" [label="[6]", style=dashed]; +"918 /layers/layers.2/blocks.0/Constant_4" -> "431 /layers/layers.2/blocks.0/Reshape_4" [label="[4]", style=dashed]; +"919 Constant_6615" -> "407 /layers/layers.2/blocks.0/attn/proj/MatMul" [label="[384, 384]", style=solid]; +"920 /layers/layers.2/blocks.0/attn/Reshape_1/smooth_quant_const" -> "394 /layers/layers.2/blocks.0/attn/Reshape_1/smooth_quant_multiply" [label="[1, 1, 384]", style=solid]; +"921 /layers/layers.2/blocks.0/attn/Constant_2" -> "380 /layers/layers.2/blocks.0/attn/Reshape_1" [label="[3]", style=dashed]; +"922 Constant_996" -> "368 /layers/layers.2/blocks.0/attn/Transpose_2" [label="[4]", style=dashed]; +"923 Constant_986" -> "336 /layers/layers.2/blocks.0/attn/Gather_2" [label="[]", style=dashed]; +"924 Constant_980" -> "319 /layers/layers.2/blocks.0/attn/Transpose" [label="[5]", style=dashed]; +"925 /layers/layers.2/blocks.0/attn/Constant" -> "305 /layers/layers.2/blocks.0/attn/Reshape" [label="[5]", style=dashed]; +"926 Constant_6611" -> "275 /layers/layers.2/blocks.0/attn/qkv/MatMul" [label="[1152, 384]", style=solid]; +"927 /layers/layers.2/blocks.0/Reshape_3/smooth_quant_const" -> "261 /layers/layers.2/blocks.0/Reshape_3/smooth_quant_multiply" [label="[1, 1, 384]", style=solid]; +"928 /layers/layers.2/blocks.0/Constant_3" -> "247 /layers/layers.2/blocks.0/Reshape_3" [label="[3]", style=dashed]; +"929 /layers/layers.2/blocks.0/Constant_2" -> "233 /layers/layers.2/blocks.0/Reshape_2" [label="[4]", style=dashed]; +"930 Constant_964" -> "222 /layers/layers.2/blocks.0/Transpose" [label="[6]", style=dashed]; +"931 /layers/layers.2/blocks.0/Constant_1" -> "213 /layers/layers.2/blocks.0/Reshape_1" [label="[6]", style=dashed]; +"932 Constant_7230" -> "204 /layers/layers.2/blocks.0/norm1/Add_1" [label="[1, 1, 384]", style=solid]; +"933 Constant_7229" -> "196 /layers/layers.2/blocks.0/norm1/Mul" [label="[1, 1, 384]", style=solid]; +"934 Constant_943" -> "189 /layers/layers.2/blocks.0/norm1/Div" [label="[1]", style=dashed]; +"935 Constant_6606" -> "182 /layers/layers.1/downsample/reduction/MatMul" [label="[384, 768]", style=solid]; +"936 /layers/layers.1/downsample/norm/Add_1/smooth_quant_const" -> "175 /layers/layers.1/downsample/norm/Add_1/smooth_quant_multiply" [label="[1, 1, 768]", style=solid]; +"937 Constant_7228" 
-> "169 /layers/layers.1/downsample/norm/Add_1" [label="[1, 1, 768]", style=solid]; +"938 Constant_7227" -> "162 /layers/layers.1/downsample/norm/Mul" [label="[1, 1, 768]", style=solid]; +"939 Constant_929" -> "154 /layers/layers.1/downsample/norm/Div" [label="[1]", style=dashed]; +"940 /layers/layers.1/downsample/Constant_25" -> "145 /layers/layers.1/downsample/Reshape_1" [label="[3]", style=dashed]; +"941 Constant_5551" -> "127 /layers/layers.1/downsample/Slice_5" [label="[3]", style=dashed]; +"942 Constant_5548" -> "127 /layers/layers.1/downsample/Slice_5" [label="[3]", style=dashed]; +"943 Constant_5545" -> "127 /layers/layers.1/downsample/Slice_5" [label="[3]", style=dashed]; +"944 Constant_5515" -> "115 /layers/layers.1/downsample/Slice_2" [label="[2]", style=dashed]; +"945 Constant_5512" -> "115 /layers/layers.1/downsample/Slice_2" [label="[2]", style=dashed]; +"946 Constant_5509" -> "115 /layers/layers.1/downsample/Slice_2" [label="[2]", style=dashed]; +"947 /layers/layers.1/downsample/Constant" -> "107 /layers/layers.1/downsample/Reshape" [label="[4]", style=dashed]; +"948 Constant_6601" -> "170 /layers/layers.1/blocks.1/mlp/fc2/MatMul" [label="[192, 768]", style=solid]; +"949 /layers/layers.1/blocks.1/mlp/act/Mul_1/smooth_quant_const" -> "163 /layers/layers.1/blocks.1/mlp/act/Mul_1/smooth_quant_multiply" [label="[1, 1, 768]", style=solid]; +"950 Constant_6596" -> "138 /layers/layers.1/blocks.1/mlp/fc1/MatMul" [label="[768, 192]", style=solid]; +"951 /layers/layers.1/blocks.1/norm2/Add_1/smooth_quant_const" -> "128 /layers/layers.1/blocks.1/norm2/Add_1/smooth_quant_multiply" [label="[1, 1, 192]", style=solid]; +"952 Constant_7224" -> "116 /layers/layers.1/blocks.1/norm2/Add_1" [label="[1, 1, 192]", style=solid]; +"953 Constant_7223" -> "108 /layers/layers.1/blocks.1/norm2/Mul" [label="[1, 1, 192]", style=solid]; +"954 Constant_864" -> "101 /layers/layers.1/blocks.1/norm2/Div" [label="[1]", style=dashed]; +"955 /layers/layers.1/blocks.1/Constant_31" -> "445 /layers/layers.1/blocks.1/Reshape_7" [label="[3]", style=dashed]; +"956 Constant_5479" -> "420 /layers/layers.1/blocks.1/Slice_7" [label="[3]", style=dashed]; +"957 Constant_5476" -> "420 /layers/layers.1/blocks.1/Slice_7" [label="[3]", style=dashed]; +"958 Constant_5473" -> "420 /layers/layers.1/blocks.1/Slice_7" [label="[3]", style=dashed]; +"959 Constant_5455" -> "396 /layers/layers.1/blocks.1/Slice_5" [label="[2]", style=dashed]; +"960 Constant_5452" -> "396 /layers/layers.1/blocks.1/Slice_5" [label="[2]", style=dashed]; +"961 Constant_5449" -> "396 /layers/layers.1/blocks.1/Slice_5" [label="[2]", style=dashed]; +"962 /layers/layers.1/blocks.1/Constant_18" -> "381 /layers/layers.1/blocks.1/Reshape_6" [label="[4]", style=dashed]; +"963 Constant_811" -> "369 /layers/layers.1/blocks.1/Transpose_1" [label="[6]", style=dashed]; +"964 /layers/layers.1/blocks.1/Constant_17" -> "352 /layers/layers.1/blocks.1/Reshape_5" [label="[6]", style=dashed]; +"965 /layers/layers.1/blocks.1/Constant_16" -> "337 /layers/layers.1/blocks.1/Reshape_4" [label="[4]", style=dashed]; +"966 Constant_6591" -> "307 /layers/layers.1/blocks.1/attn/proj/MatMul" [label="[192, 192]", style=solid]; +"967 /layers/layers.1/blocks.1/attn/Reshape_3/smooth_quant_const" -> "291 /layers/layers.1/blocks.1/attn/Reshape_3/smooth_quant_multiply" [label="[1, 1, 192]", style=solid]; +"968 /layers/layers.1/blocks.1/attn/Constant_4" -> "277 /layers/layers.1/blocks.1/attn/Reshape_3" [label="[3]", style=dashed]; +"969 Constant_795" -> "263 
/layers/layers.1/blocks.1/attn/Transpose_2" [label="[4]", style=dashed]; +"970 Constant_776" -> "236 /layers/layers.1/blocks.1/attn/Gather_2" [label="[]", style=dashed]; +"971 Constant_770" -> "223 /layers/layers.1/blocks.1/attn/Transpose" [label="[5]", style=dashed]; +"972 /layers/layers.1/blocks.1/attn/Constant" -> "214 /layers/layers.1/blocks.1/attn/Reshape" [label="[5]", style=dashed]; +"973 Constant_6587" -> "197 /layers/layers.1/blocks.1/attn/qkv/MatMul" [label="[576, 192]", style=solid]; +"974 /layers/layers.1/blocks.1/Reshape_3/smooth_quant_const" -> "190 /layers/layers.1/blocks.1/Reshape_3/smooth_quant_multiply" [label="[1, 1, 192]", style=solid]; +"975 /layers/layers.1/blocks.1/Constant_15" -> "183 /layers/layers.1/blocks.1/Reshape_3" [label="[3]", style=dashed]; +"976 /layers/layers.1/blocks.1/Constant_14" -> "177 /layers/layers.1/blocks.1/Reshape_2" [label="[4]", style=dashed]; +"977 Constant_754" -> "171 /layers/layers.1/blocks.1/Transpose" [label="[6]", style=dashed]; +"978 /layers/layers.1/blocks.1/Constant_13" -> "164 /layers/layers.1/blocks.1/Reshape_1" [label="[6]", style=dashed]; +"979 Constant_5431" -> "148 /layers/layers.1/blocks.1/Slice_3" [label="[3]", style=dashed]; +"980 Constant_5428" -> "148 /layers/layers.1/blocks.1/Slice_3" [label="[3]", style=dashed]; +"981 Constant_5425" -> "148 /layers/layers.1/blocks.1/Slice_3" [label="[3]", style=dashed]; +"982 Constant_5407" -> "130 /layers/layers.1/blocks.1/Slice_1" [label="[2]", style=dashed]; +"983 Constant_5404" -> "130 /layers/layers.1/blocks.1/Slice_1" [label="[2]", style=dashed]; +"984 Constant_5401" -> "130 /layers/layers.1/blocks.1/Slice_1" [label="[2]", style=dashed]; +"985 /layers/layers.1/blocks.1/Constant" -> "117 /layers/layers.1/blocks.1/Reshape" [label="[4]", style=dashed]; +"986 Constant_7219" -> "109 /layers/layers.1/blocks.1/norm1/Add_1" [label="[1, 1, 192]", style=solid]; +"987 Constant_7218" -> "102 /layers/layers.1/blocks.1/norm1/Mul" [label="[1, 1, 192]", style=solid]; +"988 Constant_691" -> "95 /layers/layers.1/blocks.1/norm1/Div" [label="[1]", style=dashed]; +"989 Constant_6582" -> "157 /layers/layers.1/blocks.0/mlp/fc2/MatMul" [label="[192, 768]", style=solid]; +"990 /layers/layers.1/blocks.0/mlp/act/Mul_1/smooth_quant_const" -> "149 /layers/layers.1/blocks.0/mlp/act/Mul_1/smooth_quant_multiply" [label="[1, 1, 768]", style=solid]; +"991 Constant_6577" -> "118 /layers/layers.1/blocks.0/mlp/fc1/MatMul" [label="[768, 192]", style=solid]; +"992 /layers/layers.1/blocks.0/norm2/Add_1/smooth_quant_const" -> "110 /layers/layers.1/blocks.0/norm2/Add_1/smooth_quant_multiply" [label="[1, 1, 192]", style=solid]; +"993 Constant_7215" -> "103 /layers/layers.1/blocks.0/norm2/Add_1" [label="[1, 1, 192]", style=solid]; +"994 Constant_7214" -> "96 /layers/layers.1/blocks.0/norm2/Mul" [label="[1, 1, 192]", style=solid]; +"995 Constant_665" -> "89 /layers/layers.1/blocks.0/norm2/Div" [label="[1]", style=dashed]; +"996 /layers/layers.1/blocks.0/Constant_7" -> "292 /layers/layers.1/blocks.0/Reshape_7" [label="[3]", style=dashed]; +"997 /layers/layers.1/blocks.0/Constant_6" -> "278 /layers/layers.1/blocks.0/Reshape_6" [label="[4]", style=dashed]; +"998 Constant_654" -> "264 /layers/layers.1/blocks.0/Transpose_1" [label="[6]", style=dashed]; +"999 /layers/layers.1/blocks.0/Constant_5" -> "251 /layers/layers.1/blocks.0/Reshape_5" [label="[6]", style=dashed]; +"1000 /layers/layers.1/blocks.0/Constant_4" -> "237 /layers/layers.1/blocks.0/Reshape_4" [label="[4]", style=dashed]; +"1001 Constant_6572" -> "215 
/layers/layers.1/blocks.0/attn/proj/MatMul" [label="[192, 192]", style=solid]; +"1002 /layers/layers.1/blocks.0/attn/Reshape_1/smooth_quant_const" -> "206 /layers/layers.1/blocks.0/attn/Reshape_1/smooth_quant_multiply" [label="[1, 1, 192]", style=solid]; +"1003 /layers/layers.1/blocks.0/attn/Constant_2" -> "199 /layers/layers.1/blocks.0/attn/Reshape_1" [label="[3]", style=dashed]; +"1004 Constant_638" -> "192 /layers/layers.1/blocks.0/attn/Transpose_2" [label="[4]", style=dashed]; +"1005 Constant_628" -> "180 /layers/layers.1/blocks.0/attn/Gather_2" [label="[]", style=dashed]; +"1006 Constant_622" -> "172 /layers/layers.1/blocks.0/attn/Transpose" [label="[5]", style=dashed]; +"1007 /layers/layers.1/blocks.0/attn/Constant" -> "166 /layers/layers.1/blocks.0/attn/Reshape" [label="[5]", style=dashed]; +"1008 Constant_6568" -> "150 /layers/layers.1/blocks.0/attn/qkv/MatMul" [label="[576, 192]", style=solid]; +"1009 /layers/layers.1/blocks.0/Reshape_3/smooth_quant_const" -> "141 /layers/layers.1/blocks.0/Reshape_3/smooth_quant_multiply" [label="[1, 1, 192]", style=solid]; +"1010 /layers/layers.1/blocks.0/Constant_3" -> "132 /layers/layers.1/blocks.0/Reshape_3" [label="[3]", style=dashed]; +"1011 /layers/layers.1/blocks.0/Constant_2" -> "119 /layers/layers.1/blocks.0/Reshape_2" [label="[4]", style=dashed]; +"1012 Constant_606" -> "111 /layers/layers.1/blocks.0/Transpose" [label="[6]", style=dashed]; +"1013 /layers/layers.1/blocks.0/Constant_1" -> "104 /layers/layers.1/blocks.0/Reshape_1" [label="[6]", style=dashed]; +"1014 Constant_7210" -> "97 /layers/layers.1/blocks.0/norm1/Add_1" [label="[1, 1, 192]", style=solid]; +"1015 Constant_7209" -> "90 /layers/layers.1/blocks.0/norm1/Mul" [label="[1, 1, 192]", style=solid]; +"1016 Constant_585" -> "84 /layers/layers.1/blocks.0/norm1/Div" [label="[1]", style=dashed]; +"1017 Constant_6563" -> "78 /layers/layers.0/downsample/reduction/MatMul" [label="[192, 384]", style=solid]; +"1018 /layers/layers.0/downsample/norm/Add_1/smooth_quant_const" -> "72 /layers/layers.0/downsample/norm/Add_1/smooth_quant_multiply" [label="[1, 1, 384]", style=solid]; +"1019 Constant_7208" -> "68 /layers/layers.0/downsample/norm/Add_1" [label="[1, 1, 384]", style=solid]; +"1020 Constant_7207" -> "63 /layers/layers.0/downsample/norm/Mul" [label="[1, 1, 384]", style=solid]; +"1021 Constant_571" -> "58 /layers/layers.0/downsample/norm/Div" [label="[1]", style=dashed]; +"1022 /layers/layers.0/downsample/Constant_25" -> "52 /layers/layers.0/downsample/Reshape_1" [label="[3]", style=dashed]; +"1023 Constant_5383" -> "41 /layers/layers.0/downsample/Slice_5" [label="[3]", style=dashed]; +"1024 Constant_5380" -> "41 /layers/layers.0/downsample/Slice_5" [label="[3]", style=dashed]; +"1025 Constant_5377" -> "41 /layers/layers.0/downsample/Slice_5" [label="[3]", style=dashed]; +"1026 Constant_5347" -> "33 /layers/layers.0/downsample/Slice_2" [label="[2]", style=dashed]; +"1027 Constant_5344" -> "33 /layers/layers.0/downsample/Slice_2" [label="[2]", style=dashed]; +"1028 Constant_5341" -> "33 /layers/layers.0/downsample/Slice_2" [label="[2]", style=dashed]; +"1029 /layers/layers.0/downsample/Constant" -> "27 /layers/layers.0/downsample/Reshape" [label="[4]", style=dashed]; +"1030 Constant_6558" -> "69 /layers/layers.0/blocks.1/mlp/fc2/MatMul" [label="[96, 384]", style=solid]; +"1031 /layers/layers.0/blocks.1/mlp/act/Mul_1/smooth_quant_const" -> "64 /layers/layers.0/blocks.1/mlp/act/Mul_1/smooth_quant_multiply" [label="[1, 1, 384]", style=solid]; +"1032 Constant_6553" -> "48 
/layers/layers.0/blocks.1/mlp/fc1/MatMul" [label="[384, 96]", style=solid]; +"1033 /layers/layers.0/blocks.1/norm2/Add_1/smooth_quant_const" -> "42 /layers/layers.0/blocks.1/norm2/Add_1/smooth_quant_multiply" [label="[1, 1, 96]", style=solid]; +"1034 Constant_7204" -> "34 /layers/layers.0/blocks.1/norm2/Add_1" [label="[1, 1, 96]", style=solid]; +"1035 Constant_7203" -> "28 /layers/layers.0/blocks.1/norm2/Mul" [label="[1, 1, 96]", style=solid]; +"1036 Constant_506" -> "23 /layers/layers.0/blocks.1/norm2/Div" [label="[1]", style=dashed]; +"1037 /layers/layers.0/blocks.1/Constant_31" -> "252 /layers/layers.0/blocks.1/Reshape_7" [label="[3]", style=dashed]; +"1038 Constant_5311" -> "226 /layers/layers.0/blocks.1/Slice_7" [label="[3]", style=dashed]; +"1039 Constant_5308" -> "226 /layers/layers.0/blocks.1/Slice_7" [label="[3]", style=dashed]; +"1040 Constant_5305" -> "226 /layers/layers.0/blocks.1/Slice_7" [label="[3]", style=dashed]; +"1041 Constant_5287" -> "208 /layers/layers.0/blocks.1/Slice_5" [label="[2]", style=dashed]; +"1042 Constant_5284" -> "208 /layers/layers.0/blocks.1/Slice_5" [label="[2]", style=dashed]; +"1043 Constant_5281" -> "208 /layers/layers.0/blocks.1/Slice_5" [label="[2]", style=dashed]; +"1044 /layers/layers.0/blocks.1/Constant_18" -> "200 /layers/layers.0/blocks.1/Reshape_6" [label="[4]", style=dashed]; +"1045 Constant_453" -> "193 /layers/layers.0/blocks.1/Transpose_1" [label="[6]", style=dashed]; +"1046 /layers/layers.0/blocks.1/Constant_17" -> "187 /layers/layers.0/blocks.1/Reshape_5" [label="[6]", style=dashed]; +"1047 /layers/layers.0/blocks.1/Constant_16" -> "181 /layers/layers.0/blocks.1/Reshape_4" [label="[4]", style=dashed]; +"1048 Constant_6548" -> "168 /layers/layers.0/blocks.1/attn/proj/MatMul" [label="[96, 96]", style=solid]; +"1049 /layers/layers.0/blocks.1/attn/Reshape_3/smooth_quant_const" -> "160 /layers/layers.0/blocks.1/attn/Reshape_3/smooth_quant_multiply" [label="[1, 1, 96]", style=solid]; +"1050 /layers/layers.0/blocks.1/attn/Constant_4" -> "152 /layers/layers.0/blocks.1/attn/Reshape_3" [label="[3]", style=dashed]; +"1051 Constant_437" -> "143 /layers/layers.0/blocks.1/attn/Transpose_2" [label="[4]", style=dashed]; +"1052 Constant_418" -> "122 /layers/layers.0/blocks.1/attn/Gather_2" [label="[]", style=dashed]; +"1053 Constant_412" -> "112 /layers/layers.0/blocks.1/attn/Transpose" [label="[5]", style=dashed]; +"1054 /layers/layers.0/blocks.1/attn/Constant" -> "105 /layers/layers.0/blocks.1/attn/Reshape" [label="[5]", style=dashed]; +"1055 Constant_6544" -> "91 /layers/layers.0/blocks.1/attn/qkv/MatMul" [label="[288, 96]", style=solid]; +"1056 /layers/layers.0/blocks.1/Reshape_3/smooth_quant_const" -> "85 /layers/layers.0/blocks.1/Reshape_3/smooth_quant_multiply" [label="[1, 1, 96]", style=solid]; +"1057 /layers/layers.0/blocks.1/Constant_15" -> "79 /layers/layers.0/blocks.1/Reshape_3" [label="[3]", style=dashed]; +"1058 /layers/layers.0/blocks.1/Constant_14" -> "74 /layers/layers.0/blocks.1/Reshape_2" [label="[4]", style=dashed]; +"1059 Constant_396" -> "70 /layers/layers.0/blocks.1/Transpose" [label="[6]", style=dashed]; +"1060 /layers/layers.0/blocks.1/Constant_13" -> "65 /layers/layers.0/blocks.1/Reshape_1" [label="[6]", style=dashed]; +"1061 Constant_5263" -> "55 /layers/layers.0/blocks.1/Slice_3" [label="[3]", style=dashed]; +"1062 Constant_5260" -> "55 /layers/layers.0/blocks.1/Slice_3" [label="[3]", style=dashed]; +"1063 Constant_5257" -> "55 /layers/layers.0/blocks.1/Slice_3" [label="[3]", style=dashed]; +"1064 Constant_5239" -> "44 
/layers/layers.0/blocks.1/Slice_1" [label="[2]", style=dashed]; +"1065 Constant_5236" -> "44 /layers/layers.0/blocks.1/Slice_1" [label="[2]", style=dashed]; +"1066 Constant_5233" -> "44 /layers/layers.0/blocks.1/Slice_1" [label="[2]", style=dashed]; +"1067 /layers/layers.0/blocks.1/Constant" -> "35 /layers/layers.0/blocks.1/Reshape" [label="[4]", style=dashed]; +"1068 Constant_7199" -> "29 /layers/layers.0/blocks.1/norm1/Add_1" [label="[1, 1, 96]", style=solid]; +"1069 Constant_7198" -> "24 /layers/layers.0/blocks.1/norm1/Mul" [label="[1, 1, 96]", style=solid]; +"1070 Constant_333" -> "19 /layers/layers.0/blocks.1/norm1/Div" [label="[1]", style=dashed]; +"1071 Constant_6539" -> "61 /layers/layers.0/blocks.0/mlp/fc2/MatMul" [label="[96, 384]", style=solid]; +"1072 /layers/layers.0/blocks.0/mlp/act/Mul_1/smooth_quant_const" -> "56 /layers/layers.0/blocks.0/mlp/act/Mul_1/smooth_quant_multiply" [label="[1, 1, 384]", style=solid]; +"1073 Constant_6534" -> "36 /layers/layers.0/blocks.0/mlp/fc1/MatMul" [label="[384, 96]", style=solid]; +"1074 /layers/layers.0/blocks.0/norm2/Add_1/smooth_quant_const" -> "30 /layers/layers.0/blocks.0/norm2/Add_1/smooth_quant_multiply" [label="[1, 1, 96]", style=solid]; +"1075 Constant_7195" -> "25 /layers/layers.0/blocks.0/norm2/Add_1" [label="[1, 1, 96]", style=solid]; +"1076 Constant_7194" -> "20 /layers/layers.0/blocks.0/norm2/Mul" [label="[1, 1, 96]", style=solid]; +"1077 Constant_307" -> "16 /layers/layers.0/blocks.0/norm2/Div" [label="[1]", style=dashed]; +"1078 /layers/layers.0/blocks.0/Constant_8" -> "161 /layers/layers.0/blocks.0/Reshape_7" [label="[3]", style=dashed]; +"1079 /layers/layers.0/blocks.0/Constant_7" -> "153 /layers/layers.0/blocks.0/Reshape_6" [label="[4]", style=dashed]; +"1080 Constant_296" -> "144 /layers/layers.0/blocks.0/Transpose_1" [label="[6]", style=dashed]; +"1081 /layers/layers.0/blocks.0/Constant_6" -> "136 /layers/layers.0/blocks.0/Reshape_5" [label="[6]", style=dashed]; +"1082 /layers/layers.0/blocks.0/Constant_5" -> "123 /layers/layers.0/blocks.0/Reshape_4" [label="[4]", style=dashed]; +"1083 Constant_6529" -> "106 /layers/layers.0/blocks.0/attn/proj/MatMul" [label="[96, 96]", style=solid]; +"1084 /layers/layers.0/blocks.0/attn/Reshape_1/smooth_quant_const" -> "99 /layers/layers.0/blocks.0/attn/Reshape_1/smooth_quant_multiply" [label="[1, 1, 96]", style=solid]; +"1085 /layers/layers.0/blocks.0/attn/Constant_2" -> "93 /layers/layers.0/blocks.0/attn/Reshape_1" [label="[3]", style=dashed]; +"1086 Constant_280" -> "87 /layers/layers.0/blocks.0/attn/Transpose_2" [label="[4]", style=dashed]; +"1087 Constant_270" -> "77 /layers/layers.0/blocks.0/attn/Gather_2" [label="[]", style=dashed]; +"1088 Constant_264" -> "71 /layers/layers.0/blocks.0/attn/Transpose" [label="[5]", style=dashed]; +"1089 /layers/layers.0/blocks.0/attn/Constant" -> "67 /layers/layers.0/blocks.0/attn/Reshape" [label="[5]", style=dashed]; +"1090 Constant_6525" -> "57 /layers/layers.0/blocks.0/attn/qkv/MatMul" [label="[288, 96]", style=solid]; +"1091 /layers/layers.0/blocks.0/Reshape_3/smooth_quant_const" -> "51 /layers/layers.0/blocks.0/Reshape_3/smooth_quant_multiply" [label="[1, 1, 96]", style=solid]; +"1092 /layers/layers.0/blocks.0/Constant_4" -> "46 /layers/layers.0/blocks.0/Reshape_3" [label="[3]", style=dashed]; +"1093 /layers/layers.0/blocks.0/Constant_3" -> "37 /layers/layers.0/blocks.0/Reshape_2" [label="[4]", style=dashed]; +"1094 Constant_248" -> "31 /layers/layers.0/blocks.0/Transpose" [label="[6]", style=dashed]; +"1095 
/layers/layers.0/blocks.0/Constant_2" -> "26 /layers/layers.0/blocks.0/Reshape_1" [label="[6]", style=dashed]; +"1096 Constant_7190" -> "21 /layers/layers.0/blocks.0/norm1/Add_1" [label="[1, 1, 96]", style=solid]; +"1097 Constant_7189" -> "17 /layers/layers.0/blocks.0/norm1/Mul" [label="[1, 1, 96]", style=solid]; +"1098 Constant_227" -> "14 /layers/layers.0/blocks.0/norm1/Div" [label="[1]", style=dashed]; +"1099 Constant_7188" -> "12 /patch_embed/norm/Add_1" [label="[1, 1, 96]", style=solid]; +"1100 Constant_7187" -> "11 /patch_embed/norm/Mul" [label="[1, 1, 96]", style=solid]; +"1101 Constant_213" -> "9 /patch_embed/norm/Div" [label="[1]", style=dashed]; +"1102 Constant_211" -> "7 /patch_embed/Transpose" [label="[3]", style=dashed]; +"1103 /patch_embed/Constant_4" -> "10 /patch_embed/Concat" [label="[1]", style=dashed]; +"1104 Broadcast_201" -> "8 /patch_embed/Slice" [label="[1]", style=dashed]; +"1105 /patch_embed/Constant_3" -> "8 /patch_embed/Slice" [label="[1]", style=dashed]; +"1106 /patch_embed/Constant_2" -> "8 /patch_embed/Slice" [label="[1]", style=dashed]; +"1107 Reshape_190" -> "4 /patch_embed/proj/Conv" [label="[1, 96, 1, 1]", style=solid]; +"1108 Gather_7186" -> "3 /patch_embed/proj/Conv/WithoutBiases" [label="[96, 3, 4, 4]", style=solid]; +"1109 Gather_7183" -> "2 Divide_2169" [label="[1, 3, 1, 1]", style=solid]; +"1110 Gather_7180" -> "1 Multiply_6794" [label="[1, 3, 1, 1]", style=solid]; +"1111 Constant_7191" -> "62 /layers/layers.0/blocks.0/attn/qkv/Add" [label="[1, 1, 288]", style=solid]; +"1112 onnx^^Add_2243" -> "86 /layers/layers.0/blocks.0/attn/Add" [label="[1, 3, 49, 49]", style=solid]; +"1113 Constant_268" -> "76 /layers/layers.0/blocks.0/attn/Gather_1" [label="[]", style=dashed]; +"1114 /patch_embed/proj/Constant" -> "76 /layers/layers.0/blocks.0/attn/Gather_1" [label="[]", style=dashed]; +"1114 /patch_embed/proj/Constant" -> "121 /layers/layers.0/blocks.1/attn/Gather_1" [label="[]", style=dashed]; +"1114 /patch_embed/proj/Constant" -> "179 /layers/layers.1/blocks.0/attn/Gather_1" [label="[]", style=dashed]; +"1114 /patch_embed/proj/Constant" -> "235 /layers/layers.1/blocks.1/attn/Gather_1" [label="[]", style=dashed]; +"1114 /patch_embed/proj/Constant" -> "335 /layers/layers.2/blocks.0/attn/Gather_1" [label="[]", style=dashed]; +"1114 /patch_embed/proj/Constant" -> "391 /layers/layers.2/blocks.2/attn/Gather_1" [label="[]", style=dashed]; +"1114 /patch_embed/proj/Constant" -> "429 /layers/layers.2/blocks.1/attn/Gather_1" [label="[]", style=dashed]; +"1114 /patch_embed/proj/Constant" -> "437 /layers/layers.2/blocks.4/attn/Gather_1" [label="[]", style=dashed]; +"1114 /patch_embed/proj/Constant" -> "473 /layers/layers.2/blocks.3/attn/Gather_1" [label="[]", style=dashed]; +"1114 /patch_embed/proj/Constant" -> "520 /layers/layers.2/blocks.5/attn/Gather_1" [label="[]", style=dashed]; +"1114 /patch_embed/proj/Constant" -> "593 /layers/layers.3/blocks.0/attn/Gather_1" [label="[]", style=dashed]; +"1114 /patch_embed/proj/Constant" -> "607 /layers/layers.3/blocks.1/attn/Gather_1" [label="[]", style=dashed]; +"1115 Constant_7192" -> "80 /layers/layers.0/blocks.0/attn/Mul" [label="[1, 1, 1, 1]", style=solid]; +"1116 Constant_266" -> "75 /layers/layers.0/blocks.0/attn/Gather" [label="[]", style=dashed]; +"1117 /layers/layers.0/blocks.0/Constant" -> "75 /layers/layers.0/blocks.0/attn/Gather" [label="[]", style=dashed]; +"1117 /layers/layers.0/blocks.0/Constant" -> "120 /layers/layers.0/blocks.1/attn/Gather" [label="[]", style=dashed]; +"1117 /layers/layers.0/blocks.0/Constant" 
-> "178 /layers/layers.1/blocks.0/attn/Gather" [label="[]", style=dashed]; +"1117 /layers/layers.0/blocks.0/Constant" -> "234 /layers/layers.1/blocks.1/attn/Gather" [label="[]", style=dashed]; +"1117 /layers/layers.0/blocks.0/Constant" -> "334 /layers/layers.2/blocks.0/attn/Gather" [label="[]", style=dashed]; +"1117 /layers/layers.0/blocks.0/Constant" -> "390 /layers/layers.2/blocks.2/attn/Gather" [label="[]", style=dashed]; +"1117 /layers/layers.0/blocks.0/Constant" -> "428 /layers/layers.2/blocks.1/attn/Gather" [label="[]", style=dashed]; +"1117 /layers/layers.0/blocks.0/Constant" -> "436 /layers/layers.2/blocks.4/attn/Gather" [label="[]", style=dashed]; +"1117 /layers/layers.0/blocks.0/Constant" -> "472 /layers/layers.2/blocks.3/attn/Gather" [label="[]", style=dashed]; +"1117 /layers/layers.0/blocks.0/Constant" -> "519 /layers/layers.2/blocks.5/attn/Gather" [label="[]", style=dashed]; +"1117 /layers/layers.0/blocks.0/Constant" -> "592 /layers/layers.3/blocks.0/attn/Gather" [label="[]", style=dashed]; +"1117 /layers/layers.0/blocks.0/Constant" -> "606 /layers/layers.3/blocks.1/attn/Gather" [label="[]", style=dashed]; +"1118 Constant_7193" -> "113 /layers/layers.0/blocks.0/attn/proj/Add" [label="[1, 1, 96]", style=solid]; +"1119 Constant_7196" -> "45 /layers/layers.0/blocks.0/mlp/fc1/Add" [label="[1, 1, 384]", style=solid]; +"1120 Constant_7197" -> "66 /layers/layers.0/blocks.0/mlp/fc2/Add" [label="[1, 1, 96]", style=solid]; +"1121 Constant_5227" -> "43 /layers/layers.0/blocks.1/Slice" [label="[2]", style=dashed]; +"1122 Constant_5224" -> "43 /layers/layers.0/blocks.1/Slice" [label="[2]", style=dashed]; +"1123 Constant_5221" -> "43 /layers/layers.0/blocks.1/Slice" [label="[2]", style=dashed]; +"1124 Constant_5251" -> "54 /layers/layers.0/blocks.1/Slice_2" [label="[3]", style=dashed]; +"1125 Constant_5248" -> "54 /layers/layers.0/blocks.1/Slice_2" [label="[3]", style=dashed]; +"1126 Constant_5245" -> "54 /layers/layers.0/blocks.1/Slice_2" [label="[3]", style=dashed]; +"1127 Constant_7200" -> "98 /layers/layers.0/blocks.1/attn/qkv/Add" [label="[1, 1, 288]", style=solid]; +"1128 /layers/layers.0/blocks.1/attn/Constant_3" -> "167 /layers/layers.0/blocks.1/attn/Reshape_2" [label="[4]", style=dashed]; +"1129 onnx^^Add_2300" -> "159 /layers/layers.0/blocks.1/attn/Add_1" [label="[1, 64, 1, 49, 49]", style=solid]; +"1130 /layers/layers.0/blocks.1/attn/Constant_2" -> "151 /layers/layers.0/blocks.1/attn/Reshape_1" [label="[5]", style=dashed]; +"1131 onnx^^Add_2292" -> "142 /layers/layers.0/blocks.1/attn/Add" [label="[1, 3, 49, 49]", style=solid]; +"1132 Constant_416" -> "121 /layers/layers.0/blocks.1/attn/Gather_1" [label="[]", style=dashed]; +"1133 Constant_7201" -> "133 /layers/layers.0/blocks.1/attn/Mul" [label="[1, 1, 1, 1]", style=solid]; +"1134 Constant_414" -> "120 /layers/layers.0/blocks.1/attn/Gather" [label="[]", style=dashed]; +"1135 Constant_7202" -> "174 /layers/layers.0/blocks.1/attn/proj/Add" [label="[1, 1, 96]", style=solid]; +"1136 Constant_5275" -> "207 /layers/layers.0/blocks.1/Slice_4" [label="[2]", style=dashed]; +"1137 Constant_5272" -> "207 /layers/layers.0/blocks.1/Slice_4" [label="[2]", style=dashed]; +"1138 Constant_5269" -> "207 /layers/layers.0/blocks.1/Slice_4" [label="[2]", style=dashed]; +"1139 Constant_5299" -> "225 /layers/layers.0/blocks.1/Slice_6" [label="[3]", style=dashed]; +"1140 Constant_5296" -> "225 /layers/layers.0/blocks.1/Slice_6" [label="[3]", style=dashed]; +"1141 Constant_5293" -> "225 /layers/layers.0/blocks.1/Slice_6" [label="[3]", style=dashed]; 
+"1142 Constant_7205" -> "53 /layers/layers.0/blocks.1/mlp/fc1/Add" [label="[1, 1, 384]", style=solid]; +"1143 Constant_7206" -> "73 /layers/layers.0/blocks.1/mlp/fc2/Add" [label="[1, 1, 96]", style=solid]; +"1144 Constant_5371" -> "39 /layers/layers.0/downsample/Slice_4" [label="[3]", style=dashed]; +"1145 Constant_5368" -> "39 /layers/layers.0/downsample/Slice_4" [label="[3]", style=dashed]; +"1146 Constant_5365" -> "39 /layers/layers.0/downsample/Slice_4" [label="[3]", style=dashed]; +"1147 Constant_5323" -> "32 /layers/layers.0/downsample/Slice" [label="[2]", style=dashed]; +"1148 Constant_5320" -> "32 /layers/layers.0/downsample/Slice" [label="[2]", style=dashed]; +"1149 Constant_5317" -> "32 /layers/layers.0/downsample/Slice" [label="[2]", style=dashed]; +"1150 Constant_5359" -> "40 /layers/layers.0/downsample/Slice_3" [label="[3]", style=dashed]; +"1151 Constant_5356" -> "40 /layers/layers.0/downsample/Slice_3" [label="[3]", style=dashed]; +"1152 Constant_5353" -> "40 /layers/layers.0/downsample/Slice_3" [label="[3]", style=dashed]; +"1153 Constant_5335" -> "38 /layers/layers.0/downsample/Slice_1" [label="[3]", style=dashed]; +"1154 Constant_5332" -> "38 /layers/layers.0/downsample/Slice_1" [label="[3]", style=dashed]; +"1155 Constant_5329" -> "38 /layers/layers.0/downsample/Slice_1" [label="[3]", style=dashed]; +"1156 Constant_7211" -> "158 /layers/layers.1/blocks.0/attn/qkv/Add" [label="[1, 1, 576]", style=solid]; +"1157 onnx^^Add_2364" -> "191 /layers/layers.1/blocks.0/attn/Add" [label="[1, 6, 49, 49]", style=solid]; +"1158 Constant_626" -> "179 /layers/layers.1/blocks.0/attn/Gather_1" [label="[]", style=dashed]; +"1159 Constant_7212" -> "184 /layers/layers.1/blocks.0/attn/Mul" [label="[1, 1, 1, 1]", style=solid]; +"1160 Constant_624" -> "178 /layers/layers.1/blocks.0/attn/Gather" [label="[]", style=dashed]; +"1161 Constant_7213" -> "224 /layers/layers.1/blocks.0/attn/proj/Add" [label="[1, 1, 192]", style=solid]; +"1162 Constant_7216" -> "131 /layers/layers.1/blocks.0/mlp/fc1/Add" [label="[1, 1, 768]", style=solid]; +"1163 Constant_7217" -> "165 /layers/layers.1/blocks.0/mlp/fc2/Add" [label="[1, 1, 192]", style=solid]; +"1164 Constant_5395" -> "129 /layers/layers.1/blocks.1/Slice" [label="[2]", style=dashed]; +"1165 Constant_5392" -> "129 /layers/layers.1/blocks.1/Slice" [label="[2]", style=dashed]; +"1166 Constant_5389" -> "129 /layers/layers.1/blocks.1/Slice" [label="[2]", style=dashed]; +"1167 Constant_5419" -> "147 /layers/layers.1/blocks.1/Slice_2" [label="[3]", style=dashed]; +"1168 Constant_5416" -> "147 /layers/layers.1/blocks.1/Slice_2" [label="[3]", style=dashed]; +"1169 Constant_5413" -> "147 /layers/layers.1/blocks.1/Slice_2" [label="[3]", style=dashed]; +"1170 Constant_7220" -> "205 /layers/layers.1/blocks.1/attn/qkv/Add" [label="[1, 1, 576]", style=solid]; +"1171 /layers/layers.1/blocks.1/attn/Constant_3" -> "306 /layers/layers.1/blocks.1/attn/Reshape_2" [label="[4]", style=dashed]; +"1172 onnx^^Add_2421" -> "290 /layers/layers.1/blocks.1/attn/Add_1" [label="[1, 16, 1, 49, 49]", style=solid]; +"1173 /layers/layers.1/blocks.1/attn/Constant_2" -> "276 /layers/layers.1/blocks.1/attn/Reshape_1" [label="[5]", style=dashed]; +"1174 onnx^^Add_2413" -> "262 /layers/layers.1/blocks.1/attn/Add" [label="[1, 6, 49, 49]", style=solid]; +"1175 Constant_774" -> "235 /layers/layers.1/blocks.1/attn/Gather_1" [label="[]", style=dashed]; +"1176 Constant_7221" -> "248 /layers/layers.1/blocks.1/attn/Mul" [label="[1, 1, 1, 1]", style=solid]; +"1177 Constant_772" -> "234 
/layers/layers.1/blocks.1/attn/Gather" [label="[]", style=dashed]; +"1178 Constant_7222" -> "321 /layers/layers.1/blocks.1/attn/proj/Add" [label="[1, 1, 192]", style=solid]; +"1179 Constant_5443" -> "395 /layers/layers.1/blocks.1/Slice_4" [label="[2]", style=dashed]; +"1180 Constant_5440" -> "395 /layers/layers.1/blocks.1/Slice_4" [label="[2]", style=dashed]; +"1181 Constant_5437" -> "395 /layers/layers.1/blocks.1/Slice_4" [label="[2]", style=dashed]; +"1182 Constant_5467" -> "419 /layers/layers.1/blocks.1/Slice_6" [label="[3]", style=dashed]; +"1183 Constant_5464" -> "419 /layers/layers.1/blocks.1/Slice_6" [label="[3]", style=dashed]; +"1184 Constant_5461" -> "419 /layers/layers.1/blocks.1/Slice_6" [label="[3]", style=dashed]; +"1185 Constant_7225" -> "146 /layers/layers.1/blocks.1/mlp/fc1/Add" [label="[1, 1, 768]", style=solid]; +"1186 Constant_7226" -> "176 /layers/layers.1/blocks.1/mlp/fc2/Add" [label="[1, 1, 192]", style=solid]; +"1187 Constant_5539" -> "125 /layers/layers.1/downsample/Slice_4" [label="[3]", style=dashed]; +"1188 Constant_5536" -> "125 /layers/layers.1/downsample/Slice_4" [label="[3]", style=dashed]; +"1189 Constant_5533" -> "125 /layers/layers.1/downsample/Slice_4" [label="[3]", style=dashed]; +"1190 Constant_5491" -> "114 /layers/layers.1/downsample/Slice" [label="[2]", style=dashed]; +"1191 Constant_5488" -> "114 /layers/layers.1/downsample/Slice" [label="[2]", style=dashed]; +"1192 Constant_5485" -> "114 /layers/layers.1/downsample/Slice" [label="[2]", style=dashed]; +"1193 Constant_5527" -> "126 /layers/layers.1/downsample/Slice_3" [label="[3]", style=dashed]; +"1194 Constant_5524" -> "126 /layers/layers.1/downsample/Slice_3" [label="[3]", style=dashed]; +"1195 Constant_5521" -> "126 /layers/layers.1/downsample/Slice_3" [label="[3]", style=dashed]; +"1196 Constant_5503" -> "124 /layers/layers.1/downsample/Slice_1" [label="[3]", style=dashed]; +"1197 Constant_5500" -> "124 /layers/layers.1/downsample/Slice_1" [label="[3]", style=dashed]; +"1198 Constant_5497" -> "124 /layers/layers.1/downsample/Slice_1" [label="[3]", style=dashed]; +"1199 Constant_7231" -> "289 /layers/layers.2/blocks.0/attn/qkv/Add" [label="[1, 1, 1152]", style=solid]; +"1200 onnx^^Add_2485" -> "367 /layers/layers.2/blocks.0/attn/Add" [label="[1, 12, 49, 49]", style=solid]; +"1201 Constant_984" -> "335 /layers/layers.2/blocks.0/attn/Gather_1" [label="[]", style=dashed]; +"1202 Constant_7232" -> "349 /layers/layers.2/blocks.0/attn/Mul" [label="[1, 1, 1, 1]", style=solid]; +"1203 Constant_982" -> "334 /layers/layers.2/blocks.0/attn/Gather" [label="[]", style=dashed]; +"1204 Constant_7233" -> "418 /layers/layers.2/blocks.0/attn/proj/Add" [label="[1, 1, 384]", style=solid]; +"1205 Constant_7236" -> "246 /layers/layers.2/blocks.0/mlp/fc1/Add" [label="[1, 1, 1536]", style=solid]; +"1206 Constant_7237" -> "304 /layers/layers.2/blocks.0/mlp/fc2/Add" [label="[1, 1, 384]", style=solid]; +"1207 Constant_5563" -> "244 /layers/layers.2/blocks.1/Slice" [label="[2]", style=dashed]; +"1208 Constant_5560" -> "244 /layers/layers.2/blocks.1/Slice" [label="[2]", style=dashed]; +"1209 Constant_5557" -> "244 /layers/layers.2/blocks.1/Slice" [label="[2]", style=dashed]; +"1210 Constant_5587" -> "272 /layers/layers.2/blocks.1/Slice_2" [label="[3]", style=dashed]; +"1211 Constant_5584" -> "272 /layers/layers.2/blocks.1/Slice_2" [label="[3]", style=dashed]; +"1212 Constant_5581" -> "272 /layers/layers.2/blocks.1/Slice_2" [label="[3]", style=dashed]; +"1213 Constant_7240" -> "393 /layers/layers.2/blocks.1/attn/qkv/Add" 
[label="[1, 1, 1152]", style=solid]; +"1214 /layers/layers.2/blocks.1/attn/Constant_3" -> "489 /layers/layers.2/blocks.1/attn/Reshape_2" [label="[4]", style=dashed]; +"1215 onnx^^Add_2653" -> "476 /layers/layers.2/blocks.1/attn/Add_1" [label="[1, 4, 1, 49, 49]", style=solid]; +"1215 onnx^^Add_2653" -> "523 /layers/layers.2/blocks.3/attn/Add_1" [label="[1, 4, 1, 49, 49]", style=solid]; +"1215 onnx^^Add_2653" -> "566 /layers/layers.2/blocks.5/attn/Add_1" [label="[1, 4, 1, 49, 49]", style=solid]; +"1216 /layers/layers.2/blocks.1/attn/Constant_2" -> "463 /layers/layers.2/blocks.1/attn/Reshape_1" [label="[5]", style=dashed]; +"1217 onnx^^Add_2534" -> "453 /layers/layers.2/blocks.1/attn/Add" [label="[1, 12, 49, 49]", style=solid]; +"1218 Constant_1132" -> "429 /layers/layers.2/blocks.1/attn/Gather_1" [label="[]", style=dashed]; +"1219 Constant_7241" -> "441 /layers/layers.2/blocks.1/attn/Mul" [label="[1, 1, 1, 1]", style=solid]; +"1220 Constant_1130" -> "428 /layers/layers.2/blocks.1/attn/Gather" [label="[]", style=dashed]; +"1221 Constant_7242" -> "502 /layers/layers.2/blocks.1/attn/proj/Add" [label="[1, 1, 384]", style=solid]; +"1222 Constant_5611" -> "559 /layers/layers.2/blocks.1/Slice_4" [label="[2]", style=dashed]; +"1223 Constant_5608" -> "559 /layers/layers.2/blocks.1/Slice_4" [label="[2]", style=dashed]; +"1224 Constant_5605" -> "559 /layers/layers.2/blocks.1/Slice_4" [label="[2]", style=dashed]; +"1225 Constant_5635" -> "579 /layers/layers.2/blocks.1/Slice_6" [label="[3]", style=dashed]; +"1226 Constant_5632" -> "579 /layers/layers.2/blocks.1/Slice_6" [label="[3]", style=dashed]; +"1227 Constant_5629" -> "579 /layers/layers.2/blocks.1/Slice_6" [label="[3]", style=dashed]; +"1228 Constant_7245" -> "271 /layers/layers.2/blocks.1/mlp/fc1/Add" [label="[1, 1, 1536]", style=solid]; +"1229 Constant_7246" -> "332 /layers/layers.2/blocks.1/mlp/fc2/Add" [label="[1, 1, 384]", style=solid]; +"1230 Constant_7249" -> "347 /layers/layers.2/blocks.2/attn/qkv/Add" [label="[1, 1, 1152]", style=solid]; +"1231 onnx^^Add_2596" -> "415 /layers/layers.2/blocks.2/attn/Add" [label="[1, 12, 49, 49]", style=solid]; +"1232 Constant_1289" -> "391 /layers/layers.2/blocks.2/attn/Gather_1" [label="[]", style=dashed]; +"1233 Constant_7250" -> "403 /layers/layers.2/blocks.2/attn/Mul" [label="[1, 1, 1, 1]", style=solid]; +"1234 Constant_1287" -> "390 /layers/layers.2/blocks.2/attn/Gather" [label="[]", style=dashed]; +"1235 Constant_7251" -> "462 /layers/layers.2/blocks.2/attn/proj/Add" [label="[1, 1, 384]", style=solid]; +"1236 Constant_7254" -> "300 /layers/layers.2/blocks.2/mlp/fc1/Add" [label="[1, 1, 1536]", style=solid]; +"1237 Constant_7255" -> "364 /layers/layers.2/blocks.2/mlp/fc2/Add" [label="[1, 1, 384]", style=solid]; +"1238 Constant_5659" -> "298 /layers/layers.2/blocks.3/Slice" [label="[2]", style=dashed]; +"1239 Constant_5656" -> "298 /layers/layers.2/blocks.3/Slice" [label="[2]", style=dashed]; +"1240 Constant_5653" -> "298 /layers/layers.2/blocks.3/Slice" [label="[2]", style=dashed]; +"1241 Constant_5683" -> "328 /layers/layers.2/blocks.3/Slice_2" [label="[3]", style=dashed]; +"1242 Constant_5680" -> "328 /layers/layers.2/blocks.3/Slice_2" [label="[3]", style=dashed]; +"1243 Constant_5677" -> "328 /layers/layers.2/blocks.3/Slice_2" [label="[3]", style=dashed]; +"1244 Constant_7258" -> "439 /layers/layers.2/blocks.3/attn/qkv/Add" [label="[1, 1, 1152]", style=solid]; +"1245 /layers/layers.2/blocks.3/attn/Constant_3" -> "536 /layers/layers.2/blocks.3/attn/Reshape_2" [label="[4]", style=dashed]; +"1246 
/layers/layers.2/blocks.3/attn/Constant_2" -> "510 /layers/layers.2/blocks.3/attn/Reshape_1" [label="[5]", style=dashed]; +"1247 onnx^^Add_2645" -> "498 /layers/layers.2/blocks.3/attn/Add" [label="[1, 12, 49, 49]", style=solid]; +"1248 Constant_1437" -> "473 /layers/layers.2/blocks.3/attn/Gather_1" [label="[]", style=dashed]; +"1249 Constant_7259" -> "485 /layers/layers.2/blocks.3/attn/Mul" [label="[1, 1, 1, 1]", style=solid]; +"1250 Constant_1435" -> "472 /layers/layers.2/blocks.3/attn/Gather" [label="[]", style=dashed]; +"1251 Constant_7260" -> "548 /layers/layers.2/blocks.3/attn/proj/Add" [label="[1, 1, 384]", style=solid]; +"1252 Constant_5707" -> "596 /layers/layers.2/blocks.3/Slice_4" [label="[2]", style=dashed]; +"1253 Constant_5704" -> "596 /layers/layers.2/blocks.3/Slice_4" [label="[2]", style=dashed]; +"1254 Constant_5701" -> "596 /layers/layers.2/blocks.3/Slice_4" [label="[2]", style=dashed]; +"1255 Constant_5731" -> "612 /layers/layers.2/blocks.3/Slice_6" [label="[3]", style=dashed]; +"1256 Constant_5728" -> "612 /layers/layers.2/blocks.3/Slice_6" [label="[3]", style=dashed]; +"1257 Constant_5725" -> "612 /layers/layers.2/blocks.3/Slice_6" [label="[3]", style=dashed]; +"1258 Constant_7263" -> "327 /layers/layers.2/blocks.3/mlp/fc1/Add" [label="[1, 1, 1536]", style=solid]; +"1259 Constant_7264" -> "388 /layers/layers.2/blocks.3/mlp/fc2/Add" [label="[1, 1, 384]", style=solid]; +"1260 Constant_7267" -> "401 /layers/layers.2/blocks.4/attn/qkv/Add" [label="[1, 1, 1152]", style=solid]; +"1261 onnx^^Add_2707" -> "459 /layers/layers.2/blocks.4/attn/Add" [label="[1, 12, 49, 49]", style=solid]; +"1262 Constant_1594" -> "437 /layers/layers.2/blocks.4/attn/Gather_1" [label="[]", style=dashed]; +"1263 Constant_7268" -> "448 /layers/layers.2/blocks.4/attn/Mul" [label="[1, 1, 1, 1]", style=solid]; +"1264 Constant_1592" -> "436 /layers/layers.2/blocks.4/attn/Gather" [label="[]", style=dashed]; +"1265 Constant_7269" -> "509 /layers/layers.2/blocks.4/attn/proj/Add" [label="[1, 1, 384]", style=solid]; +"1266 Constant_7272" -> "360 /layers/layers.2/blocks.4/mlp/fc1/Add" [label="[1, 1, 1536]", style=solid]; +"1267 Constant_7273" -> "412 /layers/layers.2/blocks.4/mlp/fc2/Add" [label="[1, 1, 384]", style=solid]; +"1268 Constant_5755" -> "358 /layers/layers.2/blocks.5/Slice" [label="[2]", style=dashed]; +"1269 Constant_5752" -> "358 /layers/layers.2/blocks.5/Slice" [label="[2]", style=dashed]; +"1270 Constant_5749" -> "358 /layers/layers.2/blocks.5/Slice" [label="[2]", style=dashed]; +"1271 Constant_5779" -> "384 /layers/layers.2/blocks.5/Slice_2" [label="[3]", style=dashed]; +"1272 Constant_5776" -> "384 /layers/layers.2/blocks.5/Slice_2" [label="[3]", style=dashed]; +"1273 Constant_5773" -> "384 /layers/layers.2/blocks.5/Slice_2" [label="[3]", style=dashed]; +"1274 Constant_7276" -> "483 /layers/layers.2/blocks.5/attn/qkv/Add" [label="[1, 1, 1152]", style=solid]; +"1275 /layers/layers.2/blocks.5/attn/Constant_3" -> "576 /layers/layers.2/blocks.5/attn/Reshape_2" [label="[4]", style=dashed]; +"1276 /layers/layers.2/blocks.5/attn/Constant_2" -> "555 /layers/layers.2/blocks.5/attn/Reshape_1" [label="[5]", style=dashed]; +"1277 onnx^^Add_2756" -> "544 /layers/layers.2/blocks.5/attn/Add" [label="[1, 12, 49, 49]", style=solid]; +"1278 Constant_1742" -> "520 /layers/layers.2/blocks.5/attn/Gather_1" [label="[]", style=dashed]; +"1279 Constant_7277" -> "532 /layers/layers.2/blocks.5/attn/Mul" [label="[1, 1, 1, 1]", style=solid]; +"1280 Constant_1740" -> "519 /layers/layers.2/blocks.5/attn/Gather" [label="[]", 
style=dashed]; +"1281 Constant_7278" -> "586 /layers/layers.2/blocks.5/attn/proj/Add" [label="[1, 1, 384]", style=solid]; +"1282 Constant_5803" -> "624 /layers/layers.2/blocks.5/Slice_4" [label="[2]", style=dashed]; +"1283 Constant_5800" -> "624 /layers/layers.2/blocks.5/Slice_4" [label="[2]", style=dashed]; +"1284 Constant_5797" -> "624 /layers/layers.2/blocks.5/Slice_4" [label="[2]", style=dashed]; +"1285 Constant_5827" -> "633 /layers/layers.2/blocks.5/Slice_6" [label="[3]", style=dashed]; +"1286 Constant_5824" -> "633 /layers/layers.2/blocks.5/Slice_6" [label="[3]", style=dashed]; +"1287 Constant_5821" -> "633 /layers/layers.2/blocks.5/Slice_6" [label="[3]", style=dashed]; +"1288 Constant_7281" -> "383 /layers/layers.2/blocks.5/mlp/fc1/Add" [label="[1, 1, 1536]", style=solid]; +"1289 Constant_7282" -> "434 /layers/layers.2/blocks.5/mlp/fc2/Add" [label="[1, 1, 384]", style=solid]; +"1290 Constant_5899" -> "354 /layers/layers.2/downsample/Slice_4" [label="[3]", style=dashed]; +"1291 Constant_5896" -> "354 /layers/layers.2/downsample/Slice_4" [label="[3]", style=dashed]; +"1292 Constant_5893" -> "354 /layers/layers.2/downsample/Slice_4" [label="[3]", style=dashed]; +"1293 Constant_5851" -> "338 /layers/layers.2/downsample/Slice" [label="[2]", style=dashed]; +"1294 Constant_5848" -> "338 /layers/layers.2/downsample/Slice" [label="[2]", style=dashed]; +"1295 Constant_5845" -> "338 /layers/layers.2/downsample/Slice" [label="[2]", style=dashed]; +"1296 Constant_5887" -> "355 /layers/layers.2/downsample/Slice_3" [label="[3]", style=dashed]; +"1297 Constant_5884" -> "355 /layers/layers.2/downsample/Slice_3" [label="[3]", style=dashed]; +"1298 Constant_5881" -> "355 /layers/layers.2/downsample/Slice_3" [label="[3]", style=dashed]; +"1299 Constant_5863" -> "353 /layers/layers.2/downsample/Slice_1" [label="[3]", style=dashed]; +"1300 Constant_5860" -> "353 /layers/layers.2/downsample/Slice_1" [label="[3]", style=dashed]; +"1301 Constant_5857" -> "353 /layers/layers.2/downsample/Slice_1" [label="[3]", style=dashed]; +"1302 Constant_7287" -> "565 /layers/layers.3/blocks.0/attn/qkv/Add" [label="[1, 1, 2304]", style=solid]; +"1303 onnx^^Add_2828" -> "609 /layers/layers.3/blocks.0/attn/Add" [label="[1, 24, 49, 49]", style=solid]; +"1304 Constant_1952" -> "593 /layers/layers.3/blocks.0/attn/Gather_1" [label="[]", style=dashed]; +"1305 Constant_7288" -> "601 /layers/layers.3/blocks.0/attn/Mul" [label="[1, 1, 1, 1]", style=solid]; +"1306 Constant_1950" -> "592 /layers/layers.3/blocks.0/attn/Gather" [label="[]", style=dashed]; +"1307 Constant_7289" -> "632 /layers/layers.3/blocks.0/attn/proj/Add" [label="[1, 1, 768]", style=solid]; +"1308 Constant_7292" -> "530 /layers/layers.3/blocks.0/mlp/fc1/Add" [label="[1, 1, 3072]", style=solid]; +"1309 Constant_7293" -> "574 /layers/layers.3/blocks.0/mlp/fc2/Add" [label="[1, 1, 768]", style=solid]; +"1310 Constant_7296" -> "583 /layers/layers.3/blocks.1/attn/qkv/Add" [label="[1, 1, 2304]", style=solid]; +"1311 onnx^^Add_2877" -> "621 /layers/layers.3/blocks.1/attn/Add" [label="[1, 24, 49, 49]", style=solid]; +"1312 Constant_2058" -> "607 /layers/layers.3/blocks.1/attn/Gather_1" [label="[]", style=dashed]; +"1313 Constant_7297" -> "614 /layers/layers.3/blocks.1/attn/Mul" [label="[1, 1, 1, 1]", style=solid]; +"1314 Constant_2056" -> "606 /layers/layers.3/blocks.1/attn/Gather" [label="[]", style=dashed]; +"1315 Constant_7298" -> "638 /layers/layers.3/blocks.1/attn/proj/Add" [label="[1, 1, 768]", style=solid]; +"1316 Constant_7301" -> "551 
/layers/layers.3/blocks.1/mlp/fc1/Add" [label="[1, 1, 3072]", style=solid]; +"1317 Constant_7302" -> "590 /layers/layers.3/blocks.1/mlp/fc2/Add" [label="[1, 1, 768]", style=solid]; +} diff --git a/tests/openvino/native/data/reference_graphs/quantized/yolo-v4-tiny-tf.dot b/tests/openvino/native/data/reference_graphs/quantized/yolo-v4-tiny-tf.dot deleted file mode 100644 index 54be0e529c9..00000000000 --- a/tests/openvino/native/data/reference_graphs/quantized/yolo-v4-tiny-tf.dot +++ /dev/null @@ -1,749 +0,0 @@ -strict digraph { -"0 image_input" [id=0, type=Parameter]; -"1 image_input/fq_output_0" [id=1, type=FakeQuantize]; -"2 Divide_2373" [id=2, type=Transpose]; -"3 Multiply_3580" [id=3, type=Convolution]; -"4 Transpose_1182" [id=4, type=Add]; -"5 Transpose_1188" [id=5, type=PRelu]; -"6 Transpose_1188/fq_output_0" [id=6, type=FakeQuantize]; -"7 Multiply_3594" [id=7, type=Convolution]; -"8 Transpose_1237" [id=8, type=Add]; -"9 Transpose_1243" [id=9, type=PRelu]; -"10 Transpose_1243/fq_output_0" [id=10, type=FakeQuantize]; -"11 Multiply_3608" [id=11, type=Convolution]; -"12 Transpose_1267" [id=12, type=Add]; -"13 Transpose_1273" [id=13, type=PRelu]; -"14 Transpose_1273/fq_output_0" [id=14, type=FakeQuantize]; -"15 Transpose_1376" [id=15, type=Concat]; -"16 group_route_3/split" [id=16, type=Split]; -"17 MaxPool_303" [id=17, type=MaxPool]; -"18 Multiply_3622" [id=18, type=Convolution]; -"19 Multiply_3664" [id=19, type=Convolution]; -"20 Transpose_1302" [id=20, type=Add]; -"21 Transpose_1400" [id=21, type=Add]; -"22 Transpose_1308" [id=22, type=PRelu]; -"23 Transpose_1406" [id=23, type=PRelu]; -"24 Transpose_1308/fq_output_0" [id=24, type=FakeQuantize]; -"25 Transpose_1406/fq_output_0" [id=25, type=FakeQuantize]; -"26 Multiply_3636" [id=26, type=Convolution]; -"27 Transpose_1342" [id=27, type=Concat]; -"28 Transpose_1509" [id=28, type=Concat]; -"29 group_route_11/split" [id=29, type=Split]; -"30 Transpose_1332" [id=30, type=Add]; -"31 Multiply_3650" [id=31, type=Convolution]; -"32 MaxPool_429" [id=32, type=MaxPool]; -"33 Multiply_3678" [id=33, type=Convolution]; -"34 Transpose_1338" [id=34, type=PRelu]; -"35 Transpose_1366" [id=35, type=Add]; -"36 Multiply_3720" [id=36, type=Convolution]; -"37 Transpose_1435" [id=37, type=Add]; -"38 Transpose_1338/fq_output_0" [id=38, type=FakeQuantize]; -"39 Transpose_1372" [id=39, type=PRelu]; -"40 Transpose_1533" [id=40, type=Add]; -"41 Transpose_1441" [id=41, type=PRelu]; -"42 Transpose_1372/fq_output_0" [id=42, type=FakeQuantize]; -"43 Transpose_1539" [id=43, type=PRelu]; -"44 Transpose_1441/fq_output_0" [id=44, type=FakeQuantize]; -"45 Transpose_1539/fq_output_0" [id=45, type=FakeQuantize]; -"46 Multiply_3692" [id=46, type=Convolution]; -"47 Transpose_1475" [id=47, type=Concat]; -"48 Transpose_1642" [id=48, type=Concat]; -"49 group_route_19/split" [id=49, type=Split]; -"50 Transpose_1465" [id=50, type=Add]; -"51 Multiply_3706" [id=51, type=Convolution]; -"52 MaxPool_575" [id=52, type=MaxPool]; -"53 Multiply_3734" [id=53, type=Convolution]; -"54 Transpose_1471" [id=54, type=PRelu]; -"55 Transpose_1499" [id=55, type=Add]; -"56 Multiply_3776" [id=56, type=Convolution]; -"57 Transpose_1568" [id=57, type=Add]; -"58 Transpose_1471/fq_output_0" [id=58, type=FakeQuantize]; -"59 Transpose_1505" [id=59, type=PRelu]; -"60 Transpose_1666" [id=60, type=Add]; -"61 Transpose_1574" [id=61, type=PRelu]; -"62 Transpose_1505/fq_output_0" [id=62, type=FakeQuantize]; -"63 Transpose_1672" [id=63, type=PRelu]; -"64 Transpose_1574/fq_output_0" [id=64, type=FakeQuantize]; -"65 
Transpose_1672/fq_output_0" [id=65, type=FakeQuantize]; -"66 Multiply_3748" [id=66, type=Convolution]; -"67 Transpose_1608" [id=67, type=Concat]; -"68 Multiply_3790" [id=68, type=Convolution]; -"69 Transpose_1598" [id=69, type=Add]; -"70 Multiply_3762" [id=70, type=Convolution]; -"71 Transpose_1696" [id=71, type=Add]; -"72 Transpose_1604" [id=72, type=PRelu]; -"73 Transpose_1632" [id=73, type=Add]; -"74 Transpose_1702" [id=74, type=PRelu]; -"75 Transpose_1604/fq_output_0" [id=75, type=FakeQuantize]; -"76 Transpose_1638" [id=76, type=PRelu]; -"77 Transpose_1702/fq_output_0" [id=77, type=FakeQuantize]; -"78 Transpose_1638/fq_output_0" [id=78, type=FakeQuantize]; -"79 Multiply_3804" [id=79, type=Convolution]; -"80 Multiply_3832" [id=80, type=Convolution]; -"81 Transpose_1744" [id=81, type=Concat]; -"82 Transpose_1726" [id=82, type=Add]; -"83 Transpose_1804" [id=83, type=Add]; -"84 Multiply_3818" [id=84, type=Convolution]; -"85 Transpose_1732" [id=85, type=PRelu]; -"86 Transpose_1810" [id=86, type=PRelu]; -"87 Transpose_1768" [id=87, type=Add]; -"88 Transpose_1732/fq_output_0" [id=88, type=FakeQuantize]; -"89 Transpose_1810/fq_output_0" [id=89, type=FakeQuantize]; -"90 Transpose_1774" [id=90, type=PRelu]; -"91 Transpose_1740" [id=91, type=Interpolate]; -"92 leaky_re_lu_17/LeakyRelu" [id=92, type=Transpose]; -"93 Convolution_754" [id=93, type=Convolution]; -"94 Transpose_1774/fq_output_0" [id=94, type=FakeQuantize]; -"95 Transpose_1740/fq_output_0" [id=95, type=FakeQuantize]; -"96 ShapeOf_665" [id=96, type=ShapeOf]; -"97 Transpose_1816" [id=97, type=Add]; -"98 Convolution_711" [id=98, type=Convolution]; -"99 Slice_670" [id=99, type=StridedSlice]; -"100 conv2d_17/BiasAdd" [id=100, type=Transpose]; -"101 Transpose_1780" [id=101, type=Add]; -"102 Convert_671" [id=102, type=Convert]; -"103 conv2d_17/BiasAdd^0" [id=103, label="103 conv2d_17/BiasAdd:0", type=Result]; -"104 conv2d_20/BiasAdd" [id=104, type=Transpose]; -"105 Divide_673" [id=105, type=Divide]; -"106 conv2d_20/BiasAdd^0" [id=106, label="106 conv2d_20/BiasAdd:0", type=Result]; -"107 Constant_1779" [id=107, type=Constant]; -"108 Transpose_1778" [id=108, type=Constant]; -"109 Convolution_711/fq_weights_1" [id=109, type=FakeQuantize]; -"110 Constant_127163" [id=110, type=Constant]; -"111 Constant_127162" [id=111, type=Constant]; -"112 Constant_127161" [id=112, type=Constant]; -"113 Constant_127160" [id=113, type=Constant]; -"114 Transpose_710" [id=114, type=Constant]; -"115 Constant_127158" [id=115, type=Constant]; -"116 Constant_127157" [id=116, type=Constant]; -"117 Constant_127156" [id=117, type=Constant]; -"118 Constant_127155" [id=118, type=Constant]; -"119 Transpose_1772" [id=119, type=Constant]; -"120 Constant_3826" [id=120, type=Constant]; -"121 Multiply_3818/fq_weights_1" [id=121, type=FakeQuantize]; -"122 Constant_127153" [id=122, type=Constant]; -"123 Constant_127152" [id=123, type=Constant]; -"124 Constant_127151" [id=124, type=Constant]; -"125 Constant_127150" [id=125, type=Constant]; -"126 Multiply_3951" [id=126, type=Constant]; -"127 Constant_127023" [id=127, type=Constant]; -"128 Constant_127022" [id=128, type=Constant]; -"129 Constant_127021" [id=129, type=Constant]; -"130 Constant_127020" [id=130, type=Constant]; -"131 Transpose_1636" [id=131, type=Constant]; -"132 Constant_3770" [id=132, type=Constant]; -"133 Multiply_3762/fq_weights_1" [id=133, type=FakeQuantize]; -"134 Constant_127118" [id=134, type=Constant]; -"135 Constant_127117" [id=135, type=Constant]; -"136 Constant_127116" [id=136, type=Constant]; -"137 
Constant_127115" [id=137, type=Constant]; -"138 Multiply_3927" [id=138, type=Constant]; -"139 Constant_126993" [id=139, type=Constant]; -"140 Constant_126992" [id=140, type=Constant]; -"141 Constant_126991" [id=141, type=Constant]; -"142 Constant_126990" [id=142, type=Constant]; -"143 Transpose_1572" [id=143, type=Constant]; -"144 Constant_3742" [id=144, type=Constant]; -"145 Multiply_3734/fq_weights_1" [id=145, type=FakeQuantize]; -"146 Constant_127108" [id=146, type=Constant]; -"147 Constant_127107" [id=147, type=Constant]; -"148 Constant_127106" [id=148, type=Constant]; -"149 Constant_127105" [id=149, type=Constant]; -"150 Multiply_3915" [id=150, type=Constant]; -"151 Constant_1540" [id=151, type=Constant]; -"152 Constant_127028" [id=152, type=Constant]; -"153 Constant_127027" [id=153, type=Constant]; -"154 Constant_127026" [id=154, type=Constant]; -"155 Constant_127025" [id=155, type=Constant]; -"156 Transpose_1537" [id=156, type=Constant]; -"157 Constant_3728" [id=157, type=Constant]; -"158 Multiply_3720/fq_weights_1" [id=158, type=FakeQuantize]; -"159 Constant_127103" [id=159, type=Constant]; -"160 Constant_127102" [id=160, type=Constant]; -"161 Constant_127101" [id=161, type=Constant]; -"162 Constant_127100" [id=162, type=Constant]; -"163 Multiply_3909" [id=163, type=Constant]; -"164 Constant_127018" [id=164, type=Constant]; -"165 Constant_127017" [id=165, type=Constant]; -"166 Constant_127016" [id=166, type=Constant]; -"167 Constant_127015" [id=167, type=Constant]; -"168 Transpose_1503" [id=168, type=Constant]; -"169 Constant_3714" [id=169, type=Constant]; -"170 Multiply_3706/fq_weights_1" [id=170, type=FakeQuantize]; -"171 Constant_127098" [id=171, type=Constant]; -"172 Constant_127097" [id=172, type=Constant]; -"173 Constant_127096" [id=173, type=Constant]; -"174 Constant_127095" [id=174, type=Constant]; -"175 Multiply_3903" [id=175, type=Constant]; -"176 Constant_126988" [id=176, type=Constant]; -"177 Constant_126987" [id=177, type=Constant]; -"178 Constant_126986" [id=178, type=Constant]; -"179 Constant_126985" [id=179, type=Constant]; -"180 Transpose_1439" [id=180, type=Constant]; -"181 Constant_3686" [id=181, type=Constant]; -"182 Multiply_3678/fq_weights_1" [id=182, type=FakeQuantize]; -"183 Constant_127088" [id=183, type=Constant]; -"184 Constant_127087" [id=184, type=Constant]; -"185 Constant_127086" [id=185, type=Constant]; -"186 Constant_127085" [id=186, type=Constant]; -"187 Multiply_3891" [id=187, type=Constant]; -"188 Constant_1407" [id=188, type=Constant]; -"189 Constant_127013" [id=189, type=Constant]; -"190 Constant_127012" [id=190, type=Constant]; -"191 Constant_127011" [id=191, type=Constant]; -"192 Constant_127010" [id=192, type=Constant]; -"193 Transpose_1404" [id=193, type=Constant]; -"194 Constant_3672" [id=194, type=Constant]; -"195 Multiply_3664/fq_weights_1" [id=195, type=FakeQuantize]; -"196 Constant_127083" [id=196, type=Constant]; -"197 Constant_127082" [id=197, type=Constant]; -"198 Constant_127081" [id=198, type=Constant]; -"199 Constant_127080" [id=199, type=Constant]; -"200 Multiply_3885" [id=200, type=Constant]; -"201 Constant_127008" [id=201, type=Constant]; -"202 Constant_127007" [id=202, type=Constant]; -"203 Constant_127006" [id=203, type=Constant]; -"204 Constant_127005" [id=204, type=Constant]; -"205 Transpose_1370" [id=205, type=Constant]; -"206 Constant_3658" [id=206, type=Constant]; -"207 Multiply_3650/fq_weights_1" [id=207, type=FakeQuantize]; -"208 Constant_127078" [id=208, type=Constant]; -"209 Constant_127077" [id=209, type=Constant]; 
-"210 Constant_127076" [id=210, type=Constant]; -"211 Constant_127075" [id=211, type=Constant]; -"212 Multiply_3879" [id=212, type=Constant]; -"213 Constant_126973" [id=213, type=Constant]; -"214 Constant_126972" [id=214, type=Constant]; -"215 Constant_126971" [id=215, type=Constant]; -"216 Constant_126970" [id=216, type=Constant]; -"217 Transpose_1306" [id=217, type=Constant]; -"218 Constant_3630" [id=218, type=Constant]; -"219 Multiply_3622/fq_weights_1" [id=219, type=FakeQuantize]; -"220 Constant_127068" [id=220, type=Constant]; -"221 Constant_127067" [id=221, type=Constant]; -"222 Constant_127066" [id=222, type=Constant]; -"223 Constant_127065" [id=223, type=Constant]; -"224 Multiply_3867" [id=224, type=Constant]; -"225 Constant_1274" [id=225, type=Constant]; -"226 Constant_127003" [id=226, type=Constant]; -"227 Constant_127002" [id=227, type=Constant]; -"228 Constant_127001" [id=228, type=Constant]; -"229 Constant_127000" [id=229, type=Constant]; -"230 Transpose_1271" [id=230, type=Constant]; -"231 Constant_3616" [id=231, type=Constant]; -"232 Multiply_3608/fq_weights_1" [id=232, type=FakeQuantize]; -"233 Constant_127063" [id=233, type=Constant]; -"234 Constant_127062" [id=234, type=Constant]; -"235 Constant_127061" [id=235, type=Constant]; -"236 Constant_127060" [id=236, type=Constant]; -"237 Multiply_3861" [id=237, type=Constant]; -"238 Constant_127058" [id=238, type=Constant]; -"239 Constant_127057" [id=239, type=Constant]; -"240 Constant_127056" [id=240, type=Constant]; -"241 Constant_127055" [id=241, type=Constant]; -"242 Transpose_1241" [id=242, type=Constant]; -"243 Constant_3602" [id=243, type=Constant]; -"244 Multiply_3594/fq_weights_1" [id=244, type=FakeQuantize]; -"245 Constant_127053" [id=245, type=Constant]; -"246 Constant_127052" [id=246, type=Constant]; -"247 Constant_127051" [id=247, type=Constant]; -"248 Constant_127050" [id=248, type=Constant]; -"249 Multiply_3855" [id=249, type=Constant]; -"250 Constant_127048" [id=250, type=Constant]; -"251 Constant_127047" [id=251, type=Constant]; -"252 Constant_127046" [id=252, type=Constant]; -"253 Constant_127045" [id=253, type=Constant]; -"254 Transpose_1186" [id=254, type=Constant]; -"255 Constant_3588" [id=255, type=Constant]; -"256 Multiply_3580/fq_weights_1" [id=256, type=FakeQuantize]; -"257 Constant_127043" [id=257, type=Constant]; -"258 Constant_127042" [id=258, type=Constant]; -"259 Constant_127041" [id=259, type=Constant]; -"260 Constant_127040" [id=260, type=Constant]; -"261 Gather_4127" [id=261, type=Constant]; -"262 Constant_2343" [id=262, type=Constant]; -"263 Constant_127038" [id=263, type=Constant]; -"264 Constant_127037" [id=264, type=Constant]; -"265 Constant_127036" [id=265, type=Constant]; -"266 Constant_127035" [id=266, type=Constant]; -"267 Constant_126978" [id=267, type=Constant]; -"268 Constant_126977" [id=268, type=Constant]; -"269 Constant_126976" [id=269, type=Constant]; -"270 Constant_126975" [id=270, type=Constant]; -"271 Transpose_1336" [id=271, type=Constant]; -"272 Constant_3644" [id=272, type=Constant]; -"273 Multiply_3636/fq_weights_1" [id=273, type=FakeQuantize]; -"274 Constant_127073" [id=274, type=Constant]; -"275 Constant_127072" [id=275, type=Constant]; -"276 Constant_127071" [id=276, type=Constant]; -"277 Constant_127070" [id=277, type=Constant]; -"278 Multiply_3873" [id=278, type=Constant]; -"279 Constant_126983" [id=279, type=Constant]; -"280 Constant_126982" [id=280, type=Constant]; -"281 Constant_126981" [id=281, type=Constant]; -"282 Constant_126980" [id=282, type=Constant]; -"283 
Transpose_1469" [id=283, type=Constant]; -"284 Constant_3700" [id=284, type=Constant]; -"285 Multiply_3692/fq_weights_1" [id=285, type=FakeQuantize]; -"286 Constant_127093" [id=286, type=Constant]; -"287 Constant_127092" [id=287, type=Constant]; -"288 Constant_127091" [id=288, type=Constant]; -"289 Constant_127090" [id=289, type=Constant]; -"290 Multiply_3897" [id=290, type=Constant]; -"291 Constant_126998" [id=291, type=Constant]; -"292 Constant_126997" [id=292, type=Constant]; -"293 Constant_126996" [id=293, type=Constant]; -"294 Constant_126995" [id=294, type=Constant]; -"295 Transpose_1602" [id=295, type=Constant]; -"296 Constant_3756" [id=296, type=Constant]; -"297 Multiply_3748/fq_weights_1" [id=297, type=FakeQuantize]; -"298 Constant_127113" [id=298, type=Constant]; -"299 Constant_127112" [id=299, type=Constant]; -"300 Constant_127111" [id=300, type=Constant]; -"301 Constant_127110" [id=301, type=Constant]; -"302 Multiply_3921" [id=302, type=Constant]; -"303 Constant_127033" [id=303, type=Constant]; -"304 Constant_127032" [id=304, type=Constant]; -"305 Constant_127031" [id=305, type=Constant]; -"306 Constant_127030" [id=306, type=Constant]; -"307 Gather_1735" [id=307, type=Constant]; -"308 Constant_667" [id=308, type=Constant]; -"309 Constant_668" [id=309, type=Constant]; -"310 Constant_669" [id=310, type=Constant]; -"311 Constant_1731" [id=311, type=Constant]; -"312 Constant_127148" [id=312, type=Constant]; -"313 Constant_127147" [id=313, type=Constant]; -"314 Constant_127146" [id=314, type=Constant]; -"315 Constant_127145" [id=315, type=Constant]; -"316 Transpose_1730" [id=316, type=Constant]; -"317 Constant_3812" [id=317, type=Constant]; -"318 Multiply_3804/fq_weights_1" [id=318, type=FakeQuantize]; -"319 Constant_127143" [id=319, type=Constant]; -"320 Constant_127142" [id=320, type=Constant]; -"321 Constant_127141" [id=321, type=Constant]; -"322 Constant_127140" [id=322, type=Constant]; -"323 Multiply_3945" [id=323, type=Constant]; -"324 Constant_127138" [id=324, type=Constant]; -"325 Constant_127137" [id=325, type=Constant]; -"326 Constant_127136" [id=326, type=Constant]; -"327 Constant_127135" [id=327, type=Constant]; -"328 Transpose_1700" [id=328, type=Constant]; -"329 Constant_3798" [id=329, type=Constant]; -"330 Multiply_3790/fq_weights_1" [id=330, type=FakeQuantize]; -"331 Constant_127133" [id=331, type=Constant]; -"332 Constant_127132" [id=332, type=Constant]; -"333 Constant_127131" [id=333, type=Constant]; -"334 Constant_127130" [id=334, type=Constant]; -"335 Multiply_3939" [id=335, type=Constant]; -"336 Constant_127128" [id=336, type=Constant]; -"337 Constant_127127" [id=337, type=Constant]; -"338 Constant_127126" [id=338, type=Constant]; -"339 Constant_127125" [id=339, type=Constant]; -"340 Transpose_1670" [id=340, type=Constant]; -"341 Constant_3784" [id=341, type=Constant]; -"342 Multiply_3776/fq_weights_1" [id=342, type=FakeQuantize]; -"343 Constant_127123" [id=343, type=Constant]; -"344 Constant_127122" [id=344, type=Constant]; -"345 Constant_127121" [id=345, type=Constant]; -"346 Constant_127120" [id=346, type=Constant]; -"347 Multiply_3933" [id=347, type=Constant]; -"348 Convert_672" [id=348, type=Constant]; -"349 up_sampling2d/mul" [id=349, type=Constant]; -"350 Constant_1815" [id=350, type=Constant]; -"351 Transpose_1814" [id=351, type=Constant]; -"352 Convolution_754/fq_weights_1" [id=352, type=FakeQuantize]; -"353 Constant_127178" [id=353, type=Constant]; -"354 Constant_127177" [id=354, type=Constant]; -"355 Constant_127176" [id=355, type=Constant]; -"356 
Constant_127175" [id=356, type=Constant]; -"357 Transpose_753" [id=357, type=Constant]; -"358 Constant_127173" [id=358, type=Constant]; -"359 Constant_127172" [id=359, type=Constant]; -"360 Constant_127171" [id=360, type=Constant]; -"361 Constant_127170" [id=361, type=Constant]; -"362 Transpose_1808" [id=362, type=Constant]; -"363 Constant_3840" [id=363, type=Constant]; -"364 Multiply_3832/fq_weights_1" [id=364, type=FakeQuantize]; -"365 Constant_127168" [id=365, type=Constant]; -"366 Constant_127167" [id=366, type=Constant]; -"367 Constant_127166" [id=367, type=Constant]; -"368 Constant_127165" [id=368, type=Constant]; -"369 Multiply_3957" [id=369, type=Constant]; -"0 image_input" -> "1 image_input/fq_output_0" [label="[1, 416, 416, 3]", style=solid]; -"1 image_input/fq_output_0" -> "2 Divide_2373" [label="[1, 416, 416, 3]", style=solid]; -"2 Divide_2373" -> "3 Multiply_3580" [label="[1, 3, 416, 416]", style=solid]; -"3 Multiply_3580" -> "4 Transpose_1182" [label="[1, 32, 208, 208]", style=solid]; -"4 Transpose_1182" -> "5 Transpose_1188" [label="[1, 32, 208, 208]", style=solid]; -"5 Transpose_1188" -> "6 Transpose_1188/fq_output_0" [label="[1, 32, 208, 208]", style=solid]; -"6 Transpose_1188/fq_output_0" -> "7 Multiply_3594" [label="[1, 32, 208, 208]", style=solid]; -"7 Multiply_3594" -> "8 Transpose_1237" [label="[1, 64, 104, 104]", style=solid]; -"8 Transpose_1237" -> "9 Transpose_1243" [label="[1, 64, 104, 104]", style=solid]; -"9 Transpose_1243" -> "10 Transpose_1243/fq_output_0" [label="[1, 64, 104, 104]", style=solid]; -"10 Transpose_1243/fq_output_0" -> "11 Multiply_3608" [label="[1, 64, 104, 104]", style=solid]; -"11 Multiply_3608" -> "12 Transpose_1267" [label="[1, 64, 104, 104]", style=solid]; -"12 Transpose_1267" -> "13 Transpose_1273" [label="[1, 64, 104, 104]", style=solid]; -"13 Transpose_1273" -> "14 Transpose_1273/fq_output_0" [label="[1, 64, 104, 104]", style=solid]; -"14 Transpose_1273/fq_output_0" -> "15 Transpose_1376" [label="[1, 64, 104, 104]", style=solid]; -"14 Transpose_1273/fq_output_0" -> "16 group_route_3/split" [label="[1, 64, 104, 104]", style=solid]; -"15 Transpose_1376" -> "17 MaxPool_303" [label="[1, 128, 104, 104]", style=solid]; -"16 group_route_3/split" -> "18 Multiply_3622" [label="[1, 32, 104, 104]", style=solid]; -"17 MaxPool_303" -> "19 Multiply_3664" [label="[1, 128, 52, 52]", style=solid]; -"18 Multiply_3622" -> "20 Transpose_1302" [label="[1, 32, 104, 104]", style=solid]; -"19 Multiply_3664" -> "21 Transpose_1400" [label="[1, 128, 52, 52]", style=solid]; -"20 Transpose_1302" -> "22 Transpose_1308" [label="[1, 32, 104, 104]", style=solid]; -"21 Transpose_1400" -> "23 Transpose_1406" [label="[1, 128, 52, 52]", style=solid]; -"22 Transpose_1308" -> "24 Transpose_1308/fq_output_0" [label="[1, 32, 104, 104]", style=solid]; -"23 Transpose_1406" -> "25 Transpose_1406/fq_output_0" [label="[1, 128, 52, 52]", style=solid]; -"24 Transpose_1308/fq_output_0" -> "26 Multiply_3636" [label="[1, 32, 104, 104]", style=solid]; -"24 Transpose_1308/fq_output_0" -> "27 Transpose_1342" [label="[1, 32, 104, 104]", style=solid]; -"25 Transpose_1406/fq_output_0" -> "28 Transpose_1509" [label="[1, 128, 52, 52]", style=solid]; -"25 Transpose_1406/fq_output_0" -> "29 group_route_11/split" [label="[1, 128, 52, 52]", style=solid]; -"26 Multiply_3636" -> "30 Transpose_1332" [label="[1, 32, 104, 104]", style=solid]; -"27 Transpose_1342" -> "31 Multiply_3650" [label="[1, 64, 104, 104]", style=solid]; -"28 Transpose_1509" -> "32 MaxPool_429" [label="[1, 256, 52, 52]", 
style=solid]; -"29 group_route_11/split" -> "33 Multiply_3678" [label="[1, 64, 52, 52]", style=solid]; -"30 Transpose_1332" -> "34 Transpose_1338" [label="[1, 32, 104, 104]", style=solid]; -"31 Multiply_3650" -> "35 Transpose_1366" [label="[1, 64, 104, 104]", style=solid]; -"32 MaxPool_429" -> "36 Multiply_3720" [label="[1, 256, 26, 26]", style=solid]; -"33 Multiply_3678" -> "37 Transpose_1435" [label="[1, 64, 52, 52]", style=solid]; -"34 Transpose_1338" -> "38 Transpose_1338/fq_output_0" [label="[1, 32, 104, 104]", style=solid]; -"35 Transpose_1366" -> "39 Transpose_1372" [label="[1, 64, 104, 104]", style=solid]; -"36 Multiply_3720" -> "40 Transpose_1533" [label="[1, 256, 26, 26]", style=solid]; -"37 Transpose_1435" -> "41 Transpose_1441" [label="[1, 64, 52, 52]", style=solid]; -"38 Transpose_1338/fq_output_0" -> "27 Transpose_1342" [label="[1, 32, 104, 104]", style=solid]; -"39 Transpose_1372" -> "42 Transpose_1372/fq_output_0" [label="[1, 64, 104, 104]", style=solid]; -"40 Transpose_1533" -> "43 Transpose_1539" [label="[1, 256, 26, 26]", style=solid]; -"41 Transpose_1441" -> "44 Transpose_1441/fq_output_0" [label="[1, 64, 52, 52]", style=solid]; -"42 Transpose_1372/fq_output_0" -> "15 Transpose_1376" [label="[1, 64, 104, 104]", style=solid]; -"43 Transpose_1539" -> "45 Transpose_1539/fq_output_0" [label="[1, 256, 26, 26]", style=solid]; -"44 Transpose_1441/fq_output_0" -> "46 Multiply_3692" [label="[1, 64, 52, 52]", style=solid]; -"44 Transpose_1441/fq_output_0" -> "47 Transpose_1475" [label="[1, 64, 52, 52]", style=solid]; -"45 Transpose_1539/fq_output_0" -> "48 Transpose_1642" [label="[1, 256, 26, 26]", style=solid]; -"45 Transpose_1539/fq_output_0" -> "49 group_route_19/split" [label="[1, 256, 26, 26]", style=solid]; -"46 Multiply_3692" -> "50 Transpose_1465" [label="[1, 64, 52, 52]", style=solid]; -"47 Transpose_1475" -> "51 Multiply_3706" [label="[1, 128, 52, 52]", style=solid]; -"48 Transpose_1642" -> "52 MaxPool_575" [label="[1, 512, 26, 26]", style=solid]; -"49 group_route_19/split" -> "53 Multiply_3734" [label="[1, 128, 26, 26]", style=solid]; -"50 Transpose_1465" -> "54 Transpose_1471" [label="[1, 64, 52, 52]", style=solid]; -"51 Multiply_3706" -> "55 Transpose_1499" [label="[1, 128, 52, 52]", style=solid]; -"52 MaxPool_575" -> "56 Multiply_3776" [label="[1, 512, 13, 13]", style=solid]; -"53 Multiply_3734" -> "57 Transpose_1568" [label="[1, 128, 26, 26]", style=solid]; -"54 Transpose_1471" -> "58 Transpose_1471/fq_output_0" [label="[1, 64, 52, 52]", style=solid]; -"55 Transpose_1499" -> "59 Transpose_1505" [label="[1, 128, 52, 52]", style=solid]; -"56 Multiply_3776" -> "60 Transpose_1666" [label="[1, 512, 13, 13]", style=solid]; -"57 Transpose_1568" -> "61 Transpose_1574" [label="[1, 128, 26, 26]", style=solid]; -"58 Transpose_1471/fq_output_0" -> "47 Transpose_1475" [label="[1, 64, 52, 52]", style=solid]; -"59 Transpose_1505" -> "62 Transpose_1505/fq_output_0" [label="[1, 128, 52, 52]", style=solid]; -"60 Transpose_1666" -> "63 Transpose_1672" [label="[1, 512, 13, 13]", style=solid]; -"61 Transpose_1574" -> "64 Transpose_1574/fq_output_0" [label="[1, 128, 26, 26]", style=solid]; -"62 Transpose_1505/fq_output_0" -> "28 Transpose_1509" [label="[1, 128, 52, 52]", style=solid]; -"63 Transpose_1672" -> "65 Transpose_1672/fq_output_0" [label="[1, 512, 13, 13]", style=solid]; -"64 Transpose_1574/fq_output_0" -> "66 Multiply_3748" [label="[1, 128, 26, 26]", style=solid]; -"64 Transpose_1574/fq_output_0" -> "67 Transpose_1608" [label="[1, 128, 26, 26]", style=solid]; -"65 
Transpose_1672/fq_output_0" -> "68 Multiply_3790" [label="[1, 512, 13, 13]", style=solid]; -"66 Multiply_3748" -> "69 Transpose_1598" [label="[1, 128, 26, 26]", style=solid]; -"67 Transpose_1608" -> "70 Multiply_3762" [label="[1, 256, 26, 26]", style=solid]; -"68 Multiply_3790" -> "71 Transpose_1696" [label="[1, 256, 13, 13]", style=solid]; -"69 Transpose_1598" -> "72 Transpose_1604" [label="[1, 128, 26, 26]", style=solid]; -"70 Multiply_3762" -> "73 Transpose_1632" [label="[1, 256, 26, 26]", style=solid]; -"71 Transpose_1696" -> "74 Transpose_1702" [label="[1, 256, 13, 13]", style=solid]; -"72 Transpose_1604" -> "75 Transpose_1604/fq_output_0" [label="[1, 128, 26, 26]", style=solid]; -"73 Transpose_1632" -> "76 Transpose_1638" [label="[1, 256, 26, 26]", style=solid]; -"74 Transpose_1702" -> "77 Transpose_1702/fq_output_0" [label="[1, 256, 13, 13]", style=solid]; -"75 Transpose_1604/fq_output_0" -> "67 Transpose_1608" [label="[1, 128, 26, 26]", style=solid]; -"76 Transpose_1638" -> "78 Transpose_1638/fq_output_0" [label="[1, 256, 26, 26]", style=solid]; -"77 Transpose_1702/fq_output_0" -> "79 Multiply_3804" [label="[1, 256, 13, 13]", style=solid]; -"77 Transpose_1702/fq_output_0" -> "80 Multiply_3832" [label="[1, 256, 13, 13]", style=solid]; -"78 Transpose_1638/fq_output_0" -> "48 Transpose_1642" [label="[1, 256, 26, 26]", style=solid]; -"78 Transpose_1638/fq_output_0" -> "81 Transpose_1744" [label="[1, 256, 26, 26]", style=solid]; -"79 Multiply_3804" -> "82 Transpose_1726" [label="[1, 128, 13, 13]", style=solid]; -"80 Multiply_3832" -> "83 Transpose_1804" [label="[1, 512, 13, 13]", style=solid]; -"81 Transpose_1744" -> "84 Multiply_3818" [label="[1, 384, 26, 26]", style=solid]; -"82 Transpose_1726" -> "85 Transpose_1732" [label="[1, 128, 13, 13]", style=solid]; -"83 Transpose_1804" -> "86 Transpose_1810" [label="[1, 512, 13, 13]", style=solid]; -"84 Multiply_3818" -> "87 Transpose_1768" [label="[1, 256, 26, 26]", style=solid]; -"85 Transpose_1732" -> "88 Transpose_1732/fq_output_0" [label="[1, 128, 13, 13]", style=solid]; -"86 Transpose_1810" -> "89 Transpose_1810/fq_output_0" [label="[1, 512, 13, 13]", style=solid]; -"87 Transpose_1768" -> "90 Transpose_1774" [label="[1, 256, 26, 26]", style=solid]; -"88 Transpose_1732/fq_output_0" -> "91 Transpose_1740" [label="[1, 128, 13, 13]", style=solid]; -"88 Transpose_1732/fq_output_0" -> "92 leaky_re_lu_17/LeakyRelu" [label="[1, 128, 13, 13]", style=solid]; -"89 Transpose_1810/fq_output_0" -> "93 Convolution_754" [label="[1, 512, 13, 13]", style=solid]; -"90 Transpose_1774" -> "94 Transpose_1774/fq_output_0" [label="[1, 256, 26, 26]", style=solid]; -"91 Transpose_1740" -> "95 Transpose_1740/fq_output_0" [label="[1, 128, 26, 26]", style=solid]; -"92 leaky_re_lu_17/LeakyRelu" -> "96 ShapeOf_665" [label="[1, 13, 13, 128]", style=solid]; -"93 Convolution_754" -> "97 Transpose_1816" [label="[1, 255, 13, 13]", style=solid]; -"94 Transpose_1774/fq_output_0" -> "98 Convolution_711" [label="[1, 256, 26, 26]", style=solid]; -"95 Transpose_1740/fq_output_0" -> "81 Transpose_1744" [label="[1, 128, 26, 26]", style=solid]; -"96 ShapeOf_665" -> "99 Slice_670" [label="[4]", style=dashed]; -"97 Transpose_1816" -> "100 conv2d_17/BiasAdd" [label="[1, 255, 13, 13]", style=solid]; -"98 Convolution_711" -> "101 Transpose_1780" [label="[1, 255, 26, 26]", style=solid]; -"99 Slice_670" -> "102 Convert_671" [label="[2]", style=dashed]; -"100 conv2d_17/BiasAdd" -> "103 conv2d_17/BiasAdd^0" [label="[1, 13, 13, 255]", style=solid]; -"101 Transpose_1780" -> "104 
conv2d_20/BiasAdd" [label="[1, 255, 26, 26]", style=solid]; -"102 Convert_671" -> "105 Divide_673" [label="[2]", style=solid]; -"104 conv2d_20/BiasAdd" -> "106 conv2d_20/BiasAdd^0" [label="[1, 26, 26, 255]", style=solid]; -"105 Divide_673" -> "91 Transpose_1740" [label="[2]", style=solid]; -"107 Constant_1779" -> "104 conv2d_20/BiasAdd" [label="[4]", style=dashed]; -"108 Transpose_1778" -> "101 Transpose_1780" [label="[1, 255, 1, 1]", style=solid]; -"109 Convolution_711/fq_weights_1" -> "98 Convolution_711" [label="[255, 256, 1, 1]", style=solid]; -"110 Constant_127163" -> "109 Convolution_711/fq_weights_1" [label="[255, 1, 1, 1]", style=solid]; -"111 Constant_127162" -> "109 Convolution_711/fq_weights_1" [label="[255, 1, 1, 1]", style=solid]; -"112 Constant_127161" -> "109 Convolution_711/fq_weights_1" [label="[255, 1, 1, 1]", style=solid]; -"113 Constant_127160" -> "109 Convolution_711/fq_weights_1" [label="[255, 1, 1, 1]", style=solid]; -"114 Transpose_710" -> "109 Convolution_711/fq_weights_1" [label="[255, 256, 1, 1]", style=solid]; -"115 Constant_127158" -> "94 Transpose_1774/fq_output_0" [label="[]", style=solid]; -"116 Constant_127157" -> "94 Transpose_1774/fq_output_0" [label="[]", style=solid]; -"117 Constant_127156" -> "94 Transpose_1774/fq_output_0" [label="[]", style=solid]; -"118 Constant_127155" -> "94 Transpose_1774/fq_output_0" [label="[]", style=solid]; -"119 Transpose_1772" -> "90 Transpose_1774" [label="[1, 1, 1, 1]", style=solid]; -"120 Constant_3826" -> "87 Transpose_1768" [label="[1, 256, 1, 1]", style=solid]; -"121 Multiply_3818/fq_weights_1" -> "84 Multiply_3818" [label="[256, 384, 3, 3]", style=solid]; -"122 Constant_127153" -> "121 Multiply_3818/fq_weights_1" [label="[256, 1, 1, 1]", style=solid]; -"123 Constant_127152" -> "121 Multiply_3818/fq_weights_1" [label="[256, 1, 1, 1]", style=solid]; -"124 Constant_127151" -> "121 Multiply_3818/fq_weights_1" [label="[256, 1, 1, 1]", style=solid]; -"125 Constant_127150" -> "121 Multiply_3818/fq_weights_1" [label="[256, 1, 1, 1]", style=solid]; -"126 Multiply_3951" -> "121 Multiply_3818/fq_weights_1" [label="[256, 384, 3, 3]", style=solid]; -"127 Constant_127023" -> "78 Transpose_1638/fq_output_0" [label="[]", style=solid]; -"128 Constant_127022" -> "78 Transpose_1638/fq_output_0" [label="[]", style=solid]; -"129 Constant_127021" -> "78 Transpose_1638/fq_output_0" [label="[]", style=solid]; -"130 Constant_127020" -> "78 Transpose_1638/fq_output_0" [label="[]", style=solid]; -"131 Transpose_1636" -> "76 Transpose_1638" [label="[1, 1, 1, 1]", style=solid]; -"132 Constant_3770" -> "73 Transpose_1632" [label="[1, 256, 1, 1]", style=solid]; -"133 Multiply_3762/fq_weights_1" -> "70 Multiply_3762" [label="[256, 256, 1, 1]", style=solid]; -"134 Constant_127118" -> "133 Multiply_3762/fq_weights_1" [label="[256, 1, 1, 1]", style=solid]; -"135 Constant_127117" -> "133 Multiply_3762/fq_weights_1" [label="[256, 1, 1, 1]", style=solid]; -"136 Constant_127116" -> "133 Multiply_3762/fq_weights_1" [label="[256, 1, 1, 1]", style=solid]; -"137 Constant_127115" -> "133 Multiply_3762/fq_weights_1" [label="[256, 1, 1, 1]", style=solid]; -"138 Multiply_3927" -> "133 Multiply_3762/fq_weights_1" [label="[256, 256, 1, 1]", style=solid]; -"139 Constant_126993" -> "64 Transpose_1574/fq_output_0" [label="[]", style=solid]; -"140 Constant_126992" -> "64 Transpose_1574/fq_output_0" [label="[]", style=solid]; -"141 Constant_126991" -> "64 Transpose_1574/fq_output_0" [label="[]", style=solid]; -"142 Constant_126990" -> "64 Transpose_1574/fq_output_0" 
[label="[]", style=solid]; -"143 Transpose_1572" -> "61 Transpose_1574" [label="[1, 1, 1, 1]", style=solid]; -"144 Constant_3742" -> "57 Transpose_1568" [label="[1, 128, 1, 1]", style=solid]; -"145 Multiply_3734/fq_weights_1" -> "53 Multiply_3734" [label="[128, 128, 3, 3]", style=solid]; -"146 Constant_127108" -> "145 Multiply_3734/fq_weights_1" [label="[128, 1, 1, 1]", style=solid]; -"147 Constant_127107" -> "145 Multiply_3734/fq_weights_1" [label="[128, 1, 1, 1]", style=solid]; -"148 Constant_127106" -> "145 Multiply_3734/fq_weights_1" [label="[128, 1, 1, 1]", style=solid]; -"149 Constant_127105" -> "145 Multiply_3734/fq_weights_1" [label="[128, 1, 1, 1]", style=solid]; -"150 Multiply_3915" -> "145 Multiply_3734/fq_weights_1" [label="[128, 128, 3, 3]", style=solid]; -"151 Constant_1540" -> "49 group_route_19/split" [label="[]", style=dashed]; -"152 Constant_127028" -> "45 Transpose_1539/fq_output_0" [label="[]", style=solid]; -"153 Constant_127027" -> "45 Transpose_1539/fq_output_0" [label="[]", style=solid]; -"154 Constant_127026" -> "45 Transpose_1539/fq_output_0" [label="[]", style=solid]; -"155 Constant_127025" -> "45 Transpose_1539/fq_output_0" [label="[]", style=solid]; -"156 Transpose_1537" -> "43 Transpose_1539" [label="[1, 1, 1, 1]", style=solid]; -"157 Constant_3728" -> "40 Transpose_1533" [label="[1, 256, 1, 1]", style=solid]; -"158 Multiply_3720/fq_weights_1" -> "36 Multiply_3720" [label="[256, 256, 3, 3]", style=solid]; -"159 Constant_127103" -> "158 Multiply_3720/fq_weights_1" [label="[256, 1, 1, 1]", style=solid]; -"160 Constant_127102" -> "158 Multiply_3720/fq_weights_1" [label="[256, 1, 1, 1]", style=solid]; -"161 Constant_127101" -> "158 Multiply_3720/fq_weights_1" [label="[256, 1, 1, 1]", style=solid]; -"162 Constant_127100" -> "158 Multiply_3720/fq_weights_1" [label="[256, 1, 1, 1]", style=solid]; -"163 Multiply_3909" -> "158 Multiply_3720/fq_weights_1" [label="[256, 256, 3, 3]", style=solid]; -"164 Constant_127018" -> "62 Transpose_1505/fq_output_0" [label="[]", style=solid]; -"165 Constant_127017" -> "62 Transpose_1505/fq_output_0" [label="[]", style=solid]; -"166 Constant_127016" -> "62 Transpose_1505/fq_output_0" [label="[]", style=solid]; -"167 Constant_127015" -> "62 Transpose_1505/fq_output_0" [label="[]", style=solid]; -"168 Transpose_1503" -> "59 Transpose_1505" [label="[1, 1, 1, 1]", style=solid]; -"169 Constant_3714" -> "55 Transpose_1499" [label="[1, 128, 1, 1]", style=solid]; -"170 Multiply_3706/fq_weights_1" -> "51 Multiply_3706" [label="[128, 128, 1, 1]", style=solid]; -"171 Constant_127098" -> "170 Multiply_3706/fq_weights_1" [label="[128, 1, 1, 1]", style=solid]; -"172 Constant_127097" -> "170 Multiply_3706/fq_weights_1" [label="[128, 1, 1, 1]", style=solid]; -"173 Constant_127096" -> "170 Multiply_3706/fq_weights_1" [label="[128, 1, 1, 1]", style=solid]; -"174 Constant_127095" -> "170 Multiply_3706/fq_weights_1" [label="[128, 1, 1, 1]", style=solid]; -"175 Multiply_3903" -> "170 Multiply_3706/fq_weights_1" [label="[128, 128, 1, 1]", style=solid]; -"176 Constant_126988" -> "44 Transpose_1441/fq_output_0" [label="[]", style=solid]; -"177 Constant_126987" -> "44 Transpose_1441/fq_output_0" [label="[]", style=solid]; -"178 Constant_126986" -> "44 Transpose_1441/fq_output_0" [label="[]", style=solid]; -"179 Constant_126985" -> "44 Transpose_1441/fq_output_0" [label="[]", style=solid]; -"180 Transpose_1439" -> "41 Transpose_1441" [label="[1, 1, 1, 1]", style=solid]; -"181 Constant_3686" -> "37 Transpose_1435" [label="[1, 64, 1, 1]", style=solid]; -"182 
Multiply_3678/fq_weights_1" -> "33 Multiply_3678" [label="[64, 64, 3, 3]", style=solid]; -"183 Constant_127088" -> "182 Multiply_3678/fq_weights_1" [label="[64, 1, 1, 1]", style=solid]; -"184 Constant_127087" -> "182 Multiply_3678/fq_weights_1" [label="[64, 1, 1, 1]", style=solid]; -"185 Constant_127086" -> "182 Multiply_3678/fq_weights_1" [label="[64, 1, 1, 1]", style=solid]; -"186 Constant_127085" -> "182 Multiply_3678/fq_weights_1" [label="[64, 1, 1, 1]", style=solid]; -"187 Multiply_3891" -> "182 Multiply_3678/fq_weights_1" [label="[64, 64, 3, 3]", style=solid]; -"188 Constant_1407" -> "29 group_route_11/split" [label="[]", style=dashed]; -"189 Constant_127013" -> "25 Transpose_1406/fq_output_0" [label="[]", style=solid]; -"190 Constant_127012" -> "25 Transpose_1406/fq_output_0" [label="[]", style=solid]; -"191 Constant_127011" -> "25 Transpose_1406/fq_output_0" [label="[]", style=solid]; -"192 Constant_127010" -> "25 Transpose_1406/fq_output_0" [label="[]", style=solid]; -"193 Transpose_1404" -> "23 Transpose_1406" [label="[1, 1, 1, 1]", style=solid]; -"194 Constant_3672" -> "21 Transpose_1400" [label="[1, 128, 1, 1]", style=solid]; -"195 Multiply_3664/fq_weights_1" -> "19 Multiply_3664" [label="[128, 128, 3, 3]", style=solid]; -"196 Constant_127083" -> "195 Multiply_3664/fq_weights_1" [label="[128, 1, 1, 1]", style=solid]; -"197 Constant_127082" -> "195 Multiply_3664/fq_weights_1" [label="[128, 1, 1, 1]", style=solid]; -"198 Constant_127081" -> "195 Multiply_3664/fq_weights_1" [label="[128, 1, 1, 1]", style=solid]; -"199 Constant_127080" -> "195 Multiply_3664/fq_weights_1" [label="[128, 1, 1, 1]", style=solid]; -"200 Multiply_3885" -> "195 Multiply_3664/fq_weights_1" [label="[128, 128, 3, 3]", style=solid]; -"201 Constant_127008" -> "42 Transpose_1372/fq_output_0" [label="[]", style=solid]; -"202 Constant_127007" -> "42 Transpose_1372/fq_output_0" [label="[]", style=solid]; -"203 Constant_127006" -> "42 Transpose_1372/fq_output_0" [label="[]", style=solid]; -"204 Constant_127005" -> "42 Transpose_1372/fq_output_0" [label="[]", style=solid]; -"205 Transpose_1370" -> "39 Transpose_1372" [label="[1, 1, 1, 1]", style=solid]; -"206 Constant_3658" -> "35 Transpose_1366" [label="[1, 64, 1, 1]", style=solid]; -"207 Multiply_3650/fq_weights_1" -> "31 Multiply_3650" [label="[64, 64, 1, 1]", style=solid]; -"208 Constant_127078" -> "207 Multiply_3650/fq_weights_1" [label="[64, 1, 1, 1]", style=solid]; -"209 Constant_127077" -> "207 Multiply_3650/fq_weights_1" [label="[64, 1, 1, 1]", style=solid]; -"210 Constant_127076" -> "207 Multiply_3650/fq_weights_1" [label="[64, 1, 1, 1]", style=solid]; -"211 Constant_127075" -> "207 Multiply_3650/fq_weights_1" [label="[64, 1, 1, 1]", style=solid]; -"212 Multiply_3879" -> "207 Multiply_3650/fq_weights_1" [label="[64, 64, 1, 1]", style=solid]; -"213 Constant_126973" -> "24 Transpose_1308/fq_output_0" [label="[]", style=solid]; -"214 Constant_126972" -> "24 Transpose_1308/fq_output_0" [label="[]", style=solid]; -"215 Constant_126971" -> "24 Transpose_1308/fq_output_0" [label="[]", style=solid]; -"216 Constant_126970" -> "24 Transpose_1308/fq_output_0" [label="[]", style=solid]; -"217 Transpose_1306" -> "22 Transpose_1308" [label="[1, 1, 1, 1]", style=solid]; -"218 Constant_3630" -> "20 Transpose_1302" [label="[1, 32, 1, 1]", style=solid]; -"219 Multiply_3622/fq_weights_1" -> "18 Multiply_3622" [label="[32, 32, 3, 3]", style=solid]; -"220 Constant_127068" -> "219 Multiply_3622/fq_weights_1" [label="[32, 1, 1, 1]", style=solid]; -"221 Constant_127067" -> "219 
Multiply_3622/fq_weights_1" [label="[32, 1, 1, 1]", style=solid]; -"222 Constant_127066" -> "219 Multiply_3622/fq_weights_1" [label="[32, 1, 1, 1]", style=solid]; -"223 Constant_127065" -> "219 Multiply_3622/fq_weights_1" [label="[32, 1, 1, 1]", style=solid]; -"224 Multiply_3867" -> "219 Multiply_3622/fq_weights_1" [label="[32, 32, 3, 3]", style=solid]; -"225 Constant_1274" -> "16 group_route_3/split" [label="[]", style=dashed]; -"226 Constant_127003" -> "14 Transpose_1273/fq_output_0" [label="[]", style=solid]; -"227 Constant_127002" -> "14 Transpose_1273/fq_output_0" [label="[]", style=solid]; -"228 Constant_127001" -> "14 Transpose_1273/fq_output_0" [label="[]", style=solid]; -"229 Constant_127000" -> "14 Transpose_1273/fq_output_0" [label="[]", style=solid]; -"230 Transpose_1271" -> "13 Transpose_1273" [label="[1, 1, 1, 1]", style=solid]; -"231 Constant_3616" -> "12 Transpose_1267" [label="[1, 64, 1, 1]", style=solid]; -"232 Multiply_3608/fq_weights_1" -> "11 Multiply_3608" [label="[64, 64, 3, 3]", style=solid]; -"233 Constant_127063" -> "232 Multiply_3608/fq_weights_1" [label="[64, 1, 1, 1]", style=solid]; -"234 Constant_127062" -> "232 Multiply_3608/fq_weights_1" [label="[64, 1, 1, 1]", style=solid]; -"235 Constant_127061" -> "232 Multiply_3608/fq_weights_1" [label="[64, 1, 1, 1]", style=solid]; -"236 Constant_127060" -> "232 Multiply_3608/fq_weights_1" [label="[64, 1, 1, 1]", style=solid]; -"237 Multiply_3861" -> "232 Multiply_3608/fq_weights_1" [label="[64, 64, 3, 3]", style=solid]; -"238 Constant_127058" -> "10 Transpose_1243/fq_output_0" [label="[]", style=solid]; -"239 Constant_127057" -> "10 Transpose_1243/fq_output_0" [label="[]", style=solid]; -"240 Constant_127056" -> "10 Transpose_1243/fq_output_0" [label="[]", style=solid]; -"241 Constant_127055" -> "10 Transpose_1243/fq_output_0" [label="[]", style=solid]; -"242 Transpose_1241" -> "9 Transpose_1243" [label="[1, 1, 1, 1]", style=solid]; -"243 Constant_3602" -> "8 Transpose_1237" [label="[1, 64, 1, 1]", style=solid]; -"244 Multiply_3594/fq_weights_1" -> "7 Multiply_3594" [label="[64, 32, 3, 3]", style=solid]; -"245 Constant_127053" -> "244 Multiply_3594/fq_weights_1" [label="[64, 1, 1, 1]", style=solid]; -"246 Constant_127052" -> "244 Multiply_3594/fq_weights_1" [label="[64, 1, 1, 1]", style=solid]; -"247 Constant_127051" -> "244 Multiply_3594/fq_weights_1" [label="[64, 1, 1, 1]", style=solid]; -"248 Constant_127050" -> "244 Multiply_3594/fq_weights_1" [label="[64, 1, 1, 1]", style=solid]; -"249 Multiply_3855" -> "244 Multiply_3594/fq_weights_1" [label="[64, 32, 3, 3]", style=solid]; -"250 Constant_127048" -> "6 Transpose_1188/fq_output_0" [label="[]", style=solid]; -"251 Constant_127047" -> "6 Transpose_1188/fq_output_0" [label="[]", style=solid]; -"252 Constant_127046" -> "6 Transpose_1188/fq_output_0" [label="[]", style=solid]; -"253 Constant_127045" -> "6 Transpose_1188/fq_output_0" [label="[]", style=solid]; -"254 Transpose_1186" -> "5 Transpose_1188" [label="[1, 1, 1, 1]", style=solid]; -"255 Constant_3588" -> "4 Transpose_1182" [label="[1, 32, 1, 1]", style=solid]; -"256 Multiply_3580/fq_weights_1" -> "3 Multiply_3580" [label="[32, 3, 3, 3]", style=solid]; -"257 Constant_127043" -> "256 Multiply_3580/fq_weights_1" [label="[32, 1, 1, 1]", style=solid]; -"258 Constant_127042" -> "256 Multiply_3580/fq_weights_1" [label="[32, 1, 1, 1]", style=solid]; -"259 Constant_127041" -> "256 Multiply_3580/fq_weights_1" [label="[32, 1, 1, 1]", style=solid]; -"260 Constant_127040" -> "256 Multiply_3580/fq_weights_1" [label="[32, 1, 
1, 1]", style=solid]; -"261 Gather_4127" -> "256 Multiply_3580/fq_weights_1" [label="[32, 3, 3, 3]", style=solid]; -"262 Constant_2343" -> "2 Divide_2373" [label="[4]", style=dashed]; -"263 Constant_127038" -> "1 image_input/fq_output_0" [label="[]", style=solid]; -"264 Constant_127037" -> "1 image_input/fq_output_0" [label="[]", style=solid]; -"265 Constant_127036" -> "1 image_input/fq_output_0" [label="[]", style=solid]; -"266 Constant_127035" -> "1 image_input/fq_output_0" [label="[]", style=solid]; -"267 Constant_126978" -> "38 Transpose_1338/fq_output_0" [label="[]", style=solid]; -"268 Constant_126977" -> "38 Transpose_1338/fq_output_0" [label="[]", style=solid]; -"269 Constant_126976" -> "38 Transpose_1338/fq_output_0" [label="[]", style=solid]; -"270 Constant_126975" -> "38 Transpose_1338/fq_output_0" [label="[]", style=solid]; -"271 Transpose_1336" -> "34 Transpose_1338" [label="[1, 1, 1, 1]", style=solid]; -"272 Constant_3644" -> "30 Transpose_1332" [label="[1, 32, 1, 1]", style=solid]; -"273 Multiply_3636/fq_weights_1" -> "26 Multiply_3636" [label="[32, 32, 3, 3]", style=solid]; -"274 Constant_127073" -> "273 Multiply_3636/fq_weights_1" [label="[32, 1, 1, 1]", style=solid]; -"275 Constant_127072" -> "273 Multiply_3636/fq_weights_1" [label="[32, 1, 1, 1]", style=solid]; -"276 Constant_127071" -> "273 Multiply_3636/fq_weights_1" [label="[32, 1, 1, 1]", style=solid]; -"277 Constant_127070" -> "273 Multiply_3636/fq_weights_1" [label="[32, 1, 1, 1]", style=solid]; -"278 Multiply_3873" -> "273 Multiply_3636/fq_weights_1" [label="[32, 32, 3, 3]", style=solid]; -"279 Constant_126983" -> "58 Transpose_1471/fq_output_0" [label="[]", style=solid]; -"280 Constant_126982" -> "58 Transpose_1471/fq_output_0" [label="[]", style=solid]; -"281 Constant_126981" -> "58 Transpose_1471/fq_output_0" [label="[]", style=solid]; -"282 Constant_126980" -> "58 Transpose_1471/fq_output_0" [label="[]", style=solid]; -"283 Transpose_1469" -> "54 Transpose_1471" [label="[1, 1, 1, 1]", style=solid]; -"284 Constant_3700" -> "50 Transpose_1465" [label="[1, 64, 1, 1]", style=solid]; -"285 Multiply_3692/fq_weights_1" -> "46 Multiply_3692" [label="[64, 64, 3, 3]", style=solid]; -"286 Constant_127093" -> "285 Multiply_3692/fq_weights_1" [label="[64, 1, 1, 1]", style=solid]; -"287 Constant_127092" -> "285 Multiply_3692/fq_weights_1" [label="[64, 1, 1, 1]", style=solid]; -"288 Constant_127091" -> "285 Multiply_3692/fq_weights_1" [label="[64, 1, 1, 1]", style=solid]; -"289 Constant_127090" -> "285 Multiply_3692/fq_weights_1" [label="[64, 1, 1, 1]", style=solid]; -"290 Multiply_3897" -> "285 Multiply_3692/fq_weights_1" [label="[64, 64, 3, 3]", style=solid]; -"291 Constant_126998" -> "75 Transpose_1604/fq_output_0" [label="[]", style=solid]; -"292 Constant_126997" -> "75 Transpose_1604/fq_output_0" [label="[]", style=solid]; -"293 Constant_126996" -> "75 Transpose_1604/fq_output_0" [label="[]", style=solid]; -"294 Constant_126995" -> "75 Transpose_1604/fq_output_0" [label="[]", style=solid]; -"295 Transpose_1602" -> "72 Transpose_1604" [label="[1, 1, 1, 1]", style=solid]; -"296 Constant_3756" -> "69 Transpose_1598" [label="[1, 128, 1, 1]", style=solid]; -"297 Multiply_3748/fq_weights_1" -> "66 Multiply_3748" [label="[128, 128, 3, 3]", style=solid]; -"298 Constant_127113" -> "297 Multiply_3748/fq_weights_1" [label="[128, 1, 1, 1]", style=solid]; -"299 Constant_127112" -> "297 Multiply_3748/fq_weights_1" [label="[128, 1, 1, 1]", style=solid]; -"300 Constant_127111" -> "297 Multiply_3748/fq_weights_1" [label="[128, 1, 1, 
1]", style=solid]; -"301 Constant_127110" -> "297 Multiply_3748/fq_weights_1" [label="[128, 1, 1, 1]", style=solid]; -"302 Multiply_3921" -> "297 Multiply_3748/fq_weights_1" [label="[128, 128, 3, 3]", style=solid]; -"303 Constant_127033" -> "95 Transpose_1740/fq_output_0" [label="[]", style=solid]; -"304 Constant_127032" -> "95 Transpose_1740/fq_output_0" [label="[]", style=solid]; -"305 Constant_127031" -> "95 Transpose_1740/fq_output_0" [label="[]", style=solid]; -"306 Constant_127030" -> "95 Transpose_1740/fq_output_0" [label="[]", style=solid]; -"307 Gather_1735" -> "91 Transpose_1740" [label="[2]", style=dashed]; -"308 Constant_667" -> "99 Slice_670" [label="[1]", style=dashed]; -"309 Constant_668" -> "99 Slice_670" [label="[1]", style=dashed]; -"310 Constant_669" -> "99 Slice_670" [label="[1]", style=dashed]; -"311 Constant_1731" -> "92 leaky_re_lu_17/LeakyRelu" [label="[4]", style=dashed]; -"312 Constant_127148" -> "88 Transpose_1732/fq_output_0" [label="[]", style=solid]; -"313 Constant_127147" -> "88 Transpose_1732/fq_output_0" [label="[]", style=solid]; -"314 Constant_127146" -> "88 Transpose_1732/fq_output_0" [label="[]", style=solid]; -"315 Constant_127145" -> "88 Transpose_1732/fq_output_0" [label="[]", style=solid]; -"316 Transpose_1730" -> "85 Transpose_1732" [label="[1, 1, 1, 1]", style=solid]; -"317 Constant_3812" -> "82 Transpose_1726" [label="[1, 128, 1, 1]", style=solid]; -"318 Multiply_3804/fq_weights_1" -> "79 Multiply_3804" [label="[128, 256, 1, 1]", style=solid]; -"319 Constant_127143" -> "318 Multiply_3804/fq_weights_1" [label="[128, 1, 1, 1]", style=solid]; -"320 Constant_127142" -> "318 Multiply_3804/fq_weights_1" [label="[128, 1, 1, 1]", style=solid]; -"321 Constant_127141" -> "318 Multiply_3804/fq_weights_1" [label="[128, 1, 1, 1]", style=solid]; -"322 Constant_127140" -> "318 Multiply_3804/fq_weights_1" [label="[128, 1, 1, 1]", style=solid]; -"323 Multiply_3945" -> "318 Multiply_3804/fq_weights_1" [label="[128, 256, 1, 1]", style=solid]; -"324 Constant_127138" -> "77 Transpose_1702/fq_output_0" [label="[]", style=solid]; -"325 Constant_127137" -> "77 Transpose_1702/fq_output_0" [label="[]", style=solid]; -"326 Constant_127136" -> "77 Transpose_1702/fq_output_0" [label="[]", style=solid]; -"327 Constant_127135" -> "77 Transpose_1702/fq_output_0" [label="[]", style=solid]; -"328 Transpose_1700" -> "74 Transpose_1702" [label="[1, 1, 1, 1]", style=solid]; -"329 Constant_3798" -> "71 Transpose_1696" [label="[1, 256, 1, 1]", style=solid]; -"330 Multiply_3790/fq_weights_1" -> "68 Multiply_3790" [label="[256, 512, 1, 1]", style=solid]; -"331 Constant_127133" -> "330 Multiply_3790/fq_weights_1" [label="[256, 1, 1, 1]", style=solid]; -"332 Constant_127132" -> "330 Multiply_3790/fq_weights_1" [label="[256, 1, 1, 1]", style=solid]; -"333 Constant_127131" -> "330 Multiply_3790/fq_weights_1" [label="[256, 1, 1, 1]", style=solid]; -"334 Constant_127130" -> "330 Multiply_3790/fq_weights_1" [label="[256, 1, 1, 1]", style=solid]; -"335 Multiply_3939" -> "330 Multiply_3790/fq_weights_1" [label="[256, 512, 1, 1]", style=solid]; -"336 Constant_127128" -> "65 Transpose_1672/fq_output_0" [label="[]", style=solid]; -"337 Constant_127127" -> "65 Transpose_1672/fq_output_0" [label="[]", style=solid]; -"338 Constant_127126" -> "65 Transpose_1672/fq_output_0" [label="[]", style=solid]; -"339 Constant_127125" -> "65 Transpose_1672/fq_output_0" [label="[]", style=solid]; -"340 Transpose_1670" -> "63 Transpose_1672" [label="[1, 1, 1, 1]", style=solid]; -"341 Constant_3784" -> "60 
Transpose_1666" [label="[1, 512, 1, 1]", style=solid]; -"342 Multiply_3776/fq_weights_1" -> "56 Multiply_3776" [label="[512, 512, 3, 3]", style=solid]; -"343 Constant_127123" -> "342 Multiply_3776/fq_weights_1" [label="[512, 1, 1, 1]", style=solid]; -"344 Constant_127122" -> "342 Multiply_3776/fq_weights_1" [label="[512, 1, 1, 1]", style=solid]; -"345 Constant_127121" -> "342 Multiply_3776/fq_weights_1" [label="[512, 1, 1, 1]", style=solid]; -"346 Constant_127120" -> "342 Multiply_3776/fq_weights_1" [label="[512, 1, 1, 1]", style=solid]; -"347 Multiply_3933" -> "342 Multiply_3776/fq_weights_1" [label="[512, 512, 3, 3]", style=solid]; -"348 Convert_672" -> "105 Divide_673" [label="[2]", style=solid]; -"349 up_sampling2d/mul" -> "91 Transpose_1740" [label="[2]", style=dashed]; -"350 Constant_1815" -> "100 conv2d_17/BiasAdd" [label="[4]", style=dashed]; -"351 Transpose_1814" -> "97 Transpose_1816" [label="[1, 255, 1, 1]", style=solid]; -"352 Convolution_754/fq_weights_1" -> "93 Convolution_754" [label="[255, 512, 1, 1]", style=solid]; -"353 Constant_127178" -> "352 Convolution_754/fq_weights_1" [label="[255, 1, 1, 1]", style=solid]; -"354 Constant_127177" -> "352 Convolution_754/fq_weights_1" [label="[255, 1, 1, 1]", style=solid]; -"355 Constant_127176" -> "352 Convolution_754/fq_weights_1" [label="[255, 1, 1, 1]", style=solid]; -"356 Constant_127175" -> "352 Convolution_754/fq_weights_1" [label="[255, 1, 1, 1]", style=solid]; -"357 Transpose_753" -> "352 Convolution_754/fq_weights_1" [label="[255, 512, 1, 1]", style=solid]; -"358 Constant_127173" -> "89 Transpose_1810/fq_output_0" [label="[]", style=solid]; -"359 Constant_127172" -> "89 Transpose_1810/fq_output_0" [label="[]", style=solid]; -"360 Constant_127171" -> "89 Transpose_1810/fq_output_0" [label="[]", style=solid]; -"361 Constant_127170" -> "89 Transpose_1810/fq_output_0" [label="[]", style=solid]; -"362 Transpose_1808" -> "86 Transpose_1810" [label="[1, 1, 1, 1]", style=solid]; -"363 Constant_3840" -> "83 Transpose_1804" [label="[1, 512, 1, 1]", style=solid]; -"364 Multiply_3832/fq_weights_1" -> "80 Multiply_3832" [label="[512, 256, 3, 3]", style=solid]; -"365 Constant_127168" -> "364 Multiply_3832/fq_weights_1" [label="[512, 1, 1, 1]", style=solid]; -"366 Constant_127167" -> "364 Multiply_3832/fq_weights_1" [label="[512, 1, 1, 1]", style=solid]; -"367 Constant_127166" -> "364 Multiply_3832/fq_weights_1" [label="[512, 1, 1, 1]", style=solid]; -"368 Constant_127165" -> "364 Multiply_3832/fq_weights_1" [label="[512, 1, 1, 1]", style=solid]; -"369 Multiply_3957" -> "364 Multiply_3832/fq_weights_1" [label="[512, 256, 3, 3]", style=solid]; -} diff --git a/tests/openvino/native/data/reference_graphs/quantized/yolo-v4-tiny-tf_performance.dot b/tests/openvino/native/data/reference_graphs/quantized/yolo-v4-tiny-tf_performance.dot new file mode 100644 index 00000000000..91280c60741 --- /dev/null +++ b/tests/openvino/native/data/reference_graphs/quantized/yolo-v4-tiny-tf_performance.dot @@ -0,0 +1,749 @@ +strict digraph { +"0 image_input" [id=0, type=Parameter]; +"1 image_input/fq_output_0" [id=1, type=FakeQuantize]; +"2 Divide_2366" [id=2, type=Transpose]; +"3 Multiply_3699" [id=3, type=Convolution]; +"4 Transpose_1171" [id=4, type=Add]; +"5 Transpose_1177" [id=5, type=PRelu]; +"6 Transpose_1177/fq_output_0" [id=6, type=FakeQuantize]; +"7 Multiply_3713" [id=7, type=Convolution]; +"8 Transpose_1228" [id=8, type=Add]; +"9 Transpose_1234" [id=9, type=PRelu]; +"10 Transpose_1234/fq_output_0" [id=10, type=FakeQuantize]; +"11 Multiply_3727" 
[id=11, type=Convolution]; +"12 Transpose_1258" [id=12, type=Add]; +"13 Transpose_1264" [id=13, type=PRelu]; +"14 Transpose_1264/fq_output_0" [id=14, type=FakeQuantize]; +"15 Transpose_1367" [id=15, type=Concat]; +"16 group_route_3/split" [id=16, type=Split]; +"17 MaxPool_307" [id=17, type=MaxPool]; +"18 Multiply_3741" [id=18, type=Convolution]; +"19 Multiply_3783" [id=19, type=Convolution]; +"20 Transpose_1293" [id=20, type=Add]; +"21 Transpose_1391" [id=21, type=Add]; +"22 Transpose_1299" [id=22, type=PRelu]; +"23 Transpose_1397" [id=23, type=PRelu]; +"24 Transpose_1299/fq_output_0" [id=24, type=FakeQuantize]; +"25 Transpose_1397/fq_output_0" [id=25, type=FakeQuantize]; +"26 Multiply_3755" [id=26, type=Convolution]; +"27 Transpose_1333" [id=27, type=Concat]; +"28 Transpose_1500" [id=28, type=Concat]; +"29 group_route_11/split" [id=29, type=Split]; +"30 Transpose_1323" [id=30, type=Add]; +"31 Multiply_3769" [id=31, type=Convolution]; +"32 MaxPool_433" [id=32, type=MaxPool]; +"33 Multiply_3797" [id=33, type=Convolution]; +"34 Transpose_1329" [id=34, type=PRelu]; +"35 Transpose_1357" [id=35, type=Add]; +"36 Multiply_3839" [id=36, type=Convolution]; +"37 Transpose_1426" [id=37, type=Add]; +"38 Transpose_1329/fq_output_0" [id=38, type=FakeQuantize]; +"39 Transpose_1363" [id=39, type=PRelu]; +"40 Transpose_1524" [id=40, type=Add]; +"41 Transpose_1432" [id=41, type=PRelu]; +"42 Transpose_1363/fq_output_0" [id=42, type=FakeQuantize]; +"43 Transpose_1530" [id=43, type=PRelu]; +"44 Transpose_1432/fq_output_0" [id=44, type=FakeQuantize]; +"45 Transpose_1530/fq_output_0" [id=45, type=FakeQuantize]; +"46 Multiply_3811" [id=46, type=Convolution]; +"47 Transpose_1466" [id=47, type=Concat]; +"48 Transpose_1633" [id=48, type=Concat]; +"49 group_route_19/split" [id=49, type=Split]; +"50 Transpose_1456" [id=50, type=Add]; +"51 Multiply_3825" [id=51, type=Convolution]; +"52 MaxPool_579" [id=52, type=MaxPool]; +"53 Multiply_3853" [id=53, type=Convolution]; +"54 Transpose_1462" [id=54, type=PRelu]; +"55 Transpose_1490" [id=55, type=Add]; +"56 Multiply_3895" [id=56, type=Convolution]; +"57 Transpose_1559" [id=57, type=Add]; +"58 Transpose_1462/fq_output_0" [id=58, type=FakeQuantize]; +"59 Transpose_1496" [id=59, type=PRelu]; +"60 Transpose_1657" [id=60, type=Add]; +"61 Transpose_1565" [id=61, type=PRelu]; +"62 Transpose_1496/fq_output_0" [id=62, type=FakeQuantize]; +"63 Transpose_1663" [id=63, type=PRelu]; +"64 Transpose_1565/fq_output_0" [id=64, type=FakeQuantize]; +"65 Transpose_1663/fq_output_0" [id=65, type=FakeQuantize]; +"66 Multiply_3867" [id=66, type=Convolution]; +"67 Transpose_1599" [id=67, type=Concat]; +"68 Multiply_3909" [id=68, type=Convolution]; +"69 Transpose_1589" [id=69, type=Add]; +"70 Multiply_3881" [id=70, type=Convolution]; +"71 Transpose_1687" [id=71, type=Add]; +"72 Transpose_1595" [id=72, type=PRelu]; +"73 Transpose_1623" [id=73, type=Add]; +"74 Transpose_1693" [id=74, type=PRelu]; +"75 Transpose_1595/fq_output_0" [id=75, type=FakeQuantize]; +"76 Transpose_1629" [id=76, type=PRelu]; +"77 Transpose_1693/fq_output_0" [id=77, type=FakeQuantize]; +"78 Transpose_1629/fq_output_0" [id=78, type=FakeQuantize]; +"79 Multiply_3923" [id=79, type=Convolution]; +"80 Multiply_3951" [id=80, type=Convolution]; +"81 Transpose_1727" [id=81, type=Concat]; +"82 Transpose_1717" [id=82, type=Add]; +"83 Transpose_1787" [id=83, type=Add]; +"84 Multiply_3937" [id=84, type=Convolution]; +"85 Transpose_1723" [id=85, type=PRelu]; +"86 Transpose_1793" [id=86, type=PRelu]; +"87 Transpose_1751" [id=87, type=Add]; 
+"88 Transpose_1723/fq_output_0" [id=88, type=FakeQuantize]; +"89 Transpose_1793/fq_output_0" [id=89, type=FakeQuantize]; +"90 Transpose_1757" [id=90, type=PRelu]; +"91 leaky_re_lu_17/LeakyRelu" [id=91, type=Transpose]; +"92 Convolution_749" [id=92, type=Convolution]; +"93 Transpose_1757/fq_output_0" [id=93, type=FakeQuantize]; +"94 up_sampling2d/Shape" [id=94, type=ShapeOf]; +"95 up_sampling2d/resize/ResizeNearestNeighbor" [id=95, type=Interpolate]; +"96 Transpose_1799" [id=96, type=Add]; +"97 Convolution_706" [id=97, type=Convolution]; +"98 up_sampling2d/strided_slice" [id=98, type=StridedSlice]; +"99 up_sampling2d/resize/ResizeNearestNeighbor/fq_output_0" [id=99, type=FakeQuantize]; +"100 conv2d_17/BiasAdd" [id=100, type=Transpose]; +"101 Transpose_1763" [id=101, type=Add]; +"102 up_sampling2d/mul" [id=102, type=Multiply]; +"103 Transpose_1725" [id=103, type=Transpose]; +"104 conv2d_17/BiasAdd^0" [id=104, label="104 conv2d_17/BiasAdd:0", type=Result]; +"105 conv2d_20/BiasAdd" [id=105, type=Transpose]; +"106 conv2d_20/BiasAdd^0" [id=106, label="106 conv2d_20/BiasAdd:0", type=Result]; +"107 Constant_1762" [id=107, type=Constant]; +"108 Transpose_1761" [id=108, type=Constant]; +"109 Convolution_706/fq_weights_1" [id=109, type=FakeQuantize]; +"110 Constant_5705" [id=110, type=Constant]; +"111 Constant_5704" [id=111, type=Constant]; +"112 Constant_5703" [id=112, type=Constant]; +"113 Constant_5702" [id=113, type=Constant]; +"114 Transpose_705" [id=114, type=Constant]; +"115 Constant_5700" [id=115, type=Constant]; +"116 Constant_5699" [id=116, type=Constant]; +"117 Constant_5698" [id=117, type=Constant]; +"118 Constant_5697" [id=118, type=Constant]; +"119 Transpose_1755" [id=119, type=Constant]; +"120 Constant_3945" [id=120, type=Constant]; +"121 Multiply_3937/fq_weights_1" [id=121, type=FakeQuantize]; +"122 Constant_5695" [id=122, type=Constant]; +"123 Constant_5694" [id=123, type=Constant]; +"124 Constant_5693" [id=124, type=Constant]; +"125 Constant_5692" [id=125, type=Constant]; +"126 Multiply_4070" [id=126, type=Constant]; +"127 Constant_5565" [id=127, type=Constant]; +"128 Constant_5564" [id=128, type=Constant]; +"129 Constant_5563" [id=129, type=Constant]; +"130 Constant_5562" [id=130, type=Constant]; +"131 Transpose_1627" [id=131, type=Constant]; +"132 Constant_3889" [id=132, type=Constant]; +"133 Multiply_3881/fq_weights_1" [id=133, type=FakeQuantize]; +"134 Constant_5660" [id=134, type=Constant]; +"135 Constant_5659" [id=135, type=Constant]; +"136 Constant_5658" [id=136, type=Constant]; +"137 Constant_5657" [id=137, type=Constant]; +"138 Multiply_4046" [id=138, type=Constant]; +"139 Constant_5535" [id=139, type=Constant]; +"140 Constant_5534" [id=140, type=Constant]; +"141 Constant_5533" [id=141, type=Constant]; +"142 Constant_5532" [id=142, type=Constant]; +"143 Transpose_1563" [id=143, type=Constant]; +"144 Constant_3861" [id=144, type=Constant]; +"145 Multiply_3853/fq_weights_1" [id=145, type=FakeQuantize]; +"146 Constant_5650" [id=146, type=Constant]; +"147 Constant_5649" [id=147, type=Constant]; +"148 Constant_5648" [id=148, type=Constant]; +"149 Constant_5647" [id=149, type=Constant]; +"150 Multiply_4034" [id=150, type=Constant]; +"151 Constant_1531" [id=151, type=Constant]; +"152 Constant_5570" [id=152, type=Constant]; +"153 Constant_5569" [id=153, type=Constant]; +"154 Constant_5568" [id=154, type=Constant]; +"155 Constant_5567" [id=155, type=Constant]; +"156 Transpose_1528" [id=156, type=Constant]; +"157 Constant_3847" [id=157, type=Constant]; +"158 
Multiply_3839/fq_weights_1" [id=158, type=FakeQuantize]; +"159 Constant_5645" [id=159, type=Constant]; +"160 Constant_5644" [id=160, type=Constant]; +"161 Constant_5643" [id=161, type=Constant]; +"162 Constant_5642" [id=162, type=Constant]; +"163 Multiply_4028" [id=163, type=Constant]; +"164 Constant_5560" [id=164, type=Constant]; +"165 Constant_5559" [id=165, type=Constant]; +"166 Constant_5558" [id=166, type=Constant]; +"167 Constant_5557" [id=167, type=Constant]; +"168 Transpose_1494" [id=168, type=Constant]; +"169 Constant_3833" [id=169, type=Constant]; +"170 Multiply_3825/fq_weights_1" [id=170, type=FakeQuantize]; +"171 Constant_5640" [id=171, type=Constant]; +"172 Constant_5639" [id=172, type=Constant]; +"173 Constant_5638" [id=173, type=Constant]; +"174 Constant_5637" [id=174, type=Constant]; +"175 Multiply_4022" [id=175, type=Constant]; +"176 Constant_5530" [id=176, type=Constant]; +"177 Constant_5529" [id=177, type=Constant]; +"178 Constant_5528" [id=178, type=Constant]; +"179 Constant_5527" [id=179, type=Constant]; +"180 Transpose_1430" [id=180, type=Constant]; +"181 Constant_3805" [id=181, type=Constant]; +"182 Multiply_3797/fq_weights_1" [id=182, type=FakeQuantize]; +"183 Constant_5630" [id=183, type=Constant]; +"184 Constant_5629" [id=184, type=Constant]; +"185 Constant_5628" [id=185, type=Constant]; +"186 Constant_5627" [id=186, type=Constant]; +"187 Multiply_4010" [id=187, type=Constant]; +"188 Constant_1398" [id=188, type=Constant]; +"189 Constant_5555" [id=189, type=Constant]; +"190 Constant_5554" [id=190, type=Constant]; +"191 Constant_5553" [id=191, type=Constant]; +"192 Constant_5552" [id=192, type=Constant]; +"193 Transpose_1395" [id=193, type=Constant]; +"194 Constant_3791" [id=194, type=Constant]; +"195 Multiply_3783/fq_weights_1" [id=195, type=FakeQuantize]; +"196 Constant_5625" [id=196, type=Constant]; +"197 Constant_5624" [id=197, type=Constant]; +"198 Constant_5623" [id=198, type=Constant]; +"199 Constant_5622" [id=199, type=Constant]; +"200 Multiply_4004" [id=200, type=Constant]; +"201 Constant_5550" [id=201, type=Constant]; +"202 Constant_5549" [id=202, type=Constant]; +"203 Constant_5548" [id=203, type=Constant]; +"204 Constant_5547" [id=204, type=Constant]; +"205 Transpose_1361" [id=205, type=Constant]; +"206 Constant_3777" [id=206, type=Constant]; +"207 Multiply_3769/fq_weights_1" [id=207, type=FakeQuantize]; +"208 Constant_5620" [id=208, type=Constant]; +"209 Constant_5619" [id=209, type=Constant]; +"210 Constant_5618" [id=210, type=Constant]; +"211 Constant_5617" [id=211, type=Constant]; +"212 Multiply_3998" [id=212, type=Constant]; +"213 Constant_5515" [id=213, type=Constant]; +"214 Constant_5514" [id=214, type=Constant]; +"215 Constant_5513" [id=215, type=Constant]; +"216 Constant_5512" [id=216, type=Constant]; +"217 Transpose_1297" [id=217, type=Constant]; +"218 Constant_3749" [id=218, type=Constant]; +"219 Multiply_3741/fq_weights_1" [id=219, type=FakeQuantize]; +"220 Constant_5610" [id=220, type=Constant]; +"221 Constant_5609" [id=221, type=Constant]; +"222 Constant_5608" [id=222, type=Constant]; +"223 Constant_5607" [id=223, type=Constant]; +"224 Multiply_3986" [id=224, type=Constant]; +"225 Constant_1265" [id=225, type=Constant]; +"226 Constant_5545" [id=226, type=Constant]; +"227 Constant_5544" [id=227, type=Constant]; +"228 Constant_5543" [id=228, type=Constant]; +"229 Constant_5542" [id=229, type=Constant]; +"230 Transpose_1262" [id=230, type=Constant]; +"231 Constant_3735" [id=231, type=Constant]; +"232 Multiply_3727/fq_weights_1" [id=232, 
type=FakeQuantize]; +"233 Constant_5605" [id=233, type=Constant]; +"234 Constant_5604" [id=234, type=Constant]; +"235 Constant_5603" [id=235, type=Constant]; +"236 Constant_5602" [id=236, type=Constant]; +"237 Multiply_3980" [id=237, type=Constant]; +"238 Constant_5600" [id=238, type=Constant]; +"239 Constant_5599" [id=239, type=Constant]; +"240 Constant_5598" [id=240, type=Constant]; +"241 Constant_5597" [id=241, type=Constant]; +"242 Transpose_1232" [id=242, type=Constant]; +"243 Constant_3721" [id=243, type=Constant]; +"244 Multiply_3713/fq_weights_1" [id=244, type=FakeQuantize]; +"245 Constant_5595" [id=245, type=Constant]; +"246 Constant_5594" [id=246, type=Constant]; +"247 Constant_5593" [id=247, type=Constant]; +"248 Constant_5592" [id=248, type=Constant]; +"249 Multiply_3974" [id=249, type=Constant]; +"250 Constant_5590" [id=250, type=Constant]; +"251 Constant_5589" [id=251, type=Constant]; +"252 Constant_5588" [id=252, type=Constant]; +"253 Constant_5587" [id=253, type=Constant]; +"254 Transpose_1175" [id=254, type=Constant]; +"255 Constant_3707" [id=255, type=Constant]; +"256 Multiply_3699/fq_weights_1" [id=256, type=FakeQuantize]; +"257 Constant_5585" [id=257, type=Constant]; +"258 Constant_5584" [id=258, type=Constant]; +"259 Constant_5583" [id=259, type=Constant]; +"260 Constant_5582" [id=260, type=Constant]; +"261 Gather_4242" [id=261, type=Constant]; +"262 Constant_2326" [id=262, type=Constant]; +"263 Constant_5580" [id=263, type=Constant]; +"264 Constant_5579" [id=264, type=Constant]; +"265 Constant_5578" [id=265, type=Constant]; +"266 Constant_5577" [id=266, type=Constant]; +"267 Constant_5520" [id=267, type=Constant]; +"268 Constant_5519" [id=268, type=Constant]; +"269 Constant_5518" [id=269, type=Constant]; +"270 Constant_5517" [id=270, type=Constant]; +"271 Transpose_1327" [id=271, type=Constant]; +"272 Constant_3763" [id=272, type=Constant]; +"273 Multiply_3755/fq_weights_1" [id=273, type=FakeQuantize]; +"274 Constant_5615" [id=274, type=Constant]; +"275 Constant_5614" [id=275, type=Constant]; +"276 Constant_5613" [id=276, type=Constant]; +"277 Constant_5612" [id=277, type=Constant]; +"278 Multiply_3992" [id=278, type=Constant]; +"279 Constant_5525" [id=279, type=Constant]; +"280 Constant_5524" [id=280, type=Constant]; +"281 Constant_5523" [id=281, type=Constant]; +"282 Constant_5522" [id=282, type=Constant]; +"283 Transpose_1460" [id=283, type=Constant]; +"284 Constant_3819" [id=284, type=Constant]; +"285 Multiply_3811/fq_weights_1" [id=285, type=FakeQuantize]; +"286 Constant_5635" [id=286, type=Constant]; +"287 Constant_5634" [id=287, type=Constant]; +"288 Constant_5633" [id=288, type=Constant]; +"289 Constant_5632" [id=289, type=Constant]; +"290 Multiply_4016" [id=290, type=Constant]; +"291 Constant_5540" [id=291, type=Constant]; +"292 Constant_5539" [id=292, type=Constant]; +"293 Constant_5538" [id=293, type=Constant]; +"294 Constant_5537" [id=294, type=Constant]; +"295 Transpose_1593" [id=295, type=Constant]; +"296 Constant_3875" [id=296, type=Constant]; +"297 Multiply_3867/fq_weights_1" [id=297, type=FakeQuantize]; +"298 Constant_5655" [id=298, type=Constant]; +"299 Constant_5654" [id=299, type=Constant]; +"300 Constant_5653" [id=300, type=Constant]; +"301 Constant_5652" [id=301, type=Constant]; +"302 Multiply_4040" [id=302, type=Constant]; +"303 Constant_1724" [id=303, type=Constant]; +"304 Constant_5575" [id=304, type=Constant]; +"305 Constant_5574" [id=305, type=Constant]; +"306 Constant_5573" [id=306, type=Constant]; +"307 Constant_5572" [id=307, 
type=Constant]; +"308 Constant_669" [id=308, type=Constant]; +"309 up_sampling2d/Const" [id=309, type=Constant]; +"310 up_sampling2d/strided_slice/stack_2" [id=310, type=Constant]; +"311 up_sampling2d/strided_slice/stack_1" [id=311, type=Constant]; +"312 up_sampling2d/strided_slice/stack" [id=312, type=Constant]; +"313 Constant_1722" [id=313, type=Constant]; +"314 Constant_5690" [id=314, type=Constant]; +"315 Constant_5689" [id=315, type=Constant]; +"316 Constant_5688" [id=316, type=Constant]; +"317 Constant_5687" [id=317, type=Constant]; +"318 Transpose_1721" [id=318, type=Constant]; +"319 Constant_3931" [id=319, type=Constant]; +"320 Multiply_3923/fq_weights_1" [id=320, type=FakeQuantize]; +"321 Constant_5685" [id=321, type=Constant]; +"322 Constant_5684" [id=322, type=Constant]; +"323 Constant_5683" [id=323, type=Constant]; +"324 Constant_5682" [id=324, type=Constant]; +"325 Multiply_4064" [id=325, type=Constant]; +"326 Constant_5680" [id=326, type=Constant]; +"327 Constant_5679" [id=327, type=Constant]; +"328 Constant_5678" [id=328, type=Constant]; +"329 Constant_5677" [id=329, type=Constant]; +"330 Transpose_1691" [id=330, type=Constant]; +"331 Constant_3917" [id=331, type=Constant]; +"332 Multiply_3909/fq_weights_1" [id=332, type=FakeQuantize]; +"333 Constant_5675" [id=333, type=Constant]; +"334 Constant_5674" [id=334, type=Constant]; +"335 Constant_5673" [id=335, type=Constant]; +"336 Constant_5672" [id=336, type=Constant]; +"337 Multiply_4058" [id=337, type=Constant]; +"338 Constant_5670" [id=338, type=Constant]; +"339 Constant_5669" [id=339, type=Constant]; +"340 Constant_5668" [id=340, type=Constant]; +"341 Constant_5667" [id=341, type=Constant]; +"342 Transpose_1661" [id=342, type=Constant]; +"343 Constant_3903" [id=343, type=Constant]; +"344 Multiply_3895/fq_weights_1" [id=344, type=FakeQuantize]; +"345 Constant_5665" [id=345, type=Constant]; +"346 Constant_5664" [id=346, type=Constant]; +"347 Constant_5663" [id=347, type=Constant]; +"348 Constant_5662" [id=348, type=Constant]; +"349 Multiply_4052" [id=349, type=Constant]; +"350 Constant_1798" [id=350, type=Constant]; +"351 Transpose_1797" [id=351, type=Constant]; +"352 Convolution_749/fq_weights_1" [id=352, type=FakeQuantize]; +"353 Constant_5720" [id=353, type=Constant]; +"354 Constant_5719" [id=354, type=Constant]; +"355 Constant_5718" [id=355, type=Constant]; +"356 Constant_5717" [id=356, type=Constant]; +"357 Transpose_748" [id=357, type=Constant]; +"358 Constant_5715" [id=358, type=Constant]; +"359 Constant_5714" [id=359, type=Constant]; +"360 Constant_5713" [id=360, type=Constant]; +"361 Constant_5712" [id=361, type=Constant]; +"362 Transpose_1791" [id=362, type=Constant]; +"363 Constant_3959" [id=363, type=Constant]; +"364 Multiply_3951/fq_weights_1" [id=364, type=FakeQuantize]; +"365 Constant_5710" [id=365, type=Constant]; +"366 Constant_5709" [id=366, type=Constant]; +"367 Constant_5708" [id=367, type=Constant]; +"368 Constant_5707" [id=368, type=Constant]; +"369 Multiply_4076" [id=369, type=Constant]; +"0 image_input" -> "1 image_input/fq_output_0" [label="[1, 416, 416, 3]", style=solid]; +"1 image_input/fq_output_0" -> "2 Divide_2366" [label="[1, 416, 416, 3]", style=solid]; +"2 Divide_2366" -> "3 Multiply_3699" [label="[1, 3, 416, 416]", style=solid]; +"3 Multiply_3699" -> "4 Transpose_1171" [label="[1, 32, 208, 208]", style=solid]; +"4 Transpose_1171" -> "5 Transpose_1177" [label="[1, 32, 208, 208]", style=solid]; +"5 Transpose_1177" -> "6 Transpose_1177/fq_output_0" [label="[1, 32, 208, 208]", style=solid]; +"6 
Transpose_1177/fq_output_0" -> "7 Multiply_3713" [label="[1, 32, 208, 208]", style=solid]; +"7 Multiply_3713" -> "8 Transpose_1228" [label="[1, 64, 104, 104]", style=solid]; +"8 Transpose_1228" -> "9 Transpose_1234" [label="[1, 64, 104, 104]", style=solid]; +"9 Transpose_1234" -> "10 Transpose_1234/fq_output_0" [label="[1, 64, 104, 104]", style=solid]; +"10 Transpose_1234/fq_output_0" -> "11 Multiply_3727" [label="[1, 64, 104, 104]", style=solid]; +"11 Multiply_3727" -> "12 Transpose_1258" [label="[1, 64, 104, 104]", style=solid]; +"12 Transpose_1258" -> "13 Transpose_1264" [label="[1, 64, 104, 104]", style=solid]; +"13 Transpose_1264" -> "14 Transpose_1264/fq_output_0" [label="[1, 64, 104, 104]", style=solid]; +"14 Transpose_1264/fq_output_0" -> "15 Transpose_1367" [label="[1, 64, 104, 104]", style=solid]; +"14 Transpose_1264/fq_output_0" -> "16 group_route_3/split" [label="[1, 64, 104, 104]", style=solid]; +"15 Transpose_1367" -> "17 MaxPool_307" [label="[1, 128, 104, 104]", style=solid]; +"16 group_route_3/split" -> "18 Multiply_3741" [label="[1, 32, 104, 104]", style=solid]; +"17 MaxPool_307" -> "19 Multiply_3783" [label="[1, 128, 52, 52]", style=solid]; +"18 Multiply_3741" -> "20 Transpose_1293" [label="[1, 32, 104, 104]", style=solid]; +"19 Multiply_3783" -> "21 Transpose_1391" [label="[1, 128, 52, 52]", style=solid]; +"20 Transpose_1293" -> "22 Transpose_1299" [label="[1, 32, 104, 104]", style=solid]; +"21 Transpose_1391" -> "23 Transpose_1397" [label="[1, 128, 52, 52]", style=solid]; +"22 Transpose_1299" -> "24 Transpose_1299/fq_output_0" [label="[1, 32, 104, 104]", style=solid]; +"23 Transpose_1397" -> "25 Transpose_1397/fq_output_0" [label="[1, 128, 52, 52]", style=solid]; +"24 Transpose_1299/fq_output_0" -> "26 Multiply_3755" [label="[1, 32, 104, 104]", style=solid]; +"24 Transpose_1299/fq_output_0" -> "27 Transpose_1333" [label="[1, 32, 104, 104]", style=solid]; +"25 Transpose_1397/fq_output_0" -> "28 Transpose_1500" [label="[1, 128, 52, 52]", style=solid]; +"25 Transpose_1397/fq_output_0" -> "29 group_route_11/split" [label="[1, 128, 52, 52]", style=solid]; +"26 Multiply_3755" -> "30 Transpose_1323" [label="[1, 32, 104, 104]", style=solid]; +"27 Transpose_1333" -> "31 Multiply_3769" [label="[1, 64, 104, 104]", style=solid]; +"28 Transpose_1500" -> "32 MaxPool_433" [label="[1, 256, 52, 52]", style=solid]; +"29 group_route_11/split" -> "33 Multiply_3797" [label="[1, 64, 52, 52]", style=solid]; +"30 Transpose_1323" -> "34 Transpose_1329" [label="[1, 32, 104, 104]", style=solid]; +"31 Multiply_3769" -> "35 Transpose_1357" [label="[1, 64, 104, 104]", style=solid]; +"32 MaxPool_433" -> "36 Multiply_3839" [label="[1, 256, 26, 26]", style=solid]; +"33 Multiply_3797" -> "37 Transpose_1426" [label="[1, 64, 52, 52]", style=solid]; +"34 Transpose_1329" -> "38 Transpose_1329/fq_output_0" [label="[1, 32, 104, 104]", style=solid]; +"35 Transpose_1357" -> "39 Transpose_1363" [label="[1, 64, 104, 104]", style=solid]; +"36 Multiply_3839" -> "40 Transpose_1524" [label="[1, 256, 26, 26]", style=solid]; +"37 Transpose_1426" -> "41 Transpose_1432" [label="[1, 64, 52, 52]", style=solid]; +"38 Transpose_1329/fq_output_0" -> "27 Transpose_1333" [label="[1, 32, 104, 104]", style=solid]; +"39 Transpose_1363" -> "42 Transpose_1363/fq_output_0" [label="[1, 64, 104, 104]", style=solid]; +"40 Transpose_1524" -> "43 Transpose_1530" [label="[1, 256, 26, 26]", style=solid]; +"41 Transpose_1432" -> "44 Transpose_1432/fq_output_0" [label="[1, 64, 52, 52]", style=solid]; +"42 Transpose_1363/fq_output_0" -> "15 
Transpose_1367" [label="[1, 64, 104, 104]", style=solid]; +"43 Transpose_1530" -> "45 Transpose_1530/fq_output_0" [label="[1, 256, 26, 26]", style=solid]; +"44 Transpose_1432/fq_output_0" -> "46 Multiply_3811" [label="[1, 64, 52, 52]", style=solid]; +"44 Transpose_1432/fq_output_0" -> "47 Transpose_1466" [label="[1, 64, 52, 52]", style=solid]; +"45 Transpose_1530/fq_output_0" -> "48 Transpose_1633" [label="[1, 256, 26, 26]", style=solid]; +"45 Transpose_1530/fq_output_0" -> "49 group_route_19/split" [label="[1, 256, 26, 26]", style=solid]; +"46 Multiply_3811" -> "50 Transpose_1456" [label="[1, 64, 52, 52]", style=solid]; +"47 Transpose_1466" -> "51 Multiply_3825" [label="[1, 128, 52, 52]", style=solid]; +"48 Transpose_1633" -> "52 MaxPool_579" [label="[1, 512, 26, 26]", style=solid]; +"49 group_route_19/split" -> "53 Multiply_3853" [label="[1, 128, 26, 26]", style=solid]; +"50 Transpose_1456" -> "54 Transpose_1462" [label="[1, 64, 52, 52]", style=solid]; +"51 Multiply_3825" -> "55 Transpose_1490" [label="[1, 128, 52, 52]", style=solid]; +"52 MaxPool_579" -> "56 Multiply_3895" [label="[1, 512, 13, 13]", style=solid]; +"53 Multiply_3853" -> "57 Transpose_1559" [label="[1, 128, 26, 26]", style=solid]; +"54 Transpose_1462" -> "58 Transpose_1462/fq_output_0" [label="[1, 64, 52, 52]", style=solid]; +"55 Transpose_1490" -> "59 Transpose_1496" [label="[1, 128, 52, 52]", style=solid]; +"56 Multiply_3895" -> "60 Transpose_1657" [label="[1, 512, 13, 13]", style=solid]; +"57 Transpose_1559" -> "61 Transpose_1565" [label="[1, 128, 26, 26]", style=solid]; +"58 Transpose_1462/fq_output_0" -> "47 Transpose_1466" [label="[1, 64, 52, 52]", style=solid]; +"59 Transpose_1496" -> "62 Transpose_1496/fq_output_0" [label="[1, 128, 52, 52]", style=solid]; +"60 Transpose_1657" -> "63 Transpose_1663" [label="[1, 512, 13, 13]", style=solid]; +"61 Transpose_1565" -> "64 Transpose_1565/fq_output_0" [label="[1, 128, 26, 26]", style=solid]; +"62 Transpose_1496/fq_output_0" -> "28 Transpose_1500" [label="[1, 128, 52, 52]", style=solid]; +"63 Transpose_1663" -> "65 Transpose_1663/fq_output_0" [label="[1, 512, 13, 13]", style=solid]; +"64 Transpose_1565/fq_output_0" -> "66 Multiply_3867" [label="[1, 128, 26, 26]", style=solid]; +"64 Transpose_1565/fq_output_0" -> "67 Transpose_1599" [label="[1, 128, 26, 26]", style=solid]; +"65 Transpose_1663/fq_output_0" -> "68 Multiply_3909" [label="[1, 512, 13, 13]", style=solid]; +"66 Multiply_3867" -> "69 Transpose_1589" [label="[1, 128, 26, 26]", style=solid]; +"67 Transpose_1599" -> "70 Multiply_3881" [label="[1, 256, 26, 26]", style=solid]; +"68 Multiply_3909" -> "71 Transpose_1687" [label="[1, 256, 13, 13]", style=solid]; +"69 Transpose_1589" -> "72 Transpose_1595" [label="[1, 128, 26, 26]", style=solid]; +"70 Multiply_3881" -> "73 Transpose_1623" [label="[1, 256, 26, 26]", style=solid]; +"71 Transpose_1687" -> "74 Transpose_1693" [label="[1, 256, 13, 13]", style=solid]; +"72 Transpose_1595" -> "75 Transpose_1595/fq_output_0" [label="[1, 128, 26, 26]", style=solid]; +"73 Transpose_1623" -> "76 Transpose_1629" [label="[1, 256, 26, 26]", style=solid]; +"74 Transpose_1693" -> "77 Transpose_1693/fq_output_0" [label="[1, 256, 13, 13]", style=solid]; +"75 Transpose_1595/fq_output_0" -> "67 Transpose_1599" [label="[1, 128, 26, 26]", style=solid]; +"76 Transpose_1629" -> "78 Transpose_1629/fq_output_0" [label="[1, 256, 26, 26]", style=solid]; +"77 Transpose_1693/fq_output_0" -> "79 Multiply_3923" [label="[1, 256, 13, 13]", style=solid]; +"77 Transpose_1693/fq_output_0" -> "80 
Multiply_3951" [label="[1, 256, 13, 13]", style=solid]; +"78 Transpose_1629/fq_output_0" -> "48 Transpose_1633" [label="[1, 256, 26, 26]", style=solid]; +"78 Transpose_1629/fq_output_0" -> "81 Transpose_1727" [label="[1, 256, 26, 26]", style=solid]; +"79 Multiply_3923" -> "82 Transpose_1717" [label="[1, 128, 13, 13]", style=solid]; +"80 Multiply_3951" -> "83 Transpose_1787" [label="[1, 512, 13, 13]", style=solid]; +"81 Transpose_1727" -> "84 Multiply_3937" [label="[1, 384, 26, 26]", style=solid]; +"82 Transpose_1717" -> "85 Transpose_1723" [label="[1, 128, 13, 13]", style=solid]; +"83 Transpose_1787" -> "86 Transpose_1793" [label="[1, 512, 13, 13]", style=solid]; +"84 Multiply_3937" -> "87 Transpose_1751" [label="[1, 256, 26, 26]", style=solid]; +"85 Transpose_1723" -> "88 Transpose_1723/fq_output_0" [label="[1, 128, 13, 13]", style=solid]; +"86 Transpose_1793" -> "89 Transpose_1793/fq_output_0" [label="[1, 512, 13, 13]", style=solid]; +"87 Transpose_1751" -> "90 Transpose_1757" [label="[1, 256, 26, 26]", style=solid]; +"88 Transpose_1723/fq_output_0" -> "91 leaky_re_lu_17/LeakyRelu" [label="[1, 128, 13, 13]", style=solid]; +"89 Transpose_1793/fq_output_0" -> "92 Convolution_749" [label="[1, 512, 13, 13]", style=solid]; +"90 Transpose_1757" -> "93 Transpose_1757/fq_output_0" [label="[1, 256, 26, 26]", style=solid]; +"91 leaky_re_lu_17/LeakyRelu" -> "94 up_sampling2d/Shape" [label="[1, 13, 13, 128]", style=solid]; +"91 leaky_re_lu_17/LeakyRelu" -> "95 up_sampling2d/resize/ResizeNearestNeighbor" [label="[1, 13, 13, 128]", style=solid]; +"92 Convolution_749" -> "96 Transpose_1799" [label="[1, 255, 13, 13]", style=solid]; +"93 Transpose_1757/fq_output_0" -> "97 Convolution_706" [label="[1, 256, 26, 26]", style=solid]; +"94 up_sampling2d/Shape" -> "98 up_sampling2d/strided_slice" [label="[4]", style=dashed]; +"95 up_sampling2d/resize/ResizeNearestNeighbor" -> "99 up_sampling2d/resize/ResizeNearestNeighbor/fq_output_0" [label="[1, 26, 26, 128]", style=solid]; +"96 Transpose_1799" -> "100 conv2d_17/BiasAdd" [label="[1, 255, 13, 13]", style=solid]; +"97 Convolution_706" -> "101 Transpose_1763" [label="[1, 255, 26, 26]", style=solid]; +"98 up_sampling2d/strided_slice" -> "102 up_sampling2d/mul" [label="[2]", style=dashed]; +"99 up_sampling2d/resize/ResizeNearestNeighbor/fq_output_0" -> "103 Transpose_1725" [label="[1, 26, 26, 128]", style=solid]; +"100 conv2d_17/BiasAdd" -> "104 conv2d_17/BiasAdd^0" [label="[1, 13, 13, 255]", style=solid]; +"101 Transpose_1763" -> "105 conv2d_20/BiasAdd" [label="[1, 255, 26, 26]", style=solid]; +"102 up_sampling2d/mul" -> "95 up_sampling2d/resize/ResizeNearestNeighbor" [label="[2]", style=dashed]; +"103 Transpose_1725" -> "81 Transpose_1727" [label="[1, 128, 26, 26]", style=solid]; +"105 conv2d_20/BiasAdd" -> "106 conv2d_20/BiasAdd^0" [label="[1, 26, 26, 255]", style=solid]; +"107 Constant_1762" -> "105 conv2d_20/BiasAdd" [label="[4]", style=dashed]; +"108 Transpose_1761" -> "101 Transpose_1763" [label="[1, 255, 1, 1]", style=solid]; +"109 Convolution_706/fq_weights_1" -> "97 Convolution_706" [label="[255, 256, 1, 1]", style=solid]; +"110 Constant_5705" -> "109 Convolution_706/fq_weights_1" [label="[255, 1, 1, 1]", style=solid]; +"111 Constant_5704" -> "109 Convolution_706/fq_weights_1" [label="[255, 1, 1, 1]", style=solid]; +"112 Constant_5703" -> "109 Convolution_706/fq_weights_1" [label="[255, 1, 1, 1]", style=solid]; +"113 Constant_5702" -> "109 Convolution_706/fq_weights_1" [label="[255, 1, 1, 1]", style=solid]; +"114 Transpose_705" -> "109 
Convolution_706/fq_weights_1" [label="[255, 256, 1, 1]", style=solid]; +"115 Constant_5700" -> "93 Transpose_1757/fq_output_0" [label="[]", style=solid]; +"116 Constant_5699" -> "93 Transpose_1757/fq_output_0" [label="[]", style=solid]; +"117 Constant_5698" -> "93 Transpose_1757/fq_output_0" [label="[]", style=solid]; +"118 Constant_5697" -> "93 Transpose_1757/fq_output_0" [label="[]", style=solid]; +"119 Transpose_1755" -> "90 Transpose_1757" [label="[1, 1, 1, 1]", style=solid]; +"120 Constant_3945" -> "87 Transpose_1751" [label="[1, 256, 1, 1]", style=solid]; +"121 Multiply_3937/fq_weights_1" -> "84 Multiply_3937" [label="[256, 384, 3, 3]", style=solid]; +"122 Constant_5695" -> "121 Multiply_3937/fq_weights_1" [label="[256, 1, 1, 1]", style=solid]; +"123 Constant_5694" -> "121 Multiply_3937/fq_weights_1" [label="[256, 1, 1, 1]", style=solid]; +"124 Constant_5693" -> "121 Multiply_3937/fq_weights_1" [label="[256, 1, 1, 1]", style=solid]; +"125 Constant_5692" -> "121 Multiply_3937/fq_weights_1" [label="[256, 1, 1, 1]", style=solid]; +"126 Multiply_4070" -> "121 Multiply_3937/fq_weights_1" [label="[256, 384, 3, 3]", style=solid]; +"127 Constant_5565" -> "78 Transpose_1629/fq_output_0" [label="[]", style=solid]; +"128 Constant_5564" -> "78 Transpose_1629/fq_output_0" [label="[]", style=solid]; +"129 Constant_5563" -> "78 Transpose_1629/fq_output_0" [label="[]", style=solid]; +"130 Constant_5562" -> "78 Transpose_1629/fq_output_0" [label="[]", style=solid]; +"131 Transpose_1627" -> "76 Transpose_1629" [label="[1, 1, 1, 1]", style=solid]; +"132 Constant_3889" -> "73 Transpose_1623" [label="[1, 256, 1, 1]", style=solid]; +"133 Multiply_3881/fq_weights_1" -> "70 Multiply_3881" [label="[256, 256, 1, 1]", style=solid]; +"134 Constant_5660" -> "133 Multiply_3881/fq_weights_1" [label="[256, 1, 1, 1]", style=solid]; +"135 Constant_5659" -> "133 Multiply_3881/fq_weights_1" [label="[256, 1, 1, 1]", style=solid]; +"136 Constant_5658" -> "133 Multiply_3881/fq_weights_1" [label="[256, 1, 1, 1]", style=solid]; +"137 Constant_5657" -> "133 Multiply_3881/fq_weights_1" [label="[256, 1, 1, 1]", style=solid]; +"138 Multiply_4046" -> "133 Multiply_3881/fq_weights_1" [label="[256, 256, 1, 1]", style=solid]; +"139 Constant_5535" -> "64 Transpose_1565/fq_output_0" [label="[]", style=solid]; +"140 Constant_5534" -> "64 Transpose_1565/fq_output_0" [label="[]", style=solid]; +"141 Constant_5533" -> "64 Transpose_1565/fq_output_0" [label="[]", style=solid]; +"142 Constant_5532" -> "64 Transpose_1565/fq_output_0" [label="[]", style=solid]; +"143 Transpose_1563" -> "61 Transpose_1565" [label="[1, 1, 1, 1]", style=solid]; +"144 Constant_3861" -> "57 Transpose_1559" [label="[1, 128, 1, 1]", style=solid]; +"145 Multiply_3853/fq_weights_1" -> "53 Multiply_3853" [label="[128, 128, 3, 3]", style=solid]; +"146 Constant_5650" -> "145 Multiply_3853/fq_weights_1" [label="[128, 1, 1, 1]", style=solid]; +"147 Constant_5649" -> "145 Multiply_3853/fq_weights_1" [label="[128, 1, 1, 1]", style=solid]; +"148 Constant_5648" -> "145 Multiply_3853/fq_weights_1" [label="[128, 1, 1, 1]", style=solid]; +"149 Constant_5647" -> "145 Multiply_3853/fq_weights_1" [label="[128, 1, 1, 1]", style=solid]; +"150 Multiply_4034" -> "145 Multiply_3853/fq_weights_1" [label="[128, 128, 3, 3]", style=solid]; +"151 Constant_1531" -> "49 group_route_19/split" [label="[]", style=dashed]; +"152 Constant_5570" -> "45 Transpose_1530/fq_output_0" [label="[]", style=solid]; +"153 Constant_5569" -> "45 Transpose_1530/fq_output_0" [label="[]", style=solid]; +"154 
Constant_5568" -> "45 Transpose_1530/fq_output_0" [label="[]", style=solid]; +"155 Constant_5567" -> "45 Transpose_1530/fq_output_0" [label="[]", style=solid]; +"156 Transpose_1528" -> "43 Transpose_1530" [label="[1, 1, 1, 1]", style=solid]; +"157 Constant_3847" -> "40 Transpose_1524" [label="[1, 256, 1, 1]", style=solid]; +"158 Multiply_3839/fq_weights_1" -> "36 Multiply_3839" [label="[256, 256, 3, 3]", style=solid]; +"159 Constant_5645" -> "158 Multiply_3839/fq_weights_1" [label="[256, 1, 1, 1]", style=solid]; +"160 Constant_5644" -> "158 Multiply_3839/fq_weights_1" [label="[256, 1, 1, 1]", style=solid]; +"161 Constant_5643" -> "158 Multiply_3839/fq_weights_1" [label="[256, 1, 1, 1]", style=solid]; +"162 Constant_5642" -> "158 Multiply_3839/fq_weights_1" [label="[256, 1, 1, 1]", style=solid]; +"163 Multiply_4028" -> "158 Multiply_3839/fq_weights_1" [label="[256, 256, 3, 3]", style=solid]; +"164 Constant_5560" -> "62 Transpose_1496/fq_output_0" [label="[]", style=solid]; +"165 Constant_5559" -> "62 Transpose_1496/fq_output_0" [label="[]", style=solid]; +"166 Constant_5558" -> "62 Transpose_1496/fq_output_0" [label="[]", style=solid]; +"167 Constant_5557" -> "62 Transpose_1496/fq_output_0" [label="[]", style=solid]; +"168 Transpose_1494" -> "59 Transpose_1496" [label="[1, 1, 1, 1]", style=solid]; +"169 Constant_3833" -> "55 Transpose_1490" [label="[1, 128, 1, 1]", style=solid]; +"170 Multiply_3825/fq_weights_1" -> "51 Multiply_3825" [label="[128, 128, 1, 1]", style=solid]; +"171 Constant_5640" -> "170 Multiply_3825/fq_weights_1" [label="[128, 1, 1, 1]", style=solid]; +"172 Constant_5639" -> "170 Multiply_3825/fq_weights_1" [label="[128, 1, 1, 1]", style=solid]; +"173 Constant_5638" -> "170 Multiply_3825/fq_weights_1" [label="[128, 1, 1, 1]", style=solid]; +"174 Constant_5637" -> "170 Multiply_3825/fq_weights_1" [label="[128, 1, 1, 1]", style=solid]; +"175 Multiply_4022" -> "170 Multiply_3825/fq_weights_1" [label="[128, 128, 1, 1]", style=solid]; +"176 Constant_5530" -> "44 Transpose_1432/fq_output_0" [label="[]", style=solid]; +"177 Constant_5529" -> "44 Transpose_1432/fq_output_0" [label="[]", style=solid]; +"178 Constant_5528" -> "44 Transpose_1432/fq_output_0" [label="[]", style=solid]; +"179 Constant_5527" -> "44 Transpose_1432/fq_output_0" [label="[]", style=solid]; +"180 Transpose_1430" -> "41 Transpose_1432" [label="[1, 1, 1, 1]", style=solid]; +"181 Constant_3805" -> "37 Transpose_1426" [label="[1, 64, 1, 1]", style=solid]; +"182 Multiply_3797/fq_weights_1" -> "33 Multiply_3797" [label="[64, 64, 3, 3]", style=solid]; +"183 Constant_5630" -> "182 Multiply_3797/fq_weights_1" [label="[64, 1, 1, 1]", style=solid]; +"184 Constant_5629" -> "182 Multiply_3797/fq_weights_1" [label="[64, 1, 1, 1]", style=solid]; +"185 Constant_5628" -> "182 Multiply_3797/fq_weights_1" [label="[64, 1, 1, 1]", style=solid]; +"186 Constant_5627" -> "182 Multiply_3797/fq_weights_1" [label="[64, 1, 1, 1]", style=solid]; +"187 Multiply_4010" -> "182 Multiply_3797/fq_weights_1" [label="[64, 64, 3, 3]", style=solid]; +"188 Constant_1398" -> "29 group_route_11/split" [label="[]", style=dashed]; +"189 Constant_5555" -> "25 Transpose_1397/fq_output_0" [label="[]", style=solid]; +"190 Constant_5554" -> "25 Transpose_1397/fq_output_0" [label="[]", style=solid]; +"191 Constant_5553" -> "25 Transpose_1397/fq_output_0" [label="[]", style=solid]; +"192 Constant_5552" -> "25 Transpose_1397/fq_output_0" [label="[]", style=solid]; +"193 Transpose_1395" -> "23 Transpose_1397" [label="[1, 1, 1, 1]", style=solid]; +"194 
Constant_3791" -> "21 Transpose_1391" [label="[1, 128, 1, 1]", style=solid]; +"195 Multiply_3783/fq_weights_1" -> "19 Multiply_3783" [label="[128, 128, 3, 3]", style=solid]; +"196 Constant_5625" -> "195 Multiply_3783/fq_weights_1" [label="[128, 1, 1, 1]", style=solid]; +"197 Constant_5624" -> "195 Multiply_3783/fq_weights_1" [label="[128, 1, 1, 1]", style=solid]; +"198 Constant_5623" -> "195 Multiply_3783/fq_weights_1" [label="[128, 1, 1, 1]", style=solid]; +"199 Constant_5622" -> "195 Multiply_3783/fq_weights_1" [label="[128, 1, 1, 1]", style=solid]; +"200 Multiply_4004" -> "195 Multiply_3783/fq_weights_1" [label="[128, 128, 3, 3]", style=solid]; +"201 Constant_5550" -> "42 Transpose_1363/fq_output_0" [label="[]", style=solid]; +"202 Constant_5549" -> "42 Transpose_1363/fq_output_0" [label="[]", style=solid]; +"203 Constant_5548" -> "42 Transpose_1363/fq_output_0" [label="[]", style=solid]; +"204 Constant_5547" -> "42 Transpose_1363/fq_output_0" [label="[]", style=solid]; +"205 Transpose_1361" -> "39 Transpose_1363" [label="[1, 1, 1, 1]", style=solid]; +"206 Constant_3777" -> "35 Transpose_1357" [label="[1, 64, 1, 1]", style=solid]; +"207 Multiply_3769/fq_weights_1" -> "31 Multiply_3769" [label="[64, 64, 1, 1]", style=solid]; +"208 Constant_5620" -> "207 Multiply_3769/fq_weights_1" [label="[64, 1, 1, 1]", style=solid]; +"209 Constant_5619" -> "207 Multiply_3769/fq_weights_1" [label="[64, 1, 1, 1]", style=solid]; +"210 Constant_5618" -> "207 Multiply_3769/fq_weights_1" [label="[64, 1, 1, 1]", style=solid]; +"211 Constant_5617" -> "207 Multiply_3769/fq_weights_1" [label="[64, 1, 1, 1]", style=solid]; +"212 Multiply_3998" -> "207 Multiply_3769/fq_weights_1" [label="[64, 64, 1, 1]", style=solid]; +"213 Constant_5515" -> "24 Transpose_1299/fq_output_0" [label="[]", style=solid]; +"214 Constant_5514" -> "24 Transpose_1299/fq_output_0" [label="[]", style=solid]; +"215 Constant_5513" -> "24 Transpose_1299/fq_output_0" [label="[]", style=solid]; +"216 Constant_5512" -> "24 Transpose_1299/fq_output_0" [label="[]", style=solid]; +"217 Transpose_1297" -> "22 Transpose_1299" [label="[1, 1, 1, 1]", style=solid]; +"218 Constant_3749" -> "20 Transpose_1293" [label="[1, 32, 1, 1]", style=solid]; +"219 Multiply_3741/fq_weights_1" -> "18 Multiply_3741" [label="[32, 32, 3, 3]", style=solid]; +"220 Constant_5610" -> "219 Multiply_3741/fq_weights_1" [label="[32, 1, 1, 1]", style=solid]; +"221 Constant_5609" -> "219 Multiply_3741/fq_weights_1" [label="[32, 1, 1, 1]", style=solid]; +"222 Constant_5608" -> "219 Multiply_3741/fq_weights_1" [label="[32, 1, 1, 1]", style=solid]; +"223 Constant_5607" -> "219 Multiply_3741/fq_weights_1" [label="[32, 1, 1, 1]", style=solid]; +"224 Multiply_3986" -> "219 Multiply_3741/fq_weights_1" [label="[32, 32, 3, 3]", style=solid]; +"225 Constant_1265" -> "16 group_route_3/split" [label="[]", style=dashed]; +"226 Constant_5545" -> "14 Transpose_1264/fq_output_0" [label="[]", style=solid]; +"227 Constant_5544" -> "14 Transpose_1264/fq_output_0" [label="[]", style=solid]; +"228 Constant_5543" -> "14 Transpose_1264/fq_output_0" [label="[]", style=solid]; +"229 Constant_5542" -> "14 Transpose_1264/fq_output_0" [label="[]", style=solid]; +"230 Transpose_1262" -> "13 Transpose_1264" [label="[1, 1, 1, 1]", style=solid]; +"231 Constant_3735" -> "12 Transpose_1258" [label="[1, 64, 1, 1]", style=solid]; +"232 Multiply_3727/fq_weights_1" -> "11 Multiply_3727" [label="[64, 64, 3, 3]", style=solid]; +"233 Constant_5605" -> "232 Multiply_3727/fq_weights_1" [label="[64, 1, 1, 1]", style=solid]; 
+"234 Constant_5604" -> "232 Multiply_3727/fq_weights_1" [label="[64, 1, 1, 1]", style=solid]; +"235 Constant_5603" -> "232 Multiply_3727/fq_weights_1" [label="[64, 1, 1, 1]", style=solid]; +"236 Constant_5602" -> "232 Multiply_3727/fq_weights_1" [label="[64, 1, 1, 1]", style=solid]; +"237 Multiply_3980" -> "232 Multiply_3727/fq_weights_1" [label="[64, 64, 3, 3]", style=solid]; +"238 Constant_5600" -> "10 Transpose_1234/fq_output_0" [label="[]", style=solid]; +"239 Constant_5599" -> "10 Transpose_1234/fq_output_0" [label="[]", style=solid]; +"240 Constant_5598" -> "10 Transpose_1234/fq_output_0" [label="[]", style=solid]; +"241 Constant_5597" -> "10 Transpose_1234/fq_output_0" [label="[]", style=solid]; +"242 Transpose_1232" -> "9 Transpose_1234" [label="[1, 1, 1, 1]", style=solid]; +"243 Constant_3721" -> "8 Transpose_1228" [label="[1, 64, 1, 1]", style=solid]; +"244 Multiply_3713/fq_weights_1" -> "7 Multiply_3713" [label="[64, 32, 3, 3]", style=solid]; +"245 Constant_5595" -> "244 Multiply_3713/fq_weights_1" [label="[64, 1, 1, 1]", style=solid]; +"246 Constant_5594" -> "244 Multiply_3713/fq_weights_1" [label="[64, 1, 1, 1]", style=solid]; +"247 Constant_5593" -> "244 Multiply_3713/fq_weights_1" [label="[64, 1, 1, 1]", style=solid]; +"248 Constant_5592" -> "244 Multiply_3713/fq_weights_1" [label="[64, 1, 1, 1]", style=solid]; +"249 Multiply_3974" -> "244 Multiply_3713/fq_weights_1" [label="[64, 32, 3, 3]", style=solid]; +"250 Constant_5590" -> "6 Transpose_1177/fq_output_0" [label="[]", style=solid]; +"251 Constant_5589" -> "6 Transpose_1177/fq_output_0" [label="[]", style=solid]; +"252 Constant_5588" -> "6 Transpose_1177/fq_output_0" [label="[]", style=solid]; +"253 Constant_5587" -> "6 Transpose_1177/fq_output_0" [label="[]", style=solid]; +"254 Transpose_1175" -> "5 Transpose_1177" [label="[1, 1, 1, 1]", style=solid]; +"255 Constant_3707" -> "4 Transpose_1171" [label="[1, 32, 1, 1]", style=solid]; +"256 Multiply_3699/fq_weights_1" -> "3 Multiply_3699" [label="[32, 3, 3, 3]", style=solid]; +"257 Constant_5585" -> "256 Multiply_3699/fq_weights_1" [label="[32, 1, 1, 1]", style=solid]; +"258 Constant_5584" -> "256 Multiply_3699/fq_weights_1" [label="[32, 1, 1, 1]", style=solid]; +"259 Constant_5583" -> "256 Multiply_3699/fq_weights_1" [label="[32, 1, 1, 1]", style=solid]; +"260 Constant_5582" -> "256 Multiply_3699/fq_weights_1" [label="[32, 1, 1, 1]", style=solid]; +"261 Gather_4242" -> "256 Multiply_3699/fq_weights_1" [label="[32, 3, 3, 3]", style=solid]; +"262 Constant_2326" -> "2 Divide_2366" [label="[4]", style=dashed]; +"263 Constant_5580" -> "1 image_input/fq_output_0" [label="[]", style=solid]; +"264 Constant_5579" -> "1 image_input/fq_output_0" [label="[]", style=solid]; +"265 Constant_5578" -> "1 image_input/fq_output_0" [label="[]", style=solid]; +"266 Constant_5577" -> "1 image_input/fq_output_0" [label="[]", style=solid]; +"267 Constant_5520" -> "38 Transpose_1329/fq_output_0" [label="[]", style=solid]; +"268 Constant_5519" -> "38 Transpose_1329/fq_output_0" [label="[]", style=solid]; +"269 Constant_5518" -> "38 Transpose_1329/fq_output_0" [label="[]", style=solid]; +"270 Constant_5517" -> "38 Transpose_1329/fq_output_0" [label="[]", style=solid]; +"271 Transpose_1327" -> "34 Transpose_1329" [label="[1, 1, 1, 1]", style=solid]; +"272 Constant_3763" -> "30 Transpose_1323" [label="[1, 32, 1, 1]", style=solid]; +"273 Multiply_3755/fq_weights_1" -> "26 Multiply_3755" [label="[32, 32, 3, 3]", style=solid]; +"274 Constant_5615" -> "273 Multiply_3755/fq_weights_1" [label="[32, 1, 1, 
1]", style=solid]; +"275 Constant_5614" -> "273 Multiply_3755/fq_weights_1" [label="[32, 1, 1, 1]", style=solid]; +"276 Constant_5613" -> "273 Multiply_3755/fq_weights_1" [label="[32, 1, 1, 1]", style=solid]; +"277 Constant_5612" -> "273 Multiply_3755/fq_weights_1" [label="[32, 1, 1, 1]", style=solid]; +"278 Multiply_3992" -> "273 Multiply_3755/fq_weights_1" [label="[32, 32, 3, 3]", style=solid]; +"279 Constant_5525" -> "58 Transpose_1462/fq_output_0" [label="[]", style=solid]; +"280 Constant_5524" -> "58 Transpose_1462/fq_output_0" [label="[]", style=solid]; +"281 Constant_5523" -> "58 Transpose_1462/fq_output_0" [label="[]", style=solid]; +"282 Constant_5522" -> "58 Transpose_1462/fq_output_0" [label="[]", style=solid]; +"283 Transpose_1460" -> "54 Transpose_1462" [label="[1, 1, 1, 1]", style=solid]; +"284 Constant_3819" -> "50 Transpose_1456" [label="[1, 64, 1, 1]", style=solid]; +"285 Multiply_3811/fq_weights_1" -> "46 Multiply_3811" [label="[64, 64, 3, 3]", style=solid]; +"286 Constant_5635" -> "285 Multiply_3811/fq_weights_1" [label="[64, 1, 1, 1]", style=solid]; +"287 Constant_5634" -> "285 Multiply_3811/fq_weights_1" [label="[64, 1, 1, 1]", style=solid]; +"288 Constant_5633" -> "285 Multiply_3811/fq_weights_1" [label="[64, 1, 1, 1]", style=solid]; +"289 Constant_5632" -> "285 Multiply_3811/fq_weights_1" [label="[64, 1, 1, 1]", style=solid]; +"290 Multiply_4016" -> "285 Multiply_3811/fq_weights_1" [label="[64, 64, 3, 3]", style=solid]; +"291 Constant_5540" -> "75 Transpose_1595/fq_output_0" [label="[]", style=solid]; +"292 Constant_5539" -> "75 Transpose_1595/fq_output_0" [label="[]", style=solid]; +"293 Constant_5538" -> "75 Transpose_1595/fq_output_0" [label="[]", style=solid]; +"294 Constant_5537" -> "75 Transpose_1595/fq_output_0" [label="[]", style=solid]; +"295 Transpose_1593" -> "72 Transpose_1595" [label="[1, 1, 1, 1]", style=solid]; +"296 Constant_3875" -> "69 Transpose_1589" [label="[1, 128, 1, 1]", style=solid]; +"297 Multiply_3867/fq_weights_1" -> "66 Multiply_3867" [label="[128, 128, 3, 3]", style=solid]; +"298 Constant_5655" -> "297 Multiply_3867/fq_weights_1" [label="[128, 1, 1, 1]", style=solid]; +"299 Constant_5654" -> "297 Multiply_3867/fq_weights_1" [label="[128, 1, 1, 1]", style=solid]; +"300 Constant_5653" -> "297 Multiply_3867/fq_weights_1" [label="[128, 1, 1, 1]", style=solid]; +"301 Constant_5652" -> "297 Multiply_3867/fq_weights_1" [label="[128, 1, 1, 1]", style=solid]; +"302 Multiply_4040" -> "297 Multiply_3867/fq_weights_1" [label="[128, 128, 3, 3]", style=solid]; +"303 Constant_1724" -> "103 Transpose_1725" [label="[4]", style=dashed]; +"304 Constant_5575" -> "99 up_sampling2d/resize/ResizeNearestNeighbor/fq_output_0" [label="[]", style=solid]; +"305 Constant_5574" -> "99 up_sampling2d/resize/ResizeNearestNeighbor/fq_output_0" [label="[]", style=solid]; +"306 Constant_5573" -> "99 up_sampling2d/resize/ResizeNearestNeighbor/fq_output_0" [label="[]", style=solid]; +"307 Constant_5572" -> "99 up_sampling2d/resize/ResizeNearestNeighbor/fq_output_0" [label="[]", style=solid]; +"308 Constant_669" -> "95 up_sampling2d/resize/ResizeNearestNeighbor" [label="[2]", style=dashed]; +"309 up_sampling2d/Const" -> "102 up_sampling2d/mul" [label="[2]", style=dashed]; +"310 up_sampling2d/strided_slice/stack_2" -> "98 up_sampling2d/strided_slice" [label="[1]", style=dashed]; +"311 up_sampling2d/strided_slice/stack_1" -> "98 up_sampling2d/strided_slice" [label="[1]", style=dashed]; +"312 up_sampling2d/strided_slice/stack" -> "98 up_sampling2d/strided_slice" [label="[1]", 
style=dashed]; +"313 Constant_1722" -> "91 leaky_re_lu_17/LeakyRelu" [label="[4]", style=dashed]; +"314 Constant_5690" -> "88 Transpose_1723/fq_output_0" [label="[]", style=solid]; +"315 Constant_5689" -> "88 Transpose_1723/fq_output_0" [label="[]", style=solid]; +"316 Constant_5688" -> "88 Transpose_1723/fq_output_0" [label="[]", style=solid]; +"317 Constant_5687" -> "88 Transpose_1723/fq_output_0" [label="[]", style=solid]; +"318 Transpose_1721" -> "85 Transpose_1723" [label="[1, 1, 1, 1]", style=solid]; +"319 Constant_3931" -> "82 Transpose_1717" [label="[1, 128, 1, 1]", style=solid]; +"320 Multiply_3923/fq_weights_1" -> "79 Multiply_3923" [label="[128, 256, 1, 1]", style=solid]; +"321 Constant_5685" -> "320 Multiply_3923/fq_weights_1" [label="[128, 1, 1, 1]", style=solid]; +"322 Constant_5684" -> "320 Multiply_3923/fq_weights_1" [label="[128, 1, 1, 1]", style=solid]; +"323 Constant_5683" -> "320 Multiply_3923/fq_weights_1" [label="[128, 1, 1, 1]", style=solid]; +"324 Constant_5682" -> "320 Multiply_3923/fq_weights_1" [label="[128, 1, 1, 1]", style=solid]; +"325 Multiply_4064" -> "320 Multiply_3923/fq_weights_1" [label="[128, 256, 1, 1]", style=solid]; +"326 Constant_5680" -> "77 Transpose_1693/fq_output_0" [label="[]", style=solid]; +"327 Constant_5679" -> "77 Transpose_1693/fq_output_0" [label="[]", style=solid]; +"328 Constant_5678" -> "77 Transpose_1693/fq_output_0" [label="[]", style=solid]; +"329 Constant_5677" -> "77 Transpose_1693/fq_output_0" [label="[]", style=solid]; +"330 Transpose_1691" -> "74 Transpose_1693" [label="[1, 1, 1, 1]", style=solid]; +"331 Constant_3917" -> "71 Transpose_1687" [label="[1, 256, 1, 1]", style=solid]; +"332 Multiply_3909/fq_weights_1" -> "68 Multiply_3909" [label="[256, 512, 1, 1]", style=solid]; +"333 Constant_5675" -> "332 Multiply_3909/fq_weights_1" [label="[256, 1, 1, 1]", style=solid]; +"334 Constant_5674" -> "332 Multiply_3909/fq_weights_1" [label="[256, 1, 1, 1]", style=solid]; +"335 Constant_5673" -> "332 Multiply_3909/fq_weights_1" [label="[256, 1, 1, 1]", style=solid]; +"336 Constant_5672" -> "332 Multiply_3909/fq_weights_1" [label="[256, 1, 1, 1]", style=solid]; +"337 Multiply_4058" -> "332 Multiply_3909/fq_weights_1" [label="[256, 512, 1, 1]", style=solid]; +"338 Constant_5670" -> "65 Transpose_1663/fq_output_0" [label="[]", style=solid]; +"339 Constant_5669" -> "65 Transpose_1663/fq_output_0" [label="[]", style=solid]; +"340 Constant_5668" -> "65 Transpose_1663/fq_output_0" [label="[]", style=solid]; +"341 Constant_5667" -> "65 Transpose_1663/fq_output_0" [label="[]", style=solid]; +"342 Transpose_1661" -> "63 Transpose_1663" [label="[1, 1, 1, 1]", style=solid]; +"343 Constant_3903" -> "60 Transpose_1657" [label="[1, 512, 1, 1]", style=solid]; +"344 Multiply_3895/fq_weights_1" -> "56 Multiply_3895" [label="[512, 512, 3, 3]", style=solid]; +"345 Constant_5665" -> "344 Multiply_3895/fq_weights_1" [label="[512, 1, 1, 1]", style=solid]; +"346 Constant_5664" -> "344 Multiply_3895/fq_weights_1" [label="[512, 1, 1, 1]", style=solid]; +"347 Constant_5663" -> "344 Multiply_3895/fq_weights_1" [label="[512, 1, 1, 1]", style=solid]; +"348 Constant_5662" -> "344 Multiply_3895/fq_weights_1" [label="[512, 1, 1, 1]", style=solid]; +"349 Multiply_4052" -> "344 Multiply_3895/fq_weights_1" [label="[512, 512, 3, 3]", style=solid]; +"350 Constant_1798" -> "100 conv2d_17/BiasAdd" [label="[4]", style=dashed]; +"351 Transpose_1797" -> "96 Transpose_1799" [label="[1, 255, 1, 1]", style=solid]; +"352 Convolution_749/fq_weights_1" -> "92 Convolution_749" 
[label="[255, 512, 1, 1]", style=solid]; +"353 Constant_5720" -> "352 Convolution_749/fq_weights_1" [label="[255, 1, 1, 1]", style=solid]; +"354 Constant_5719" -> "352 Convolution_749/fq_weights_1" [label="[255, 1, 1, 1]", style=solid]; +"355 Constant_5718" -> "352 Convolution_749/fq_weights_1" [label="[255, 1, 1, 1]", style=solid]; +"356 Constant_5717" -> "352 Convolution_749/fq_weights_1" [label="[255, 1, 1, 1]", style=solid]; +"357 Transpose_748" -> "352 Convolution_749/fq_weights_1" [label="[255, 512, 1, 1]", style=solid]; +"358 Constant_5715" -> "89 Transpose_1793/fq_output_0" [label="[]", style=solid]; +"359 Constant_5714" -> "89 Transpose_1793/fq_output_0" [label="[]", style=solid]; +"360 Constant_5713" -> "89 Transpose_1793/fq_output_0" [label="[]", style=solid]; +"361 Constant_5712" -> "89 Transpose_1793/fq_output_0" [label="[]", style=solid]; +"362 Transpose_1791" -> "86 Transpose_1793" [label="[1, 1, 1, 1]", style=solid]; +"363 Constant_3959" -> "83 Transpose_1787" [label="[1, 512, 1, 1]", style=solid]; +"364 Multiply_3951/fq_weights_1" -> "80 Multiply_3951" [label="[512, 256, 3, 3]", style=solid]; +"365 Constant_5710" -> "364 Multiply_3951/fq_weights_1" [label="[512, 1, 1, 1]", style=solid]; +"366 Constant_5709" -> "364 Multiply_3951/fq_weights_1" [label="[512, 1, 1, 1]", style=solid]; +"367 Constant_5708" -> "364 Multiply_3951/fq_weights_1" [label="[512, 1, 1, 1]", style=solid]; +"368 Constant_5707" -> "364 Multiply_3951/fq_weights_1" [label="[512, 1, 1, 1]", style=solid]; +"369 Multiply_4076" -> "364 Multiply_3951/fq_weights_1" [label="[512, 256, 3, 3]", style=solid]; +} diff --git a/tests/openvino/native/data/reference_scales/IntegerModel_compressed_weights.json b/tests/openvino/native/data/reference_scales/IntegerModel_compressed_weights.json new file mode 100644 index 00000000000..a720bbc4f97 --- /dev/null +++ b/tests/openvino/native/data/reference_scales/IntegerModel_compressed_weights.json @@ -0,0 +1,242 @@ +{ + "matmul_2_data": { + "compressed_weight": [ + [ + 115, + 51, + 154, + 255, + 79, + 18, + 139 + ], + [ + 59, + 27, + 174, + 89, + 201, + 60, + 255 + ], + [ + 110, + 32, + 189, + 255, + 132, + 255, + 150 + ], + [ + 190, + 255, + 255, + 255, + 206, + 255, + 223 + ], + [ + 165, + 245, + 129, + 229, + 222, + 255, + 36 + ], + [ + 192, + 245, + 255, + 4, + 228, + 255, + 253 + ] + ], + "zero_point": [ + [ + 0.0 + ], + [ + 0.0 + ], + [ + 0.0 + ], + [ + 0.0 + ], + [ + 0.0 + ], + [ + 0.0 + ] + ], + "scale": [ + [ + 0.0029188350308686495 + ], + [ + 0.0033386670984327793 + ], + [ + 0.003329785307869315 + ], + [ + 0.0022347758058458567 + ], + [ + 0.003204419743269682 + ], + [ + 0.0037901517935097218 + ] + ] + }, + "matmul_1_data": { + "compressed_weight": [ + [ + 119, + 168, + 11, + 49, + 255, + 255 + ], + [ + 255, + 159, + 255, + 255, + 255, + 255 + ], + [ + 255, + 169, + 59, + 255, + 228, + 135 + ], + [ + 202, + 255, + 255, + 149, + 238, + 134 + ], + [ + 229, + 130, + 151, + 255, + 87, + 240 + ], + [ + 26, + 255, + 245, + 75, + 255, + 18 + ] + ], + "zero_point": [ + [ + 0.0 + ], + [ + 0.0 + ], + [ + 0.0 + ], + [ + 0.0 + ], + [ + 0.0 + ], + [ + 0.0 + ] + ], + "scale": [ + [ + 0.0025188422296196222 + ], + [ + 0.002406009240075946 + ], + [ + 0.0022995758336037397 + ], + [ + 0.00240068300627172 + ], + [ + 0.002600457752123475 + ], + [ + 0.0032075142953544855 + ] + ] + }, + "gather_2_data": { + "compressed_weight": [ + [ + 181, + 77, + 12, + 5, + 231, + 255 + ], + [ + 166, + 200, + 149, + 255, + 223, + 1 + ], + [ + 255, + 10, + 224, + 54, + 255, + 166 + ] + ], + "zero_point": [ + 
[ + 0.0 + ], + [ + 0.0 + ], + [ + 0.0 + ] + ], + "scale": [ + [ + 0.0035146193113178015 + ], + [ + 0.003656211541965604 + ], + [ + 0.003253307193517685 + ] + ] + } +} \ No newline at end of file diff --git a/tests/openvino/native/data/reference_scales/IntegerModel_mixed.json b/tests/openvino/native/data/reference_scales/IntegerModel_mixed.json index 269b16183f1..4dc60d69c0a 100644 --- a/tests/openvino/native/data/reference_scales/IntegerModel_mixed.json +++ b/tests/openvino/native/data/reference_scales/IntegerModel_mixed.json @@ -1,3886 +1,236 @@ { "MatMul_2/fq_output_0": { "input_low": 0.0, - "input_high": 57.1154670715332, + "input_high": 2.933105707168579, "output_low": 0.0, - "output_high": 57.1154670715332 + "output_high": 2.933105707168579 }, "MatMul_2/fq_weights_1": { "input_low": [ [ - -0.9971370697021484 + -0.7963242530822754 ], [ - -0.9977759718894958 + -0.9421131014823914 ], [ - -0.9985922574996948 + -0.9545904994010925 ], [ - -0.9985772371292114 + -0.9950965046882629 ], [ - -0.9981985092163086 + -0.9320597052574158 ], [ - -0.9927956461906433 - ], - [ - -0.985166072845459 - ], - [ - -0.9947773814201355 - ], - [ - -0.9905215501785278 - ], - [ - -0.9992762207984924 - ], - [ - -0.9974793791770935 - ], - [ - -0.9943581223487854 - ], - [ - -0.9967403411865234 - ], - [ - -0.9930447936058044 - ], - [ - -0.998615562915802 - ], - [ - -0.9957616925239563 - ], - [ - -0.9993053674697876 - ], - [ - -0.9985345602035522 - ], - [ - -0.9952551126480103 - ], - [ - -0.9955055713653564 - ], - [ - -0.9950133562088013 - ], - [ - -0.9997806549072266 - ], - [ - -0.9947424530982971 - ], - [ - -0.999694287776947 - ], - [ - -0.9986006617546082 - ], - [ - -0.9902195930480957 - ], - [ - -0.9995226860046387 - ], - [ - -0.9848650693893433 - ], - [ - -0.9950981736183167 - ], - [ - -0.9987800121307373 - ], - [ - -0.997323751449585 - ], - [ - -0.9988982677459717 - ], - [ - -0.9920604825019836 - ], - [ - -0.9928877949714661 - ], - [ - -0.9807276129722595 - ], - [ - -0.9976627230644226 - ], - [ - -0.9999291896820068 - ], - [ - -0.9958432912826538 - ], - [ - -0.9938291311264038 - ], - [ - -0.9946627616882324 - ], - [ - -0.9998650550842285 - ], - [ - -0.9900315999984741 - ], - [ - -0.9974269866943359 - ], - [ - -0.9897720813751221 - ], - [ - -0.9957936406135559 - ], - [ - -0.9932218194007874 - ], - [ - -0.996446967124939 - ], - [ - -0.9940508604049683 - ], - [ - -0.9955636262893677 - ], - [ - -0.9948257803916931 - ], - [ - -0.9886698722839355 - ], - [ - -0.9988534450531006 - ], - [ - -0.9858165979385376 - ], - [ - -0.9916569590568542 - ], - [ - -0.9982323050498962 - ], - [ - -0.9838352799415588 - ], - [ - -0.9961373805999756 - ], - [ - -0.9817739129066467 - ], - [ - -0.994723379611969 - ], - [ - -0.9983773827552795 - ], - [ - -0.9897892475128174 - ], - [ - -0.998907208442688 - ], - [ - -0.9876444935798645 - ], - [ - -0.9996252059936523 - ], - [ - -0.9927592277526855 - ], - [ - -0.9966170787811279 - ], - [ - -0.9986746311187744 - ], - [ - -0.9993210434913635 - ], - [ - -0.9962468147277832 - ], - [ - -0.9961517453193665 - ], - [ - -0.9974855780601501 - ], - [ - -0.9987379908561707 - ], - [ - -0.9935858845710754 - ], - [ - -0.9991754293441772 - ], - [ - -0.9976705312728882 - ], - [ - -0.9942941665649414 - ], - [ - -0.9995023608207703 - ], - [ - -0.998296320438385 - ], - [ - -0.9980056285858154 - ], - [ - -0.9947104454040527 - ], - [ - -0.9981767535209656 - ], - [ - -0.9927842617034912 - ], - [ - -0.9992777109146118 - ], - [ - -0.999796450138092 - ], - [ - -0.9997242093086243 - ], - [ - -0.9993578195571899 - ], - [ - 
-0.9955887198448181 - ], - [ - -0.9814757108688354 - ], - [ - -0.9994866847991943 - ], - [ - -0.9925956130027771 - ], - [ - -0.9992111921310425 - ], - [ - -0.9569488167762756 - ], - [ - -0.9897102117538452 - ], - [ - -0.9939751625061035 - ], - [ - -0.9947844743728638 - ], - [ - -0.9927484393119812 - ], - [ - -0.9980167150497437 - ], - [ - -0.9944462776184082 - ], - [ - -0.99897700548172 - ], - [ - -0.9983107447624207 - ], - [ - -0.9956953525543213 - ], - [ - -0.9843021631240845 - ], - [ - -0.994292140007019 - ], - [ - -0.9900123476982117 - ], - [ - -0.9861699938774109 - ], - [ - -0.9963985681533813 - ], - [ - -0.9996615052223206 - ], - [ - -0.9802669882774353 - ], - [ - -0.9985330104827881 - ], - [ - -0.9970524311065674 - ], - [ - -0.9997991323471069 - ], - [ - -0.9907543063163757 - ], - [ - -0.998393714427948 - ], - [ - -0.995525062084198 - ], - [ - -0.9998023509979248 - ], - [ - -0.9971622228622437 - ], - [ - -0.9992753863334656 - ], - [ - -0.9999622106552124 - ], - [ - -0.9815661311149597 - ], - [ - -0.9994043707847595 - ], - [ - -0.9961485862731934 - ], - [ - -0.9938349723815918 - ], - [ - -0.9985218048095703 - ], - [ - -0.9955998063087463 - ], - [ - -0.9971383213996887 - ], - [ - -0.9976557493209839 - ], - [ - -0.9903542399406433 - ], - [ - -0.9938679337501526 - ], - [ - -0.9987586736679077 - ], - [ - -0.9968150854110718 - ], - [ - -0.9980546236038208 - ], - [ - -0.9992469549179077 - ], - [ - -0.98200523853302 - ], - [ - -0.9988876581192017 - ], - [ - -0.9937284588813782 - ], - [ - -0.9991428256034851 - ], - [ - -0.9938818216323853 - ], - [ - -0.9984561204910278 - ], - [ - -0.9907351136207581 - ], - [ - -0.9804718494415283 - ], - [ - -0.9957432150840759 - ], - [ - -0.9959821701049805 - ], - [ - -0.9964268803596497 - ], - [ - -0.9879118800163269 - ], - [ - -0.9956321120262146 - ], - [ - -0.993153989315033 - ], - [ - -0.9922581315040588 - ], - [ - -0.9957557320594788 - ], - [ - -0.9962975978851318 - ], - [ - -0.998455286026001 - ], - [ - -0.9958142638206482 - ], - [ - -0.9971563816070557 - ], - [ - -0.9996784925460815 - ], - [ - -0.9956899285316467 - ], - [ - -0.996746301651001 - ], - [ - -0.9967062473297119 - ], - [ - -0.9999867677688599 - ], - [ - -0.9880314469337463 - ], - [ - -0.9919611215591431 - ], - [ - -0.9958345293998718 - ] - ], - "input_high": [ - [ - 0.9971370697021484 - ], - [ - 0.9977759718894958 - ], - [ - 0.9985922574996948 - ], - [ - 0.9985772371292114 - ], - [ - 0.9981985092163086 - ], - [ - 0.9927956461906433 - ], - [ - 0.985166072845459 - ], - [ - 0.9947773814201355 - ], - [ - 0.9905215501785278 - ], - [ - 0.9992762207984924 - ], - [ - 0.9974793791770935 - ], - [ - 0.9943581223487854 - ], - [ - 0.9967403411865234 - ], - [ - 0.9930447936058044 - ], - [ - 0.998615562915802 - ], - [ - 0.9957616925239563 - ], - [ - 0.9993053674697876 - ], - [ - 0.9985345602035522 - ], - [ - 0.9952551126480103 - ], - [ - 0.9955055713653564 - ], - [ - 0.9950133562088013 - ], - [ - 0.9997806549072266 - ], - [ - 0.9947424530982971 - ], - [ - 0.999694287776947 - ], - [ - 0.9986006617546082 - ], - [ - 0.9902195930480957 - ], - [ - 0.9995226860046387 - ], - [ - 0.9848650693893433 - ], - [ - 0.9950981736183167 - ], - [ - 0.9987800121307373 - ], - [ - 0.997323751449585 - ], - [ - 0.9988982677459717 - ], - [ - 0.9920604825019836 - ], - [ - 0.9928877949714661 - ], - [ - 0.9807276129722595 - ], - [ - 0.9976627230644226 - ], - [ - 0.9999291896820068 - ], - [ - 0.9958432912826538 - ], - [ - 0.9938291311264038 - ], - [ - 0.9946627616882324 - ], - [ - 0.9998650550842285 - ], - [ - 0.9900315999984741 - 
], - [ - 0.9974269866943359 - ], - [ - 0.9897720813751221 - ], - [ - 0.9957936406135559 - ], - [ - 0.9932218194007874 - ], - [ - 0.996446967124939 - ], - [ - 0.9940508604049683 - ], - [ - 0.9955636262893677 - ], - [ - 0.9948257803916931 - ], - [ - 0.9886698722839355 - ], - [ - 0.9988534450531006 - ], - [ - 0.9858165979385376 - ], - [ - 0.9916569590568542 - ], - [ - 0.9982323050498962 - ], - [ - 0.9838352799415588 - ], - [ - 0.9961373805999756 - ], - [ - 0.9817739129066467 - ], - [ - 0.994723379611969 - ], - [ - 0.9983773827552795 - ], - [ - 0.9897892475128174 - ], - [ - 0.998907208442688 - ], - [ - 0.9876444935798645 - ], - [ - 0.9996252059936523 - ], - [ - 0.9927592277526855 - ], - [ - 0.9966170787811279 - ], - [ - 0.9986746311187744 - ], - [ - 0.9993210434913635 - ], - [ - 0.9962468147277832 - ], - [ - 0.9961517453193665 - ], - [ - 0.9974855780601501 - ], - [ - 0.9987379908561707 - ], - [ - 0.9935858845710754 - ], - [ - 0.9991754293441772 - ], - [ - 0.9976705312728882 - ], - [ - 0.9942941665649414 - ], - [ - 0.9995023608207703 - ], - [ - 0.998296320438385 - ], - [ - 0.9980056285858154 - ], - [ - 0.9947104454040527 - ], - [ - 0.9981767535209656 - ], - [ - 0.9927842617034912 - ], - [ - 0.9992777109146118 - ], - [ - 0.999796450138092 - ], - [ - 0.9997242093086243 - ], - [ - 0.9993578195571899 - ], - [ - 0.9955887198448181 - ], - [ - 0.9814757108688354 - ], - [ - 0.9994866847991943 - ], - [ - 0.9925956130027771 - ], - [ - 0.9992111921310425 - ], - [ - 0.9569488167762756 - ], - [ - 0.9897102117538452 - ], - [ - 0.9939751625061035 - ], - [ - 0.9947844743728638 - ], - [ - 0.9927484393119812 - ], - [ - 0.9980167150497437 - ], - [ - 0.9944462776184082 - ], - [ - 0.99897700548172 - ], - [ - 0.9983107447624207 - ], - [ - 0.9956953525543213 - ], - [ - 0.9843021631240845 - ], - [ - 0.994292140007019 - ], - [ - 0.9900123476982117 - ], - [ - 0.9861699938774109 - ], - [ - 0.9963985681533813 - ], - [ - 0.9996615052223206 - ], - [ - 0.9802669882774353 - ], - [ - 0.9985330104827881 - ], - [ - 0.9970524311065674 - ], - [ - 0.9997991323471069 - ], - [ - 0.9907543063163757 - ], - [ - 0.998393714427948 - ], - [ - 0.995525062084198 - ], - [ - 0.9998023509979248 - ], - [ - 0.9971622228622437 - ], - [ - 0.9992753863334656 - ], - [ - 0.9999622106552124 - ], - [ - 0.9815661311149597 - ], - [ - 0.9994043707847595 - ], - [ - 0.9961485862731934 - ], - [ - 0.9938349723815918 - ], - [ - 0.9985218048095703 - ], - [ - 0.9955998063087463 - ], - [ - 0.9971383213996887 - ], - [ - 0.9976557493209839 - ], - [ - 0.9903542399406433 - ], - [ - 0.9938679337501526 - ], - [ - 0.9987586736679077 - ], - [ - 0.9968150854110718 - ], - [ - 0.9980546236038208 - ], - [ - 0.9992469549179077 - ], - [ - 0.98200523853302 - ], - [ - 0.9988876581192017 - ], - [ - 0.9937284588813782 - ], - [ - 0.9991428256034851 - ], - [ - 0.9938818216323853 - ], - [ - 0.9984561204910278 - ], - [ - 0.9907351136207581 - ], - [ - 0.9804718494415283 - ], - [ - 0.9957432150840759 - ], - [ - 0.9959821701049805 - ], - [ - 0.9964268803596497 - ], - [ - 0.9879118800163269 - ], - [ - 0.9956321120262146 - ], - [ - 0.993153989315033 - ], - [ - 0.9922581315040588 - ], - [ - 0.9957557320594788 - ], - [ - 0.9962975978851318 - ], - [ - 0.998455286026001 - ], - [ - 0.9958142638206482 - ], - [ - 0.9971563816070557 - ], - [ - 0.9996784925460815 - ], - [ - 0.9956899285316467 - ], - [ - 0.996746301651001 - ], - [ - 0.9967062473297119 - ], - [ - 0.9999867677688599 - ], - [ - 0.9880314469337463 - ], - [ - 0.9919611215591431 - ], - [ - 0.9958345293998718 - ] - ], - "output_low": [ - [ 
- -0.9971370697021484 - ], - [ - -0.9977759718894958 - ], - [ - -0.9985922574996948 - ], - [ - -0.9985772371292114 - ], - [ - -0.9981985092163086 - ], - [ - -0.9927956461906433 - ], - [ - -0.985166072845459 - ], - [ - -0.9947773814201355 - ], - [ - -0.9905215501785278 - ], - [ - -0.9992762207984924 - ], - [ - -0.9974793791770935 - ], - [ - -0.9943581223487854 - ], - [ - -0.9967403411865234 - ], - [ - -0.9930447936058044 - ], - [ - -0.998615562915802 - ], - [ - -0.9957616925239563 - ], - [ - -0.9993053674697876 - ], - [ - -0.9985345602035522 - ], - [ - -0.9952551126480103 - ], - [ - -0.9955055713653564 - ], - [ - -0.9950133562088013 - ], - [ - -0.9997806549072266 - ], - [ - -0.9947424530982971 - ], - [ - -0.999694287776947 - ], - [ - -0.9986006617546082 - ], - [ - -0.9902195930480957 - ], - [ - -0.9995226860046387 - ], - [ - -0.9848650693893433 - ], - [ - -0.9950981736183167 - ], - [ - -0.9987800121307373 - ], - [ - -0.997323751449585 - ], - [ - -0.9988982677459717 - ], - [ - -0.9920604825019836 - ], - [ - -0.9928877949714661 - ], - [ - -0.9807276129722595 - ], - [ - -0.9976627230644226 - ], - [ - -0.9999291896820068 - ], - [ - -0.9958432912826538 - ], - [ - -0.9938291311264038 - ], - [ - -0.9946627616882324 - ], - [ - -0.9998650550842285 - ], - [ - -0.9900315999984741 - ], - [ - -0.9974269866943359 - ], - [ - -0.9897720813751221 - ], - [ - -0.9957936406135559 - ], - [ - -0.9932218194007874 - ], - [ - -0.996446967124939 - ], - [ - -0.9940508604049683 - ], - [ - -0.9955636262893677 - ], - [ - -0.9948257803916931 - ], - [ - -0.9886698722839355 - ], - [ - -0.9988534450531006 - ], - [ - -0.9858165979385376 - ], - [ - -0.9916569590568542 - ], - [ - -0.9982323050498962 - ], - [ - -0.9838352799415588 - ], - [ - -0.9961373805999756 - ], - [ - -0.9817739129066467 - ], - [ - -0.994723379611969 - ], - [ - -0.9983773827552795 - ], - [ - -0.9897892475128174 - ], - [ - -0.998907208442688 - ], - [ - -0.9876444935798645 - ], - [ - -0.9996252059936523 - ], - [ - -0.9927592277526855 - ], - [ - -0.9966170787811279 - ], - [ - -0.9986746311187744 - ], - [ - -0.9993210434913635 - ], - [ - -0.9962468147277832 - ], - [ - -0.9961517453193665 - ], - [ - -0.9974855780601501 - ], - [ - -0.9987379908561707 - ], - [ - -0.9935858845710754 - ], - [ - -0.9991754293441772 - ], - [ - -0.9976705312728882 - ], - [ - -0.9942941665649414 - ], - [ - -0.9995023608207703 - ], - [ - -0.998296320438385 - ], - [ - -0.9980056285858154 - ], - [ - -0.9947104454040527 - ], - [ - -0.9981767535209656 - ], - [ - -0.9927842617034912 - ], - [ - -0.9992777109146118 - ], - [ - -0.999796450138092 - ], - [ - -0.9997242093086243 - ], - [ - -0.9993578195571899 - ], - [ - -0.9955887198448181 - ], - [ - -0.9814757108688354 - ], - [ - -0.9994866847991943 - ], - [ - -0.9925956130027771 - ], - [ - -0.9992111921310425 - ], - [ - -0.9569488167762756 - ], - [ - -0.9897102117538452 - ], - [ - -0.9939751625061035 - ], - [ - -0.9947844743728638 - ], - [ - -0.9927484393119812 - ], - [ - -0.9980167150497437 - ], - [ - -0.9944462776184082 - ], - [ - -0.99897700548172 - ], - [ - -0.9983107447624207 - ], - [ - -0.9956953525543213 - ], - [ - -0.9843021631240845 - ], - [ - -0.994292140007019 - ], - [ - -0.9900123476982117 - ], - [ - -0.9861699938774109 - ], - [ - -0.9963985681533813 - ], - [ - -0.9996615052223206 - ], - [ - -0.9802669882774353 - ], - [ - -0.9985330104827881 - ], - [ - -0.9970524311065674 - ], - [ - -0.9997991323471069 - ], - [ - -0.9907543063163757 - ], - [ - -0.998393714427948 - ], - [ - -0.995525062084198 - ], - [ - -0.9998023509979248 - ], - [ - 
-0.9971622228622437 - ], - [ - -0.9992753863334656 - ], - [ - -0.9999622106552124 - ], - [ - -0.9815661311149597 - ], - [ - -0.9994043707847595 - ], - [ - -0.9961485862731934 - ], - [ - -0.9938349723815918 - ], - [ - -0.9985218048095703 - ], - [ - -0.9955998063087463 - ], - [ - -0.9971383213996887 - ], - [ - -0.9976557493209839 - ], - [ - -0.9903542399406433 - ], - [ - -0.9938679337501526 - ], - [ - -0.9987586736679077 - ], - [ - -0.9968150854110718 - ], - [ - -0.9980546236038208 - ], - [ - -0.9992469549179077 - ], - [ - -0.98200523853302 - ], - [ - -0.9988876581192017 - ], - [ - -0.9937284588813782 - ], - [ - -0.9991428256034851 - ], - [ - -0.9938818216323853 - ], - [ - -0.9984561204910278 - ], - [ - -0.9907351136207581 - ], - [ - -0.9804718494415283 - ], - [ - -0.9957432150840759 - ], - [ - -0.9959821701049805 - ], - [ - -0.9964268803596497 - ], - [ - -0.9879118800163269 - ], - [ - -0.9956321120262146 - ], - [ - -0.993153989315033 - ], - [ - -0.9922581315040588 - ], - [ - -0.9957557320594788 - ], - [ - -0.9962975978851318 - ], - [ - -0.998455286026001 - ], - [ - -0.9958142638206482 - ], - [ - -0.9971563816070557 - ], - [ - -0.9996784925460815 - ], - [ - -0.9956899285316467 - ], - [ - -0.996746301651001 - ], - [ - -0.9967062473297119 - ], - [ - -0.9999867677688599 - ], - [ - -0.9880314469337463 - ], - [ - -0.9919611215591431 - ], - [ - -0.9958345293998718 - ] - ], - "output_high": [ - [ - 0.9971370697021484 - ], - [ - 0.9977759718894958 - ], - [ - 0.9985922574996948 - ], - [ - 0.9985772371292114 - ], - [ - 0.9981985092163086 - ], - [ - 0.9927956461906433 - ], - [ - 0.985166072845459 - ], - [ - 0.9947773814201355 - ], - [ - 0.9905215501785278 - ], - [ - 0.9992762207984924 - ], - [ - 0.9974793791770935 - ], - [ - 0.9943581223487854 - ], - [ - 0.9967403411865234 - ], - [ - 0.9930447936058044 - ], - [ - 0.998615562915802 - ], - [ - 0.9957616925239563 - ], - [ - 0.9993053674697876 - ], - [ - 0.9985345602035522 - ], - [ - 0.9952551126480103 - ], - [ - 0.9955055713653564 - ], - [ - 0.9950133562088013 - ], - [ - 0.9997806549072266 - ], - [ - 0.9947424530982971 - ], - [ - 0.999694287776947 - ], - [ - 0.9986006617546082 - ], - [ - 0.9902195930480957 - ], - [ - 0.9995226860046387 - ], - [ - 0.9848650693893433 - ], - [ - 0.9950981736183167 - ], - [ - 0.9987800121307373 - ], - [ - 0.997323751449585 - ], - [ - 0.9988982677459717 - ], - [ - 0.9920604825019836 - ], - [ - 0.9928877949714661 - ], - [ - 0.9807276129722595 - ], - [ - 0.9976627230644226 - ], - [ - 0.9999291896820068 - ], - [ - 0.9958432912826538 - ], - [ - 0.9938291311264038 - ], - [ - 0.9946627616882324 - ], - [ - 0.9998650550842285 - ], - [ - 0.9900315999984741 - ], - [ - 0.9974269866943359 - ], - [ - 0.9897720813751221 - ], - [ - 0.9957936406135559 - ], - [ - 0.9932218194007874 - ], - [ - 0.996446967124939 - ], - [ - 0.9940508604049683 - ], - [ - 0.9955636262893677 - ], - [ - 0.9948257803916931 - ], - [ - 0.9886698722839355 - ], - [ - 0.9988534450531006 - ], - [ - 0.9858165979385376 - ], - [ - 0.9916569590568542 - ], - [ - 0.9982323050498962 - ], - [ - 0.9838352799415588 - ], - [ - 0.9961373805999756 - ], - [ - 0.9817739129066467 - ], - [ - 0.994723379611969 - ], - [ - 0.9983773827552795 - ], - [ - 0.9897892475128174 - ], - [ - 0.998907208442688 - ], - [ - 0.9876444935798645 - ], - [ - 0.9996252059936523 - ], - [ - 0.9927592277526855 - ], - [ - 0.9966170787811279 - ], - [ - 0.9986746311187744 - ], - [ - 0.9993210434913635 - ], - [ - 0.9962468147277832 - ], - [ - 0.9961517453193665 - ], - [ - 0.9974855780601501 - ], - [ - 0.9987379908561707 
- ], - [ - 0.9935858845710754 - ], - [ - 0.9991754293441772 - ], - [ - 0.9976705312728882 - ], - [ - 0.9942941665649414 - ], - [ - 0.9995023608207703 - ], - [ - 0.998296320438385 - ], - [ - 0.9980056285858154 - ], - [ - 0.9947104454040527 - ], - [ - 0.9981767535209656 - ], - [ - 0.9927842617034912 - ], - [ - 0.9992777109146118 - ], - [ - 0.999796450138092 - ], - [ - 0.9997242093086243 - ], - [ - 0.9993578195571899 - ], - [ - 0.9955887198448181 - ], - [ - 0.9814757108688354 - ], - [ - 0.9994866847991943 - ], - [ - 0.9925956130027771 - ], - [ - 0.9992111921310425 - ], - [ - 0.9569488167762756 - ], - [ - 0.9897102117538452 - ], - [ - 0.9939751625061035 - ], - [ - 0.9947844743728638 - ], - [ - 0.9927484393119812 - ], - [ - 0.9980167150497437 - ], - [ - 0.9944462776184082 - ], - [ - 0.99897700548172 - ], - [ - 0.9983107447624207 - ], - [ - 0.9956953525543213 - ], - [ - 0.9843021631240845 - ], - [ - 0.994292140007019 - ], - [ - 0.9900123476982117 - ], - [ - 0.9861699938774109 - ], - [ - 0.9963985681533813 - ], - [ - 0.9996615052223206 - ], - [ - 0.9802669882774353 - ], - [ - 0.9985330104827881 - ], - [ - 0.9970524311065674 - ], - [ - 0.9997991323471069 - ], - [ - 0.9907543063163757 - ], - [ - 0.998393714427948 - ], - [ - 0.995525062084198 - ], - [ - 0.9998023509979248 - ], - [ - 0.9971622228622437 - ], - [ - 0.9992753863334656 - ], - [ - 0.9999622106552124 - ], - [ - 0.9815661311149597 - ], - [ - 0.9994043707847595 - ], - [ - 0.9961485862731934 - ], - [ - 0.9938349723815918 - ], - [ - 0.9985218048095703 - ], - [ - 0.9955998063087463 - ], - [ - 0.9971383213996887 - ], - [ - 0.9976557493209839 - ], - [ - 0.9903542399406433 - ], - [ - 0.9938679337501526 - ], - [ - 0.9987586736679077 - ], - [ - 0.9968150854110718 - ], - [ - 0.9980546236038208 - ], - [ - 0.9992469549179077 - ], - [ - 0.98200523853302 - ], - [ - 0.9988876581192017 - ], - [ - 0.9937284588813782 - ], - [ - 0.9991428256034851 - ], - [ - 0.9938818216323853 - ], - [ - 0.9984561204910278 - ], - [ - 0.9907351136207581 - ], - [ - 0.9804718494415283 - ], - [ - 0.9957432150840759 - ], - [ - 0.9959821701049805 - ], - [ - 0.9964268803596497 - ], - [ - 0.9879118800163269 - ], - [ - 0.9956321120262146 - ], - [ - 0.993153989315033 - ], - [ - 0.9922581315040588 - ], - [ - 0.9957557320594788 - ], - [ - 0.9962975978851318 - ], - [ - 0.998455286026001 - ], - [ - 0.9958142638206482 - ], - [ - 0.9971563816070557 - ], - [ - 0.9996784925460815 - ], - [ - 0.9956899285316467 - ], - [ - 0.996746301651001 - ], - [ - 0.9967062473297119 - ], - [ - 0.9999867677688599 - ], - [ - 0.9880314469337463 - ], - [ - 0.9919611215591431 - ], - [ - 0.9958345293998718 - ] - ] - }, - "Input/fq_output_0": { - "input_low": 0.0, - "input_high": 0.997209906578064, - "output_low": 0.0, - "output_high": 0.997209906578064 - }, - "MatMul_1/fq_output_0": { - "input_low": 0.0, - "input_high": 47.62752914428711, - "output_low": 0.0, - "output_high": 47.62752914428711 - }, - "MatMul_1/fq_weights_1": { - "input_low": [ - [ - -0.9970278739929199 - ], - [ - -0.9808408617973328 - ], - [ - -0.9944129586219788 - ], - [ - -0.9971938133239746 - ], - [ - -0.9992196559906006 - ], - [ - -0.998683750629425 - ], - [ - -0.99868243932724 - ], - [ - -0.9950684905052185 - ], - [ - -0.976850688457489 - ], - [ - -0.9997206926345825 - ], - [ - -0.993675708770752 - ], - [ - -0.9989456534385681 - ], - [ - -0.9884542226791382 - ], - [ - -0.9923633933067322 - ], - [ - -0.9966399073600769 - ], - [ - -0.9916086792945862 - ], - [ - -0.9996746182441711 - ], - [ - -0.9927780032157898 - ], - [ - -0.9985724091529846 - 
], - [ - -0.9699029326438904 - ], - [ - -0.9918563961982727 - ], - [ - -0.999505877494812 - ], - [ - -0.9729548096656799 - ], - [ - -0.9898425340652466 - ], - [ - -0.9969669580459595 - ], - [ - -0.9981495141983032 - ], - [ - -0.9948304891586304 - ], - [ - -0.9993724822998047 - ], - [ - -0.9923088550567627 - ], - [ - -0.999050498008728 - ], - [ - -0.9883591532707214 - ], - [ - -0.9938135147094727 - ], - [ - -0.9923365712165833 - ], - [ - -0.9936618804931641 - ], - [ - -0.9931996464729309 - ], - [ - -0.9953643679618835 - ], - [ - -0.9999123215675354 - ], - [ - -0.9848306775093079 - ], - [ - -0.9940652251243591 - ], - [ - -0.9896738529205322 - ], - [ - -0.9745385050773621 - ], - [ - -0.9898044466972351 - ], - [ - -0.9963973760604858 - ], - [ - -0.9967551231384277 - ], - [ - -0.9933338165283203 - ], - [ - -0.9810178279876709 - ], - [ - -0.9956452250480652 - ], - [ - -0.9694769382476807 - ], - [ - -0.9754481911659241 - ], - [ - -0.9985426664352417 - ], - [ - -0.993887722492218 - ], - [ - -0.9912910461425781 - ], - [ - -0.9948264360427856 - ], - [ - -0.9901152849197388 - ], - [ - -0.995610773563385 - ], - [ - -0.9969016909599304 - ], - [ - -0.9890735149383545 - ], - [ - -0.9930339455604553 - ], - [ - -0.9849486947059631 - ], - [ - -0.9969156384468079 - ], - [ - -0.9991021752357483 - ], - [ - -0.9978500604629517 - ], - [ - -0.9976779818534851 - ], - [ - -0.9949596524238586 - ], - [ - -0.996618390083313 - ], - [ - -0.988395094871521 - ], - [ - -0.9954425096511841 - ], - [ - -0.997184157371521 - ], - [ - -0.9966145157814026 - ], - [ - -0.995242714881897 - ], - [ - -0.9966570734977722 - ], - [ - -0.9994654655456543 - ], - [ - -0.99825119972229 - ], - [ - -0.999591588973999 - ], - [ - -0.9902287125587463 - ], - [ - -0.9998602867126465 - ], - [ - -0.9954801201820374 - ], - [ - -0.9977607131004333 - ], - [ - -0.9887634515762329 - ], - [ - -0.9845426082611084 - ], - [ - -0.9950025677680969 - ], - [ - -0.9947744607925415 - ], - [ - -0.9819414019584656 - ], - [ - -0.9989105463027954 - ], - [ - -0.9920969009399414 - ], - [ - -0.99006187915802 - ], - [ - -0.9791988134384155 - ], - [ - -0.9981624484062195 - ], - [ - -0.999700665473938 - ], - [ - -0.9956314563751221 - ], - [ - -0.989037036895752 - ], - [ - -0.9909722805023193 - ], - [ - -0.9862728714942932 - ], - [ - -0.9966010451316833 - ], - [ - -0.9992076754570007 - ], - [ - -0.9947077631950378 - ], - [ - -0.9873133301734924 - ], - [ - -0.9985927939414978 - ], - [ - -0.9910207390785217 - ], - [ - -0.99626624584198 - ], - [ - -0.9944427013397217 - ], - [ - -0.9871867299079895 - ], - [ - -0.9864006638526917 - ], - [ - -0.9973322153091431 - ], - [ - -0.998275876045227 - ], - [ - -0.9989286065101624 - ], - [ - -0.9973595142364502 - ], - [ - -0.9950085878372192 - ], - [ - -0.9998424053192139 - ], - [ - -0.9961771368980408 - ], - [ - -0.999405026435852 - ], - [ - -0.9921284914016724 - ], - [ - -0.996304452419281 - ], - [ - -0.9969595074653625 - ], - [ - -0.9977396130561829 - ], - [ - -0.9927499890327454 - ], - [ - -0.9986027479171753 - ], - [ - -0.9997433423995972 - ], - [ - -0.9998571276664734 - ], - [ - -0.9922271966934204 - ], - [ - -0.9866094589233398 - ], - [ - -0.9990732073783875 - ], - [ - -0.9958819150924683 - ], - [ - -0.9940115213394165 - ], - [ - -0.9925696849822998 - ], - [ - -0.998586893081665 - ], - [ - -0.9847491979598999 - ], - [ - -0.9829020500183105 - ], - [ - -0.999756395816803 - ], - [ - -0.9919022917747498 - ], - [ - -0.9922955632209778 - ], - [ - -0.9874224662780762 - ], - [ - -0.9996545910835266 - ], - [ - -0.993647575378418 - ], - [ - 
-  [... remaining per-channel "input_low" entries removed (one value per channel, all close to -1.0) ...]
-  [... per-channel "input_high" entries removed (all close to +1.0) ...]
-  [... per-channel "output_low" entries removed (mirror of "input_low") ...]
-  [... per-channel "output_high" entries removed (mirror of "input_high") ...]
+  [... replaced by 6-channel ranges: +/-0.7963242530822754, 0.9421131014823914, 0.9545904994010925, 0.9950965046882629, 0.9320597052574158, 0.9811950325965881 ...]
+  [... added entries: "Gather_4/fq_input_0" (0.0 / 0.91275554895401), "MatMul_1/fq_output_0" (0.0 / 1.7037991285324097), "MatMul_1/fq_weights_1" (6-channel ranges +/-0.6706244349479675, 0.997209906578064, 0.721488356590271, 0.9340435266494751, 0.8902743458747864, 0.876484215259552), "Gather_2/fq_output_0" (0.0 / 0.91275554895401), "Gather_2/fq_weights_0" (3-channel ranges +/-0.91275554895401, 0.9350724220275879, 0.8631789088249207) ...]
-  [... removed entry: "Gather_2/fq_output_0" (0.0 / 0.997209906578064) ...]
 }
\ No newline at end of file
diff --git a/tests/openvino/native/data/reference_scales/IntegerModel_performance.json b/tests/openvino/native/data/reference_scales/IntegerModel_performance.json
index 269b16183f1..645988e4a01 100644
--- a/tests/openvino/native/data/reference_scales/IntegerModel_performance.json
+++ b/tests/openvino/native/data/reference_scales/IntegerModel_performance.json
@@ -1,3886 +1,190 @@
 {
   "MatMul_2/fq_output_0": "input_high" / "output_high" changed from 57.1154670715332 to 2.933105707168579
   "MatMul_2/fq_weights_1": per-channel "input_low" / "input_high" / "output_low" / "output_high" arrays (one value per channel, all close to +/-1.0) replaced by 6-channel ranges +/-0.7963242530822754, 0.9421131014823914, 0.9545904994010925, 0.9950965046882629, 0.9320597052574158, 0.9811950325965881
-  [... removed entries: "Input/fq_output_0" (0.0 / 0.997209906578064), "MatMul_1/fq_output_0" (0.0 / 47.62752914428711), old per-channel "MatMul_1/fq_weights_1" arrays (all close to +/-1.0), "Gather_2/fq_output_0" (0.0 / 0.997209906578064) ...]
+  [... added entries: "Gather_4/fq_input_0" (0.0 / 0.91275554895401), "MatMul_1/fq_output_0" (0.0 / 1.7037991285324097), "MatMul_1/fq_weights_1" (6-channel ranges +/-0.6706244349479675, 0.997209906578064, 0.721488356590271, 0.9340435266494751, 0.8902743458747864, 0.876484215259552), "Gather_2/fq_weights_0" (scalar range -0.9350724220275879 / 0.9350724220275879) ...]
 }
\ No newline at end of file
diff --git a/tests/openvino/native/data/reference_scales/LSTMSequenceModel_mixed.json b/tests/openvino/native/data/reference_scales/LSTMSequenceModel_mixed.json
index ea4697fe60b..e8ee468c0c2 100644
--- a/tests/openvino/native/data/reference_scales/LSTMSequenceModel_mixed.json
+++ b/tests/openvino/native/data/reference_scales/LSTMSequenceModel_mixed.json
@@ -4,6 +4,8 @@
+            -0.9949173331260681,
+            -0.992023229598999,
             -0.997209906578064
   [... the same pair of values (with matching signs) is inserted in three further hunks next to the existing +/-0.997209906578064 entries ...]
@@ -46,57 +54,12321 @@
 "LSTMSequence/fq_weights_5": {
     "input_low": [
+        [... per-channel entries added, each equal to -7.999999797903001e-05 ...]
-7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + 
-7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ] + ] + ], + "input_high": [ + [ + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 
7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + 
], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 
7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + 
], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 
7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ] + ] + ], + "output_low": [ + [ + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + 
-7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + 
-7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + 
-7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + 
-7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + 
-7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ] + ] + ], + "output_high": [ + [ + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 
7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + 
], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 
[Reference-scale JSON hunk, condensed: the patch replaces the former per-tensor (single-element) FakeQuantize parameters with per-channel arrays. The positive-bound array closing the preceding node and the "input_high"/"output_high" arrays of "LSTMSequence/fq_weights_4" are filled entirely with 7.999999797903001e-05, while its "input_low"/"output_low" arrays are filled entirely with -7.999999797903001e-05; the old single-element "input_high"/"output_low" entries are removed, and the repeated per-channel values of the "output_high" array continue below.]
], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 
7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], [ - -7.999999797903001e-05 - ] - ] - ], - "output_high": [ - [ + 7.999999797903001e-05 + ], [ 7.999999797903001e-05 - ] - ] - ] - }, - "LSTMSequence/fq_weights_4": { - "input_low": [ - [ + ], [ - -7.999999797903001e-05 - ] - ] - ], - "input_high": [ - [ + 7.999999797903001e-05 + ], [ 7.999999797903001e-05 - ] - ] - ], - "output_low": [ - [ + ], [ - -7.999999797903001e-05 - ] - ] - ], - "output_high": [ - [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 
7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], [ 7.999999797903001e-05 ] diff --git a/tests/openvino/native/data/reference_scales/LSTMSequenceModel_performance.json b/tests/openvino/native/data/reference_scales/LSTMSequenceModel_performance.json index ea4697fe60b..e8ee468c0c2 100644 --- a/tests/openvino/native/data/reference_scales/LSTMSequenceModel_performance.json +++ b/tests/openvino/native/data/reference_scales/LSTMSequenceModel_performance.json @@ -4,6 +4,8 @@ [ [ [ + -0.9949173331260681, + -0.992023229598999, -0.997209906578064 ] ] @@ -13,6 +15,8 @@ [ [ [ + 0.9949173331260681, + 0.992023229598999, 0.997209906578064 ] ] @@ -22,6 +26,8 @@ [ [ [ + -0.9949173331260681, + -0.992023229598999, -0.997209906578064 ] ] @@ -31,6 +37,8 @@ [ [ [ + 0.9949173331260681, + 0.992023229598999, 0.997209906578064 ] ] @@ -46,57 +54,12321 @@ "LSTMSequence/fq_weights_5": { "input_low": [ [ + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + 
-7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + 
-7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + 
-7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + 
-7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + 
-7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ] + ] + ], + "input_high": [ + [ + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 
7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + 
], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 
7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + 
], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 
7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ] + ] + ], + "output_low": [ + [ + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + 
], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + 
+                    -7.999999797903001e-05
+                ],
+                ... ("-7.999999797903001e-05" repeated for every remaining per-channel entry of "output_low"; full list elided) ...
+            ]
+        ],
+        "output_high": [
+            [
+                [
+                    7.999999797903001e-05
+                ],
+                ... ("7.999999797903001e-05" repeated for every per-channel entry) ...
+            ]
+        ]
+    },
+    "LSTMSequence/fq_weights_4": {
+        "input_low": [
+            [
+                [
+                    -7.999999797903001e-05
+                ],
+                ... ("-7.999999797903001e-05" repeated for every per-channel entry) ...
+            ]
+        ],
+        "input_high": [
+            [
+                [
+                    7.999999797903001e-05
+                ],
+                ... ("7.999999797903001e-05" repeated for every per-channel entry) ...
+            ]
+        ],
+        "output_low": [
+            [
+                [
+                    -7.999999797903001e-05
+                ],
+                ... ("-7.999999797903001e-05" repeated for every per-channel entry; list continues) ...
+                [
-7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + 
-7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], + [ + -7.999999797903001e-05 + ], [ -7.999999797903001e-05 ] ] ], - "input_high": [ + "output_high": [ [ [ 7.999999797903001e-05 - ] - ] - ], - "output_low": [ - [ + ], + [ + 
7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + 
], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 
7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + 
], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], [ - -7.999999797903001e-05 - ] - ] - ], - "output_high": [ - [ + 7.999999797903001e-05 + ], [ 7.999999797903001e-05 - ] - ] - ] - }, - "LSTMSequence/fq_weights_4": { - "input_low": [ - [ + ], [ - -7.999999797903001e-05 - ] - ] - ], - "input_high": [ - [ + 7.999999797903001e-05 + ], [ 7.999999797903001e-05 - ] - ] - ], - "output_low": [ - [ + ], [ - -7.999999797903001e-05 - ] - ] - ], - "output_high": [ - [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 
7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], + [ + 7.999999797903001e-05 + ], [ 7.999999797903001e-05 ] diff --git a/tests/openvino/native/data/reference_scales/UnifiedEmbeddingModel_mixed.json b/tests/openvino/native/data/reference_scales/UnifiedEmbeddingModel_mixed.json new file mode 100644 index 00000000000..efc4a7e1feb --- /dev/null +++ b/tests/openvino/native/data/reference_scales/UnifiedEmbeddingModel_mixed.json @@ -0,0 +1,218 @@ +{ + "MatMul_2/fq_weights_1": { + "input_low": [ + [ + -0.9421131014823914 + ] + ], + "input_high": [ + [ + 0.9421131014823914 + ] + ], + "output_low": [ + [ + -0.9421131014823914 + ] + ], + "output_high": [ + [ + 0.9421131014823914 + ] + ] + }, + "MatMul_1/fq_output_0": { + "input_low": 0.0, 
+ "input_high": 0.8132702112197876, + "output_low": 0.0, + "output_high": 0.8132702112197876 + }, + "MatMul_1/fq_weights_1": { + "input_low": [ + [ + [ + -0.6884467601776123, + -0.997209906578064, + -0.9808353185653687, + -0.721488356590271, + -0.6504592895507812 + ] + ], + [ + [ + -0.8902743458747864, + -0.48583537340164185, + -0.8894878625869751, + -0.9340435266494751, + -0.8326441645622253 + ] + ], + [ + [ + -0.7870982885360718, + -0.4503393769264221, + -0.876484215259552, + -0.5803323984146118, + -0.33611705899238586 + ] + ] + ], + "input_high": [ + [ + [ + 0.6884467601776123, + 0.997209906578064, + 0.9808353185653687, + 0.721488356590271, + 0.6504592895507812 + ] + ], + [ + [ + 0.8902743458747864, + 0.48583537340164185, + 0.8894878625869751, + 0.9340435266494751, + 0.8326441645622253 + ] + ], + [ + [ + 0.7870982885360718, + 0.4503393769264221, + 0.876484215259552, + 0.5803323984146118, + 0.33611705899238586 + ] + ] + ], + "output_low": [ + [ + [ + -0.6884467601776123, + -0.997209906578064, + -0.9808353185653687, + -0.721488356590271, + -0.6504592895507812 + ] + ], + [ + [ + -0.8902743458747864, + -0.48583537340164185, + -0.8894878625869751, + -0.9340435266494751, + -0.8326441645622253 + ] + ], + [ + [ + -0.7870982885360718, + -0.4503393769264221, + -0.876484215259552, + -0.5803323984146118, + -0.33611705899238586 + ] + ] + ], + "output_high": [ + [ + [ + 0.6884467601776123, + 0.997209906578064, + 0.9808353185653687, + 0.721488356590271, + 0.6504592895507812 + ] + ], + [ + [ + 0.8902743458747864, + 0.48583537340164185, + 0.8894878625869751, + 0.9340435266494751, + 0.8326441645622253 + ] + ], + [ + [ + 0.7870982885360718, + 0.4503393769264221, + 0.876484215259552, + 0.5803323984146118, + 0.33611705899238586 + ] + ] + ] + }, + "MatMul_1/fq_input_0": { + "input_low": 0.0, + "input_high": 0.6369616985321045, + "output_low": 0.0, + "output_high": 0.6369616985321045 + }, + "Gather_1/fq_output_0": { + "input_low": 0.0, + "input_high": 0.8132702112197876, + "output_low": 0.0, + "output_high": 0.8132702112197876 + }, + "Gather_1/fq_weights_0": { + "input_low": [ + [ + -0.8132702112197876 + ], + [ + -0.9350724220275879 + ], + [ + -0.8574042916297913 + ], + [ + -0.8631789088249207 + ] + ], + "input_high": [ + [ + 0.8132702112197876 + ], + [ + 0.9350724220275879 + ], + [ + 0.8574042916297913 + ], + [ + 0.8631789088249207 + ] + ], + "output_low": [ + [ + -0.8132702112197876 + ], + [ + -0.9350724220275879 + ], + [ + -0.8574042916297913 + ], + [ + -0.8631789088249207 + ] + ], + "output_high": [ + [ + 0.8132702112197876 + ], + [ + 0.9350724220275879 + ], + [ + 0.8574042916297913 + ], + [ + 0.8631789088249207 + ] + ] + } +} \ No newline at end of file diff --git a/tests/openvino/native/data/reference_scales/UnifiedEmbeddingModel_performance.json b/tests/openvino/native/data/reference_scales/UnifiedEmbeddingModel_performance.json new file mode 100644 index 00000000000..2fe0a1d4ff4 --- /dev/null +++ b/tests/openvino/native/data/reference_scales/UnifiedEmbeddingModel_performance.json @@ -0,0 +1,160 @@ +{ + "MatMul_2/fq_weights_1": { + "input_low": [ + [ + -0.9421131014823914 + ] + ], + "input_high": [ + [ + 0.9421131014823914 + ] + ], + "output_low": [ + [ + -0.9421131014823914 + ] + ], + "output_high": [ + [ + 0.9421131014823914 + ] + ] + }, + "MatMul_1/fq_output_0": { + "input_low": -0.9424352049827576, + "input_high": 0.9350724220275879, + "output_low": -0.9424352049827576, + "output_high": 0.9350724220275879 + }, + "MatMul_1/fq_weights_1": { + "input_low": [ + [ + [ + -0.6884467601776123, + 
-0.997209906578064, + -0.9808353185653687, + -0.721488356590271, + -0.6504592895507812 + ] + ], + [ + [ + -0.8902743458747864, + -0.48583537340164185, + -0.8894878625869751, + -0.9340435266494751, + -0.8326441645622253 + ] + ], + [ + [ + -0.7870982885360718, + -0.4503393769264221, + -0.876484215259552, + -0.5803323984146118, + -0.33611705899238586 + ] + ] + ], + "input_high": [ + [ + [ + 0.6884467601776123, + 0.997209906578064, + 0.9808353185653687, + 0.721488356590271, + 0.6504592895507812 + ] + ], + [ + [ + 0.8902743458747864, + 0.48583537340164185, + 0.8894878625869751, + 0.9340435266494751, + 0.8326441645622253 + ] + ], + [ + [ + 0.7870982885360718, + 0.4503393769264221, + 0.876484215259552, + 0.5803323984146118, + 0.33611705899238586 + ] + ] + ], + "output_low": [ + [ + [ + -0.6884467601776123, + -0.997209906578064, + -0.9808353185653687, + -0.721488356590271, + -0.6504592895507812 + ] + ], + [ + [ + -0.8902743458747864, + -0.48583537340164185, + -0.8894878625869751, + -0.9340435266494751, + -0.8326441645622253 + ] + ], + [ + [ + -0.7870982885360718, + -0.4503393769264221, + -0.876484215259552, + -0.5803323984146118, + -0.33611705899238586 + ] + ] + ], + "output_high": [ + [ + [ + 0.6884467601776123, + 0.997209906578064, + 0.9808353185653687, + 0.721488356590271, + 0.6504592895507812 + ] + ], + [ + [ + 0.8902743458747864, + 0.48583537340164185, + 0.8894878625869751, + 0.9340435266494751, + 0.8326441645622253 + ] + ], + [ + [ + 0.7870982885360718, + 0.4503393769264221, + 0.876484215259552, + 0.5803323984146118, + 0.33611705899238586 + ] + ] + ] + }, + "MatMul_1/fq_input_0": { + "input_low": 0.0, + "input_high": 0.6369616985321045, + "output_low": 0.0, + "output_high": 0.6369616985321045 + }, + "Gather_1/fq_weights_0": { + "input_low": -0.9424352049827576, + "input_high": 0.9350724220275879, + "output_low": -0.9424352049827576, + "output_high": 0.9350724220275879 + } +} \ No newline at end of file diff --git a/tests/openvino/native/data/reference_scales/yolo-v4-tiny-tf_mixed.json b/tests/openvino/native/data/reference_scales/yolo-v4-tiny-tf_mixed.json index d545a83f8a0..e17f1468f36 100644 --- a/tests/openvino/native/data/reference_scales/yolo-v4-tiny-tf_mixed.json +++ b/tests/openvino/native/data/reference_scales/yolo-v4-tiny-tf_mixed.json @@ -1,5 +1,5 @@ { - "Convolution_711/fq_weights_1": { + "Convolution_706/fq_weights_1": { "input_low": [ [ [ @@ -7149,13 +7149,13 @@ ] ] }, - "Transpose_1774/fq_output_0": { - "input_low": -0.3571317195892334, - "input_high": 4.4359517097473145, - "output_low": -0.3571317195892334, - "output_high": 4.4359517097473145 + "Transpose_1757/fq_output_0": { + "input_low": -0.3571314811706543, + "input_high": 4.435948848724365, + "output_low": -0.3571314811706543, + "output_high": 4.435948848724365 }, - "Multiply_3818/fq_weights_1": { + "Multiply_3937/fq_weights_1": { "input_low": [ [ [ @@ -14333,13 +14333,13 @@ ] ] }, - "Transpose_1638/fq_output_0": { - "input_low": -0.7855932116508484, - "input_high": 3.5693256855010986, - "output_low": -0.7855932116508484, - "output_high": 3.5693256855010986 + "Transpose_1629/fq_output_0": { + "input_low": -0.7855930924415588, + "input_high": 3.5693252086639404, + "output_low": -0.7855930924415588, + "output_high": 3.5693252086639404 }, - "Multiply_3762/fq_weights_1": { + "Multiply_3881/fq_weights_1": { "input_low": [ [ [ @@ -21517,13 +21517,13 @@ ] ] }, - "Transpose_1574/fq_output_0": { + "Transpose_1565/fq_output_0": { "input_low": -0.5701775550842285, "input_high": 4.62251091003418, "output_low": -0.5701775550842285, 
"output_high": 4.62251091003418 }, - "Multiply_3734/fq_weights_1": { + "Multiply_3853/fq_weights_1": { "input_low": [ [ [ @@ -25117,13 +25117,13 @@ ] ] }, - "Transpose_1539/fq_output_0": { - "input_low": -0.7855932116508484, - "input_high": 3.5693256855010986, - "output_low": -0.7855932116508484, - "output_high": 3.5693256855010986 + "Transpose_1530/fq_output_0": { + "input_low": -0.7855930924415588, + "input_high": 3.5693252086639404, + "output_low": -0.7855930924415588, + "output_high": 3.5693252086639404 }, - "Multiply_3720/fq_weights_1": { + "Multiply_3839/fq_weights_1": { "input_low": [ [ [ @@ -32301,13 +32301,13 @@ ] ] }, - "Transpose_1505/fq_output_0": { - "input_low": -0.993779182434082, - "input_high": 5.855239391326904, - "output_low": -0.993779182434082, - "output_high": 5.855239391326904 + "Transpose_1496/fq_output_0": { + "input_low": -0.9937790632247925, + "input_high": 5.855238914489746, + "output_low": -0.9937790632247925, + "output_high": 5.855238914489746 }, - "Multiply_3706/fq_weights_1": { + "Multiply_3825/fq_weights_1": { "input_low": [ [ [ @@ -35901,13 +35901,13 @@ ] ] }, - "Transpose_1441/fq_output_0": { - "input_low": -0.7652093768119812, - "input_high": 9.504706382751465, - "output_low": -0.7652093768119812, - "output_high": 9.504706382751465 + "Transpose_1432/fq_output_0": { + "input_low": -0.7652094960212708, + "input_high": 9.504707336425781, + "output_low": -0.7652094960212708, + "output_high": 9.504707336425781 }, - "Multiply_3678/fq_weights_1": { + "Multiply_3797/fq_weights_1": { "input_low": [ [ [ @@ -37709,13 +37709,13 @@ ] ] }, - "Transpose_1406/fq_output_0": { - "input_low": -0.993779182434082, - "input_high": 5.855239391326904, - "output_low": -0.993779182434082, - "output_high": 5.855239391326904 + "Transpose_1397/fq_output_0": { + "input_low": -0.9937790632247925, + "input_high": 5.855238914489746, + "output_low": -0.9937790632247925, + "output_high": 5.855238914489746 }, - "Multiply_3664/fq_weights_1": { + "Multiply_3783/fq_weights_1": { "input_low": [ [ [ @@ -38000,7 +38000,7 @@ [ [ [ - -0.41558733582496643 + -0.41558730602264404 ] ] ], @@ -38898,7 +38898,7 @@ [ [ [ - 0.41558733582496643 + 0.41558730602264404 ] ] ], @@ -39796,7 +39796,7 @@ [ [ [ - -0.41558733582496643 + -0.41558730602264404 ] ] ], @@ -40694,7 +40694,7 @@ [ [ [ - 0.41558733582496643 + 0.41558730602264404 ] ] ], @@ -41309,13 +41309,13 @@ ] ] }, - "Transpose_1372/fq_output_0": { + "Transpose_1363/fq_output_0": { "input_low": -1.2554621696472168, "input_high": 10.178211212158203, "output_low": -1.2554621696472168, "output_high": 10.178211212158203 }, - "Multiply_3650/fq_weights_1": { + "Multiply_3769/fq_weights_1": { "input_low": [ [ [ @@ -43117,13 +43117,13 @@ ] ] }, - "Transpose_1308/fq_output_0": { + "Transpose_1299/fq_output_0": { "input_low": -0.791810154914856, "input_high": 6.170658588409424, "output_low": -0.791810154914856, "output_high": 6.170658588409424 }, - "Multiply_3622/fq_weights_1": { + "Multiply_3741/fq_weights_1": { "input_low": [ [ [ @@ -44029,13 +44029,13 @@ ] ] }, - "Transpose_1273/fq_output_0": { + "Transpose_1264/fq_output_0": { "input_low": -1.2554621696472168, "input_high": 10.178211212158203, "output_low": -1.2554621696472168, "output_high": 10.178211212158203 }, - "Multiply_3608/fq_weights_1": { + "Multiply_3727/fq_weights_1": { "input_low": [ [ [ @@ -45837,13 +45837,13 @@ ] ] }, - "Transpose_1243/fq_output_0": { + "Transpose_1234/fq_output_0": { "input_low": -0.5645939111709595, "input_high": 12.523719787597656, "output_low": -0.5645939111709595, 
"output_high": 12.523719787597656 }, - "Multiply_3594/fq_weights_1": { + "Multiply_3713/fq_weights_1": { "input_low": [ [ [ @@ -47645,13 +47645,13 @@ ] ] }, - "Transpose_1188/fq_output_0": { + "Transpose_1177/fq_output_0": { "input_low": -1.0276108980178833, "input_high": 2.375516176223755, "output_low": -1.0276108980178833, "output_high": 2.375516176223755 }, - "Multiply_3580/fq_weights_1": { + "Multiply_3699/fq_weights_1": { "input_low": [ [ [ @@ -47663,7 +47663,7 @@ [ [ [ - -0.06439225375652313 + -0.06439224630594254 ] ] ], @@ -47705,7 +47705,7 @@ [ [ [ - -0.06539495289325714 + -0.06539496034383774 ] ] ], @@ -47768,7 +47768,7 @@ [ [ [ - -0.0461965873837471 + -0.0461965911090374 ] ] ], @@ -47889,7 +47889,7 @@ [ [ [ - 0.06439225375652313 + 0.06439224630594254 ] ] ], @@ -47931,7 +47931,7 @@ [ [ [ - 0.06539495289325714 + 0.06539496034383774 ] ] ], @@ -47994,7 +47994,7 @@ [ [ [ - 0.0461965873837471 + 0.0461965911090374 ] ] ], @@ -48115,7 +48115,7 @@ [ [ [ - -0.06439225375652313 + -0.06439224630594254 ] ] ], @@ -48157,7 +48157,7 @@ [ [ [ - -0.06539495289325714 + -0.06539496034383774 ] ] ], @@ -48220,7 +48220,7 @@ [ [ [ - -0.0461965873837471 + -0.0461965911090374 ] ] ], @@ -48341,7 +48341,7 @@ [ [ [ - 0.06439225375652313 + 0.06439224630594254 ] ] ], @@ -48383,7 +48383,7 @@ [ [ [ - 0.06539495289325714 + 0.06539496034383774 ] ] ], @@ -48446,7 +48446,7 @@ [ [ [ - 0.0461965873837471 + 0.0461965911090374 ] ] ], @@ -48563,13 +48563,13 @@ "output_low": 0.0, "output_high": 0.9999967813491821 }, - "Transpose_1338/fq_output_0": { + "Transpose_1329/fq_output_0": { "input_low": -0.791810154914856, "input_high": 6.170658588409424, "output_low": -0.791810154914856, "output_high": 6.170658588409424 }, - "Multiply_3636/fq_weights_1": { + "Multiply_3755/fq_weights_1": { "input_low": [ [ [ @@ -49475,13 +49475,13 @@ ] ] }, - "Transpose_1471/fq_output_0": { - "input_low": -0.7652093768119812, - "input_high": 9.504706382751465, - "output_low": -0.7652093768119812, - "output_high": 9.504706382751465 + "Transpose_1462/fq_output_0": { + "input_low": -0.7652094960212708, + "input_high": 9.504707336425781, + "output_low": -0.7652094960212708, + "output_high": 9.504707336425781 }, - "Multiply_3692/fq_weights_1": { + "Multiply_3811/fq_weights_1": { "input_low": [ [ [ @@ -51283,13 +51283,13 @@ ] ] }, - "Transpose_1604/fq_output_0": { + "Transpose_1595/fq_output_0": { "input_low": -0.5701775550842285, "input_high": 4.62251091003418, "output_low": -0.5701775550842285, "output_high": 4.62251091003418 }, - "Multiply_3748/fq_weights_1": { + "Multiply_3867/fq_weights_1": { "input_low": [ [ [ @@ -54883,19 +54883,19 @@ ] ] }, - "Transpose_1740/fq_output_0": { - "input_low": -0.7855932116508484, - "input_high": 3.5693256855010986, - "output_low": -0.7855932116508484, - "output_high": 3.5693256855010986 + "up_sampling2d/resize/ResizeNearestNeighbor/fq_output_0": { + "input_low": -0.7855930924415588, + "input_high": 3.5693252086639404, + "output_low": -0.7855930924415588, + "output_high": 3.5693252086639404 }, - "Transpose_1732/fq_output_0": { - "input_low": -0.49521908164024353, - "input_high": 3.578357219696045, - "output_low": -0.49521908164024353, - "output_high": 3.578357219696045 + "Transpose_1723/fq_output_0": { + "input_low": -0.4952194392681122, + "input_high": 3.578359842300415, + "output_low": -0.4952194392681122, + "output_high": 3.578359842300415 }, - "Multiply_3804/fq_weights_1": { + "Multiply_3923/fq_weights_1": { "input_low": [ [ [ @@ -58489,13 +58489,13 @@ ] ] }, - "Transpose_1702/fq_output_0": { - "input_low": 
-0.6254711151123047, - "input_high": 2.1244449615478516, - "output_low": -0.6254711151123047, - "output_high": 2.1244449615478516 + "Transpose_1693/fq_output_0": { + "input_low": -0.6254710555076599, + "input_high": 2.1244447231292725, + "output_low": -0.6254710555076599, + "output_high": 2.1244447231292725 }, - "Multiply_3790/fq_weights_1": { + "Multiply_3909/fq_weights_1": { "input_low": [ [ [ @@ -58976,7 +58976,7 @@ [ [ [ - -0.5015769600868225 + -0.5015769004821777 ] ] ], @@ -60770,7 +60770,7 @@ [ [ [ - 0.5015769600868225 + 0.5015769004821777 ] ] ], @@ -62564,7 +62564,7 @@ [ [ [ - -0.5015769600868225 + -0.5015769004821777 ] ] ], @@ -64358,7 +64358,7 @@ [ [ [ - 0.5015769600868225 + 0.5015769004821777 ] ] ], @@ -65673,13 +65673,13 @@ ] ] }, - "Transpose_1672/fq_output_0": { - "input_low": -0.5939714908599854, - "input_high": 1.4808604717254639, - "output_low": -0.5939714908599854, - "output_high": 1.4808604717254639 + "Transpose_1663/fq_output_0": { + "input_low": -0.5939716100692749, + "input_high": 1.480860710144043, + "output_low": -0.5939716100692749, + "output_high": 1.480860710144043 }, - "Multiply_3776/fq_weights_1": { + "Multiply_3895/fq_weights_1": { "input_low": [ [ [ @@ -65726,7 +65726,7 @@ [ [ [ - -0.14676715433597565 + -0.14676713943481445 ] ] ], @@ -66153,7 +66153,7 @@ [ [ [ - -0.05792614072561264 + -0.05792613327503204 ] ] ], @@ -69312,7 +69312,7 @@ [ [ [ - 0.14676715433597565 + 0.14676713943481445 ] ] ], @@ -69739,7 +69739,7 @@ [ [ [ - 0.05792614072561264 + 0.05792613327503204 ] ] ], @@ -72898,7 +72898,7 @@ [ [ [ - -0.14676715433597565 + -0.14676713943481445 ] ] ], @@ -73325,7 +73325,7 @@ [ [ [ - -0.05792614072561264 + -0.05792613327503204 ] ] ], @@ -76484,7 +76484,7 @@ [ [ [ - 0.14676715433597565 + 0.14676713943481445 ] ] ], @@ -76911,7 +76911,7 @@ [ [ [ - 0.05792614072561264 + 0.05792613327503204 ] ] ], @@ -80025,7 +80025,7 @@ ] ] }, - "Convolution_754/fq_weights_1": { + "Convolution_749/fq_weights_1": { "input_low": [ [ [ @@ -87175,13 +87175,13 @@ ] ] }, - "Transpose_1810/fq_output_0": { - "input_low": -0.6007987856864929, - "input_high": 4.870761394500732, - "output_low": -0.6007987856864929, - "output_high": 4.870761394500732 + "Transpose_1793/fq_output_0": { + "input_low": -0.6007991433143616, + "input_high": 4.870764255523682, + "output_low": -0.6007991433143616, + "output_high": 4.870764255523682 }, - "Multiply_3832/fq_weights_1": { + "Multiply_3951/fq_weights_1": { "input_low": [ [ [ diff --git a/tests/openvino/native/data/reference_scales/yolo-v4-tiny-tf_performance.json b/tests/openvino/native/data/reference_scales/yolo-v4-tiny-tf_performance.json index 0714ce0a41d..649752db07a 100644 --- a/tests/openvino/native/data/reference_scales/yolo-v4-tiny-tf_performance.json +++ b/tests/openvino/native/data/reference_scales/yolo-v4-tiny-tf_performance.json @@ -1,5 +1,5 @@ { - "Convolution_711/fq_weights_1": { + "Convolution_706/fq_weights_1": { "input_low": [ [ [ @@ -7149,13 +7149,13 @@ ] ] }, - "Transpose_1774/fq_output_0": { - "input_low": -4.470880508422852, - "input_high": 4.4359517097473145, - "output_low": -4.470880508422852, - "output_high": 4.4359517097473145 + "Transpose_1757/fq_output_0": { + "input_low": -4.470877647399902, + "input_high": 4.435948848724365, + "output_low": -4.470877647399902, + "output_high": 4.435948848724365 }, - "Multiply_3818/fq_weights_1": { + "Multiply_3937/fq_weights_1": { "input_low": [ [ [ @@ -14333,13 +14333,13 @@ ] ] }, - "Transpose_1638/fq_output_0": { - "input_low": -3.59743070602417, - "input_high": 3.5693256855010986, - 
"output_low": -3.59743070602417, - "output_high": 3.5693256855010986 + "Transpose_1629/fq_output_0": { + "input_low": -3.5974302291870117, + "input_high": 3.5693252086639404, + "output_low": -3.5974302291870117, + "output_high": 3.5693252086639404 }, - "Multiply_3762/fq_weights_1": { + "Multiply_3881/fq_weights_1": { "input_low": [ [ [ @@ -21517,13 +21517,13 @@ ] ] }, - "Transpose_1574/fq_output_0": { + "Transpose_1565/fq_output_0": { "input_low": -4.5784101486206055, "input_high": 4.5426411628723145, "output_low": -4.5784101486206055, "output_high": 4.5426411628723145 }, - "Multiply_3734/fq_weights_1": { + "Multiply_3853/fq_weights_1": { "input_low": [ [ [ @@ -25117,13 +25117,13 @@ ] ] }, - "Transpose_1539/fq_output_0": { - "input_low": -3.59743070602417, - "input_high": 3.5693256855010986, - "output_low": -3.59743070602417, - "output_high": 3.5693256855010986 + "Transpose_1530/fq_output_0": { + "input_low": -3.5974302291870117, + "input_high": 3.5693252086639404, + "output_low": -3.5974302291870117, + "output_high": 3.5693252086639404 }, - "Multiply_3720/fq_weights_1": { + "Multiply_3839/fq_weights_1": { "input_low": [ [ [ @@ -32301,13 +32301,13 @@ ] ] }, - "Transpose_1505/fq_output_0": { - "input_low": -5.901343822479248, - "input_high": 5.855239391326904, - "output_low": -5.901343822479248, - "output_high": 5.855239391326904 + "Transpose_1496/fq_output_0": { + "input_low": -5.90134334564209, + "input_high": 5.855238914489746, + "output_low": -5.90134334564209, + "output_high": 5.855238914489746 }, - "Multiply_3706/fq_weights_1": { + "Multiply_3825/fq_weights_1": { "input_low": [ [ [ @@ -35901,13 +35901,13 @@ ] ] }, - "Transpose_1441/fq_output_0": { - "input_low": -9.338680267333984, - "input_high": 9.265722274780273, - "output_low": -9.338680267333984, - "output_high": 9.265722274780273 + "Transpose_1432/fq_output_0": { + "input_low": -9.338682174682617, + "input_high": 9.265724182128906, + "output_low": -9.338682174682617, + "output_high": 9.265724182128906 }, - "Multiply_3678/fq_weights_1": { + "Multiply_3797/fq_weights_1": { "input_low": [ [ [ @@ -37709,13 +37709,13 @@ ] ] }, - "Transpose_1406/fq_output_0": { - "input_low": -5.901343822479248, - "input_high": 5.855239391326904, - "output_low": -5.901343822479248, - "output_high": 5.855239391326904 + "Transpose_1397/fq_output_0": { + "input_low": -5.90134334564209, + "input_high": 5.855238914489746, + "output_low": -5.90134334564209, + "output_high": 5.855238914489746 }, - "Multiply_3664/fq_weights_1": { + "Multiply_3783/fq_weights_1": { "input_low": [ [ [ @@ -38000,7 +38000,7 @@ [ [ [ - -0.41558733582496643 + -0.41558730602264404 ] ] ], @@ -38898,7 +38898,7 @@ [ [ [ - 0.41558733582496643 + 0.41558730602264404 ] ] ], @@ -39796,7 +39796,7 @@ [ [ [ - -0.41558733582496643 + -0.41558730602264404 ] ] ], @@ -40694,7 +40694,7 @@ [ [ [ - 0.41558733582496643 + 0.41558730602264404 ] ] ], @@ -41309,13 +41309,13 @@ ] ] }, - "Transpose_1372/fq_output_0": { + "Transpose_1363/fq_output_0": { "input_low": -10.120508193969727, "input_high": 10.041441917419434, "output_low": -10.120508193969727, "output_high": 10.041441917419434 }, - "Multiply_3650/fq_weights_1": { + "Multiply_3769/fq_weights_1": { "input_low": [ [ [ @@ -43117,13 +43117,13 @@ ] ] }, - "Transpose_1308/fq_output_0": { - "input_low": -6.130911350250244, - "input_high": 6.083013534545898, - "output_low": -6.130911350250244, - "output_high": 6.083013534545898 + "Transpose_1299/fq_output_0": { + "input_low": -6.1309123039245605, + "input_high": 6.083014488220215, + "output_low": 
-6.1309123039245605, + "output_high": 6.083014488220215 }, - "Multiply_3622/fq_weights_1": { + "Multiply_3741/fq_weights_1": { "input_low": [ [ [ @@ -44029,13 +44029,13 @@ ] ] }, - "Transpose_1273/fq_output_0": { + "Transpose_1264/fq_output_0": { "input_low": -10.120508193969727, "input_high": 10.041441917419434, "output_low": -10.120508193969727, "output_high": 10.041441917419434 }, - "Multiply_3608/fq_weights_1": { + "Multiply_3727/fq_weights_1": { "input_low": [ [ [ @@ -45837,13 +45837,13 @@ ] ] }, - "Transpose_1243/fq_output_0": { + "Transpose_1234/fq_output_0": { "input_low": -12.622331619262695, "input_high": 12.523719787597656, "output_low": -12.622331619262695, "output_high": 12.523719787597656 }, - "Multiply_3594/fq_weights_1": { + "Multiply_3713/fq_weights_1": { "input_low": [ [ [ @@ -47645,13 +47645,13 @@ ] ] }, - "Transpose_1188/fq_output_0": { + "Transpose_1177/fq_output_0": { "input_low": -2.385321617126465, "input_high": 2.3666863441467285, "output_low": -2.385321617126465, "output_high": 2.3666863441467285 }, - "Multiply_3580/fq_weights_1": { + "Multiply_3699/fq_weights_1": { "input_low": [ [ [ @@ -47663,7 +47663,7 @@ [ [ [ - -0.06439225375652313 + -0.06439224630594254 ] ] ], @@ -47705,7 +47705,7 @@ [ [ [ - -0.06539495289325714 + -0.06539496034383774 ] ] ], @@ -47889,7 +47889,7 @@ [ [ [ - 0.06439225375652313 + 0.06439224630594254 ] ] ], @@ -47994,7 +47994,7 @@ [ [ [ - 0.0461965873837471 + 0.0461965911090374 ] ] ], @@ -48157,7 +48157,7 @@ [ [ [ - -0.06539495289325714 + -0.06539496034383774 ] ] ], @@ -48341,7 +48341,7 @@ [ [ [ - 0.06439225375652313 + 0.06439224630594254 ] ] ], @@ -48446,7 +48446,7 @@ [ [ [ - 0.0461965873837471 + 0.0461965911090374 ] ] ], @@ -48563,13 +48563,13 @@ "output_low": 0.0, "output_high": 0.9999967813491821 }, - "Transpose_1338/fq_output_0": { - "input_low": -6.130911350250244, - "input_high": 6.083013534545898, - "output_low": -6.130911350250244, - "output_high": 6.083013534545898 + "Transpose_1329/fq_output_0": { + "input_low": -6.1309123039245605, + "input_high": 6.083014488220215, + "output_low": -6.1309123039245605, + "output_high": 6.083014488220215 }, - "Multiply_3636/fq_weights_1": { + "Multiply_3755/fq_weights_1": { "input_low": [ [ [ @@ -49475,13 +49475,13 @@ ] ] }, - "Transpose_1471/fq_output_0": { - "input_low": -9.338680267333984, - "input_high": 9.265722274780273, - "output_low": -9.338680267333984, - "output_high": 9.265722274780273 + "Transpose_1462/fq_output_0": { + "input_low": -9.338682174682617, + "input_high": 9.265724182128906, + "output_low": -9.338682174682617, + "output_high": 9.265724182128906 }, - "Multiply_3692/fq_weights_1": { + "Multiply_3811/fq_weights_1": { "input_low": [ [ [ @@ -51283,13 +51283,13 @@ ] ] }, - "Transpose_1604/fq_output_0": { + "Transpose_1595/fq_output_0": { "input_low": -4.5784101486206055, "input_high": 4.5426411628723145, "output_low": -4.5784101486206055, "output_high": 4.5426411628723145 }, - "Multiply_3748/fq_weights_1": { + "Multiply_3867/fq_weights_1": { "input_low": [ [ [ @@ -54883,19 +54883,19 @@ ] ] }, - "Transpose_1740/fq_output_0": { - "input_low": -3.59743070602417, - "input_high": 3.5693256855010986, - "output_low": -3.59743070602417, - "output_high": 3.5693256855010986 + "up_sampling2d/resize/ResizeNearestNeighbor/fq_output_0": { + "input_low": -3.5974302291870117, + "input_high": 3.5693252086639404, + "output_low": -3.5974302291870117, + "output_high": 3.5693252086639404 }, - "Transpose_1732/fq_output_0": { - "input_low": -3.59743070602417, - "input_high": 3.5693256855010986, - 
"output_low": -3.59743070602417, - "output_high": 3.5693256855010986 + "Transpose_1723/fq_output_0": { + "input_low": -3.5974302291870117, + "input_high": 3.5693252086639404, + "output_low": -3.5974302291870117, + "output_high": 3.5693252086639404 }, - "Multiply_3804/fq_weights_1": { + "Multiply_3923/fq_weights_1": { "input_low": [ [ [ @@ -58489,13 +58489,13 @@ ] ] }, - "Transpose_1702/fq_output_0": { - "input_low": -2.133545160293579, - "input_high": 2.1168768405914307, - "output_low": -2.133545160293579, - "output_high": 2.1168768405914307 + "Transpose_1693/fq_output_0": { + "input_low": -2.1335461139678955, + "input_high": 2.116877794265747, + "output_low": -2.1335461139678955, + "output_high": 2.116877794265747 }, - "Multiply_3790/fq_weights_1": { + "Multiply_3909/fq_weights_1": { "input_low": [ [ [ @@ -58976,7 +58976,7 @@ [ [ [ - -0.5015769600868225 + -0.5015769004821777 ] ] ], @@ -62564,7 +62564,7 @@ [ [ [ - -0.5015769600868225 + -0.5015769004821777 ] ] ], @@ -65673,13 +65673,13 @@ ] ] }, - "Transpose_1672/fq_output_0": { - "input_low": -1.492520809173584, - "input_high": 1.4808604717254639, - "output_low": -1.492520809173584, - "output_high": 1.4808604717254639 + "Transpose_1663/fq_output_0": { + "input_low": -1.492521047592163, + "input_high": 1.480860710144043, + "output_low": -1.492521047592163, + "output_high": 1.480860710144043 }, - "Multiply_3776/fq_weights_1": { + "Multiply_3895/fq_weights_1": { "input_low": [ [ [ @@ -65726,7 +65726,7 @@ [ [ [ - -0.14676715433597565 + -0.14676713943481445 ] ] ], @@ -69312,7 +69312,7 @@ [ [ [ - 0.14676715433597565 + 0.14676713943481445 ] ] ], @@ -72898,7 +72898,7 @@ [ [ [ - -0.14676715433597565 + -0.14676713943481445 ] ] ], @@ -76484,7 +76484,7 @@ [ [ [ - 0.14676715433597565 + 0.14676713943481445 ] ] ], @@ -80025,7 +80025,7 @@ ] ] }, - "Convolution_754/fq_weights_1": { + "Convolution_749/fq_weights_1": { "input_low": [ [ [ @@ -87175,13 +87175,13 @@ ] ] }, - "Transpose_1810/fq_output_0": { - "input_low": -4.909113883972168, - "input_high": 4.870761394500732, - "output_low": -4.909113883972168, - "output_high": 4.870761394500732 + "Transpose_1793/fq_output_0": { + "input_low": -4.909116744995117, + "input_high": 4.870764255523682, + "output_low": -4.909116744995117, + "output_high": 4.870764255523682 }, - "Multiply_3832/fq_weights_1": { + "Multiply_3951/fq_weights_1": { "input_low": [ [ [ diff --git a/tests/openvino/native/models.py b/tests/openvino/native/models.py index 83ecfc1dc51..1a7fe58bf2c 100644 --- a/tests/openvino/native/models.py +++ b/tests/openvino/native/models.py @@ -211,9 +211,9 @@ def _create_ov_model(self): conv, kernel_2, output_shape, strides, pads, pads, dilations, name="Conv_backprop" ) - weights_1 = self._rng.random((1, 4)).astype(np.float32) + weights_1 = opset.constant(self._rng.random((1, 4)), dtype=np.float32, name="weights_1") matmul_1 = opset.matmul(conv_tr, weights_1, transpose_a=False, transpose_b=False, name="MatMul_1") - weights_0 = self._rng.random((1, 1)).astype(np.float32) + weights_0 = opset.constant(self._rng.random((1, 1)), dtype=np.float32, name="weights_0") matmul_0 = opset.matmul(weights_0, matmul_1, transpose_a=False, transpose_b=False, name="MatMul_0") matmul = opset.matmul(matmul_0, matmul_1, transpose_a=False, transpose_b=True, name="MatMul") matmul_const = opset.matmul(weights_1, weights_0, transpose_a=True, transpose_b=False, name="MatMul_const") @@ -453,14 +453,14 @@ def _create_ov_model(self): @SYNTHETIC_MODELS.register() class LSTMSequenceModel(OVReferenceModel): def _create_ov_model(self): - x 
= ov.opset9.parameter([1, 2, 16], name="X") - initial_hidden_state = ov.opset9.parameter([1, 1, 128], name="initial_hidden_state") - initial_cell_state = ov.opset9.parameter([1, 1, 128], name="initial_cell_state") - seq_len = ov.opset9.constant(np.array([2]), dtype=np.int32) + x = opset.parameter([1, 2, 16], name="X") + initial_hidden_state = opset.parameter([1, 1, 128], name="initial_hidden_state") + initial_cell_state = opset.parameter([1, 1, 128], name="initial_cell_state") + seq_len = opset.constant(np.array([2]), dtype=np.int32) - W = ov.opset9.constant(np.zeros(([1, 512, 16])), dtype=np.float32) - R = ov.opset9.constant(np.zeros(([1, 512, 128])), dtype=np.float32) - B = ov.opset9.constant(np.zeros(([1, 512])), dtype=np.float32) + W = opset.constant(np.zeros(([1, 512, 16])), dtype=np.float32) + R = opset.constant(np.zeros(([1, 512, 128])), dtype=np.float32) + B = opset.constant(np.zeros(([1, 512])), dtype=np.float32) lstm = opset.lstm_sequence( x, initial_hidden_state, initial_cell_state, seq_len, W, R, B, 128, "FORWARD", name="LSTMSequence" @@ -474,6 +474,40 @@ def _create_ov_model(self): return model +class GRUSequenceModel(OVReferenceModel): + def _create_ov_model(self, linear_before_reset=True): + hidden_size = 128 + + x = opset.parameter([3, 2, 16], name="X") + initial_hidden_state = opset.parameter([3, 1, hidden_size], name="initial_hidden_state") + seq_len = opset.constant(np.array([1, 2, 3]), dtype=np.int32) + + scale_factor = 4 if linear_before_reset else 3 + W = opset.constant(np.zeros(([1, 3 * hidden_size, 16])), dtype=np.float32) + R = opset.constant(np.zeros(([1, 3 * hidden_size, hidden_size])), dtype=np.float32) + B = opset.constant(np.zeros(([1, scale_factor * hidden_size])), dtype=np.float32) + + gru = opset.gru_sequence( + x, + initial_hidden_state, + seq_len, + W, + R, + B, + hidden_size, + direction="FORWARD", + linear_before_reset=linear_before_reset, + name="GRUSequence", + ) + data = self._rng.random((3, 1, hidden_size, 3)).astype(np.float32) + matmul = opset.matmul(gru.output(0), data, transpose_a=False, transpose_b=False, name="MatMul") + + result = opset.result(matmul, name="Result") + result.get_output_tensor(0).set_names(set(["Result"])) + model = ov.Model(results=[result], parameters=[x, initial_hidden_state]) + return model + + class MatmulSoftmaxMatmulBlock(OVReferenceModel): def _create_ov_model(self): input_1 = opset.parameter([1, 1, 1], name="Input") @@ -544,27 +578,27 @@ def _create_ov_model(self, input_name) -> ov.Model: @SYNTHETIC_MODELS.register() class IntegerModel(OVReferenceModel): def _create_ov_model(self): - input_1 = opset.parameter([1, 192, 1], name="Input") + input_1 = opset.parameter([1, 7, 1], name="Input") convert_1 = opset.convert(input_1, destination_type="i64", name="Convert_1") gather_1 = opset.gather(convert_1, 2, axis=0, batch_dims=0) gather_1.set_friendly_name("Gather_1") - gather_2_data = self._rng.random((369, 160)).astype(np.float32) + gather_2_data = opset.constant(self._rng.random((3, 6)), dtype=np.float32, name="gather_2_data") gather_2 = opset.gather(gather_2_data, gather_1, axis=0, batch_dims=0) gather_2.set_friendly_name("Gather_2") gather_3 = opset.gather(gather_2, 2, axis=0, batch_dims=0) gather_3.set_friendly_name("Gather_3") - matmul_1_data = self._rng.random((160, 160)).astype(np.float32) + matmul_1_data = opset.constant(self._rng.random((6, 6)), dtype=np.float32, name="matmul_1_data") matmul_1 = opset.matmul(gather_3, matmul_1_data, transpose_a=False, transpose_b=True, name="MatMul_1") gather_4 = 
opset.gather(input_1, 0, axis=2, batch_dims=0) gather_4.set_friendly_name("Gather_4") - matmul_1_data = self._rng.random((160, 192)).astype(np.float32) - matmul_2 = opset.matmul(gather_4, matmul_1_data, transpose_a=False, transpose_b=True, name="MatMul_2") + matmul_2_data = opset.constant(self._rng.random((6, 7)), dtype=np.float32, name="matmul_2_data") + matmul_2 = opset.matmul(gather_4, matmul_2_data, transpose_a=False, transpose_b=True, name="MatMul_2") add_1 = opset.add(matmul_1, matmul_2, name="Add_1") result = opset.result(add_1, name="Result") @@ -613,3 +647,27 @@ def _create_ov_model(self): result_1 = opset.result(add, name="Result") model = ov.Model([result_1], [input_1]) return model + + +@SYNTHETIC_MODELS.register() +class UnifiedEmbeddingModel(OVReferenceModel): + def _create_ov_model(self): + input_1 = opset.parameter([1, 3], name="Input") + convert_1 = opset.convert(input_1, destination_type="i64", name="Convert_1") + + gather_1_data = opset.constant(self._rng.random((4, 5)), dtype=np.float32, name="gather_1_data") + gather_1 = opset.gather(gather_1_data, convert_1, axis=0, batch_dims=0) + gather_1.set_friendly_name("Gather_1") + + matmul_1_data = opset.constant(self._rng.random((3, 3, 5)), dtype=np.float32, name="matmul_1_data") + matmul_1 = opset.matmul(input_1, matmul_1_data, transpose_a=False, transpose_b=False, name="MatMul_1") + reshape_1 = opset.reshape(matmul_1, [1, 3, 5], special_zero=False, name="Reshape_1") + + concat_1 = opset.concat([gather_1, reshape_1], axis=1) + + matmul_2_data = opset.constant(self._rng.random((1, 5)), dtype=np.float32, name="matmul_2_data") + matmul_2 = opset.matmul(concat_1, matmul_2_data, transpose_a=False, transpose_b=True, name="MatMul_2") + + result = opset.result(matmul_2, name="Result") + model = ov.Model([result], [input_1]) + return model diff --git a/tests/openvino/native/quantization/test_channel_alignment.py b/tests/openvino/native/quantization/test_channel_alignment.py new file mode 100644 index 00000000000..080ec8a883d --- /dev/null +++ b/tests/openvino/native/quantization/test_channel_alignment.py @@ -0,0 +1,107 @@ +# Copyright (c) 2023 Intel Corporation +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from typing import Type + +import pytest + +from nncf.common.graph import NNCFNode +from nncf.common.graph.transformations.commands import TargetType +from nncf.openvino.graph.layer_attributes import OVLayerAttributes +from nncf.openvino.graph.metatypes.openvino_metatypes import OVAddMetatype +from nncf.openvino.graph.metatypes.openvino_metatypes import OVConstantMetatype +from nncf.openvino.graph.metatypes.openvino_metatypes import OVConvolutionMetatype +from nncf.openvino.graph.metatypes.openvino_metatypes import OVGroupConvolutionMetatype +from nncf.openvino.graph.metatypes.openvino_metatypes import OVMatMulMetatype +from nncf.openvino.graph.transformations.command_creation import OVCommandCreator +from nncf.openvino.graph.transformations.commands import OVBiasCorrectionCommand +from nncf.openvino.graph.transformations.commands import OVTargetPoint +from nncf.openvino.graph.transformations.commands import OVWeightUpdateCommand +from nncf.quantization.algorithms.channel_alignment.backend import LayoutDescriptor +from nncf.quantization.algorithms.channel_alignment.openvino_backend import OVChannelAlignmentAlgoBackend +from tests.post_training.test_templates.test_channel_alignment import TemplateTestChannelAlignment + + +def _get_nncf_node(metatype, layer_attrs): + return NNCFNode( + { + NNCFNode.ID_NODE_ATTR: 0, + NNCFNode.NODE_NAME_ATTR: "test", + NNCFNode.METATYPE_ATTR: metatype, + NNCFNode.LAYER_ATTRIBUTES: layer_attrs, + } + ) + + +class TestOVChannelAlignment(TemplateTestChannelAlignment): + def get_backend_cls(self) -> Type[OVChannelAlignmentAlgoBackend]: + return OVChannelAlignmentAlgoBackend + + def target_point(self, target_type: TargetType, target_node_name: str, port_id: int) -> OVTargetPoint: + return OVTargetPoint(target_type, target_node_name, port_id) + + def convert_conv_layer_attrs(self, layer_attributes): + return OVLayerAttributes({}, {1: layer_attributes}) + + def get_conv_metatype(self): + return OVConvolutionMetatype + + def get_add_metatype(self): + return OVAddMetatype + + def get_add_layer_attrs(self): + return OVLayerAttributes({1: 1}, {}) + + def get_constant_metatype(self): + return OVConstantMetatype + + def get_transformation_commands(self): + return OVBiasCorrectionCommand, OVWeightUpdateCommand + + def mock_command_creation_factory(self, mocker) -> None: + mocker.patch("nncf.common.factory.CommandCreatorFactory.create", return_value=OVCommandCreator) + + @pytest.mark.parametrize("transpose", [False, True]) + @pytest.mark.parametrize("shape", [[3, 4], [1, 2, 3, 4]]) + @pytest.mark.parametrize("port_id", [-1, -2]) + def test_get_dims_descriptor_matmul(self, transpose, shape, port_id): + _port_id = len(shape) + port_id + node = _get_nncf_node(OVMatMulMetatype, OVLayerAttributes({_port_id: {"transpose": transpose, "shape": shape}})) + dims_descr = OVChannelAlignmentAlgoBackend.get_dims_descriptor(node) + + in_dims, out_dims = (0, 1) if port_id == -1 else (1, 0) + if len(shape) > 2: + in_dims += 2 + out_dims += 2 + if transpose: + in_dims, out_dims = out_dims, in_dims + + assert dims_descr.conv_weight_in_channels_dim == in_dims + assert dims_descr.conv_weight_out_channels_dim == out_dims + assert dims_descr.bias_channels_dim == OVMatMulMetatype.output_channel_axis + + def test_get_dims_descriptor_mm_no_layer_attrs(self): + node = _get_nncf_node(OVMatMulMetatype, None) + with pytest.raises(RuntimeError): + OVChannelAlignmentAlgoBackend.get_dims_descriptor(node) + + @pytest.mark.parametrize( + "metatype,ref_desc", + [ + (OVConvolutionMetatype, 
LayoutDescriptor(0, 1, 1)), + (OVGroupConvolutionMetatype, LayoutDescriptor(0, 2, 1)), + (OVGroupConvolutionMetatype, LayoutDescriptor(0, 2, 1)), + ], + ) + def test_get_dims_descriptor_convs(self, metatype, ref_desc): + node = _get_nncf_node(metatype, None) + dims_descr = OVChannelAlignmentAlgoBackend.get_dims_descriptor(node) + assert dims_descr.__dict__ == ref_desc.__dict__ diff --git a/tests/openvino/native/quantization/test_fq_configurations.py b/tests/openvino/native/quantization/test_fq_configurations.py index c83c26f682f..6708a33a1d2 100644 --- a/tests/openvino/native/quantization/test_fq_configurations.py +++ b/tests/openvino/native/quantization/test_fq_configurations.py @@ -9,7 +9,7 @@ # See the License for the specific language governing permissions and # limitations under the License. from nncf.openvino.statistics.statistics import OVMinMaxTensorStatistic -from tests.post_training.test_calculate_quantizer_parameters import TemplateTestFQParams +from tests.post_training.test_templates.test_calculate_quantizer_parameters import TemplateTestFQParams class TestFQParams(TemplateTestFQParams): diff --git a/tests/openvino/native/quantization/test_fq_params_calculation.py b/tests/openvino/native/quantization/test_fq_params_calculation.py index c2139e1a557..c28b415ad59 100644 --- a/tests/openvino/native/quantization/test_fq_params_calculation.py +++ b/tests/openvino/native/quantization/test_fq_params_calculation.py @@ -16,6 +16,7 @@ import pytest from nncf.common.quantization.structs import QuantizationPreset +from nncf.openvino.graph.nncf_graph_builder import GraphConverter from nncf.openvino.statistics.aggregator import OVStatisticsAggregator from nncf.quantization.advanced_parameters import OverflowFix from nncf.quantization.algorithms.min_max.algorithm import MinMaxQuantization @@ -56,13 +57,14 @@ def get_fq_nodes_stats_algo(model): # pylint: disable=protected-access def quantize_model(ov_model, q_params): dataset = get_dataset_for_test(ov_model) + graph = GraphConverter.create_nncf_graph(ov_model) min_max_algo = MinMaxQuantization(subset_size=1, **q_params) statistics_aggregator = OVStatisticsAggregator(dataset) - statistic_points = min_max_algo.get_statistic_points(ov_model) + statistic_points = min_max_algo.get_statistic_points(ov_model, graph) statistics_aggregator.register_statistic_points(statistic_points) - statistics_aggregator.collect_statistics(ov_model) - quantized_model = min_max_algo._apply(ov_model, statistics_aggregator.statistic_points) + statistics_aggregator.collect_statistics(ov_model, graph) + quantized_model = min_max_algo.apply(ov_model, graph, statistics_aggregator.statistic_points) return quantized_model @@ -77,7 +79,7 @@ def fixture_inplace_statistics(request): ids=[QuantizationPreset.PERFORMANCE.value, QuantizationPreset.MIXED.value], ) @pytest.mark.parametrize("model_creator_func", SYNTHETIC_MODELS.values()) -def test_syntetic_models_fq_scales(model_creator_func, preset, inplace_statistics): +def test_synthetic_models_fq_scales(model_creator_func, preset, inplace_statistics): model = model_creator_func() quantized_model = quantize_model(model.ov_model, {"preset": preset, "inplace_statistics": inplace_statistics}) nodes = get_fq_nodes_stats_algo(quantized_model) @@ -85,7 +87,7 @@ def test_syntetic_models_fq_scales(model_creator_func, preset, inplace_statistic ref_stats_name = model.ref_graph_name.split(".")[0] + f"_{preset.value}.json" ref_stats_path = REFERENCE_SCALES_DIR / ref_stats_name - # Unkomment lines below to generate reference for new models. 
+ # Uncomment lines below to generate reference for new models. # from tests.shared.helpers import dump_to_json # dump_to_json(ref_stats_path, nodes) @@ -107,7 +109,7 @@ def test_overflow_fix_scales(overflow_fix): ref_stats_name = model.ref_graph_name.split(".")[0] + f"_overflow_fix_{overflow_fix.value}.json" ref_stats_path = REFERENCE_SCALES_DIR / ref_stats_name - # Unkomment lines below to generate reference for new models. + # Uncomment lines below to generate reference for new models. # from tests.shared.helpers import dump_to_json # dump_to_json(ref_stats_path, nodes) @@ -140,7 +142,7 @@ def test_omz_models_fq_scales(model_name, preset, inplace_statistics, tmp_path): ref_stats_name = str(Path(model_path).name).rsplit(".", maxsplit=1)[0] + f"_{preset.value}.json" ref_stats_path = REFERENCE_SCALES_DIR / ref_stats_name - # Unkomment lines below to generate reference for new models. + # Uncomment lines below to generate reference for new models. # from tests.shared.helpers import dump_to_json # dump_to_json(ref_stats_path, nodes) @@ -159,7 +161,7 @@ def test_omz_models_fq_scales(model_name, preset, inplace_statistics, tmp_path): @pytest.mark.parametrize( "model_creator_func, ref_shapes", zip([LinearModel, ConvModel, MatMul2DModel], REF_NODES_SHAPES.values()) ) -def test_syntetic_models_fq_shapes(model_creator_func, ref_shapes, inplace_statistics): +def test_synthetic_models_fq_shapes(model_creator_func, ref_shapes, inplace_statistics): model = model_creator_func() quantized_model = quantize_model( model.ov_model, {"preset": QuantizationPreset.PERFORMANCE, "inplace_statistics": inplace_statistics} diff --git a/tests/openvino/native/quantization/test_graphs.py b/tests/openvino/native/quantization/test_graphs.py index a1b154faea2..b761aa68b68 100644 --- a/tests/openvino/native/quantization/test_graphs.py +++ b/tests/openvino/native/quantization/test_graphs.py @@ -10,18 +10,26 @@ # limitations under the License. 
+from typing import Dict + import openvino.runtime as ov import pytest from nncf.common.quantization.structs import QuantizationPreset +from nncf.openvino.graph.nncf_graph_builder import GraphConverter +from nncf.openvino.statistics.aggregator import OVStatisticsAggregator from nncf.parameters import ModelType +from nncf.parameters import TargetDevice +from nncf.quantization.algorithms.smooth_quant.algorithm import SmoothQuant from tests.openvino.conftest import OPENVINO_NATIVE_TEST_ROOT from tests.openvino.native.common import compare_nncf_graphs from tests.openvino.native.common import dump_model +from tests.openvino.native.common import get_dataset_for_test from tests.openvino.native.models import SYNTHETIC_MODELS from tests.openvino.native.models import DepthwiseConv3DModel from tests.openvino.native.models import DepthwiseConv4DModel from tests.openvino.native.models import DepthwiseConv5DModel +from tests.openvino.native.models import GRUSequenceModel from tests.openvino.native.models import MatmulSoftmaxMatmulBlock from tests.openvino.native.quantization.test_fq_params_calculation import quantize_model from tests.openvino.omz_helpers import convert_model @@ -31,7 +39,7 @@ @pytest.mark.parametrize("model_creator_func", SYNTHETIC_MODELS.values()) -def test_syntetic_models_fq_placement(model_creator_func): +def test_synthetic_models_fq_placement(model_creator_func): model = model_creator_func() quantized_model = quantize_model( model.ov_model, {"preset": QuantizationPreset.PERFORMANCE, "inplace_statistics": True} @@ -57,13 +65,15 @@ def test_depthwise_models_fq_placement(model_creator_func): "mobilenet-v2-pytorch": {"preset": QuantizationPreset.PERFORMANCE}, "mobilenet-v3-small-1.0-224-tf": {"preset": QuantizationPreset.PERFORMANCE}, "resnet-18-pytorch": {"preset": QuantizationPreset.PERFORMANCE}, + "resnet-50-pytorch": {"preset": QuantizationPreset.PERFORMANCE, "target_device": TargetDevice.CPU_SPR}, "yolo-v4-tiny-tf": {"preset": QuantizationPreset.PERFORMANCE}, } -@pytest.mark.parametrize("model_name_params", OMZ_MODELS_QUANTIZE_PARAMS.items()) +@pytest.mark.parametrize("model_name_params", OMZ_MODELS_QUANTIZE_PARAMS.items(), ids=list(OMZ_MODELS_QUANTIZE_PARAMS)) def test_omz_models_fq_placement(model_name_params, tmp_path): model_name, q_params = model_name_params + params_str = "_".join([param.value for param in q_params.values()]) q_params.update({"inplace_statistics": True}) download_model(model_name, tmp_path) convert_model(model_name, tmp_path) @@ -71,9 +81,10 @@ def test_omz_models_fq_placement(model_name_params, tmp_path): model = ov.Core().read_model(model_path) quantized_model = quantize_model(model, q_params) - path_ref_graph = QUANTIZED_REF_GRAPHS_DIR / f"{model_name}.dot" - xml_path = tmp_path / (model_name + ".xml") - bin_path = tmp_path / (model_name + ".bin") + result_name = f"{model_name}_{params_str}" + path_ref_graph = QUANTIZED_REF_GRAPHS_DIR / f"{result_name}.dot" + xml_path = tmp_path / (result_name + ".xml") + bin_path = tmp_path / (result_name + ".bin") dump_model(quantized_model, str(xml_path), str(bin_path)) compare_nncf_graphs(quantized_model, path_ref_graph) @@ -91,3 +102,54 @@ def test_transformer_models_fq_placement(model_creator_func, tmp_path): bin_path = tmp_path / (model.ref_model_name + ".bin") dump_model(quantized_model, str(xml_path), str(bin_path)) compare_nncf_graphs(quantized_model, path_ref_graph) + + +OMZ_MODELS_SQ_PARAMS = { + "swin-tiny-patch4-window7-224": {"preset": QuantizationPreset.PERFORMANCE, "model_type": ModelType.TRANSFORMER} +} + 
+ +@pytest.mark.parametrize("model_name_params", OMZ_MODELS_SQ_PARAMS.items(), ids=list(OMZ_MODELS_SQ_PARAMS)) +def test_omz_models_sq_placement(model_name_params, tmp_path): + model_name, q_params = model_name_params + q_params.update({"inplace_statistics": True}) + download_model(model_name, tmp_path) + convert_model(model_name, tmp_path) + model_path = tmp_path / "public" / model_name / "FP32" / f"{model_name}.xml" + model = ov.Core().read_model(model_path) + + quantized_model = smooth_quant_model(model, q_params, quantize=False) + + path_ref_graph = QUANTIZED_REF_GRAPHS_DIR / f"{model_name}_sq.dot" + xml_path = tmp_path / (model_name + ".xml") + bin_path = tmp_path / (model_name + ".bin") + dump_model(quantized_model, str(xml_path), str(bin_path)) + compare_nncf_graphs(quantized_model, path_ref_graph) + + +# pylint: disable=protected-access +def smooth_quant_model(ov_model: ov.Model, q_params: Dict, quantize=True): + dataset = get_dataset_for_test(ov_model) + graph = GraphConverter.create_nncf_graph(ov_model) + + smooth_quant_algo = SmoothQuant(subset_size=1) + statistics_aggregator = OVStatisticsAggregator(dataset) + statistic_points = smooth_quant_algo.get_statistic_points(ov_model, graph) + statistics_aggregator.register_statistic_points(statistic_points) + statistics_aggregator.collect_statistics(ov_model, graph) + modified_model = smooth_quant_algo.apply(ov_model, graph, statistics_aggregator.statistic_points) + + if quantize: + modified_model = quantize_model(modified_model, q_params) + return modified_model + + +@pytest.mark.parametrize( + "linear_before_reset", [True, False], ids=["linear_before_reset_True", "linear_before_reset_False"] +) +def test_ignore_nodes_by_attribues(linear_before_reset): + model = GRUSequenceModel(**{"linear_before_reset": linear_before_reset}).ov_model + quantized_model = quantize_model(model, {}) + postfix = "T" if linear_before_reset else "F" + path_ref_graph = QUANTIZED_REF_GRAPHS_DIR / f"GRUSequenceModel_linear_before_reset_{postfix}.dot" + compare_nncf_graphs(quantized_model, path_ref_graph) diff --git a/tests/openvino/native/quantization/test_ptq_params.py b/tests/openvino/native/quantization/test_ptq_params.py index 915300a7412..3552915d523 100644 --- a/tests/openvino/native/quantization/test_ptq_params.py +++ b/tests/openvino/native/quantization/test_ptq_params.py @@ -34,8 +34,8 @@ from tests.common.quantization.metatypes import SoftmaxTestMetatype from tests.openvino.native.models import DepthwiseConv4DModel from tests.openvino.native.models import LinearModel -from tests.post_training.models import NNCFGraphToTestMatMul -from tests.post_training.test_ptq_params import TemplateTestPTQParams +from tests.post_training.test_templates.models import NNCFGraphToTestMatMul +from tests.post_training.test_templates.test_ptq_params import TemplateTestPTQParams def get_hw_patterns(device: TargetDevice = TargetDevice.ANY) -> GraphPattern: @@ -91,16 +91,29 @@ def metatypes_mapping(self): @pytest.fixture(scope="session") def test_params(self): + linear_model = LinearModel().ov_model + linear_model_graph = GraphConverter.create_nncf_graph(linear_model) + depthwise_model = DepthwiseConv4DModel().ov_model + depthwise_model_graph = GraphConverter.create_nncf_graph(depthwise_model) + return { - "test_range_estimator_per_tensor": {"model": LinearModel().ov_model, "stat_points_num": 2}, - "test_range_estimator_per_channel": {"model": DepthwiseConv4DModel().ov_model, "stat_points_num": 2}, + "test_range_estimator_per_tensor": { + "model": linear_model, + 
"nncf_graph": linear_model_graph, + "stat_points_num": 2, + }, + "test_range_estimator_per_channel": { + "model": depthwise_model, + "nncf_graph": depthwise_model_graph, + "stat_points_num": 2, + }, "test_quantize_outputs": { - "nncf_graph": GraphConverter.create_nncf_graph(LinearModel().ov_model), + "nncf_graph": linear_model_graph, "hw_patterns": get_hw_patterns(), "ignored_patterns": get_ignored_patterns(), }, "test_ignored_scopes": { - "nncf_graph": GraphConverter.create_nncf_graph(LinearModel().ov_model), + "nncf_graph": linear_model_graph, "hw_patterns": get_hw_patterns(), "ignored_patterns": get_ignored_patterns(), }, @@ -109,6 +122,10 @@ def test_params(self): "hw_patterns": get_hw_patterns(), "ignored_patterns": get_ignored_patterns(), }, + "test_validate_scope": { + "nncf_graph": linear_model_graph, + "ignored_patterns": get_ignored_patterns(), + }, } @pytest.fixture( diff --git a/tests/openvino/native/quantization/test_quantizer_config.py b/tests/openvino/native/quantization/test_quantizer_config.py index 9d8a2faa5e7..45d41644ba4 100644 --- a/tests/openvino/native/quantization/test_quantizer_config.py +++ b/tests/openvino/native/quantization/test_quantizer_config.py @@ -16,15 +16,15 @@ from nncf.experimental.common.tensor_statistics.collectors import MeanAggregator from nncf.experimental.common.tensor_statistics.collectors import MinAggregator from nncf.experimental.common.tensor_statistics.collectors import TensorCollector +from nncf.openvino.graph.layer_attributes import OVLayerAttributes from nncf.openvino.graph.metatypes.openvino_metatypes import OVConvolutionMetatype from nncf.openvino.graph.metatypes.openvino_metatypes import OVDepthwiseConvolutionMetatype from nncf.openvino.graph.metatypes.openvino_metatypes import OVSumMetatype -from nncf.openvino.graph.nncf_graph_builder import OVConstantLayerAttributes from nncf.quantization.algorithms.min_max.openvino_backend import OVMinMaxAlgoBackend -from tests.post_training.models import NNCFGraphToTest -from tests.post_training.models import NNCFGraphToTestDepthwiseConv -from tests.post_training.models import NNCFGraphToTestSumAggregation -from tests.post_training.test_quantizer_config import TemplateTestQuantizerConfig +from tests.post_training.test_templates.models import NNCFGraphToTest +from tests.post_training.test_templates.models import NNCFGraphToTestDepthwiseConv +from tests.post_training.test_templates.models import NNCFGraphToTestSumAggregation +from tests.post_training.test_templates.test_quantizer_config import TemplateTestQuantizerConfig ParamsCls = TemplateTestQuantizerConfig.TestGetStatisticsCollectorParameters @@ -60,16 +60,14 @@ def statistic_collector_parameters(self, request) -> ParamsCls: @pytest.fixture def single_conv_nncf_graph(self) -> NNCFGraphToTest: - conv_layer_attrs = OVConstantLayerAttributes({0: {"name": "dummy", "shape": (4, 4, 4, 4)}}) + conv_layer_attrs = OVLayerAttributes({0: {"name": "dummy", "shape": (4, 4, 4, 4)}}) return NNCFGraphToTest(OVConvolutionMetatype, conv_layer_attrs) @pytest.fixture def depthwise_conv_nncf_graph(self): - return NNCFGraphToTestDepthwiseConv( - OVDepthwiseConvolutionMetatype, conv_layer_attrs=OVConstantLayerAttributes({}) - ) + return NNCFGraphToTestDepthwiseConv(OVDepthwiseConvolutionMetatype, conv_layer_attrs=OVLayerAttributes({})) @pytest.fixture def conv_sum_aggregation_nncf_graph(self) -> NNCFGraphToTestSumAggregation: - conv_layer_attrs = OVConstantLayerAttributes({0: {"name": "dummy", "shape": (4, 4, 4, 4)}}) + conv_layer_attrs = OVLayerAttributes({0: 
{"name": "dummy", "shape": (4, 4, 4, 4)}}) return NNCFGraphToTestSumAggregation(OVConvolutionMetatype, OVSumMetatype, conv_layer_attrs) diff --git a/tests/openvino/native/quantization/test_sanity.py b/tests/openvino/native/quantization/test_sanity.py index 53efb3fd974..2bbe4ad38d5 100644 --- a/tests/openvino/native/quantization/test_sanity.py +++ b/tests/openvino/native/quantization/test_sanity.py @@ -16,8 +16,8 @@ import nncf from nncf.common.quantization.structs import QuantizationPreset -from nncf.common.utils.os import is_windows from nncf.parameters import TargetDevice +from nncf.quantization.advanced_parameters import AdvancedQuantizationParameters from tests.openvino.conftest import AC_CONFIGS_DIR from tests.openvino.datasets_helpers import get_dataset_for_test from tests.openvino.datasets_helpers import get_nncf_dataset_from_ac_config @@ -26,18 +26,27 @@ from tests.openvino.omz_helpers import download_model OMZ_MODELS = [ - ("resnet-18-pytorch", "imagenette2-320", {"accuracy@top1": 0.777, "accuracy@top5": 0.948}), - ("mobilenet-v3-small-1.0-224-tf", "imagenette2-320", {"accuracy@top1": 0.735, "accuracy@top5": 0.916}), - ("googlenet-v3-pytorch", "imagenette2-320", {"accuracy@top1": 0.911, "accuracy@top5": 0.994}), - ("mobilefacedet-v1-mxnet", "wider", {"map": 0.7763171885846742}), - ("retinaface-resnet50-pytorch", "wider", {"map": 0.917961898320335}), + ( + "resnet-18-pytorch", + "imagenette2-320", + {"accuracy@top1": 0.777, "accuracy@top5": 0.948}, + None, + ), + ( + "mobilenet-v3-small-1.0-224-tf", + "imagenette2-320", + {"accuracy@top1": 0.75, "accuracy@top5": 0.916}, + AdvancedQuantizationParameters(disable_channel_alignment=False), + ), + ("googlenet-v3-pytorch", "imagenette2-320", {"accuracy@top1": 0.911, "accuracy@top5": 0.994}, None), + ("retinaface-resnet50-pytorch", "wider", {"map": 0.917961898320335}, None), ] -@pytest.mark.parametrize("model, dataset, ref_metrics", OMZ_MODELS, ids=[model[0] for model in OMZ_MODELS]) -def test_compression(data_dir, tmp_path, model, dataset, ref_metrics): - if is_windows() and model == "mobilefacedet-v1-mxnet": - pytest.xfail("OMZ for Windows has version 1.2.0 pinned that is incompatible with Python 3.8+") +@pytest.mark.parametrize( + "model, dataset, ref_metrics, advanced_params", OMZ_MODELS, ids=[model[0] for model in OMZ_MODELS] +) +def test_compression(data_dir, tmp_path, model, dataset, ref_metrics, advanced_params): extracted_data_dir = os.path.dirname(get_dataset_for_test(dataset, data_dir)) config_path = AC_CONFIGS_DIR / f"{model}.yml" @@ -51,6 +60,9 @@ def test_compression(data_dir, tmp_path, model, dataset, ref_metrics): calibration_dataset = get_nncf_dataset_from_ac_config(model_path, config_path, extracted_data_dir) ov_model = ov.Core().read_model(str(model_path)) + + if advanced_params is None: + advanced_params = AdvancedQuantizationParameters() quantized_model = nncf.quantize( ov_model, calibration_dataset, @@ -58,6 +70,7 @@ def test_compression(data_dir, tmp_path, model, dataset, ref_metrics): TargetDevice.ANY, subset_size=300, fast_bias_correction=True, + advanced_parameters=advanced_params, ) ov.serialize(quantized_model, int8_ir_path) diff --git a/tests/openvino/native/quantization/test_weights_compression.py b/tests/openvino/native/quantization/test_weights_compression.py new file mode 100644 index 00000000000..6f23552d81b --- /dev/null +++ b/tests/openvino/native/quantization/test_weights_compression.py @@ -0,0 +1,88 @@ +# Copyright (c) 2023 Intel Corporation +# Licensed under the Apache License, Version 2.0 (the 
"License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import numpy as np +import openvino.runtime as ov +import pytest + +from nncf.openvino.graph.node_utils import get_const_value +from nncf.quantization import compress_weights +from tests.openvino.native.models import IntegerModel +from tests.openvino.native.models import WeightsModel +from tests.openvino.native.quantization.test_fq_params_calculation import REFERENCE_SCALES_DIR +from tests.shared.helpers import compare_stats +from tests.shared.helpers import load_json + +TEST_MODELS = { + IntegerModel: ["gather_2_data", "matmul_1_data", "matmul_2_data"], + WeightsModel: ["weights_0", "weights_1"], +} + + +@pytest.mark.parametrize("model_creator_func", TEST_MODELS) +def test_compress_weights(model_creator_func): + ref_compressed_weights = TEST_MODELS[model_creator_func] + model = model_creator_func().ov_model + compressed_model = compress_weights(model) + + n_compressed_weights = 0 + for op in compressed_model.get_ops(): + if op.get_type_name() == "Constant" and op.get_friendly_name() in ref_compressed_weights: + assert op.get_element_type() == ov.Type(np.uint8) + n_compressed_weights += 1 + + assert n_compressed_weights == len(ref_compressed_weights) + + +def test_compare_compressed_weights(): + model = IntegerModel().ov_model + compressed_model = compress_weights(model) + + def get_next_node(node): + target_inputs = node.output(0).get_target_inputs() + assert len(target_inputs) == 1 + next_node = next(iter(target_inputs)).get_node() + return next_node + + nodes = {} + ref_compressed_weights = TEST_MODELS[IntegerModel] + for op in compressed_model.get_ops(): + if op.get_type_name() == "Constant" and op.get_friendly_name() in ref_compressed_weights: + assert op.get_element_type() == ov.Type(np.uint8) + compressed_weight = get_const_value(op) + + convert_node = get_next_node(op) + assert convert_node.get_type_name() == "Convert" + + sub_node = get_next_node(convert_node) + assert sub_node.get_type_name() == "Subtract" + zero_point_node = sub_node.input_value(1).get_node() + zero_point = get_const_value(zero_point_node) + + mul_node = get_next_node(sub_node) + assert mul_node.get_type_name() == "Multiply" + scale_node = mul_node.input_value(1).get_node() + scale = get_const_value(scale_node) + + nodes[op.get_friendly_name()] = { + "compressed_weight": compressed_weight, + "zero_point": zero_point, + "scale": scale, + } + + ref_stats_path = REFERENCE_SCALES_DIR / "IntegerModel_compressed_weights.json" + + # from tests.shared.helpers import dump_to_json + # dump_to_json(ref_stats_path, nodes) + + ref_nodes = load_json(ref_stats_path) + params = ["compressed_weight", "zero_point", "scale"] + compare_stats(ref_nodes, nodes, params) diff --git a/tests/openvino/native/test_bias_correction.py b/tests/openvino/native/test_bias_correction.py new file mode 100644 index 00000000000..e884084d2ee --- /dev/null +++ b/tests/openvino/native/test_bias_correction.py @@ -0,0 +1,200 @@ +# Copyright (c) 2023 Intel Corporation +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in 
compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from typing import Dict, List + +import numpy as np +import openvino.runtime as ov +import pytest +import torch + +from nncf.common.factory import NNCFGraphFactory +from nncf.openvino.graph.model_utils import remove_fq_from_inputs +from nncf.openvino.graph.nncf_graph_builder import GraphConverter +from nncf.openvino.graph.node_utils import get_bias_value +from nncf.quantization.algorithms.bias_correction.openvino_backend import OVBiasCorrectionAlgoBackend +from tests.openvino.conftest import OPENVINO_NATIVE_TEST_ROOT +from tests.openvino.native.common import compare_nncf_graphs +from tests.post_training.test_templates.test_bias_correction import TemplateTestBCAlgorithm +from tests.shared.command import Command + + +class TestOVBCAlgorithm(TemplateTestBCAlgorithm): + @staticmethod + def list_to_backend_type(data: List) -> np.ndarray: + return np.array(data) + + @staticmethod + def get_backend() -> OVBiasCorrectionAlgoBackend: + return OVBiasCorrectionAlgoBackend + + @staticmethod + def backend_specific_model(model: torch.nn.Module, tmp_dir: str): + onnx_path = f"{tmp_dir}/model.onnx" + torch.onnx.export(model, torch.rand(model.INPUT_SIZE), onnx_path, opset_version=13, input_names=["input.1"]) + ov_path = f"{tmp_dir}/model.xml" + runner = Command(f"mo -m {onnx_path} -o {tmp_dir} -n model --compress_to_fp16=False") + runner.run() + core = ov.Core() + ov_model = core.read_model(ov_path) + return ov_model + + @staticmethod + def fn_to_type(tensor) -> np.ndarray: + return np.array(tensor) + + @staticmethod + def get_transform_fn() -> callable: + def transform_fn(data_item): + tensor, _ = data_item + return {"input.1": tensor} + + return transform_fn + + @staticmethod + def map_references(ref_biases: Dict) -> Dict[str, List]: + mapping = {f"{name}/WithoutBiases": val for name, val in ref_biases.items()} + return mapping + + @staticmethod + def remove_fq_from_inputs(model: ov.Model) -> ov.Model: + graph = GraphConverter.create_nncf_graph(model) + return remove_fq_from_inputs(model, graph) + + @staticmethod + def get_ref_path(suffix: str) -> str: + return OPENVINO_NATIVE_TEST_ROOT / "data" / "reference_graphs" / "quantized" / "subgraphs" / f"{suffix}.dot" + + @staticmethod + def compare_nncf_graphs(model: ov.Model, ref_path: str) -> None: + return compare_nncf_graphs(model, ref_path) + + @staticmethod + def check_bias(model: ov.Model, ref_biases: Dict) -> None: + nncf_graph = NNCFGraphFactory.create(model) + for ref_name, ref_value in ref_biases.items(): + node = nncf_graph.get_node_by_name(ref_name) + ref_value = np.array(ref_value) + curr_value = get_bias_value(node, nncf_graph, model) + curr_value = curr_value.reshape(ref_value.shape) + assert np.all(np.isclose(curr_value, ref_value, atol=0.0001)), f"{curr_value} != {ref_value}" + + @pytest.mark.parametrize( + "layer_name, ref_data", + ( + ( + "/conv_1/Conv/WithoutBiases", + { + "collected_inputs": {"/conv_1/Conv/WithoutBiases": ("input.1", 0)}, + "subgraph_data": { + "subgraph_input_names": {"/conv_1/Conv/WithoutBiases"}, + "subgraph_output_names": {"/maxpool_1/MaxPool", "/Split"}, + "subgraph_output_ids": 
{("/Split", 0), ("/maxpool_1/MaxPool", 0), ("/Split", 1)}, + }, + }, + ), + ( + "/conv_2/Conv/WithoutBiases", + { + "collected_inputs": { + "/conv_1/Conv/WithoutBiases": ("input.1", 0), + "/conv_2/Conv/WithoutBiases": ("/maxpool_1/MaxPool", 0), + "/conv_4/Conv/WithoutBiases": ("/Split", 0), + "/conv_6/Conv/WithoutBiases": ("/Split", 1), + }, + "subgraph_data": { + "subgraph_input_names": {"/conv_2/Conv/WithoutBiases"}, + "subgraph_output_names": {"/Relu_1"}, + "subgraph_output_ids": {("/Relu_1", 0)}, + }, + }, + ), + ( + "/conv_3/Conv/WithoutBiases", + { + "collected_inputs": { + "/conv_1/Conv/WithoutBiases": ("input.1", 0), + "/conv_2/Conv/WithoutBiases": ("/maxpool_1/MaxPool", 0), + "/conv_3/Conv/WithoutBiases": ("/Relu_1", 0), + "/conv_4/Conv/WithoutBiases": ("/Split", 0), + "/conv_6/Conv/WithoutBiases": ("/Split", 1), + }, + "subgraph_data": { + "subgraph_input_names": {"/conv_1/Conv/WithoutBiases", "/conv_3/Conv/WithoutBiases"}, + "subgraph_output_names": {"/Split"}, + "subgraph_output_ids": {("/Split", 0), ("/Split", 1)}, + }, + }, + ), + ( + "/conv_4/Conv/WithoutBiases", + { + "collected_inputs": { + "/conv_4/Conv/WithoutBiases": ("/Split", 0), + "/conv_6/Conv/WithoutBiases": ("/Split", 1), + }, + "subgraph_data": { + "subgraph_input_names": {"/conv_4/Conv/WithoutBiases"}, + "subgraph_output_names": {"/Relu_2"}, + "subgraph_output_ids": {("/Relu_2", 0)}, + }, + }, + ), + ( + "/conv_6/Conv/WithoutBiases", + { + "collected_inputs": { + "/conv_5/Conv/WithoutBiases": ("/Relu_2", 0), + "/conv_6/Conv/WithoutBiases": ("/Split", 1), + }, + "subgraph_data": { + "subgraph_input_names": {"/conv_5/Conv/WithoutBiases", "/conv_6/Conv/WithoutBiases"}, + "subgraph_output_names": {"/Add_3", "/Concat"}, + "subgraph_output_ids": {("/Add_3", 0), ("/Concat", 0)}, + }, + }, + ), + ( + "/conv_10/Conv/WithoutBiases", + { + "collected_inputs": { + "/conv_8/Conv/WithoutBiases": ("/conv_7/Conv", 0), + "/conv_9/Conv/WithoutBiases": ("/Add_3", 0), + "/conv_10/Conv/WithoutBiases": ("/Concat", 0), + }, + "subgraph_data": { + "subgraph_input_names": { + "/conv_8/Conv/WithoutBiases", + "/conv_9/Conv/WithoutBiases", + "/conv_10/Conv/WithoutBiases", + }, + "subgraph_output_names": {"/Concat_1"}, + "subgraph_output_ids": {("/Concat_1", 0)}, + }, + }, + ), + ( + "/MatMul", + { + "collected_inputs": { + "/MatMul": ("/Reshape", 0), + }, + "subgraph_data": { + "subgraph_input_names": {"/MatMul"}, + "subgraph_output_names": {"/Reshape_1", "/Add_4"}, + "subgraph_output_ids": {("/Reshape_1", 0), ("/Add_4", 0)}, + }, + }, + ), + ), + ) + def test__get_subgraph_data_for_node(self, quantized_test_model, layer_name, ref_data): + return super().test__get_subgraph_data_for_node(quantized_test_model, layer_name, ref_data) diff --git a/tests/openvino/native/test_fast_bias_correction.py b/tests/openvino/native/test_fast_bias_correction.py new file mode 100644 index 00000000000..bf87eba2c07 --- /dev/null +++ b/tests/openvino/native/test_fast_bias_correction.py @@ -0,0 +1,70 @@ +# Copyright (c) 2023 Intel Corporation +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +from typing import List + +import numpy as np +import openvino.runtime as ov +import torch + +from nncf.common.factory import NNCFGraphFactory +from nncf.openvino.graph.node_utils import get_bias_value +from nncf.openvino.graph.node_utils import is_node_with_bias +from nncf.quantization.algorithms.fast_bias_correction.openvino_backend import OVFastBiasCorrectionAlgoBackend +from tests.post_training.test_templates.test_fast_bias_correction import TemplateTestFBCAlgorithm +from tests.shared.command import Command + + +class TestOVFBCAlgorithm(TemplateTestFBCAlgorithm): + @staticmethod + def list_to_backend_type(data: List) -> np.ndarray: + return np.array(data) + + @staticmethod + def get_backend() -> OVFastBiasCorrectionAlgoBackend: + return OVFastBiasCorrectionAlgoBackend + + @staticmethod + def backend_specific_model(model: bool, tmp_dir: str): + onnx_path = f"{tmp_dir}/model.onnx" + torch.onnx.export(model, torch.rand(model.INPUT_SIZE), onnx_path, opset_version=13, input_names=["input.1"]) + ov_path = f"{tmp_dir}/model.xml" + runner = Command(f"mo -m {onnx_path} -o {tmp_dir} -n model --compress_to_fp16=False") + runner.run() + core = ov.Core() + ov_model = core.read_model(ov_path) + return ov_model + + @staticmethod + def fn_to_type(tensor): + return np.array(tensor) + + @staticmethod + def get_transform_fn(): + def transform_fn(data_item): + tensor, _ = data_item + return {"input.1": tensor} + + return transform_fn + + @staticmethod + def check_bias(model: ov.Model, ref_bias: list): + ref_bias = np.array(ref_bias) + nncf_graph = NNCFGraphFactory.create(model) + for node in nncf_graph.get_all_nodes(): + if not is_node_with_bias(node, nncf_graph): + continue + bias_value = get_bias_value(node, nncf_graph, model) + bias_value = bias_value.reshape(ref_bias.shape) + assert np.all(np.isclose(bias_value, ref_bias, atol=0.0001)), f"{bias_value} != {ref_bias}" + + return + raise ValueError("Not found node with bias") diff --git a/tests/openvino/native/test_layer_attributes.py b/tests/openvino/native/test_layer_attributes.py index ee62a6665fa..6e48b437b81 100644 --- a/tests/openvino/native/test_layer_attributes.py +++ b/tests/openvino/native/test_layer_attributes.py @@ -14,8 +14,10 @@ import pytest from openvino.runtime import opset9 as opset +from nncf.common.graph.layer_attributes import ConvolutionLayerAttributes +from nncf.common.graph.layer_attributes import GenericWeightedLayerAttributes +from nncf.openvino.graph.layer_attributes import OVLayerAttributes from nncf.openvino.graph.nncf_graph_builder import GraphConverter -from nncf.openvino.graph.nncf_graph_builder import OVConstantLayerAttributes def get_conv(input_1, node_name, input_shape, kernel=None): @@ -23,18 +25,69 @@ def get_conv(input_1, node_name, input_shape, kernel=None): pads = [0, 0] dilations = [1, 1] if kernel is None: - shape = (input_shape[1], input_shape[1], 1, 1) + shape = (input_shape[1] + 1, input_shape[1], 2, 1) kernel = opset.constant(np.ones(shape), dtype=np.float32, name="Const") return opset.convolution(input_1, kernel, strides, pads, pads, dilations, name=node_name) +def get_group_conv(input_1, node_name, input_shape, kernel=None): + strides = [1, 2] + pads = [0, 1] + dilations = [3, 1] + if kernel is None: + shape = (input_shape[1], input_shape[1], 1, 1, 1) + kernel = opset.constant(np.ones(shape), dtype=np.float32, name="Const") + return opset.group_convolution(input_1, kernel, strides, pads, pads, 
dilations, name=node_name) + + +def get_transpose_conv(input_1, node_name, input_shape, kernel=None): + strides = [1, 1] + pads = [0, 0] + dilations = [1, 1] + if kernel is None: + shape = (input_shape[1], input_shape[1] + 1, 2, 1) + kernel = opset.constant(np.ones(shape), dtype=np.float32, name="Const") + return opset.convolution_backprop_data( + input_1, kernel, strides, pads_begin=pads, pads_end=pads, dilations=dilations, name=node_name + ) + + +def get_transpose_group_conv(input_1, node_name, input_shape, kernel=None): + strides = [1, 2] + pads = [0, 1] + dilations = [3, 1] + if kernel is None: + shape = (input_shape[1], 1, input_shape[1], 1, 1) + kernel = opset.constant(np.ones(shape), dtype=np.float32, name="Const") + return opset.group_convolution_backprop_data( + input_1, kernel, strides, pads_begin=pads, pads_end=pads, dilations=dilations, name=node_name + ) + + def get_convert_conv(input_1, node_name, input_shape): - shape = (input_shape[1], input_shape[1], 1, 1) + shape = (input_shape[1] + 1, input_shape[1], 1, 1) const = opset.constant(np.ones(shape), dtype=np.float64, name="Const") convert = opset.convert(const, np.float32) return get_conv(input_1, node_name, input_shape, convert) +def get_matmul_b(input_1, node_name, input_shape): + return get_matmul(input_1, node_name, input_shape, transpose_b=True) + + +def get_matmul_a(input_1, node_name, input_shape): + return get_matmul(input_1, node_name, input_shape, transpose_a=True) + + +def get_matmul(input_1, node_name, input_shape, transpose_a=False, transpose_b=False): + channel_position = 1 if transpose_a else -1 + data_shape = [input_shape[channel_position], 1] + if transpose_b: + data_shape = data_shape[::-1] + data = opset.constant(np.ones(tuple(data_shape)), dtype=np.float32, name="Const") + return opset.matmul(input_1, data, transpose_a=transpose_a, transpose_b=transpose_b, name=node_name) + + def get_shape_node(input_, op_name, input_shape): return opset.shape_of(input_, name=op_name) @@ -48,15 +101,135 @@ def get_one_layer_model(op_name: str, node_creator, input_shape): @pytest.mark.parametrize( - "node_creator, ref_layer_attrs", + "node_creator, input_shape, ref_layer_attrs", [ - (get_conv, OVConstantLayerAttributes({1: {"name": "Const", "shape": (3, 3, 1, 1)}})), - (get_convert_conv, OVConstantLayerAttributes({1: {"name": "Const", "shape": (3, 3, 1, 1)}})), - (get_shape_node, None), + ( + get_conv, + (1, 3, 3, 3), + OVLayerAttributes( + {1: {"name": "Const", "shape": (4, 3, 2, 1)}}, + { + 1: ConvolutionLayerAttributes( + weight_requires_grad=False, + in_channels=3, + out_channels=4, + kernel_size=(2, 1), + stride=(1, 1), + dilations=[1, 1], + groups=1, + transpose=False, + padding_values=(0, 0, 0, 0), + ), + }, + {}, + ), + ), + ( + get_convert_conv, + (1, 3, 3, 3), + OVLayerAttributes( + {1: {"name": "Const", "shape": (4, 3, 1, 1)}}, + { + 1: ConvolutionLayerAttributes( + weight_requires_grad=False, + in_channels=3, + out_channels=4, + kernel_size=(1, 1), + stride=(1, 1), + dilations=[1, 1], + groups=1, + transpose=False, + padding_values=(0, 0, 0, 0), + ), + }, + {}, + ), + ), + ( + get_group_conv, + (1, 3, 3, 3), + OVLayerAttributes( + {1: {"name": "Const", "shape": (3, 3, 1, 1, 1)}}, + { + 1: ConvolutionLayerAttributes( + weight_requires_grad=False, + in_channels=1, + out_channels=3, + kernel_size=(1, 1), + stride=(1, 2), + dilations=[3, 1], + groups=3, + transpose=False, + padding_values=(0, 1, 0, 1), + ), + }, + {}, + ), + ), + ( + get_transpose_conv, + (1, 3, 3, 3), + OVLayerAttributes( + {1: {"name": "Const", 
"shape": (3, 4, 2, 1)}}, + { + 1: ConvolutionLayerAttributes( + weight_requires_grad=False, + in_channels=3, + out_channels=4, + kernel_size=(2, 1), + stride=(1, 1), + dilations=[1, 1], + groups=1, + transpose=True, + padding_values=(0, 0, 0, 0), + ), + }, + {}, + ), + ), + ( + get_transpose_group_conv, + (1, 3, 3, 3), + OVLayerAttributes( + {1: {"name": "Const", "shape": (3, 1, 3, 1, 1)}}, + { + 1: ConvolutionLayerAttributes( + weight_requires_grad=False, + in_channels=1, + out_channels=3, + kernel_size=(1, 1), + stride=(1, 2), + dilations=[3, 1], + groups=3, + transpose=True, + padding_values=(0, 1, 0, 1), + ), + }, + {}, + ), + ), + (get_shape_node, (1, 3, 3, 3), None), + ( + get_matmul_b, + (1, 3, 4), + OVLayerAttributes( + {1: {"name": "Const", "shape": (1, 4), "transpose": True}}, + {1: GenericWeightedLayerAttributes(False, (1, 4))}, + {"transpose": False}, + ), + ), + ( + get_matmul_a, + (1, 3, 4), + OVLayerAttributes( + {1: {"name": "Const", "shape": (3, 1), "transpose": False}}, + {1: GenericWeightedLayerAttributes(False, (3, 1))}, + {"transpose": True}, + ), + ), ], ) -def test_layer_attributes(node_creator, ref_layer_attrs): - input_shape = [1, 3, 3, 3] +def test_layer_attributes(node_creator, input_shape, ref_layer_attrs): op_name = "test_node" ov_model = get_one_layer_model(op_name, node_creator, input_shape) nncf_graph = GraphConverter.create_nncf_graph(ov_model) diff --git a/tests/openvino/native/test_metatypes.py b/tests/openvino/native/test_metatypes.py index 669c761cc32..761c22c1145 100644 --- a/tests/openvino/native/test_metatypes.py +++ b/tests/openvino/native/test_metatypes.py @@ -87,7 +87,7 @@ def test_determining_weights_port(): for node in nncf_graph.get_all_nodes(): if node.metatype not in ovm.GENERAL_WEIGHT_LAYER_METATYPES: continue - if node.layer_attributes is not None: + if node.layer_attributes and node.layer_attributes.constant_attributes: counter += 1 const_port_ids = node.layer_attributes.get_const_port_ids() assert len(const_port_ids) == 1 diff --git a/tests/openvino/native/test_model_transformer.py b/tests/openvino/native/test_model_transformer.py index 2e0dc2a2857..b0aef3ea011 100644 --- a/tests/openvino/native/test_model_transformer.py +++ b/tests/openvino/native/test_model_transformer.py @@ -30,6 +30,9 @@ from nncf.openvino.graph.transformations.commands import OVBiasCorrectionCommand from nncf.openvino.graph.transformations.commands import OVFQNodeRemovingCommand from nncf.openvino.graph.transformations.commands import OVInplaceFnInsertionCommand +from nncf.openvino.graph.transformations.commands import OVModelExtractionCommand +from nncf.openvino.graph.transformations.commands import OVMultiplyInsertionCommand +from nncf.openvino.graph.transformations.commands import OVNullBiasInsertionCommand from nncf.openvino.graph.transformations.commands import OVOutputInsertionCommand from nncf.openvino.graph.transformations.commands import OVQuantizerInsertionCommand from nncf.openvino.graph.transformations.commands import OVTargetPoint @@ -37,10 +40,12 @@ from tests.openvino.conftest import OPENVINO_NATIVE_TEST_ROOT from tests.openvino.native.common import compare_nncf_graphs from tests.openvino.native.models import ConvModel +from tests.openvino.native.models import ConvNotBiasModel from tests.openvino.native.models import FPModel from tests.openvino.native.models import LinearModel from tests.openvino.native.models import QuantizedModel from tests.openvino.native.models import SimpleSplitModel +from tests.openvino.native.models import WeightsModel from 
tests.openvino.native.models import ZeroRankEltwiseModel REFERENCE_GRAPHS_DIR = OPENVINO_NATIVE_TEST_ROOT / "data" / "reference_graphs" / "original_nncf_graph" @@ -121,7 +126,7 @@ def __str__(self) -> str: "abs_max", None, lambda o, r: get_inplace_max_op(o, r, True), ["Abs", "ReduceMax"], [None, (0, 1, 2, 3)] ), # Batch mean and mean per ch operations - InplaceOpTestCase("batch_mean", None, lambda o, r: get_inplace_batch_mean_op(o), ["ReduceMean"], [(0,)]), + InplaceOpTestCase("batch_mean", None, lambda o, r: get_inplace_batch_mean_op(o), ["ReduceMean"], [0]), InplaceOpTestCase("mean_per_ch", 1, get_inplace_mean_per_ch, ["Reshape", "ReduceMean"], [(1, 3, 16), (0, 2)]), InplaceOpTestCase( "mean_per_ch", @@ -130,7 +135,21 @@ def __str__(self) -> str: ["Transpose", "Reshape", "ReduceMean"], [(0, 2, 1, 3), (1, 4, 12), (0, 2)], ), - InplaceOpTestCase("mean_per_ch", 0, get_inplace_mean_per_ch, ["ReduceMean"], [(0,)], dims="SHORT"), + InplaceOpTestCase( + "mean_per_ch", + 0, + get_inplace_mean_per_ch, + ["ReduceMean"], + [ + 0, + ], + dims="SHORT", + ), + # EmptyCase + InplaceOpTestCase("min", (), get_inplace_min_op, ["ReduceMin"], [()]), + InplaceOpTestCase("mean", (), get_inplace_mean_op, ["ReduceMean"], [()]), + InplaceOpTestCase("max", (), lambda o, r: get_inplace_max_op(o, r, False), ["ReduceMax"], [()]), + InplaceOpTestCase("abs_max", (), lambda o, r: get_inplace_max_op(o, r, True), ["Abs", "ReduceMax"], [None, ()]), ] @@ -159,9 +178,12 @@ def check_inplace_op(target_node, ref_types, ref_vals, inplace_branches_num, out const = get_prev_node(node, 1) if ref_val == []: assert const.get_data().shape == (0,) + elif not isinstance(ref_val, tuple): + assert const.get_data() == ref_val else: res = np.equal(const.get_data(), np.array(ref_val)) assert all(res) + assert const.get_data().shape == np.array(ref_val).shape nodes = get_next_nodes(node, 0) assert len(nodes) == 1 @@ -520,3 +542,107 @@ def infer_model_with_ones(model, shape): for output in ret_val_1.keys(): assert np.allclose(ret_val_1[output], ret_val_2[output]) assert id(transformed_model) != id(model) + + +MODELS_WITH_PARAMETERS = [ + {"model": ConvNotBiasModel().ov_model, "layers": ["Conv"]}, + {"model": WeightsModel().ov_model, "layers": ["Conv", "Conv_backprop"]}, +] + + +@pytest.mark.parametrize("model_with_parameters", MODELS_WITH_PARAMETERS) +def test_null_biases_insertion(model_with_parameters): + model = model_with_parameters["model"] + layers = model_with_parameters["layers"] + + transformed_model = create_transformed_model(model, layers, TargetType.LAYER, OVNullBiasInsertionCommand, port_id=0) + ops_dict = {op.get_friendly_name(): op for op in transformed_model.get_ops()} + + for layer_name in layers: + node = ops_dict[layer_name] + layer_shape = ops_dict[layer_name].shape + bias_dtype = node.get_element_type().to_dtype() + + # We assume that there is only ONE bias after convolution + output_port = node.output(0) + add_with_bias = list(output_port.get_target_inputs())[0].get_node() + assert add_with_bias.get_type_name() == "Add" + + # We assume that the bias inserts only on 1st position for Add layer + bias_node = add_with_bias.input(1).get_source_output().get_node() + assert bias_node.get_type_name() == "Constant" + + assert all(bias_node.get_vector() == np.zeros(layer_shape[1], dtype=bias_dtype)) + + +MODELS_WITH_DATA = [ + {"model": ConvModel(), "input_layers": ["Conv"], "output_layers": ["Conv"]}, + {"model": QuantizedModel(), "input_layers": ["Relu_1", "Transpose"], "output_layers": ["Conv_3", "Add_2"]}, +] + + 
+@pytest.mark.parametrize("model_with_data", MODELS_WITH_DATA) +def test_model_extraction(model_with_data): + model_to_test = model_with_data["model"] + model = model_to_test.ov_model + transformation_layout = TransformationLayout() + command = OVModelExtractionCommand(model_with_data["input_layers"], model_with_data["output_layers"]) + transformation_layout.register(command) + + model_transformer = OVModelTransformer(model) + transformed_model = model_transformer.transform(transformation_layout) + + path_to_dot = REFERENCE_GRAPHS_DIR / f"exctracted_{model_to_test.ref_graph_name}" + compare_nncf_graphs(transformed_model, path_to_dot) + + +MODELS_WITH_PARAMETERS = [ + { + "model": LinearModel().ov_model, + "layers": ["Reshape"], + "destination_node_names": ["MatMul"], + "scale": np.ones((1, 1, 1, 4)), + }, + { + "model": WeightsModel().ov_model, + "layers": ["MatMul_1"], + "destination_node_names": ["MatMul_0"], + "scale": np.ones((1, 1, 1, 1)), + }, +] + + +@pytest.mark.parametrize("model_with_parameters", MODELS_WITH_PARAMETERS) +def test_multiply_insertion(model_with_parameters): + model = model_with_parameters["model"] + layers = model_with_parameters["layers"] + dest_nodes = model_with_parameters["destination_node_names"] + scale = model_with_parameters["scale"] + output_port_id = 0 + + transformed_model = create_transformed_model( + model, + layers, + TargetType.POST_LAYER_OPERATION, + OVMultiplyInsertionCommand, + port_id=output_port_id, + **{"scale_value": scale, "destination_node_names": dest_nodes, "multiply_node_name": "test_name"}, + ) + ops_dict = {op.get_friendly_name(): op for op in transformed_model.get_ops()} + + for dest_node_name in dest_nodes: + dest_node = ops_dict[dest_node_name] + + for dest_input in dest_node.inputs(): + input_node = dest_input.get_source_output().get_node() + if input_node.get_type_name() == "Constant": + continue + scale_node = input_node + + assert scale_node.get_type_name() == "Multiply" + scale_const = scale_node.input(1).get_source_output().get_node() + + assert scale_const.get_type_name() == "Constant" + scale_const_data = scale_const.data + + assert np.all(scale_const_data == scale) diff --git a/tests/openvino/native/test_node_utils.py b/tests/openvino/native/test_node_utils.py index 813e01eef9f..4a2a24872d4 100644 --- a/tests/openvino/native/test_node_utils.py +++ b/tests/openvino/native/test_node_utils.py @@ -13,7 +13,12 @@ import pytest from nncf.common.factory import NNCFGraphFactory +from nncf.common.graph.graph import NNCFNode +from nncf.openvino.graph.layer_attributes import OVLayerAttributes +from nncf.openvino.graph.metatypes.openvino_metatypes import OVMatMulMetatype from nncf.openvino.graph.nncf_graph_builder import GraphConverter +from nncf.openvino.graph.node_utils import get_channel_agnostic_reduction_shape +from nncf.openvino.graph.node_utils import get_weight_channel_axes from nncf.openvino.graph.node_utils import get_weight_value from nncf.openvino.graph.node_utils import is_node_with_bias from tests.openvino.native.models import ConvModel @@ -50,3 +55,53 @@ def test_is_node_with_bias(model_to_create, is_with_bias, node_name): nncf_graph = GraphConverter.create_nncf_graph(model) node = nncf_graph.get_node_by_name(node_name) assert is_node_with_bias(node, nncf_graph) == is_with_bias + + +@pytest.mark.parametrize( + "weights_port_id, transpose, shape, expected_channel_axes", + [ + (0, False, (1,), [0]), + (0, True, (1,), []), + (1, False, (1,), []), + (1, True, (1,), [0]), + (0, False, (1, 1), [0]), + (0, True, (1, 1), [1]), + 
(1, False, (1, 1), [1]), + (1, True, (1, 1), [0]), + (0, False, (1, 1, 1, 1), [0, 1, 2]), + (0, True, (1, 1, 1, 1), [0, 1, 3]), + (1, False, (1, 1, 1, 1), [0, 1, 3]), + (1, True, (1, 1, 1, 1), [0, 1, 2]), + ], +) +def test_get_weight_channel_axes_for_matmul(weights_port_id, transpose, shape, expected_channel_axes): + attributes = { + NNCFNode.ID_NODE_ATTR: 0, + NNCFNode.NODE_NAME_ATTR: "test", + NNCFNode.METATYPE_ATTR: OVMatMulMetatype, + NNCFNode.LAYER_ATTRIBUTES: OVLayerAttributes( + constant_attributes={weights_port_id: {"transpose": transpose, "shape": shape}} + ), + } + node = NNCFNode(attributes) + actual_channel_axes = get_weight_channel_axes(node, weights_port_id) + + assert len(actual_channel_axes) == len(expected_channel_axes) + assert all(a == b for a, b in zip(actual_channel_axes, expected_channel_axes)) + + +@pytest.mark.parametrize( + "shape, channel_axes, ref_reduction_shape", + [ + ((1, 128), [-1], (0,)), + ((1, 256, 1), [-2], (0, 2)), + ((1, 128, 512), [-1], (0, 1)), + ((1, 3, 224, 224), [1], (0, 2, 3)), + ((1, 1, 12, 12), [1], (0, 2, 3)), + ((1, 1, 12, 12), [1, 2], (0, 3)), + ], +) +def test_get_channel_agnostic_reduction_shape(shape, channel_axes, ref_reduction_shape): + reduction_shape = get_channel_agnostic_reduction_shape(channel_axes=channel_axes, shape=shape) + + assert reduction_shape == ref_reduction_shape diff --git a/tests/openvino/native/test_smooth_quant.py b/tests/openvino/native/test_smooth_quant.py new file mode 100644 index 00000000000..32e51f34079 --- /dev/null +++ b/tests/openvino/native/test_smooth_quant.py @@ -0,0 +1,100 @@ +# Copyright (c) 2023 Intel Corporation +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from pathlib import Path +from typing import Callable, Dict + +import numpy as np +import openvino.runtime as ov +import pytest +import torch + +from nncf.openvino.graph.layer_attributes import OVLayerAttributes +from nncf.openvino.graph.metatypes.openvino_metatypes import OVConvolutionMetatype +from nncf.openvino.graph.metatypes.openvino_metatypes import OVMatMulMetatype +from nncf.quantization.algorithms.smooth_quant.openvino_backend import OVSmoothQuantAlgoBackend +from tests.post_training.test_templates.test_smooth_quant import TemplateTestSQAlgorithm +from tests.shared.command import Command + + +class TestOVSQAlgorithm(TemplateTestSQAlgorithm): + @staticmethod + def fn_to_type(tensor) -> np.ndarray: + return np.array(tensor) + + @staticmethod + def get_transform_fn() -> Callable: + def transform_fn(data_item): + tensor, _ = data_item + return {"input.1": tensor} + + return transform_fn + + @staticmethod + def get_backend() -> OVSmoothQuantAlgoBackend: + return OVSmoothQuantAlgoBackend() + + @staticmethod + def backend_specific_model(model: torch.nn.Module, tmp_dir: str) -> ov.Model: + onnx_path = Path(f"{tmp_dir}/model.onnx") + torch.onnx.export(model, torch.rand(model.INPUT_SIZE), onnx_path, opset_version=13, input_names=["input.1"]) + ov_path = Path(f"{tmp_dir}/model.xml") + runner = Command(f"mo -m {onnx_path} -o {tmp_dir} -n model --compress_to_fp16=False") + runner.run() + core = ov.Core() + ov_model = core.read_model(ov_path) + return ov_model + + @staticmethod + def check_scales(model: ov.Model, reference_values: Dict[str, np.ndarray]) -> None: + ops_list = {op.get_friendly_name(): op for op in model.get_ops()} + for ref_name, ref_value in reference_values.items(): + node = ops_list[ref_name] + const_node = node.input(1).get_source_output().get_node() + + assert const_node.get_type_name() == "Constant" + + value = const_node.data + ref_value = np.array(ref_value) + assert value.shape == ref_value.shape + assert np.all(np.isclose(value, ref_value, atol=0.0001)), f"{value} != {ref_value}" + + @pytest.mark.parametrize( + "node_metatype, layer_attributes, port_id, reference_value", + ( + (OVMatMulMetatype, OVLayerAttributes({}, inputs_attributes={"transpose": False}), 0, -1), + (OVMatMulMetatype, OVLayerAttributes({}, inputs_attributes={"transpose": True}), 0, -2), + (OVMatMulMetatype, OVLayerAttributes({}, inputs_attributes={"transpose": False}), 1, -2), + (OVMatMulMetatype, OVLayerAttributes({}, inputs_attributes={"transpose": True}), 1, -1), + (OVMatMulMetatype, OVLayerAttributes({}, inputs_attributes={"transpose": False}), 2, RuntimeError), + (OVConvolutionMetatype, OVLayerAttributes({}, inputs_attributes={}), 0, 1), + ), + ) + def test_get_activation_channel_axis(self, node_metatype, layer_attributes, port_id, reference_value): + return super().test_get_activation_channel_axis(node_metatype, layer_attributes, port_id, reference_value) + + @pytest.mark.parametrize( + "node_metatype, layer_attributes, port_id, reference_value", + ( + (OVMatMulMetatype, OVLayerAttributes({1: {"transpose": False}}), 1, -2), + (OVMatMulMetatype, OVLayerAttributes({1: {"transpose": True}}), 1, -1), + (OVMatMulMetatype, OVLayerAttributes({0: {"transpose": False}}), 0, -1), + (OVMatMulMetatype, OVLayerAttributes({0: {"transpose": True}}), 0, -2), + (OVMatMulMetatype, OVLayerAttributes({1: {"transpose": False}}), 2, RuntimeError), + (OVConvolutionMetatype, OVLayerAttributes({1: {}}), 1, 0), + ), + ) + def test_get_weight_channel_axis(self, node_metatype, layer_attributes, port_id, reference_value): 
+ return super().test_get_weight_channel_axis(node_metatype, layer_attributes, port_id, reference_value) + + @staticmethod + def get_matmul_metatype(): + return OVMatMulMetatype diff --git a/tests/openvino/native/test_statistic_collector.py b/tests/openvino/native/test_statistic_collector.py index c9b325db56b..d4dca76f9c5 100644 --- a/tests/openvino/native/test_statistic_collector.py +++ b/tests/openvino/native/test_statistic_collector.py @@ -9,49 +9,10 @@ # See the License for the specific language governing permissions and # limitations under the License. -import numpy as np - -from nncf.experimental.common.tensor_statistics.collectors import TensorCollector from nncf.openvino.tensor import OVNNCFTensor -from tests.experimental.common.test_statistic_collector import DummyTensorAggregator -from tests.experimental.common.test_statistic_collector import DummyTensorReducer - - -# pylint:disable=protected-access -def test_empty_tensors_register(): - collector = TensorCollector() - reducer = DummyTensorReducer("Dummy") - aggregator = DummyTensorAggregator(5) - collector.register_statistic_branch("A", reducer, aggregator) - input_name = "input_name" - full_inputs = TensorCollector.get_tensor_collector_inputs( - {input_name: OVNNCFTensor(np.array([100]))}, [(hash(reducer), [input_name])] - ) - empty_inputs = TensorCollector.get_tensor_collector_inputs( - {input_name: OVNNCFTensor(np.array([]))}, [(hash(reducer), [input_name])] - ) - - for inputs in [full_inputs, empty_inputs, full_inputs]: - collector.register_inputs(inputs) - assert len(aggregator._container) == 2 - assert aggregator._collected_samples == 2 - +from tests.experimental.common.test_statistic_collector import TemplateTestStatisticCollector -# pylint:disable=protected-access -def test_empty_inplace_tensors_register(): - collector = TensorCollector() - inplace_reducer = DummyTensorReducer("Dummy", True) - aggregator = DummyTensorAggregator(5) - collector.register_statistic_branch("A", inplace_reducer, aggregator) - input_name = "input_name" - full_inputs = TensorCollector.get_tensor_collector_inputs( - {input_name: OVNNCFTensor(np.array([100]))}, [(hash(inplace_reducer), [input_name])] - ) - empty_inputs = TensorCollector.get_tensor_collector_inputs( - {input_name: OVNNCFTensor(np.array([]))}, [(hash(inplace_reducer), [input_name])] - ) - for inputs in [full_inputs, empty_inputs, full_inputs]: - collector.register_inputs(inputs) - assert len(aggregator._container) == 2 - assert aggregator._collected_samples == 2 +class TestOVStatisticCollector(TemplateTestStatisticCollector): + def get_nncf_tensor_cls(self): + return OVNNCFTensor diff --git a/tests/openvino/omz_helpers.py b/tests/openvino/omz_helpers.py index 6b505caf5f5..713f84be5ba 100644 --- a/tests/openvino/omz_helpers.py +++ b/tests/openvino/omz_helpers.py @@ -12,8 +12,6 @@ import csv from typing import List -import numpy as np - from tests.openvino.conftest import OPENVINO_DATASET_DEFINITIONS_PATH from tests.shared.command import Command @@ -72,5 +70,5 @@ def get_metrics(ac_report): for row in reader: metric_name = row["metric_name"] metric_value = row["metric_value"] - metrics[metric_name] = np.float(metric_value) + metrics[metric_name] = float(metric_value) return metrics diff --git a/tests/openvino/pot/quantization/test_parameters.py b/tests/openvino/pot/quantization/test_parameters.py index 5c40c1a9fef..129902dce3e 100644 --- a/tests/openvino/pot/quantization/test_parameters.py +++ b/tests/openvino/pot/quantization/test_parameters.py @@ -14,7 +14,7 @@ from nncf.parameters 
import TargetDevice -@pytest.mark.parametrize("target_device", TargetDevice) +@pytest.mark.parametrize("target_device", [TargetDevice.ANY, TargetDevice.CPU, TargetDevice.CPU_SPR, TargetDevice.GPU]) def test_target_device(target_device): config = {"target_device": target_device.value} assert load_hardware_config(config) diff --git a/tests/openvino/pot/quantization/test_sanity.py b/tests/openvino/pot/quantization/test_sanity.py index 58b34b06647..dc413236d39 100644 --- a/tests/openvino/pot/quantization/test_sanity.py +++ b/tests/openvino/pot/quantization/test_sanity.py @@ -15,7 +15,6 @@ import pytest import nncf -from nncf.common.utils.os import is_windows from nncf.quantization.advanced_parameters import AdvancedQuantizationParameters from tests.openvino.conftest import AC_CONFIGS_DIR from tests.openvino.datasets_helpers import get_dataset_for_test @@ -28,15 +27,12 @@ ("resnet-18-pytorch", "imagenette2-320", {"accuracy@top1": 0.777, "accuracy@top5": 0.949}), ("mobilenet-v3-small-1.0-224-tf", "imagenette2-320", {"accuracy@top1": 0.744, "accuracy@top5": 0.922}), ("googlenet-v3-pytorch", "imagenette2-320", {"accuracy@top1": 0.91, "accuracy@top5": 0.994}), - ("mobilefacedet-v1-mxnet", "wider", {"map": 0.7750216770678978}), ("retinaface-resnet50-pytorch", "wider", {"map": 0.91875950512032}), ] @pytest.mark.parametrize("model, dataset, ref_metrics", OMZ_MODELS, ids=[model[0] for model in OMZ_MODELS]) def test_compression(data_dir, tmp_path, model, dataset, ref_metrics): - if is_windows() and model == "mobilefacedet-v1-mxnet": - pytest.xfail("OMZ for Windows has version 1.2.0 pinned that is incompatible with Python 3.8+") extracted_data_dir = os.path.dirname(get_dataset_for_test(dataset, data_dir)) config_path = AC_CONFIGS_DIR / f"{model}.yml" diff --git a/tests/openvino/pot/test_engine.py b/tests/openvino/pot/test_engine.py index 9af46c9658a..97d952dbde7 100644 --- a/tests/openvino/pot/test_engine.py +++ b/tests/openvino/pot/test_engine.py @@ -44,12 +44,12 @@ def val_func(compiled_model: ov.CompiledModel, dataset: Iterable[Any]) -> float: predictions = compiled_model([inputs])[output] values.append(np.sum(predictions)) - return np.mean(values).item() + return np.mean(values).item(), None def get_expected(model: ov.Model, dataset, use_output: bool = False): compiled_model = ov.Core().compile_model(model, device_name="CPU") - metric = val_func(compiled_model, dataset) + metric, _ = val_func(compiled_model, dataset) per_sample_metrics = [] output = compiled_model.output(0) @@ -57,7 +57,7 @@ def get_expected(model: ov.Model, dataset, use_output: bool = False): if use_output: value = compiled_model([data_item])[output] else: - value = val_func(compiled_model, [data_item]) + value, _ = val_func(compiled_model, [data_item]) per_sample_metrics.append({"sample_id": idx, "result": value}) return per_sample_metrics, metric diff --git a/tests/openvino/requirements.txt b/tests/openvino/requirements.txt index 9ac8bea46ac..07054a24633 100644 --- a/tests/openvino/requirements.txt +++ b/tests/openvino/requirements.txt @@ -1,5 +1,6 @@ pytest virtualenv +pytest-cov pytest-mock>=3.3.1 -openvino-dev[mxnet,onnx,pytorch,tensorflow2]==2023.0.0 +openvino-dev[onnx,pytorch,tensorflow2]==2023.0.1 fastdownload diff --git a/tests/openvino/test_transform_fn.py b/tests/openvino/test_transform_fn.py new file mode 100644 index 00000000000..ca3a22800de --- /dev/null +++ b/tests/openvino/test_transform_fn.py @@ -0,0 +1,76 @@ +# Copyright (c) 2023 Intel Corporation +# Licensed under the Apache License, Version 2.0 (the "License"); 
+# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import numpy as np +import openvino.runtime as ov +import pytest + +import nncf +from nncf.openvino.quantization.backend_parameters import BackendParameters +from nncf.quantization.advanced_parameters import AdvancedQuantizationParameters +from tests.openvino.native.models import ConvModel as ModelWithMultipleInputs +from tests.openvino.native.models import LinearModel as ModelWithSingleInput + +dataset = [ + { + "input_0": np.zeros((1, 3, 4, 2), dtype=np.float32), + "input_1": np.zeros((1, 3, 2, 4), dtype=np.float32), + } +] + + +def single_input_transform_fn(data_item): + return data_item["input_0"] + + +def multiple_inputs_transform_fn(data_item): + return data_item["input_0"], data_item["input_1"] + + +def multiple_inputs_as_dict_transform_fn(data_item): + return { + "Input_1": data_item["input_0"], + "Input_2": data_item["input_1"], + } + + +@pytest.mark.parametrize( + "model,transform_fn,use_pot", + [ + [ModelWithSingleInput(), single_input_transform_fn, False], + [ModelWithSingleInput(), single_input_transform_fn, True], + [ModelWithMultipleInputs(), multiple_inputs_transform_fn, False], + [ModelWithMultipleInputs(), multiple_inputs_transform_fn, True], + [ModelWithMultipleInputs(), multiple_inputs_as_dict_transform_fn, False], + [ModelWithMultipleInputs(), multiple_inputs_as_dict_transform_fn, True], + ], + ids=[ + "single_input_native", + "signle_input_pot", + "multiple_inputs_native", + "multiple_inputs_pot", + "multiple_inputs_as_dict_native", + "multiple_inputs_as_dict_pot", + ], +) +def test_transform_fn(model, transform_fn, use_pot: bool): + # Check the transformation function + compiled_model = ov.compile_model(model.ov_model) + _ = compiled_model(transform_fn(next(iter(dataset)))) + + # Start quantization + params = AdvancedQuantizationParameters( + backend_params={ + BackendParameters.USE_POT: use_pot, + } + ) + calibration_dataset = nncf.Dataset(dataset, transform_fn) + _ = nncf.quantize(model.ov_model, calibration_dataset, advanced_parameters=params) diff --git a/tests/openvino/tools/README.md b/tests/openvino/tools/README.md index dccb6c4149e..c0178738e5b 100644 --- a/tests/openvino/tools/README.md +++ b/tests/openvino/tools/README.md @@ -1,9 +1,12 @@ -## Calibration tool for testing OpenVINO backend using POT config -### How to run +# Calibration tool for testing OpenVINO backend using POT config + +## How to run + The `calibrate.py` supports `pot` and `native` implementation of the OpenVINO backend. The implementation should be specified using `--impl` command line argument. 
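A minimal usage sketch for the `native` implementation, following the same invocation shape as the POT example below; the config and output paths here are placeholders, not values from the patch:

```bash
python calibrate.py \
    --config <path_to_pot_config> \
    --output-dir <output_dir> \
    --impl native
```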
-``` + +```bash python calibrate.py \ ---config \ ---output-dir \ ---impl pot -``` \ No newline at end of file + --config \ + --output-dir \ + --impl pot +``` diff --git a/tests/openvino/tools/calibrate.py b/tests/openvino/tools/calibrate.py index 60b19e0608c..9e20616aacb 100644 --- a/tests/openvino/tools/calibrate.py +++ b/tests/openvino/tools/calibrate.py @@ -12,18 +12,22 @@ import json import multiprocessing import os +import tempfile from argparse import ArgumentParser from collections import OrderedDict from collections import defaultdict from dataclasses import asdict +from dataclasses import dataclass +from dataclasses import replace from enum import Enum from itertools import islice -from typing import Iterable, Optional, TypeVar +from typing import Any, Iterable, List, Optional, TypeVar import numpy as np import openvino.runtime as ov from openvino.runtime import Dimension from openvino.runtime import PartialShape +from openvino.tools import pot from openvino.tools.accuracy_checker.evaluators.quantization_model_evaluator import ModelEvaluator from openvino.tools.accuracy_checker.evaluators.quantization_model_evaluator import create_model_evaluator from openvino.tools.pot.configs.config import Config @@ -32,8 +36,9 @@ from nncf.common.logging.logger import set_log_file from nncf.common.quantization.structs import QuantizationMode from nncf.common.quantization.structs import QuantizationPreset -from nncf.experimental.openvino.quantization.quantize_model import ( - quantize_with_accuracy_control as pot_quantize_with_native_accuracy_control, +from nncf.data.dataset import DataProvider +from nncf.openvino.pot.quantization.quantize_model import ( + quantize_with_accuracy_control_impl as pot_quantize_with_native_accuracy_control, ) from nncf.parameters import DropType from nncf.parameters import ModelType @@ -47,9 +52,29 @@ TModel = TypeVar("TModel") +OVERRIDE_OPTIONS_ALGORITHMS = ["ActivationChannelAlignment", "FastBiasCorrection", "BiasCorrection"] + MAP_POT_NNCF_ALGORITHMS = { - "DefaultQuantization": "quantize", - "AccuracyAwareQuantization": "quantize_with_accuracy_control", + "ActivationChannelAlignment": { + "method": "quantize", + "advanced_parameters": {"disable_channel_alignment": False}, + }, + "FastBiasCorrection": { + "method": "quantize", + "advanced_parameters": {"disable_bias_correction": False}, + "parameters": {"fast_bias_correction": True}, + }, + "BiasCorrection": { + "method": "quantize", + "advanced_parameters": {"disable_bias_correction": False}, + "parameters": {"fast_bias_correction": False}, + }, + "MinMaxQuantization": { + "method": "quantize", + "advanced_parameters": {"disable_bias_correction": True, "disable_channel_alignment": True}, + }, + "DefaultQuantization": {"method": "quantize"}, + "AccuracyAwareQuantization": {"method": "quantize_with_accuracy_control"}, } _default_context = None @@ -82,7 +107,7 @@ def default(self, o): return o.value if isinstance(o, (IgnoredScope, AdvancedQuantizationParameters, AdvancedAccuracyRestorerParameters)): return asdict(o) - raise TypeError(f"Object of type {o.__class__.__name__} " f"is not JSON serializable") + raise TypeError(f"Object of type {o.__class__.__name__} is not JSON serializable") class ACValidationFunction: @@ -90,6 +115,14 @@ class ACValidationFunction: Implementation of a validation function using the Accuracy Checker. 
""" + METRIC_TO_PERSAMPLE_METRIC = { + "coco_orig_precision": "coco_precision", + "coco_orig_keypoints_precision": "coco_precision", + "coco_orig_segm_precision": "coco_segm_precision", + "hit_ratio": "sigmoid_recom_loss", + "ndcg": "sigmoid_recom_loss", + } + def __init__(self, model_evaluator: ModelEvaluator, metric_name: str, requests_number: Optional[int] = None): """ :param model_evaluator: Model Evaluator. @@ -99,7 +132,12 @@ def __init__(self, model_evaluator: ModelEvaluator, metric_name: str, requests_n """ self._model_evaluator = model_evaluator self._metric_name = metric_name + self._persample_metric_name = self.METRIC_TO_PERSAMPLE_METRIC.get(self._metric_name, self._metric_name) + registered_metrics = model_evaluator.get_metrics_attributes() + if self._persample_metric_name not in registered_metrics: + self._model_evaluator.register_metric(self._persample_metric_name) self._requests_number = requests_number + self._values_for_each_item = [] def __call__(self, compiled_model: ov.CompiledModel, indices: Optional[Iterable[int]] = None) -> float: """ @@ -118,6 +156,7 @@ def __call__(self, compiled_model: ov.CompiledModel, indices: Optional[Iterable[ kwargs = { "subset": indices, + "output_callback": self._output_callback, "check_progress": False, "dataset_tag": "", "calculate_metrics": True, @@ -145,7 +184,32 @@ def __call__(self, compiled_model: ov.CompiledModel, indices: Optional[Iterable[ self._model_evaluator.reset() - return metrics[self._metric_name] + values_for_each_item = sorted(self._values_for_each_item, key=lambda x: x["sample_id"]) + values_for_each_item = [x["metric_value"] for x in values_for_each_item] + self._values_for_each_item = [] + + return metrics[self._metric_name], values_for_each_item + + def _output_callback(self, raw_predictions, **kwargs): + if not ("metrics_result" in kwargs and "dataset_indices" in kwargs): + raise RuntimeError( + "Expected `metrics_result`, `dataset_indices` be passed to output_callback inside accuracy checker" + ) + + metrics_result = kwargs["metrics_result"] + if metrics_result is None: + return + + for sample_id, results in metrics_result.items(): + for metric_result in results: + if metric_result.metric_name != self._persample_metric_name: + continue + + sign = 1.0 + if metric_result.direction == "higher-worse": + sign = -1.0 + metric_value = sign * float(np.nanmean(metric_result.result)) + self._values_for_each_item.append({"sample_id": sample_id, "metric_value": metric_value}) @staticmethod def _set_requests_number(params, requests_number): @@ -552,11 +616,11 @@ def map_quantize_with_accuracy_control_parameters(pot_parameters): def map_paramaters(pot_algo_name, nncf_algo_name, pot_parameters): - if pot_algo_name == "DefaultQuantization" and nncf_algo_name == "quantize": + if nncf_algo_name == "quantize": return map_quantization_parameters(pot_parameters) - if pot_algo_name == "AccuracyAwareQuantization" and nncf_algo_name == "quantize_with_accuracy_control": + if nncf_algo_name == "quantize_with_accuracy_control": return map_quantize_with_accuracy_control_parameters(pot_parameters) - raise ValueError(f"Mapping POT {pot_algo_name} parameters to NNCF " f"{nncf_algo_name} parameters is not supported") + raise ValueError(f"Mapping POT {pot_algo_name} parameters to NNCF {nncf_algo_name} parameters is not supported") def get_model_paths(model_config): @@ -572,15 +636,38 @@ def get_accuracy_checker_config(engine_config): def get_nncf_algorithms_config(compression_config): - nncf_algorithms = [] + nncf_algorithms = {} + override_options 
= {} for pot_algo in compression_config.algorithms: - if pot_algo.name not in MAP_POT_NNCF_ALGORITHMS: - raise ValueError(f"Algorithm {pot_algo.name} is not supported.") + pot_algo_name = pot_algo.name + if pot_algo_name not in MAP_POT_NNCF_ALGORITHMS: + raise ValueError(f"Algorithm {pot_algo_name} is not supported.") + + nncf_algo_name = MAP_POT_NNCF_ALGORITHMS[pot_algo_name]["method"] + advanced_parameters = MAP_POT_NNCF_ALGORITHMS[pot_algo_name].get("advanced_parameters", None) + parameters = MAP_POT_NNCF_ALGORITHMS[pot_algo_name].get("parameters", {}) + + if pot_algo_name in OVERRIDE_OPTIONS_ALGORITHMS: + if nncf_algo_name not in override_options: + override_options[nncf_algo_name] = defaultdict(dict) + + override_options[nncf_algo_name]["advanced_parameters"].update(advanced_parameters) + override_options[nncf_algo_name]["parameters"].update(parameters) + continue - nncf_algo_name = MAP_POT_NNCF_ALGORITHMS[pot_algo.name] - nncf_algorithms.append( - {"name": nncf_algo_name, "parameters": map_paramaters(pot_algo.name, nncf_algo_name, pot_algo.params)} + nncf_algo_parameters = map_paramaters(pot_algo_name, nncf_algo_name, pot_algo.params) + + if advanced_parameters is not None: + nncf_algo_parameters["advanced_parameters"] = replace( + nncf_algo_parameters["advanced_parameters"], **advanced_parameters + ) + nncf_algorithms[nncf_algo_name] = nncf_algo_parameters + + for override_algo_name, override_values in override_options.items(): + nncf_algorithms[override_algo_name]["advanced_parameters"] = replace( + nncf_algorithms[override_algo_name]["advanced_parameters"], **override_values["advanced_parameters"] ) + nncf_algorithms[override_algo_name].update(override_values["parameters"]) return nncf_algorithms @@ -602,7 +689,10 @@ def maybe_reshape_model(model, dataset, subset_size, input_to_tensor_name): model_inputs_shapes = {} for input_output in model.inputs: input_node = input_output.get_node() - model_inputs_shapes[input_to_tensor_name[input_node.friendly_name]] = tuple(input_node.partial_shape) + partial_shape = [] + for dim in input_node.partial_shape: + partial_shape.append(Dimension(str(dim))) + model_inputs_shapes[input_to_tensor_name[input_node.friendly_name]] = tuple(partial_shape) if len(dataset_inputs_shapes) != len(model_inputs_shapes): raise RuntimeError( @@ -633,13 +723,13 @@ def maybe_reshape_model(model, dataset, subset_size, input_to_tensor_name): dynamic_dims[name].append(idx) if not any(any(dict_.values()) for dict_ in [dynamic_dims, reshaped_static_dims]): - return model + return model, model_inputs_shapes partial_shapes = {} - for name, shape in model_inputs_shapes.items(): + for name, partial_shape in model_inputs_shapes.items(): dataset_first_shape = dataset_inputs_shapes[name].pop() dims = [] - for idx, d in enumerate(shape): + for idx, d in enumerate(partial_shape): if idx in dynamic_dims[name]: dim = Dimension(-1) elif idx in reshaped_static_dims[name]: @@ -654,10 +744,88 @@ def maybe_reshape_model(model, dataset, subset_size, input_to_tensor_name): dims.append(dim) partial_shapes[name] = PartialShape(dims) model.reshape(partial_shapes) - return model + return model, model_inputs_shapes # pylint: disable=protected-access +def get_transform_fn(model_evaluator: ModelEvaluator, ov_model): + if model_evaluator.launcher._lstm_inputs: + compiled_original_model = ov.Core().compile_model(ov_model) + model_outputs = None + + def transform_fn(data_item: ACDattasetWrapper.DataItem): + model_inputs = data_item.data + nonlocal model_outputs + state_inputs = 
model_evaluator.launcher._fill_lstm_inputs(model_outputs) + model_inputs.update(state_inputs) + if data_item.status == ACDattasetWrapper.Status.SEQUENCE_IS_GOING: + model_outputs = compiled_original_model(model_inputs) + else: + model_outputs = None + return model_inputs + + else: + + def transform_fn(data_item: ACDattasetWrapper.DataItem): + return data_item.data + + return transform_fn + + +def get_dataset(model_evaluator, quantization_parameters): + dataset = ACDattasetWrapper(model_evaluator) + sequence_subset_size = quantization_parameters.get("subset_size", 300) + subset_size = dataset.calculate_per_sample_subset_size(sequence_subset_size) + if subset_size != sequence_subset_size: + print(f"Subset size is changed from {sequence_subset_size} to {subset_size}") + print(f"Total dataset size: {len(dataset)}") + quantization_parameters["subset_size"] = subset_size + return dataset + + +class ACDattasetWrapper: + """ + Iters through all items in sequences of model_evaluator dataset and + returns DataItem with one of the status: sequence is going or end of sequence. + """ + + class Status(Enum): + END_OF_SEQUENCE = "END_OF_SEQUENCE" + SEQUENCE_IS_GOING = "SEQUENCE_IS_GOING" + + @dataclass + class DataItem: + data: Any + status: "ACDattasetWrapper.Status" + + def __init__(self, model_evaluator): + self.model_evaluator = model_evaluator + + def __iter__(self): + for sequence in self.model_evaluator.dataset: + _, batch_annotation, batch_input, _ = sequence + filled_inputs, _, _ = self.model_evaluator._get_batch_input(batch_input, batch_annotation) + for idx, filled_input in enumerate(filled_inputs): + input_data = {} + for name, value in filled_input.items(): + input_data[self.model_evaluator.launcher.input_to_tensor_name[name]] = value + status = self.Status.SEQUENCE_IS_GOING + if idx == len(filled_inputs) - 1: + status = self.Status.END_OF_SEQUENCE + yield self.DataItem(input_data, status) + + def __len__(self): + return len(self.model_evaluator.dataset) + + def calculate_per_sample_subset_size(self, sequence_subset_size): + subset_size = 0 + for data_item in islice(self.model_evaluator.dataset, sequence_subset_size): + _, batch_annotation, batch_input, _ = data_item + filled_inputs, _, _ = self.model_evaluator._get_batch_input(batch_input, batch_annotation) + subset_size += len(filled_inputs) + return subset_size + + def quantize_model(xml_path, bin_path, accuracy_checker_config, quantization_impl, quantization_parameters): ov_model = ov.Core().read_model(model=xml_path, weights=bin_path) model_evaluator = create_model_evaluator(accuracy_checker_config) @@ -673,18 +841,13 @@ def quantize_model(xml_path, bin_path, accuracy_checker_config, quantization_imp raise NotImplementedError() quantization_parameters["advanced_parameters"] = advanced_parameters - def transform_fn(data_item): - _, batch_annotation, batch_input, _ = data_item - filled_inputs, _, _ = model_evaluator._get_batch_input(batch_input, batch_annotation) - input_data = {} - for name, value in filled_inputs[0].items(): - input_data[model_evaluator.launcher.input_to_tensor_name[name]] = value - return input_data - - calibration_dataset = nncf.Dataset(model_evaluator.dataset, transform_fn) + transform_fn = get_transform_fn(model_evaluator, ov_model) + dataset = get_dataset(model_evaluator, quantization_parameters) + calibration_dataset = nncf.Dataset(dataset, transform_fn) + original_model_shapes = None if get_allow_reshape_input(accuracy_checker_config): - ov_model = maybe_reshape_model( + ov_model, original_model_shapes = 
maybe_reshape_model( ov_model, calibration_dataset, quantization_parameters.get("subset_size", 300), @@ -693,28 +856,55 @@ def transform_fn(data_item): model_evaluator.load_network([{"model": ov_model}]) quantized_model = nncf.quantize(ov_model, calibration_dataset, **quantization_parameters) + if original_model_shapes is not None: + quantized_model.reshape(original_model_shapes) + return quantized_model -# pylint: disable=protected-access +class ACDataset: + def __init__(self, model_evaluator, transform_func): + self._model_evaluator = model_evaluator + self._indices = list(range(model_evaluator.dataset.full_size)) + self._transform_func = transform_func + + def get_data(self, indices: Optional[List[int]] = None): + return DataProvider(self._indices, None, indices) + + def get_inference_data(self, indices: Optional[List[int]] = None): + return DataProvider(ACDattasetWrapper(self._model_evaluator), self._transform_func, indices) + + +def initialize_model_and_evaluator(xml_path: str, bin_path: str, accuracy_checker_config, quantization_impl: str): + model_evaluator = create_model_evaluator(accuracy_checker_config) + + with tempfile.TemporaryDirectory(dir=tempfile.gettempdir()) as tmp_dir: + if quantization_impl == "pot": + pot_model = pot.load_model({"model_name": "model", "model": xml_path, "weights": bin_path}, "CPU") + paths = pot.save_model(pot_model, save_path=tmp_dir, model_name="model") + xml_path, bin_path = paths[0]["model"], paths[0]["weights"] + + model = ov.Core().read_model(xml_path, bin_path) + model_evaluator.load_network_from_ir([{"model": xml_path, "weights": bin_path}]) + model_evaluator.select_dataset("") + return model, model_evaluator + + def quantize_model_with_accuracy_control( xml_path: str, bin_path: str, accuracy_checker_config, quantization_impl: str, quantization_parameters ): - ov_model = ov.Core().read_model(xml_path, bin_path) - model_evaluator = create_model_evaluator(accuracy_checker_config) - model_evaluator.load_network_from_ir([{"model": xml_path, "weights": bin_path}]) - model_evaluator.select_dataset("") - - def transform_fn(data_item): - _, batch_annotation, batch_input, _ = data_item - filled_inputs, _, _ = model_evaluator._get_batch_input(batch_input, batch_annotation) - return filled_inputs[0] + ov_model, model_evaluator = initialize_model_and_evaluator( + xml_path, bin_path, accuracy_checker_config, quantization_impl + ) - calibration_dataset = nncf.Dataset(model_evaluator.dataset, transform_fn) - validation_dataset = nncf.Dataset(list(range(model_evaluator.dataset.full_size))) + transform_fn = get_transform_fn(model_evaluator, ov_model) + dataset = get_dataset(model_evaluator, quantization_parameters) + calibration_dataset = nncf.Dataset(dataset, transform_fn) + validation_dataset = ACDataset(model_evaluator, transform_fn) + original_model_shapes = None if get_allow_reshape_input(accuracy_checker_config): - ov_model = maybe_reshape_model( + ov_model, original_model_shapes = maybe_reshape_model( ov_model, calibration_dataset, quantization_parameters.get("subset_size", 300), @@ -735,8 +925,12 @@ def transform_fn(data_item): advanced_parameters = quantization_parameters.get( "advanced_quantization_parameters", AdvancedQuantizationParameters() ) - if quantization_impl == "native": + if quantization_impl == "pot": + advanced_parameters.backend_params["use_pot"] = True + elif quantization_impl == "native": advanced_parameters.backend_params["use_pot"] = False + else: + raise NotImplementedError() 
quantization_parameters["advanced_quantization_parameters"] = advanced_parameters quantization_impl_fn = name_to_quantization_impl_map.get(quantization_impl) @@ -747,6 +941,8 @@ def transform_fn(data_item): else: raise NotImplementedError(f"Unsupported implementation: {quantization_impl}") + if original_model_shapes is not None: + quantized_model.reshape(original_model_shapes) return quantized_model @@ -767,8 +963,7 @@ def main(): "quantize": quantize_model, "quantize_with_accuracy_control": quantize_model_with_accuracy_control, } - for algo_config in nncf_algorithms_config: - algo_name = algo_config["name"] + for algo_name, algo_config in nncf_algorithms_config.items(): algo_fn = algo_name_to_method_map.get(algo_name, None) if algo_fn: quantize_model_arguments = { @@ -776,7 +971,7 @@ def main(): "bin_path": bin_path, "accuracy_checker_config": accuracy_checker_config, "quantization_impl": args.impl, - "quantization_parameters": algo_config["parameters"], + "quantization_parameters": algo_config, } output_model = algo_fn(**quantize_model_arguments) diff --git a/tests/post_training/README.md b/tests/post_training/README.md index 76cc22b9d6e..f42fc9271d8 100644 --- a/tests/post_training/README.md +++ b/tests/post_training/README.md @@ -1,5 +1,7 @@ # Post-training Quantization Conformance Suite + This is the test suite that takes PyTorch Timm models and runs post-training quantization on ImageNet dataset for the following three representations: + - PyTorch - ONNX - OpenVINO @@ -7,26 +9,31 @@ This is the test suite that takes PyTorch Timm models and runs post-training qua The outcome of each quantization step is accuracy and performance with OpenVINO. The source representation is converted to OpenVINO IR at this step. ## Installation -``` + +```bash pip install -r requirements.txt -python setup.py install ``` ## Data preparation + +## Imagenet + +/imagenet/val - name of path Since Torchvision `ImageFolder` class is used to work with data the ImageNet validation dataset should be structured accordingly. Below is an example of the `val` folder: -``` + +```text n01440764 n01695060 -n01843383 +n01843383 ... ``` ## Usage -Once the evnironment is installed use the following command to run the test: -``` + +Once the environment is installed use the following command to run the test: + +```bash NUM_VAL_THREADS=8 pytest --data= --output=./tmp tests/post_training/test_quantize_conformance.py ``` `NUM_VAL_THREADS` environment variable controls the number of parallel streams when validating the model. - - diff --git a/tests/post_training/conftest.py b/tests/post_training/conftest.py index 618ab460581..7bf887d1c38 100644 --- a/tests/post_training/conftest.py +++ b/tests/post_training/conftest.py @@ -9,219 +9,28 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from abc import abstractclassmethod -from dataclasses import dataclass -from enum import Enum +import collections from pathlib import Path -from typing import Callable, Dict, Optional -import numpy as np +import pandas as pd import pytest from tests.shared.paths import TEST_ROOT -NOT_AVAILABLE_MESSAGE = "N/A" +PTQ_TEST_ROOT = TEST_ROOT / "post_training" +FQ_CALCULATED_PARAMETERS_PATH = PTQ_TEST_ROOT / "data" / "fq_params" / "fq_params.json" def pytest_addoption(parser): parser.addoption("--data", action="store") parser.addoption("--output", action="store", default="./tmp/") - parser.addoption("--backends", action="store", default="TORCH,TORCH_PTQ,ONNX,OV_NATIVE,OV") - parser.addoption( - "--eval_fp32", - action="store_true", - help="Evaluation fp32 model, by defaults used cached metric.", - ) - parser.addoption( - "--skip_bench", - action="store_true", - help="Skip the collection of performance statistics.", - ) def pytest_configure(config): config.test_results = {} -class PipelineType(Enum): - FP32 = "FP32" - TORCH = "Torch INT8" - TORCH_PTQ = "Torch PTQ INT8" - ONNX = "ONNX INT8" - OV_NATIVE = "OV Native INT8" - OV = "Openvino INT8" - - -@dataclass -class RunInfo: - top_1: Optional[float] - fps: Optional[float] - status: str = None - - -class TableColumn: - @classmethod - @abstractclassmethod - def name(cls) -> str: - """ - Name of the column. - - :returns: Name of the column. - """ - - @classmethod - @abstractclassmethod - def accept_pipeline_type(cls, pipeline_type: PipelineType) -> bool: - """ - Is statistic applicable for given pipeline type. - - :param pipeline_type: Given pipeline type. - :returns: Either given pipeline type applicable or not. - """ - - @classmethod - @abstractclassmethod - def get_value(cls, info: Dict[PipelineType, RunInfo], target_pipeline_type: PipelineType) -> str: - """ - Method describes how to retrieve column info out of RunInfo. - - :param info: Runinfo to retrieve column info. - :param target_pipeline_type: Target type of the pipeline. - :returns: Column info. - """ - - @staticmethod - def assign_default_value(func: Callable): - """ - Return '-' for pipeline types that does not runs. - """ - - def wrapped_get_value(cls, info: Dict[PipelineType, RunInfo], target_pipeline_type: PipelineType): - if target_pipeline_type not in info: - return "-" - return func(cls, info, target_pipeline_type) - - return wrapped_get_value - - @staticmethod - def na_msg(func: Callable): - """ - Replace return value of function from None to NOT_AVAILABLE_MESSAGE. 
- """ - - def wrapped_na_msg(*args, **kwargs): - result = func(*args, **kwargs) - if result is None: - return NOT_AVAILABLE_MESSAGE - return result - - return wrapped_na_msg - - -class Top1Column(TableColumn): - @classmethod - def name(cls): - return "top 1" - - @classmethod - def accept_pipeline_type(cls, pipeline_type: PipelineType) -> bool: - return True - - @classmethod - @TableColumn.assign_default_value - @TableColumn.na_msg - def get_value(cls, info: Dict[PipelineType, RunInfo], target_pipeline_type: PipelineType) -> str: - return info[target_pipeline_type].top_1 - - -class FPSColumn(TableColumn): - @classmethod - def name(cls): - return "FPS" - - @classmethod - def accept_pipeline_type(cls, pipeline_type: PipelineType) -> bool: - return True - - @classmethod - @TableColumn.assign_default_value - @TableColumn.na_msg - def get_value(cls, info: Dict[PipelineType, RunInfo], target_pipeline_type: PipelineType) -> str: - return info[target_pipeline_type].fps - - -class Top1DiffColumn(TableColumn): - @classmethod - def name(cls): - return "top 1 diff" - - @classmethod - def accept_pipeline_type(cls, pipeline_type: PipelineType) -> bool: - return pipeline_type != PipelineType.FP32 - - @classmethod - @TableColumn.assign_default_value - @TableColumn.na_msg - def get_value(cls, info: Dict[PipelineType, RunInfo], target_pipeline_type: PipelineType) -> str: - if info[target_pipeline_type].top_1 is None or info[PipelineType.FP32].top_1 is None: - return None - return info[PipelineType.FP32].top_1 - info[target_pipeline_type].top_1 - - -class FPSSpeedupColumn(TableColumn): - @classmethod - def name(cls): - return "FPS speedup" - - @classmethod - def accept_pipeline_type(cls, pipeline_type: PipelineType) -> bool: - return pipeline_type != PipelineType.FP32 - - @classmethod - @TableColumn.assign_default_value - @TableColumn.na_msg - def get_value(cls, info: Dict[PipelineType, RunInfo], target_pipeline_type: PipelineType) -> str: - if info[target_pipeline_type].fps is None or info[PipelineType.FP32].fps is None: - return None - if info[PipelineType.FP32].fps > 1e-5: - return info[target_pipeline_type].fps / info[PipelineType.FP32].fps - return None - - -class StatusColumn(TableColumn): - @classmethod - def name(cls): - return "Status" - - @classmethod - def accept_pipeline_type(cls, pipeline_type: PipelineType) -> bool: - return True - - @classmethod - def get_value(cls, info: Dict[PipelineType, RunInfo], target_pipeline_type: PipelineType) -> str: - status = [] - for pipeline_type in PipelineType: - if pipeline_type in info: - stat = info[pipeline_type].status - if stat is not None: - status.append(stat) - - return ",".join(status) - - -@pytest.fixture -def backends_list(request): - return request.config.getoption("--backends") - - -@pytest.fixture -def eval_fp32(request): - return request.config.getoption("--eval_fp32") - - -@pytest.fixture -def skip_bench(request): - return request.config.getoption("--skip_bench") +PTQ_TEST_ROOT = TEST_ROOT / "post_training_quantization" @pytest.hookimpl(tryfirst=True, hookwrapper=True) @@ -230,33 +39,11 @@ def pytest_runtest_makereport(item, call): result = outcome.get_result() if result.when == "call": - test_results = item.config.test_results - per_model_columns = [Top1Column, FPSColumn, Top1DiffColumn, FPSSpeedupColumn] - grouped_columns = [StatusColumn] - header = ["Model name"] - for column in per_model_columns: - for pipeline_type in PipelineType: - if column.accept_pipeline_type(pipeline_type): - header.append(" ".join((pipeline_type.value, 
column.name()))) - for column in grouped_columns: - header.append(column.name()) - - table = [] - for model_name, run_infos in test_results.items(): - row = [model_name] - for column in per_model_columns: - for pipeline_type in PipelineType: - if column.accept_pipeline_type(pipeline_type): - row.append(column.get_value(run_infos, pipeline_type)) - table.append(row) - for column in grouped_columns: - row.append(column.get_value(run_infos, None)) + test_results = collections.OrderedDict(sorted(item.config.test_results.items())) + df = pd.DataFrame() + for _, test_result in test_results.items(): + df = df.append(test_result, ignore_index=True) output_folder = Path(item.config.getoption("--output")) output_folder.mkdir(parents=True, exist_ok=True) - np.savetxt(output_folder / "results.csv", table, delimiter=",", fmt="%s", header=",".join(header)) - - -PTQ_TEST_ROOT = TEST_ROOT / "post_training" -FQ_CALCULATED_PARAMETERS_PATH = PTQ_TEST_ROOT / "data" / "fq_params" / "fq_params.json" -MODELS_SCOPE_PATH = PTQ_TEST_ROOT / "model_scope.json" + df.to_csv(output_folder / "results.csv", index=False) diff --git a/tests/post_training/model_scope.json b/tests/post_training/model_scope.json deleted file mode 100644 index 3f67c265f97..00000000000 --- a/tests/post_training/model_scope.json +++ /dev/null @@ -1,232 +0,0 @@ -{ - "vgg11": { - "model_name": "vgg11", - "quantization_params": {}, - "metrics": { - "FP32 top 1": 0.61246 - } - }, - "resnet18": { - "model_name": "resnet18", - "quantization_params": {}, - "metrics": { - "FP32 top 1": 0.62734 - } - }, - "mobilenetv2_050": { - "model_name": "mobilenetv2_050", - "quantization_params": { - "preset": "MIXED" - }, - "metrics": { - "FP32 top 1": 0.59498 - } - }, - "mobilenetv2_050_BC": { - "model_name": "mobilenetv2_050", - "quantization_params": { - "preset": "MIXED", - "fast_bias_correction": false - }, - "metrics": { - "FP32 top 1": 0.59498 - } - }, - "densenet121": { - "skipped": "temporary excluded due to memory leaks", - "model_name": "densenet121", - "quantization_params": { - "fast_bias_correction": false - } - }, - "densenet121_BC": { - "skipped": "temporary excluded due to memory leaks", - "model_name": "densenet121", - "quantization_params": { - "fast_bias_correction": false - } - }, - "tf_inception_v3": { - "model_name": "tf_inception_v3", - "quantization_params": { - "preset": "MIXED" - }, - "metrics": { - "FP32 top 1": 0.75948 - } - }, - "xception": { - "model_name": "xception", - "quantization_params": { - "preset": "MIXED" - }, - "metrics": { - "FP32 top 1": 0.77522 - } - }, - "efficientnet_b0": { - "model_name": "efficientnet_b0", - "quantization_params": { - "preset": "MIXED" - }, - "metrics": { - "FP32 top 1": 0.72912 - } - }, - "efficientnet_b0_BC": { - "model_name": "efficientnet_b0", - "quantization_params": { - "preset": "MIXED", - "fast_bias_correction": false - }, - "metrics": { - "FP32 top 1": 0.72912 - } - }, - "darknet53": { - "model_name": "darknet53", - "quantization_params": { - "preset": "MIXED" - }, - "metrics": { - "FP32 top 1": 0.7645 - } - }, - "resnest14d": { - "model_name": "resnest14d", - "quantization_params": { - "preset": "MIXED" - }, - "metrics": { - "FP32 top 1": 0.70102 - } - }, - "inception_resnet_v2": { - "model_name": "inception_resnet_v2", - "quantization_params": {}, - "metrics": { - "FP32 top 1": 0.78894 - } - }, - "wide_resnet50_2": { - "model_name": "wide_resnet50_2", - "quantization_params": { - "preset": "MIXED" - }, - "metrics": { - "FP32 top 1": 0.7664 - } - }, - "regnetx_002": { - "model_name": 
"regnetx_002", - "quantization_params": { - "preset": "MIXED" - }, - "metrics": { - "FP32 top 1": 0.63078 - } - }, - "mobilenetv3_small_050": { - "model_name": "mobilenetv3_small_050", - "quantization_params": { - "preset": "MIXED" - }, - "metrics": { - "FP32 top 1": 0.51764 - } - }, - "levit_128": { - "model_name": "levit_128", - "quantization_params": { - "preset": "MIXED", - "model_type": "TRANSFORMER" - }, - "metrics": { - "FP32 top 1": 0.7405 - } - }, - "deit3_small_patch16_224": { - "model_name": "deit3_small_patch16_224", - "quantization_params": { - "preset": "MIXED", - "model_type": "TRANSFORMER" - }, - "metrics": { - "FP32 top 1": 0.76974 - } - }, - "swin_base_patch4_window7_224": { - "model_name": "swin_base_patch4_window7_224", - "quantization_params": { - "preset": "MIXED", - "model_type": "TRANSFORMER" - }, - "metrics": { - "FP32 top 1": 0.81462 - } - }, - "convit_tiny": { - "skipped" : "Suppressed due to bug - 104173", - "model_name": "convit_tiny", - "quantization_params": { - "preset": "MIXED", - "model_type": "TRANSFORMER" - } - }, - "visformer_small": { - "model_name": "visformer_small", - "quantization_params": { - "preset": "MIXED", - "model_type": "TRANSFORMER" - }, - "metrics": { - "FP32 top 1": 0.77902 - } - }, - "crossvit_9_240": { - "model_name": "crossvit_9_240", - "quantization_params": { - "preset": "MIXED", - "model_type": "TRANSFORMER" - }, - "metrics": { - "FP32 top 1": 0.69966 - } - }, - "hrnet_w18": { - "model_name": "hrnet_w18", - "quantization_params": { - "preset": "MIXED" - }, - "metrics": { - "FP32 top 1": 0.70656 - } - }, - "efficientnet_lite0": { - "model_name": "efficientnet_lite0", - "quantization_params": { - "preset": "MIXED" - }, - "metrics": { - "FP32 top 1": 0.69878 - } - }, - "dpn68": { - "model_name": "dpn68", - "quantization_params": { - "preset": "MIXED" - }, - "metrics": { - "FP32 top 1": 0.71478 - } - }, - "dla34": { - "model_name": "dla34", - "quantization_params": { - "preset": "MIXED" - }, - "metrics": { - "FP32 top 1": 0.68424 - } - } -} diff --git a/tests/post_training/model_scope.py b/tests/post_training/model_scope.py index c7ed0dea301..a6bc2c12d13 100644 --- a/tests/post_training/model_scope.py +++ b/tests/post_training/model_scope.py @@ -9,45 +9,263 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from typing import Any, Dict +import copy -from nncf import ModelType, QuantizationPreset -from tests.post_training.conftest import MODELS_SCOPE_PATH -from tests.shared.helpers import load_json +from nncf import ModelType +from nncf import QuantizationPreset +from nncf.quantization.advanced_parameters import AdvancedQuantizationParameters +from tests.post_training.pipelines.base import ALL_NNCF_PTQ_BACKENDS +from tests.post_training.pipelines.base import BackendType +from tests.post_training.pipelines.causal_language_model import CausalLMHF +from tests.post_training.pipelines.image_classification_timm import ImageClassificationTimm +from tests.post_training.pipelines.masked_language_modeling import MaskedLanguageModelingHF +TEST_MODELS = [ + # HF models + { + "reported_name": "hf/bert-base-uncased", + "model_id": "bert-base-uncased", + "pipeline_cls": MaskedLanguageModelingHF, + "ptq_params": { + "preset": QuantizationPreset.MIXED, + "model_type": ModelType.TRANSFORMER, + "subset_size": 2, + }, + "backends": ALL_NNCF_PTQ_BACKENDS + [BackendType.OPTIMUM], + }, + { + "reported_name": "hf/hf-internal-testing/tiny-random-GPTNeoXForCausalLM", + "model_id": "hf-internal-testing/tiny-random-GPTNeoXForCausalLM", + "pipeline_cls": CausalLMHF, + "ptq_params": { + "preset": QuantizationPreset.MIXED, + "model_type": ModelType.TRANSFORMER, + "subset_size": 2, + }, + "backends": [BackendType.OPTIMUM], + }, + # Timm models + { + "reported_name": "timm/crossvit_9_240", + "model_id": "crossvit_9_240", + "pipeline_cls": ImageClassificationTimm, + "ptq_params": { + "preset": QuantizationPreset.MIXED, + "model_type": ModelType.TRANSFORMER, + "advanced_parameters": AdvancedQuantizationParameters(smooth_quant_alpha=-1.0), + }, + "backends": [BackendType.TORCH, BackendType.ONNX, BackendType.OV, BackendType.POT], + }, + { + "reported_name": "timm/darknet53", + "model_id": "darknet53", + "pipeline_cls": ImageClassificationTimm, + "ptq_params": { + "preset": QuantizationPreset.MIXED, + }, + "backends": ALL_NNCF_PTQ_BACKENDS, + }, + { + "reported_name": "timm/deit3_small_patch16_224", + "model_id": "deit3_small_patch16_224", + "pipeline_cls": ImageClassificationTimm, + "ptq_params": { + "preset": QuantizationPreset.MIXED, + "model_type": ModelType.TRANSFORMER, + "advanced_parameters": AdvancedQuantizationParameters(smooth_quant_alpha=-1.0), + }, + "backends": ALL_NNCF_PTQ_BACKENDS, + }, + { + "reported_name": "timm/dla34", + "model_id": "dla34", + "pipeline_cls": ImageClassificationTimm, + "ptq_params": { + "preset": QuantizationPreset.MIXED, + }, + "backends": ALL_NNCF_PTQ_BACKENDS, + }, + { + "reported_name": "timm/dpn68", + "model_id": "dpn68", + "pipeline_cls": ImageClassificationTimm, + "ptq_params": { + "preset": QuantizationPreset.MIXED, + }, + "backends": ALL_NNCF_PTQ_BACKENDS, + }, + { + "reported_name": "timm/efficientnet_b0", + "model_id": "efficientnet_b0", + "pipeline_cls": ImageClassificationTimm, + "ptq_params": { + "preset": QuantizationPreset.MIXED, + }, + "backends": ALL_NNCF_PTQ_BACKENDS, + }, + { + "reported_name": "timm/efficientnet_b0_BC", + "model_id": "efficientnet_b0", + "pipeline_cls": ImageClassificationTimm, + "ptq_params": { + "preset": QuantizationPreset.MIXED, + "fast_bias_correction": False, + }, + "backends": [BackendType.ONNX, BackendType.OV, BackendType.POT], + }, + { + "reported_name": "timm/efficientnet_lite0", + "model_id": "efficientnet_lite0", + "pipeline_cls": ImageClassificationTimm, + "ptq_params": { + "preset": QuantizationPreset.MIXED, + }, + "backends": 
ALL_NNCF_PTQ_BACKENDS, + }, + { + "reported_name": "timm/hrnet_w18", + "model_id": "hrnet_w18", + "pipeline_cls": ImageClassificationTimm, + "ptq_params": { + "preset": QuantizationPreset.MIXED, + }, + "backends": ALL_NNCF_PTQ_BACKENDS, + }, + { + "reported_name": "timm/inception_resnet_v2", + "model_id": "inception_resnet_v2", + "pipeline_cls": ImageClassificationTimm, + "ptq_params": {}, + "backends": ALL_NNCF_PTQ_BACKENDS, + }, + { + "reported_name": "timm/levit_128", + "model_id": "levit_128", + "pipeline_cls": ImageClassificationTimm, + "ptq_params": { + "preset": QuantizationPreset.MIXED, + "model_type": ModelType.TRANSFORMER, + "advanced_parameters": AdvancedQuantizationParameters(smooth_quant_alpha=0.05), + }, + "backends": [BackendType.TORCH, BackendType.ONNX, BackendType.OV, BackendType.POT], + }, + { + "reported_name": "timm/mobilenetv2_050", + "model_id": "mobilenetv2_050", + "pipeline_cls": ImageClassificationTimm, + "ptq_params": { + "preset": QuantizationPreset.MIXED, + }, + "backends": ALL_NNCF_PTQ_BACKENDS, + }, + { + "reported_name": "timm/mobilenetv2_050_BC", + "model_id": "mobilenetv2_050", + "pipeline_cls": ImageClassificationTimm, + "ptq_params": { + "preset": QuantizationPreset.MIXED, + "fast_bias_correction": False, + }, + "backends": [BackendType.ONNX, BackendType.OV, BackendType.POT], + }, + { + "reported_name": "timm/mobilenetv3_small_050", + "model_id": "mobilenetv3_small_050", + "pipeline_cls": ImageClassificationTimm, + "ptq_params": { + "preset": QuantizationPreset.MIXED, + }, + "backends": ALL_NNCF_PTQ_BACKENDS, + }, + { + "reported_name": "timm/regnetx_002", + "model_id": "regnetx_002", + "pipeline_cls": ImageClassificationTimm, + "ptq_params": { + "preset": QuantizationPreset.MIXED, + }, + "backends": ALL_NNCF_PTQ_BACKENDS, + }, + { + "reported_name": "timm/resnest14d", + "model_id": "resnest14d", + "pipeline_cls": ImageClassificationTimm, + "ptq_params": { + "preset": QuantizationPreset.MIXED, + }, + "backends": ALL_NNCF_PTQ_BACKENDS, + }, + { + "reported_name": "timm/resnet18", + "model_id": "resnet18", + "pipeline_cls": ImageClassificationTimm, + "ptq_params": {}, + "backends": ALL_NNCF_PTQ_BACKENDS, + }, + { + "reported_name": "timm/swin_base_patch4_window7_224", + "model_id": "swin_base_patch4_window7_224", + "pipeline_cls": ImageClassificationTimm, + "ptq_params": { + "preset": QuantizationPreset.MIXED, + "model_type": ModelType.TRANSFORMER, + }, + "backends": ALL_NNCF_PTQ_BACKENDS, + }, + { + "reported_name": "timm/tf_inception_v3", + "model_id": "tf_inception_v3", + "pipeline_cls": ImageClassificationTimm, + "ptq_params": { + "preset": QuantizationPreset.MIXED, + }, + "backends": ALL_NNCF_PTQ_BACKENDS, + }, + { + "reported_name": "timm/vgg11", + "model_id": "vgg11", + "pipeline_cls": ImageClassificationTimm, + "ptq_params": {}, + "backends": ALL_NNCF_PTQ_BACKENDS, + }, + { + "reported_name": "timm/visformer_small", + "model_id": "visformer_small", + "pipeline_cls": ImageClassificationTimm, + "ptq_params": { + "preset": QuantizationPreset.MIXED, + "model_type": ModelType.TRANSFORMER, + }, + "backends": [BackendType.TORCH, BackendType.ONNX, BackendType.OV, BackendType.POT], + }, + { + "reported_name": "timm/wide_resnet50_2", + "model_id": "wide_resnet50_2", + "pipeline_cls": ImageClassificationTimm, + "ptq_params": { + "preset": QuantizationPreset.MIXED, + }, + "backends": ALL_NNCF_PTQ_BACKENDS, + }, +] -def get_validation_scope() -> Dict[str, Any]: - """ - Read json file that collected models to validation from MODELS_SCOPE_PATH. 
- Convert parameters - :return dict: Dict with model attributes. +def generate_tests_scope(): """ - model_scope = load_json(MODELS_SCOPE_PATH) - - for model_name in list(model_scope.keys()): - model_info = model_scope[model_name] - if model_info.get("skipped"): - print(f"Skip {model_name} by '{model_info.get('skipped')}'") - model_scope.pop(model_name) - continue - - qparams = model_info["quantization_params"] - if "preset" in qparams.keys(): - qparams["preset"] = QuantizationPreset[qparams["preset"]] - if "model_type" in qparams.keys(): - qparams["model_type"] = ModelType[qparams["model_type"]] - - return model_scope - - -VALIDATION_SCOPE = get_validation_scope() + Generate tests by names "{reported_name}_backend_{backend}" + """ + tests_scope = {} + for test_model_param in TEST_MODELS: + for backend in test_model_param["backends"]: + model_param = copy.deepcopy(test_model_param) + reported_name = model_param["reported_name"] + test_case_name = f"{reported_name}_backend_{backend.value}" + model_param["backend"] = backend + model_param.pop("backends") + if test_case_name in tests_scope: + raise RuntimeError(f"{test_case_name} already in tests_scope") + tests_scope[test_case_name] = model_param + return tests_scope -def get_cached_metric(report_model_name, metric_name): - cached_metric = None - try: - cached_metric = VALIDATION_SCOPE[report_model_name]["metrics"][metric_name] - except KeyError: - pass - return cached_metric +TEST_CASES = generate_tests_scope() diff --git a/tests/post_training/models.py b/tests/post_training/models.py deleted file mode 100644 index 7fa4dc2ce52..00000000000 --- a/tests/post_training/models.py +++ /dev/null @@ -1,97 +0,0 @@ -# Copyright (c) 2023 Intel Corporation -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# http://www.apache.org/licenses/LICENSE-2.0 -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -from nncf.common.graph import NNCFGraph -from nncf.common.graph.operator_metatypes import InputNoopMetatype -from nncf.common.graph.operator_metatypes import OutputNoopMetatype -from tests.common.quantization.mock_graphs import NodeWithType -from tests.common.quantization.test_filter_constant_nodes import create_mock_graph -from tests.common.quantization.test_filter_constant_nodes import get_nncf_graph_from_mock_nx_graph - - -# pylint: disable=protected-access -class NNCFGraphToTest: - def __init__(self, conv_metatype, conv_layer_attrs=None, nncf_graph_cls=NNCFGraph): - # Original graph - # Input_1 - # | - # Conv_1 - # | - # Output_1 - nodes = [ - NodeWithType("Input_1", InputNoopMetatype), - NodeWithType("Conv_1", conv_metatype, layer_attributes=conv_layer_attrs), - NodeWithType("Output_1", OutputNoopMetatype), - ] - node_edges = [("Input_1", "Conv_1"), ("Conv_1", "Output_1")] - original_mock_graph = create_mock_graph(nodes, node_edges) - self.nncf_graph = get_nncf_graph_from_mock_nx_graph(original_mock_graph, nncf_graph_cls) - - -class NNCFGraphToTestDepthwiseConv: - def __init__(self, depthwise_conv_metatype, conv_layer_attrs=None): - # Original graph - # Input_1 - # | - # DepthwiseConv_1 - # | - # Output_1 - nodes = [ - NodeWithType("Input_1", InputNoopMetatype), - NodeWithType("Conv_1", depthwise_conv_metatype, layer_attributes=conv_layer_attrs), - NodeWithType("Output_1", OutputNoopMetatype), - ] - node_edges = [("Input_1", "Conv_1"), ("Conv_1", "Output_1")] - original_mock_graph = create_mock_graph(nodes, node_edges) - self.nncf_graph = get_nncf_graph_from_mock_nx_graph(original_mock_graph) - - -class NNCFGraphToTestSumAggregation: - def __init__(self, conv_metatype, sum_metatype, conv_layer_attrs=None, nncf_graph_cls=NNCFGraph): - # Original graph - # Input_1 - # | - # Conv_1 - # | - # Sum_1 - # | - # Output_1 - nodes = [ - NodeWithType("Input_1", InputNoopMetatype), - NodeWithType("Conv_1", conv_metatype, layer_attributes=conv_layer_attrs), - NodeWithType("Sum_1", sum_metatype), - NodeWithType("Output_1", OutputNoopMetatype), - ] - node_edges = [("Input_1", "Conv_1"), ("Conv_1", "Sum_1"), ("Sum_1", "Output_1")] - original_mock_graph = create_mock_graph(nodes, node_edges) - self.nncf_graph = get_nncf_graph_from_mock_nx_graph(original_mock_graph, nncf_graph_cls) - # Hack output size of the Sum_1 operation - self.nncf_graph._nx_graph.out_edges[("2 /Sum_1_0", "3 /Output_1_0")][ - self.nncf_graph.ACTIVATION_SHAPE_EDGE_ATTR - ] = [1, 1, 1] - - -class NNCFGraphToTestMatMul: - def __init__(self, matmul_metatype, matmul_layer_attrs=None, nncf_graph_cls=NNCFGraph): - # Original graphs - # Input_1 - # | - # MatMul_1 - # | - # Output_1 - nodes = [ - NodeWithType("Input_1", InputNoopMetatype), - NodeWithType("MatMul_1", matmul_metatype, layer_attributes=matmul_layer_attrs), - NodeWithType("Output_1", OutputNoopMetatype), - ] - node_edges = [("Input_1", "MatMul_1"), ("MatMul_1", "Output_1")] - original_mock_graph = create_mock_graph(nodes, node_edges) - self.nncf_graph = get_nncf_graph_from_mock_nx_graph(original_mock_graph, nncf_graph_cls) diff --git a/tests/post_training/pipelines/base.py b/tests/post_training/pipelines/base.py new file mode 100644 index 00000000000..5427fd50686 --- /dev/null +++ b/tests/post_training/pipelines/base.py @@ -0,0 +1,302 @@ +# Copyright (c) 2023 Intel Corporation +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +import os +import time +from abc import ABC +from abc import abstractmethod +from dataclasses import dataclass +from datetime import timedelta +from enum import Enum +from pathlib import Path +from typing import Optional + +import numpy as np +import onnx +import openvino.runtime as ov +import torch +from memory_profiler import memory_usage +from optimum.intel import OVQuantizer +from torch import nn + +import nncf +from nncf import TargetDevice +from nncf.experimental.torch.quantization.quantize_model import quantize_impl as pt_impl_experimental +from nncf.quantization.advanced_parameters import AdvancedQuantizationParameters +from tests.shared.command import Command + +DEFAULT_VAL_THREADS = 4 + + +class BackendType(Enum): + FP32 = "FP32" + OLD_TORCH = "OLD_TORCH" # Quantization via create_compressed_model + TORCH = "TORCH" # PTQ implementation + ONNX = "ONNX" + OV = "OV" + POT = "POT" + OPTIMUM = "OPTIMUM" + + +ALL_NNCF_PTQ_BACKENDS = [BackendType.OLD_TORCH, BackendType.TORCH, BackendType.ONNX, BackendType.OV, BackendType.POT] +PT_BACKENDS = [BackendType.TORCH, BackendType.OLD_TORCH] +OV_BACKENDS = [BackendType.OV, BackendType.POT, BackendType.OPTIMUM] + + +@dataclass +class RunInfo: + """ + Containing data about quantization of the model. + """ + + model: Optional[str] = None + backend: Optional[BackendType] = None + metric_name: Optional[str] = None + metric_value: Optional[float] = None + metric_diff: Optional[float] = None + num_fq_nodes: Optional[float] = None + quant_memory_usage: Optional[int] = None + time_total: Optional[float] = None + time_quantization: Optional[float] = None + status: Optional[str] = None + + @staticmethod + def format_time(time_elapsed): + if time_elapsed is None: + return None + return str(timedelta(seconds=int(time_elapsed))) + + @staticmethod + def format_memory_usage(memory): + if memory is None: + return None + return int(memory) + + def get_result_dict(self): + return { + "Model": self.model, + "Backend": self.backend.value if self.backend else None, + "Metric name": self.metric_name, + "Metric value": self.metric_value, + "Metric diff": self.metric_diff, + "Num FQ": self.num_fq_nodes, + "RAM MiB": self.format_memory_usage(self.quant_memory_usage), + "Quant. time": self.format_time(self.time_quantization), + "Total time": self.format_time(self.time_total), + "Status": self.status, + } + + +def export_to_onnx(model: nn.Module, save_path: str, data_sample: torch.Tensor) -> None: + """ + Export Torch model to ONNX format. + """ + torch.onnx.export(model, data_sample, save_path, export_params=True, opset_version=13, do_constant_folding=False) + + +def export_to_ir(model_path: str, save_path: str, model_name: str) -> None: + """ + Export ONNX model to OpenVINO format. + + :param model_path: Path to ONNX model. + :param save_path: Path directory to save OpenVINO IR model. + :param model_name: Model name. + """ + runner = Command(f"mo -m {model_path} -o {save_path} -n {model_name} --compress_to_fp16=False") + runner.run() + + +class BaseTestPipeline(ABC): + """ + Base class to test post training quantization. 
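+
+    The pipeline prepares the model and calibration dataset, quantizes the model
+    while recording quantization time and peak memory usage, saves the quantized
+    model to OpenVINO IR, counts the FakeQuantize nodes in the IR, and validates
+    the resulting metric against the reference data.
+
+    A rough usage sketch (paths and reference values are illustrative; the
+    concrete pipelines implement the abstract prepare_* methods):
+
+        pipeline = ImageClassificationTimm(
+            reported_name="timm/resnet18",
+            model_id="resnet18",
+            backend=BackendType.OV,
+            ptq_params={},
+            output_dir=Path("./output"),
+            data_dir=Path("./data"),
+            reference_data={"metric_value": None, "metric_value_fp32": None},
+        )
+        pipeline.run()
+        run_info = pipeline.get_run_info()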
+ """ + + def __init__( + self, + reported_name: str, + model_id: str, + backend: BackendType, + ptq_params: dict, + output_dir: Path, + data_dir: Path, + reference_data: dict, + params: dict = None, + ) -> None: + self.reported_name = reported_name + self.model_id = model_id + self.backend = backend + self.ptq_params = ptq_params + self.output_dir = Path(output_dir) + self.data_dir = Path(data_dir) + self.reference_data = reference_data + self.params = params or {} + + self.output_model_dir = self.output_dir / self.reported_name / self.backend.value + self.output_model_dir.mkdir(parents=True, exist_ok=True) + self.model_name = f"{self.reported_name}_{self.backend.value}" + + self.model = None + self.model_hf = None + self.calibration_dataset = None + self.dummy_tensor = None + + self.run_info = RunInfo(model=reported_name, backend=self.backend) + + self.post_init() + + def post_init(self): + """Post init actions""" + + @abstractmethod + def prepare_preprocessor(self) -> None: + """Prepare preprocessor for the target model""" + + @abstractmethod + def prepare_calibration_dataset(self) -> None: + """Prepare calibration dataset for the target model""" + + @abstractmethod + def prepare_model(self) -> None: + """Prepare model""" + + def prepare(self): + """ + Preparing model and calibration dataset for quantization. + """ + print("Preparing...") + self.prepare_model() + if self.model is None: + raise RuntimeError("self.model is None") + self.prepare_preprocessor() + self.prepare_calibration_dataset() + + def _quantize(self): + """ + Quantize self.model + """ + if self.backend == BackendType.OPTIMUM: + quantizer = OVQuantizer.from_pretrained(self.model_hf) + quantizer.quantize(calibration_dataset=self.calibration_dataset, save_directory=self.output_model_dir) + else: + quantize_fn = nncf.quantize + if self.backend == BackendType.TORCH: + # Use experimental torch api + quantize_fn = pt_impl_experimental + if "preset" not in self.ptq_params: + self.ptq_params["preset"] = nncf.QuantizationPreset.PERFORMANCE + if "subset_size" not in self.ptq_params: + self.ptq_params["subset_size"] = 300 + if "fast_bias_correction" not in self.ptq_params: + self.ptq_params["fast_bias_correction"] = True + + if self.backend == BackendType.POT: + self.ptq_params["advanced_parameters"] = AdvancedQuantizationParameters( + backend_params={"use_pot": True} + ) + + self.quantized_model = quantize_fn( + model=self.model, + target_device=TargetDevice.CPU, + calibration_dataset=self.calibration_dataset, + **self.ptq_params, + ) + + def quantize(self) -> None: + """ + Run quantization of the model and collect time and memory usage information. + """ + print("Quantization...") + + if self.backend in [BackendType.TORCH, BackendType.OLD_TORCH]: + cpu_threads_num = os.environ.get("CPU_THREADS_NUM") + if cpu_threads_num is not None: + torch.set_num_threads(int(cpu_threads_num)) + + start_time = time.perf_counter() + self.run_info.quant_memory_usage = memory_usage(self._quantize, max_usage=True) + self.run_info.time_quantization = time.perf_counter() - start_time + + def save_quantized_model(self) -> None: + """ + Save quantized model to IR. 
+ """ + print("Save quantized model...") + if self.backend == BackendType.OPTIMUM: + self.path_quantized_ir = self.output_model_dir / "openvino_model.xml" + elif self.backend in PT_BACKENDS: + onnx_path = self.output_model_dir / "model.onnx" + export_to_onnx(self.quantized_model, str(onnx_path), self.dummy_tensor) + export_to_ir(onnx_path, self.output_model_dir, model_name="model") + self.path_quantized_ir = self.output_model_dir / "model.xml" + elif self.backend == BackendType.ONNX: + onnx_path = self.output_model_dir / "model.onnx" + onnx.save(self.quantized_model, str(onnx_path)) + export_to_ir(onnx_path, str(self.output_model_dir), model_name="model") + self.path_quantized_ir = self.output_model_dir / "model.xml" + elif self.backend in OV_BACKENDS: + self.path_quantized_ir = self.output_model_dir / "model.xml" + ov.serialize(self.quantized_model, str(self.path_quantized_ir)) + + def get_num_fq(self) -> None: + """ + Get number of the FakeQuantize nodes in the quantized IR. + """ + + ie = ov.Core() + model = ie.read_model(model=self.path_quantized_ir) + + num_fq = 0 + for node in model.get_ops(): + node_type = node.type_info.name + if node_type == "FakeQuantize": + num_fq += 1 + + self.run_info.num_fq_nodes = num_fq + + @abstractmethod + def _validate(self) -> None: + """Validate IR""" + + def validate(self) -> None: + """ + Validate and compare result with reference + """ + print("Validation...") + self._validate() + + metric_value = self.run_info.metric_value + metric_reference = self.reference_data.get("metric_value") + metric_value_fp32 = self.reference_data.get("metric_value_fp32") + + if metric_value is not None and metric_value_fp32 is not None: + self.run_info.metric_diff = self.run_info.metric_value - self.reference_data["metric_value_fp32"] + + if metric_value is not None and metric_reference is not None: + if not np.isclose(metric_value, metric_reference, atol=self.reference_data.get("atol", 0.001)): + if metric_value < metric_reference: + status_msg = f"Regression: Metric value is less than reference {metric_value} < {metric_reference}" + raise ValueError(status_msg) + if metric_value > metric_reference: + self.run_info.status = ( + f"Improvement: Metric value is better than reference {metric_value} > {metric_reference}" + ) + + def run(self) -> None: + """ + Run full pipeline of quantization + """ + self.prepare() + self.quantize() + self.save_quantized_model() + self.get_num_fq() + self.validate() + + def get_run_info(self) -> RunInfo: + return self.run_info diff --git a/tests/post_training/pipelines/causal_language_model.py b/tests/post_training/pipelines/causal_language_model.py new file mode 100644 index 00000000000..81e6a1d9c45 --- /dev/null +++ b/tests/post_training/pipelines/causal_language_model.py @@ -0,0 +1,57 @@ +# Copyright (c) 2023 Intel Corporation +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ + +import transformers +from optimum.intel import OVQuantizer +from optimum.intel.openvino import OVModelForCausalLM + +from tests.post_training.pipelines.base import OV_BACKENDS +from tests.post_training.pipelines.base import BackendType +from tests.post_training.pipelines.base import BaseTestPipeline + + +class CausalLMHF(BaseTestPipeline): + """Pipeline for causal language models from Hugging Face repository""" + + def prepare_model(self) -> None: + if self.backend in OV_BACKENDS: + self.model_hf = OVModelForCausalLM.from_pretrained(self.model_id, export=True, compile=False) + self.model = self.model_hf.model + + def prepare_preprocessor(self) -> None: + self.preprocessor = transformers.AutoTokenizer.from_pretrained(self.model_id) + + def get_transform_calibration_fn(self): + def transform_func(examples): + data = self.preprocessor(examples["sentence"]) + return data + + return transform_func + + def prepare_calibration_dataset(self): + quantizer = OVQuantizer.from_pretrained(self.model_hf) + + num_samples = self.ptq_params.get("subset_size", 300) + calibration_dataset = quantizer.get_calibration_dataset( + "glue", + dataset_config_name="sst2", + preprocess_function=self.get_transform_calibration_fn(), + num_samples=num_samples, + dataset_split="validation", + preprocess_batch=True, + ) + + if self.backend == BackendType.OPTIMUM: + self.calibration_dataset = calibration_dataset + + def _validate(self): + pass diff --git a/tests/post_training/pipelines/image_classification_timm.py b/tests/post_training/pipelines/image_classification_timm.py new file mode 100644 index 00000000000..d7ad2a6d160 --- /dev/null +++ b/tests/post_training/pipelines/image_classification_timm.py @@ -0,0 +1,163 @@ +# Copyright (c) 2023 Intel Corporation +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
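+
+# These pipelines quantize timm image classification models and measure top-1
+# accuracy on the ImageNet validation split using OpenVINO inference.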
+ +import copy +import os + +import numpy as np +import onnx +import openvino.runtime as ov +import timm +import torch +import tqdm +from sklearn.metrics import accuracy_score +from timm.layers.config import set_fused_attn +from torchvision import datasets +from torchvision import transforms +from torchvision.transforms import InterpolationMode + +import nncf +from nncf.experimental.torch.replace_custom_modules.timm_custom_modules import ( + replace_timm_custom_modules_with_torch_native, +) +from tests.post_training.pipelines.base import DEFAULT_VAL_THREADS +from tests.post_training.pipelines.base import OV_BACKENDS +from tests.post_training.pipelines.base import PT_BACKENDS +from tests.post_training.pipelines.base import BackendType +from tests.post_training.pipelines.base import BaseTestPipeline +from tests.post_training.pipelines.base import export_to_ir +from tests.post_training.pipelines.base import export_to_onnx + +# Disable using aten::scaled_dot_product_attention +set_fused_attn(False, False) + + +class ImageClassificationTimm(BaseTestPipeline): + """Pipeline for Image Classification model from timm repository""" + + def prepare_model(self) -> None: + timm_model = timm.create_model(self.model_id, num_classes=1000, in_chans=3, pretrained=True, checkpoint_path="") + timm_model = replace_timm_custom_modules_with_torch_native(timm_model) + self.model_cfg = timm_model.default_cfg + input_size = [1] + list(timm_model.default_cfg["input_size"]) + self.dummy_tensor = torch.rand(input_size) + + if self.backend in PT_BACKENDS: + self.model = timm_model + + if self.backend == BackendType.ONNX: + onnx_path = self.output_model_dir / "model_fp32.onnx" + + export_to_onnx(timm_model, onnx_path, self.dummy_tensor) + self.model = onnx.load(onnx_path) + self.input_name = self.model.graph.input[0].name + + if self.backend in OV_BACKENDS: + onnx_path = self.output_model_dir / "model_fp32.onnx" + export_to_onnx(timm_model, onnx_path, self.dummy_tensor) + export_to_ir(onnx_path, self.output_model_dir, model_name="model_fp32") + ir_path = self.output_model_dir / "model_fp32.xml" + + core = ov.Core() + self.model = core.read_model(ir_path) + self.input_name = list(inp.get_any_name() for inp in self.model.inputs)[0] + + def prepare_preprocessor(self) -> None: + config = self.model_cfg + transformations_list = [] + normalize = transforms.Normalize(mean=config["mean"], std=config["std"]) + input_size = config["input_size"] + + RESIZE_MODE_MAP = { + "bilinear": InterpolationMode.BILINEAR, + "bicubic": InterpolationMode.BICUBIC, + "nearest": InterpolationMode.NEAREST, + } + + if "fixed_input_size" in config and not config["fixed_input_size"]: + resize_size = tuple(int(x / config["crop_pct"]) for x in input_size[-2:]) + resize = transforms.Resize(resize_size, interpolation=RESIZE_MODE_MAP[config["interpolation"]]) + transformations_list.append(resize) + transformations_list.extend([transforms.CenterCrop(input_size[-2:]), transforms.ToTensor(), normalize]) + + self.transform = transforms.Compose(transformations_list) + + def get_transform_calibration_fn(self): + if self.backend in PT_BACKENDS: + + def transform_fn(data_item): + images, _ = data_item + return images + + else: + + def transform_fn(data_item): + images, _ = data_item + return {self.input_name: np.array(images, dtype=np.float32)} + + return transform_fn + + def prepare_calibration_dataset(self): + batch_size = 128 if self.backend == BackendType.OLD_TORCH else 1 + dataset = datasets.ImageFolder(root=self.data_dir / "imagenet" / "val", 
transform=self.transform) + loader = torch.utils.data.DataLoader(dataset, batch_size=batch_size, num_workers=2, shuffle=False) + + self.calibration_dataset = nncf.Dataset(loader, self.get_transform_calibration_fn()) + + def _validate(self): + val_dataset = datasets.ImageFolder(root=self.data_dir / "imagenet" / "val", transform=self.transform) + val_loader = torch.utils.data.DataLoader(val_dataset, batch_size=1, num_workers=2, shuffle=False) + + dataset_size = len(val_loader) + + predictions = [0] * dataset_size + references = [-1] * dataset_size + + core = ov.Core() + + if os.environ.get("CPU_THREADS_NUM"): + # Set CPU_THREADS_NUM for OpenVINO inference + cpu_threads_num = os.environ.get("CPU_THREADS_NUM") + core.set_property("CPU", properties={"CPU_THREADS_NUM": str(cpu_threads_num)}) + + ov_model = core.read_model(self.path_quantized_ir) + compiled_model = core.compile_model(ov_model) + + jobs = int(os.environ.get("NUM_VAL_THREADS", DEFAULT_VAL_THREADS)) + infer_queue = ov.AsyncInferQueue(compiled_model, jobs) + + # Disable tqdm for Jenkins + disable_tqdm = os.environ.get("JENKINS_HOME") is not None + + with tqdm.tqdm(total=dataset_size, desc="Validation", disable=disable_tqdm) as pbar: + + def process_result(request, userdata): + output_data = request.get_output_tensor().data + predicted_label = np.argmax(output_data, axis=1) + predictions[userdata] = [predicted_label] + pbar.update() + + infer_queue.set_callback(process_result) + + for i, (images, target) in enumerate(val_loader): + # W/A for memory leaks when using torch DataLoader and OpenVINO + image_copies = copy.deepcopy(images.numpy()) + infer_queue.start_async(image_copies, userdata=i) + references[i] = target + + infer_queue.wait_all() + + predictions = np.concatenate(predictions, axis=0) + references = np.concatenate(references, axis=0) + acc_top1 = accuracy_score(predictions, references) + + self.run_info.metric_name = "Acc@1" + self.run_info.metric_value = acc_top1 diff --git a/tests/post_training/pipelines/masked_language_modeling.py b/tests/post_training/pipelines/masked_language_modeling.py new file mode 100644 index 00000000000..671050bb651 --- /dev/null +++ b/tests/post_training/pipelines/masked_language_modeling.py @@ -0,0 +1,86 @@ +# Copyright (c) 2023 Intel Corporation +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
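+
+# Calibration data is built from the GLUE SST-2 validation split; _validate()
+# is a no-op, so accuracy metrics are not collected for these models.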
+ +import numpy as np +import onnx +import torch +import transformers +from optimum.intel import OVQuantizer +from optimum.intel.openvino import OVModelForSequenceClassification +from optimum.onnxruntime import ORTModelForSequenceClassification + +import nncf +from tests.post_training.pipelines.base import OV_BACKENDS +from tests.post_training.pipelines.base import PT_BACKENDS +from tests.post_training.pipelines.base import BackendType +from tests.post_training.pipelines.base import BaseTestPipeline + + +class MaskedLanguageModelingHF(BaseTestPipeline): + """Pipeline for masked language models from Hugging Face repository""" + + def prepare_model(self) -> None: + if self.backend in PT_BACKENDS: + self.model_hf = transformers.AutoModelForSequenceClassification.from_pretrained(self.model_id) + self.model = self.model_hf + self.dummy_tensor = self.model_hf.dummy_inputs["input_ids"] + if self.backend in OV_BACKENDS: + self.model_hf = OVModelForSequenceClassification.from_pretrained(self.model_id, export=True, compile=False) + self.model = self.model_hf.model + + if self.backend == BackendType.ONNX: + self.model_hf = ORTModelForSequenceClassification.from_pretrained(self.model_id, export=True) + self.model = onnx.load(self.model_hf.model_path) + + def prepare_preprocessor(self) -> None: + self.preprocessor = transformers.AutoTokenizer.from_pretrained(self.model_id) + + def get_transform_calibration_fn(self): + if self.backend in PT_BACKENDS: + + def transform_func(data): + return torch.Tensor([data["input_ids"]]).type(dtype=torch.LongTensor) + + else: + + def transform_func(data): + return { + "input_ids": np.expand_dims(data["input_ids"], axis=0), + "token_type_ids": np.expand_dims(data["token_type_ids"], axis=0), + "attention_mask": np.expand_dims(data["attention_mask"], axis=0), + } + + return transform_func + + def prepare_calibration_dataset(self): + quantizer = OVQuantizer.from_pretrained(self.model_hf) + + num_samples = self.ptq_params.get("subset_size", 300) + + def preprocess_function(examples): + return self.preprocessor(examples["sentence"], padding=True, truncation=True, max_length=128) + + calibration_dataset = quantizer.get_calibration_dataset( + "glue", + dataset_config_name="sst2", + preprocess_function=preprocess_function, + num_samples=num_samples, + dataset_split="validation", + preprocess_batch=True, + ) + + if self.backend == BackendType.OPTIMUM: + self.calibration_dataset = calibration_dataset + else: + self.calibration_dataset = nncf.Dataset(calibration_dataset, self.get_transform_calibration_fn()) + + def _validate(self): + pass diff --git a/tests/post_training/reference_data.yaml b/tests/post_training/reference_data.yaml new file mode 100644 index 00000000000..a5a21a716ca --- /dev/null +++ b/tests/post_training/reference_data.yaml @@ -0,0 +1,356 @@ +# Hugging Face +hf/bert-base-uncased_backend_OPTIMUM: + metric_value: null + metric_value_fp32: null +hf/bert-base-uncased_backend_OLD_TORCH: + metric_value: null + metric_value_fp32: null +hf/bert-base-uncased_backend_TORCH: + metric_value: null + metric_value_fp32: null +hf/bert-base-uncased_backend_ONNX: + metric_value: null + metric_value_fp32: null +hf/bert-base-uncased_backend_OV: + metric_value: null + metric_value_fp32: null +hf/bert-base-uncased_backend_POT: + metric_value: null + metric_value_fp32: null + +hf/hf-internal-testing/tiny-random-GPTNeoXForCausalLM_backend_OPTIMUM: + metric_value: null + metric_value_fp32: null + +# Timm +timm/crossvit_9_240_backend_TORCH: + metric_value: 0.68136 + metric_value_fp32: 
0.69966 +timm/crossvit_9_240_backend_ONNX: + metric_value: 0.68906 + metric_value_fp32: 0.69966 +timm/crossvit_9_240_backend_OV: + metric_value: 0.68912 + metric_value_fp32: 0.69966 +timm/crossvit_9_240_backend_POT: + metric_value: 0.68862 + metric_value_fp32: 0.69966 + +timm/darknet53_backend_OLD_TORCH: + metric_value: 0.78534 + metric_value_fp32: 0.79858 +timm/darknet53_backend_TORCH: + metric_value: 0.79176 + metric_value_fp32: 0.79858 +timm/darknet53_backend_ONNX: + metric_value: 0.79224 + metric_value_fp32: 0.79858 +timm/darknet53_backend_OV: + metric_value: 0.79192 + metric_value_fp32: 0.79858 +timm/darknet53_backend_POT: + metric_value: 0.79206 + metric_value_fp32: 0.79858 + +timm/deit3_small_patch16_224_backend_OLD_TORCH: + metric_value: 0.76514 + metric_value_fp32: 0.76974 +timm/deit3_small_patch16_224_backend_TORCH: + metric_value: 0.7621 + metric_value_fp32: 0.76974 +timm/deit3_small_patch16_224_backend_ONNX: + metric_value: 0.76806 + metric_value_fp32: 0.76974 +timm/deit3_small_patch16_224_backend_OV: + metric_value: 0.76786 + metric_value_fp32: 0.76974 +timm/deit3_small_patch16_224_backend_POT: + metric_value: 0.7676 + metric_value_fp32: 0.76974 + +timm/dla34_backend_OLD_TORCH: + metric_value: 0.7364 + metric_value_fp32: 0.74416 +timm/dla34_backend_TORCH: + metric_value: 0.73978 + metric_value_fp32: 0.74416 +timm/dla34_backend_ONNX: + metric_value: 0.74202 + metric_value_fp32: 0.74416 +timm/dla34_backend_OV: + metric_value: 0.74224 + metric_value_fp32: 0.74416 +timm/dla34_backend_POT: + metric_value: 0.74222 + metric_value_fp32: 0.74416 + +timm/dpn68_backend_OLD_TORCH: + metric_value: 0.7479 + metric_value_fp32: 0.75964 +timm/dpn68_backend_TORCH: + metric_value: 0.75492 + metric_value_fp32: 0.75964 +timm/dpn68_backend_ONNX: + metric_value: 0.75488 + metric_value_fp32: 0.75964 +timm/dpn68_backend_OV: + metric_value: 0.75544 + metric_value_fp32: 0.75964 +timm/dpn68_backend_POT: + metric_value: 0.7554 + metric_value_fp32: 0.75964 + +timm/efficientnet_b0_backend_OLD_TORCH: + metric_value: 0.74976 + metric_value_fp32: 0.77292 +timm/efficientnet_b0_backend_TORCH: + metric_value: 0.76476 + metric_value_fp32: 0.77292 +timm/efficientnet_b0_backend_ONNX: + metric_value: 0.76582 + metric_value_fp32: 0.77292 +timm/efficientnet_b0_backend_OV: + metric_value: 0.76796 + metric_value_fp32: 0.77292 +timm/efficientnet_b0_backend_POT: + metric_value: 0.76866 + metric_value_fp32: 0.77292 + +timm/efficientnet_b0_BC_backend_ONNX: + metric_value: 0.76446 + metric_value_fp32: 0.77292 +timm/efficientnet_b0_BC_backend_OV: + metric_value: 0.76794 + metric_value_fp32: 0.77292 +timm/efficientnet_b0_BC_backend_POT: + metric_value: 0.76878 + metric_value_fp32: 0.77292 + +timm/efficientnet_lite0_backend_OLD_TORCH: + metric_value: 0.74172 + metric_value_fp32: 0.7496 +timm/efficientnet_lite0_backend_TORCH: + metric_value: 0.74686 + metric_value_fp32: 0.7496 +timm/efficientnet_lite0_backend_ONNX: + metric_value: 0.7468 + metric_value_fp32: 0.7496 +timm/efficientnet_lite0_backend_OV: + metric_value: 0.74704 + metric_value_fp32: 0.7496 +timm/efficientnet_lite0_backend_POT: + metric_value: 0.74686 + metric_value_fp32: 0.7496 + +timm/hrnet_w18_backend_OLD_TORCH: + metric_value: 0.7672 + metric_value_fp32: 0.77504 +timm/hrnet_w18_backend_TORCH: + metric_value: 0.76712 + metric_value_fp32: 0.77504 +timm/hrnet_w18_backend_ONNX: + metric_value: 0.76916 + metric_value_fp32: 0.77504 +timm/hrnet_w18_backend_OV: + metric_value: 0.76926 + metric_value_fp32: 0.77504 +timm/hrnet_w18_backend_POT: + metric_value: 0.76968 + 
metric_value_fp32: 0.77504 + +timm/inception_resnet_v2_backend_OLD_TORCH: + metric_value: 0.79722 + metric_value_fp32: 0.80102 +timm/inception_resnet_v2_backend_TORCH: + metric_value: 0.80024 + metric_value_fp32: 0.80102 +timm/inception_resnet_v2_backend_ONNX: + metric_value: 0.80108 + metric_value_fp32: 0.80102 +timm/inception_resnet_v2_backend_OV: + metric_value: 0.80152 + metric_value_fp32: 0.80102 +timm/inception_resnet_v2_backend_POT: + metric_value: 0.80118 + metric_value_fp32: 0.80102 + +timm/levit_128_backend_TORCH: + metric_value: 0.72938 + metric_value_fp32: 0.7405 +timm/levit_128_backend_ONNX: + metric_value: 0.73082 + metric_value_fp32: 0.7405 +timm/levit_128_backend_OV: + metric_value: 0.7334 + metric_value_fp32: 0.7405 +timm/levit_128_backend_POT: + metric_value: 0.73284 + metric_value_fp32: 0.7405 + +timm/mobilenetv2_050_backend_OLD_TORCH: + metric_value: 0.6338 + metric_value_fp32: 0.64568 +timm/mobilenetv2_050_backend_TORCH: + metric_value: 0.64278 + metric_value_fp32: 0.64568 +timm/mobilenetv2_050_backend_ONNX: + metric_value: 0.64358 + metric_value_fp32: 0.64568 +timm/mobilenetv2_050_backend_OV: + metric_value: 0.64134 + metric_value_fp32: 0.64568 +timm/mobilenetv2_050_backend_POT: + metric_value: 0.64234 + metric_value_fp32: 0.64568 + +timm/mobilenetv2_050_BC_backend_ONNX: + metric_value: 0.64498 + metric_value_fp32: 0.64568 +timm/mobilenetv2_050_BC_backend_OV: + metric_value: 0.64412 + metric_value_fp32: 0.64568 +timm/mobilenetv2_050_BC_backend_POT: + metric_value: 0.64376 + metric_value_fp32: 0.64568 + +timm/mobilenetv3_small_050_backend_OLD_TORCH: + metric_value: 0.43412 + metric_value_fp32: 0.56338 +timm/mobilenetv3_small_050_backend_TORCH: + atol: 0.03 + metric_value: 0.39514 + metric_value_fp32: 0.56338 +timm/mobilenetv3_small_050_backend_ONNX: + metric_value: 0.53962 + metric_value_fp32: 0.56338 +timm/mobilenetv3_small_050_backend_OV: + metric_value: 0.38638 + metric_value_fp32: 0.56338 +timm/mobilenetv3_small_050_backend_POT: + metric_value: 0.27062 + metric_value_fp32: 0.56338 + +timm/regnetx_002_backend_OLD_TORCH: + metric_value: 0.66612 + metric_value_fp32: 0.67826 +timm/regnetx_002_backend_TORCH: + metric_value: 0.67452 + metric_value_fp32: 0.67826 +timm/regnetx_002_backend_ONNX: + metric_value: 0.67394 + metric_value_fp32: 0.67826 +timm/regnetx_002_backend_OV: + metric_value: 0.6738 + metric_value_fp32: 0.67826 +timm/regnetx_002_backend_POT: + metric_value: 0.67472 + metric_value_fp32: 0.67826 + +timm/resnest14d_backend_OLD_TORCH: + metric_value: 0.7418 + metric_value_fp32: 0.74862 +timm/resnest14d_backend_TORCH: + metric_value: 0.74176 + metric_value_fp32: 0.74862 +timm/resnest14d_backend_ONNX: + metric_value: 0.74316 + metric_value_fp32: 0.74862 +timm/resnest14d_backend_OV: + metric_value: 0.74358 + metric_value_fp32: 0.74862 +timm/resnest14d_backend_POT: + metric_value: 0.74352 + metric_value_fp32: 0.74862 + +timm/resnet18_backend_OLD_TORCH: + metric_value: 0.69392 + metric_value_fp32: 0.70104 +timm/resnet18_backend_TORCH: + metric_value: 0.69748 + metric_value_fp32: 0.70104 +timm/resnet18_backend_ONNX: + metric_value: 0.6979 + metric_value_fp32: 0.70104 +timm/resnet18_backend_OV: + metric_value: 0.69774 + metric_value_fp32: 0.70104 +timm/resnet18_backend_POT: + metric_value: 0.69792 + metric_value_fp32: 0.70104 + +timm/swin_base_patch4_window7_224_backend_OLD_TORCH: + metric_value: 0.79586 + metric_value_fp32: 0.81462 +timm/swin_base_patch4_window7_224_backend_TORCH: + metric_value: 0.78026 + metric_value_fp32: 0.81462 
+timm/swin_base_patch4_window7_224_backend_ONNX: + metric_value: 0.81294 + metric_value_fp32: 0.81462 +timm/swin_base_patch4_window7_224_backend_OV: + metric_value: 0.79582 + metric_value_fp32: 0.81462 +timm/swin_base_patch4_window7_224_backend_POT: + metric_value: 0.79514 + metric_value_fp32: 0.81462 + +timm/tf_inception_v3_backend_OLD_TORCH: + metric_value: 0.7723 + metric_value_fp32: 0.77728 +timm/tf_inception_v3_backend_TORCH: + metric_value: 0.77542 + metric_value_fp32: 0.77728 +timm/tf_inception_v3_backend_ONNX: + metric_value: 0.77748 + metric_value_fp32: 0.77728 +timm/tf_inception_v3_backend_OV: + metric_value: 0.77736 + metric_value_fp32: 0.77728 +timm/tf_inception_v3_backend_POT: + metric_value: 0.77792 + metric_value_fp32: 0.77728 + +timm/vgg11_backend_OLD_TORCH: + metric_value: 0.68296 + metric_value_fp32: 0.68344 +timm/vgg11_backend_TORCH: + metric_value: 0.6809 + metric_value_fp32: 0.68344 +timm/vgg11_backend_ONNX: + metric_value: 0.681 + metric_value_fp32: 0.68344 +timm/vgg11_backend_OV: + metric_value: 0.6809 + metric_value_fp32: 0.68344 +timm/vgg11_backend_POT: + metric_value: 0.6809 + metric_value_fp32: 0.68344 + +timm/visformer_small_backend_TORCH: + metric_value: 0.77446 + metric_value_fp32: 0.77902 +timm/visformer_small_backend_ONNX: + metric_value: 0.77432 + metric_value_fp32: 0.77902 +timm/visformer_small_backend_OV: + metric_value: 0.77686 + metric_value_fp32: 0.77902 +timm/visformer_small_backend_POT: + metric_value: 0.77736 + metric_value_fp32: 0.77902 + +timm/wide_resnet50_2_backend_OLD_TORCH: + metric_value: 0.80588 + metric_value_fp32: 0.81414 +timm/wide_resnet50_2_backend_TORCH: + metric_value: 0.81186 + metric_value_fp32: 0.81414 +timm/wide_resnet50_2_backend_ONNX: + metric_value: 0.81214 + metric_value_fp32: 0.81414 +timm/wide_resnet50_2_backend_OV: + metric_value: 0.81136 + metric_value_fp32: 0.81414 +timm/wide_resnet50_2_backend_POT: + metric_value: 0.8114 + metric_value_fp32: 0.81414 diff --git a/tests/post_training/requirements.txt b/tests/post_training/requirements.txt index 2a8e1a5f165..76b7358a019 100644 --- a/tests/post_training/requirements.txt +++ b/tests/post_training/requirements.txt @@ -1,10 +1,20 @@ --extra-index-url https://download.pytorch.org/whl/cpu -torch +torch==2.0.1 --extra-index-url https://download.pytorch.org/whl/cpu -torchvision -onnx -onnxruntime +torchvision==0.15.2 +transformers==4.30.0 +onnx==1.13.1 +onnxruntime==1.14.1 pytest -openvino-dev==2023.0.0 -timm==0.6.13 -scikit-learn +pytest-cov +openvino-dev==2023.0.1 +optimum[onnxruntime,openvino]==1.8.8 +optimum-intel @ git+https://github.com/huggingface/optimum-intel@47428484598986443686b7c8ba825b4a3d7b4f73 +soundfile==0.12.1 +librosa==0.10.0 +memory-profiler==0.61.0 +pandas==1.3.5 +tensorboard==2.13.0 +tensorflow-io==0.32.0 +timm==0.9.2 +scikit-learn==1.2.2 diff --git a/tests/post_training/test_quantize_conformance.py b/tests/post_training/test_quantize_conformance.py index 50dd8ad9a0f..2f4620a2718 100644 --- a/tests/post_training/test_quantize_conformance.py +++ b/tests/post_training/test_quantize_conformance.py @@ -1,715 +1,110 @@ -""" - Copyright (c) 2023 Intel Corporation - Licensed under the Apache License, Version 2.0 (the 'License'); - you may not use this file except in compliance with the License. 
- You may obtain a copy of the License at - http://www.apache.org/licenses/LICENSE-2.0 - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an 'AS IS' BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. -""" -import copy -import logging -import os -import re +# Copyright (c) 2023 Intel Corporation +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +import time import traceback -from multiprocessing import Pipe -from multiprocessing import Process -from multiprocessing.connection import Connection from pathlib import Path -from pathlib import PosixPath -from typing import Any, Dict, List, Optional, Tuple -import numpy as np -import onnx -import openvino.runtime as ov import pytest -import timm -import torch -from sklearn.metrics import accuracy_score -from torch import nn -from torch.utils.data.dataloader import DataLoader -from torchvision import datasets -from torchvision import transforms -from torchvision.transforms import InterpolationMode +import yaml -import nncf -from nncf.experimental.torch.quantization.quantize_model import quantize_impl as pt_impl_experimental -from nncf.openvino.quantization.quantize_model import quantize_impl as ov_quantize_impl -from nncf.torch.nncf_network import NNCFNetwork -from tests.post_training.conftest import PipelineType -from tests.post_training.conftest import RunInfo -from tests.post_training.model_scope import VALIDATION_SCOPE -from tests.post_training.model_scope import get_cached_metric -from tests.shared.command import Command +from tests.post_training.model_scope import TEST_CASES +from tests.post_training.pipelines.base import BackendType +from tests.post_training.pipelines.base import RunInfo -DEFAULT_VAL_THREADS = 4 - -def create_timm_model(name: str) -> nn.Module: - """ - Create timm model by name for ImageNet dataset. - - :param name: Name of model. - - :return: Instance of the timm model. - """ - model = timm.create_model(name, num_classes=1000, in_chans=3, pretrained=True, checkpoint_path="") - return model - - -def get_model_transform(model: nn.Module) -> transforms.Compose: - """ - Generate transformations for model. - - :param model: The model. - - :return: Transformations for the model. 
- """ - config = model.default_cfg - transformations_list = [] - normalize = transforms.Normalize(mean=config["mean"], std=config["std"]) - input_size = config["input_size"] - - RESIZE_MODE_MAP = { - "bilinear": InterpolationMode.BILINEAR, - "bicubic": InterpolationMode.BICUBIC, - "nearest": InterpolationMode.NEAREST, - } - - if "fixed_input_size" in config and not config["fixed_input_size"]: - resize_size = tuple(int(x / config["crop_pct"]) for x in input_size[-2:]) - resize = transforms.Resize(resize_size, interpolation=RESIZE_MODE_MAP[config["interpolation"]]) - transformations_list.append(resize) - transformations_list.extend([transforms.CenterCrop(input_size[-2:]), transforms.ToTensor(), normalize]) - - transform = transforms.Compose(transformations_list) - - return transform - - -def get_torch_dataloader(folder: str, transform: transforms.Compose, batch_size: int = 1) -> DataLoader: - """ - Return DataLoader for datasets. - - :param folder: Path to dataset folder. - :param transform: Transformations for datasets. - :param batch_size: The batch size, defaults to 1. - - :return torch.utils.data.DataLoader: Instance of DataLoader. - """ - val_dataset = datasets.ImageFolder(root=folder, transform=transform) - val_loader = torch.utils.data.DataLoader(val_dataset, batch_size=batch_size, num_workers=2, shuffle=False) - return val_loader - - -def export_to_onnx(model: nn.Module, save_path: str, data_sample: torch.Tensor) -> None: - """ - Export Torch model to ONNX format. - - :param model: The target model. - :param save_path: Path to save ONNX model. - :param data_sample: Data sample for dummy forward. - """ - torch.onnx.export( - model, - data_sample, - save_path, - export_params=True, - opset_version=13, - do_constant_folding=False, - ) - - -def export_to_ir(model_path: str, save_path: str, model_name: str) -> None: - """ - Export ONNX model to OpenVINO format. - - :param model_path: Path to ONNX model. - :param save_path: Path directory to save OpenVINO IR model. - :param model_name: Model name. - """ - runner = Command(f"mo -m {model_path} -o {save_path} -n {model_name}") - runner.run() - - -def run_benchmark(model_path: str) -> Tuple[Optional[float], str]: - """ - Run benchmark_app to collect performance statistics. - - :param model_path: Path to the OpenVINO IR model. - - :return: - - FPS for successful run, otherwise None. - - Output of benchmark_app. - """ - runner = Command(f"benchmark_app -m {model_path} -d CPU -niter 300") - runner.run() - cmd_output = " ".join(runner.output) - - match = re.search(r"Throughput\: (.+?) FPS", cmd_output) - if match is not None: - fps = match.group(1) - return float(fps), cmd_output - - return None, cmd_output - - -def benchmark_performance(model_path: str, model_name: str, skip_bench: bool) -> Optional[float]: - """ - Receives the OpenVINO IR model and runs benchmark tool for it - - :param model_path: Path to the OpenVINO IR model. - :param model_name: Model name. - :param skip_bench: Boolean flag to skip or run benchmark. - - :return: FPS for successful run of benchmark_app, otherwise None. - """ - if skip_bench: - return None - - try: - model_perf, bench_output = run_benchmark(model_path) - - if model_perf is None: - logging.info(f"Cannot measure performance for the model: {model_name}\nDetails: {bench_output}\n") - except BaseException as error: - logging.error(f"Error when benchmarking the model: {model_name}. 
Details: {error}") - - return model_perf - - -def validate_accuracy(model_path: str, val_loader: DataLoader) -> float: - """ - VAlidate the OpenVINO IR models on validation dataset. - - :param model_path: Path to the OpenVINO IR models. - :param val_loader: Validation dataloader. - - :return float: Accuracy score. - """ - dataset_size = len(val_loader) - predictions = [0] * dataset_size - references = [-1] * dataset_size - - core = ov.Core() - - if os.environ.get("CPU_THREADS_NUM"): - # Set CPU_THREADS_NUM for OpenVINO inference - cpu_threads_num = os.environ.get("CPU_THREADS_NUM") - core.set_property("CPU", properties={"CPU_THREADS_NUM": str(cpu_threads_num)}) - - ov_model = core.read_model(model_path) - compiled_model = core.compile_model(ov_model) - - jobs = int(os.environ.get("NUM_VAL_THREADS", DEFAULT_VAL_THREADS)) - infer_queue = ov.AsyncInferQueue(compiled_model, jobs) - - def process_result(request, userdata): - output_data = request.get_output_tensor().data - predicted_label = np.argmax(output_data, axis=1) - predictions[userdata] = [predicted_label] - - infer_queue.set_callback(process_result) - - for i, (images, target) in enumerate(val_loader): - # W/A for memory leaks when using torch DataLoader and OpenVINO - image_copies = copy.deepcopy(images.numpy()) - infer_queue.start_async(image_copies, userdata=i) - references[i] = target - - infer_queue.wait_all() - predictions = np.concatenate(predictions, axis=0) - references = np.concatenate(references, axis=0) - return accuracy_score(predictions, references) - - -def benchmark_torch_model( - model: nn.Module, - dataloader: DataLoader, - model_name: str, - output_path: str, - skip_bench: bool = False, - eval: bool = True, -) -> RunInfo: - """ - Benchmark the torch model. - - :param model: The Torch Model. - :param dataloader: Validation dataloader. - :param model_name: Model name. - :param output_path: Path to save ONNX and OpenVINO IR models. - :param eval: Boolean flag to run validation, defaults to True. - :param skip_bench: Boolean flag to skip or run benchmark, defaults to False. - - :return RunInfo: Accuracy and performance metrics. - """ - data_sample, _ = next(iter(dataloader)) - # Dump model - onnx_path = Path(output_path) / (model_name + ".onnx") - export_to_onnx(model, onnx_path, data_sample) - ov_path = Path(output_path) / (model_name + ".xml") - export_to_ir(onnx_path, output_path, model_name) - - # Benchmark performance - performance = benchmark_performance(ov_path, model_name, skip_bench) - - # Validate accuracy - accuracy = None - if eval: - accuracy = validate_accuracy(ov_path, dataloader) - - return RunInfo(top_1=accuracy, fps=performance) - - -def benchmark_onnx_model( - model: onnx.ModelProto, - dataloader: DataLoader, - model_name: str, - output_path: str, - skip_bench: bool, -) -> RunInfo: - """ - Benchmark the ONNX model. - - :param model: The ONNX model. - :param dataloader: Validation dataloader. - :param model_name: Model name. - :param output_path: Path to save ONNX and OpenVINO IR models. - :param skip_bench: Boolean flag to skip or run benchmark. - - :return RunInfo: Accuracy and performance metrics. 
- """ - # Dump model - onnx_path = Path(output_path) / (model_name + ".onnx") - onnx.save(model, onnx_path) - ov_path = Path(output_path) / (model_name + ".xml") - export_to_ir(onnx_path, output_path, model_name) - - # Benchmark performance - performance = benchmark_performance(ov_path, model_name, skip_bench) - # Validate accuracy - accuracy = validate_accuracy(ov_path, dataloader) - return RunInfo(top_1=accuracy, fps=performance) - - -def benchmark_ov_model( - model: ov.Model, - dataloader: DataLoader, - model_name: str, - output_path: str, - skip_bench: bool, -) -> RunInfo: - """ - Benchmark the OpenVINO model. - - :param model: The OpenVINO model. - :param dataloader: Validation dataloader. - :param model_name: Model name. - :param output_path: Path to save ONNX and OpenVINO IR models. - :param skip_bench: Boolean flag to skip or run benchmark. - - :return RunInfo: Accuracy and performance metrics. - """ - # Dump model - ov_path = Path(output_path) / (model_name + ".xml") - ov.serialize(model, str(ov_path)) - - # Benchmark performance - performance = benchmark_performance(ov_path, model_name, skip_bench) - # Validate accuracy - accuracy = validate_accuracy(ov_path, dataloader) - return RunInfo(top_1=accuracy, fps=performance) - - -@pytest.fixture(scope="session") -def data(pytestconfig): +@pytest.fixture(scope="session", name="data") +def fixture_data(pytestconfig): return pytestconfig.getoption("data") -@pytest.fixture(scope="session") -def output(pytestconfig): +@pytest.fixture(scope="session", name="output") +def fixture_output(pytestconfig): return pytestconfig.getoption("output") -@pytest.fixture(scope="session") -def result(pytestconfig): +@pytest.fixture(scope="session", name="result") +def fixture_result(pytestconfig): return pytestconfig.test_results -def quantize_ov_native( - model: ov.Model, - calibration_dataset: nncf.Dataset, - preset: nncf.QuantizationPreset = nncf.QuantizationPreset.PERFORMANCE, - target_device: nncf.TargetDevice = nncf.TargetDevice.ANY, - subset_size: int = 300, - fast_bias_correction: bool = True, - model_type: Optional[nncf.ModelType] = None, - ignored_scope: Optional[nncf.IgnoredScope] = None, -) -> ov.Model: - """ - Quantize the OpenVINO model by OPENVINO_NATIVE backend. - """ - quantized_model = ov_quantize_impl( - model, - calibration_dataset, - preset=preset, - target_device=target_device, - subset_size=subset_size, - fast_bias_correction=fast_bias_correction, - model_type=model_type, - ignored_scope=ignored_scope, - ) - return quantized_model - - -def quantize_torch_ptq( - model: torch.nn.Module, - calibration_dataset: nncf.Dataset, - preset: nncf.QuantizationPreset = nncf.QuantizationPreset.PERFORMANCE, - target_device: nncf.TargetDevice = nncf.TargetDevice.ANY, - subset_size: int = 300, - fast_bias_correction: bool = True, - model_type: Optional[nncf.ModelType] = None, - ignored_scope: Optional[nncf.IgnoredScope] = None, -) -> NNCFNetwork: - """ - Quantize the Torch model by TORCH_PTQ backend. - """ - quantized_model = pt_impl_experimental( - model, - calibration_dataset, - preset, - target_device, - subset_size, - fast_bias_correction, - model_type, - ignored_scope, - ) - return quantized_model - - -def torch_runner( - model: nn.Module, - calibration_dataset: nncf.Dataset, - model_quantization_params: Dict[str, Any], - output_folder: str, - model_name: str, - batch_one_dataloader: DataLoader, - skip_bench: bool, -) -> RunInfo: - """ - Run quantization of the Torch model by TORCH backend. 
- """ - torch_quantized_model = nncf.quantize(model, calibration_dataset, **model_quantization_params) - # benchmark quantized torch model - torch_output_path = output_folder / "torch" - torch_output_path.mkdir(parents=True, exist_ok=True) - q_torch_model_name = model_name + "_torch_int8" - run_info = benchmark_torch_model( - torch_quantized_model, - batch_one_dataloader, - q_torch_model_name, - torch_output_path, - skip_bench, - ) - return run_info - - -def torch_ptq_runner( - model: nn.Module, - calibration_dataset: nncf.Dataset, - model_quantization_params: Dict[str, Any], - output_folder: str, - model_name: str, - batch_one_dataloader: DataLoader, - skip_bench: bool, -) -> RunInfo: - """ - Run quantization of the Torch model by TORCH_PTQ backend. - """ - - def transform_fn(data_item): - images, _ = data_item - return images - - calibration_dataset = nncf.Dataset(batch_one_dataloader, transform_fn) - - torch_quantized_model = quantize_torch_ptq(model, calibration_dataset, **model_quantization_params) - # benchmark quantized torch model - torch_output_path = output_folder / "torch_ptq" - torch_output_path.mkdir(parents=True, exist_ok=True) - q_torch_model_name = model_name + "_torch_ptq_int8" - run_info = benchmark_torch_model( - torch_quantized_model, - batch_one_dataloader, - q_torch_model_name, - torch_output_path, - skip_bench, - ) - return run_info - - -def onnx_runner( - model: nn.Module, - calibration_dataset: nncf.Dataset, - model_quantization_params: Dict[str, Any], - output_folder: str, - model_name: str, - batch_one_dataloader: DataLoader, - skip_bench: bool, -): - """ - Run quantization of the ONNX model by ONNX backend. - """ - onnx_model_path = output_folder / (model_name + ".onnx") - onnx_model = onnx.load(onnx_model_path) - onnx_input_name = onnx_model.graph.input[0].name - - def onnx_transform_fn(data_item): - images, _ = data_item - return {onnx_input_name: images.numpy()} - - onnx_calibration_dataset = nncf.Dataset(batch_one_dataloader, onnx_transform_fn) - - onnx_quantized_model = nncf.quantize(onnx_model, onnx_calibration_dataset, **model_quantization_params) - - onnx_output_path = output_folder / "onnx" - onnx_output_path.mkdir(parents=True, exist_ok=True) - q_onnx_model_name = model_name + "_onnx_int8" - run_info = benchmark_onnx_model( - onnx_quantized_model, - batch_one_dataloader, - q_onnx_model_name, - onnx_output_path, - skip_bench, - ) - return run_info +def read_reference_data(): + path_reference = Path(__file__).parent / "reference_data.yaml" + with path_reference.open() as f: + data = yaml.safe_load(f) + return data -def ov_native_runner( - model: nn.Module, - calibration_dataset: nncf.Dataset, - model_quantization_params: Dict[str, Any], - output_folder: str, - model_name: str, - batch_one_dataloader: DataLoader, - skip_bench: bool, -): - """ - Run quantization of the OpenVINO model by OV_NATIVE backend. 
- """ - ov_native_model_path = output_folder / (model_name + ".xml") - core = ov.Core() - ov_native_model = core.read_model(ov_native_model_path) +REFERENCE_DATA = read_reference_data() - input_names = set(inp.get_any_name() for inp in ov_native_model.inputs) - if len(ov_native_model.inputs) != 1: - RuntimeError("Number of inputs != 1") - def ov_native_transform_fn(data_item): - images, _ = data_item - return {next(iter(input_names)): images.numpy()} +@pytest.mark.parametrize("test_case_name", TEST_CASES.keys()) +def test_ptq_quantization(test_case_name, data, output, result): + pipeline = None + err_msg = None + test_model_param = None + start_time = time.perf_counter() - ov_native_calibration_dataset = nncf.Dataset(batch_one_dataloader, ov_native_transform_fn) - - ov_native_quantized_model = quantize_ov_native( - ov_native_model, ov_native_calibration_dataset, **model_quantization_params - ) - - ov_native_output_path = output_folder / "openvino_native" - ov_native_output_path.mkdir(parents=True, exist_ok=True) - q_ov_native_model_name = model_name + "_openvino_native_int8" - run_info = benchmark_ov_model( - ov_native_quantized_model, - batch_one_dataloader, - q_ov_native_model_name, - ov_native_output_path, - skip_bench, - ) - return run_info - - -def ov_runner( - model: nn.Module, - calibration_dataset: nncf.Dataset, - model_quantization_params: Dict[str, Any], - output_folder: str, - model_name: str, - batch_one_dataloader: DataLoader, - skip_bench: bool, -) -> RunInfo: - """ - Run quantization of the OpenVINO model by OV backend. - """ - - def ov_transform_fn(data_item): - images, _ = data_item - return images.numpy() - - ov_calibration_dataset = nncf.Dataset(batch_one_dataloader, ov_transform_fn) - ov_model_path = output_folder / (model_name + ".xml") - core = ov.Core() - ov_model = core.read_model(ov_model_path) - ov_quantized_model = nncf.quantize(ov_model, ov_calibration_dataset, **model_quantization_params) - - ov_output_path = output_folder / "openvino" - ov_output_path.mkdir(parents=True, exist_ok=True) - q_ov_model_name = model_name + "_openvino_int8" - run_info = benchmark_ov_model( - ov_quantized_model, - batch_one_dataloader, - q_ov_model_name, - ov_output_path, - skip_bench, - ) - return run_info - - -RUNNERS = { - PipelineType.TORCH: torch_runner, - PipelineType.TORCH_PTQ: torch_ptq_runner, - PipelineType.ONNX: onnx_runner, - PipelineType.OV_NATIVE: ov_native_runner, - PipelineType.OV: ov_runner, -} - - -def run_ptq_timm( - data: str, - output: str, - timm_model_name: str, - backends: List[PipelineType], - model_quantization_params: Dict[str, Any], - process_connection: Connection, - report_model_name: str, - eval_fp32: bool, - skip_bench: bool, -) -> None: # pylint: disable=W0703 - """ - Run test for the target model on selected backends. - - :param data: Path to dataset folder. - :param output: Output directory to save tested models. - :param timm_model_name: Name of model from timm module. - :param backends: List of backends. - :param model_quantization_params: Quantization parameters. - :param process_connection: Connection to send results to main process. - :param report_model_name: Reported name of the model. - :param eval_fp32: Boolean flag to validate fp32. - :param skip_bench: Boolean flag to skip or run benchmark. 
- """ - torch.multiprocessing.set_sharing_strategy("file_system") # W/A to avoid RuntimeError - - runinfos = {} try: - output_folder = Path(output) - output_folder.mkdir(parents=True, exist_ok=True) - - model = create_timm_model(timm_model_name) - model.eval().cpu() - transform = get_model_transform(model) - - model_name = report_model_name - - batch_one_dataloader = get_torch_dataloader(data, transform, batch_size=1) - # benchmark original models (once) - runinfos[PipelineType.FP32] = benchmark_torch_model( - model, - batch_one_dataloader, - model_name, - output_folder, - skip_bench, - eval_fp32, - ) - # Get cached accuracy - if not eval_fp32: - runinfos[PipelineType.FP32].top_1 = get_cached_metric(report_model_name, "FP32 top 1") - - val_dataloader = get_torch_dataloader(data, transform, batch_size=128) - - def transform_fn(data_item): - images, _ = data_item - return images - - calibration_dataset = nncf.Dataset(val_dataloader, transform_fn) - - for backend in backends: - runner = RUNNERS[backend] - try: - runinfo = runner( - model, - calibration_dataset, - model_quantization_params, - output_folder, - model_name, - batch_one_dataloader, - skip_bench, - ) - except Exception as error: - backend_dir = backend.value.replace(" ", "_") - traceback_path = Path.joinpath(output_folder, backend_dir, model_name + "_error_log.txt") - create_error_log(traceback_path) - status = get_error_msg(traceback_path, backend_dir) - runinfo = RunInfo(None, None, status) - runinfos[backend] = runinfo - - process_connection.send(runinfos) - except Exception as error: - traceback_path = Path.joinpath(output_folder, model_name + "_error_log.txt") - create_error_log(traceback_path) - status = f"{model_name} traceback: {traceback_path}" - runinfos[PipelineType.FP32] = RunInfo(None, None, status) - process_connection.send(runinfos) - raise error - - -def create_error_log(traceback_path: PosixPath) -> None: - """ - Create file with error log. - """ - traceback_path.parents[0].mkdir(parents=True, exist_ok=True) - with open(traceback_path, "w") as file: - traceback.print_exc(file=file) - file.close() - logging.error(traceback.format_exc()) - - -def get_error_msg(traceback_path: PosixPath, backend_name: str) -> str: - """ - Generate error message. - """ - return f"{backend_name} traceback: {traceback_path}" - - -@pytest.mark.parametrize("report_model_name,", VALIDATION_SCOPE.keys()) -def test_ptq_timm(data, output, result, report_model_name, backends_list, eval_fp32, skip_bench): - """ - Test quantization of classification models from timm module by different backends. 
- """ - model_args = VALIDATION_SCOPE[report_model_name] - backends = [PipelineType[backend] for backend in backends_list.split(",")] - model_name = model_args["model_name"] - quantization_params = model_args["quantization_params"] - main_connection, process_connection = Pipe() - process = Process( - target=run_ptq_timm, - args=( - data, - output, - model_name, - backends, - quantization_params, - process_connection, - report_model_name, - eval_fp32, - skip_bench, - ), - ) - process.start() - process.join() - result[report_model_name] = main_connection.recv() - if process.exitcode: - assert False + if test_case_name not in REFERENCE_DATA: + raise RuntimeError(f"{test_case_name} does not exist in 'reference_data.yaml'") + + test_model_param = TEST_CASES[test_case_name] + pipeline_cls = test_model_param["pipeline_cls"] + + print("\n") + print(f"Model: {test_model_param['reported_name']}") + print(f"Backend: {test_model_param['backend']}") + print(f"PTQ params: {test_model_param['ptq_params']}") + + pipeline_kwargs = { + "reported_name": test_model_param["reported_name"], + "model_id": test_model_param["model_id"], + "backend": test_model_param["backend"], + "ptq_params": test_model_param["ptq_params"], + "params": test_model_param.get("params"), + "output_dir": output, + "data_dir": data, + "reference_data": REFERENCE_DATA[test_case_name], + } + + pipeline = pipeline_cls(**pipeline_kwargs) + pipeline.run() + # pylint:disable=broad-except + except Exception as e: + err_msg = str(e) + traceback.print_exc() + + if pipeline is not None: + run_info = pipeline.get_run_info() + if err_msg: + run_info.status = f"{run_info.status} | {err_msg}" if run_info.status else err_msg + else: + if test_model_param is not None: + run_info = RunInfo( + model=test_model_param["reported_name"], + backend=test_model_param["backend"], + status=err_msg, + ) + else: + splitted = test_case_name.split("_backend_") + run_info = RunInfo( + model=splitted[0], + backend=BackendType[splitted[1]], + status=err_msg, + ) + + run_info.time_total = time.perf_counter() - start_time + result[test_case_name] = run_info.get_result_dict() + + if err_msg: + pytest.fail(err_msg) diff --git a/tests/post_training/test_templates/helpers.py b/tests/post_training/test_templates/helpers.py new file mode 100644 index 00000000000..8f60f61ee9f --- /dev/null +++ b/tests/post_training/test_templates/helpers.py @@ -0,0 +1,311 @@ +# Copyright (c) 2023 Intel Corporation +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from typing import Callable, Tuple, TypeVar + +import numpy as np +import torch +from torch import nn +from torch.nn import functional as F + +from nncf import Dataset +from tests.torch.helpers import create_bn +from tests.torch.helpers import create_conv +from tests.torch.helpers import set_torch_seed + +TTensor = TypeVar("TTensor") + + +class StaticDatasetMock: + """ + Common dataset that generate same data and can used for any backend by set fn_to_type function + to convert data to backend specific type. 
+ """ + + def __init__(self, input_size: Tuple, fn_to_type: Callable = None): + super().__init__() + self._len = 1 + self._input_size = input_size + self._fn_to_type = fn_to_type + + def __getitem__(self, _) -> Tuple[TTensor, int]: + np.random.seed(0) + data = np.random.rand(*tuple(self._input_size)).astype(np.float32) + if self._fn_to_type: + data = self._fn_to_type(data) + return data, 0 + + def __len__(self) -> int: + return self._len + + +def get_static_dataset( + input_size: Tuple, + transform_fn: Callable, + fn_to_type: Callable, +) -> Dataset: + """ + Create nncf.Dataset for StaticDatasetMock. + :param input_size: Size of generated tensors, + :param transform_fn: Function to transformation dataset. + :param fn_to_type: Function, defaults to None. + :return: Instance of nncf.Dataset for StaticDatasetMock. + """ + return Dataset(StaticDatasetMock(input_size, fn_to_type), transform_fn) + + +class ConvTestModel(nn.Module): + INPUT_SIZE = [1, 1, 4, 4] + + def __init__(self): + super().__init__() + self.conv = create_conv(1, 2, 2, -1, -2) + self.conv.weight.data = torch.Tensor([[[[0.1, -2.0], [1.0, 0.1]]], [[[0.1, 2.0], [-1.0, 0.1]]]]) + self.conv.bias.data = torch.Tensor([0.1, 1.0]) + + def forward(self, x): + return self.conv(x) + + +class ConvBNTestModel(nn.Module): + INPUT_SIZE = [1, 1, 4, 4] + + def __init__(self): + super().__init__() + self.conv = create_conv(1, 2, 2, bias=False) + self.conv.weight.data = torch.Tensor([[[[0.1, -2.0], [1.0, 0.1]]], [[[0.1, 2.0], [-1.0, 0.1]]]]) + self.bn = create_bn(2) + self.bn.bias.data = torch.Tensor([0.1, 1.0]) + self.bn.weight.data = torch.Tensor([0.2, 2.0]) + + def forward(self, x): + x = self.conv(x) + x = self.bn(x) + return x + + +class FCTestModel(nn.Module): + INPUT_SIZE = [1, 1, 4, 4] + + def __init__(self): + super().__init__() + self.fc = nn.Linear(4, 2) + self.fc.weight.data = torch.Tensor([[0.1, 0.2, 0.3, 0.2], [0.3, -0.1, 0.2, 0.4]]) + self.fc.bias.data = torch.Tensor([1.0, 1.1]) + + def forward(self, x): + x = self.fc(x) + return x + + +class MultipleConvTestModel(nn.Module): + INPUT_SIZE = [1, 1, 4, 4] + + def __init__(self): + super().__init__() + with set_torch_seed(): + self.conv_1 = self._build_conv(1, 2, 2) + self.conv_2 = self._build_conv(2, 3, 2) + self.conv_3 = self._build_conv(1, 2, 3) + self.conv_4 = self._build_conv(2, 3, 1) + self.conv_5 = self._build_conv(3, 2, 2) + + def _build_conv(self, in_channels=1, out_channels=2, kernel_size=2): + conv = create_conv(in_channels, out_channels, kernel_size) + conv.weight.data = torch.randn([out_channels, in_channels, kernel_size, kernel_size]) + conv.bias.data = torch.randn([out_channels]) + return conv + + def forward(self, x): + x_1 = self.conv_1(x) + x_1 = self.conv_2(F.relu(x_1)) + x_2 = self.conv_3(x) + x_2 = self.conv_4(F.relu(x_2)) + x_1_2 = torch.concat([x_1, x_2]) + return self.conv_5(F.relu(x_1_2)) + + +class LinearMultiShapeModel(nn.Module): + INPUT_SIZE = [1, 3, 4, 2] + + def __init__(self) -> None: + super().__init__() + with set_torch_seed(): + self.matmul_1_data = torch.randn((4, 4), dtype=torch.float32) + self.matmul_2_data = torch.randn((4, 4), dtype=torch.float32) + self.matmul_3_data = torch.randn((1, 8, 2), dtype=torch.float32) + self.matmul_4_data = torch.randn((1, 8, 3), dtype=torch.float32) + self.matmul_5_data = torch.randn((1), dtype=torch.float32) + self.matmul_6_data = torch.randn((8), dtype=torch.float32) + + self.linear_1 = nn.Linear(2, 8) + self.linear_1.weight.data = torch.randn((8, 2), dtype=torch.float32) + self.linear_1.bias.data = 
torch.randn((1, 8), dtype=torch.float32) + + self.linear_2 = nn.Linear(2, 8) + self.linear_2.weight.data = torch.randn((8, 2), dtype=torch.float32) + self.linear_2.bias.data = torch.randn((1, 8), dtype=torch.float32) + + self.matmul_7_data = torch.randn((6, 6), dtype=torch.float32) + self.matmul_8_data = torch.randn((10, 6), dtype=torch.float32) + + def forward(self, x): + x = torch.reshape(x, (1, 3, 2, 4)) + + x_1 = torch.matmul(x, self.matmul_1_data) + x_2 = torch.matmul(x, self.matmul_2_data) + + x = torch.add(x_1, x_2) + x_1 = torch.reshape(x, (1, 3, 8)) + + x_1_1 = torch.matmul(x_1, self.matmul_3_data) + x_1_1 = torch.reshape(x_1_1, (1, 6)) + x_1_1 = torch.matmul(self.matmul_5_data, x_1_1) + + x_1_2 = torch.matmul(self.matmul_4_data, x_1) + x_1_2 = torch.max(x_1_2, 1).values + x_1_2 = torch.matmul(x_1_2, self.matmul_6_data) + + x_2, x_3 = torch.split(x, 2, 3) + x_2 = self.linear_1(x_2) + x_2 = torch.min(x_2, -1).values + x_2 = torch.flatten(x_2) + x_2 = torch.matmul(x_2, self.matmul_7_data) + x_3 = self.linear_2(x_3) + x_3 = torch.mean(x_3, -1) + x_3 = torch.flatten(x_3) + x_3 = torch.matmul(self.matmul_8_data, x_3) + return x_1_1, x_1_2, x_2, x_3 + + +class NonZeroLinearModel(nn.Module): + INPUT_SIZE = [10] + + def forward(self, x): + zeros = (x > torch.inf).float() + empty = torch.nonzero(zeros).reshape((-1, 1, 1)).float() + y = torch.matmul(empty, torch.ones((1, 5))) + y += 5 + y = torch.cat((torch.ones((1, 10)), y.reshape(1, -1)), dim=1) + y = torch.matmul(y, torch.ones(10, 10)) + y += 5 + return y + + +class SplittedModel(nn.Module): + INPUT_SIZE = [1, 3, 28, 28] + + def __init__(self) -> None: + super().__init__() + with set_torch_seed(): + self.conv_1 = self._build_conv(3, 12, 3) + self.add_1_data = torch.randn((1, 12, 26, 26), dtype=torch.float32) + self.maxpool_1 = torch.nn.MaxPool2d(1) + + self.conv_2 = self._build_conv(12, 18, 1) + self.conv_3 = self._build_conv(18, 12, 1) + + self.conv_4 = self._build_conv(6, 12, 1) + self.conv_5 = self._build_conv(12, 18, 3) + self.add_2_data = torch.randn((1, 18, 24, 24), dtype=torch.float32) + self.conv_6 = self._build_conv(6, 18, 3) + + self.conv_7 = self._build_conv(36, 48, 1) + self.add_3_data = torch.randn((1, 36, 24, 24), dtype=torch.float32) + self.conv_8 = self._build_conv(48, 24, 3) + self.conv_9 = self._build_conv(36, 24, 3) + self.conv_10 = self._build_conv(36, 24, 3) + + self.conv_11 = self._build_conv(72, 48, 5) + self.matmul_1_data = torch.randn((96, 48), dtype=torch.float32) + self.add_4_data = torch.randn((1, 1, 324), dtype=torch.float32) + + self.linear = nn.Linear(324, 48) + self.linear.weight.data = torch.randn((48, 324), dtype=torch.float32) + self.linear.bias.data = torch.randn((1, 48), dtype=torch.float32) + + self.add_5_data = torch.randn((1, 1, 324), dtype=torch.float32) + self.conv_12 = self._build_conv(96, 18, 3) + + self.linear_2 = nn.Linear(48, 10) + self.linear_2.weight.data = torch.randn((10, 48), dtype=torch.float32) + self.linear_2.bias.data = torch.randn((1, 10), dtype=torch.float32) + + def _build_conv(self, in_channels=1, out_channels=2, kernel_size=2): + conv = create_conv(in_channels, out_channels, kernel_size) + conv.weight.data = torch.randn([out_channels, in_channels, kernel_size, kernel_size]) + conv.bias.data = torch.randn([out_channels]) + return conv + + def forward(self, x): + x = self.conv_1(x) + x = F.relu(x) + x = torch.add(x, self.add_1_data) + x = self.maxpool_1(x) + + x_1 = self.conv_2(x) + x_1 = F.relu(x_1) + x_1 = self.conv_3(x_1) + x = torch.add(x, x_1) + + x_1, x_2 = torch.split(x, 
6, 1) + x_1 = self.conv_4(x_1) + x_1 = F.relu(x_1) + x_1 = self.conv_5(x_1) + x_1 = torch.add(x_1, self.add_2_data) + + x_2 = self.conv_6(x_2) + + x = torch.concat([x_1, x_2], 1) + x_1 = self.conv_7(x) + x_1 = self.conv_8(x_1) + + x_2 = torch.add(x, self.add_3_data) + x_2 = self.conv_9(x_2) + + x_3 = self.conv_10(x) + + x = torch.concat([x_1, x_2, x_3], 1) + x = self.conv_11(x) + x = torch.reshape(x, [1, 48, 324]) + x = torch.matmul(self.matmul_1_data, x) + x = torch.add(x, self.add_4_data) + + x_1 = self.linear(x) + x_2 = torch.reshape(x, [1, 96, 18, 18]) + x_2 = self.conv_12(x_2) + x_2 = torch.reshape(x_2, [1, 96, 48]) + + x = torch.add(x_1, x_2) + x = self.linear_2(x) + + return torch.flatten(x, 1, 2) + + +class EmbeddingModel(nn.Module): + INPUT_SIZE = [1, 10] + EMBEDDING_SHAPE = [10, 20] + MATMUL_W_SHAPE = [5, 20] + + def __init__(self) -> None: + super().__init__() + with set_torch_seed(): + self.embedding = nn.Embedding(self.EMBEDDING_SHAPE[0], self.EMBEDDING_SHAPE[1]) + self.embedding.weight.data = torch.randn(self.EMBEDDING_SHAPE, dtype=torch.float32) + self.matmul = nn.Linear(self.EMBEDDING_SHAPE[1], self.MATMUL_W_SHAPE[1]) + self.matmul.weight.data = torch.randn(self.MATMUL_W_SHAPE, dtype=torch.float32) + self.matmul.bias.data = torch.randn([1, self.MATMUL_W_SHAPE[0]], dtype=torch.float32) + + def forward(self, x): + x = x.type(torch.int32) + x = self.embedding(x) + x = self.matmul(x) + return x diff --git a/tests/post_training/test_templates/models.py b/tests/post_training/test_templates/models.py new file mode 100644 index 00000000000..546a4104318 --- /dev/null +++ b/tests/post_training/test_templates/models.py @@ -0,0 +1,220 @@ +# Copyright (c) 2023 Intel Corporation +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
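For orientation, the helpers defined above in tests/post_training/test_templates/helpers.py are consumed by the template tests added later in this patch. Below is a minimal usage sketch, assuming the Torch backend and default quantization parameters; it is illustrative only and not part of the patch.

# Illustrative sketch, not part of the patch: how get_static_dataset and one of
# the helper models are meant to be combined for a post-training quantization run.
# Assumes the Torch backend (nncf.quantize accepts a torch.nn.Module here).
import torch

import nncf
from tests.post_training.test_templates.helpers import ConvTestModel
from tests.post_training.test_templates.helpers import get_static_dataset


def transform_fn(data_item):
    # StaticDatasetMock yields (data, target) pairs; only the data is fed to the model.
    images, _ = data_item
    return images


model = ConvTestModel()
# fn_to_type converts the generated numpy array into the backend-specific tensor type.
calibration_dataset = get_static_dataset(ConvTestModel.INPUT_SIZE, transform_fn, torch.Tensor)
quantized_model = nncf.quantize(model, calibration_dataset, subset_size=1)
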
+ +from nncf.common.graph import NNCFGraph +from nncf.common.graph.operator_metatypes import InputNoopMetatype +from nncf.common.graph.operator_metatypes import OutputNoopMetatype +from tests.common.quantization.metatypes import ConstantTestMetatype +from tests.common.quantization.mock_graphs import NodeWithType +from tests.common.quantization.test_filter_constant_nodes import create_mock_graph +from tests.common.quantization.test_filter_constant_nodes import get_nncf_graph_from_mock_nx_graph + + +# pylint: disable=protected-access +class NNCFGraphToTest: + def __init__( + self, + conv_metatype, + conv_layer_attrs=None, + nncf_graph_cls=NNCFGraph, + input_layer_attrs=None, + output_layer_attrs=None, + ): + # Original graph + # Input_1 + # | + # Conv_1 + # | + # Output_1 + nodes = [ + NodeWithType("Input_1", InputNoopMetatype, layer_attributes=input_layer_attrs), + NodeWithType("Conv_1", conv_metatype, layer_attributes=conv_layer_attrs), + NodeWithType("Output_1", OutputNoopMetatype, layer_attributes=output_layer_attrs), + ] + node_edges = [("Input_1", "Conv_1"), ("Conv_1", "Output_1")] + original_mock_graph = create_mock_graph(nodes, node_edges) + self.nncf_graph = get_nncf_graph_from_mock_nx_graph(original_mock_graph, nncf_graph_cls) + + +class NNCFGraphToTestDepthwiseConv: + def __init__( + self, + depthwise_conv_metatype, + conv_layer_attrs=None, + input_layer_attrs=None, + output_layer_attrs=None, + ): + # Original graph + # Input_1 + # | + # DepthwiseConv_1 + # | + # Output_1 + nodes = [ + NodeWithType("Input_1", InputNoopMetatype, layer_attributes=input_layer_attrs), + NodeWithType("Conv_1", depthwise_conv_metatype, layer_attributes=conv_layer_attrs), + NodeWithType("Output_1", OutputNoopMetatype, layer_attributes=output_layer_attrs), + ] + node_edges = [("Input_1", "Conv_1"), ("Conv_1", "Output_1")] + original_mock_graph = create_mock_graph(nodes, node_edges) + self.nncf_graph = get_nncf_graph_from_mock_nx_graph(original_mock_graph) + + +class NNCFGraphToTestSumAggregation: + def __init__( + self, + conv_metatype, + sum_metatype, + conv_layer_attrs=None, + nncf_graph_cls=NNCFGraph, + sum_layer_attrs=None, + input_layer_attrs=None, + output_layer_attrs=None, + ): + # Original graph + # Input_1 + # | + # Conv_1 + # | + # Sum_1 + # | + # Output_1 + nodes = [ + NodeWithType("Input_1", InputNoopMetatype, layer_attributes=input_layer_attrs), + NodeWithType("Conv_1", conv_metatype, layer_attributes=conv_layer_attrs), + NodeWithType("Sum_1", sum_metatype, layer_attributes=sum_layer_attrs), + NodeWithType("Output_1", OutputNoopMetatype, layer_attributes=output_layer_attrs), + ] + node_edges = [("Input_1", "Conv_1"), ("Conv_1", "Sum_1"), ("Sum_1", "Output_1")] + original_mock_graph = create_mock_graph(nodes, node_edges) + self.nncf_graph = get_nncf_graph_from_mock_nx_graph(original_mock_graph, nncf_graph_cls) + # Hack output size of the Sum_1 operation + self.nncf_graph._nx_graph.out_edges[("2 /Sum_1_0", "3 /Output_1_0")][ + self.nncf_graph.ACTIVATION_SHAPE_EDGE_ATTR + ] = [1, 1, 1] + + +class NNCFGraphToTestMatMul: + def __init__( + self, + matmul_metatype, + matmul_layer_attrs=None, + nncf_graph_cls=NNCFGraph, + input_layer_attrs=None, + output_layer_attrs=None, + ): + # Original graphs + # Input_1 + # | + # MatMul_1 + # | + # Output_1 + nodes = [ + NodeWithType("Input_1", InputNoopMetatype, layer_attributes=input_layer_attrs), + NodeWithType("MatMul_1", matmul_metatype, layer_attributes=matmul_layer_attrs), + NodeWithType("Output_1", OutputNoopMetatype, 
layer_attributes=output_layer_attrs), + ] + node_edges = [("Input_1", "MatMul_1"), ("MatMul_1", "Output_1")] + original_mock_graph = create_mock_graph(nodes, node_edges) + self.nncf_graph = get_nncf_graph_from_mock_nx_graph(original_mock_graph, nncf_graph_cls) + + +class NNCFGraphCA: + def __init__( + self, + conv_metatype, + conv_layer_attrs=None, + conv_2_layer_attrs=None, + use_one_layer_attrs=True, + nncf_graph_cls=NNCFGraph, + ): + # Original graph + # Input_1 + # | + # Conv_1 + # | + # Conv_2 + # | + # Output_1 + if use_one_layer_attrs and not conv_layer_attrs is None and conv_2_layer_attrs is None: + conv_2_layer_attrs = conv_layer_attrs + nodes = [ + NodeWithType("Input_1", InputNoopMetatype), + NodeWithType("Conv_1_W", ConstantTestMetatype), + NodeWithType("Conv_1", conv_metatype, layer_attributes=conv_layer_attrs), + NodeWithType("Conv_2_W", ConstantTestMetatype), + NodeWithType("Conv_2", conv_metatype, layer_attributes=conv_2_layer_attrs), + NodeWithType("Output_1", OutputNoopMetatype), + ] + node_edges = [ + ("Input_1", "Conv_1"), + ("Conv_1", "Conv_2"), + ("Conv_2", "Output_1"), + ("Conv_1_W", "Conv_1"), + ("Conv_2_W", "Conv_2"), + ] + original_mock_graph = create_mock_graph(nodes, node_edges) + self.nncf_graph = get_nncf_graph_from_mock_nx_graph(original_mock_graph, nncf_graph_cls) + + +class NNCFGraphCAWithBias: + def __init__( + self, + conv_metatype, + add_metatype, + conv_layer_attrs=None, + both_biases=True, + add_layer_attrs=None, + constant_metatype=ConstantTestMetatype, + nncf_graph_cls=NNCFGraph, + ): + # Original graph + # Input_1 + # | + # Conv_1 + # | + # Add_1 + # | + # Conv_2 + # | + # Add_2 + # Output_1 + nodes = [ + NodeWithType("Input_1", InputNoopMetatype), + NodeWithType("Conv_1_W", constant_metatype), + NodeWithType("Conv_1", conv_metatype, layer_attributes=conv_layer_attrs), + NodeWithType("Add_1_W", constant_metatype), + NodeWithType("Add_1", add_metatype, layer_attributes=add_layer_attrs), + NodeWithType("Conv_2_W", constant_metatype), + NodeWithType("Conv_2", conv_metatype, layer_attributes=conv_layer_attrs), + NodeWithType("Output_1", OutputNoopMetatype), + ] + if both_biases: + nodes.extend( + [ + NodeWithType("Add_2_W", constant_metatype), + NodeWithType("Add_2", add_metatype, layer_attributes=add_layer_attrs), + ] + ) + node_edges = [ + ("Input_1", "Conv_1"), + ("Conv_1", "Add_1"), + ("Add_1", "Conv_2"), + ("Conv_1_W", "Conv_1"), + ("Add_1_W", "Add_1"), + ("Conv_2_W", "Conv_2"), + ] + if both_biases: + node_edges.extend([("Conv_2", "Add_2"), ("Add_2", "Output_1"), ("Add_2_W", "Add_2")]) + else: + node_edges.extend([("Conv_2", "Output_1")]) + original_mock_graph = create_mock_graph(nodes, node_edges) + self.nncf_graph = get_nncf_graph_from_mock_nx_graph(original_mock_graph, nncf_graph_cls) diff --git a/tests/post_training/test_templates/test_bias_correction.py b/tests/post_training/test_templates/test_bias_correction.py new file mode 100644 index 00000000000..68c72301707 --- /dev/null +++ b/tests/post_training/test_templates/test_bias_correction.py @@ -0,0 +1,181 @@ +# Copyright (c) 2023 Intel Corporation +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +from abc import abstractmethod +from typing import Dict, List, Tuple, TypeVar + +import pytest + +from nncf.common.factory import NNCFGraphFactory +from nncf.data import Dataset +from nncf.quantization.advanced_parameters import AdvancedQuantizationParameters +from nncf.quantization.advanced_parameters import OverflowFix +from nncf.quantization.algorithms.bias_correction.algorithm import BiasCorrection +from nncf.quantization.algorithms.bias_correction.backend import BiasCorrectionAlgoBackend +from nncf.quantization.algorithms.post_training.algorithm import PostTrainingQuantization +from tests.post_training.test_templates.helpers import ConvTestModel +from tests.post_training.test_templates.helpers import MultipleConvTestModel +from tests.post_training.test_templates.helpers import SplittedModel +from tests.post_training.test_templates.helpers import StaticDatasetMock + +TModel = TypeVar("TModel") +TTensor = TypeVar("TTensor") + + +# pylint: disable=protected-access +class TemplateTestBCAlgorithm: + @staticmethod + @abstractmethod + def list_to_backend_type(data: List) -> TTensor: + """ + Convert list to backend specific type + + :param data: List of data. + + :return: Converted data. + """ + + @staticmethod + @abstractmethod + def get_backend() -> BiasCorrectionAlgoBackend: + """ + Get backend specific BiasCorrectionAlgoBackend + + :return BiasCorrectionAlgoBackend: Backend specific BiasCorrectionAlgoBackend + """ + + @staticmethod + def fn_to_type(tensor) -> TTensor: + return tensor + + @staticmethod + @abstractmethod + def get_transform_fn() -> callable: + """ + Get transformation function for dataset. + """ + + def get_dataset(self, input_size: Tuple) -> StaticDatasetMock: + """ + Return backend specific random dataset. + + :param model: The model for which the dataset is being created. + """ + return StaticDatasetMock(input_size, self.fn_to_type) + + @staticmethod + @abstractmethod + def backend_specific_model(model: TModel, tmp_dir: str) -> TModel: + """ + Return backend specific model. + """ + + @staticmethod + @abstractmethod + def check_bias(model: TModel, ref_biases: Dict) -> None: + """ + Checks biases values. + """ + + @staticmethod + def map_references(ref_biases: Dict) -> Dict[str, List]: + """ + Returns backend-specific reference. + """ + return ref_biases + + @staticmethod + def get_quantization_algorithm(disable_bias_correction=False) -> PostTrainingQuantization: + return PostTrainingQuantization( + subset_size=1, + fast_bias_correction=False, + advanced_parameters=AdvancedQuantizationParameters( + overflow_fix=OverflowFix.DISABLE, disable_bias_correction=disable_bias_correction + ), + ) + + @staticmethod + def get_bias_correction_algorithm() -> BiasCorrection: + return BiasCorrection(subset_size=1) + + @staticmethod + @abstractmethod + def remove_fq_from_inputs(model: TModel) -> TModel: + """ + Removes quantizer nodes from inputs. + """ + + @staticmethod + @abstractmethod + def get_ref_path(suffix: str) -> str: + """ + Returns backend-specific reference graph paths. + """ + + @staticmethod + @abstractmethod + def compare_nncf_graphs(model: TModel, ref_path: str) -> None: + """ + Compares backend-specific model with reference graph. 
+ """ + + @pytest.fixture() + def quantized_test_model(self, tmpdir) -> TModel: + model_cls = SplittedModel + model = self.backend_specific_model(model_cls(), tmpdir) + dataset = Dataset(self.get_dataset(model_cls.INPUT_SIZE), self.get_transform_fn()) + + quantization_algorithm = self.get_quantization_algorithm(disable_bias_correction=True) + graph = NNCFGraphFactory.create(model) + quantized_model = quantization_algorithm.apply(model, graph, dataset=dataset) + modified_model = self.remove_fq_from_inputs(quantized_model) + return modified_model + + @pytest.mark.parametrize( + "model_cls, ref_biases", + ( + ( + MultipleConvTestModel, + { + "/conv_1/Conv": [0.6658976, -0.70563036], + "/conv_2/Conv": [-0.307696, -0.42806846, 0.44965455], + "/conv_3/Conv": [-0.0033792169, 1.0661412], + "/conv_4/Conv": [-0.6941606, 0.9958957, 0.6081058], + # Disabled latest layer due to backends differences + # "/conv_5/Conv": [0.07476559, -0.75797373], + }, + ), + (ConvTestModel, {"/conv/Conv": [0.11085186, 1.0017344]}), + ), + ) + def test_update_bias(self, model_cls, ref_biases, tmpdir): + model = self.backend_specific_model(model_cls(), tmpdir) + dataset = Dataset(self.get_dataset(model_cls.INPUT_SIZE), self.get_transform_fn()) + + quantization_algorithm = self.get_quantization_algorithm() + graph = NNCFGraphFactory.create(model) + quantized_model = quantization_algorithm.apply(model, graph, dataset=dataset) + + mapped_ref_biases = self.map_references(ref_biases) + self.check_bias(quantized_model, mapped_ref_biases) + + def test__get_subgraph_data_for_node(self, quantized_test_model, layer_name, ref_data): + nncf_graph = NNCFGraphFactory.create(quantized_test_model) + + bc_algo = self.get_bias_correction_algorithm() + bc_algo._set_backend_entity(quantized_test_model) + + node = nncf_graph.get_node_by_name(layer_name) + bc_algo._collected_stat_inputs_map.update(ref_data["collected_inputs"]) + subgraph_data = bc_algo._get_subgraph_data_for_node(node, nncf_graph) + ref_subgraph_data = ref_data["subgraph_data"] + + assert subgraph_data == ref_subgraph_data diff --git a/tests/post_training/test_calculate_quantizer_parameters.py b/tests/post_training/test_templates/test_calculate_quantizer_parameters.py similarity index 100% rename from tests/post_training/test_calculate_quantizer_parameters.py rename to tests/post_training/test_templates/test_calculate_quantizer_parameters.py diff --git a/tests/post_training/test_templates/test_channel_alignment.py b/tests/post_training/test_templates/test_channel_alignment.py new file mode 100644 index 00000000000..2b063e7d90f --- /dev/null +++ b/tests/post_training/test_templates/test_channel_alignment.py @@ -0,0 +1,502 @@ +# Copyright (c) 2023 Intel Corporation +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from abc import abstractmethod +from typing import Type + +import numpy as np +import pytest + +from nncf.common.graph.graph import NNCFGraph +from nncf.common.graph.layer_attributes import ConvolutionLayerAttributes +from nncf.common.graph.model_transformer import ModelTransformer +from nncf.common.graph.transformations.commands import TargetType +from nncf.common.graph.transformations.commands import TransformationType +from nncf.common.tensor_statistics.statistic_point import StatisticPoint +from nncf.common.tensor_statistics.statistic_point import StatisticPointsContainer +from nncf.common.tensor_statistics.statistics import MinMaxTensorStatistic +from nncf.experimental.common.tensor_statistics.collectors import MedianAggregator +from nncf.experimental.common.tensor_statistics.collectors import QuantileReducer +from nncf.experimental.common.tensor_statistics.collectors import TensorCollector +from nncf.quantization.algorithms.channel_alignment.algorithm import ChannelAlignment +from nncf.quantization.algorithms.channel_alignment.backend import ChannelAlignmentAlgoBackend +from nncf.quantization.algorithms.channel_alignment.backend import LayoutDescriptor +from tests.post_training.test_templates.models import NNCFGraphCA +from tests.post_training.test_templates.models import NNCFGraphCAWithBias + +# pylint: disable=protected-access + +EPS = 1e-3 + +VALID_CONV_LAYER_ATTR = ConvolutionLayerAttributes( + weight_requires_grad=False, + in_channels=5, + out_channels=5, + kernel_size=(5, 5), + stride=(1, 1), + dilations=(1, 1), + groups=1, + transpose=False, + padding_values=(0, 0, 0, 0), +) + + +INVALID_CONSUMER_CONV_LAYER_ATTRS = [ + ConvolutionLayerAttributes( + weight_requires_grad=False, + in_channels=5, + out_channels=5, + kernel_size=(5, 5), + stride=(2, 1), + dilations=(1, 1), + groups=1, + transpose=False, + padding_values=(0, 0, 0, 0), + ), + ConvolutionLayerAttributes( + weight_requires_grad=False, + in_channels=5, + out_channels=5, + kernel_size=(5, 5), + stride=(1, 1), + dilations=(2, 1), + groups=1, + transpose=False, + padding_values=(0, 0, 0, 0), + ), + ConvolutionLayerAttributes( + weight_requires_grad=False, + in_channels=5, + out_channels=5, + kernel_size=(5, 5), + stride=(1, 1), + dilations=(2, 1), + groups=1, + transpose=False, + padding_values=(0, 0, 0, 0), + ), + ConvolutionLayerAttributes( + weight_requires_grad=False, + in_channels=5, + out_channels=5, + kernel_size=(5, 5), + stride=(1, 1), + dilations=(1, 1), + groups=1, + transpose=False, + padding_values=(1, 0, 0, 0), + ), +] + + +INVALID_CONV_LAYER_ATTR = ConvolutionLayerAttributes( + weight_requires_grad=False, + in_channels=5, + out_channels=5, + kernel_size=(5, 5), + stride=(1, 1), + dilations=(1, 1), + groups=5, + transpose=False, + padding_values=(0, 0, 0, 0), +) + + +class TemplateTestChannelAlignment: + @abstractmethod + def get_backend_cls(self) -> Type[ChannelAlignmentAlgoBackend]: + pass + + @abstractmethod + def target_point(self, target_type: TargetType, target_node_name: str, port_id: int): + pass + + @abstractmethod + def convert_conv_layer_attrs(self, layer_attributes): + pass + + @abstractmethod + def get_conv_metatype(self): + pass + + @abstractmethod + def get_add_metatype(self): + pass + + @abstractmethod + def get_add_layer_attrs(self): + pass + + @abstractmethod + def get_constant_metatype(self): + pass + + @abstractmethod + def get_transformation_commands(self): + pass + + @abstractmethod + def mock_command_creation_factory(self, mocker) -> None: + pass + + def mock_nncf_graph_factory(self, 
mocker, nncf_graph: NNCFGraph) -> None: + mocker.patch("nncf.common.factory.NNCFGraphFactory.create", return_value=nncf_graph) + + def mock_model_transformer_factory(self, mocker, model_transformer: ModelTransformer) -> None: + mocker.patch("nncf.common.factory.ModelTransformerFactory.create", return_value=model_transformer) + + @pytest.mark.parametrize( + "conv_out_value,refs", + [ + (np.arange(12).reshape(4, 3), ([-8, -16, -24], [83, 265, 449, 631])), + (np.arange(24).reshape(4, 3, 2, 1), ([-8, -16, -24], [383, 1105, 1829, 2551])), + ], + ) + @pytest.mark.parametrize("transposed", [False, True]) + def test_align_means(self, conv_out_value, refs, transposed): + amean = np.array([10, 20, 30]) + dims_descriptor = LayoutDescriptor(0, 1, 1) + if transposed: + if conv_out_value.ndim == 2: + conv_out_value = np.transpose(conv_out_value, (1, 0)) + dims_descriptor = LayoutDescriptor(1, 0, 1) + else: + conv_out_value = np.transpose(conv_out_value, (3, 1, 2, 0)) + dims_descriptor = LayoutDescriptor(3, 1, 1) + bias_in_value = np.array([2, 4, 6]) + bias_out_value = np.array([3, 5, 9, 11]) + updated_add_in_vals, updated_add_out_vals = ChannelAlignment._align_means( + bias_in_value, bias_out_value, conv_out_value, amean, dims_descriptor + ) + assert np.allclose(updated_add_in_vals, np.array(refs[0])) + assert np.allclose(updated_add_out_vals, np.array(refs[1])) + + REF_UPDATED_CONV_IN = np.array([[0], [1], [200], [0.03], [4]]) + REF_UPDATED_CONV_OUT = np.array([[0.0, 2.0, 0.04, 600, 8], [10, 12, 0.14, 1600, 18]]) + REF_UPDATED_BIAS_IN = np.array([2, 4, 600, 0.08, 10]) + + @pytest.mark.parametrize("bias_in_value", [np.array([2, 4, 6, 8, 10]), None]) + def test_align_scales(self, bias_in_value): + def check_updated_values(updated_conv_in, updated_conv_out, updated_bias_in): + assert updated_conv_in.shape == self.REF_UPDATED_CONV_IN.shape + assert np.allclose(updated_conv_in, self.REF_UPDATED_CONV_IN) + assert updated_conv_out.shape == self.REF_UPDATED_CONV_OUT.shape + assert np.allclose(updated_conv_out, self.REF_UPDATED_CONV_OUT) + if bias_in_value is None: + assert updated_bias_in is None + else: + assert updated_bias_in.shape == self.REF_UPDATED_BIAS_IN.shape + assert np.allclose(updated_bias_in, self.REF_UPDATED_BIAS_IN) + + conv_in_value = np.arange(5).reshape(5, 1) + conv_out_value = np.arange(10).reshape(2, 5) * 2 + ascale = np.array([-5.0, 0.0, 1e-3, 1e3, 2]) + eps = 1e-10 + # Check nothing will happen if dims are wrong + dims_descriptor = LayoutDescriptor(1, 0, 0) + updated_conv_in, updated_conv_out, updated_bias_in = ChannelAlignment._align_scales( + conv_in_value, conv_out_value, bias_in_value, ascale, dims_descriptor, dims_descriptor, eps + ) + assert updated_conv_in is conv_in_value + assert updated_conv_out is conv_out_value + assert updated_bias_in is bias_in_value + + dims_descriptor = LayoutDescriptor(0, 1, 0) + updated_conv_in, updated_conv_out, updated_bias_in = ChannelAlignment._align_scales( + conv_in_value, conv_out_value, bias_in_value, ascale, dims_descriptor, dims_descriptor, eps + ) + check_updated_values(updated_conv_in, updated_conv_out, updated_bias_in) + + # Check group conv producer case + conv_in_value = conv_in_value.reshape(1, 5, 1) + dims_descriptor_in = LayoutDescriptor(1, 2, 0) + dims_descriptor_out = LayoutDescriptor(0, 1, 0) + updated_conv_in, updated_conv_out, updated_bias_in = ChannelAlignment._align_scales( + conv_in_value, conv_out_value, bias_in_value, ascale, dims_descriptor_in, dims_descriptor_out, eps + ) + updated_conv_in = 
updated_conv_in.reshape(updated_conv_in.shape[1:]) + check_updated_values(updated_conv_in, updated_conv_out, updated_bias_in) + + GET_NODES_TEST_CASES = [] + GET_NODES_TEST_CASES = [(VALID_CONV_LAYER_ATTR, VALID_CONV_LAYER_ATTR, True)] + GET_NODES_TEST_CASES.extend([(attr, VALID_CONV_LAYER_ATTR, True) for attr in INVALID_CONSUMER_CONV_LAYER_ATTRS]) + GET_NODES_TEST_CASES.extend([(VALID_CONV_LAYER_ATTR, attr, False) for attr in INVALID_CONSUMER_CONV_LAYER_ATTRS]) + GET_NODES_TEST_CASES.extend( + [ + (INVALID_CONV_LAYER_ATTR, VALID_CONV_LAYER_ATTR, True), + (VALID_CONV_LAYER_ATTR, INVALID_CONV_LAYER_ATTR, False), + (INVALID_CONV_LAYER_ATTR, INVALID_CONV_LAYER_ATTR, False), + ] + ) + GET_NODES_TEST_CASES.extend( + [(VALID_CONV_LAYER_ATTR, None, False), (None, VALID_CONV_LAYER_ATTR, False), (None, None, False)] + ) + + @pytest.mark.parametrize("first_conv_attrs,second_conv_attrs,ref_match", GET_NODES_TEST_CASES) + def test_get_node_pairs(self, first_conv_attrs, second_conv_attrs, ref_match): + algorithm = ChannelAlignment() + algorithm._backend_entity = self.get_backend_cls() + if not first_conv_attrs is None: + first_conv_attrs = self.convert_conv_layer_attrs(first_conv_attrs) + if not second_conv_attrs is None: + second_conv_attrs = self.convert_conv_layer_attrs(second_conv_attrs) + nncf_graph = NNCFGraphCA( + self.get_conv_metatype(), + conv_layer_attrs=first_conv_attrs, + conv_2_layer_attrs=second_conv_attrs, + use_one_layer_attrs=False, + ) + pairs = algorithm._get_node_pairs(nncf_graph.nncf_graph) + if ref_match: + assert len(pairs) == 1 + conv_in, add_in, conv_out = pairs[0] + assert conv_in.node_name == "/Conv_1_0" + assert add_in is None + assert conv_out.node_name == "/Conv_2_0" + else: + assert len(pairs) == 0 + + def _get_nncf_graph(self, num_biases: int) -> NNCFGraph: + cla = self.convert_conv_layer_attrs(VALID_CONV_LAYER_ATTR) + if num_biases == 0: + return NNCFGraphCA(self.get_conv_metatype(), cla).nncf_graph + bla = self.get_add_layer_attrs() + if num_biases == 1: + return NNCFGraphCAWithBias( + self.get_conv_metatype(), + self.get_add_metatype(), + cla, + both_biases=False, + constant_metatype=self.get_constant_metatype(), + add_layer_attrs=bla, + ).nncf_graph + return NNCFGraphCAWithBias( + self.get_conv_metatype(), + self.get_add_metatype(), + cla, + both_biases=True, + add_layer_attrs=bla, + constant_metatype=self.get_constant_metatype(), + ).nncf_graph + + @pytest.mark.parametrize("empty_statistics", [False, True]) + @pytest.mark.parametrize("num_biases", [0, 1, 2]) + # pylint: disable=too-many-statements + # pylint: disable=too-many-branches + def test_transformation_layout(self, empty_statistics, num_biases, mocker): + mocked_transformer = mocker.MagicMock() + self.mock_model_transformer_factory(mocker, mocked_transformer) + + nncf_graph = self._get_nncf_graph(num_biases) + self.mock_nncf_graph_factory(mocker, nncf_graph) + + self.mock_command_creation_factory(mocker) + + statistic_points = StatisticPointsContainer() + target_node_name = "/Add_1_0" if num_biases else "/Conv_1_0" + target_node = nncf_graph.get_node_by_name(target_node_name) + backend_cls = self.get_backend_cls() + ref_input_port_id, _ = backend_cls.get_activation_port_ids_for_node(target_node) + target_point = self.target_point(TargetType.POST_LAYER_OPERATION, target_node_name, ref_input_port_id) + + class TestTensorStats(MinMaxTensorStatistic): + @staticmethod + def tensor_eq(*args, **kwargs): + return True + + def get_constant_lambda(value, counter=False): + if counter: + _state = 0 + + def f(*args, 
**kwargs): + if not counter: + return value + nonlocal _state + _state += 1 + return value + str(_state) + + return f + + algorithm = ChannelAlignment() + tensor_collector = TensorCollector() + if empty_statistics: + stat_value = None, None + else: + stat_value = (np.array([-1], dtype=np.int32), np.array([2], dtype=np.int32)) + + tensor_collector.get_statistics = get_constant_lambda(TestTensorStats(*stat_value)) + statistic_points.add_statistic_point(StatisticPoint(target_point, tensor_collector, algorithm._algorithm_key)) + + class MockBackend(backend_cls): + pass + + ref_weights_val = "ref_weights_val" + MockBackend.get_weight_value = get_constant_lambda(ref_weights_val, True) + ref_bias_val = "ref_bias_val" + MockBackend.get_bias_value = get_constant_lambda(ref_bias_val, True) + ref_dims_descr = "ref_dims_descr" + MockBackend.get_dims_descriptor = get_constant_lambda(ref_dims_descr, True) + + algorithm._backend_entity = MockBackend + algorithm._set_backend_entity = mocker.MagicMock() + ref_bias_in_after_align = "ref_bias_in_after_align" + ref_bias_out_after_align = "ref_bias_out_after_align" + algorithm._align_means = mocker.MagicMock(return_value=(ref_bias_in_after_align, ref_bias_out_after_align)) + ref_weights_in_after_scale_align = "ref_weights_in_after_scale_align" + ref_weights_out_after_scale_align = "ref_weights_in_after_scale_align " + ref_bias_in_after_scale_align = "ref_bias_in_after_scale_align" if num_biases > 1 else None + algorithm._align_scales = mocker.MagicMock( + return_value=( + ref_weights_in_after_scale_align, + ref_weights_out_after_scale_align, + ref_bias_in_after_scale_align, + ) + ) + algorithm.apply(None, nncf_graph, statistic_points) + + if empty_statistics: + assert algorithm._align_means.call_count == 0 + assert algorithm._align_scales.call_count == 0 + mocked_transformer.transform.assert_called_once() + arg = mocked_transformer.transform.call_args.args[0] + assert len(arg.transformations) == 0 + return + + align_means_called = 1 if num_biases == 2 else 0 + assert algorithm._align_means.call_count == align_means_called + if align_means_called: + algorithm._align_means.assert_called_once_with( + ref_bias_val + "1", + ref_bias_val + "2", + ref_weights_val + "2", + np.array(0.5, dtype=np.float32), + ref_dims_descr + "2", + ) + + assert algorithm._align_scales.call_count == 1 + args = algorithm._align_scales.call_args.args + assert args[0] == ref_weights_val + "1" + assert args[1] == ref_weights_val + "2" + if num_biases == 2: + assert args[2] == ref_bias_in_after_align + elif num_biases == 1: + assert args[2] == ref_bias_val + "1" + else: + assert args[2] is None + assert ((args[3] - 3) < EPS).all() + assert args[4] == ref_dims_descr + "1" + assert args[5] == ref_dims_descr + "2" + assert args[6] < EPS + + mocked_transformer.transform.assert_called_once() + arg = mocked_transformer.transform.call_args.args[0] + transformations = arg.transformations + + target_names = {"/Conv_1_0": [], "/Conv_2_0": []} + ref_values = { + "/Conv_1_0": { + "weight_value": ref_weights_in_after_scale_align, + "bias_value": ref_bias_in_after_scale_align, + }, + "/Conv_2_0": {"weight_value": ref_weights_out_after_scale_align, "bias_value": ref_bias_out_after_align}, + } + bias_update_cls, weights_update_cls = self.get_transformation_commands() + for transformation in transformations: + assert transformation.type == TransformationType.CHANGE + tp = transformation.target_point + if isinstance(transformation, bias_update_cls): + _class = bias_update_cls + _attr = "bias_value" + elif 
isinstance(transformation, weights_update_cls): + _class = weights_update_cls + _attr = "weight_value" + else: + raise RuntimeError(f"Wrong type of transformation: {type(transformation)}") + + target_names[tp.target_node_name].append(_class) + assert ref_values[tp.target_node_name][_attr] == getattr(transformation, _attr) + + if num_biases == 2: + ref_len = {"/Conv_1_0": 2, "/Conv_2_0": 2} + elif num_biases == 1: + ref_len = {"/Conv_1_0": 2, "/Conv_2_0": 1} + else: + ref_len = {"/Conv_1_0": 1, "/Conv_2_0": 1} + + for node_name, _transformations in target_names.items(): + _ref_len = ref_len[node_name] + assert len(_transformations) == _ref_len + assert weights_update_cls in _transformations + if _ref_len == 2: + assert bias_update_cls in _transformations + + @pytest.mark.parametrize("num_biases", [0, 1, 2]) + def test_get_statistic_points(self, num_biases, mocker): + nncf_graph = self._get_nncf_graph(num_biases) + self.mock_nncf_graph_factory(mocker, nncf_graph) + + ref_subset_size = "ref_subset_size" + ref_inplace = "ref_inplace" + algorithm = ChannelAlignment(ref_subset_size, ref_inplace) + algorithm._set_backend_entity = mocker.MagicMock() + backend_cls = self.get_backend_cls() + ref_stat_collector = "ref_stat_collector" + + class MockBackend(backend_cls): + pass + + MockBackend.get_statistic_collector = mocker.MagicMock(return_value=ref_stat_collector) + algorithm._backend_entity = MockBackend + + statistic_container = algorithm.get_statistic_points(None, nncf_graph) + + backend_cls = self.get_backend_cls() + target_node_name = "/Add_1_0" if num_biases else "/Conv_1_0" + target_node = nncf_graph.get_node_by_name(target_node_name) + ref_input_port_id, _ = backend_cls.get_activation_port_ids_for_node(target_node) + + assert len(statistic_container) == 1 + assert target_node_name in statistic_container + stat_points = statistic_container[target_node_name] + assert len(stat_points) == 1 + + assert len(stat_points[0].algorithm_to_tensor_collectors.keys()) == 1 + assert algorithm._algorithm_key in stat_points[0].algorithm_to_tensor_collectors + tensor_collectors = stat_points[0].algorithm_to_tensor_collectors[algorithm._algorithm_key] + assert len(tensor_collectors) == 1 + assert tensor_collectors[0] == ref_stat_collector + MockBackend.get_statistic_collector.assert_called_once_with((0, 2, 3), 1e-4, ref_subset_size, ref_inplace) + + target_point = stat_points[0].target_point + assert target_point.target_node_name == target_node_name + assert target_point.port_id == ref_input_port_id + assert target_point.type == TargetType.POST_LAYER_OPERATION + + @pytest.mark.parametrize("inplace_ref", [False, True]) + @pytest.mark.parametrize("q_ref", [1e-4, 0.3]) + def test_statistic_collectors(self, inplace_ref, q_ref): + reduction_shape_ref = (0, 2, 3) + num_samples_ref = 123 + statistic_collector: TensorCollector = self.get_backend_cls().get_statistic_collector( + reduction_shape=reduction_shape_ref, q=q_ref, num_samples=num_samples_ref, inplace=inplace_ref + ) + + assert len(statistic_collector.reducers) == 1 + reducer = statistic_collector.reducers.pop() + assert isinstance(reducer, QuantileReducer) + assert reducer._reduction_shape == reduction_shape_ref + assert np.allclose(reducer._quantile, (q_ref, 1 - q_ref)) + + assert len(statistic_collector.aggregators) == 2 + for aggr in statistic_collector.aggregators.values(): + assert isinstance(aggr, MedianAggregator) + assert aggr.num_samples == num_samples_ref + assert not aggr._use_per_sample_stats diff --git 
a/tests/post_training/test_templates/test_fast_bias_correction.py b/tests/post_training/test_templates/test_fast_bias_correction.py new file mode 100644 index 00000000000..b972ce851cd --- /dev/null +++ b/tests/post_training/test_templates/test_fast_bias_correction.py @@ -0,0 +1,121 @@ +# Copyright (c) 2023 Intel Corporation +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from abc import abstractmethod +from typing import List, TypeVar + +import pytest + +from nncf.common.factory import NNCFGraphFactory +from nncf.quantization.advanced_parameters import AdvancedQuantizationParameters +from nncf.quantization.advanced_parameters import OverflowFix +from nncf.quantization.algorithms.fast_bias_correction.algorithm import FastBiasCorrection +from nncf.quantization.algorithms.fast_bias_correction.backend import FastBiasCorrectionAlgoBackend +from nncf.quantization.algorithms.post_training.algorithm import PostTrainingQuantization +from tests.post_training.test_templates.helpers import ConvBNTestModel +from tests.post_training.test_templates.helpers import ConvTestModel +from tests.post_training.test_templates.helpers import get_static_dataset + +TModel = TypeVar("TModel") +TTensor = TypeVar("TTensor") + + +class TemplateTestFBCAlgorithm: + @staticmethod + @abstractmethod + def list_to_backend_type(data: List) -> TTensor: + """ + Convert list to backend specific type + + :param data: List of data. + + :return: Converted data. + """ + + @staticmethod + @abstractmethod + def get_backend() -> FastBiasCorrectionAlgoBackend: + """ + Get backend specific FastBiasCorrectionAlgoBackend + + :return FastBiasCorrectionAlgoBackend: Backend specific FastBiasCorrectionAlgoBackend + """ + + @pytest.mark.parametrize( + "bias_value, bias_shift, channel_axis, ref_shape", + ( + ([1, 1], [0.1, 0.1], 1, [2]), + ([[1, 1]], [0.1, 0.1], -1, [1, 2]), + ([[1, 1]], [0.1, 0.1], 1, [1, 2]), + ), + ) + def test_reshape_bias_shift(self, bias_value: list, bias_shift: list, channel_axis: int, ref_shape: list): + """ + Checks the result of the FastBiasCorrection.reshape_bias_shift method for backend specific datatype. + """ + bias_value = self.list_to_backend_type(data=bias_value) + bias_shift = self.list_to_backend_type(data=bias_shift) + + algo = FastBiasCorrection(subset_size=1, inplace_statistics=False) + # pylint: disable=protected-access + algo._backend_entity = self.get_backend() + new_bias_shift = algo.reshape_bias_shift(bias_shift, bias_value, channel_axis) + assert list(new_bias_shift.shape) == ref_shape + + @staticmethod + def fn_to_type(tensor): + return tensor + + @staticmethod + @abstractmethod + def get_transform_fn(): + """ + Get transformation function for dataset. + """ + + @staticmethod + @abstractmethod + def backend_specific_model(model: TModel, tmp_dir: str): + """ + Return backend specific model. + """ + + @staticmethod + @abstractmethod + def check_bias(model: TModel, ref_bias: list): + """ + Return backend specific model. 
+ """ + + @staticmethod + def get_quantization_algorithm(): + return PostTrainingQuantization( + subset_size=1, + fast_bias_correction=True, + advanced_parameters=AdvancedQuantizationParameters(overflow_fix=OverflowFix.DISABLE), + ) + + @pytest.mark.parametrize( + "model_cls, ref_bias", + ( + (ConvTestModel, [0.0288348, 1.0838453]), + (ConvBNTestModel, [0.08396978, 1.1676897]), + ), + ) + def test_update_bias(self, model_cls, ref_bias, tmpdir): + model = self.backend_specific_model(model_cls(), tmpdir) + dataset = get_static_dataset(model_cls.INPUT_SIZE, self.get_transform_fn(), self.fn_to_type) + + quantization_algorithm = self.get_quantization_algorithm() + graph = NNCFGraphFactory.create(model) + quantized_model = quantization_algorithm.apply(model, graph, dataset=dataset) + + self.check_bias(quantized_model, ref_bias) diff --git a/tests/post_training/test_ptq_params.py b/tests/post_training/test_templates/test_ptq_params.py similarity index 73% rename from tests/post_training/test_ptq_params.py rename to tests/post_training/test_templates/test_ptq_params.py index 2923b9cec38..a2ec340c2ff 100644 --- a/tests/post_training/test_ptq_params.py +++ b/tests/post_training/test_templates/test_ptq_params.py @@ -10,6 +10,7 @@ # limitations under the License. from abc import abstractmethod from collections import Counter +from copy import deepcopy from typing import Dict import pytest @@ -22,12 +23,17 @@ from nncf.common.quantization.structs import QuantizationPreset from nncf.common.quantization.structs import QuantizerConfig from nncf.common.quantization.structs import QuantizerGroup +from nncf.common.tensor_statistics.statistic_point import StatisticPoint +from nncf.common.tensor_statistics.statistic_point import StatisticPointsContainer +from nncf.common.tensor_statistics.statistics import MinMaxTensorStatistic from nncf.parameters import ModelType from nncf.quantization.advanced_parameters import AdvancedQuantizationParameters from nncf.quantization.advanced_parameters import OverflowFix from nncf.quantization.algorithms.min_max.algorithm import MinMaxQuantization from nncf.quantization.algorithms.post_training.algorithm import PostTrainingQuantization +from nncf.quantization.passes import transform_to_inference_graph from nncf.quantization.range_estimator import RangeEstimatorParametersSet +from nncf.scopes import IgnoredScope from tests.common.quantization.metatypes import Conv2dTestMetatype from tests.common.quantization.metatypes import IdentityTestMetatype from tests.common.quantization.metatypes import LinearTestMetatype @@ -135,12 +141,12 @@ def test_range_estimator_per_tensor(self, test_params, range_estimator_params): assert min_max_algo._range_estimator_params[QuantizerGroup.ACTIVATIONS] == range_estimator_params params = test_params["test_range_estimator_per_tensor"] - stat_points = min_max_algo.get_statistic_points(params["model"]) + stat_points = min_max_algo.get_statistic_points(params["model"], params["nncf_graph"]) assert len(stat_points) == params["stat_points_num"] for _, stat_point in stat_points.items(): for stat_point_ in stat_point: - for tensor_collector in stat_point_.algorithm_to_tensor_collectors[MinMaxQuantization]: + for tensor_collector in stat_point_.algorithm_to_tensor_collectors[min_max_algo._algorithm_key]: if stat_point_.target_point.is_weight_target_point(): # default tensor_collector for weights self.check_is_min_max_statistic_collector(tensor_collector) @@ -166,7 +172,12 @@ def test_quantize_outputs(self, test_params, quantize_outputs): assert 
min_max_algo._quantize_outputs == quantize_outputs hw_patterns = test_params["test_model_type_pass"]["hw_patterns"] ignored_patterns = test_params["test_model_type_pass"]["ignored_patterns"] - q_setup = min_max_algo._get_quantizer_setup(nncf_graph, hw_patterns, ignored_patterns) + inference_nncf_graph = transform_to_inference_graph( + deepcopy(nncf_graph), + min_max_algo._backend_entity.shapeof_metatypes, + min_max_algo._backend_entity.read_variable_metatypes, + ) + q_setup = min_max_algo._get_quantizer_setup(nncf_graph, inference_nncf_graph, hw_patterns, ignored_patterns) act_num_q, weight_num_q = 0, 0 for quantization_point in q_setup.quantization_points.values(): if quantization_point.is_activation_quantization_point(): @@ -186,7 +197,12 @@ def test_ignored_scopes(self, test_params, ignored_scopes_data): nncf_graph = test_params["test_ignored_scopes"]["nncf_graph"] hw_patterns = test_params["test_model_type_pass"]["hw_patterns"] ignored_patterns = test_params["test_model_type_pass"]["ignored_patterns"] - q_setup = min_max_algo._get_quantizer_setup(nncf_graph, hw_patterns, ignored_patterns) + inference_nncf_graph = transform_to_inference_graph( + deepcopy(nncf_graph), + min_max_algo._backend_entity.shapeof_metatypes, + min_max_algo._backend_entity.read_variable_metatypes, + ) + q_setup = min_max_algo._get_quantizer_setup(nncf_graph, inference_nncf_graph, hw_patterns, ignored_patterns) act_num_q, weight_num_q = 0, 0 for quantization_point in q_setup.quantization_points.values(): if quantization_point.is_activation_quantization_point(): @@ -206,19 +222,30 @@ def test_model_type_pass(self, test_params, model_type): nncf_graph = test_params["test_model_type_pass"]["nncf_graph"] hw_patterns = test_params["test_model_type_pass"]["hw_patterns"] ignored_patterns = test_params["test_model_type_pass"]["ignored_patterns"] - q_setup = min_max_algo._get_quantizer_setup(nncf_graph, hw_patterns, ignored_patterns) + inference_nncf_graph = transform_to_inference_graph( + deepcopy(nncf_graph), + min_max_algo._backend_entity.shapeof_metatypes, + min_max_algo._backend_entity.read_variable_metatypes, + ) + q_setup = min_max_algo._get_quantizer_setup(nncf_graph, inference_nncf_graph, hw_patterns, ignored_patterns) for quantization_point in q_setup.quantization_points.values(): if quantization_point.is_activation_quantization_point(): node_names = quantization_point.directly_quantized_operator_node_names for node_name in node_names: - if nncf_graph.get_node_by_name(node_name).metatype == min_max_algo._backend_entity.mat_mul_metatype: + if ( + nncf_graph.get_node_by_name(node_name).metatype + == min_max_algo._backend_entity.mat_mul_metatypes + ): assert quantization_point.qconfig.mode == QuantizationMode.ASYMMETRIC min_max_algo._apply_model_type_pass(model_type, q_setup, nncf_graph) for quantization_point in q_setup.quantization_points.values(): if quantization_point.is_activation_quantization_point(): node_names = quantization_point.directly_quantized_operator_node_names for node_name in node_names: - if nncf_graph.get_node_by_name(node_name).metatype == min_max_algo._backend_entity.mat_mul_metatype: + if ( + nncf_graph.get_node_by_name(node_name).metatype + == min_max_algo._backend_entity.mat_mul_metatypes + ): assert quantization_point.qconfig.mode == QuantizationMode.SYMMETRIC @pytest.mark.parametrize( @@ -234,8 +261,8 @@ def test_model_type_pass(self, test_params, model_type): ], ) def test_quantization_points_overflow_fix(self, overflow_fix, affected_target_points, ignored_ops): - # Checks the return 
value of _get_quantization_points_overflow_fix - # based on the overflow_fix and weight target points. + # Checks the return value of _get_quantization_points_overflow_fix based on + # the overflow_fix and weight target points. model = ModelToTestOverflowFix(self.metatypes_mapping) nncf_graph = model.nncf_graph @@ -247,9 +274,9 @@ def test_quantization_points_overflow_fix(self, overflow_fix, affected_target_po # Remove ignored nodes from weight_target_points filtered_weight_target_points = {} - for t_p in weight_target_points.keys(): + for t_p, config in weight_target_points.items(): if t_p.target_node_name not in ignored_ops: - filtered_weight_target_points[t_p] = weight_target_points[t_p] + filtered_weight_target_points[t_p] = config algo = MinMaxQuantization() algo._backend_entity = self.get_algo_backend() @@ -257,3 +284,52 @@ def test_quantization_points_overflow_fix(self, overflow_fix, affected_target_po overflow_fix, filtered_weight_target_points, nncf_graph ) assert Counter([t_p.target_node_name for t_p in target_points_overflow_fix]) == Counter(affected_target_points) + + @pytest.mark.parametrize("validate_scopes", (True, False)) + def test_validate_scope(self, test_params, validate_scopes): + nncf_graph = test_params["test_model_type_pass"]["nncf_graph"] + inference_nncf_graph = transform_to_inference_graph(deepcopy(nncf_graph), []) + ignored_patterns = test_params["test_model_type_pass"]["ignored_patterns"] + algo = MinMaxQuantization( + ignored_scope=IgnoredScope(names=["some_node"], validate=validate_scopes), + ) + algo._backend_entity = self.get_algo_backend() + if validate_scopes: + with pytest.raises(RuntimeError, match="Ignored nodes with name"): + algo._get_ignored_names(nncf_graph, inference_nncf_graph, ignored_patterns) + else: + algo._get_ignored_names(nncf_graph, inference_nncf_graph, ignored_patterns) + + @pytest.mark.parametrize("mode", ["target_point", "unified_scales"]) + def test_empty_statistics(self, mode, mocker): + algo = MinMaxQuantization() + target_point = self.target_point(TargetType.PRE_LAYER_OPERATION, "A", 0) + stat_points = StatisticPointsContainer() + + class DummyMinMaxTensorStatistic(MinMaxTensorStatistic): + def tensor_eq(self): + return True + + class EmptyTensorCollector: + def get_statistics(self): + return DummyMinMaxTensorStatistic(None, None) + + dummy_tp = {target_point: QuantizerConfig()} + if mode == "target_point": + dummy_tps = (dummy_tp, {}) + else: + dummy_tps = ({}, ((target_point,),)) + stat_points.add_statistic_point(StatisticPoint(target_point, EmptyTensorCollector(), algo._algorithm_key)) + mocker.patch("nncf.common.factory.ModelTransformerFactory.create", return_value=mocker.MagicMock()) + mocker.patch( + "nncf.quantization.algorithms.min_max.algorithm.MinMaxQuantization._get_quantization_target_points", + return_value=dummy_tps, + ) + mocker.patch( + "nncf.quantization.algorithms.min_max.algorithm.MinMaxQuantization._get_quantization_points_overflow_fix", + return_value=mocker.MagicMock(), + ) + with pytest.raises(RuntimeError) as exc_info: + algo.apply(None, None, stat_points) + + assert str(exc_info.value) == "Statistics were not collected for the node A" diff --git a/tests/post_training/test_quantizer_config.py b/tests/post_training/test_templates/test_quantizer_config.py similarity index 73% rename from tests/post_training/test_quantizer_config.py rename to tests/post_training/test_templates/test_quantizer_config.py index 20961bf3907..e614138d0a9 100644 --- a/tests/post_training/test_quantizer_config.py +++ 
b/tests/post_training/test_templates/test_quantizer_config.py @@ -10,6 +10,7 @@ # limitations under the License. from abc import abstractmethod +from copy import deepcopy from dataclasses import dataclass from typing import List @@ -31,10 +32,11 @@ from nncf.quantization.advanced_parameters import AdvancedQuantizationParameters from nncf.quantization.advanced_parameters import QuantizationParameters from nncf.quantization.algorithms.post_training.algorithm import PostTrainingQuantization +from nncf.quantization.passes import transform_to_inference_graph from nncf.quantization.range_estimator import RangeEstimatorParametersSet -from tests.post_training.models import NNCFGraphToTest -from tests.post_training.models import NNCFGraphToTestDepthwiseConv -from tests.post_training.models import NNCFGraphToTestSumAggregation +from tests.post_training.test_templates.models import NNCFGraphToTest +from tests.post_training.test_templates.models import NNCFGraphToTestDepthwiseConv +from tests.post_training.test_templates.models import NNCFGraphToTestSumAggregation # pylint: disable=protected-access,too-many-branches @@ -82,8 +84,14 @@ def test_default_quantizer_config(self, single_conv_nncf_graph): algo = PostTrainingQuantization() min_max_algo = algo.algorithms[0] min_max_algo._backend_entity = self.get_algo_backend() + nncf_graph = single_conv_nncf_graph.nncf_graph + inference_nncf_graph = transform_to_inference_graph( + deepcopy(nncf_graph), + min_max_algo._backend_entity.shapeof_metatypes, + min_max_algo._backend_entity.read_variable_metatypes, + ) q_setup = min_max_algo._get_quantizer_setup( - single_conv_nncf_graph.nncf_graph, hw_patterns=GraphPattern(), ignored_patterns=GraphPattern() + nncf_graph, inference_nncf_graph, hw_patterns=GraphPattern(), ignored_patterns=GraphPattern() ) weight_default_config = QuantizerConfig( @@ -106,10 +114,9 @@ def test_default_quantizer_config(self, single_conv_nncf_graph): @pytest.mark.parametrize("preset", [QuantizationPreset.MIXED, QuantizationPreset.PERFORMANCE]) @pytest.mark.parametrize("weight_bits", [8]) @pytest.mark.parametrize("activation_bits", [8]) - @pytest.mark.parametrize("signed_weights", [None]) - @pytest.mark.parametrize("signed_activations", [None]) - # TODO(kshpv): add signed_activations and signed_weights which should be independent from HW config. 
- def test_quantizer_config_from_ptq_params( + @pytest.mark.parametrize("signed_weights", [None, True, False]) + @pytest.mark.parametrize("signed_activations", [None, True, False]) + def test_quantizer_config_from_ptq_params_for_CPU( self, weight_per_channel, activation_per_channel, @@ -133,35 +140,56 @@ def test_quantizer_config_from_ptq_params( ) min_max_algo = algo.algorithms[0] min_max_algo._backend_entity = self.get_algo_backend() - q_setup = min_max_algo._get_quantizer_setup( - single_conv_nncf_graph.nncf_graph, hw_patterns=GraphPattern(), ignored_patterns=GraphPattern() + nncf_graph = single_conv_nncf_graph.nncf_graph + inference_nncf_graph = transform_to_inference_graph( + deepcopy(nncf_graph), + min_max_algo._backend_entity.shapeof_metatypes, + min_max_algo._backend_entity.read_variable_metatypes, ) - q_g_to_quantization_mode = {} - for q_g in QuantizerGroup: - q_g_to_quantization_mode[q_g] = preset.get_params_configured_by_preset(q_g)["mode"] + if signed_weights is False or signed_activations in [True, False]: # Incompatible with HW CPU config + with pytest.raises( + ValueError, + match=".*?Quantization parameter constraints specified in NNCF config are incompatible.*?", + ): + q_setup = min_max_algo._get_quantizer_setup( + nncf_graph, inference_nncf_graph, hw_patterns=GraphPattern(), ignored_patterns=GraphPattern() + ) + else: + q_setup = min_max_algo._get_quantizer_setup( + nncf_graph, inference_nncf_graph, hw_patterns=GraphPattern(), ignored_patterns=GraphPattern() + ) + q_g_to_quantization_mode = {} + for q_g in QuantizerGroup: + q_g_to_quantization_mode[q_g] = preset.get_params_configured_by_preset(q_g)["mode"] - assert len(q_setup.quantization_points) == 2 + assert len(q_setup.quantization_points) == 2 - for quantization_point in q_setup.quantization_points.values(): - if quantization_point.is_weight_quantization_point(): - assert quantization_point.qconfig.mode == q_g_to_quantization_mode[QuantizerGroup.WEIGHTS] - assert quantization_point.qconfig.per_channel == weight_per_channel - assert quantization_point.qconfig.num_bits == weight_bits - if signed_weights is not None: - assert quantization_point.qconfig.signedness_to_force == signed_weights - if quantization_point.is_activation_quantization_point(): - assert quantization_point.qconfig.per_channel == activation_per_channel - assert quantization_point.qconfig.num_bits == activation_bits - assert quantization_point.qconfig.mode == q_g_to_quantization_mode[QuantizerGroup.ACTIVATIONS] - if signed_activations is not None: - assert quantization_point.qconfig.signedness_to_force == signed_activations + for quantization_point in q_setup.quantization_points.values(): + if quantization_point.is_weight_quantization_point(): + assert quantization_point.qconfig.mode == q_g_to_quantization_mode[QuantizerGroup.WEIGHTS] + assert quantization_point.qconfig.per_channel == weight_per_channel + assert quantization_point.qconfig.num_bits == weight_bits + if signed_weights is not None: + assert quantization_point.qconfig.signedness_to_force == signed_weights + if quantization_point.is_activation_quantization_point(): + assert quantization_point.qconfig.per_channel == activation_per_channel + assert quantization_point.qconfig.num_bits == activation_bits + assert quantization_point.qconfig.mode == q_g_to_quantization_mode[QuantizerGroup.ACTIVATIONS] + if signed_activations is not None: + assert quantization_point.qconfig.signedness_to_force == signed_activations def test_depthwise_conv_default_quantizer_config(self, depthwise_conv_nncf_graph): 
algo = PostTrainingQuantization() min_max_algo = algo.algorithms[0] min_max_algo._backend_entity = self.get_algo_backend() + nncf_graph = depthwise_conv_nncf_graph.nncf_graph + inference_nncf_graph = transform_to_inference_graph( + deepcopy(nncf_graph), + min_max_algo._backend_entity.shapeof_metatypes, + min_max_algo._backend_entity.read_variable_metatypes, + ) q_setup = min_max_algo._get_quantizer_setup( - depthwise_conv_nncf_graph.nncf_graph, hw_patterns=GraphPattern(), ignored_patterns=GraphPattern() + nncf_graph, inference_nncf_graph, hw_patterns=GraphPattern(), ignored_patterns=GraphPattern() ) weight_default_config = QuantizerConfig( @@ -184,11 +212,13 @@ def test_depthwise_conv_default_quantizer_config(self, depthwise_conv_nncf_graph ) @pytest.mark.parametrize("q_config_mode", [QuantizationMode.SYMMETRIC, QuantizationMode.ASYMMETRIC]) @pytest.mark.parametrize("q_config_per_channel", [True, False]) + @pytest.mark.parametrize("num_samples", [5, 12]) def test_get_stat_collector( self, range_estimator_params, q_config_mode, q_config_per_channel, + num_samples, conv_sum_aggregation_nncf_graph, statistic_collector_parameters: TestGetStatisticsCollectorParameters, ): @@ -214,7 +244,7 @@ def test_get_stat_collector( target_point = list(min_max_algo._quantization_target_points_to_qconfig.keys())[0] tensor_collector = min_max_algo._get_stat_collector( - conv_sum_aggregation_nncf_graph.nncf_graph, target_point, q_config + conv_sum_aggregation_nncf_graph.nncf_graph, target_point, q_config, num_samples ) is_weight_tp = target_point.is_weight_target_point() @@ -251,3 +281,5 @@ def test_get_stat_collector( assert reducer._reduction_shape == params.ref_per_ch_reduction_shape else: assert reducer._reduction_shape == params.ref_per_tensor_reduction_shape + + assert tensor_collector.num_samples == num_samples diff --git a/tests/post_training/test_templates/test_smooth_quant.py b/tests/post_training/test_templates/test_smooth_quant.py new file mode 100644 index 00000000000..42fe17e01b0 --- /dev/null +++ b/tests/post_training/test_templates/test_smooth_quant.py @@ -0,0 +1,247 @@ +# Copyright (c) 2023 Intel Corporation +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from abc import abstractmethod +from typing import Callable, Dict, TypeVar + +import pytest + +from nncf.common.factory import NNCFGraphFactory +from nncf.common.factory import StatisticsAggregatorFactory +from nncf.common.graph.graph import NNCFNode +from nncf.experimental.common.tensor_statistics.collectors import AbsMaxReducer +from nncf.experimental.common.tensor_statistics.collectors import MaxAggregator +from nncf.parameters import ModelType +from nncf.quantization.advanced_parameters import AdvancedQuantizationParameters +from nncf.quantization.advanced_parameters import OverflowFix +from nncf.quantization.algorithms.post_training.algorithm import PostTrainingQuantization +from nncf.quantization.algorithms.smooth_quant.algorithm import SmoothQuant +from nncf.quantization.algorithms.smooth_quant.backend import SmoothQuantAlgoBackend +from tests.post_training.test_templates.helpers import LinearMultiShapeModel +from tests.post_training.test_templates.helpers import NonZeroLinearModel +from tests.post_training.test_templates.helpers import get_static_dataset + +TModel = TypeVar("TModel") +TTensor = TypeVar("TTensor") + + +class TemplateTestSQAlgorithm: + @staticmethod + def fn_to_type(tensor) -> TTensor: + return tensor + + @staticmethod + @abstractmethod + def get_transform_fn() -> Callable: + """ + Get transformation function for dataset. + """ + + @staticmethod + @abstractmethod + def backend_specific_model(model: TModel, tmp_dir: str) -> TModel: + """ + Return backend specific model. + """ + + @staticmethod + @abstractmethod + def check_scales(model: TModel, reference_values: Dict[str, TTensor]) -> None: + """ + Checking scales from model with references. + """ + + @staticmethod + @abstractmethod + def get_backend() -> SmoothQuantAlgoBackend: + """ + Returns backend-specific SmoothQuantAlgoBackend. 
+ """ + + @staticmethod + @abstractmethod + def get_matmul_metatype(): + """ + Returns backend-specific MatMul metatype + """ + + @staticmethod + def get_quantization_algorithm(): + return PostTrainingQuantization( + subset_size=1, + model_type=ModelType.TRANSFORMER, + advanced_parameters=AdvancedQuantizationParameters( + overflow_fix=OverflowFix.DISABLE, smooth_quant_alpha=0.95, inplace_statistics=False + ), + ) + + @pytest.mark.parametrize( + "model_cls, reference_values", + ( + ( + LinearMultiShapeModel, + { + "/Reshape_0_0/sq_multiply": [[[[1.0594617, 1.1019668, 1.2208323, 1.1003988]]]], + "/Split_1_0/sq_multiply": [[[[1.1276343, 0.7605822]]]], + "/Split_0_0/sq_multiply": [[[[0.32575992, 0.33121374]]]], + "/Reshape_1_0_0/sq_multiply": [ + [ + [ + 0.3251956, + 0.3326432, + 1.5490624, + 0.7233769, + 0.3689916, + 0.4845651, + 1.2022541, + 1.3118246, + ] + ] + ], + "/Reshape_1_0_1/sq_multiply": [[[0.4699388], [0.3369332], [0.3674589]]], + "/Reshape_2_0_0/sq_multiply": [[0.1242606]], + "/ReduceMax_0_0/sq_multiply": [ + [0.0944255, 0.0853033, 0.7187095, 0.3429819, 0.1422914, 0.2127623, 0.4640060, 0.7210725] + ], + }, + ), + ), + ) + def test_smooth_quant_algo(self, model_cls, reference_values, tmpdir): + model = self.backend_specific_model(model_cls(), tmpdir) + dataset = get_static_dataset(model_cls.INPUT_SIZE, self.get_transform_fn(), self.fn_to_type) + + quantization_algorithm = self.get_quantization_algorithm() + graph = NNCFGraphFactory.create(model) + quantized_model = quantization_algorithm.apply(model, graph, dataset=dataset) + + self.check_scales(quantized_model, reference_values) + + # pylint:disable=protected-access + def test_get_abs_max_channel_collector(self): + backend = self.get_backend() + reduction_shape = (3, 2, 1) + samples = 1 + + for inplace_type in [False, True]: + backend_tensor_collector = backend.get_abs_max_channel_collector( + num_samples=samples, + stats_reduction_shape=reduction_shape, + inplace=inplace_type, + branch_key="test_branch", + ) + + for aggregator in backend_tensor_collector.aggregators.values(): + assert isinstance(aggregator, MaxAggregator) + + for reducer in backend_tensor_collector.reducers: + assert isinstance(reducer, AbsMaxReducer) + assert reducer.inplace == inplace_type + assert reducer._reduction_shape == reduction_shape + + @pytest.mark.parametrize( + "model_cls, references", + ( + ( + LinearMultiShapeModel, + [ + ("/MatMul_1", 0), + ("/MatMul", 0), + ("/linear_2/MatMul", 0), + ("/linear_1/MatMul", 0), + ("/MatMul_2", 0), + ("/MatMul_4", 1), + ("55", 1), + ("41", 0), + ("19", 1), + ("24", 0), + ], + ), + ), + ) + # pylint:disable=protected-access + def test__get_nodes_to_smooth_data(self, model_cls, references, tmpdir): + model = self.backend_specific_model(model_cls(), tmpdir) + nncf_graph = NNCFGraphFactory.create(model) + + algo = SmoothQuant() + algo._set_backend_entity(model) + smooth_data = algo._get_nodes_to_smooth_data(nncf_graph) + smooth_data = {d["node_to_smooth"].node_name: d["input_act_port"] for d in smooth_data} + + for ref_node_name, ref_port_id in references: + assert ref_node_name in smooth_data + assert smooth_data[ref_node_name] == ref_port_id + + def test_empty_stats(self, mocker, tmpdir): + model_cls = NonZeroLinearModel + model = self.backend_specific_model(model_cls(), tmpdir) + dataset = get_static_dataset(model_cls.INPUT_SIZE, self.get_transform_fn(), self.fn_to_type) + + graph = NNCFGraphFactory.create(model) + algo = SmoothQuant(subset_size=1, inplace_statistics=False) + algo_statistic_points = 
algo.get_statistic_points(model, graph) + statistics_aggregator = StatisticsAggregatorFactory.create(model, dataset) + statistics_aggregator.register_statistic_points(algo_statistic_points) + statistics_aggregator.collect_statistics(model, graph) + + mocked_transformer = mocker.MagicMock() + mocker.patch("nncf.common.factory.ModelTransformerFactory.create", return_value=mocked_transformer) + algo.apply(model, graph, algo_statistic_points) + + mocked_transformer.transform.assert_called_once() + arg = mocked_transformer.transform.call_args.args[0] + assert len(arg.transformations) == 2 + + mm_metatype = self.get_matmul_metatype() + matmuls = [node for node in graph.topological_sort() if node.metatype == mm_metatype] + for transformation in arg.transformations: + assert transformation.target_point.target_node_name != matmuls[0].node_name + + def test_get_activation_channel_axis(self, node_metatype, layer_attributes, port_id, reference_value): + backend = self.get_backend() + + attributes = { + NNCFNode.METATYPE_ATTR: node_metatype, + NNCFNode.LAYER_ATTRIBUTES: layer_attributes, + NNCFNode.NODE_NAME_ATTR: "test_node", + NNCFNode.ID_NODE_ATTR: 0, + } + node = NNCFNode(attributes) + + try: + # pylint: disable=protected-access + activation_channel_axis = backend.get_activation_channel_axis(node, port_id) + except RuntimeError as e: + if isinstance(e, reference_value): + pytest.xfail("Expected exception") + + assert activation_channel_axis == reference_value + + def test_get_weight_channel_axis(self, node_metatype, layer_attributes, port_id, reference_value): + backend = self.get_backend() + + attributes = { + NNCFNode.METATYPE_ATTR: node_metatype, + NNCFNode.LAYER_ATTRIBUTES: layer_attributes, + NNCFNode.NODE_NAME_ATTR: "test_node", + NNCFNode.ID_NODE_ATTR: 0, + } + node = NNCFNode(attributes) + + try: + # pylint: disable=protected-access + activation_channel_axis = backend.get_weight_channel_axis(node, port_id) + except RuntimeError as e: + if isinstance(e, reference_value): + pytest.xfail("Expected exception") + + assert activation_channel_axis == reference_value diff --git a/tests/shared/case_collection.py b/tests/shared/case_collection.py index 558506302ff..5104f3c642d 100644 --- a/tests/shared/case_collection.py +++ b/tests/shared/case_collection.py @@ -9,7 +9,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from typing import Dict +from typing import Dict, List import pytest @@ -27,3 +27,8 @@ def skip_marked_cases_if_options_not_specified(config, items, marks_vs_options: item.add_marker( pytest.mark.skip(reason=f"This test case requires an option {option} to be specified for pytest.") ) + + +def skip_if_backend_not_selected(backend: str, backends_list: List[str]): + if "all" not in backends_list and backend not in backends_list: + pytest.skip("not selected for testing") diff --git a/tests/shared/helpers.py b/tests/shared/helpers.py index 37afb488290..ddcf96a82c0 100644 --- a/tests/shared/helpers.py +++ b/tests/shared/helpers.py @@ -47,9 +47,10 @@ def create_venv_with_nncf(tmp_path: Path, package_type: str, venv_type: str, ext version_string = f"{sys.version_info[0]}.{sys.version_info[1]}" if venv_type == "virtualenv": - subprocess.check_call(f"virtualenv -ppython{version_string} {venv_path}", shell=True) + virtualenv = Path(sys.executable).parent / "virtualenv" + subprocess.check_call(f"{virtualenv} -ppython{version_string} {venv_path}", shell=True) elif venv_type == "venv": - subprocess.check_call(f"python -m venv {venv_path}", shell=True) + subprocess.check_call(f"{sys.executable} -m venv {venv_path}", shell=True) subprocess.check_call(f"{pip_with_venv} install --upgrade pip", shell=True) subprocess.check_call(f"{pip_with_venv} install --upgrade wheel setuptools", shell=True) @@ -62,15 +63,16 @@ def create_venv_with_nncf(tmp_path: Path, package_type: str, venv_type: str, ext extra_reqs_str = "" if extra_reqs is not None and extra_reqs: extra_reqs_str = ",".join(extra_reqs) + extra_reqs_str = f"[{extra_reqs_str}]" if package_type == "pip_pypi": - run_cmd_line = f"{pip_with_venv} install nncf[{extra_reqs_str}]" + run_cmd_line = f"{pip_with_venv} install nncf{extra_reqs_str}" elif package_type == "pip_local": - run_cmd_line = f"{pip_with_venv} install {PROJECT_ROOT}[{extra_reqs_str}]" + run_cmd_line = f"{pip_with_venv} install {PROJECT_ROOT}{extra_reqs_str}" elif package_type == "pip_e_local": - run_cmd_line = f"{pip_with_venv} install -e {PROJECT_ROOT}[{extra_reqs_str}]" + run_cmd_line = f"{pip_with_venv} install -e {PROJECT_ROOT}{extra_reqs_str}" elif package_type == "pip_git_develop": - run_cmd_line = f"{pip_with_venv} install git+{GITHUB_REPO_URL}@develop#egg=nncf[{extra_reqs_str}]" + run_cmd_line = f"{pip_with_venv} install git+{GITHUB_REPO_URL}@develop#egg=nncf{extra_reqs_str}" elif package_type == "build_s": run_cmd_line = f"{python_executable_with_venv} -m build -n -s" elif package_type == "build_w": @@ -83,7 +85,7 @@ def create_venv_with_nncf(tmp_path: Path, package_type: str, venv_type: str, ext # compiled for CUDA 10.2. Thus need to direct pip installation specifically for # torch, otherwise the NNCF will only work in CPU mode. 
torch_extra_index = " --extra-index-url https://download.pytorch.org/whl/cu116" - if "torch" in extra_reqs and "build" not in package_type: + if extra_reqs is not None and "torch" in extra_reqs and "build" not in package_type: run_cmd_line += torch_extra_index subprocess.run(run_cmd_line, check=True, shell=True, cwd=PROJECT_ROOT) @@ -194,6 +196,7 @@ def compare_stats(expected: Dict[str, np.ndarray], actual: Dict[str, np.ndarray] stats = actual[ref_name] for param in param_names: ref_param, actual_param = ref_stats.get(param), stats.get(param) + assert np.array(ref_param).shape == np.array(actual_param).shape assert np.allclose(ref_param, actual_param, atol=1e-5) diff --git a/tests/shared/isolation_runner.py b/tests/shared/isolation_runner.py index 3924da3a50b..7b8ce9f0b27 100644 --- a/tests/shared/isolation_runner.py +++ b/tests/shared/isolation_runner.py @@ -1,3 +1,14 @@ +# Copyright (c) 2023 Intel Corporation +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + import inspect import os import subprocess diff --git a/tests/shared/metric_thresholds.py b/tests/shared/metric_thresholds.py index 6f39bd49496..0fb0f5d83cf 100644 --- a/tests/shared/metric_thresholds.py +++ b/tests/shared/metric_thresholds.py @@ -1,3 +1,14 @@ +# Copyright (c) 2023 Intel Corporation +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + DIFF_TARGET_MIN_GLOBAL = -0.1 DIFF_TARGET_MAX_GLOBAL = 0.1 DIFF_FP32_MIN_GLOBAL = -1.0 diff --git a/tests/shared/paths.py b/tests/shared/paths.py index cbc5285d937..61d4aade112 100644 --- a/tests/shared/paths.py +++ b/tests/shared/paths.py @@ -1,3 +1,14 @@ +# Copyright (c) 2023 Intel Corporation +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ import os import site from pathlib import Path diff --git a/tests/shared/patterns.py b/tests/shared/patterns.py index ffb9711d5e3..274e4c59394 100644 --- a/tests/shared/patterns.py +++ b/tests/shared/patterns.py @@ -15,6 +15,8 @@ from nncf.common.graph.patterns.manager import PatternsManager from nncf.common.utils.backend import BackendType +# pylint: disable=protected-access + def check_hw_patterns(backend: BackendType, reasons: Dict[HWFusedPatternNames, str]): backend_patterns = PatternsManager._get_backend_hw_patterns_map(backend) diff --git a/tests/shared/test_templates/__init__.py b/tests/shared/test_templates/__init__.py new file mode 100644 index 00000000000..9b29b47534a --- /dev/null +++ b/tests/shared/test_templates/__init__.py @@ -0,0 +1,10 @@ +# Copyright (c) 2023 Intel Corporation +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. diff --git a/tests/shared/test_templates/template_test_nncf_tensor.py b/tests/shared/test_templates/template_test_nncf_tensor.py new file mode 100644 index 00000000000..9fff5e9de1c --- /dev/null +++ b/tests/shared/test_templates/template_test_nncf_tensor.py @@ -0,0 +1,548 @@ +# Copyright (c) 2023 Intel Corporation +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +# pylint: disable=too-many-function-args + +import operator +from abc import abstractmethod +from typing import TypeVar + +import pytest + +from nncf.experimental.tensor import Tensor +from nncf.experimental.tensor import TensorDataType +from nncf.experimental.tensor import TensorDeviceType +from nncf.experimental.tensor import functions + +TModel = TypeVar("TModel") +TTensor = TypeVar("TTensor") + + +OPERATOR_MAP = { + "add": operator.add, + "sub": operator.sub, + "mul": operator.mul, + "pow": operator.pow, + "truediv": operator.truediv, + "floordiv": operator.floordiv, + "neg": lambda a, _: -a, +} + +COMPARISON_OPERATOR_MAP = { + "lt": operator.lt, + "le": operator.le, + "eq": operator.eq, + "ne": operator.ne, + "gt": operator.gt, + "ge": operator.ge, +} + + +# pylint: disable=too-many-public-methods +class TemplateTestNNCFTensorOperators: + @staticmethod + @abstractmethod + def to_tensor(x: TTensor) -> TTensor: + pass + + @pytest.mark.parametrize("op_name", OPERATOR_MAP.keys()) + def test_operators_tensor(self, op_name): + tensor_a = self.to_tensor([1, 2]) + tensor_b = self.to_tensor([22, 11]) + + nncf_tensor_a = Tensor(tensor_a) + nncf_tensor_b = Tensor(tensor_b) + + fn = OPERATOR_MAP[op_name] + res = fn(tensor_a, tensor_b) + res_nncf = fn(nncf_tensor_a, nncf_tensor_b) + + assert res.dtype == res_nncf.data.dtype + assert all(res == res_nncf.data) + assert isinstance(res_nncf, Tensor) + + @pytest.mark.parametrize("op_name", OPERATOR_MAP.keys()) + def test_operators_int(self, op_name): + tensor_a = self.to_tensor([1, 2]) + value = 2 + + nncf_tensor_a = Tensor(tensor_a) + + fn = OPERATOR_MAP[op_name] + res = fn(tensor_a, value) + res_nncf = fn(nncf_tensor_a, value) + + assert res.dtype == res_nncf.data.dtype + assert all(res == res_nncf.data) + assert isinstance(res_nncf, Tensor) + + @pytest.mark.parametrize("op_name", ("add", "sub", "mul", "truediv", "floordiv")) + def test_operators_int_rev(self, op_name): + tensor_a = self.to_tensor([1, 2]) + value = 2 + + nncf_tensor_a = Tensor(tensor_a) + + fn = OPERATOR_MAP[op_name] + res = fn(value, tensor_a) + res_nncf = fn(value, nncf_tensor_a) + + assert res.dtype == res_nncf.data.dtype + assert all(res == res_nncf.data) + assert isinstance(res_nncf, Tensor) + + @pytest.mark.parametrize("op_name", COMPARISON_OPERATOR_MAP.keys()) + def test_comparison_tensor(self, op_name): + tensor_a = self.to_tensor((1,)) + tensor_b = self.to_tensor((2,)) + + nncf_tensor_a = Tensor(tensor_a) + nncf_tensor_b = Tensor(tensor_b) + + fn = COMPARISON_OPERATOR_MAP[op_name] + res = fn(tensor_a, tensor_b) + res_nncf = fn(nncf_tensor_a, nncf_tensor_b) + + assert res == res_nncf + assert isinstance(res_nncf, Tensor) + + @pytest.mark.parametrize("op_name", COMPARISON_OPERATOR_MAP.keys()) + def test_comparison_int(self, op_name): + tensor_a = self.to_tensor((1,)) + value = 2 + + nncf_tensor_a = Tensor(tensor_a) + + fn = COMPARISON_OPERATOR_MAP[op_name] + res = fn(tensor_a, value) + res_nncf = fn(nncf_tensor_a, value) + + assert res == res_nncf + assert isinstance(res_nncf, Tensor) + + @pytest.mark.parametrize("op_name", COMPARISON_OPERATOR_MAP.keys()) + def test_comparison_int_rev(self, op_name): + tensor_a = self.to_tensor((1,)) + value = 2 + + nncf_tensor_a = Tensor(tensor_a) + + fn = COMPARISON_OPERATOR_MAP[op_name] + res = fn(value, tensor_a) + res_nncf = fn(value, nncf_tensor_a) + + assert res == res_nncf + assert isinstance(res_nncf, Tensor) + + @pytest.mark.parametrize( + "val, axis, ref", + ( + (1, None, 1), + ([1], None, 1), + ([[[[1], [2]], [[1], [2]]]], None, 
[[1, 2], [1, 2]]), + ([[[[1], [2]], [[1], [2]]]], 0, [[[1], [2]], [[1], [2]]]), + ([[[[1], [2]], [[1], [2]]]], -1, [[[1, 2], [1, 2]]]), + ), + ) + def test_squeeze(self, val, axis, ref): + tensor = self.to_tensor(val) + nncf_tensor = Tensor(tensor) + ref_tensor = self.to_tensor(ref) + res = nncf_tensor.squeeze(axis=axis) + if isinstance(ref, list): + assert functions.all(res == ref_tensor) + else: + assert res == ref_tensor + assert isinstance(res, Tensor) + + @pytest.mark.parametrize( + "val, axis, ref", + ( + (1, None, 1), + ([1], None, 1), + ([[[[1], [2]], [[1], [2]]]], None, [[1, 2], [1, 2]]), + ([[[[1], [2]], [[1], [2]]]], 0, [[[1], [2]], [[1], [2]]]), + ([[[[1], [2]], [[1], [2]]]], -1, [[[1, 2], [1, 2]]]), + ), + ) + def test_fn_squeeze(self, val, axis, ref): + tensor = self.to_tensor(val) + nncf_tensor = Tensor(tensor) + ref_tensor = self.to_tensor(ref) + res = functions.squeeze(nncf_tensor, axis=axis) + if isinstance(ref, list): + assert functions.all(res == ref_tensor) + else: + assert res == ref_tensor + assert isinstance(res, Tensor) + + @pytest.mark.parametrize( + "val,ref", + ( + (1, 1), + ([1], 1), + ([[[[1], [2]], [[1], [2]]]], [1, 2, 1, 2]), + ), + ) + def test_flatten(self, val, ref): + tensor = self.to_tensor(val) + nncf_tensor = Tensor(tensor) + ref_tensor = self.to_tensor(ref) + res = nncf_tensor.flatten() + if isinstance(ref, list): + assert all(res.data == ref_tensor) + else: + assert res.data == ref_tensor + assert isinstance(res, Tensor) + + @pytest.mark.parametrize( + "val, axis, ref", + ( + (1, None, 1), + ([1], None, 1), + ([[[[1], [2]], [[3], [4]]]], None, 4), + ([[1, 2], [3, 4]], 1, [2, 4]), + ), + ) + def test_max(self, val, axis, ref): + tensor = self.to_tensor(val) + nncf_tensor = Tensor(tensor) + ref_tensor = self.to_tensor(ref) + res = nncf_tensor.max(axis=axis) + if isinstance(ref, list): + assert all(res.data == ref_tensor) + else: + assert res.data == ref_tensor + assert isinstance(res, Tensor) + + @pytest.mark.parametrize( + "val, axis, ref", + ( + (1, None, 1), + ([1], None, 1), + ([[[[1], [2]], [[3], [4]]]], None, 4), + ([[1, 2], [3, 4]], 1, [2, 4]), + ), + ) + def test_fn_max(self, val, axis, ref): + tensor = self.to_tensor(val) + nncf_tensor = Tensor(tensor) + ref_tensor = self.to_tensor(ref) + res = functions.max(nncf_tensor, axis=axis) + if isinstance(ref, list): + assert all(res.data == ref_tensor) + else: + assert res.data == ref_tensor + assert isinstance(res, Tensor) + + @pytest.mark.parametrize( + "val, axis, ref", + ( + (1, None, 1), + ([1], None, 1), + ([[[[1], [2]], [[3], [4]]]], None, 1), + ([[1, 2], [3, 4]], 1, [1, 3]), + ), + ) + def test_min(self, val, axis, ref): + nncf_tensor = Tensor(self.to_tensor(val)) + ref_tensor = self.to_tensor(ref) + res = nncf_tensor.min(axis=axis) + if isinstance(ref, list): + assert all(res.data == ref_tensor) + else: + assert res.data == ref_tensor + assert isinstance(res, Tensor) + + @pytest.mark.parametrize( + "val, axis, ref", + ( + (1, None, 1), + ([1], None, 1), + ([[[[1], [2]], [[3], [4]]]], None, 1), + ([[1, 2], [3, 4]], 1, [1, 3]), + ), + ) + def test_fn_min(self, val, axis, ref): + nncf_tensor = Tensor(self.to_tensor(val)) + ref_tensor = self.to_tensor(ref) + res = functions.min(nncf_tensor, axis=axis) + if isinstance(ref, list): + assert all(res.data == ref_tensor) + else: + assert res.data == ref_tensor + assert isinstance(res, Tensor) + + @pytest.mark.parametrize( + "val, ref", + ( + (-1, 1), + ([-1, 1], [1, 1]), + ), + ) + def test_abs(self, val, ref): + nncf_tensor = 
Tensor(self.to_tensor(val)) + nncf_ref_tensor = Tensor(self.to_tensor(ref)) + res = nncf_tensor.abs() + if isinstance(ref, list): + assert all(res == nncf_ref_tensor) + else: + assert res == nncf_ref_tensor + assert isinstance(res, Tensor) + + @pytest.mark.parametrize( + "val, ref", + ( + (-1, 1), + ([-1, 1], [1, 1]), + ), + ) + def test_fn_abs(self, val, ref): + nncf_tensor = Tensor(self.to_tensor(val)) + nncf_ref_tensor = Tensor(self.to_tensor(ref)) + res = functions.abs(nncf_tensor) + if isinstance(ref, list): + assert all(res == nncf_ref_tensor) + else: + assert res == nncf_ref_tensor + assert isinstance(res, Tensor) + + def test_getitem(self): + arr = [0, 1, 2] + nncf_tensor = Tensor(self.to_tensor(arr)) + res = nncf_tensor[1] + assert res == 1 + assert isinstance(res, Tensor) + + def test_iter(self): + arr = [0, 1, 2] + nncf_tensor = Tensor(self.to_tensor(arr)) + i = 0 + for x in nncf_tensor: + assert x == arr[i] + assert isinstance(x, Tensor) + i += 1 + + # Math + + @pytest.mark.parametrize( + "axis, ref", + ( + (None, 3), + (0, [2, 1]), + ), + ) + def test_fn_count_nonzero(self, axis, ref): + tensor = self.to_tensor([[1, 2], [1, 0]]) + nncf_tensor = Tensor(tensor) + ref_tensor = self.to_tensor(ref) + res = functions.count_nonzero(nncf_tensor, axis=axis) + if axis is None: + assert res.data == ref_tensor + else: + assert all(res.data == self.to_tensor(ref)) + assert isinstance(res, Tensor) + + def test_fn_zeros_like(self): + tensor = self.to_tensor([1, 2]) + nncf_tensor = Tensor(tensor) + + res = functions.zeros_like(nncf_tensor) + assert all(res == Tensor(tensor * 0)) + assert isinstance(res, Tensor) + + def test_fn_maximum(self): + tensor_a = Tensor(self.to_tensor([1, 2])) + tensor_b = Tensor(self.to_tensor([2, 1])) + tensor_ref = self.to_tensor([2, 2]) + + res = functions.maximum(tensor_a, tensor_b) + assert all(res.data == tensor_ref) + assert isinstance(res, Tensor) + + def test_fn_maximum_list(self): + tensor_a = Tensor(self.to_tensor([1, 2])) + tensor_b = [2, 1] + tensor_ref = self.to_tensor([2, 2]) + + res = functions.maximum(tensor_a, tensor_b) + assert all(res.data == tensor_ref) + assert isinstance(res, Tensor) + + def test_fn_minimum(self): + tensor_a = Tensor(self.to_tensor([1, 2])) + tensor_b = Tensor(self.to_tensor([2, 1])) + tensor_ref = self.to_tensor([1, 1]) + + res = functions.minimum(tensor_a, tensor_b) + assert all(res.data == tensor_ref) + assert isinstance(res, Tensor) + + def test_fn_minimum_list(self): + tensor_a = Tensor(self.to_tensor([1, 2])) + tensor_b = [2, 1] + tensor_ref = self.to_tensor([1, 1]) + + res = functions.minimum(tensor_a, tensor_b) + assert all(res.data == tensor_ref) + assert isinstance(res, Tensor) + + def test_fn_ones_like(self): + tensor_a = Tensor(self.to_tensor([1, 2])) + tensor_ref = self.to_tensor([1, 1]) + + res = functions.ones_like(tensor_a) + assert all(res.data == tensor_ref) + assert isinstance(res, Tensor) + + @pytest.mark.parametrize( + "val, axis, ref", + ( + ([True, True], None, True), + ([True, False], None, False), + ([False, False], None, False), + ([[True, True], [False, True]], 0, [False, True]), + ), + ) + def test_fn_all(self, val, axis, ref): + tensor = Tensor(self.to_tensor(val)) + res = functions.all(tensor, axis=axis) + if isinstance(ref, list): + assert all(res.data == self.to_tensor(ref)) + else: + assert res.data == self.to_tensor(ref) + assert isinstance(res, Tensor) + + @pytest.mark.parametrize( + "val, axis, ref", + ( + ([True, True], None, True), + ([True, False], None, True), + ([False, False], None, 
False), + ([[False, True], [False, False]], 0, [False, True]), + ), + ) + def test_fn_any(self, val, axis, ref): + tensor = Tensor(self.to_tensor(val)) + res = functions.any(tensor, axis=axis) + if isinstance(ref, list): + assert all(res.data == self.to_tensor(ref)) + else: + assert res == ref + assert isinstance(res, Tensor) + + def test_fn_where(self): + tensor = Tensor(self.to_tensor([1, -1])) + tensor_ref = self.to_tensor([1, 0]) + res = functions.where(tensor > 0, 1, 0) + assert all(res.data == tensor_ref) + assert isinstance(res, Tensor) + + @pytest.mark.parametrize( + "val, ref", + ( + ([], True), + ([1], False), + (1, False), + ), + ) + def test_fn_isempty(self, val, ref): + tensor = Tensor(self.to_tensor(val)) + res = functions.isempty(tensor) + assert res == ref + assert isinstance(res, Tensor) + + @pytest.mark.parametrize( + "val, ref", + ( + ([], True), + ([1], False), + (1, False), + ), + ) + def test_isempty(self, val, ref): + tensor = Tensor(self.to_tensor(val)) + res = tensor.isempty() + assert res == ref + assert isinstance(res, Tensor) + + @pytest.mark.parametrize( + "x1, x2, rtol, atol, ref", + ( + ([0.1], [0.1], None, None, True), + ([0.1], [0.10001], None, None, False), + ([0.1], [0.10001], 0.1, None, True), + ([0.1], [0.10001], None, 0.1, True), + ([0.1], [0.20001], None, 0.1, False), + ), + ) + def test_fn_allclose(self, x1, x2, rtol, atol, ref): + tensor1 = Tensor(self.to_tensor(x1)) + tensor2 = Tensor(self.to_tensor(x2)) + if rtol is not None: + res = functions.allclose(tensor1, tensor2, rtol=rtol) + elif atol is not None: + res = functions.allclose(tensor1, tensor2, atol=atol) + else: + res = functions.allclose(tensor1, tensor2) + assert res == ref + assert isinstance(res, Tensor) + + @pytest.mark.parametrize( + "x1, x2, rtol, atol, ref", + ( + ([0.1], [0.1], None, None, [True]), + ([0.1], [0.10001], None, None, [False]), + ([0.1], [0.10001], 0.1, None, [True]), + ([0.1], [0.10001], None, 0.1, [True]), + ), + ) + def test_fn_isclose(self, x1, x2, rtol, atol, ref): + tensor1 = Tensor(self.to_tensor(x1)) + tensor2 = Tensor(self.to_tensor(x2)) + if rtol is not None: + res = functions.isclose(tensor1, tensor2, rtol=rtol) + elif atol is not None: + res = functions.isclose(tensor1, tensor2, atol=atol) + else: + res = functions.isclose(tensor1, tensor2) + assert all(res == self.to_tensor(ref)) + assert isinstance(res, Tensor) + + def test_device(self): + tensor = Tensor(self.to_tensor([1])) + assert tensor.device == TensorDeviceType.CPU + + def test_astype(self): + tensor = Tensor(self.to_tensor([1])) + res = tensor.astype(TensorDataType.int8) + assert isinstance(res, Tensor) + assert res.dtype == TensorDataType.int8 + + def test_fn_astype(self): + tensor = Tensor(self.to_tensor([1])) + res = functions.astype(tensor, TensorDataType.int8) + assert isinstance(res, Tensor) + assert res.dtype == TensorDataType.int8 + + def test_reshape(self): + tensor = Tensor(self.to_tensor([1, 1])) + assert tensor.shape == [2] + assert tensor.reshape([1, 2]).shape == [1, 2] + + def test_fn_reshape(self): + tensor = Tensor(self.to_tensor([1, 1])) + assert tensor.shape == [2] + assert functions.reshape(tensor, [1, 2]).shape == [1, 2] + + def test_not_implemented(self): + with pytest.raises(NotImplementedError, match="is not implemented for"): + functions.device({}, [1, 2]) diff --git a/tests/tensorflow/README.md b/tests/tensorflow/README.md index b713e20e677..7f5c13bc942 100644 --- a/tests/tensorflow/README.md +++ b/tests/tensorflow/README.md @@ -1,6 +1,7 @@ -# Tesing NNCF in Tensorflow +# 
Testing NNCF in Tensorflow ## Introduction + In this folder, there are test files available to test if the nncf module is installed and works properly in your local or server environment. It will test the NNCF module with mock datasets (`cifar10` for classification, or `coco2017` for detection & segmentation) and mock models. Before testing make sure that symlinks from `tests/tensorflow/data` are correct. They may be corrupted if the repo was downloaded to a Windows machine via git without the `core.symlinks` parameter enabled. @@ -10,26 +11,31 @@ Before testing make sure that symlinks from `tests/tensorflow/data` are correct. --- ## pre-commit test + A generic way to run TF pre-commit tests is via `make`: -``` + +```bash make install-tensorflow-test make test-tensorflow ``` Another way is to run `pytest` explicitly: -``` + +```bash pytest tests/common tests/tensorflow \ - --junitxml nncf-tests.xml + --junitxml nncf-tests.xml ``` + The test results will be saved in `nncf-tests.xml`. ## nightly-test - Below is a description of the parameters to be used when building. -``` + +```text --ignore-unknown-dependency - ignore dependencies whose outcome is not known ---data=DATA-DIR Path to test datasets + ignore dependencies whose outcome is not known +--data=DATA-DIR Path to test datasets --sota-checkpoints-dir=SOTA_CHECKPOINTS_DIR Path to checkpoints directory for sota accuracy test --sota-data-dir=SOTA_DATA_DIR @@ -46,15 +52,17 @@ The tests results will be saved in `nncf-tests.xml`. ``` ### test_sanity_sample.py + In this file, you will **test the basic training and evaluation loop in NNCF**. The `generate_config_params` function will generate some test configs to be tested, and they will be saved into `CONFIG_PARAMS`. One example in `CONFIG_PARAMS` is: `('classification', '{nncf-dir}/tests/tensorflow/data/configs/resnet50_cifar10_magnitude_sparsity_int8.json', 'cifar10', 'tfrecord')`. The functions `test_model_eval`, `test_model_train`, `test_trained_model_eval`, or other similar functions are the key functions in this file. Each receives parameters from the generated sample config, and its `main` variable will get the main function defined for each task (e.g. for classification: `examples/tensorflow/classification/main.py`). Each function will test the model from a checkpoint, train the model for 1~2 epochs, or test ONNX export of the TF model. - ### test_weekly.py + In this file, you will **optimize and train the pre-trained models in `GLOBAL_CONFIG` with each dataset, and test the trained model's metrics within the `tolerance` value and `expected_accuracy`**. The `tolerance` value defines how much relative accuracy error is allowed, with a default value of 0.5. For example, if the expected accuracy is 75 and the tolerance value is 0.5, then an accuracy between 74.5 and 75.5 is allowed for the test. You should pass the `--run-weekly-tests` parameter to run the whole process. It will take a long time because it will train the models.
An example of the tfds dataset structure is shown below: -``` + +```text tfds ├── cifar10 │ └── cifar10 @@ -73,28 +81,30 @@ tfds An example of the weekly test command is shown below: -``` +```bash pytest --junitxml nncf-tests.xml tests/tensorflow/test_weekly.py -s \ ---run-weekly-tests \ ---data {PATH_TO_TFDS_OR_TFRECORDS_DATA_PATH} \ ---models-dir {PATH_TO_PRETRAINED_MODELS_CKPT_PATH} \ ---metrics-dump-path ./weekly_test_dump + --run-weekly-tests \ + --data {PATH_TO_TFDS_OR_TFRECORDS_DATA_PATH} \ + --models-dir {PATH_TO_PRETRAINED_MODELS_CKPT_PATH} \ + --metrics-dump-path ./weekly_test_dump ``` - ### test_sota_checkpoints.py + In this file, you can **test whether the trained models from the weekly test match the expected performance**. You can see the configurations are written in `sota_checkpoints_eval.json`, which contains the tasks / datasets / topologies. In topologies, each entry has the model name as a key and contains various data such as the config file path, ckpt path, target performance based on metric_type, compression method, etc. The OV test will extract the `IR` or `frozen graph` from each model and test the extracted graph's accuracy. You can run the test from the OV extracted model or evaluate the TensorFlow model as follows: -``` + +```bash pytest test_sota_checkpoints.py -s \ --m oveval \ ---sota-checkpoints-dir={SOTA_CKPT_DIR} \ ---run-openvino-eval \ ---ov-data-dir={OV_DATA_DIR} ---metrics-dump-path ./ov_test_dump -``` + -m oveval \ + --sota-checkpoints-dir={SOTA_CKPT_DIR} \ + --run-openvino-eval \ + --ov-data-dir={OV_DATA_DIR} \ + --metrics-dump-path ./ov_test_dump ``` + +```bash pytest test_sota_checkpoints.py -s \ ---sota-checkpoints-dir={SOTA_CKPT_DIR}, ---sota-data-dir={SOTA_DATA_DIR} ---metrics-dump-path ./eval_test_dump -``` \ No newline at end of file + --sota-checkpoints-dir={SOTA_CKPT_DIR} \ + --sota-data-dir={SOTA_DATA_DIR} \ + --metrics-dump-path ./eval_test_dump +``` diff --git a/tests/tensorflow/accuracy_aware_training/test_keras_api.py b/tests/tensorflow/accuracy_aware_training/test_keras_api.py index bdf5e17cce9..5a5251f3469 100644 --- a/tests/tensorflow/accuracy_aware_training/test_keras_api.py +++ b/tests/tensorflow/accuracy_aware_training/test_keras_api.py @@ -152,6 +152,7 @@ def inverse_loss(y_true, y_pred): statistics = compress_model.accuracy_aware_fit( dataset, compression_ctrl, + uncompressed_model_accuracy=uncompressed_model_accuracy, nncf_config=config, callbacks=compression_callbacks, initial_epoch=0, @@ -207,6 +208,7 @@ def inverse_loss(y_true, y_pred): statistics = compress_model.accuracy_aware_fit( dataset, compression_ctrl, + uncompressed_model_accuracy=uncompressed_model_accuracy, nncf_config=config, callbacks=compression_callbacks, initial_epoch=0, diff --git a/tests/tensorflow/data/model_transormer/2.12/functional_insert_after.dot b/tests/tensorflow/data/model_transormer/2.12/functional_insert_after.dot new file mode 120000 index 00000000000..23a839c236c --- /dev/null +++ b/tests/tensorflow/data/model_transormer/2.12/functional_insert_after.dot @@ -0,0 +1 @@ +../2.5/functional_insert_after.dot \ No newline at end of file diff --git a/tests/tensorflow/data/model_transormer/2.12/functional_insert_before.dot b/tests/tensorflow/data/model_transormer/2.12/functional_insert_before.dot new file mode 120000 index 00000000000..cc8ec9021d5 --- /dev/null +++ b/tests/tensorflow/data/model_transormer/2.12/functional_insert_before.dot @@ -0,0 +1 @@ +../2.5/functional_insert_before.dot \ No newline at end of file diff --git
a/tests/tensorflow/data/model_transormer/2.12/sequential_block_insert_after.dot b/tests/tensorflow/data/model_transormer/2.12/sequential_block_insert_after.dot new file mode 120000 index 00000000000..8fdbe772a1b --- /dev/null +++ b/tests/tensorflow/data/model_transormer/2.12/sequential_block_insert_after.dot @@ -0,0 +1 @@ +../2.5/sequential_block_insert_after.dot \ No newline at end of file diff --git a/tests/tensorflow/data/model_transormer/2.12/sequential_block_insert_before.dot b/tests/tensorflow/data/model_transormer/2.12/sequential_block_insert_before.dot new file mode 120000 index 00000000000..94a0f201eda --- /dev/null +++ b/tests/tensorflow/data/model_transormer/2.12/sequential_block_insert_before.dot @@ -0,0 +1 @@ +../2.5/sequential_block_insert_before.dot \ No newline at end of file diff --git a/tests/tensorflow/data/reference_graphs/2.11/quantized/hw/CPU/inception_v3.dot b/tests/tensorflow/data/reference_graphs/2.11/quantized/hw/CPU/inception_v3.dot index ee079e7a2c7..4bdff5fc871 100644 --- a/tests/tensorflow/data/reference_graphs/2.11/quantized/hw/CPU/inception_v3.dot +++ b/tests/tensorflow/data/reference_graphs/2.11/quantized/hw/CPU/inception_v3.dot @@ -2853,18 +2853,6 @@ args_0 [op=Placeholder]; "inception_v3/conv2d_87/SymmQuant/FakeQuantWithMinMaxVarsPerChannel/ReadVariableOp" [op=ReadVariableOp]; "inception_v3/conv2d_87/SymmQuant/FakeQuantWithMinMaxVarsPerChannel" [op=FakeQuantWithMinMaxVarsPerChannel]; "inception_v3/conv2d_87/Conv2D" [op=Conv2D]; -"inception_v3/conv2d_85/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; -"inception_v3/conv2d_85/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/conv2d_85/SymmQuant/Abs" [op=Abs]; -"inception_v3/conv2d_85/SymmQuant/add/y" [op=Const]; -"inception_v3/conv2d_85/SymmQuant/add" [op=AddV2]; -"inception_v3/conv2d_85/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; -"inception_v3/conv2d_85/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/conv2d_85/SymmQuant/mul" [op=Mul]; -"inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel/ReadVariableOp/resource" [op=Placeholder]; -"inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel" [op=FakeQuantWithMinMaxVarsPerChannel]; -"inception_v3/conv2d_85/Conv2D" [op=Conv2D]; "inception_v3/conv2d_93/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; "inception_v3/conv2d_93/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; "inception_v3/conv2d_93/SymmQuant/Abs" [op=Abs]; @@ -2909,14 +2897,18 @@ args_0 [op=Placeholder]; "inception_v3/batch_normalization_87/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; "inception_v3/batch_normalization_87/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; "inception_v3/batch_normalization_87/FusedBatchNormV3" [op=FusedBatchNormV3]; -"inception_v3/batch_normalization_85/Const" [op=Const]; -"inception_v3/batch_normalization_85/ReadVariableOp/resource" [op=Placeholder]; -"inception_v3/batch_normalization_85/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; -"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; -"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; 
-"inception_v3/batch_normalization_85/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_v3/conv2d_85/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/conv2d_85/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/conv2d_85/SymmQuant/Abs" [op=Abs]; +"inception_v3/conv2d_85/SymmQuant/add/y" [op=Const]; +"inception_v3/conv2d_85/SymmQuant/add" [op=AddV2]; +"inception_v3/conv2d_85/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/conv2d_85/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/conv2d_85/SymmQuant/mul" [op=Mul]; +"inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel" [op=FakeQuantWithMinMaxVarsPerChannel]; +"inception_v3/conv2d_85/Conv2D" [op=Conv2D]; "inception_v3/batch_normalization_93/Const" [op=Const]; "inception_v3/batch_normalization_93/ReadVariableOp/resource" [op=Placeholder]; "inception_v3/batch_normalization_93/ReadVariableOp" [op=ReadVariableOp]; @@ -2929,62 +2921,80 @@ args_0 [op=Placeholder]; "inception_v3/activation_91/Relu" [op=Relu]; "inception_v3/activation_88/Relu" [op=Relu]; "inception_v3/activation_87/Relu" [op=Relu]; -"inception_v3/activation_85/Relu" [op=Relu]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/Abs" [op=Abs]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/add/y" [op=Const]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/add" [op=AddV2]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/add_1" [op=AddV2]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"inception_v3/batch_normalization_85/Const" [op=Const]; +"inception_v3/batch_normalization_85/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/batch_normalization_85/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_v3/batch_normalization_85/FusedBatchNormV3" [op=FusedBatchNormV3]; "inception_v3/activation_93/Relu" [op=Relu]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/Abs/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/Abs" [op=Abs]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/add/y" [op=Const]; 
-"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/add" [op=AddV2]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/add_1" [op=AddV2]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/Abs/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/Abs" [op=Abs]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/add/y" [op=Const]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/add" [op=AddV2]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/add_1" [op=AddV2]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/Abs/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/Abs" [op=Abs]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/add/y" [op=Const]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/add" [op=AddV2]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/add_1" [op=AddV2]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/Abs/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/Abs" [op=Abs]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/add/y" [op=Const]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/add" [op=AddV2]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/add_1" [op=AddV2]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"inception_v3/activation_91/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/activation_91/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_91/fake_quantize/AsymmQuant/Abs" [op=Abs]; +"inception_v3/activation_91/fake_quantize/AsymmQuant/add/y" [op=Const]; 
+"inception_v3/activation_91/fake_quantize/AsymmQuant/add" [op=AddV2]; +"inception_v3/activation_91/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/activation_91/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_91/fake_quantize/AsymmQuant/add_1" [op=AddV2]; +"inception_v3/activation_91/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_91/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"inception_v3/activation_92/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/activation_92/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_92/fake_quantize/AsymmQuant/Abs" [op=Abs]; +"inception_v3/activation_92/fake_quantize/AsymmQuant/add/y" [op=Const]; +"inception_v3/activation_92/fake_quantize/AsymmQuant/add" [op=AddV2]; +"inception_v3/activation_92/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/activation_92/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_92/fake_quantize/AsymmQuant/add_1" [op=AddV2]; +"inception_v3/activation_92/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_92/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"inception_v3/activation_87/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/activation_87/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_87/fake_quantize/AsymmQuant/Abs" [op=Abs]; +"inception_v3/activation_87/fake_quantize/AsymmQuant/add/y" [op=Const]; +"inception_v3/activation_87/fake_quantize/AsymmQuant/add" [op=AddV2]; +"inception_v3/activation_87/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/activation_87/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_87/fake_quantize/AsymmQuant/add_1" [op=AddV2]; +"inception_v3/activation_87/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_87/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"inception_v3/activation_88/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/activation_88/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_88/fake_quantize/AsymmQuant/Abs" [op=Abs]; +"inception_v3/activation_88/fake_quantize/AsymmQuant/add/y" [op=Const]; +"inception_v3/activation_88/fake_quantize/AsymmQuant/add" [op=AddV2]; +"inception_v3/activation_88/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/activation_88/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_88/fake_quantize/AsymmQuant/add_1" [op=AddV2]; +"inception_v3/activation_88/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_88/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"inception_v3/activation_85/Relu" [op=Relu]; +"inception_v3/activation_85/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/activation_85/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_85/fake_quantize/AsymmQuant/Abs" [op=Abs]; 
+"inception_v3/activation_85/fake_quantize/AsymmQuant/add/y" [op=Const]; +"inception_v3/activation_85/fake_quantize/AsymmQuant/add" [op=AddV2]; +"inception_v3/activation_85/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/activation_85/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_85/fake_quantize/AsymmQuant/add_1" [op=AddV2]; +"inception_v3/activation_85/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_85/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; "inception_v3/mixed9_1/concat/axis" [op=Const]; "inception_v3/mixed9_1/concat" [op=ConcatV2]; "inception_v3/concatenate_1/concat/axis" [op=Const]; "inception_v3/concatenate_1/concat" [op=ConcatV2]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/Abs/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/Abs" [op=Abs]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/add/y" [op=Const]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/add" [op=AddV2]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/add_1" [op=AddV2]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"inception_v3/activation_93/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/activation_93/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_93/fake_quantize/AsymmQuant/Abs" [op=Abs]; +"inception_v3/activation_93/fake_quantize/AsymmQuant/add/y" [op=Const]; +"inception_v3/activation_93/fake_quantize/AsymmQuant/add" [op=AddV2]; +"inception_v3/activation_93/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/activation_93/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_93/fake_quantize/AsymmQuant/add_1" [op=AddV2]; +"inception_v3/activation_93/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_93/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; "inception_v3/mixed10/concat/axis" [op=Const]; "inception_v3/mixed10/concat" [op=ConcatV2]; "inception_v3/avg_pool/Mean/reduction_indices" [op=Const]; @@ -6155,19 +6165,6 @@ args_0 -> "inception_v3/input_1/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars "inception_v3/conv2d_87/SymmQuant/FakeQuantWithMinMaxVarsPerChannel/ReadVariableOp" -> "inception_v3/conv2d_87/SymmQuant/FakeQuantWithMinMaxVarsPerChannel"; "inception_v3/conv2d_87/SymmQuant/FakeQuantWithMinMaxVarsPerChannel" -> "inception_v3/conv2d_87/Conv2D"; "inception_v3/conv2d_87/Conv2D" -> "inception_v3/batch_normalization_87/FusedBatchNormV3"; -"inception_v3/conv2d_85/SymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/conv2d_85/SymmQuant/Abs/ReadVariableOp"; -"inception_v3/conv2d_85/SymmQuant/Abs/ReadVariableOp" -> "inception_v3/conv2d_85/SymmQuant/Abs"; -"inception_v3/conv2d_85/SymmQuant/Abs" -> "inception_v3/conv2d_85/SymmQuant/add"; -"inception_v3/conv2d_85/SymmQuant/add/y" -> 
"inception_v3/conv2d_85/SymmQuant/add"; -"inception_v3/conv2d_85/SymmQuant/add" -> "inception_v3/conv2d_85/SymmQuant/mul"; -"inception_v3/conv2d_85/SymmQuant/add" -> "inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel"; -"inception_v3/conv2d_85/SymmQuant/mul/ReadVariableOp/resource" -> "inception_v3/conv2d_85/SymmQuant/mul/ReadVariableOp"; -"inception_v3/conv2d_85/SymmQuant/mul/ReadVariableOp" -> "inception_v3/conv2d_85/SymmQuant/mul"; -"inception_v3/conv2d_85/SymmQuant/mul" -> "inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel"; -"inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel/ReadVariableOp/resource" -> "inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel/ReadVariableOp"; -"inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel/ReadVariableOp" -> "inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel"; -"inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel" -> "inception_v3/conv2d_85/Conv2D"; -"inception_v3/conv2d_85/Conv2D" -> "inception_v3/batch_normalization_85/FusedBatchNormV3"; "inception_v3/conv2d_93/SymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/conv2d_93/SymmQuant/Abs/ReadVariableOp"; "inception_v3/conv2d_93/SymmQuant/Abs/ReadVariableOp" -> "inception_v3/conv2d_93/SymmQuant/Abs"; "inception_v3/conv2d_93/SymmQuant/Abs" -> "inception_v3/conv2d_93/SymmQuant/add"; @@ -6213,14 +6210,19 @@ args_0 -> "inception_v3/input_1/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars "inception_v3/batch_normalization_87/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_v3/batch_normalization_87/FusedBatchNormV3/ReadVariableOp_1"; "inception_v3/batch_normalization_87/FusedBatchNormV3/ReadVariableOp_1" -> "inception_v3/batch_normalization_87/FusedBatchNormV3"; "inception_v3/batch_normalization_87/FusedBatchNormV3" -> "inception_v3/activation_87/Relu"; -"inception_v3/batch_normalization_85/Const" -> "inception_v3/batch_normalization_85/FusedBatchNormV3"; -"inception_v3/batch_normalization_85/ReadVariableOp/resource" -> "inception_v3/batch_normalization_85/ReadVariableOp"; -"inception_v3/batch_normalization_85/ReadVariableOp" -> "inception_v3/batch_normalization_85/FusedBatchNormV3"; -"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp"; -"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp" -> "inception_v3/batch_normalization_85/FusedBatchNormV3"; -"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp_1"; -"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp_1" -> "inception_v3/batch_normalization_85/FusedBatchNormV3"; -"inception_v3/batch_normalization_85/FusedBatchNormV3" -> "inception_v3/activation_85/Relu"; +"inception_v3/conv2d_85/SymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/conv2d_85/SymmQuant/Abs/ReadVariableOp"; +"inception_v3/conv2d_85/SymmQuant/Abs/ReadVariableOp" -> "inception_v3/conv2d_85/SymmQuant/Abs"; +"inception_v3/conv2d_85/SymmQuant/Abs" -> "inception_v3/conv2d_85/SymmQuant/add"; +"inception_v3/conv2d_85/SymmQuant/add/y" -> "inception_v3/conv2d_85/SymmQuant/add"; +"inception_v3/conv2d_85/SymmQuant/add" -> "inception_v3/conv2d_85/SymmQuant/mul"; +"inception_v3/conv2d_85/SymmQuant/add" -> "inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel"; +"inception_v3/conv2d_85/SymmQuant/mul/ReadVariableOp/resource" -> 
"inception_v3/conv2d_85/SymmQuant/mul/ReadVariableOp"; +"inception_v3/conv2d_85/SymmQuant/mul/ReadVariableOp" -> "inception_v3/conv2d_85/SymmQuant/mul"; +"inception_v3/conv2d_85/SymmQuant/mul" -> "inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel"; +"inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel/ReadVariableOp/resource" -> "inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel/ReadVariableOp"; +"inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel/ReadVariableOp" -> "inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel"; +"inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel" -> "inception_v3/conv2d_85/Conv2D"; +"inception_v3/conv2d_85/Conv2D" -> "inception_v3/batch_normalization_85/FusedBatchNormV3"; "inception_v3/batch_normalization_93/Const" -> "inception_v3/batch_normalization_93/FusedBatchNormV3"; "inception_v3/batch_normalization_93/ReadVariableOp/resource" -> "inception_v3/batch_normalization_93/ReadVariableOp"; "inception_v3/batch_normalization_93/ReadVariableOp" -> "inception_v3/batch_normalization_93/FusedBatchNormV3"; @@ -6229,82 +6231,90 @@ args_0 -> "inception_v3/input_1/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars "inception_v3/batch_normalization_93/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_v3/batch_normalization_93/FusedBatchNormV3/ReadVariableOp_1"; "inception_v3/batch_normalization_93/FusedBatchNormV3/ReadVariableOp_1" -> "inception_v3/batch_normalization_93/FusedBatchNormV3"; "inception_v3/batch_normalization_93/FusedBatchNormV3" -> "inception_v3/activation_93/Relu"; -"inception_v3/activation_92/Relu" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars"; -"inception_v3/activation_91/Relu" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars"; -"inception_v3/activation_88/Relu" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/Relu" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars"; -"inception_v3/activation_85/Relu" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/Abs/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/Abs/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/Abs/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/Abs/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/Abs/ReadVariableOp"; 
-"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/Abs"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/Abs" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/add"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/add/y" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/add"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/add" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/add_1"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/FakeQuantWithMinMaxVars/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/FakeQuantWithMinMaxVars/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/add_1"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/add_1" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars"; 
-"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars" -> "inception_v3/mixed10/concat"; -"inception_v3/activation_93/Relu" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/Abs/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/Abs"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/Abs" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/add"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/add/y" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/add"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/add" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/add_1"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/add_1"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/add_1" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars" -> "inception_v3/concatenate_1/concat"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/Abs/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/Abs"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/Abs" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/add"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/add/y" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/add"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/add" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/add_1"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/add_1"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/add_1" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars" -> "inception_v3/concatenate_1/concat"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/Abs/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/Abs"; 
-"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/Abs" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/add"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/add/y" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/add"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/add" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/add_1"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/add_1"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/add_1" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars" -> "inception_v3/mixed9_1/concat"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/Abs/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/Abs"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/Abs" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/add"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/add/y" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/add"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/add" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/add_1"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/add_1"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/add_1" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/FakeQuantWithMinMaxVars/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/FakeQuantWithMinMaxVars" -> "inception_v3/mixed9_1/concat"; +"inception_v3/activation_92/Relu" -> "inception_v3/activation_92/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_91/Relu" -> "inception_v3/activation_91/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_88/Relu" -> "inception_v3/activation_88/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_87/Relu" -> "inception_v3/activation_87/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/batch_normalization_85/Const" -> "inception_v3/batch_normalization_85/FusedBatchNormV3"; +"inception_v3/batch_normalization_85/ReadVariableOp/resource" -> "inception_v3/batch_normalization_85/ReadVariableOp"; +"inception_v3/batch_normalization_85/ReadVariableOp" -> "inception_v3/batch_normalization_85/FusedBatchNormV3"; +"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp/resource" -> 
"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp"; +"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp" -> "inception_v3/batch_normalization_85/FusedBatchNormV3"; +"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp_1"; +"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp_1" -> "inception_v3/batch_normalization_85/FusedBatchNormV3"; +"inception_v3/batch_normalization_85/FusedBatchNormV3" -> "inception_v3/activation_85/Relu"; +"inception_v3/activation_93/Relu" -> "inception_v3/activation_93/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_91/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/activation_91/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; +"inception_v3/activation_91/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "inception_v3/activation_91/fake_quantize/AsymmQuant/Abs"; +"inception_v3/activation_91/fake_quantize/AsymmQuant/Abs" -> "inception_v3/activation_91/fake_quantize/AsymmQuant/add"; +"inception_v3/activation_91/fake_quantize/AsymmQuant/add/y" -> "inception_v3/activation_91/fake_quantize/AsymmQuant/add"; +"inception_v3/activation_91/fake_quantize/AsymmQuant/add" -> "inception_v3/activation_91/fake_quantize/AsymmQuant/add_1"; +"inception_v3/activation_91/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_91/fake_quantize/AsymmQuant/ReadVariableOp"; +"inception_v3/activation_91/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_91/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"inception_v3/activation_91/fake_quantize/AsymmQuant/ReadVariableOp" -> "inception_v3/activation_91/fake_quantize/AsymmQuant/add_1"; +"inception_v3/activation_91/fake_quantize/AsymmQuant/add_1" -> "inception_v3/activation_91/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_91/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "inception_v3/activation_91/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_91/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "inception_v3/concatenate_1/concat"; +"inception_v3/activation_92/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/activation_92/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; +"inception_v3/activation_92/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "inception_v3/activation_92/fake_quantize/AsymmQuant/Abs"; +"inception_v3/activation_92/fake_quantize/AsymmQuant/Abs" -> "inception_v3/activation_92/fake_quantize/AsymmQuant/add"; +"inception_v3/activation_92/fake_quantize/AsymmQuant/add/y" -> "inception_v3/activation_92/fake_quantize/AsymmQuant/add"; +"inception_v3/activation_92/fake_quantize/AsymmQuant/add" -> "inception_v3/activation_92/fake_quantize/AsymmQuant/add_1"; +"inception_v3/activation_92/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_92/fake_quantize/AsymmQuant/ReadVariableOp"; +"inception_v3/activation_92/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_92/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"inception_v3/activation_92/fake_quantize/AsymmQuant/ReadVariableOp" -> "inception_v3/activation_92/fake_quantize/AsymmQuant/add_1"; +"inception_v3/activation_92/fake_quantize/AsymmQuant/add_1" -> "inception_v3/activation_92/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; 
+"inception_v3/activation_92/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "inception_v3/activation_92/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_92/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "inception_v3/concatenate_1/concat"; +"inception_v3/activation_87/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; +"inception_v3/activation_87/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/AsymmQuant/Abs"; +"inception_v3/activation_87/fake_quantize/AsymmQuant/Abs" -> "inception_v3/activation_87/fake_quantize/AsymmQuant/add"; +"inception_v3/activation_87/fake_quantize/AsymmQuant/add/y" -> "inception_v3/activation_87/fake_quantize/AsymmQuant/add"; +"inception_v3/activation_87/fake_quantize/AsymmQuant/add" -> "inception_v3/activation_87/fake_quantize/AsymmQuant/add_1"; +"inception_v3/activation_87/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/AsymmQuant/ReadVariableOp"; +"inception_v3/activation_87/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"inception_v3/activation_87/fake_quantize/AsymmQuant/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/AsymmQuant/add_1"; +"inception_v3/activation_87/fake_quantize/AsymmQuant/add_1" -> "inception_v3/activation_87/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_87/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_87/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "inception_v3/mixed9_1/concat"; +"inception_v3/activation_88/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/activation_88/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; +"inception_v3/activation_88/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "inception_v3/activation_88/fake_quantize/AsymmQuant/Abs"; +"inception_v3/activation_88/fake_quantize/AsymmQuant/Abs" -> "inception_v3/activation_88/fake_quantize/AsymmQuant/add"; +"inception_v3/activation_88/fake_quantize/AsymmQuant/add/y" -> "inception_v3/activation_88/fake_quantize/AsymmQuant/add"; +"inception_v3/activation_88/fake_quantize/AsymmQuant/add" -> "inception_v3/activation_88/fake_quantize/AsymmQuant/add_1"; +"inception_v3/activation_88/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_88/fake_quantize/AsymmQuant/ReadVariableOp"; +"inception_v3/activation_88/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_88/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"inception_v3/activation_88/fake_quantize/AsymmQuant/ReadVariableOp" -> "inception_v3/activation_88/fake_quantize/AsymmQuant/add_1"; +"inception_v3/activation_88/fake_quantize/AsymmQuant/add_1" -> "inception_v3/activation_88/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_88/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "inception_v3/activation_88/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_88/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "inception_v3/mixed9_1/concat"; +"inception_v3/activation_85/Relu" -> "inception_v3/activation_85/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; 
+"inception_v3/activation_85/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/activation_85/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; +"inception_v3/activation_85/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "inception_v3/activation_85/fake_quantize/AsymmQuant/Abs"; +"inception_v3/activation_85/fake_quantize/AsymmQuant/Abs" -> "inception_v3/activation_85/fake_quantize/AsymmQuant/add"; +"inception_v3/activation_85/fake_quantize/AsymmQuant/add/y" -> "inception_v3/activation_85/fake_quantize/AsymmQuant/add"; +"inception_v3/activation_85/fake_quantize/AsymmQuant/add" -> "inception_v3/activation_85/fake_quantize/AsymmQuant/add_1"; +"inception_v3/activation_85/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_85/fake_quantize/AsymmQuant/ReadVariableOp"; +"inception_v3/activation_85/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_85/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"inception_v3/activation_85/fake_quantize/AsymmQuant/ReadVariableOp" -> "inception_v3/activation_85/fake_quantize/AsymmQuant/add_1"; +"inception_v3/activation_85/fake_quantize/AsymmQuant/add_1" -> "inception_v3/activation_85/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_85/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "inception_v3/activation_85/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_85/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "inception_v3/mixed10/concat"; "inception_v3/mixed9_1/concat/axis" -> "inception_v3/mixed9_1/concat"; "inception_v3/mixed9_1/concat" -> "inception_v3/mixed10/concat"; "inception_v3/concatenate_1/concat/axis" -> "inception_v3/concatenate_1/concat"; "inception_v3/concatenate_1/concat" -> "inception_v3/mixed10/concat"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/Abs/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/Abs"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/Abs" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/add"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/add/y" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/add"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/add" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/add_1"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/add_1"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/add_1" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/FakeQuantWithMinMaxVars/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/FakeQuantWithMinMaxVars" -> "inception_v3/mixed10/concat"; +"inception_v3/activation_93/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/activation_93/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; +"inception_v3/activation_93/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "inception_v3/activation_93/fake_quantize/AsymmQuant/Abs"; 
+"inception_v3/activation_93/fake_quantize/AsymmQuant/Abs" -> "inception_v3/activation_93/fake_quantize/AsymmQuant/add"; +"inception_v3/activation_93/fake_quantize/AsymmQuant/add/y" -> "inception_v3/activation_93/fake_quantize/AsymmQuant/add"; +"inception_v3/activation_93/fake_quantize/AsymmQuant/add" -> "inception_v3/activation_93/fake_quantize/AsymmQuant/add_1"; +"inception_v3/activation_93/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_93/fake_quantize/AsymmQuant/ReadVariableOp"; +"inception_v3/activation_93/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_93/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"inception_v3/activation_93/fake_quantize/AsymmQuant/ReadVariableOp" -> "inception_v3/activation_93/fake_quantize/AsymmQuant/add_1"; +"inception_v3/activation_93/fake_quantize/AsymmQuant/add_1" -> "inception_v3/activation_93/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_93/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "inception_v3/activation_93/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_93/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "inception_v3/mixed10/concat"; "inception_v3/mixed10/concat/axis" -> "inception_v3/mixed10/concat"; "inception_v3/mixed10/concat" -> "inception_v3/avg_pool/Mean"; "inception_v3/avg_pool/Mean/reduction_indices" -> "inception_v3/avg_pool/Mean"; diff --git a/tests/tensorflow/data/reference_graphs/2.11/quantized/hw/GPU/inception_v3.dot b/tests/tensorflow/data/reference_graphs/2.11/quantized/hw/GPU/inception_v3.dot index ee079e7a2c7..4bdff5fc871 100644 --- a/tests/tensorflow/data/reference_graphs/2.11/quantized/hw/GPU/inception_v3.dot +++ b/tests/tensorflow/data/reference_graphs/2.11/quantized/hw/GPU/inception_v3.dot @@ -2853,18 +2853,6 @@ args_0 [op=Placeholder]; "inception_v3/conv2d_87/SymmQuant/FakeQuantWithMinMaxVarsPerChannel/ReadVariableOp" [op=ReadVariableOp]; "inception_v3/conv2d_87/SymmQuant/FakeQuantWithMinMaxVarsPerChannel" [op=FakeQuantWithMinMaxVarsPerChannel]; "inception_v3/conv2d_87/Conv2D" [op=Conv2D]; -"inception_v3/conv2d_85/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; -"inception_v3/conv2d_85/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/conv2d_85/SymmQuant/Abs" [op=Abs]; -"inception_v3/conv2d_85/SymmQuant/add/y" [op=Const]; -"inception_v3/conv2d_85/SymmQuant/add" [op=AddV2]; -"inception_v3/conv2d_85/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; -"inception_v3/conv2d_85/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/conv2d_85/SymmQuant/mul" [op=Mul]; -"inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel/ReadVariableOp/resource" [op=Placeholder]; -"inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel" [op=FakeQuantWithMinMaxVarsPerChannel]; -"inception_v3/conv2d_85/Conv2D" [op=Conv2D]; "inception_v3/conv2d_93/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; "inception_v3/conv2d_93/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; "inception_v3/conv2d_93/SymmQuant/Abs" [op=Abs]; @@ -2909,14 +2897,18 @@ args_0 [op=Placeholder]; "inception_v3/batch_normalization_87/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; "inception_v3/batch_normalization_87/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; "inception_v3/batch_normalization_87/FusedBatchNormV3" 
[op=FusedBatchNormV3]; -"inception_v3/batch_normalization_85/Const" [op=Const]; -"inception_v3/batch_normalization_85/ReadVariableOp/resource" [op=Placeholder]; -"inception_v3/batch_normalization_85/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; -"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; -"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; -"inception_v3/batch_normalization_85/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_v3/conv2d_85/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/conv2d_85/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/conv2d_85/SymmQuant/Abs" [op=Abs]; +"inception_v3/conv2d_85/SymmQuant/add/y" [op=Const]; +"inception_v3/conv2d_85/SymmQuant/add" [op=AddV2]; +"inception_v3/conv2d_85/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/conv2d_85/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/conv2d_85/SymmQuant/mul" [op=Mul]; +"inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel" [op=FakeQuantWithMinMaxVarsPerChannel]; +"inception_v3/conv2d_85/Conv2D" [op=Conv2D]; "inception_v3/batch_normalization_93/Const" [op=Const]; "inception_v3/batch_normalization_93/ReadVariableOp/resource" [op=Placeholder]; "inception_v3/batch_normalization_93/ReadVariableOp" [op=ReadVariableOp]; @@ -2929,62 +2921,80 @@ args_0 [op=Placeholder]; "inception_v3/activation_91/Relu" [op=Relu]; "inception_v3/activation_88/Relu" [op=Relu]; "inception_v3/activation_87/Relu" [op=Relu]; -"inception_v3/activation_85/Relu" [op=Relu]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/Abs" [op=Abs]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/add/y" [op=Const]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/add" [op=AddV2]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/add_1" [op=AddV2]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"inception_v3/batch_normalization_85/Const" [op=Const]; +"inception_v3/batch_normalization_85/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/batch_normalization_85/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; 
+"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_v3/batch_normalization_85/FusedBatchNormV3" [op=FusedBatchNormV3]; "inception_v3/activation_93/Relu" [op=Relu]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/Abs/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/Abs" [op=Abs]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/add/y" [op=Const]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/add" [op=AddV2]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/add_1" [op=AddV2]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/Abs/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/Abs" [op=Abs]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/add/y" [op=Const]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/add" [op=AddV2]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/add_1" [op=AddV2]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/Abs/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/Abs" [op=Abs]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/add/y" [op=Const]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/add" [op=AddV2]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/add_1" [op=AddV2]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/Abs/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/Abs" [op=Abs]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/add/y" [op=Const]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/add" [op=AddV2]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/add_1" [op=AddV2]; 
-"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"inception_v3/activation_91/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/activation_91/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_91/fake_quantize/AsymmQuant/Abs" [op=Abs]; +"inception_v3/activation_91/fake_quantize/AsymmQuant/add/y" [op=Const]; +"inception_v3/activation_91/fake_quantize/AsymmQuant/add" [op=AddV2]; +"inception_v3/activation_91/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/activation_91/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_91/fake_quantize/AsymmQuant/add_1" [op=AddV2]; +"inception_v3/activation_91/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_91/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"inception_v3/activation_92/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/activation_92/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_92/fake_quantize/AsymmQuant/Abs" [op=Abs]; +"inception_v3/activation_92/fake_quantize/AsymmQuant/add/y" [op=Const]; +"inception_v3/activation_92/fake_quantize/AsymmQuant/add" [op=AddV2]; +"inception_v3/activation_92/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/activation_92/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_92/fake_quantize/AsymmQuant/add_1" [op=AddV2]; +"inception_v3/activation_92/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_92/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"inception_v3/activation_87/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/activation_87/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_87/fake_quantize/AsymmQuant/Abs" [op=Abs]; +"inception_v3/activation_87/fake_quantize/AsymmQuant/add/y" [op=Const]; +"inception_v3/activation_87/fake_quantize/AsymmQuant/add" [op=AddV2]; +"inception_v3/activation_87/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/activation_87/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_87/fake_quantize/AsymmQuant/add_1" [op=AddV2]; +"inception_v3/activation_87/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_87/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"inception_v3/activation_88/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/activation_88/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_88/fake_quantize/AsymmQuant/Abs" [op=Abs]; +"inception_v3/activation_88/fake_quantize/AsymmQuant/add/y" [op=Const]; +"inception_v3/activation_88/fake_quantize/AsymmQuant/add" [op=AddV2]; +"inception_v3/activation_88/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/activation_88/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; 
+"inception_v3/activation_88/fake_quantize/AsymmQuant/add_1" [op=AddV2]; +"inception_v3/activation_88/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_88/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"inception_v3/activation_85/Relu" [op=Relu]; +"inception_v3/activation_85/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/activation_85/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_85/fake_quantize/AsymmQuant/Abs" [op=Abs]; +"inception_v3/activation_85/fake_quantize/AsymmQuant/add/y" [op=Const]; +"inception_v3/activation_85/fake_quantize/AsymmQuant/add" [op=AddV2]; +"inception_v3/activation_85/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/activation_85/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_85/fake_quantize/AsymmQuant/add_1" [op=AddV2]; +"inception_v3/activation_85/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_85/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; "inception_v3/mixed9_1/concat/axis" [op=Const]; "inception_v3/mixed9_1/concat" [op=ConcatV2]; "inception_v3/concatenate_1/concat/axis" [op=Const]; "inception_v3/concatenate_1/concat" [op=ConcatV2]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/Abs/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/Abs" [op=Abs]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/add/y" [op=Const]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/add" [op=AddV2]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/add_1" [op=AddV2]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"inception_v3/activation_93/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/activation_93/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_93/fake_quantize/AsymmQuant/Abs" [op=Abs]; +"inception_v3/activation_93/fake_quantize/AsymmQuant/add/y" [op=Const]; +"inception_v3/activation_93/fake_quantize/AsymmQuant/add" [op=AddV2]; +"inception_v3/activation_93/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/activation_93/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_93/fake_quantize/AsymmQuant/add_1" [op=AddV2]; +"inception_v3/activation_93/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_93/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; "inception_v3/mixed10/concat/axis" [op=Const]; "inception_v3/mixed10/concat" [op=ConcatV2]; "inception_v3/avg_pool/Mean/reduction_indices" [op=Const]; @@ -6155,19 +6165,6 @@ args_0 -> "inception_v3/input_1/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars "inception_v3/conv2d_87/SymmQuant/FakeQuantWithMinMaxVarsPerChannel/ReadVariableOp" -> 
"inception_v3/conv2d_87/SymmQuant/FakeQuantWithMinMaxVarsPerChannel"; "inception_v3/conv2d_87/SymmQuant/FakeQuantWithMinMaxVarsPerChannel" -> "inception_v3/conv2d_87/Conv2D"; "inception_v3/conv2d_87/Conv2D" -> "inception_v3/batch_normalization_87/FusedBatchNormV3"; -"inception_v3/conv2d_85/SymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/conv2d_85/SymmQuant/Abs/ReadVariableOp"; -"inception_v3/conv2d_85/SymmQuant/Abs/ReadVariableOp" -> "inception_v3/conv2d_85/SymmQuant/Abs"; -"inception_v3/conv2d_85/SymmQuant/Abs" -> "inception_v3/conv2d_85/SymmQuant/add"; -"inception_v3/conv2d_85/SymmQuant/add/y" -> "inception_v3/conv2d_85/SymmQuant/add"; -"inception_v3/conv2d_85/SymmQuant/add" -> "inception_v3/conv2d_85/SymmQuant/mul"; -"inception_v3/conv2d_85/SymmQuant/add" -> "inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel"; -"inception_v3/conv2d_85/SymmQuant/mul/ReadVariableOp/resource" -> "inception_v3/conv2d_85/SymmQuant/mul/ReadVariableOp"; -"inception_v3/conv2d_85/SymmQuant/mul/ReadVariableOp" -> "inception_v3/conv2d_85/SymmQuant/mul"; -"inception_v3/conv2d_85/SymmQuant/mul" -> "inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel"; -"inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel/ReadVariableOp/resource" -> "inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel/ReadVariableOp"; -"inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel/ReadVariableOp" -> "inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel"; -"inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel" -> "inception_v3/conv2d_85/Conv2D"; -"inception_v3/conv2d_85/Conv2D" -> "inception_v3/batch_normalization_85/FusedBatchNormV3"; "inception_v3/conv2d_93/SymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/conv2d_93/SymmQuant/Abs/ReadVariableOp"; "inception_v3/conv2d_93/SymmQuant/Abs/ReadVariableOp" -> "inception_v3/conv2d_93/SymmQuant/Abs"; "inception_v3/conv2d_93/SymmQuant/Abs" -> "inception_v3/conv2d_93/SymmQuant/add"; @@ -6213,14 +6210,19 @@ args_0 -> "inception_v3/input_1/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars "inception_v3/batch_normalization_87/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_v3/batch_normalization_87/FusedBatchNormV3/ReadVariableOp_1"; "inception_v3/batch_normalization_87/FusedBatchNormV3/ReadVariableOp_1" -> "inception_v3/batch_normalization_87/FusedBatchNormV3"; "inception_v3/batch_normalization_87/FusedBatchNormV3" -> "inception_v3/activation_87/Relu"; -"inception_v3/batch_normalization_85/Const" -> "inception_v3/batch_normalization_85/FusedBatchNormV3"; -"inception_v3/batch_normalization_85/ReadVariableOp/resource" -> "inception_v3/batch_normalization_85/ReadVariableOp"; -"inception_v3/batch_normalization_85/ReadVariableOp" -> "inception_v3/batch_normalization_85/FusedBatchNormV3"; -"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp"; -"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp" -> "inception_v3/batch_normalization_85/FusedBatchNormV3"; -"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp_1"; -"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp_1" -> "inception_v3/batch_normalization_85/FusedBatchNormV3"; -"inception_v3/batch_normalization_85/FusedBatchNormV3" -> "inception_v3/activation_85/Relu"; 
+"inception_v3/conv2d_85/SymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/conv2d_85/SymmQuant/Abs/ReadVariableOp"; +"inception_v3/conv2d_85/SymmQuant/Abs/ReadVariableOp" -> "inception_v3/conv2d_85/SymmQuant/Abs"; +"inception_v3/conv2d_85/SymmQuant/Abs" -> "inception_v3/conv2d_85/SymmQuant/add"; +"inception_v3/conv2d_85/SymmQuant/add/y" -> "inception_v3/conv2d_85/SymmQuant/add"; +"inception_v3/conv2d_85/SymmQuant/add" -> "inception_v3/conv2d_85/SymmQuant/mul"; +"inception_v3/conv2d_85/SymmQuant/add" -> "inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel"; +"inception_v3/conv2d_85/SymmQuant/mul/ReadVariableOp/resource" -> "inception_v3/conv2d_85/SymmQuant/mul/ReadVariableOp"; +"inception_v3/conv2d_85/SymmQuant/mul/ReadVariableOp" -> "inception_v3/conv2d_85/SymmQuant/mul"; +"inception_v3/conv2d_85/SymmQuant/mul" -> "inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel"; +"inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel/ReadVariableOp/resource" -> "inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel/ReadVariableOp"; +"inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel/ReadVariableOp" -> "inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel"; +"inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel" -> "inception_v3/conv2d_85/Conv2D"; +"inception_v3/conv2d_85/Conv2D" -> "inception_v3/batch_normalization_85/FusedBatchNormV3"; "inception_v3/batch_normalization_93/Const" -> "inception_v3/batch_normalization_93/FusedBatchNormV3"; "inception_v3/batch_normalization_93/ReadVariableOp/resource" -> "inception_v3/batch_normalization_93/ReadVariableOp"; "inception_v3/batch_normalization_93/ReadVariableOp" -> "inception_v3/batch_normalization_93/FusedBatchNormV3"; @@ -6229,82 +6231,90 @@ args_0 -> "inception_v3/input_1/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars "inception_v3/batch_normalization_93/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_v3/batch_normalization_93/FusedBatchNormV3/ReadVariableOp_1"; "inception_v3/batch_normalization_93/FusedBatchNormV3/ReadVariableOp_1" -> "inception_v3/batch_normalization_93/FusedBatchNormV3"; "inception_v3/batch_normalization_93/FusedBatchNormV3" -> "inception_v3/activation_93/Relu"; -"inception_v3/activation_92/Relu" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars"; -"inception_v3/activation_91/Relu" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars"; -"inception_v3/activation_88/Relu" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/Relu" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars"; -"inception_v3/activation_85/Relu" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/Abs/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" -> 
"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/Abs/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/Abs/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/Abs/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/Abs/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/Abs"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/Abs" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/add"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/add/y" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/add"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/add" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/add_1"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/FakeQuantWithMinMaxVars/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> 
"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/FakeQuantWithMinMaxVars/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/add_1"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/add_1" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars" -> "inception_v3/mixed10/concat"; -"inception_v3/activation_93/Relu" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/Abs/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/Abs"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/Abs" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/add"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/add/y" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/add"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/add" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/add_1"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/add_1"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/add_1" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars" -> "inception_v3/concatenate_1/concat"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/Abs/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/Abs"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/Abs" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/add"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/add/y" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/add"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/add" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/add_1"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/add_1"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/add_1" -> 
"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars" -> "inception_v3/concatenate_1/concat"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/Abs/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/Abs"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/Abs" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/add"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/add/y" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/add"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/add" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/add_1"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/add_1"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/add_1" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars" -> "inception_v3/mixed9_1/concat"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/Abs/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/Abs"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/Abs" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/add"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/add/y" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/add"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/add" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/add_1"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/add_1"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/add_1" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/FakeQuantWithMinMaxVars/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/FakeQuantWithMinMaxVars" -> "inception_v3/mixed9_1/concat"; +"inception_v3/activation_92/Relu" -> "inception_v3/activation_92/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_91/Relu" -> "inception_v3/activation_91/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_88/Relu" -> 
"inception_v3/activation_88/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_87/Relu" -> "inception_v3/activation_87/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/batch_normalization_85/Const" -> "inception_v3/batch_normalization_85/FusedBatchNormV3"; +"inception_v3/batch_normalization_85/ReadVariableOp/resource" -> "inception_v3/batch_normalization_85/ReadVariableOp"; +"inception_v3/batch_normalization_85/ReadVariableOp" -> "inception_v3/batch_normalization_85/FusedBatchNormV3"; +"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp"; +"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp" -> "inception_v3/batch_normalization_85/FusedBatchNormV3"; +"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp_1"; +"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp_1" -> "inception_v3/batch_normalization_85/FusedBatchNormV3"; +"inception_v3/batch_normalization_85/FusedBatchNormV3" -> "inception_v3/activation_85/Relu"; +"inception_v3/activation_93/Relu" -> "inception_v3/activation_93/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_91/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/activation_91/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; +"inception_v3/activation_91/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "inception_v3/activation_91/fake_quantize/AsymmQuant/Abs"; +"inception_v3/activation_91/fake_quantize/AsymmQuant/Abs" -> "inception_v3/activation_91/fake_quantize/AsymmQuant/add"; +"inception_v3/activation_91/fake_quantize/AsymmQuant/add/y" -> "inception_v3/activation_91/fake_quantize/AsymmQuant/add"; +"inception_v3/activation_91/fake_quantize/AsymmQuant/add" -> "inception_v3/activation_91/fake_quantize/AsymmQuant/add_1"; +"inception_v3/activation_91/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_91/fake_quantize/AsymmQuant/ReadVariableOp"; +"inception_v3/activation_91/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_91/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"inception_v3/activation_91/fake_quantize/AsymmQuant/ReadVariableOp" -> "inception_v3/activation_91/fake_quantize/AsymmQuant/add_1"; +"inception_v3/activation_91/fake_quantize/AsymmQuant/add_1" -> "inception_v3/activation_91/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_91/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "inception_v3/activation_91/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_91/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "inception_v3/concatenate_1/concat"; +"inception_v3/activation_92/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/activation_92/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; +"inception_v3/activation_92/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "inception_v3/activation_92/fake_quantize/AsymmQuant/Abs"; +"inception_v3/activation_92/fake_quantize/AsymmQuant/Abs" -> "inception_v3/activation_92/fake_quantize/AsymmQuant/add"; +"inception_v3/activation_92/fake_quantize/AsymmQuant/add/y" -> "inception_v3/activation_92/fake_quantize/AsymmQuant/add"; +"inception_v3/activation_92/fake_quantize/AsymmQuant/add" -> "inception_v3/activation_92/fake_quantize/AsymmQuant/add_1"; 
+"inception_v3/activation_92/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_92/fake_quantize/AsymmQuant/ReadVariableOp"; +"inception_v3/activation_92/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_92/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"inception_v3/activation_92/fake_quantize/AsymmQuant/ReadVariableOp" -> "inception_v3/activation_92/fake_quantize/AsymmQuant/add_1"; +"inception_v3/activation_92/fake_quantize/AsymmQuant/add_1" -> "inception_v3/activation_92/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_92/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "inception_v3/activation_92/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_92/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "inception_v3/concatenate_1/concat"; +"inception_v3/activation_87/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; +"inception_v3/activation_87/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/AsymmQuant/Abs"; +"inception_v3/activation_87/fake_quantize/AsymmQuant/Abs" -> "inception_v3/activation_87/fake_quantize/AsymmQuant/add"; +"inception_v3/activation_87/fake_quantize/AsymmQuant/add/y" -> "inception_v3/activation_87/fake_quantize/AsymmQuant/add"; +"inception_v3/activation_87/fake_quantize/AsymmQuant/add" -> "inception_v3/activation_87/fake_quantize/AsymmQuant/add_1"; +"inception_v3/activation_87/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/AsymmQuant/ReadVariableOp"; +"inception_v3/activation_87/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"inception_v3/activation_87/fake_quantize/AsymmQuant/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/AsymmQuant/add_1"; +"inception_v3/activation_87/fake_quantize/AsymmQuant/add_1" -> "inception_v3/activation_87/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_87/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_87/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "inception_v3/mixed9_1/concat"; +"inception_v3/activation_88/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/activation_88/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; +"inception_v3/activation_88/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "inception_v3/activation_88/fake_quantize/AsymmQuant/Abs"; +"inception_v3/activation_88/fake_quantize/AsymmQuant/Abs" -> "inception_v3/activation_88/fake_quantize/AsymmQuant/add"; +"inception_v3/activation_88/fake_quantize/AsymmQuant/add/y" -> "inception_v3/activation_88/fake_quantize/AsymmQuant/add"; +"inception_v3/activation_88/fake_quantize/AsymmQuant/add" -> "inception_v3/activation_88/fake_quantize/AsymmQuant/add_1"; +"inception_v3/activation_88/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_88/fake_quantize/AsymmQuant/ReadVariableOp"; +"inception_v3/activation_88/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_88/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"inception_v3/activation_88/fake_quantize/AsymmQuant/ReadVariableOp" -> 
"inception_v3/activation_88/fake_quantize/AsymmQuant/add_1"; +"inception_v3/activation_88/fake_quantize/AsymmQuant/add_1" -> "inception_v3/activation_88/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_88/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "inception_v3/activation_88/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_88/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "inception_v3/mixed9_1/concat"; +"inception_v3/activation_85/Relu" -> "inception_v3/activation_85/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_85/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/activation_85/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; +"inception_v3/activation_85/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "inception_v3/activation_85/fake_quantize/AsymmQuant/Abs"; +"inception_v3/activation_85/fake_quantize/AsymmQuant/Abs" -> "inception_v3/activation_85/fake_quantize/AsymmQuant/add"; +"inception_v3/activation_85/fake_quantize/AsymmQuant/add/y" -> "inception_v3/activation_85/fake_quantize/AsymmQuant/add"; +"inception_v3/activation_85/fake_quantize/AsymmQuant/add" -> "inception_v3/activation_85/fake_quantize/AsymmQuant/add_1"; +"inception_v3/activation_85/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_85/fake_quantize/AsymmQuant/ReadVariableOp"; +"inception_v3/activation_85/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_85/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"inception_v3/activation_85/fake_quantize/AsymmQuant/ReadVariableOp" -> "inception_v3/activation_85/fake_quantize/AsymmQuant/add_1"; +"inception_v3/activation_85/fake_quantize/AsymmQuant/add_1" -> "inception_v3/activation_85/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_85/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "inception_v3/activation_85/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_85/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "inception_v3/mixed10/concat"; "inception_v3/mixed9_1/concat/axis" -> "inception_v3/mixed9_1/concat"; "inception_v3/mixed9_1/concat" -> "inception_v3/mixed10/concat"; "inception_v3/concatenate_1/concat/axis" -> "inception_v3/concatenate_1/concat"; "inception_v3/concatenate_1/concat" -> "inception_v3/mixed10/concat"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/Abs/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/Abs"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/Abs" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/add"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/add/y" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/add"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/add" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/add_1"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/add_1"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/add_1" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/FakeQuantWithMinMaxVars"; 
-"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/FakeQuantWithMinMaxVars/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/FakeQuantWithMinMaxVars" -> "inception_v3/mixed10/concat"; +"inception_v3/activation_93/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/activation_93/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; +"inception_v3/activation_93/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "inception_v3/activation_93/fake_quantize/AsymmQuant/Abs"; +"inception_v3/activation_93/fake_quantize/AsymmQuant/Abs" -> "inception_v3/activation_93/fake_quantize/AsymmQuant/add"; +"inception_v3/activation_93/fake_quantize/AsymmQuant/add/y" -> "inception_v3/activation_93/fake_quantize/AsymmQuant/add"; +"inception_v3/activation_93/fake_quantize/AsymmQuant/add" -> "inception_v3/activation_93/fake_quantize/AsymmQuant/add_1"; +"inception_v3/activation_93/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_93/fake_quantize/AsymmQuant/ReadVariableOp"; +"inception_v3/activation_93/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_93/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"inception_v3/activation_93/fake_quantize/AsymmQuant/ReadVariableOp" -> "inception_v3/activation_93/fake_quantize/AsymmQuant/add_1"; +"inception_v3/activation_93/fake_quantize/AsymmQuant/add_1" -> "inception_v3/activation_93/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_93/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "inception_v3/activation_93/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_93/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "inception_v3/mixed10/concat"; "inception_v3/mixed10/concat/axis" -> "inception_v3/mixed10/concat"; "inception_v3/mixed10/concat" -> "inception_v3/avg_pool/Mean"; "inception_v3/avg_pool/Mean/reduction_indices" -> "inception_v3/avg_pool/Mean"; diff --git a/tests/tensorflow/data/reference_graphs/2.11/quantized/hw/VPU/inception_v3.dot b/tests/tensorflow/data/reference_graphs/2.11/quantized/hw/VPU/inception_v3.dot index ee079e7a2c7..4bdff5fc871 100644 --- a/tests/tensorflow/data/reference_graphs/2.11/quantized/hw/VPU/inception_v3.dot +++ b/tests/tensorflow/data/reference_graphs/2.11/quantized/hw/VPU/inception_v3.dot @@ -2853,18 +2853,6 @@ args_0 [op=Placeholder]; "inception_v3/conv2d_87/SymmQuant/FakeQuantWithMinMaxVarsPerChannel/ReadVariableOp" [op=ReadVariableOp]; "inception_v3/conv2d_87/SymmQuant/FakeQuantWithMinMaxVarsPerChannel" [op=FakeQuantWithMinMaxVarsPerChannel]; "inception_v3/conv2d_87/Conv2D" [op=Conv2D]; -"inception_v3/conv2d_85/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; -"inception_v3/conv2d_85/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/conv2d_85/SymmQuant/Abs" [op=Abs]; -"inception_v3/conv2d_85/SymmQuant/add/y" [op=Const]; -"inception_v3/conv2d_85/SymmQuant/add" [op=AddV2]; -"inception_v3/conv2d_85/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; -"inception_v3/conv2d_85/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/conv2d_85/SymmQuant/mul" [op=Mul]; -"inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel/ReadVariableOp/resource" [op=Placeholder]; -"inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel/ReadVariableOp" [op=ReadVariableOp]; 
-"inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel" [op=FakeQuantWithMinMaxVarsPerChannel]; -"inception_v3/conv2d_85/Conv2D" [op=Conv2D]; "inception_v3/conv2d_93/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; "inception_v3/conv2d_93/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; "inception_v3/conv2d_93/SymmQuant/Abs" [op=Abs]; @@ -2909,14 +2897,18 @@ args_0 [op=Placeholder]; "inception_v3/batch_normalization_87/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; "inception_v3/batch_normalization_87/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; "inception_v3/batch_normalization_87/FusedBatchNormV3" [op=FusedBatchNormV3]; -"inception_v3/batch_normalization_85/Const" [op=Const]; -"inception_v3/batch_normalization_85/ReadVariableOp/resource" [op=Placeholder]; -"inception_v3/batch_normalization_85/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; -"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; -"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; -"inception_v3/batch_normalization_85/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_v3/conv2d_85/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/conv2d_85/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/conv2d_85/SymmQuant/Abs" [op=Abs]; +"inception_v3/conv2d_85/SymmQuant/add/y" [op=Const]; +"inception_v3/conv2d_85/SymmQuant/add" [op=AddV2]; +"inception_v3/conv2d_85/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/conv2d_85/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/conv2d_85/SymmQuant/mul" [op=Mul]; +"inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel" [op=FakeQuantWithMinMaxVarsPerChannel]; +"inception_v3/conv2d_85/Conv2D" [op=Conv2D]; "inception_v3/batch_normalization_93/Const" [op=Const]; "inception_v3/batch_normalization_93/ReadVariableOp/resource" [op=Placeholder]; "inception_v3/batch_normalization_93/ReadVariableOp" [op=ReadVariableOp]; @@ -2929,62 +2921,80 @@ args_0 [op=Placeholder]; "inception_v3/activation_91/Relu" [op=Relu]; "inception_v3/activation_88/Relu" [op=Relu]; "inception_v3/activation_87/Relu" [op=Relu]; -"inception_v3/activation_85/Relu" [op=Relu]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/Abs" [op=Abs]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/add/y" [op=Const]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/add" [op=AddV2]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/add_1" [op=AddV2]; 
-"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"inception_v3/batch_normalization_85/Const" [op=Const]; +"inception_v3/batch_normalization_85/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/batch_normalization_85/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_v3/batch_normalization_85/FusedBatchNormV3" [op=FusedBatchNormV3]; "inception_v3/activation_93/Relu" [op=Relu]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/Abs/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/Abs" [op=Abs]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/add/y" [op=Const]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/add" [op=AddV2]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/add_1" [op=AddV2]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/Abs/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/Abs" [op=Abs]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/add/y" [op=Const]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/add" [op=AddV2]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/add_1" [op=AddV2]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/Abs/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/Abs" [op=Abs]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/add/y" [op=Const]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/add" [op=AddV2]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/add_1" [op=AddV2]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; 
-"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/Abs/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/Abs" [op=Abs]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/add/y" [op=Const]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/add" [op=AddV2]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/add_1" [op=AddV2]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"inception_v3/activation_91/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/activation_91/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_91/fake_quantize/AsymmQuant/Abs" [op=Abs]; +"inception_v3/activation_91/fake_quantize/AsymmQuant/add/y" [op=Const]; +"inception_v3/activation_91/fake_quantize/AsymmQuant/add" [op=AddV2]; +"inception_v3/activation_91/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/activation_91/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_91/fake_quantize/AsymmQuant/add_1" [op=AddV2]; +"inception_v3/activation_91/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_91/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"inception_v3/activation_92/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/activation_92/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_92/fake_quantize/AsymmQuant/Abs" [op=Abs]; +"inception_v3/activation_92/fake_quantize/AsymmQuant/add/y" [op=Const]; +"inception_v3/activation_92/fake_quantize/AsymmQuant/add" [op=AddV2]; +"inception_v3/activation_92/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/activation_92/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_92/fake_quantize/AsymmQuant/add_1" [op=AddV2]; +"inception_v3/activation_92/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_92/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"inception_v3/activation_87/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/activation_87/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_87/fake_quantize/AsymmQuant/Abs" [op=Abs]; +"inception_v3/activation_87/fake_quantize/AsymmQuant/add/y" [op=Const]; +"inception_v3/activation_87/fake_quantize/AsymmQuant/add" [op=AddV2]; +"inception_v3/activation_87/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/activation_87/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_87/fake_quantize/AsymmQuant/add_1" [op=AddV2]; +"inception_v3/activation_87/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" 
[op=ReadVariableOp]; +"inception_v3/activation_87/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"inception_v3/activation_88/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/activation_88/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_88/fake_quantize/AsymmQuant/Abs" [op=Abs]; +"inception_v3/activation_88/fake_quantize/AsymmQuant/add/y" [op=Const]; +"inception_v3/activation_88/fake_quantize/AsymmQuant/add" [op=AddV2]; +"inception_v3/activation_88/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/activation_88/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_88/fake_quantize/AsymmQuant/add_1" [op=AddV2]; +"inception_v3/activation_88/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_88/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"inception_v3/activation_85/Relu" [op=Relu]; +"inception_v3/activation_85/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/activation_85/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_85/fake_quantize/AsymmQuant/Abs" [op=Abs]; +"inception_v3/activation_85/fake_quantize/AsymmQuant/add/y" [op=Const]; +"inception_v3/activation_85/fake_quantize/AsymmQuant/add" [op=AddV2]; +"inception_v3/activation_85/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/activation_85/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_85/fake_quantize/AsymmQuant/add_1" [op=AddV2]; +"inception_v3/activation_85/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_85/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; "inception_v3/mixed9_1/concat/axis" [op=Const]; "inception_v3/mixed9_1/concat" [op=ConcatV2]; "inception_v3/concatenate_1/concat/axis" [op=Const]; "inception_v3/concatenate_1/concat" [op=ConcatV2]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/Abs/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/Abs" [op=Abs]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/add/y" [op=Const]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/add" [op=AddV2]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/add_1" [op=AddV2]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"inception_v3/activation_93/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/activation_93/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_93/fake_quantize/AsymmQuant/Abs" [op=Abs]; +"inception_v3/activation_93/fake_quantize/AsymmQuant/add/y" [op=Const]; +"inception_v3/activation_93/fake_quantize/AsymmQuant/add" [op=AddV2]; +"inception_v3/activation_93/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; 
+"inception_v3/activation_93/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_93/fake_quantize/AsymmQuant/add_1" [op=AddV2]; +"inception_v3/activation_93/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_93/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; "inception_v3/mixed10/concat/axis" [op=Const]; "inception_v3/mixed10/concat" [op=ConcatV2]; "inception_v3/avg_pool/Mean/reduction_indices" [op=Const]; @@ -6155,19 +6165,6 @@ args_0 -> "inception_v3/input_1/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars "inception_v3/conv2d_87/SymmQuant/FakeQuantWithMinMaxVarsPerChannel/ReadVariableOp" -> "inception_v3/conv2d_87/SymmQuant/FakeQuantWithMinMaxVarsPerChannel"; "inception_v3/conv2d_87/SymmQuant/FakeQuantWithMinMaxVarsPerChannel" -> "inception_v3/conv2d_87/Conv2D"; "inception_v3/conv2d_87/Conv2D" -> "inception_v3/batch_normalization_87/FusedBatchNormV3"; -"inception_v3/conv2d_85/SymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/conv2d_85/SymmQuant/Abs/ReadVariableOp"; -"inception_v3/conv2d_85/SymmQuant/Abs/ReadVariableOp" -> "inception_v3/conv2d_85/SymmQuant/Abs"; -"inception_v3/conv2d_85/SymmQuant/Abs" -> "inception_v3/conv2d_85/SymmQuant/add"; -"inception_v3/conv2d_85/SymmQuant/add/y" -> "inception_v3/conv2d_85/SymmQuant/add"; -"inception_v3/conv2d_85/SymmQuant/add" -> "inception_v3/conv2d_85/SymmQuant/mul"; -"inception_v3/conv2d_85/SymmQuant/add" -> "inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel"; -"inception_v3/conv2d_85/SymmQuant/mul/ReadVariableOp/resource" -> "inception_v3/conv2d_85/SymmQuant/mul/ReadVariableOp"; -"inception_v3/conv2d_85/SymmQuant/mul/ReadVariableOp" -> "inception_v3/conv2d_85/SymmQuant/mul"; -"inception_v3/conv2d_85/SymmQuant/mul" -> "inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel"; -"inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel/ReadVariableOp/resource" -> "inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel/ReadVariableOp"; -"inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel/ReadVariableOp" -> "inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel"; -"inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel" -> "inception_v3/conv2d_85/Conv2D"; -"inception_v3/conv2d_85/Conv2D" -> "inception_v3/batch_normalization_85/FusedBatchNormV3"; "inception_v3/conv2d_93/SymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/conv2d_93/SymmQuant/Abs/ReadVariableOp"; "inception_v3/conv2d_93/SymmQuant/Abs/ReadVariableOp" -> "inception_v3/conv2d_93/SymmQuant/Abs"; "inception_v3/conv2d_93/SymmQuant/Abs" -> "inception_v3/conv2d_93/SymmQuant/add"; @@ -6213,14 +6210,19 @@ args_0 -> "inception_v3/input_1/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars "inception_v3/batch_normalization_87/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_v3/batch_normalization_87/FusedBatchNormV3/ReadVariableOp_1"; "inception_v3/batch_normalization_87/FusedBatchNormV3/ReadVariableOp_1" -> "inception_v3/batch_normalization_87/FusedBatchNormV3"; "inception_v3/batch_normalization_87/FusedBatchNormV3" -> "inception_v3/activation_87/Relu"; -"inception_v3/batch_normalization_85/Const" -> "inception_v3/batch_normalization_85/FusedBatchNormV3"; -"inception_v3/batch_normalization_85/ReadVariableOp/resource" -> "inception_v3/batch_normalization_85/ReadVariableOp"; -"inception_v3/batch_normalization_85/ReadVariableOp" -> 
"inception_v3/batch_normalization_85/FusedBatchNormV3"; -"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp"; -"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp" -> "inception_v3/batch_normalization_85/FusedBatchNormV3"; -"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp_1"; -"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp_1" -> "inception_v3/batch_normalization_85/FusedBatchNormV3"; -"inception_v3/batch_normalization_85/FusedBatchNormV3" -> "inception_v3/activation_85/Relu"; +"inception_v3/conv2d_85/SymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/conv2d_85/SymmQuant/Abs/ReadVariableOp"; +"inception_v3/conv2d_85/SymmQuant/Abs/ReadVariableOp" -> "inception_v3/conv2d_85/SymmQuant/Abs"; +"inception_v3/conv2d_85/SymmQuant/Abs" -> "inception_v3/conv2d_85/SymmQuant/add"; +"inception_v3/conv2d_85/SymmQuant/add/y" -> "inception_v3/conv2d_85/SymmQuant/add"; +"inception_v3/conv2d_85/SymmQuant/add" -> "inception_v3/conv2d_85/SymmQuant/mul"; +"inception_v3/conv2d_85/SymmQuant/add" -> "inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel"; +"inception_v3/conv2d_85/SymmQuant/mul/ReadVariableOp/resource" -> "inception_v3/conv2d_85/SymmQuant/mul/ReadVariableOp"; +"inception_v3/conv2d_85/SymmQuant/mul/ReadVariableOp" -> "inception_v3/conv2d_85/SymmQuant/mul"; +"inception_v3/conv2d_85/SymmQuant/mul" -> "inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel"; +"inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel/ReadVariableOp/resource" -> "inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel/ReadVariableOp"; +"inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel/ReadVariableOp" -> "inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel"; +"inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel" -> "inception_v3/conv2d_85/Conv2D"; +"inception_v3/conv2d_85/Conv2D" -> "inception_v3/batch_normalization_85/FusedBatchNormV3"; "inception_v3/batch_normalization_93/Const" -> "inception_v3/batch_normalization_93/FusedBatchNormV3"; "inception_v3/batch_normalization_93/ReadVariableOp/resource" -> "inception_v3/batch_normalization_93/ReadVariableOp"; "inception_v3/batch_normalization_93/ReadVariableOp" -> "inception_v3/batch_normalization_93/FusedBatchNormV3"; @@ -6229,82 +6231,90 @@ args_0 -> "inception_v3/input_1/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars "inception_v3/batch_normalization_93/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_v3/batch_normalization_93/FusedBatchNormV3/ReadVariableOp_1"; "inception_v3/batch_normalization_93/FusedBatchNormV3/ReadVariableOp_1" -> "inception_v3/batch_normalization_93/FusedBatchNormV3"; "inception_v3/batch_normalization_93/FusedBatchNormV3" -> "inception_v3/activation_93/Relu"; -"inception_v3/activation_92/Relu" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars"; -"inception_v3/activation_91/Relu" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars"; -"inception_v3/activation_88/Relu" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/Relu" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars"; 
-"inception_v3/activation_85/Relu" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/Abs/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/Abs/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/Abs/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/Abs/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/Abs/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/Abs"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/Abs" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/add"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/add/y" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/add"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/add" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/add_1"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> 
"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/FakeQuantWithMinMaxVars/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/FakeQuantWithMinMaxVars/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/add_1"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/add_1" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars" -> "inception_v3/mixed10/concat"; -"inception_v3/activation_93/Relu" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/Abs/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/Abs"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/Abs" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/add"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/add/y" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/add"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/add" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/add_1"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/add_1"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/add_1" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars" -> "inception_v3/concatenate_1/concat"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/Abs/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/Abs"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/Abs" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/add"; 
-"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/add/y" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/add"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/add" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/add_1"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/add_1"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/add_1" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars" -> "inception_v3/concatenate_1/concat"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/Abs/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/Abs"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/Abs" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/add"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/add/y" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/add"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/add" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/add_1"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/add_1"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/add_1" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars" -> "inception_v3/mixed9_1/concat"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/Abs/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/Abs"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/Abs" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/add"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/add/y" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/add"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/add" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/add_1"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/add_1"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/add_1" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/FakeQuantWithMinMaxVars"; 
-"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/FakeQuantWithMinMaxVars/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/FakeQuantWithMinMaxVars" -> "inception_v3/mixed9_1/concat"; +"inception_v3/activation_92/Relu" -> "inception_v3/activation_92/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_91/Relu" -> "inception_v3/activation_91/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_88/Relu" -> "inception_v3/activation_88/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_87/Relu" -> "inception_v3/activation_87/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/batch_normalization_85/Const" -> "inception_v3/batch_normalization_85/FusedBatchNormV3"; +"inception_v3/batch_normalization_85/ReadVariableOp/resource" -> "inception_v3/batch_normalization_85/ReadVariableOp"; +"inception_v3/batch_normalization_85/ReadVariableOp" -> "inception_v3/batch_normalization_85/FusedBatchNormV3"; +"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp"; +"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp" -> "inception_v3/batch_normalization_85/FusedBatchNormV3"; +"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp_1"; +"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp_1" -> "inception_v3/batch_normalization_85/FusedBatchNormV3"; +"inception_v3/batch_normalization_85/FusedBatchNormV3" -> "inception_v3/activation_85/Relu"; +"inception_v3/activation_93/Relu" -> "inception_v3/activation_93/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_91/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/activation_91/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; +"inception_v3/activation_91/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "inception_v3/activation_91/fake_quantize/AsymmQuant/Abs"; +"inception_v3/activation_91/fake_quantize/AsymmQuant/Abs" -> "inception_v3/activation_91/fake_quantize/AsymmQuant/add"; +"inception_v3/activation_91/fake_quantize/AsymmQuant/add/y" -> "inception_v3/activation_91/fake_quantize/AsymmQuant/add"; +"inception_v3/activation_91/fake_quantize/AsymmQuant/add" -> "inception_v3/activation_91/fake_quantize/AsymmQuant/add_1"; +"inception_v3/activation_91/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_91/fake_quantize/AsymmQuant/ReadVariableOp"; +"inception_v3/activation_91/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_91/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"inception_v3/activation_91/fake_quantize/AsymmQuant/ReadVariableOp" -> "inception_v3/activation_91/fake_quantize/AsymmQuant/add_1"; +"inception_v3/activation_91/fake_quantize/AsymmQuant/add_1" -> "inception_v3/activation_91/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_91/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "inception_v3/activation_91/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_91/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "inception_v3/concatenate_1/concat"; 
+"inception_v3/activation_92/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/activation_92/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; +"inception_v3/activation_92/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "inception_v3/activation_92/fake_quantize/AsymmQuant/Abs"; +"inception_v3/activation_92/fake_quantize/AsymmQuant/Abs" -> "inception_v3/activation_92/fake_quantize/AsymmQuant/add"; +"inception_v3/activation_92/fake_quantize/AsymmQuant/add/y" -> "inception_v3/activation_92/fake_quantize/AsymmQuant/add"; +"inception_v3/activation_92/fake_quantize/AsymmQuant/add" -> "inception_v3/activation_92/fake_quantize/AsymmQuant/add_1"; +"inception_v3/activation_92/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_92/fake_quantize/AsymmQuant/ReadVariableOp"; +"inception_v3/activation_92/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_92/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"inception_v3/activation_92/fake_quantize/AsymmQuant/ReadVariableOp" -> "inception_v3/activation_92/fake_quantize/AsymmQuant/add_1"; +"inception_v3/activation_92/fake_quantize/AsymmQuant/add_1" -> "inception_v3/activation_92/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_92/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "inception_v3/activation_92/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_92/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "inception_v3/concatenate_1/concat"; +"inception_v3/activation_87/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; +"inception_v3/activation_87/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/AsymmQuant/Abs"; +"inception_v3/activation_87/fake_quantize/AsymmQuant/Abs" -> "inception_v3/activation_87/fake_quantize/AsymmQuant/add"; +"inception_v3/activation_87/fake_quantize/AsymmQuant/add/y" -> "inception_v3/activation_87/fake_quantize/AsymmQuant/add"; +"inception_v3/activation_87/fake_quantize/AsymmQuant/add" -> "inception_v3/activation_87/fake_quantize/AsymmQuant/add_1"; +"inception_v3/activation_87/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/AsymmQuant/ReadVariableOp"; +"inception_v3/activation_87/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"inception_v3/activation_87/fake_quantize/AsymmQuant/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/AsymmQuant/add_1"; +"inception_v3/activation_87/fake_quantize/AsymmQuant/add_1" -> "inception_v3/activation_87/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_87/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_87/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "inception_v3/mixed9_1/concat"; +"inception_v3/activation_88/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/activation_88/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; +"inception_v3/activation_88/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "inception_v3/activation_88/fake_quantize/AsymmQuant/Abs"; +"inception_v3/activation_88/fake_quantize/AsymmQuant/Abs" -> "inception_v3/activation_88/fake_quantize/AsymmQuant/add"; 
+"inception_v3/activation_88/fake_quantize/AsymmQuant/add/y" -> "inception_v3/activation_88/fake_quantize/AsymmQuant/add"; +"inception_v3/activation_88/fake_quantize/AsymmQuant/add" -> "inception_v3/activation_88/fake_quantize/AsymmQuant/add_1"; +"inception_v3/activation_88/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_88/fake_quantize/AsymmQuant/ReadVariableOp"; +"inception_v3/activation_88/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_88/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"inception_v3/activation_88/fake_quantize/AsymmQuant/ReadVariableOp" -> "inception_v3/activation_88/fake_quantize/AsymmQuant/add_1"; +"inception_v3/activation_88/fake_quantize/AsymmQuant/add_1" -> "inception_v3/activation_88/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_88/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "inception_v3/activation_88/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_88/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "inception_v3/mixed9_1/concat"; +"inception_v3/activation_85/Relu" -> "inception_v3/activation_85/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_85/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/activation_85/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; +"inception_v3/activation_85/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "inception_v3/activation_85/fake_quantize/AsymmQuant/Abs"; +"inception_v3/activation_85/fake_quantize/AsymmQuant/Abs" -> "inception_v3/activation_85/fake_quantize/AsymmQuant/add"; +"inception_v3/activation_85/fake_quantize/AsymmQuant/add/y" -> "inception_v3/activation_85/fake_quantize/AsymmQuant/add"; +"inception_v3/activation_85/fake_quantize/AsymmQuant/add" -> "inception_v3/activation_85/fake_quantize/AsymmQuant/add_1"; +"inception_v3/activation_85/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_85/fake_quantize/AsymmQuant/ReadVariableOp"; +"inception_v3/activation_85/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_85/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"inception_v3/activation_85/fake_quantize/AsymmQuant/ReadVariableOp" -> "inception_v3/activation_85/fake_quantize/AsymmQuant/add_1"; +"inception_v3/activation_85/fake_quantize/AsymmQuant/add_1" -> "inception_v3/activation_85/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_85/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "inception_v3/activation_85/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_85/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "inception_v3/mixed10/concat"; "inception_v3/mixed9_1/concat/axis" -> "inception_v3/mixed9_1/concat"; "inception_v3/mixed9_1/concat" -> "inception_v3/mixed10/concat"; "inception_v3/concatenate_1/concat/axis" -> "inception_v3/concatenate_1/concat"; "inception_v3/concatenate_1/concat" -> "inception_v3/mixed10/concat"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/Abs/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/Abs"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/Abs" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/add"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/add/y" -> 
"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/add"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/add" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/add_1"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/add_1"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/add_1" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/FakeQuantWithMinMaxVars/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/FakeQuantWithMinMaxVars" -> "inception_v3/mixed10/concat"; +"inception_v3/activation_93/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/activation_93/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; +"inception_v3/activation_93/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "inception_v3/activation_93/fake_quantize/AsymmQuant/Abs"; +"inception_v3/activation_93/fake_quantize/AsymmQuant/Abs" -> "inception_v3/activation_93/fake_quantize/AsymmQuant/add"; +"inception_v3/activation_93/fake_quantize/AsymmQuant/add/y" -> "inception_v3/activation_93/fake_quantize/AsymmQuant/add"; +"inception_v3/activation_93/fake_quantize/AsymmQuant/add" -> "inception_v3/activation_93/fake_quantize/AsymmQuant/add_1"; +"inception_v3/activation_93/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_93/fake_quantize/AsymmQuant/ReadVariableOp"; +"inception_v3/activation_93/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_93/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"inception_v3/activation_93/fake_quantize/AsymmQuant/ReadVariableOp" -> "inception_v3/activation_93/fake_quantize/AsymmQuant/add_1"; +"inception_v3/activation_93/fake_quantize/AsymmQuant/add_1" -> "inception_v3/activation_93/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_93/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "inception_v3/activation_93/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_93/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "inception_v3/mixed10/concat"; "inception_v3/mixed10/concat/axis" -> "inception_v3/mixed10/concat"; "inception_v3/mixed10/concat" -> "inception_v3/avg_pool/Mean"; "inception_v3/avg_pool/Mean/reduction_indices" -> "inception_v3/avg_pool/Mean"; diff --git a/tests/tensorflow/data/reference_graphs/2.11/quantized/w_sym_ch_a_asym_t/inception_v3.dot b/tests/tensorflow/data/reference_graphs/2.11/quantized/w_sym_ch_a_asym_t/inception_v3.dot index ee079e7a2c7..4bdff5fc871 100644 --- a/tests/tensorflow/data/reference_graphs/2.11/quantized/w_sym_ch_a_asym_t/inception_v3.dot +++ b/tests/tensorflow/data/reference_graphs/2.11/quantized/w_sym_ch_a_asym_t/inception_v3.dot @@ -2853,18 +2853,6 @@ args_0 [op=Placeholder]; "inception_v3/conv2d_87/SymmQuant/FakeQuantWithMinMaxVarsPerChannel/ReadVariableOp" [op=ReadVariableOp]; "inception_v3/conv2d_87/SymmQuant/FakeQuantWithMinMaxVarsPerChannel" [op=FakeQuantWithMinMaxVarsPerChannel]; "inception_v3/conv2d_87/Conv2D" [op=Conv2D]; -"inception_v3/conv2d_85/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; 
-"inception_v3/conv2d_85/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/conv2d_85/SymmQuant/Abs" [op=Abs]; -"inception_v3/conv2d_85/SymmQuant/add/y" [op=Const]; -"inception_v3/conv2d_85/SymmQuant/add" [op=AddV2]; -"inception_v3/conv2d_85/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; -"inception_v3/conv2d_85/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/conv2d_85/SymmQuant/mul" [op=Mul]; -"inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel/ReadVariableOp/resource" [op=Placeholder]; -"inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel" [op=FakeQuantWithMinMaxVarsPerChannel]; -"inception_v3/conv2d_85/Conv2D" [op=Conv2D]; "inception_v3/conv2d_93/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; "inception_v3/conv2d_93/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; "inception_v3/conv2d_93/SymmQuant/Abs" [op=Abs]; @@ -2909,14 +2897,18 @@ args_0 [op=Placeholder]; "inception_v3/batch_normalization_87/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; "inception_v3/batch_normalization_87/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; "inception_v3/batch_normalization_87/FusedBatchNormV3" [op=FusedBatchNormV3]; -"inception_v3/batch_normalization_85/Const" [op=Const]; -"inception_v3/batch_normalization_85/ReadVariableOp/resource" [op=Placeholder]; -"inception_v3/batch_normalization_85/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; -"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; -"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; -"inception_v3/batch_normalization_85/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_v3/conv2d_85/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/conv2d_85/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/conv2d_85/SymmQuant/Abs" [op=Abs]; +"inception_v3/conv2d_85/SymmQuant/add/y" [op=Const]; +"inception_v3/conv2d_85/SymmQuant/add" [op=AddV2]; +"inception_v3/conv2d_85/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/conv2d_85/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/conv2d_85/SymmQuant/mul" [op=Mul]; +"inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel" [op=FakeQuantWithMinMaxVarsPerChannel]; +"inception_v3/conv2d_85/Conv2D" [op=Conv2D]; "inception_v3/batch_normalization_93/Const" [op=Const]; "inception_v3/batch_normalization_93/ReadVariableOp/resource" [op=Placeholder]; "inception_v3/batch_normalization_93/ReadVariableOp" [op=ReadVariableOp]; @@ -2929,62 +2921,80 @@ args_0 [op=Placeholder]; "inception_v3/activation_91/Relu" [op=Relu]; "inception_v3/activation_88/Relu" [op=Relu]; "inception_v3/activation_87/Relu" [op=Relu]; -"inception_v3/activation_85/Relu" [op=Relu]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; 
-"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/Abs" [op=Abs]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/add/y" [op=Const]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/add" [op=AddV2]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/add_1" [op=AddV2]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"inception_v3/batch_normalization_85/Const" [op=Const]; +"inception_v3/batch_normalization_85/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/batch_normalization_85/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_v3/batch_normalization_85/FusedBatchNormV3" [op=FusedBatchNormV3]; "inception_v3/activation_93/Relu" [op=Relu]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/Abs/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/Abs" [op=Abs]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/add/y" [op=Const]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/add" [op=AddV2]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/add_1" [op=AddV2]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/Abs/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/Abs" [op=Abs]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/add/y" [op=Const]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/add" [op=AddV2]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/add_1" [op=AddV2]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/Abs/ReadVariableOp" [op=ReadVariableOp]; 
-"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/Abs" [op=Abs]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/add/y" [op=Const]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/add" [op=AddV2]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/add_1" [op=AddV2]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/Abs/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/Abs" [op=Abs]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/add/y" [op=Const]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/add" [op=AddV2]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/add_1" [op=AddV2]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"inception_v3/activation_91/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/activation_91/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_91/fake_quantize/AsymmQuant/Abs" [op=Abs]; +"inception_v3/activation_91/fake_quantize/AsymmQuant/add/y" [op=Const]; +"inception_v3/activation_91/fake_quantize/AsymmQuant/add" [op=AddV2]; +"inception_v3/activation_91/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/activation_91/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_91/fake_quantize/AsymmQuant/add_1" [op=AddV2]; +"inception_v3/activation_91/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_91/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"inception_v3/activation_92/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/activation_92/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_92/fake_quantize/AsymmQuant/Abs" [op=Abs]; +"inception_v3/activation_92/fake_quantize/AsymmQuant/add/y" [op=Const]; +"inception_v3/activation_92/fake_quantize/AsymmQuant/add" [op=AddV2]; +"inception_v3/activation_92/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/activation_92/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_92/fake_quantize/AsymmQuant/add_1" [op=AddV2]; +"inception_v3/activation_92/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_92/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"inception_v3/activation_87/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; 
+"inception_v3/activation_87/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_87/fake_quantize/AsymmQuant/Abs" [op=Abs]; +"inception_v3/activation_87/fake_quantize/AsymmQuant/add/y" [op=Const]; +"inception_v3/activation_87/fake_quantize/AsymmQuant/add" [op=AddV2]; +"inception_v3/activation_87/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/activation_87/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_87/fake_quantize/AsymmQuant/add_1" [op=AddV2]; +"inception_v3/activation_87/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_87/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"inception_v3/activation_88/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/activation_88/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_88/fake_quantize/AsymmQuant/Abs" [op=Abs]; +"inception_v3/activation_88/fake_quantize/AsymmQuant/add/y" [op=Const]; +"inception_v3/activation_88/fake_quantize/AsymmQuant/add" [op=AddV2]; +"inception_v3/activation_88/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/activation_88/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_88/fake_quantize/AsymmQuant/add_1" [op=AddV2]; +"inception_v3/activation_88/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_88/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"inception_v3/activation_85/Relu" [op=Relu]; +"inception_v3/activation_85/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/activation_85/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_85/fake_quantize/AsymmQuant/Abs" [op=Abs]; +"inception_v3/activation_85/fake_quantize/AsymmQuant/add/y" [op=Const]; +"inception_v3/activation_85/fake_quantize/AsymmQuant/add" [op=AddV2]; +"inception_v3/activation_85/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/activation_85/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_85/fake_quantize/AsymmQuant/add_1" [op=AddV2]; +"inception_v3/activation_85/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_85/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; "inception_v3/mixed9_1/concat/axis" [op=Const]; "inception_v3/mixed9_1/concat" [op=ConcatV2]; "inception_v3/concatenate_1/concat/axis" [op=Const]; "inception_v3/concatenate_1/concat" [op=ConcatV2]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/Abs/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/Abs" [op=Abs]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/add/y" [op=Const]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/add" [op=AddV2]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/add_1" [op=AddV2]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/FakeQuantWithMinMaxVars/ReadVariableOp" 
[op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"inception_v3/activation_93/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/activation_93/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_93/fake_quantize/AsymmQuant/Abs" [op=Abs]; +"inception_v3/activation_93/fake_quantize/AsymmQuant/add/y" [op=Const]; +"inception_v3/activation_93/fake_quantize/AsymmQuant/add" [op=AddV2]; +"inception_v3/activation_93/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/activation_93/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_93/fake_quantize/AsymmQuant/add_1" [op=AddV2]; +"inception_v3/activation_93/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_93/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; "inception_v3/mixed10/concat/axis" [op=Const]; "inception_v3/mixed10/concat" [op=ConcatV2]; "inception_v3/avg_pool/Mean/reduction_indices" [op=Const]; @@ -6155,19 +6165,6 @@ args_0 -> "inception_v3/input_1/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars "inception_v3/conv2d_87/SymmQuant/FakeQuantWithMinMaxVarsPerChannel/ReadVariableOp" -> "inception_v3/conv2d_87/SymmQuant/FakeQuantWithMinMaxVarsPerChannel"; "inception_v3/conv2d_87/SymmQuant/FakeQuantWithMinMaxVarsPerChannel" -> "inception_v3/conv2d_87/Conv2D"; "inception_v3/conv2d_87/Conv2D" -> "inception_v3/batch_normalization_87/FusedBatchNormV3"; -"inception_v3/conv2d_85/SymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/conv2d_85/SymmQuant/Abs/ReadVariableOp"; -"inception_v3/conv2d_85/SymmQuant/Abs/ReadVariableOp" -> "inception_v3/conv2d_85/SymmQuant/Abs"; -"inception_v3/conv2d_85/SymmQuant/Abs" -> "inception_v3/conv2d_85/SymmQuant/add"; -"inception_v3/conv2d_85/SymmQuant/add/y" -> "inception_v3/conv2d_85/SymmQuant/add"; -"inception_v3/conv2d_85/SymmQuant/add" -> "inception_v3/conv2d_85/SymmQuant/mul"; -"inception_v3/conv2d_85/SymmQuant/add" -> "inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel"; -"inception_v3/conv2d_85/SymmQuant/mul/ReadVariableOp/resource" -> "inception_v3/conv2d_85/SymmQuant/mul/ReadVariableOp"; -"inception_v3/conv2d_85/SymmQuant/mul/ReadVariableOp" -> "inception_v3/conv2d_85/SymmQuant/mul"; -"inception_v3/conv2d_85/SymmQuant/mul" -> "inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel"; -"inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel/ReadVariableOp/resource" -> "inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel/ReadVariableOp"; -"inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel/ReadVariableOp" -> "inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel"; -"inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel" -> "inception_v3/conv2d_85/Conv2D"; -"inception_v3/conv2d_85/Conv2D" -> "inception_v3/batch_normalization_85/FusedBatchNormV3"; "inception_v3/conv2d_93/SymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/conv2d_93/SymmQuant/Abs/ReadVariableOp"; "inception_v3/conv2d_93/SymmQuant/Abs/ReadVariableOp" -> "inception_v3/conv2d_93/SymmQuant/Abs"; "inception_v3/conv2d_93/SymmQuant/Abs" -> "inception_v3/conv2d_93/SymmQuant/add"; @@ -6213,14 +6210,19 @@ args_0 -> "inception_v3/input_1/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars 
"inception_v3/batch_normalization_87/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_v3/batch_normalization_87/FusedBatchNormV3/ReadVariableOp_1"; "inception_v3/batch_normalization_87/FusedBatchNormV3/ReadVariableOp_1" -> "inception_v3/batch_normalization_87/FusedBatchNormV3"; "inception_v3/batch_normalization_87/FusedBatchNormV3" -> "inception_v3/activation_87/Relu"; -"inception_v3/batch_normalization_85/Const" -> "inception_v3/batch_normalization_85/FusedBatchNormV3"; -"inception_v3/batch_normalization_85/ReadVariableOp/resource" -> "inception_v3/batch_normalization_85/ReadVariableOp"; -"inception_v3/batch_normalization_85/ReadVariableOp" -> "inception_v3/batch_normalization_85/FusedBatchNormV3"; -"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp"; -"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp" -> "inception_v3/batch_normalization_85/FusedBatchNormV3"; -"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp_1"; -"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp_1" -> "inception_v3/batch_normalization_85/FusedBatchNormV3"; -"inception_v3/batch_normalization_85/FusedBatchNormV3" -> "inception_v3/activation_85/Relu"; +"inception_v3/conv2d_85/SymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/conv2d_85/SymmQuant/Abs/ReadVariableOp"; +"inception_v3/conv2d_85/SymmQuant/Abs/ReadVariableOp" -> "inception_v3/conv2d_85/SymmQuant/Abs"; +"inception_v3/conv2d_85/SymmQuant/Abs" -> "inception_v3/conv2d_85/SymmQuant/add"; +"inception_v3/conv2d_85/SymmQuant/add/y" -> "inception_v3/conv2d_85/SymmQuant/add"; +"inception_v3/conv2d_85/SymmQuant/add" -> "inception_v3/conv2d_85/SymmQuant/mul"; +"inception_v3/conv2d_85/SymmQuant/add" -> "inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel"; +"inception_v3/conv2d_85/SymmQuant/mul/ReadVariableOp/resource" -> "inception_v3/conv2d_85/SymmQuant/mul/ReadVariableOp"; +"inception_v3/conv2d_85/SymmQuant/mul/ReadVariableOp" -> "inception_v3/conv2d_85/SymmQuant/mul"; +"inception_v3/conv2d_85/SymmQuant/mul" -> "inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel"; +"inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel/ReadVariableOp/resource" -> "inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel/ReadVariableOp"; +"inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel/ReadVariableOp" -> "inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel"; +"inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel" -> "inception_v3/conv2d_85/Conv2D"; +"inception_v3/conv2d_85/Conv2D" -> "inception_v3/batch_normalization_85/FusedBatchNormV3"; "inception_v3/batch_normalization_93/Const" -> "inception_v3/batch_normalization_93/FusedBatchNormV3"; "inception_v3/batch_normalization_93/ReadVariableOp/resource" -> "inception_v3/batch_normalization_93/ReadVariableOp"; "inception_v3/batch_normalization_93/ReadVariableOp" -> "inception_v3/batch_normalization_93/FusedBatchNormV3"; @@ -6229,82 +6231,90 @@ args_0 -> "inception_v3/input_1/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars "inception_v3/batch_normalization_93/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_v3/batch_normalization_93/FusedBatchNormV3/ReadVariableOp_1"; "inception_v3/batch_normalization_93/FusedBatchNormV3/ReadVariableOp_1" -> 
"inception_v3/batch_normalization_93/FusedBatchNormV3"; "inception_v3/batch_normalization_93/FusedBatchNormV3" -> "inception_v3/activation_93/Relu"; -"inception_v3/activation_92/Relu" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars"; -"inception_v3/activation_91/Relu" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars"; -"inception_v3/activation_88/Relu" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/Relu" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars"; -"inception_v3/activation_85/Relu" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/Abs/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/Abs/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/Abs/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/Abs/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/Abs/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/Abs"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/Abs" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/add"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/add/y" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/add"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/add" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/add_1"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars/ReadVariableOp"; 
-"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/FakeQuantWithMinMaxVars/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/FakeQuantWithMinMaxVars/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/add_1"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/add_1" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars" -> "inception_v3/mixed10/concat"; -"inception_v3/activation_93/Relu" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/Abs/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/Abs"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/Abs" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/add"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/add/y" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/add"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/add" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/add_1"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/add_1"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/add_1" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars"; 
-"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars" -> "inception_v3/concatenate_1/concat"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/Abs/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/Abs"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/Abs" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/add"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/add/y" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/add"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/add" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/add_1"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/add_1"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/add_1" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars" -> "inception_v3/concatenate_1/concat"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/Abs/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/Abs"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/Abs" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/add"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/add/y" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/add"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/add" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/add_1"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/add_1"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/add_1" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars" -> "inception_v3/mixed9_1/concat"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/Abs/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/Abs"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/Abs" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/add"; 
-"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/add/y" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/add"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/add" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/add_1"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/add_1"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/add_1" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/FakeQuantWithMinMaxVars/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/FakeQuantWithMinMaxVars" -> "inception_v3/mixed9_1/concat"; +"inception_v3/activation_92/Relu" -> "inception_v3/activation_92/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_91/Relu" -> "inception_v3/activation_91/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_88/Relu" -> "inception_v3/activation_88/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_87/Relu" -> "inception_v3/activation_87/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/batch_normalization_85/Const" -> "inception_v3/batch_normalization_85/FusedBatchNormV3"; +"inception_v3/batch_normalization_85/ReadVariableOp/resource" -> "inception_v3/batch_normalization_85/ReadVariableOp"; +"inception_v3/batch_normalization_85/ReadVariableOp" -> "inception_v3/batch_normalization_85/FusedBatchNormV3"; +"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp"; +"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp" -> "inception_v3/batch_normalization_85/FusedBatchNormV3"; +"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp_1"; +"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp_1" -> "inception_v3/batch_normalization_85/FusedBatchNormV3"; +"inception_v3/batch_normalization_85/FusedBatchNormV3" -> "inception_v3/activation_85/Relu"; +"inception_v3/activation_93/Relu" -> "inception_v3/activation_93/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_91/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/activation_91/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; +"inception_v3/activation_91/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "inception_v3/activation_91/fake_quantize/AsymmQuant/Abs"; +"inception_v3/activation_91/fake_quantize/AsymmQuant/Abs" -> "inception_v3/activation_91/fake_quantize/AsymmQuant/add"; +"inception_v3/activation_91/fake_quantize/AsymmQuant/add/y" -> "inception_v3/activation_91/fake_quantize/AsymmQuant/add"; +"inception_v3/activation_91/fake_quantize/AsymmQuant/add" -> "inception_v3/activation_91/fake_quantize/AsymmQuant/add_1"; +"inception_v3/activation_91/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_91/fake_quantize/AsymmQuant/ReadVariableOp"; +"inception_v3/activation_91/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> 
"inception_v3/activation_91/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"inception_v3/activation_91/fake_quantize/AsymmQuant/ReadVariableOp" -> "inception_v3/activation_91/fake_quantize/AsymmQuant/add_1"; +"inception_v3/activation_91/fake_quantize/AsymmQuant/add_1" -> "inception_v3/activation_91/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_91/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "inception_v3/activation_91/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_91/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "inception_v3/concatenate_1/concat"; +"inception_v3/activation_92/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/activation_92/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; +"inception_v3/activation_92/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "inception_v3/activation_92/fake_quantize/AsymmQuant/Abs"; +"inception_v3/activation_92/fake_quantize/AsymmQuant/Abs" -> "inception_v3/activation_92/fake_quantize/AsymmQuant/add"; +"inception_v3/activation_92/fake_quantize/AsymmQuant/add/y" -> "inception_v3/activation_92/fake_quantize/AsymmQuant/add"; +"inception_v3/activation_92/fake_quantize/AsymmQuant/add" -> "inception_v3/activation_92/fake_quantize/AsymmQuant/add_1"; +"inception_v3/activation_92/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_92/fake_quantize/AsymmQuant/ReadVariableOp"; +"inception_v3/activation_92/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_92/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"inception_v3/activation_92/fake_quantize/AsymmQuant/ReadVariableOp" -> "inception_v3/activation_92/fake_quantize/AsymmQuant/add_1"; +"inception_v3/activation_92/fake_quantize/AsymmQuant/add_1" -> "inception_v3/activation_92/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_92/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "inception_v3/activation_92/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_92/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "inception_v3/concatenate_1/concat"; +"inception_v3/activation_87/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; +"inception_v3/activation_87/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/AsymmQuant/Abs"; +"inception_v3/activation_87/fake_quantize/AsymmQuant/Abs" -> "inception_v3/activation_87/fake_quantize/AsymmQuant/add"; +"inception_v3/activation_87/fake_quantize/AsymmQuant/add/y" -> "inception_v3/activation_87/fake_quantize/AsymmQuant/add"; +"inception_v3/activation_87/fake_quantize/AsymmQuant/add" -> "inception_v3/activation_87/fake_quantize/AsymmQuant/add_1"; +"inception_v3/activation_87/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/AsymmQuant/ReadVariableOp"; +"inception_v3/activation_87/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"inception_v3/activation_87/fake_quantize/AsymmQuant/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/AsymmQuant/add_1"; +"inception_v3/activation_87/fake_quantize/AsymmQuant/add_1" -> "inception_v3/activation_87/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; 
+"inception_v3/activation_87/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_87/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "inception_v3/mixed9_1/concat"; +"inception_v3/activation_88/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/activation_88/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; +"inception_v3/activation_88/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "inception_v3/activation_88/fake_quantize/AsymmQuant/Abs"; +"inception_v3/activation_88/fake_quantize/AsymmQuant/Abs" -> "inception_v3/activation_88/fake_quantize/AsymmQuant/add"; +"inception_v3/activation_88/fake_quantize/AsymmQuant/add/y" -> "inception_v3/activation_88/fake_quantize/AsymmQuant/add"; +"inception_v3/activation_88/fake_quantize/AsymmQuant/add" -> "inception_v3/activation_88/fake_quantize/AsymmQuant/add_1"; +"inception_v3/activation_88/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_88/fake_quantize/AsymmQuant/ReadVariableOp"; +"inception_v3/activation_88/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_88/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"inception_v3/activation_88/fake_quantize/AsymmQuant/ReadVariableOp" -> "inception_v3/activation_88/fake_quantize/AsymmQuant/add_1"; +"inception_v3/activation_88/fake_quantize/AsymmQuant/add_1" -> "inception_v3/activation_88/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_88/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "inception_v3/activation_88/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_88/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "inception_v3/mixed9_1/concat"; +"inception_v3/activation_85/Relu" -> "inception_v3/activation_85/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_85/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/activation_85/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; +"inception_v3/activation_85/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "inception_v3/activation_85/fake_quantize/AsymmQuant/Abs"; +"inception_v3/activation_85/fake_quantize/AsymmQuant/Abs" -> "inception_v3/activation_85/fake_quantize/AsymmQuant/add"; +"inception_v3/activation_85/fake_quantize/AsymmQuant/add/y" -> "inception_v3/activation_85/fake_quantize/AsymmQuant/add"; +"inception_v3/activation_85/fake_quantize/AsymmQuant/add" -> "inception_v3/activation_85/fake_quantize/AsymmQuant/add_1"; +"inception_v3/activation_85/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_85/fake_quantize/AsymmQuant/ReadVariableOp"; +"inception_v3/activation_85/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_85/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"inception_v3/activation_85/fake_quantize/AsymmQuant/ReadVariableOp" -> "inception_v3/activation_85/fake_quantize/AsymmQuant/add_1"; +"inception_v3/activation_85/fake_quantize/AsymmQuant/add_1" -> "inception_v3/activation_85/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_85/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "inception_v3/activation_85/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_85/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "inception_v3/mixed10/concat"; 
"inception_v3/mixed9_1/concat/axis" -> "inception_v3/mixed9_1/concat"; "inception_v3/mixed9_1/concat" -> "inception_v3/mixed10/concat"; "inception_v3/concatenate_1/concat/axis" -> "inception_v3/concatenate_1/concat"; "inception_v3/concatenate_1/concat" -> "inception_v3/mixed10/concat"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/Abs/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/Abs"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/Abs" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/add"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/add/y" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/add"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/add" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/add_1"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/add_1"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/add_1" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/FakeQuantWithMinMaxVars/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/FakeQuantWithMinMaxVars" -> "inception_v3/mixed10/concat"; +"inception_v3/activation_93/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/activation_93/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; +"inception_v3/activation_93/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "inception_v3/activation_93/fake_quantize/AsymmQuant/Abs"; +"inception_v3/activation_93/fake_quantize/AsymmQuant/Abs" -> "inception_v3/activation_93/fake_quantize/AsymmQuant/add"; +"inception_v3/activation_93/fake_quantize/AsymmQuant/add/y" -> "inception_v3/activation_93/fake_quantize/AsymmQuant/add"; +"inception_v3/activation_93/fake_quantize/AsymmQuant/add" -> "inception_v3/activation_93/fake_quantize/AsymmQuant/add_1"; +"inception_v3/activation_93/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_93/fake_quantize/AsymmQuant/ReadVariableOp"; +"inception_v3/activation_93/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_93/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"inception_v3/activation_93/fake_quantize/AsymmQuant/ReadVariableOp" -> "inception_v3/activation_93/fake_quantize/AsymmQuant/add_1"; +"inception_v3/activation_93/fake_quantize/AsymmQuant/add_1" -> "inception_v3/activation_93/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_93/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "inception_v3/activation_93/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_93/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "inception_v3/mixed10/concat"; "inception_v3/mixed10/concat/axis" -> "inception_v3/mixed10/concat"; "inception_v3/mixed10/concat" -> "inception_v3/avg_pool/Mean"; "inception_v3/avg_pool/Mean/reduction_indices" -> "inception_v3/avg_pool/Mean"; diff --git a/tests/tensorflow/data/reference_graphs/2.11/quantized/w_sym_t_a_sym_t/inception_v3.dot 
b/tests/tensorflow/data/reference_graphs/2.11/quantized/w_sym_t_a_sym_t/inception_v3.dot index eea821b911f..7a545513550 100644 --- a/tests/tensorflow/data/reference_graphs/2.11/quantized/w_sym_t_a_sym_t/inception_v3.dot +++ b/tests/tensorflow/data/reference_graphs/2.11/quantized/w_sym_t_a_sym_t/inception_v3.dot @@ -2755,18 +2755,6 @@ args_0 [op=Placeholder]; "inception_v3/conv2d_87/SymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; "inception_v3/conv2d_87/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; "inception_v3/conv2d_87/Conv2D" [op=Conv2D]; -"inception_v3/conv2d_85/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; -"inception_v3/conv2d_85/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/conv2d_85/SymmQuant/Abs" [op=Abs]; -"inception_v3/conv2d_85/SymmQuant/add/y" [op=Const]; -"inception_v3/conv2d_85/SymmQuant/add" [op=AddV2]; -"inception_v3/conv2d_85/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; -"inception_v3/conv2d_85/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/conv2d_85/SymmQuant/mul" [op=Mul]; -"inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp/resource" [op=Placeholder]; -"inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"inception_v3/conv2d_85/Conv2D" [op=Conv2D]; "inception_v3/conv2d_93/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; "inception_v3/conv2d_93/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; "inception_v3/conv2d_93/SymmQuant/Abs" [op=Abs]; @@ -2811,14 +2799,18 @@ args_0 [op=Placeholder]; "inception_v3/batch_normalization_87/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; "inception_v3/batch_normalization_87/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; "inception_v3/batch_normalization_87/FusedBatchNormV3" [op=FusedBatchNormV3]; -"inception_v3/batch_normalization_85/Const" [op=Const]; -"inception_v3/batch_normalization_85/ReadVariableOp/resource" [op=Placeholder]; -"inception_v3/batch_normalization_85/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; -"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; -"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; -"inception_v3/batch_normalization_85/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_v3/conv2d_85/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/conv2d_85/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/conv2d_85/SymmQuant/Abs" [op=Abs]; +"inception_v3/conv2d_85/SymmQuant/add/y" [op=Const]; +"inception_v3/conv2d_85/SymmQuant/add" [op=AddV2]; +"inception_v3/conv2d_85/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/conv2d_85/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/conv2d_85/SymmQuant/mul" [op=Mul]; +"inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"inception_v3/conv2d_85/Conv2D" [op=Conv2D]; "inception_v3/batch_normalization_93/Const" 
[op=Const]; "inception_v3/batch_normalization_93/ReadVariableOp/resource" [op=Placeholder]; "inception_v3/batch_normalization_93/ReadVariableOp" [op=ReadVariableOp]; @@ -2831,56 +2823,74 @@ args_0 [op=Placeholder]; "inception_v3/activation_91/Relu" [op=Relu]; "inception_v3/activation_88/Relu" [op=Relu]; "inception_v3/activation_87/Relu" [op=Relu]; -"inception_v3/activation_85/Relu" [op=Relu]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant/Abs" [op=Abs]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant/add/y" [op=Const]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant/add" [op=AddV2]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant/mul" [op=Mul]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"inception_v3/batch_normalization_85/Const" [op=Const]; +"inception_v3/batch_normalization_85/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/batch_normalization_85/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_v3/batch_normalization_85/FusedBatchNormV3" [op=FusedBatchNormV3]; "inception_v3/activation_93/Relu" [op=Relu]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_1/Abs/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_1/Abs" [op=Abs]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_1/add/y" [op=Const]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_1/add" [op=AddV2]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_1/mul/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_1/mul" [op=Mul]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_2/Abs/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_2/Abs" [op=Abs]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_2/add/y" [op=Const]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_2/add" [op=AddV2]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_2/mul/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_2/mul" [op=Mul]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_2/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; 
-"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_3/Abs/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_3/Abs" [op=Abs]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_3/add/y" [op=Const]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_3/add" [op=AddV2]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_3/mul/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_3/mul" [op=Mul]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_3/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_4/Abs/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_4/Abs" [op=Abs]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_4/add/y" [op=Const]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_4/add" [op=AddV2]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_4/mul/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_4/mul" [op=Mul]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_4/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"inception_v3/activation_91/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/activation_91/fake_quantize/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_91/fake_quantize/SymmQuant/Abs" [op=Abs]; +"inception_v3/activation_91/fake_quantize/SymmQuant/add/y" [op=Const]; +"inception_v3/activation_91/fake_quantize/SymmQuant/add" [op=AddV2]; +"inception_v3/activation_91/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/activation_91/fake_quantize/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_91/fake_quantize/SymmQuant/mul" [op=Mul]; +"inception_v3/activation_91/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"inception_v3/activation_92/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/activation_92/fake_quantize/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_92/fake_quantize/SymmQuant/Abs" [op=Abs]; +"inception_v3/activation_92/fake_quantize/SymmQuant/add/y" [op=Const]; +"inception_v3/activation_92/fake_quantize/SymmQuant/add" [op=AddV2]; +"inception_v3/activation_92/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/activation_92/fake_quantize/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_92/fake_quantize/SymmQuant/mul" [op=Mul]; +"inception_v3/activation_92/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"inception_v3/activation_87/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/activation_87/fake_quantize/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_87/fake_quantize/SymmQuant/Abs" [op=Abs]; +"inception_v3/activation_87/fake_quantize/SymmQuant/add/y" [op=Const]; +"inception_v3/activation_87/fake_quantize/SymmQuant/add" [op=AddV2]; +"inception_v3/activation_87/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; 
+"inception_v3/activation_87/fake_quantize/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_87/fake_quantize/SymmQuant/mul" [op=Mul]; +"inception_v3/activation_87/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"inception_v3/activation_88/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/activation_88/fake_quantize/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_88/fake_quantize/SymmQuant/Abs" [op=Abs]; +"inception_v3/activation_88/fake_quantize/SymmQuant/add/y" [op=Const]; +"inception_v3/activation_88/fake_quantize/SymmQuant/add" [op=AddV2]; +"inception_v3/activation_88/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/activation_88/fake_quantize/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_88/fake_quantize/SymmQuant/mul" [op=Mul]; +"inception_v3/activation_88/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"inception_v3/activation_85/Relu" [op=Relu]; +"inception_v3/activation_85/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/activation_85/fake_quantize/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_85/fake_quantize/SymmQuant/Abs" [op=Abs]; +"inception_v3/activation_85/fake_quantize/SymmQuant/add/y" [op=Const]; +"inception_v3/activation_85/fake_quantize/SymmQuant/add" [op=AddV2]; +"inception_v3/activation_85/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/activation_85/fake_quantize/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_85/fake_quantize/SymmQuant/mul" [op=Mul]; +"inception_v3/activation_85/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; "inception_v3/mixed9_1/concat/axis" [op=Const]; "inception_v3/mixed9_1/concat" [op=ConcatV2]; "inception_v3/concatenate_1/concat/axis" [op=Const]; "inception_v3/concatenate_1/concat" [op=ConcatV2]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_5/Abs/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_5/Abs" [op=Abs]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_5/add/y" [op=Const]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_5/add" [op=AddV2]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_5/mul/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_5/mul" [op=Mul]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_5/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"inception_v3/activation_93/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/activation_93/fake_quantize/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_93/fake_quantize/SymmQuant/Abs" [op=Abs]; +"inception_v3/activation_93/fake_quantize/SymmQuant/add/y" [op=Const]; +"inception_v3/activation_93/fake_quantize/SymmQuant/add" [op=AddV2]; +"inception_v3/activation_93/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/activation_93/fake_quantize/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_93/fake_quantize/SymmQuant/mul" [op=Mul]; +"inception_v3/activation_93/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" 
[op=FakeQuantWithMinMaxVars]; "inception_v3/mixed10/concat/axis" [op=Const]; "inception_v3/mixed10/concat" [op=ConcatV2]; "inception_v3/avg_pool/Mean/reduction_indices" [op=Const]; @@ -5952,19 +5962,6 @@ args_0 -> "inception_v3/input_1/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" "inception_v3/conv2d_87/SymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "inception_v3/conv2d_87/SymmQuant/FakeQuantWithMinMaxVars"; "inception_v3/conv2d_87/SymmQuant/FakeQuantWithMinMaxVars" -> "inception_v3/conv2d_87/Conv2D"; "inception_v3/conv2d_87/Conv2D" -> "inception_v3/batch_normalization_87/FusedBatchNormV3"; -"inception_v3/conv2d_85/SymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/conv2d_85/SymmQuant/Abs/ReadVariableOp"; -"inception_v3/conv2d_85/SymmQuant/Abs/ReadVariableOp" -> "inception_v3/conv2d_85/SymmQuant/Abs"; -"inception_v3/conv2d_85/SymmQuant/Abs" -> "inception_v3/conv2d_85/SymmQuant/add"; -"inception_v3/conv2d_85/SymmQuant/add/y" -> "inception_v3/conv2d_85/SymmQuant/add"; -"inception_v3/conv2d_85/SymmQuant/add" -> "inception_v3/conv2d_85/SymmQuant/mul"; -"inception_v3/conv2d_85/SymmQuant/add" -> "inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVars"; -"inception_v3/conv2d_85/SymmQuant/mul/ReadVariableOp/resource" -> "inception_v3/conv2d_85/SymmQuant/mul/ReadVariableOp"; -"inception_v3/conv2d_85/SymmQuant/mul/ReadVariableOp" -> "inception_v3/conv2d_85/SymmQuant/mul"; -"inception_v3/conv2d_85/SymmQuant/mul" -> "inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVars"; -"inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp/resource" -> "inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; -"inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVars"; -"inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVars" -> "inception_v3/conv2d_85/Conv2D"; -"inception_v3/conv2d_85/Conv2D" -> "inception_v3/batch_normalization_85/FusedBatchNormV3"; "inception_v3/conv2d_93/SymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/conv2d_93/SymmQuant/Abs/ReadVariableOp"; "inception_v3/conv2d_93/SymmQuant/Abs/ReadVariableOp" -> "inception_v3/conv2d_93/SymmQuant/Abs"; "inception_v3/conv2d_93/SymmQuant/Abs" -> "inception_v3/conv2d_93/SymmQuant/add"; @@ -6010,14 +6007,19 @@ args_0 -> "inception_v3/input_1/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" "inception_v3/batch_normalization_87/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_v3/batch_normalization_87/FusedBatchNormV3/ReadVariableOp_1"; "inception_v3/batch_normalization_87/FusedBatchNormV3/ReadVariableOp_1" -> "inception_v3/batch_normalization_87/FusedBatchNormV3"; "inception_v3/batch_normalization_87/FusedBatchNormV3" -> "inception_v3/activation_87/Relu"; -"inception_v3/batch_normalization_85/Const" -> "inception_v3/batch_normalization_85/FusedBatchNormV3"; -"inception_v3/batch_normalization_85/ReadVariableOp/resource" -> "inception_v3/batch_normalization_85/ReadVariableOp"; -"inception_v3/batch_normalization_85/ReadVariableOp" -> "inception_v3/batch_normalization_85/FusedBatchNormV3"; -"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp"; -"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp" -> "inception_v3/batch_normalization_85/FusedBatchNormV3"; -"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp_1/resource" -> 
"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp_1"; -"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp_1" -> "inception_v3/batch_normalization_85/FusedBatchNormV3"; -"inception_v3/batch_normalization_85/FusedBatchNormV3" -> "inception_v3/activation_85/Relu"; +"inception_v3/conv2d_85/SymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/conv2d_85/SymmQuant/Abs/ReadVariableOp"; +"inception_v3/conv2d_85/SymmQuant/Abs/ReadVariableOp" -> "inception_v3/conv2d_85/SymmQuant/Abs"; +"inception_v3/conv2d_85/SymmQuant/Abs" -> "inception_v3/conv2d_85/SymmQuant/add"; +"inception_v3/conv2d_85/SymmQuant/add/y" -> "inception_v3/conv2d_85/SymmQuant/add"; +"inception_v3/conv2d_85/SymmQuant/add" -> "inception_v3/conv2d_85/SymmQuant/mul"; +"inception_v3/conv2d_85/SymmQuant/add" -> "inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/conv2d_85/SymmQuant/mul/ReadVariableOp/resource" -> "inception_v3/conv2d_85/SymmQuant/mul/ReadVariableOp"; +"inception_v3/conv2d_85/SymmQuant/mul/ReadVariableOp" -> "inception_v3/conv2d_85/SymmQuant/mul"; +"inception_v3/conv2d_85/SymmQuant/mul" -> "inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp/resource" -> "inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVars" -> "inception_v3/conv2d_85/Conv2D"; +"inception_v3/conv2d_85/Conv2D" -> "inception_v3/batch_normalization_85/FusedBatchNormV3"; "inception_v3/batch_normalization_93/Const" -> "inception_v3/batch_normalization_93/FusedBatchNormV3"; "inception_v3/batch_normalization_93/ReadVariableOp/resource" -> "inception_v3/batch_normalization_93/ReadVariableOp"; "inception_v3/batch_normalization_93/ReadVariableOp" -> "inception_v3/batch_normalization_93/FusedBatchNormV3"; @@ -6026,76 +6028,84 @@ args_0 -> "inception_v3/input_1/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" "inception_v3/batch_normalization_93/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_v3/batch_normalization_93/FusedBatchNormV3/ReadVariableOp_1"; "inception_v3/batch_normalization_93/FusedBatchNormV3/ReadVariableOp_1" -> "inception_v3/batch_normalization_93/FusedBatchNormV3"; "inception_v3/batch_normalization_93/FusedBatchNormV3" -> "inception_v3/activation_93/Relu"; -"inception_v3/activation_92/Relu" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_2/FakeQuantWithMinMaxVars"; -"inception_v3/activation_91/Relu" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars"; -"inception_v3/activation_88/Relu" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_4/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/Relu" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_3/FakeQuantWithMinMaxVars"; -"inception_v3/activation_85/Relu" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp/resource" -> 
"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_1/Abs/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_2/Abs/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_3/Abs/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_4/Abs/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_5/Abs/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant/Abs"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant/Abs" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant/add"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant/add/y" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant/add"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant/add" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant/mul"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant/add" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_1/mul/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_2/mul/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_3/mul/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_4/mul/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_5/mul/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant/mul"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant/mul" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars" -> "inception_v3/mixed10/concat"; -"inception_v3/activation_93/Relu" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_5/FakeQuantWithMinMaxVars"; 
-"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_1/Abs/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_1/Abs"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_1/Abs" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_1/add"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_1/add/y" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_1/add"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_1/add" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_1/mul"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_1/add" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_1/mul/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_1/mul"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_1/mul" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars" -> "inception_v3/concatenate_1/concat"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_2/Abs/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_2/Abs"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_2/Abs" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_2/add"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_2/add/y" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_2/add"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_2/add" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_2/mul"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_2/add" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_2/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_2/mul/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_2/mul"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_2/mul" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_2/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_2/FakeQuantWithMinMaxVars" -> "inception_v3/concatenate_1/concat"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_3/Abs/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_3/Abs"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_3/Abs" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_3/add"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_3/add/y" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_3/add"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_3/add" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_3/mul"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_3/add" -> 
"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_3/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_3/mul/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_3/mul"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_3/mul" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_3/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_3/FakeQuantWithMinMaxVars" -> "inception_v3/mixed9_1/concat"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_4/Abs/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_4/Abs"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_4/Abs" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_4/add"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_4/add/y" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_4/add"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_4/add" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_4/mul"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_4/add" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_4/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_4/mul/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_4/mul"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_4/mul" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_4/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_4/FakeQuantWithMinMaxVars" -> "inception_v3/mixed9_1/concat"; +"inception_v3/activation_92/Relu" -> "inception_v3/activation_92/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_91/Relu" -> "inception_v3/activation_91/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_88/Relu" -> "inception_v3/activation_88/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_87/Relu" -> "inception_v3/activation_87/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/batch_normalization_85/Const" -> "inception_v3/batch_normalization_85/FusedBatchNormV3"; +"inception_v3/batch_normalization_85/ReadVariableOp/resource" -> "inception_v3/batch_normalization_85/ReadVariableOp"; +"inception_v3/batch_normalization_85/ReadVariableOp" -> "inception_v3/batch_normalization_85/FusedBatchNormV3"; +"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp"; +"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp" -> "inception_v3/batch_normalization_85/FusedBatchNormV3"; +"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp_1"; +"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp_1" -> "inception_v3/batch_normalization_85/FusedBatchNormV3"; +"inception_v3/batch_normalization_85/FusedBatchNormV3" -> "inception_v3/activation_85/Relu"; +"inception_v3/activation_93/Relu" -> "inception_v3/activation_93/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; 
+"inception_v3/activation_91/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/activation_91/fake_quantize/SymmQuant/Abs/ReadVariableOp"; +"inception_v3/activation_91/fake_quantize/SymmQuant/Abs/ReadVariableOp" -> "inception_v3/activation_91/fake_quantize/SymmQuant/Abs"; +"inception_v3/activation_91/fake_quantize/SymmQuant/Abs" -> "inception_v3/activation_91/fake_quantize/SymmQuant/add"; +"inception_v3/activation_91/fake_quantize/SymmQuant/add/y" -> "inception_v3/activation_91/fake_quantize/SymmQuant/add"; +"inception_v3/activation_91/fake_quantize/SymmQuant/add" -> "inception_v3/activation_91/fake_quantize/SymmQuant/mul"; +"inception_v3/activation_91/fake_quantize/SymmQuant/add" -> "inception_v3/activation_91/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_91/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" -> "inception_v3/activation_91/fake_quantize/SymmQuant/mul/ReadVariableOp"; +"inception_v3/activation_91/fake_quantize/SymmQuant/mul/ReadVariableOp" -> "inception_v3/activation_91/fake_quantize/SymmQuant/mul"; +"inception_v3/activation_91/fake_quantize/SymmQuant/mul" -> "inception_v3/activation_91/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_91/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "inception_v3/concatenate_1/concat"; +"inception_v3/activation_92/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/activation_92/fake_quantize/SymmQuant/Abs/ReadVariableOp"; +"inception_v3/activation_92/fake_quantize/SymmQuant/Abs/ReadVariableOp" -> "inception_v3/activation_92/fake_quantize/SymmQuant/Abs"; +"inception_v3/activation_92/fake_quantize/SymmQuant/Abs" -> "inception_v3/activation_92/fake_quantize/SymmQuant/add"; +"inception_v3/activation_92/fake_quantize/SymmQuant/add/y" -> "inception_v3/activation_92/fake_quantize/SymmQuant/add"; +"inception_v3/activation_92/fake_quantize/SymmQuant/add" -> "inception_v3/activation_92/fake_quantize/SymmQuant/mul"; +"inception_v3/activation_92/fake_quantize/SymmQuant/add" -> "inception_v3/activation_92/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_92/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" -> "inception_v3/activation_92/fake_quantize/SymmQuant/mul/ReadVariableOp"; +"inception_v3/activation_92/fake_quantize/SymmQuant/mul/ReadVariableOp" -> "inception_v3/activation_92/fake_quantize/SymmQuant/mul"; +"inception_v3/activation_92/fake_quantize/SymmQuant/mul" -> "inception_v3/activation_92/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_92/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "inception_v3/concatenate_1/concat"; +"inception_v3/activation_87/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/SymmQuant/Abs/ReadVariableOp"; +"inception_v3/activation_87/fake_quantize/SymmQuant/Abs/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/SymmQuant/Abs"; +"inception_v3/activation_87/fake_quantize/SymmQuant/Abs" -> "inception_v3/activation_87/fake_quantize/SymmQuant/add"; +"inception_v3/activation_87/fake_quantize/SymmQuant/add/y" -> "inception_v3/activation_87/fake_quantize/SymmQuant/add"; +"inception_v3/activation_87/fake_quantize/SymmQuant/add" -> "inception_v3/activation_87/fake_quantize/SymmQuant/mul"; +"inception_v3/activation_87/fake_quantize/SymmQuant/add" -> "inception_v3/activation_87/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_87/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" -> 
"inception_v3/activation_87/fake_quantize/SymmQuant/mul/ReadVariableOp"; +"inception_v3/activation_87/fake_quantize/SymmQuant/mul/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/SymmQuant/mul"; +"inception_v3/activation_87/fake_quantize/SymmQuant/mul" -> "inception_v3/activation_87/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_87/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "inception_v3/mixed9_1/concat"; +"inception_v3/activation_88/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/activation_88/fake_quantize/SymmQuant/Abs/ReadVariableOp"; +"inception_v3/activation_88/fake_quantize/SymmQuant/Abs/ReadVariableOp" -> "inception_v3/activation_88/fake_quantize/SymmQuant/Abs"; +"inception_v3/activation_88/fake_quantize/SymmQuant/Abs" -> "inception_v3/activation_88/fake_quantize/SymmQuant/add"; +"inception_v3/activation_88/fake_quantize/SymmQuant/add/y" -> "inception_v3/activation_88/fake_quantize/SymmQuant/add"; +"inception_v3/activation_88/fake_quantize/SymmQuant/add" -> "inception_v3/activation_88/fake_quantize/SymmQuant/mul"; +"inception_v3/activation_88/fake_quantize/SymmQuant/add" -> "inception_v3/activation_88/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_88/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" -> "inception_v3/activation_88/fake_quantize/SymmQuant/mul/ReadVariableOp"; +"inception_v3/activation_88/fake_quantize/SymmQuant/mul/ReadVariableOp" -> "inception_v3/activation_88/fake_quantize/SymmQuant/mul"; +"inception_v3/activation_88/fake_quantize/SymmQuant/mul" -> "inception_v3/activation_88/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_88/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "inception_v3/mixed9_1/concat"; +"inception_v3/activation_85/Relu" -> "inception_v3/activation_85/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_85/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/activation_85/fake_quantize/SymmQuant/Abs/ReadVariableOp"; +"inception_v3/activation_85/fake_quantize/SymmQuant/Abs/ReadVariableOp" -> "inception_v3/activation_85/fake_quantize/SymmQuant/Abs"; +"inception_v3/activation_85/fake_quantize/SymmQuant/Abs" -> "inception_v3/activation_85/fake_quantize/SymmQuant/add"; +"inception_v3/activation_85/fake_quantize/SymmQuant/add/y" -> "inception_v3/activation_85/fake_quantize/SymmQuant/add"; +"inception_v3/activation_85/fake_quantize/SymmQuant/add" -> "inception_v3/activation_85/fake_quantize/SymmQuant/mul"; +"inception_v3/activation_85/fake_quantize/SymmQuant/add" -> "inception_v3/activation_85/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_85/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" -> "inception_v3/activation_85/fake_quantize/SymmQuant/mul/ReadVariableOp"; +"inception_v3/activation_85/fake_quantize/SymmQuant/mul/ReadVariableOp" -> "inception_v3/activation_85/fake_quantize/SymmQuant/mul"; +"inception_v3/activation_85/fake_quantize/SymmQuant/mul" -> "inception_v3/activation_85/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_85/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "inception_v3/mixed10/concat"; "inception_v3/mixed9_1/concat/axis" -> "inception_v3/mixed9_1/concat"; "inception_v3/mixed9_1/concat" -> "inception_v3/mixed10/concat"; "inception_v3/concatenate_1/concat/axis" -> "inception_v3/concatenate_1/concat"; "inception_v3/concatenate_1/concat" -> "inception_v3/mixed10/concat"; 
-"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_5/Abs/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_5/Abs"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_5/Abs" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_5/add"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_5/add/y" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_5/add"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_5/add" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_5/mul"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_5/add" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_5/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_5/mul/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_5/mul"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_5/mul" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_5/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_5/FakeQuantWithMinMaxVars" -> "inception_v3/mixed10/concat"; +"inception_v3/activation_93/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/activation_93/fake_quantize/SymmQuant/Abs/ReadVariableOp"; +"inception_v3/activation_93/fake_quantize/SymmQuant/Abs/ReadVariableOp" -> "inception_v3/activation_93/fake_quantize/SymmQuant/Abs"; +"inception_v3/activation_93/fake_quantize/SymmQuant/Abs" -> "inception_v3/activation_93/fake_quantize/SymmQuant/add"; +"inception_v3/activation_93/fake_quantize/SymmQuant/add/y" -> "inception_v3/activation_93/fake_quantize/SymmQuant/add"; +"inception_v3/activation_93/fake_quantize/SymmQuant/add" -> "inception_v3/activation_93/fake_quantize/SymmQuant/mul"; +"inception_v3/activation_93/fake_quantize/SymmQuant/add" -> "inception_v3/activation_93/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_93/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" -> "inception_v3/activation_93/fake_quantize/SymmQuant/mul/ReadVariableOp"; +"inception_v3/activation_93/fake_quantize/SymmQuant/mul/ReadVariableOp" -> "inception_v3/activation_93/fake_quantize/SymmQuant/mul"; +"inception_v3/activation_93/fake_quantize/SymmQuant/mul" -> "inception_v3/activation_93/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_93/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "inception_v3/mixed10/concat"; "inception_v3/mixed10/concat/axis" -> "inception_v3/mixed10/concat"; "inception_v3/mixed10/concat" -> "inception_v3/avg_pool/Mean"; "inception_v3/avg_pool/Mean/reduction_indices" -> "inception_v3/avg_pool/Mean"; diff --git a/tests/tensorflow/data/reference_graphs/2.12/pruning/filter_pruning/densenet121.pb b/tests/tensorflow/data/reference_graphs/2.12/pruning/filter_pruning/densenet121.pb new file mode 120000 index 00000000000..66077c5d228 --- /dev/null +++ b/tests/tensorflow/data/reference_graphs/2.12/pruning/filter_pruning/densenet121.pb @@ -0,0 +1 @@ +../../../2.5/pruning/filter_pruning/densenet121.pb \ No newline at end of file diff --git a/tests/tensorflow/data/reference_graphs/2.12/pruning/filter_pruning/inception_v3.dot b/tests/tensorflow/data/reference_graphs/2.12/pruning/filter_pruning/inception_v3.dot new file mode 120000 index 00000000000..d2410123a13 
--- /dev/null +++ b/tests/tensorflow/data/reference_graphs/2.12/pruning/filter_pruning/inception_v3.dot @@ -0,0 +1 @@ +../../../2.11/pruning/filter_pruning/inception_v3.dot \ No newline at end of file diff --git a/tests/tensorflow/data/reference_graphs/2.12/pruning/filter_pruning/mobilenet_v1.pb b/tests/tensorflow/data/reference_graphs/2.12/pruning/filter_pruning/mobilenet_v1.pb new file mode 120000 index 00000000000..4d9a880364a --- /dev/null +++ b/tests/tensorflow/data/reference_graphs/2.12/pruning/filter_pruning/mobilenet_v1.pb @@ -0,0 +1 @@ +../../../2.8/pruning/filter_pruning/mobilenet_v1.pb \ No newline at end of file diff --git a/tests/tensorflow/data/reference_graphs/2.12/pruning/filter_pruning/mobilenet_v2.pb b/tests/tensorflow/data/reference_graphs/2.12/pruning/filter_pruning/mobilenet_v2.pb new file mode 120000 index 00000000000..e17ca40b07f --- /dev/null +++ b/tests/tensorflow/data/reference_graphs/2.12/pruning/filter_pruning/mobilenet_v2.pb @@ -0,0 +1 @@ +../../../2.5/pruning/filter_pruning/mobilenet_v2.pb \ No newline at end of file diff --git a/tests/tensorflow/data/reference_graphs/2.12/pruning/filter_pruning/mobilenet_v3_large.pb b/tests/tensorflow/data/reference_graphs/2.12/pruning/filter_pruning/mobilenet_v3_large.pb new file mode 120000 index 00000000000..34a1302f52e --- /dev/null +++ b/tests/tensorflow/data/reference_graphs/2.12/pruning/filter_pruning/mobilenet_v3_large.pb @@ -0,0 +1 @@ +../../../2.5/pruning/filter_pruning/mobilenet_v3_large.pb \ No newline at end of file diff --git a/tests/tensorflow/data/reference_graphs/2.12/pruning/filter_pruning/mobilenet_v3_small.pb b/tests/tensorflow/data/reference_graphs/2.12/pruning/filter_pruning/mobilenet_v3_small.pb new file mode 120000 index 00000000000..bb7c2d925b7 --- /dev/null +++ b/tests/tensorflow/data/reference_graphs/2.12/pruning/filter_pruning/mobilenet_v3_small.pb @@ -0,0 +1 @@ +../../../2.5/pruning/filter_pruning/mobilenet_v3_small.pb \ No newline at end of file diff --git a/tests/tensorflow/data/reference_graphs/2.12/pruning/filter_pruning/resnet50.pb b/tests/tensorflow/data/reference_graphs/2.12/pruning/filter_pruning/resnet50.pb new file mode 120000 index 00000000000..e85e33dd6c8 --- /dev/null +++ b/tests/tensorflow/data/reference_graphs/2.12/pruning/filter_pruning/resnet50.pb @@ -0,0 +1 @@ +../../../2.5/pruning/filter_pruning/resnet50.pb \ No newline at end of file diff --git a/tests/tensorflow/data/reference_graphs/2.12/pruning/filter_pruning/retinanet.pb b/tests/tensorflow/data/reference_graphs/2.12/pruning/filter_pruning/retinanet.pb new file mode 120000 index 00000000000..9167b5db0ea --- /dev/null +++ b/tests/tensorflow/data/reference_graphs/2.12/pruning/filter_pruning/retinanet.pb @@ -0,0 +1 @@ +../../../2.5/pruning/filter_pruning/retinanet.pb \ No newline at end of file diff --git a/tests/tensorflow/data/reference_graphs/2.12/pruning/filter_pruning/sequential_model.pb b/tests/tensorflow/data/reference_graphs/2.12/pruning/filter_pruning/sequential_model.pb new file mode 120000 index 00000000000..678552a4738 --- /dev/null +++ b/tests/tensorflow/data/reference_graphs/2.12/pruning/filter_pruning/sequential_model.pb @@ -0,0 +1 @@ +../../../2.5/pruning/filter_pruning/sequential_model.pb \ No newline at end of file diff --git a/tests/tensorflow/data/reference_graphs/2.12/pruning/filter_pruning/sequential_no_input_model.pb b/tests/tensorflow/data/reference_graphs/2.12/pruning/filter_pruning/sequential_no_input_model.pb new file mode 120000 index 00000000000..1303a24b47f --- /dev/null +++ 
b/tests/tensorflow/data/reference_graphs/2.12/pruning/filter_pruning/sequential_no_input_model.pb @@ -0,0 +1 @@ +../../../2.5/pruning/filter_pruning/sequential_no_input_model.pb \ No newline at end of file diff --git a/tests/tensorflow/data/reference_graphs/2.12/pruning/filter_pruning/shared_layers_model.pb b/tests/tensorflow/data/reference_graphs/2.12/pruning/filter_pruning/shared_layers_model.pb new file mode 120000 index 00000000000..f387475d4a2 --- /dev/null +++ b/tests/tensorflow/data/reference_graphs/2.12/pruning/filter_pruning/shared_layers_model.pb @@ -0,0 +1 @@ +../../../2.5/pruning/filter_pruning/shared_layers_model.pb \ No newline at end of file diff --git a/tests/tensorflow/data/reference_graphs/2.12/pruning/filter_pruning/vgg16.pb b/tests/tensorflow/data/reference_graphs/2.12/pruning/filter_pruning/vgg16.pb new file mode 120000 index 00000000000..cf385346f90 --- /dev/null +++ b/tests/tensorflow/data/reference_graphs/2.12/pruning/filter_pruning/vgg16.pb @@ -0,0 +1 @@ +../../../2.5/pruning/filter_pruning/vgg16.pb \ No newline at end of file diff --git a/tests/tensorflow/data/reference_graphs/2.12/pruning/filter_pruning/yolo_v4.pb b/tests/tensorflow/data/reference_graphs/2.12/pruning/filter_pruning/yolo_v4.pb new file mode 120000 index 00000000000..e6628c7cf00 --- /dev/null +++ b/tests/tensorflow/data/reference_graphs/2.12/pruning/filter_pruning/yolo_v4.pb @@ -0,0 +1 @@ +../../../2.5/pruning/filter_pruning/yolo_v4.pb \ No newline at end of file diff --git a/tests/tensorflow/data/reference_graphs/2.12/quantized/hw/CPU/inception_v3.dot b/tests/tensorflow/data/reference_graphs/2.12/quantized/hw/CPU/inception_v3.dot new file mode 120000 index 00000000000..b7f25fb2012 --- /dev/null +++ b/tests/tensorflow/data/reference_graphs/2.12/quantized/hw/CPU/inception_v3.dot @@ -0,0 +1 @@ +../../../../2.11/quantized/hw/CPU/inception_v3.dot \ No newline at end of file diff --git a/tests/tensorflow/data/reference_graphs/2.12/quantized/hw/CPU/mobilenet_v2.pb b/tests/tensorflow/data/reference_graphs/2.12/quantized/hw/CPU/mobilenet_v2.pb new file mode 120000 index 00000000000..5226200225d --- /dev/null +++ b/tests/tensorflow/data/reference_graphs/2.12/quantized/hw/CPU/mobilenet_v2.pb @@ -0,0 +1 @@ +../../../../2.5/quantized/hw/CPU/mobilenet_v2.pb \ No newline at end of file diff --git a/tests/tensorflow/data/reference_graphs/2.12/quantized/hw/CPU/resnet50.pb b/tests/tensorflow/data/reference_graphs/2.12/quantized/hw/CPU/resnet50.pb new file mode 120000 index 00000000000..8ecf9505e44 --- /dev/null +++ b/tests/tensorflow/data/reference_graphs/2.12/quantized/hw/CPU/resnet50.pb @@ -0,0 +1 @@ +../../../../2.5/quantized/hw/CPU/resnet50.pb \ No newline at end of file diff --git a/tests/tensorflow/data/reference_graphs/2.12/quantized/hw/GPU/inception_v3.dot b/tests/tensorflow/data/reference_graphs/2.12/quantized/hw/GPU/inception_v3.dot new file mode 120000 index 00000000000..dac7cb1d8a8 --- /dev/null +++ b/tests/tensorflow/data/reference_graphs/2.12/quantized/hw/GPU/inception_v3.dot @@ -0,0 +1 @@ +../../../../2.11/quantized/hw/GPU/inception_v3.dot \ No newline at end of file diff --git a/tests/tensorflow/data/reference_graphs/2.12/quantized/hw/GPU/mobilenet_v2.pb b/tests/tensorflow/data/reference_graphs/2.12/quantized/hw/GPU/mobilenet_v2.pb new file mode 120000 index 00000000000..0b233fa28b6 --- /dev/null +++ b/tests/tensorflow/data/reference_graphs/2.12/quantized/hw/GPU/mobilenet_v2.pb @@ -0,0 +1 @@ +../../../../2.5/quantized/hw/GPU/mobilenet_v2.pb \ No newline at end of file diff --git 
a/tests/tensorflow/data/reference_graphs/2.12/quantized/hw/GPU/resnet50.pb b/tests/tensorflow/data/reference_graphs/2.12/quantized/hw/GPU/resnet50.pb new file mode 120000 index 00000000000..8b417f58e27 --- /dev/null +++ b/tests/tensorflow/data/reference_graphs/2.12/quantized/hw/GPU/resnet50.pb @@ -0,0 +1 @@ +../../../../2.5/quantized/hw/GPU/resnet50.pb \ No newline at end of file diff --git a/tests/tensorflow/data/reference_graphs/2.12/quantized/hw/VPU/inception_v3.dot b/tests/tensorflow/data/reference_graphs/2.12/quantized/hw/VPU/inception_v3.dot new file mode 120000 index 00000000000..d7baa691f29 --- /dev/null +++ b/tests/tensorflow/data/reference_graphs/2.12/quantized/hw/VPU/inception_v3.dot @@ -0,0 +1 @@ +../../../../2.11/quantized/hw/VPU/inception_v3.dot \ No newline at end of file diff --git a/tests/tensorflow/data/reference_graphs/2.12/quantized/hw/VPU/mobilenet_v2.pb b/tests/tensorflow/data/reference_graphs/2.12/quantized/hw/VPU/mobilenet_v2.pb new file mode 120000 index 00000000000..d4d42d12ce2 --- /dev/null +++ b/tests/tensorflow/data/reference_graphs/2.12/quantized/hw/VPU/mobilenet_v2.pb @@ -0,0 +1 @@ +../../../../2.5/quantized/hw/VPU/mobilenet_v2.pb \ No newline at end of file diff --git a/tests/tensorflow/data/reference_graphs/2.12/quantized/hw/VPU/resnet50.pb b/tests/tensorflow/data/reference_graphs/2.12/quantized/hw/VPU/resnet50.pb new file mode 120000 index 00000000000..134fb9ada38 --- /dev/null +++ b/tests/tensorflow/data/reference_graphs/2.12/quantized/hw/VPU/resnet50.pb @@ -0,0 +1 @@ +../../../../2.5/quantized/hw/VPU/resnet50.pb \ No newline at end of file diff --git a/tests/tensorflow/data/reference_graphs/2.12/quantized/w_sym_ch_a_asym_t/densenet121.pb b/tests/tensorflow/data/reference_graphs/2.12/quantized/w_sym_ch_a_asym_t/densenet121.pb new file mode 120000 index 00000000000..e52a7dccd05 --- /dev/null +++ b/tests/tensorflow/data/reference_graphs/2.12/quantized/w_sym_ch_a_asym_t/densenet121.pb @@ -0,0 +1 @@ +../../../2.5/quantized/w_sym_ch_a_asym_t/densenet121.pb \ No newline at end of file diff --git a/tests/tensorflow/data/reference_graphs/2.12/quantized/w_sym_ch_a_asym_t/inception_v3.dot b/tests/tensorflow/data/reference_graphs/2.12/quantized/w_sym_ch_a_asym_t/inception_v3.dot new file mode 120000 index 00000000000..a3e84ecd554 --- /dev/null +++ b/tests/tensorflow/data/reference_graphs/2.12/quantized/w_sym_ch_a_asym_t/inception_v3.dot @@ -0,0 +1 @@ +../../../2.11/quantized/w_sym_ch_a_asym_t/inception_v3.dot \ No newline at end of file diff --git a/tests/tensorflow/data/reference_graphs/2.12/quantized/w_sym_ch_a_asym_t/mask_rcnn.dot b/tests/tensorflow/data/reference_graphs/2.12/quantized/w_sym_ch_a_asym_t/mask_rcnn.dot new file mode 120000 index 00000000000..673ef2cdf80 --- /dev/null +++ b/tests/tensorflow/data/reference_graphs/2.12/quantized/w_sym_ch_a_asym_t/mask_rcnn.dot @@ -0,0 +1 @@ +../../../2.8/quantized/w_sym_ch_a_asym_t/mask_rcnn.dot \ No newline at end of file diff --git a/tests/tensorflow/data/reference_graphs/2.12/quantized/w_sym_ch_a_asym_t/mobilenet_v1.pb b/tests/tensorflow/data/reference_graphs/2.12/quantized/w_sym_ch_a_asym_t/mobilenet_v1.pb new file mode 120000 index 00000000000..847560a33b2 --- /dev/null +++ b/tests/tensorflow/data/reference_graphs/2.12/quantized/w_sym_ch_a_asym_t/mobilenet_v1.pb @@ -0,0 +1 @@ +../../../2.8/quantized/w_sym_ch_a_sym_t/mobilenet_v1.pb \ No newline at end of file diff --git a/tests/tensorflow/data/reference_graphs/2.12/quantized/w_sym_ch_a_asym_t/mobilenet_v2.pb 
b/tests/tensorflow/data/reference_graphs/2.12/quantized/w_sym_ch_a_asym_t/mobilenet_v2.pb new file mode 120000 index 00000000000..402c94b31ed --- /dev/null +++ b/tests/tensorflow/data/reference_graphs/2.12/quantized/w_sym_ch_a_asym_t/mobilenet_v2.pb @@ -0,0 +1 @@ +../../../2.5/quantized/w_sym_ch_a_asym_t/mobilenet_v2.pb \ No newline at end of file diff --git a/tests/tensorflow/data/reference_graphs/2.12/quantized/w_sym_ch_a_asym_t/mobilenet_v2_quantize_outputs.pb b/tests/tensorflow/data/reference_graphs/2.12/quantized/w_sym_ch_a_asym_t/mobilenet_v2_quantize_outputs.pb new file mode 120000 index 00000000000..478fc8d3fa8 --- /dev/null +++ b/tests/tensorflow/data/reference_graphs/2.12/quantized/w_sym_ch_a_asym_t/mobilenet_v2_quantize_outputs.pb @@ -0,0 +1 @@ +../../../2.5/quantized/w_sym_ch_a_asym_t/mobilenet_v2_quantize_outputs.pb \ No newline at end of file diff --git a/tests/tensorflow/data/reference_graphs/2.12/quantized/w_sym_ch_a_asym_t/mobilenet_v3_large.pb b/tests/tensorflow/data/reference_graphs/2.12/quantized/w_sym_ch_a_asym_t/mobilenet_v3_large.pb new file mode 120000 index 00000000000..88db8bb91a3 --- /dev/null +++ b/tests/tensorflow/data/reference_graphs/2.12/quantized/w_sym_ch_a_asym_t/mobilenet_v3_large.pb @@ -0,0 +1 @@ +../../../2.5/quantized/w_sym_ch_a_asym_t/mobilenet_v3_large.pb \ No newline at end of file diff --git a/tests/tensorflow/data/reference_graphs/2.12/quantized/w_sym_ch_a_asym_t/mobilenet_v3_small.pb b/tests/tensorflow/data/reference_graphs/2.12/quantized/w_sym_ch_a_asym_t/mobilenet_v3_small.pb new file mode 120000 index 00000000000..21f9bd9f320 --- /dev/null +++ b/tests/tensorflow/data/reference_graphs/2.12/quantized/w_sym_ch_a_asym_t/mobilenet_v3_small.pb @@ -0,0 +1 @@ +../../../2.5/quantized/w_sym_ch_a_asym_t/mobilenet_v3_small.pb \ No newline at end of file diff --git a/tests/tensorflow/data/reference_graphs/2.12/quantized/w_sym_ch_a_asym_t/resnet50.pb b/tests/tensorflow/data/reference_graphs/2.12/quantized/w_sym_ch_a_asym_t/resnet50.pb new file mode 120000 index 00000000000..369cfbb1f53 --- /dev/null +++ b/tests/tensorflow/data/reference_graphs/2.12/quantized/w_sym_ch_a_asym_t/resnet50.pb @@ -0,0 +1 @@ +../../../2.5/quantized/w_sym_ch_a_asym_t/resnet50.pb \ No newline at end of file diff --git a/tests/tensorflow/data/reference_graphs/2.12/quantized/w_sym_ch_a_asym_t/resnet50v2.pb b/tests/tensorflow/data/reference_graphs/2.12/quantized/w_sym_ch_a_asym_t/resnet50v2.pb new file mode 120000 index 00000000000..fe6a09b9f27 --- /dev/null +++ b/tests/tensorflow/data/reference_graphs/2.12/quantized/w_sym_ch_a_asym_t/resnet50v2.pb @@ -0,0 +1 @@ +../../../2.5/quantized/w_sym_ch_a_asym_t/resnet50v2.pb \ No newline at end of file diff --git a/tests/tensorflow/data/reference_graphs/2.12/quantized/w_sym_ch_a_asym_t/retinanet.pb b/tests/tensorflow/data/reference_graphs/2.12/quantized/w_sym_ch_a_asym_t/retinanet.pb new file mode 120000 index 00000000000..5856e0292ed --- /dev/null +++ b/tests/tensorflow/data/reference_graphs/2.12/quantized/w_sym_ch_a_asym_t/retinanet.pb @@ -0,0 +1 @@ +../../../2.8/quantized/w_sym_ch_a_asym_t/retinanet.pb \ No newline at end of file diff --git a/tests/tensorflow/data/reference_graphs/2.12/quantized/w_sym_ch_a_asym_t/retinanet_quantize_outputs.pb b/tests/tensorflow/data/reference_graphs/2.12/quantized/w_sym_ch_a_asym_t/retinanet_quantize_outputs.pb new file mode 120000 index 00000000000..045530738aa --- /dev/null +++ b/tests/tensorflow/data/reference_graphs/2.12/quantized/w_sym_ch_a_asym_t/retinanet_quantize_outputs.pb @@ -0,0 +1 @@ 
+../../../2.8/quantized/w_sym_ch_a_asym_t/retinanet_quantize_outputs.pb \ No newline at end of file diff --git a/tests/tensorflow/data/reference_graphs/2.12/quantized/w_sym_ch_a_asym_t/sequential_model.pb b/tests/tensorflow/data/reference_graphs/2.12/quantized/w_sym_ch_a_asym_t/sequential_model.pb new file mode 120000 index 00000000000..491a3242973 --- /dev/null +++ b/tests/tensorflow/data/reference_graphs/2.12/quantized/w_sym_ch_a_asym_t/sequential_model.pb @@ -0,0 +1 @@ +../../../2.5/quantized/w_sym_ch_a_asym_t/sequential_model.pb \ No newline at end of file diff --git a/tests/tensorflow/data/reference_graphs/2.12/quantized/w_sym_ch_a_asym_t/sequential_model_quantize_outputs.pb b/tests/tensorflow/data/reference_graphs/2.12/quantized/w_sym_ch_a_asym_t/sequential_model_quantize_outputs.pb new file mode 120000 index 00000000000..473c0e42c8c --- /dev/null +++ b/tests/tensorflow/data/reference_graphs/2.12/quantized/w_sym_ch_a_asym_t/sequential_model_quantize_outputs.pb @@ -0,0 +1 @@ +../../../2.5/quantized/w_sym_ch_a_asym_t/sequential_model_quantize_outputs.pb \ No newline at end of file diff --git a/tests/tensorflow/data/reference_graphs/2.12/quantized/w_sym_ch_a_asym_t/sequential_no_input_model.pb b/tests/tensorflow/data/reference_graphs/2.12/quantized/w_sym_ch_a_asym_t/sequential_no_input_model.pb new file mode 120000 index 00000000000..bc092233d92 --- /dev/null +++ b/tests/tensorflow/data/reference_graphs/2.12/quantized/w_sym_ch_a_asym_t/sequential_no_input_model.pb @@ -0,0 +1 @@ +../../../2.5/quantized/w_sym_ch_a_asym_t/sequential_no_input_model.pb \ No newline at end of file diff --git a/tests/tensorflow/data/reference_graphs/2.12/quantized/w_sym_ch_a_asym_t/shared_layers_model.pb b/tests/tensorflow/data/reference_graphs/2.12/quantized/w_sym_ch_a_asym_t/shared_layers_model.pb new file mode 120000 index 00000000000..5a0db633b39 --- /dev/null +++ b/tests/tensorflow/data/reference_graphs/2.12/quantized/w_sym_ch_a_asym_t/shared_layers_model.pb @@ -0,0 +1 @@ +../../../2.5/quantized/w_sym_ch_a_asym_t/shared_layers_model.pb \ No newline at end of file diff --git a/tests/tensorflow/data/reference_graphs/2.12/quantized/w_sym_ch_a_asym_t/shared_layers_model_quantize_outputs.pb b/tests/tensorflow/data/reference_graphs/2.12/quantized/w_sym_ch_a_asym_t/shared_layers_model_quantize_outputs.pb new file mode 120000 index 00000000000..679d3657e8a --- /dev/null +++ b/tests/tensorflow/data/reference_graphs/2.12/quantized/w_sym_ch_a_asym_t/shared_layers_model_quantize_outputs.pb @@ -0,0 +1 @@ +../../../2.5/quantized/w_sym_ch_a_asym_t/shared_layers_model_quantize_outputs.pb \ No newline at end of file diff --git a/tests/tensorflow/data/reference_graphs/2.12/quantized/w_sym_ch_a_asym_t/vgg16.pb b/tests/tensorflow/data/reference_graphs/2.12/quantized/w_sym_ch_a_asym_t/vgg16.pb new file mode 120000 index 00000000000..b34288d4928 --- /dev/null +++ b/tests/tensorflow/data/reference_graphs/2.12/quantized/w_sym_ch_a_asym_t/vgg16.pb @@ -0,0 +1 @@ +../../../2.5/quantized/w_sym_ch_a_asym_t/vgg16.pb \ No newline at end of file diff --git a/tests/tensorflow/data/reference_graphs/2.12/quantized/w_sym_ch_a_asym_t/yolo_v4.pb b/tests/tensorflow/data/reference_graphs/2.12/quantized/w_sym_ch_a_asym_t/yolo_v4.pb new file mode 120000 index 00000000000..58992e924f8 --- /dev/null +++ b/tests/tensorflow/data/reference_graphs/2.12/quantized/w_sym_ch_a_asym_t/yolo_v4.pb @@ -0,0 +1 @@ +../../../2.5/quantized/w_sym_ch_a_asym_t/yolo_v4.pb \ No newline at end of file diff --git 
a/tests/tensorflow/data/reference_graphs/2.12/quantized/w_sym_t_a_sym_t/densenet121.pb b/tests/tensorflow/data/reference_graphs/2.12/quantized/w_sym_t_a_sym_t/densenet121.pb new file mode 120000 index 00000000000..c39ddb5fef0 --- /dev/null +++ b/tests/tensorflow/data/reference_graphs/2.12/quantized/w_sym_t_a_sym_t/densenet121.pb @@ -0,0 +1 @@ +../../../2.5/quantized/w_sym_t_a_sym_t/densenet121.pb \ No newline at end of file diff --git a/tests/tensorflow/data/reference_graphs/2.12/quantized/w_sym_t_a_sym_t/inception_v3.dot b/tests/tensorflow/data/reference_graphs/2.12/quantized/w_sym_t_a_sym_t/inception_v3.dot new file mode 120000 index 00000000000..9c198b73f64 --- /dev/null +++ b/tests/tensorflow/data/reference_graphs/2.12/quantized/w_sym_t_a_sym_t/inception_v3.dot @@ -0,0 +1 @@ +../../../2.11/quantized/w_sym_t_a_sym_t/inception_v3.dot \ No newline at end of file diff --git a/tests/tensorflow/data/reference_graphs/2.12/quantized/w_sym_t_a_sym_t/mask_rcnn.dot b/tests/tensorflow/data/reference_graphs/2.12/quantized/w_sym_t_a_sym_t/mask_rcnn.dot new file mode 120000 index 00000000000..6b1b555dd66 --- /dev/null +++ b/tests/tensorflow/data/reference_graphs/2.12/quantized/w_sym_t_a_sym_t/mask_rcnn.dot @@ -0,0 +1 @@ +../../../2.8/quantized/w_sym_t_a_sym_t/mask_rcnn.dot \ No newline at end of file diff --git a/tests/tensorflow/data/reference_graphs/2.12/quantized/w_sym_t_a_sym_t/mobilenet_v1.pb b/tests/tensorflow/data/reference_graphs/2.12/quantized/w_sym_t_a_sym_t/mobilenet_v1.pb new file mode 120000 index 00000000000..004ad70bec7 --- /dev/null +++ b/tests/tensorflow/data/reference_graphs/2.12/quantized/w_sym_t_a_sym_t/mobilenet_v1.pb @@ -0,0 +1 @@ +../../../2.8/quantized/w_sym_t_a_sym_t/mobilenet_v1.pb \ No newline at end of file diff --git a/tests/tensorflow/data/reference_graphs/2.12/quantized/w_sym_t_a_sym_t/mobilenet_v2.pb b/tests/tensorflow/data/reference_graphs/2.12/quantized/w_sym_t_a_sym_t/mobilenet_v2.pb new file mode 120000 index 00000000000..4d1ae27c997 --- /dev/null +++ b/tests/tensorflow/data/reference_graphs/2.12/quantized/w_sym_t_a_sym_t/mobilenet_v2.pb @@ -0,0 +1 @@ +../../../2.5/quantized/w_sym_t_a_sym_t/mobilenet_v2.pb \ No newline at end of file diff --git a/tests/tensorflow/data/reference_graphs/2.12/quantized/w_sym_t_a_sym_t/mobilenet_v2_quantize_outputs.pb b/tests/tensorflow/data/reference_graphs/2.12/quantized/w_sym_t_a_sym_t/mobilenet_v2_quantize_outputs.pb new file mode 120000 index 00000000000..9992bc1eac7 --- /dev/null +++ b/tests/tensorflow/data/reference_graphs/2.12/quantized/w_sym_t_a_sym_t/mobilenet_v2_quantize_outputs.pb @@ -0,0 +1 @@ +../../../2.5/quantized/w_sym_t_a_sym_t/mobilenet_v2_quantize_outputs.pb \ No newline at end of file diff --git a/tests/tensorflow/data/reference_graphs/2.12/quantized/w_sym_t_a_sym_t/mobilenet_v3_large.pb b/tests/tensorflow/data/reference_graphs/2.12/quantized/w_sym_t_a_sym_t/mobilenet_v3_large.pb new file mode 120000 index 00000000000..6d3cdb1eca4 --- /dev/null +++ b/tests/tensorflow/data/reference_graphs/2.12/quantized/w_sym_t_a_sym_t/mobilenet_v3_large.pb @@ -0,0 +1 @@ +../../../2.5/quantized/w_sym_t_a_sym_t/mobilenet_v3_large.pb \ No newline at end of file diff --git a/tests/tensorflow/data/reference_graphs/2.12/quantized/w_sym_t_a_sym_t/mobilenet_v3_small.pb b/tests/tensorflow/data/reference_graphs/2.12/quantized/w_sym_t_a_sym_t/mobilenet_v3_small.pb new file mode 120000 index 00000000000..4430ee0c872 --- /dev/null +++ b/tests/tensorflow/data/reference_graphs/2.12/quantized/w_sym_t_a_sym_t/mobilenet_v3_small.pb @@ -0,0 +1 @@ 
+../../../2.5/quantized/w_sym_t_a_sym_t/mobilenet_v3_small.pb \ No newline at end of file diff --git a/tests/tensorflow/data/reference_graphs/2.12/quantized/w_sym_t_a_sym_t/resnet50.pb b/tests/tensorflow/data/reference_graphs/2.12/quantized/w_sym_t_a_sym_t/resnet50.pb new file mode 120000 index 00000000000..4238a12dc61 --- /dev/null +++ b/tests/tensorflow/data/reference_graphs/2.12/quantized/w_sym_t_a_sym_t/resnet50.pb @@ -0,0 +1 @@ +../../../2.5/quantized/w_sym_t_a_sym_t/resnet50.pb \ No newline at end of file diff --git a/tests/tensorflow/data/reference_graphs/2.12/quantized/w_sym_t_a_sym_t/resnet50v2.pb b/tests/tensorflow/data/reference_graphs/2.12/quantized/w_sym_t_a_sym_t/resnet50v2.pb new file mode 120000 index 00000000000..73b592146d7 --- /dev/null +++ b/tests/tensorflow/data/reference_graphs/2.12/quantized/w_sym_t_a_sym_t/resnet50v2.pb @@ -0,0 +1 @@ +../../../2.5/quantized/w_sym_t_a_sym_t/resnet50v2.pb \ No newline at end of file diff --git a/tests/tensorflow/data/reference_graphs/2.12/quantized/w_sym_t_a_sym_t/retinanet.pb b/tests/tensorflow/data/reference_graphs/2.12/quantized/w_sym_t_a_sym_t/retinanet.pb new file mode 120000 index 00000000000..cf17c2e0a2e --- /dev/null +++ b/tests/tensorflow/data/reference_graphs/2.12/quantized/w_sym_t_a_sym_t/retinanet.pb @@ -0,0 +1 @@ +../../../2.8/quantized/w_sym_t_a_sym_t/retinanet.pb \ No newline at end of file diff --git a/tests/tensorflow/data/reference_graphs/2.12/quantized/w_sym_t_a_sym_t/retinanet_quantize_outputs.pb b/tests/tensorflow/data/reference_graphs/2.12/quantized/w_sym_t_a_sym_t/retinanet_quantize_outputs.pb new file mode 120000 index 00000000000..75c2a9e7f3d --- /dev/null +++ b/tests/tensorflow/data/reference_graphs/2.12/quantized/w_sym_t_a_sym_t/retinanet_quantize_outputs.pb @@ -0,0 +1 @@ +../../../2.8/quantized/w_sym_t_a_sym_t/retinanet_quantize_outputs.pb \ No newline at end of file diff --git a/tests/tensorflow/data/reference_graphs/2.12/quantized/w_sym_t_a_sym_t/sequential_model.pb b/tests/tensorflow/data/reference_graphs/2.12/quantized/w_sym_t_a_sym_t/sequential_model.pb new file mode 120000 index 00000000000..b747ec9baaf --- /dev/null +++ b/tests/tensorflow/data/reference_graphs/2.12/quantized/w_sym_t_a_sym_t/sequential_model.pb @@ -0,0 +1 @@ +../../../2.5/quantized/w_sym_t_a_sym_t/sequential_model.pb \ No newline at end of file diff --git a/tests/tensorflow/data/reference_graphs/2.12/quantized/w_sym_t_a_sym_t/sequential_model_quantize_outputs.pb b/tests/tensorflow/data/reference_graphs/2.12/quantized/w_sym_t_a_sym_t/sequential_model_quantize_outputs.pb new file mode 120000 index 00000000000..ea977b6cef5 --- /dev/null +++ b/tests/tensorflow/data/reference_graphs/2.12/quantized/w_sym_t_a_sym_t/sequential_model_quantize_outputs.pb @@ -0,0 +1 @@ +../../../2.5/quantized/w_sym_t_a_sym_t/sequential_model_quantize_outputs.pb \ No newline at end of file diff --git a/tests/tensorflow/data/reference_graphs/2.12/quantized/w_sym_t_a_sym_t/sequential_no_input_model.pb b/tests/tensorflow/data/reference_graphs/2.12/quantized/w_sym_t_a_sym_t/sequential_no_input_model.pb new file mode 120000 index 00000000000..7d1bd9b4e9f --- /dev/null +++ b/tests/tensorflow/data/reference_graphs/2.12/quantized/w_sym_t_a_sym_t/sequential_no_input_model.pb @@ -0,0 +1 @@ +../../../2.5/quantized/w_sym_t_a_sym_t/sequential_no_input_model.pb \ No newline at end of file diff --git a/tests/tensorflow/data/reference_graphs/2.12/quantized/w_sym_t_a_sym_t/shared_layers_model.pb 
b/tests/tensorflow/data/reference_graphs/2.12/quantized/w_sym_t_a_sym_t/shared_layers_model.pb new file mode 120000 index 00000000000..bee0db70b08 --- /dev/null +++ b/tests/tensorflow/data/reference_graphs/2.12/quantized/w_sym_t_a_sym_t/shared_layers_model.pb @@ -0,0 +1 @@ +../../../2.5/quantized/w_sym_t_a_sym_t/shared_layers_model.pb \ No newline at end of file diff --git a/tests/tensorflow/data/reference_graphs/2.12/quantized/w_sym_t_a_sym_t/shared_layers_model_quantize_outputs.pb b/tests/tensorflow/data/reference_graphs/2.12/quantized/w_sym_t_a_sym_t/shared_layers_model_quantize_outputs.pb new file mode 120000 index 00000000000..31fedbe39ee --- /dev/null +++ b/tests/tensorflow/data/reference_graphs/2.12/quantized/w_sym_t_a_sym_t/shared_layers_model_quantize_outputs.pb @@ -0,0 +1 @@ +../../../2.5/quantized/w_sym_t_a_sym_t/shared_layers_model_quantize_outputs.pb \ No newline at end of file diff --git a/tests/tensorflow/data/reference_graphs/2.12/quantized/w_sym_t_a_sym_t/vgg16.pb b/tests/tensorflow/data/reference_graphs/2.12/quantized/w_sym_t_a_sym_t/vgg16.pb new file mode 120000 index 00000000000..a3cfb8cd8e7 --- /dev/null +++ b/tests/tensorflow/data/reference_graphs/2.12/quantized/w_sym_t_a_sym_t/vgg16.pb @@ -0,0 +1 @@ +../../../2.5/quantized/w_sym_t_a_sym_t/vgg16.pb \ No newline at end of file diff --git a/tests/tensorflow/data/reference_graphs/2.12/quantized/w_sym_t_a_sym_t/yolo_v4.pb b/tests/tensorflow/data/reference_graphs/2.12/quantized/w_sym_t_a_sym_t/yolo_v4.pb new file mode 120000 index 00000000000..cb535dddeb7 --- /dev/null +++ b/tests/tensorflow/data/reference_graphs/2.12/quantized/w_sym_t_a_sym_t/yolo_v4.pb @@ -0,0 +1 @@ +../../../2.5/quantized/w_sym_t_a_sym_t/yolo_v4.pb \ No newline at end of file diff --git a/tests/tensorflow/data/reference_graphs/2.12/sparsity/magnitude_sparsity/densenet121.pb b/tests/tensorflow/data/reference_graphs/2.12/sparsity/magnitude_sparsity/densenet121.pb new file mode 120000 index 00000000000..53ef85a5e13 --- /dev/null +++ b/tests/tensorflow/data/reference_graphs/2.12/sparsity/magnitude_sparsity/densenet121.pb @@ -0,0 +1 @@ +../../../2.5/sparsity/magnitude_sparsity/densenet121.pb \ No newline at end of file diff --git a/tests/tensorflow/data/reference_graphs/2.12/sparsity/magnitude_sparsity/inception_v3.dot b/tests/tensorflow/data/reference_graphs/2.12/sparsity/magnitude_sparsity/inception_v3.dot new file mode 120000 index 00000000000..573fecc45a8 --- /dev/null +++ b/tests/tensorflow/data/reference_graphs/2.12/sparsity/magnitude_sparsity/inception_v3.dot @@ -0,0 +1 @@ +../../../2.11/sparsity/magnitude_sparsity/inception_v3.dot \ No newline at end of file diff --git a/tests/tensorflow/data/reference_graphs/2.12/sparsity/magnitude_sparsity/mask_rcnn.dot b/tests/tensorflow/data/reference_graphs/2.12/sparsity/magnitude_sparsity/mask_rcnn.dot new file mode 120000 index 00000000000..5ad140472a0 --- /dev/null +++ b/tests/tensorflow/data/reference_graphs/2.12/sparsity/magnitude_sparsity/mask_rcnn.dot @@ -0,0 +1 @@ +../../../2.8/sparsity/magnitude_sparsity/mask_rcnn.dot \ No newline at end of file diff --git a/tests/tensorflow/data/reference_graphs/2.12/sparsity/magnitude_sparsity/mobilenet_v1.pb b/tests/tensorflow/data/reference_graphs/2.12/sparsity/magnitude_sparsity/mobilenet_v1.pb new file mode 120000 index 00000000000..0c4164c846d --- /dev/null +++ b/tests/tensorflow/data/reference_graphs/2.12/sparsity/magnitude_sparsity/mobilenet_v1.pb @@ -0,0 +1 @@ +../../../2.8/sparsity/magnitude_sparsity/mobilenet_v1.pb \ No newline at end of file diff --git 
a/tests/tensorflow/data/reference_graphs/2.12/sparsity/magnitude_sparsity/mobilenet_v2.pb b/tests/tensorflow/data/reference_graphs/2.12/sparsity/magnitude_sparsity/mobilenet_v2.pb new file mode 120000 index 00000000000..fb622ce0ab9 --- /dev/null +++ b/tests/tensorflow/data/reference_graphs/2.12/sparsity/magnitude_sparsity/mobilenet_v2.pb @@ -0,0 +1 @@ +../../../2.5/sparsity/magnitude_sparsity/mobilenet_v2.pb \ No newline at end of file diff --git a/tests/tensorflow/data/reference_graphs/2.12/sparsity/magnitude_sparsity/mobilenet_v2_slim.dot b/tests/tensorflow/data/reference_graphs/2.12/sparsity/magnitude_sparsity/mobilenet_v2_slim.dot new file mode 120000 index 00000000000..07e2dcd5caf --- /dev/null +++ b/tests/tensorflow/data/reference_graphs/2.12/sparsity/magnitude_sparsity/mobilenet_v2_slim.dot @@ -0,0 +1 @@ +../../../2.5/sparsity/magnitude_sparsity/mobilenet_v2_slim.dot \ No newline at end of file diff --git a/tests/tensorflow/data/reference_graphs/2.12/sparsity/magnitude_sparsity/mobilenet_v3_large.pb b/tests/tensorflow/data/reference_graphs/2.12/sparsity/magnitude_sparsity/mobilenet_v3_large.pb new file mode 120000 index 00000000000..efa01e61c4c --- /dev/null +++ b/tests/tensorflow/data/reference_graphs/2.12/sparsity/magnitude_sparsity/mobilenet_v3_large.pb @@ -0,0 +1 @@ +../../../2.5/sparsity/magnitude_sparsity/mobilenet_v3_large.pb \ No newline at end of file diff --git a/tests/tensorflow/data/reference_graphs/2.12/sparsity/magnitude_sparsity/mobilenet_v3_small.pb b/tests/tensorflow/data/reference_graphs/2.12/sparsity/magnitude_sparsity/mobilenet_v3_small.pb new file mode 120000 index 00000000000..3a40c46bcde --- /dev/null +++ b/tests/tensorflow/data/reference_graphs/2.12/sparsity/magnitude_sparsity/mobilenet_v3_small.pb @@ -0,0 +1 @@ +../../../2.5/sparsity/magnitude_sparsity/mobilenet_v3_small.pb \ No newline at end of file diff --git a/tests/tensorflow/data/reference_graphs/2.12/sparsity/magnitude_sparsity/resnet50.pb b/tests/tensorflow/data/reference_graphs/2.12/sparsity/magnitude_sparsity/resnet50.pb new file mode 120000 index 00000000000..d49a59a2f7d --- /dev/null +++ b/tests/tensorflow/data/reference_graphs/2.12/sparsity/magnitude_sparsity/resnet50.pb @@ -0,0 +1 @@ +../../../2.5/sparsity/magnitude_sparsity/resnet50.pb \ No newline at end of file diff --git a/tests/tensorflow/data/reference_graphs/2.12/sparsity/magnitude_sparsity/resnet50v2.pb b/tests/tensorflow/data/reference_graphs/2.12/sparsity/magnitude_sparsity/resnet50v2.pb new file mode 120000 index 00000000000..bebbdaf2654 --- /dev/null +++ b/tests/tensorflow/data/reference_graphs/2.12/sparsity/magnitude_sparsity/resnet50v2.pb @@ -0,0 +1 @@ +../../../2.5/sparsity/magnitude_sparsity/resnet50v2.pb \ No newline at end of file diff --git a/tests/tensorflow/data/reference_graphs/2.12/sparsity/magnitude_sparsity/retinanet.pb b/tests/tensorflow/data/reference_graphs/2.12/sparsity/magnitude_sparsity/retinanet.pb new file mode 120000 index 00000000000..ac5d48eb711 --- /dev/null +++ b/tests/tensorflow/data/reference_graphs/2.12/sparsity/magnitude_sparsity/retinanet.pb @@ -0,0 +1 @@ +../../../2.5/sparsity/magnitude_sparsity/retinanet.pb \ No newline at end of file diff --git a/tests/tensorflow/data/reference_graphs/2.12/sparsity/magnitude_sparsity/sequential_model.pb b/tests/tensorflow/data/reference_graphs/2.12/sparsity/magnitude_sparsity/sequential_model.pb new file mode 120000 index 00000000000..4f3195d8239 --- /dev/null +++ b/tests/tensorflow/data/reference_graphs/2.12/sparsity/magnitude_sparsity/sequential_model.pb @@ -0,0 
+1 @@ +../../../2.5/sparsity/magnitude_sparsity/sequential_model.pb \ No newline at end of file diff --git a/tests/tensorflow/data/reference_graphs/2.12/sparsity/magnitude_sparsity/sequential_no_input_model.pb b/tests/tensorflow/data/reference_graphs/2.12/sparsity/magnitude_sparsity/sequential_no_input_model.pb new file mode 120000 index 00000000000..4c075a813e5 --- /dev/null +++ b/tests/tensorflow/data/reference_graphs/2.12/sparsity/magnitude_sparsity/sequential_no_input_model.pb @@ -0,0 +1 @@ +../../../2.5/sparsity/magnitude_sparsity/sequential_no_input_model.pb \ No newline at end of file diff --git a/tests/tensorflow/data/reference_graphs/2.12/sparsity/magnitude_sparsity/shared_layers_model.pb b/tests/tensorflow/data/reference_graphs/2.12/sparsity/magnitude_sparsity/shared_layers_model.pb new file mode 120000 index 00000000000..98185c6b3dd --- /dev/null +++ b/tests/tensorflow/data/reference_graphs/2.12/sparsity/magnitude_sparsity/shared_layers_model.pb @@ -0,0 +1 @@ +../../../2.5/sparsity/magnitude_sparsity/shared_layers_model.pb \ No newline at end of file diff --git a/tests/tensorflow/data/reference_graphs/2.12/sparsity/magnitude_sparsity/vgg16.pb b/tests/tensorflow/data/reference_graphs/2.12/sparsity/magnitude_sparsity/vgg16.pb new file mode 120000 index 00000000000..c672d49b6f6 --- /dev/null +++ b/tests/tensorflow/data/reference_graphs/2.12/sparsity/magnitude_sparsity/vgg16.pb @@ -0,0 +1 @@ +../../../2.5/sparsity/magnitude_sparsity/vgg16.pb \ No newline at end of file diff --git a/tests/tensorflow/data/reference_graphs/2.12/sparsity/magnitude_sparsity/yolo_v4.pb b/tests/tensorflow/data/reference_graphs/2.12/sparsity/magnitude_sparsity/yolo_v4.pb new file mode 120000 index 00000000000..f864bedfb08 --- /dev/null +++ b/tests/tensorflow/data/reference_graphs/2.12/sparsity/magnitude_sparsity/yolo_v4.pb @@ -0,0 +1 @@ +../../../2.5/sparsity/magnitude_sparsity/yolo_v4.pb \ No newline at end of file diff --git a/tests/tensorflow/data/reference_graphs/2.12/sparsity/rb_sparsity/densenet121.dot b/tests/tensorflow/data/reference_graphs/2.12/sparsity/rb_sparsity/densenet121.dot new file mode 120000 index 00000000000..e13eb607609 --- /dev/null +++ b/tests/tensorflow/data/reference_graphs/2.12/sparsity/rb_sparsity/densenet121.dot @@ -0,0 +1 @@ +../../../2.5/sparsity/rb_sparsity/densenet121.dot \ No newline at end of file diff --git a/tests/tensorflow/data/reference_graphs/2.12/sparsity/rb_sparsity/inception_resnet_v2.dot b/tests/tensorflow/data/reference_graphs/2.12/sparsity/rb_sparsity/inception_resnet_v2.dot new file mode 100644 index 00000000000..c08a617524d --- /dev/null +++ b/tests/tensorflow/data/reference_graphs/2.12/sparsity/rb_sparsity/inception_resnet_v2.dot @@ -0,0 +1,8447 @@ +strict digraph { +args_0 [op=Placeholder]; +"inception_resnet_v2/conv2d/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/conv2d/Round" [op=Round]; +"inception_resnet_v2/conv2d/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d/mul" [op=Mul]; +"inception_resnet_v2/conv2d/Conv2D" [op=Conv2D]; +"inception_resnet_v2/batch_normalization/Const" [op=Const]; +"inception_resnet_v2/batch_normalization/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization/ReadVariableOp" [op=ReadVariableOp]; 
+"inception_resnet_v2/batch_normalization/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_resnet_v2/activation/Relu" [op=Relu]; +"inception_resnet_v2/conv2d_1/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_1/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_1/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/conv2d_1/Round" [op=Round]; +"inception_resnet_v2/conv2d_1/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_1/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_1/mul" [op=Mul]; +"inception_resnet_v2/conv2d_1/Conv2D" [op=Conv2D]; +"inception_resnet_v2/batch_normalization_1/Const" [op=Const]; +"inception_resnet_v2/batch_normalization_1/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_1/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_1/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_1/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_1/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_1/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_1/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_resnet_v2/activation_1/Relu" [op=Relu]; +"inception_resnet_v2/conv2d_2/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_2/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_2/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/conv2d_2/Round" [op=Round]; +"inception_resnet_v2/conv2d_2/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_2/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_2/mul" [op=Mul]; +"inception_resnet_v2/conv2d_2/Conv2D" [op=Conv2D]; +"inception_resnet_v2/batch_normalization_2/Const" [op=Const]; +"inception_resnet_v2/batch_normalization_2/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_2/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_2/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_2/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_2/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_2/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_2/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_resnet_v2/activation_2/Relu" [op=Relu]; +"inception_resnet_v2/max_pooling2d/MaxPool" [op=MaxPool]; +"inception_resnet_v2/conv2d_3/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_3/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_3/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/conv2d_3/Round" [op=Round]; +"inception_resnet_v2/conv2d_3/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_3/ReadVariableOp" [op=ReadVariableOp]; 
+"inception_resnet_v2/conv2d_3/mul" [op=Mul]; +"inception_resnet_v2/conv2d_3/Conv2D" [op=Conv2D]; +"inception_resnet_v2/batch_normalization_3/Const" [op=Const]; +"inception_resnet_v2/batch_normalization_3/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_3/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_3/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_3/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_3/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_3/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_3/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_resnet_v2/activation_3/Relu" [op=Relu]; +"inception_resnet_v2/conv2d_4/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_4/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_4/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/conv2d_4/Round" [op=Round]; +"inception_resnet_v2/conv2d_4/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_4/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_4/mul" [op=Mul]; +"inception_resnet_v2/conv2d_4/Conv2D" [op=Conv2D]; +"inception_resnet_v2/batch_normalization_4/Const" [op=Const]; +"inception_resnet_v2/batch_normalization_4/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_4/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_4/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_4/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_4/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_4/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_4/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_resnet_v2/activation_4/Relu" [op=Relu]; +"inception_resnet_v2/max_pooling2d_1/MaxPool" [op=MaxPool]; +"inception_resnet_v2/conv2d_8/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_8/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_8/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/conv2d_8/Round" [op=Round]; +"inception_resnet_v2/conv2d_8/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_8/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_8/mul" [op=Mul]; +"inception_resnet_v2/conv2d_8/Conv2D" [op=Conv2D]; +"inception_resnet_v2/batch_normalization_8/Const" [op=Const]; +"inception_resnet_v2/batch_normalization_8/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_8/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_8/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_8/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_8/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_8/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_8/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_resnet_v2/activation_8/Relu" [op=Relu]; +"inception_resnet_v2/conv2d_9/Sigmoid/ReadVariableOp/resource" 
[op=Placeholder]; +"inception_resnet_v2/conv2d_9/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_9/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/conv2d_9/Round" [op=Round]; +"inception_resnet_v2/conv2d_9/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_9/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_9/mul" [op=Mul]; +"inception_resnet_v2/conv2d_9/Conv2D" [op=Conv2D]; +"inception_resnet_v2/conv2d_6/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_6/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_6/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/conv2d_6/Round" [op=Round]; +"inception_resnet_v2/conv2d_6/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_6/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_6/mul" [op=Mul]; +"inception_resnet_v2/conv2d_6/Conv2D" [op=Conv2D]; +"inception_resnet_v2/batch_normalization_9/Const" [op=Const]; +"inception_resnet_v2/batch_normalization_9/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_9/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_9/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_9/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_9/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_9/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_9/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_resnet_v2/batch_normalization_6/Const" [op=Const]; +"inception_resnet_v2/batch_normalization_6/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_6/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_6/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_6/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_6/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_6/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_6/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_resnet_v2/average_pooling2d/AvgPool" [op=AvgPool]; +"inception_resnet_v2/activation_9/Relu" [op=Relu]; +"inception_resnet_v2/activation_6/Relu" [op=Relu]; +"inception_resnet_v2/conv2d_11/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_11/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_11/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/conv2d_11/Round" [op=Round]; +"inception_resnet_v2/conv2d_11/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_11/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_11/mul" [op=Mul]; +"inception_resnet_v2/conv2d_11/Conv2D" [op=Conv2D]; +"inception_resnet_v2/conv2d_10/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_10/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_10/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/conv2d_10/Round" [op=Round]; +"inception_resnet_v2/conv2d_10/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_10/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_10/mul" [op=Mul]; 
+"inception_resnet_v2/conv2d_10/Conv2D" [op=Conv2D]; +"inception_resnet_v2/conv2d_7/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_7/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_7/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/conv2d_7/Round" [op=Round]; +"inception_resnet_v2/conv2d_7/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_7/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_7/mul" [op=Mul]; +"inception_resnet_v2/conv2d_7/Conv2D" [op=Conv2D]; +"inception_resnet_v2/conv2d_5/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_5/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_5/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/conv2d_5/Round" [op=Round]; +"inception_resnet_v2/conv2d_5/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_5/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_5/mul" [op=Mul]; +"inception_resnet_v2/conv2d_5/Conv2D" [op=Conv2D]; +"inception_resnet_v2/batch_normalization_11/Const" [op=Const]; +"inception_resnet_v2/batch_normalization_11/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_11/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_11/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_11/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_11/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_11/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_11/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_resnet_v2/batch_normalization_10/Const" [op=Const]; +"inception_resnet_v2/batch_normalization_10/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_10/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_10/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_10/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_10/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_10/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_10/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_resnet_v2/batch_normalization_7/Const" [op=Const]; +"inception_resnet_v2/batch_normalization_7/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_7/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_7/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_7/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_7/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_7/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_7/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_resnet_v2/batch_normalization_5/Const" [op=Const]; +"inception_resnet_v2/batch_normalization_5/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_5/ReadVariableOp" [op=ReadVariableOp]; 
+"inception_resnet_v2/batch_normalization_5/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_5/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_5/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_5/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_5/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_resnet_v2/activation_5/Relu" [op=Relu]; +"inception_resnet_v2/activation_7/Relu" [op=Relu]; +"inception_resnet_v2/activation_10/Relu" [op=Relu]; +"inception_resnet_v2/activation_11/Relu" [op=Relu]; +"inception_resnet_v2/mixed_5b/concat/axis" [op=Const]; +"inception_resnet_v2/mixed_5b/concat" [op=ConcatV2]; +"inception_resnet_v2/conv2d_15/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_15/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_15/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/conv2d_15/Round" [op=Round]; +"inception_resnet_v2/conv2d_15/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_15/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_15/mul" [op=Mul]; +"inception_resnet_v2/conv2d_15/Conv2D" [op=Conv2D]; +"inception_resnet_v2/batch_normalization_15/Const" [op=Const]; +"inception_resnet_v2/batch_normalization_15/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_15/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_15/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_15/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_15/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_15/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_15/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_resnet_v2/activation_15/Relu" [op=Relu]; +"inception_resnet_v2/conv2d_16/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_16/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_16/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/conv2d_16/Round" [op=Round]; +"inception_resnet_v2/conv2d_16/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_16/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_16/mul" [op=Mul]; +"inception_resnet_v2/conv2d_16/Conv2D" [op=Conv2D]; +"inception_resnet_v2/conv2d_13/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_13/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_13/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/conv2d_13/Round" [op=Round]; +"inception_resnet_v2/conv2d_13/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_13/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_13/mul" [op=Mul]; +"inception_resnet_v2/conv2d_13/Conv2D" [op=Conv2D]; +"inception_resnet_v2/batch_normalization_16/Const" [op=Const]; +"inception_resnet_v2/batch_normalization_16/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_16/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_16/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_16/FusedBatchNormV3/ReadVariableOp" 
[op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_16/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_16/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_16/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_resnet_v2/batch_normalization_13/Const" [op=Const]; +"inception_resnet_v2/batch_normalization_13/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_13/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_13/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_13/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_13/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_13/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_13/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_resnet_v2/activation_16/Relu" [op=Relu]; +"inception_resnet_v2/activation_13/Relu" [op=Relu]; +"inception_resnet_v2/conv2d_17/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_17/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_17/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/conv2d_17/Round" [op=Round]; +"inception_resnet_v2/conv2d_17/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_17/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_17/mul" [op=Mul]; +"inception_resnet_v2/conv2d_17/Conv2D" [op=Conv2D]; +"inception_resnet_v2/conv2d_14/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_14/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_14/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/conv2d_14/Round" [op=Round]; +"inception_resnet_v2/conv2d_14/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_14/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_14/mul" [op=Mul]; +"inception_resnet_v2/conv2d_14/Conv2D" [op=Conv2D]; +"inception_resnet_v2/conv2d_12/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_12/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_12/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/conv2d_12/Round" [op=Round]; +"inception_resnet_v2/conv2d_12/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_12/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_12/mul" [op=Mul]; +"inception_resnet_v2/conv2d_12/Conv2D" [op=Conv2D]; +"inception_resnet_v2/batch_normalization_17/Const" [op=Const]; +"inception_resnet_v2/batch_normalization_17/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_17/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_17/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_17/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_17/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_17/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_17/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_resnet_v2/batch_normalization_14/Const" [op=Const]; +"inception_resnet_v2/batch_normalization_14/ReadVariableOp/resource" 
[op=Placeholder]; +"inception_resnet_v2/batch_normalization_14/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_14/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_14/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_14/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_14/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_14/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_resnet_v2/batch_normalization_12/Const" [op=Const]; +"inception_resnet_v2/batch_normalization_12/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_12/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_12/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_12/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_12/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_12/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_12/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_resnet_v2/activation_12/Relu" [op=Relu]; +"inception_resnet_v2/activation_14/Relu" [op=Relu]; +"inception_resnet_v2/activation_17/Relu" [op=Relu]; +"inception_resnet_v2/block35_1_mixed/concat/axis" [op=Const]; +"inception_resnet_v2/block35_1_mixed/concat" [op=ConcatV2]; +"inception_resnet_v2/block35_1_conv/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/block35_1_conv/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/block35_1_conv/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/block35_1_conv/Round" [op=Round]; +"inception_resnet_v2/block35_1_conv/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/block35_1_conv/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/block35_1_conv/mul" [op=Mul]; +"inception_resnet_v2/block35_1_conv/Conv2D" [op=Conv2D]; +"inception_resnet_v2/block35_1_conv/BiasAdd/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/block35_1_conv/BiasAdd/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/block35_1_conv/BiasAdd" [op=BiasAdd]; +"inception_resnet_v2/custom_scale_layer/mul/y" [op=Const]; +"inception_resnet_v2/custom_scale_layer/mul" [op=Mul]; +"inception_resnet_v2/custom_scale_layer/add" [op=AddV2]; +"inception_resnet_v2/block35_1_ac/Relu" [op=Relu]; +"inception_resnet_v2/conv2d_21/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_21/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_21/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/conv2d_21/Round" [op=Round]; +"inception_resnet_v2/conv2d_21/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_21/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_21/mul" [op=Mul]; +"inception_resnet_v2/conv2d_21/Conv2D" [op=Conv2D]; +"inception_resnet_v2/batch_normalization_21/Const" [op=Const]; +"inception_resnet_v2/batch_normalization_21/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_21/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_21/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_21/FusedBatchNormV3/ReadVariableOp" 
[op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_21/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_21/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_21/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_resnet_v2/activation_21/Relu" [op=Relu]; +"inception_resnet_v2/conv2d_22/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_22/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_22/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/conv2d_22/Round" [op=Round]; +"inception_resnet_v2/conv2d_22/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_22/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_22/mul" [op=Mul]; +"inception_resnet_v2/conv2d_22/Conv2D" [op=Conv2D]; +"inception_resnet_v2/conv2d_19/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_19/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_19/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/conv2d_19/Round" [op=Round]; +"inception_resnet_v2/conv2d_19/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_19/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_19/mul" [op=Mul]; +"inception_resnet_v2/conv2d_19/Conv2D" [op=Conv2D]; +"inception_resnet_v2/batch_normalization_22/Const" [op=Const]; +"inception_resnet_v2/batch_normalization_22/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_22/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_22/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_22/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_22/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_22/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_22/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_resnet_v2/batch_normalization_19/Const" [op=Const]; +"inception_resnet_v2/batch_normalization_19/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_19/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_19/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_19/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_19/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_19/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_19/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_resnet_v2/activation_22/Relu" [op=Relu]; +"inception_resnet_v2/activation_19/Relu" [op=Relu]; +"inception_resnet_v2/conv2d_23/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_23/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_23/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/conv2d_23/Round" [op=Round]; +"inception_resnet_v2/conv2d_23/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_23/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_23/mul" [op=Mul]; +"inception_resnet_v2/conv2d_23/Conv2D" [op=Conv2D]; +"inception_resnet_v2/conv2d_20/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; 
+"inception_resnet_v2/conv2d_20/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_20/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/conv2d_20/Round" [op=Round]; +"inception_resnet_v2/conv2d_20/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_20/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_20/mul" [op=Mul]; +"inception_resnet_v2/conv2d_20/Conv2D" [op=Conv2D]; +"inception_resnet_v2/conv2d_18/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_18/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_18/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/conv2d_18/Round" [op=Round]; +"inception_resnet_v2/conv2d_18/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_18/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_18/mul" [op=Mul]; +"inception_resnet_v2/conv2d_18/Conv2D" [op=Conv2D]; +"inception_resnet_v2/batch_normalization_23/Const" [op=Const]; +"inception_resnet_v2/batch_normalization_23/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_23/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_23/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_23/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_23/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_23/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_23/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_resnet_v2/batch_normalization_20/Const" [op=Const]; +"inception_resnet_v2/batch_normalization_20/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_20/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_20/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_20/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_20/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_20/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_20/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_resnet_v2/batch_normalization_18/Const" [op=Const]; +"inception_resnet_v2/batch_normalization_18/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_18/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_18/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_18/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_18/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_18/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_18/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_resnet_v2/activation_18/Relu" [op=Relu]; +"inception_resnet_v2/activation_20/Relu" [op=Relu]; +"inception_resnet_v2/activation_23/Relu" [op=Relu]; +"inception_resnet_v2/block35_2_mixed/concat/axis" [op=Const]; +"inception_resnet_v2/block35_2_mixed/concat" [op=ConcatV2]; +"inception_resnet_v2/block35_2_conv/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; 
+"inception_resnet_v2/block35_2_conv/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/block35_2_conv/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/block35_2_conv/Round" [op=Round]; +"inception_resnet_v2/block35_2_conv/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/block35_2_conv/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/block35_2_conv/mul" [op=Mul]; +"inception_resnet_v2/block35_2_conv/Conv2D" [op=Conv2D]; +"inception_resnet_v2/block35_2_conv/BiasAdd/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/block35_2_conv/BiasAdd/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/block35_2_conv/BiasAdd" [op=BiasAdd]; +"inception_resnet_v2/custom_scale_layer_1/mul/y" [op=Const]; +"inception_resnet_v2/custom_scale_layer_1/mul" [op=Mul]; +"inception_resnet_v2/custom_scale_layer_1/add" [op=AddV2]; +"inception_resnet_v2/block35_2_ac/Relu" [op=Relu]; +"inception_resnet_v2/conv2d_27/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_27/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_27/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/conv2d_27/Round" [op=Round]; +"inception_resnet_v2/conv2d_27/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_27/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_27/mul" [op=Mul]; +"inception_resnet_v2/conv2d_27/Conv2D" [op=Conv2D]; +"inception_resnet_v2/batch_normalization_27/Const" [op=Const]; +"inception_resnet_v2/batch_normalization_27/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_27/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_27/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_27/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_27/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_27/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_27/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_resnet_v2/activation_27/Relu" [op=Relu]; +"inception_resnet_v2/conv2d_28/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_28/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_28/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/conv2d_28/Round" [op=Round]; +"inception_resnet_v2/conv2d_28/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_28/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_28/mul" [op=Mul]; +"inception_resnet_v2/conv2d_28/Conv2D" [op=Conv2D]; +"inception_resnet_v2/conv2d_25/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_25/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_25/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/conv2d_25/Round" [op=Round]; +"inception_resnet_v2/conv2d_25/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_25/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_25/mul" [op=Mul]; +"inception_resnet_v2/conv2d_25/Conv2D" [op=Conv2D]; +"inception_resnet_v2/batch_normalization_28/Const" [op=Const]; +"inception_resnet_v2/batch_normalization_28/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_28/ReadVariableOp" [op=ReadVariableOp]; 
+"inception_resnet_v2/batch_normalization_28/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_28/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_28/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_28/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_28/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_resnet_v2/batch_normalization_25/Const" [op=Const]; +"inception_resnet_v2/batch_normalization_25/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_25/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_25/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_25/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_25/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_25/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_25/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_resnet_v2/activation_28/Relu" [op=Relu]; +"inception_resnet_v2/activation_25/Relu" [op=Relu]; +"inception_resnet_v2/conv2d_29/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_29/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_29/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/conv2d_29/Round" [op=Round]; +"inception_resnet_v2/conv2d_29/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_29/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_29/mul" [op=Mul]; +"inception_resnet_v2/conv2d_29/Conv2D" [op=Conv2D]; +"inception_resnet_v2/conv2d_26/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_26/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_26/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/conv2d_26/Round" [op=Round]; +"inception_resnet_v2/conv2d_26/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_26/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_26/mul" [op=Mul]; +"inception_resnet_v2/conv2d_26/Conv2D" [op=Conv2D]; +"inception_resnet_v2/conv2d_24/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_24/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_24/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/conv2d_24/Round" [op=Round]; +"inception_resnet_v2/conv2d_24/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_24/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_24/mul" [op=Mul]; +"inception_resnet_v2/conv2d_24/Conv2D" [op=Conv2D]; +"inception_resnet_v2/batch_normalization_29/Const" [op=Const]; +"inception_resnet_v2/batch_normalization_29/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_29/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_29/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_29/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_29/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_29/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; 
+"inception_resnet_v2/batch_normalization_29/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_resnet_v2/batch_normalization_26/Const" [op=Const]; +"inception_resnet_v2/batch_normalization_26/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_26/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_26/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_26/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_26/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_26/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_26/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_resnet_v2/batch_normalization_24/Const" [op=Const]; +"inception_resnet_v2/batch_normalization_24/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_24/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_24/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_24/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_24/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_24/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_24/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_resnet_v2/activation_24/Relu" [op=Relu]; +"inception_resnet_v2/activation_26/Relu" [op=Relu]; +"inception_resnet_v2/activation_29/Relu" [op=Relu]; +"inception_resnet_v2/block35_3_mixed/concat/axis" [op=Const]; +"inception_resnet_v2/block35_3_mixed/concat" [op=ConcatV2]; +"inception_resnet_v2/block35_3_conv/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/block35_3_conv/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/block35_3_conv/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/block35_3_conv/Round" [op=Round]; +"inception_resnet_v2/block35_3_conv/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/block35_3_conv/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/block35_3_conv/mul" [op=Mul]; +"inception_resnet_v2/block35_3_conv/Conv2D" [op=Conv2D]; +"inception_resnet_v2/block35_3_conv/BiasAdd/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/block35_3_conv/BiasAdd/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/block35_3_conv/BiasAdd" [op=BiasAdd]; +"inception_resnet_v2/custom_scale_layer_2/mul/y" [op=Const]; +"inception_resnet_v2/custom_scale_layer_2/mul" [op=Mul]; +"inception_resnet_v2/custom_scale_layer_2/add" [op=AddV2]; +"inception_resnet_v2/block35_3_ac/Relu" [op=Relu]; +"inception_resnet_v2/conv2d_33/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_33/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_33/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/conv2d_33/Round" [op=Round]; +"inception_resnet_v2/conv2d_33/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_33/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_33/mul" [op=Mul]; +"inception_resnet_v2/conv2d_33/Conv2D" [op=Conv2D]; +"inception_resnet_v2/batch_normalization_33/Const" [op=Const]; +"inception_resnet_v2/batch_normalization_33/ReadVariableOp/resource" [op=Placeholder]; 
+"inception_resnet_v2/batch_normalization_33/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_33/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_33/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_33/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_33/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_33/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_resnet_v2/activation_33/Relu" [op=Relu]; +"inception_resnet_v2/conv2d_34/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_34/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_34/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/conv2d_34/Round" [op=Round]; +"inception_resnet_v2/conv2d_34/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_34/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_34/mul" [op=Mul]; +"inception_resnet_v2/conv2d_34/Conv2D" [op=Conv2D]; +"inception_resnet_v2/conv2d_31/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_31/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_31/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/conv2d_31/Round" [op=Round]; +"inception_resnet_v2/conv2d_31/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_31/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_31/mul" [op=Mul]; +"inception_resnet_v2/conv2d_31/Conv2D" [op=Conv2D]; +"inception_resnet_v2/batch_normalization_34/Const" [op=Const]; +"inception_resnet_v2/batch_normalization_34/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_34/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_34/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_34/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_34/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_34/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_34/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_resnet_v2/batch_normalization_31/Const" [op=Const]; +"inception_resnet_v2/batch_normalization_31/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_31/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_31/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_31/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_31/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_31/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_31/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_resnet_v2/activation_34/Relu" [op=Relu]; +"inception_resnet_v2/activation_31/Relu" [op=Relu]; +"inception_resnet_v2/conv2d_35/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_35/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_35/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/conv2d_35/Round" [op=Round]; +"inception_resnet_v2/conv2d_35/ReadVariableOp/resource" 
[op=Placeholder]; +"inception_resnet_v2/conv2d_35/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_35/mul" [op=Mul]; +"inception_resnet_v2/conv2d_35/Conv2D" [op=Conv2D]; +"inception_resnet_v2/conv2d_32/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_32/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_32/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/conv2d_32/Round" [op=Round]; +"inception_resnet_v2/conv2d_32/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_32/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_32/mul" [op=Mul]; +"inception_resnet_v2/conv2d_32/Conv2D" [op=Conv2D]; +"inception_resnet_v2/conv2d_30/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_30/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_30/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/conv2d_30/Round" [op=Round]; +"inception_resnet_v2/conv2d_30/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_30/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_30/mul" [op=Mul]; +"inception_resnet_v2/conv2d_30/Conv2D" [op=Conv2D]; +"inception_resnet_v2/batch_normalization_35/Const" [op=Const]; +"inception_resnet_v2/batch_normalization_35/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_35/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_35/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_35/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_35/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_35/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_35/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_resnet_v2/batch_normalization_32/Const" [op=Const]; +"inception_resnet_v2/batch_normalization_32/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_32/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_32/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_32/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_32/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_32/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_32/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_resnet_v2/batch_normalization_30/Const" [op=Const]; +"inception_resnet_v2/batch_normalization_30/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_30/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_30/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_30/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_30/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_30/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_30/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_resnet_v2/activation_30/Relu" [op=Relu]; +"inception_resnet_v2/activation_32/Relu" [op=Relu]; +"inception_resnet_v2/activation_35/Relu" 
[op=Relu]; +"inception_resnet_v2/block35_4_mixed/concat/axis" [op=Const]; +"inception_resnet_v2/block35_4_mixed/concat" [op=ConcatV2]; +"inception_resnet_v2/block35_4_conv/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/block35_4_conv/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/block35_4_conv/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/block35_4_conv/Round" [op=Round]; +"inception_resnet_v2/block35_4_conv/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/block35_4_conv/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/block35_4_conv/mul" [op=Mul]; +"inception_resnet_v2/block35_4_conv/Conv2D" [op=Conv2D]; +"inception_resnet_v2/block35_4_conv/BiasAdd/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/block35_4_conv/BiasAdd/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/block35_4_conv/BiasAdd" [op=BiasAdd]; +"inception_resnet_v2/custom_scale_layer_3/mul/y" [op=Const]; +"inception_resnet_v2/custom_scale_layer_3/mul" [op=Mul]; +"inception_resnet_v2/custom_scale_layer_3/add" [op=AddV2]; +"inception_resnet_v2/block35_4_ac/Relu" [op=Relu]; +"inception_resnet_v2/conv2d_39/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_39/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_39/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/conv2d_39/Round" [op=Round]; +"inception_resnet_v2/conv2d_39/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_39/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_39/mul" [op=Mul]; +"inception_resnet_v2/conv2d_39/Conv2D" [op=Conv2D]; +"inception_resnet_v2/batch_normalization_39/Const" [op=Const]; +"inception_resnet_v2/batch_normalization_39/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_39/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_39/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_39/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_39/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_39/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_39/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_resnet_v2/activation_39/Relu" [op=Relu]; +"inception_resnet_v2/conv2d_40/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_40/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_40/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/conv2d_40/Round" [op=Round]; +"inception_resnet_v2/conv2d_40/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_40/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_40/mul" [op=Mul]; +"inception_resnet_v2/conv2d_40/Conv2D" [op=Conv2D]; +"inception_resnet_v2/conv2d_37/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_37/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_37/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/conv2d_37/Round" [op=Round]; +"inception_resnet_v2/conv2d_37/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_37/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_37/mul" [op=Mul]; +"inception_resnet_v2/conv2d_37/Conv2D" [op=Conv2D]; +"inception_resnet_v2/batch_normalization_40/Const" [op=Const]; 
+"inception_resnet_v2/batch_normalization_40/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_40/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_40/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_40/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_40/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_40/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_40/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_resnet_v2/batch_normalization_37/Const" [op=Const]; +"inception_resnet_v2/batch_normalization_37/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_37/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_37/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_37/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_37/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_37/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_37/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_resnet_v2/activation_40/Relu" [op=Relu]; +"inception_resnet_v2/activation_37/Relu" [op=Relu]; +"inception_resnet_v2/conv2d_41/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_41/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_41/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/conv2d_41/Round" [op=Round]; +"inception_resnet_v2/conv2d_41/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_41/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_41/mul" [op=Mul]; +"inception_resnet_v2/conv2d_41/Conv2D" [op=Conv2D]; +"inception_resnet_v2/conv2d_38/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_38/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_38/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/conv2d_38/Round" [op=Round]; +"inception_resnet_v2/conv2d_38/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_38/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_38/mul" [op=Mul]; +"inception_resnet_v2/conv2d_38/Conv2D" [op=Conv2D]; +"inception_resnet_v2/conv2d_36/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_36/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_36/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/conv2d_36/Round" [op=Round]; +"inception_resnet_v2/conv2d_36/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_36/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_36/mul" [op=Mul]; +"inception_resnet_v2/conv2d_36/Conv2D" [op=Conv2D]; +"inception_resnet_v2/batch_normalization_41/Const" [op=Const]; +"inception_resnet_v2/batch_normalization_41/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_41/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_41/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_41/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; 
+"inception_resnet_v2/batch_normalization_41/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_41/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_41/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_resnet_v2/batch_normalization_38/Const" [op=Const]; +"inception_resnet_v2/batch_normalization_38/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_38/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_38/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_38/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_38/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_38/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_38/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_resnet_v2/batch_normalization_36/Const" [op=Const]; +"inception_resnet_v2/batch_normalization_36/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_36/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_36/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_36/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_36/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_36/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_36/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_resnet_v2/activation_36/Relu" [op=Relu]; +"inception_resnet_v2/activation_38/Relu" [op=Relu]; +"inception_resnet_v2/activation_41/Relu" [op=Relu]; +"inception_resnet_v2/block35_5_mixed/concat/axis" [op=Const]; +"inception_resnet_v2/block35_5_mixed/concat" [op=ConcatV2]; +"inception_resnet_v2/block35_5_conv/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/block35_5_conv/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/block35_5_conv/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/block35_5_conv/Round" [op=Round]; +"inception_resnet_v2/block35_5_conv/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/block35_5_conv/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/block35_5_conv/mul" [op=Mul]; +"inception_resnet_v2/block35_5_conv/Conv2D" [op=Conv2D]; +"inception_resnet_v2/block35_5_conv/BiasAdd/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/block35_5_conv/BiasAdd/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/block35_5_conv/BiasAdd" [op=BiasAdd]; +"inception_resnet_v2/custom_scale_layer_4/mul/y" [op=Const]; +"inception_resnet_v2/custom_scale_layer_4/mul" [op=Mul]; +"inception_resnet_v2/custom_scale_layer_4/add" [op=AddV2]; +"inception_resnet_v2/block35_5_ac/Relu" [op=Relu]; +"inception_resnet_v2/conv2d_45/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_45/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_45/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/conv2d_45/Round" [op=Round]; +"inception_resnet_v2/conv2d_45/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_45/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_45/mul" [op=Mul]; +"inception_resnet_v2/conv2d_45/Conv2D" 
[op=Conv2D]; +"inception_resnet_v2/batch_normalization_45/Const" [op=Const]; +"inception_resnet_v2/batch_normalization_45/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_45/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_45/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_45/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_45/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_45/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_45/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_resnet_v2/activation_45/Relu" [op=Relu]; +"inception_resnet_v2/conv2d_46/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_46/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_46/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/conv2d_46/Round" [op=Round]; +"inception_resnet_v2/conv2d_46/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_46/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_46/mul" [op=Mul]; +"inception_resnet_v2/conv2d_46/Conv2D" [op=Conv2D]; +"inception_resnet_v2/conv2d_43/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_43/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_43/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/conv2d_43/Round" [op=Round]; +"inception_resnet_v2/conv2d_43/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_43/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_43/mul" [op=Mul]; +"inception_resnet_v2/conv2d_43/Conv2D" [op=Conv2D]; +"inception_resnet_v2/batch_normalization_46/Const" [op=Const]; +"inception_resnet_v2/batch_normalization_46/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_46/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_46/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_46/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_46/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_46/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_46/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_resnet_v2/batch_normalization_43/Const" [op=Const]; +"inception_resnet_v2/batch_normalization_43/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_43/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_43/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_43/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_43/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_43/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_43/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_resnet_v2/activation_46/Relu" [op=Relu]; +"inception_resnet_v2/activation_43/Relu" [op=Relu]; +"inception_resnet_v2/conv2d_47/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_47/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; 
+"inception_resnet_v2/conv2d_47/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/conv2d_47/Round" [op=Round]; +"inception_resnet_v2/conv2d_47/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_47/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_47/mul" [op=Mul]; +"inception_resnet_v2/conv2d_47/Conv2D" [op=Conv2D]; +"inception_resnet_v2/conv2d_44/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_44/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_44/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/conv2d_44/Round" [op=Round]; +"inception_resnet_v2/conv2d_44/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_44/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_44/mul" [op=Mul]; +"inception_resnet_v2/conv2d_44/Conv2D" [op=Conv2D]; +"inception_resnet_v2/conv2d_42/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_42/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_42/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/conv2d_42/Round" [op=Round]; +"inception_resnet_v2/conv2d_42/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_42/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_42/mul" [op=Mul]; +"inception_resnet_v2/conv2d_42/Conv2D" [op=Conv2D]; +"inception_resnet_v2/batch_normalization_47/Const" [op=Const]; +"inception_resnet_v2/batch_normalization_47/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_47/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_47/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_47/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_47/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_47/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_47/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_resnet_v2/batch_normalization_44/Const" [op=Const]; +"inception_resnet_v2/batch_normalization_44/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_44/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_44/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_44/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_44/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_44/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_44/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_resnet_v2/batch_normalization_42/Const" [op=Const]; +"inception_resnet_v2/batch_normalization_42/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_42/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_42/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_42/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_42/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_42/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_42/FusedBatchNormV3" 
[op=FusedBatchNormV3]; +"inception_resnet_v2/activation_42/Relu" [op=Relu]; +"inception_resnet_v2/activation_44/Relu" [op=Relu]; +"inception_resnet_v2/activation_47/Relu" [op=Relu]; +"inception_resnet_v2/block35_6_mixed/concat/axis" [op=Const]; +"inception_resnet_v2/block35_6_mixed/concat" [op=ConcatV2]; +"inception_resnet_v2/block35_6_conv/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/block35_6_conv/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/block35_6_conv/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/block35_6_conv/Round" [op=Round]; +"inception_resnet_v2/block35_6_conv/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/block35_6_conv/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/block35_6_conv/mul" [op=Mul]; +"inception_resnet_v2/block35_6_conv/Conv2D" [op=Conv2D]; +"inception_resnet_v2/block35_6_conv/BiasAdd/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/block35_6_conv/BiasAdd/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/block35_6_conv/BiasAdd" [op=BiasAdd]; +"inception_resnet_v2/custom_scale_layer_5/mul/y" [op=Const]; +"inception_resnet_v2/custom_scale_layer_5/mul" [op=Mul]; +"inception_resnet_v2/custom_scale_layer_5/add" [op=AddV2]; +"inception_resnet_v2/block35_6_ac/Relu" [op=Relu]; +"inception_resnet_v2/conv2d_51/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_51/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_51/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/conv2d_51/Round" [op=Round]; +"inception_resnet_v2/conv2d_51/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_51/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_51/mul" [op=Mul]; +"inception_resnet_v2/conv2d_51/Conv2D" [op=Conv2D]; +"inception_resnet_v2/batch_normalization_51/Const" [op=Const]; +"inception_resnet_v2/batch_normalization_51/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_51/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_51/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_51/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_51/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_51/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_51/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_resnet_v2/activation_51/Relu" [op=Relu]; +"inception_resnet_v2/conv2d_52/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_52/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_52/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/conv2d_52/Round" [op=Round]; +"inception_resnet_v2/conv2d_52/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_52/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_52/mul" [op=Mul]; +"inception_resnet_v2/conv2d_52/Conv2D" [op=Conv2D]; +"inception_resnet_v2/conv2d_49/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_49/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_49/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/conv2d_49/Round" [op=Round]; +"inception_resnet_v2/conv2d_49/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_49/ReadVariableOp" [op=ReadVariableOp]; 
+"inception_resnet_v2/conv2d_49/mul" [op=Mul]; +"inception_resnet_v2/conv2d_49/Conv2D" [op=Conv2D]; +"inception_resnet_v2/batch_normalization_52/Const" [op=Const]; +"inception_resnet_v2/batch_normalization_52/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_52/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_52/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_52/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_52/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_52/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_52/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_resnet_v2/batch_normalization_49/Const" [op=Const]; +"inception_resnet_v2/batch_normalization_49/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_49/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_49/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_49/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_49/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_49/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_49/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_resnet_v2/activation_52/Relu" [op=Relu]; +"inception_resnet_v2/activation_49/Relu" [op=Relu]; +"inception_resnet_v2/conv2d_53/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_53/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_53/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/conv2d_53/Round" [op=Round]; +"inception_resnet_v2/conv2d_53/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_53/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_53/mul" [op=Mul]; +"inception_resnet_v2/conv2d_53/Conv2D" [op=Conv2D]; +"inception_resnet_v2/conv2d_50/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_50/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_50/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/conv2d_50/Round" [op=Round]; +"inception_resnet_v2/conv2d_50/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_50/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_50/mul" [op=Mul]; +"inception_resnet_v2/conv2d_50/Conv2D" [op=Conv2D]; +"inception_resnet_v2/conv2d_48/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_48/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_48/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/conv2d_48/Round" [op=Round]; +"inception_resnet_v2/conv2d_48/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_48/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_48/mul" [op=Mul]; +"inception_resnet_v2/conv2d_48/Conv2D" [op=Conv2D]; +"inception_resnet_v2/batch_normalization_53/Const" [op=Const]; +"inception_resnet_v2/batch_normalization_53/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_53/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_53/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; 
+"inception_resnet_v2/batch_normalization_53/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_53/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_53/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_53/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_resnet_v2/batch_normalization_50/Const" [op=Const]; +"inception_resnet_v2/batch_normalization_50/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_50/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_50/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_50/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_50/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_50/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_50/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_resnet_v2/batch_normalization_48/Const" [op=Const]; +"inception_resnet_v2/batch_normalization_48/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_48/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_48/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_48/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_48/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_48/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_48/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_resnet_v2/activation_48/Relu" [op=Relu]; +"inception_resnet_v2/activation_50/Relu" [op=Relu]; +"inception_resnet_v2/activation_53/Relu" [op=Relu]; +"inception_resnet_v2/block35_7_mixed/concat/axis" [op=Const]; +"inception_resnet_v2/block35_7_mixed/concat" [op=ConcatV2]; +"inception_resnet_v2/block35_7_conv/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/block35_7_conv/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/block35_7_conv/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/block35_7_conv/Round" [op=Round]; +"inception_resnet_v2/block35_7_conv/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/block35_7_conv/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/block35_7_conv/mul" [op=Mul]; +"inception_resnet_v2/block35_7_conv/Conv2D" [op=Conv2D]; +"inception_resnet_v2/block35_7_conv/BiasAdd/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/block35_7_conv/BiasAdd/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/block35_7_conv/BiasAdd" [op=BiasAdd]; +"inception_resnet_v2/custom_scale_layer_6/mul/y" [op=Const]; +"inception_resnet_v2/custom_scale_layer_6/mul" [op=Mul]; +"inception_resnet_v2/custom_scale_layer_6/add" [op=AddV2]; +"inception_resnet_v2/block35_7_ac/Relu" [op=Relu]; +"inception_resnet_v2/conv2d_57/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_57/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_57/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/conv2d_57/Round" [op=Round]; +"inception_resnet_v2/conv2d_57/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_57/ReadVariableOp" 
[op=ReadVariableOp]; +"inception_resnet_v2/conv2d_57/mul" [op=Mul]; +"inception_resnet_v2/conv2d_57/Conv2D" [op=Conv2D]; +"inception_resnet_v2/batch_normalization_57/Const" [op=Const]; +"inception_resnet_v2/batch_normalization_57/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_57/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_57/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_57/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_57/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_57/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_57/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_resnet_v2/activation_57/Relu" [op=Relu]; +"inception_resnet_v2/conv2d_58/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_58/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_58/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/conv2d_58/Round" [op=Round]; +"inception_resnet_v2/conv2d_58/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_58/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_58/mul" [op=Mul]; +"inception_resnet_v2/conv2d_58/Conv2D" [op=Conv2D]; +"inception_resnet_v2/conv2d_55/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_55/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_55/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/conv2d_55/Round" [op=Round]; +"inception_resnet_v2/conv2d_55/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_55/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_55/mul" [op=Mul]; +"inception_resnet_v2/conv2d_55/Conv2D" [op=Conv2D]; +"inception_resnet_v2/batch_normalization_58/Const" [op=Const]; +"inception_resnet_v2/batch_normalization_58/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_58/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_58/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_58/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_58/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_58/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_58/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_resnet_v2/batch_normalization_55/Const" [op=Const]; +"inception_resnet_v2/batch_normalization_55/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_55/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_55/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_55/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_55/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_55/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_55/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_resnet_v2/activation_58/Relu" [op=Relu]; +"inception_resnet_v2/activation_55/Relu" [op=Relu]; 
+"inception_resnet_v2/conv2d_59/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_59/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_59/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/conv2d_59/Round" [op=Round]; +"inception_resnet_v2/conv2d_59/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_59/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_59/mul" [op=Mul]; +"inception_resnet_v2/conv2d_59/Conv2D" [op=Conv2D]; +"inception_resnet_v2/conv2d_56/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_56/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_56/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/conv2d_56/Round" [op=Round]; +"inception_resnet_v2/conv2d_56/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_56/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_56/mul" [op=Mul]; +"inception_resnet_v2/conv2d_56/Conv2D" [op=Conv2D]; +"inception_resnet_v2/conv2d_54/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_54/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_54/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/conv2d_54/Round" [op=Round]; +"inception_resnet_v2/conv2d_54/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_54/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_54/mul" [op=Mul]; +"inception_resnet_v2/conv2d_54/Conv2D" [op=Conv2D]; +"inception_resnet_v2/batch_normalization_59/Const" [op=Const]; +"inception_resnet_v2/batch_normalization_59/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_59/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_59/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_59/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_59/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_59/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_59/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_resnet_v2/batch_normalization_56/Const" [op=Const]; +"inception_resnet_v2/batch_normalization_56/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_56/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_56/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_56/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_56/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_56/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_56/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_resnet_v2/batch_normalization_54/Const" [op=Const]; +"inception_resnet_v2/batch_normalization_54/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_54/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_54/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_54/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_54/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; 
+"inception_resnet_v2/batch_normalization_54/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_54/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_resnet_v2/activation_54/Relu" [op=Relu]; +"inception_resnet_v2/activation_56/Relu" [op=Relu]; +"inception_resnet_v2/activation_59/Relu" [op=Relu]; +"inception_resnet_v2/block35_8_mixed/concat/axis" [op=Const]; +"inception_resnet_v2/block35_8_mixed/concat" [op=ConcatV2]; +"inception_resnet_v2/block35_8_conv/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/block35_8_conv/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/block35_8_conv/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/block35_8_conv/Round" [op=Round]; +"inception_resnet_v2/block35_8_conv/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/block35_8_conv/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/block35_8_conv/mul" [op=Mul]; +"inception_resnet_v2/block35_8_conv/Conv2D" [op=Conv2D]; +"inception_resnet_v2/block35_8_conv/BiasAdd/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/block35_8_conv/BiasAdd/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/block35_8_conv/BiasAdd" [op=BiasAdd]; +"inception_resnet_v2/custom_scale_layer_7/mul/y" [op=Const]; +"inception_resnet_v2/custom_scale_layer_7/mul" [op=Mul]; +"inception_resnet_v2/custom_scale_layer_7/add" [op=AddV2]; +"inception_resnet_v2/block35_8_ac/Relu" [op=Relu]; +"inception_resnet_v2/conv2d_63/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_63/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_63/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/conv2d_63/Round" [op=Round]; +"inception_resnet_v2/conv2d_63/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_63/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_63/mul" [op=Mul]; +"inception_resnet_v2/conv2d_63/Conv2D" [op=Conv2D]; +"inception_resnet_v2/batch_normalization_63/Const" [op=Const]; +"inception_resnet_v2/batch_normalization_63/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_63/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_63/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_63/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_63/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_63/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_63/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_resnet_v2/activation_63/Relu" [op=Relu]; +"inception_resnet_v2/conv2d_64/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_64/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_64/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/conv2d_64/Round" [op=Round]; +"inception_resnet_v2/conv2d_64/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_64/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_64/mul" [op=Mul]; +"inception_resnet_v2/conv2d_64/Conv2D" [op=Conv2D]; +"inception_resnet_v2/conv2d_61/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_61/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_61/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/conv2d_61/Round" [op=Round]; 
+"inception_resnet_v2/conv2d_61/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_61/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_61/mul" [op=Mul]; +"inception_resnet_v2/conv2d_61/Conv2D" [op=Conv2D]; +"inception_resnet_v2/batch_normalization_64/Const" [op=Const]; +"inception_resnet_v2/batch_normalization_64/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_64/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_64/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_64/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_64/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_64/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_64/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_resnet_v2/batch_normalization_61/Const" [op=Const]; +"inception_resnet_v2/batch_normalization_61/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_61/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_61/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_61/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_61/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_61/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_61/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_resnet_v2/activation_64/Relu" [op=Relu]; +"inception_resnet_v2/activation_61/Relu" [op=Relu]; +"inception_resnet_v2/conv2d_65/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_65/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_65/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/conv2d_65/Round" [op=Round]; +"inception_resnet_v2/conv2d_65/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_65/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_65/mul" [op=Mul]; +"inception_resnet_v2/conv2d_65/Conv2D" [op=Conv2D]; +"inception_resnet_v2/conv2d_62/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_62/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_62/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/conv2d_62/Round" [op=Round]; +"inception_resnet_v2/conv2d_62/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_62/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_62/mul" [op=Mul]; +"inception_resnet_v2/conv2d_62/Conv2D" [op=Conv2D]; +"inception_resnet_v2/conv2d_60/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_60/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_60/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/conv2d_60/Round" [op=Round]; +"inception_resnet_v2/conv2d_60/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_60/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_60/mul" [op=Mul]; +"inception_resnet_v2/conv2d_60/Conv2D" [op=Conv2D]; +"inception_resnet_v2/batch_normalization_65/Const" [op=Const]; +"inception_resnet_v2/batch_normalization_65/ReadVariableOp/resource" [op=Placeholder]; 
+"inception_resnet_v2/batch_normalization_65/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_65/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_65/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_65/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_65/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_65/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_resnet_v2/batch_normalization_62/Const" [op=Const]; +"inception_resnet_v2/batch_normalization_62/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_62/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_62/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_62/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_62/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_62/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_62/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_resnet_v2/batch_normalization_60/Const" [op=Const]; +"inception_resnet_v2/batch_normalization_60/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_60/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_60/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_60/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_60/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_60/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_60/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_resnet_v2/activation_60/Relu" [op=Relu]; +"inception_resnet_v2/activation_62/Relu" [op=Relu]; +"inception_resnet_v2/activation_65/Relu" [op=Relu]; +"inception_resnet_v2/block35_9_mixed/concat/axis" [op=Const]; +"inception_resnet_v2/block35_9_mixed/concat" [op=ConcatV2]; +"inception_resnet_v2/block35_9_conv/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/block35_9_conv/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/block35_9_conv/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/block35_9_conv/Round" [op=Round]; +"inception_resnet_v2/block35_9_conv/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/block35_9_conv/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/block35_9_conv/mul" [op=Mul]; +"inception_resnet_v2/block35_9_conv/Conv2D" [op=Conv2D]; +"inception_resnet_v2/block35_9_conv/BiasAdd/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/block35_9_conv/BiasAdd/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/block35_9_conv/BiasAdd" [op=BiasAdd]; +"inception_resnet_v2/custom_scale_layer_8/mul/y" [op=Const]; +"inception_resnet_v2/custom_scale_layer_8/mul" [op=Mul]; +"inception_resnet_v2/custom_scale_layer_8/add" [op=AddV2]; +"inception_resnet_v2/block35_9_ac/Relu" [op=Relu]; +"inception_resnet_v2/conv2d_69/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_69/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_69/Sigmoid" [op=Sigmoid]; 
+"inception_resnet_v2/conv2d_69/Round" [op=Round]; +"inception_resnet_v2/conv2d_69/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_69/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_69/mul" [op=Mul]; +"inception_resnet_v2/conv2d_69/Conv2D" [op=Conv2D]; +"inception_resnet_v2/batch_normalization_69/Const" [op=Const]; +"inception_resnet_v2/batch_normalization_69/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_69/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_69/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_69/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_69/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_69/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_69/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_resnet_v2/activation_69/Relu" [op=Relu]; +"inception_resnet_v2/conv2d_70/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_70/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_70/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/conv2d_70/Round" [op=Round]; +"inception_resnet_v2/conv2d_70/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_70/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_70/mul" [op=Mul]; +"inception_resnet_v2/conv2d_70/Conv2D" [op=Conv2D]; +"inception_resnet_v2/conv2d_67/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_67/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_67/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/conv2d_67/Round" [op=Round]; +"inception_resnet_v2/conv2d_67/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_67/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_67/mul" [op=Mul]; +"inception_resnet_v2/conv2d_67/Conv2D" [op=Conv2D]; +"inception_resnet_v2/batch_normalization_70/Const" [op=Const]; +"inception_resnet_v2/batch_normalization_70/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_70/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_70/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_70/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_70/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_70/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_70/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_resnet_v2/batch_normalization_67/Const" [op=Const]; +"inception_resnet_v2/batch_normalization_67/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_67/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_67/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_67/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_67/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_67/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_67/FusedBatchNormV3" 
[op=FusedBatchNormV3]; +"inception_resnet_v2/activation_70/Relu" [op=Relu]; +"inception_resnet_v2/activation_67/Relu" [op=Relu]; +"inception_resnet_v2/conv2d_71/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_71/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_71/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/conv2d_71/Round" [op=Round]; +"inception_resnet_v2/conv2d_71/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_71/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_71/mul" [op=Mul]; +"inception_resnet_v2/conv2d_71/Conv2D" [op=Conv2D]; +"inception_resnet_v2/conv2d_68/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_68/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_68/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/conv2d_68/Round" [op=Round]; +"inception_resnet_v2/conv2d_68/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_68/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_68/mul" [op=Mul]; +"inception_resnet_v2/conv2d_68/Conv2D" [op=Conv2D]; +"inception_resnet_v2/conv2d_66/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_66/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_66/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/conv2d_66/Round" [op=Round]; +"inception_resnet_v2/conv2d_66/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_66/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_66/mul" [op=Mul]; +"inception_resnet_v2/conv2d_66/Conv2D" [op=Conv2D]; +"inception_resnet_v2/batch_normalization_71/Const" [op=Const]; +"inception_resnet_v2/batch_normalization_71/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_71/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_71/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_71/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_71/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_71/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_71/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_resnet_v2/batch_normalization_68/Const" [op=Const]; +"inception_resnet_v2/batch_normalization_68/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_68/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_68/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_68/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_68/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_68/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_68/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_resnet_v2/batch_normalization_66/Const" [op=Const]; +"inception_resnet_v2/batch_normalization_66/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_66/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_66/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_66/FusedBatchNormV3/ReadVariableOp" 
[op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_66/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_66/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_66/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_resnet_v2/activation_66/Relu" [op=Relu]; +"inception_resnet_v2/activation_68/Relu" [op=Relu]; +"inception_resnet_v2/activation_71/Relu" [op=Relu]; +"inception_resnet_v2/block35_10_mixed/concat/axis" [op=Const]; +"inception_resnet_v2/block35_10_mixed/concat" [op=ConcatV2]; +"inception_resnet_v2/block35_10_conv/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/block35_10_conv/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/block35_10_conv/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/block35_10_conv/Round" [op=Round]; +"inception_resnet_v2/block35_10_conv/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/block35_10_conv/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/block35_10_conv/mul" [op=Mul]; +"inception_resnet_v2/block35_10_conv/Conv2D" [op=Conv2D]; +"inception_resnet_v2/block35_10_conv/BiasAdd/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/block35_10_conv/BiasAdd/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/block35_10_conv/BiasAdd" [op=BiasAdd]; +"inception_resnet_v2/custom_scale_layer_9/mul/y" [op=Const]; +"inception_resnet_v2/custom_scale_layer_9/mul" [op=Mul]; +"inception_resnet_v2/custom_scale_layer_9/add" [op=AddV2]; +"inception_resnet_v2/block35_10_ac/Relu" [op=Relu]; +"inception_resnet_v2/conv2d_73/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_73/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_73/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/conv2d_73/Round" [op=Round]; +"inception_resnet_v2/conv2d_73/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_73/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_73/mul" [op=Mul]; +"inception_resnet_v2/conv2d_73/Conv2D" [op=Conv2D]; +"inception_resnet_v2/batch_normalization_73/Const" [op=Const]; +"inception_resnet_v2/batch_normalization_73/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_73/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_73/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_73/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_73/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_73/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_73/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_resnet_v2/activation_73/Relu" [op=Relu]; +"inception_resnet_v2/conv2d_74/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_74/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_74/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/conv2d_74/Round" [op=Round]; +"inception_resnet_v2/conv2d_74/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_74/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_74/mul" [op=Mul]; +"inception_resnet_v2/conv2d_74/Conv2D" [op=Conv2D]; +"inception_resnet_v2/batch_normalization_74/Const" [op=Const]; 
+"inception_resnet_v2/batch_normalization_74/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_74/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_74/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_74/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_74/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_74/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_74/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_resnet_v2/activation_74/Relu" [op=Relu]; +"inception_resnet_v2/conv2d_75/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_75/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_75/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/conv2d_75/Round" [op=Round]; +"inception_resnet_v2/conv2d_75/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_75/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_75/mul" [op=Mul]; +"inception_resnet_v2/conv2d_75/Conv2D" [op=Conv2D]; +"inception_resnet_v2/conv2d_72/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_72/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_72/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/conv2d_72/Round" [op=Round]; +"inception_resnet_v2/conv2d_72/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_72/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_72/mul" [op=Mul]; +"inception_resnet_v2/conv2d_72/Conv2D" [op=Conv2D]; +"inception_resnet_v2/batch_normalization_75/Const" [op=Const]; +"inception_resnet_v2/batch_normalization_75/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_75/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_75/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_75/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_75/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_75/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_75/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_resnet_v2/batch_normalization_72/Const" [op=Const]; +"inception_resnet_v2/batch_normalization_72/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_72/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_72/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_72/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_72/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_72/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_72/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_resnet_v2/activation_72/Relu" [op=Relu]; +"inception_resnet_v2/activation_75/Relu" [op=Relu]; +"inception_resnet_v2/max_pooling2d_2/MaxPool" [op=MaxPool]; +"inception_resnet_v2/mixed_6a/concat/axis" [op=Const]; +"inception_resnet_v2/mixed_6a/concat" [op=ConcatV2]; +"inception_resnet_v2/conv2d_77/Sigmoid/ReadVariableOp/resource" 
[op=Placeholder]; +"inception_resnet_v2/conv2d_77/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_77/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/conv2d_77/Round" [op=Round]; +"inception_resnet_v2/conv2d_77/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_77/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_77/mul" [op=Mul]; +"inception_resnet_v2/conv2d_77/Conv2D" [op=Conv2D]; +"inception_resnet_v2/batch_normalization_77/Const" [op=Const]; +"inception_resnet_v2/batch_normalization_77/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_77/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_77/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_77/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_77/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_77/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_77/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_resnet_v2/activation_77/Relu" [op=Relu]; +"inception_resnet_v2/conv2d_78/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_78/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_78/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/conv2d_78/Round" [op=Round]; +"inception_resnet_v2/conv2d_78/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_78/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_78/mul" [op=Mul]; +"inception_resnet_v2/conv2d_78/Conv2D" [op=Conv2D]; +"inception_resnet_v2/batch_normalization_78/Const" [op=Const]; +"inception_resnet_v2/batch_normalization_78/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_78/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_78/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_78/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_78/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_78/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_78/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_resnet_v2/activation_78/Relu" [op=Relu]; +"inception_resnet_v2/conv2d_79/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_79/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_79/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/conv2d_79/Round" [op=Round]; +"inception_resnet_v2/conv2d_79/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_79/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_79/mul" [op=Mul]; +"inception_resnet_v2/conv2d_79/Conv2D" [op=Conv2D]; +"inception_resnet_v2/conv2d_76/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_76/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_76/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/conv2d_76/Round" [op=Round]; +"inception_resnet_v2/conv2d_76/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_76/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_76/mul" [op=Mul]; +"inception_resnet_v2/conv2d_76/Conv2D" [op=Conv2D]; 
+"inception_resnet_v2/batch_normalization_79/Const" [op=Const]; +"inception_resnet_v2/batch_normalization_79/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_79/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_79/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_79/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_79/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_79/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_79/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_resnet_v2/batch_normalization_76/Const" [op=Const]; +"inception_resnet_v2/batch_normalization_76/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_76/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_76/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_76/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_76/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_76/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_76/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_resnet_v2/activation_76/Relu" [op=Relu]; +"inception_resnet_v2/activation_79/Relu" [op=Relu]; +"inception_resnet_v2/block17_1_mixed/concat/axis" [op=Const]; +"inception_resnet_v2/block17_1_mixed/concat" [op=ConcatV2]; +"inception_resnet_v2/block17_1_conv/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/block17_1_conv/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/block17_1_conv/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/block17_1_conv/Round" [op=Round]; +"inception_resnet_v2/block17_1_conv/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/block17_1_conv/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/block17_1_conv/mul" [op=Mul]; +"inception_resnet_v2/block17_1_conv/Conv2D" [op=Conv2D]; +"inception_resnet_v2/block17_1_conv/BiasAdd/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/block17_1_conv/BiasAdd/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/block17_1_conv/BiasAdd" [op=BiasAdd]; +"inception_resnet_v2/custom_scale_layer_10/mul/y" [op=Const]; +"inception_resnet_v2/custom_scale_layer_10/mul" [op=Mul]; +"inception_resnet_v2/custom_scale_layer_10/add" [op=AddV2]; +"inception_resnet_v2/block17_1_ac/Relu" [op=Relu]; +"inception_resnet_v2/conv2d_81/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_81/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_81/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/conv2d_81/Round" [op=Round]; +"inception_resnet_v2/conv2d_81/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_81/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_81/mul" [op=Mul]; +"inception_resnet_v2/conv2d_81/Conv2D" [op=Conv2D]; +"inception_resnet_v2/batch_normalization_81/Const" [op=Const]; +"inception_resnet_v2/batch_normalization_81/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_81/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_81/FusedBatchNormV3/ReadVariableOp/resource" 
[op=Placeholder]; +"inception_resnet_v2/batch_normalization_81/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_81/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_81/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_81/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_resnet_v2/activation_81/Relu" [op=Relu]; +"inception_resnet_v2/conv2d_82/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_82/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_82/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/conv2d_82/Round" [op=Round]; +"inception_resnet_v2/conv2d_82/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_82/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_82/mul" [op=Mul]; +"inception_resnet_v2/conv2d_82/Conv2D" [op=Conv2D]; +"inception_resnet_v2/batch_normalization_82/Const" [op=Const]; +"inception_resnet_v2/batch_normalization_82/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_82/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_82/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_82/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_82/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_82/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_82/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_resnet_v2/activation_82/Relu" [op=Relu]; +"inception_resnet_v2/conv2d_83/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_83/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_83/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/conv2d_83/Round" [op=Round]; +"inception_resnet_v2/conv2d_83/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_83/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_83/mul" [op=Mul]; +"inception_resnet_v2/conv2d_83/Conv2D" [op=Conv2D]; +"inception_resnet_v2/conv2d_80/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_80/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_80/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/conv2d_80/Round" [op=Round]; +"inception_resnet_v2/conv2d_80/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_80/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_80/mul" [op=Mul]; +"inception_resnet_v2/conv2d_80/Conv2D" [op=Conv2D]; +"inception_resnet_v2/batch_normalization_83/Const" [op=Const]; +"inception_resnet_v2/batch_normalization_83/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_83/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_83/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_83/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_83/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_83/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_83/FusedBatchNormV3" [op=FusedBatchNormV3]; 
+"inception_resnet_v2/batch_normalization_80/Const" [op=Const]; +"inception_resnet_v2/batch_normalization_80/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_80/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_80/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_80/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_80/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_80/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_80/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_resnet_v2/activation_80/Relu" [op=Relu]; +"inception_resnet_v2/activation_83/Relu" [op=Relu]; +"inception_resnet_v2/block17_2_mixed/concat/axis" [op=Const]; +"inception_resnet_v2/block17_2_mixed/concat" [op=ConcatV2]; +"inception_resnet_v2/block17_2_conv/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/block17_2_conv/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/block17_2_conv/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/block17_2_conv/Round" [op=Round]; +"inception_resnet_v2/block17_2_conv/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/block17_2_conv/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/block17_2_conv/mul" [op=Mul]; +"inception_resnet_v2/block17_2_conv/Conv2D" [op=Conv2D]; +"inception_resnet_v2/block17_2_conv/BiasAdd/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/block17_2_conv/BiasAdd/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/block17_2_conv/BiasAdd" [op=BiasAdd]; +"inception_resnet_v2/custom_scale_layer_11/mul/y" [op=Const]; +"inception_resnet_v2/custom_scale_layer_11/mul" [op=Mul]; +"inception_resnet_v2/custom_scale_layer_11/add" [op=AddV2]; +"inception_resnet_v2/block17_2_ac/Relu" [op=Relu]; +"inception_resnet_v2/conv2d_85/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_85/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_85/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/conv2d_85/Round" [op=Round]; +"inception_resnet_v2/conv2d_85/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_85/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_85/mul" [op=Mul]; +"inception_resnet_v2/conv2d_85/Conv2D" [op=Conv2D]; +"inception_resnet_v2/batch_normalization_85/Const" [op=Const]; +"inception_resnet_v2/batch_normalization_85/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_85/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_85/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_85/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_85/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_85/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_85/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_resnet_v2/activation_85/Relu" [op=Relu]; +"inception_resnet_v2/conv2d_86/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_86/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_86/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/conv2d_86/Round" [op=Round]; 
+"inception_resnet_v2/conv2d_86/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_86/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_86/mul" [op=Mul]; +"inception_resnet_v2/conv2d_86/Conv2D" [op=Conv2D]; +"inception_resnet_v2/batch_normalization_86/Const" [op=Const]; +"inception_resnet_v2/batch_normalization_86/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_86/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_86/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_86/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_86/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_86/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_86/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_resnet_v2/activation_86/Relu" [op=Relu]; +"inception_resnet_v2/conv2d_87/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_87/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_87/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/conv2d_87/Round" [op=Round]; +"inception_resnet_v2/conv2d_87/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_87/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_87/mul" [op=Mul]; +"inception_resnet_v2/conv2d_87/Conv2D" [op=Conv2D]; +"inception_resnet_v2/conv2d_84/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_84/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_84/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/conv2d_84/Round" [op=Round]; +"inception_resnet_v2/conv2d_84/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_84/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_84/mul" [op=Mul]; +"inception_resnet_v2/conv2d_84/Conv2D" [op=Conv2D]; +"inception_resnet_v2/batch_normalization_87/Const" [op=Const]; +"inception_resnet_v2/batch_normalization_87/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_87/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_87/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_87/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_87/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_87/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_87/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_resnet_v2/batch_normalization_84/Const" [op=Const]; +"inception_resnet_v2/batch_normalization_84/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_84/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_84/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_84/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_84/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_84/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_84/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_resnet_v2/activation_84/Relu" 
[op=Relu]; +"inception_resnet_v2/activation_87/Relu" [op=Relu]; +"inception_resnet_v2/block17_3_mixed/concat/axis" [op=Const]; +"inception_resnet_v2/block17_3_mixed/concat" [op=ConcatV2]; +"inception_resnet_v2/block17_3_conv/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/block17_3_conv/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/block17_3_conv/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/block17_3_conv/Round" [op=Round]; +"inception_resnet_v2/block17_3_conv/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/block17_3_conv/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/block17_3_conv/mul" [op=Mul]; +"inception_resnet_v2/block17_3_conv/Conv2D" [op=Conv2D]; +"inception_resnet_v2/block17_3_conv/BiasAdd/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/block17_3_conv/BiasAdd/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/block17_3_conv/BiasAdd" [op=BiasAdd]; +"inception_resnet_v2/custom_scale_layer_12/mul/y" [op=Const]; +"inception_resnet_v2/custom_scale_layer_12/mul" [op=Mul]; +"inception_resnet_v2/custom_scale_layer_12/add" [op=AddV2]; +"inception_resnet_v2/block17_3_ac/Relu" [op=Relu]; +"inception_resnet_v2/conv2d_89/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_89/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_89/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/conv2d_89/Round" [op=Round]; +"inception_resnet_v2/conv2d_89/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_89/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_89/mul" [op=Mul]; +"inception_resnet_v2/conv2d_89/Conv2D" [op=Conv2D]; +"inception_resnet_v2/batch_normalization_89/Const" [op=Const]; +"inception_resnet_v2/batch_normalization_89/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_89/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_89/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_89/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_89/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_89/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_89/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_resnet_v2/activation_89/Relu" [op=Relu]; +"inception_resnet_v2/conv2d_90/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_90/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_90/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/conv2d_90/Round" [op=Round]; +"inception_resnet_v2/conv2d_90/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_90/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_90/mul" [op=Mul]; +"inception_resnet_v2/conv2d_90/Conv2D" [op=Conv2D]; +"inception_resnet_v2/batch_normalization_90/Const" [op=Const]; +"inception_resnet_v2/batch_normalization_90/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_90/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_90/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_90/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_90/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; 
+"inception_resnet_v2/batch_normalization_90/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_90/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_resnet_v2/activation_90/Relu" [op=Relu]; +"inception_resnet_v2/conv2d_91/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_91/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_91/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/conv2d_91/Round" [op=Round]; +"inception_resnet_v2/conv2d_91/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_91/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_91/mul" [op=Mul]; +"inception_resnet_v2/conv2d_91/Conv2D" [op=Conv2D]; +"inception_resnet_v2/conv2d_88/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_88/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_88/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/conv2d_88/Round" [op=Round]; +"inception_resnet_v2/conv2d_88/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_88/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_88/mul" [op=Mul]; +"inception_resnet_v2/conv2d_88/Conv2D" [op=Conv2D]; +"inception_resnet_v2/batch_normalization_91/Const" [op=Const]; +"inception_resnet_v2/batch_normalization_91/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_91/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_91/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_91/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_91/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_91/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_91/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_resnet_v2/batch_normalization_88/Const" [op=Const]; +"inception_resnet_v2/batch_normalization_88/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_88/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_88/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_88/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_88/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_88/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_88/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_resnet_v2/activation_88/Relu" [op=Relu]; +"inception_resnet_v2/activation_91/Relu" [op=Relu]; +"inception_resnet_v2/block17_4_mixed/concat/axis" [op=Const]; +"inception_resnet_v2/block17_4_mixed/concat" [op=ConcatV2]; +"inception_resnet_v2/block17_4_conv/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/block17_4_conv/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/block17_4_conv/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/block17_4_conv/Round" [op=Round]; +"inception_resnet_v2/block17_4_conv/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/block17_4_conv/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/block17_4_conv/mul" [op=Mul]; +"inception_resnet_v2/block17_4_conv/Conv2D" [op=Conv2D]; 
+"inception_resnet_v2/block17_4_conv/BiasAdd/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/block17_4_conv/BiasAdd/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/block17_4_conv/BiasAdd" [op=BiasAdd]; +"inception_resnet_v2/custom_scale_layer_13/mul/y" [op=Const]; +"inception_resnet_v2/custom_scale_layer_13/mul" [op=Mul]; +"inception_resnet_v2/custom_scale_layer_13/add" [op=AddV2]; +"inception_resnet_v2/block17_4_ac/Relu" [op=Relu]; +"inception_resnet_v2/conv2d_93/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_93/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_93/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/conv2d_93/Round" [op=Round]; +"inception_resnet_v2/conv2d_93/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_93/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_93/mul" [op=Mul]; +"inception_resnet_v2/conv2d_93/Conv2D" [op=Conv2D]; +"inception_resnet_v2/batch_normalization_93/Const" [op=Const]; +"inception_resnet_v2/batch_normalization_93/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_93/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_93/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_93/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_93/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_93/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_93/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_resnet_v2/activation_93/Relu" [op=Relu]; +"inception_resnet_v2/conv2d_94/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_94/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_94/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/conv2d_94/Round" [op=Round]; +"inception_resnet_v2/conv2d_94/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_94/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_94/mul" [op=Mul]; +"inception_resnet_v2/conv2d_94/Conv2D" [op=Conv2D]; +"inception_resnet_v2/batch_normalization_94/Const" [op=Const]; +"inception_resnet_v2/batch_normalization_94/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_94/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_94/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_94/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_94/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_94/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_94/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_resnet_v2/activation_94/Relu" [op=Relu]; +"inception_resnet_v2/conv2d_95/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_95/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_95/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/conv2d_95/Round" [op=Round]; +"inception_resnet_v2/conv2d_95/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_95/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_95/mul" [op=Mul]; +"inception_resnet_v2/conv2d_95/Conv2D" 
[op=Conv2D]; +"inception_resnet_v2/conv2d_92/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_92/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_92/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/conv2d_92/Round" [op=Round]; +"inception_resnet_v2/conv2d_92/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_92/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_92/mul" [op=Mul]; +"inception_resnet_v2/conv2d_92/Conv2D" [op=Conv2D]; +"inception_resnet_v2/batch_normalization_95/Const" [op=Const]; +"inception_resnet_v2/batch_normalization_95/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_95/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_95/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_95/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_95/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_95/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_95/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_resnet_v2/batch_normalization_92/Const" [op=Const]; +"inception_resnet_v2/batch_normalization_92/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_92/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_92/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_92/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_92/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_92/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_92/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_resnet_v2/activation_92/Relu" [op=Relu]; +"inception_resnet_v2/activation_95/Relu" [op=Relu]; +"inception_resnet_v2/block17_5_mixed/concat/axis" [op=Const]; +"inception_resnet_v2/block17_5_mixed/concat" [op=ConcatV2]; +"inception_resnet_v2/block17_5_conv/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/block17_5_conv/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/block17_5_conv/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/block17_5_conv/Round" [op=Round]; +"inception_resnet_v2/block17_5_conv/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/block17_5_conv/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/block17_5_conv/mul" [op=Mul]; +"inception_resnet_v2/block17_5_conv/Conv2D" [op=Conv2D]; +"inception_resnet_v2/block17_5_conv/BiasAdd/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/block17_5_conv/BiasAdd/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/block17_5_conv/BiasAdd" [op=BiasAdd]; +"inception_resnet_v2/custom_scale_layer_14/mul/y" [op=Const]; +"inception_resnet_v2/custom_scale_layer_14/mul" [op=Mul]; +"inception_resnet_v2/custom_scale_layer_14/add" [op=AddV2]; +"inception_resnet_v2/block17_5_ac/Relu" [op=Relu]; +"inception_resnet_v2/conv2d_97/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_97/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_97/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/conv2d_97/Round" [op=Round]; +"inception_resnet_v2/conv2d_97/ReadVariableOp/resource" 
[op=Placeholder]; +"inception_resnet_v2/conv2d_97/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_97/mul" [op=Mul]; +"inception_resnet_v2/conv2d_97/Conv2D" [op=Conv2D]; +"inception_resnet_v2/batch_normalization_97/Const" [op=Const]; +"inception_resnet_v2/batch_normalization_97/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_97/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_97/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_97/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_97/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_97/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_97/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_resnet_v2/activation_97/Relu" [op=Relu]; +"inception_resnet_v2/conv2d_98/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_98/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_98/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/conv2d_98/Round" [op=Round]; +"inception_resnet_v2/conv2d_98/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_98/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_98/mul" [op=Mul]; +"inception_resnet_v2/conv2d_98/Conv2D" [op=Conv2D]; +"inception_resnet_v2/batch_normalization_98/Const" [op=Const]; +"inception_resnet_v2/batch_normalization_98/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_98/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_98/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_98/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_98/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_98/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_98/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_resnet_v2/activation_98/Relu" [op=Relu]; +"inception_resnet_v2/conv2d_99/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_99/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_99/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/conv2d_99/Round" [op=Round]; +"inception_resnet_v2/conv2d_99/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_99/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_99/mul" [op=Mul]; +"inception_resnet_v2/conv2d_99/Conv2D" [op=Conv2D]; +"inception_resnet_v2/conv2d_96/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_96/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_96/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/conv2d_96/Round" [op=Round]; +"inception_resnet_v2/conv2d_96/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_96/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_96/mul" [op=Mul]; +"inception_resnet_v2/conv2d_96/Conv2D" [op=Conv2D]; +"inception_resnet_v2/batch_normalization_99/Const" [op=Const]; +"inception_resnet_v2/batch_normalization_99/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_99/ReadVariableOp" [op=ReadVariableOp]; 
+"inception_resnet_v2/batch_normalization_99/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_99/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_99/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_99/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_99/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_resnet_v2/batch_normalization_96/Const" [op=Const]; +"inception_resnet_v2/batch_normalization_96/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_96/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_96/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_96/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_96/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_96/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_96/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_resnet_v2/activation_96/Relu" [op=Relu]; +"inception_resnet_v2/activation_99/Relu" [op=Relu]; +"inception_resnet_v2/block17_6_mixed/concat/axis" [op=Const]; +"inception_resnet_v2/block17_6_mixed/concat" [op=ConcatV2]; +"inception_resnet_v2/block17_6_conv/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/block17_6_conv/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/block17_6_conv/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/block17_6_conv/Round" [op=Round]; +"inception_resnet_v2/block17_6_conv/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/block17_6_conv/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/block17_6_conv/mul" [op=Mul]; +"inception_resnet_v2/block17_6_conv/Conv2D" [op=Conv2D]; +"inception_resnet_v2/block17_6_conv/BiasAdd/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/block17_6_conv/BiasAdd/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/block17_6_conv/BiasAdd" [op=BiasAdd]; +"inception_resnet_v2/custom_scale_layer_15/mul/y" [op=Const]; +"inception_resnet_v2/custom_scale_layer_15/mul" [op=Mul]; +"inception_resnet_v2/custom_scale_layer_15/add" [op=AddV2]; +"inception_resnet_v2/block17_6_ac/Relu" [op=Relu]; +"inception_resnet_v2/conv2d_101/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_101/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_101/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/conv2d_101/Round" [op=Round]; +"inception_resnet_v2/conv2d_101/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_101/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_101/mul" [op=Mul]; +"inception_resnet_v2/conv2d_101/Conv2D" [op=Conv2D]; +"inception_resnet_v2/batch_normalization_101/Const" [op=Const]; +"inception_resnet_v2/batch_normalization_101/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_101/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_101/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_101/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_101/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; 
+"inception_resnet_v2/batch_normalization_101/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_101/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_resnet_v2/activation_101/Relu" [op=Relu]; +"inception_resnet_v2/conv2d_102/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_102/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_102/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/conv2d_102/Round" [op=Round]; +"inception_resnet_v2/conv2d_102/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_102/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_102/mul" [op=Mul]; +"inception_resnet_v2/conv2d_102/Conv2D" [op=Conv2D]; +"inception_resnet_v2/batch_normalization_102/Const" [op=Const]; +"inception_resnet_v2/batch_normalization_102/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_102/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_102/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_102/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_102/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_102/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_102/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_resnet_v2/activation_102/Relu" [op=Relu]; +"inception_resnet_v2/conv2d_103/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_103/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_103/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/conv2d_103/Round" [op=Round]; +"inception_resnet_v2/conv2d_103/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_103/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_103/mul" [op=Mul]; +"inception_resnet_v2/conv2d_103/Conv2D" [op=Conv2D]; +"inception_resnet_v2/conv2d_100/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_100/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_100/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/conv2d_100/Round" [op=Round]; +"inception_resnet_v2/conv2d_100/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_100/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_100/mul" [op=Mul]; +"inception_resnet_v2/conv2d_100/Conv2D" [op=Conv2D]; +"inception_resnet_v2/batch_normalization_103/Const" [op=Const]; +"inception_resnet_v2/batch_normalization_103/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_103/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_103/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_103/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_103/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_103/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_103/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_resnet_v2/batch_normalization_100/Const" [op=Const]; +"inception_resnet_v2/batch_normalization_100/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_100/ReadVariableOp" 
[op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_100/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_100/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_100/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_100/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_100/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_resnet_v2/activation_100/Relu" [op=Relu]; +"inception_resnet_v2/activation_103/Relu" [op=Relu]; +"inception_resnet_v2/block17_7_mixed/concat/axis" [op=Const]; +"inception_resnet_v2/block17_7_mixed/concat" [op=ConcatV2]; +"inception_resnet_v2/block17_7_conv/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/block17_7_conv/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/block17_7_conv/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/block17_7_conv/Round" [op=Round]; +"inception_resnet_v2/block17_7_conv/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/block17_7_conv/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/block17_7_conv/mul" [op=Mul]; +"inception_resnet_v2/block17_7_conv/Conv2D" [op=Conv2D]; +"inception_resnet_v2/block17_7_conv/BiasAdd/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/block17_7_conv/BiasAdd/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/block17_7_conv/BiasAdd" [op=BiasAdd]; +"inception_resnet_v2/custom_scale_layer_16/mul/y" [op=Const]; +"inception_resnet_v2/custom_scale_layer_16/mul" [op=Mul]; +"inception_resnet_v2/custom_scale_layer_16/add" [op=AddV2]; +"inception_resnet_v2/block17_7_ac/Relu" [op=Relu]; +"inception_resnet_v2/conv2d_105/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_105/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_105/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/conv2d_105/Round" [op=Round]; +"inception_resnet_v2/conv2d_105/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_105/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_105/mul" [op=Mul]; +"inception_resnet_v2/conv2d_105/Conv2D" [op=Conv2D]; +"inception_resnet_v2/batch_normalization_105/Const" [op=Const]; +"inception_resnet_v2/batch_normalization_105/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_105/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_105/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_105/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_105/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_105/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_105/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_resnet_v2/activation_105/Relu" [op=Relu]; +"inception_resnet_v2/conv2d_106/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_106/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_106/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/conv2d_106/Round" [op=Round]; +"inception_resnet_v2/conv2d_106/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_106/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_106/mul" [op=Mul]; 
+"inception_resnet_v2/conv2d_106/Conv2D" [op=Conv2D]; +"inception_resnet_v2/batch_normalization_106/Const" [op=Const]; +"inception_resnet_v2/batch_normalization_106/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_106/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_106/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_106/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_106/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_106/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_106/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_resnet_v2/activation_106/Relu" [op=Relu]; +"inception_resnet_v2/conv2d_107/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_107/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_107/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/conv2d_107/Round" [op=Round]; +"inception_resnet_v2/conv2d_107/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_107/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_107/mul" [op=Mul]; +"inception_resnet_v2/conv2d_107/Conv2D" [op=Conv2D]; +"inception_resnet_v2/conv2d_104/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_104/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_104/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/conv2d_104/Round" [op=Round]; +"inception_resnet_v2/conv2d_104/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_104/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_104/mul" [op=Mul]; +"inception_resnet_v2/conv2d_104/Conv2D" [op=Conv2D]; +"inception_resnet_v2/batch_normalization_107/Const" [op=Const]; +"inception_resnet_v2/batch_normalization_107/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_107/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_107/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_107/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_107/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_107/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_107/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_resnet_v2/batch_normalization_104/Const" [op=Const]; +"inception_resnet_v2/batch_normalization_104/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_104/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_104/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_104/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_104/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_104/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_104/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_resnet_v2/activation_104/Relu" [op=Relu]; +"inception_resnet_v2/activation_107/Relu" [op=Relu]; +"inception_resnet_v2/block17_8_mixed/concat/axis" [op=Const]; 
+"inception_resnet_v2/block17_8_mixed/concat" [op=ConcatV2]; +"inception_resnet_v2/block17_8_conv/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/block17_8_conv/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/block17_8_conv/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/block17_8_conv/Round" [op=Round]; +"inception_resnet_v2/block17_8_conv/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/block17_8_conv/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/block17_8_conv/mul" [op=Mul]; +"inception_resnet_v2/block17_8_conv/Conv2D" [op=Conv2D]; +"inception_resnet_v2/block17_8_conv/BiasAdd/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/block17_8_conv/BiasAdd/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/block17_8_conv/BiasAdd" [op=BiasAdd]; +"inception_resnet_v2/custom_scale_layer_17/mul/y" [op=Const]; +"inception_resnet_v2/custom_scale_layer_17/mul" [op=Mul]; +"inception_resnet_v2/custom_scale_layer_17/add" [op=AddV2]; +"inception_resnet_v2/block17_8_ac/Relu" [op=Relu]; +"inception_resnet_v2/conv2d_109/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_109/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_109/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/conv2d_109/Round" [op=Round]; +"inception_resnet_v2/conv2d_109/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_109/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_109/mul" [op=Mul]; +"inception_resnet_v2/conv2d_109/Conv2D" [op=Conv2D]; +"inception_resnet_v2/batch_normalization_109/Const" [op=Const]; +"inception_resnet_v2/batch_normalization_109/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_109/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_109/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_109/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_109/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_109/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_109/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_resnet_v2/activation_109/Relu" [op=Relu]; +"inception_resnet_v2/conv2d_110/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_110/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_110/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/conv2d_110/Round" [op=Round]; +"inception_resnet_v2/conv2d_110/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_110/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_110/mul" [op=Mul]; +"inception_resnet_v2/conv2d_110/Conv2D" [op=Conv2D]; +"inception_resnet_v2/batch_normalization_110/Const" [op=Const]; +"inception_resnet_v2/batch_normalization_110/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_110/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_110/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_110/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_110/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_110/FusedBatchNormV3/ReadVariableOp_1" 
[op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_110/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_resnet_v2/activation_110/Relu" [op=Relu]; +"inception_resnet_v2/conv2d_111/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_111/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_111/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/conv2d_111/Round" [op=Round]; +"inception_resnet_v2/conv2d_111/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_111/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_111/mul" [op=Mul]; +"inception_resnet_v2/conv2d_111/Conv2D" [op=Conv2D]; +"inception_resnet_v2/conv2d_108/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_108/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_108/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/conv2d_108/Round" [op=Round]; +"inception_resnet_v2/conv2d_108/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_108/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_108/mul" [op=Mul]; +"inception_resnet_v2/conv2d_108/Conv2D" [op=Conv2D]; +"inception_resnet_v2/batch_normalization_111/Const" [op=Const]; +"inception_resnet_v2/batch_normalization_111/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_111/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_111/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_111/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_111/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_111/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_111/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_resnet_v2/batch_normalization_108/Const" [op=Const]; +"inception_resnet_v2/batch_normalization_108/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_108/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_108/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_108/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_108/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_108/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_108/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_resnet_v2/activation_108/Relu" [op=Relu]; +"inception_resnet_v2/activation_111/Relu" [op=Relu]; +"inception_resnet_v2/block17_9_mixed/concat/axis" [op=Const]; +"inception_resnet_v2/block17_9_mixed/concat" [op=ConcatV2]; +"inception_resnet_v2/block17_9_conv/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/block17_9_conv/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/block17_9_conv/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/block17_9_conv/Round" [op=Round]; +"inception_resnet_v2/block17_9_conv/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/block17_9_conv/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/block17_9_conv/mul" [op=Mul]; +"inception_resnet_v2/block17_9_conv/Conv2D" [op=Conv2D]; +"inception_resnet_v2/block17_9_conv/BiasAdd/ReadVariableOp/resource" [op=Placeholder]; 
+"inception_resnet_v2/block17_9_conv/BiasAdd/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/block17_9_conv/BiasAdd" [op=BiasAdd]; +"inception_resnet_v2/custom_scale_layer_18/mul/y" [op=Const]; +"inception_resnet_v2/custom_scale_layer_18/mul" [op=Mul]; +"inception_resnet_v2/custom_scale_layer_18/add" [op=AddV2]; +"inception_resnet_v2/block17_9_ac/Relu" [op=Relu]; +"inception_resnet_v2/conv2d_113/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_113/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_113/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/conv2d_113/Round" [op=Round]; +"inception_resnet_v2/conv2d_113/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_113/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_113/mul" [op=Mul]; +"inception_resnet_v2/conv2d_113/Conv2D" [op=Conv2D]; +"inception_resnet_v2/batch_normalization_113/Const" [op=Const]; +"inception_resnet_v2/batch_normalization_113/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_113/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_113/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_113/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_113/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_113/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_113/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_resnet_v2/activation_113/Relu" [op=Relu]; +"inception_resnet_v2/conv2d_114/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_114/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_114/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/conv2d_114/Round" [op=Round]; +"inception_resnet_v2/conv2d_114/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_114/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_114/mul" [op=Mul]; +"inception_resnet_v2/conv2d_114/Conv2D" [op=Conv2D]; +"inception_resnet_v2/batch_normalization_114/Const" [op=Const]; +"inception_resnet_v2/batch_normalization_114/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_114/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_114/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_114/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_114/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_114/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_114/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_resnet_v2/activation_114/Relu" [op=Relu]; +"inception_resnet_v2/conv2d_115/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_115/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_115/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/conv2d_115/Round" [op=Round]; +"inception_resnet_v2/conv2d_115/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_115/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_115/mul" [op=Mul]; +"inception_resnet_v2/conv2d_115/Conv2D" [op=Conv2D]; 
+"inception_resnet_v2/conv2d_112/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_112/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_112/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/conv2d_112/Round" [op=Round]; +"inception_resnet_v2/conv2d_112/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_112/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_112/mul" [op=Mul]; +"inception_resnet_v2/conv2d_112/Conv2D" [op=Conv2D]; +"inception_resnet_v2/batch_normalization_115/Const" [op=Const]; +"inception_resnet_v2/batch_normalization_115/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_115/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_115/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_115/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_115/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_115/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_115/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_resnet_v2/batch_normalization_112/Const" [op=Const]; +"inception_resnet_v2/batch_normalization_112/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_112/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_112/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_112/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_112/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_112/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_112/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_resnet_v2/activation_112/Relu" [op=Relu]; +"inception_resnet_v2/activation_115/Relu" [op=Relu]; +"inception_resnet_v2/block17_10_mixed/concat/axis" [op=Const]; +"inception_resnet_v2/block17_10_mixed/concat" [op=ConcatV2]; +"inception_resnet_v2/block17_10_conv/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/block17_10_conv/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/block17_10_conv/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/block17_10_conv/Round" [op=Round]; +"inception_resnet_v2/block17_10_conv/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/block17_10_conv/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/block17_10_conv/mul" [op=Mul]; +"inception_resnet_v2/block17_10_conv/Conv2D" [op=Conv2D]; +"inception_resnet_v2/block17_10_conv/BiasAdd/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/block17_10_conv/BiasAdd/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/block17_10_conv/BiasAdd" [op=BiasAdd]; +"inception_resnet_v2/custom_scale_layer_19/mul/y" [op=Const]; +"inception_resnet_v2/custom_scale_layer_19/mul" [op=Mul]; +"inception_resnet_v2/custom_scale_layer_19/add" [op=AddV2]; +"inception_resnet_v2/block17_10_ac/Relu" [op=Relu]; +"inception_resnet_v2/conv2d_117/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_117/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_117/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/conv2d_117/Round" [op=Round]; 
+"inception_resnet_v2/conv2d_117/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_117/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_117/mul" [op=Mul]; +"inception_resnet_v2/conv2d_117/Conv2D" [op=Conv2D]; +"inception_resnet_v2/batch_normalization_117/Const" [op=Const]; +"inception_resnet_v2/batch_normalization_117/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_117/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_117/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_117/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_117/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_117/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_117/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_resnet_v2/activation_117/Relu" [op=Relu]; +"inception_resnet_v2/conv2d_118/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_118/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_118/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/conv2d_118/Round" [op=Round]; +"inception_resnet_v2/conv2d_118/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_118/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_118/mul" [op=Mul]; +"inception_resnet_v2/conv2d_118/Conv2D" [op=Conv2D]; +"inception_resnet_v2/batch_normalization_118/Const" [op=Const]; +"inception_resnet_v2/batch_normalization_118/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_118/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_118/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_118/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_118/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_118/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_118/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_resnet_v2/activation_118/Relu" [op=Relu]; +"inception_resnet_v2/conv2d_119/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_119/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_119/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/conv2d_119/Round" [op=Round]; +"inception_resnet_v2/conv2d_119/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_119/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_119/mul" [op=Mul]; +"inception_resnet_v2/conv2d_119/Conv2D" [op=Conv2D]; +"inception_resnet_v2/conv2d_116/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_116/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_116/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/conv2d_116/Round" [op=Round]; +"inception_resnet_v2/conv2d_116/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_116/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_116/mul" [op=Mul]; +"inception_resnet_v2/conv2d_116/Conv2D" [op=Conv2D]; +"inception_resnet_v2/batch_normalization_119/Const" [op=Const]; +"inception_resnet_v2/batch_normalization_119/ReadVariableOp/resource" [op=Placeholder]; 
+"inception_resnet_v2/batch_normalization_119/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_119/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_119/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_119/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_119/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_119/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_resnet_v2/batch_normalization_116/Const" [op=Const]; +"inception_resnet_v2/batch_normalization_116/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_116/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_116/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_116/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_116/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_116/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_116/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_resnet_v2/activation_116/Relu" [op=Relu]; +"inception_resnet_v2/activation_119/Relu" [op=Relu]; +"inception_resnet_v2/block17_11_mixed/concat/axis" [op=Const]; +"inception_resnet_v2/block17_11_mixed/concat" [op=ConcatV2]; +"inception_resnet_v2/block17_11_conv/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/block17_11_conv/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/block17_11_conv/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/block17_11_conv/Round" [op=Round]; +"inception_resnet_v2/block17_11_conv/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/block17_11_conv/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/block17_11_conv/mul" [op=Mul]; +"inception_resnet_v2/block17_11_conv/Conv2D" [op=Conv2D]; +"inception_resnet_v2/block17_11_conv/BiasAdd/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/block17_11_conv/BiasAdd/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/block17_11_conv/BiasAdd" [op=BiasAdd]; +"inception_resnet_v2/custom_scale_layer_20/mul/y" [op=Const]; +"inception_resnet_v2/custom_scale_layer_20/mul" [op=Mul]; +"inception_resnet_v2/custom_scale_layer_20/add" [op=AddV2]; +"inception_resnet_v2/block17_11_ac/Relu" [op=Relu]; +"inception_resnet_v2/conv2d_121/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_121/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_121/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/conv2d_121/Round" [op=Round]; +"inception_resnet_v2/conv2d_121/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_121/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_121/mul" [op=Mul]; +"inception_resnet_v2/conv2d_121/Conv2D" [op=Conv2D]; +"inception_resnet_v2/batch_normalization_121/Const" [op=Const]; +"inception_resnet_v2/batch_normalization_121/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_121/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_121/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_121/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; 
+"inception_resnet_v2/batch_normalization_121/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_121/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_121/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_resnet_v2/activation_121/Relu" [op=Relu]; +"inception_resnet_v2/conv2d_122/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_122/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_122/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/conv2d_122/Round" [op=Round]; +"inception_resnet_v2/conv2d_122/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_122/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_122/mul" [op=Mul]; +"inception_resnet_v2/conv2d_122/Conv2D" [op=Conv2D]; +"inception_resnet_v2/batch_normalization_122/Const" [op=Const]; +"inception_resnet_v2/batch_normalization_122/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_122/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_122/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_122/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_122/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_122/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_122/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_resnet_v2/activation_122/Relu" [op=Relu]; +"inception_resnet_v2/conv2d_123/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_123/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_123/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/conv2d_123/Round" [op=Round]; +"inception_resnet_v2/conv2d_123/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_123/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_123/mul" [op=Mul]; +"inception_resnet_v2/conv2d_123/Conv2D" [op=Conv2D]; +"inception_resnet_v2/conv2d_120/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_120/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_120/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/conv2d_120/Round" [op=Round]; +"inception_resnet_v2/conv2d_120/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_120/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_120/mul" [op=Mul]; +"inception_resnet_v2/conv2d_120/Conv2D" [op=Conv2D]; +"inception_resnet_v2/batch_normalization_123/Const" [op=Const]; +"inception_resnet_v2/batch_normalization_123/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_123/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_123/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_123/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_123/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_123/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_123/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_resnet_v2/batch_normalization_120/Const" [op=Const]; 
+"inception_resnet_v2/batch_normalization_120/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_120/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_120/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_120/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_120/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_120/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_120/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_resnet_v2/activation_120/Relu" [op=Relu]; +"inception_resnet_v2/activation_123/Relu" [op=Relu]; +"inception_resnet_v2/block17_12_mixed/concat/axis" [op=Const]; +"inception_resnet_v2/block17_12_mixed/concat" [op=ConcatV2]; +"inception_resnet_v2/block17_12_conv/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/block17_12_conv/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/block17_12_conv/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/block17_12_conv/Round" [op=Round]; +"inception_resnet_v2/block17_12_conv/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/block17_12_conv/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/block17_12_conv/mul" [op=Mul]; +"inception_resnet_v2/block17_12_conv/Conv2D" [op=Conv2D]; +"inception_resnet_v2/block17_12_conv/BiasAdd/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/block17_12_conv/BiasAdd/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/block17_12_conv/BiasAdd" [op=BiasAdd]; +"inception_resnet_v2/custom_scale_layer_21/mul/y" [op=Const]; +"inception_resnet_v2/custom_scale_layer_21/mul" [op=Mul]; +"inception_resnet_v2/custom_scale_layer_21/add" [op=AddV2]; +"inception_resnet_v2/block17_12_ac/Relu" [op=Relu]; +"inception_resnet_v2/conv2d_125/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_125/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_125/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/conv2d_125/Round" [op=Round]; +"inception_resnet_v2/conv2d_125/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_125/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_125/mul" [op=Mul]; +"inception_resnet_v2/conv2d_125/Conv2D" [op=Conv2D]; +"inception_resnet_v2/batch_normalization_125/Const" [op=Const]; +"inception_resnet_v2/batch_normalization_125/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_125/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_125/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_125/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_125/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_125/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_125/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_resnet_v2/activation_125/Relu" [op=Relu]; +"inception_resnet_v2/conv2d_126/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_126/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_126/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/conv2d_126/Round" [op=Round]; 
+"inception_resnet_v2/conv2d_126/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_126/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_126/mul" [op=Mul]; +"inception_resnet_v2/conv2d_126/Conv2D" [op=Conv2D]; +"inception_resnet_v2/batch_normalization_126/Const" [op=Const]; +"inception_resnet_v2/batch_normalization_126/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_126/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_126/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_126/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_126/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_126/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_126/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_resnet_v2/activation_126/Relu" [op=Relu]; +"inception_resnet_v2/conv2d_127/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_127/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_127/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/conv2d_127/Round" [op=Round]; +"inception_resnet_v2/conv2d_127/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_127/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_127/mul" [op=Mul]; +"inception_resnet_v2/conv2d_127/Conv2D" [op=Conv2D]; +"inception_resnet_v2/conv2d_124/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_124/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_124/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/conv2d_124/Round" [op=Round]; +"inception_resnet_v2/conv2d_124/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_124/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_124/mul" [op=Mul]; +"inception_resnet_v2/conv2d_124/Conv2D" [op=Conv2D]; +"inception_resnet_v2/batch_normalization_127/Const" [op=Const]; +"inception_resnet_v2/batch_normalization_127/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_127/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_127/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_127/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_127/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_127/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_127/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_resnet_v2/batch_normalization_124/Const" [op=Const]; +"inception_resnet_v2/batch_normalization_124/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_124/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_124/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_124/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_124/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_124/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_124/FusedBatchNormV3" 
[op=FusedBatchNormV3]; +"inception_resnet_v2/activation_124/Relu" [op=Relu]; +"inception_resnet_v2/activation_127/Relu" [op=Relu]; +"inception_resnet_v2/block17_13_mixed/concat/axis" [op=Const]; +"inception_resnet_v2/block17_13_mixed/concat" [op=ConcatV2]; +"inception_resnet_v2/block17_13_conv/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/block17_13_conv/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/block17_13_conv/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/block17_13_conv/Round" [op=Round]; +"inception_resnet_v2/block17_13_conv/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/block17_13_conv/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/block17_13_conv/mul" [op=Mul]; +"inception_resnet_v2/block17_13_conv/Conv2D" [op=Conv2D]; +"inception_resnet_v2/block17_13_conv/BiasAdd/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/block17_13_conv/BiasAdd/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/block17_13_conv/BiasAdd" [op=BiasAdd]; +"inception_resnet_v2/custom_scale_layer_22/mul/y" [op=Const]; +"inception_resnet_v2/custom_scale_layer_22/mul" [op=Mul]; +"inception_resnet_v2/custom_scale_layer_22/add" [op=AddV2]; +"inception_resnet_v2/block17_13_ac/Relu" [op=Relu]; +"inception_resnet_v2/conv2d_129/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_129/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_129/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/conv2d_129/Round" [op=Round]; +"inception_resnet_v2/conv2d_129/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_129/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_129/mul" [op=Mul]; +"inception_resnet_v2/conv2d_129/Conv2D" [op=Conv2D]; +"inception_resnet_v2/batch_normalization_129/Const" [op=Const]; +"inception_resnet_v2/batch_normalization_129/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_129/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_129/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_129/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_129/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_129/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_129/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_resnet_v2/activation_129/Relu" [op=Relu]; +"inception_resnet_v2/conv2d_130/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_130/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_130/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/conv2d_130/Round" [op=Round]; +"inception_resnet_v2/conv2d_130/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_130/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_130/mul" [op=Mul]; +"inception_resnet_v2/conv2d_130/Conv2D" [op=Conv2D]; +"inception_resnet_v2/batch_normalization_130/Const" [op=Const]; +"inception_resnet_v2/batch_normalization_130/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_130/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_130/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_130/FusedBatchNormV3/ReadVariableOp" 
[op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_130/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_130/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_130/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_resnet_v2/activation_130/Relu" [op=Relu]; +"inception_resnet_v2/conv2d_131/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_131/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_131/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/conv2d_131/Round" [op=Round]; +"inception_resnet_v2/conv2d_131/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_131/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_131/mul" [op=Mul]; +"inception_resnet_v2/conv2d_131/Conv2D" [op=Conv2D]; +"inception_resnet_v2/conv2d_128/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_128/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_128/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/conv2d_128/Round" [op=Round]; +"inception_resnet_v2/conv2d_128/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_128/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_128/mul" [op=Mul]; +"inception_resnet_v2/conv2d_128/Conv2D" [op=Conv2D]; +"inception_resnet_v2/batch_normalization_131/Const" [op=Const]; +"inception_resnet_v2/batch_normalization_131/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_131/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_131/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_131/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_131/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_131/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_131/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_resnet_v2/batch_normalization_128/Const" [op=Const]; +"inception_resnet_v2/batch_normalization_128/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_128/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_128/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_128/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_128/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_128/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_128/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_resnet_v2/activation_128/Relu" [op=Relu]; +"inception_resnet_v2/activation_131/Relu" [op=Relu]; +"inception_resnet_v2/block17_14_mixed/concat/axis" [op=Const]; +"inception_resnet_v2/block17_14_mixed/concat" [op=ConcatV2]; +"inception_resnet_v2/block17_14_conv/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/block17_14_conv/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/block17_14_conv/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/block17_14_conv/Round" [op=Round]; +"inception_resnet_v2/block17_14_conv/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/block17_14_conv/ReadVariableOp" 
[op=ReadVariableOp]; +"inception_resnet_v2/block17_14_conv/mul" [op=Mul]; +"inception_resnet_v2/block17_14_conv/Conv2D" [op=Conv2D]; +"inception_resnet_v2/block17_14_conv/BiasAdd/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/block17_14_conv/BiasAdd/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/block17_14_conv/BiasAdd" [op=BiasAdd]; +"inception_resnet_v2/custom_scale_layer_23/mul/y" [op=Const]; +"inception_resnet_v2/custom_scale_layer_23/mul" [op=Mul]; +"inception_resnet_v2/custom_scale_layer_23/add" [op=AddV2]; +"inception_resnet_v2/block17_14_ac/Relu" [op=Relu]; +"inception_resnet_v2/conv2d_133/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_133/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_133/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/conv2d_133/Round" [op=Round]; +"inception_resnet_v2/conv2d_133/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_133/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_133/mul" [op=Mul]; +"inception_resnet_v2/conv2d_133/Conv2D" [op=Conv2D]; +"inception_resnet_v2/batch_normalization_133/Const" [op=Const]; +"inception_resnet_v2/batch_normalization_133/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_133/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_133/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_133/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_133/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_133/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_133/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_resnet_v2/activation_133/Relu" [op=Relu]; +"inception_resnet_v2/conv2d_134/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_134/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_134/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/conv2d_134/Round" [op=Round]; +"inception_resnet_v2/conv2d_134/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_134/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_134/mul" [op=Mul]; +"inception_resnet_v2/conv2d_134/Conv2D" [op=Conv2D]; +"inception_resnet_v2/batch_normalization_134/Const" [op=Const]; +"inception_resnet_v2/batch_normalization_134/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_134/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_134/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_134/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_134/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_134/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_134/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_resnet_v2/activation_134/Relu" [op=Relu]; +"inception_resnet_v2/conv2d_135/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_135/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_135/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/conv2d_135/Round" [op=Round]; +"inception_resnet_v2/conv2d_135/ReadVariableOp/resource" 
[op=Placeholder]; +"inception_resnet_v2/conv2d_135/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_135/mul" [op=Mul]; +"inception_resnet_v2/conv2d_135/Conv2D" [op=Conv2D]; +"inception_resnet_v2/conv2d_132/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_132/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_132/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/conv2d_132/Round" [op=Round]; +"inception_resnet_v2/conv2d_132/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_132/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_132/mul" [op=Mul]; +"inception_resnet_v2/conv2d_132/Conv2D" [op=Conv2D]; +"inception_resnet_v2/batch_normalization_135/Const" [op=Const]; +"inception_resnet_v2/batch_normalization_135/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_135/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_135/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_135/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_135/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_135/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_135/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_resnet_v2/batch_normalization_132/Const" [op=Const]; +"inception_resnet_v2/batch_normalization_132/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_132/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_132/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_132/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_132/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_132/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_132/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_resnet_v2/activation_132/Relu" [op=Relu]; +"inception_resnet_v2/activation_135/Relu" [op=Relu]; +"inception_resnet_v2/block17_15_mixed/concat/axis" [op=Const]; +"inception_resnet_v2/block17_15_mixed/concat" [op=ConcatV2]; +"inception_resnet_v2/block17_15_conv/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/block17_15_conv/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/block17_15_conv/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/block17_15_conv/Round" [op=Round]; +"inception_resnet_v2/block17_15_conv/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/block17_15_conv/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/block17_15_conv/mul" [op=Mul]; +"inception_resnet_v2/block17_15_conv/Conv2D" [op=Conv2D]; +"inception_resnet_v2/block17_15_conv/BiasAdd/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/block17_15_conv/BiasAdd/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/block17_15_conv/BiasAdd" [op=BiasAdd]; +"inception_resnet_v2/custom_scale_layer_24/mul/y" [op=Const]; +"inception_resnet_v2/custom_scale_layer_24/mul" [op=Mul]; +"inception_resnet_v2/custom_scale_layer_24/add" [op=AddV2]; +"inception_resnet_v2/block17_15_ac/Relu" [op=Relu]; +"inception_resnet_v2/conv2d_137/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; 
+"inception_resnet_v2/conv2d_137/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_137/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/conv2d_137/Round" [op=Round]; +"inception_resnet_v2/conv2d_137/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_137/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_137/mul" [op=Mul]; +"inception_resnet_v2/conv2d_137/Conv2D" [op=Conv2D]; +"inception_resnet_v2/batch_normalization_137/Const" [op=Const]; +"inception_resnet_v2/batch_normalization_137/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_137/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_137/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_137/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_137/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_137/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_137/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_resnet_v2/activation_137/Relu" [op=Relu]; +"inception_resnet_v2/conv2d_138/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_138/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_138/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/conv2d_138/Round" [op=Round]; +"inception_resnet_v2/conv2d_138/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_138/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_138/mul" [op=Mul]; +"inception_resnet_v2/conv2d_138/Conv2D" [op=Conv2D]; +"inception_resnet_v2/batch_normalization_138/Const" [op=Const]; +"inception_resnet_v2/batch_normalization_138/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_138/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_138/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_138/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_138/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_138/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_138/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_resnet_v2/activation_138/Relu" [op=Relu]; +"inception_resnet_v2/conv2d_139/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_139/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_139/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/conv2d_139/Round" [op=Round]; +"inception_resnet_v2/conv2d_139/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_139/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_139/mul" [op=Mul]; +"inception_resnet_v2/conv2d_139/Conv2D" [op=Conv2D]; +"inception_resnet_v2/conv2d_136/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_136/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_136/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/conv2d_136/Round" [op=Round]; +"inception_resnet_v2/conv2d_136/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_136/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_136/mul" [op=Mul]; 
+"inception_resnet_v2/conv2d_136/Conv2D" [op=Conv2D]; +"inception_resnet_v2/batch_normalization_139/Const" [op=Const]; +"inception_resnet_v2/batch_normalization_139/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_139/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_139/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_139/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_139/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_139/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_139/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_resnet_v2/batch_normalization_136/Const" [op=Const]; +"inception_resnet_v2/batch_normalization_136/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_136/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_136/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_136/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_136/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_136/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_136/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_resnet_v2/activation_136/Relu" [op=Relu]; +"inception_resnet_v2/activation_139/Relu" [op=Relu]; +"inception_resnet_v2/block17_16_mixed/concat/axis" [op=Const]; +"inception_resnet_v2/block17_16_mixed/concat" [op=ConcatV2]; +"inception_resnet_v2/block17_16_conv/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/block17_16_conv/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/block17_16_conv/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/block17_16_conv/Round" [op=Round]; +"inception_resnet_v2/block17_16_conv/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/block17_16_conv/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/block17_16_conv/mul" [op=Mul]; +"inception_resnet_v2/block17_16_conv/Conv2D" [op=Conv2D]; +"inception_resnet_v2/block17_16_conv/BiasAdd/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/block17_16_conv/BiasAdd/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/block17_16_conv/BiasAdd" [op=BiasAdd]; +"inception_resnet_v2/custom_scale_layer_25/mul/y" [op=Const]; +"inception_resnet_v2/custom_scale_layer_25/mul" [op=Mul]; +"inception_resnet_v2/custom_scale_layer_25/add" [op=AddV2]; +"inception_resnet_v2/block17_16_ac/Relu" [op=Relu]; +"inception_resnet_v2/conv2d_141/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_141/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_141/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/conv2d_141/Round" [op=Round]; +"inception_resnet_v2/conv2d_141/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_141/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_141/mul" [op=Mul]; +"inception_resnet_v2/conv2d_141/Conv2D" [op=Conv2D]; +"inception_resnet_v2/batch_normalization_141/Const" [op=Const]; +"inception_resnet_v2/batch_normalization_141/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_141/ReadVariableOp" [op=ReadVariableOp]; 
+"inception_resnet_v2/batch_normalization_141/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_141/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_141/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_141/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_141/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_resnet_v2/activation_141/Relu" [op=Relu]; +"inception_resnet_v2/conv2d_142/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_142/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_142/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/conv2d_142/Round" [op=Round]; +"inception_resnet_v2/conv2d_142/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_142/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_142/mul" [op=Mul]; +"inception_resnet_v2/conv2d_142/Conv2D" [op=Conv2D]; +"inception_resnet_v2/batch_normalization_142/Const" [op=Const]; +"inception_resnet_v2/batch_normalization_142/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_142/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_142/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_142/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_142/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_142/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_142/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_resnet_v2/activation_142/Relu" [op=Relu]; +"inception_resnet_v2/conv2d_143/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_143/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_143/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/conv2d_143/Round" [op=Round]; +"inception_resnet_v2/conv2d_143/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_143/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_143/mul" [op=Mul]; +"inception_resnet_v2/conv2d_143/Conv2D" [op=Conv2D]; +"inception_resnet_v2/conv2d_140/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_140/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_140/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/conv2d_140/Round" [op=Round]; +"inception_resnet_v2/conv2d_140/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_140/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_140/mul" [op=Mul]; +"inception_resnet_v2/conv2d_140/Conv2D" [op=Conv2D]; +"inception_resnet_v2/batch_normalization_143/Const" [op=Const]; +"inception_resnet_v2/batch_normalization_143/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_143/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_143/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_143/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_143/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_143/FusedBatchNormV3/ReadVariableOp_1" 
[op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_143/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_resnet_v2/batch_normalization_140/Const" [op=Const]; +"inception_resnet_v2/batch_normalization_140/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_140/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_140/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_140/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_140/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_140/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_140/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_resnet_v2/activation_140/Relu" [op=Relu]; +"inception_resnet_v2/activation_143/Relu" [op=Relu]; +"inception_resnet_v2/block17_17_mixed/concat/axis" [op=Const]; +"inception_resnet_v2/block17_17_mixed/concat" [op=ConcatV2]; +"inception_resnet_v2/block17_17_conv/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/block17_17_conv/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/block17_17_conv/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/block17_17_conv/Round" [op=Round]; +"inception_resnet_v2/block17_17_conv/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/block17_17_conv/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/block17_17_conv/mul" [op=Mul]; +"inception_resnet_v2/block17_17_conv/Conv2D" [op=Conv2D]; +"inception_resnet_v2/block17_17_conv/BiasAdd/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/block17_17_conv/BiasAdd/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/block17_17_conv/BiasAdd" [op=BiasAdd]; +"inception_resnet_v2/custom_scale_layer_26/mul/y" [op=Const]; +"inception_resnet_v2/custom_scale_layer_26/mul" [op=Mul]; +"inception_resnet_v2/custom_scale_layer_26/add" [op=AddV2]; +"inception_resnet_v2/block17_17_ac/Relu" [op=Relu]; +"inception_resnet_v2/conv2d_145/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_145/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_145/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/conv2d_145/Round" [op=Round]; +"inception_resnet_v2/conv2d_145/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_145/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_145/mul" [op=Mul]; +"inception_resnet_v2/conv2d_145/Conv2D" [op=Conv2D]; +"inception_resnet_v2/batch_normalization_145/Const" [op=Const]; +"inception_resnet_v2/batch_normalization_145/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_145/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_145/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_145/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_145/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_145/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_145/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_resnet_v2/activation_145/Relu" [op=Relu]; +"inception_resnet_v2/conv2d_146/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; 
+"inception_resnet_v2/conv2d_146/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_146/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/conv2d_146/Round" [op=Round]; +"inception_resnet_v2/conv2d_146/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_146/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_146/mul" [op=Mul]; +"inception_resnet_v2/conv2d_146/Conv2D" [op=Conv2D]; +"inception_resnet_v2/batch_normalization_146/Const" [op=Const]; +"inception_resnet_v2/batch_normalization_146/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_146/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_146/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_146/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_146/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_146/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_146/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_resnet_v2/activation_146/Relu" [op=Relu]; +"inception_resnet_v2/conv2d_147/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_147/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_147/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/conv2d_147/Round" [op=Round]; +"inception_resnet_v2/conv2d_147/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_147/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_147/mul" [op=Mul]; +"inception_resnet_v2/conv2d_147/Conv2D" [op=Conv2D]; +"inception_resnet_v2/conv2d_144/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_144/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_144/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/conv2d_144/Round" [op=Round]; +"inception_resnet_v2/conv2d_144/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_144/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_144/mul" [op=Mul]; +"inception_resnet_v2/conv2d_144/Conv2D" [op=Conv2D]; +"inception_resnet_v2/batch_normalization_147/Const" [op=Const]; +"inception_resnet_v2/batch_normalization_147/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_147/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_147/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_147/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_147/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_147/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_147/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_resnet_v2/batch_normalization_144/Const" [op=Const]; +"inception_resnet_v2/batch_normalization_144/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_144/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_144/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_144/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_144/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; 
+"inception_resnet_v2/batch_normalization_144/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_144/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_resnet_v2/activation_144/Relu" [op=Relu]; +"inception_resnet_v2/activation_147/Relu" [op=Relu]; +"inception_resnet_v2/block17_18_mixed/concat/axis" [op=Const]; +"inception_resnet_v2/block17_18_mixed/concat" [op=ConcatV2]; +"inception_resnet_v2/block17_18_conv/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/block17_18_conv/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/block17_18_conv/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/block17_18_conv/Round" [op=Round]; +"inception_resnet_v2/block17_18_conv/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/block17_18_conv/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/block17_18_conv/mul" [op=Mul]; +"inception_resnet_v2/block17_18_conv/Conv2D" [op=Conv2D]; +"inception_resnet_v2/block17_18_conv/BiasAdd/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/block17_18_conv/BiasAdd/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/block17_18_conv/BiasAdd" [op=BiasAdd]; +"inception_resnet_v2/custom_scale_layer_27/mul/y" [op=Const]; +"inception_resnet_v2/custom_scale_layer_27/mul" [op=Mul]; +"inception_resnet_v2/custom_scale_layer_27/add" [op=AddV2]; +"inception_resnet_v2/block17_18_ac/Relu" [op=Relu]; +"inception_resnet_v2/conv2d_149/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_149/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_149/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/conv2d_149/Round" [op=Round]; +"inception_resnet_v2/conv2d_149/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_149/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_149/mul" [op=Mul]; +"inception_resnet_v2/conv2d_149/Conv2D" [op=Conv2D]; +"inception_resnet_v2/batch_normalization_149/Const" [op=Const]; +"inception_resnet_v2/batch_normalization_149/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_149/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_149/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_149/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_149/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_149/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_149/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_resnet_v2/activation_149/Relu" [op=Relu]; +"inception_resnet_v2/conv2d_150/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_150/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_150/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/conv2d_150/Round" [op=Round]; +"inception_resnet_v2/conv2d_150/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_150/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_150/mul" [op=Mul]; +"inception_resnet_v2/conv2d_150/Conv2D" [op=Conv2D]; +"inception_resnet_v2/batch_normalization_150/Const" [op=Const]; +"inception_resnet_v2/batch_normalization_150/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_150/ReadVariableOp" [op=ReadVariableOp]; 
+"inception_resnet_v2/batch_normalization_150/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_150/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_150/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_150/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_150/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_resnet_v2/activation_150/Relu" [op=Relu]; +"inception_resnet_v2/conv2d_151/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_151/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_151/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/conv2d_151/Round" [op=Round]; +"inception_resnet_v2/conv2d_151/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_151/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_151/mul" [op=Mul]; +"inception_resnet_v2/conv2d_151/Conv2D" [op=Conv2D]; +"inception_resnet_v2/conv2d_148/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_148/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_148/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/conv2d_148/Round" [op=Round]; +"inception_resnet_v2/conv2d_148/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_148/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_148/mul" [op=Mul]; +"inception_resnet_v2/conv2d_148/Conv2D" [op=Conv2D]; +"inception_resnet_v2/batch_normalization_151/Const" [op=Const]; +"inception_resnet_v2/batch_normalization_151/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_151/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_151/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_151/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_151/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_151/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_151/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_resnet_v2/batch_normalization_148/Const" [op=Const]; +"inception_resnet_v2/batch_normalization_148/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_148/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_148/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_148/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_148/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_148/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_148/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_resnet_v2/activation_148/Relu" [op=Relu]; +"inception_resnet_v2/activation_151/Relu" [op=Relu]; +"inception_resnet_v2/block17_19_mixed/concat/axis" [op=Const]; +"inception_resnet_v2/block17_19_mixed/concat" [op=ConcatV2]; +"inception_resnet_v2/block17_19_conv/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/block17_19_conv/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/block17_19_conv/Sigmoid" [op=Sigmoid]; 
+"inception_resnet_v2/block17_19_conv/Round" [op=Round]; +"inception_resnet_v2/block17_19_conv/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/block17_19_conv/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/block17_19_conv/mul" [op=Mul]; +"inception_resnet_v2/block17_19_conv/Conv2D" [op=Conv2D]; +"inception_resnet_v2/block17_19_conv/BiasAdd/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/block17_19_conv/BiasAdd/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/block17_19_conv/BiasAdd" [op=BiasAdd]; +"inception_resnet_v2/custom_scale_layer_28/mul/y" [op=Const]; +"inception_resnet_v2/custom_scale_layer_28/mul" [op=Mul]; +"inception_resnet_v2/custom_scale_layer_28/add" [op=AddV2]; +"inception_resnet_v2/block17_19_ac/Relu" [op=Relu]; +"inception_resnet_v2/conv2d_153/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_153/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_153/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/conv2d_153/Round" [op=Round]; +"inception_resnet_v2/conv2d_153/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_153/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_153/mul" [op=Mul]; +"inception_resnet_v2/conv2d_153/Conv2D" [op=Conv2D]; +"inception_resnet_v2/batch_normalization_153/Const" [op=Const]; +"inception_resnet_v2/batch_normalization_153/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_153/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_153/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_153/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_153/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_153/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_153/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_resnet_v2/activation_153/Relu" [op=Relu]; +"inception_resnet_v2/conv2d_154/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_154/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_154/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/conv2d_154/Round" [op=Round]; +"inception_resnet_v2/conv2d_154/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_154/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_154/mul" [op=Mul]; +"inception_resnet_v2/conv2d_154/Conv2D" [op=Conv2D]; +"inception_resnet_v2/batch_normalization_154/Const" [op=Const]; +"inception_resnet_v2/batch_normalization_154/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_154/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_154/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_154/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_154/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_154/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_154/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_resnet_v2/activation_154/Relu" [op=Relu]; +"inception_resnet_v2/conv2d_155/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; 
+"inception_resnet_v2/conv2d_155/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_155/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/conv2d_155/Round" [op=Round]; +"inception_resnet_v2/conv2d_155/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_155/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_155/mul" [op=Mul]; +"inception_resnet_v2/conv2d_155/Conv2D" [op=Conv2D]; +"inception_resnet_v2/conv2d_152/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_152/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_152/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/conv2d_152/Round" [op=Round]; +"inception_resnet_v2/conv2d_152/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_152/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_152/mul" [op=Mul]; +"inception_resnet_v2/conv2d_152/Conv2D" [op=Conv2D]; +"inception_resnet_v2/batch_normalization_155/Const" [op=Const]; +"inception_resnet_v2/batch_normalization_155/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_155/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_155/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_155/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_155/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_155/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_155/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_resnet_v2/batch_normalization_152/Const" [op=Const]; +"inception_resnet_v2/batch_normalization_152/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_152/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_152/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_152/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_152/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_152/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_152/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_resnet_v2/activation_152/Relu" [op=Relu]; +"inception_resnet_v2/activation_155/Relu" [op=Relu]; +"inception_resnet_v2/block17_20_mixed/concat/axis" [op=Const]; +"inception_resnet_v2/block17_20_mixed/concat" [op=ConcatV2]; +"inception_resnet_v2/block17_20_conv/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/block17_20_conv/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/block17_20_conv/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/block17_20_conv/Round" [op=Round]; +"inception_resnet_v2/block17_20_conv/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/block17_20_conv/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/block17_20_conv/mul" [op=Mul]; +"inception_resnet_v2/block17_20_conv/Conv2D" [op=Conv2D]; +"inception_resnet_v2/block17_20_conv/BiasAdd/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/block17_20_conv/BiasAdd/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/block17_20_conv/BiasAdd" [op=BiasAdd]; +"inception_resnet_v2/custom_scale_layer_29/mul/y" [op=Const]; 
+"inception_resnet_v2/custom_scale_layer_29/mul" [op=Mul]; +"inception_resnet_v2/custom_scale_layer_29/add" [op=AddV2]; +"inception_resnet_v2/block17_20_ac/Relu" [op=Relu]; +"inception_resnet_v2/conv2d_160/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_160/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_160/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/conv2d_160/Round" [op=Round]; +"inception_resnet_v2/conv2d_160/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_160/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_160/mul" [op=Mul]; +"inception_resnet_v2/conv2d_160/Conv2D" [op=Conv2D]; +"inception_resnet_v2/batch_normalization_160/Const" [op=Const]; +"inception_resnet_v2/batch_normalization_160/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_160/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_160/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_160/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_160/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_160/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_160/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_resnet_v2/activation_160/Relu" [op=Relu]; +"inception_resnet_v2/conv2d_161/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_161/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_161/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/conv2d_161/Round" [op=Round]; +"inception_resnet_v2/conv2d_161/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_161/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_161/mul" [op=Mul]; +"inception_resnet_v2/conv2d_161/Conv2D" [op=Conv2D]; +"inception_resnet_v2/conv2d_158/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_158/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_158/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/conv2d_158/Round" [op=Round]; +"inception_resnet_v2/conv2d_158/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_158/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_158/mul" [op=Mul]; +"inception_resnet_v2/conv2d_158/Conv2D" [op=Conv2D]; +"inception_resnet_v2/conv2d_156/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_156/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_156/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/conv2d_156/Round" [op=Round]; +"inception_resnet_v2/conv2d_156/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_156/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_156/mul" [op=Mul]; +"inception_resnet_v2/conv2d_156/Conv2D" [op=Conv2D]; +"inception_resnet_v2/batch_normalization_161/Const" [op=Const]; +"inception_resnet_v2/batch_normalization_161/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_161/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_161/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_161/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; 
+"inception_resnet_v2/batch_normalization_161/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_161/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_161/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_resnet_v2/batch_normalization_158/Const" [op=Const]; +"inception_resnet_v2/batch_normalization_158/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_158/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_158/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_158/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_158/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_158/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_158/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_resnet_v2/batch_normalization_156/Const" [op=Const]; +"inception_resnet_v2/batch_normalization_156/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_156/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_156/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_156/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_156/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_156/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_156/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_resnet_v2/activation_161/Relu" [op=Relu]; +"inception_resnet_v2/activation_158/Relu" [op=Relu]; +"inception_resnet_v2/activation_156/Relu" [op=Relu]; +"inception_resnet_v2/conv2d_162/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_162/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_162/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/conv2d_162/Round" [op=Round]; +"inception_resnet_v2/conv2d_162/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_162/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_162/mul" [op=Mul]; +"inception_resnet_v2/conv2d_162/Conv2D" [op=Conv2D]; +"inception_resnet_v2/conv2d_159/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_159/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_159/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/conv2d_159/Round" [op=Round]; +"inception_resnet_v2/conv2d_159/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_159/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_159/mul" [op=Mul]; +"inception_resnet_v2/conv2d_159/Conv2D" [op=Conv2D]; +"inception_resnet_v2/conv2d_157/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_157/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_157/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/conv2d_157/Round" [op=Round]; +"inception_resnet_v2/conv2d_157/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_157/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_157/mul" [op=Mul]; +"inception_resnet_v2/conv2d_157/Conv2D" [op=Conv2D]; +"inception_resnet_v2/batch_normalization_162/Const" 
[op=Const]; +"inception_resnet_v2/batch_normalization_162/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_162/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_162/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_162/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_162/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_162/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_162/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_resnet_v2/batch_normalization_159/Const" [op=Const]; +"inception_resnet_v2/batch_normalization_159/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_159/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_159/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_159/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_159/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_159/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_159/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_resnet_v2/batch_normalization_157/Const" [op=Const]; +"inception_resnet_v2/batch_normalization_157/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_157/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_157/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_157/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_157/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_157/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_157/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_resnet_v2/activation_157/Relu" [op=Relu]; +"inception_resnet_v2/activation_159/Relu" [op=Relu]; +"inception_resnet_v2/activation_162/Relu" [op=Relu]; +"inception_resnet_v2/max_pooling2d_3/MaxPool" [op=MaxPool]; +"inception_resnet_v2/mixed_7a/concat/axis" [op=Const]; +"inception_resnet_v2/mixed_7a/concat" [op=ConcatV2]; +"inception_resnet_v2/conv2d_164/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_164/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_164/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/conv2d_164/Round" [op=Round]; +"inception_resnet_v2/conv2d_164/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_164/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_164/mul" [op=Mul]; +"inception_resnet_v2/conv2d_164/Conv2D" [op=Conv2D]; +"inception_resnet_v2/batch_normalization_164/Const" [op=Const]; +"inception_resnet_v2/batch_normalization_164/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_164/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_164/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_164/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_164/FusedBatchNormV3/ReadVariableOp_1/resource" 
[op=Placeholder]; +"inception_resnet_v2/batch_normalization_164/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_164/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_resnet_v2/activation_164/Relu" [op=Relu]; +"inception_resnet_v2/conv2d_165/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_165/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_165/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/conv2d_165/Round" [op=Round]; +"inception_resnet_v2/conv2d_165/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_165/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_165/mul" [op=Mul]; +"inception_resnet_v2/conv2d_165/Conv2D" [op=Conv2D]; +"inception_resnet_v2/batch_normalization_165/Const" [op=Const]; +"inception_resnet_v2/batch_normalization_165/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_165/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_165/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_165/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_165/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_165/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_165/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_resnet_v2/activation_165/Relu" [op=Relu]; +"inception_resnet_v2/conv2d_166/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_166/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_166/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/conv2d_166/Round" [op=Round]; +"inception_resnet_v2/conv2d_166/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_166/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_166/mul" [op=Mul]; +"inception_resnet_v2/conv2d_166/Conv2D" [op=Conv2D]; +"inception_resnet_v2/conv2d_163/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_163/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_163/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/conv2d_163/Round" [op=Round]; +"inception_resnet_v2/conv2d_163/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_163/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_163/mul" [op=Mul]; +"inception_resnet_v2/conv2d_163/Conv2D" [op=Conv2D]; +"inception_resnet_v2/batch_normalization_166/Const" [op=Const]; +"inception_resnet_v2/batch_normalization_166/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_166/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_166/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_166/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_166/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_166/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_166/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_resnet_v2/batch_normalization_163/Const" [op=Const]; +"inception_resnet_v2/batch_normalization_163/ReadVariableOp/resource" [op=Placeholder]; 
+"inception_resnet_v2/batch_normalization_163/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_163/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_163/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_163/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_163/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_163/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_resnet_v2/activation_163/Relu" [op=Relu]; +"inception_resnet_v2/activation_166/Relu" [op=Relu]; +"inception_resnet_v2/block8_1_mixed/concat/axis" [op=Const]; +"inception_resnet_v2/block8_1_mixed/concat" [op=ConcatV2]; +"inception_resnet_v2/block8_1_conv/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/block8_1_conv/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/block8_1_conv/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/block8_1_conv/Round" [op=Round]; +"inception_resnet_v2/block8_1_conv/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/block8_1_conv/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/block8_1_conv/mul" [op=Mul]; +"inception_resnet_v2/block8_1_conv/Conv2D" [op=Conv2D]; +"inception_resnet_v2/block8_1_conv/BiasAdd/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/block8_1_conv/BiasAdd/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/block8_1_conv/BiasAdd" [op=BiasAdd]; +"inception_resnet_v2/custom_scale_layer_30/mul/y" [op=Const]; +"inception_resnet_v2/custom_scale_layer_30/mul" [op=Mul]; +"inception_resnet_v2/custom_scale_layer_30/add" [op=AddV2]; +"inception_resnet_v2/block8_1_ac/Relu" [op=Relu]; +"inception_resnet_v2/conv2d_168/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_168/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_168/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/conv2d_168/Round" [op=Round]; +"inception_resnet_v2/conv2d_168/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_168/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_168/mul" [op=Mul]; +"inception_resnet_v2/conv2d_168/Conv2D" [op=Conv2D]; +"inception_resnet_v2/batch_normalization_168/Const" [op=Const]; +"inception_resnet_v2/batch_normalization_168/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_168/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_168/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_168/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_168/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_168/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_168/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_resnet_v2/activation_168/Relu" [op=Relu]; +"inception_resnet_v2/conv2d_169/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_169/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_169/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/conv2d_169/Round" [op=Round]; +"inception_resnet_v2/conv2d_169/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_169/ReadVariableOp" [op=ReadVariableOp]; 
+"inception_resnet_v2/conv2d_169/mul" [op=Mul]; +"inception_resnet_v2/conv2d_169/Conv2D" [op=Conv2D]; +"inception_resnet_v2/batch_normalization_169/Const" [op=Const]; +"inception_resnet_v2/batch_normalization_169/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_169/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_169/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_169/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_169/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_169/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_169/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_resnet_v2/activation_169/Relu" [op=Relu]; +"inception_resnet_v2/conv2d_170/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_170/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_170/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/conv2d_170/Round" [op=Round]; +"inception_resnet_v2/conv2d_170/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_170/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_170/mul" [op=Mul]; +"inception_resnet_v2/conv2d_170/Conv2D" [op=Conv2D]; +"inception_resnet_v2/conv2d_167/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_167/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_167/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/conv2d_167/Round" [op=Round]; +"inception_resnet_v2/conv2d_167/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_167/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_167/mul" [op=Mul]; +"inception_resnet_v2/conv2d_167/Conv2D" [op=Conv2D]; +"inception_resnet_v2/batch_normalization_170/Const" [op=Const]; +"inception_resnet_v2/batch_normalization_170/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_170/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_170/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_170/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_170/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_170/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_170/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_resnet_v2/batch_normalization_167/Const" [op=Const]; +"inception_resnet_v2/batch_normalization_167/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_167/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_167/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_167/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_167/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_167/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_167/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_resnet_v2/activation_167/Relu" [op=Relu]; +"inception_resnet_v2/activation_170/Relu" [op=Relu]; 
+"inception_resnet_v2/block8_2_mixed/concat/axis" [op=Const]; +"inception_resnet_v2/block8_2_mixed/concat" [op=ConcatV2]; +"inception_resnet_v2/block8_2_conv/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/block8_2_conv/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/block8_2_conv/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/block8_2_conv/Round" [op=Round]; +"inception_resnet_v2/block8_2_conv/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/block8_2_conv/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/block8_2_conv/mul" [op=Mul]; +"inception_resnet_v2/block8_2_conv/Conv2D" [op=Conv2D]; +"inception_resnet_v2/block8_2_conv/BiasAdd/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/block8_2_conv/BiasAdd/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/block8_2_conv/BiasAdd" [op=BiasAdd]; +"inception_resnet_v2/custom_scale_layer_31/mul/y" [op=Const]; +"inception_resnet_v2/custom_scale_layer_31/mul" [op=Mul]; +"inception_resnet_v2/custom_scale_layer_31/add" [op=AddV2]; +"inception_resnet_v2/block8_2_ac/Relu" [op=Relu]; +"inception_resnet_v2/conv2d_172/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_172/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_172/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/conv2d_172/Round" [op=Round]; +"inception_resnet_v2/conv2d_172/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_172/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_172/mul" [op=Mul]; +"inception_resnet_v2/conv2d_172/Conv2D" [op=Conv2D]; +"inception_resnet_v2/batch_normalization_172/Const" [op=Const]; +"inception_resnet_v2/batch_normalization_172/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_172/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_172/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_172/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_172/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_172/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_172/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_resnet_v2/activation_172/Relu" [op=Relu]; +"inception_resnet_v2/conv2d_173/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_173/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_173/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/conv2d_173/Round" [op=Round]; +"inception_resnet_v2/conv2d_173/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_173/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_173/mul" [op=Mul]; +"inception_resnet_v2/conv2d_173/Conv2D" [op=Conv2D]; +"inception_resnet_v2/batch_normalization_173/Const" [op=Const]; +"inception_resnet_v2/batch_normalization_173/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_173/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_173/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_173/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_173/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; 
+"inception_resnet_v2/batch_normalization_173/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_173/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_resnet_v2/activation_173/Relu" [op=Relu]; +"inception_resnet_v2/conv2d_174/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_174/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_174/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/conv2d_174/Round" [op=Round]; +"inception_resnet_v2/conv2d_174/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_174/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_174/mul" [op=Mul]; +"inception_resnet_v2/conv2d_174/Conv2D" [op=Conv2D]; +"inception_resnet_v2/conv2d_171/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_171/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_171/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/conv2d_171/Round" [op=Round]; +"inception_resnet_v2/conv2d_171/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_171/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_171/mul" [op=Mul]; +"inception_resnet_v2/conv2d_171/Conv2D" [op=Conv2D]; +"inception_resnet_v2/batch_normalization_174/Const" [op=Const]; +"inception_resnet_v2/batch_normalization_174/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_174/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_174/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_174/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_174/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_174/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_174/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_resnet_v2/batch_normalization_171/Const" [op=Const]; +"inception_resnet_v2/batch_normalization_171/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_171/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_171/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_171/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_171/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_171/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_171/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_resnet_v2/activation_171/Relu" [op=Relu]; +"inception_resnet_v2/activation_174/Relu" [op=Relu]; +"inception_resnet_v2/block8_3_mixed/concat/axis" [op=Const]; +"inception_resnet_v2/block8_3_mixed/concat" [op=ConcatV2]; +"inception_resnet_v2/block8_3_conv/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/block8_3_conv/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/block8_3_conv/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/block8_3_conv/Round" [op=Round]; +"inception_resnet_v2/block8_3_conv/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/block8_3_conv/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/block8_3_conv/mul" [op=Mul]; +"inception_resnet_v2/block8_3_conv/Conv2D" [op=Conv2D]; 
+"inception_resnet_v2/block8_3_conv/BiasAdd/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/block8_3_conv/BiasAdd/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/block8_3_conv/BiasAdd" [op=BiasAdd]; +"inception_resnet_v2/custom_scale_layer_32/mul/y" [op=Const]; +"inception_resnet_v2/custom_scale_layer_32/mul" [op=Mul]; +"inception_resnet_v2/custom_scale_layer_32/add" [op=AddV2]; +"inception_resnet_v2/block8_3_ac/Relu" [op=Relu]; +"inception_resnet_v2/conv2d_176/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_176/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_176/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/conv2d_176/Round" [op=Round]; +"inception_resnet_v2/conv2d_176/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_176/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_176/mul" [op=Mul]; +"inception_resnet_v2/conv2d_176/Conv2D" [op=Conv2D]; +"inception_resnet_v2/batch_normalization_176/Const" [op=Const]; +"inception_resnet_v2/batch_normalization_176/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_176/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_176/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_176/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_176/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_176/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_176/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_resnet_v2/activation_176/Relu" [op=Relu]; +"inception_resnet_v2/conv2d_177/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_177/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_177/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/conv2d_177/Round" [op=Round]; +"inception_resnet_v2/conv2d_177/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_177/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_177/mul" [op=Mul]; +"inception_resnet_v2/conv2d_177/Conv2D" [op=Conv2D]; +"inception_resnet_v2/batch_normalization_177/Const" [op=Const]; +"inception_resnet_v2/batch_normalization_177/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_177/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_177/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_177/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_177/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_177/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_177/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_resnet_v2/activation_177/Relu" [op=Relu]; +"inception_resnet_v2/conv2d_178/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_178/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_178/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/conv2d_178/Round" [op=Round]; +"inception_resnet_v2/conv2d_178/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_178/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_178/mul" [op=Mul]; 
+"inception_resnet_v2/conv2d_178/Conv2D" [op=Conv2D]; +"inception_resnet_v2/conv2d_175/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_175/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_175/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/conv2d_175/Round" [op=Round]; +"inception_resnet_v2/conv2d_175/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_175/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_175/mul" [op=Mul]; +"inception_resnet_v2/conv2d_175/Conv2D" [op=Conv2D]; +"inception_resnet_v2/batch_normalization_178/Const" [op=Const]; +"inception_resnet_v2/batch_normalization_178/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_178/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_178/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_178/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_178/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_178/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_178/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_resnet_v2/batch_normalization_175/Const" [op=Const]; +"inception_resnet_v2/batch_normalization_175/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_175/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_175/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_175/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_175/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_175/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_175/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_resnet_v2/activation_175/Relu" [op=Relu]; +"inception_resnet_v2/activation_178/Relu" [op=Relu]; +"inception_resnet_v2/block8_4_mixed/concat/axis" [op=Const]; +"inception_resnet_v2/block8_4_mixed/concat" [op=ConcatV2]; +"inception_resnet_v2/block8_4_conv/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/block8_4_conv/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/block8_4_conv/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/block8_4_conv/Round" [op=Round]; +"inception_resnet_v2/block8_4_conv/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/block8_4_conv/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/block8_4_conv/mul" [op=Mul]; +"inception_resnet_v2/block8_4_conv/Conv2D" [op=Conv2D]; +"inception_resnet_v2/block8_4_conv/BiasAdd/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/block8_4_conv/BiasAdd/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/block8_4_conv/BiasAdd" [op=BiasAdd]; +"inception_resnet_v2/custom_scale_layer_33/mul/y" [op=Const]; +"inception_resnet_v2/custom_scale_layer_33/mul" [op=Mul]; +"inception_resnet_v2/custom_scale_layer_33/add" [op=AddV2]; +"inception_resnet_v2/block8_4_ac/Relu" [op=Relu]; +"inception_resnet_v2/conv2d_180/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_180/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_180/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/conv2d_180/Round" [op=Round]; 
+"inception_resnet_v2/conv2d_180/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_180/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_180/mul" [op=Mul]; +"inception_resnet_v2/conv2d_180/Conv2D" [op=Conv2D]; +"inception_resnet_v2/batch_normalization_180/Const" [op=Const]; +"inception_resnet_v2/batch_normalization_180/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_180/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_180/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_180/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_180/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_180/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_180/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_resnet_v2/activation_180/Relu" [op=Relu]; +"inception_resnet_v2/conv2d_181/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_181/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_181/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/conv2d_181/Round" [op=Round]; +"inception_resnet_v2/conv2d_181/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_181/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_181/mul" [op=Mul]; +"inception_resnet_v2/conv2d_181/Conv2D" [op=Conv2D]; +"inception_resnet_v2/batch_normalization_181/Const" [op=Const]; +"inception_resnet_v2/batch_normalization_181/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_181/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_181/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_181/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_181/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_181/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_181/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_resnet_v2/activation_181/Relu" [op=Relu]; +"inception_resnet_v2/conv2d_182/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_182/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_182/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/conv2d_182/Round" [op=Round]; +"inception_resnet_v2/conv2d_182/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_182/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_182/mul" [op=Mul]; +"inception_resnet_v2/conv2d_182/Conv2D" [op=Conv2D]; +"inception_resnet_v2/conv2d_179/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_179/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_179/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/conv2d_179/Round" [op=Round]; +"inception_resnet_v2/conv2d_179/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_179/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_179/mul" [op=Mul]; +"inception_resnet_v2/conv2d_179/Conv2D" [op=Conv2D]; +"inception_resnet_v2/batch_normalization_182/Const" [op=Const]; +"inception_resnet_v2/batch_normalization_182/ReadVariableOp/resource" [op=Placeholder]; 
+"inception_resnet_v2/batch_normalization_182/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_182/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_182/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_182/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_182/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_182/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_resnet_v2/batch_normalization_179/Const" [op=Const]; +"inception_resnet_v2/batch_normalization_179/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_179/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_179/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_179/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_179/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_179/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_179/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_resnet_v2/activation_179/Relu" [op=Relu]; +"inception_resnet_v2/activation_182/Relu" [op=Relu]; +"inception_resnet_v2/block8_5_mixed/concat/axis" [op=Const]; +"inception_resnet_v2/block8_5_mixed/concat" [op=ConcatV2]; +"inception_resnet_v2/block8_5_conv/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/block8_5_conv/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/block8_5_conv/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/block8_5_conv/Round" [op=Round]; +"inception_resnet_v2/block8_5_conv/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/block8_5_conv/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/block8_5_conv/mul" [op=Mul]; +"inception_resnet_v2/block8_5_conv/Conv2D" [op=Conv2D]; +"inception_resnet_v2/block8_5_conv/BiasAdd/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/block8_5_conv/BiasAdd/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/block8_5_conv/BiasAdd" [op=BiasAdd]; +"inception_resnet_v2/custom_scale_layer_34/mul/y" [op=Const]; +"inception_resnet_v2/custom_scale_layer_34/mul" [op=Mul]; +"inception_resnet_v2/custom_scale_layer_34/add" [op=AddV2]; +"inception_resnet_v2/block8_5_ac/Relu" [op=Relu]; +"inception_resnet_v2/conv2d_184/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_184/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_184/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/conv2d_184/Round" [op=Round]; +"inception_resnet_v2/conv2d_184/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_184/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_184/mul" [op=Mul]; +"inception_resnet_v2/conv2d_184/Conv2D" [op=Conv2D]; +"inception_resnet_v2/batch_normalization_184/Const" [op=Const]; +"inception_resnet_v2/batch_normalization_184/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_184/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_184/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_184/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; 
+"inception_resnet_v2/batch_normalization_184/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_184/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_184/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_resnet_v2/activation_184/Relu" [op=Relu]; +"inception_resnet_v2/conv2d_185/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_185/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_185/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/conv2d_185/Round" [op=Round]; +"inception_resnet_v2/conv2d_185/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_185/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_185/mul" [op=Mul]; +"inception_resnet_v2/conv2d_185/Conv2D" [op=Conv2D]; +"inception_resnet_v2/batch_normalization_185/Const" [op=Const]; +"inception_resnet_v2/batch_normalization_185/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_185/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_185/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_185/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_185/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_185/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_185/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_resnet_v2/activation_185/Relu" [op=Relu]; +"inception_resnet_v2/conv2d_186/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_186/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_186/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/conv2d_186/Round" [op=Round]; +"inception_resnet_v2/conv2d_186/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_186/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_186/mul" [op=Mul]; +"inception_resnet_v2/conv2d_186/Conv2D" [op=Conv2D]; +"inception_resnet_v2/conv2d_183/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_183/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_183/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/conv2d_183/Round" [op=Round]; +"inception_resnet_v2/conv2d_183/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_183/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_183/mul" [op=Mul]; +"inception_resnet_v2/conv2d_183/Conv2D" [op=Conv2D]; +"inception_resnet_v2/batch_normalization_186/Const" [op=Const]; +"inception_resnet_v2/batch_normalization_186/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_186/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_186/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_186/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_186/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_186/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_186/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_resnet_v2/batch_normalization_183/Const" [op=Const]; 
+"inception_resnet_v2/batch_normalization_183/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_183/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_183/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_183/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_183/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_183/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_183/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_resnet_v2/activation_183/Relu" [op=Relu]; +"inception_resnet_v2/activation_186/Relu" [op=Relu]; +"inception_resnet_v2/block8_6_mixed/concat/axis" [op=Const]; +"inception_resnet_v2/block8_6_mixed/concat" [op=ConcatV2]; +"inception_resnet_v2/block8_6_conv/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/block8_6_conv/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/block8_6_conv/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/block8_6_conv/Round" [op=Round]; +"inception_resnet_v2/block8_6_conv/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/block8_6_conv/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/block8_6_conv/mul" [op=Mul]; +"inception_resnet_v2/block8_6_conv/Conv2D" [op=Conv2D]; +"inception_resnet_v2/block8_6_conv/BiasAdd/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/block8_6_conv/BiasAdd/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/block8_6_conv/BiasAdd" [op=BiasAdd]; +"inception_resnet_v2/custom_scale_layer_35/mul/y" [op=Const]; +"inception_resnet_v2/custom_scale_layer_35/mul" [op=Mul]; +"inception_resnet_v2/custom_scale_layer_35/add" [op=AddV2]; +"inception_resnet_v2/block8_6_ac/Relu" [op=Relu]; +"inception_resnet_v2/conv2d_188/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_188/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_188/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/conv2d_188/Round" [op=Round]; +"inception_resnet_v2/conv2d_188/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_188/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_188/mul" [op=Mul]; +"inception_resnet_v2/conv2d_188/Conv2D" [op=Conv2D]; +"inception_resnet_v2/batch_normalization_188/Const" [op=Const]; +"inception_resnet_v2/batch_normalization_188/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_188/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_188/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_188/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_188/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_188/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_188/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_resnet_v2/activation_188/Relu" [op=Relu]; +"inception_resnet_v2/conv2d_189/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_189/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_189/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/conv2d_189/Round" [op=Round]; +"inception_resnet_v2/conv2d_189/ReadVariableOp/resource" 
[op=Placeholder]; +"inception_resnet_v2/conv2d_189/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_189/mul" [op=Mul]; +"inception_resnet_v2/conv2d_189/Conv2D" [op=Conv2D]; +"inception_resnet_v2/batch_normalization_189/Const" [op=Const]; +"inception_resnet_v2/batch_normalization_189/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_189/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_189/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_189/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_189/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_189/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_189/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_resnet_v2/activation_189/Relu" [op=Relu]; +"inception_resnet_v2/conv2d_190/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_190/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_190/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/conv2d_190/Round" [op=Round]; +"inception_resnet_v2/conv2d_190/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_190/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_190/mul" [op=Mul]; +"inception_resnet_v2/conv2d_190/Conv2D" [op=Conv2D]; +"inception_resnet_v2/conv2d_187/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_187/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_187/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/conv2d_187/Round" [op=Round]; +"inception_resnet_v2/conv2d_187/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_187/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_187/mul" [op=Mul]; +"inception_resnet_v2/conv2d_187/Conv2D" [op=Conv2D]; +"inception_resnet_v2/batch_normalization_190/Const" [op=Const]; +"inception_resnet_v2/batch_normalization_190/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_190/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_190/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_190/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_190/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_190/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_190/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_resnet_v2/batch_normalization_187/Const" [op=Const]; +"inception_resnet_v2/batch_normalization_187/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_187/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_187/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_187/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_187/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_187/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_187/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_resnet_v2/activation_187/Relu" [op=Relu]; 
+"inception_resnet_v2/activation_190/Relu" [op=Relu]; +"inception_resnet_v2/block8_7_mixed/concat/axis" [op=Const]; +"inception_resnet_v2/block8_7_mixed/concat" [op=ConcatV2]; +"inception_resnet_v2/block8_7_conv/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/block8_7_conv/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/block8_7_conv/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/block8_7_conv/Round" [op=Round]; +"inception_resnet_v2/block8_7_conv/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/block8_7_conv/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/block8_7_conv/mul" [op=Mul]; +"inception_resnet_v2/block8_7_conv/Conv2D" [op=Conv2D]; +"inception_resnet_v2/block8_7_conv/BiasAdd/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/block8_7_conv/BiasAdd/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/block8_7_conv/BiasAdd" [op=BiasAdd]; +"inception_resnet_v2/custom_scale_layer_36/mul/y" [op=Const]; +"inception_resnet_v2/custom_scale_layer_36/mul" [op=Mul]; +"inception_resnet_v2/custom_scale_layer_36/add" [op=AddV2]; +"inception_resnet_v2/block8_7_ac/Relu" [op=Relu]; +"inception_resnet_v2/conv2d_192/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_192/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_192/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/conv2d_192/Round" [op=Round]; +"inception_resnet_v2/conv2d_192/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_192/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_192/mul" [op=Mul]; +"inception_resnet_v2/conv2d_192/Conv2D" [op=Conv2D]; +"inception_resnet_v2/batch_normalization_192/Const" [op=Const]; +"inception_resnet_v2/batch_normalization_192/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_192/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_192/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_192/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_192/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_192/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_192/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_resnet_v2/activation_192/Relu" [op=Relu]; +"inception_resnet_v2/conv2d_193/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_193/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_193/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/conv2d_193/Round" [op=Round]; +"inception_resnet_v2/conv2d_193/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_193/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_193/mul" [op=Mul]; +"inception_resnet_v2/conv2d_193/Conv2D" [op=Conv2D]; +"inception_resnet_v2/batch_normalization_193/Const" [op=Const]; +"inception_resnet_v2/batch_normalization_193/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_193/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_193/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_193/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_193/FusedBatchNormV3/ReadVariableOp_1/resource" 
[op=Placeholder]; +"inception_resnet_v2/batch_normalization_193/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_193/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_resnet_v2/activation_193/Relu" [op=Relu]; +"inception_resnet_v2/conv2d_194/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_194/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_194/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/conv2d_194/Round" [op=Round]; +"inception_resnet_v2/conv2d_194/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_194/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_194/mul" [op=Mul]; +"inception_resnet_v2/conv2d_194/Conv2D" [op=Conv2D]; +"inception_resnet_v2/conv2d_191/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_191/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_191/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/conv2d_191/Round" [op=Round]; +"inception_resnet_v2/conv2d_191/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_191/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_191/mul" [op=Mul]; +"inception_resnet_v2/conv2d_191/Conv2D" [op=Conv2D]; +"inception_resnet_v2/batch_normalization_194/Const" [op=Const]; +"inception_resnet_v2/batch_normalization_194/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_194/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_194/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_194/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_194/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_194/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_194/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_resnet_v2/batch_normalization_191/Const" [op=Const]; +"inception_resnet_v2/batch_normalization_191/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_191/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_191/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_191/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_191/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_191/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_191/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_resnet_v2/activation_191/Relu" [op=Relu]; +"inception_resnet_v2/activation_194/Relu" [op=Relu]; +"inception_resnet_v2/block8_8_mixed/concat/axis" [op=Const]; +"inception_resnet_v2/block8_8_mixed/concat" [op=ConcatV2]; +"inception_resnet_v2/block8_8_conv/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/block8_8_conv/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/block8_8_conv/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/block8_8_conv/Round" [op=Round]; +"inception_resnet_v2/block8_8_conv/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/block8_8_conv/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/block8_8_conv/mul" [op=Mul]; +"inception_resnet_v2/block8_8_conv/Conv2D" [op=Conv2D]; 
+"inception_resnet_v2/block8_8_conv/BiasAdd/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/block8_8_conv/BiasAdd/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/block8_8_conv/BiasAdd" [op=BiasAdd]; +"inception_resnet_v2/custom_scale_layer_37/mul/y" [op=Const]; +"inception_resnet_v2/custom_scale_layer_37/mul" [op=Mul]; +"inception_resnet_v2/custom_scale_layer_37/add" [op=AddV2]; +"inception_resnet_v2/block8_8_ac/Relu" [op=Relu]; +"inception_resnet_v2/conv2d_196/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_196/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_196/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/conv2d_196/Round" [op=Round]; +"inception_resnet_v2/conv2d_196/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_196/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_196/mul" [op=Mul]; +"inception_resnet_v2/conv2d_196/Conv2D" [op=Conv2D]; +"inception_resnet_v2/batch_normalization_196/Const" [op=Const]; +"inception_resnet_v2/batch_normalization_196/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_196/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_196/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_196/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_196/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_196/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_196/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_resnet_v2/activation_196/Relu" [op=Relu]; +"inception_resnet_v2/conv2d_197/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_197/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_197/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/conv2d_197/Round" [op=Round]; +"inception_resnet_v2/conv2d_197/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_197/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_197/mul" [op=Mul]; +"inception_resnet_v2/conv2d_197/Conv2D" [op=Conv2D]; +"inception_resnet_v2/batch_normalization_197/Const" [op=Const]; +"inception_resnet_v2/batch_normalization_197/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_197/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_197/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_197/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_197/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_197/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_197/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_resnet_v2/activation_197/Relu" [op=Relu]; +"inception_resnet_v2/conv2d_198/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_198/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_198/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/conv2d_198/Round" [op=Round]; +"inception_resnet_v2/conv2d_198/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_198/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_198/mul" [op=Mul]; 
+"inception_resnet_v2/conv2d_198/Conv2D" [op=Conv2D]; +"inception_resnet_v2/conv2d_195/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_195/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_195/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/conv2d_195/Round" [op=Round]; +"inception_resnet_v2/conv2d_195/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_195/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_195/mul" [op=Mul]; +"inception_resnet_v2/conv2d_195/Conv2D" [op=Conv2D]; +"inception_resnet_v2/batch_normalization_198/Const" [op=Const]; +"inception_resnet_v2/batch_normalization_198/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_198/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_198/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_198/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_198/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_198/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_198/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_resnet_v2/batch_normalization_195/Const" [op=Const]; +"inception_resnet_v2/batch_normalization_195/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_195/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_195/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_195/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_195/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_195/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_195/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_resnet_v2/activation_195/Relu" [op=Relu]; +"inception_resnet_v2/activation_198/Relu" [op=Relu]; +"inception_resnet_v2/block8_9_mixed/concat/axis" [op=Const]; +"inception_resnet_v2/block8_9_mixed/concat" [op=ConcatV2]; +"inception_resnet_v2/block8_9_conv/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/block8_9_conv/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/block8_9_conv/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/block8_9_conv/Round" [op=Round]; +"inception_resnet_v2/block8_9_conv/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/block8_9_conv/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/block8_9_conv/mul" [op=Mul]; +"inception_resnet_v2/block8_9_conv/Conv2D" [op=Conv2D]; +"inception_resnet_v2/block8_9_conv/BiasAdd/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/block8_9_conv/BiasAdd/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/block8_9_conv/BiasAdd" [op=BiasAdd]; +"inception_resnet_v2/custom_scale_layer_38/mul/y" [op=Const]; +"inception_resnet_v2/custom_scale_layer_38/mul" [op=Mul]; +"inception_resnet_v2/custom_scale_layer_38/add" [op=AddV2]; +"inception_resnet_v2/block8_9_ac/Relu" [op=Relu]; +"inception_resnet_v2/conv2d_200/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_200/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_200/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/conv2d_200/Round" [op=Round]; 
+"inception_resnet_v2/conv2d_200/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_200/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_200/mul" [op=Mul]; +"inception_resnet_v2/conv2d_200/Conv2D" [op=Conv2D]; +"inception_resnet_v2/batch_normalization_200/Const" [op=Const]; +"inception_resnet_v2/batch_normalization_200/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_200/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_200/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_200/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_200/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_200/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_200/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_resnet_v2/activation_200/Relu" [op=Relu]; +"inception_resnet_v2/conv2d_201/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_201/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_201/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/conv2d_201/Round" [op=Round]; +"inception_resnet_v2/conv2d_201/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_201/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_201/mul" [op=Mul]; +"inception_resnet_v2/conv2d_201/Conv2D" [op=Conv2D]; +"inception_resnet_v2/batch_normalization_201/Const" [op=Const]; +"inception_resnet_v2/batch_normalization_201/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_201/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_201/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_201/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_201/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_201/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_201/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_resnet_v2/activation_201/Relu" [op=Relu]; +"inception_resnet_v2/conv2d_202/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_202/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_202/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/conv2d_202/Round" [op=Round]; +"inception_resnet_v2/conv2d_202/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_202/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_202/mul" [op=Mul]; +"inception_resnet_v2/conv2d_202/Conv2D" [op=Conv2D]; +"inception_resnet_v2/conv2d_199/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_199/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_199/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/conv2d_199/Round" [op=Round]; +"inception_resnet_v2/conv2d_199/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv2d_199/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv2d_199/mul" [op=Mul]; +"inception_resnet_v2/conv2d_199/Conv2D" [op=Conv2D]; +"inception_resnet_v2/batch_normalization_202/Const" [op=Const]; +"inception_resnet_v2/batch_normalization_202/ReadVariableOp/resource" [op=Placeholder]; 
+"inception_resnet_v2/batch_normalization_202/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_202/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_202/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_202/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_202/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_202/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_resnet_v2/batch_normalization_199/Const" [op=Const]; +"inception_resnet_v2/batch_normalization_199/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_199/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_199/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_199/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_199/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_resnet_v2/batch_normalization_199/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_resnet_v2/batch_normalization_199/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_resnet_v2/activation_199/Relu" [op=Relu]; +"inception_resnet_v2/activation_202/Relu" [op=Relu]; +"inception_resnet_v2/block8_10_mixed/concat/axis" [op=Const]; +"inception_resnet_v2/block8_10_mixed/concat" [op=ConcatV2]; +"inception_resnet_v2/block8_10_conv/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/block8_10_conv/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/block8_10_conv/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/block8_10_conv/Round" [op=Round]; +"inception_resnet_v2/block8_10_conv/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/block8_10_conv/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/block8_10_conv/mul" [op=Mul]; +"inception_resnet_v2/block8_10_conv/Conv2D" [op=Conv2D]; +"inception_resnet_v2/block8_10_conv/BiasAdd/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/block8_10_conv/BiasAdd/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/block8_10_conv/BiasAdd" [op=BiasAdd]; +"inception_resnet_v2/custom_scale_layer_39/mul/y" [op=Const]; +"inception_resnet_v2/custom_scale_layer_39/mul" [op=Mul]; +"inception_resnet_v2/custom_scale_layer_39/add" [op=AddV2]; +"inception_resnet_v2/conv_7b/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv_7b/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv_7b/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/conv_7b/Round" [op=Round]; +"inception_resnet_v2/conv_7b/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv_7b/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv_7b/mul" [op=Mul]; +"inception_resnet_v2/conv_7b/Conv2D" [op=Conv2D]; +"inception_resnet_v2/conv_7b_bn/Const" [op=Const]; +"inception_resnet_v2/conv_7b_bn/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv_7b_bn/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv_7b_bn/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/conv_7b_bn/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/conv_7b_bn/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; 
+"inception_resnet_v2/conv_7b_bn/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_resnet_v2/conv_7b_bn/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_resnet_v2/conv_7b_ac/Relu" [op=Relu]; +"inception_resnet_v2/avg_pool/Mean/reduction_indices" [op=Const]; +"inception_resnet_v2/avg_pool/Mean" [op=Mean]; +"inception_resnet_v2/predictions/Sigmoid/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/predictions/Sigmoid/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/predictions/Sigmoid" [op=Sigmoid]; +"inception_resnet_v2/predictions/Round" [op=Round]; +"inception_resnet_v2/predictions/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/predictions/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/predictions/mul" [op=Mul]; +"inception_resnet_v2/predictions/MatMul" [op=MatMul]; +"inception_resnet_v2/predictions/BiasAdd/ReadVariableOp/resource" [op=Placeholder]; +"inception_resnet_v2/predictions/BiasAdd/ReadVariableOp" [op=ReadVariableOp]; +"inception_resnet_v2/predictions/BiasAdd" [op=BiasAdd]; +"inception_resnet_v2/predictions/Softmax" [op=Softmax]; +Identity [op=Identity]; +args_0 -> "inception_resnet_v2/conv2d/Conv2D"; +"inception_resnet_v2/conv2d/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/conv2d/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/conv2d/Sigmoid"; +"inception_resnet_v2/conv2d/Sigmoid" -> "inception_resnet_v2/conv2d/Round"; +"inception_resnet_v2/conv2d/Round" -> "inception_resnet_v2/conv2d/mul"; +"inception_resnet_v2/conv2d/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d/ReadVariableOp"; +"inception_resnet_v2/conv2d/ReadVariableOp" -> "inception_resnet_v2/conv2d/mul"; +"inception_resnet_v2/conv2d/mul" -> "inception_resnet_v2/conv2d/Conv2D"; +"inception_resnet_v2/conv2d/Conv2D" -> "inception_resnet_v2/batch_normalization/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization/Const" -> "inception_resnet_v2/batch_normalization/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization/ReadVariableOp"; +"inception_resnet_v2/batch_normalization/ReadVariableOp" -> "inception_resnet_v2/batch_normalization/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization/FusedBatchNormV3/ReadVariableOp"; +"inception_resnet_v2/batch_normalization/FusedBatchNormV3/ReadVariableOp" -> "inception_resnet_v2/batch_normalization/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_resnet_v2/batch_normalization/FusedBatchNormV3/ReadVariableOp_1"; +"inception_resnet_v2/batch_normalization/FusedBatchNormV3/ReadVariableOp_1" -> "inception_resnet_v2/batch_normalization/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization/FusedBatchNormV3" -> "inception_resnet_v2/activation/Relu"; +"inception_resnet_v2/activation/Relu" -> "inception_resnet_v2/conv2d_1/Conv2D"; +"inception_resnet_v2/conv2d_1/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_1/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/conv2d_1/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/conv2d_1/Sigmoid"; +"inception_resnet_v2/conv2d_1/Sigmoid" -> "inception_resnet_v2/conv2d_1/Round"; +"inception_resnet_v2/conv2d_1/Round" -> "inception_resnet_v2/conv2d_1/mul"; +"inception_resnet_v2/conv2d_1/ReadVariableOp/resource" -> 
"inception_resnet_v2/conv2d_1/ReadVariableOp"; +"inception_resnet_v2/conv2d_1/ReadVariableOp" -> "inception_resnet_v2/conv2d_1/mul"; +"inception_resnet_v2/conv2d_1/mul" -> "inception_resnet_v2/conv2d_1/Conv2D"; +"inception_resnet_v2/conv2d_1/Conv2D" -> "inception_resnet_v2/batch_normalization_1/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_1/Const" -> "inception_resnet_v2/batch_normalization_1/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_1/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_1/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_1/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_1/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_1/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_1/FusedBatchNormV3/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_1/FusedBatchNormV3/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_1/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_1/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_resnet_v2/batch_normalization_1/FusedBatchNormV3/ReadVariableOp_1"; +"inception_resnet_v2/batch_normalization_1/FusedBatchNormV3/ReadVariableOp_1" -> "inception_resnet_v2/batch_normalization_1/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_1/FusedBatchNormV3" -> "inception_resnet_v2/activation_1/Relu"; +"inception_resnet_v2/activation_1/Relu" -> "inception_resnet_v2/conv2d_2/Conv2D"; +"inception_resnet_v2/conv2d_2/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_2/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/conv2d_2/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/conv2d_2/Sigmoid"; +"inception_resnet_v2/conv2d_2/Sigmoid" -> "inception_resnet_v2/conv2d_2/Round"; +"inception_resnet_v2/conv2d_2/Round" -> "inception_resnet_v2/conv2d_2/mul"; +"inception_resnet_v2/conv2d_2/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_2/ReadVariableOp"; +"inception_resnet_v2/conv2d_2/ReadVariableOp" -> "inception_resnet_v2/conv2d_2/mul"; +"inception_resnet_v2/conv2d_2/mul" -> "inception_resnet_v2/conv2d_2/Conv2D"; +"inception_resnet_v2/conv2d_2/Conv2D" -> "inception_resnet_v2/batch_normalization_2/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_2/Const" -> "inception_resnet_v2/batch_normalization_2/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_2/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_2/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_2/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_2/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_2/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_2/FusedBatchNormV3/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_2/FusedBatchNormV3/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_2/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_2/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_resnet_v2/batch_normalization_2/FusedBatchNormV3/ReadVariableOp_1"; +"inception_resnet_v2/batch_normalization_2/FusedBatchNormV3/ReadVariableOp_1" -> "inception_resnet_v2/batch_normalization_2/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_2/FusedBatchNormV3" -> "inception_resnet_v2/activation_2/Relu"; +"inception_resnet_v2/activation_2/Relu" -> "inception_resnet_v2/max_pooling2d/MaxPool"; +"inception_resnet_v2/max_pooling2d/MaxPool" -> "inception_resnet_v2/conv2d_3/Conv2D"; 
+"inception_resnet_v2/conv2d_3/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_3/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/conv2d_3/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/conv2d_3/Sigmoid"; +"inception_resnet_v2/conv2d_3/Sigmoid" -> "inception_resnet_v2/conv2d_3/Round"; +"inception_resnet_v2/conv2d_3/Round" -> "inception_resnet_v2/conv2d_3/mul"; +"inception_resnet_v2/conv2d_3/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_3/ReadVariableOp"; +"inception_resnet_v2/conv2d_3/ReadVariableOp" -> "inception_resnet_v2/conv2d_3/mul"; +"inception_resnet_v2/conv2d_3/mul" -> "inception_resnet_v2/conv2d_3/Conv2D"; +"inception_resnet_v2/conv2d_3/Conv2D" -> "inception_resnet_v2/batch_normalization_3/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_3/Const" -> "inception_resnet_v2/batch_normalization_3/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_3/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_3/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_3/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_3/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_3/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_3/FusedBatchNormV3/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_3/FusedBatchNormV3/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_3/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_3/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_resnet_v2/batch_normalization_3/FusedBatchNormV3/ReadVariableOp_1"; +"inception_resnet_v2/batch_normalization_3/FusedBatchNormV3/ReadVariableOp_1" -> "inception_resnet_v2/batch_normalization_3/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_3/FusedBatchNormV3" -> "inception_resnet_v2/activation_3/Relu"; +"inception_resnet_v2/activation_3/Relu" -> "inception_resnet_v2/conv2d_4/Conv2D"; +"inception_resnet_v2/conv2d_4/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_4/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/conv2d_4/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/conv2d_4/Sigmoid"; +"inception_resnet_v2/conv2d_4/Sigmoid" -> "inception_resnet_v2/conv2d_4/Round"; +"inception_resnet_v2/conv2d_4/Round" -> "inception_resnet_v2/conv2d_4/mul"; +"inception_resnet_v2/conv2d_4/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_4/ReadVariableOp"; +"inception_resnet_v2/conv2d_4/ReadVariableOp" -> "inception_resnet_v2/conv2d_4/mul"; +"inception_resnet_v2/conv2d_4/mul" -> "inception_resnet_v2/conv2d_4/Conv2D"; +"inception_resnet_v2/conv2d_4/Conv2D" -> "inception_resnet_v2/batch_normalization_4/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_4/Const" -> "inception_resnet_v2/batch_normalization_4/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_4/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_4/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_4/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_4/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_4/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_4/FusedBatchNormV3/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_4/FusedBatchNormV3/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_4/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_4/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_resnet_v2/batch_normalization_4/FusedBatchNormV3/ReadVariableOp_1"; 
+"inception_resnet_v2/batch_normalization_4/FusedBatchNormV3/ReadVariableOp_1" -> "inception_resnet_v2/batch_normalization_4/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_4/FusedBatchNormV3" -> "inception_resnet_v2/activation_4/Relu"; +"inception_resnet_v2/activation_4/Relu" -> "inception_resnet_v2/max_pooling2d_1/MaxPool"; +"inception_resnet_v2/max_pooling2d_1/MaxPool" -> "inception_resnet_v2/conv2d_8/Conv2D"; +"inception_resnet_v2/max_pooling2d_1/MaxPool" -> "inception_resnet_v2/conv2d_6/Conv2D"; +"inception_resnet_v2/max_pooling2d_1/MaxPool" -> "inception_resnet_v2/average_pooling2d/AvgPool"; +"inception_resnet_v2/max_pooling2d_1/MaxPool" -> "inception_resnet_v2/conv2d_5/Conv2D"; +"inception_resnet_v2/conv2d_8/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_8/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/conv2d_8/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/conv2d_8/Sigmoid"; +"inception_resnet_v2/conv2d_8/Sigmoid" -> "inception_resnet_v2/conv2d_8/Round"; +"inception_resnet_v2/conv2d_8/Round" -> "inception_resnet_v2/conv2d_8/mul"; +"inception_resnet_v2/conv2d_8/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_8/ReadVariableOp"; +"inception_resnet_v2/conv2d_8/ReadVariableOp" -> "inception_resnet_v2/conv2d_8/mul"; +"inception_resnet_v2/conv2d_8/mul" -> "inception_resnet_v2/conv2d_8/Conv2D"; +"inception_resnet_v2/conv2d_8/Conv2D" -> "inception_resnet_v2/batch_normalization_8/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_8/Const" -> "inception_resnet_v2/batch_normalization_8/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_8/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_8/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_8/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_8/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_8/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_8/FusedBatchNormV3/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_8/FusedBatchNormV3/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_8/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_8/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_resnet_v2/batch_normalization_8/FusedBatchNormV3/ReadVariableOp_1"; +"inception_resnet_v2/batch_normalization_8/FusedBatchNormV3/ReadVariableOp_1" -> "inception_resnet_v2/batch_normalization_8/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_8/FusedBatchNormV3" -> "inception_resnet_v2/activation_8/Relu"; +"inception_resnet_v2/activation_8/Relu" -> "inception_resnet_v2/conv2d_9/Conv2D"; +"inception_resnet_v2/conv2d_9/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_9/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/conv2d_9/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/conv2d_9/Sigmoid"; +"inception_resnet_v2/conv2d_9/Sigmoid" -> "inception_resnet_v2/conv2d_9/Round"; +"inception_resnet_v2/conv2d_9/Round" -> "inception_resnet_v2/conv2d_9/mul"; +"inception_resnet_v2/conv2d_9/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_9/ReadVariableOp"; +"inception_resnet_v2/conv2d_9/ReadVariableOp" -> "inception_resnet_v2/conv2d_9/mul"; +"inception_resnet_v2/conv2d_9/mul" -> "inception_resnet_v2/conv2d_9/Conv2D"; +"inception_resnet_v2/conv2d_9/Conv2D" -> "inception_resnet_v2/batch_normalization_9/FusedBatchNormV3"; +"inception_resnet_v2/conv2d_6/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_6/Sigmoid/ReadVariableOp"; 
+"inception_resnet_v2/conv2d_6/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/conv2d_6/Sigmoid"; +"inception_resnet_v2/conv2d_6/Sigmoid" -> "inception_resnet_v2/conv2d_6/Round"; +"inception_resnet_v2/conv2d_6/Round" -> "inception_resnet_v2/conv2d_6/mul"; +"inception_resnet_v2/conv2d_6/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_6/ReadVariableOp"; +"inception_resnet_v2/conv2d_6/ReadVariableOp" -> "inception_resnet_v2/conv2d_6/mul"; +"inception_resnet_v2/conv2d_6/mul" -> "inception_resnet_v2/conv2d_6/Conv2D"; +"inception_resnet_v2/conv2d_6/Conv2D" -> "inception_resnet_v2/batch_normalization_6/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_9/Const" -> "inception_resnet_v2/batch_normalization_9/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_9/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_9/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_9/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_9/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_9/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_9/FusedBatchNormV3/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_9/FusedBatchNormV3/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_9/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_9/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_resnet_v2/batch_normalization_9/FusedBatchNormV3/ReadVariableOp_1"; +"inception_resnet_v2/batch_normalization_9/FusedBatchNormV3/ReadVariableOp_1" -> "inception_resnet_v2/batch_normalization_9/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_9/FusedBatchNormV3" -> "inception_resnet_v2/activation_9/Relu"; +"inception_resnet_v2/batch_normalization_6/Const" -> "inception_resnet_v2/batch_normalization_6/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_6/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_6/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_6/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_6/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_6/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_6/FusedBatchNormV3/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_6/FusedBatchNormV3/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_6/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_6/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_resnet_v2/batch_normalization_6/FusedBatchNormV3/ReadVariableOp_1"; +"inception_resnet_v2/batch_normalization_6/FusedBatchNormV3/ReadVariableOp_1" -> "inception_resnet_v2/batch_normalization_6/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_6/FusedBatchNormV3" -> "inception_resnet_v2/activation_6/Relu"; +"inception_resnet_v2/average_pooling2d/AvgPool" -> "inception_resnet_v2/conv2d_11/Conv2D"; +"inception_resnet_v2/activation_9/Relu" -> "inception_resnet_v2/conv2d_10/Conv2D"; +"inception_resnet_v2/activation_6/Relu" -> "inception_resnet_v2/conv2d_7/Conv2D"; +"inception_resnet_v2/conv2d_11/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_11/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/conv2d_11/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/conv2d_11/Sigmoid"; +"inception_resnet_v2/conv2d_11/Sigmoid" -> "inception_resnet_v2/conv2d_11/Round"; +"inception_resnet_v2/conv2d_11/Round" -> "inception_resnet_v2/conv2d_11/mul"; +"inception_resnet_v2/conv2d_11/ReadVariableOp/resource" -> 
"inception_resnet_v2/conv2d_11/ReadVariableOp"; +"inception_resnet_v2/conv2d_11/ReadVariableOp" -> "inception_resnet_v2/conv2d_11/mul"; +"inception_resnet_v2/conv2d_11/mul" -> "inception_resnet_v2/conv2d_11/Conv2D"; +"inception_resnet_v2/conv2d_11/Conv2D" -> "inception_resnet_v2/batch_normalization_11/FusedBatchNormV3"; +"inception_resnet_v2/conv2d_10/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_10/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/conv2d_10/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/conv2d_10/Sigmoid"; +"inception_resnet_v2/conv2d_10/Sigmoid" -> "inception_resnet_v2/conv2d_10/Round"; +"inception_resnet_v2/conv2d_10/Round" -> "inception_resnet_v2/conv2d_10/mul"; +"inception_resnet_v2/conv2d_10/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_10/ReadVariableOp"; +"inception_resnet_v2/conv2d_10/ReadVariableOp" -> "inception_resnet_v2/conv2d_10/mul"; +"inception_resnet_v2/conv2d_10/mul" -> "inception_resnet_v2/conv2d_10/Conv2D"; +"inception_resnet_v2/conv2d_10/Conv2D" -> "inception_resnet_v2/batch_normalization_10/FusedBatchNormV3"; +"inception_resnet_v2/conv2d_7/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_7/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/conv2d_7/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/conv2d_7/Sigmoid"; +"inception_resnet_v2/conv2d_7/Sigmoid" -> "inception_resnet_v2/conv2d_7/Round"; +"inception_resnet_v2/conv2d_7/Round" -> "inception_resnet_v2/conv2d_7/mul"; +"inception_resnet_v2/conv2d_7/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_7/ReadVariableOp"; +"inception_resnet_v2/conv2d_7/ReadVariableOp" -> "inception_resnet_v2/conv2d_7/mul"; +"inception_resnet_v2/conv2d_7/mul" -> "inception_resnet_v2/conv2d_7/Conv2D"; +"inception_resnet_v2/conv2d_7/Conv2D" -> "inception_resnet_v2/batch_normalization_7/FusedBatchNormV3"; +"inception_resnet_v2/conv2d_5/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_5/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/conv2d_5/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/conv2d_5/Sigmoid"; +"inception_resnet_v2/conv2d_5/Sigmoid" -> "inception_resnet_v2/conv2d_5/Round"; +"inception_resnet_v2/conv2d_5/Round" -> "inception_resnet_v2/conv2d_5/mul"; +"inception_resnet_v2/conv2d_5/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_5/ReadVariableOp"; +"inception_resnet_v2/conv2d_5/ReadVariableOp" -> "inception_resnet_v2/conv2d_5/mul"; +"inception_resnet_v2/conv2d_5/mul" -> "inception_resnet_v2/conv2d_5/Conv2D"; +"inception_resnet_v2/conv2d_5/Conv2D" -> "inception_resnet_v2/batch_normalization_5/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_11/Const" -> "inception_resnet_v2/batch_normalization_11/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_11/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_11/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_11/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_11/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_11/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_11/FusedBatchNormV3/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_11/FusedBatchNormV3/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_11/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_11/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_resnet_v2/batch_normalization_11/FusedBatchNormV3/ReadVariableOp_1"; +"inception_resnet_v2/batch_normalization_11/FusedBatchNormV3/ReadVariableOp_1" -> 
"inception_resnet_v2/batch_normalization_11/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_11/FusedBatchNormV3" -> "inception_resnet_v2/activation_11/Relu"; +"inception_resnet_v2/batch_normalization_10/Const" -> "inception_resnet_v2/batch_normalization_10/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_10/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_10/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_10/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_10/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_10/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_10/FusedBatchNormV3/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_10/FusedBatchNormV3/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_10/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_10/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_resnet_v2/batch_normalization_10/FusedBatchNormV3/ReadVariableOp_1"; +"inception_resnet_v2/batch_normalization_10/FusedBatchNormV3/ReadVariableOp_1" -> "inception_resnet_v2/batch_normalization_10/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_10/FusedBatchNormV3" -> "inception_resnet_v2/activation_10/Relu"; +"inception_resnet_v2/batch_normalization_7/Const" -> "inception_resnet_v2/batch_normalization_7/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_7/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_7/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_7/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_7/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_7/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_7/FusedBatchNormV3/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_7/FusedBatchNormV3/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_7/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_7/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_resnet_v2/batch_normalization_7/FusedBatchNormV3/ReadVariableOp_1"; +"inception_resnet_v2/batch_normalization_7/FusedBatchNormV3/ReadVariableOp_1" -> "inception_resnet_v2/batch_normalization_7/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_7/FusedBatchNormV3" -> "inception_resnet_v2/activation_7/Relu"; +"inception_resnet_v2/batch_normalization_5/Const" -> "inception_resnet_v2/batch_normalization_5/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_5/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_5/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_5/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_5/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_5/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_5/FusedBatchNormV3/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_5/FusedBatchNormV3/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_5/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_5/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_resnet_v2/batch_normalization_5/FusedBatchNormV3/ReadVariableOp_1"; +"inception_resnet_v2/batch_normalization_5/FusedBatchNormV3/ReadVariableOp_1" -> "inception_resnet_v2/batch_normalization_5/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_5/FusedBatchNormV3" -> "inception_resnet_v2/activation_5/Relu"; +"inception_resnet_v2/activation_5/Relu" -> 
"inception_resnet_v2/mixed_5b/concat"; +"inception_resnet_v2/activation_7/Relu" -> "inception_resnet_v2/mixed_5b/concat"; +"inception_resnet_v2/activation_10/Relu" -> "inception_resnet_v2/mixed_5b/concat"; +"inception_resnet_v2/activation_11/Relu" -> "inception_resnet_v2/mixed_5b/concat"; +"inception_resnet_v2/mixed_5b/concat/axis" -> "inception_resnet_v2/mixed_5b/concat"; +"inception_resnet_v2/mixed_5b/concat" -> "inception_resnet_v2/conv2d_15/Conv2D"; +"inception_resnet_v2/mixed_5b/concat" -> "inception_resnet_v2/conv2d_13/Conv2D"; +"inception_resnet_v2/mixed_5b/concat" -> "inception_resnet_v2/conv2d_12/Conv2D"; +"inception_resnet_v2/mixed_5b/concat" -> "inception_resnet_v2/custom_scale_layer/add"; +"inception_resnet_v2/conv2d_15/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_15/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/conv2d_15/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/conv2d_15/Sigmoid"; +"inception_resnet_v2/conv2d_15/Sigmoid" -> "inception_resnet_v2/conv2d_15/Round"; +"inception_resnet_v2/conv2d_15/Round" -> "inception_resnet_v2/conv2d_15/mul"; +"inception_resnet_v2/conv2d_15/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_15/ReadVariableOp"; +"inception_resnet_v2/conv2d_15/ReadVariableOp" -> "inception_resnet_v2/conv2d_15/mul"; +"inception_resnet_v2/conv2d_15/mul" -> "inception_resnet_v2/conv2d_15/Conv2D"; +"inception_resnet_v2/conv2d_15/Conv2D" -> "inception_resnet_v2/batch_normalization_15/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_15/Const" -> "inception_resnet_v2/batch_normalization_15/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_15/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_15/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_15/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_15/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_15/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_15/FusedBatchNormV3/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_15/FusedBatchNormV3/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_15/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_15/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_resnet_v2/batch_normalization_15/FusedBatchNormV3/ReadVariableOp_1"; +"inception_resnet_v2/batch_normalization_15/FusedBatchNormV3/ReadVariableOp_1" -> "inception_resnet_v2/batch_normalization_15/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_15/FusedBatchNormV3" -> "inception_resnet_v2/activation_15/Relu"; +"inception_resnet_v2/activation_15/Relu" -> "inception_resnet_v2/conv2d_16/Conv2D"; +"inception_resnet_v2/conv2d_16/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_16/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/conv2d_16/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/conv2d_16/Sigmoid"; +"inception_resnet_v2/conv2d_16/Sigmoid" -> "inception_resnet_v2/conv2d_16/Round"; +"inception_resnet_v2/conv2d_16/Round" -> "inception_resnet_v2/conv2d_16/mul"; +"inception_resnet_v2/conv2d_16/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_16/ReadVariableOp"; +"inception_resnet_v2/conv2d_16/ReadVariableOp" -> "inception_resnet_v2/conv2d_16/mul"; +"inception_resnet_v2/conv2d_16/mul" -> "inception_resnet_v2/conv2d_16/Conv2D"; +"inception_resnet_v2/conv2d_16/Conv2D" -> "inception_resnet_v2/batch_normalization_16/FusedBatchNormV3"; +"inception_resnet_v2/conv2d_13/Sigmoid/ReadVariableOp/resource" -> 
"inception_resnet_v2/conv2d_13/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/conv2d_13/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/conv2d_13/Sigmoid"; +"inception_resnet_v2/conv2d_13/Sigmoid" -> "inception_resnet_v2/conv2d_13/Round"; +"inception_resnet_v2/conv2d_13/Round" -> "inception_resnet_v2/conv2d_13/mul"; +"inception_resnet_v2/conv2d_13/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_13/ReadVariableOp"; +"inception_resnet_v2/conv2d_13/ReadVariableOp" -> "inception_resnet_v2/conv2d_13/mul"; +"inception_resnet_v2/conv2d_13/mul" -> "inception_resnet_v2/conv2d_13/Conv2D"; +"inception_resnet_v2/conv2d_13/Conv2D" -> "inception_resnet_v2/batch_normalization_13/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_16/Const" -> "inception_resnet_v2/batch_normalization_16/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_16/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_16/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_16/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_16/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_16/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_16/FusedBatchNormV3/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_16/FusedBatchNormV3/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_16/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_16/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_resnet_v2/batch_normalization_16/FusedBatchNormV3/ReadVariableOp_1"; +"inception_resnet_v2/batch_normalization_16/FusedBatchNormV3/ReadVariableOp_1" -> "inception_resnet_v2/batch_normalization_16/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_16/FusedBatchNormV3" -> "inception_resnet_v2/activation_16/Relu"; +"inception_resnet_v2/batch_normalization_13/Const" -> "inception_resnet_v2/batch_normalization_13/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_13/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_13/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_13/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_13/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_13/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_13/FusedBatchNormV3/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_13/FusedBatchNormV3/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_13/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_13/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_resnet_v2/batch_normalization_13/FusedBatchNormV3/ReadVariableOp_1"; +"inception_resnet_v2/batch_normalization_13/FusedBatchNormV3/ReadVariableOp_1" -> "inception_resnet_v2/batch_normalization_13/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_13/FusedBatchNormV3" -> "inception_resnet_v2/activation_13/Relu"; +"inception_resnet_v2/activation_16/Relu" -> "inception_resnet_v2/conv2d_17/Conv2D"; +"inception_resnet_v2/activation_13/Relu" -> "inception_resnet_v2/conv2d_14/Conv2D"; +"inception_resnet_v2/conv2d_17/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_17/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/conv2d_17/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/conv2d_17/Sigmoid"; +"inception_resnet_v2/conv2d_17/Sigmoid" -> "inception_resnet_v2/conv2d_17/Round"; +"inception_resnet_v2/conv2d_17/Round" -> "inception_resnet_v2/conv2d_17/mul"; +"inception_resnet_v2/conv2d_17/ReadVariableOp/resource" -> 
"inception_resnet_v2/conv2d_17/ReadVariableOp"; +"inception_resnet_v2/conv2d_17/ReadVariableOp" -> "inception_resnet_v2/conv2d_17/mul"; +"inception_resnet_v2/conv2d_17/mul" -> "inception_resnet_v2/conv2d_17/Conv2D"; +"inception_resnet_v2/conv2d_17/Conv2D" -> "inception_resnet_v2/batch_normalization_17/FusedBatchNormV3"; +"inception_resnet_v2/conv2d_14/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_14/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/conv2d_14/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/conv2d_14/Sigmoid"; +"inception_resnet_v2/conv2d_14/Sigmoid" -> "inception_resnet_v2/conv2d_14/Round"; +"inception_resnet_v2/conv2d_14/Round" -> "inception_resnet_v2/conv2d_14/mul"; +"inception_resnet_v2/conv2d_14/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_14/ReadVariableOp"; +"inception_resnet_v2/conv2d_14/ReadVariableOp" -> "inception_resnet_v2/conv2d_14/mul"; +"inception_resnet_v2/conv2d_14/mul" -> "inception_resnet_v2/conv2d_14/Conv2D"; +"inception_resnet_v2/conv2d_14/Conv2D" -> "inception_resnet_v2/batch_normalization_14/FusedBatchNormV3"; +"inception_resnet_v2/conv2d_12/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_12/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/conv2d_12/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/conv2d_12/Sigmoid"; +"inception_resnet_v2/conv2d_12/Sigmoid" -> "inception_resnet_v2/conv2d_12/Round"; +"inception_resnet_v2/conv2d_12/Round" -> "inception_resnet_v2/conv2d_12/mul"; +"inception_resnet_v2/conv2d_12/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_12/ReadVariableOp"; +"inception_resnet_v2/conv2d_12/ReadVariableOp" -> "inception_resnet_v2/conv2d_12/mul"; +"inception_resnet_v2/conv2d_12/mul" -> "inception_resnet_v2/conv2d_12/Conv2D"; +"inception_resnet_v2/conv2d_12/Conv2D" -> "inception_resnet_v2/batch_normalization_12/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_17/Const" -> "inception_resnet_v2/batch_normalization_17/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_17/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_17/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_17/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_17/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_17/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_17/FusedBatchNormV3/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_17/FusedBatchNormV3/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_17/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_17/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_resnet_v2/batch_normalization_17/FusedBatchNormV3/ReadVariableOp_1"; +"inception_resnet_v2/batch_normalization_17/FusedBatchNormV3/ReadVariableOp_1" -> "inception_resnet_v2/batch_normalization_17/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_17/FusedBatchNormV3" -> "inception_resnet_v2/activation_17/Relu"; +"inception_resnet_v2/batch_normalization_14/Const" -> "inception_resnet_v2/batch_normalization_14/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_14/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_14/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_14/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_14/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_14/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_14/FusedBatchNormV3/ReadVariableOp"; 
+"inception_resnet_v2/batch_normalization_14/FusedBatchNormV3/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_14/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_14/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_resnet_v2/batch_normalization_14/FusedBatchNormV3/ReadVariableOp_1"; +"inception_resnet_v2/batch_normalization_14/FusedBatchNormV3/ReadVariableOp_1" -> "inception_resnet_v2/batch_normalization_14/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_14/FusedBatchNormV3" -> "inception_resnet_v2/activation_14/Relu"; +"inception_resnet_v2/batch_normalization_12/Const" -> "inception_resnet_v2/batch_normalization_12/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_12/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_12/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_12/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_12/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_12/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_12/FusedBatchNormV3/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_12/FusedBatchNormV3/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_12/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_12/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_resnet_v2/batch_normalization_12/FusedBatchNormV3/ReadVariableOp_1"; +"inception_resnet_v2/batch_normalization_12/FusedBatchNormV3/ReadVariableOp_1" -> "inception_resnet_v2/batch_normalization_12/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_12/FusedBatchNormV3" -> "inception_resnet_v2/activation_12/Relu"; +"inception_resnet_v2/activation_12/Relu" -> "inception_resnet_v2/block35_1_mixed/concat"; +"inception_resnet_v2/activation_14/Relu" -> "inception_resnet_v2/block35_1_mixed/concat"; +"inception_resnet_v2/activation_17/Relu" -> "inception_resnet_v2/block35_1_mixed/concat"; +"inception_resnet_v2/block35_1_mixed/concat/axis" -> "inception_resnet_v2/block35_1_mixed/concat"; +"inception_resnet_v2/block35_1_mixed/concat" -> "inception_resnet_v2/block35_1_conv/Conv2D"; +"inception_resnet_v2/block35_1_conv/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/block35_1_conv/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/block35_1_conv/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/block35_1_conv/Sigmoid"; +"inception_resnet_v2/block35_1_conv/Sigmoid" -> "inception_resnet_v2/block35_1_conv/Round"; +"inception_resnet_v2/block35_1_conv/Round" -> "inception_resnet_v2/block35_1_conv/mul"; +"inception_resnet_v2/block35_1_conv/ReadVariableOp/resource" -> "inception_resnet_v2/block35_1_conv/ReadVariableOp"; +"inception_resnet_v2/block35_1_conv/ReadVariableOp" -> "inception_resnet_v2/block35_1_conv/mul"; +"inception_resnet_v2/block35_1_conv/mul" -> "inception_resnet_v2/block35_1_conv/Conv2D"; +"inception_resnet_v2/block35_1_conv/Conv2D" -> "inception_resnet_v2/block35_1_conv/BiasAdd"; +"inception_resnet_v2/block35_1_conv/BiasAdd/ReadVariableOp/resource" -> "inception_resnet_v2/block35_1_conv/BiasAdd/ReadVariableOp"; +"inception_resnet_v2/block35_1_conv/BiasAdd/ReadVariableOp" -> "inception_resnet_v2/block35_1_conv/BiasAdd"; +"inception_resnet_v2/block35_1_conv/BiasAdd" -> "inception_resnet_v2/custom_scale_layer/mul"; +"inception_resnet_v2/custom_scale_layer/mul/y" -> "inception_resnet_v2/custom_scale_layer/mul"; +"inception_resnet_v2/custom_scale_layer/mul" -> "inception_resnet_v2/custom_scale_layer/add"; 
+"inception_resnet_v2/custom_scale_layer/add" -> "inception_resnet_v2/block35_1_ac/Relu"; +"inception_resnet_v2/block35_1_ac/Relu" -> "inception_resnet_v2/conv2d_21/Conv2D"; +"inception_resnet_v2/block35_1_ac/Relu" -> "inception_resnet_v2/conv2d_19/Conv2D"; +"inception_resnet_v2/block35_1_ac/Relu" -> "inception_resnet_v2/conv2d_18/Conv2D"; +"inception_resnet_v2/block35_1_ac/Relu" -> "inception_resnet_v2/custom_scale_layer_1/add"; +"inception_resnet_v2/conv2d_21/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_21/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/conv2d_21/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/conv2d_21/Sigmoid"; +"inception_resnet_v2/conv2d_21/Sigmoid" -> "inception_resnet_v2/conv2d_21/Round"; +"inception_resnet_v2/conv2d_21/Round" -> "inception_resnet_v2/conv2d_21/mul"; +"inception_resnet_v2/conv2d_21/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_21/ReadVariableOp"; +"inception_resnet_v2/conv2d_21/ReadVariableOp" -> "inception_resnet_v2/conv2d_21/mul"; +"inception_resnet_v2/conv2d_21/mul" -> "inception_resnet_v2/conv2d_21/Conv2D"; +"inception_resnet_v2/conv2d_21/Conv2D" -> "inception_resnet_v2/batch_normalization_21/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_21/Const" -> "inception_resnet_v2/batch_normalization_21/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_21/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_21/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_21/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_21/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_21/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_21/FusedBatchNormV3/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_21/FusedBatchNormV3/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_21/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_21/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_resnet_v2/batch_normalization_21/FusedBatchNormV3/ReadVariableOp_1"; +"inception_resnet_v2/batch_normalization_21/FusedBatchNormV3/ReadVariableOp_1" -> "inception_resnet_v2/batch_normalization_21/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_21/FusedBatchNormV3" -> "inception_resnet_v2/activation_21/Relu"; +"inception_resnet_v2/activation_21/Relu" -> "inception_resnet_v2/conv2d_22/Conv2D"; +"inception_resnet_v2/conv2d_22/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_22/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/conv2d_22/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/conv2d_22/Sigmoid"; +"inception_resnet_v2/conv2d_22/Sigmoid" -> "inception_resnet_v2/conv2d_22/Round"; +"inception_resnet_v2/conv2d_22/Round" -> "inception_resnet_v2/conv2d_22/mul"; +"inception_resnet_v2/conv2d_22/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_22/ReadVariableOp"; +"inception_resnet_v2/conv2d_22/ReadVariableOp" -> "inception_resnet_v2/conv2d_22/mul"; +"inception_resnet_v2/conv2d_22/mul" -> "inception_resnet_v2/conv2d_22/Conv2D"; +"inception_resnet_v2/conv2d_22/Conv2D" -> "inception_resnet_v2/batch_normalization_22/FusedBatchNormV3"; +"inception_resnet_v2/conv2d_19/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_19/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/conv2d_19/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/conv2d_19/Sigmoid"; +"inception_resnet_v2/conv2d_19/Sigmoid" -> "inception_resnet_v2/conv2d_19/Round"; +"inception_resnet_v2/conv2d_19/Round" -> 
"inception_resnet_v2/conv2d_19/mul"; +"inception_resnet_v2/conv2d_19/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_19/ReadVariableOp"; +"inception_resnet_v2/conv2d_19/ReadVariableOp" -> "inception_resnet_v2/conv2d_19/mul"; +"inception_resnet_v2/conv2d_19/mul" -> "inception_resnet_v2/conv2d_19/Conv2D"; +"inception_resnet_v2/conv2d_19/Conv2D" -> "inception_resnet_v2/batch_normalization_19/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_22/Const" -> "inception_resnet_v2/batch_normalization_22/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_22/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_22/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_22/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_22/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_22/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_22/FusedBatchNormV3/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_22/FusedBatchNormV3/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_22/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_22/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_resnet_v2/batch_normalization_22/FusedBatchNormV3/ReadVariableOp_1"; +"inception_resnet_v2/batch_normalization_22/FusedBatchNormV3/ReadVariableOp_1" -> "inception_resnet_v2/batch_normalization_22/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_22/FusedBatchNormV3" -> "inception_resnet_v2/activation_22/Relu"; +"inception_resnet_v2/batch_normalization_19/Const" -> "inception_resnet_v2/batch_normalization_19/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_19/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_19/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_19/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_19/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_19/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_19/FusedBatchNormV3/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_19/FusedBatchNormV3/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_19/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_19/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_resnet_v2/batch_normalization_19/FusedBatchNormV3/ReadVariableOp_1"; +"inception_resnet_v2/batch_normalization_19/FusedBatchNormV3/ReadVariableOp_1" -> "inception_resnet_v2/batch_normalization_19/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_19/FusedBatchNormV3" -> "inception_resnet_v2/activation_19/Relu"; +"inception_resnet_v2/activation_22/Relu" -> "inception_resnet_v2/conv2d_23/Conv2D"; +"inception_resnet_v2/activation_19/Relu" -> "inception_resnet_v2/conv2d_20/Conv2D"; +"inception_resnet_v2/conv2d_23/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_23/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/conv2d_23/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/conv2d_23/Sigmoid"; +"inception_resnet_v2/conv2d_23/Sigmoid" -> "inception_resnet_v2/conv2d_23/Round"; +"inception_resnet_v2/conv2d_23/Round" -> "inception_resnet_v2/conv2d_23/mul"; +"inception_resnet_v2/conv2d_23/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_23/ReadVariableOp"; +"inception_resnet_v2/conv2d_23/ReadVariableOp" -> "inception_resnet_v2/conv2d_23/mul"; +"inception_resnet_v2/conv2d_23/mul" -> "inception_resnet_v2/conv2d_23/Conv2D"; +"inception_resnet_v2/conv2d_23/Conv2D" -> 
"inception_resnet_v2/batch_normalization_23/FusedBatchNormV3"; +"inception_resnet_v2/conv2d_20/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_20/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/conv2d_20/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/conv2d_20/Sigmoid"; +"inception_resnet_v2/conv2d_20/Sigmoid" -> "inception_resnet_v2/conv2d_20/Round"; +"inception_resnet_v2/conv2d_20/Round" -> "inception_resnet_v2/conv2d_20/mul"; +"inception_resnet_v2/conv2d_20/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_20/ReadVariableOp"; +"inception_resnet_v2/conv2d_20/ReadVariableOp" -> "inception_resnet_v2/conv2d_20/mul"; +"inception_resnet_v2/conv2d_20/mul" -> "inception_resnet_v2/conv2d_20/Conv2D"; +"inception_resnet_v2/conv2d_20/Conv2D" -> "inception_resnet_v2/batch_normalization_20/FusedBatchNormV3"; +"inception_resnet_v2/conv2d_18/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_18/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/conv2d_18/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/conv2d_18/Sigmoid"; +"inception_resnet_v2/conv2d_18/Sigmoid" -> "inception_resnet_v2/conv2d_18/Round"; +"inception_resnet_v2/conv2d_18/Round" -> "inception_resnet_v2/conv2d_18/mul"; +"inception_resnet_v2/conv2d_18/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_18/ReadVariableOp"; +"inception_resnet_v2/conv2d_18/ReadVariableOp" -> "inception_resnet_v2/conv2d_18/mul"; +"inception_resnet_v2/conv2d_18/mul" -> "inception_resnet_v2/conv2d_18/Conv2D"; +"inception_resnet_v2/conv2d_18/Conv2D" -> "inception_resnet_v2/batch_normalization_18/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_23/Const" -> "inception_resnet_v2/batch_normalization_23/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_23/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_23/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_23/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_23/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_23/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_23/FusedBatchNormV3/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_23/FusedBatchNormV3/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_23/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_23/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_resnet_v2/batch_normalization_23/FusedBatchNormV3/ReadVariableOp_1"; +"inception_resnet_v2/batch_normalization_23/FusedBatchNormV3/ReadVariableOp_1" -> "inception_resnet_v2/batch_normalization_23/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_23/FusedBatchNormV3" -> "inception_resnet_v2/activation_23/Relu"; +"inception_resnet_v2/batch_normalization_20/Const" -> "inception_resnet_v2/batch_normalization_20/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_20/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_20/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_20/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_20/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_20/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_20/FusedBatchNormV3/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_20/FusedBatchNormV3/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_20/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_20/FusedBatchNormV3/ReadVariableOp_1/resource" -> 
"inception_resnet_v2/batch_normalization_20/FusedBatchNormV3/ReadVariableOp_1"; +"inception_resnet_v2/batch_normalization_20/FusedBatchNormV3/ReadVariableOp_1" -> "inception_resnet_v2/batch_normalization_20/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_20/FusedBatchNormV3" -> "inception_resnet_v2/activation_20/Relu"; +"inception_resnet_v2/batch_normalization_18/Const" -> "inception_resnet_v2/batch_normalization_18/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_18/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_18/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_18/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_18/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_18/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_18/FusedBatchNormV3/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_18/FusedBatchNormV3/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_18/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_18/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_resnet_v2/batch_normalization_18/FusedBatchNormV3/ReadVariableOp_1"; +"inception_resnet_v2/batch_normalization_18/FusedBatchNormV3/ReadVariableOp_1" -> "inception_resnet_v2/batch_normalization_18/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_18/FusedBatchNormV3" -> "inception_resnet_v2/activation_18/Relu"; +"inception_resnet_v2/activation_18/Relu" -> "inception_resnet_v2/block35_2_mixed/concat"; +"inception_resnet_v2/activation_20/Relu" -> "inception_resnet_v2/block35_2_mixed/concat"; +"inception_resnet_v2/activation_23/Relu" -> "inception_resnet_v2/block35_2_mixed/concat"; +"inception_resnet_v2/block35_2_mixed/concat/axis" -> "inception_resnet_v2/block35_2_mixed/concat"; +"inception_resnet_v2/block35_2_mixed/concat" -> "inception_resnet_v2/block35_2_conv/Conv2D"; +"inception_resnet_v2/block35_2_conv/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/block35_2_conv/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/block35_2_conv/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/block35_2_conv/Sigmoid"; +"inception_resnet_v2/block35_2_conv/Sigmoid" -> "inception_resnet_v2/block35_2_conv/Round"; +"inception_resnet_v2/block35_2_conv/Round" -> "inception_resnet_v2/block35_2_conv/mul"; +"inception_resnet_v2/block35_2_conv/ReadVariableOp/resource" -> "inception_resnet_v2/block35_2_conv/ReadVariableOp"; +"inception_resnet_v2/block35_2_conv/ReadVariableOp" -> "inception_resnet_v2/block35_2_conv/mul"; +"inception_resnet_v2/block35_2_conv/mul" -> "inception_resnet_v2/block35_2_conv/Conv2D"; +"inception_resnet_v2/block35_2_conv/Conv2D" -> "inception_resnet_v2/block35_2_conv/BiasAdd"; +"inception_resnet_v2/block35_2_conv/BiasAdd/ReadVariableOp/resource" -> "inception_resnet_v2/block35_2_conv/BiasAdd/ReadVariableOp"; +"inception_resnet_v2/block35_2_conv/BiasAdd/ReadVariableOp" -> "inception_resnet_v2/block35_2_conv/BiasAdd"; +"inception_resnet_v2/block35_2_conv/BiasAdd" -> "inception_resnet_v2/custom_scale_layer_1/mul"; +"inception_resnet_v2/custom_scale_layer_1/mul/y" -> "inception_resnet_v2/custom_scale_layer_1/mul"; +"inception_resnet_v2/custom_scale_layer_1/mul" -> "inception_resnet_v2/custom_scale_layer_1/add"; +"inception_resnet_v2/custom_scale_layer_1/add" -> "inception_resnet_v2/block35_2_ac/Relu"; +"inception_resnet_v2/block35_2_ac/Relu" -> "inception_resnet_v2/conv2d_27/Conv2D"; +"inception_resnet_v2/block35_2_ac/Relu" -> "inception_resnet_v2/conv2d_25/Conv2D"; 
+"inception_resnet_v2/block35_2_ac/Relu" -> "inception_resnet_v2/conv2d_24/Conv2D"; +"inception_resnet_v2/block35_2_ac/Relu" -> "inception_resnet_v2/custom_scale_layer_2/add"; +"inception_resnet_v2/conv2d_27/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_27/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/conv2d_27/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/conv2d_27/Sigmoid"; +"inception_resnet_v2/conv2d_27/Sigmoid" -> "inception_resnet_v2/conv2d_27/Round"; +"inception_resnet_v2/conv2d_27/Round" -> "inception_resnet_v2/conv2d_27/mul"; +"inception_resnet_v2/conv2d_27/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_27/ReadVariableOp"; +"inception_resnet_v2/conv2d_27/ReadVariableOp" -> "inception_resnet_v2/conv2d_27/mul"; +"inception_resnet_v2/conv2d_27/mul" -> "inception_resnet_v2/conv2d_27/Conv2D"; +"inception_resnet_v2/conv2d_27/Conv2D" -> "inception_resnet_v2/batch_normalization_27/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_27/Const" -> "inception_resnet_v2/batch_normalization_27/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_27/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_27/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_27/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_27/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_27/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_27/FusedBatchNormV3/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_27/FusedBatchNormV3/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_27/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_27/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_resnet_v2/batch_normalization_27/FusedBatchNormV3/ReadVariableOp_1"; +"inception_resnet_v2/batch_normalization_27/FusedBatchNormV3/ReadVariableOp_1" -> "inception_resnet_v2/batch_normalization_27/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_27/FusedBatchNormV3" -> "inception_resnet_v2/activation_27/Relu"; +"inception_resnet_v2/activation_27/Relu" -> "inception_resnet_v2/conv2d_28/Conv2D"; +"inception_resnet_v2/conv2d_28/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_28/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/conv2d_28/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/conv2d_28/Sigmoid"; +"inception_resnet_v2/conv2d_28/Sigmoid" -> "inception_resnet_v2/conv2d_28/Round"; +"inception_resnet_v2/conv2d_28/Round" -> "inception_resnet_v2/conv2d_28/mul"; +"inception_resnet_v2/conv2d_28/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_28/ReadVariableOp"; +"inception_resnet_v2/conv2d_28/ReadVariableOp" -> "inception_resnet_v2/conv2d_28/mul"; +"inception_resnet_v2/conv2d_28/mul" -> "inception_resnet_v2/conv2d_28/Conv2D"; +"inception_resnet_v2/conv2d_28/Conv2D" -> "inception_resnet_v2/batch_normalization_28/FusedBatchNormV3"; +"inception_resnet_v2/conv2d_25/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_25/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/conv2d_25/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/conv2d_25/Sigmoid"; +"inception_resnet_v2/conv2d_25/Sigmoid" -> "inception_resnet_v2/conv2d_25/Round"; +"inception_resnet_v2/conv2d_25/Round" -> "inception_resnet_v2/conv2d_25/mul"; +"inception_resnet_v2/conv2d_25/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_25/ReadVariableOp"; +"inception_resnet_v2/conv2d_25/ReadVariableOp" -> "inception_resnet_v2/conv2d_25/mul"; +"inception_resnet_v2/conv2d_25/mul" -> 
"inception_resnet_v2/conv2d_25/Conv2D"; +"inception_resnet_v2/conv2d_25/Conv2D" -> "inception_resnet_v2/batch_normalization_25/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_28/Const" -> "inception_resnet_v2/batch_normalization_28/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_28/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_28/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_28/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_28/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_28/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_28/FusedBatchNormV3/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_28/FusedBatchNormV3/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_28/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_28/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_resnet_v2/batch_normalization_28/FusedBatchNormV3/ReadVariableOp_1"; +"inception_resnet_v2/batch_normalization_28/FusedBatchNormV3/ReadVariableOp_1" -> "inception_resnet_v2/batch_normalization_28/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_28/FusedBatchNormV3" -> "inception_resnet_v2/activation_28/Relu"; +"inception_resnet_v2/batch_normalization_25/Const" -> "inception_resnet_v2/batch_normalization_25/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_25/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_25/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_25/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_25/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_25/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_25/FusedBatchNormV3/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_25/FusedBatchNormV3/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_25/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_25/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_resnet_v2/batch_normalization_25/FusedBatchNormV3/ReadVariableOp_1"; +"inception_resnet_v2/batch_normalization_25/FusedBatchNormV3/ReadVariableOp_1" -> "inception_resnet_v2/batch_normalization_25/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_25/FusedBatchNormV3" -> "inception_resnet_v2/activation_25/Relu"; +"inception_resnet_v2/activation_28/Relu" -> "inception_resnet_v2/conv2d_29/Conv2D"; +"inception_resnet_v2/activation_25/Relu" -> "inception_resnet_v2/conv2d_26/Conv2D"; +"inception_resnet_v2/conv2d_29/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_29/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/conv2d_29/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/conv2d_29/Sigmoid"; +"inception_resnet_v2/conv2d_29/Sigmoid" -> "inception_resnet_v2/conv2d_29/Round"; +"inception_resnet_v2/conv2d_29/Round" -> "inception_resnet_v2/conv2d_29/mul"; +"inception_resnet_v2/conv2d_29/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_29/ReadVariableOp"; +"inception_resnet_v2/conv2d_29/ReadVariableOp" -> "inception_resnet_v2/conv2d_29/mul"; +"inception_resnet_v2/conv2d_29/mul" -> "inception_resnet_v2/conv2d_29/Conv2D"; +"inception_resnet_v2/conv2d_29/Conv2D" -> "inception_resnet_v2/batch_normalization_29/FusedBatchNormV3"; +"inception_resnet_v2/conv2d_26/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_26/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/conv2d_26/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/conv2d_26/Sigmoid"; 
+"inception_resnet_v2/conv2d_26/Sigmoid" -> "inception_resnet_v2/conv2d_26/Round"; +"inception_resnet_v2/conv2d_26/Round" -> "inception_resnet_v2/conv2d_26/mul"; +"inception_resnet_v2/conv2d_26/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_26/ReadVariableOp"; +"inception_resnet_v2/conv2d_26/ReadVariableOp" -> "inception_resnet_v2/conv2d_26/mul"; +"inception_resnet_v2/conv2d_26/mul" -> "inception_resnet_v2/conv2d_26/Conv2D"; +"inception_resnet_v2/conv2d_26/Conv2D" -> "inception_resnet_v2/batch_normalization_26/FusedBatchNormV3"; +"inception_resnet_v2/conv2d_24/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_24/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/conv2d_24/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/conv2d_24/Sigmoid"; +"inception_resnet_v2/conv2d_24/Sigmoid" -> "inception_resnet_v2/conv2d_24/Round"; +"inception_resnet_v2/conv2d_24/Round" -> "inception_resnet_v2/conv2d_24/mul"; +"inception_resnet_v2/conv2d_24/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_24/ReadVariableOp"; +"inception_resnet_v2/conv2d_24/ReadVariableOp" -> "inception_resnet_v2/conv2d_24/mul"; +"inception_resnet_v2/conv2d_24/mul" -> "inception_resnet_v2/conv2d_24/Conv2D"; +"inception_resnet_v2/conv2d_24/Conv2D" -> "inception_resnet_v2/batch_normalization_24/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_29/Const" -> "inception_resnet_v2/batch_normalization_29/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_29/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_29/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_29/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_29/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_29/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_29/FusedBatchNormV3/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_29/FusedBatchNormV3/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_29/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_29/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_resnet_v2/batch_normalization_29/FusedBatchNormV3/ReadVariableOp_1"; +"inception_resnet_v2/batch_normalization_29/FusedBatchNormV3/ReadVariableOp_1" -> "inception_resnet_v2/batch_normalization_29/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_29/FusedBatchNormV3" -> "inception_resnet_v2/activation_29/Relu"; +"inception_resnet_v2/batch_normalization_26/Const" -> "inception_resnet_v2/batch_normalization_26/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_26/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_26/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_26/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_26/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_26/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_26/FusedBatchNormV3/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_26/FusedBatchNormV3/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_26/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_26/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_resnet_v2/batch_normalization_26/FusedBatchNormV3/ReadVariableOp_1"; +"inception_resnet_v2/batch_normalization_26/FusedBatchNormV3/ReadVariableOp_1" -> "inception_resnet_v2/batch_normalization_26/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_26/FusedBatchNormV3" -> "inception_resnet_v2/activation_26/Relu"; 
+"inception_resnet_v2/batch_normalization_24/Const" -> "inception_resnet_v2/batch_normalization_24/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_24/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_24/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_24/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_24/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_24/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_24/FusedBatchNormV3/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_24/FusedBatchNormV3/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_24/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_24/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_resnet_v2/batch_normalization_24/FusedBatchNormV3/ReadVariableOp_1"; +"inception_resnet_v2/batch_normalization_24/FusedBatchNormV3/ReadVariableOp_1" -> "inception_resnet_v2/batch_normalization_24/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_24/FusedBatchNormV3" -> "inception_resnet_v2/activation_24/Relu"; +"inception_resnet_v2/activation_24/Relu" -> "inception_resnet_v2/block35_3_mixed/concat"; +"inception_resnet_v2/activation_26/Relu" -> "inception_resnet_v2/block35_3_mixed/concat"; +"inception_resnet_v2/activation_29/Relu" -> "inception_resnet_v2/block35_3_mixed/concat"; +"inception_resnet_v2/block35_3_mixed/concat/axis" -> "inception_resnet_v2/block35_3_mixed/concat"; +"inception_resnet_v2/block35_3_mixed/concat" -> "inception_resnet_v2/block35_3_conv/Conv2D"; +"inception_resnet_v2/block35_3_conv/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/block35_3_conv/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/block35_3_conv/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/block35_3_conv/Sigmoid"; +"inception_resnet_v2/block35_3_conv/Sigmoid" -> "inception_resnet_v2/block35_3_conv/Round"; +"inception_resnet_v2/block35_3_conv/Round" -> "inception_resnet_v2/block35_3_conv/mul"; +"inception_resnet_v2/block35_3_conv/ReadVariableOp/resource" -> "inception_resnet_v2/block35_3_conv/ReadVariableOp"; +"inception_resnet_v2/block35_3_conv/ReadVariableOp" -> "inception_resnet_v2/block35_3_conv/mul"; +"inception_resnet_v2/block35_3_conv/mul" -> "inception_resnet_v2/block35_3_conv/Conv2D"; +"inception_resnet_v2/block35_3_conv/Conv2D" -> "inception_resnet_v2/block35_3_conv/BiasAdd"; +"inception_resnet_v2/block35_3_conv/BiasAdd/ReadVariableOp/resource" -> "inception_resnet_v2/block35_3_conv/BiasAdd/ReadVariableOp"; +"inception_resnet_v2/block35_3_conv/BiasAdd/ReadVariableOp" -> "inception_resnet_v2/block35_3_conv/BiasAdd"; +"inception_resnet_v2/block35_3_conv/BiasAdd" -> "inception_resnet_v2/custom_scale_layer_2/mul"; +"inception_resnet_v2/custom_scale_layer_2/mul/y" -> "inception_resnet_v2/custom_scale_layer_2/mul"; +"inception_resnet_v2/custom_scale_layer_2/mul" -> "inception_resnet_v2/custom_scale_layer_2/add"; +"inception_resnet_v2/custom_scale_layer_2/add" -> "inception_resnet_v2/block35_3_ac/Relu"; +"inception_resnet_v2/block35_3_ac/Relu" -> "inception_resnet_v2/conv2d_33/Conv2D"; +"inception_resnet_v2/block35_3_ac/Relu" -> "inception_resnet_v2/conv2d_31/Conv2D"; +"inception_resnet_v2/block35_3_ac/Relu" -> "inception_resnet_v2/conv2d_30/Conv2D"; +"inception_resnet_v2/block35_3_ac/Relu" -> "inception_resnet_v2/custom_scale_layer_3/add"; +"inception_resnet_v2/conv2d_33/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_33/Sigmoid/ReadVariableOp"; 
+"inception_resnet_v2/conv2d_33/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/conv2d_33/Sigmoid"; +"inception_resnet_v2/conv2d_33/Sigmoid" -> "inception_resnet_v2/conv2d_33/Round"; +"inception_resnet_v2/conv2d_33/Round" -> "inception_resnet_v2/conv2d_33/mul"; +"inception_resnet_v2/conv2d_33/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_33/ReadVariableOp"; +"inception_resnet_v2/conv2d_33/ReadVariableOp" -> "inception_resnet_v2/conv2d_33/mul"; +"inception_resnet_v2/conv2d_33/mul" -> "inception_resnet_v2/conv2d_33/Conv2D"; +"inception_resnet_v2/conv2d_33/Conv2D" -> "inception_resnet_v2/batch_normalization_33/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_33/Const" -> "inception_resnet_v2/batch_normalization_33/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_33/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_33/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_33/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_33/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_33/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_33/FusedBatchNormV3/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_33/FusedBatchNormV3/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_33/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_33/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_resnet_v2/batch_normalization_33/FusedBatchNormV3/ReadVariableOp_1"; +"inception_resnet_v2/batch_normalization_33/FusedBatchNormV3/ReadVariableOp_1" -> "inception_resnet_v2/batch_normalization_33/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_33/FusedBatchNormV3" -> "inception_resnet_v2/activation_33/Relu"; +"inception_resnet_v2/activation_33/Relu" -> "inception_resnet_v2/conv2d_34/Conv2D"; +"inception_resnet_v2/conv2d_34/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_34/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/conv2d_34/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/conv2d_34/Sigmoid"; +"inception_resnet_v2/conv2d_34/Sigmoid" -> "inception_resnet_v2/conv2d_34/Round"; +"inception_resnet_v2/conv2d_34/Round" -> "inception_resnet_v2/conv2d_34/mul"; +"inception_resnet_v2/conv2d_34/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_34/ReadVariableOp"; +"inception_resnet_v2/conv2d_34/ReadVariableOp" -> "inception_resnet_v2/conv2d_34/mul"; +"inception_resnet_v2/conv2d_34/mul" -> "inception_resnet_v2/conv2d_34/Conv2D"; +"inception_resnet_v2/conv2d_34/Conv2D" -> "inception_resnet_v2/batch_normalization_34/FusedBatchNormV3"; +"inception_resnet_v2/conv2d_31/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_31/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/conv2d_31/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/conv2d_31/Sigmoid"; +"inception_resnet_v2/conv2d_31/Sigmoid" -> "inception_resnet_v2/conv2d_31/Round"; +"inception_resnet_v2/conv2d_31/Round" -> "inception_resnet_v2/conv2d_31/mul"; +"inception_resnet_v2/conv2d_31/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_31/ReadVariableOp"; +"inception_resnet_v2/conv2d_31/ReadVariableOp" -> "inception_resnet_v2/conv2d_31/mul"; +"inception_resnet_v2/conv2d_31/mul" -> "inception_resnet_v2/conv2d_31/Conv2D"; +"inception_resnet_v2/conv2d_31/Conv2D" -> "inception_resnet_v2/batch_normalization_31/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_34/Const" -> "inception_resnet_v2/batch_normalization_34/FusedBatchNormV3"; 
+"inception_resnet_v2/batch_normalization_34/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_34/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_34/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_34/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_34/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_34/FusedBatchNormV3/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_34/FusedBatchNormV3/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_34/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_34/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_resnet_v2/batch_normalization_34/FusedBatchNormV3/ReadVariableOp_1"; +"inception_resnet_v2/batch_normalization_34/FusedBatchNormV3/ReadVariableOp_1" -> "inception_resnet_v2/batch_normalization_34/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_34/FusedBatchNormV3" -> "inception_resnet_v2/activation_34/Relu"; +"inception_resnet_v2/batch_normalization_31/Const" -> "inception_resnet_v2/batch_normalization_31/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_31/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_31/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_31/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_31/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_31/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_31/FusedBatchNormV3/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_31/FusedBatchNormV3/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_31/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_31/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_resnet_v2/batch_normalization_31/FusedBatchNormV3/ReadVariableOp_1"; +"inception_resnet_v2/batch_normalization_31/FusedBatchNormV3/ReadVariableOp_1" -> "inception_resnet_v2/batch_normalization_31/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_31/FusedBatchNormV3" -> "inception_resnet_v2/activation_31/Relu"; +"inception_resnet_v2/activation_34/Relu" -> "inception_resnet_v2/conv2d_35/Conv2D"; +"inception_resnet_v2/activation_31/Relu" -> "inception_resnet_v2/conv2d_32/Conv2D"; +"inception_resnet_v2/conv2d_35/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_35/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/conv2d_35/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/conv2d_35/Sigmoid"; +"inception_resnet_v2/conv2d_35/Sigmoid" -> "inception_resnet_v2/conv2d_35/Round"; +"inception_resnet_v2/conv2d_35/Round" -> "inception_resnet_v2/conv2d_35/mul"; +"inception_resnet_v2/conv2d_35/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_35/ReadVariableOp"; +"inception_resnet_v2/conv2d_35/ReadVariableOp" -> "inception_resnet_v2/conv2d_35/mul"; +"inception_resnet_v2/conv2d_35/mul" -> "inception_resnet_v2/conv2d_35/Conv2D"; +"inception_resnet_v2/conv2d_35/Conv2D" -> "inception_resnet_v2/batch_normalization_35/FusedBatchNormV3"; +"inception_resnet_v2/conv2d_32/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_32/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/conv2d_32/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/conv2d_32/Sigmoid"; +"inception_resnet_v2/conv2d_32/Sigmoid" -> "inception_resnet_v2/conv2d_32/Round"; +"inception_resnet_v2/conv2d_32/Round" -> "inception_resnet_v2/conv2d_32/mul"; +"inception_resnet_v2/conv2d_32/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_32/ReadVariableOp"; 
+"inception_resnet_v2/conv2d_32/ReadVariableOp" -> "inception_resnet_v2/conv2d_32/mul"; +"inception_resnet_v2/conv2d_32/mul" -> "inception_resnet_v2/conv2d_32/Conv2D"; +"inception_resnet_v2/conv2d_32/Conv2D" -> "inception_resnet_v2/batch_normalization_32/FusedBatchNormV3"; +"inception_resnet_v2/conv2d_30/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_30/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/conv2d_30/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/conv2d_30/Sigmoid"; +"inception_resnet_v2/conv2d_30/Sigmoid" -> "inception_resnet_v2/conv2d_30/Round"; +"inception_resnet_v2/conv2d_30/Round" -> "inception_resnet_v2/conv2d_30/mul"; +"inception_resnet_v2/conv2d_30/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_30/ReadVariableOp"; +"inception_resnet_v2/conv2d_30/ReadVariableOp" -> "inception_resnet_v2/conv2d_30/mul"; +"inception_resnet_v2/conv2d_30/mul" -> "inception_resnet_v2/conv2d_30/Conv2D"; +"inception_resnet_v2/conv2d_30/Conv2D" -> "inception_resnet_v2/batch_normalization_30/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_35/Const" -> "inception_resnet_v2/batch_normalization_35/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_35/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_35/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_35/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_35/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_35/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_35/FusedBatchNormV3/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_35/FusedBatchNormV3/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_35/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_35/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_resnet_v2/batch_normalization_35/FusedBatchNormV3/ReadVariableOp_1"; +"inception_resnet_v2/batch_normalization_35/FusedBatchNormV3/ReadVariableOp_1" -> "inception_resnet_v2/batch_normalization_35/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_35/FusedBatchNormV3" -> "inception_resnet_v2/activation_35/Relu"; +"inception_resnet_v2/batch_normalization_32/Const" -> "inception_resnet_v2/batch_normalization_32/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_32/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_32/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_32/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_32/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_32/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_32/FusedBatchNormV3/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_32/FusedBatchNormV3/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_32/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_32/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_resnet_v2/batch_normalization_32/FusedBatchNormV3/ReadVariableOp_1"; +"inception_resnet_v2/batch_normalization_32/FusedBatchNormV3/ReadVariableOp_1" -> "inception_resnet_v2/batch_normalization_32/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_32/FusedBatchNormV3" -> "inception_resnet_v2/activation_32/Relu"; +"inception_resnet_v2/batch_normalization_30/Const" -> "inception_resnet_v2/batch_normalization_30/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_30/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_30/ReadVariableOp"; 
+"inception_resnet_v2/batch_normalization_30/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_30/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_30/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_30/FusedBatchNormV3/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_30/FusedBatchNormV3/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_30/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_30/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_resnet_v2/batch_normalization_30/FusedBatchNormV3/ReadVariableOp_1"; +"inception_resnet_v2/batch_normalization_30/FusedBatchNormV3/ReadVariableOp_1" -> "inception_resnet_v2/batch_normalization_30/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_30/FusedBatchNormV3" -> "inception_resnet_v2/activation_30/Relu"; +"inception_resnet_v2/activation_30/Relu" -> "inception_resnet_v2/block35_4_mixed/concat"; +"inception_resnet_v2/activation_32/Relu" -> "inception_resnet_v2/block35_4_mixed/concat"; +"inception_resnet_v2/activation_35/Relu" -> "inception_resnet_v2/block35_4_mixed/concat"; +"inception_resnet_v2/block35_4_mixed/concat/axis" -> "inception_resnet_v2/block35_4_mixed/concat"; +"inception_resnet_v2/block35_4_mixed/concat" -> "inception_resnet_v2/block35_4_conv/Conv2D"; +"inception_resnet_v2/block35_4_conv/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/block35_4_conv/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/block35_4_conv/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/block35_4_conv/Sigmoid"; +"inception_resnet_v2/block35_4_conv/Sigmoid" -> "inception_resnet_v2/block35_4_conv/Round"; +"inception_resnet_v2/block35_4_conv/Round" -> "inception_resnet_v2/block35_4_conv/mul"; +"inception_resnet_v2/block35_4_conv/ReadVariableOp/resource" -> "inception_resnet_v2/block35_4_conv/ReadVariableOp"; +"inception_resnet_v2/block35_4_conv/ReadVariableOp" -> "inception_resnet_v2/block35_4_conv/mul"; +"inception_resnet_v2/block35_4_conv/mul" -> "inception_resnet_v2/block35_4_conv/Conv2D"; +"inception_resnet_v2/block35_4_conv/Conv2D" -> "inception_resnet_v2/block35_4_conv/BiasAdd"; +"inception_resnet_v2/block35_4_conv/BiasAdd/ReadVariableOp/resource" -> "inception_resnet_v2/block35_4_conv/BiasAdd/ReadVariableOp"; +"inception_resnet_v2/block35_4_conv/BiasAdd/ReadVariableOp" -> "inception_resnet_v2/block35_4_conv/BiasAdd"; +"inception_resnet_v2/block35_4_conv/BiasAdd" -> "inception_resnet_v2/custom_scale_layer_3/mul"; +"inception_resnet_v2/custom_scale_layer_3/mul/y" -> "inception_resnet_v2/custom_scale_layer_3/mul"; +"inception_resnet_v2/custom_scale_layer_3/mul" -> "inception_resnet_v2/custom_scale_layer_3/add"; +"inception_resnet_v2/custom_scale_layer_3/add" -> "inception_resnet_v2/block35_4_ac/Relu"; +"inception_resnet_v2/block35_4_ac/Relu" -> "inception_resnet_v2/conv2d_39/Conv2D"; +"inception_resnet_v2/block35_4_ac/Relu" -> "inception_resnet_v2/conv2d_37/Conv2D"; +"inception_resnet_v2/block35_4_ac/Relu" -> "inception_resnet_v2/conv2d_36/Conv2D"; +"inception_resnet_v2/block35_4_ac/Relu" -> "inception_resnet_v2/custom_scale_layer_4/add"; +"inception_resnet_v2/conv2d_39/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_39/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/conv2d_39/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/conv2d_39/Sigmoid"; +"inception_resnet_v2/conv2d_39/Sigmoid" -> "inception_resnet_v2/conv2d_39/Round"; +"inception_resnet_v2/conv2d_39/Round" -> "inception_resnet_v2/conv2d_39/mul"; 
+"inception_resnet_v2/conv2d_39/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_39/ReadVariableOp"; +"inception_resnet_v2/conv2d_39/ReadVariableOp" -> "inception_resnet_v2/conv2d_39/mul"; +"inception_resnet_v2/conv2d_39/mul" -> "inception_resnet_v2/conv2d_39/Conv2D"; +"inception_resnet_v2/conv2d_39/Conv2D" -> "inception_resnet_v2/batch_normalization_39/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_39/Const" -> "inception_resnet_v2/batch_normalization_39/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_39/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_39/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_39/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_39/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_39/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_39/FusedBatchNormV3/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_39/FusedBatchNormV3/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_39/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_39/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_resnet_v2/batch_normalization_39/FusedBatchNormV3/ReadVariableOp_1"; +"inception_resnet_v2/batch_normalization_39/FusedBatchNormV3/ReadVariableOp_1" -> "inception_resnet_v2/batch_normalization_39/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_39/FusedBatchNormV3" -> "inception_resnet_v2/activation_39/Relu"; +"inception_resnet_v2/activation_39/Relu" -> "inception_resnet_v2/conv2d_40/Conv2D"; +"inception_resnet_v2/conv2d_40/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_40/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/conv2d_40/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/conv2d_40/Sigmoid"; +"inception_resnet_v2/conv2d_40/Sigmoid" -> "inception_resnet_v2/conv2d_40/Round"; +"inception_resnet_v2/conv2d_40/Round" -> "inception_resnet_v2/conv2d_40/mul"; +"inception_resnet_v2/conv2d_40/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_40/ReadVariableOp"; +"inception_resnet_v2/conv2d_40/ReadVariableOp" -> "inception_resnet_v2/conv2d_40/mul"; +"inception_resnet_v2/conv2d_40/mul" -> "inception_resnet_v2/conv2d_40/Conv2D"; +"inception_resnet_v2/conv2d_40/Conv2D" -> "inception_resnet_v2/batch_normalization_40/FusedBatchNormV3"; +"inception_resnet_v2/conv2d_37/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_37/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/conv2d_37/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/conv2d_37/Sigmoid"; +"inception_resnet_v2/conv2d_37/Sigmoid" -> "inception_resnet_v2/conv2d_37/Round"; +"inception_resnet_v2/conv2d_37/Round" -> "inception_resnet_v2/conv2d_37/mul"; +"inception_resnet_v2/conv2d_37/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_37/ReadVariableOp"; +"inception_resnet_v2/conv2d_37/ReadVariableOp" -> "inception_resnet_v2/conv2d_37/mul"; +"inception_resnet_v2/conv2d_37/mul" -> "inception_resnet_v2/conv2d_37/Conv2D"; +"inception_resnet_v2/conv2d_37/Conv2D" -> "inception_resnet_v2/batch_normalization_37/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_40/Const" -> "inception_resnet_v2/batch_normalization_40/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_40/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_40/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_40/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_40/FusedBatchNormV3"; 
+"inception_resnet_v2/batch_normalization_40/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_40/FusedBatchNormV3/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_40/FusedBatchNormV3/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_40/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_40/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_resnet_v2/batch_normalization_40/FusedBatchNormV3/ReadVariableOp_1"; +"inception_resnet_v2/batch_normalization_40/FusedBatchNormV3/ReadVariableOp_1" -> "inception_resnet_v2/batch_normalization_40/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_40/FusedBatchNormV3" -> "inception_resnet_v2/activation_40/Relu"; +"inception_resnet_v2/batch_normalization_37/Const" -> "inception_resnet_v2/batch_normalization_37/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_37/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_37/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_37/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_37/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_37/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_37/FusedBatchNormV3/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_37/FusedBatchNormV3/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_37/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_37/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_resnet_v2/batch_normalization_37/FusedBatchNormV3/ReadVariableOp_1"; +"inception_resnet_v2/batch_normalization_37/FusedBatchNormV3/ReadVariableOp_1" -> "inception_resnet_v2/batch_normalization_37/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_37/FusedBatchNormV3" -> "inception_resnet_v2/activation_37/Relu"; +"inception_resnet_v2/activation_40/Relu" -> "inception_resnet_v2/conv2d_41/Conv2D"; +"inception_resnet_v2/activation_37/Relu" -> "inception_resnet_v2/conv2d_38/Conv2D"; +"inception_resnet_v2/conv2d_41/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_41/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/conv2d_41/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/conv2d_41/Sigmoid"; +"inception_resnet_v2/conv2d_41/Sigmoid" -> "inception_resnet_v2/conv2d_41/Round"; +"inception_resnet_v2/conv2d_41/Round" -> "inception_resnet_v2/conv2d_41/mul"; +"inception_resnet_v2/conv2d_41/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_41/ReadVariableOp"; +"inception_resnet_v2/conv2d_41/ReadVariableOp" -> "inception_resnet_v2/conv2d_41/mul"; +"inception_resnet_v2/conv2d_41/mul" -> "inception_resnet_v2/conv2d_41/Conv2D"; +"inception_resnet_v2/conv2d_41/Conv2D" -> "inception_resnet_v2/batch_normalization_41/FusedBatchNormV3"; +"inception_resnet_v2/conv2d_38/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_38/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/conv2d_38/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/conv2d_38/Sigmoid"; +"inception_resnet_v2/conv2d_38/Sigmoid" -> "inception_resnet_v2/conv2d_38/Round"; +"inception_resnet_v2/conv2d_38/Round" -> "inception_resnet_v2/conv2d_38/mul"; +"inception_resnet_v2/conv2d_38/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_38/ReadVariableOp"; +"inception_resnet_v2/conv2d_38/ReadVariableOp" -> "inception_resnet_v2/conv2d_38/mul"; +"inception_resnet_v2/conv2d_38/mul" -> "inception_resnet_v2/conv2d_38/Conv2D"; +"inception_resnet_v2/conv2d_38/Conv2D" -> 
"inception_resnet_v2/batch_normalization_38/FusedBatchNormV3"; +"inception_resnet_v2/conv2d_36/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_36/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/conv2d_36/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/conv2d_36/Sigmoid"; +"inception_resnet_v2/conv2d_36/Sigmoid" -> "inception_resnet_v2/conv2d_36/Round"; +"inception_resnet_v2/conv2d_36/Round" -> "inception_resnet_v2/conv2d_36/mul"; +"inception_resnet_v2/conv2d_36/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_36/ReadVariableOp"; +"inception_resnet_v2/conv2d_36/ReadVariableOp" -> "inception_resnet_v2/conv2d_36/mul"; +"inception_resnet_v2/conv2d_36/mul" -> "inception_resnet_v2/conv2d_36/Conv2D"; +"inception_resnet_v2/conv2d_36/Conv2D" -> "inception_resnet_v2/batch_normalization_36/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_41/Const" -> "inception_resnet_v2/batch_normalization_41/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_41/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_41/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_41/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_41/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_41/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_41/FusedBatchNormV3/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_41/FusedBatchNormV3/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_41/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_41/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_resnet_v2/batch_normalization_41/FusedBatchNormV3/ReadVariableOp_1"; +"inception_resnet_v2/batch_normalization_41/FusedBatchNormV3/ReadVariableOp_1" -> "inception_resnet_v2/batch_normalization_41/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_41/FusedBatchNormV3" -> "inception_resnet_v2/activation_41/Relu"; +"inception_resnet_v2/batch_normalization_38/Const" -> "inception_resnet_v2/batch_normalization_38/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_38/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_38/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_38/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_38/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_38/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_38/FusedBatchNormV3/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_38/FusedBatchNormV3/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_38/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_38/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_resnet_v2/batch_normalization_38/FusedBatchNormV3/ReadVariableOp_1"; +"inception_resnet_v2/batch_normalization_38/FusedBatchNormV3/ReadVariableOp_1" -> "inception_resnet_v2/batch_normalization_38/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_38/FusedBatchNormV3" -> "inception_resnet_v2/activation_38/Relu"; +"inception_resnet_v2/batch_normalization_36/Const" -> "inception_resnet_v2/batch_normalization_36/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_36/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_36/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_36/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_36/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_36/FusedBatchNormV3/ReadVariableOp/resource" -> 
"inception_resnet_v2/batch_normalization_36/FusedBatchNormV3/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_36/FusedBatchNormV3/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_36/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_36/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_resnet_v2/batch_normalization_36/FusedBatchNormV3/ReadVariableOp_1"; +"inception_resnet_v2/batch_normalization_36/FusedBatchNormV3/ReadVariableOp_1" -> "inception_resnet_v2/batch_normalization_36/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_36/FusedBatchNormV3" -> "inception_resnet_v2/activation_36/Relu"; +"inception_resnet_v2/activation_36/Relu" -> "inception_resnet_v2/block35_5_mixed/concat"; +"inception_resnet_v2/activation_38/Relu" -> "inception_resnet_v2/block35_5_mixed/concat"; +"inception_resnet_v2/activation_41/Relu" -> "inception_resnet_v2/block35_5_mixed/concat"; +"inception_resnet_v2/block35_5_mixed/concat/axis" -> "inception_resnet_v2/block35_5_mixed/concat"; +"inception_resnet_v2/block35_5_mixed/concat" -> "inception_resnet_v2/block35_5_conv/Conv2D"; +"inception_resnet_v2/block35_5_conv/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/block35_5_conv/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/block35_5_conv/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/block35_5_conv/Sigmoid"; +"inception_resnet_v2/block35_5_conv/Sigmoid" -> "inception_resnet_v2/block35_5_conv/Round"; +"inception_resnet_v2/block35_5_conv/Round" -> "inception_resnet_v2/block35_5_conv/mul"; +"inception_resnet_v2/block35_5_conv/ReadVariableOp/resource" -> "inception_resnet_v2/block35_5_conv/ReadVariableOp"; +"inception_resnet_v2/block35_5_conv/ReadVariableOp" -> "inception_resnet_v2/block35_5_conv/mul"; +"inception_resnet_v2/block35_5_conv/mul" -> "inception_resnet_v2/block35_5_conv/Conv2D"; +"inception_resnet_v2/block35_5_conv/Conv2D" -> "inception_resnet_v2/block35_5_conv/BiasAdd"; +"inception_resnet_v2/block35_5_conv/BiasAdd/ReadVariableOp/resource" -> "inception_resnet_v2/block35_5_conv/BiasAdd/ReadVariableOp"; +"inception_resnet_v2/block35_5_conv/BiasAdd/ReadVariableOp" -> "inception_resnet_v2/block35_5_conv/BiasAdd"; +"inception_resnet_v2/block35_5_conv/BiasAdd" -> "inception_resnet_v2/custom_scale_layer_4/mul"; +"inception_resnet_v2/custom_scale_layer_4/mul/y" -> "inception_resnet_v2/custom_scale_layer_4/mul"; +"inception_resnet_v2/custom_scale_layer_4/mul" -> "inception_resnet_v2/custom_scale_layer_4/add"; +"inception_resnet_v2/custom_scale_layer_4/add" -> "inception_resnet_v2/block35_5_ac/Relu"; +"inception_resnet_v2/block35_5_ac/Relu" -> "inception_resnet_v2/conv2d_45/Conv2D"; +"inception_resnet_v2/block35_5_ac/Relu" -> "inception_resnet_v2/conv2d_43/Conv2D"; +"inception_resnet_v2/block35_5_ac/Relu" -> "inception_resnet_v2/conv2d_42/Conv2D"; +"inception_resnet_v2/block35_5_ac/Relu" -> "inception_resnet_v2/custom_scale_layer_5/add"; +"inception_resnet_v2/conv2d_45/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_45/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/conv2d_45/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/conv2d_45/Sigmoid"; +"inception_resnet_v2/conv2d_45/Sigmoid" -> "inception_resnet_v2/conv2d_45/Round"; +"inception_resnet_v2/conv2d_45/Round" -> "inception_resnet_v2/conv2d_45/mul"; +"inception_resnet_v2/conv2d_45/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_45/ReadVariableOp"; +"inception_resnet_v2/conv2d_45/ReadVariableOp" -> "inception_resnet_v2/conv2d_45/mul"; +"inception_resnet_v2/conv2d_45/mul" -> 
"inception_resnet_v2/conv2d_45/Conv2D"; +"inception_resnet_v2/conv2d_45/Conv2D" -> "inception_resnet_v2/batch_normalization_45/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_45/Const" -> "inception_resnet_v2/batch_normalization_45/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_45/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_45/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_45/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_45/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_45/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_45/FusedBatchNormV3/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_45/FusedBatchNormV3/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_45/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_45/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_resnet_v2/batch_normalization_45/FusedBatchNormV3/ReadVariableOp_1"; +"inception_resnet_v2/batch_normalization_45/FusedBatchNormV3/ReadVariableOp_1" -> "inception_resnet_v2/batch_normalization_45/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_45/FusedBatchNormV3" -> "inception_resnet_v2/activation_45/Relu"; +"inception_resnet_v2/activation_45/Relu" -> "inception_resnet_v2/conv2d_46/Conv2D"; +"inception_resnet_v2/conv2d_46/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_46/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/conv2d_46/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/conv2d_46/Sigmoid"; +"inception_resnet_v2/conv2d_46/Sigmoid" -> "inception_resnet_v2/conv2d_46/Round"; +"inception_resnet_v2/conv2d_46/Round" -> "inception_resnet_v2/conv2d_46/mul"; +"inception_resnet_v2/conv2d_46/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_46/ReadVariableOp"; +"inception_resnet_v2/conv2d_46/ReadVariableOp" -> "inception_resnet_v2/conv2d_46/mul"; +"inception_resnet_v2/conv2d_46/mul" -> "inception_resnet_v2/conv2d_46/Conv2D"; +"inception_resnet_v2/conv2d_46/Conv2D" -> "inception_resnet_v2/batch_normalization_46/FusedBatchNormV3"; +"inception_resnet_v2/conv2d_43/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_43/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/conv2d_43/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/conv2d_43/Sigmoid"; +"inception_resnet_v2/conv2d_43/Sigmoid" -> "inception_resnet_v2/conv2d_43/Round"; +"inception_resnet_v2/conv2d_43/Round" -> "inception_resnet_v2/conv2d_43/mul"; +"inception_resnet_v2/conv2d_43/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_43/ReadVariableOp"; +"inception_resnet_v2/conv2d_43/ReadVariableOp" -> "inception_resnet_v2/conv2d_43/mul"; +"inception_resnet_v2/conv2d_43/mul" -> "inception_resnet_v2/conv2d_43/Conv2D"; +"inception_resnet_v2/conv2d_43/Conv2D" -> "inception_resnet_v2/batch_normalization_43/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_46/Const" -> "inception_resnet_v2/batch_normalization_46/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_46/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_46/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_46/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_46/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_46/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_46/FusedBatchNormV3/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_46/FusedBatchNormV3/ReadVariableOp" -> 
"inception_resnet_v2/batch_normalization_46/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_46/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_resnet_v2/batch_normalization_46/FusedBatchNormV3/ReadVariableOp_1"; +"inception_resnet_v2/batch_normalization_46/FusedBatchNormV3/ReadVariableOp_1" -> "inception_resnet_v2/batch_normalization_46/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_46/FusedBatchNormV3" -> "inception_resnet_v2/activation_46/Relu"; +"inception_resnet_v2/batch_normalization_43/Const" -> "inception_resnet_v2/batch_normalization_43/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_43/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_43/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_43/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_43/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_43/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_43/FusedBatchNormV3/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_43/FusedBatchNormV3/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_43/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_43/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_resnet_v2/batch_normalization_43/FusedBatchNormV3/ReadVariableOp_1"; +"inception_resnet_v2/batch_normalization_43/FusedBatchNormV3/ReadVariableOp_1" -> "inception_resnet_v2/batch_normalization_43/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_43/FusedBatchNormV3" -> "inception_resnet_v2/activation_43/Relu"; +"inception_resnet_v2/activation_46/Relu" -> "inception_resnet_v2/conv2d_47/Conv2D"; +"inception_resnet_v2/activation_43/Relu" -> "inception_resnet_v2/conv2d_44/Conv2D"; +"inception_resnet_v2/conv2d_47/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_47/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/conv2d_47/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/conv2d_47/Sigmoid"; +"inception_resnet_v2/conv2d_47/Sigmoid" -> "inception_resnet_v2/conv2d_47/Round"; +"inception_resnet_v2/conv2d_47/Round" -> "inception_resnet_v2/conv2d_47/mul"; +"inception_resnet_v2/conv2d_47/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_47/ReadVariableOp"; +"inception_resnet_v2/conv2d_47/ReadVariableOp" -> "inception_resnet_v2/conv2d_47/mul"; +"inception_resnet_v2/conv2d_47/mul" -> "inception_resnet_v2/conv2d_47/Conv2D"; +"inception_resnet_v2/conv2d_47/Conv2D" -> "inception_resnet_v2/batch_normalization_47/FusedBatchNormV3"; +"inception_resnet_v2/conv2d_44/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_44/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/conv2d_44/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/conv2d_44/Sigmoid"; +"inception_resnet_v2/conv2d_44/Sigmoid" -> "inception_resnet_v2/conv2d_44/Round"; +"inception_resnet_v2/conv2d_44/Round" -> "inception_resnet_v2/conv2d_44/mul"; +"inception_resnet_v2/conv2d_44/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_44/ReadVariableOp"; +"inception_resnet_v2/conv2d_44/ReadVariableOp" -> "inception_resnet_v2/conv2d_44/mul"; +"inception_resnet_v2/conv2d_44/mul" -> "inception_resnet_v2/conv2d_44/Conv2D"; +"inception_resnet_v2/conv2d_44/Conv2D" -> "inception_resnet_v2/batch_normalization_44/FusedBatchNormV3"; +"inception_resnet_v2/conv2d_42/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_42/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/conv2d_42/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/conv2d_42/Sigmoid"; 
+"inception_resnet_v2/conv2d_42/Sigmoid" -> "inception_resnet_v2/conv2d_42/Round"; +"inception_resnet_v2/conv2d_42/Round" -> "inception_resnet_v2/conv2d_42/mul"; +"inception_resnet_v2/conv2d_42/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_42/ReadVariableOp"; +"inception_resnet_v2/conv2d_42/ReadVariableOp" -> "inception_resnet_v2/conv2d_42/mul"; +"inception_resnet_v2/conv2d_42/mul" -> "inception_resnet_v2/conv2d_42/Conv2D"; +"inception_resnet_v2/conv2d_42/Conv2D" -> "inception_resnet_v2/batch_normalization_42/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_47/Const" -> "inception_resnet_v2/batch_normalization_47/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_47/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_47/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_47/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_47/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_47/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_47/FusedBatchNormV3/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_47/FusedBatchNormV3/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_47/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_47/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_resnet_v2/batch_normalization_47/FusedBatchNormV3/ReadVariableOp_1"; +"inception_resnet_v2/batch_normalization_47/FusedBatchNormV3/ReadVariableOp_1" -> "inception_resnet_v2/batch_normalization_47/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_47/FusedBatchNormV3" -> "inception_resnet_v2/activation_47/Relu"; +"inception_resnet_v2/batch_normalization_44/Const" -> "inception_resnet_v2/batch_normalization_44/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_44/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_44/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_44/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_44/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_44/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_44/FusedBatchNormV3/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_44/FusedBatchNormV3/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_44/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_44/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_resnet_v2/batch_normalization_44/FusedBatchNormV3/ReadVariableOp_1"; +"inception_resnet_v2/batch_normalization_44/FusedBatchNormV3/ReadVariableOp_1" -> "inception_resnet_v2/batch_normalization_44/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_44/FusedBatchNormV3" -> "inception_resnet_v2/activation_44/Relu"; +"inception_resnet_v2/batch_normalization_42/Const" -> "inception_resnet_v2/batch_normalization_42/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_42/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_42/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_42/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_42/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_42/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_42/FusedBatchNormV3/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_42/FusedBatchNormV3/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_42/FusedBatchNormV3"; 
+"inception_resnet_v2/batch_normalization_42/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_resnet_v2/batch_normalization_42/FusedBatchNormV3/ReadVariableOp_1"; +"inception_resnet_v2/batch_normalization_42/FusedBatchNormV3/ReadVariableOp_1" -> "inception_resnet_v2/batch_normalization_42/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_42/FusedBatchNormV3" -> "inception_resnet_v2/activation_42/Relu"; +"inception_resnet_v2/activation_42/Relu" -> "inception_resnet_v2/block35_6_mixed/concat"; +"inception_resnet_v2/activation_44/Relu" -> "inception_resnet_v2/block35_6_mixed/concat"; +"inception_resnet_v2/activation_47/Relu" -> "inception_resnet_v2/block35_6_mixed/concat"; +"inception_resnet_v2/block35_6_mixed/concat/axis" -> "inception_resnet_v2/block35_6_mixed/concat"; +"inception_resnet_v2/block35_6_mixed/concat" -> "inception_resnet_v2/block35_6_conv/Conv2D"; +"inception_resnet_v2/block35_6_conv/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/block35_6_conv/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/block35_6_conv/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/block35_6_conv/Sigmoid"; +"inception_resnet_v2/block35_6_conv/Sigmoid" -> "inception_resnet_v2/block35_6_conv/Round"; +"inception_resnet_v2/block35_6_conv/Round" -> "inception_resnet_v2/block35_6_conv/mul"; +"inception_resnet_v2/block35_6_conv/ReadVariableOp/resource" -> "inception_resnet_v2/block35_6_conv/ReadVariableOp"; +"inception_resnet_v2/block35_6_conv/ReadVariableOp" -> "inception_resnet_v2/block35_6_conv/mul"; +"inception_resnet_v2/block35_6_conv/mul" -> "inception_resnet_v2/block35_6_conv/Conv2D"; +"inception_resnet_v2/block35_6_conv/Conv2D" -> "inception_resnet_v2/block35_6_conv/BiasAdd"; +"inception_resnet_v2/block35_6_conv/BiasAdd/ReadVariableOp/resource" -> "inception_resnet_v2/block35_6_conv/BiasAdd/ReadVariableOp"; +"inception_resnet_v2/block35_6_conv/BiasAdd/ReadVariableOp" -> "inception_resnet_v2/block35_6_conv/BiasAdd"; +"inception_resnet_v2/block35_6_conv/BiasAdd" -> "inception_resnet_v2/custom_scale_layer_5/mul"; +"inception_resnet_v2/custom_scale_layer_5/mul/y" -> "inception_resnet_v2/custom_scale_layer_5/mul"; +"inception_resnet_v2/custom_scale_layer_5/mul" -> "inception_resnet_v2/custom_scale_layer_5/add"; +"inception_resnet_v2/custom_scale_layer_5/add" -> "inception_resnet_v2/block35_6_ac/Relu"; +"inception_resnet_v2/block35_6_ac/Relu" -> "inception_resnet_v2/conv2d_51/Conv2D"; +"inception_resnet_v2/block35_6_ac/Relu" -> "inception_resnet_v2/conv2d_49/Conv2D"; +"inception_resnet_v2/block35_6_ac/Relu" -> "inception_resnet_v2/conv2d_48/Conv2D"; +"inception_resnet_v2/block35_6_ac/Relu" -> "inception_resnet_v2/custom_scale_layer_6/add"; +"inception_resnet_v2/conv2d_51/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_51/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/conv2d_51/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/conv2d_51/Sigmoid"; +"inception_resnet_v2/conv2d_51/Sigmoid" -> "inception_resnet_v2/conv2d_51/Round"; +"inception_resnet_v2/conv2d_51/Round" -> "inception_resnet_v2/conv2d_51/mul"; +"inception_resnet_v2/conv2d_51/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_51/ReadVariableOp"; +"inception_resnet_v2/conv2d_51/ReadVariableOp" -> "inception_resnet_v2/conv2d_51/mul"; +"inception_resnet_v2/conv2d_51/mul" -> "inception_resnet_v2/conv2d_51/Conv2D"; +"inception_resnet_v2/conv2d_51/Conv2D" -> "inception_resnet_v2/batch_normalization_51/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_51/Const" -> 
"inception_resnet_v2/batch_normalization_51/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_51/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_51/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_51/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_51/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_51/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_51/FusedBatchNormV3/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_51/FusedBatchNormV3/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_51/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_51/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_resnet_v2/batch_normalization_51/FusedBatchNormV3/ReadVariableOp_1"; +"inception_resnet_v2/batch_normalization_51/FusedBatchNormV3/ReadVariableOp_1" -> "inception_resnet_v2/batch_normalization_51/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_51/FusedBatchNormV3" -> "inception_resnet_v2/activation_51/Relu"; +"inception_resnet_v2/activation_51/Relu" -> "inception_resnet_v2/conv2d_52/Conv2D"; +"inception_resnet_v2/conv2d_52/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_52/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/conv2d_52/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/conv2d_52/Sigmoid"; +"inception_resnet_v2/conv2d_52/Sigmoid" -> "inception_resnet_v2/conv2d_52/Round"; +"inception_resnet_v2/conv2d_52/Round" -> "inception_resnet_v2/conv2d_52/mul"; +"inception_resnet_v2/conv2d_52/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_52/ReadVariableOp"; +"inception_resnet_v2/conv2d_52/ReadVariableOp" -> "inception_resnet_v2/conv2d_52/mul"; +"inception_resnet_v2/conv2d_52/mul" -> "inception_resnet_v2/conv2d_52/Conv2D"; +"inception_resnet_v2/conv2d_52/Conv2D" -> "inception_resnet_v2/batch_normalization_52/FusedBatchNormV3"; +"inception_resnet_v2/conv2d_49/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_49/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/conv2d_49/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/conv2d_49/Sigmoid"; +"inception_resnet_v2/conv2d_49/Sigmoid" -> "inception_resnet_v2/conv2d_49/Round"; +"inception_resnet_v2/conv2d_49/Round" -> "inception_resnet_v2/conv2d_49/mul"; +"inception_resnet_v2/conv2d_49/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_49/ReadVariableOp"; +"inception_resnet_v2/conv2d_49/ReadVariableOp" -> "inception_resnet_v2/conv2d_49/mul"; +"inception_resnet_v2/conv2d_49/mul" -> "inception_resnet_v2/conv2d_49/Conv2D"; +"inception_resnet_v2/conv2d_49/Conv2D" -> "inception_resnet_v2/batch_normalization_49/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_52/Const" -> "inception_resnet_v2/batch_normalization_52/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_52/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_52/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_52/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_52/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_52/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_52/FusedBatchNormV3/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_52/FusedBatchNormV3/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_52/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_52/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_resnet_v2/batch_normalization_52/FusedBatchNormV3/ReadVariableOp_1"; 
+"inception_resnet_v2/batch_normalization_52/FusedBatchNormV3/ReadVariableOp_1" -> "inception_resnet_v2/batch_normalization_52/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_52/FusedBatchNormV3" -> "inception_resnet_v2/activation_52/Relu"; +"inception_resnet_v2/batch_normalization_49/Const" -> "inception_resnet_v2/batch_normalization_49/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_49/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_49/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_49/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_49/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_49/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_49/FusedBatchNormV3/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_49/FusedBatchNormV3/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_49/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_49/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_resnet_v2/batch_normalization_49/FusedBatchNormV3/ReadVariableOp_1"; +"inception_resnet_v2/batch_normalization_49/FusedBatchNormV3/ReadVariableOp_1" -> "inception_resnet_v2/batch_normalization_49/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_49/FusedBatchNormV3" -> "inception_resnet_v2/activation_49/Relu"; +"inception_resnet_v2/activation_52/Relu" -> "inception_resnet_v2/conv2d_53/Conv2D"; +"inception_resnet_v2/activation_49/Relu" -> "inception_resnet_v2/conv2d_50/Conv2D"; +"inception_resnet_v2/conv2d_53/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_53/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/conv2d_53/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/conv2d_53/Sigmoid"; +"inception_resnet_v2/conv2d_53/Sigmoid" -> "inception_resnet_v2/conv2d_53/Round"; +"inception_resnet_v2/conv2d_53/Round" -> "inception_resnet_v2/conv2d_53/mul"; +"inception_resnet_v2/conv2d_53/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_53/ReadVariableOp"; +"inception_resnet_v2/conv2d_53/ReadVariableOp" -> "inception_resnet_v2/conv2d_53/mul"; +"inception_resnet_v2/conv2d_53/mul" -> "inception_resnet_v2/conv2d_53/Conv2D"; +"inception_resnet_v2/conv2d_53/Conv2D" -> "inception_resnet_v2/batch_normalization_53/FusedBatchNormV3"; +"inception_resnet_v2/conv2d_50/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_50/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/conv2d_50/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/conv2d_50/Sigmoid"; +"inception_resnet_v2/conv2d_50/Sigmoid" -> "inception_resnet_v2/conv2d_50/Round"; +"inception_resnet_v2/conv2d_50/Round" -> "inception_resnet_v2/conv2d_50/mul"; +"inception_resnet_v2/conv2d_50/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_50/ReadVariableOp"; +"inception_resnet_v2/conv2d_50/ReadVariableOp" -> "inception_resnet_v2/conv2d_50/mul"; +"inception_resnet_v2/conv2d_50/mul" -> "inception_resnet_v2/conv2d_50/Conv2D"; +"inception_resnet_v2/conv2d_50/Conv2D" -> "inception_resnet_v2/batch_normalization_50/FusedBatchNormV3"; +"inception_resnet_v2/conv2d_48/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_48/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/conv2d_48/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/conv2d_48/Sigmoid"; +"inception_resnet_v2/conv2d_48/Sigmoid" -> "inception_resnet_v2/conv2d_48/Round"; +"inception_resnet_v2/conv2d_48/Round" -> "inception_resnet_v2/conv2d_48/mul"; +"inception_resnet_v2/conv2d_48/ReadVariableOp/resource" -> 
"inception_resnet_v2/conv2d_48/ReadVariableOp"; +"inception_resnet_v2/conv2d_48/ReadVariableOp" -> "inception_resnet_v2/conv2d_48/mul"; +"inception_resnet_v2/conv2d_48/mul" -> "inception_resnet_v2/conv2d_48/Conv2D"; +"inception_resnet_v2/conv2d_48/Conv2D" -> "inception_resnet_v2/batch_normalization_48/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_53/Const" -> "inception_resnet_v2/batch_normalization_53/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_53/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_53/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_53/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_53/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_53/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_53/FusedBatchNormV3/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_53/FusedBatchNormV3/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_53/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_53/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_resnet_v2/batch_normalization_53/FusedBatchNormV3/ReadVariableOp_1"; +"inception_resnet_v2/batch_normalization_53/FusedBatchNormV3/ReadVariableOp_1" -> "inception_resnet_v2/batch_normalization_53/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_53/FusedBatchNormV3" -> "inception_resnet_v2/activation_53/Relu"; +"inception_resnet_v2/batch_normalization_50/Const" -> "inception_resnet_v2/batch_normalization_50/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_50/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_50/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_50/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_50/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_50/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_50/FusedBatchNormV3/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_50/FusedBatchNormV3/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_50/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_50/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_resnet_v2/batch_normalization_50/FusedBatchNormV3/ReadVariableOp_1"; +"inception_resnet_v2/batch_normalization_50/FusedBatchNormV3/ReadVariableOp_1" -> "inception_resnet_v2/batch_normalization_50/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_50/FusedBatchNormV3" -> "inception_resnet_v2/activation_50/Relu"; +"inception_resnet_v2/batch_normalization_48/Const" -> "inception_resnet_v2/batch_normalization_48/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_48/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_48/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_48/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_48/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_48/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_48/FusedBatchNormV3/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_48/FusedBatchNormV3/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_48/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_48/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_resnet_v2/batch_normalization_48/FusedBatchNormV3/ReadVariableOp_1"; +"inception_resnet_v2/batch_normalization_48/FusedBatchNormV3/ReadVariableOp_1" -> 
"inception_resnet_v2/batch_normalization_48/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_48/FusedBatchNormV3" -> "inception_resnet_v2/activation_48/Relu"; +"inception_resnet_v2/activation_48/Relu" -> "inception_resnet_v2/block35_7_mixed/concat"; +"inception_resnet_v2/activation_50/Relu" -> "inception_resnet_v2/block35_7_mixed/concat"; +"inception_resnet_v2/activation_53/Relu" -> "inception_resnet_v2/block35_7_mixed/concat"; +"inception_resnet_v2/block35_7_mixed/concat/axis" -> "inception_resnet_v2/block35_7_mixed/concat"; +"inception_resnet_v2/block35_7_mixed/concat" -> "inception_resnet_v2/block35_7_conv/Conv2D"; +"inception_resnet_v2/block35_7_conv/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/block35_7_conv/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/block35_7_conv/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/block35_7_conv/Sigmoid"; +"inception_resnet_v2/block35_7_conv/Sigmoid" -> "inception_resnet_v2/block35_7_conv/Round"; +"inception_resnet_v2/block35_7_conv/Round" -> "inception_resnet_v2/block35_7_conv/mul"; +"inception_resnet_v2/block35_7_conv/ReadVariableOp/resource" -> "inception_resnet_v2/block35_7_conv/ReadVariableOp"; +"inception_resnet_v2/block35_7_conv/ReadVariableOp" -> "inception_resnet_v2/block35_7_conv/mul"; +"inception_resnet_v2/block35_7_conv/mul" -> "inception_resnet_v2/block35_7_conv/Conv2D"; +"inception_resnet_v2/block35_7_conv/Conv2D" -> "inception_resnet_v2/block35_7_conv/BiasAdd"; +"inception_resnet_v2/block35_7_conv/BiasAdd/ReadVariableOp/resource" -> "inception_resnet_v2/block35_7_conv/BiasAdd/ReadVariableOp"; +"inception_resnet_v2/block35_7_conv/BiasAdd/ReadVariableOp" -> "inception_resnet_v2/block35_7_conv/BiasAdd"; +"inception_resnet_v2/block35_7_conv/BiasAdd" -> "inception_resnet_v2/custom_scale_layer_6/mul"; +"inception_resnet_v2/custom_scale_layer_6/mul/y" -> "inception_resnet_v2/custom_scale_layer_6/mul"; +"inception_resnet_v2/custom_scale_layer_6/mul" -> "inception_resnet_v2/custom_scale_layer_6/add"; +"inception_resnet_v2/custom_scale_layer_6/add" -> "inception_resnet_v2/block35_7_ac/Relu"; +"inception_resnet_v2/block35_7_ac/Relu" -> "inception_resnet_v2/conv2d_57/Conv2D"; +"inception_resnet_v2/block35_7_ac/Relu" -> "inception_resnet_v2/conv2d_55/Conv2D"; +"inception_resnet_v2/block35_7_ac/Relu" -> "inception_resnet_v2/conv2d_54/Conv2D"; +"inception_resnet_v2/block35_7_ac/Relu" -> "inception_resnet_v2/custom_scale_layer_7/add"; +"inception_resnet_v2/conv2d_57/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_57/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/conv2d_57/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/conv2d_57/Sigmoid"; +"inception_resnet_v2/conv2d_57/Sigmoid" -> "inception_resnet_v2/conv2d_57/Round"; +"inception_resnet_v2/conv2d_57/Round" -> "inception_resnet_v2/conv2d_57/mul"; +"inception_resnet_v2/conv2d_57/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_57/ReadVariableOp"; +"inception_resnet_v2/conv2d_57/ReadVariableOp" -> "inception_resnet_v2/conv2d_57/mul"; +"inception_resnet_v2/conv2d_57/mul" -> "inception_resnet_v2/conv2d_57/Conv2D"; +"inception_resnet_v2/conv2d_57/Conv2D" -> "inception_resnet_v2/batch_normalization_57/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_57/Const" -> "inception_resnet_v2/batch_normalization_57/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_57/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_57/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_57/ReadVariableOp" -> 
"inception_resnet_v2/batch_normalization_57/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_57/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_57/FusedBatchNormV3/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_57/FusedBatchNormV3/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_57/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_57/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_resnet_v2/batch_normalization_57/FusedBatchNormV3/ReadVariableOp_1"; +"inception_resnet_v2/batch_normalization_57/FusedBatchNormV3/ReadVariableOp_1" -> "inception_resnet_v2/batch_normalization_57/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_57/FusedBatchNormV3" -> "inception_resnet_v2/activation_57/Relu"; +"inception_resnet_v2/activation_57/Relu" -> "inception_resnet_v2/conv2d_58/Conv2D"; +"inception_resnet_v2/conv2d_58/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_58/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/conv2d_58/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/conv2d_58/Sigmoid"; +"inception_resnet_v2/conv2d_58/Sigmoid" -> "inception_resnet_v2/conv2d_58/Round"; +"inception_resnet_v2/conv2d_58/Round" -> "inception_resnet_v2/conv2d_58/mul"; +"inception_resnet_v2/conv2d_58/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_58/ReadVariableOp"; +"inception_resnet_v2/conv2d_58/ReadVariableOp" -> "inception_resnet_v2/conv2d_58/mul"; +"inception_resnet_v2/conv2d_58/mul" -> "inception_resnet_v2/conv2d_58/Conv2D"; +"inception_resnet_v2/conv2d_58/Conv2D" -> "inception_resnet_v2/batch_normalization_58/FusedBatchNormV3"; +"inception_resnet_v2/conv2d_55/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_55/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/conv2d_55/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/conv2d_55/Sigmoid"; +"inception_resnet_v2/conv2d_55/Sigmoid" -> "inception_resnet_v2/conv2d_55/Round"; +"inception_resnet_v2/conv2d_55/Round" -> "inception_resnet_v2/conv2d_55/mul"; +"inception_resnet_v2/conv2d_55/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_55/ReadVariableOp"; +"inception_resnet_v2/conv2d_55/ReadVariableOp" -> "inception_resnet_v2/conv2d_55/mul"; +"inception_resnet_v2/conv2d_55/mul" -> "inception_resnet_v2/conv2d_55/Conv2D"; +"inception_resnet_v2/conv2d_55/Conv2D" -> "inception_resnet_v2/batch_normalization_55/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_58/Const" -> "inception_resnet_v2/batch_normalization_58/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_58/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_58/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_58/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_58/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_58/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_58/FusedBatchNormV3/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_58/FusedBatchNormV3/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_58/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_58/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_resnet_v2/batch_normalization_58/FusedBatchNormV3/ReadVariableOp_1"; +"inception_resnet_v2/batch_normalization_58/FusedBatchNormV3/ReadVariableOp_1" -> "inception_resnet_v2/batch_normalization_58/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_58/FusedBatchNormV3" -> "inception_resnet_v2/activation_58/Relu"; 
+"inception_resnet_v2/batch_normalization_55/Const" -> "inception_resnet_v2/batch_normalization_55/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_55/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_55/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_55/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_55/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_55/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_55/FusedBatchNormV3/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_55/FusedBatchNormV3/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_55/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_55/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_resnet_v2/batch_normalization_55/FusedBatchNormV3/ReadVariableOp_1"; +"inception_resnet_v2/batch_normalization_55/FusedBatchNormV3/ReadVariableOp_1" -> "inception_resnet_v2/batch_normalization_55/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_55/FusedBatchNormV3" -> "inception_resnet_v2/activation_55/Relu"; +"inception_resnet_v2/activation_58/Relu" -> "inception_resnet_v2/conv2d_59/Conv2D"; +"inception_resnet_v2/activation_55/Relu" -> "inception_resnet_v2/conv2d_56/Conv2D"; +"inception_resnet_v2/conv2d_59/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_59/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/conv2d_59/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/conv2d_59/Sigmoid"; +"inception_resnet_v2/conv2d_59/Sigmoid" -> "inception_resnet_v2/conv2d_59/Round"; +"inception_resnet_v2/conv2d_59/Round" -> "inception_resnet_v2/conv2d_59/mul"; +"inception_resnet_v2/conv2d_59/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_59/ReadVariableOp"; +"inception_resnet_v2/conv2d_59/ReadVariableOp" -> "inception_resnet_v2/conv2d_59/mul"; +"inception_resnet_v2/conv2d_59/mul" -> "inception_resnet_v2/conv2d_59/Conv2D"; +"inception_resnet_v2/conv2d_59/Conv2D" -> "inception_resnet_v2/batch_normalization_59/FusedBatchNormV3"; +"inception_resnet_v2/conv2d_56/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_56/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/conv2d_56/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/conv2d_56/Sigmoid"; +"inception_resnet_v2/conv2d_56/Sigmoid" -> "inception_resnet_v2/conv2d_56/Round"; +"inception_resnet_v2/conv2d_56/Round" -> "inception_resnet_v2/conv2d_56/mul"; +"inception_resnet_v2/conv2d_56/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_56/ReadVariableOp"; +"inception_resnet_v2/conv2d_56/ReadVariableOp" -> "inception_resnet_v2/conv2d_56/mul"; +"inception_resnet_v2/conv2d_56/mul" -> "inception_resnet_v2/conv2d_56/Conv2D"; +"inception_resnet_v2/conv2d_56/Conv2D" -> "inception_resnet_v2/batch_normalization_56/FusedBatchNormV3"; +"inception_resnet_v2/conv2d_54/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_54/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/conv2d_54/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/conv2d_54/Sigmoid"; +"inception_resnet_v2/conv2d_54/Sigmoid" -> "inception_resnet_v2/conv2d_54/Round"; +"inception_resnet_v2/conv2d_54/Round" -> "inception_resnet_v2/conv2d_54/mul"; +"inception_resnet_v2/conv2d_54/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_54/ReadVariableOp"; +"inception_resnet_v2/conv2d_54/ReadVariableOp" -> "inception_resnet_v2/conv2d_54/mul"; +"inception_resnet_v2/conv2d_54/mul" -> "inception_resnet_v2/conv2d_54/Conv2D"; +"inception_resnet_v2/conv2d_54/Conv2D" -> 
"inception_resnet_v2/batch_normalization_54/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_59/Const" -> "inception_resnet_v2/batch_normalization_59/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_59/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_59/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_59/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_59/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_59/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_59/FusedBatchNormV3/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_59/FusedBatchNormV3/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_59/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_59/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_resnet_v2/batch_normalization_59/FusedBatchNormV3/ReadVariableOp_1"; +"inception_resnet_v2/batch_normalization_59/FusedBatchNormV3/ReadVariableOp_1" -> "inception_resnet_v2/batch_normalization_59/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_59/FusedBatchNormV3" -> "inception_resnet_v2/activation_59/Relu"; +"inception_resnet_v2/batch_normalization_56/Const" -> "inception_resnet_v2/batch_normalization_56/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_56/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_56/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_56/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_56/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_56/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_56/FusedBatchNormV3/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_56/FusedBatchNormV3/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_56/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_56/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_resnet_v2/batch_normalization_56/FusedBatchNormV3/ReadVariableOp_1"; +"inception_resnet_v2/batch_normalization_56/FusedBatchNormV3/ReadVariableOp_1" -> "inception_resnet_v2/batch_normalization_56/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_56/FusedBatchNormV3" -> "inception_resnet_v2/activation_56/Relu"; +"inception_resnet_v2/batch_normalization_54/Const" -> "inception_resnet_v2/batch_normalization_54/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_54/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_54/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_54/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_54/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_54/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_54/FusedBatchNormV3/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_54/FusedBatchNormV3/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_54/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_54/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_resnet_v2/batch_normalization_54/FusedBatchNormV3/ReadVariableOp_1"; +"inception_resnet_v2/batch_normalization_54/FusedBatchNormV3/ReadVariableOp_1" -> "inception_resnet_v2/batch_normalization_54/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_54/FusedBatchNormV3" -> "inception_resnet_v2/activation_54/Relu"; +"inception_resnet_v2/activation_54/Relu" -> "inception_resnet_v2/block35_8_mixed/concat"; +"inception_resnet_v2/activation_56/Relu" -> 
"inception_resnet_v2/block35_8_mixed/concat"; +"inception_resnet_v2/activation_59/Relu" -> "inception_resnet_v2/block35_8_mixed/concat"; +"inception_resnet_v2/block35_8_mixed/concat/axis" -> "inception_resnet_v2/block35_8_mixed/concat"; +"inception_resnet_v2/block35_8_mixed/concat" -> "inception_resnet_v2/block35_8_conv/Conv2D"; +"inception_resnet_v2/block35_8_conv/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/block35_8_conv/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/block35_8_conv/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/block35_8_conv/Sigmoid"; +"inception_resnet_v2/block35_8_conv/Sigmoid" -> "inception_resnet_v2/block35_8_conv/Round"; +"inception_resnet_v2/block35_8_conv/Round" -> "inception_resnet_v2/block35_8_conv/mul"; +"inception_resnet_v2/block35_8_conv/ReadVariableOp/resource" -> "inception_resnet_v2/block35_8_conv/ReadVariableOp"; +"inception_resnet_v2/block35_8_conv/ReadVariableOp" -> "inception_resnet_v2/block35_8_conv/mul"; +"inception_resnet_v2/block35_8_conv/mul" -> "inception_resnet_v2/block35_8_conv/Conv2D"; +"inception_resnet_v2/block35_8_conv/Conv2D" -> "inception_resnet_v2/block35_8_conv/BiasAdd"; +"inception_resnet_v2/block35_8_conv/BiasAdd/ReadVariableOp/resource" -> "inception_resnet_v2/block35_8_conv/BiasAdd/ReadVariableOp"; +"inception_resnet_v2/block35_8_conv/BiasAdd/ReadVariableOp" -> "inception_resnet_v2/block35_8_conv/BiasAdd"; +"inception_resnet_v2/block35_8_conv/BiasAdd" -> "inception_resnet_v2/custom_scale_layer_7/mul"; +"inception_resnet_v2/custom_scale_layer_7/mul/y" -> "inception_resnet_v2/custom_scale_layer_7/mul"; +"inception_resnet_v2/custom_scale_layer_7/mul" -> "inception_resnet_v2/custom_scale_layer_7/add"; +"inception_resnet_v2/custom_scale_layer_7/add" -> "inception_resnet_v2/block35_8_ac/Relu"; +"inception_resnet_v2/block35_8_ac/Relu" -> "inception_resnet_v2/conv2d_63/Conv2D"; +"inception_resnet_v2/block35_8_ac/Relu" -> "inception_resnet_v2/conv2d_61/Conv2D"; +"inception_resnet_v2/block35_8_ac/Relu" -> "inception_resnet_v2/conv2d_60/Conv2D"; +"inception_resnet_v2/block35_8_ac/Relu" -> "inception_resnet_v2/custom_scale_layer_8/add"; +"inception_resnet_v2/conv2d_63/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_63/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/conv2d_63/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/conv2d_63/Sigmoid"; +"inception_resnet_v2/conv2d_63/Sigmoid" -> "inception_resnet_v2/conv2d_63/Round"; +"inception_resnet_v2/conv2d_63/Round" -> "inception_resnet_v2/conv2d_63/mul"; +"inception_resnet_v2/conv2d_63/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_63/ReadVariableOp"; +"inception_resnet_v2/conv2d_63/ReadVariableOp" -> "inception_resnet_v2/conv2d_63/mul"; +"inception_resnet_v2/conv2d_63/mul" -> "inception_resnet_v2/conv2d_63/Conv2D"; +"inception_resnet_v2/conv2d_63/Conv2D" -> "inception_resnet_v2/batch_normalization_63/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_63/Const" -> "inception_resnet_v2/batch_normalization_63/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_63/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_63/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_63/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_63/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_63/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_63/FusedBatchNormV3/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_63/FusedBatchNormV3/ReadVariableOp" -> 
"inception_resnet_v2/batch_normalization_63/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_63/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_resnet_v2/batch_normalization_63/FusedBatchNormV3/ReadVariableOp_1"; +"inception_resnet_v2/batch_normalization_63/FusedBatchNormV3/ReadVariableOp_1" -> "inception_resnet_v2/batch_normalization_63/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_63/FusedBatchNormV3" -> "inception_resnet_v2/activation_63/Relu"; +"inception_resnet_v2/activation_63/Relu" -> "inception_resnet_v2/conv2d_64/Conv2D"; +"inception_resnet_v2/conv2d_64/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_64/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/conv2d_64/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/conv2d_64/Sigmoid"; +"inception_resnet_v2/conv2d_64/Sigmoid" -> "inception_resnet_v2/conv2d_64/Round"; +"inception_resnet_v2/conv2d_64/Round" -> "inception_resnet_v2/conv2d_64/mul"; +"inception_resnet_v2/conv2d_64/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_64/ReadVariableOp"; +"inception_resnet_v2/conv2d_64/ReadVariableOp" -> "inception_resnet_v2/conv2d_64/mul"; +"inception_resnet_v2/conv2d_64/mul" -> "inception_resnet_v2/conv2d_64/Conv2D"; +"inception_resnet_v2/conv2d_64/Conv2D" -> "inception_resnet_v2/batch_normalization_64/FusedBatchNormV3"; +"inception_resnet_v2/conv2d_61/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_61/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/conv2d_61/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/conv2d_61/Sigmoid"; +"inception_resnet_v2/conv2d_61/Sigmoid" -> "inception_resnet_v2/conv2d_61/Round"; +"inception_resnet_v2/conv2d_61/Round" -> "inception_resnet_v2/conv2d_61/mul"; +"inception_resnet_v2/conv2d_61/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_61/ReadVariableOp"; +"inception_resnet_v2/conv2d_61/ReadVariableOp" -> "inception_resnet_v2/conv2d_61/mul"; +"inception_resnet_v2/conv2d_61/mul" -> "inception_resnet_v2/conv2d_61/Conv2D"; +"inception_resnet_v2/conv2d_61/Conv2D" -> "inception_resnet_v2/batch_normalization_61/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_64/Const" -> "inception_resnet_v2/batch_normalization_64/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_64/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_64/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_64/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_64/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_64/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_64/FusedBatchNormV3/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_64/FusedBatchNormV3/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_64/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_64/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_resnet_v2/batch_normalization_64/FusedBatchNormV3/ReadVariableOp_1"; +"inception_resnet_v2/batch_normalization_64/FusedBatchNormV3/ReadVariableOp_1" -> "inception_resnet_v2/batch_normalization_64/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_64/FusedBatchNormV3" -> "inception_resnet_v2/activation_64/Relu"; +"inception_resnet_v2/batch_normalization_61/Const" -> "inception_resnet_v2/batch_normalization_61/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_61/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_61/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_61/ReadVariableOp" -> 
"inception_resnet_v2/batch_normalization_61/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_61/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_61/FusedBatchNormV3/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_61/FusedBatchNormV3/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_61/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_61/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_resnet_v2/batch_normalization_61/FusedBatchNormV3/ReadVariableOp_1"; +"inception_resnet_v2/batch_normalization_61/FusedBatchNormV3/ReadVariableOp_1" -> "inception_resnet_v2/batch_normalization_61/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_61/FusedBatchNormV3" -> "inception_resnet_v2/activation_61/Relu"; +"inception_resnet_v2/activation_64/Relu" -> "inception_resnet_v2/conv2d_65/Conv2D"; +"inception_resnet_v2/activation_61/Relu" -> "inception_resnet_v2/conv2d_62/Conv2D"; +"inception_resnet_v2/conv2d_65/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_65/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/conv2d_65/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/conv2d_65/Sigmoid"; +"inception_resnet_v2/conv2d_65/Sigmoid" -> "inception_resnet_v2/conv2d_65/Round"; +"inception_resnet_v2/conv2d_65/Round" -> "inception_resnet_v2/conv2d_65/mul"; +"inception_resnet_v2/conv2d_65/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_65/ReadVariableOp"; +"inception_resnet_v2/conv2d_65/ReadVariableOp" -> "inception_resnet_v2/conv2d_65/mul"; +"inception_resnet_v2/conv2d_65/mul" -> "inception_resnet_v2/conv2d_65/Conv2D"; +"inception_resnet_v2/conv2d_65/Conv2D" -> "inception_resnet_v2/batch_normalization_65/FusedBatchNormV3"; +"inception_resnet_v2/conv2d_62/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_62/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/conv2d_62/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/conv2d_62/Sigmoid"; +"inception_resnet_v2/conv2d_62/Sigmoid" -> "inception_resnet_v2/conv2d_62/Round"; +"inception_resnet_v2/conv2d_62/Round" -> "inception_resnet_v2/conv2d_62/mul"; +"inception_resnet_v2/conv2d_62/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_62/ReadVariableOp"; +"inception_resnet_v2/conv2d_62/ReadVariableOp" -> "inception_resnet_v2/conv2d_62/mul"; +"inception_resnet_v2/conv2d_62/mul" -> "inception_resnet_v2/conv2d_62/Conv2D"; +"inception_resnet_v2/conv2d_62/Conv2D" -> "inception_resnet_v2/batch_normalization_62/FusedBatchNormV3"; +"inception_resnet_v2/conv2d_60/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_60/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/conv2d_60/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/conv2d_60/Sigmoid"; +"inception_resnet_v2/conv2d_60/Sigmoid" -> "inception_resnet_v2/conv2d_60/Round"; +"inception_resnet_v2/conv2d_60/Round" -> "inception_resnet_v2/conv2d_60/mul"; +"inception_resnet_v2/conv2d_60/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_60/ReadVariableOp"; +"inception_resnet_v2/conv2d_60/ReadVariableOp" -> "inception_resnet_v2/conv2d_60/mul"; +"inception_resnet_v2/conv2d_60/mul" -> "inception_resnet_v2/conv2d_60/Conv2D"; +"inception_resnet_v2/conv2d_60/Conv2D" -> "inception_resnet_v2/batch_normalization_60/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_65/Const" -> "inception_resnet_v2/batch_normalization_65/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_65/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_65/ReadVariableOp"; 
+"inception_resnet_v2/batch_normalization_65/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_65/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_65/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_65/FusedBatchNormV3/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_65/FusedBatchNormV3/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_65/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_65/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_resnet_v2/batch_normalization_65/FusedBatchNormV3/ReadVariableOp_1"; +"inception_resnet_v2/batch_normalization_65/FusedBatchNormV3/ReadVariableOp_1" -> "inception_resnet_v2/batch_normalization_65/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_65/FusedBatchNormV3" -> "inception_resnet_v2/activation_65/Relu"; +"inception_resnet_v2/batch_normalization_62/Const" -> "inception_resnet_v2/batch_normalization_62/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_62/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_62/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_62/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_62/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_62/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_62/FusedBatchNormV3/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_62/FusedBatchNormV3/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_62/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_62/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_resnet_v2/batch_normalization_62/FusedBatchNormV3/ReadVariableOp_1"; +"inception_resnet_v2/batch_normalization_62/FusedBatchNormV3/ReadVariableOp_1" -> "inception_resnet_v2/batch_normalization_62/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_62/FusedBatchNormV3" -> "inception_resnet_v2/activation_62/Relu"; +"inception_resnet_v2/batch_normalization_60/Const" -> "inception_resnet_v2/batch_normalization_60/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_60/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_60/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_60/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_60/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_60/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_60/FusedBatchNormV3/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_60/FusedBatchNormV3/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_60/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_60/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_resnet_v2/batch_normalization_60/FusedBatchNormV3/ReadVariableOp_1"; +"inception_resnet_v2/batch_normalization_60/FusedBatchNormV3/ReadVariableOp_1" -> "inception_resnet_v2/batch_normalization_60/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_60/FusedBatchNormV3" -> "inception_resnet_v2/activation_60/Relu"; +"inception_resnet_v2/activation_60/Relu" -> "inception_resnet_v2/block35_9_mixed/concat"; +"inception_resnet_v2/activation_62/Relu" -> "inception_resnet_v2/block35_9_mixed/concat"; +"inception_resnet_v2/activation_65/Relu" -> "inception_resnet_v2/block35_9_mixed/concat"; +"inception_resnet_v2/block35_9_mixed/concat/axis" -> "inception_resnet_v2/block35_9_mixed/concat"; +"inception_resnet_v2/block35_9_mixed/concat" -> 
"inception_resnet_v2/block35_9_conv/Conv2D"; +"inception_resnet_v2/block35_9_conv/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/block35_9_conv/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/block35_9_conv/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/block35_9_conv/Sigmoid"; +"inception_resnet_v2/block35_9_conv/Sigmoid" -> "inception_resnet_v2/block35_9_conv/Round"; +"inception_resnet_v2/block35_9_conv/Round" -> "inception_resnet_v2/block35_9_conv/mul"; +"inception_resnet_v2/block35_9_conv/ReadVariableOp/resource" -> "inception_resnet_v2/block35_9_conv/ReadVariableOp"; +"inception_resnet_v2/block35_9_conv/ReadVariableOp" -> "inception_resnet_v2/block35_9_conv/mul"; +"inception_resnet_v2/block35_9_conv/mul" -> "inception_resnet_v2/block35_9_conv/Conv2D"; +"inception_resnet_v2/block35_9_conv/Conv2D" -> "inception_resnet_v2/block35_9_conv/BiasAdd"; +"inception_resnet_v2/block35_9_conv/BiasAdd/ReadVariableOp/resource" -> "inception_resnet_v2/block35_9_conv/BiasAdd/ReadVariableOp"; +"inception_resnet_v2/block35_9_conv/BiasAdd/ReadVariableOp" -> "inception_resnet_v2/block35_9_conv/BiasAdd"; +"inception_resnet_v2/block35_9_conv/BiasAdd" -> "inception_resnet_v2/custom_scale_layer_8/mul"; +"inception_resnet_v2/custom_scale_layer_8/mul/y" -> "inception_resnet_v2/custom_scale_layer_8/mul"; +"inception_resnet_v2/custom_scale_layer_8/mul" -> "inception_resnet_v2/custom_scale_layer_8/add"; +"inception_resnet_v2/custom_scale_layer_8/add" -> "inception_resnet_v2/block35_9_ac/Relu"; +"inception_resnet_v2/block35_9_ac/Relu" -> "inception_resnet_v2/conv2d_69/Conv2D"; +"inception_resnet_v2/block35_9_ac/Relu" -> "inception_resnet_v2/conv2d_67/Conv2D"; +"inception_resnet_v2/block35_9_ac/Relu" -> "inception_resnet_v2/conv2d_66/Conv2D"; +"inception_resnet_v2/block35_9_ac/Relu" -> "inception_resnet_v2/custom_scale_layer_9/add"; +"inception_resnet_v2/conv2d_69/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_69/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/conv2d_69/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/conv2d_69/Sigmoid"; +"inception_resnet_v2/conv2d_69/Sigmoid" -> "inception_resnet_v2/conv2d_69/Round"; +"inception_resnet_v2/conv2d_69/Round" -> "inception_resnet_v2/conv2d_69/mul"; +"inception_resnet_v2/conv2d_69/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_69/ReadVariableOp"; +"inception_resnet_v2/conv2d_69/ReadVariableOp" -> "inception_resnet_v2/conv2d_69/mul"; +"inception_resnet_v2/conv2d_69/mul" -> "inception_resnet_v2/conv2d_69/Conv2D"; +"inception_resnet_v2/conv2d_69/Conv2D" -> "inception_resnet_v2/batch_normalization_69/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_69/Const" -> "inception_resnet_v2/batch_normalization_69/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_69/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_69/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_69/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_69/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_69/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_69/FusedBatchNormV3/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_69/FusedBatchNormV3/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_69/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_69/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_resnet_v2/batch_normalization_69/FusedBatchNormV3/ReadVariableOp_1"; 
+"inception_resnet_v2/batch_normalization_69/FusedBatchNormV3/ReadVariableOp_1" -> "inception_resnet_v2/batch_normalization_69/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_69/FusedBatchNormV3" -> "inception_resnet_v2/activation_69/Relu"; +"inception_resnet_v2/activation_69/Relu" -> "inception_resnet_v2/conv2d_70/Conv2D"; +"inception_resnet_v2/conv2d_70/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_70/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/conv2d_70/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/conv2d_70/Sigmoid"; +"inception_resnet_v2/conv2d_70/Sigmoid" -> "inception_resnet_v2/conv2d_70/Round"; +"inception_resnet_v2/conv2d_70/Round" -> "inception_resnet_v2/conv2d_70/mul"; +"inception_resnet_v2/conv2d_70/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_70/ReadVariableOp"; +"inception_resnet_v2/conv2d_70/ReadVariableOp" -> "inception_resnet_v2/conv2d_70/mul"; +"inception_resnet_v2/conv2d_70/mul" -> "inception_resnet_v2/conv2d_70/Conv2D"; +"inception_resnet_v2/conv2d_70/Conv2D" -> "inception_resnet_v2/batch_normalization_70/FusedBatchNormV3"; +"inception_resnet_v2/conv2d_67/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_67/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/conv2d_67/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/conv2d_67/Sigmoid"; +"inception_resnet_v2/conv2d_67/Sigmoid" -> "inception_resnet_v2/conv2d_67/Round"; +"inception_resnet_v2/conv2d_67/Round" -> "inception_resnet_v2/conv2d_67/mul"; +"inception_resnet_v2/conv2d_67/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_67/ReadVariableOp"; +"inception_resnet_v2/conv2d_67/ReadVariableOp" -> "inception_resnet_v2/conv2d_67/mul"; +"inception_resnet_v2/conv2d_67/mul" -> "inception_resnet_v2/conv2d_67/Conv2D"; +"inception_resnet_v2/conv2d_67/Conv2D" -> "inception_resnet_v2/batch_normalization_67/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_70/Const" -> "inception_resnet_v2/batch_normalization_70/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_70/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_70/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_70/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_70/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_70/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_70/FusedBatchNormV3/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_70/FusedBatchNormV3/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_70/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_70/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_resnet_v2/batch_normalization_70/FusedBatchNormV3/ReadVariableOp_1"; +"inception_resnet_v2/batch_normalization_70/FusedBatchNormV3/ReadVariableOp_1" -> "inception_resnet_v2/batch_normalization_70/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_70/FusedBatchNormV3" -> "inception_resnet_v2/activation_70/Relu"; +"inception_resnet_v2/batch_normalization_67/Const" -> "inception_resnet_v2/batch_normalization_67/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_67/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_67/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_67/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_67/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_67/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_67/FusedBatchNormV3/ReadVariableOp"; 
+"inception_resnet_v2/batch_normalization_67/FusedBatchNormV3/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_67/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_67/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_resnet_v2/batch_normalization_67/FusedBatchNormV3/ReadVariableOp_1"; +"inception_resnet_v2/batch_normalization_67/FusedBatchNormV3/ReadVariableOp_1" -> "inception_resnet_v2/batch_normalization_67/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_67/FusedBatchNormV3" -> "inception_resnet_v2/activation_67/Relu"; +"inception_resnet_v2/activation_70/Relu" -> "inception_resnet_v2/conv2d_71/Conv2D"; +"inception_resnet_v2/activation_67/Relu" -> "inception_resnet_v2/conv2d_68/Conv2D"; +"inception_resnet_v2/conv2d_71/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_71/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/conv2d_71/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/conv2d_71/Sigmoid"; +"inception_resnet_v2/conv2d_71/Sigmoid" -> "inception_resnet_v2/conv2d_71/Round"; +"inception_resnet_v2/conv2d_71/Round" -> "inception_resnet_v2/conv2d_71/mul"; +"inception_resnet_v2/conv2d_71/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_71/ReadVariableOp"; +"inception_resnet_v2/conv2d_71/ReadVariableOp" -> "inception_resnet_v2/conv2d_71/mul"; +"inception_resnet_v2/conv2d_71/mul" -> "inception_resnet_v2/conv2d_71/Conv2D"; +"inception_resnet_v2/conv2d_71/Conv2D" -> "inception_resnet_v2/batch_normalization_71/FusedBatchNormV3"; +"inception_resnet_v2/conv2d_68/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_68/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/conv2d_68/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/conv2d_68/Sigmoid"; +"inception_resnet_v2/conv2d_68/Sigmoid" -> "inception_resnet_v2/conv2d_68/Round"; +"inception_resnet_v2/conv2d_68/Round" -> "inception_resnet_v2/conv2d_68/mul"; +"inception_resnet_v2/conv2d_68/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_68/ReadVariableOp"; +"inception_resnet_v2/conv2d_68/ReadVariableOp" -> "inception_resnet_v2/conv2d_68/mul"; +"inception_resnet_v2/conv2d_68/mul" -> "inception_resnet_v2/conv2d_68/Conv2D"; +"inception_resnet_v2/conv2d_68/Conv2D" -> "inception_resnet_v2/batch_normalization_68/FusedBatchNormV3"; +"inception_resnet_v2/conv2d_66/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_66/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/conv2d_66/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/conv2d_66/Sigmoid"; +"inception_resnet_v2/conv2d_66/Sigmoid" -> "inception_resnet_v2/conv2d_66/Round"; +"inception_resnet_v2/conv2d_66/Round" -> "inception_resnet_v2/conv2d_66/mul"; +"inception_resnet_v2/conv2d_66/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_66/ReadVariableOp"; +"inception_resnet_v2/conv2d_66/ReadVariableOp" -> "inception_resnet_v2/conv2d_66/mul"; +"inception_resnet_v2/conv2d_66/mul" -> "inception_resnet_v2/conv2d_66/Conv2D"; +"inception_resnet_v2/conv2d_66/Conv2D" -> "inception_resnet_v2/batch_normalization_66/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_71/Const" -> "inception_resnet_v2/batch_normalization_71/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_71/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_71/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_71/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_71/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_71/FusedBatchNormV3/ReadVariableOp/resource" -> 
"inception_resnet_v2/batch_normalization_71/FusedBatchNormV3/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_71/FusedBatchNormV3/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_71/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_71/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_resnet_v2/batch_normalization_71/FusedBatchNormV3/ReadVariableOp_1"; +"inception_resnet_v2/batch_normalization_71/FusedBatchNormV3/ReadVariableOp_1" -> "inception_resnet_v2/batch_normalization_71/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_71/FusedBatchNormV3" -> "inception_resnet_v2/activation_71/Relu"; +"inception_resnet_v2/batch_normalization_68/Const" -> "inception_resnet_v2/batch_normalization_68/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_68/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_68/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_68/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_68/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_68/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_68/FusedBatchNormV3/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_68/FusedBatchNormV3/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_68/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_68/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_resnet_v2/batch_normalization_68/FusedBatchNormV3/ReadVariableOp_1"; +"inception_resnet_v2/batch_normalization_68/FusedBatchNormV3/ReadVariableOp_1" -> "inception_resnet_v2/batch_normalization_68/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_68/FusedBatchNormV3" -> "inception_resnet_v2/activation_68/Relu"; +"inception_resnet_v2/batch_normalization_66/Const" -> "inception_resnet_v2/batch_normalization_66/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_66/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_66/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_66/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_66/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_66/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_66/FusedBatchNormV3/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_66/FusedBatchNormV3/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_66/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_66/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_resnet_v2/batch_normalization_66/FusedBatchNormV3/ReadVariableOp_1"; +"inception_resnet_v2/batch_normalization_66/FusedBatchNormV3/ReadVariableOp_1" -> "inception_resnet_v2/batch_normalization_66/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_66/FusedBatchNormV3" -> "inception_resnet_v2/activation_66/Relu"; +"inception_resnet_v2/activation_66/Relu" -> "inception_resnet_v2/block35_10_mixed/concat"; +"inception_resnet_v2/activation_68/Relu" -> "inception_resnet_v2/block35_10_mixed/concat"; +"inception_resnet_v2/activation_71/Relu" -> "inception_resnet_v2/block35_10_mixed/concat"; +"inception_resnet_v2/block35_10_mixed/concat/axis" -> "inception_resnet_v2/block35_10_mixed/concat"; +"inception_resnet_v2/block35_10_mixed/concat" -> "inception_resnet_v2/block35_10_conv/Conv2D"; +"inception_resnet_v2/block35_10_conv/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/block35_10_conv/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/block35_10_conv/Sigmoid/ReadVariableOp" 
-> "inception_resnet_v2/block35_10_conv/Sigmoid"; +"inception_resnet_v2/block35_10_conv/Sigmoid" -> "inception_resnet_v2/block35_10_conv/Round"; +"inception_resnet_v2/block35_10_conv/Round" -> "inception_resnet_v2/block35_10_conv/mul"; +"inception_resnet_v2/block35_10_conv/ReadVariableOp/resource" -> "inception_resnet_v2/block35_10_conv/ReadVariableOp"; +"inception_resnet_v2/block35_10_conv/ReadVariableOp" -> "inception_resnet_v2/block35_10_conv/mul"; +"inception_resnet_v2/block35_10_conv/mul" -> "inception_resnet_v2/block35_10_conv/Conv2D"; +"inception_resnet_v2/block35_10_conv/Conv2D" -> "inception_resnet_v2/block35_10_conv/BiasAdd"; +"inception_resnet_v2/block35_10_conv/BiasAdd/ReadVariableOp/resource" -> "inception_resnet_v2/block35_10_conv/BiasAdd/ReadVariableOp"; +"inception_resnet_v2/block35_10_conv/BiasAdd/ReadVariableOp" -> "inception_resnet_v2/block35_10_conv/BiasAdd"; +"inception_resnet_v2/block35_10_conv/BiasAdd" -> "inception_resnet_v2/custom_scale_layer_9/mul"; +"inception_resnet_v2/custom_scale_layer_9/mul/y" -> "inception_resnet_v2/custom_scale_layer_9/mul"; +"inception_resnet_v2/custom_scale_layer_9/mul" -> "inception_resnet_v2/custom_scale_layer_9/add"; +"inception_resnet_v2/custom_scale_layer_9/add" -> "inception_resnet_v2/block35_10_ac/Relu"; +"inception_resnet_v2/block35_10_ac/Relu" -> "inception_resnet_v2/conv2d_73/Conv2D"; +"inception_resnet_v2/block35_10_ac/Relu" -> "inception_resnet_v2/conv2d_72/Conv2D"; +"inception_resnet_v2/block35_10_ac/Relu" -> "inception_resnet_v2/max_pooling2d_2/MaxPool"; +"inception_resnet_v2/conv2d_73/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_73/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/conv2d_73/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/conv2d_73/Sigmoid"; +"inception_resnet_v2/conv2d_73/Sigmoid" -> "inception_resnet_v2/conv2d_73/Round"; +"inception_resnet_v2/conv2d_73/Round" -> "inception_resnet_v2/conv2d_73/mul"; +"inception_resnet_v2/conv2d_73/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_73/ReadVariableOp"; +"inception_resnet_v2/conv2d_73/ReadVariableOp" -> "inception_resnet_v2/conv2d_73/mul"; +"inception_resnet_v2/conv2d_73/mul" -> "inception_resnet_v2/conv2d_73/Conv2D"; +"inception_resnet_v2/conv2d_73/Conv2D" -> "inception_resnet_v2/batch_normalization_73/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_73/Const" -> "inception_resnet_v2/batch_normalization_73/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_73/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_73/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_73/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_73/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_73/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_73/FusedBatchNormV3/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_73/FusedBatchNormV3/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_73/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_73/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_resnet_v2/batch_normalization_73/FusedBatchNormV3/ReadVariableOp_1"; +"inception_resnet_v2/batch_normalization_73/FusedBatchNormV3/ReadVariableOp_1" -> "inception_resnet_v2/batch_normalization_73/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_73/FusedBatchNormV3" -> "inception_resnet_v2/activation_73/Relu"; +"inception_resnet_v2/activation_73/Relu" -> "inception_resnet_v2/conv2d_74/Conv2D"; 
+"inception_resnet_v2/conv2d_74/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_74/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/conv2d_74/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/conv2d_74/Sigmoid"; +"inception_resnet_v2/conv2d_74/Sigmoid" -> "inception_resnet_v2/conv2d_74/Round"; +"inception_resnet_v2/conv2d_74/Round" -> "inception_resnet_v2/conv2d_74/mul"; +"inception_resnet_v2/conv2d_74/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_74/ReadVariableOp"; +"inception_resnet_v2/conv2d_74/ReadVariableOp" -> "inception_resnet_v2/conv2d_74/mul"; +"inception_resnet_v2/conv2d_74/mul" -> "inception_resnet_v2/conv2d_74/Conv2D"; +"inception_resnet_v2/conv2d_74/Conv2D" -> "inception_resnet_v2/batch_normalization_74/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_74/Const" -> "inception_resnet_v2/batch_normalization_74/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_74/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_74/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_74/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_74/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_74/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_74/FusedBatchNormV3/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_74/FusedBatchNormV3/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_74/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_74/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_resnet_v2/batch_normalization_74/FusedBatchNormV3/ReadVariableOp_1"; +"inception_resnet_v2/batch_normalization_74/FusedBatchNormV3/ReadVariableOp_1" -> "inception_resnet_v2/batch_normalization_74/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_74/FusedBatchNormV3" -> "inception_resnet_v2/activation_74/Relu"; +"inception_resnet_v2/activation_74/Relu" -> "inception_resnet_v2/conv2d_75/Conv2D"; +"inception_resnet_v2/conv2d_75/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_75/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/conv2d_75/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/conv2d_75/Sigmoid"; +"inception_resnet_v2/conv2d_75/Sigmoid" -> "inception_resnet_v2/conv2d_75/Round"; +"inception_resnet_v2/conv2d_75/Round" -> "inception_resnet_v2/conv2d_75/mul"; +"inception_resnet_v2/conv2d_75/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_75/ReadVariableOp"; +"inception_resnet_v2/conv2d_75/ReadVariableOp" -> "inception_resnet_v2/conv2d_75/mul"; +"inception_resnet_v2/conv2d_75/mul" -> "inception_resnet_v2/conv2d_75/Conv2D"; +"inception_resnet_v2/conv2d_75/Conv2D" -> "inception_resnet_v2/batch_normalization_75/FusedBatchNormV3"; +"inception_resnet_v2/conv2d_72/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_72/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/conv2d_72/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/conv2d_72/Sigmoid"; +"inception_resnet_v2/conv2d_72/Sigmoid" -> "inception_resnet_v2/conv2d_72/Round"; +"inception_resnet_v2/conv2d_72/Round" -> "inception_resnet_v2/conv2d_72/mul"; +"inception_resnet_v2/conv2d_72/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_72/ReadVariableOp"; +"inception_resnet_v2/conv2d_72/ReadVariableOp" -> "inception_resnet_v2/conv2d_72/mul"; +"inception_resnet_v2/conv2d_72/mul" -> "inception_resnet_v2/conv2d_72/Conv2D"; +"inception_resnet_v2/conv2d_72/Conv2D" -> "inception_resnet_v2/batch_normalization_72/FusedBatchNormV3"; 
+"inception_resnet_v2/batch_normalization_75/Const" -> "inception_resnet_v2/batch_normalization_75/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_75/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_75/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_75/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_75/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_75/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_75/FusedBatchNormV3/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_75/FusedBatchNormV3/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_75/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_75/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_resnet_v2/batch_normalization_75/FusedBatchNormV3/ReadVariableOp_1"; +"inception_resnet_v2/batch_normalization_75/FusedBatchNormV3/ReadVariableOp_1" -> "inception_resnet_v2/batch_normalization_75/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_75/FusedBatchNormV3" -> "inception_resnet_v2/activation_75/Relu"; +"inception_resnet_v2/batch_normalization_72/Const" -> "inception_resnet_v2/batch_normalization_72/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_72/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_72/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_72/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_72/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_72/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_72/FusedBatchNormV3/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_72/FusedBatchNormV3/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_72/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_72/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_resnet_v2/batch_normalization_72/FusedBatchNormV3/ReadVariableOp_1"; +"inception_resnet_v2/batch_normalization_72/FusedBatchNormV3/ReadVariableOp_1" -> "inception_resnet_v2/batch_normalization_72/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_72/FusedBatchNormV3" -> "inception_resnet_v2/activation_72/Relu"; +"inception_resnet_v2/activation_72/Relu" -> "inception_resnet_v2/mixed_6a/concat"; +"inception_resnet_v2/activation_75/Relu" -> "inception_resnet_v2/mixed_6a/concat"; +"inception_resnet_v2/max_pooling2d_2/MaxPool" -> "inception_resnet_v2/mixed_6a/concat"; +"inception_resnet_v2/mixed_6a/concat/axis" -> "inception_resnet_v2/mixed_6a/concat"; +"inception_resnet_v2/mixed_6a/concat" -> "inception_resnet_v2/conv2d_77/Conv2D"; +"inception_resnet_v2/mixed_6a/concat" -> "inception_resnet_v2/conv2d_76/Conv2D"; +"inception_resnet_v2/mixed_6a/concat" -> "inception_resnet_v2/custom_scale_layer_10/add"; +"inception_resnet_v2/conv2d_77/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_77/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/conv2d_77/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/conv2d_77/Sigmoid"; +"inception_resnet_v2/conv2d_77/Sigmoid" -> "inception_resnet_v2/conv2d_77/Round"; +"inception_resnet_v2/conv2d_77/Round" -> "inception_resnet_v2/conv2d_77/mul"; +"inception_resnet_v2/conv2d_77/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_77/ReadVariableOp"; +"inception_resnet_v2/conv2d_77/ReadVariableOp" -> "inception_resnet_v2/conv2d_77/mul"; +"inception_resnet_v2/conv2d_77/mul" -> "inception_resnet_v2/conv2d_77/Conv2D"; +"inception_resnet_v2/conv2d_77/Conv2D" -> 
"inception_resnet_v2/batch_normalization_77/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_77/Const" -> "inception_resnet_v2/batch_normalization_77/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_77/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_77/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_77/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_77/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_77/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_77/FusedBatchNormV3/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_77/FusedBatchNormV3/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_77/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_77/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_resnet_v2/batch_normalization_77/FusedBatchNormV3/ReadVariableOp_1"; +"inception_resnet_v2/batch_normalization_77/FusedBatchNormV3/ReadVariableOp_1" -> "inception_resnet_v2/batch_normalization_77/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_77/FusedBatchNormV3" -> "inception_resnet_v2/activation_77/Relu"; +"inception_resnet_v2/activation_77/Relu" -> "inception_resnet_v2/conv2d_78/Conv2D"; +"inception_resnet_v2/conv2d_78/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_78/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/conv2d_78/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/conv2d_78/Sigmoid"; +"inception_resnet_v2/conv2d_78/Sigmoid" -> "inception_resnet_v2/conv2d_78/Round"; +"inception_resnet_v2/conv2d_78/Round" -> "inception_resnet_v2/conv2d_78/mul"; +"inception_resnet_v2/conv2d_78/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_78/ReadVariableOp"; +"inception_resnet_v2/conv2d_78/ReadVariableOp" -> "inception_resnet_v2/conv2d_78/mul"; +"inception_resnet_v2/conv2d_78/mul" -> "inception_resnet_v2/conv2d_78/Conv2D"; +"inception_resnet_v2/conv2d_78/Conv2D" -> "inception_resnet_v2/batch_normalization_78/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_78/Const" -> "inception_resnet_v2/batch_normalization_78/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_78/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_78/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_78/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_78/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_78/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_78/FusedBatchNormV3/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_78/FusedBatchNormV3/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_78/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_78/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_resnet_v2/batch_normalization_78/FusedBatchNormV3/ReadVariableOp_1"; +"inception_resnet_v2/batch_normalization_78/FusedBatchNormV3/ReadVariableOp_1" -> "inception_resnet_v2/batch_normalization_78/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_78/FusedBatchNormV3" -> "inception_resnet_v2/activation_78/Relu"; +"inception_resnet_v2/activation_78/Relu" -> "inception_resnet_v2/conv2d_79/Conv2D"; +"inception_resnet_v2/conv2d_79/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_79/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/conv2d_79/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/conv2d_79/Sigmoid"; +"inception_resnet_v2/conv2d_79/Sigmoid" -> "inception_resnet_v2/conv2d_79/Round"; 
+"inception_resnet_v2/conv2d_79/Round" -> "inception_resnet_v2/conv2d_79/mul"; +"inception_resnet_v2/conv2d_79/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_79/ReadVariableOp"; +"inception_resnet_v2/conv2d_79/ReadVariableOp" -> "inception_resnet_v2/conv2d_79/mul"; +"inception_resnet_v2/conv2d_79/mul" -> "inception_resnet_v2/conv2d_79/Conv2D"; +"inception_resnet_v2/conv2d_79/Conv2D" -> "inception_resnet_v2/batch_normalization_79/FusedBatchNormV3"; +"inception_resnet_v2/conv2d_76/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_76/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/conv2d_76/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/conv2d_76/Sigmoid"; +"inception_resnet_v2/conv2d_76/Sigmoid" -> "inception_resnet_v2/conv2d_76/Round"; +"inception_resnet_v2/conv2d_76/Round" -> "inception_resnet_v2/conv2d_76/mul"; +"inception_resnet_v2/conv2d_76/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_76/ReadVariableOp"; +"inception_resnet_v2/conv2d_76/ReadVariableOp" -> "inception_resnet_v2/conv2d_76/mul"; +"inception_resnet_v2/conv2d_76/mul" -> "inception_resnet_v2/conv2d_76/Conv2D"; +"inception_resnet_v2/conv2d_76/Conv2D" -> "inception_resnet_v2/batch_normalization_76/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_79/Const" -> "inception_resnet_v2/batch_normalization_79/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_79/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_79/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_79/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_79/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_79/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_79/FusedBatchNormV3/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_79/FusedBatchNormV3/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_79/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_79/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_resnet_v2/batch_normalization_79/FusedBatchNormV3/ReadVariableOp_1"; +"inception_resnet_v2/batch_normalization_79/FusedBatchNormV3/ReadVariableOp_1" -> "inception_resnet_v2/batch_normalization_79/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_79/FusedBatchNormV3" -> "inception_resnet_v2/activation_79/Relu"; +"inception_resnet_v2/batch_normalization_76/Const" -> "inception_resnet_v2/batch_normalization_76/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_76/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_76/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_76/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_76/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_76/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_76/FusedBatchNormV3/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_76/FusedBatchNormV3/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_76/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_76/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_resnet_v2/batch_normalization_76/FusedBatchNormV3/ReadVariableOp_1"; +"inception_resnet_v2/batch_normalization_76/FusedBatchNormV3/ReadVariableOp_1" -> "inception_resnet_v2/batch_normalization_76/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_76/FusedBatchNormV3" -> "inception_resnet_v2/activation_76/Relu"; +"inception_resnet_v2/activation_76/Relu" -> "inception_resnet_v2/block17_1_mixed/concat"; 
+"inception_resnet_v2/activation_79/Relu" -> "inception_resnet_v2/block17_1_mixed/concat"; +"inception_resnet_v2/block17_1_mixed/concat/axis" -> "inception_resnet_v2/block17_1_mixed/concat"; +"inception_resnet_v2/block17_1_mixed/concat" -> "inception_resnet_v2/block17_1_conv/Conv2D"; +"inception_resnet_v2/block17_1_conv/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/block17_1_conv/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/block17_1_conv/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/block17_1_conv/Sigmoid"; +"inception_resnet_v2/block17_1_conv/Sigmoid" -> "inception_resnet_v2/block17_1_conv/Round"; +"inception_resnet_v2/block17_1_conv/Round" -> "inception_resnet_v2/block17_1_conv/mul"; +"inception_resnet_v2/block17_1_conv/ReadVariableOp/resource" -> "inception_resnet_v2/block17_1_conv/ReadVariableOp"; +"inception_resnet_v2/block17_1_conv/ReadVariableOp" -> "inception_resnet_v2/block17_1_conv/mul"; +"inception_resnet_v2/block17_1_conv/mul" -> "inception_resnet_v2/block17_1_conv/Conv2D"; +"inception_resnet_v2/block17_1_conv/Conv2D" -> "inception_resnet_v2/block17_1_conv/BiasAdd"; +"inception_resnet_v2/block17_1_conv/BiasAdd/ReadVariableOp/resource" -> "inception_resnet_v2/block17_1_conv/BiasAdd/ReadVariableOp"; +"inception_resnet_v2/block17_1_conv/BiasAdd/ReadVariableOp" -> "inception_resnet_v2/block17_1_conv/BiasAdd"; +"inception_resnet_v2/block17_1_conv/BiasAdd" -> "inception_resnet_v2/custom_scale_layer_10/mul"; +"inception_resnet_v2/custom_scale_layer_10/mul/y" -> "inception_resnet_v2/custom_scale_layer_10/mul"; +"inception_resnet_v2/custom_scale_layer_10/mul" -> "inception_resnet_v2/custom_scale_layer_10/add"; +"inception_resnet_v2/custom_scale_layer_10/add" -> "inception_resnet_v2/block17_1_ac/Relu"; +"inception_resnet_v2/block17_1_ac/Relu" -> "inception_resnet_v2/conv2d_81/Conv2D"; +"inception_resnet_v2/block17_1_ac/Relu" -> "inception_resnet_v2/conv2d_80/Conv2D"; +"inception_resnet_v2/block17_1_ac/Relu" -> "inception_resnet_v2/custom_scale_layer_11/add"; +"inception_resnet_v2/conv2d_81/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_81/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/conv2d_81/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/conv2d_81/Sigmoid"; +"inception_resnet_v2/conv2d_81/Sigmoid" -> "inception_resnet_v2/conv2d_81/Round"; +"inception_resnet_v2/conv2d_81/Round" -> "inception_resnet_v2/conv2d_81/mul"; +"inception_resnet_v2/conv2d_81/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_81/ReadVariableOp"; +"inception_resnet_v2/conv2d_81/ReadVariableOp" -> "inception_resnet_v2/conv2d_81/mul"; +"inception_resnet_v2/conv2d_81/mul" -> "inception_resnet_v2/conv2d_81/Conv2D"; +"inception_resnet_v2/conv2d_81/Conv2D" -> "inception_resnet_v2/batch_normalization_81/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_81/Const" -> "inception_resnet_v2/batch_normalization_81/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_81/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_81/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_81/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_81/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_81/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_81/FusedBatchNormV3/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_81/FusedBatchNormV3/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_81/FusedBatchNormV3"; 
+"inception_resnet_v2/batch_normalization_81/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_resnet_v2/batch_normalization_81/FusedBatchNormV3/ReadVariableOp_1"; +"inception_resnet_v2/batch_normalization_81/FusedBatchNormV3/ReadVariableOp_1" -> "inception_resnet_v2/batch_normalization_81/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_81/FusedBatchNormV3" -> "inception_resnet_v2/activation_81/Relu"; +"inception_resnet_v2/activation_81/Relu" -> "inception_resnet_v2/conv2d_82/Conv2D"; +"inception_resnet_v2/conv2d_82/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_82/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/conv2d_82/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/conv2d_82/Sigmoid"; +"inception_resnet_v2/conv2d_82/Sigmoid" -> "inception_resnet_v2/conv2d_82/Round"; +"inception_resnet_v2/conv2d_82/Round" -> "inception_resnet_v2/conv2d_82/mul"; +"inception_resnet_v2/conv2d_82/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_82/ReadVariableOp"; +"inception_resnet_v2/conv2d_82/ReadVariableOp" -> "inception_resnet_v2/conv2d_82/mul"; +"inception_resnet_v2/conv2d_82/mul" -> "inception_resnet_v2/conv2d_82/Conv2D"; +"inception_resnet_v2/conv2d_82/Conv2D" -> "inception_resnet_v2/batch_normalization_82/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_82/Const" -> "inception_resnet_v2/batch_normalization_82/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_82/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_82/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_82/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_82/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_82/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_82/FusedBatchNormV3/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_82/FusedBatchNormV3/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_82/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_82/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_resnet_v2/batch_normalization_82/FusedBatchNormV3/ReadVariableOp_1"; +"inception_resnet_v2/batch_normalization_82/FusedBatchNormV3/ReadVariableOp_1" -> "inception_resnet_v2/batch_normalization_82/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_82/FusedBatchNormV3" -> "inception_resnet_v2/activation_82/Relu"; +"inception_resnet_v2/activation_82/Relu" -> "inception_resnet_v2/conv2d_83/Conv2D"; +"inception_resnet_v2/conv2d_83/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_83/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/conv2d_83/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/conv2d_83/Sigmoid"; +"inception_resnet_v2/conv2d_83/Sigmoid" -> "inception_resnet_v2/conv2d_83/Round"; +"inception_resnet_v2/conv2d_83/Round" -> "inception_resnet_v2/conv2d_83/mul"; +"inception_resnet_v2/conv2d_83/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_83/ReadVariableOp"; +"inception_resnet_v2/conv2d_83/ReadVariableOp" -> "inception_resnet_v2/conv2d_83/mul"; +"inception_resnet_v2/conv2d_83/mul" -> "inception_resnet_v2/conv2d_83/Conv2D"; +"inception_resnet_v2/conv2d_83/Conv2D" -> "inception_resnet_v2/batch_normalization_83/FusedBatchNormV3"; +"inception_resnet_v2/conv2d_80/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_80/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/conv2d_80/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/conv2d_80/Sigmoid"; +"inception_resnet_v2/conv2d_80/Sigmoid" -> 
"inception_resnet_v2/conv2d_80/Round"; +"inception_resnet_v2/conv2d_80/Round" -> "inception_resnet_v2/conv2d_80/mul"; +"inception_resnet_v2/conv2d_80/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_80/ReadVariableOp"; +"inception_resnet_v2/conv2d_80/ReadVariableOp" -> "inception_resnet_v2/conv2d_80/mul"; +"inception_resnet_v2/conv2d_80/mul" -> "inception_resnet_v2/conv2d_80/Conv2D"; +"inception_resnet_v2/conv2d_80/Conv2D" -> "inception_resnet_v2/batch_normalization_80/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_83/Const" -> "inception_resnet_v2/batch_normalization_83/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_83/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_83/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_83/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_83/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_83/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_83/FusedBatchNormV3/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_83/FusedBatchNormV3/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_83/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_83/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_resnet_v2/batch_normalization_83/FusedBatchNormV3/ReadVariableOp_1"; +"inception_resnet_v2/batch_normalization_83/FusedBatchNormV3/ReadVariableOp_1" -> "inception_resnet_v2/batch_normalization_83/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_83/FusedBatchNormV3" -> "inception_resnet_v2/activation_83/Relu"; +"inception_resnet_v2/batch_normalization_80/Const" -> "inception_resnet_v2/batch_normalization_80/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_80/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_80/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_80/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_80/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_80/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_80/FusedBatchNormV3/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_80/FusedBatchNormV3/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_80/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_80/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_resnet_v2/batch_normalization_80/FusedBatchNormV3/ReadVariableOp_1"; +"inception_resnet_v2/batch_normalization_80/FusedBatchNormV3/ReadVariableOp_1" -> "inception_resnet_v2/batch_normalization_80/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_80/FusedBatchNormV3" -> "inception_resnet_v2/activation_80/Relu"; +"inception_resnet_v2/activation_80/Relu" -> "inception_resnet_v2/block17_2_mixed/concat"; +"inception_resnet_v2/activation_83/Relu" -> "inception_resnet_v2/block17_2_mixed/concat"; +"inception_resnet_v2/block17_2_mixed/concat/axis" -> "inception_resnet_v2/block17_2_mixed/concat"; +"inception_resnet_v2/block17_2_mixed/concat" -> "inception_resnet_v2/block17_2_conv/Conv2D"; +"inception_resnet_v2/block17_2_conv/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/block17_2_conv/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/block17_2_conv/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/block17_2_conv/Sigmoid"; +"inception_resnet_v2/block17_2_conv/Sigmoid" -> "inception_resnet_v2/block17_2_conv/Round"; +"inception_resnet_v2/block17_2_conv/Round" -> "inception_resnet_v2/block17_2_conv/mul"; 
+"inception_resnet_v2/block17_2_conv/ReadVariableOp/resource" -> "inception_resnet_v2/block17_2_conv/ReadVariableOp"; +"inception_resnet_v2/block17_2_conv/ReadVariableOp" -> "inception_resnet_v2/block17_2_conv/mul"; +"inception_resnet_v2/block17_2_conv/mul" -> "inception_resnet_v2/block17_2_conv/Conv2D"; +"inception_resnet_v2/block17_2_conv/Conv2D" -> "inception_resnet_v2/block17_2_conv/BiasAdd"; +"inception_resnet_v2/block17_2_conv/BiasAdd/ReadVariableOp/resource" -> "inception_resnet_v2/block17_2_conv/BiasAdd/ReadVariableOp"; +"inception_resnet_v2/block17_2_conv/BiasAdd/ReadVariableOp" -> "inception_resnet_v2/block17_2_conv/BiasAdd"; +"inception_resnet_v2/block17_2_conv/BiasAdd" -> "inception_resnet_v2/custom_scale_layer_11/mul"; +"inception_resnet_v2/custom_scale_layer_11/mul/y" -> "inception_resnet_v2/custom_scale_layer_11/mul"; +"inception_resnet_v2/custom_scale_layer_11/mul" -> "inception_resnet_v2/custom_scale_layer_11/add"; +"inception_resnet_v2/custom_scale_layer_11/add" -> "inception_resnet_v2/block17_2_ac/Relu"; +"inception_resnet_v2/block17_2_ac/Relu" -> "inception_resnet_v2/conv2d_85/Conv2D"; +"inception_resnet_v2/block17_2_ac/Relu" -> "inception_resnet_v2/conv2d_84/Conv2D"; +"inception_resnet_v2/block17_2_ac/Relu" -> "inception_resnet_v2/custom_scale_layer_12/add"; +"inception_resnet_v2/conv2d_85/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_85/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/conv2d_85/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/conv2d_85/Sigmoid"; +"inception_resnet_v2/conv2d_85/Sigmoid" -> "inception_resnet_v2/conv2d_85/Round"; +"inception_resnet_v2/conv2d_85/Round" -> "inception_resnet_v2/conv2d_85/mul"; +"inception_resnet_v2/conv2d_85/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_85/ReadVariableOp"; +"inception_resnet_v2/conv2d_85/ReadVariableOp" -> "inception_resnet_v2/conv2d_85/mul"; +"inception_resnet_v2/conv2d_85/mul" -> "inception_resnet_v2/conv2d_85/Conv2D"; +"inception_resnet_v2/conv2d_85/Conv2D" -> "inception_resnet_v2/batch_normalization_85/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_85/Const" -> "inception_resnet_v2/batch_normalization_85/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_85/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_85/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_85/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_85/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_85/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_85/FusedBatchNormV3/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_85/FusedBatchNormV3/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_85/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_85/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_resnet_v2/batch_normalization_85/FusedBatchNormV3/ReadVariableOp_1"; +"inception_resnet_v2/batch_normalization_85/FusedBatchNormV3/ReadVariableOp_1" -> "inception_resnet_v2/batch_normalization_85/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_85/FusedBatchNormV3" -> "inception_resnet_v2/activation_85/Relu"; +"inception_resnet_v2/activation_85/Relu" -> "inception_resnet_v2/conv2d_86/Conv2D"; +"inception_resnet_v2/conv2d_86/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_86/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/conv2d_86/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/conv2d_86/Sigmoid"; +"inception_resnet_v2/conv2d_86/Sigmoid" -> 
"inception_resnet_v2/conv2d_86/Round"; +"inception_resnet_v2/conv2d_86/Round" -> "inception_resnet_v2/conv2d_86/mul"; +"inception_resnet_v2/conv2d_86/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_86/ReadVariableOp"; +"inception_resnet_v2/conv2d_86/ReadVariableOp" -> "inception_resnet_v2/conv2d_86/mul"; +"inception_resnet_v2/conv2d_86/mul" -> "inception_resnet_v2/conv2d_86/Conv2D"; +"inception_resnet_v2/conv2d_86/Conv2D" -> "inception_resnet_v2/batch_normalization_86/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_86/Const" -> "inception_resnet_v2/batch_normalization_86/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_86/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_86/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_86/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_86/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_86/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_86/FusedBatchNormV3/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_86/FusedBatchNormV3/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_86/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_86/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_resnet_v2/batch_normalization_86/FusedBatchNormV3/ReadVariableOp_1"; +"inception_resnet_v2/batch_normalization_86/FusedBatchNormV3/ReadVariableOp_1" -> "inception_resnet_v2/batch_normalization_86/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_86/FusedBatchNormV3" -> "inception_resnet_v2/activation_86/Relu"; +"inception_resnet_v2/activation_86/Relu" -> "inception_resnet_v2/conv2d_87/Conv2D"; +"inception_resnet_v2/conv2d_87/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_87/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/conv2d_87/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/conv2d_87/Sigmoid"; +"inception_resnet_v2/conv2d_87/Sigmoid" -> "inception_resnet_v2/conv2d_87/Round"; +"inception_resnet_v2/conv2d_87/Round" -> "inception_resnet_v2/conv2d_87/mul"; +"inception_resnet_v2/conv2d_87/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_87/ReadVariableOp"; +"inception_resnet_v2/conv2d_87/ReadVariableOp" -> "inception_resnet_v2/conv2d_87/mul"; +"inception_resnet_v2/conv2d_87/mul" -> "inception_resnet_v2/conv2d_87/Conv2D"; +"inception_resnet_v2/conv2d_87/Conv2D" -> "inception_resnet_v2/batch_normalization_87/FusedBatchNormV3"; +"inception_resnet_v2/conv2d_84/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_84/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/conv2d_84/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/conv2d_84/Sigmoid"; +"inception_resnet_v2/conv2d_84/Sigmoid" -> "inception_resnet_v2/conv2d_84/Round"; +"inception_resnet_v2/conv2d_84/Round" -> "inception_resnet_v2/conv2d_84/mul"; +"inception_resnet_v2/conv2d_84/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_84/ReadVariableOp"; +"inception_resnet_v2/conv2d_84/ReadVariableOp" -> "inception_resnet_v2/conv2d_84/mul"; +"inception_resnet_v2/conv2d_84/mul" -> "inception_resnet_v2/conv2d_84/Conv2D"; +"inception_resnet_v2/conv2d_84/Conv2D" -> "inception_resnet_v2/batch_normalization_84/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_87/Const" -> "inception_resnet_v2/batch_normalization_87/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_87/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_87/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_87/ReadVariableOp" -> 
"inception_resnet_v2/batch_normalization_87/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_87/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_87/FusedBatchNormV3/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_87/FusedBatchNormV3/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_87/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_87/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_resnet_v2/batch_normalization_87/FusedBatchNormV3/ReadVariableOp_1"; +"inception_resnet_v2/batch_normalization_87/FusedBatchNormV3/ReadVariableOp_1" -> "inception_resnet_v2/batch_normalization_87/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_87/FusedBatchNormV3" -> "inception_resnet_v2/activation_87/Relu"; +"inception_resnet_v2/batch_normalization_84/Const" -> "inception_resnet_v2/batch_normalization_84/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_84/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_84/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_84/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_84/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_84/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_84/FusedBatchNormV3/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_84/FusedBatchNormV3/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_84/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_84/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_resnet_v2/batch_normalization_84/FusedBatchNormV3/ReadVariableOp_1"; +"inception_resnet_v2/batch_normalization_84/FusedBatchNormV3/ReadVariableOp_1" -> "inception_resnet_v2/batch_normalization_84/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_84/FusedBatchNormV3" -> "inception_resnet_v2/activation_84/Relu"; +"inception_resnet_v2/activation_84/Relu" -> "inception_resnet_v2/block17_3_mixed/concat"; +"inception_resnet_v2/activation_87/Relu" -> "inception_resnet_v2/block17_3_mixed/concat"; +"inception_resnet_v2/block17_3_mixed/concat/axis" -> "inception_resnet_v2/block17_3_mixed/concat"; +"inception_resnet_v2/block17_3_mixed/concat" -> "inception_resnet_v2/block17_3_conv/Conv2D"; +"inception_resnet_v2/block17_3_conv/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/block17_3_conv/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/block17_3_conv/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/block17_3_conv/Sigmoid"; +"inception_resnet_v2/block17_3_conv/Sigmoid" -> "inception_resnet_v2/block17_3_conv/Round"; +"inception_resnet_v2/block17_3_conv/Round" -> "inception_resnet_v2/block17_3_conv/mul"; +"inception_resnet_v2/block17_3_conv/ReadVariableOp/resource" -> "inception_resnet_v2/block17_3_conv/ReadVariableOp"; +"inception_resnet_v2/block17_3_conv/ReadVariableOp" -> "inception_resnet_v2/block17_3_conv/mul"; +"inception_resnet_v2/block17_3_conv/mul" -> "inception_resnet_v2/block17_3_conv/Conv2D"; +"inception_resnet_v2/block17_3_conv/Conv2D" -> "inception_resnet_v2/block17_3_conv/BiasAdd"; +"inception_resnet_v2/block17_3_conv/BiasAdd/ReadVariableOp/resource" -> "inception_resnet_v2/block17_3_conv/BiasAdd/ReadVariableOp"; +"inception_resnet_v2/block17_3_conv/BiasAdd/ReadVariableOp" -> "inception_resnet_v2/block17_3_conv/BiasAdd"; +"inception_resnet_v2/block17_3_conv/BiasAdd" -> "inception_resnet_v2/custom_scale_layer_12/mul"; +"inception_resnet_v2/custom_scale_layer_12/mul/y" -> 
"inception_resnet_v2/custom_scale_layer_12/mul"; +"inception_resnet_v2/custom_scale_layer_12/mul" -> "inception_resnet_v2/custom_scale_layer_12/add"; +"inception_resnet_v2/custom_scale_layer_12/add" -> "inception_resnet_v2/block17_3_ac/Relu"; +"inception_resnet_v2/block17_3_ac/Relu" -> "inception_resnet_v2/conv2d_89/Conv2D"; +"inception_resnet_v2/block17_3_ac/Relu" -> "inception_resnet_v2/conv2d_88/Conv2D"; +"inception_resnet_v2/block17_3_ac/Relu" -> "inception_resnet_v2/custom_scale_layer_13/add"; +"inception_resnet_v2/conv2d_89/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_89/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/conv2d_89/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/conv2d_89/Sigmoid"; +"inception_resnet_v2/conv2d_89/Sigmoid" -> "inception_resnet_v2/conv2d_89/Round"; +"inception_resnet_v2/conv2d_89/Round" -> "inception_resnet_v2/conv2d_89/mul"; +"inception_resnet_v2/conv2d_89/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_89/ReadVariableOp"; +"inception_resnet_v2/conv2d_89/ReadVariableOp" -> "inception_resnet_v2/conv2d_89/mul"; +"inception_resnet_v2/conv2d_89/mul" -> "inception_resnet_v2/conv2d_89/Conv2D"; +"inception_resnet_v2/conv2d_89/Conv2D" -> "inception_resnet_v2/batch_normalization_89/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_89/Const" -> "inception_resnet_v2/batch_normalization_89/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_89/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_89/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_89/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_89/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_89/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_89/FusedBatchNormV3/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_89/FusedBatchNormV3/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_89/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_89/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_resnet_v2/batch_normalization_89/FusedBatchNormV3/ReadVariableOp_1"; +"inception_resnet_v2/batch_normalization_89/FusedBatchNormV3/ReadVariableOp_1" -> "inception_resnet_v2/batch_normalization_89/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_89/FusedBatchNormV3" -> "inception_resnet_v2/activation_89/Relu"; +"inception_resnet_v2/activation_89/Relu" -> "inception_resnet_v2/conv2d_90/Conv2D"; +"inception_resnet_v2/conv2d_90/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_90/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/conv2d_90/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/conv2d_90/Sigmoid"; +"inception_resnet_v2/conv2d_90/Sigmoid" -> "inception_resnet_v2/conv2d_90/Round"; +"inception_resnet_v2/conv2d_90/Round" -> "inception_resnet_v2/conv2d_90/mul"; +"inception_resnet_v2/conv2d_90/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_90/ReadVariableOp"; +"inception_resnet_v2/conv2d_90/ReadVariableOp" -> "inception_resnet_v2/conv2d_90/mul"; +"inception_resnet_v2/conv2d_90/mul" -> "inception_resnet_v2/conv2d_90/Conv2D"; +"inception_resnet_v2/conv2d_90/Conv2D" -> "inception_resnet_v2/batch_normalization_90/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_90/Const" -> "inception_resnet_v2/batch_normalization_90/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_90/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_90/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_90/ReadVariableOp" 
-> "inception_resnet_v2/batch_normalization_90/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_90/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_90/FusedBatchNormV3/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_90/FusedBatchNormV3/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_90/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_90/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_resnet_v2/batch_normalization_90/FusedBatchNormV3/ReadVariableOp_1"; +"inception_resnet_v2/batch_normalization_90/FusedBatchNormV3/ReadVariableOp_1" -> "inception_resnet_v2/batch_normalization_90/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_90/FusedBatchNormV3" -> "inception_resnet_v2/activation_90/Relu"; +"inception_resnet_v2/activation_90/Relu" -> "inception_resnet_v2/conv2d_91/Conv2D"; +"inception_resnet_v2/conv2d_91/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_91/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/conv2d_91/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/conv2d_91/Sigmoid"; +"inception_resnet_v2/conv2d_91/Sigmoid" -> "inception_resnet_v2/conv2d_91/Round"; +"inception_resnet_v2/conv2d_91/Round" -> "inception_resnet_v2/conv2d_91/mul"; +"inception_resnet_v2/conv2d_91/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_91/ReadVariableOp"; +"inception_resnet_v2/conv2d_91/ReadVariableOp" -> "inception_resnet_v2/conv2d_91/mul"; +"inception_resnet_v2/conv2d_91/mul" -> "inception_resnet_v2/conv2d_91/Conv2D"; +"inception_resnet_v2/conv2d_91/Conv2D" -> "inception_resnet_v2/batch_normalization_91/FusedBatchNormV3"; +"inception_resnet_v2/conv2d_88/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_88/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/conv2d_88/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/conv2d_88/Sigmoid"; +"inception_resnet_v2/conv2d_88/Sigmoid" -> "inception_resnet_v2/conv2d_88/Round"; +"inception_resnet_v2/conv2d_88/Round" -> "inception_resnet_v2/conv2d_88/mul"; +"inception_resnet_v2/conv2d_88/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_88/ReadVariableOp"; +"inception_resnet_v2/conv2d_88/ReadVariableOp" -> "inception_resnet_v2/conv2d_88/mul"; +"inception_resnet_v2/conv2d_88/mul" -> "inception_resnet_v2/conv2d_88/Conv2D"; +"inception_resnet_v2/conv2d_88/Conv2D" -> "inception_resnet_v2/batch_normalization_88/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_91/Const" -> "inception_resnet_v2/batch_normalization_91/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_91/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_91/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_91/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_91/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_91/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_91/FusedBatchNormV3/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_91/FusedBatchNormV3/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_91/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_91/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_resnet_v2/batch_normalization_91/FusedBatchNormV3/ReadVariableOp_1"; +"inception_resnet_v2/batch_normalization_91/FusedBatchNormV3/ReadVariableOp_1" -> "inception_resnet_v2/batch_normalization_91/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_91/FusedBatchNormV3" -> "inception_resnet_v2/activation_91/Relu"; 
+"inception_resnet_v2/batch_normalization_88/Const" -> "inception_resnet_v2/batch_normalization_88/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_88/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_88/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_88/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_88/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_88/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_88/FusedBatchNormV3/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_88/FusedBatchNormV3/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_88/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_88/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_resnet_v2/batch_normalization_88/FusedBatchNormV3/ReadVariableOp_1"; +"inception_resnet_v2/batch_normalization_88/FusedBatchNormV3/ReadVariableOp_1" -> "inception_resnet_v2/batch_normalization_88/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_88/FusedBatchNormV3" -> "inception_resnet_v2/activation_88/Relu"; +"inception_resnet_v2/activation_88/Relu" -> "inception_resnet_v2/block17_4_mixed/concat"; +"inception_resnet_v2/activation_91/Relu" -> "inception_resnet_v2/block17_4_mixed/concat"; +"inception_resnet_v2/block17_4_mixed/concat/axis" -> "inception_resnet_v2/block17_4_mixed/concat"; +"inception_resnet_v2/block17_4_mixed/concat" -> "inception_resnet_v2/block17_4_conv/Conv2D"; +"inception_resnet_v2/block17_4_conv/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/block17_4_conv/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/block17_4_conv/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/block17_4_conv/Sigmoid"; +"inception_resnet_v2/block17_4_conv/Sigmoid" -> "inception_resnet_v2/block17_4_conv/Round"; +"inception_resnet_v2/block17_4_conv/Round" -> "inception_resnet_v2/block17_4_conv/mul"; +"inception_resnet_v2/block17_4_conv/ReadVariableOp/resource" -> "inception_resnet_v2/block17_4_conv/ReadVariableOp"; +"inception_resnet_v2/block17_4_conv/ReadVariableOp" -> "inception_resnet_v2/block17_4_conv/mul"; +"inception_resnet_v2/block17_4_conv/mul" -> "inception_resnet_v2/block17_4_conv/Conv2D"; +"inception_resnet_v2/block17_4_conv/Conv2D" -> "inception_resnet_v2/block17_4_conv/BiasAdd"; +"inception_resnet_v2/block17_4_conv/BiasAdd/ReadVariableOp/resource" -> "inception_resnet_v2/block17_4_conv/BiasAdd/ReadVariableOp"; +"inception_resnet_v2/block17_4_conv/BiasAdd/ReadVariableOp" -> "inception_resnet_v2/block17_4_conv/BiasAdd"; +"inception_resnet_v2/block17_4_conv/BiasAdd" -> "inception_resnet_v2/custom_scale_layer_13/mul"; +"inception_resnet_v2/custom_scale_layer_13/mul/y" -> "inception_resnet_v2/custom_scale_layer_13/mul"; +"inception_resnet_v2/custom_scale_layer_13/mul" -> "inception_resnet_v2/custom_scale_layer_13/add"; +"inception_resnet_v2/custom_scale_layer_13/add" -> "inception_resnet_v2/block17_4_ac/Relu"; +"inception_resnet_v2/block17_4_ac/Relu" -> "inception_resnet_v2/conv2d_93/Conv2D"; +"inception_resnet_v2/block17_4_ac/Relu" -> "inception_resnet_v2/conv2d_92/Conv2D"; +"inception_resnet_v2/block17_4_ac/Relu" -> "inception_resnet_v2/custom_scale_layer_14/add"; +"inception_resnet_v2/conv2d_93/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_93/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/conv2d_93/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/conv2d_93/Sigmoid"; +"inception_resnet_v2/conv2d_93/Sigmoid" -> "inception_resnet_v2/conv2d_93/Round"; 
+"inception_resnet_v2/conv2d_93/Round" -> "inception_resnet_v2/conv2d_93/mul"; +"inception_resnet_v2/conv2d_93/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_93/ReadVariableOp"; +"inception_resnet_v2/conv2d_93/ReadVariableOp" -> "inception_resnet_v2/conv2d_93/mul"; +"inception_resnet_v2/conv2d_93/mul" -> "inception_resnet_v2/conv2d_93/Conv2D"; +"inception_resnet_v2/conv2d_93/Conv2D" -> "inception_resnet_v2/batch_normalization_93/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_93/Const" -> "inception_resnet_v2/batch_normalization_93/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_93/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_93/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_93/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_93/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_93/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_93/FusedBatchNormV3/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_93/FusedBatchNormV3/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_93/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_93/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_resnet_v2/batch_normalization_93/FusedBatchNormV3/ReadVariableOp_1"; +"inception_resnet_v2/batch_normalization_93/FusedBatchNormV3/ReadVariableOp_1" -> "inception_resnet_v2/batch_normalization_93/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_93/FusedBatchNormV3" -> "inception_resnet_v2/activation_93/Relu"; +"inception_resnet_v2/activation_93/Relu" -> "inception_resnet_v2/conv2d_94/Conv2D"; +"inception_resnet_v2/conv2d_94/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_94/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/conv2d_94/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/conv2d_94/Sigmoid"; +"inception_resnet_v2/conv2d_94/Sigmoid" -> "inception_resnet_v2/conv2d_94/Round"; +"inception_resnet_v2/conv2d_94/Round" -> "inception_resnet_v2/conv2d_94/mul"; +"inception_resnet_v2/conv2d_94/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_94/ReadVariableOp"; +"inception_resnet_v2/conv2d_94/ReadVariableOp" -> "inception_resnet_v2/conv2d_94/mul"; +"inception_resnet_v2/conv2d_94/mul" -> "inception_resnet_v2/conv2d_94/Conv2D"; +"inception_resnet_v2/conv2d_94/Conv2D" -> "inception_resnet_v2/batch_normalization_94/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_94/Const" -> "inception_resnet_v2/batch_normalization_94/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_94/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_94/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_94/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_94/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_94/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_94/FusedBatchNormV3/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_94/FusedBatchNormV3/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_94/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_94/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_resnet_v2/batch_normalization_94/FusedBatchNormV3/ReadVariableOp_1"; +"inception_resnet_v2/batch_normalization_94/FusedBatchNormV3/ReadVariableOp_1" -> "inception_resnet_v2/batch_normalization_94/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_94/FusedBatchNormV3" -> "inception_resnet_v2/activation_94/Relu"; 
+"inception_resnet_v2/activation_94/Relu" -> "inception_resnet_v2/conv2d_95/Conv2D"; +"inception_resnet_v2/conv2d_95/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_95/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/conv2d_95/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/conv2d_95/Sigmoid"; +"inception_resnet_v2/conv2d_95/Sigmoid" -> "inception_resnet_v2/conv2d_95/Round"; +"inception_resnet_v2/conv2d_95/Round" -> "inception_resnet_v2/conv2d_95/mul"; +"inception_resnet_v2/conv2d_95/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_95/ReadVariableOp"; +"inception_resnet_v2/conv2d_95/ReadVariableOp" -> "inception_resnet_v2/conv2d_95/mul"; +"inception_resnet_v2/conv2d_95/mul" -> "inception_resnet_v2/conv2d_95/Conv2D"; +"inception_resnet_v2/conv2d_95/Conv2D" -> "inception_resnet_v2/batch_normalization_95/FusedBatchNormV3"; +"inception_resnet_v2/conv2d_92/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_92/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/conv2d_92/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/conv2d_92/Sigmoid"; +"inception_resnet_v2/conv2d_92/Sigmoid" -> "inception_resnet_v2/conv2d_92/Round"; +"inception_resnet_v2/conv2d_92/Round" -> "inception_resnet_v2/conv2d_92/mul"; +"inception_resnet_v2/conv2d_92/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_92/ReadVariableOp"; +"inception_resnet_v2/conv2d_92/ReadVariableOp" -> "inception_resnet_v2/conv2d_92/mul"; +"inception_resnet_v2/conv2d_92/mul" -> "inception_resnet_v2/conv2d_92/Conv2D"; +"inception_resnet_v2/conv2d_92/Conv2D" -> "inception_resnet_v2/batch_normalization_92/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_95/Const" -> "inception_resnet_v2/batch_normalization_95/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_95/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_95/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_95/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_95/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_95/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_95/FusedBatchNormV3/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_95/FusedBatchNormV3/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_95/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_95/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_resnet_v2/batch_normalization_95/FusedBatchNormV3/ReadVariableOp_1"; +"inception_resnet_v2/batch_normalization_95/FusedBatchNormV3/ReadVariableOp_1" -> "inception_resnet_v2/batch_normalization_95/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_95/FusedBatchNormV3" -> "inception_resnet_v2/activation_95/Relu"; +"inception_resnet_v2/batch_normalization_92/Const" -> "inception_resnet_v2/batch_normalization_92/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_92/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_92/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_92/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_92/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_92/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_92/FusedBatchNormV3/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_92/FusedBatchNormV3/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_92/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_92/FusedBatchNormV3/ReadVariableOp_1/resource" -> 
"inception_resnet_v2/batch_normalization_92/FusedBatchNormV3/ReadVariableOp_1"; +"inception_resnet_v2/batch_normalization_92/FusedBatchNormV3/ReadVariableOp_1" -> "inception_resnet_v2/batch_normalization_92/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_92/FusedBatchNormV3" -> "inception_resnet_v2/activation_92/Relu"; +"inception_resnet_v2/activation_92/Relu" -> "inception_resnet_v2/block17_5_mixed/concat"; +"inception_resnet_v2/activation_95/Relu" -> "inception_resnet_v2/block17_5_mixed/concat"; +"inception_resnet_v2/block17_5_mixed/concat/axis" -> "inception_resnet_v2/block17_5_mixed/concat"; +"inception_resnet_v2/block17_5_mixed/concat" -> "inception_resnet_v2/block17_5_conv/Conv2D"; +"inception_resnet_v2/block17_5_conv/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/block17_5_conv/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/block17_5_conv/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/block17_5_conv/Sigmoid"; +"inception_resnet_v2/block17_5_conv/Sigmoid" -> "inception_resnet_v2/block17_5_conv/Round"; +"inception_resnet_v2/block17_5_conv/Round" -> "inception_resnet_v2/block17_5_conv/mul"; +"inception_resnet_v2/block17_5_conv/ReadVariableOp/resource" -> "inception_resnet_v2/block17_5_conv/ReadVariableOp"; +"inception_resnet_v2/block17_5_conv/ReadVariableOp" -> "inception_resnet_v2/block17_5_conv/mul"; +"inception_resnet_v2/block17_5_conv/mul" -> "inception_resnet_v2/block17_5_conv/Conv2D"; +"inception_resnet_v2/block17_5_conv/Conv2D" -> "inception_resnet_v2/block17_5_conv/BiasAdd"; +"inception_resnet_v2/block17_5_conv/BiasAdd/ReadVariableOp/resource" -> "inception_resnet_v2/block17_5_conv/BiasAdd/ReadVariableOp"; +"inception_resnet_v2/block17_5_conv/BiasAdd/ReadVariableOp" -> "inception_resnet_v2/block17_5_conv/BiasAdd"; +"inception_resnet_v2/block17_5_conv/BiasAdd" -> "inception_resnet_v2/custom_scale_layer_14/mul"; +"inception_resnet_v2/custom_scale_layer_14/mul/y" -> "inception_resnet_v2/custom_scale_layer_14/mul"; +"inception_resnet_v2/custom_scale_layer_14/mul" -> "inception_resnet_v2/custom_scale_layer_14/add"; +"inception_resnet_v2/custom_scale_layer_14/add" -> "inception_resnet_v2/block17_5_ac/Relu"; +"inception_resnet_v2/block17_5_ac/Relu" -> "inception_resnet_v2/conv2d_97/Conv2D"; +"inception_resnet_v2/block17_5_ac/Relu" -> "inception_resnet_v2/conv2d_96/Conv2D"; +"inception_resnet_v2/block17_5_ac/Relu" -> "inception_resnet_v2/custom_scale_layer_15/add"; +"inception_resnet_v2/conv2d_97/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_97/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/conv2d_97/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/conv2d_97/Sigmoid"; +"inception_resnet_v2/conv2d_97/Sigmoid" -> "inception_resnet_v2/conv2d_97/Round"; +"inception_resnet_v2/conv2d_97/Round" -> "inception_resnet_v2/conv2d_97/mul"; +"inception_resnet_v2/conv2d_97/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_97/ReadVariableOp"; +"inception_resnet_v2/conv2d_97/ReadVariableOp" -> "inception_resnet_v2/conv2d_97/mul"; +"inception_resnet_v2/conv2d_97/mul" -> "inception_resnet_v2/conv2d_97/Conv2D"; +"inception_resnet_v2/conv2d_97/Conv2D" -> "inception_resnet_v2/batch_normalization_97/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_97/Const" -> "inception_resnet_v2/batch_normalization_97/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_97/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_97/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_97/ReadVariableOp" -> 
"inception_resnet_v2/batch_normalization_97/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_97/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_97/FusedBatchNormV3/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_97/FusedBatchNormV3/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_97/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_97/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_resnet_v2/batch_normalization_97/FusedBatchNormV3/ReadVariableOp_1"; +"inception_resnet_v2/batch_normalization_97/FusedBatchNormV3/ReadVariableOp_1" -> "inception_resnet_v2/batch_normalization_97/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_97/FusedBatchNormV3" -> "inception_resnet_v2/activation_97/Relu"; +"inception_resnet_v2/activation_97/Relu" -> "inception_resnet_v2/conv2d_98/Conv2D"; +"inception_resnet_v2/conv2d_98/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_98/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/conv2d_98/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/conv2d_98/Sigmoid"; +"inception_resnet_v2/conv2d_98/Sigmoid" -> "inception_resnet_v2/conv2d_98/Round"; +"inception_resnet_v2/conv2d_98/Round" -> "inception_resnet_v2/conv2d_98/mul"; +"inception_resnet_v2/conv2d_98/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_98/ReadVariableOp"; +"inception_resnet_v2/conv2d_98/ReadVariableOp" -> "inception_resnet_v2/conv2d_98/mul"; +"inception_resnet_v2/conv2d_98/mul" -> "inception_resnet_v2/conv2d_98/Conv2D"; +"inception_resnet_v2/conv2d_98/Conv2D" -> "inception_resnet_v2/batch_normalization_98/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_98/Const" -> "inception_resnet_v2/batch_normalization_98/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_98/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_98/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_98/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_98/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_98/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_98/FusedBatchNormV3/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_98/FusedBatchNormV3/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_98/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_98/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_resnet_v2/batch_normalization_98/FusedBatchNormV3/ReadVariableOp_1"; +"inception_resnet_v2/batch_normalization_98/FusedBatchNormV3/ReadVariableOp_1" -> "inception_resnet_v2/batch_normalization_98/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_98/FusedBatchNormV3" -> "inception_resnet_v2/activation_98/Relu"; +"inception_resnet_v2/activation_98/Relu" -> "inception_resnet_v2/conv2d_99/Conv2D"; +"inception_resnet_v2/conv2d_99/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_99/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/conv2d_99/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/conv2d_99/Sigmoid"; +"inception_resnet_v2/conv2d_99/Sigmoid" -> "inception_resnet_v2/conv2d_99/Round"; +"inception_resnet_v2/conv2d_99/Round" -> "inception_resnet_v2/conv2d_99/mul"; +"inception_resnet_v2/conv2d_99/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_99/ReadVariableOp"; +"inception_resnet_v2/conv2d_99/ReadVariableOp" -> "inception_resnet_v2/conv2d_99/mul"; +"inception_resnet_v2/conv2d_99/mul" -> "inception_resnet_v2/conv2d_99/Conv2D"; 
+"inception_resnet_v2/conv2d_99/Conv2D" -> "inception_resnet_v2/batch_normalization_99/FusedBatchNormV3"; +"inception_resnet_v2/conv2d_96/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_96/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/conv2d_96/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/conv2d_96/Sigmoid"; +"inception_resnet_v2/conv2d_96/Sigmoid" -> "inception_resnet_v2/conv2d_96/Round"; +"inception_resnet_v2/conv2d_96/Round" -> "inception_resnet_v2/conv2d_96/mul"; +"inception_resnet_v2/conv2d_96/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_96/ReadVariableOp"; +"inception_resnet_v2/conv2d_96/ReadVariableOp" -> "inception_resnet_v2/conv2d_96/mul"; +"inception_resnet_v2/conv2d_96/mul" -> "inception_resnet_v2/conv2d_96/Conv2D"; +"inception_resnet_v2/conv2d_96/Conv2D" -> "inception_resnet_v2/batch_normalization_96/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_99/Const" -> "inception_resnet_v2/batch_normalization_99/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_99/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_99/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_99/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_99/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_99/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_99/FusedBatchNormV3/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_99/FusedBatchNormV3/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_99/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_99/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_resnet_v2/batch_normalization_99/FusedBatchNormV3/ReadVariableOp_1"; +"inception_resnet_v2/batch_normalization_99/FusedBatchNormV3/ReadVariableOp_1" -> "inception_resnet_v2/batch_normalization_99/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_99/FusedBatchNormV3" -> "inception_resnet_v2/activation_99/Relu"; +"inception_resnet_v2/batch_normalization_96/Const" -> "inception_resnet_v2/batch_normalization_96/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_96/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_96/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_96/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_96/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_96/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_96/FusedBatchNormV3/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_96/FusedBatchNormV3/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_96/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_96/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_resnet_v2/batch_normalization_96/FusedBatchNormV3/ReadVariableOp_1"; +"inception_resnet_v2/batch_normalization_96/FusedBatchNormV3/ReadVariableOp_1" -> "inception_resnet_v2/batch_normalization_96/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_96/FusedBatchNormV3" -> "inception_resnet_v2/activation_96/Relu"; +"inception_resnet_v2/activation_96/Relu" -> "inception_resnet_v2/block17_6_mixed/concat"; +"inception_resnet_v2/activation_99/Relu" -> "inception_resnet_v2/block17_6_mixed/concat"; +"inception_resnet_v2/block17_6_mixed/concat/axis" -> "inception_resnet_v2/block17_6_mixed/concat"; +"inception_resnet_v2/block17_6_mixed/concat" -> "inception_resnet_v2/block17_6_conv/Conv2D"; +"inception_resnet_v2/block17_6_conv/Sigmoid/ReadVariableOp/resource" 
-> "inception_resnet_v2/block17_6_conv/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/block17_6_conv/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/block17_6_conv/Sigmoid"; +"inception_resnet_v2/block17_6_conv/Sigmoid" -> "inception_resnet_v2/block17_6_conv/Round"; +"inception_resnet_v2/block17_6_conv/Round" -> "inception_resnet_v2/block17_6_conv/mul"; +"inception_resnet_v2/block17_6_conv/ReadVariableOp/resource" -> "inception_resnet_v2/block17_6_conv/ReadVariableOp"; +"inception_resnet_v2/block17_6_conv/ReadVariableOp" -> "inception_resnet_v2/block17_6_conv/mul"; +"inception_resnet_v2/block17_6_conv/mul" -> "inception_resnet_v2/block17_6_conv/Conv2D"; +"inception_resnet_v2/block17_6_conv/Conv2D" -> "inception_resnet_v2/block17_6_conv/BiasAdd"; +"inception_resnet_v2/block17_6_conv/BiasAdd/ReadVariableOp/resource" -> "inception_resnet_v2/block17_6_conv/BiasAdd/ReadVariableOp"; +"inception_resnet_v2/block17_6_conv/BiasAdd/ReadVariableOp" -> "inception_resnet_v2/block17_6_conv/BiasAdd"; +"inception_resnet_v2/block17_6_conv/BiasAdd" -> "inception_resnet_v2/custom_scale_layer_15/mul"; +"inception_resnet_v2/custom_scale_layer_15/mul/y" -> "inception_resnet_v2/custom_scale_layer_15/mul"; +"inception_resnet_v2/custom_scale_layer_15/mul" -> "inception_resnet_v2/custom_scale_layer_15/add"; +"inception_resnet_v2/custom_scale_layer_15/add" -> "inception_resnet_v2/block17_6_ac/Relu"; +"inception_resnet_v2/block17_6_ac/Relu" -> "inception_resnet_v2/conv2d_101/Conv2D"; +"inception_resnet_v2/block17_6_ac/Relu" -> "inception_resnet_v2/conv2d_100/Conv2D"; +"inception_resnet_v2/block17_6_ac/Relu" -> "inception_resnet_v2/custom_scale_layer_16/add"; +"inception_resnet_v2/conv2d_101/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_101/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/conv2d_101/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/conv2d_101/Sigmoid"; +"inception_resnet_v2/conv2d_101/Sigmoid" -> "inception_resnet_v2/conv2d_101/Round"; +"inception_resnet_v2/conv2d_101/Round" -> "inception_resnet_v2/conv2d_101/mul"; +"inception_resnet_v2/conv2d_101/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_101/ReadVariableOp"; +"inception_resnet_v2/conv2d_101/ReadVariableOp" -> "inception_resnet_v2/conv2d_101/mul"; +"inception_resnet_v2/conv2d_101/mul" -> "inception_resnet_v2/conv2d_101/Conv2D"; +"inception_resnet_v2/conv2d_101/Conv2D" -> "inception_resnet_v2/batch_normalization_101/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_101/Const" -> "inception_resnet_v2/batch_normalization_101/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_101/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_101/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_101/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_101/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_101/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_101/FusedBatchNormV3/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_101/FusedBatchNormV3/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_101/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_101/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_resnet_v2/batch_normalization_101/FusedBatchNormV3/ReadVariableOp_1"; +"inception_resnet_v2/batch_normalization_101/FusedBatchNormV3/ReadVariableOp_1" -> "inception_resnet_v2/batch_normalization_101/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_101/FusedBatchNormV3" -> 
"inception_resnet_v2/activation_101/Relu"; +"inception_resnet_v2/activation_101/Relu" -> "inception_resnet_v2/conv2d_102/Conv2D"; +"inception_resnet_v2/conv2d_102/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_102/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/conv2d_102/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/conv2d_102/Sigmoid"; +"inception_resnet_v2/conv2d_102/Sigmoid" -> "inception_resnet_v2/conv2d_102/Round"; +"inception_resnet_v2/conv2d_102/Round" -> "inception_resnet_v2/conv2d_102/mul"; +"inception_resnet_v2/conv2d_102/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_102/ReadVariableOp"; +"inception_resnet_v2/conv2d_102/ReadVariableOp" -> "inception_resnet_v2/conv2d_102/mul"; +"inception_resnet_v2/conv2d_102/mul" -> "inception_resnet_v2/conv2d_102/Conv2D"; +"inception_resnet_v2/conv2d_102/Conv2D" -> "inception_resnet_v2/batch_normalization_102/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_102/Const" -> "inception_resnet_v2/batch_normalization_102/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_102/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_102/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_102/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_102/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_102/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_102/FusedBatchNormV3/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_102/FusedBatchNormV3/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_102/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_102/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_resnet_v2/batch_normalization_102/FusedBatchNormV3/ReadVariableOp_1"; +"inception_resnet_v2/batch_normalization_102/FusedBatchNormV3/ReadVariableOp_1" -> "inception_resnet_v2/batch_normalization_102/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_102/FusedBatchNormV3" -> "inception_resnet_v2/activation_102/Relu"; +"inception_resnet_v2/activation_102/Relu" -> "inception_resnet_v2/conv2d_103/Conv2D"; +"inception_resnet_v2/conv2d_103/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_103/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/conv2d_103/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/conv2d_103/Sigmoid"; +"inception_resnet_v2/conv2d_103/Sigmoid" -> "inception_resnet_v2/conv2d_103/Round"; +"inception_resnet_v2/conv2d_103/Round" -> "inception_resnet_v2/conv2d_103/mul"; +"inception_resnet_v2/conv2d_103/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_103/ReadVariableOp"; +"inception_resnet_v2/conv2d_103/ReadVariableOp" -> "inception_resnet_v2/conv2d_103/mul"; +"inception_resnet_v2/conv2d_103/mul" -> "inception_resnet_v2/conv2d_103/Conv2D"; +"inception_resnet_v2/conv2d_103/Conv2D" -> "inception_resnet_v2/batch_normalization_103/FusedBatchNormV3"; +"inception_resnet_v2/conv2d_100/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_100/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/conv2d_100/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/conv2d_100/Sigmoid"; +"inception_resnet_v2/conv2d_100/Sigmoid" -> "inception_resnet_v2/conv2d_100/Round"; +"inception_resnet_v2/conv2d_100/Round" -> "inception_resnet_v2/conv2d_100/mul"; +"inception_resnet_v2/conv2d_100/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_100/ReadVariableOp"; +"inception_resnet_v2/conv2d_100/ReadVariableOp" -> "inception_resnet_v2/conv2d_100/mul"; +"inception_resnet_v2/conv2d_100/mul" -> 
"inception_resnet_v2/conv2d_100/Conv2D"; +"inception_resnet_v2/conv2d_100/Conv2D" -> "inception_resnet_v2/batch_normalization_100/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_103/Const" -> "inception_resnet_v2/batch_normalization_103/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_103/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_103/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_103/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_103/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_103/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_103/FusedBatchNormV3/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_103/FusedBatchNormV3/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_103/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_103/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_resnet_v2/batch_normalization_103/FusedBatchNormV3/ReadVariableOp_1"; +"inception_resnet_v2/batch_normalization_103/FusedBatchNormV3/ReadVariableOp_1" -> "inception_resnet_v2/batch_normalization_103/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_103/FusedBatchNormV3" -> "inception_resnet_v2/activation_103/Relu"; +"inception_resnet_v2/batch_normalization_100/Const" -> "inception_resnet_v2/batch_normalization_100/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_100/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_100/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_100/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_100/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_100/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_100/FusedBatchNormV3/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_100/FusedBatchNormV3/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_100/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_100/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_resnet_v2/batch_normalization_100/FusedBatchNormV3/ReadVariableOp_1"; +"inception_resnet_v2/batch_normalization_100/FusedBatchNormV3/ReadVariableOp_1" -> "inception_resnet_v2/batch_normalization_100/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_100/FusedBatchNormV3" -> "inception_resnet_v2/activation_100/Relu"; +"inception_resnet_v2/activation_100/Relu" -> "inception_resnet_v2/block17_7_mixed/concat"; +"inception_resnet_v2/activation_103/Relu" -> "inception_resnet_v2/block17_7_mixed/concat"; +"inception_resnet_v2/block17_7_mixed/concat/axis" -> "inception_resnet_v2/block17_7_mixed/concat"; +"inception_resnet_v2/block17_7_mixed/concat" -> "inception_resnet_v2/block17_7_conv/Conv2D"; +"inception_resnet_v2/block17_7_conv/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/block17_7_conv/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/block17_7_conv/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/block17_7_conv/Sigmoid"; +"inception_resnet_v2/block17_7_conv/Sigmoid" -> "inception_resnet_v2/block17_7_conv/Round"; +"inception_resnet_v2/block17_7_conv/Round" -> "inception_resnet_v2/block17_7_conv/mul"; +"inception_resnet_v2/block17_7_conv/ReadVariableOp/resource" -> "inception_resnet_v2/block17_7_conv/ReadVariableOp"; +"inception_resnet_v2/block17_7_conv/ReadVariableOp" -> "inception_resnet_v2/block17_7_conv/mul"; +"inception_resnet_v2/block17_7_conv/mul" -> "inception_resnet_v2/block17_7_conv/Conv2D"; 
+"inception_resnet_v2/block17_7_conv/Conv2D" -> "inception_resnet_v2/block17_7_conv/BiasAdd"; +"inception_resnet_v2/block17_7_conv/BiasAdd/ReadVariableOp/resource" -> "inception_resnet_v2/block17_7_conv/BiasAdd/ReadVariableOp"; +"inception_resnet_v2/block17_7_conv/BiasAdd/ReadVariableOp" -> "inception_resnet_v2/block17_7_conv/BiasAdd"; +"inception_resnet_v2/block17_7_conv/BiasAdd" -> "inception_resnet_v2/custom_scale_layer_16/mul"; +"inception_resnet_v2/custom_scale_layer_16/mul/y" -> "inception_resnet_v2/custom_scale_layer_16/mul"; +"inception_resnet_v2/custom_scale_layer_16/mul" -> "inception_resnet_v2/custom_scale_layer_16/add"; +"inception_resnet_v2/custom_scale_layer_16/add" -> "inception_resnet_v2/block17_7_ac/Relu"; +"inception_resnet_v2/block17_7_ac/Relu" -> "inception_resnet_v2/conv2d_105/Conv2D"; +"inception_resnet_v2/block17_7_ac/Relu" -> "inception_resnet_v2/conv2d_104/Conv2D"; +"inception_resnet_v2/block17_7_ac/Relu" -> "inception_resnet_v2/custom_scale_layer_17/add"; +"inception_resnet_v2/conv2d_105/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_105/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/conv2d_105/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/conv2d_105/Sigmoid"; +"inception_resnet_v2/conv2d_105/Sigmoid" -> "inception_resnet_v2/conv2d_105/Round"; +"inception_resnet_v2/conv2d_105/Round" -> "inception_resnet_v2/conv2d_105/mul"; +"inception_resnet_v2/conv2d_105/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_105/ReadVariableOp"; +"inception_resnet_v2/conv2d_105/ReadVariableOp" -> "inception_resnet_v2/conv2d_105/mul"; +"inception_resnet_v2/conv2d_105/mul" -> "inception_resnet_v2/conv2d_105/Conv2D"; +"inception_resnet_v2/conv2d_105/Conv2D" -> "inception_resnet_v2/batch_normalization_105/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_105/Const" -> "inception_resnet_v2/batch_normalization_105/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_105/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_105/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_105/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_105/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_105/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_105/FusedBatchNormV3/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_105/FusedBatchNormV3/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_105/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_105/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_resnet_v2/batch_normalization_105/FusedBatchNormV3/ReadVariableOp_1"; +"inception_resnet_v2/batch_normalization_105/FusedBatchNormV3/ReadVariableOp_1" -> "inception_resnet_v2/batch_normalization_105/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_105/FusedBatchNormV3" -> "inception_resnet_v2/activation_105/Relu"; +"inception_resnet_v2/activation_105/Relu" -> "inception_resnet_v2/conv2d_106/Conv2D"; +"inception_resnet_v2/conv2d_106/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_106/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/conv2d_106/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/conv2d_106/Sigmoid"; +"inception_resnet_v2/conv2d_106/Sigmoid" -> "inception_resnet_v2/conv2d_106/Round"; +"inception_resnet_v2/conv2d_106/Round" -> "inception_resnet_v2/conv2d_106/mul"; +"inception_resnet_v2/conv2d_106/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_106/ReadVariableOp"; +"inception_resnet_v2/conv2d_106/ReadVariableOp" 
-> "inception_resnet_v2/conv2d_106/mul"; +"inception_resnet_v2/conv2d_106/mul" -> "inception_resnet_v2/conv2d_106/Conv2D"; +"inception_resnet_v2/conv2d_106/Conv2D" -> "inception_resnet_v2/batch_normalization_106/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_106/Const" -> "inception_resnet_v2/batch_normalization_106/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_106/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_106/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_106/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_106/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_106/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_106/FusedBatchNormV3/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_106/FusedBatchNormV3/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_106/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_106/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_resnet_v2/batch_normalization_106/FusedBatchNormV3/ReadVariableOp_1"; +"inception_resnet_v2/batch_normalization_106/FusedBatchNormV3/ReadVariableOp_1" -> "inception_resnet_v2/batch_normalization_106/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_106/FusedBatchNormV3" -> "inception_resnet_v2/activation_106/Relu"; +"inception_resnet_v2/activation_106/Relu" -> "inception_resnet_v2/conv2d_107/Conv2D"; +"inception_resnet_v2/conv2d_107/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_107/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/conv2d_107/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/conv2d_107/Sigmoid"; +"inception_resnet_v2/conv2d_107/Sigmoid" -> "inception_resnet_v2/conv2d_107/Round"; +"inception_resnet_v2/conv2d_107/Round" -> "inception_resnet_v2/conv2d_107/mul"; +"inception_resnet_v2/conv2d_107/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_107/ReadVariableOp"; +"inception_resnet_v2/conv2d_107/ReadVariableOp" -> "inception_resnet_v2/conv2d_107/mul"; +"inception_resnet_v2/conv2d_107/mul" -> "inception_resnet_v2/conv2d_107/Conv2D"; +"inception_resnet_v2/conv2d_107/Conv2D" -> "inception_resnet_v2/batch_normalization_107/FusedBatchNormV3"; +"inception_resnet_v2/conv2d_104/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_104/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/conv2d_104/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/conv2d_104/Sigmoid"; +"inception_resnet_v2/conv2d_104/Sigmoid" -> "inception_resnet_v2/conv2d_104/Round"; +"inception_resnet_v2/conv2d_104/Round" -> "inception_resnet_v2/conv2d_104/mul"; +"inception_resnet_v2/conv2d_104/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_104/ReadVariableOp"; +"inception_resnet_v2/conv2d_104/ReadVariableOp" -> "inception_resnet_v2/conv2d_104/mul"; +"inception_resnet_v2/conv2d_104/mul" -> "inception_resnet_v2/conv2d_104/Conv2D"; +"inception_resnet_v2/conv2d_104/Conv2D" -> "inception_resnet_v2/batch_normalization_104/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_107/Const" -> "inception_resnet_v2/batch_normalization_107/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_107/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_107/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_107/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_107/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_107/FusedBatchNormV3/ReadVariableOp/resource" -> 
"inception_resnet_v2/batch_normalization_107/FusedBatchNormV3/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_107/FusedBatchNormV3/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_107/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_107/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_resnet_v2/batch_normalization_107/FusedBatchNormV3/ReadVariableOp_1"; +"inception_resnet_v2/batch_normalization_107/FusedBatchNormV3/ReadVariableOp_1" -> "inception_resnet_v2/batch_normalization_107/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_107/FusedBatchNormV3" -> "inception_resnet_v2/activation_107/Relu"; +"inception_resnet_v2/batch_normalization_104/Const" -> "inception_resnet_v2/batch_normalization_104/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_104/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_104/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_104/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_104/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_104/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_104/FusedBatchNormV3/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_104/FusedBatchNormV3/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_104/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_104/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_resnet_v2/batch_normalization_104/FusedBatchNormV3/ReadVariableOp_1"; +"inception_resnet_v2/batch_normalization_104/FusedBatchNormV3/ReadVariableOp_1" -> "inception_resnet_v2/batch_normalization_104/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_104/FusedBatchNormV3" -> "inception_resnet_v2/activation_104/Relu"; +"inception_resnet_v2/activation_104/Relu" -> "inception_resnet_v2/block17_8_mixed/concat"; +"inception_resnet_v2/activation_107/Relu" -> "inception_resnet_v2/block17_8_mixed/concat"; +"inception_resnet_v2/block17_8_mixed/concat/axis" -> "inception_resnet_v2/block17_8_mixed/concat"; +"inception_resnet_v2/block17_8_mixed/concat" -> "inception_resnet_v2/block17_8_conv/Conv2D"; +"inception_resnet_v2/block17_8_conv/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/block17_8_conv/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/block17_8_conv/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/block17_8_conv/Sigmoid"; +"inception_resnet_v2/block17_8_conv/Sigmoid" -> "inception_resnet_v2/block17_8_conv/Round"; +"inception_resnet_v2/block17_8_conv/Round" -> "inception_resnet_v2/block17_8_conv/mul"; +"inception_resnet_v2/block17_8_conv/ReadVariableOp/resource" -> "inception_resnet_v2/block17_8_conv/ReadVariableOp"; +"inception_resnet_v2/block17_8_conv/ReadVariableOp" -> "inception_resnet_v2/block17_8_conv/mul"; +"inception_resnet_v2/block17_8_conv/mul" -> "inception_resnet_v2/block17_8_conv/Conv2D"; +"inception_resnet_v2/block17_8_conv/Conv2D" -> "inception_resnet_v2/block17_8_conv/BiasAdd"; +"inception_resnet_v2/block17_8_conv/BiasAdd/ReadVariableOp/resource" -> "inception_resnet_v2/block17_8_conv/BiasAdd/ReadVariableOp"; +"inception_resnet_v2/block17_8_conv/BiasAdd/ReadVariableOp" -> "inception_resnet_v2/block17_8_conv/BiasAdd"; +"inception_resnet_v2/block17_8_conv/BiasAdd" -> "inception_resnet_v2/custom_scale_layer_17/mul"; +"inception_resnet_v2/custom_scale_layer_17/mul/y" -> "inception_resnet_v2/custom_scale_layer_17/mul"; +"inception_resnet_v2/custom_scale_layer_17/mul" -> "inception_resnet_v2/custom_scale_layer_17/add"; 
+"inception_resnet_v2/custom_scale_layer_17/add" -> "inception_resnet_v2/block17_8_ac/Relu"; +"inception_resnet_v2/block17_8_ac/Relu" -> "inception_resnet_v2/conv2d_109/Conv2D"; +"inception_resnet_v2/block17_8_ac/Relu" -> "inception_resnet_v2/conv2d_108/Conv2D"; +"inception_resnet_v2/block17_8_ac/Relu" -> "inception_resnet_v2/custom_scale_layer_18/add"; +"inception_resnet_v2/conv2d_109/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_109/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/conv2d_109/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/conv2d_109/Sigmoid"; +"inception_resnet_v2/conv2d_109/Sigmoid" -> "inception_resnet_v2/conv2d_109/Round"; +"inception_resnet_v2/conv2d_109/Round" -> "inception_resnet_v2/conv2d_109/mul"; +"inception_resnet_v2/conv2d_109/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_109/ReadVariableOp"; +"inception_resnet_v2/conv2d_109/ReadVariableOp" -> "inception_resnet_v2/conv2d_109/mul"; +"inception_resnet_v2/conv2d_109/mul" -> "inception_resnet_v2/conv2d_109/Conv2D"; +"inception_resnet_v2/conv2d_109/Conv2D" -> "inception_resnet_v2/batch_normalization_109/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_109/Const" -> "inception_resnet_v2/batch_normalization_109/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_109/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_109/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_109/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_109/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_109/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_109/FusedBatchNormV3/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_109/FusedBatchNormV3/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_109/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_109/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_resnet_v2/batch_normalization_109/FusedBatchNormV3/ReadVariableOp_1"; +"inception_resnet_v2/batch_normalization_109/FusedBatchNormV3/ReadVariableOp_1" -> "inception_resnet_v2/batch_normalization_109/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_109/FusedBatchNormV3" -> "inception_resnet_v2/activation_109/Relu"; +"inception_resnet_v2/activation_109/Relu" -> "inception_resnet_v2/conv2d_110/Conv2D"; +"inception_resnet_v2/conv2d_110/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_110/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/conv2d_110/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/conv2d_110/Sigmoid"; +"inception_resnet_v2/conv2d_110/Sigmoid" -> "inception_resnet_v2/conv2d_110/Round"; +"inception_resnet_v2/conv2d_110/Round" -> "inception_resnet_v2/conv2d_110/mul"; +"inception_resnet_v2/conv2d_110/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_110/ReadVariableOp"; +"inception_resnet_v2/conv2d_110/ReadVariableOp" -> "inception_resnet_v2/conv2d_110/mul"; +"inception_resnet_v2/conv2d_110/mul" -> "inception_resnet_v2/conv2d_110/Conv2D"; +"inception_resnet_v2/conv2d_110/Conv2D" -> "inception_resnet_v2/batch_normalization_110/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_110/Const" -> "inception_resnet_v2/batch_normalization_110/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_110/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_110/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_110/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_110/FusedBatchNormV3"; 
+"inception_resnet_v2/batch_normalization_110/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_110/FusedBatchNormV3/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_110/FusedBatchNormV3/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_110/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_110/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_resnet_v2/batch_normalization_110/FusedBatchNormV3/ReadVariableOp_1"; +"inception_resnet_v2/batch_normalization_110/FusedBatchNormV3/ReadVariableOp_1" -> "inception_resnet_v2/batch_normalization_110/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_110/FusedBatchNormV3" -> "inception_resnet_v2/activation_110/Relu"; +"inception_resnet_v2/activation_110/Relu" -> "inception_resnet_v2/conv2d_111/Conv2D"; +"inception_resnet_v2/conv2d_111/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_111/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/conv2d_111/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/conv2d_111/Sigmoid"; +"inception_resnet_v2/conv2d_111/Sigmoid" -> "inception_resnet_v2/conv2d_111/Round"; +"inception_resnet_v2/conv2d_111/Round" -> "inception_resnet_v2/conv2d_111/mul"; +"inception_resnet_v2/conv2d_111/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_111/ReadVariableOp"; +"inception_resnet_v2/conv2d_111/ReadVariableOp" -> "inception_resnet_v2/conv2d_111/mul"; +"inception_resnet_v2/conv2d_111/mul" -> "inception_resnet_v2/conv2d_111/Conv2D"; +"inception_resnet_v2/conv2d_111/Conv2D" -> "inception_resnet_v2/batch_normalization_111/FusedBatchNormV3"; +"inception_resnet_v2/conv2d_108/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_108/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/conv2d_108/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/conv2d_108/Sigmoid"; +"inception_resnet_v2/conv2d_108/Sigmoid" -> "inception_resnet_v2/conv2d_108/Round"; +"inception_resnet_v2/conv2d_108/Round" -> "inception_resnet_v2/conv2d_108/mul"; +"inception_resnet_v2/conv2d_108/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_108/ReadVariableOp"; +"inception_resnet_v2/conv2d_108/ReadVariableOp" -> "inception_resnet_v2/conv2d_108/mul"; +"inception_resnet_v2/conv2d_108/mul" -> "inception_resnet_v2/conv2d_108/Conv2D"; +"inception_resnet_v2/conv2d_108/Conv2D" -> "inception_resnet_v2/batch_normalization_108/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_111/Const" -> "inception_resnet_v2/batch_normalization_111/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_111/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_111/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_111/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_111/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_111/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_111/FusedBatchNormV3/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_111/FusedBatchNormV3/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_111/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_111/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_resnet_v2/batch_normalization_111/FusedBatchNormV3/ReadVariableOp_1"; +"inception_resnet_v2/batch_normalization_111/FusedBatchNormV3/ReadVariableOp_1" -> "inception_resnet_v2/batch_normalization_111/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_111/FusedBatchNormV3" -> "inception_resnet_v2/activation_111/Relu"; 
+"inception_resnet_v2/batch_normalization_108/Const" -> "inception_resnet_v2/batch_normalization_108/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_108/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_108/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_108/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_108/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_108/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_108/FusedBatchNormV3/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_108/FusedBatchNormV3/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_108/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_108/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_resnet_v2/batch_normalization_108/FusedBatchNormV3/ReadVariableOp_1"; +"inception_resnet_v2/batch_normalization_108/FusedBatchNormV3/ReadVariableOp_1" -> "inception_resnet_v2/batch_normalization_108/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_108/FusedBatchNormV3" -> "inception_resnet_v2/activation_108/Relu"; +"inception_resnet_v2/activation_108/Relu" -> "inception_resnet_v2/block17_9_mixed/concat"; +"inception_resnet_v2/activation_111/Relu" -> "inception_resnet_v2/block17_9_mixed/concat"; +"inception_resnet_v2/block17_9_mixed/concat/axis" -> "inception_resnet_v2/block17_9_mixed/concat"; +"inception_resnet_v2/block17_9_mixed/concat" -> "inception_resnet_v2/block17_9_conv/Conv2D"; +"inception_resnet_v2/block17_9_conv/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/block17_9_conv/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/block17_9_conv/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/block17_9_conv/Sigmoid"; +"inception_resnet_v2/block17_9_conv/Sigmoid" -> "inception_resnet_v2/block17_9_conv/Round"; +"inception_resnet_v2/block17_9_conv/Round" -> "inception_resnet_v2/block17_9_conv/mul"; +"inception_resnet_v2/block17_9_conv/ReadVariableOp/resource" -> "inception_resnet_v2/block17_9_conv/ReadVariableOp"; +"inception_resnet_v2/block17_9_conv/ReadVariableOp" -> "inception_resnet_v2/block17_9_conv/mul"; +"inception_resnet_v2/block17_9_conv/mul" -> "inception_resnet_v2/block17_9_conv/Conv2D"; +"inception_resnet_v2/block17_9_conv/Conv2D" -> "inception_resnet_v2/block17_9_conv/BiasAdd"; +"inception_resnet_v2/block17_9_conv/BiasAdd/ReadVariableOp/resource" -> "inception_resnet_v2/block17_9_conv/BiasAdd/ReadVariableOp"; +"inception_resnet_v2/block17_9_conv/BiasAdd/ReadVariableOp" -> "inception_resnet_v2/block17_9_conv/BiasAdd"; +"inception_resnet_v2/block17_9_conv/BiasAdd" -> "inception_resnet_v2/custom_scale_layer_18/mul"; +"inception_resnet_v2/custom_scale_layer_18/mul/y" -> "inception_resnet_v2/custom_scale_layer_18/mul"; +"inception_resnet_v2/custom_scale_layer_18/mul" -> "inception_resnet_v2/custom_scale_layer_18/add"; +"inception_resnet_v2/custom_scale_layer_18/add" -> "inception_resnet_v2/block17_9_ac/Relu"; +"inception_resnet_v2/block17_9_ac/Relu" -> "inception_resnet_v2/conv2d_113/Conv2D"; +"inception_resnet_v2/block17_9_ac/Relu" -> "inception_resnet_v2/conv2d_112/Conv2D"; +"inception_resnet_v2/block17_9_ac/Relu" -> "inception_resnet_v2/custom_scale_layer_19/add"; +"inception_resnet_v2/conv2d_113/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_113/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/conv2d_113/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/conv2d_113/Sigmoid"; +"inception_resnet_v2/conv2d_113/Sigmoid" -> "inception_resnet_v2/conv2d_113/Round"; 
+"inception_resnet_v2/conv2d_113/Round" -> "inception_resnet_v2/conv2d_113/mul"; +"inception_resnet_v2/conv2d_113/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_113/ReadVariableOp"; +"inception_resnet_v2/conv2d_113/ReadVariableOp" -> "inception_resnet_v2/conv2d_113/mul"; +"inception_resnet_v2/conv2d_113/mul" -> "inception_resnet_v2/conv2d_113/Conv2D"; +"inception_resnet_v2/conv2d_113/Conv2D" -> "inception_resnet_v2/batch_normalization_113/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_113/Const" -> "inception_resnet_v2/batch_normalization_113/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_113/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_113/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_113/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_113/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_113/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_113/FusedBatchNormV3/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_113/FusedBatchNormV3/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_113/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_113/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_resnet_v2/batch_normalization_113/FusedBatchNormV3/ReadVariableOp_1"; +"inception_resnet_v2/batch_normalization_113/FusedBatchNormV3/ReadVariableOp_1" -> "inception_resnet_v2/batch_normalization_113/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_113/FusedBatchNormV3" -> "inception_resnet_v2/activation_113/Relu"; +"inception_resnet_v2/activation_113/Relu" -> "inception_resnet_v2/conv2d_114/Conv2D"; +"inception_resnet_v2/conv2d_114/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_114/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/conv2d_114/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/conv2d_114/Sigmoid"; +"inception_resnet_v2/conv2d_114/Sigmoid" -> "inception_resnet_v2/conv2d_114/Round"; +"inception_resnet_v2/conv2d_114/Round" -> "inception_resnet_v2/conv2d_114/mul"; +"inception_resnet_v2/conv2d_114/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_114/ReadVariableOp"; +"inception_resnet_v2/conv2d_114/ReadVariableOp" -> "inception_resnet_v2/conv2d_114/mul"; +"inception_resnet_v2/conv2d_114/mul" -> "inception_resnet_v2/conv2d_114/Conv2D"; +"inception_resnet_v2/conv2d_114/Conv2D" -> "inception_resnet_v2/batch_normalization_114/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_114/Const" -> "inception_resnet_v2/batch_normalization_114/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_114/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_114/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_114/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_114/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_114/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_114/FusedBatchNormV3/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_114/FusedBatchNormV3/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_114/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_114/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_resnet_v2/batch_normalization_114/FusedBatchNormV3/ReadVariableOp_1"; +"inception_resnet_v2/batch_normalization_114/FusedBatchNormV3/ReadVariableOp_1" -> "inception_resnet_v2/batch_normalization_114/FusedBatchNormV3"; 
+"inception_resnet_v2/batch_normalization_114/FusedBatchNormV3" -> "inception_resnet_v2/activation_114/Relu"; +"inception_resnet_v2/activation_114/Relu" -> "inception_resnet_v2/conv2d_115/Conv2D"; +"inception_resnet_v2/conv2d_115/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_115/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/conv2d_115/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/conv2d_115/Sigmoid"; +"inception_resnet_v2/conv2d_115/Sigmoid" -> "inception_resnet_v2/conv2d_115/Round"; +"inception_resnet_v2/conv2d_115/Round" -> "inception_resnet_v2/conv2d_115/mul"; +"inception_resnet_v2/conv2d_115/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_115/ReadVariableOp"; +"inception_resnet_v2/conv2d_115/ReadVariableOp" -> "inception_resnet_v2/conv2d_115/mul"; +"inception_resnet_v2/conv2d_115/mul" -> "inception_resnet_v2/conv2d_115/Conv2D"; +"inception_resnet_v2/conv2d_115/Conv2D" -> "inception_resnet_v2/batch_normalization_115/FusedBatchNormV3"; +"inception_resnet_v2/conv2d_112/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_112/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/conv2d_112/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/conv2d_112/Sigmoid"; +"inception_resnet_v2/conv2d_112/Sigmoid" -> "inception_resnet_v2/conv2d_112/Round"; +"inception_resnet_v2/conv2d_112/Round" -> "inception_resnet_v2/conv2d_112/mul"; +"inception_resnet_v2/conv2d_112/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_112/ReadVariableOp"; +"inception_resnet_v2/conv2d_112/ReadVariableOp" -> "inception_resnet_v2/conv2d_112/mul"; +"inception_resnet_v2/conv2d_112/mul" -> "inception_resnet_v2/conv2d_112/Conv2D"; +"inception_resnet_v2/conv2d_112/Conv2D" -> "inception_resnet_v2/batch_normalization_112/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_115/Const" -> "inception_resnet_v2/batch_normalization_115/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_115/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_115/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_115/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_115/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_115/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_115/FusedBatchNormV3/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_115/FusedBatchNormV3/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_115/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_115/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_resnet_v2/batch_normalization_115/FusedBatchNormV3/ReadVariableOp_1"; +"inception_resnet_v2/batch_normalization_115/FusedBatchNormV3/ReadVariableOp_1" -> "inception_resnet_v2/batch_normalization_115/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_115/FusedBatchNormV3" -> "inception_resnet_v2/activation_115/Relu"; +"inception_resnet_v2/batch_normalization_112/Const" -> "inception_resnet_v2/batch_normalization_112/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_112/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_112/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_112/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_112/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_112/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_112/FusedBatchNormV3/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_112/FusedBatchNormV3/ReadVariableOp" -> 
"inception_resnet_v2/batch_normalization_112/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_112/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_resnet_v2/batch_normalization_112/FusedBatchNormV3/ReadVariableOp_1"; +"inception_resnet_v2/batch_normalization_112/FusedBatchNormV3/ReadVariableOp_1" -> "inception_resnet_v2/batch_normalization_112/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_112/FusedBatchNormV3" -> "inception_resnet_v2/activation_112/Relu"; +"inception_resnet_v2/activation_112/Relu" -> "inception_resnet_v2/block17_10_mixed/concat"; +"inception_resnet_v2/activation_115/Relu" -> "inception_resnet_v2/block17_10_mixed/concat"; +"inception_resnet_v2/block17_10_mixed/concat/axis" -> "inception_resnet_v2/block17_10_mixed/concat"; +"inception_resnet_v2/block17_10_mixed/concat" -> "inception_resnet_v2/block17_10_conv/Conv2D"; +"inception_resnet_v2/block17_10_conv/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/block17_10_conv/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/block17_10_conv/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/block17_10_conv/Sigmoid"; +"inception_resnet_v2/block17_10_conv/Sigmoid" -> "inception_resnet_v2/block17_10_conv/Round"; +"inception_resnet_v2/block17_10_conv/Round" -> "inception_resnet_v2/block17_10_conv/mul"; +"inception_resnet_v2/block17_10_conv/ReadVariableOp/resource" -> "inception_resnet_v2/block17_10_conv/ReadVariableOp"; +"inception_resnet_v2/block17_10_conv/ReadVariableOp" -> "inception_resnet_v2/block17_10_conv/mul"; +"inception_resnet_v2/block17_10_conv/mul" -> "inception_resnet_v2/block17_10_conv/Conv2D"; +"inception_resnet_v2/block17_10_conv/Conv2D" -> "inception_resnet_v2/block17_10_conv/BiasAdd"; +"inception_resnet_v2/block17_10_conv/BiasAdd/ReadVariableOp/resource" -> "inception_resnet_v2/block17_10_conv/BiasAdd/ReadVariableOp"; +"inception_resnet_v2/block17_10_conv/BiasAdd/ReadVariableOp" -> "inception_resnet_v2/block17_10_conv/BiasAdd"; +"inception_resnet_v2/block17_10_conv/BiasAdd" -> "inception_resnet_v2/custom_scale_layer_19/mul"; +"inception_resnet_v2/custom_scale_layer_19/mul/y" -> "inception_resnet_v2/custom_scale_layer_19/mul"; +"inception_resnet_v2/custom_scale_layer_19/mul" -> "inception_resnet_v2/custom_scale_layer_19/add"; +"inception_resnet_v2/custom_scale_layer_19/add" -> "inception_resnet_v2/block17_10_ac/Relu"; +"inception_resnet_v2/block17_10_ac/Relu" -> "inception_resnet_v2/conv2d_117/Conv2D"; +"inception_resnet_v2/block17_10_ac/Relu" -> "inception_resnet_v2/conv2d_116/Conv2D"; +"inception_resnet_v2/block17_10_ac/Relu" -> "inception_resnet_v2/custom_scale_layer_20/add"; +"inception_resnet_v2/conv2d_117/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_117/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/conv2d_117/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/conv2d_117/Sigmoid"; +"inception_resnet_v2/conv2d_117/Sigmoid" -> "inception_resnet_v2/conv2d_117/Round"; +"inception_resnet_v2/conv2d_117/Round" -> "inception_resnet_v2/conv2d_117/mul"; +"inception_resnet_v2/conv2d_117/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_117/ReadVariableOp"; +"inception_resnet_v2/conv2d_117/ReadVariableOp" -> "inception_resnet_v2/conv2d_117/mul"; +"inception_resnet_v2/conv2d_117/mul" -> "inception_resnet_v2/conv2d_117/Conv2D"; +"inception_resnet_v2/conv2d_117/Conv2D" -> "inception_resnet_v2/batch_normalization_117/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_117/Const" -> "inception_resnet_v2/batch_normalization_117/FusedBatchNormV3"; 
+"inception_resnet_v2/batch_normalization_117/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_117/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_117/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_117/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_117/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_117/FusedBatchNormV3/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_117/FusedBatchNormV3/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_117/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_117/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_resnet_v2/batch_normalization_117/FusedBatchNormV3/ReadVariableOp_1"; +"inception_resnet_v2/batch_normalization_117/FusedBatchNormV3/ReadVariableOp_1" -> "inception_resnet_v2/batch_normalization_117/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_117/FusedBatchNormV3" -> "inception_resnet_v2/activation_117/Relu"; +"inception_resnet_v2/activation_117/Relu" -> "inception_resnet_v2/conv2d_118/Conv2D"; +"inception_resnet_v2/conv2d_118/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_118/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/conv2d_118/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/conv2d_118/Sigmoid"; +"inception_resnet_v2/conv2d_118/Sigmoid" -> "inception_resnet_v2/conv2d_118/Round"; +"inception_resnet_v2/conv2d_118/Round" -> "inception_resnet_v2/conv2d_118/mul"; +"inception_resnet_v2/conv2d_118/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_118/ReadVariableOp"; +"inception_resnet_v2/conv2d_118/ReadVariableOp" -> "inception_resnet_v2/conv2d_118/mul"; +"inception_resnet_v2/conv2d_118/mul" -> "inception_resnet_v2/conv2d_118/Conv2D"; +"inception_resnet_v2/conv2d_118/Conv2D" -> "inception_resnet_v2/batch_normalization_118/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_118/Const" -> "inception_resnet_v2/batch_normalization_118/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_118/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_118/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_118/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_118/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_118/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_118/FusedBatchNormV3/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_118/FusedBatchNormV3/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_118/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_118/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_resnet_v2/batch_normalization_118/FusedBatchNormV3/ReadVariableOp_1"; +"inception_resnet_v2/batch_normalization_118/FusedBatchNormV3/ReadVariableOp_1" -> "inception_resnet_v2/batch_normalization_118/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_118/FusedBatchNormV3" -> "inception_resnet_v2/activation_118/Relu"; +"inception_resnet_v2/activation_118/Relu" -> "inception_resnet_v2/conv2d_119/Conv2D"; +"inception_resnet_v2/conv2d_119/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_119/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/conv2d_119/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/conv2d_119/Sigmoid"; +"inception_resnet_v2/conv2d_119/Sigmoid" -> "inception_resnet_v2/conv2d_119/Round"; +"inception_resnet_v2/conv2d_119/Round" -> "inception_resnet_v2/conv2d_119/mul"; 
+"inception_resnet_v2/conv2d_119/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_119/ReadVariableOp"; +"inception_resnet_v2/conv2d_119/ReadVariableOp" -> "inception_resnet_v2/conv2d_119/mul"; +"inception_resnet_v2/conv2d_119/mul" -> "inception_resnet_v2/conv2d_119/Conv2D"; +"inception_resnet_v2/conv2d_119/Conv2D" -> "inception_resnet_v2/batch_normalization_119/FusedBatchNormV3"; +"inception_resnet_v2/conv2d_116/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_116/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/conv2d_116/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/conv2d_116/Sigmoid"; +"inception_resnet_v2/conv2d_116/Sigmoid" -> "inception_resnet_v2/conv2d_116/Round"; +"inception_resnet_v2/conv2d_116/Round" -> "inception_resnet_v2/conv2d_116/mul"; +"inception_resnet_v2/conv2d_116/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_116/ReadVariableOp"; +"inception_resnet_v2/conv2d_116/ReadVariableOp" -> "inception_resnet_v2/conv2d_116/mul"; +"inception_resnet_v2/conv2d_116/mul" -> "inception_resnet_v2/conv2d_116/Conv2D"; +"inception_resnet_v2/conv2d_116/Conv2D" -> "inception_resnet_v2/batch_normalization_116/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_119/Const" -> "inception_resnet_v2/batch_normalization_119/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_119/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_119/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_119/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_119/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_119/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_119/FusedBatchNormV3/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_119/FusedBatchNormV3/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_119/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_119/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_resnet_v2/batch_normalization_119/FusedBatchNormV3/ReadVariableOp_1"; +"inception_resnet_v2/batch_normalization_119/FusedBatchNormV3/ReadVariableOp_1" -> "inception_resnet_v2/batch_normalization_119/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_119/FusedBatchNormV3" -> "inception_resnet_v2/activation_119/Relu"; +"inception_resnet_v2/batch_normalization_116/Const" -> "inception_resnet_v2/batch_normalization_116/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_116/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_116/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_116/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_116/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_116/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_116/FusedBatchNormV3/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_116/FusedBatchNormV3/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_116/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_116/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_resnet_v2/batch_normalization_116/FusedBatchNormV3/ReadVariableOp_1"; +"inception_resnet_v2/batch_normalization_116/FusedBatchNormV3/ReadVariableOp_1" -> "inception_resnet_v2/batch_normalization_116/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_116/FusedBatchNormV3" -> "inception_resnet_v2/activation_116/Relu"; +"inception_resnet_v2/activation_116/Relu" -> "inception_resnet_v2/block17_11_mixed/concat"; 
+"inception_resnet_v2/activation_119/Relu" -> "inception_resnet_v2/block17_11_mixed/concat"; +"inception_resnet_v2/block17_11_mixed/concat/axis" -> "inception_resnet_v2/block17_11_mixed/concat"; +"inception_resnet_v2/block17_11_mixed/concat" -> "inception_resnet_v2/block17_11_conv/Conv2D"; +"inception_resnet_v2/block17_11_conv/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/block17_11_conv/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/block17_11_conv/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/block17_11_conv/Sigmoid"; +"inception_resnet_v2/block17_11_conv/Sigmoid" -> "inception_resnet_v2/block17_11_conv/Round"; +"inception_resnet_v2/block17_11_conv/Round" -> "inception_resnet_v2/block17_11_conv/mul"; +"inception_resnet_v2/block17_11_conv/ReadVariableOp/resource" -> "inception_resnet_v2/block17_11_conv/ReadVariableOp"; +"inception_resnet_v2/block17_11_conv/ReadVariableOp" -> "inception_resnet_v2/block17_11_conv/mul"; +"inception_resnet_v2/block17_11_conv/mul" -> "inception_resnet_v2/block17_11_conv/Conv2D"; +"inception_resnet_v2/block17_11_conv/Conv2D" -> "inception_resnet_v2/block17_11_conv/BiasAdd"; +"inception_resnet_v2/block17_11_conv/BiasAdd/ReadVariableOp/resource" -> "inception_resnet_v2/block17_11_conv/BiasAdd/ReadVariableOp"; +"inception_resnet_v2/block17_11_conv/BiasAdd/ReadVariableOp" -> "inception_resnet_v2/block17_11_conv/BiasAdd"; +"inception_resnet_v2/block17_11_conv/BiasAdd" -> "inception_resnet_v2/custom_scale_layer_20/mul"; +"inception_resnet_v2/custom_scale_layer_20/mul/y" -> "inception_resnet_v2/custom_scale_layer_20/mul"; +"inception_resnet_v2/custom_scale_layer_20/mul" -> "inception_resnet_v2/custom_scale_layer_20/add"; +"inception_resnet_v2/custom_scale_layer_20/add" -> "inception_resnet_v2/block17_11_ac/Relu"; +"inception_resnet_v2/block17_11_ac/Relu" -> "inception_resnet_v2/conv2d_121/Conv2D"; +"inception_resnet_v2/block17_11_ac/Relu" -> "inception_resnet_v2/conv2d_120/Conv2D"; +"inception_resnet_v2/block17_11_ac/Relu" -> "inception_resnet_v2/custom_scale_layer_21/add"; +"inception_resnet_v2/conv2d_121/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_121/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/conv2d_121/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/conv2d_121/Sigmoid"; +"inception_resnet_v2/conv2d_121/Sigmoid" -> "inception_resnet_v2/conv2d_121/Round"; +"inception_resnet_v2/conv2d_121/Round" -> "inception_resnet_v2/conv2d_121/mul"; +"inception_resnet_v2/conv2d_121/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_121/ReadVariableOp"; +"inception_resnet_v2/conv2d_121/ReadVariableOp" -> "inception_resnet_v2/conv2d_121/mul"; +"inception_resnet_v2/conv2d_121/mul" -> "inception_resnet_v2/conv2d_121/Conv2D"; +"inception_resnet_v2/conv2d_121/Conv2D" -> "inception_resnet_v2/batch_normalization_121/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_121/Const" -> "inception_resnet_v2/batch_normalization_121/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_121/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_121/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_121/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_121/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_121/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_121/FusedBatchNormV3/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_121/FusedBatchNormV3/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_121/FusedBatchNormV3"; 
+"inception_resnet_v2/batch_normalization_121/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_resnet_v2/batch_normalization_121/FusedBatchNormV3/ReadVariableOp_1"; +"inception_resnet_v2/batch_normalization_121/FusedBatchNormV3/ReadVariableOp_1" -> "inception_resnet_v2/batch_normalization_121/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_121/FusedBatchNormV3" -> "inception_resnet_v2/activation_121/Relu"; +"inception_resnet_v2/activation_121/Relu" -> "inception_resnet_v2/conv2d_122/Conv2D"; +"inception_resnet_v2/conv2d_122/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_122/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/conv2d_122/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/conv2d_122/Sigmoid"; +"inception_resnet_v2/conv2d_122/Sigmoid" -> "inception_resnet_v2/conv2d_122/Round"; +"inception_resnet_v2/conv2d_122/Round" -> "inception_resnet_v2/conv2d_122/mul"; +"inception_resnet_v2/conv2d_122/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_122/ReadVariableOp"; +"inception_resnet_v2/conv2d_122/ReadVariableOp" -> "inception_resnet_v2/conv2d_122/mul"; +"inception_resnet_v2/conv2d_122/mul" -> "inception_resnet_v2/conv2d_122/Conv2D"; +"inception_resnet_v2/conv2d_122/Conv2D" -> "inception_resnet_v2/batch_normalization_122/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_122/Const" -> "inception_resnet_v2/batch_normalization_122/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_122/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_122/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_122/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_122/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_122/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_122/FusedBatchNormV3/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_122/FusedBatchNormV3/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_122/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_122/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_resnet_v2/batch_normalization_122/FusedBatchNormV3/ReadVariableOp_1"; +"inception_resnet_v2/batch_normalization_122/FusedBatchNormV3/ReadVariableOp_1" -> "inception_resnet_v2/batch_normalization_122/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_122/FusedBatchNormV3" -> "inception_resnet_v2/activation_122/Relu"; +"inception_resnet_v2/activation_122/Relu" -> "inception_resnet_v2/conv2d_123/Conv2D"; +"inception_resnet_v2/conv2d_123/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_123/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/conv2d_123/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/conv2d_123/Sigmoid"; +"inception_resnet_v2/conv2d_123/Sigmoid" -> "inception_resnet_v2/conv2d_123/Round"; +"inception_resnet_v2/conv2d_123/Round" -> "inception_resnet_v2/conv2d_123/mul"; +"inception_resnet_v2/conv2d_123/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_123/ReadVariableOp"; +"inception_resnet_v2/conv2d_123/ReadVariableOp" -> "inception_resnet_v2/conv2d_123/mul"; +"inception_resnet_v2/conv2d_123/mul" -> "inception_resnet_v2/conv2d_123/Conv2D"; +"inception_resnet_v2/conv2d_123/Conv2D" -> "inception_resnet_v2/batch_normalization_123/FusedBatchNormV3"; +"inception_resnet_v2/conv2d_120/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_120/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/conv2d_120/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/conv2d_120/Sigmoid"; 
+"inception_resnet_v2/conv2d_120/Sigmoid" -> "inception_resnet_v2/conv2d_120/Round"; +"inception_resnet_v2/conv2d_120/Round" -> "inception_resnet_v2/conv2d_120/mul"; +"inception_resnet_v2/conv2d_120/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_120/ReadVariableOp"; +"inception_resnet_v2/conv2d_120/ReadVariableOp" -> "inception_resnet_v2/conv2d_120/mul"; +"inception_resnet_v2/conv2d_120/mul" -> "inception_resnet_v2/conv2d_120/Conv2D"; +"inception_resnet_v2/conv2d_120/Conv2D" -> "inception_resnet_v2/batch_normalization_120/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_123/Const" -> "inception_resnet_v2/batch_normalization_123/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_123/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_123/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_123/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_123/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_123/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_123/FusedBatchNormV3/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_123/FusedBatchNormV3/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_123/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_123/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_resnet_v2/batch_normalization_123/FusedBatchNormV3/ReadVariableOp_1"; +"inception_resnet_v2/batch_normalization_123/FusedBatchNormV3/ReadVariableOp_1" -> "inception_resnet_v2/batch_normalization_123/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_123/FusedBatchNormV3" -> "inception_resnet_v2/activation_123/Relu"; +"inception_resnet_v2/batch_normalization_120/Const" -> "inception_resnet_v2/batch_normalization_120/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_120/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_120/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_120/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_120/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_120/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_120/FusedBatchNormV3/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_120/FusedBatchNormV3/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_120/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_120/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_resnet_v2/batch_normalization_120/FusedBatchNormV3/ReadVariableOp_1"; +"inception_resnet_v2/batch_normalization_120/FusedBatchNormV3/ReadVariableOp_1" -> "inception_resnet_v2/batch_normalization_120/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_120/FusedBatchNormV3" -> "inception_resnet_v2/activation_120/Relu"; +"inception_resnet_v2/activation_120/Relu" -> "inception_resnet_v2/block17_12_mixed/concat"; +"inception_resnet_v2/activation_123/Relu" -> "inception_resnet_v2/block17_12_mixed/concat"; +"inception_resnet_v2/block17_12_mixed/concat/axis" -> "inception_resnet_v2/block17_12_mixed/concat"; +"inception_resnet_v2/block17_12_mixed/concat" -> "inception_resnet_v2/block17_12_conv/Conv2D"; +"inception_resnet_v2/block17_12_conv/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/block17_12_conv/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/block17_12_conv/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/block17_12_conv/Sigmoid"; +"inception_resnet_v2/block17_12_conv/Sigmoid" -> "inception_resnet_v2/block17_12_conv/Round"; 
+"inception_resnet_v2/block17_12_conv/Round" -> "inception_resnet_v2/block17_12_conv/mul"; +"inception_resnet_v2/block17_12_conv/ReadVariableOp/resource" -> "inception_resnet_v2/block17_12_conv/ReadVariableOp"; +"inception_resnet_v2/block17_12_conv/ReadVariableOp" -> "inception_resnet_v2/block17_12_conv/mul"; +"inception_resnet_v2/block17_12_conv/mul" -> "inception_resnet_v2/block17_12_conv/Conv2D"; +"inception_resnet_v2/block17_12_conv/Conv2D" -> "inception_resnet_v2/block17_12_conv/BiasAdd"; +"inception_resnet_v2/block17_12_conv/BiasAdd/ReadVariableOp/resource" -> "inception_resnet_v2/block17_12_conv/BiasAdd/ReadVariableOp"; +"inception_resnet_v2/block17_12_conv/BiasAdd/ReadVariableOp" -> "inception_resnet_v2/block17_12_conv/BiasAdd"; +"inception_resnet_v2/block17_12_conv/BiasAdd" -> "inception_resnet_v2/custom_scale_layer_21/mul"; +"inception_resnet_v2/custom_scale_layer_21/mul/y" -> "inception_resnet_v2/custom_scale_layer_21/mul"; +"inception_resnet_v2/custom_scale_layer_21/mul" -> "inception_resnet_v2/custom_scale_layer_21/add"; +"inception_resnet_v2/custom_scale_layer_21/add" -> "inception_resnet_v2/block17_12_ac/Relu"; +"inception_resnet_v2/block17_12_ac/Relu" -> "inception_resnet_v2/conv2d_125/Conv2D"; +"inception_resnet_v2/block17_12_ac/Relu" -> "inception_resnet_v2/conv2d_124/Conv2D"; +"inception_resnet_v2/block17_12_ac/Relu" -> "inception_resnet_v2/custom_scale_layer_22/add"; +"inception_resnet_v2/conv2d_125/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_125/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/conv2d_125/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/conv2d_125/Sigmoid"; +"inception_resnet_v2/conv2d_125/Sigmoid" -> "inception_resnet_v2/conv2d_125/Round"; +"inception_resnet_v2/conv2d_125/Round" -> "inception_resnet_v2/conv2d_125/mul"; +"inception_resnet_v2/conv2d_125/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_125/ReadVariableOp"; +"inception_resnet_v2/conv2d_125/ReadVariableOp" -> "inception_resnet_v2/conv2d_125/mul"; +"inception_resnet_v2/conv2d_125/mul" -> "inception_resnet_v2/conv2d_125/Conv2D"; +"inception_resnet_v2/conv2d_125/Conv2D" -> "inception_resnet_v2/batch_normalization_125/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_125/Const" -> "inception_resnet_v2/batch_normalization_125/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_125/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_125/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_125/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_125/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_125/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_125/FusedBatchNormV3/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_125/FusedBatchNormV3/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_125/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_125/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_resnet_v2/batch_normalization_125/FusedBatchNormV3/ReadVariableOp_1"; +"inception_resnet_v2/batch_normalization_125/FusedBatchNormV3/ReadVariableOp_1" -> "inception_resnet_v2/batch_normalization_125/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_125/FusedBatchNormV3" -> "inception_resnet_v2/activation_125/Relu"; +"inception_resnet_v2/activation_125/Relu" -> "inception_resnet_v2/conv2d_126/Conv2D"; +"inception_resnet_v2/conv2d_126/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_126/Sigmoid/ReadVariableOp"; 
+"inception_resnet_v2/conv2d_126/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/conv2d_126/Sigmoid"; +"inception_resnet_v2/conv2d_126/Sigmoid" -> "inception_resnet_v2/conv2d_126/Round"; +"inception_resnet_v2/conv2d_126/Round" -> "inception_resnet_v2/conv2d_126/mul"; +"inception_resnet_v2/conv2d_126/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_126/ReadVariableOp"; +"inception_resnet_v2/conv2d_126/ReadVariableOp" -> "inception_resnet_v2/conv2d_126/mul"; +"inception_resnet_v2/conv2d_126/mul" -> "inception_resnet_v2/conv2d_126/Conv2D"; +"inception_resnet_v2/conv2d_126/Conv2D" -> "inception_resnet_v2/batch_normalization_126/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_126/Const" -> "inception_resnet_v2/batch_normalization_126/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_126/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_126/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_126/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_126/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_126/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_126/FusedBatchNormV3/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_126/FusedBatchNormV3/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_126/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_126/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_resnet_v2/batch_normalization_126/FusedBatchNormV3/ReadVariableOp_1"; +"inception_resnet_v2/batch_normalization_126/FusedBatchNormV3/ReadVariableOp_1" -> "inception_resnet_v2/batch_normalization_126/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_126/FusedBatchNormV3" -> "inception_resnet_v2/activation_126/Relu"; +"inception_resnet_v2/activation_126/Relu" -> "inception_resnet_v2/conv2d_127/Conv2D"; +"inception_resnet_v2/conv2d_127/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_127/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/conv2d_127/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/conv2d_127/Sigmoid"; +"inception_resnet_v2/conv2d_127/Sigmoid" -> "inception_resnet_v2/conv2d_127/Round"; +"inception_resnet_v2/conv2d_127/Round" -> "inception_resnet_v2/conv2d_127/mul"; +"inception_resnet_v2/conv2d_127/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_127/ReadVariableOp"; +"inception_resnet_v2/conv2d_127/ReadVariableOp" -> "inception_resnet_v2/conv2d_127/mul"; +"inception_resnet_v2/conv2d_127/mul" -> "inception_resnet_v2/conv2d_127/Conv2D"; +"inception_resnet_v2/conv2d_127/Conv2D" -> "inception_resnet_v2/batch_normalization_127/FusedBatchNormV3"; +"inception_resnet_v2/conv2d_124/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_124/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/conv2d_124/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/conv2d_124/Sigmoid"; +"inception_resnet_v2/conv2d_124/Sigmoid" -> "inception_resnet_v2/conv2d_124/Round"; +"inception_resnet_v2/conv2d_124/Round" -> "inception_resnet_v2/conv2d_124/mul"; +"inception_resnet_v2/conv2d_124/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_124/ReadVariableOp"; +"inception_resnet_v2/conv2d_124/ReadVariableOp" -> "inception_resnet_v2/conv2d_124/mul"; +"inception_resnet_v2/conv2d_124/mul" -> "inception_resnet_v2/conv2d_124/Conv2D"; +"inception_resnet_v2/conv2d_124/Conv2D" -> "inception_resnet_v2/batch_normalization_124/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_127/Const" -> 
"inception_resnet_v2/batch_normalization_127/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_127/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_127/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_127/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_127/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_127/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_127/FusedBatchNormV3/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_127/FusedBatchNormV3/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_127/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_127/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_resnet_v2/batch_normalization_127/FusedBatchNormV3/ReadVariableOp_1"; +"inception_resnet_v2/batch_normalization_127/FusedBatchNormV3/ReadVariableOp_1" -> "inception_resnet_v2/batch_normalization_127/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_127/FusedBatchNormV3" -> "inception_resnet_v2/activation_127/Relu"; +"inception_resnet_v2/batch_normalization_124/Const" -> "inception_resnet_v2/batch_normalization_124/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_124/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_124/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_124/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_124/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_124/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_124/FusedBatchNormV3/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_124/FusedBatchNormV3/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_124/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_124/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_resnet_v2/batch_normalization_124/FusedBatchNormV3/ReadVariableOp_1"; +"inception_resnet_v2/batch_normalization_124/FusedBatchNormV3/ReadVariableOp_1" -> "inception_resnet_v2/batch_normalization_124/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_124/FusedBatchNormV3" -> "inception_resnet_v2/activation_124/Relu"; +"inception_resnet_v2/activation_124/Relu" -> "inception_resnet_v2/block17_13_mixed/concat"; +"inception_resnet_v2/activation_127/Relu" -> "inception_resnet_v2/block17_13_mixed/concat"; +"inception_resnet_v2/block17_13_mixed/concat/axis" -> "inception_resnet_v2/block17_13_mixed/concat"; +"inception_resnet_v2/block17_13_mixed/concat" -> "inception_resnet_v2/block17_13_conv/Conv2D"; +"inception_resnet_v2/block17_13_conv/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/block17_13_conv/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/block17_13_conv/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/block17_13_conv/Sigmoid"; +"inception_resnet_v2/block17_13_conv/Sigmoid" -> "inception_resnet_v2/block17_13_conv/Round"; +"inception_resnet_v2/block17_13_conv/Round" -> "inception_resnet_v2/block17_13_conv/mul"; +"inception_resnet_v2/block17_13_conv/ReadVariableOp/resource" -> "inception_resnet_v2/block17_13_conv/ReadVariableOp"; +"inception_resnet_v2/block17_13_conv/ReadVariableOp" -> "inception_resnet_v2/block17_13_conv/mul"; +"inception_resnet_v2/block17_13_conv/mul" -> "inception_resnet_v2/block17_13_conv/Conv2D"; +"inception_resnet_v2/block17_13_conv/Conv2D" -> "inception_resnet_v2/block17_13_conv/BiasAdd"; +"inception_resnet_v2/block17_13_conv/BiasAdd/ReadVariableOp/resource" -> 
"inception_resnet_v2/block17_13_conv/BiasAdd/ReadVariableOp"; +"inception_resnet_v2/block17_13_conv/BiasAdd/ReadVariableOp" -> "inception_resnet_v2/block17_13_conv/BiasAdd"; +"inception_resnet_v2/block17_13_conv/BiasAdd" -> "inception_resnet_v2/custom_scale_layer_22/mul"; +"inception_resnet_v2/custom_scale_layer_22/mul/y" -> "inception_resnet_v2/custom_scale_layer_22/mul"; +"inception_resnet_v2/custom_scale_layer_22/mul" -> "inception_resnet_v2/custom_scale_layer_22/add"; +"inception_resnet_v2/custom_scale_layer_22/add" -> "inception_resnet_v2/block17_13_ac/Relu"; +"inception_resnet_v2/block17_13_ac/Relu" -> "inception_resnet_v2/conv2d_129/Conv2D"; +"inception_resnet_v2/block17_13_ac/Relu" -> "inception_resnet_v2/conv2d_128/Conv2D"; +"inception_resnet_v2/block17_13_ac/Relu" -> "inception_resnet_v2/custom_scale_layer_23/add"; +"inception_resnet_v2/conv2d_129/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_129/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/conv2d_129/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/conv2d_129/Sigmoid"; +"inception_resnet_v2/conv2d_129/Sigmoid" -> "inception_resnet_v2/conv2d_129/Round"; +"inception_resnet_v2/conv2d_129/Round" -> "inception_resnet_v2/conv2d_129/mul"; +"inception_resnet_v2/conv2d_129/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_129/ReadVariableOp"; +"inception_resnet_v2/conv2d_129/ReadVariableOp" -> "inception_resnet_v2/conv2d_129/mul"; +"inception_resnet_v2/conv2d_129/mul" -> "inception_resnet_v2/conv2d_129/Conv2D"; +"inception_resnet_v2/conv2d_129/Conv2D" -> "inception_resnet_v2/batch_normalization_129/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_129/Const" -> "inception_resnet_v2/batch_normalization_129/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_129/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_129/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_129/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_129/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_129/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_129/FusedBatchNormV3/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_129/FusedBatchNormV3/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_129/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_129/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_resnet_v2/batch_normalization_129/FusedBatchNormV3/ReadVariableOp_1"; +"inception_resnet_v2/batch_normalization_129/FusedBatchNormV3/ReadVariableOp_1" -> "inception_resnet_v2/batch_normalization_129/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_129/FusedBatchNormV3" -> "inception_resnet_v2/activation_129/Relu"; +"inception_resnet_v2/activation_129/Relu" -> "inception_resnet_v2/conv2d_130/Conv2D"; +"inception_resnet_v2/conv2d_130/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_130/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/conv2d_130/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/conv2d_130/Sigmoid"; +"inception_resnet_v2/conv2d_130/Sigmoid" -> "inception_resnet_v2/conv2d_130/Round"; +"inception_resnet_v2/conv2d_130/Round" -> "inception_resnet_v2/conv2d_130/mul"; +"inception_resnet_v2/conv2d_130/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_130/ReadVariableOp"; +"inception_resnet_v2/conv2d_130/ReadVariableOp" -> "inception_resnet_v2/conv2d_130/mul"; +"inception_resnet_v2/conv2d_130/mul" -> "inception_resnet_v2/conv2d_130/Conv2D"; 
+"inception_resnet_v2/conv2d_130/Conv2D" -> "inception_resnet_v2/batch_normalization_130/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_130/Const" -> "inception_resnet_v2/batch_normalization_130/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_130/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_130/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_130/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_130/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_130/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_130/FusedBatchNormV3/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_130/FusedBatchNormV3/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_130/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_130/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_resnet_v2/batch_normalization_130/FusedBatchNormV3/ReadVariableOp_1"; +"inception_resnet_v2/batch_normalization_130/FusedBatchNormV3/ReadVariableOp_1" -> "inception_resnet_v2/batch_normalization_130/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_130/FusedBatchNormV3" -> "inception_resnet_v2/activation_130/Relu"; +"inception_resnet_v2/activation_130/Relu" -> "inception_resnet_v2/conv2d_131/Conv2D"; +"inception_resnet_v2/conv2d_131/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_131/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/conv2d_131/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/conv2d_131/Sigmoid"; +"inception_resnet_v2/conv2d_131/Sigmoid" -> "inception_resnet_v2/conv2d_131/Round"; +"inception_resnet_v2/conv2d_131/Round" -> "inception_resnet_v2/conv2d_131/mul"; +"inception_resnet_v2/conv2d_131/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_131/ReadVariableOp"; +"inception_resnet_v2/conv2d_131/ReadVariableOp" -> "inception_resnet_v2/conv2d_131/mul"; +"inception_resnet_v2/conv2d_131/mul" -> "inception_resnet_v2/conv2d_131/Conv2D"; +"inception_resnet_v2/conv2d_131/Conv2D" -> "inception_resnet_v2/batch_normalization_131/FusedBatchNormV3"; +"inception_resnet_v2/conv2d_128/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_128/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/conv2d_128/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/conv2d_128/Sigmoid"; +"inception_resnet_v2/conv2d_128/Sigmoid" -> "inception_resnet_v2/conv2d_128/Round"; +"inception_resnet_v2/conv2d_128/Round" -> "inception_resnet_v2/conv2d_128/mul"; +"inception_resnet_v2/conv2d_128/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_128/ReadVariableOp"; +"inception_resnet_v2/conv2d_128/ReadVariableOp" -> "inception_resnet_v2/conv2d_128/mul"; +"inception_resnet_v2/conv2d_128/mul" -> "inception_resnet_v2/conv2d_128/Conv2D"; +"inception_resnet_v2/conv2d_128/Conv2D" -> "inception_resnet_v2/batch_normalization_128/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_131/Const" -> "inception_resnet_v2/batch_normalization_131/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_131/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_131/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_131/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_131/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_131/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_131/FusedBatchNormV3/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_131/FusedBatchNormV3/ReadVariableOp" -> 
"inception_resnet_v2/batch_normalization_131/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_131/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_resnet_v2/batch_normalization_131/FusedBatchNormV3/ReadVariableOp_1"; +"inception_resnet_v2/batch_normalization_131/FusedBatchNormV3/ReadVariableOp_1" -> "inception_resnet_v2/batch_normalization_131/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_131/FusedBatchNormV3" -> "inception_resnet_v2/activation_131/Relu"; +"inception_resnet_v2/batch_normalization_128/Const" -> "inception_resnet_v2/batch_normalization_128/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_128/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_128/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_128/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_128/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_128/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_128/FusedBatchNormV3/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_128/FusedBatchNormV3/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_128/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_128/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_resnet_v2/batch_normalization_128/FusedBatchNormV3/ReadVariableOp_1"; +"inception_resnet_v2/batch_normalization_128/FusedBatchNormV3/ReadVariableOp_1" -> "inception_resnet_v2/batch_normalization_128/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_128/FusedBatchNormV3" -> "inception_resnet_v2/activation_128/Relu"; +"inception_resnet_v2/activation_128/Relu" -> "inception_resnet_v2/block17_14_mixed/concat"; +"inception_resnet_v2/activation_131/Relu" -> "inception_resnet_v2/block17_14_mixed/concat"; +"inception_resnet_v2/block17_14_mixed/concat/axis" -> "inception_resnet_v2/block17_14_mixed/concat"; +"inception_resnet_v2/block17_14_mixed/concat" -> "inception_resnet_v2/block17_14_conv/Conv2D"; +"inception_resnet_v2/block17_14_conv/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/block17_14_conv/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/block17_14_conv/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/block17_14_conv/Sigmoid"; +"inception_resnet_v2/block17_14_conv/Sigmoid" -> "inception_resnet_v2/block17_14_conv/Round"; +"inception_resnet_v2/block17_14_conv/Round" -> "inception_resnet_v2/block17_14_conv/mul"; +"inception_resnet_v2/block17_14_conv/ReadVariableOp/resource" -> "inception_resnet_v2/block17_14_conv/ReadVariableOp"; +"inception_resnet_v2/block17_14_conv/ReadVariableOp" -> "inception_resnet_v2/block17_14_conv/mul"; +"inception_resnet_v2/block17_14_conv/mul" -> "inception_resnet_v2/block17_14_conv/Conv2D"; +"inception_resnet_v2/block17_14_conv/Conv2D" -> "inception_resnet_v2/block17_14_conv/BiasAdd"; +"inception_resnet_v2/block17_14_conv/BiasAdd/ReadVariableOp/resource" -> "inception_resnet_v2/block17_14_conv/BiasAdd/ReadVariableOp"; +"inception_resnet_v2/block17_14_conv/BiasAdd/ReadVariableOp" -> "inception_resnet_v2/block17_14_conv/BiasAdd"; +"inception_resnet_v2/block17_14_conv/BiasAdd" -> "inception_resnet_v2/custom_scale_layer_23/mul"; +"inception_resnet_v2/custom_scale_layer_23/mul/y" -> "inception_resnet_v2/custom_scale_layer_23/mul"; +"inception_resnet_v2/custom_scale_layer_23/mul" -> "inception_resnet_v2/custom_scale_layer_23/add"; +"inception_resnet_v2/custom_scale_layer_23/add" -> "inception_resnet_v2/block17_14_ac/Relu"; +"inception_resnet_v2/block17_14_ac/Relu" -> 
"inception_resnet_v2/conv2d_133/Conv2D"; +"inception_resnet_v2/block17_14_ac/Relu" -> "inception_resnet_v2/conv2d_132/Conv2D"; +"inception_resnet_v2/block17_14_ac/Relu" -> "inception_resnet_v2/custom_scale_layer_24/add"; +"inception_resnet_v2/conv2d_133/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_133/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/conv2d_133/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/conv2d_133/Sigmoid"; +"inception_resnet_v2/conv2d_133/Sigmoid" -> "inception_resnet_v2/conv2d_133/Round"; +"inception_resnet_v2/conv2d_133/Round" -> "inception_resnet_v2/conv2d_133/mul"; +"inception_resnet_v2/conv2d_133/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_133/ReadVariableOp"; +"inception_resnet_v2/conv2d_133/ReadVariableOp" -> "inception_resnet_v2/conv2d_133/mul"; +"inception_resnet_v2/conv2d_133/mul" -> "inception_resnet_v2/conv2d_133/Conv2D"; +"inception_resnet_v2/conv2d_133/Conv2D" -> "inception_resnet_v2/batch_normalization_133/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_133/Const" -> "inception_resnet_v2/batch_normalization_133/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_133/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_133/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_133/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_133/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_133/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_133/FusedBatchNormV3/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_133/FusedBatchNormV3/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_133/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_133/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_resnet_v2/batch_normalization_133/FusedBatchNormV3/ReadVariableOp_1"; +"inception_resnet_v2/batch_normalization_133/FusedBatchNormV3/ReadVariableOp_1" -> "inception_resnet_v2/batch_normalization_133/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_133/FusedBatchNormV3" -> "inception_resnet_v2/activation_133/Relu"; +"inception_resnet_v2/activation_133/Relu" -> "inception_resnet_v2/conv2d_134/Conv2D"; +"inception_resnet_v2/conv2d_134/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_134/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/conv2d_134/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/conv2d_134/Sigmoid"; +"inception_resnet_v2/conv2d_134/Sigmoid" -> "inception_resnet_v2/conv2d_134/Round"; +"inception_resnet_v2/conv2d_134/Round" -> "inception_resnet_v2/conv2d_134/mul"; +"inception_resnet_v2/conv2d_134/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_134/ReadVariableOp"; +"inception_resnet_v2/conv2d_134/ReadVariableOp" -> "inception_resnet_v2/conv2d_134/mul"; +"inception_resnet_v2/conv2d_134/mul" -> "inception_resnet_v2/conv2d_134/Conv2D"; +"inception_resnet_v2/conv2d_134/Conv2D" -> "inception_resnet_v2/batch_normalization_134/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_134/Const" -> "inception_resnet_v2/batch_normalization_134/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_134/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_134/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_134/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_134/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_134/FusedBatchNormV3/ReadVariableOp/resource" -> 
"inception_resnet_v2/batch_normalization_134/FusedBatchNormV3/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_134/FusedBatchNormV3/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_134/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_134/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_resnet_v2/batch_normalization_134/FusedBatchNormV3/ReadVariableOp_1"; +"inception_resnet_v2/batch_normalization_134/FusedBatchNormV3/ReadVariableOp_1" -> "inception_resnet_v2/batch_normalization_134/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_134/FusedBatchNormV3" -> "inception_resnet_v2/activation_134/Relu"; +"inception_resnet_v2/activation_134/Relu" -> "inception_resnet_v2/conv2d_135/Conv2D"; +"inception_resnet_v2/conv2d_135/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_135/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/conv2d_135/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/conv2d_135/Sigmoid"; +"inception_resnet_v2/conv2d_135/Sigmoid" -> "inception_resnet_v2/conv2d_135/Round"; +"inception_resnet_v2/conv2d_135/Round" -> "inception_resnet_v2/conv2d_135/mul"; +"inception_resnet_v2/conv2d_135/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_135/ReadVariableOp"; +"inception_resnet_v2/conv2d_135/ReadVariableOp" -> "inception_resnet_v2/conv2d_135/mul"; +"inception_resnet_v2/conv2d_135/mul" -> "inception_resnet_v2/conv2d_135/Conv2D"; +"inception_resnet_v2/conv2d_135/Conv2D" -> "inception_resnet_v2/batch_normalization_135/FusedBatchNormV3"; +"inception_resnet_v2/conv2d_132/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_132/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/conv2d_132/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/conv2d_132/Sigmoid"; +"inception_resnet_v2/conv2d_132/Sigmoid" -> "inception_resnet_v2/conv2d_132/Round"; +"inception_resnet_v2/conv2d_132/Round" -> "inception_resnet_v2/conv2d_132/mul"; +"inception_resnet_v2/conv2d_132/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_132/ReadVariableOp"; +"inception_resnet_v2/conv2d_132/ReadVariableOp" -> "inception_resnet_v2/conv2d_132/mul"; +"inception_resnet_v2/conv2d_132/mul" -> "inception_resnet_v2/conv2d_132/Conv2D"; +"inception_resnet_v2/conv2d_132/Conv2D" -> "inception_resnet_v2/batch_normalization_132/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_135/Const" -> "inception_resnet_v2/batch_normalization_135/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_135/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_135/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_135/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_135/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_135/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_135/FusedBatchNormV3/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_135/FusedBatchNormV3/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_135/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_135/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_resnet_v2/batch_normalization_135/FusedBatchNormV3/ReadVariableOp_1"; +"inception_resnet_v2/batch_normalization_135/FusedBatchNormV3/ReadVariableOp_1" -> "inception_resnet_v2/batch_normalization_135/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_135/FusedBatchNormV3" -> "inception_resnet_v2/activation_135/Relu"; +"inception_resnet_v2/batch_normalization_132/Const" -> 
"inception_resnet_v2/batch_normalization_132/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_132/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_132/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_132/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_132/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_132/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_132/FusedBatchNormV3/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_132/FusedBatchNormV3/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_132/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_132/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_resnet_v2/batch_normalization_132/FusedBatchNormV3/ReadVariableOp_1"; +"inception_resnet_v2/batch_normalization_132/FusedBatchNormV3/ReadVariableOp_1" -> "inception_resnet_v2/batch_normalization_132/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_132/FusedBatchNormV3" -> "inception_resnet_v2/activation_132/Relu"; +"inception_resnet_v2/activation_132/Relu" -> "inception_resnet_v2/block17_15_mixed/concat"; +"inception_resnet_v2/activation_135/Relu" -> "inception_resnet_v2/block17_15_mixed/concat"; +"inception_resnet_v2/block17_15_mixed/concat/axis" -> "inception_resnet_v2/block17_15_mixed/concat"; +"inception_resnet_v2/block17_15_mixed/concat" -> "inception_resnet_v2/block17_15_conv/Conv2D"; +"inception_resnet_v2/block17_15_conv/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/block17_15_conv/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/block17_15_conv/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/block17_15_conv/Sigmoid"; +"inception_resnet_v2/block17_15_conv/Sigmoid" -> "inception_resnet_v2/block17_15_conv/Round"; +"inception_resnet_v2/block17_15_conv/Round" -> "inception_resnet_v2/block17_15_conv/mul"; +"inception_resnet_v2/block17_15_conv/ReadVariableOp/resource" -> "inception_resnet_v2/block17_15_conv/ReadVariableOp"; +"inception_resnet_v2/block17_15_conv/ReadVariableOp" -> "inception_resnet_v2/block17_15_conv/mul"; +"inception_resnet_v2/block17_15_conv/mul" -> "inception_resnet_v2/block17_15_conv/Conv2D"; +"inception_resnet_v2/block17_15_conv/Conv2D" -> "inception_resnet_v2/block17_15_conv/BiasAdd"; +"inception_resnet_v2/block17_15_conv/BiasAdd/ReadVariableOp/resource" -> "inception_resnet_v2/block17_15_conv/BiasAdd/ReadVariableOp"; +"inception_resnet_v2/block17_15_conv/BiasAdd/ReadVariableOp" -> "inception_resnet_v2/block17_15_conv/BiasAdd"; +"inception_resnet_v2/block17_15_conv/BiasAdd" -> "inception_resnet_v2/custom_scale_layer_24/mul"; +"inception_resnet_v2/custom_scale_layer_24/mul/y" -> "inception_resnet_v2/custom_scale_layer_24/mul"; +"inception_resnet_v2/custom_scale_layer_24/mul" -> "inception_resnet_v2/custom_scale_layer_24/add"; +"inception_resnet_v2/custom_scale_layer_24/add" -> "inception_resnet_v2/block17_15_ac/Relu"; +"inception_resnet_v2/block17_15_ac/Relu" -> "inception_resnet_v2/conv2d_137/Conv2D"; +"inception_resnet_v2/block17_15_ac/Relu" -> "inception_resnet_v2/conv2d_136/Conv2D"; +"inception_resnet_v2/block17_15_ac/Relu" -> "inception_resnet_v2/custom_scale_layer_25/add"; +"inception_resnet_v2/conv2d_137/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_137/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/conv2d_137/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/conv2d_137/Sigmoid"; +"inception_resnet_v2/conv2d_137/Sigmoid" -> "inception_resnet_v2/conv2d_137/Round"; 
+"inception_resnet_v2/conv2d_137/Round" -> "inception_resnet_v2/conv2d_137/mul"; +"inception_resnet_v2/conv2d_137/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_137/ReadVariableOp"; +"inception_resnet_v2/conv2d_137/ReadVariableOp" -> "inception_resnet_v2/conv2d_137/mul"; +"inception_resnet_v2/conv2d_137/mul" -> "inception_resnet_v2/conv2d_137/Conv2D"; +"inception_resnet_v2/conv2d_137/Conv2D" -> "inception_resnet_v2/batch_normalization_137/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_137/Const" -> "inception_resnet_v2/batch_normalization_137/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_137/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_137/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_137/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_137/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_137/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_137/FusedBatchNormV3/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_137/FusedBatchNormV3/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_137/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_137/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_resnet_v2/batch_normalization_137/FusedBatchNormV3/ReadVariableOp_1"; +"inception_resnet_v2/batch_normalization_137/FusedBatchNormV3/ReadVariableOp_1" -> "inception_resnet_v2/batch_normalization_137/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_137/FusedBatchNormV3" -> "inception_resnet_v2/activation_137/Relu"; +"inception_resnet_v2/activation_137/Relu" -> "inception_resnet_v2/conv2d_138/Conv2D"; +"inception_resnet_v2/conv2d_138/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_138/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/conv2d_138/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/conv2d_138/Sigmoid"; +"inception_resnet_v2/conv2d_138/Sigmoid" -> "inception_resnet_v2/conv2d_138/Round"; +"inception_resnet_v2/conv2d_138/Round" -> "inception_resnet_v2/conv2d_138/mul"; +"inception_resnet_v2/conv2d_138/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_138/ReadVariableOp"; +"inception_resnet_v2/conv2d_138/ReadVariableOp" -> "inception_resnet_v2/conv2d_138/mul"; +"inception_resnet_v2/conv2d_138/mul" -> "inception_resnet_v2/conv2d_138/Conv2D"; +"inception_resnet_v2/conv2d_138/Conv2D" -> "inception_resnet_v2/batch_normalization_138/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_138/Const" -> "inception_resnet_v2/batch_normalization_138/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_138/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_138/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_138/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_138/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_138/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_138/FusedBatchNormV3/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_138/FusedBatchNormV3/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_138/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_138/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_resnet_v2/batch_normalization_138/FusedBatchNormV3/ReadVariableOp_1"; +"inception_resnet_v2/batch_normalization_138/FusedBatchNormV3/ReadVariableOp_1" -> "inception_resnet_v2/batch_normalization_138/FusedBatchNormV3"; 
+"inception_resnet_v2/batch_normalization_138/FusedBatchNormV3" -> "inception_resnet_v2/activation_138/Relu"; +"inception_resnet_v2/activation_138/Relu" -> "inception_resnet_v2/conv2d_139/Conv2D"; +"inception_resnet_v2/conv2d_139/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_139/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/conv2d_139/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/conv2d_139/Sigmoid"; +"inception_resnet_v2/conv2d_139/Sigmoid" -> "inception_resnet_v2/conv2d_139/Round"; +"inception_resnet_v2/conv2d_139/Round" -> "inception_resnet_v2/conv2d_139/mul"; +"inception_resnet_v2/conv2d_139/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_139/ReadVariableOp"; +"inception_resnet_v2/conv2d_139/ReadVariableOp" -> "inception_resnet_v2/conv2d_139/mul"; +"inception_resnet_v2/conv2d_139/mul" -> "inception_resnet_v2/conv2d_139/Conv2D"; +"inception_resnet_v2/conv2d_139/Conv2D" -> "inception_resnet_v2/batch_normalization_139/FusedBatchNormV3"; +"inception_resnet_v2/conv2d_136/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_136/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/conv2d_136/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/conv2d_136/Sigmoid"; +"inception_resnet_v2/conv2d_136/Sigmoid" -> "inception_resnet_v2/conv2d_136/Round"; +"inception_resnet_v2/conv2d_136/Round" -> "inception_resnet_v2/conv2d_136/mul"; +"inception_resnet_v2/conv2d_136/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_136/ReadVariableOp"; +"inception_resnet_v2/conv2d_136/ReadVariableOp" -> "inception_resnet_v2/conv2d_136/mul"; +"inception_resnet_v2/conv2d_136/mul" -> "inception_resnet_v2/conv2d_136/Conv2D"; +"inception_resnet_v2/conv2d_136/Conv2D" -> "inception_resnet_v2/batch_normalization_136/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_139/Const" -> "inception_resnet_v2/batch_normalization_139/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_139/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_139/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_139/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_139/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_139/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_139/FusedBatchNormV3/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_139/FusedBatchNormV3/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_139/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_139/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_resnet_v2/batch_normalization_139/FusedBatchNormV3/ReadVariableOp_1"; +"inception_resnet_v2/batch_normalization_139/FusedBatchNormV3/ReadVariableOp_1" -> "inception_resnet_v2/batch_normalization_139/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_139/FusedBatchNormV3" -> "inception_resnet_v2/activation_139/Relu"; +"inception_resnet_v2/batch_normalization_136/Const" -> "inception_resnet_v2/batch_normalization_136/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_136/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_136/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_136/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_136/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_136/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_136/FusedBatchNormV3/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_136/FusedBatchNormV3/ReadVariableOp" -> 
"inception_resnet_v2/batch_normalization_136/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_136/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_resnet_v2/batch_normalization_136/FusedBatchNormV3/ReadVariableOp_1"; +"inception_resnet_v2/batch_normalization_136/FusedBatchNormV3/ReadVariableOp_1" -> "inception_resnet_v2/batch_normalization_136/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_136/FusedBatchNormV3" -> "inception_resnet_v2/activation_136/Relu"; +"inception_resnet_v2/activation_136/Relu" -> "inception_resnet_v2/block17_16_mixed/concat"; +"inception_resnet_v2/activation_139/Relu" -> "inception_resnet_v2/block17_16_mixed/concat"; +"inception_resnet_v2/block17_16_mixed/concat/axis" -> "inception_resnet_v2/block17_16_mixed/concat"; +"inception_resnet_v2/block17_16_mixed/concat" -> "inception_resnet_v2/block17_16_conv/Conv2D"; +"inception_resnet_v2/block17_16_conv/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/block17_16_conv/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/block17_16_conv/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/block17_16_conv/Sigmoid"; +"inception_resnet_v2/block17_16_conv/Sigmoid" -> "inception_resnet_v2/block17_16_conv/Round"; +"inception_resnet_v2/block17_16_conv/Round" -> "inception_resnet_v2/block17_16_conv/mul"; +"inception_resnet_v2/block17_16_conv/ReadVariableOp/resource" -> "inception_resnet_v2/block17_16_conv/ReadVariableOp"; +"inception_resnet_v2/block17_16_conv/ReadVariableOp" -> "inception_resnet_v2/block17_16_conv/mul"; +"inception_resnet_v2/block17_16_conv/mul" -> "inception_resnet_v2/block17_16_conv/Conv2D"; +"inception_resnet_v2/block17_16_conv/Conv2D" -> "inception_resnet_v2/block17_16_conv/BiasAdd"; +"inception_resnet_v2/block17_16_conv/BiasAdd/ReadVariableOp/resource" -> "inception_resnet_v2/block17_16_conv/BiasAdd/ReadVariableOp"; +"inception_resnet_v2/block17_16_conv/BiasAdd/ReadVariableOp" -> "inception_resnet_v2/block17_16_conv/BiasAdd"; +"inception_resnet_v2/block17_16_conv/BiasAdd" -> "inception_resnet_v2/custom_scale_layer_25/mul"; +"inception_resnet_v2/custom_scale_layer_25/mul/y" -> "inception_resnet_v2/custom_scale_layer_25/mul"; +"inception_resnet_v2/custom_scale_layer_25/mul" -> "inception_resnet_v2/custom_scale_layer_25/add"; +"inception_resnet_v2/custom_scale_layer_25/add" -> "inception_resnet_v2/block17_16_ac/Relu"; +"inception_resnet_v2/block17_16_ac/Relu" -> "inception_resnet_v2/conv2d_141/Conv2D"; +"inception_resnet_v2/block17_16_ac/Relu" -> "inception_resnet_v2/conv2d_140/Conv2D"; +"inception_resnet_v2/block17_16_ac/Relu" -> "inception_resnet_v2/custom_scale_layer_26/add"; +"inception_resnet_v2/conv2d_141/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_141/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/conv2d_141/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/conv2d_141/Sigmoid"; +"inception_resnet_v2/conv2d_141/Sigmoid" -> "inception_resnet_v2/conv2d_141/Round"; +"inception_resnet_v2/conv2d_141/Round" -> "inception_resnet_v2/conv2d_141/mul"; +"inception_resnet_v2/conv2d_141/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_141/ReadVariableOp"; +"inception_resnet_v2/conv2d_141/ReadVariableOp" -> "inception_resnet_v2/conv2d_141/mul"; +"inception_resnet_v2/conv2d_141/mul" -> "inception_resnet_v2/conv2d_141/Conv2D"; +"inception_resnet_v2/conv2d_141/Conv2D" -> "inception_resnet_v2/batch_normalization_141/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_141/Const" -> "inception_resnet_v2/batch_normalization_141/FusedBatchNormV3"; 
+"inception_resnet_v2/batch_normalization_141/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_141/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_141/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_141/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_141/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_141/FusedBatchNormV3/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_141/FusedBatchNormV3/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_141/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_141/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_resnet_v2/batch_normalization_141/FusedBatchNormV3/ReadVariableOp_1"; +"inception_resnet_v2/batch_normalization_141/FusedBatchNormV3/ReadVariableOp_1" -> "inception_resnet_v2/batch_normalization_141/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_141/FusedBatchNormV3" -> "inception_resnet_v2/activation_141/Relu"; +"inception_resnet_v2/activation_141/Relu" -> "inception_resnet_v2/conv2d_142/Conv2D"; +"inception_resnet_v2/conv2d_142/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_142/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/conv2d_142/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/conv2d_142/Sigmoid"; +"inception_resnet_v2/conv2d_142/Sigmoid" -> "inception_resnet_v2/conv2d_142/Round"; +"inception_resnet_v2/conv2d_142/Round" -> "inception_resnet_v2/conv2d_142/mul"; +"inception_resnet_v2/conv2d_142/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_142/ReadVariableOp"; +"inception_resnet_v2/conv2d_142/ReadVariableOp" -> "inception_resnet_v2/conv2d_142/mul"; +"inception_resnet_v2/conv2d_142/mul" -> "inception_resnet_v2/conv2d_142/Conv2D"; +"inception_resnet_v2/conv2d_142/Conv2D" -> "inception_resnet_v2/batch_normalization_142/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_142/Const" -> "inception_resnet_v2/batch_normalization_142/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_142/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_142/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_142/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_142/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_142/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_142/FusedBatchNormV3/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_142/FusedBatchNormV3/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_142/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_142/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_resnet_v2/batch_normalization_142/FusedBatchNormV3/ReadVariableOp_1"; +"inception_resnet_v2/batch_normalization_142/FusedBatchNormV3/ReadVariableOp_1" -> "inception_resnet_v2/batch_normalization_142/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_142/FusedBatchNormV3" -> "inception_resnet_v2/activation_142/Relu"; +"inception_resnet_v2/activation_142/Relu" -> "inception_resnet_v2/conv2d_143/Conv2D"; +"inception_resnet_v2/conv2d_143/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_143/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/conv2d_143/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/conv2d_143/Sigmoid"; +"inception_resnet_v2/conv2d_143/Sigmoid" -> "inception_resnet_v2/conv2d_143/Round"; +"inception_resnet_v2/conv2d_143/Round" -> "inception_resnet_v2/conv2d_143/mul"; 
+"inception_resnet_v2/conv2d_143/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_143/ReadVariableOp"; +"inception_resnet_v2/conv2d_143/ReadVariableOp" -> "inception_resnet_v2/conv2d_143/mul"; +"inception_resnet_v2/conv2d_143/mul" -> "inception_resnet_v2/conv2d_143/Conv2D"; +"inception_resnet_v2/conv2d_143/Conv2D" -> "inception_resnet_v2/batch_normalization_143/FusedBatchNormV3"; +"inception_resnet_v2/conv2d_140/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_140/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/conv2d_140/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/conv2d_140/Sigmoid"; +"inception_resnet_v2/conv2d_140/Sigmoid" -> "inception_resnet_v2/conv2d_140/Round"; +"inception_resnet_v2/conv2d_140/Round" -> "inception_resnet_v2/conv2d_140/mul"; +"inception_resnet_v2/conv2d_140/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_140/ReadVariableOp"; +"inception_resnet_v2/conv2d_140/ReadVariableOp" -> "inception_resnet_v2/conv2d_140/mul"; +"inception_resnet_v2/conv2d_140/mul" -> "inception_resnet_v2/conv2d_140/Conv2D"; +"inception_resnet_v2/conv2d_140/Conv2D" -> "inception_resnet_v2/batch_normalization_140/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_143/Const" -> "inception_resnet_v2/batch_normalization_143/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_143/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_143/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_143/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_143/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_143/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_143/FusedBatchNormV3/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_143/FusedBatchNormV3/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_143/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_143/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_resnet_v2/batch_normalization_143/FusedBatchNormV3/ReadVariableOp_1"; +"inception_resnet_v2/batch_normalization_143/FusedBatchNormV3/ReadVariableOp_1" -> "inception_resnet_v2/batch_normalization_143/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_143/FusedBatchNormV3" -> "inception_resnet_v2/activation_143/Relu"; +"inception_resnet_v2/batch_normalization_140/Const" -> "inception_resnet_v2/batch_normalization_140/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_140/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_140/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_140/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_140/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_140/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_140/FusedBatchNormV3/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_140/FusedBatchNormV3/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_140/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_140/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_resnet_v2/batch_normalization_140/FusedBatchNormV3/ReadVariableOp_1"; +"inception_resnet_v2/batch_normalization_140/FusedBatchNormV3/ReadVariableOp_1" -> "inception_resnet_v2/batch_normalization_140/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_140/FusedBatchNormV3" -> "inception_resnet_v2/activation_140/Relu"; +"inception_resnet_v2/activation_140/Relu" -> "inception_resnet_v2/block17_17_mixed/concat"; 
+"inception_resnet_v2/activation_143/Relu" -> "inception_resnet_v2/block17_17_mixed/concat"; +"inception_resnet_v2/block17_17_mixed/concat/axis" -> "inception_resnet_v2/block17_17_mixed/concat"; +"inception_resnet_v2/block17_17_mixed/concat" -> "inception_resnet_v2/block17_17_conv/Conv2D"; +"inception_resnet_v2/block17_17_conv/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/block17_17_conv/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/block17_17_conv/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/block17_17_conv/Sigmoid"; +"inception_resnet_v2/block17_17_conv/Sigmoid" -> "inception_resnet_v2/block17_17_conv/Round"; +"inception_resnet_v2/block17_17_conv/Round" -> "inception_resnet_v2/block17_17_conv/mul"; +"inception_resnet_v2/block17_17_conv/ReadVariableOp/resource" -> "inception_resnet_v2/block17_17_conv/ReadVariableOp"; +"inception_resnet_v2/block17_17_conv/ReadVariableOp" -> "inception_resnet_v2/block17_17_conv/mul"; +"inception_resnet_v2/block17_17_conv/mul" -> "inception_resnet_v2/block17_17_conv/Conv2D"; +"inception_resnet_v2/block17_17_conv/Conv2D" -> "inception_resnet_v2/block17_17_conv/BiasAdd"; +"inception_resnet_v2/block17_17_conv/BiasAdd/ReadVariableOp/resource" -> "inception_resnet_v2/block17_17_conv/BiasAdd/ReadVariableOp"; +"inception_resnet_v2/block17_17_conv/BiasAdd/ReadVariableOp" -> "inception_resnet_v2/block17_17_conv/BiasAdd"; +"inception_resnet_v2/block17_17_conv/BiasAdd" -> "inception_resnet_v2/custom_scale_layer_26/mul"; +"inception_resnet_v2/custom_scale_layer_26/mul/y" -> "inception_resnet_v2/custom_scale_layer_26/mul"; +"inception_resnet_v2/custom_scale_layer_26/mul" -> "inception_resnet_v2/custom_scale_layer_26/add"; +"inception_resnet_v2/custom_scale_layer_26/add" -> "inception_resnet_v2/block17_17_ac/Relu"; +"inception_resnet_v2/block17_17_ac/Relu" -> "inception_resnet_v2/conv2d_145/Conv2D"; +"inception_resnet_v2/block17_17_ac/Relu" -> "inception_resnet_v2/conv2d_144/Conv2D"; +"inception_resnet_v2/block17_17_ac/Relu" -> "inception_resnet_v2/custom_scale_layer_27/add"; +"inception_resnet_v2/conv2d_145/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_145/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/conv2d_145/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/conv2d_145/Sigmoid"; +"inception_resnet_v2/conv2d_145/Sigmoid" -> "inception_resnet_v2/conv2d_145/Round"; +"inception_resnet_v2/conv2d_145/Round" -> "inception_resnet_v2/conv2d_145/mul"; +"inception_resnet_v2/conv2d_145/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_145/ReadVariableOp"; +"inception_resnet_v2/conv2d_145/ReadVariableOp" -> "inception_resnet_v2/conv2d_145/mul"; +"inception_resnet_v2/conv2d_145/mul" -> "inception_resnet_v2/conv2d_145/Conv2D"; +"inception_resnet_v2/conv2d_145/Conv2D" -> "inception_resnet_v2/batch_normalization_145/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_145/Const" -> "inception_resnet_v2/batch_normalization_145/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_145/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_145/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_145/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_145/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_145/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_145/FusedBatchNormV3/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_145/FusedBatchNormV3/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_145/FusedBatchNormV3"; 
+"inception_resnet_v2/batch_normalization_145/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_resnet_v2/batch_normalization_145/FusedBatchNormV3/ReadVariableOp_1"; +"inception_resnet_v2/batch_normalization_145/FusedBatchNormV3/ReadVariableOp_1" -> "inception_resnet_v2/batch_normalization_145/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_145/FusedBatchNormV3" -> "inception_resnet_v2/activation_145/Relu"; +"inception_resnet_v2/activation_145/Relu" -> "inception_resnet_v2/conv2d_146/Conv2D"; +"inception_resnet_v2/conv2d_146/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_146/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/conv2d_146/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/conv2d_146/Sigmoid"; +"inception_resnet_v2/conv2d_146/Sigmoid" -> "inception_resnet_v2/conv2d_146/Round"; +"inception_resnet_v2/conv2d_146/Round" -> "inception_resnet_v2/conv2d_146/mul"; +"inception_resnet_v2/conv2d_146/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_146/ReadVariableOp"; +"inception_resnet_v2/conv2d_146/ReadVariableOp" -> "inception_resnet_v2/conv2d_146/mul"; +"inception_resnet_v2/conv2d_146/mul" -> "inception_resnet_v2/conv2d_146/Conv2D"; +"inception_resnet_v2/conv2d_146/Conv2D" -> "inception_resnet_v2/batch_normalization_146/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_146/Const" -> "inception_resnet_v2/batch_normalization_146/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_146/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_146/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_146/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_146/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_146/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_146/FusedBatchNormV3/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_146/FusedBatchNormV3/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_146/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_146/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_resnet_v2/batch_normalization_146/FusedBatchNormV3/ReadVariableOp_1"; +"inception_resnet_v2/batch_normalization_146/FusedBatchNormV3/ReadVariableOp_1" -> "inception_resnet_v2/batch_normalization_146/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_146/FusedBatchNormV3" -> "inception_resnet_v2/activation_146/Relu"; +"inception_resnet_v2/activation_146/Relu" -> "inception_resnet_v2/conv2d_147/Conv2D"; +"inception_resnet_v2/conv2d_147/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_147/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/conv2d_147/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/conv2d_147/Sigmoid"; +"inception_resnet_v2/conv2d_147/Sigmoid" -> "inception_resnet_v2/conv2d_147/Round"; +"inception_resnet_v2/conv2d_147/Round" -> "inception_resnet_v2/conv2d_147/mul"; +"inception_resnet_v2/conv2d_147/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_147/ReadVariableOp"; +"inception_resnet_v2/conv2d_147/ReadVariableOp" -> "inception_resnet_v2/conv2d_147/mul"; +"inception_resnet_v2/conv2d_147/mul" -> "inception_resnet_v2/conv2d_147/Conv2D"; +"inception_resnet_v2/conv2d_147/Conv2D" -> "inception_resnet_v2/batch_normalization_147/FusedBatchNormV3"; +"inception_resnet_v2/conv2d_144/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_144/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/conv2d_144/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/conv2d_144/Sigmoid"; 
+"inception_resnet_v2/conv2d_144/Sigmoid" -> "inception_resnet_v2/conv2d_144/Round"; +"inception_resnet_v2/conv2d_144/Round" -> "inception_resnet_v2/conv2d_144/mul"; +"inception_resnet_v2/conv2d_144/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_144/ReadVariableOp"; +"inception_resnet_v2/conv2d_144/ReadVariableOp" -> "inception_resnet_v2/conv2d_144/mul"; +"inception_resnet_v2/conv2d_144/mul" -> "inception_resnet_v2/conv2d_144/Conv2D"; +"inception_resnet_v2/conv2d_144/Conv2D" -> "inception_resnet_v2/batch_normalization_144/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_147/Const" -> "inception_resnet_v2/batch_normalization_147/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_147/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_147/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_147/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_147/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_147/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_147/FusedBatchNormV3/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_147/FusedBatchNormV3/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_147/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_147/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_resnet_v2/batch_normalization_147/FusedBatchNormV3/ReadVariableOp_1"; +"inception_resnet_v2/batch_normalization_147/FusedBatchNormV3/ReadVariableOp_1" -> "inception_resnet_v2/batch_normalization_147/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_147/FusedBatchNormV3" -> "inception_resnet_v2/activation_147/Relu"; +"inception_resnet_v2/batch_normalization_144/Const" -> "inception_resnet_v2/batch_normalization_144/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_144/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_144/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_144/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_144/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_144/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_144/FusedBatchNormV3/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_144/FusedBatchNormV3/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_144/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_144/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_resnet_v2/batch_normalization_144/FusedBatchNormV3/ReadVariableOp_1"; +"inception_resnet_v2/batch_normalization_144/FusedBatchNormV3/ReadVariableOp_1" -> "inception_resnet_v2/batch_normalization_144/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_144/FusedBatchNormV3" -> "inception_resnet_v2/activation_144/Relu"; +"inception_resnet_v2/activation_144/Relu" -> "inception_resnet_v2/block17_18_mixed/concat"; +"inception_resnet_v2/activation_147/Relu" -> "inception_resnet_v2/block17_18_mixed/concat"; +"inception_resnet_v2/block17_18_mixed/concat/axis" -> "inception_resnet_v2/block17_18_mixed/concat"; +"inception_resnet_v2/block17_18_mixed/concat" -> "inception_resnet_v2/block17_18_conv/Conv2D"; +"inception_resnet_v2/block17_18_conv/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/block17_18_conv/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/block17_18_conv/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/block17_18_conv/Sigmoid"; +"inception_resnet_v2/block17_18_conv/Sigmoid" -> "inception_resnet_v2/block17_18_conv/Round"; 
+"inception_resnet_v2/block17_18_conv/Round" -> "inception_resnet_v2/block17_18_conv/mul"; +"inception_resnet_v2/block17_18_conv/ReadVariableOp/resource" -> "inception_resnet_v2/block17_18_conv/ReadVariableOp"; +"inception_resnet_v2/block17_18_conv/ReadVariableOp" -> "inception_resnet_v2/block17_18_conv/mul"; +"inception_resnet_v2/block17_18_conv/mul" -> "inception_resnet_v2/block17_18_conv/Conv2D"; +"inception_resnet_v2/block17_18_conv/Conv2D" -> "inception_resnet_v2/block17_18_conv/BiasAdd"; +"inception_resnet_v2/block17_18_conv/BiasAdd/ReadVariableOp/resource" -> "inception_resnet_v2/block17_18_conv/BiasAdd/ReadVariableOp"; +"inception_resnet_v2/block17_18_conv/BiasAdd/ReadVariableOp" -> "inception_resnet_v2/block17_18_conv/BiasAdd"; +"inception_resnet_v2/block17_18_conv/BiasAdd" -> "inception_resnet_v2/custom_scale_layer_27/mul"; +"inception_resnet_v2/custom_scale_layer_27/mul/y" -> "inception_resnet_v2/custom_scale_layer_27/mul"; +"inception_resnet_v2/custom_scale_layer_27/mul" -> "inception_resnet_v2/custom_scale_layer_27/add"; +"inception_resnet_v2/custom_scale_layer_27/add" -> "inception_resnet_v2/block17_18_ac/Relu"; +"inception_resnet_v2/block17_18_ac/Relu" -> "inception_resnet_v2/conv2d_149/Conv2D"; +"inception_resnet_v2/block17_18_ac/Relu" -> "inception_resnet_v2/conv2d_148/Conv2D"; +"inception_resnet_v2/block17_18_ac/Relu" -> "inception_resnet_v2/custom_scale_layer_28/add"; +"inception_resnet_v2/conv2d_149/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_149/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/conv2d_149/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/conv2d_149/Sigmoid"; +"inception_resnet_v2/conv2d_149/Sigmoid" -> "inception_resnet_v2/conv2d_149/Round"; +"inception_resnet_v2/conv2d_149/Round" -> "inception_resnet_v2/conv2d_149/mul"; +"inception_resnet_v2/conv2d_149/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_149/ReadVariableOp"; +"inception_resnet_v2/conv2d_149/ReadVariableOp" -> "inception_resnet_v2/conv2d_149/mul"; +"inception_resnet_v2/conv2d_149/mul" -> "inception_resnet_v2/conv2d_149/Conv2D"; +"inception_resnet_v2/conv2d_149/Conv2D" -> "inception_resnet_v2/batch_normalization_149/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_149/Const" -> "inception_resnet_v2/batch_normalization_149/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_149/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_149/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_149/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_149/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_149/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_149/FusedBatchNormV3/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_149/FusedBatchNormV3/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_149/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_149/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_resnet_v2/batch_normalization_149/FusedBatchNormV3/ReadVariableOp_1"; +"inception_resnet_v2/batch_normalization_149/FusedBatchNormV3/ReadVariableOp_1" -> "inception_resnet_v2/batch_normalization_149/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_149/FusedBatchNormV3" -> "inception_resnet_v2/activation_149/Relu"; +"inception_resnet_v2/activation_149/Relu" -> "inception_resnet_v2/conv2d_150/Conv2D"; +"inception_resnet_v2/conv2d_150/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_150/Sigmoid/ReadVariableOp"; 
+"inception_resnet_v2/conv2d_150/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/conv2d_150/Sigmoid"; +"inception_resnet_v2/conv2d_150/Sigmoid" -> "inception_resnet_v2/conv2d_150/Round"; +"inception_resnet_v2/conv2d_150/Round" -> "inception_resnet_v2/conv2d_150/mul"; +"inception_resnet_v2/conv2d_150/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_150/ReadVariableOp"; +"inception_resnet_v2/conv2d_150/ReadVariableOp" -> "inception_resnet_v2/conv2d_150/mul"; +"inception_resnet_v2/conv2d_150/mul" -> "inception_resnet_v2/conv2d_150/Conv2D"; +"inception_resnet_v2/conv2d_150/Conv2D" -> "inception_resnet_v2/batch_normalization_150/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_150/Const" -> "inception_resnet_v2/batch_normalization_150/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_150/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_150/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_150/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_150/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_150/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_150/FusedBatchNormV3/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_150/FusedBatchNormV3/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_150/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_150/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_resnet_v2/batch_normalization_150/FusedBatchNormV3/ReadVariableOp_1"; +"inception_resnet_v2/batch_normalization_150/FusedBatchNormV3/ReadVariableOp_1" -> "inception_resnet_v2/batch_normalization_150/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_150/FusedBatchNormV3" -> "inception_resnet_v2/activation_150/Relu"; +"inception_resnet_v2/activation_150/Relu" -> "inception_resnet_v2/conv2d_151/Conv2D"; +"inception_resnet_v2/conv2d_151/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_151/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/conv2d_151/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/conv2d_151/Sigmoid"; +"inception_resnet_v2/conv2d_151/Sigmoid" -> "inception_resnet_v2/conv2d_151/Round"; +"inception_resnet_v2/conv2d_151/Round" -> "inception_resnet_v2/conv2d_151/mul"; +"inception_resnet_v2/conv2d_151/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_151/ReadVariableOp"; +"inception_resnet_v2/conv2d_151/ReadVariableOp" -> "inception_resnet_v2/conv2d_151/mul"; +"inception_resnet_v2/conv2d_151/mul" -> "inception_resnet_v2/conv2d_151/Conv2D"; +"inception_resnet_v2/conv2d_151/Conv2D" -> "inception_resnet_v2/batch_normalization_151/FusedBatchNormV3"; +"inception_resnet_v2/conv2d_148/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_148/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/conv2d_148/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/conv2d_148/Sigmoid"; +"inception_resnet_v2/conv2d_148/Sigmoid" -> "inception_resnet_v2/conv2d_148/Round"; +"inception_resnet_v2/conv2d_148/Round" -> "inception_resnet_v2/conv2d_148/mul"; +"inception_resnet_v2/conv2d_148/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_148/ReadVariableOp"; +"inception_resnet_v2/conv2d_148/ReadVariableOp" -> "inception_resnet_v2/conv2d_148/mul"; +"inception_resnet_v2/conv2d_148/mul" -> "inception_resnet_v2/conv2d_148/Conv2D"; +"inception_resnet_v2/conv2d_148/Conv2D" -> "inception_resnet_v2/batch_normalization_148/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_151/Const" -> 
"inception_resnet_v2/batch_normalization_151/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_151/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_151/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_151/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_151/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_151/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_151/FusedBatchNormV3/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_151/FusedBatchNormV3/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_151/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_151/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_resnet_v2/batch_normalization_151/FusedBatchNormV3/ReadVariableOp_1"; +"inception_resnet_v2/batch_normalization_151/FusedBatchNormV3/ReadVariableOp_1" -> "inception_resnet_v2/batch_normalization_151/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_151/FusedBatchNormV3" -> "inception_resnet_v2/activation_151/Relu"; +"inception_resnet_v2/batch_normalization_148/Const" -> "inception_resnet_v2/batch_normalization_148/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_148/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_148/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_148/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_148/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_148/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_148/FusedBatchNormV3/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_148/FusedBatchNormV3/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_148/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_148/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_resnet_v2/batch_normalization_148/FusedBatchNormV3/ReadVariableOp_1"; +"inception_resnet_v2/batch_normalization_148/FusedBatchNormV3/ReadVariableOp_1" -> "inception_resnet_v2/batch_normalization_148/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_148/FusedBatchNormV3" -> "inception_resnet_v2/activation_148/Relu"; +"inception_resnet_v2/activation_148/Relu" -> "inception_resnet_v2/block17_19_mixed/concat"; +"inception_resnet_v2/activation_151/Relu" -> "inception_resnet_v2/block17_19_mixed/concat"; +"inception_resnet_v2/block17_19_mixed/concat/axis" -> "inception_resnet_v2/block17_19_mixed/concat"; +"inception_resnet_v2/block17_19_mixed/concat" -> "inception_resnet_v2/block17_19_conv/Conv2D"; +"inception_resnet_v2/block17_19_conv/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/block17_19_conv/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/block17_19_conv/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/block17_19_conv/Sigmoid"; +"inception_resnet_v2/block17_19_conv/Sigmoid" -> "inception_resnet_v2/block17_19_conv/Round"; +"inception_resnet_v2/block17_19_conv/Round" -> "inception_resnet_v2/block17_19_conv/mul"; +"inception_resnet_v2/block17_19_conv/ReadVariableOp/resource" -> "inception_resnet_v2/block17_19_conv/ReadVariableOp"; +"inception_resnet_v2/block17_19_conv/ReadVariableOp" -> "inception_resnet_v2/block17_19_conv/mul"; +"inception_resnet_v2/block17_19_conv/mul" -> "inception_resnet_v2/block17_19_conv/Conv2D"; +"inception_resnet_v2/block17_19_conv/Conv2D" -> "inception_resnet_v2/block17_19_conv/BiasAdd"; +"inception_resnet_v2/block17_19_conv/BiasAdd/ReadVariableOp/resource" -> 
"inception_resnet_v2/block17_19_conv/BiasAdd/ReadVariableOp"; +"inception_resnet_v2/block17_19_conv/BiasAdd/ReadVariableOp" -> "inception_resnet_v2/block17_19_conv/BiasAdd"; +"inception_resnet_v2/block17_19_conv/BiasAdd" -> "inception_resnet_v2/custom_scale_layer_28/mul"; +"inception_resnet_v2/custom_scale_layer_28/mul/y" -> "inception_resnet_v2/custom_scale_layer_28/mul"; +"inception_resnet_v2/custom_scale_layer_28/mul" -> "inception_resnet_v2/custom_scale_layer_28/add"; +"inception_resnet_v2/custom_scale_layer_28/add" -> "inception_resnet_v2/block17_19_ac/Relu"; +"inception_resnet_v2/block17_19_ac/Relu" -> "inception_resnet_v2/conv2d_153/Conv2D"; +"inception_resnet_v2/block17_19_ac/Relu" -> "inception_resnet_v2/conv2d_152/Conv2D"; +"inception_resnet_v2/block17_19_ac/Relu" -> "inception_resnet_v2/custom_scale_layer_29/add"; +"inception_resnet_v2/conv2d_153/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_153/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/conv2d_153/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/conv2d_153/Sigmoid"; +"inception_resnet_v2/conv2d_153/Sigmoid" -> "inception_resnet_v2/conv2d_153/Round"; +"inception_resnet_v2/conv2d_153/Round" -> "inception_resnet_v2/conv2d_153/mul"; +"inception_resnet_v2/conv2d_153/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_153/ReadVariableOp"; +"inception_resnet_v2/conv2d_153/ReadVariableOp" -> "inception_resnet_v2/conv2d_153/mul"; +"inception_resnet_v2/conv2d_153/mul" -> "inception_resnet_v2/conv2d_153/Conv2D"; +"inception_resnet_v2/conv2d_153/Conv2D" -> "inception_resnet_v2/batch_normalization_153/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_153/Const" -> "inception_resnet_v2/batch_normalization_153/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_153/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_153/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_153/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_153/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_153/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_153/FusedBatchNormV3/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_153/FusedBatchNormV3/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_153/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_153/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_resnet_v2/batch_normalization_153/FusedBatchNormV3/ReadVariableOp_1"; +"inception_resnet_v2/batch_normalization_153/FusedBatchNormV3/ReadVariableOp_1" -> "inception_resnet_v2/batch_normalization_153/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_153/FusedBatchNormV3" -> "inception_resnet_v2/activation_153/Relu"; +"inception_resnet_v2/activation_153/Relu" -> "inception_resnet_v2/conv2d_154/Conv2D"; +"inception_resnet_v2/conv2d_154/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_154/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/conv2d_154/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/conv2d_154/Sigmoid"; +"inception_resnet_v2/conv2d_154/Sigmoid" -> "inception_resnet_v2/conv2d_154/Round"; +"inception_resnet_v2/conv2d_154/Round" -> "inception_resnet_v2/conv2d_154/mul"; +"inception_resnet_v2/conv2d_154/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_154/ReadVariableOp"; +"inception_resnet_v2/conv2d_154/ReadVariableOp" -> "inception_resnet_v2/conv2d_154/mul"; +"inception_resnet_v2/conv2d_154/mul" -> "inception_resnet_v2/conv2d_154/Conv2D"; 
+"inception_resnet_v2/conv2d_154/Conv2D" -> "inception_resnet_v2/batch_normalization_154/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_154/Const" -> "inception_resnet_v2/batch_normalization_154/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_154/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_154/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_154/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_154/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_154/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_154/FusedBatchNormV3/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_154/FusedBatchNormV3/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_154/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_154/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_resnet_v2/batch_normalization_154/FusedBatchNormV3/ReadVariableOp_1"; +"inception_resnet_v2/batch_normalization_154/FusedBatchNormV3/ReadVariableOp_1" -> "inception_resnet_v2/batch_normalization_154/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_154/FusedBatchNormV3" -> "inception_resnet_v2/activation_154/Relu"; +"inception_resnet_v2/activation_154/Relu" -> "inception_resnet_v2/conv2d_155/Conv2D"; +"inception_resnet_v2/conv2d_155/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_155/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/conv2d_155/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/conv2d_155/Sigmoid"; +"inception_resnet_v2/conv2d_155/Sigmoid" -> "inception_resnet_v2/conv2d_155/Round"; +"inception_resnet_v2/conv2d_155/Round" -> "inception_resnet_v2/conv2d_155/mul"; +"inception_resnet_v2/conv2d_155/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_155/ReadVariableOp"; +"inception_resnet_v2/conv2d_155/ReadVariableOp" -> "inception_resnet_v2/conv2d_155/mul"; +"inception_resnet_v2/conv2d_155/mul" -> "inception_resnet_v2/conv2d_155/Conv2D"; +"inception_resnet_v2/conv2d_155/Conv2D" -> "inception_resnet_v2/batch_normalization_155/FusedBatchNormV3"; +"inception_resnet_v2/conv2d_152/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_152/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/conv2d_152/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/conv2d_152/Sigmoid"; +"inception_resnet_v2/conv2d_152/Sigmoid" -> "inception_resnet_v2/conv2d_152/Round"; +"inception_resnet_v2/conv2d_152/Round" -> "inception_resnet_v2/conv2d_152/mul"; +"inception_resnet_v2/conv2d_152/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_152/ReadVariableOp"; +"inception_resnet_v2/conv2d_152/ReadVariableOp" -> "inception_resnet_v2/conv2d_152/mul"; +"inception_resnet_v2/conv2d_152/mul" -> "inception_resnet_v2/conv2d_152/Conv2D"; +"inception_resnet_v2/conv2d_152/Conv2D" -> "inception_resnet_v2/batch_normalization_152/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_155/Const" -> "inception_resnet_v2/batch_normalization_155/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_155/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_155/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_155/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_155/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_155/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_155/FusedBatchNormV3/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_155/FusedBatchNormV3/ReadVariableOp" -> 
"inception_resnet_v2/batch_normalization_155/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_155/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_resnet_v2/batch_normalization_155/FusedBatchNormV3/ReadVariableOp_1"; +"inception_resnet_v2/batch_normalization_155/FusedBatchNormV3/ReadVariableOp_1" -> "inception_resnet_v2/batch_normalization_155/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_155/FusedBatchNormV3" -> "inception_resnet_v2/activation_155/Relu"; +"inception_resnet_v2/batch_normalization_152/Const" -> "inception_resnet_v2/batch_normalization_152/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_152/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_152/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_152/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_152/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_152/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_152/FusedBatchNormV3/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_152/FusedBatchNormV3/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_152/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_152/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_resnet_v2/batch_normalization_152/FusedBatchNormV3/ReadVariableOp_1"; +"inception_resnet_v2/batch_normalization_152/FusedBatchNormV3/ReadVariableOp_1" -> "inception_resnet_v2/batch_normalization_152/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_152/FusedBatchNormV3" -> "inception_resnet_v2/activation_152/Relu"; +"inception_resnet_v2/activation_152/Relu" -> "inception_resnet_v2/block17_20_mixed/concat"; +"inception_resnet_v2/activation_155/Relu" -> "inception_resnet_v2/block17_20_mixed/concat"; +"inception_resnet_v2/block17_20_mixed/concat/axis" -> "inception_resnet_v2/block17_20_mixed/concat"; +"inception_resnet_v2/block17_20_mixed/concat" -> "inception_resnet_v2/block17_20_conv/Conv2D"; +"inception_resnet_v2/block17_20_conv/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/block17_20_conv/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/block17_20_conv/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/block17_20_conv/Sigmoid"; +"inception_resnet_v2/block17_20_conv/Sigmoid" -> "inception_resnet_v2/block17_20_conv/Round"; +"inception_resnet_v2/block17_20_conv/Round" -> "inception_resnet_v2/block17_20_conv/mul"; +"inception_resnet_v2/block17_20_conv/ReadVariableOp/resource" -> "inception_resnet_v2/block17_20_conv/ReadVariableOp"; +"inception_resnet_v2/block17_20_conv/ReadVariableOp" -> "inception_resnet_v2/block17_20_conv/mul"; +"inception_resnet_v2/block17_20_conv/mul" -> "inception_resnet_v2/block17_20_conv/Conv2D"; +"inception_resnet_v2/block17_20_conv/Conv2D" -> "inception_resnet_v2/block17_20_conv/BiasAdd"; +"inception_resnet_v2/block17_20_conv/BiasAdd/ReadVariableOp/resource" -> "inception_resnet_v2/block17_20_conv/BiasAdd/ReadVariableOp"; +"inception_resnet_v2/block17_20_conv/BiasAdd/ReadVariableOp" -> "inception_resnet_v2/block17_20_conv/BiasAdd"; +"inception_resnet_v2/block17_20_conv/BiasAdd" -> "inception_resnet_v2/custom_scale_layer_29/mul"; +"inception_resnet_v2/custom_scale_layer_29/mul/y" -> "inception_resnet_v2/custom_scale_layer_29/mul"; +"inception_resnet_v2/custom_scale_layer_29/mul" -> "inception_resnet_v2/custom_scale_layer_29/add"; +"inception_resnet_v2/custom_scale_layer_29/add" -> "inception_resnet_v2/block17_20_ac/Relu"; +"inception_resnet_v2/block17_20_ac/Relu" -> 
"inception_resnet_v2/conv2d_160/Conv2D"; +"inception_resnet_v2/block17_20_ac/Relu" -> "inception_resnet_v2/conv2d_158/Conv2D"; +"inception_resnet_v2/block17_20_ac/Relu" -> "inception_resnet_v2/conv2d_156/Conv2D"; +"inception_resnet_v2/block17_20_ac/Relu" -> "inception_resnet_v2/max_pooling2d_3/MaxPool"; +"inception_resnet_v2/conv2d_160/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_160/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/conv2d_160/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/conv2d_160/Sigmoid"; +"inception_resnet_v2/conv2d_160/Sigmoid" -> "inception_resnet_v2/conv2d_160/Round"; +"inception_resnet_v2/conv2d_160/Round" -> "inception_resnet_v2/conv2d_160/mul"; +"inception_resnet_v2/conv2d_160/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_160/ReadVariableOp"; +"inception_resnet_v2/conv2d_160/ReadVariableOp" -> "inception_resnet_v2/conv2d_160/mul"; +"inception_resnet_v2/conv2d_160/mul" -> "inception_resnet_v2/conv2d_160/Conv2D"; +"inception_resnet_v2/conv2d_160/Conv2D" -> "inception_resnet_v2/batch_normalization_160/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_160/Const" -> "inception_resnet_v2/batch_normalization_160/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_160/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_160/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_160/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_160/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_160/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_160/FusedBatchNormV3/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_160/FusedBatchNormV3/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_160/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_160/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_resnet_v2/batch_normalization_160/FusedBatchNormV3/ReadVariableOp_1"; +"inception_resnet_v2/batch_normalization_160/FusedBatchNormV3/ReadVariableOp_1" -> "inception_resnet_v2/batch_normalization_160/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_160/FusedBatchNormV3" -> "inception_resnet_v2/activation_160/Relu"; +"inception_resnet_v2/activation_160/Relu" -> "inception_resnet_v2/conv2d_161/Conv2D"; +"inception_resnet_v2/conv2d_161/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_161/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/conv2d_161/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/conv2d_161/Sigmoid"; +"inception_resnet_v2/conv2d_161/Sigmoid" -> "inception_resnet_v2/conv2d_161/Round"; +"inception_resnet_v2/conv2d_161/Round" -> "inception_resnet_v2/conv2d_161/mul"; +"inception_resnet_v2/conv2d_161/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_161/ReadVariableOp"; +"inception_resnet_v2/conv2d_161/ReadVariableOp" -> "inception_resnet_v2/conv2d_161/mul"; +"inception_resnet_v2/conv2d_161/mul" -> "inception_resnet_v2/conv2d_161/Conv2D"; +"inception_resnet_v2/conv2d_161/Conv2D" -> "inception_resnet_v2/batch_normalization_161/FusedBatchNormV3"; +"inception_resnet_v2/conv2d_158/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_158/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/conv2d_158/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/conv2d_158/Sigmoid"; +"inception_resnet_v2/conv2d_158/Sigmoid" -> "inception_resnet_v2/conv2d_158/Round"; +"inception_resnet_v2/conv2d_158/Round" -> "inception_resnet_v2/conv2d_158/mul"; +"inception_resnet_v2/conv2d_158/ReadVariableOp/resource" -> 
"inception_resnet_v2/conv2d_158/ReadVariableOp"; +"inception_resnet_v2/conv2d_158/ReadVariableOp" -> "inception_resnet_v2/conv2d_158/mul"; +"inception_resnet_v2/conv2d_158/mul" -> "inception_resnet_v2/conv2d_158/Conv2D"; +"inception_resnet_v2/conv2d_158/Conv2D" -> "inception_resnet_v2/batch_normalization_158/FusedBatchNormV3"; +"inception_resnet_v2/conv2d_156/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_156/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/conv2d_156/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/conv2d_156/Sigmoid"; +"inception_resnet_v2/conv2d_156/Sigmoid" -> "inception_resnet_v2/conv2d_156/Round"; +"inception_resnet_v2/conv2d_156/Round" -> "inception_resnet_v2/conv2d_156/mul"; +"inception_resnet_v2/conv2d_156/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_156/ReadVariableOp"; +"inception_resnet_v2/conv2d_156/ReadVariableOp" -> "inception_resnet_v2/conv2d_156/mul"; +"inception_resnet_v2/conv2d_156/mul" -> "inception_resnet_v2/conv2d_156/Conv2D"; +"inception_resnet_v2/conv2d_156/Conv2D" -> "inception_resnet_v2/batch_normalization_156/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_161/Const" -> "inception_resnet_v2/batch_normalization_161/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_161/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_161/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_161/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_161/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_161/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_161/FusedBatchNormV3/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_161/FusedBatchNormV3/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_161/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_161/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_resnet_v2/batch_normalization_161/FusedBatchNormV3/ReadVariableOp_1"; +"inception_resnet_v2/batch_normalization_161/FusedBatchNormV3/ReadVariableOp_1" -> "inception_resnet_v2/batch_normalization_161/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_161/FusedBatchNormV3" -> "inception_resnet_v2/activation_161/Relu"; +"inception_resnet_v2/batch_normalization_158/Const" -> "inception_resnet_v2/batch_normalization_158/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_158/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_158/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_158/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_158/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_158/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_158/FusedBatchNormV3/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_158/FusedBatchNormV3/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_158/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_158/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_resnet_v2/batch_normalization_158/FusedBatchNormV3/ReadVariableOp_1"; +"inception_resnet_v2/batch_normalization_158/FusedBatchNormV3/ReadVariableOp_1" -> "inception_resnet_v2/batch_normalization_158/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_158/FusedBatchNormV3" -> "inception_resnet_v2/activation_158/Relu"; +"inception_resnet_v2/batch_normalization_156/Const" -> "inception_resnet_v2/batch_normalization_156/FusedBatchNormV3"; 
+"inception_resnet_v2/batch_normalization_156/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_156/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_156/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_156/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_156/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_156/FusedBatchNormV3/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_156/FusedBatchNormV3/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_156/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_156/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_resnet_v2/batch_normalization_156/FusedBatchNormV3/ReadVariableOp_1"; +"inception_resnet_v2/batch_normalization_156/FusedBatchNormV3/ReadVariableOp_1" -> "inception_resnet_v2/batch_normalization_156/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_156/FusedBatchNormV3" -> "inception_resnet_v2/activation_156/Relu"; +"inception_resnet_v2/activation_161/Relu" -> "inception_resnet_v2/conv2d_162/Conv2D"; +"inception_resnet_v2/activation_158/Relu" -> "inception_resnet_v2/conv2d_159/Conv2D"; +"inception_resnet_v2/activation_156/Relu" -> "inception_resnet_v2/conv2d_157/Conv2D"; +"inception_resnet_v2/conv2d_162/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_162/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/conv2d_162/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/conv2d_162/Sigmoid"; +"inception_resnet_v2/conv2d_162/Sigmoid" -> "inception_resnet_v2/conv2d_162/Round"; +"inception_resnet_v2/conv2d_162/Round" -> "inception_resnet_v2/conv2d_162/mul"; +"inception_resnet_v2/conv2d_162/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_162/ReadVariableOp"; +"inception_resnet_v2/conv2d_162/ReadVariableOp" -> "inception_resnet_v2/conv2d_162/mul"; +"inception_resnet_v2/conv2d_162/mul" -> "inception_resnet_v2/conv2d_162/Conv2D"; +"inception_resnet_v2/conv2d_162/Conv2D" -> "inception_resnet_v2/batch_normalization_162/FusedBatchNormV3"; +"inception_resnet_v2/conv2d_159/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_159/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/conv2d_159/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/conv2d_159/Sigmoid"; +"inception_resnet_v2/conv2d_159/Sigmoid" -> "inception_resnet_v2/conv2d_159/Round"; +"inception_resnet_v2/conv2d_159/Round" -> "inception_resnet_v2/conv2d_159/mul"; +"inception_resnet_v2/conv2d_159/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_159/ReadVariableOp"; +"inception_resnet_v2/conv2d_159/ReadVariableOp" -> "inception_resnet_v2/conv2d_159/mul"; +"inception_resnet_v2/conv2d_159/mul" -> "inception_resnet_v2/conv2d_159/Conv2D"; +"inception_resnet_v2/conv2d_159/Conv2D" -> "inception_resnet_v2/batch_normalization_159/FusedBatchNormV3"; +"inception_resnet_v2/conv2d_157/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_157/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/conv2d_157/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/conv2d_157/Sigmoid"; +"inception_resnet_v2/conv2d_157/Sigmoid" -> "inception_resnet_v2/conv2d_157/Round"; +"inception_resnet_v2/conv2d_157/Round" -> "inception_resnet_v2/conv2d_157/mul"; +"inception_resnet_v2/conv2d_157/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_157/ReadVariableOp"; +"inception_resnet_v2/conv2d_157/ReadVariableOp" -> "inception_resnet_v2/conv2d_157/mul"; +"inception_resnet_v2/conv2d_157/mul" -> "inception_resnet_v2/conv2d_157/Conv2D"; 
+"inception_resnet_v2/conv2d_157/Conv2D" -> "inception_resnet_v2/batch_normalization_157/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_162/Const" -> "inception_resnet_v2/batch_normalization_162/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_162/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_162/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_162/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_162/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_162/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_162/FusedBatchNormV3/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_162/FusedBatchNormV3/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_162/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_162/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_resnet_v2/batch_normalization_162/FusedBatchNormV3/ReadVariableOp_1"; +"inception_resnet_v2/batch_normalization_162/FusedBatchNormV3/ReadVariableOp_1" -> "inception_resnet_v2/batch_normalization_162/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_162/FusedBatchNormV3" -> "inception_resnet_v2/activation_162/Relu"; +"inception_resnet_v2/batch_normalization_159/Const" -> "inception_resnet_v2/batch_normalization_159/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_159/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_159/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_159/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_159/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_159/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_159/FusedBatchNormV3/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_159/FusedBatchNormV3/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_159/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_159/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_resnet_v2/batch_normalization_159/FusedBatchNormV3/ReadVariableOp_1"; +"inception_resnet_v2/batch_normalization_159/FusedBatchNormV3/ReadVariableOp_1" -> "inception_resnet_v2/batch_normalization_159/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_159/FusedBatchNormV3" -> "inception_resnet_v2/activation_159/Relu"; +"inception_resnet_v2/batch_normalization_157/Const" -> "inception_resnet_v2/batch_normalization_157/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_157/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_157/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_157/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_157/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_157/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_157/FusedBatchNormV3/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_157/FusedBatchNormV3/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_157/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_157/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_resnet_v2/batch_normalization_157/FusedBatchNormV3/ReadVariableOp_1"; +"inception_resnet_v2/batch_normalization_157/FusedBatchNormV3/ReadVariableOp_1" -> "inception_resnet_v2/batch_normalization_157/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_157/FusedBatchNormV3" -> "inception_resnet_v2/activation_157/Relu"; +"inception_resnet_v2/activation_157/Relu" -> 
"inception_resnet_v2/mixed_7a/concat"; +"inception_resnet_v2/activation_159/Relu" -> "inception_resnet_v2/mixed_7a/concat"; +"inception_resnet_v2/activation_162/Relu" -> "inception_resnet_v2/mixed_7a/concat"; +"inception_resnet_v2/max_pooling2d_3/MaxPool" -> "inception_resnet_v2/mixed_7a/concat"; +"inception_resnet_v2/mixed_7a/concat/axis" -> "inception_resnet_v2/mixed_7a/concat"; +"inception_resnet_v2/mixed_7a/concat" -> "inception_resnet_v2/conv2d_164/Conv2D"; +"inception_resnet_v2/mixed_7a/concat" -> "inception_resnet_v2/conv2d_163/Conv2D"; +"inception_resnet_v2/mixed_7a/concat" -> "inception_resnet_v2/custom_scale_layer_30/add"; +"inception_resnet_v2/conv2d_164/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_164/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/conv2d_164/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/conv2d_164/Sigmoid"; +"inception_resnet_v2/conv2d_164/Sigmoid" -> "inception_resnet_v2/conv2d_164/Round"; +"inception_resnet_v2/conv2d_164/Round" -> "inception_resnet_v2/conv2d_164/mul"; +"inception_resnet_v2/conv2d_164/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_164/ReadVariableOp"; +"inception_resnet_v2/conv2d_164/ReadVariableOp" -> "inception_resnet_v2/conv2d_164/mul"; +"inception_resnet_v2/conv2d_164/mul" -> "inception_resnet_v2/conv2d_164/Conv2D"; +"inception_resnet_v2/conv2d_164/Conv2D" -> "inception_resnet_v2/batch_normalization_164/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_164/Const" -> "inception_resnet_v2/batch_normalization_164/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_164/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_164/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_164/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_164/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_164/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_164/FusedBatchNormV3/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_164/FusedBatchNormV3/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_164/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_164/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_resnet_v2/batch_normalization_164/FusedBatchNormV3/ReadVariableOp_1"; +"inception_resnet_v2/batch_normalization_164/FusedBatchNormV3/ReadVariableOp_1" -> "inception_resnet_v2/batch_normalization_164/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_164/FusedBatchNormV3" -> "inception_resnet_v2/activation_164/Relu"; +"inception_resnet_v2/activation_164/Relu" -> "inception_resnet_v2/conv2d_165/Conv2D"; +"inception_resnet_v2/conv2d_165/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_165/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/conv2d_165/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/conv2d_165/Sigmoid"; +"inception_resnet_v2/conv2d_165/Sigmoid" -> "inception_resnet_v2/conv2d_165/Round"; +"inception_resnet_v2/conv2d_165/Round" -> "inception_resnet_v2/conv2d_165/mul"; +"inception_resnet_v2/conv2d_165/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_165/ReadVariableOp"; +"inception_resnet_v2/conv2d_165/ReadVariableOp" -> "inception_resnet_v2/conv2d_165/mul"; +"inception_resnet_v2/conv2d_165/mul" -> "inception_resnet_v2/conv2d_165/Conv2D"; +"inception_resnet_v2/conv2d_165/Conv2D" -> "inception_resnet_v2/batch_normalization_165/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_165/Const" -> "inception_resnet_v2/batch_normalization_165/FusedBatchNormV3"; 
+"inception_resnet_v2/batch_normalization_165/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_165/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_165/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_165/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_165/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_165/FusedBatchNormV3/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_165/FusedBatchNormV3/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_165/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_165/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_resnet_v2/batch_normalization_165/FusedBatchNormV3/ReadVariableOp_1"; +"inception_resnet_v2/batch_normalization_165/FusedBatchNormV3/ReadVariableOp_1" -> "inception_resnet_v2/batch_normalization_165/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_165/FusedBatchNormV3" -> "inception_resnet_v2/activation_165/Relu"; +"inception_resnet_v2/activation_165/Relu" -> "inception_resnet_v2/conv2d_166/Conv2D"; +"inception_resnet_v2/conv2d_166/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_166/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/conv2d_166/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/conv2d_166/Sigmoid"; +"inception_resnet_v2/conv2d_166/Sigmoid" -> "inception_resnet_v2/conv2d_166/Round"; +"inception_resnet_v2/conv2d_166/Round" -> "inception_resnet_v2/conv2d_166/mul"; +"inception_resnet_v2/conv2d_166/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_166/ReadVariableOp"; +"inception_resnet_v2/conv2d_166/ReadVariableOp" -> "inception_resnet_v2/conv2d_166/mul"; +"inception_resnet_v2/conv2d_166/mul" -> "inception_resnet_v2/conv2d_166/Conv2D"; +"inception_resnet_v2/conv2d_166/Conv2D" -> "inception_resnet_v2/batch_normalization_166/FusedBatchNormV3"; +"inception_resnet_v2/conv2d_163/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_163/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/conv2d_163/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/conv2d_163/Sigmoid"; +"inception_resnet_v2/conv2d_163/Sigmoid" -> "inception_resnet_v2/conv2d_163/Round"; +"inception_resnet_v2/conv2d_163/Round" -> "inception_resnet_v2/conv2d_163/mul"; +"inception_resnet_v2/conv2d_163/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_163/ReadVariableOp"; +"inception_resnet_v2/conv2d_163/ReadVariableOp" -> "inception_resnet_v2/conv2d_163/mul"; +"inception_resnet_v2/conv2d_163/mul" -> "inception_resnet_v2/conv2d_163/Conv2D"; +"inception_resnet_v2/conv2d_163/Conv2D" -> "inception_resnet_v2/batch_normalization_163/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_166/Const" -> "inception_resnet_v2/batch_normalization_166/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_166/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_166/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_166/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_166/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_166/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_166/FusedBatchNormV3/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_166/FusedBatchNormV3/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_166/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_166/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_resnet_v2/batch_normalization_166/FusedBatchNormV3/ReadVariableOp_1"; 
+"inception_resnet_v2/batch_normalization_166/FusedBatchNormV3/ReadVariableOp_1" -> "inception_resnet_v2/batch_normalization_166/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_166/FusedBatchNormV3" -> "inception_resnet_v2/activation_166/Relu"; +"inception_resnet_v2/batch_normalization_163/Const" -> "inception_resnet_v2/batch_normalization_163/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_163/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_163/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_163/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_163/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_163/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_163/FusedBatchNormV3/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_163/FusedBatchNormV3/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_163/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_163/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_resnet_v2/batch_normalization_163/FusedBatchNormV3/ReadVariableOp_1"; +"inception_resnet_v2/batch_normalization_163/FusedBatchNormV3/ReadVariableOp_1" -> "inception_resnet_v2/batch_normalization_163/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_163/FusedBatchNormV3" -> "inception_resnet_v2/activation_163/Relu"; +"inception_resnet_v2/activation_163/Relu" -> "inception_resnet_v2/block8_1_mixed/concat"; +"inception_resnet_v2/activation_166/Relu" -> "inception_resnet_v2/block8_1_mixed/concat"; +"inception_resnet_v2/block8_1_mixed/concat/axis" -> "inception_resnet_v2/block8_1_mixed/concat"; +"inception_resnet_v2/block8_1_mixed/concat" -> "inception_resnet_v2/block8_1_conv/Conv2D"; +"inception_resnet_v2/block8_1_conv/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/block8_1_conv/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/block8_1_conv/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/block8_1_conv/Sigmoid"; +"inception_resnet_v2/block8_1_conv/Sigmoid" -> "inception_resnet_v2/block8_1_conv/Round"; +"inception_resnet_v2/block8_1_conv/Round" -> "inception_resnet_v2/block8_1_conv/mul"; +"inception_resnet_v2/block8_1_conv/ReadVariableOp/resource" -> "inception_resnet_v2/block8_1_conv/ReadVariableOp"; +"inception_resnet_v2/block8_1_conv/ReadVariableOp" -> "inception_resnet_v2/block8_1_conv/mul"; +"inception_resnet_v2/block8_1_conv/mul" -> "inception_resnet_v2/block8_1_conv/Conv2D"; +"inception_resnet_v2/block8_1_conv/Conv2D" -> "inception_resnet_v2/block8_1_conv/BiasAdd"; +"inception_resnet_v2/block8_1_conv/BiasAdd/ReadVariableOp/resource" -> "inception_resnet_v2/block8_1_conv/BiasAdd/ReadVariableOp"; +"inception_resnet_v2/block8_1_conv/BiasAdd/ReadVariableOp" -> "inception_resnet_v2/block8_1_conv/BiasAdd"; +"inception_resnet_v2/block8_1_conv/BiasAdd" -> "inception_resnet_v2/custom_scale_layer_30/mul"; +"inception_resnet_v2/custom_scale_layer_30/mul/y" -> "inception_resnet_v2/custom_scale_layer_30/mul"; +"inception_resnet_v2/custom_scale_layer_30/mul" -> "inception_resnet_v2/custom_scale_layer_30/add"; +"inception_resnet_v2/custom_scale_layer_30/add" -> "inception_resnet_v2/block8_1_ac/Relu"; +"inception_resnet_v2/block8_1_ac/Relu" -> "inception_resnet_v2/conv2d_168/Conv2D"; +"inception_resnet_v2/block8_1_ac/Relu" -> "inception_resnet_v2/conv2d_167/Conv2D"; +"inception_resnet_v2/block8_1_ac/Relu" -> "inception_resnet_v2/custom_scale_layer_31/add"; +"inception_resnet_v2/conv2d_168/Sigmoid/ReadVariableOp/resource" -> 
"inception_resnet_v2/conv2d_168/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/conv2d_168/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/conv2d_168/Sigmoid"; +"inception_resnet_v2/conv2d_168/Sigmoid" -> "inception_resnet_v2/conv2d_168/Round"; +"inception_resnet_v2/conv2d_168/Round" -> "inception_resnet_v2/conv2d_168/mul"; +"inception_resnet_v2/conv2d_168/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_168/ReadVariableOp"; +"inception_resnet_v2/conv2d_168/ReadVariableOp" -> "inception_resnet_v2/conv2d_168/mul"; +"inception_resnet_v2/conv2d_168/mul" -> "inception_resnet_v2/conv2d_168/Conv2D"; +"inception_resnet_v2/conv2d_168/Conv2D" -> "inception_resnet_v2/batch_normalization_168/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_168/Const" -> "inception_resnet_v2/batch_normalization_168/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_168/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_168/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_168/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_168/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_168/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_168/FusedBatchNormV3/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_168/FusedBatchNormV3/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_168/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_168/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_resnet_v2/batch_normalization_168/FusedBatchNormV3/ReadVariableOp_1"; +"inception_resnet_v2/batch_normalization_168/FusedBatchNormV3/ReadVariableOp_1" -> "inception_resnet_v2/batch_normalization_168/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_168/FusedBatchNormV3" -> "inception_resnet_v2/activation_168/Relu"; +"inception_resnet_v2/activation_168/Relu" -> "inception_resnet_v2/conv2d_169/Conv2D"; +"inception_resnet_v2/conv2d_169/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_169/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/conv2d_169/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/conv2d_169/Sigmoid"; +"inception_resnet_v2/conv2d_169/Sigmoid" -> "inception_resnet_v2/conv2d_169/Round"; +"inception_resnet_v2/conv2d_169/Round" -> "inception_resnet_v2/conv2d_169/mul"; +"inception_resnet_v2/conv2d_169/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_169/ReadVariableOp"; +"inception_resnet_v2/conv2d_169/ReadVariableOp" -> "inception_resnet_v2/conv2d_169/mul"; +"inception_resnet_v2/conv2d_169/mul" -> "inception_resnet_v2/conv2d_169/Conv2D"; +"inception_resnet_v2/conv2d_169/Conv2D" -> "inception_resnet_v2/batch_normalization_169/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_169/Const" -> "inception_resnet_v2/batch_normalization_169/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_169/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_169/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_169/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_169/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_169/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_169/FusedBatchNormV3/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_169/FusedBatchNormV3/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_169/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_169/FusedBatchNormV3/ReadVariableOp_1/resource" -> 
"inception_resnet_v2/batch_normalization_169/FusedBatchNormV3/ReadVariableOp_1"; +"inception_resnet_v2/batch_normalization_169/FusedBatchNormV3/ReadVariableOp_1" -> "inception_resnet_v2/batch_normalization_169/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_169/FusedBatchNormV3" -> "inception_resnet_v2/activation_169/Relu"; +"inception_resnet_v2/activation_169/Relu" -> "inception_resnet_v2/conv2d_170/Conv2D"; +"inception_resnet_v2/conv2d_170/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_170/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/conv2d_170/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/conv2d_170/Sigmoid"; +"inception_resnet_v2/conv2d_170/Sigmoid" -> "inception_resnet_v2/conv2d_170/Round"; +"inception_resnet_v2/conv2d_170/Round" -> "inception_resnet_v2/conv2d_170/mul"; +"inception_resnet_v2/conv2d_170/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_170/ReadVariableOp"; +"inception_resnet_v2/conv2d_170/ReadVariableOp" -> "inception_resnet_v2/conv2d_170/mul"; +"inception_resnet_v2/conv2d_170/mul" -> "inception_resnet_v2/conv2d_170/Conv2D"; +"inception_resnet_v2/conv2d_170/Conv2D" -> "inception_resnet_v2/batch_normalization_170/FusedBatchNormV3"; +"inception_resnet_v2/conv2d_167/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_167/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/conv2d_167/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/conv2d_167/Sigmoid"; +"inception_resnet_v2/conv2d_167/Sigmoid" -> "inception_resnet_v2/conv2d_167/Round"; +"inception_resnet_v2/conv2d_167/Round" -> "inception_resnet_v2/conv2d_167/mul"; +"inception_resnet_v2/conv2d_167/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_167/ReadVariableOp"; +"inception_resnet_v2/conv2d_167/ReadVariableOp" -> "inception_resnet_v2/conv2d_167/mul"; +"inception_resnet_v2/conv2d_167/mul" -> "inception_resnet_v2/conv2d_167/Conv2D"; +"inception_resnet_v2/conv2d_167/Conv2D" -> "inception_resnet_v2/batch_normalization_167/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_170/Const" -> "inception_resnet_v2/batch_normalization_170/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_170/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_170/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_170/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_170/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_170/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_170/FusedBatchNormV3/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_170/FusedBatchNormV3/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_170/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_170/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_resnet_v2/batch_normalization_170/FusedBatchNormV3/ReadVariableOp_1"; +"inception_resnet_v2/batch_normalization_170/FusedBatchNormV3/ReadVariableOp_1" -> "inception_resnet_v2/batch_normalization_170/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_170/FusedBatchNormV3" -> "inception_resnet_v2/activation_170/Relu"; +"inception_resnet_v2/batch_normalization_167/Const" -> "inception_resnet_v2/batch_normalization_167/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_167/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_167/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_167/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_167/FusedBatchNormV3"; 
+"inception_resnet_v2/batch_normalization_167/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_167/FusedBatchNormV3/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_167/FusedBatchNormV3/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_167/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_167/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_resnet_v2/batch_normalization_167/FusedBatchNormV3/ReadVariableOp_1"; +"inception_resnet_v2/batch_normalization_167/FusedBatchNormV3/ReadVariableOp_1" -> "inception_resnet_v2/batch_normalization_167/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_167/FusedBatchNormV3" -> "inception_resnet_v2/activation_167/Relu"; +"inception_resnet_v2/activation_167/Relu" -> "inception_resnet_v2/block8_2_mixed/concat"; +"inception_resnet_v2/activation_170/Relu" -> "inception_resnet_v2/block8_2_mixed/concat"; +"inception_resnet_v2/block8_2_mixed/concat/axis" -> "inception_resnet_v2/block8_2_mixed/concat"; +"inception_resnet_v2/block8_2_mixed/concat" -> "inception_resnet_v2/block8_2_conv/Conv2D"; +"inception_resnet_v2/block8_2_conv/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/block8_2_conv/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/block8_2_conv/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/block8_2_conv/Sigmoid"; +"inception_resnet_v2/block8_2_conv/Sigmoid" -> "inception_resnet_v2/block8_2_conv/Round"; +"inception_resnet_v2/block8_2_conv/Round" -> "inception_resnet_v2/block8_2_conv/mul"; +"inception_resnet_v2/block8_2_conv/ReadVariableOp/resource" -> "inception_resnet_v2/block8_2_conv/ReadVariableOp"; +"inception_resnet_v2/block8_2_conv/ReadVariableOp" -> "inception_resnet_v2/block8_2_conv/mul"; +"inception_resnet_v2/block8_2_conv/mul" -> "inception_resnet_v2/block8_2_conv/Conv2D"; +"inception_resnet_v2/block8_2_conv/Conv2D" -> "inception_resnet_v2/block8_2_conv/BiasAdd"; +"inception_resnet_v2/block8_2_conv/BiasAdd/ReadVariableOp/resource" -> "inception_resnet_v2/block8_2_conv/BiasAdd/ReadVariableOp"; +"inception_resnet_v2/block8_2_conv/BiasAdd/ReadVariableOp" -> "inception_resnet_v2/block8_2_conv/BiasAdd"; +"inception_resnet_v2/block8_2_conv/BiasAdd" -> "inception_resnet_v2/custom_scale_layer_31/mul"; +"inception_resnet_v2/custom_scale_layer_31/mul/y" -> "inception_resnet_v2/custom_scale_layer_31/mul"; +"inception_resnet_v2/custom_scale_layer_31/mul" -> "inception_resnet_v2/custom_scale_layer_31/add"; +"inception_resnet_v2/custom_scale_layer_31/add" -> "inception_resnet_v2/block8_2_ac/Relu"; +"inception_resnet_v2/block8_2_ac/Relu" -> "inception_resnet_v2/conv2d_172/Conv2D"; +"inception_resnet_v2/block8_2_ac/Relu" -> "inception_resnet_v2/conv2d_171/Conv2D"; +"inception_resnet_v2/block8_2_ac/Relu" -> "inception_resnet_v2/custom_scale_layer_32/add"; +"inception_resnet_v2/conv2d_172/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_172/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/conv2d_172/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/conv2d_172/Sigmoid"; +"inception_resnet_v2/conv2d_172/Sigmoid" -> "inception_resnet_v2/conv2d_172/Round"; +"inception_resnet_v2/conv2d_172/Round" -> "inception_resnet_v2/conv2d_172/mul"; +"inception_resnet_v2/conv2d_172/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_172/ReadVariableOp"; +"inception_resnet_v2/conv2d_172/ReadVariableOp" -> "inception_resnet_v2/conv2d_172/mul"; +"inception_resnet_v2/conv2d_172/mul" -> "inception_resnet_v2/conv2d_172/Conv2D"; +"inception_resnet_v2/conv2d_172/Conv2D" -> 
"inception_resnet_v2/batch_normalization_172/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_172/Const" -> "inception_resnet_v2/batch_normalization_172/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_172/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_172/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_172/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_172/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_172/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_172/FusedBatchNormV3/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_172/FusedBatchNormV3/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_172/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_172/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_resnet_v2/batch_normalization_172/FusedBatchNormV3/ReadVariableOp_1"; +"inception_resnet_v2/batch_normalization_172/FusedBatchNormV3/ReadVariableOp_1" -> "inception_resnet_v2/batch_normalization_172/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_172/FusedBatchNormV3" -> "inception_resnet_v2/activation_172/Relu"; +"inception_resnet_v2/activation_172/Relu" -> "inception_resnet_v2/conv2d_173/Conv2D"; +"inception_resnet_v2/conv2d_173/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_173/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/conv2d_173/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/conv2d_173/Sigmoid"; +"inception_resnet_v2/conv2d_173/Sigmoid" -> "inception_resnet_v2/conv2d_173/Round"; +"inception_resnet_v2/conv2d_173/Round" -> "inception_resnet_v2/conv2d_173/mul"; +"inception_resnet_v2/conv2d_173/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_173/ReadVariableOp"; +"inception_resnet_v2/conv2d_173/ReadVariableOp" -> "inception_resnet_v2/conv2d_173/mul"; +"inception_resnet_v2/conv2d_173/mul" -> "inception_resnet_v2/conv2d_173/Conv2D"; +"inception_resnet_v2/conv2d_173/Conv2D" -> "inception_resnet_v2/batch_normalization_173/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_173/Const" -> "inception_resnet_v2/batch_normalization_173/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_173/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_173/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_173/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_173/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_173/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_173/FusedBatchNormV3/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_173/FusedBatchNormV3/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_173/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_173/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_resnet_v2/batch_normalization_173/FusedBatchNormV3/ReadVariableOp_1"; +"inception_resnet_v2/batch_normalization_173/FusedBatchNormV3/ReadVariableOp_1" -> "inception_resnet_v2/batch_normalization_173/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_173/FusedBatchNormV3" -> "inception_resnet_v2/activation_173/Relu"; +"inception_resnet_v2/activation_173/Relu" -> "inception_resnet_v2/conv2d_174/Conv2D"; +"inception_resnet_v2/conv2d_174/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_174/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/conv2d_174/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/conv2d_174/Sigmoid"; 
+"inception_resnet_v2/conv2d_174/Sigmoid" -> "inception_resnet_v2/conv2d_174/Round"; +"inception_resnet_v2/conv2d_174/Round" -> "inception_resnet_v2/conv2d_174/mul"; +"inception_resnet_v2/conv2d_174/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_174/ReadVariableOp"; +"inception_resnet_v2/conv2d_174/ReadVariableOp" -> "inception_resnet_v2/conv2d_174/mul"; +"inception_resnet_v2/conv2d_174/mul" -> "inception_resnet_v2/conv2d_174/Conv2D"; +"inception_resnet_v2/conv2d_174/Conv2D" -> "inception_resnet_v2/batch_normalization_174/FusedBatchNormV3"; +"inception_resnet_v2/conv2d_171/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_171/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/conv2d_171/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/conv2d_171/Sigmoid"; +"inception_resnet_v2/conv2d_171/Sigmoid" -> "inception_resnet_v2/conv2d_171/Round"; +"inception_resnet_v2/conv2d_171/Round" -> "inception_resnet_v2/conv2d_171/mul"; +"inception_resnet_v2/conv2d_171/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_171/ReadVariableOp"; +"inception_resnet_v2/conv2d_171/ReadVariableOp" -> "inception_resnet_v2/conv2d_171/mul"; +"inception_resnet_v2/conv2d_171/mul" -> "inception_resnet_v2/conv2d_171/Conv2D"; +"inception_resnet_v2/conv2d_171/Conv2D" -> "inception_resnet_v2/batch_normalization_171/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_174/Const" -> "inception_resnet_v2/batch_normalization_174/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_174/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_174/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_174/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_174/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_174/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_174/FusedBatchNormV3/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_174/FusedBatchNormV3/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_174/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_174/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_resnet_v2/batch_normalization_174/FusedBatchNormV3/ReadVariableOp_1"; +"inception_resnet_v2/batch_normalization_174/FusedBatchNormV3/ReadVariableOp_1" -> "inception_resnet_v2/batch_normalization_174/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_174/FusedBatchNormV3" -> "inception_resnet_v2/activation_174/Relu"; +"inception_resnet_v2/batch_normalization_171/Const" -> "inception_resnet_v2/batch_normalization_171/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_171/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_171/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_171/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_171/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_171/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_171/FusedBatchNormV3/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_171/FusedBatchNormV3/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_171/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_171/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_resnet_v2/batch_normalization_171/FusedBatchNormV3/ReadVariableOp_1"; +"inception_resnet_v2/batch_normalization_171/FusedBatchNormV3/ReadVariableOp_1" -> "inception_resnet_v2/batch_normalization_171/FusedBatchNormV3"; 
+"inception_resnet_v2/batch_normalization_171/FusedBatchNormV3" -> "inception_resnet_v2/activation_171/Relu"; +"inception_resnet_v2/activation_171/Relu" -> "inception_resnet_v2/block8_3_mixed/concat"; +"inception_resnet_v2/activation_174/Relu" -> "inception_resnet_v2/block8_3_mixed/concat"; +"inception_resnet_v2/block8_3_mixed/concat/axis" -> "inception_resnet_v2/block8_3_mixed/concat"; +"inception_resnet_v2/block8_3_mixed/concat" -> "inception_resnet_v2/block8_3_conv/Conv2D"; +"inception_resnet_v2/block8_3_conv/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/block8_3_conv/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/block8_3_conv/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/block8_3_conv/Sigmoid"; +"inception_resnet_v2/block8_3_conv/Sigmoid" -> "inception_resnet_v2/block8_3_conv/Round"; +"inception_resnet_v2/block8_3_conv/Round" -> "inception_resnet_v2/block8_3_conv/mul"; +"inception_resnet_v2/block8_3_conv/ReadVariableOp/resource" -> "inception_resnet_v2/block8_3_conv/ReadVariableOp"; +"inception_resnet_v2/block8_3_conv/ReadVariableOp" -> "inception_resnet_v2/block8_3_conv/mul"; +"inception_resnet_v2/block8_3_conv/mul" -> "inception_resnet_v2/block8_3_conv/Conv2D"; +"inception_resnet_v2/block8_3_conv/Conv2D" -> "inception_resnet_v2/block8_3_conv/BiasAdd"; +"inception_resnet_v2/block8_3_conv/BiasAdd/ReadVariableOp/resource" -> "inception_resnet_v2/block8_3_conv/BiasAdd/ReadVariableOp"; +"inception_resnet_v2/block8_3_conv/BiasAdd/ReadVariableOp" -> "inception_resnet_v2/block8_3_conv/BiasAdd"; +"inception_resnet_v2/block8_3_conv/BiasAdd" -> "inception_resnet_v2/custom_scale_layer_32/mul"; +"inception_resnet_v2/custom_scale_layer_32/mul/y" -> "inception_resnet_v2/custom_scale_layer_32/mul"; +"inception_resnet_v2/custom_scale_layer_32/mul" -> "inception_resnet_v2/custom_scale_layer_32/add"; +"inception_resnet_v2/custom_scale_layer_32/add" -> "inception_resnet_v2/block8_3_ac/Relu"; +"inception_resnet_v2/block8_3_ac/Relu" -> "inception_resnet_v2/conv2d_176/Conv2D"; +"inception_resnet_v2/block8_3_ac/Relu" -> "inception_resnet_v2/conv2d_175/Conv2D"; +"inception_resnet_v2/block8_3_ac/Relu" -> "inception_resnet_v2/custom_scale_layer_33/add"; +"inception_resnet_v2/conv2d_176/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_176/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/conv2d_176/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/conv2d_176/Sigmoid"; +"inception_resnet_v2/conv2d_176/Sigmoid" -> "inception_resnet_v2/conv2d_176/Round"; +"inception_resnet_v2/conv2d_176/Round" -> "inception_resnet_v2/conv2d_176/mul"; +"inception_resnet_v2/conv2d_176/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_176/ReadVariableOp"; +"inception_resnet_v2/conv2d_176/ReadVariableOp" -> "inception_resnet_v2/conv2d_176/mul"; +"inception_resnet_v2/conv2d_176/mul" -> "inception_resnet_v2/conv2d_176/Conv2D"; +"inception_resnet_v2/conv2d_176/Conv2D" -> "inception_resnet_v2/batch_normalization_176/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_176/Const" -> "inception_resnet_v2/batch_normalization_176/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_176/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_176/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_176/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_176/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_176/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_176/FusedBatchNormV3/ReadVariableOp"; 
+"inception_resnet_v2/batch_normalization_176/FusedBatchNormV3/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_176/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_176/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_resnet_v2/batch_normalization_176/FusedBatchNormV3/ReadVariableOp_1"; +"inception_resnet_v2/batch_normalization_176/FusedBatchNormV3/ReadVariableOp_1" -> "inception_resnet_v2/batch_normalization_176/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_176/FusedBatchNormV3" -> "inception_resnet_v2/activation_176/Relu"; +"inception_resnet_v2/activation_176/Relu" -> "inception_resnet_v2/conv2d_177/Conv2D"; +"inception_resnet_v2/conv2d_177/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_177/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/conv2d_177/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/conv2d_177/Sigmoid"; +"inception_resnet_v2/conv2d_177/Sigmoid" -> "inception_resnet_v2/conv2d_177/Round"; +"inception_resnet_v2/conv2d_177/Round" -> "inception_resnet_v2/conv2d_177/mul"; +"inception_resnet_v2/conv2d_177/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_177/ReadVariableOp"; +"inception_resnet_v2/conv2d_177/ReadVariableOp" -> "inception_resnet_v2/conv2d_177/mul"; +"inception_resnet_v2/conv2d_177/mul" -> "inception_resnet_v2/conv2d_177/Conv2D"; +"inception_resnet_v2/conv2d_177/Conv2D" -> "inception_resnet_v2/batch_normalization_177/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_177/Const" -> "inception_resnet_v2/batch_normalization_177/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_177/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_177/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_177/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_177/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_177/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_177/FusedBatchNormV3/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_177/FusedBatchNormV3/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_177/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_177/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_resnet_v2/batch_normalization_177/FusedBatchNormV3/ReadVariableOp_1"; +"inception_resnet_v2/batch_normalization_177/FusedBatchNormV3/ReadVariableOp_1" -> "inception_resnet_v2/batch_normalization_177/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_177/FusedBatchNormV3" -> "inception_resnet_v2/activation_177/Relu"; +"inception_resnet_v2/activation_177/Relu" -> "inception_resnet_v2/conv2d_178/Conv2D"; +"inception_resnet_v2/conv2d_178/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_178/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/conv2d_178/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/conv2d_178/Sigmoid"; +"inception_resnet_v2/conv2d_178/Sigmoid" -> "inception_resnet_v2/conv2d_178/Round"; +"inception_resnet_v2/conv2d_178/Round" -> "inception_resnet_v2/conv2d_178/mul"; +"inception_resnet_v2/conv2d_178/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_178/ReadVariableOp"; +"inception_resnet_v2/conv2d_178/ReadVariableOp" -> "inception_resnet_v2/conv2d_178/mul"; +"inception_resnet_v2/conv2d_178/mul" -> "inception_resnet_v2/conv2d_178/Conv2D"; +"inception_resnet_v2/conv2d_178/Conv2D" -> "inception_resnet_v2/batch_normalization_178/FusedBatchNormV3"; +"inception_resnet_v2/conv2d_175/Sigmoid/ReadVariableOp/resource" -> 
"inception_resnet_v2/conv2d_175/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/conv2d_175/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/conv2d_175/Sigmoid"; +"inception_resnet_v2/conv2d_175/Sigmoid" -> "inception_resnet_v2/conv2d_175/Round"; +"inception_resnet_v2/conv2d_175/Round" -> "inception_resnet_v2/conv2d_175/mul"; +"inception_resnet_v2/conv2d_175/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_175/ReadVariableOp"; +"inception_resnet_v2/conv2d_175/ReadVariableOp" -> "inception_resnet_v2/conv2d_175/mul"; +"inception_resnet_v2/conv2d_175/mul" -> "inception_resnet_v2/conv2d_175/Conv2D"; +"inception_resnet_v2/conv2d_175/Conv2D" -> "inception_resnet_v2/batch_normalization_175/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_178/Const" -> "inception_resnet_v2/batch_normalization_178/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_178/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_178/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_178/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_178/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_178/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_178/FusedBatchNormV3/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_178/FusedBatchNormV3/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_178/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_178/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_resnet_v2/batch_normalization_178/FusedBatchNormV3/ReadVariableOp_1"; +"inception_resnet_v2/batch_normalization_178/FusedBatchNormV3/ReadVariableOp_1" -> "inception_resnet_v2/batch_normalization_178/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_178/FusedBatchNormV3" -> "inception_resnet_v2/activation_178/Relu"; +"inception_resnet_v2/batch_normalization_175/Const" -> "inception_resnet_v2/batch_normalization_175/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_175/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_175/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_175/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_175/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_175/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_175/FusedBatchNormV3/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_175/FusedBatchNormV3/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_175/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_175/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_resnet_v2/batch_normalization_175/FusedBatchNormV3/ReadVariableOp_1"; +"inception_resnet_v2/batch_normalization_175/FusedBatchNormV3/ReadVariableOp_1" -> "inception_resnet_v2/batch_normalization_175/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_175/FusedBatchNormV3" -> "inception_resnet_v2/activation_175/Relu"; +"inception_resnet_v2/activation_175/Relu" -> "inception_resnet_v2/block8_4_mixed/concat"; +"inception_resnet_v2/activation_178/Relu" -> "inception_resnet_v2/block8_4_mixed/concat"; +"inception_resnet_v2/block8_4_mixed/concat/axis" -> "inception_resnet_v2/block8_4_mixed/concat"; +"inception_resnet_v2/block8_4_mixed/concat" -> "inception_resnet_v2/block8_4_conv/Conv2D"; +"inception_resnet_v2/block8_4_conv/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/block8_4_conv/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/block8_4_conv/Sigmoid/ReadVariableOp" -> 
"inception_resnet_v2/block8_4_conv/Sigmoid"; +"inception_resnet_v2/block8_4_conv/Sigmoid" -> "inception_resnet_v2/block8_4_conv/Round"; +"inception_resnet_v2/block8_4_conv/Round" -> "inception_resnet_v2/block8_4_conv/mul"; +"inception_resnet_v2/block8_4_conv/ReadVariableOp/resource" -> "inception_resnet_v2/block8_4_conv/ReadVariableOp"; +"inception_resnet_v2/block8_4_conv/ReadVariableOp" -> "inception_resnet_v2/block8_4_conv/mul"; +"inception_resnet_v2/block8_4_conv/mul" -> "inception_resnet_v2/block8_4_conv/Conv2D"; +"inception_resnet_v2/block8_4_conv/Conv2D" -> "inception_resnet_v2/block8_4_conv/BiasAdd"; +"inception_resnet_v2/block8_4_conv/BiasAdd/ReadVariableOp/resource" -> "inception_resnet_v2/block8_4_conv/BiasAdd/ReadVariableOp"; +"inception_resnet_v2/block8_4_conv/BiasAdd/ReadVariableOp" -> "inception_resnet_v2/block8_4_conv/BiasAdd"; +"inception_resnet_v2/block8_4_conv/BiasAdd" -> "inception_resnet_v2/custom_scale_layer_33/mul"; +"inception_resnet_v2/custom_scale_layer_33/mul/y" -> "inception_resnet_v2/custom_scale_layer_33/mul"; +"inception_resnet_v2/custom_scale_layer_33/mul" -> "inception_resnet_v2/custom_scale_layer_33/add"; +"inception_resnet_v2/custom_scale_layer_33/add" -> "inception_resnet_v2/block8_4_ac/Relu"; +"inception_resnet_v2/block8_4_ac/Relu" -> "inception_resnet_v2/conv2d_180/Conv2D"; +"inception_resnet_v2/block8_4_ac/Relu" -> "inception_resnet_v2/conv2d_179/Conv2D"; +"inception_resnet_v2/block8_4_ac/Relu" -> "inception_resnet_v2/custom_scale_layer_34/add"; +"inception_resnet_v2/conv2d_180/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_180/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/conv2d_180/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/conv2d_180/Sigmoid"; +"inception_resnet_v2/conv2d_180/Sigmoid" -> "inception_resnet_v2/conv2d_180/Round"; +"inception_resnet_v2/conv2d_180/Round" -> "inception_resnet_v2/conv2d_180/mul"; +"inception_resnet_v2/conv2d_180/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_180/ReadVariableOp"; +"inception_resnet_v2/conv2d_180/ReadVariableOp" -> "inception_resnet_v2/conv2d_180/mul"; +"inception_resnet_v2/conv2d_180/mul" -> "inception_resnet_v2/conv2d_180/Conv2D"; +"inception_resnet_v2/conv2d_180/Conv2D" -> "inception_resnet_v2/batch_normalization_180/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_180/Const" -> "inception_resnet_v2/batch_normalization_180/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_180/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_180/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_180/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_180/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_180/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_180/FusedBatchNormV3/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_180/FusedBatchNormV3/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_180/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_180/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_resnet_v2/batch_normalization_180/FusedBatchNormV3/ReadVariableOp_1"; +"inception_resnet_v2/batch_normalization_180/FusedBatchNormV3/ReadVariableOp_1" -> "inception_resnet_v2/batch_normalization_180/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_180/FusedBatchNormV3" -> "inception_resnet_v2/activation_180/Relu"; +"inception_resnet_v2/activation_180/Relu" -> "inception_resnet_v2/conv2d_181/Conv2D"; 
+"inception_resnet_v2/conv2d_181/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_181/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/conv2d_181/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/conv2d_181/Sigmoid"; +"inception_resnet_v2/conv2d_181/Sigmoid" -> "inception_resnet_v2/conv2d_181/Round"; +"inception_resnet_v2/conv2d_181/Round" -> "inception_resnet_v2/conv2d_181/mul"; +"inception_resnet_v2/conv2d_181/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_181/ReadVariableOp"; +"inception_resnet_v2/conv2d_181/ReadVariableOp" -> "inception_resnet_v2/conv2d_181/mul"; +"inception_resnet_v2/conv2d_181/mul" -> "inception_resnet_v2/conv2d_181/Conv2D"; +"inception_resnet_v2/conv2d_181/Conv2D" -> "inception_resnet_v2/batch_normalization_181/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_181/Const" -> "inception_resnet_v2/batch_normalization_181/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_181/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_181/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_181/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_181/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_181/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_181/FusedBatchNormV3/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_181/FusedBatchNormV3/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_181/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_181/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_resnet_v2/batch_normalization_181/FusedBatchNormV3/ReadVariableOp_1"; +"inception_resnet_v2/batch_normalization_181/FusedBatchNormV3/ReadVariableOp_1" -> "inception_resnet_v2/batch_normalization_181/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_181/FusedBatchNormV3" -> "inception_resnet_v2/activation_181/Relu"; +"inception_resnet_v2/activation_181/Relu" -> "inception_resnet_v2/conv2d_182/Conv2D"; +"inception_resnet_v2/conv2d_182/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_182/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/conv2d_182/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/conv2d_182/Sigmoid"; +"inception_resnet_v2/conv2d_182/Sigmoid" -> "inception_resnet_v2/conv2d_182/Round"; +"inception_resnet_v2/conv2d_182/Round" -> "inception_resnet_v2/conv2d_182/mul"; +"inception_resnet_v2/conv2d_182/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_182/ReadVariableOp"; +"inception_resnet_v2/conv2d_182/ReadVariableOp" -> "inception_resnet_v2/conv2d_182/mul"; +"inception_resnet_v2/conv2d_182/mul" -> "inception_resnet_v2/conv2d_182/Conv2D"; +"inception_resnet_v2/conv2d_182/Conv2D" -> "inception_resnet_v2/batch_normalization_182/FusedBatchNormV3"; +"inception_resnet_v2/conv2d_179/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_179/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/conv2d_179/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/conv2d_179/Sigmoid"; +"inception_resnet_v2/conv2d_179/Sigmoid" -> "inception_resnet_v2/conv2d_179/Round"; +"inception_resnet_v2/conv2d_179/Round" -> "inception_resnet_v2/conv2d_179/mul"; +"inception_resnet_v2/conv2d_179/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_179/ReadVariableOp"; +"inception_resnet_v2/conv2d_179/ReadVariableOp" -> "inception_resnet_v2/conv2d_179/mul"; +"inception_resnet_v2/conv2d_179/mul" -> "inception_resnet_v2/conv2d_179/Conv2D"; +"inception_resnet_v2/conv2d_179/Conv2D" -> 
"inception_resnet_v2/batch_normalization_179/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_182/Const" -> "inception_resnet_v2/batch_normalization_182/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_182/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_182/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_182/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_182/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_182/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_182/FusedBatchNormV3/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_182/FusedBatchNormV3/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_182/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_182/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_resnet_v2/batch_normalization_182/FusedBatchNormV3/ReadVariableOp_1"; +"inception_resnet_v2/batch_normalization_182/FusedBatchNormV3/ReadVariableOp_1" -> "inception_resnet_v2/batch_normalization_182/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_182/FusedBatchNormV3" -> "inception_resnet_v2/activation_182/Relu"; +"inception_resnet_v2/batch_normalization_179/Const" -> "inception_resnet_v2/batch_normalization_179/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_179/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_179/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_179/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_179/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_179/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_179/FusedBatchNormV3/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_179/FusedBatchNormV3/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_179/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_179/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_resnet_v2/batch_normalization_179/FusedBatchNormV3/ReadVariableOp_1"; +"inception_resnet_v2/batch_normalization_179/FusedBatchNormV3/ReadVariableOp_1" -> "inception_resnet_v2/batch_normalization_179/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_179/FusedBatchNormV3" -> "inception_resnet_v2/activation_179/Relu"; +"inception_resnet_v2/activation_179/Relu" -> "inception_resnet_v2/block8_5_mixed/concat"; +"inception_resnet_v2/activation_182/Relu" -> "inception_resnet_v2/block8_5_mixed/concat"; +"inception_resnet_v2/block8_5_mixed/concat/axis" -> "inception_resnet_v2/block8_5_mixed/concat"; +"inception_resnet_v2/block8_5_mixed/concat" -> "inception_resnet_v2/block8_5_conv/Conv2D"; +"inception_resnet_v2/block8_5_conv/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/block8_5_conv/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/block8_5_conv/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/block8_5_conv/Sigmoid"; +"inception_resnet_v2/block8_5_conv/Sigmoid" -> "inception_resnet_v2/block8_5_conv/Round"; +"inception_resnet_v2/block8_5_conv/Round" -> "inception_resnet_v2/block8_5_conv/mul"; +"inception_resnet_v2/block8_5_conv/ReadVariableOp/resource" -> "inception_resnet_v2/block8_5_conv/ReadVariableOp"; +"inception_resnet_v2/block8_5_conv/ReadVariableOp" -> "inception_resnet_v2/block8_5_conv/mul"; +"inception_resnet_v2/block8_5_conv/mul" -> "inception_resnet_v2/block8_5_conv/Conv2D"; +"inception_resnet_v2/block8_5_conv/Conv2D" -> "inception_resnet_v2/block8_5_conv/BiasAdd"; 
+"inception_resnet_v2/block8_5_conv/BiasAdd/ReadVariableOp/resource" -> "inception_resnet_v2/block8_5_conv/BiasAdd/ReadVariableOp"; +"inception_resnet_v2/block8_5_conv/BiasAdd/ReadVariableOp" -> "inception_resnet_v2/block8_5_conv/BiasAdd"; +"inception_resnet_v2/block8_5_conv/BiasAdd" -> "inception_resnet_v2/custom_scale_layer_34/mul"; +"inception_resnet_v2/custom_scale_layer_34/mul/y" -> "inception_resnet_v2/custom_scale_layer_34/mul"; +"inception_resnet_v2/custom_scale_layer_34/mul" -> "inception_resnet_v2/custom_scale_layer_34/add"; +"inception_resnet_v2/custom_scale_layer_34/add" -> "inception_resnet_v2/block8_5_ac/Relu"; +"inception_resnet_v2/block8_5_ac/Relu" -> "inception_resnet_v2/conv2d_184/Conv2D"; +"inception_resnet_v2/block8_5_ac/Relu" -> "inception_resnet_v2/conv2d_183/Conv2D"; +"inception_resnet_v2/block8_5_ac/Relu" -> "inception_resnet_v2/custom_scale_layer_35/add"; +"inception_resnet_v2/conv2d_184/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_184/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/conv2d_184/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/conv2d_184/Sigmoid"; +"inception_resnet_v2/conv2d_184/Sigmoid" -> "inception_resnet_v2/conv2d_184/Round"; +"inception_resnet_v2/conv2d_184/Round" -> "inception_resnet_v2/conv2d_184/mul"; +"inception_resnet_v2/conv2d_184/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_184/ReadVariableOp"; +"inception_resnet_v2/conv2d_184/ReadVariableOp" -> "inception_resnet_v2/conv2d_184/mul"; +"inception_resnet_v2/conv2d_184/mul" -> "inception_resnet_v2/conv2d_184/Conv2D"; +"inception_resnet_v2/conv2d_184/Conv2D" -> "inception_resnet_v2/batch_normalization_184/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_184/Const" -> "inception_resnet_v2/batch_normalization_184/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_184/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_184/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_184/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_184/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_184/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_184/FusedBatchNormV3/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_184/FusedBatchNormV3/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_184/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_184/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_resnet_v2/batch_normalization_184/FusedBatchNormV3/ReadVariableOp_1"; +"inception_resnet_v2/batch_normalization_184/FusedBatchNormV3/ReadVariableOp_1" -> "inception_resnet_v2/batch_normalization_184/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_184/FusedBatchNormV3" -> "inception_resnet_v2/activation_184/Relu"; +"inception_resnet_v2/activation_184/Relu" -> "inception_resnet_v2/conv2d_185/Conv2D"; +"inception_resnet_v2/conv2d_185/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_185/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/conv2d_185/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/conv2d_185/Sigmoid"; +"inception_resnet_v2/conv2d_185/Sigmoid" -> "inception_resnet_v2/conv2d_185/Round"; +"inception_resnet_v2/conv2d_185/Round" -> "inception_resnet_v2/conv2d_185/mul"; +"inception_resnet_v2/conv2d_185/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_185/ReadVariableOp"; +"inception_resnet_v2/conv2d_185/ReadVariableOp" -> "inception_resnet_v2/conv2d_185/mul"; +"inception_resnet_v2/conv2d_185/mul" -> 
"inception_resnet_v2/conv2d_185/Conv2D"; +"inception_resnet_v2/conv2d_185/Conv2D" -> "inception_resnet_v2/batch_normalization_185/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_185/Const" -> "inception_resnet_v2/batch_normalization_185/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_185/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_185/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_185/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_185/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_185/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_185/FusedBatchNormV3/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_185/FusedBatchNormV3/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_185/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_185/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_resnet_v2/batch_normalization_185/FusedBatchNormV3/ReadVariableOp_1"; +"inception_resnet_v2/batch_normalization_185/FusedBatchNormV3/ReadVariableOp_1" -> "inception_resnet_v2/batch_normalization_185/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_185/FusedBatchNormV3" -> "inception_resnet_v2/activation_185/Relu"; +"inception_resnet_v2/activation_185/Relu" -> "inception_resnet_v2/conv2d_186/Conv2D"; +"inception_resnet_v2/conv2d_186/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_186/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/conv2d_186/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/conv2d_186/Sigmoid"; +"inception_resnet_v2/conv2d_186/Sigmoid" -> "inception_resnet_v2/conv2d_186/Round"; +"inception_resnet_v2/conv2d_186/Round" -> "inception_resnet_v2/conv2d_186/mul"; +"inception_resnet_v2/conv2d_186/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_186/ReadVariableOp"; +"inception_resnet_v2/conv2d_186/ReadVariableOp" -> "inception_resnet_v2/conv2d_186/mul"; +"inception_resnet_v2/conv2d_186/mul" -> "inception_resnet_v2/conv2d_186/Conv2D"; +"inception_resnet_v2/conv2d_186/Conv2D" -> "inception_resnet_v2/batch_normalization_186/FusedBatchNormV3"; +"inception_resnet_v2/conv2d_183/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_183/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/conv2d_183/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/conv2d_183/Sigmoid"; +"inception_resnet_v2/conv2d_183/Sigmoid" -> "inception_resnet_v2/conv2d_183/Round"; +"inception_resnet_v2/conv2d_183/Round" -> "inception_resnet_v2/conv2d_183/mul"; +"inception_resnet_v2/conv2d_183/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_183/ReadVariableOp"; +"inception_resnet_v2/conv2d_183/ReadVariableOp" -> "inception_resnet_v2/conv2d_183/mul"; +"inception_resnet_v2/conv2d_183/mul" -> "inception_resnet_v2/conv2d_183/Conv2D"; +"inception_resnet_v2/conv2d_183/Conv2D" -> "inception_resnet_v2/batch_normalization_183/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_186/Const" -> "inception_resnet_v2/batch_normalization_186/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_186/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_186/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_186/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_186/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_186/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_186/FusedBatchNormV3/ReadVariableOp"; 
+"inception_resnet_v2/batch_normalization_186/FusedBatchNormV3/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_186/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_186/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_resnet_v2/batch_normalization_186/FusedBatchNormV3/ReadVariableOp_1"; +"inception_resnet_v2/batch_normalization_186/FusedBatchNormV3/ReadVariableOp_1" -> "inception_resnet_v2/batch_normalization_186/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_186/FusedBatchNormV3" -> "inception_resnet_v2/activation_186/Relu"; +"inception_resnet_v2/batch_normalization_183/Const" -> "inception_resnet_v2/batch_normalization_183/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_183/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_183/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_183/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_183/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_183/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_183/FusedBatchNormV3/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_183/FusedBatchNormV3/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_183/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_183/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_resnet_v2/batch_normalization_183/FusedBatchNormV3/ReadVariableOp_1"; +"inception_resnet_v2/batch_normalization_183/FusedBatchNormV3/ReadVariableOp_1" -> "inception_resnet_v2/batch_normalization_183/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_183/FusedBatchNormV3" -> "inception_resnet_v2/activation_183/Relu"; +"inception_resnet_v2/activation_183/Relu" -> "inception_resnet_v2/block8_6_mixed/concat"; +"inception_resnet_v2/activation_186/Relu" -> "inception_resnet_v2/block8_6_mixed/concat"; +"inception_resnet_v2/block8_6_mixed/concat/axis" -> "inception_resnet_v2/block8_6_mixed/concat"; +"inception_resnet_v2/block8_6_mixed/concat" -> "inception_resnet_v2/block8_6_conv/Conv2D"; +"inception_resnet_v2/block8_6_conv/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/block8_6_conv/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/block8_6_conv/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/block8_6_conv/Sigmoid"; +"inception_resnet_v2/block8_6_conv/Sigmoid" -> "inception_resnet_v2/block8_6_conv/Round"; +"inception_resnet_v2/block8_6_conv/Round" -> "inception_resnet_v2/block8_6_conv/mul"; +"inception_resnet_v2/block8_6_conv/ReadVariableOp/resource" -> "inception_resnet_v2/block8_6_conv/ReadVariableOp"; +"inception_resnet_v2/block8_6_conv/ReadVariableOp" -> "inception_resnet_v2/block8_6_conv/mul"; +"inception_resnet_v2/block8_6_conv/mul" -> "inception_resnet_v2/block8_6_conv/Conv2D"; +"inception_resnet_v2/block8_6_conv/Conv2D" -> "inception_resnet_v2/block8_6_conv/BiasAdd"; +"inception_resnet_v2/block8_6_conv/BiasAdd/ReadVariableOp/resource" -> "inception_resnet_v2/block8_6_conv/BiasAdd/ReadVariableOp"; +"inception_resnet_v2/block8_6_conv/BiasAdd/ReadVariableOp" -> "inception_resnet_v2/block8_6_conv/BiasAdd"; +"inception_resnet_v2/block8_6_conv/BiasAdd" -> "inception_resnet_v2/custom_scale_layer_35/mul"; +"inception_resnet_v2/custom_scale_layer_35/mul/y" -> "inception_resnet_v2/custom_scale_layer_35/mul"; +"inception_resnet_v2/custom_scale_layer_35/mul" -> "inception_resnet_v2/custom_scale_layer_35/add"; +"inception_resnet_v2/custom_scale_layer_35/add" -> "inception_resnet_v2/block8_6_ac/Relu"; 
+"inception_resnet_v2/block8_6_ac/Relu" -> "inception_resnet_v2/conv2d_188/Conv2D"; +"inception_resnet_v2/block8_6_ac/Relu" -> "inception_resnet_v2/conv2d_187/Conv2D"; +"inception_resnet_v2/block8_6_ac/Relu" -> "inception_resnet_v2/custom_scale_layer_36/add"; +"inception_resnet_v2/conv2d_188/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_188/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/conv2d_188/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/conv2d_188/Sigmoid"; +"inception_resnet_v2/conv2d_188/Sigmoid" -> "inception_resnet_v2/conv2d_188/Round"; +"inception_resnet_v2/conv2d_188/Round" -> "inception_resnet_v2/conv2d_188/mul"; +"inception_resnet_v2/conv2d_188/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_188/ReadVariableOp"; +"inception_resnet_v2/conv2d_188/ReadVariableOp" -> "inception_resnet_v2/conv2d_188/mul"; +"inception_resnet_v2/conv2d_188/mul" -> "inception_resnet_v2/conv2d_188/Conv2D"; +"inception_resnet_v2/conv2d_188/Conv2D" -> "inception_resnet_v2/batch_normalization_188/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_188/Const" -> "inception_resnet_v2/batch_normalization_188/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_188/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_188/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_188/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_188/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_188/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_188/FusedBatchNormV3/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_188/FusedBatchNormV3/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_188/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_188/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_resnet_v2/batch_normalization_188/FusedBatchNormV3/ReadVariableOp_1"; +"inception_resnet_v2/batch_normalization_188/FusedBatchNormV3/ReadVariableOp_1" -> "inception_resnet_v2/batch_normalization_188/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_188/FusedBatchNormV3" -> "inception_resnet_v2/activation_188/Relu"; +"inception_resnet_v2/activation_188/Relu" -> "inception_resnet_v2/conv2d_189/Conv2D"; +"inception_resnet_v2/conv2d_189/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_189/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/conv2d_189/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/conv2d_189/Sigmoid"; +"inception_resnet_v2/conv2d_189/Sigmoid" -> "inception_resnet_v2/conv2d_189/Round"; +"inception_resnet_v2/conv2d_189/Round" -> "inception_resnet_v2/conv2d_189/mul"; +"inception_resnet_v2/conv2d_189/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_189/ReadVariableOp"; +"inception_resnet_v2/conv2d_189/ReadVariableOp" -> "inception_resnet_v2/conv2d_189/mul"; +"inception_resnet_v2/conv2d_189/mul" -> "inception_resnet_v2/conv2d_189/Conv2D"; +"inception_resnet_v2/conv2d_189/Conv2D" -> "inception_resnet_v2/batch_normalization_189/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_189/Const" -> "inception_resnet_v2/batch_normalization_189/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_189/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_189/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_189/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_189/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_189/FusedBatchNormV3/ReadVariableOp/resource" -> 
"inception_resnet_v2/batch_normalization_189/FusedBatchNormV3/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_189/FusedBatchNormV3/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_189/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_189/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_resnet_v2/batch_normalization_189/FusedBatchNormV3/ReadVariableOp_1"; +"inception_resnet_v2/batch_normalization_189/FusedBatchNormV3/ReadVariableOp_1" -> "inception_resnet_v2/batch_normalization_189/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_189/FusedBatchNormV3" -> "inception_resnet_v2/activation_189/Relu"; +"inception_resnet_v2/activation_189/Relu" -> "inception_resnet_v2/conv2d_190/Conv2D"; +"inception_resnet_v2/conv2d_190/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_190/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/conv2d_190/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/conv2d_190/Sigmoid"; +"inception_resnet_v2/conv2d_190/Sigmoid" -> "inception_resnet_v2/conv2d_190/Round"; +"inception_resnet_v2/conv2d_190/Round" -> "inception_resnet_v2/conv2d_190/mul"; +"inception_resnet_v2/conv2d_190/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_190/ReadVariableOp"; +"inception_resnet_v2/conv2d_190/ReadVariableOp" -> "inception_resnet_v2/conv2d_190/mul"; +"inception_resnet_v2/conv2d_190/mul" -> "inception_resnet_v2/conv2d_190/Conv2D"; +"inception_resnet_v2/conv2d_190/Conv2D" -> "inception_resnet_v2/batch_normalization_190/FusedBatchNormV3"; +"inception_resnet_v2/conv2d_187/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_187/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/conv2d_187/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/conv2d_187/Sigmoid"; +"inception_resnet_v2/conv2d_187/Sigmoid" -> "inception_resnet_v2/conv2d_187/Round"; +"inception_resnet_v2/conv2d_187/Round" -> "inception_resnet_v2/conv2d_187/mul"; +"inception_resnet_v2/conv2d_187/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_187/ReadVariableOp"; +"inception_resnet_v2/conv2d_187/ReadVariableOp" -> "inception_resnet_v2/conv2d_187/mul"; +"inception_resnet_v2/conv2d_187/mul" -> "inception_resnet_v2/conv2d_187/Conv2D"; +"inception_resnet_v2/conv2d_187/Conv2D" -> "inception_resnet_v2/batch_normalization_187/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_190/Const" -> "inception_resnet_v2/batch_normalization_190/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_190/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_190/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_190/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_190/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_190/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_190/FusedBatchNormV3/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_190/FusedBatchNormV3/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_190/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_190/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_resnet_v2/batch_normalization_190/FusedBatchNormV3/ReadVariableOp_1"; +"inception_resnet_v2/batch_normalization_190/FusedBatchNormV3/ReadVariableOp_1" -> "inception_resnet_v2/batch_normalization_190/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_190/FusedBatchNormV3" -> "inception_resnet_v2/activation_190/Relu"; +"inception_resnet_v2/batch_normalization_187/Const" -> 
"inception_resnet_v2/batch_normalization_187/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_187/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_187/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_187/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_187/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_187/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_187/FusedBatchNormV3/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_187/FusedBatchNormV3/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_187/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_187/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_resnet_v2/batch_normalization_187/FusedBatchNormV3/ReadVariableOp_1"; +"inception_resnet_v2/batch_normalization_187/FusedBatchNormV3/ReadVariableOp_1" -> "inception_resnet_v2/batch_normalization_187/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_187/FusedBatchNormV3" -> "inception_resnet_v2/activation_187/Relu"; +"inception_resnet_v2/activation_187/Relu" -> "inception_resnet_v2/block8_7_mixed/concat"; +"inception_resnet_v2/activation_190/Relu" -> "inception_resnet_v2/block8_7_mixed/concat"; +"inception_resnet_v2/block8_7_mixed/concat/axis" -> "inception_resnet_v2/block8_7_mixed/concat"; +"inception_resnet_v2/block8_7_mixed/concat" -> "inception_resnet_v2/block8_7_conv/Conv2D"; +"inception_resnet_v2/block8_7_conv/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/block8_7_conv/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/block8_7_conv/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/block8_7_conv/Sigmoid"; +"inception_resnet_v2/block8_7_conv/Sigmoid" -> "inception_resnet_v2/block8_7_conv/Round"; +"inception_resnet_v2/block8_7_conv/Round" -> "inception_resnet_v2/block8_7_conv/mul"; +"inception_resnet_v2/block8_7_conv/ReadVariableOp/resource" -> "inception_resnet_v2/block8_7_conv/ReadVariableOp"; +"inception_resnet_v2/block8_7_conv/ReadVariableOp" -> "inception_resnet_v2/block8_7_conv/mul"; +"inception_resnet_v2/block8_7_conv/mul" -> "inception_resnet_v2/block8_7_conv/Conv2D"; +"inception_resnet_v2/block8_7_conv/Conv2D" -> "inception_resnet_v2/block8_7_conv/BiasAdd"; +"inception_resnet_v2/block8_7_conv/BiasAdd/ReadVariableOp/resource" -> "inception_resnet_v2/block8_7_conv/BiasAdd/ReadVariableOp"; +"inception_resnet_v2/block8_7_conv/BiasAdd/ReadVariableOp" -> "inception_resnet_v2/block8_7_conv/BiasAdd"; +"inception_resnet_v2/block8_7_conv/BiasAdd" -> "inception_resnet_v2/custom_scale_layer_36/mul"; +"inception_resnet_v2/custom_scale_layer_36/mul/y" -> "inception_resnet_v2/custom_scale_layer_36/mul"; +"inception_resnet_v2/custom_scale_layer_36/mul" -> "inception_resnet_v2/custom_scale_layer_36/add"; +"inception_resnet_v2/custom_scale_layer_36/add" -> "inception_resnet_v2/block8_7_ac/Relu"; +"inception_resnet_v2/block8_7_ac/Relu" -> "inception_resnet_v2/conv2d_192/Conv2D"; +"inception_resnet_v2/block8_7_ac/Relu" -> "inception_resnet_v2/conv2d_191/Conv2D"; +"inception_resnet_v2/block8_7_ac/Relu" -> "inception_resnet_v2/custom_scale_layer_37/add"; +"inception_resnet_v2/conv2d_192/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_192/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/conv2d_192/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/conv2d_192/Sigmoid"; +"inception_resnet_v2/conv2d_192/Sigmoid" -> "inception_resnet_v2/conv2d_192/Round"; +"inception_resnet_v2/conv2d_192/Round" -> "inception_resnet_v2/conv2d_192/mul"; 
+"inception_resnet_v2/conv2d_192/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_192/ReadVariableOp"; +"inception_resnet_v2/conv2d_192/ReadVariableOp" -> "inception_resnet_v2/conv2d_192/mul"; +"inception_resnet_v2/conv2d_192/mul" -> "inception_resnet_v2/conv2d_192/Conv2D"; +"inception_resnet_v2/conv2d_192/Conv2D" -> "inception_resnet_v2/batch_normalization_192/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_192/Const" -> "inception_resnet_v2/batch_normalization_192/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_192/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_192/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_192/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_192/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_192/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_192/FusedBatchNormV3/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_192/FusedBatchNormV3/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_192/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_192/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_resnet_v2/batch_normalization_192/FusedBatchNormV3/ReadVariableOp_1"; +"inception_resnet_v2/batch_normalization_192/FusedBatchNormV3/ReadVariableOp_1" -> "inception_resnet_v2/batch_normalization_192/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_192/FusedBatchNormV3" -> "inception_resnet_v2/activation_192/Relu"; +"inception_resnet_v2/activation_192/Relu" -> "inception_resnet_v2/conv2d_193/Conv2D"; +"inception_resnet_v2/conv2d_193/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_193/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/conv2d_193/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/conv2d_193/Sigmoid"; +"inception_resnet_v2/conv2d_193/Sigmoid" -> "inception_resnet_v2/conv2d_193/Round"; +"inception_resnet_v2/conv2d_193/Round" -> "inception_resnet_v2/conv2d_193/mul"; +"inception_resnet_v2/conv2d_193/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_193/ReadVariableOp"; +"inception_resnet_v2/conv2d_193/ReadVariableOp" -> "inception_resnet_v2/conv2d_193/mul"; +"inception_resnet_v2/conv2d_193/mul" -> "inception_resnet_v2/conv2d_193/Conv2D"; +"inception_resnet_v2/conv2d_193/Conv2D" -> "inception_resnet_v2/batch_normalization_193/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_193/Const" -> "inception_resnet_v2/batch_normalization_193/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_193/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_193/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_193/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_193/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_193/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_193/FusedBatchNormV3/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_193/FusedBatchNormV3/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_193/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_193/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_resnet_v2/batch_normalization_193/FusedBatchNormV3/ReadVariableOp_1"; +"inception_resnet_v2/batch_normalization_193/FusedBatchNormV3/ReadVariableOp_1" -> "inception_resnet_v2/batch_normalization_193/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_193/FusedBatchNormV3" -> "inception_resnet_v2/activation_193/Relu"; 
+"inception_resnet_v2/activation_193/Relu" -> "inception_resnet_v2/conv2d_194/Conv2D"; +"inception_resnet_v2/conv2d_194/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_194/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/conv2d_194/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/conv2d_194/Sigmoid"; +"inception_resnet_v2/conv2d_194/Sigmoid" -> "inception_resnet_v2/conv2d_194/Round"; +"inception_resnet_v2/conv2d_194/Round" -> "inception_resnet_v2/conv2d_194/mul"; +"inception_resnet_v2/conv2d_194/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_194/ReadVariableOp"; +"inception_resnet_v2/conv2d_194/ReadVariableOp" -> "inception_resnet_v2/conv2d_194/mul"; +"inception_resnet_v2/conv2d_194/mul" -> "inception_resnet_v2/conv2d_194/Conv2D"; +"inception_resnet_v2/conv2d_194/Conv2D" -> "inception_resnet_v2/batch_normalization_194/FusedBatchNormV3"; +"inception_resnet_v2/conv2d_191/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_191/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/conv2d_191/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/conv2d_191/Sigmoid"; +"inception_resnet_v2/conv2d_191/Sigmoid" -> "inception_resnet_v2/conv2d_191/Round"; +"inception_resnet_v2/conv2d_191/Round" -> "inception_resnet_v2/conv2d_191/mul"; +"inception_resnet_v2/conv2d_191/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_191/ReadVariableOp"; +"inception_resnet_v2/conv2d_191/ReadVariableOp" -> "inception_resnet_v2/conv2d_191/mul"; +"inception_resnet_v2/conv2d_191/mul" -> "inception_resnet_v2/conv2d_191/Conv2D"; +"inception_resnet_v2/conv2d_191/Conv2D" -> "inception_resnet_v2/batch_normalization_191/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_194/Const" -> "inception_resnet_v2/batch_normalization_194/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_194/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_194/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_194/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_194/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_194/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_194/FusedBatchNormV3/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_194/FusedBatchNormV3/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_194/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_194/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_resnet_v2/batch_normalization_194/FusedBatchNormV3/ReadVariableOp_1"; +"inception_resnet_v2/batch_normalization_194/FusedBatchNormV3/ReadVariableOp_1" -> "inception_resnet_v2/batch_normalization_194/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_194/FusedBatchNormV3" -> "inception_resnet_v2/activation_194/Relu"; +"inception_resnet_v2/batch_normalization_191/Const" -> "inception_resnet_v2/batch_normalization_191/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_191/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_191/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_191/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_191/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_191/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_191/FusedBatchNormV3/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_191/FusedBatchNormV3/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_191/FusedBatchNormV3"; 
+"inception_resnet_v2/batch_normalization_191/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_resnet_v2/batch_normalization_191/FusedBatchNormV3/ReadVariableOp_1"; +"inception_resnet_v2/batch_normalization_191/FusedBatchNormV3/ReadVariableOp_1" -> "inception_resnet_v2/batch_normalization_191/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_191/FusedBatchNormV3" -> "inception_resnet_v2/activation_191/Relu"; +"inception_resnet_v2/activation_191/Relu" -> "inception_resnet_v2/block8_8_mixed/concat"; +"inception_resnet_v2/activation_194/Relu" -> "inception_resnet_v2/block8_8_mixed/concat"; +"inception_resnet_v2/block8_8_mixed/concat/axis" -> "inception_resnet_v2/block8_8_mixed/concat"; +"inception_resnet_v2/block8_8_mixed/concat" -> "inception_resnet_v2/block8_8_conv/Conv2D"; +"inception_resnet_v2/block8_8_conv/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/block8_8_conv/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/block8_8_conv/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/block8_8_conv/Sigmoid"; +"inception_resnet_v2/block8_8_conv/Sigmoid" -> "inception_resnet_v2/block8_8_conv/Round"; +"inception_resnet_v2/block8_8_conv/Round" -> "inception_resnet_v2/block8_8_conv/mul"; +"inception_resnet_v2/block8_8_conv/ReadVariableOp/resource" -> "inception_resnet_v2/block8_8_conv/ReadVariableOp"; +"inception_resnet_v2/block8_8_conv/ReadVariableOp" -> "inception_resnet_v2/block8_8_conv/mul"; +"inception_resnet_v2/block8_8_conv/mul" -> "inception_resnet_v2/block8_8_conv/Conv2D"; +"inception_resnet_v2/block8_8_conv/Conv2D" -> "inception_resnet_v2/block8_8_conv/BiasAdd"; +"inception_resnet_v2/block8_8_conv/BiasAdd/ReadVariableOp/resource" -> "inception_resnet_v2/block8_8_conv/BiasAdd/ReadVariableOp"; +"inception_resnet_v2/block8_8_conv/BiasAdd/ReadVariableOp" -> "inception_resnet_v2/block8_8_conv/BiasAdd"; +"inception_resnet_v2/block8_8_conv/BiasAdd" -> "inception_resnet_v2/custom_scale_layer_37/mul"; +"inception_resnet_v2/custom_scale_layer_37/mul/y" -> "inception_resnet_v2/custom_scale_layer_37/mul"; +"inception_resnet_v2/custom_scale_layer_37/mul" -> "inception_resnet_v2/custom_scale_layer_37/add"; +"inception_resnet_v2/custom_scale_layer_37/add" -> "inception_resnet_v2/block8_8_ac/Relu"; +"inception_resnet_v2/block8_8_ac/Relu" -> "inception_resnet_v2/conv2d_196/Conv2D"; +"inception_resnet_v2/block8_8_ac/Relu" -> "inception_resnet_v2/conv2d_195/Conv2D"; +"inception_resnet_v2/block8_8_ac/Relu" -> "inception_resnet_v2/custom_scale_layer_38/add"; +"inception_resnet_v2/conv2d_196/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_196/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/conv2d_196/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/conv2d_196/Sigmoid"; +"inception_resnet_v2/conv2d_196/Sigmoid" -> "inception_resnet_v2/conv2d_196/Round"; +"inception_resnet_v2/conv2d_196/Round" -> "inception_resnet_v2/conv2d_196/mul"; +"inception_resnet_v2/conv2d_196/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_196/ReadVariableOp"; +"inception_resnet_v2/conv2d_196/ReadVariableOp" -> "inception_resnet_v2/conv2d_196/mul"; +"inception_resnet_v2/conv2d_196/mul" -> "inception_resnet_v2/conv2d_196/Conv2D"; +"inception_resnet_v2/conv2d_196/Conv2D" -> "inception_resnet_v2/batch_normalization_196/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_196/Const" -> "inception_resnet_v2/batch_normalization_196/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_196/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_196/ReadVariableOp"; 
+"inception_resnet_v2/batch_normalization_196/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_196/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_196/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_196/FusedBatchNormV3/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_196/FusedBatchNormV3/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_196/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_196/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_resnet_v2/batch_normalization_196/FusedBatchNormV3/ReadVariableOp_1"; +"inception_resnet_v2/batch_normalization_196/FusedBatchNormV3/ReadVariableOp_1" -> "inception_resnet_v2/batch_normalization_196/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_196/FusedBatchNormV3" -> "inception_resnet_v2/activation_196/Relu"; +"inception_resnet_v2/activation_196/Relu" -> "inception_resnet_v2/conv2d_197/Conv2D"; +"inception_resnet_v2/conv2d_197/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_197/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/conv2d_197/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/conv2d_197/Sigmoid"; +"inception_resnet_v2/conv2d_197/Sigmoid" -> "inception_resnet_v2/conv2d_197/Round"; +"inception_resnet_v2/conv2d_197/Round" -> "inception_resnet_v2/conv2d_197/mul"; +"inception_resnet_v2/conv2d_197/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_197/ReadVariableOp"; +"inception_resnet_v2/conv2d_197/ReadVariableOp" -> "inception_resnet_v2/conv2d_197/mul"; +"inception_resnet_v2/conv2d_197/mul" -> "inception_resnet_v2/conv2d_197/Conv2D"; +"inception_resnet_v2/conv2d_197/Conv2D" -> "inception_resnet_v2/batch_normalization_197/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_197/Const" -> "inception_resnet_v2/batch_normalization_197/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_197/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_197/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_197/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_197/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_197/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_197/FusedBatchNormV3/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_197/FusedBatchNormV3/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_197/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_197/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_resnet_v2/batch_normalization_197/FusedBatchNormV3/ReadVariableOp_1"; +"inception_resnet_v2/batch_normalization_197/FusedBatchNormV3/ReadVariableOp_1" -> "inception_resnet_v2/batch_normalization_197/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_197/FusedBatchNormV3" -> "inception_resnet_v2/activation_197/Relu"; +"inception_resnet_v2/activation_197/Relu" -> "inception_resnet_v2/conv2d_198/Conv2D"; +"inception_resnet_v2/conv2d_198/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_198/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/conv2d_198/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/conv2d_198/Sigmoid"; +"inception_resnet_v2/conv2d_198/Sigmoid" -> "inception_resnet_v2/conv2d_198/Round"; +"inception_resnet_v2/conv2d_198/Round" -> "inception_resnet_v2/conv2d_198/mul"; +"inception_resnet_v2/conv2d_198/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_198/ReadVariableOp"; +"inception_resnet_v2/conv2d_198/ReadVariableOp" -> 
"inception_resnet_v2/conv2d_198/mul"; +"inception_resnet_v2/conv2d_198/mul" -> "inception_resnet_v2/conv2d_198/Conv2D"; +"inception_resnet_v2/conv2d_198/Conv2D" -> "inception_resnet_v2/batch_normalization_198/FusedBatchNormV3"; +"inception_resnet_v2/conv2d_195/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_195/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/conv2d_195/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/conv2d_195/Sigmoid"; +"inception_resnet_v2/conv2d_195/Sigmoid" -> "inception_resnet_v2/conv2d_195/Round"; +"inception_resnet_v2/conv2d_195/Round" -> "inception_resnet_v2/conv2d_195/mul"; +"inception_resnet_v2/conv2d_195/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_195/ReadVariableOp"; +"inception_resnet_v2/conv2d_195/ReadVariableOp" -> "inception_resnet_v2/conv2d_195/mul"; +"inception_resnet_v2/conv2d_195/mul" -> "inception_resnet_v2/conv2d_195/Conv2D"; +"inception_resnet_v2/conv2d_195/Conv2D" -> "inception_resnet_v2/batch_normalization_195/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_198/Const" -> "inception_resnet_v2/batch_normalization_198/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_198/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_198/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_198/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_198/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_198/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_198/FusedBatchNormV3/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_198/FusedBatchNormV3/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_198/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_198/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_resnet_v2/batch_normalization_198/FusedBatchNormV3/ReadVariableOp_1"; +"inception_resnet_v2/batch_normalization_198/FusedBatchNormV3/ReadVariableOp_1" -> "inception_resnet_v2/batch_normalization_198/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_198/FusedBatchNormV3" -> "inception_resnet_v2/activation_198/Relu"; +"inception_resnet_v2/batch_normalization_195/Const" -> "inception_resnet_v2/batch_normalization_195/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_195/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_195/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_195/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_195/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_195/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_195/FusedBatchNormV3/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_195/FusedBatchNormV3/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_195/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_195/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_resnet_v2/batch_normalization_195/FusedBatchNormV3/ReadVariableOp_1"; +"inception_resnet_v2/batch_normalization_195/FusedBatchNormV3/ReadVariableOp_1" -> "inception_resnet_v2/batch_normalization_195/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_195/FusedBatchNormV3" -> "inception_resnet_v2/activation_195/Relu"; +"inception_resnet_v2/activation_195/Relu" -> "inception_resnet_v2/block8_9_mixed/concat"; +"inception_resnet_v2/activation_198/Relu" -> "inception_resnet_v2/block8_9_mixed/concat"; +"inception_resnet_v2/block8_9_mixed/concat/axis" -> 
"inception_resnet_v2/block8_9_mixed/concat"; +"inception_resnet_v2/block8_9_mixed/concat" -> "inception_resnet_v2/block8_9_conv/Conv2D"; +"inception_resnet_v2/block8_9_conv/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/block8_9_conv/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/block8_9_conv/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/block8_9_conv/Sigmoid"; +"inception_resnet_v2/block8_9_conv/Sigmoid" -> "inception_resnet_v2/block8_9_conv/Round"; +"inception_resnet_v2/block8_9_conv/Round" -> "inception_resnet_v2/block8_9_conv/mul"; +"inception_resnet_v2/block8_9_conv/ReadVariableOp/resource" -> "inception_resnet_v2/block8_9_conv/ReadVariableOp"; +"inception_resnet_v2/block8_9_conv/ReadVariableOp" -> "inception_resnet_v2/block8_9_conv/mul"; +"inception_resnet_v2/block8_9_conv/mul" -> "inception_resnet_v2/block8_9_conv/Conv2D"; +"inception_resnet_v2/block8_9_conv/Conv2D" -> "inception_resnet_v2/block8_9_conv/BiasAdd"; +"inception_resnet_v2/block8_9_conv/BiasAdd/ReadVariableOp/resource" -> "inception_resnet_v2/block8_9_conv/BiasAdd/ReadVariableOp"; +"inception_resnet_v2/block8_9_conv/BiasAdd/ReadVariableOp" -> "inception_resnet_v2/block8_9_conv/BiasAdd"; +"inception_resnet_v2/block8_9_conv/BiasAdd" -> "inception_resnet_v2/custom_scale_layer_38/mul"; +"inception_resnet_v2/custom_scale_layer_38/mul/y" -> "inception_resnet_v2/custom_scale_layer_38/mul"; +"inception_resnet_v2/custom_scale_layer_38/mul" -> "inception_resnet_v2/custom_scale_layer_38/add"; +"inception_resnet_v2/custom_scale_layer_38/add" -> "inception_resnet_v2/block8_9_ac/Relu"; +"inception_resnet_v2/block8_9_ac/Relu" -> "inception_resnet_v2/conv2d_200/Conv2D"; +"inception_resnet_v2/block8_9_ac/Relu" -> "inception_resnet_v2/conv2d_199/Conv2D"; +"inception_resnet_v2/block8_9_ac/Relu" -> "inception_resnet_v2/custom_scale_layer_39/add"; +"inception_resnet_v2/conv2d_200/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_200/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/conv2d_200/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/conv2d_200/Sigmoid"; +"inception_resnet_v2/conv2d_200/Sigmoid" -> "inception_resnet_v2/conv2d_200/Round"; +"inception_resnet_v2/conv2d_200/Round" -> "inception_resnet_v2/conv2d_200/mul"; +"inception_resnet_v2/conv2d_200/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_200/ReadVariableOp"; +"inception_resnet_v2/conv2d_200/ReadVariableOp" -> "inception_resnet_v2/conv2d_200/mul"; +"inception_resnet_v2/conv2d_200/mul" -> "inception_resnet_v2/conv2d_200/Conv2D"; +"inception_resnet_v2/conv2d_200/Conv2D" -> "inception_resnet_v2/batch_normalization_200/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_200/Const" -> "inception_resnet_v2/batch_normalization_200/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_200/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_200/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_200/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_200/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_200/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_200/FusedBatchNormV3/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_200/FusedBatchNormV3/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_200/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_200/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_resnet_v2/batch_normalization_200/FusedBatchNormV3/ReadVariableOp_1"; 
+"inception_resnet_v2/batch_normalization_200/FusedBatchNormV3/ReadVariableOp_1" -> "inception_resnet_v2/batch_normalization_200/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_200/FusedBatchNormV3" -> "inception_resnet_v2/activation_200/Relu"; +"inception_resnet_v2/activation_200/Relu" -> "inception_resnet_v2/conv2d_201/Conv2D"; +"inception_resnet_v2/conv2d_201/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_201/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/conv2d_201/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/conv2d_201/Sigmoid"; +"inception_resnet_v2/conv2d_201/Sigmoid" -> "inception_resnet_v2/conv2d_201/Round"; +"inception_resnet_v2/conv2d_201/Round" -> "inception_resnet_v2/conv2d_201/mul"; +"inception_resnet_v2/conv2d_201/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_201/ReadVariableOp"; +"inception_resnet_v2/conv2d_201/ReadVariableOp" -> "inception_resnet_v2/conv2d_201/mul"; +"inception_resnet_v2/conv2d_201/mul" -> "inception_resnet_v2/conv2d_201/Conv2D"; +"inception_resnet_v2/conv2d_201/Conv2D" -> "inception_resnet_v2/batch_normalization_201/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_201/Const" -> "inception_resnet_v2/batch_normalization_201/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_201/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_201/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_201/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_201/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_201/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_201/FusedBatchNormV3/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_201/FusedBatchNormV3/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_201/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_201/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_resnet_v2/batch_normalization_201/FusedBatchNormV3/ReadVariableOp_1"; +"inception_resnet_v2/batch_normalization_201/FusedBatchNormV3/ReadVariableOp_1" -> "inception_resnet_v2/batch_normalization_201/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_201/FusedBatchNormV3" -> "inception_resnet_v2/activation_201/Relu"; +"inception_resnet_v2/activation_201/Relu" -> "inception_resnet_v2/conv2d_202/Conv2D"; +"inception_resnet_v2/conv2d_202/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_202/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/conv2d_202/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/conv2d_202/Sigmoid"; +"inception_resnet_v2/conv2d_202/Sigmoid" -> "inception_resnet_v2/conv2d_202/Round"; +"inception_resnet_v2/conv2d_202/Round" -> "inception_resnet_v2/conv2d_202/mul"; +"inception_resnet_v2/conv2d_202/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_202/ReadVariableOp"; +"inception_resnet_v2/conv2d_202/ReadVariableOp" -> "inception_resnet_v2/conv2d_202/mul"; +"inception_resnet_v2/conv2d_202/mul" -> "inception_resnet_v2/conv2d_202/Conv2D"; +"inception_resnet_v2/conv2d_202/Conv2D" -> "inception_resnet_v2/batch_normalization_202/FusedBatchNormV3"; +"inception_resnet_v2/conv2d_199/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_199/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/conv2d_199/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/conv2d_199/Sigmoid"; +"inception_resnet_v2/conv2d_199/Sigmoid" -> "inception_resnet_v2/conv2d_199/Round"; +"inception_resnet_v2/conv2d_199/Round" -> "inception_resnet_v2/conv2d_199/mul"; 
+"inception_resnet_v2/conv2d_199/ReadVariableOp/resource" -> "inception_resnet_v2/conv2d_199/ReadVariableOp"; +"inception_resnet_v2/conv2d_199/ReadVariableOp" -> "inception_resnet_v2/conv2d_199/mul"; +"inception_resnet_v2/conv2d_199/mul" -> "inception_resnet_v2/conv2d_199/Conv2D"; +"inception_resnet_v2/conv2d_199/Conv2D" -> "inception_resnet_v2/batch_normalization_199/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_202/Const" -> "inception_resnet_v2/batch_normalization_202/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_202/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_202/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_202/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_202/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_202/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_202/FusedBatchNormV3/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_202/FusedBatchNormV3/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_202/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_202/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_resnet_v2/batch_normalization_202/FusedBatchNormV3/ReadVariableOp_1"; +"inception_resnet_v2/batch_normalization_202/FusedBatchNormV3/ReadVariableOp_1" -> "inception_resnet_v2/batch_normalization_202/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_202/FusedBatchNormV3" -> "inception_resnet_v2/activation_202/Relu"; +"inception_resnet_v2/batch_normalization_199/Const" -> "inception_resnet_v2/batch_normalization_199/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_199/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_199/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_199/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_199/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_199/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_resnet_v2/batch_normalization_199/FusedBatchNormV3/ReadVariableOp"; +"inception_resnet_v2/batch_normalization_199/FusedBatchNormV3/ReadVariableOp" -> "inception_resnet_v2/batch_normalization_199/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_199/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_resnet_v2/batch_normalization_199/FusedBatchNormV3/ReadVariableOp_1"; +"inception_resnet_v2/batch_normalization_199/FusedBatchNormV3/ReadVariableOp_1" -> "inception_resnet_v2/batch_normalization_199/FusedBatchNormV3"; +"inception_resnet_v2/batch_normalization_199/FusedBatchNormV3" -> "inception_resnet_v2/activation_199/Relu"; +"inception_resnet_v2/activation_199/Relu" -> "inception_resnet_v2/block8_10_mixed/concat"; +"inception_resnet_v2/activation_202/Relu" -> "inception_resnet_v2/block8_10_mixed/concat"; +"inception_resnet_v2/block8_10_mixed/concat/axis" -> "inception_resnet_v2/block8_10_mixed/concat"; +"inception_resnet_v2/block8_10_mixed/concat" -> "inception_resnet_v2/block8_10_conv/Conv2D"; +"inception_resnet_v2/block8_10_conv/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/block8_10_conv/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/block8_10_conv/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/block8_10_conv/Sigmoid"; +"inception_resnet_v2/block8_10_conv/Sigmoid" -> "inception_resnet_v2/block8_10_conv/Round"; +"inception_resnet_v2/block8_10_conv/Round" -> "inception_resnet_v2/block8_10_conv/mul"; +"inception_resnet_v2/block8_10_conv/ReadVariableOp/resource" -> 
"inception_resnet_v2/block8_10_conv/ReadVariableOp"; +"inception_resnet_v2/block8_10_conv/ReadVariableOp" -> "inception_resnet_v2/block8_10_conv/mul"; +"inception_resnet_v2/block8_10_conv/mul" -> "inception_resnet_v2/block8_10_conv/Conv2D"; +"inception_resnet_v2/block8_10_conv/Conv2D" -> "inception_resnet_v2/block8_10_conv/BiasAdd"; +"inception_resnet_v2/block8_10_conv/BiasAdd/ReadVariableOp/resource" -> "inception_resnet_v2/block8_10_conv/BiasAdd/ReadVariableOp"; +"inception_resnet_v2/block8_10_conv/BiasAdd/ReadVariableOp" -> "inception_resnet_v2/block8_10_conv/BiasAdd"; +"inception_resnet_v2/block8_10_conv/BiasAdd" -> "inception_resnet_v2/custom_scale_layer_39/mul"; +"inception_resnet_v2/custom_scale_layer_39/mul/y" -> "inception_resnet_v2/custom_scale_layer_39/mul"; +"inception_resnet_v2/custom_scale_layer_39/mul" -> "inception_resnet_v2/custom_scale_layer_39/add"; +"inception_resnet_v2/custom_scale_layer_39/add" -> "inception_resnet_v2/conv_7b/Conv2D"; +"inception_resnet_v2/conv_7b/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/conv_7b/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/conv_7b/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/conv_7b/Sigmoid"; +"inception_resnet_v2/conv_7b/Sigmoid" -> "inception_resnet_v2/conv_7b/Round"; +"inception_resnet_v2/conv_7b/Round" -> "inception_resnet_v2/conv_7b/mul"; +"inception_resnet_v2/conv_7b/ReadVariableOp/resource" -> "inception_resnet_v2/conv_7b/ReadVariableOp"; +"inception_resnet_v2/conv_7b/ReadVariableOp" -> "inception_resnet_v2/conv_7b/mul"; +"inception_resnet_v2/conv_7b/mul" -> "inception_resnet_v2/conv_7b/Conv2D"; +"inception_resnet_v2/conv_7b/Conv2D" -> "inception_resnet_v2/conv_7b_bn/FusedBatchNormV3"; +"inception_resnet_v2/conv_7b_bn/Const" -> "inception_resnet_v2/conv_7b_bn/FusedBatchNormV3"; +"inception_resnet_v2/conv_7b_bn/ReadVariableOp/resource" -> "inception_resnet_v2/conv_7b_bn/ReadVariableOp"; +"inception_resnet_v2/conv_7b_bn/ReadVariableOp" -> "inception_resnet_v2/conv_7b_bn/FusedBatchNormV3"; +"inception_resnet_v2/conv_7b_bn/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_resnet_v2/conv_7b_bn/FusedBatchNormV3/ReadVariableOp"; +"inception_resnet_v2/conv_7b_bn/FusedBatchNormV3/ReadVariableOp" -> "inception_resnet_v2/conv_7b_bn/FusedBatchNormV3"; +"inception_resnet_v2/conv_7b_bn/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_resnet_v2/conv_7b_bn/FusedBatchNormV3/ReadVariableOp_1"; +"inception_resnet_v2/conv_7b_bn/FusedBatchNormV3/ReadVariableOp_1" -> "inception_resnet_v2/conv_7b_bn/FusedBatchNormV3"; +"inception_resnet_v2/conv_7b_bn/FusedBatchNormV3" -> "inception_resnet_v2/conv_7b_ac/Relu"; +"inception_resnet_v2/conv_7b_ac/Relu" -> "inception_resnet_v2/avg_pool/Mean"; +"inception_resnet_v2/avg_pool/Mean/reduction_indices" -> "inception_resnet_v2/avg_pool/Mean"; +"inception_resnet_v2/avg_pool/Mean" -> "inception_resnet_v2/predictions/MatMul"; +"inception_resnet_v2/predictions/Sigmoid/ReadVariableOp/resource" -> "inception_resnet_v2/predictions/Sigmoid/ReadVariableOp"; +"inception_resnet_v2/predictions/Sigmoid/ReadVariableOp" -> "inception_resnet_v2/predictions/Sigmoid"; +"inception_resnet_v2/predictions/Sigmoid" -> "inception_resnet_v2/predictions/Round"; +"inception_resnet_v2/predictions/Round" -> "inception_resnet_v2/predictions/mul"; +"inception_resnet_v2/predictions/ReadVariableOp/resource" -> "inception_resnet_v2/predictions/ReadVariableOp"; +"inception_resnet_v2/predictions/ReadVariableOp" -> "inception_resnet_v2/predictions/mul"; +"inception_resnet_v2/predictions/mul" -> 
"inception_resnet_v2/predictions/MatMul"; +"inception_resnet_v2/predictions/MatMul" -> "inception_resnet_v2/predictions/BiasAdd"; +"inception_resnet_v2/predictions/BiasAdd/ReadVariableOp/resource" -> "inception_resnet_v2/predictions/BiasAdd/ReadVariableOp"; +"inception_resnet_v2/predictions/BiasAdd/ReadVariableOp" -> "inception_resnet_v2/predictions/BiasAdd"; +"inception_resnet_v2/predictions/BiasAdd" -> "inception_resnet_v2/predictions/Softmax"; +"inception_resnet_v2/predictions/Softmax" -> Identity; +} diff --git a/tests/tensorflow/data/reference_graphs/2.12/sparsity/rb_sparsity/inception_v3.dot b/tests/tensorflow/data/reference_graphs/2.12/sparsity/rb_sparsity/inception_v3.dot new file mode 120000 index 00000000000..cb8e9908c6d --- /dev/null +++ b/tests/tensorflow/data/reference_graphs/2.12/sparsity/rb_sparsity/inception_v3.dot @@ -0,0 +1 @@ +../../../2.11/sparsity/rb_sparsity/inception_v3.dot \ No newline at end of file diff --git a/tests/tensorflow/data/reference_graphs/2.12/sparsity/rb_sparsity/mask_rcnn.dot b/tests/tensorflow/data/reference_graphs/2.12/sparsity/rb_sparsity/mask_rcnn.dot new file mode 120000 index 00000000000..47728d505e7 --- /dev/null +++ b/tests/tensorflow/data/reference_graphs/2.12/sparsity/rb_sparsity/mask_rcnn.dot @@ -0,0 +1 @@ +../../../2.8/sparsity/rb_sparsity/mask_rcnn.dot \ No newline at end of file diff --git a/tests/tensorflow/data/reference_graphs/2.12/sparsity/rb_sparsity/mobilenet_v1.dot b/tests/tensorflow/data/reference_graphs/2.12/sparsity/rb_sparsity/mobilenet_v1.dot new file mode 120000 index 00000000000..75717c78937 --- /dev/null +++ b/tests/tensorflow/data/reference_graphs/2.12/sparsity/rb_sparsity/mobilenet_v1.dot @@ -0,0 +1 @@ +../../../2.8/sparsity/rb_sparsity/mobilenet_v1.dot \ No newline at end of file diff --git a/tests/tensorflow/data/reference_graphs/2.12/sparsity/rb_sparsity/mobilenet_v2.dot b/tests/tensorflow/data/reference_graphs/2.12/sparsity/rb_sparsity/mobilenet_v2.dot new file mode 120000 index 00000000000..753287f74b7 --- /dev/null +++ b/tests/tensorflow/data/reference_graphs/2.12/sparsity/rb_sparsity/mobilenet_v2.dot @@ -0,0 +1 @@ +../../../2.5/sparsity/rb_sparsity/mobilenet_v2.dot \ No newline at end of file diff --git a/tests/tensorflow/data/reference_graphs/2.12/sparsity/rb_sparsity/mobilenet_v3_large.dot b/tests/tensorflow/data/reference_graphs/2.12/sparsity/rb_sparsity/mobilenet_v3_large.dot new file mode 120000 index 00000000000..578b53d4374 --- /dev/null +++ b/tests/tensorflow/data/reference_graphs/2.12/sparsity/rb_sparsity/mobilenet_v3_large.dot @@ -0,0 +1 @@ +../../../2.5/sparsity/rb_sparsity/mobilenet_v3_large.dot \ No newline at end of file diff --git a/tests/tensorflow/data/reference_graphs/2.12/sparsity/rb_sparsity/mobilenet_v3_small.dot b/tests/tensorflow/data/reference_graphs/2.12/sparsity/rb_sparsity/mobilenet_v3_small.dot new file mode 120000 index 00000000000..859e2d85d95 --- /dev/null +++ b/tests/tensorflow/data/reference_graphs/2.12/sparsity/rb_sparsity/mobilenet_v3_small.dot @@ -0,0 +1 @@ +../../../2.5/sparsity/rb_sparsity/mobilenet_v3_small.dot \ No newline at end of file diff --git a/tests/tensorflow/data/reference_graphs/2.12/sparsity/rb_sparsity/nasnet_mobile.dot b/tests/tensorflow/data/reference_graphs/2.12/sparsity/rb_sparsity/nasnet_mobile.dot new file mode 120000 index 00000000000..8997d95532b --- /dev/null +++ b/tests/tensorflow/data/reference_graphs/2.12/sparsity/rb_sparsity/nasnet_mobile.dot @@ -0,0 +1 @@ +../../../2.5/sparsity/rb_sparsity/nasnet_mobile.dot \ No newline at end of file diff --git 
a/tests/tensorflow/data/reference_graphs/2.12/sparsity/rb_sparsity/resnet50.dot b/tests/tensorflow/data/reference_graphs/2.12/sparsity/rb_sparsity/resnet50.dot new file mode 120000 index 00000000000..3683dce13d7 --- /dev/null +++ b/tests/tensorflow/data/reference_graphs/2.12/sparsity/rb_sparsity/resnet50.dot @@ -0,0 +1 @@ +../../../2.5/sparsity/rb_sparsity/resnet50.dot \ No newline at end of file diff --git a/tests/tensorflow/data/reference_graphs/2.12/sparsity/rb_sparsity/resnet50v2.dot b/tests/tensorflow/data/reference_graphs/2.12/sparsity/rb_sparsity/resnet50v2.dot new file mode 120000 index 00000000000..13ca366b7df --- /dev/null +++ b/tests/tensorflow/data/reference_graphs/2.12/sparsity/rb_sparsity/resnet50v2.dot @@ -0,0 +1 @@ +../../../2.5/sparsity/rb_sparsity/resnet50v2.dot \ No newline at end of file diff --git a/tests/tensorflow/data/reference_graphs/2.12/sparsity/rb_sparsity/retinanet.dot b/tests/tensorflow/data/reference_graphs/2.12/sparsity/rb_sparsity/retinanet.dot new file mode 120000 index 00000000000..86fe000d2a6 --- /dev/null +++ b/tests/tensorflow/data/reference_graphs/2.12/sparsity/rb_sparsity/retinanet.dot @@ -0,0 +1 @@ +../../../2.5/sparsity/rb_sparsity/retinanet.dot \ No newline at end of file diff --git a/tests/tensorflow/data/reference_graphs/2.12/sparsity/rb_sparsity/sequential_model.dot b/tests/tensorflow/data/reference_graphs/2.12/sparsity/rb_sparsity/sequential_model.dot new file mode 120000 index 00000000000..067a3bfa644 --- /dev/null +++ b/tests/tensorflow/data/reference_graphs/2.12/sparsity/rb_sparsity/sequential_model.dot @@ -0,0 +1 @@ +../../../2.5/sparsity/rb_sparsity/sequential_model.dot \ No newline at end of file diff --git a/tests/tensorflow/data/reference_graphs/2.12/sparsity/rb_sparsity/sequential_no_input_model.dot b/tests/tensorflow/data/reference_graphs/2.12/sparsity/rb_sparsity/sequential_no_input_model.dot new file mode 120000 index 00000000000..64510b66324 --- /dev/null +++ b/tests/tensorflow/data/reference_graphs/2.12/sparsity/rb_sparsity/sequential_no_input_model.dot @@ -0,0 +1 @@ +../../../2.5/sparsity/rb_sparsity/sequential_no_input_model.dot \ No newline at end of file diff --git a/tests/tensorflow/data/reference_graphs/2.12/sparsity/rb_sparsity/shared_layers_model.dot b/tests/tensorflow/data/reference_graphs/2.12/sparsity/rb_sparsity/shared_layers_model.dot new file mode 120000 index 00000000000..cfb1a7ee38a --- /dev/null +++ b/tests/tensorflow/data/reference_graphs/2.12/sparsity/rb_sparsity/shared_layers_model.dot @@ -0,0 +1 @@ +../../../2.5/sparsity/rb_sparsity/shared_layers_model.dot \ No newline at end of file diff --git a/tests/tensorflow/data/reference_graphs/2.12/sparsity/rb_sparsity/vgg16.dot b/tests/tensorflow/data/reference_graphs/2.12/sparsity/rb_sparsity/vgg16.dot new file mode 120000 index 00000000000..b5c6fcb42f3 --- /dev/null +++ b/tests/tensorflow/data/reference_graphs/2.12/sparsity/rb_sparsity/vgg16.dot @@ -0,0 +1 @@ +../../../2.5/sparsity/rb_sparsity/vgg16.dot \ No newline at end of file diff --git a/tests/tensorflow/data/reference_graphs/2.12/sparsity/rb_sparsity/xception.dot b/tests/tensorflow/data/reference_graphs/2.12/sparsity/rb_sparsity/xception.dot new file mode 120000 index 00000000000..13d496f7d9d --- /dev/null +++ b/tests/tensorflow/data/reference_graphs/2.12/sparsity/rb_sparsity/xception.dot @@ -0,0 +1 @@ +../../../2.5/sparsity/rb_sparsity/xception.dot \ No newline at end of file diff --git a/tests/tensorflow/data/reference_graphs/2.12/sparsity/rb_sparsity/yolo_v4.dot 
b/tests/tensorflow/data/reference_graphs/2.12/sparsity/rb_sparsity/yolo_v4.dot new file mode 120000 index 00000000000..6487c87d407 --- /dev/null +++ b/tests/tensorflow/data/reference_graphs/2.12/sparsity/rb_sparsity/yolo_v4.dot @@ -0,0 +1 @@ +../../../2.5/sparsity/rb_sparsity/yolo_v4.dot \ No newline at end of file diff --git a/tests/tensorflow/data/reference_graphs/2.4/quantized/hw/CPU/inception_v3.dot b/tests/tensorflow/data/reference_graphs/2.4/quantized/hw/CPU/inception_v3.dot index 5ff2e61abce..8fc50588ab2 100644 --- a/tests/tensorflow/data/reference_graphs/2.4/quantized/hw/CPU/inception_v3.dot +++ b/tests/tensorflow/data/reference_graphs/2.4/quantized/hw/CPU/inception_v3.dot @@ -2853,18 +2853,6 @@ self [op=Placeholder]; "inception_v3/conv2d_87/SymmQuant/FakeQuantWithMinMaxVarsPerChannel/ReadVariableOp" [op=ReadVariableOp]; "inception_v3/conv2d_87/SymmQuant/FakeQuantWithMinMaxVarsPerChannel" [op=FakeQuantWithMinMaxVarsPerChannel]; "inception_v3/conv2d_87/Conv2D" [op=Conv2D]; -"inception_v3/conv2d_85/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; -"inception_v3/conv2d_85/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/conv2d_85/SymmQuant/Abs" [op=Abs]; -"inception_v3/conv2d_85/SymmQuant/add/y" [op=Const]; -"inception_v3/conv2d_85/SymmQuant/add" [op=AddV2]; -"inception_v3/conv2d_85/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; -"inception_v3/conv2d_85/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/conv2d_85/SymmQuant/mul" [op=Mul]; -"inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel/ReadVariableOp/resource" [op=Placeholder]; -"inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel" [op=FakeQuantWithMinMaxVarsPerChannel]; -"inception_v3/conv2d_85/Conv2D" [op=Conv2D]; "inception_v3/conv2d_93/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; "inception_v3/conv2d_93/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; "inception_v3/conv2d_93/SymmQuant/Abs" [op=Abs]; @@ -2909,14 +2897,18 @@ self [op=Placeholder]; "inception_v3/batch_normalization_87/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; "inception_v3/batch_normalization_87/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; "inception_v3/batch_normalization_87/FusedBatchNormV3" [op=FusedBatchNormV3]; -"inception_v3/batch_normalization_85/scale" [op=Placeholder]; -"inception_v3/batch_normalization_85/ReadVariableOp/resource" [op=Placeholder]; -"inception_v3/batch_normalization_85/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; -"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; -"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; -"inception_v3/batch_normalization_85/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_v3/conv2d_85/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/conv2d_85/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/conv2d_85/SymmQuant/Abs" [op=Abs]; +"inception_v3/conv2d_85/SymmQuant/add/y" [op=Const]; +"inception_v3/conv2d_85/SymmQuant/add" [op=AddV2]; +"inception_v3/conv2d_85/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/conv2d_85/SymmQuant/mul/ReadVariableOp" 
[op=ReadVariableOp]; +"inception_v3/conv2d_85/SymmQuant/mul" [op=Mul]; +"inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel" [op=FakeQuantWithMinMaxVarsPerChannel]; +"inception_v3/conv2d_85/Conv2D" [op=Conv2D]; "inception_v3/batch_normalization_93/scale" [op=Placeholder]; "inception_v3/batch_normalization_93/ReadVariableOp/resource" [op=Placeholder]; "inception_v3/batch_normalization_93/ReadVariableOp" [op=ReadVariableOp]; @@ -2929,62 +2921,80 @@ self [op=Placeholder]; "inception_v3/activation_91/Relu" [op=Relu]; "inception_v3/activation_88/Relu" [op=Relu]; "inception_v3/activation_87/Relu" [op=Relu]; -"inception_v3/activation_85/Relu" [op=Relu]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/Abs" [op=Abs]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/add/y" [op=Const]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/add" [op=AddV2]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/add_1" [op=AddV2]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"inception_v3/batch_normalization_85/scale" [op=Placeholder]; +"inception_v3/batch_normalization_85/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/batch_normalization_85/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_v3/batch_normalization_85/FusedBatchNormV3" [op=FusedBatchNormV3]; "inception_v3/activation_93/Relu" [op=Relu]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/Abs/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/Abs" [op=Abs]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/add/y" [op=Const]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/add" [op=AddV2]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/add_1" [op=AddV2]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars" 
[op=FakeQuantWithMinMaxVars]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/Abs/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/Abs" [op=Abs]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/add/y" [op=Const]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/add" [op=AddV2]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/add_1" [op=AddV2]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/Abs/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/Abs" [op=Abs]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/add/y" [op=Const]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/add" [op=AddV2]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/add_1" [op=AddV2]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/Abs/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/Abs" [op=Abs]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/add/y" [op=Const]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/add" [op=AddV2]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/add_1" [op=AddV2]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"inception_v3/activation_91/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/activation_91/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_91/fake_quantize/AsymmQuant/Abs" [op=Abs]; +"inception_v3/activation_91/fake_quantize/AsymmQuant/add/y" [op=Const]; +"inception_v3/activation_91/fake_quantize/AsymmQuant/add" [op=AddV2]; +"inception_v3/activation_91/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/activation_91/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_91/fake_quantize/AsymmQuant/add_1" [op=AddV2]; +"inception_v3/activation_91/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_91/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; 
+"inception_v3/activation_92/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/activation_92/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_92/fake_quantize/AsymmQuant/Abs" [op=Abs]; +"inception_v3/activation_92/fake_quantize/AsymmQuant/add/y" [op=Const]; +"inception_v3/activation_92/fake_quantize/AsymmQuant/add" [op=AddV2]; +"inception_v3/activation_92/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/activation_92/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_92/fake_quantize/AsymmQuant/add_1" [op=AddV2]; +"inception_v3/activation_92/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_92/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"inception_v3/activation_87/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/activation_87/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_87/fake_quantize/AsymmQuant/Abs" [op=Abs]; +"inception_v3/activation_87/fake_quantize/AsymmQuant/add/y" [op=Const]; +"inception_v3/activation_87/fake_quantize/AsymmQuant/add" [op=AddV2]; +"inception_v3/activation_87/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/activation_87/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_87/fake_quantize/AsymmQuant/add_1" [op=AddV2]; +"inception_v3/activation_87/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_87/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"inception_v3/activation_88/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/activation_88/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_88/fake_quantize/AsymmQuant/Abs" [op=Abs]; +"inception_v3/activation_88/fake_quantize/AsymmQuant/add/y" [op=Const]; +"inception_v3/activation_88/fake_quantize/AsymmQuant/add" [op=AddV2]; +"inception_v3/activation_88/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/activation_88/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_88/fake_quantize/AsymmQuant/add_1" [op=AddV2]; +"inception_v3/activation_88/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_88/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"inception_v3/activation_85/Relu" [op=Relu]; +"inception_v3/activation_85/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/activation_85/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_85/fake_quantize/AsymmQuant/Abs" [op=Abs]; +"inception_v3/activation_85/fake_quantize/AsymmQuant/add/y" [op=Const]; +"inception_v3/activation_85/fake_quantize/AsymmQuant/add" [op=AddV2]; +"inception_v3/activation_85/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/activation_85/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_85/fake_quantize/AsymmQuant/add_1" [op=AddV2]; +"inception_v3/activation_85/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; 
+"inception_v3/activation_85/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; "inception_v3/mixed9_1/concat/axis" [op=Const]; "inception_v3/mixed9_1/concat" [op=ConcatV2]; "inception_v3/concatenate_1/concat/axis" [op=Const]; "inception_v3/concatenate_1/concat" [op=ConcatV2]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/Abs/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/Abs" [op=Abs]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/add/y" [op=Const]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/add" [op=AddV2]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/add_1" [op=AddV2]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"inception_v3/activation_93/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/activation_93/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_93/fake_quantize/AsymmQuant/Abs" [op=Abs]; +"inception_v3/activation_93/fake_quantize/AsymmQuant/add/y" [op=Const]; +"inception_v3/activation_93/fake_quantize/AsymmQuant/add" [op=AddV2]; +"inception_v3/activation_93/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/activation_93/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_93/fake_quantize/AsymmQuant/add_1" [op=AddV2]; +"inception_v3/activation_93/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_93/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; "inception_v3/mixed10/concat/axis" [op=Const]; "inception_v3/mixed10/concat" [op=ConcatV2]; "inception_v3/avg_pool/Mean/reduction_indices" [op=Const]; @@ -6155,19 +6165,6 @@ self -> "inception_v3/input_1/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; "inception_v3/conv2d_87/SymmQuant/FakeQuantWithMinMaxVarsPerChannel/ReadVariableOp" -> "inception_v3/conv2d_87/SymmQuant/FakeQuantWithMinMaxVarsPerChannel"; "inception_v3/conv2d_87/SymmQuant/FakeQuantWithMinMaxVarsPerChannel" -> "inception_v3/conv2d_87/Conv2D"; "inception_v3/conv2d_87/Conv2D" -> "inception_v3/batch_normalization_87/FusedBatchNormV3"; -"inception_v3/conv2d_85/SymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/conv2d_85/SymmQuant/Abs/ReadVariableOp"; -"inception_v3/conv2d_85/SymmQuant/Abs/ReadVariableOp" -> "inception_v3/conv2d_85/SymmQuant/Abs"; -"inception_v3/conv2d_85/SymmQuant/Abs" -> "inception_v3/conv2d_85/SymmQuant/add"; -"inception_v3/conv2d_85/SymmQuant/add/y" -> "inception_v3/conv2d_85/SymmQuant/add"; -"inception_v3/conv2d_85/SymmQuant/add" -> "inception_v3/conv2d_85/SymmQuant/mul"; -"inception_v3/conv2d_85/SymmQuant/add" -> "inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel"; -"inception_v3/conv2d_85/SymmQuant/mul/ReadVariableOp/resource" -> "inception_v3/conv2d_85/SymmQuant/mul/ReadVariableOp"; -"inception_v3/conv2d_85/SymmQuant/mul/ReadVariableOp" -> "inception_v3/conv2d_85/SymmQuant/mul"; -"inception_v3/conv2d_85/SymmQuant/mul" -> 
"inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel"; -"inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel/ReadVariableOp/resource" -> "inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel/ReadVariableOp"; -"inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel/ReadVariableOp" -> "inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel"; -"inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel" -> "inception_v3/conv2d_85/Conv2D"; -"inception_v3/conv2d_85/Conv2D" -> "inception_v3/batch_normalization_85/FusedBatchNormV3"; "inception_v3/conv2d_93/SymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/conv2d_93/SymmQuant/Abs/ReadVariableOp"; "inception_v3/conv2d_93/SymmQuant/Abs/ReadVariableOp" -> "inception_v3/conv2d_93/SymmQuant/Abs"; "inception_v3/conv2d_93/SymmQuant/Abs" -> "inception_v3/conv2d_93/SymmQuant/add"; @@ -6213,14 +6210,19 @@ self -> "inception_v3/input_1/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; "inception_v3/batch_normalization_87/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_v3/batch_normalization_87/FusedBatchNormV3/ReadVariableOp_1"; "inception_v3/batch_normalization_87/FusedBatchNormV3/ReadVariableOp_1" -> "inception_v3/batch_normalization_87/FusedBatchNormV3"; "inception_v3/batch_normalization_87/FusedBatchNormV3" -> "inception_v3/activation_87/Relu"; -"inception_v3/batch_normalization_85/scale" -> "inception_v3/batch_normalization_85/FusedBatchNormV3"; -"inception_v3/batch_normalization_85/ReadVariableOp/resource" -> "inception_v3/batch_normalization_85/ReadVariableOp"; -"inception_v3/batch_normalization_85/ReadVariableOp" -> "inception_v3/batch_normalization_85/FusedBatchNormV3"; -"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp"; -"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp" -> "inception_v3/batch_normalization_85/FusedBatchNormV3"; -"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp_1"; -"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp_1" -> "inception_v3/batch_normalization_85/FusedBatchNormV3"; -"inception_v3/batch_normalization_85/FusedBatchNormV3" -> "inception_v3/activation_85/Relu"; +"inception_v3/conv2d_85/SymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/conv2d_85/SymmQuant/Abs/ReadVariableOp"; +"inception_v3/conv2d_85/SymmQuant/Abs/ReadVariableOp" -> "inception_v3/conv2d_85/SymmQuant/Abs"; +"inception_v3/conv2d_85/SymmQuant/Abs" -> "inception_v3/conv2d_85/SymmQuant/add"; +"inception_v3/conv2d_85/SymmQuant/add/y" -> "inception_v3/conv2d_85/SymmQuant/add"; +"inception_v3/conv2d_85/SymmQuant/add" -> "inception_v3/conv2d_85/SymmQuant/mul"; +"inception_v3/conv2d_85/SymmQuant/add" -> "inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel"; +"inception_v3/conv2d_85/SymmQuant/mul/ReadVariableOp/resource" -> "inception_v3/conv2d_85/SymmQuant/mul/ReadVariableOp"; +"inception_v3/conv2d_85/SymmQuant/mul/ReadVariableOp" -> "inception_v3/conv2d_85/SymmQuant/mul"; +"inception_v3/conv2d_85/SymmQuant/mul" -> "inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel"; +"inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel/ReadVariableOp/resource" -> "inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel/ReadVariableOp"; 
+"inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel/ReadVariableOp" -> "inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel"; +"inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel" -> "inception_v3/conv2d_85/Conv2D"; +"inception_v3/conv2d_85/Conv2D" -> "inception_v3/batch_normalization_85/FusedBatchNormV3"; "inception_v3/batch_normalization_93/scale" -> "inception_v3/batch_normalization_93/FusedBatchNormV3"; "inception_v3/batch_normalization_93/ReadVariableOp/resource" -> "inception_v3/batch_normalization_93/ReadVariableOp"; "inception_v3/batch_normalization_93/ReadVariableOp" -> "inception_v3/batch_normalization_93/FusedBatchNormV3"; @@ -6229,82 +6231,90 @@ self -> "inception_v3/input_1/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; "inception_v3/batch_normalization_93/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_v3/batch_normalization_93/FusedBatchNormV3/ReadVariableOp_1"; "inception_v3/batch_normalization_93/FusedBatchNormV3/ReadVariableOp_1" -> "inception_v3/batch_normalization_93/FusedBatchNormV3"; "inception_v3/batch_normalization_93/FusedBatchNormV3" -> "inception_v3/activation_93/Relu"; -"inception_v3/activation_92/Relu" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars"; -"inception_v3/activation_91/Relu" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars"; -"inception_v3/activation_88/Relu" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/Relu" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars"; -"inception_v3/activation_85/Relu" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/Abs/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/Abs/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/Abs/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/Abs/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/Abs/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/Abs"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/Abs" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/add"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/add/y" -> 
"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/add"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/add" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/add_1"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/FakeQuantWithMinMaxVars/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/FakeQuantWithMinMaxVars/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/add_1"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/add_1" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars" -> "inception_v3/mixed10/concat"; -"inception_v3/activation_93/Relu" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/FakeQuantWithMinMaxVars"; 
-"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/Abs/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/Abs"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/Abs" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/add"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/add/y" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/add"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/add" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/add_1"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/add_1"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/add_1" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars" -> "inception_v3/concatenate_1/concat"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/Abs/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/Abs"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/Abs" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/add"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/add/y" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/add"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/add" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/add_1"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/add_1"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/add_1" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars" -> "inception_v3/concatenate_1/concat"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/Abs/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/Abs"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/Abs" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/add"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/add/y" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/add"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/add" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/add_1"; 
-"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/add_1"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/add_1" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars" -> "inception_v3/mixed9_1/concat"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/Abs/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/Abs"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/Abs" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/add"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/add/y" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/add"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/add" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/add_1"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/add_1"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/add_1" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/FakeQuantWithMinMaxVars/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/FakeQuantWithMinMaxVars" -> "inception_v3/mixed9_1/concat"; +"inception_v3/activation_92/Relu" -> "inception_v3/activation_92/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_91/Relu" -> "inception_v3/activation_91/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_88/Relu" -> "inception_v3/activation_88/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_87/Relu" -> "inception_v3/activation_87/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/batch_normalization_85/scale" -> "inception_v3/batch_normalization_85/FusedBatchNormV3"; +"inception_v3/batch_normalization_85/ReadVariableOp/resource" -> "inception_v3/batch_normalization_85/ReadVariableOp"; +"inception_v3/batch_normalization_85/ReadVariableOp" -> "inception_v3/batch_normalization_85/FusedBatchNormV3"; +"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp"; +"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp" -> "inception_v3/batch_normalization_85/FusedBatchNormV3"; +"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp_1"; +"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp_1" -> "inception_v3/batch_normalization_85/FusedBatchNormV3"; 
+"inception_v3/batch_normalization_85/FusedBatchNormV3" -> "inception_v3/activation_85/Relu"; +"inception_v3/activation_93/Relu" -> "inception_v3/activation_93/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_91/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/activation_91/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; +"inception_v3/activation_91/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "inception_v3/activation_91/fake_quantize/AsymmQuant/Abs"; +"inception_v3/activation_91/fake_quantize/AsymmQuant/Abs" -> "inception_v3/activation_91/fake_quantize/AsymmQuant/add"; +"inception_v3/activation_91/fake_quantize/AsymmQuant/add/y" -> "inception_v3/activation_91/fake_quantize/AsymmQuant/add"; +"inception_v3/activation_91/fake_quantize/AsymmQuant/add" -> "inception_v3/activation_91/fake_quantize/AsymmQuant/add_1"; +"inception_v3/activation_91/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_91/fake_quantize/AsymmQuant/ReadVariableOp"; +"inception_v3/activation_91/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_91/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"inception_v3/activation_91/fake_quantize/AsymmQuant/ReadVariableOp" -> "inception_v3/activation_91/fake_quantize/AsymmQuant/add_1"; +"inception_v3/activation_91/fake_quantize/AsymmQuant/add_1" -> "inception_v3/activation_91/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_91/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "inception_v3/activation_91/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_91/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "inception_v3/concatenate_1/concat"; +"inception_v3/activation_92/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/activation_92/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; +"inception_v3/activation_92/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "inception_v3/activation_92/fake_quantize/AsymmQuant/Abs"; +"inception_v3/activation_92/fake_quantize/AsymmQuant/Abs" -> "inception_v3/activation_92/fake_quantize/AsymmQuant/add"; +"inception_v3/activation_92/fake_quantize/AsymmQuant/add/y" -> "inception_v3/activation_92/fake_quantize/AsymmQuant/add"; +"inception_v3/activation_92/fake_quantize/AsymmQuant/add" -> "inception_v3/activation_92/fake_quantize/AsymmQuant/add_1"; +"inception_v3/activation_92/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_92/fake_quantize/AsymmQuant/ReadVariableOp"; +"inception_v3/activation_92/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_92/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"inception_v3/activation_92/fake_quantize/AsymmQuant/ReadVariableOp" -> "inception_v3/activation_92/fake_quantize/AsymmQuant/add_1"; +"inception_v3/activation_92/fake_quantize/AsymmQuant/add_1" -> "inception_v3/activation_92/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_92/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "inception_v3/activation_92/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_92/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "inception_v3/concatenate_1/concat"; +"inception_v3/activation_87/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; 
+"inception_v3/activation_87/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/AsymmQuant/Abs"; +"inception_v3/activation_87/fake_quantize/AsymmQuant/Abs" -> "inception_v3/activation_87/fake_quantize/AsymmQuant/add"; +"inception_v3/activation_87/fake_quantize/AsymmQuant/add/y" -> "inception_v3/activation_87/fake_quantize/AsymmQuant/add"; +"inception_v3/activation_87/fake_quantize/AsymmQuant/add" -> "inception_v3/activation_87/fake_quantize/AsymmQuant/add_1"; +"inception_v3/activation_87/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/AsymmQuant/ReadVariableOp"; +"inception_v3/activation_87/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"inception_v3/activation_87/fake_quantize/AsymmQuant/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/AsymmQuant/add_1"; +"inception_v3/activation_87/fake_quantize/AsymmQuant/add_1" -> "inception_v3/activation_87/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_87/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_87/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "inception_v3/mixed9_1/concat"; +"inception_v3/activation_88/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/activation_88/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; +"inception_v3/activation_88/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "inception_v3/activation_88/fake_quantize/AsymmQuant/Abs"; +"inception_v3/activation_88/fake_quantize/AsymmQuant/Abs" -> "inception_v3/activation_88/fake_quantize/AsymmQuant/add"; +"inception_v3/activation_88/fake_quantize/AsymmQuant/add/y" -> "inception_v3/activation_88/fake_quantize/AsymmQuant/add"; +"inception_v3/activation_88/fake_quantize/AsymmQuant/add" -> "inception_v3/activation_88/fake_quantize/AsymmQuant/add_1"; +"inception_v3/activation_88/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_88/fake_quantize/AsymmQuant/ReadVariableOp"; +"inception_v3/activation_88/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_88/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"inception_v3/activation_88/fake_quantize/AsymmQuant/ReadVariableOp" -> "inception_v3/activation_88/fake_quantize/AsymmQuant/add_1"; +"inception_v3/activation_88/fake_quantize/AsymmQuant/add_1" -> "inception_v3/activation_88/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_88/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "inception_v3/activation_88/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_88/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "inception_v3/mixed9_1/concat"; +"inception_v3/activation_85/Relu" -> "inception_v3/activation_85/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_85/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/activation_85/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; +"inception_v3/activation_85/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "inception_v3/activation_85/fake_quantize/AsymmQuant/Abs"; +"inception_v3/activation_85/fake_quantize/AsymmQuant/Abs" -> "inception_v3/activation_85/fake_quantize/AsymmQuant/add"; +"inception_v3/activation_85/fake_quantize/AsymmQuant/add/y" -> 
"inception_v3/activation_85/fake_quantize/AsymmQuant/add"; +"inception_v3/activation_85/fake_quantize/AsymmQuant/add" -> "inception_v3/activation_85/fake_quantize/AsymmQuant/add_1"; +"inception_v3/activation_85/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_85/fake_quantize/AsymmQuant/ReadVariableOp"; +"inception_v3/activation_85/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_85/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"inception_v3/activation_85/fake_quantize/AsymmQuant/ReadVariableOp" -> "inception_v3/activation_85/fake_quantize/AsymmQuant/add_1"; +"inception_v3/activation_85/fake_quantize/AsymmQuant/add_1" -> "inception_v3/activation_85/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_85/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "inception_v3/activation_85/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_85/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "inception_v3/mixed10/concat"; "inception_v3/mixed9_1/concat/axis" -> "inception_v3/mixed9_1/concat"; "inception_v3/mixed9_1/concat" -> "inception_v3/mixed10/concat"; "inception_v3/concatenate_1/concat/axis" -> "inception_v3/concatenate_1/concat"; "inception_v3/concatenate_1/concat" -> "inception_v3/mixed10/concat"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/Abs/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/Abs"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/Abs" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/add"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/add/y" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/add"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/add" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/add_1"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/add_1"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/add_1" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/FakeQuantWithMinMaxVars/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/FakeQuantWithMinMaxVars" -> "inception_v3/mixed10/concat"; +"inception_v3/activation_93/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/activation_93/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; +"inception_v3/activation_93/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "inception_v3/activation_93/fake_quantize/AsymmQuant/Abs"; +"inception_v3/activation_93/fake_quantize/AsymmQuant/Abs" -> "inception_v3/activation_93/fake_quantize/AsymmQuant/add"; +"inception_v3/activation_93/fake_quantize/AsymmQuant/add/y" -> "inception_v3/activation_93/fake_quantize/AsymmQuant/add"; +"inception_v3/activation_93/fake_quantize/AsymmQuant/add" -> "inception_v3/activation_93/fake_quantize/AsymmQuant/add_1"; +"inception_v3/activation_93/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> 
"inception_v3/activation_93/fake_quantize/AsymmQuant/ReadVariableOp"; +"inception_v3/activation_93/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_93/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"inception_v3/activation_93/fake_quantize/AsymmQuant/ReadVariableOp" -> "inception_v3/activation_93/fake_quantize/AsymmQuant/add_1"; +"inception_v3/activation_93/fake_quantize/AsymmQuant/add_1" -> "inception_v3/activation_93/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_93/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "inception_v3/activation_93/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_93/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "inception_v3/mixed10/concat"; "inception_v3/mixed10/concat/axis" -> "inception_v3/mixed10/concat"; "inception_v3/mixed10/concat" -> "inception_v3/avg_pool/Mean"; "inception_v3/avg_pool/Mean/reduction_indices" -> "inception_v3/avg_pool/Mean"; diff --git a/tests/tensorflow/data/reference_graphs/2.4/quantized/hw/GPU/inception_v3.dot b/tests/tensorflow/data/reference_graphs/2.4/quantized/hw/GPU/inception_v3.dot index 5ff2e61abce..8fc50588ab2 100644 --- a/tests/tensorflow/data/reference_graphs/2.4/quantized/hw/GPU/inception_v3.dot +++ b/tests/tensorflow/data/reference_graphs/2.4/quantized/hw/GPU/inception_v3.dot @@ -2853,18 +2853,6 @@ self [op=Placeholder]; "inception_v3/conv2d_87/SymmQuant/FakeQuantWithMinMaxVarsPerChannel/ReadVariableOp" [op=ReadVariableOp]; "inception_v3/conv2d_87/SymmQuant/FakeQuantWithMinMaxVarsPerChannel" [op=FakeQuantWithMinMaxVarsPerChannel]; "inception_v3/conv2d_87/Conv2D" [op=Conv2D]; -"inception_v3/conv2d_85/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; -"inception_v3/conv2d_85/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/conv2d_85/SymmQuant/Abs" [op=Abs]; -"inception_v3/conv2d_85/SymmQuant/add/y" [op=Const]; -"inception_v3/conv2d_85/SymmQuant/add" [op=AddV2]; -"inception_v3/conv2d_85/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; -"inception_v3/conv2d_85/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/conv2d_85/SymmQuant/mul" [op=Mul]; -"inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel/ReadVariableOp/resource" [op=Placeholder]; -"inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel" [op=FakeQuantWithMinMaxVarsPerChannel]; -"inception_v3/conv2d_85/Conv2D" [op=Conv2D]; "inception_v3/conv2d_93/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; "inception_v3/conv2d_93/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; "inception_v3/conv2d_93/SymmQuant/Abs" [op=Abs]; @@ -2909,14 +2897,18 @@ self [op=Placeholder]; "inception_v3/batch_normalization_87/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; "inception_v3/batch_normalization_87/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; "inception_v3/batch_normalization_87/FusedBatchNormV3" [op=FusedBatchNormV3]; -"inception_v3/batch_normalization_85/scale" [op=Placeholder]; -"inception_v3/batch_normalization_85/ReadVariableOp/resource" [op=Placeholder]; -"inception_v3/batch_normalization_85/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; -"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; 
-"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; -"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; -"inception_v3/batch_normalization_85/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_v3/conv2d_85/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/conv2d_85/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/conv2d_85/SymmQuant/Abs" [op=Abs]; +"inception_v3/conv2d_85/SymmQuant/add/y" [op=Const]; +"inception_v3/conv2d_85/SymmQuant/add" [op=AddV2]; +"inception_v3/conv2d_85/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/conv2d_85/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/conv2d_85/SymmQuant/mul" [op=Mul]; +"inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel" [op=FakeQuantWithMinMaxVarsPerChannel]; +"inception_v3/conv2d_85/Conv2D" [op=Conv2D]; "inception_v3/batch_normalization_93/scale" [op=Placeholder]; "inception_v3/batch_normalization_93/ReadVariableOp/resource" [op=Placeholder]; "inception_v3/batch_normalization_93/ReadVariableOp" [op=ReadVariableOp]; @@ -2929,62 +2921,80 @@ self [op=Placeholder]; "inception_v3/activation_91/Relu" [op=Relu]; "inception_v3/activation_88/Relu" [op=Relu]; "inception_v3/activation_87/Relu" [op=Relu]; -"inception_v3/activation_85/Relu" [op=Relu]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/Abs" [op=Abs]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/add/y" [op=Const]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/add" [op=AddV2]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/add_1" [op=AddV2]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"inception_v3/batch_normalization_85/scale" [op=Placeholder]; +"inception_v3/batch_normalization_85/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/batch_normalization_85/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_v3/batch_normalization_85/FusedBatchNormV3" [op=FusedBatchNormV3]; "inception_v3/activation_93/Relu" [op=Relu]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/Abs/ReadVariableOp" [op=ReadVariableOp]; 
-"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/Abs" [op=Abs]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/add/y" [op=Const]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/add" [op=AddV2]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/add_1" [op=AddV2]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/Abs/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/Abs" [op=Abs]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/add/y" [op=Const]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/add" [op=AddV2]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/add_1" [op=AddV2]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/Abs/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/Abs" [op=Abs]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/add/y" [op=Const]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/add" [op=AddV2]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/add_1" [op=AddV2]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/Abs/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/Abs" [op=Abs]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/add/y" [op=Const]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/add" [op=AddV2]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/add_1" [op=AddV2]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"inception_v3/activation_91/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/activation_91/fake_quantize/AsymmQuant/Abs/ReadVariableOp" 
[op=ReadVariableOp]; +"inception_v3/activation_91/fake_quantize/AsymmQuant/Abs" [op=Abs]; +"inception_v3/activation_91/fake_quantize/AsymmQuant/add/y" [op=Const]; +"inception_v3/activation_91/fake_quantize/AsymmQuant/add" [op=AddV2]; +"inception_v3/activation_91/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/activation_91/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_91/fake_quantize/AsymmQuant/add_1" [op=AddV2]; +"inception_v3/activation_91/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_91/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"inception_v3/activation_92/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/activation_92/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_92/fake_quantize/AsymmQuant/Abs" [op=Abs]; +"inception_v3/activation_92/fake_quantize/AsymmQuant/add/y" [op=Const]; +"inception_v3/activation_92/fake_quantize/AsymmQuant/add" [op=AddV2]; +"inception_v3/activation_92/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/activation_92/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_92/fake_quantize/AsymmQuant/add_1" [op=AddV2]; +"inception_v3/activation_92/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_92/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"inception_v3/activation_87/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/activation_87/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_87/fake_quantize/AsymmQuant/Abs" [op=Abs]; +"inception_v3/activation_87/fake_quantize/AsymmQuant/add/y" [op=Const]; +"inception_v3/activation_87/fake_quantize/AsymmQuant/add" [op=AddV2]; +"inception_v3/activation_87/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/activation_87/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_87/fake_quantize/AsymmQuant/add_1" [op=AddV2]; +"inception_v3/activation_87/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_87/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"inception_v3/activation_88/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/activation_88/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_88/fake_quantize/AsymmQuant/Abs" [op=Abs]; +"inception_v3/activation_88/fake_quantize/AsymmQuant/add/y" [op=Const]; +"inception_v3/activation_88/fake_quantize/AsymmQuant/add" [op=AddV2]; +"inception_v3/activation_88/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/activation_88/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_88/fake_quantize/AsymmQuant/add_1" [op=AddV2]; +"inception_v3/activation_88/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_88/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"inception_v3/activation_85/Relu" [op=Relu]; +"inception_v3/activation_85/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; 
+"inception_v3/activation_85/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_85/fake_quantize/AsymmQuant/Abs" [op=Abs]; +"inception_v3/activation_85/fake_quantize/AsymmQuant/add/y" [op=Const]; +"inception_v3/activation_85/fake_quantize/AsymmQuant/add" [op=AddV2]; +"inception_v3/activation_85/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/activation_85/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_85/fake_quantize/AsymmQuant/add_1" [op=AddV2]; +"inception_v3/activation_85/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_85/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; "inception_v3/mixed9_1/concat/axis" [op=Const]; "inception_v3/mixed9_1/concat" [op=ConcatV2]; "inception_v3/concatenate_1/concat/axis" [op=Const]; "inception_v3/concatenate_1/concat" [op=ConcatV2]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/Abs/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/Abs" [op=Abs]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/add/y" [op=Const]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/add" [op=AddV2]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/add_1" [op=AddV2]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"inception_v3/activation_93/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/activation_93/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_93/fake_quantize/AsymmQuant/Abs" [op=Abs]; +"inception_v3/activation_93/fake_quantize/AsymmQuant/add/y" [op=Const]; +"inception_v3/activation_93/fake_quantize/AsymmQuant/add" [op=AddV2]; +"inception_v3/activation_93/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/activation_93/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_93/fake_quantize/AsymmQuant/add_1" [op=AddV2]; +"inception_v3/activation_93/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_93/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; "inception_v3/mixed10/concat/axis" [op=Const]; "inception_v3/mixed10/concat" [op=ConcatV2]; "inception_v3/avg_pool/Mean/reduction_indices" [op=Const]; @@ -6155,19 +6165,6 @@ self -> "inception_v3/input_1/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; "inception_v3/conv2d_87/SymmQuant/FakeQuantWithMinMaxVarsPerChannel/ReadVariableOp" -> "inception_v3/conv2d_87/SymmQuant/FakeQuantWithMinMaxVarsPerChannel"; "inception_v3/conv2d_87/SymmQuant/FakeQuantWithMinMaxVarsPerChannel" -> "inception_v3/conv2d_87/Conv2D"; "inception_v3/conv2d_87/Conv2D" -> "inception_v3/batch_normalization_87/FusedBatchNormV3"; -"inception_v3/conv2d_85/SymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/conv2d_85/SymmQuant/Abs/ReadVariableOp"; -"inception_v3/conv2d_85/SymmQuant/Abs/ReadVariableOp" -> 
"inception_v3/conv2d_85/SymmQuant/Abs"; -"inception_v3/conv2d_85/SymmQuant/Abs" -> "inception_v3/conv2d_85/SymmQuant/add"; -"inception_v3/conv2d_85/SymmQuant/add/y" -> "inception_v3/conv2d_85/SymmQuant/add"; -"inception_v3/conv2d_85/SymmQuant/add" -> "inception_v3/conv2d_85/SymmQuant/mul"; -"inception_v3/conv2d_85/SymmQuant/add" -> "inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel"; -"inception_v3/conv2d_85/SymmQuant/mul/ReadVariableOp/resource" -> "inception_v3/conv2d_85/SymmQuant/mul/ReadVariableOp"; -"inception_v3/conv2d_85/SymmQuant/mul/ReadVariableOp" -> "inception_v3/conv2d_85/SymmQuant/mul"; -"inception_v3/conv2d_85/SymmQuant/mul" -> "inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel"; -"inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel/ReadVariableOp/resource" -> "inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel/ReadVariableOp"; -"inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel/ReadVariableOp" -> "inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel"; -"inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel" -> "inception_v3/conv2d_85/Conv2D"; -"inception_v3/conv2d_85/Conv2D" -> "inception_v3/batch_normalization_85/FusedBatchNormV3"; "inception_v3/conv2d_93/SymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/conv2d_93/SymmQuant/Abs/ReadVariableOp"; "inception_v3/conv2d_93/SymmQuant/Abs/ReadVariableOp" -> "inception_v3/conv2d_93/SymmQuant/Abs"; "inception_v3/conv2d_93/SymmQuant/Abs" -> "inception_v3/conv2d_93/SymmQuant/add"; @@ -6213,14 +6210,19 @@ self -> "inception_v3/input_1/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; "inception_v3/batch_normalization_87/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_v3/batch_normalization_87/FusedBatchNormV3/ReadVariableOp_1"; "inception_v3/batch_normalization_87/FusedBatchNormV3/ReadVariableOp_1" -> "inception_v3/batch_normalization_87/FusedBatchNormV3"; "inception_v3/batch_normalization_87/FusedBatchNormV3" -> "inception_v3/activation_87/Relu"; -"inception_v3/batch_normalization_85/scale" -> "inception_v3/batch_normalization_85/FusedBatchNormV3"; -"inception_v3/batch_normalization_85/ReadVariableOp/resource" -> "inception_v3/batch_normalization_85/ReadVariableOp"; -"inception_v3/batch_normalization_85/ReadVariableOp" -> "inception_v3/batch_normalization_85/FusedBatchNormV3"; -"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp"; -"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp" -> "inception_v3/batch_normalization_85/FusedBatchNormV3"; -"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp_1"; -"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp_1" -> "inception_v3/batch_normalization_85/FusedBatchNormV3"; -"inception_v3/batch_normalization_85/FusedBatchNormV3" -> "inception_v3/activation_85/Relu"; +"inception_v3/conv2d_85/SymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/conv2d_85/SymmQuant/Abs/ReadVariableOp"; +"inception_v3/conv2d_85/SymmQuant/Abs/ReadVariableOp" -> "inception_v3/conv2d_85/SymmQuant/Abs"; +"inception_v3/conv2d_85/SymmQuant/Abs" -> "inception_v3/conv2d_85/SymmQuant/add"; +"inception_v3/conv2d_85/SymmQuant/add/y" -> "inception_v3/conv2d_85/SymmQuant/add"; +"inception_v3/conv2d_85/SymmQuant/add" -> "inception_v3/conv2d_85/SymmQuant/mul"; 
+"inception_v3/conv2d_85/SymmQuant/add" -> "inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel"; +"inception_v3/conv2d_85/SymmQuant/mul/ReadVariableOp/resource" -> "inception_v3/conv2d_85/SymmQuant/mul/ReadVariableOp"; +"inception_v3/conv2d_85/SymmQuant/mul/ReadVariableOp" -> "inception_v3/conv2d_85/SymmQuant/mul"; +"inception_v3/conv2d_85/SymmQuant/mul" -> "inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel"; +"inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel/ReadVariableOp/resource" -> "inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel/ReadVariableOp"; +"inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel/ReadVariableOp" -> "inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel"; +"inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel" -> "inception_v3/conv2d_85/Conv2D"; +"inception_v3/conv2d_85/Conv2D" -> "inception_v3/batch_normalization_85/FusedBatchNormV3"; "inception_v3/batch_normalization_93/scale" -> "inception_v3/batch_normalization_93/FusedBatchNormV3"; "inception_v3/batch_normalization_93/ReadVariableOp/resource" -> "inception_v3/batch_normalization_93/ReadVariableOp"; "inception_v3/batch_normalization_93/ReadVariableOp" -> "inception_v3/batch_normalization_93/FusedBatchNormV3"; @@ -6229,82 +6231,90 @@ self -> "inception_v3/input_1/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; "inception_v3/batch_normalization_93/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_v3/batch_normalization_93/FusedBatchNormV3/ReadVariableOp_1"; "inception_v3/batch_normalization_93/FusedBatchNormV3/ReadVariableOp_1" -> "inception_v3/batch_normalization_93/FusedBatchNormV3"; "inception_v3/batch_normalization_93/FusedBatchNormV3" -> "inception_v3/activation_93/Relu"; -"inception_v3/activation_92/Relu" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars"; -"inception_v3/activation_91/Relu" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars"; -"inception_v3/activation_88/Relu" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/Relu" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars"; -"inception_v3/activation_85/Relu" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/Abs/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/Abs/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/Abs/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/Abs/ReadVariableOp"; 
-"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/Abs/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/Abs"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/Abs" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/add"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/add/y" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/add"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/add" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/add_1"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/FakeQuantWithMinMaxVars/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/FakeQuantWithMinMaxVars/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/add_1"; 
-"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/add_1" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars" -> "inception_v3/mixed10/concat"; -"inception_v3/activation_93/Relu" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/Abs/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/Abs"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/Abs" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/add"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/add/y" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/add"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/add" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/add_1"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/add_1"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/add_1" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars" -> "inception_v3/concatenate_1/concat"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/Abs/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/Abs"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/Abs" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/add"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/add/y" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/add"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/add" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/add_1"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/add_1"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/add_1" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars" -> "inception_v3/concatenate_1/concat"; 
-"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/Abs/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/Abs"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/Abs" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/add"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/add/y" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/add"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/add" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/add_1"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/add_1"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/add_1" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars" -> "inception_v3/mixed9_1/concat"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/Abs/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/Abs"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/Abs" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/add"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/add/y" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/add"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/add" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/add_1"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/add_1"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/add_1" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/FakeQuantWithMinMaxVars/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/FakeQuantWithMinMaxVars" -> "inception_v3/mixed9_1/concat"; +"inception_v3/activation_92/Relu" -> "inception_v3/activation_92/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_91/Relu" -> "inception_v3/activation_91/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_88/Relu" -> "inception_v3/activation_88/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_87/Relu" -> "inception_v3/activation_87/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/batch_normalization_85/scale" -> "inception_v3/batch_normalization_85/FusedBatchNormV3"; +"inception_v3/batch_normalization_85/ReadVariableOp/resource" -> "inception_v3/batch_normalization_85/ReadVariableOp"; +"inception_v3/batch_normalization_85/ReadVariableOp" -> 
"inception_v3/batch_normalization_85/FusedBatchNormV3"; +"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp"; +"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp" -> "inception_v3/batch_normalization_85/FusedBatchNormV3"; +"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp_1"; +"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp_1" -> "inception_v3/batch_normalization_85/FusedBatchNormV3"; +"inception_v3/batch_normalization_85/FusedBatchNormV3" -> "inception_v3/activation_85/Relu"; +"inception_v3/activation_93/Relu" -> "inception_v3/activation_93/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_91/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/activation_91/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; +"inception_v3/activation_91/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "inception_v3/activation_91/fake_quantize/AsymmQuant/Abs"; +"inception_v3/activation_91/fake_quantize/AsymmQuant/Abs" -> "inception_v3/activation_91/fake_quantize/AsymmQuant/add"; +"inception_v3/activation_91/fake_quantize/AsymmQuant/add/y" -> "inception_v3/activation_91/fake_quantize/AsymmQuant/add"; +"inception_v3/activation_91/fake_quantize/AsymmQuant/add" -> "inception_v3/activation_91/fake_quantize/AsymmQuant/add_1"; +"inception_v3/activation_91/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_91/fake_quantize/AsymmQuant/ReadVariableOp"; +"inception_v3/activation_91/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_91/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"inception_v3/activation_91/fake_quantize/AsymmQuant/ReadVariableOp" -> "inception_v3/activation_91/fake_quantize/AsymmQuant/add_1"; +"inception_v3/activation_91/fake_quantize/AsymmQuant/add_1" -> "inception_v3/activation_91/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_91/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "inception_v3/activation_91/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_91/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "inception_v3/concatenate_1/concat"; +"inception_v3/activation_92/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/activation_92/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; +"inception_v3/activation_92/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "inception_v3/activation_92/fake_quantize/AsymmQuant/Abs"; +"inception_v3/activation_92/fake_quantize/AsymmQuant/Abs" -> "inception_v3/activation_92/fake_quantize/AsymmQuant/add"; +"inception_v3/activation_92/fake_quantize/AsymmQuant/add/y" -> "inception_v3/activation_92/fake_quantize/AsymmQuant/add"; +"inception_v3/activation_92/fake_quantize/AsymmQuant/add" -> "inception_v3/activation_92/fake_quantize/AsymmQuant/add_1"; +"inception_v3/activation_92/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_92/fake_quantize/AsymmQuant/ReadVariableOp"; +"inception_v3/activation_92/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_92/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"inception_v3/activation_92/fake_quantize/AsymmQuant/ReadVariableOp" -> "inception_v3/activation_92/fake_quantize/AsymmQuant/add_1"; 
+"inception_v3/activation_92/fake_quantize/AsymmQuant/add_1" -> "inception_v3/activation_92/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_92/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "inception_v3/activation_92/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_92/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "inception_v3/concatenate_1/concat"; +"inception_v3/activation_87/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; +"inception_v3/activation_87/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/AsymmQuant/Abs"; +"inception_v3/activation_87/fake_quantize/AsymmQuant/Abs" -> "inception_v3/activation_87/fake_quantize/AsymmQuant/add"; +"inception_v3/activation_87/fake_quantize/AsymmQuant/add/y" -> "inception_v3/activation_87/fake_quantize/AsymmQuant/add"; +"inception_v3/activation_87/fake_quantize/AsymmQuant/add" -> "inception_v3/activation_87/fake_quantize/AsymmQuant/add_1"; +"inception_v3/activation_87/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/AsymmQuant/ReadVariableOp"; +"inception_v3/activation_87/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"inception_v3/activation_87/fake_quantize/AsymmQuant/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/AsymmQuant/add_1"; +"inception_v3/activation_87/fake_quantize/AsymmQuant/add_1" -> "inception_v3/activation_87/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_87/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_87/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "inception_v3/mixed9_1/concat"; +"inception_v3/activation_88/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/activation_88/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; +"inception_v3/activation_88/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "inception_v3/activation_88/fake_quantize/AsymmQuant/Abs"; +"inception_v3/activation_88/fake_quantize/AsymmQuant/Abs" -> "inception_v3/activation_88/fake_quantize/AsymmQuant/add"; +"inception_v3/activation_88/fake_quantize/AsymmQuant/add/y" -> "inception_v3/activation_88/fake_quantize/AsymmQuant/add"; +"inception_v3/activation_88/fake_quantize/AsymmQuant/add" -> "inception_v3/activation_88/fake_quantize/AsymmQuant/add_1"; +"inception_v3/activation_88/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_88/fake_quantize/AsymmQuant/ReadVariableOp"; +"inception_v3/activation_88/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_88/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"inception_v3/activation_88/fake_quantize/AsymmQuant/ReadVariableOp" -> "inception_v3/activation_88/fake_quantize/AsymmQuant/add_1"; +"inception_v3/activation_88/fake_quantize/AsymmQuant/add_1" -> "inception_v3/activation_88/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_88/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "inception_v3/activation_88/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_88/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "inception_v3/mixed9_1/concat"; 
+"inception_v3/activation_85/Relu" -> "inception_v3/activation_85/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_85/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/activation_85/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; +"inception_v3/activation_85/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "inception_v3/activation_85/fake_quantize/AsymmQuant/Abs"; +"inception_v3/activation_85/fake_quantize/AsymmQuant/Abs" -> "inception_v3/activation_85/fake_quantize/AsymmQuant/add"; +"inception_v3/activation_85/fake_quantize/AsymmQuant/add/y" -> "inception_v3/activation_85/fake_quantize/AsymmQuant/add"; +"inception_v3/activation_85/fake_quantize/AsymmQuant/add" -> "inception_v3/activation_85/fake_quantize/AsymmQuant/add_1"; +"inception_v3/activation_85/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_85/fake_quantize/AsymmQuant/ReadVariableOp"; +"inception_v3/activation_85/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_85/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"inception_v3/activation_85/fake_quantize/AsymmQuant/ReadVariableOp" -> "inception_v3/activation_85/fake_quantize/AsymmQuant/add_1"; +"inception_v3/activation_85/fake_quantize/AsymmQuant/add_1" -> "inception_v3/activation_85/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_85/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "inception_v3/activation_85/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_85/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "inception_v3/mixed10/concat"; "inception_v3/mixed9_1/concat/axis" -> "inception_v3/mixed9_1/concat"; "inception_v3/mixed9_1/concat" -> "inception_v3/mixed10/concat"; "inception_v3/concatenate_1/concat/axis" -> "inception_v3/concatenate_1/concat"; "inception_v3/concatenate_1/concat" -> "inception_v3/mixed10/concat"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/Abs/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/Abs"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/Abs" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/add"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/add/y" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/add"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/add" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/add_1"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/add_1"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/add_1" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/FakeQuantWithMinMaxVars/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/FakeQuantWithMinMaxVars" -> "inception_v3/mixed10/concat"; +"inception_v3/activation_93/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/activation_93/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; 
+"inception_v3/activation_93/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "inception_v3/activation_93/fake_quantize/AsymmQuant/Abs"; +"inception_v3/activation_93/fake_quantize/AsymmQuant/Abs" -> "inception_v3/activation_93/fake_quantize/AsymmQuant/add"; +"inception_v3/activation_93/fake_quantize/AsymmQuant/add/y" -> "inception_v3/activation_93/fake_quantize/AsymmQuant/add"; +"inception_v3/activation_93/fake_quantize/AsymmQuant/add" -> "inception_v3/activation_93/fake_quantize/AsymmQuant/add_1"; +"inception_v3/activation_93/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_93/fake_quantize/AsymmQuant/ReadVariableOp"; +"inception_v3/activation_93/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_93/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"inception_v3/activation_93/fake_quantize/AsymmQuant/ReadVariableOp" -> "inception_v3/activation_93/fake_quantize/AsymmQuant/add_1"; +"inception_v3/activation_93/fake_quantize/AsymmQuant/add_1" -> "inception_v3/activation_93/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_93/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "inception_v3/activation_93/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_93/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "inception_v3/mixed10/concat"; "inception_v3/mixed10/concat/axis" -> "inception_v3/mixed10/concat"; "inception_v3/mixed10/concat" -> "inception_v3/avg_pool/Mean"; "inception_v3/avg_pool/Mean/reduction_indices" -> "inception_v3/avg_pool/Mean"; diff --git a/tests/tensorflow/data/reference_graphs/2.4/quantized/hw/VPU/inception_v3.dot b/tests/tensorflow/data/reference_graphs/2.4/quantized/hw/VPU/inception_v3.dot index 5ff2e61abce..8fc50588ab2 100644 --- a/tests/tensorflow/data/reference_graphs/2.4/quantized/hw/VPU/inception_v3.dot +++ b/tests/tensorflow/data/reference_graphs/2.4/quantized/hw/VPU/inception_v3.dot @@ -2853,18 +2853,6 @@ self [op=Placeholder]; "inception_v3/conv2d_87/SymmQuant/FakeQuantWithMinMaxVarsPerChannel/ReadVariableOp" [op=ReadVariableOp]; "inception_v3/conv2d_87/SymmQuant/FakeQuantWithMinMaxVarsPerChannel" [op=FakeQuantWithMinMaxVarsPerChannel]; "inception_v3/conv2d_87/Conv2D" [op=Conv2D]; -"inception_v3/conv2d_85/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; -"inception_v3/conv2d_85/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/conv2d_85/SymmQuant/Abs" [op=Abs]; -"inception_v3/conv2d_85/SymmQuant/add/y" [op=Const]; -"inception_v3/conv2d_85/SymmQuant/add" [op=AddV2]; -"inception_v3/conv2d_85/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; -"inception_v3/conv2d_85/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/conv2d_85/SymmQuant/mul" [op=Mul]; -"inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel/ReadVariableOp/resource" [op=Placeholder]; -"inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel" [op=FakeQuantWithMinMaxVarsPerChannel]; -"inception_v3/conv2d_85/Conv2D" [op=Conv2D]; "inception_v3/conv2d_93/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; "inception_v3/conv2d_93/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; "inception_v3/conv2d_93/SymmQuant/Abs" [op=Abs]; @@ -2909,14 +2897,18 @@ self [op=Placeholder]; "inception_v3/batch_normalization_87/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; 
"inception_v3/batch_normalization_87/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; "inception_v3/batch_normalization_87/FusedBatchNormV3" [op=FusedBatchNormV3]; -"inception_v3/batch_normalization_85/scale" [op=Placeholder]; -"inception_v3/batch_normalization_85/ReadVariableOp/resource" [op=Placeholder]; -"inception_v3/batch_normalization_85/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; -"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; -"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; -"inception_v3/batch_normalization_85/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_v3/conv2d_85/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/conv2d_85/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/conv2d_85/SymmQuant/Abs" [op=Abs]; +"inception_v3/conv2d_85/SymmQuant/add/y" [op=Const]; +"inception_v3/conv2d_85/SymmQuant/add" [op=AddV2]; +"inception_v3/conv2d_85/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/conv2d_85/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/conv2d_85/SymmQuant/mul" [op=Mul]; +"inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel" [op=FakeQuantWithMinMaxVarsPerChannel]; +"inception_v3/conv2d_85/Conv2D" [op=Conv2D]; "inception_v3/batch_normalization_93/scale" [op=Placeholder]; "inception_v3/batch_normalization_93/ReadVariableOp/resource" [op=Placeholder]; "inception_v3/batch_normalization_93/ReadVariableOp" [op=ReadVariableOp]; @@ -2929,62 +2921,80 @@ self [op=Placeholder]; "inception_v3/activation_91/Relu" [op=Relu]; "inception_v3/activation_88/Relu" [op=Relu]; "inception_v3/activation_87/Relu" [op=Relu]; -"inception_v3/activation_85/Relu" [op=Relu]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/Abs" [op=Abs]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/add/y" [op=Const]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/add" [op=AddV2]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/add_1" [op=AddV2]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"inception_v3/batch_normalization_85/scale" [op=Placeholder]; +"inception_v3/batch_normalization_85/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/batch_normalization_85/ReadVariableOp" [op=ReadVariableOp]; 
+"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_v3/batch_normalization_85/FusedBatchNormV3" [op=FusedBatchNormV3]; "inception_v3/activation_93/Relu" [op=Relu]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/Abs/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/Abs" [op=Abs]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/add/y" [op=Const]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/add" [op=AddV2]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/add_1" [op=AddV2]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/Abs/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/Abs" [op=Abs]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/add/y" [op=Const]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/add" [op=AddV2]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/add_1" [op=AddV2]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/Abs/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/Abs" [op=Abs]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/add/y" [op=Const]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/add" [op=AddV2]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/add_1" [op=AddV2]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/Abs/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/Abs" [op=Abs]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/add/y" [op=Const]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/add" [op=AddV2]; 
-"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/add_1" [op=AddV2]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"inception_v3/activation_91/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/activation_91/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_91/fake_quantize/AsymmQuant/Abs" [op=Abs]; +"inception_v3/activation_91/fake_quantize/AsymmQuant/add/y" [op=Const]; +"inception_v3/activation_91/fake_quantize/AsymmQuant/add" [op=AddV2]; +"inception_v3/activation_91/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/activation_91/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_91/fake_quantize/AsymmQuant/add_1" [op=AddV2]; +"inception_v3/activation_91/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_91/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"inception_v3/activation_92/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/activation_92/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_92/fake_quantize/AsymmQuant/Abs" [op=Abs]; +"inception_v3/activation_92/fake_quantize/AsymmQuant/add/y" [op=Const]; +"inception_v3/activation_92/fake_quantize/AsymmQuant/add" [op=AddV2]; +"inception_v3/activation_92/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/activation_92/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_92/fake_quantize/AsymmQuant/add_1" [op=AddV2]; +"inception_v3/activation_92/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_92/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"inception_v3/activation_87/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/activation_87/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_87/fake_quantize/AsymmQuant/Abs" [op=Abs]; +"inception_v3/activation_87/fake_quantize/AsymmQuant/add/y" [op=Const]; +"inception_v3/activation_87/fake_quantize/AsymmQuant/add" [op=AddV2]; +"inception_v3/activation_87/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/activation_87/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_87/fake_quantize/AsymmQuant/add_1" [op=AddV2]; +"inception_v3/activation_87/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_87/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"inception_v3/activation_88/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/activation_88/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_88/fake_quantize/AsymmQuant/Abs" [op=Abs]; +"inception_v3/activation_88/fake_quantize/AsymmQuant/add/y" [op=Const]; +"inception_v3/activation_88/fake_quantize/AsymmQuant/add" 
[op=AddV2]; +"inception_v3/activation_88/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/activation_88/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_88/fake_quantize/AsymmQuant/add_1" [op=AddV2]; +"inception_v3/activation_88/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_88/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"inception_v3/activation_85/Relu" [op=Relu]; +"inception_v3/activation_85/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/activation_85/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_85/fake_quantize/AsymmQuant/Abs" [op=Abs]; +"inception_v3/activation_85/fake_quantize/AsymmQuant/add/y" [op=Const]; +"inception_v3/activation_85/fake_quantize/AsymmQuant/add" [op=AddV2]; +"inception_v3/activation_85/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/activation_85/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_85/fake_quantize/AsymmQuant/add_1" [op=AddV2]; +"inception_v3/activation_85/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_85/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; "inception_v3/mixed9_1/concat/axis" [op=Const]; "inception_v3/mixed9_1/concat" [op=ConcatV2]; "inception_v3/concatenate_1/concat/axis" [op=Const]; "inception_v3/concatenate_1/concat" [op=ConcatV2]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/Abs/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/Abs" [op=Abs]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/add/y" [op=Const]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/add" [op=AddV2]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/add_1" [op=AddV2]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"inception_v3/activation_93/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/activation_93/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_93/fake_quantize/AsymmQuant/Abs" [op=Abs]; +"inception_v3/activation_93/fake_quantize/AsymmQuant/add/y" [op=Const]; +"inception_v3/activation_93/fake_quantize/AsymmQuant/add" [op=AddV2]; +"inception_v3/activation_93/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/activation_93/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_93/fake_quantize/AsymmQuant/add_1" [op=AddV2]; +"inception_v3/activation_93/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_93/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; "inception_v3/mixed10/concat/axis" [op=Const]; "inception_v3/mixed10/concat" [op=ConcatV2]; "inception_v3/avg_pool/Mean/reduction_indices" [op=Const]; @@ 
-6155,19 +6165,6 @@ self -> "inception_v3/input_1/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; "inception_v3/conv2d_87/SymmQuant/FakeQuantWithMinMaxVarsPerChannel/ReadVariableOp" -> "inception_v3/conv2d_87/SymmQuant/FakeQuantWithMinMaxVarsPerChannel"; "inception_v3/conv2d_87/SymmQuant/FakeQuantWithMinMaxVarsPerChannel" -> "inception_v3/conv2d_87/Conv2D"; "inception_v3/conv2d_87/Conv2D" -> "inception_v3/batch_normalization_87/FusedBatchNormV3"; -"inception_v3/conv2d_85/SymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/conv2d_85/SymmQuant/Abs/ReadVariableOp"; -"inception_v3/conv2d_85/SymmQuant/Abs/ReadVariableOp" -> "inception_v3/conv2d_85/SymmQuant/Abs"; -"inception_v3/conv2d_85/SymmQuant/Abs" -> "inception_v3/conv2d_85/SymmQuant/add"; -"inception_v3/conv2d_85/SymmQuant/add/y" -> "inception_v3/conv2d_85/SymmQuant/add"; -"inception_v3/conv2d_85/SymmQuant/add" -> "inception_v3/conv2d_85/SymmQuant/mul"; -"inception_v3/conv2d_85/SymmQuant/add" -> "inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel"; -"inception_v3/conv2d_85/SymmQuant/mul/ReadVariableOp/resource" -> "inception_v3/conv2d_85/SymmQuant/mul/ReadVariableOp"; -"inception_v3/conv2d_85/SymmQuant/mul/ReadVariableOp" -> "inception_v3/conv2d_85/SymmQuant/mul"; -"inception_v3/conv2d_85/SymmQuant/mul" -> "inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel"; -"inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel/ReadVariableOp/resource" -> "inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel/ReadVariableOp"; -"inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel/ReadVariableOp" -> "inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel"; -"inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel" -> "inception_v3/conv2d_85/Conv2D"; -"inception_v3/conv2d_85/Conv2D" -> "inception_v3/batch_normalization_85/FusedBatchNormV3"; "inception_v3/conv2d_93/SymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/conv2d_93/SymmQuant/Abs/ReadVariableOp"; "inception_v3/conv2d_93/SymmQuant/Abs/ReadVariableOp" -> "inception_v3/conv2d_93/SymmQuant/Abs"; "inception_v3/conv2d_93/SymmQuant/Abs" -> "inception_v3/conv2d_93/SymmQuant/add"; @@ -6213,14 +6210,19 @@ self -> "inception_v3/input_1/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; "inception_v3/batch_normalization_87/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_v3/batch_normalization_87/FusedBatchNormV3/ReadVariableOp_1"; "inception_v3/batch_normalization_87/FusedBatchNormV3/ReadVariableOp_1" -> "inception_v3/batch_normalization_87/FusedBatchNormV3"; "inception_v3/batch_normalization_87/FusedBatchNormV3" -> "inception_v3/activation_87/Relu"; -"inception_v3/batch_normalization_85/scale" -> "inception_v3/batch_normalization_85/FusedBatchNormV3"; -"inception_v3/batch_normalization_85/ReadVariableOp/resource" -> "inception_v3/batch_normalization_85/ReadVariableOp"; -"inception_v3/batch_normalization_85/ReadVariableOp" -> "inception_v3/batch_normalization_85/FusedBatchNormV3"; -"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp"; -"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp" -> "inception_v3/batch_normalization_85/FusedBatchNormV3"; -"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp_1"; -"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp_1" -> 
"inception_v3/batch_normalization_85/FusedBatchNormV3"; -"inception_v3/batch_normalization_85/FusedBatchNormV3" -> "inception_v3/activation_85/Relu"; +"inception_v3/conv2d_85/SymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/conv2d_85/SymmQuant/Abs/ReadVariableOp"; +"inception_v3/conv2d_85/SymmQuant/Abs/ReadVariableOp" -> "inception_v3/conv2d_85/SymmQuant/Abs"; +"inception_v3/conv2d_85/SymmQuant/Abs" -> "inception_v3/conv2d_85/SymmQuant/add"; +"inception_v3/conv2d_85/SymmQuant/add/y" -> "inception_v3/conv2d_85/SymmQuant/add"; +"inception_v3/conv2d_85/SymmQuant/add" -> "inception_v3/conv2d_85/SymmQuant/mul"; +"inception_v3/conv2d_85/SymmQuant/add" -> "inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel"; +"inception_v3/conv2d_85/SymmQuant/mul/ReadVariableOp/resource" -> "inception_v3/conv2d_85/SymmQuant/mul/ReadVariableOp"; +"inception_v3/conv2d_85/SymmQuant/mul/ReadVariableOp" -> "inception_v3/conv2d_85/SymmQuant/mul"; +"inception_v3/conv2d_85/SymmQuant/mul" -> "inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel"; +"inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel/ReadVariableOp/resource" -> "inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel/ReadVariableOp"; +"inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel/ReadVariableOp" -> "inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel"; +"inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel" -> "inception_v3/conv2d_85/Conv2D"; +"inception_v3/conv2d_85/Conv2D" -> "inception_v3/batch_normalization_85/FusedBatchNormV3"; "inception_v3/batch_normalization_93/scale" -> "inception_v3/batch_normalization_93/FusedBatchNormV3"; "inception_v3/batch_normalization_93/ReadVariableOp/resource" -> "inception_v3/batch_normalization_93/ReadVariableOp"; "inception_v3/batch_normalization_93/ReadVariableOp" -> "inception_v3/batch_normalization_93/FusedBatchNormV3"; @@ -6229,82 +6231,90 @@ self -> "inception_v3/input_1/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; "inception_v3/batch_normalization_93/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_v3/batch_normalization_93/FusedBatchNormV3/ReadVariableOp_1"; "inception_v3/batch_normalization_93/FusedBatchNormV3/ReadVariableOp_1" -> "inception_v3/batch_normalization_93/FusedBatchNormV3"; "inception_v3/batch_normalization_93/FusedBatchNormV3" -> "inception_v3/activation_93/Relu"; -"inception_v3/activation_92/Relu" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars"; -"inception_v3/activation_91/Relu" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars"; -"inception_v3/activation_88/Relu" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/Relu" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars"; -"inception_v3/activation_85/Relu" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/Abs/ReadVariableOp"; 
-"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/Abs/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/Abs/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/Abs/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/Abs/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/Abs"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/Abs" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/add"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/add/y" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/add"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/add" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/add_1"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/FakeQuantWithMinMaxVars/ReadVariableOp"; 
-"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/FakeQuantWithMinMaxVars/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/add_1"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/add_1" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars" -> "inception_v3/mixed10/concat"; -"inception_v3/activation_93/Relu" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/Abs/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/Abs"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/Abs" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/add"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/add/y" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/add"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/add" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/add_1"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/add_1"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/add_1" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars" -> "inception_v3/concatenate_1/concat"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/Abs/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/Abs"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/Abs" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/add"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/add/y" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/add"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/add" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/add_1"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/add_1"; 
-"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/add_1" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars" -> "inception_v3/concatenate_1/concat"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/Abs/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/Abs"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/Abs" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/add"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/add/y" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/add"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/add" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/add_1"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/add_1"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/add_1" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars" -> "inception_v3/mixed9_1/concat"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/Abs/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/Abs"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/Abs" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/add"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/add/y" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/add"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/add" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/add_1"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/add_1"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/add_1" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/FakeQuantWithMinMaxVars/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/FakeQuantWithMinMaxVars" -> "inception_v3/mixed9_1/concat"; +"inception_v3/activation_92/Relu" -> "inception_v3/activation_92/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_91/Relu" -> 
"inception_v3/activation_91/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_88/Relu" -> "inception_v3/activation_88/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_87/Relu" -> "inception_v3/activation_87/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/batch_normalization_85/scale" -> "inception_v3/batch_normalization_85/FusedBatchNormV3"; +"inception_v3/batch_normalization_85/ReadVariableOp/resource" -> "inception_v3/batch_normalization_85/ReadVariableOp"; +"inception_v3/batch_normalization_85/ReadVariableOp" -> "inception_v3/batch_normalization_85/FusedBatchNormV3"; +"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp"; +"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp" -> "inception_v3/batch_normalization_85/FusedBatchNormV3"; +"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp_1"; +"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp_1" -> "inception_v3/batch_normalization_85/FusedBatchNormV3"; +"inception_v3/batch_normalization_85/FusedBatchNormV3" -> "inception_v3/activation_85/Relu"; +"inception_v3/activation_93/Relu" -> "inception_v3/activation_93/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_91/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/activation_91/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; +"inception_v3/activation_91/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "inception_v3/activation_91/fake_quantize/AsymmQuant/Abs"; +"inception_v3/activation_91/fake_quantize/AsymmQuant/Abs" -> "inception_v3/activation_91/fake_quantize/AsymmQuant/add"; +"inception_v3/activation_91/fake_quantize/AsymmQuant/add/y" -> "inception_v3/activation_91/fake_quantize/AsymmQuant/add"; +"inception_v3/activation_91/fake_quantize/AsymmQuant/add" -> "inception_v3/activation_91/fake_quantize/AsymmQuant/add_1"; +"inception_v3/activation_91/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_91/fake_quantize/AsymmQuant/ReadVariableOp"; +"inception_v3/activation_91/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_91/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"inception_v3/activation_91/fake_quantize/AsymmQuant/ReadVariableOp" -> "inception_v3/activation_91/fake_quantize/AsymmQuant/add_1"; +"inception_v3/activation_91/fake_quantize/AsymmQuant/add_1" -> "inception_v3/activation_91/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_91/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "inception_v3/activation_91/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_91/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "inception_v3/concatenate_1/concat"; +"inception_v3/activation_92/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/activation_92/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; +"inception_v3/activation_92/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "inception_v3/activation_92/fake_quantize/AsymmQuant/Abs"; +"inception_v3/activation_92/fake_quantize/AsymmQuant/Abs" -> "inception_v3/activation_92/fake_quantize/AsymmQuant/add"; +"inception_v3/activation_92/fake_quantize/AsymmQuant/add/y" -> "inception_v3/activation_92/fake_quantize/AsymmQuant/add"; 
+"inception_v3/activation_92/fake_quantize/AsymmQuant/add" -> "inception_v3/activation_92/fake_quantize/AsymmQuant/add_1"; +"inception_v3/activation_92/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_92/fake_quantize/AsymmQuant/ReadVariableOp"; +"inception_v3/activation_92/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_92/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"inception_v3/activation_92/fake_quantize/AsymmQuant/ReadVariableOp" -> "inception_v3/activation_92/fake_quantize/AsymmQuant/add_1"; +"inception_v3/activation_92/fake_quantize/AsymmQuant/add_1" -> "inception_v3/activation_92/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_92/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "inception_v3/activation_92/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_92/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "inception_v3/concatenate_1/concat"; +"inception_v3/activation_87/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; +"inception_v3/activation_87/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/AsymmQuant/Abs"; +"inception_v3/activation_87/fake_quantize/AsymmQuant/Abs" -> "inception_v3/activation_87/fake_quantize/AsymmQuant/add"; +"inception_v3/activation_87/fake_quantize/AsymmQuant/add/y" -> "inception_v3/activation_87/fake_quantize/AsymmQuant/add"; +"inception_v3/activation_87/fake_quantize/AsymmQuant/add" -> "inception_v3/activation_87/fake_quantize/AsymmQuant/add_1"; +"inception_v3/activation_87/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/AsymmQuant/ReadVariableOp"; +"inception_v3/activation_87/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"inception_v3/activation_87/fake_quantize/AsymmQuant/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/AsymmQuant/add_1"; +"inception_v3/activation_87/fake_quantize/AsymmQuant/add_1" -> "inception_v3/activation_87/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_87/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_87/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "inception_v3/mixed9_1/concat"; +"inception_v3/activation_88/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/activation_88/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; +"inception_v3/activation_88/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "inception_v3/activation_88/fake_quantize/AsymmQuant/Abs"; +"inception_v3/activation_88/fake_quantize/AsymmQuant/Abs" -> "inception_v3/activation_88/fake_quantize/AsymmQuant/add"; +"inception_v3/activation_88/fake_quantize/AsymmQuant/add/y" -> "inception_v3/activation_88/fake_quantize/AsymmQuant/add"; +"inception_v3/activation_88/fake_quantize/AsymmQuant/add" -> "inception_v3/activation_88/fake_quantize/AsymmQuant/add_1"; +"inception_v3/activation_88/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_88/fake_quantize/AsymmQuant/ReadVariableOp"; +"inception_v3/activation_88/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> 
"inception_v3/activation_88/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"inception_v3/activation_88/fake_quantize/AsymmQuant/ReadVariableOp" -> "inception_v3/activation_88/fake_quantize/AsymmQuant/add_1"; +"inception_v3/activation_88/fake_quantize/AsymmQuant/add_1" -> "inception_v3/activation_88/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_88/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "inception_v3/activation_88/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_88/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "inception_v3/mixed9_1/concat"; +"inception_v3/activation_85/Relu" -> "inception_v3/activation_85/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_85/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/activation_85/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; +"inception_v3/activation_85/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "inception_v3/activation_85/fake_quantize/AsymmQuant/Abs"; +"inception_v3/activation_85/fake_quantize/AsymmQuant/Abs" -> "inception_v3/activation_85/fake_quantize/AsymmQuant/add"; +"inception_v3/activation_85/fake_quantize/AsymmQuant/add/y" -> "inception_v3/activation_85/fake_quantize/AsymmQuant/add"; +"inception_v3/activation_85/fake_quantize/AsymmQuant/add" -> "inception_v3/activation_85/fake_quantize/AsymmQuant/add_1"; +"inception_v3/activation_85/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_85/fake_quantize/AsymmQuant/ReadVariableOp"; +"inception_v3/activation_85/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_85/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"inception_v3/activation_85/fake_quantize/AsymmQuant/ReadVariableOp" -> "inception_v3/activation_85/fake_quantize/AsymmQuant/add_1"; +"inception_v3/activation_85/fake_quantize/AsymmQuant/add_1" -> "inception_v3/activation_85/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_85/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "inception_v3/activation_85/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_85/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "inception_v3/mixed10/concat"; "inception_v3/mixed9_1/concat/axis" -> "inception_v3/mixed9_1/concat"; "inception_v3/mixed9_1/concat" -> "inception_v3/mixed10/concat"; "inception_v3/concatenate_1/concat/axis" -> "inception_v3/concatenate_1/concat"; "inception_v3/concatenate_1/concat" -> "inception_v3/mixed10/concat"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/Abs/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/Abs"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/Abs" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/add"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/add/y" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/add"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/add" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/add_1"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/add_1"; 
-"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/add_1" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/FakeQuantWithMinMaxVars/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/FakeQuantWithMinMaxVars" -> "inception_v3/mixed10/concat"; +"inception_v3/activation_93/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/activation_93/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; +"inception_v3/activation_93/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "inception_v3/activation_93/fake_quantize/AsymmQuant/Abs"; +"inception_v3/activation_93/fake_quantize/AsymmQuant/Abs" -> "inception_v3/activation_93/fake_quantize/AsymmQuant/add"; +"inception_v3/activation_93/fake_quantize/AsymmQuant/add/y" -> "inception_v3/activation_93/fake_quantize/AsymmQuant/add"; +"inception_v3/activation_93/fake_quantize/AsymmQuant/add" -> "inception_v3/activation_93/fake_quantize/AsymmQuant/add_1"; +"inception_v3/activation_93/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_93/fake_quantize/AsymmQuant/ReadVariableOp"; +"inception_v3/activation_93/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_93/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"inception_v3/activation_93/fake_quantize/AsymmQuant/ReadVariableOp" -> "inception_v3/activation_93/fake_quantize/AsymmQuant/add_1"; +"inception_v3/activation_93/fake_quantize/AsymmQuant/add_1" -> "inception_v3/activation_93/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_93/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "inception_v3/activation_93/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_93/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "inception_v3/mixed10/concat"; "inception_v3/mixed10/concat/axis" -> "inception_v3/mixed10/concat"; "inception_v3/mixed10/concat" -> "inception_v3/avg_pool/Mean"; "inception_v3/avg_pool/Mean/reduction_indices" -> "inception_v3/avg_pool/Mean"; diff --git a/tests/tensorflow/data/reference_graphs/2.4/quantized/w_sym_ch_a_asym_t/densenet121.pb b/tests/tensorflow/data/reference_graphs/2.4/quantized/w_sym_ch_a_asym_t/densenet121.pb index 6eb354093be..b1983e0c6fd 100644 --- a/tests/tensorflow/data/reference_graphs/2.4/quantized/w_sym_ch_a_asym_t/densenet121.pb +++ b/tests/tensorflow/data/reference_graphs/2.4/quantized/w_sym_ch_a_asym_t/densenet121.pb @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:83e646ecd1ea8bf1b18ce40ef78c5e323013a6d46e8401299f1593d22aa3aa9f -size 1068261 +oid sha256:5d918075fba8048fd29116ed86a8539988cf55b3ef4cc2b7169f8fd8725450e1 +size 1067562 diff --git a/tests/tensorflow/data/reference_graphs/2.4/quantized/w_sym_ch_a_asym_t/inception_v3.dot b/tests/tensorflow/data/reference_graphs/2.4/quantized/w_sym_ch_a_asym_t/inception_v3.dot index 5ff2e61abce..8fc50588ab2 100644 --- a/tests/tensorflow/data/reference_graphs/2.4/quantized/w_sym_ch_a_asym_t/inception_v3.dot +++ b/tests/tensorflow/data/reference_graphs/2.4/quantized/w_sym_ch_a_asym_t/inception_v3.dot @@ -2853,18 +2853,6 @@ self [op=Placeholder]; "inception_v3/conv2d_87/SymmQuant/FakeQuantWithMinMaxVarsPerChannel/ReadVariableOp" [op=ReadVariableOp]; 
"inception_v3/conv2d_87/SymmQuant/FakeQuantWithMinMaxVarsPerChannel" [op=FakeQuantWithMinMaxVarsPerChannel]; "inception_v3/conv2d_87/Conv2D" [op=Conv2D]; -"inception_v3/conv2d_85/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; -"inception_v3/conv2d_85/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/conv2d_85/SymmQuant/Abs" [op=Abs]; -"inception_v3/conv2d_85/SymmQuant/add/y" [op=Const]; -"inception_v3/conv2d_85/SymmQuant/add" [op=AddV2]; -"inception_v3/conv2d_85/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; -"inception_v3/conv2d_85/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/conv2d_85/SymmQuant/mul" [op=Mul]; -"inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel/ReadVariableOp/resource" [op=Placeholder]; -"inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel" [op=FakeQuantWithMinMaxVarsPerChannel]; -"inception_v3/conv2d_85/Conv2D" [op=Conv2D]; "inception_v3/conv2d_93/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; "inception_v3/conv2d_93/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; "inception_v3/conv2d_93/SymmQuant/Abs" [op=Abs]; @@ -2909,14 +2897,18 @@ self [op=Placeholder]; "inception_v3/batch_normalization_87/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; "inception_v3/batch_normalization_87/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; "inception_v3/batch_normalization_87/FusedBatchNormV3" [op=FusedBatchNormV3]; -"inception_v3/batch_normalization_85/scale" [op=Placeholder]; -"inception_v3/batch_normalization_85/ReadVariableOp/resource" [op=Placeholder]; -"inception_v3/batch_normalization_85/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; -"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; -"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; -"inception_v3/batch_normalization_85/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_v3/conv2d_85/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/conv2d_85/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/conv2d_85/SymmQuant/Abs" [op=Abs]; +"inception_v3/conv2d_85/SymmQuant/add/y" [op=Const]; +"inception_v3/conv2d_85/SymmQuant/add" [op=AddV2]; +"inception_v3/conv2d_85/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/conv2d_85/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/conv2d_85/SymmQuant/mul" [op=Mul]; +"inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel" [op=FakeQuantWithMinMaxVarsPerChannel]; +"inception_v3/conv2d_85/Conv2D" [op=Conv2D]; "inception_v3/batch_normalization_93/scale" [op=Placeholder]; "inception_v3/batch_normalization_93/ReadVariableOp/resource" [op=Placeholder]; "inception_v3/batch_normalization_93/ReadVariableOp" [op=ReadVariableOp]; @@ -2929,62 +2921,80 @@ self [op=Placeholder]; "inception_v3/activation_91/Relu" [op=Relu]; "inception_v3/activation_88/Relu" [op=Relu]; "inception_v3/activation_87/Relu" [op=Relu]; 
-"inception_v3/activation_85/Relu" [op=Relu]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/Abs" [op=Abs]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/add/y" [op=Const]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/add" [op=AddV2]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/add_1" [op=AddV2]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"inception_v3/batch_normalization_85/scale" [op=Placeholder]; +"inception_v3/batch_normalization_85/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/batch_normalization_85/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_v3/batch_normalization_85/FusedBatchNormV3" [op=FusedBatchNormV3]; "inception_v3/activation_93/Relu" [op=Relu]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/Abs/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/Abs" [op=Abs]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/add/y" [op=Const]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/add" [op=AddV2]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/add_1" [op=AddV2]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/Abs/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/Abs" [op=Abs]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/add/y" [op=Const]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/add" [op=AddV2]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/add_1" [op=AddV2]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; 
-"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/Abs/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/Abs" [op=Abs]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/add/y" [op=Const]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/add" [op=AddV2]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/add_1" [op=AddV2]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/Abs/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/Abs" [op=Abs]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/add/y" [op=Const]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/add" [op=AddV2]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/add_1" [op=AddV2]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"inception_v3/activation_91/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/activation_91/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_91/fake_quantize/AsymmQuant/Abs" [op=Abs]; +"inception_v3/activation_91/fake_quantize/AsymmQuant/add/y" [op=Const]; +"inception_v3/activation_91/fake_quantize/AsymmQuant/add" [op=AddV2]; +"inception_v3/activation_91/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/activation_91/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_91/fake_quantize/AsymmQuant/add_1" [op=AddV2]; +"inception_v3/activation_91/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_91/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"inception_v3/activation_92/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/activation_92/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_92/fake_quantize/AsymmQuant/Abs" [op=Abs]; +"inception_v3/activation_92/fake_quantize/AsymmQuant/add/y" [op=Const]; +"inception_v3/activation_92/fake_quantize/AsymmQuant/add" [op=AddV2]; +"inception_v3/activation_92/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/activation_92/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_92/fake_quantize/AsymmQuant/add_1" [op=AddV2]; +"inception_v3/activation_92/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; 
+"inception_v3/activation_92/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"inception_v3/activation_87/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/activation_87/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_87/fake_quantize/AsymmQuant/Abs" [op=Abs]; +"inception_v3/activation_87/fake_quantize/AsymmQuant/add/y" [op=Const]; +"inception_v3/activation_87/fake_quantize/AsymmQuant/add" [op=AddV2]; +"inception_v3/activation_87/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/activation_87/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_87/fake_quantize/AsymmQuant/add_1" [op=AddV2]; +"inception_v3/activation_87/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_87/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"inception_v3/activation_88/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/activation_88/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_88/fake_quantize/AsymmQuant/Abs" [op=Abs]; +"inception_v3/activation_88/fake_quantize/AsymmQuant/add/y" [op=Const]; +"inception_v3/activation_88/fake_quantize/AsymmQuant/add" [op=AddV2]; +"inception_v3/activation_88/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/activation_88/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_88/fake_quantize/AsymmQuant/add_1" [op=AddV2]; +"inception_v3/activation_88/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_88/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"inception_v3/activation_85/Relu" [op=Relu]; +"inception_v3/activation_85/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/activation_85/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_85/fake_quantize/AsymmQuant/Abs" [op=Abs]; +"inception_v3/activation_85/fake_quantize/AsymmQuant/add/y" [op=Const]; +"inception_v3/activation_85/fake_quantize/AsymmQuant/add" [op=AddV2]; +"inception_v3/activation_85/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/activation_85/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_85/fake_quantize/AsymmQuant/add_1" [op=AddV2]; +"inception_v3/activation_85/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_85/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; "inception_v3/mixed9_1/concat/axis" [op=Const]; "inception_v3/mixed9_1/concat" [op=ConcatV2]; "inception_v3/concatenate_1/concat/axis" [op=Const]; "inception_v3/concatenate_1/concat" [op=ConcatV2]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/Abs/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/Abs" [op=Abs]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/add/y" [op=Const]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/add" [op=AddV2]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/ReadVariableOp" [op=ReadVariableOp]; 
-"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/add_1" [op=AddV2]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"inception_v3/activation_93/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/activation_93/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_93/fake_quantize/AsymmQuant/Abs" [op=Abs]; +"inception_v3/activation_93/fake_quantize/AsymmQuant/add/y" [op=Const]; +"inception_v3/activation_93/fake_quantize/AsymmQuant/add" [op=AddV2]; +"inception_v3/activation_93/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/activation_93/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_93/fake_quantize/AsymmQuant/add_1" [op=AddV2]; +"inception_v3/activation_93/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_93/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; "inception_v3/mixed10/concat/axis" [op=Const]; "inception_v3/mixed10/concat" [op=ConcatV2]; "inception_v3/avg_pool/Mean/reduction_indices" [op=Const]; @@ -6155,19 +6165,6 @@ self -> "inception_v3/input_1/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; "inception_v3/conv2d_87/SymmQuant/FakeQuantWithMinMaxVarsPerChannel/ReadVariableOp" -> "inception_v3/conv2d_87/SymmQuant/FakeQuantWithMinMaxVarsPerChannel"; "inception_v3/conv2d_87/SymmQuant/FakeQuantWithMinMaxVarsPerChannel" -> "inception_v3/conv2d_87/Conv2D"; "inception_v3/conv2d_87/Conv2D" -> "inception_v3/batch_normalization_87/FusedBatchNormV3"; -"inception_v3/conv2d_85/SymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/conv2d_85/SymmQuant/Abs/ReadVariableOp"; -"inception_v3/conv2d_85/SymmQuant/Abs/ReadVariableOp" -> "inception_v3/conv2d_85/SymmQuant/Abs"; -"inception_v3/conv2d_85/SymmQuant/Abs" -> "inception_v3/conv2d_85/SymmQuant/add"; -"inception_v3/conv2d_85/SymmQuant/add/y" -> "inception_v3/conv2d_85/SymmQuant/add"; -"inception_v3/conv2d_85/SymmQuant/add" -> "inception_v3/conv2d_85/SymmQuant/mul"; -"inception_v3/conv2d_85/SymmQuant/add" -> "inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel"; -"inception_v3/conv2d_85/SymmQuant/mul/ReadVariableOp/resource" -> "inception_v3/conv2d_85/SymmQuant/mul/ReadVariableOp"; -"inception_v3/conv2d_85/SymmQuant/mul/ReadVariableOp" -> "inception_v3/conv2d_85/SymmQuant/mul"; -"inception_v3/conv2d_85/SymmQuant/mul" -> "inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel"; -"inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel/ReadVariableOp/resource" -> "inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel/ReadVariableOp"; -"inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel/ReadVariableOp" -> "inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel"; -"inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel" -> "inception_v3/conv2d_85/Conv2D"; -"inception_v3/conv2d_85/Conv2D" -> "inception_v3/batch_normalization_85/FusedBatchNormV3"; "inception_v3/conv2d_93/SymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/conv2d_93/SymmQuant/Abs/ReadVariableOp"; "inception_v3/conv2d_93/SymmQuant/Abs/ReadVariableOp" -> "inception_v3/conv2d_93/SymmQuant/Abs"; 
"inception_v3/conv2d_93/SymmQuant/Abs" -> "inception_v3/conv2d_93/SymmQuant/add"; @@ -6213,14 +6210,19 @@ self -> "inception_v3/input_1/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; "inception_v3/batch_normalization_87/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_v3/batch_normalization_87/FusedBatchNormV3/ReadVariableOp_1"; "inception_v3/batch_normalization_87/FusedBatchNormV3/ReadVariableOp_1" -> "inception_v3/batch_normalization_87/FusedBatchNormV3"; "inception_v3/batch_normalization_87/FusedBatchNormV3" -> "inception_v3/activation_87/Relu"; -"inception_v3/batch_normalization_85/scale" -> "inception_v3/batch_normalization_85/FusedBatchNormV3"; -"inception_v3/batch_normalization_85/ReadVariableOp/resource" -> "inception_v3/batch_normalization_85/ReadVariableOp"; -"inception_v3/batch_normalization_85/ReadVariableOp" -> "inception_v3/batch_normalization_85/FusedBatchNormV3"; -"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp"; -"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp" -> "inception_v3/batch_normalization_85/FusedBatchNormV3"; -"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp_1"; -"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp_1" -> "inception_v3/batch_normalization_85/FusedBatchNormV3"; -"inception_v3/batch_normalization_85/FusedBatchNormV3" -> "inception_v3/activation_85/Relu"; +"inception_v3/conv2d_85/SymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/conv2d_85/SymmQuant/Abs/ReadVariableOp"; +"inception_v3/conv2d_85/SymmQuant/Abs/ReadVariableOp" -> "inception_v3/conv2d_85/SymmQuant/Abs"; +"inception_v3/conv2d_85/SymmQuant/Abs" -> "inception_v3/conv2d_85/SymmQuant/add"; +"inception_v3/conv2d_85/SymmQuant/add/y" -> "inception_v3/conv2d_85/SymmQuant/add"; +"inception_v3/conv2d_85/SymmQuant/add" -> "inception_v3/conv2d_85/SymmQuant/mul"; +"inception_v3/conv2d_85/SymmQuant/add" -> "inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel"; +"inception_v3/conv2d_85/SymmQuant/mul/ReadVariableOp/resource" -> "inception_v3/conv2d_85/SymmQuant/mul/ReadVariableOp"; +"inception_v3/conv2d_85/SymmQuant/mul/ReadVariableOp" -> "inception_v3/conv2d_85/SymmQuant/mul"; +"inception_v3/conv2d_85/SymmQuant/mul" -> "inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel"; +"inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel/ReadVariableOp/resource" -> "inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel/ReadVariableOp"; +"inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel/ReadVariableOp" -> "inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel"; +"inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel" -> "inception_v3/conv2d_85/Conv2D"; +"inception_v3/conv2d_85/Conv2D" -> "inception_v3/batch_normalization_85/FusedBatchNormV3"; "inception_v3/batch_normalization_93/scale" -> "inception_v3/batch_normalization_93/FusedBatchNormV3"; "inception_v3/batch_normalization_93/ReadVariableOp/resource" -> "inception_v3/batch_normalization_93/ReadVariableOp"; "inception_v3/batch_normalization_93/ReadVariableOp" -> "inception_v3/batch_normalization_93/FusedBatchNormV3"; @@ -6229,82 +6231,90 @@ self -> "inception_v3/input_1/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; "inception_v3/batch_normalization_93/FusedBatchNormV3/ReadVariableOp_1/resource" -> 
"inception_v3/batch_normalization_93/FusedBatchNormV3/ReadVariableOp_1"; "inception_v3/batch_normalization_93/FusedBatchNormV3/ReadVariableOp_1" -> "inception_v3/batch_normalization_93/FusedBatchNormV3"; "inception_v3/batch_normalization_93/FusedBatchNormV3" -> "inception_v3/activation_93/Relu"; -"inception_v3/activation_92/Relu" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars"; -"inception_v3/activation_91/Relu" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars"; -"inception_v3/activation_88/Relu" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/Relu" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars"; -"inception_v3/activation_85/Relu" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/Abs/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/Abs/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/Abs/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/Abs/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/Abs/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/Abs"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/Abs" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/add"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/add/y" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/add"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/add" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/add_1"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/ReadVariableOp"; 
-"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/FakeQuantWithMinMaxVars/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/FakeQuantWithMinMaxVars/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/add_1"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/add_1" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars" -> "inception_v3/mixed10/concat"; -"inception_v3/activation_93/Relu" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/Abs/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/Abs"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/Abs" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/add"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/add/y" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/add"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/add" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/add_1"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/ReadVariableOp" -> 
"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/add_1"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/add_1" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars" -> "inception_v3/concatenate_1/concat"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/Abs/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/Abs"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/Abs" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/add"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/add/y" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/add"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/add" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/add_1"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/add_1"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/add_1" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars" -> "inception_v3/concatenate_1/concat"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/Abs/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/Abs"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/Abs" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/add"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/add/y" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/add"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/add" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/add_1"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/add_1"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/add_1" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars" -> "inception_v3/mixed9_1/concat"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/Abs/ReadVariableOp" -> 
"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/Abs"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/Abs" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/add"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/add/y" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/add"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/add" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/add_1"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/add_1"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/add_1" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/FakeQuantWithMinMaxVars/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/FakeQuantWithMinMaxVars" -> "inception_v3/mixed9_1/concat"; +"inception_v3/activation_92/Relu" -> "inception_v3/activation_92/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_91/Relu" -> "inception_v3/activation_91/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_88/Relu" -> "inception_v3/activation_88/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_87/Relu" -> "inception_v3/activation_87/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/batch_normalization_85/scale" -> "inception_v3/batch_normalization_85/FusedBatchNormV3"; +"inception_v3/batch_normalization_85/ReadVariableOp/resource" -> "inception_v3/batch_normalization_85/ReadVariableOp"; +"inception_v3/batch_normalization_85/ReadVariableOp" -> "inception_v3/batch_normalization_85/FusedBatchNormV3"; +"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp"; +"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp" -> "inception_v3/batch_normalization_85/FusedBatchNormV3"; +"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp_1"; +"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp_1" -> "inception_v3/batch_normalization_85/FusedBatchNormV3"; +"inception_v3/batch_normalization_85/FusedBatchNormV3" -> "inception_v3/activation_85/Relu"; +"inception_v3/activation_93/Relu" -> "inception_v3/activation_93/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_91/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/activation_91/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; +"inception_v3/activation_91/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "inception_v3/activation_91/fake_quantize/AsymmQuant/Abs"; +"inception_v3/activation_91/fake_quantize/AsymmQuant/Abs" -> "inception_v3/activation_91/fake_quantize/AsymmQuant/add"; +"inception_v3/activation_91/fake_quantize/AsymmQuant/add/y" -> "inception_v3/activation_91/fake_quantize/AsymmQuant/add"; +"inception_v3/activation_91/fake_quantize/AsymmQuant/add" -> "inception_v3/activation_91/fake_quantize/AsymmQuant/add_1"; 
+"inception_v3/activation_91/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_91/fake_quantize/AsymmQuant/ReadVariableOp"; +"inception_v3/activation_91/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_91/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"inception_v3/activation_91/fake_quantize/AsymmQuant/ReadVariableOp" -> "inception_v3/activation_91/fake_quantize/AsymmQuant/add_1"; +"inception_v3/activation_91/fake_quantize/AsymmQuant/add_1" -> "inception_v3/activation_91/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_91/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "inception_v3/activation_91/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_91/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "inception_v3/concatenate_1/concat"; +"inception_v3/activation_92/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/activation_92/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; +"inception_v3/activation_92/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "inception_v3/activation_92/fake_quantize/AsymmQuant/Abs"; +"inception_v3/activation_92/fake_quantize/AsymmQuant/Abs" -> "inception_v3/activation_92/fake_quantize/AsymmQuant/add"; +"inception_v3/activation_92/fake_quantize/AsymmQuant/add/y" -> "inception_v3/activation_92/fake_quantize/AsymmQuant/add"; +"inception_v3/activation_92/fake_quantize/AsymmQuant/add" -> "inception_v3/activation_92/fake_quantize/AsymmQuant/add_1"; +"inception_v3/activation_92/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_92/fake_quantize/AsymmQuant/ReadVariableOp"; +"inception_v3/activation_92/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_92/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"inception_v3/activation_92/fake_quantize/AsymmQuant/ReadVariableOp" -> "inception_v3/activation_92/fake_quantize/AsymmQuant/add_1"; +"inception_v3/activation_92/fake_quantize/AsymmQuant/add_1" -> "inception_v3/activation_92/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_92/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "inception_v3/activation_92/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_92/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "inception_v3/concatenate_1/concat"; +"inception_v3/activation_87/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; +"inception_v3/activation_87/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/AsymmQuant/Abs"; +"inception_v3/activation_87/fake_quantize/AsymmQuant/Abs" -> "inception_v3/activation_87/fake_quantize/AsymmQuant/add"; +"inception_v3/activation_87/fake_quantize/AsymmQuant/add/y" -> "inception_v3/activation_87/fake_quantize/AsymmQuant/add"; +"inception_v3/activation_87/fake_quantize/AsymmQuant/add" -> "inception_v3/activation_87/fake_quantize/AsymmQuant/add_1"; +"inception_v3/activation_87/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/AsymmQuant/ReadVariableOp"; +"inception_v3/activation_87/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"inception_v3/activation_87/fake_quantize/AsymmQuant/ReadVariableOp" -> 
"inception_v3/activation_87/fake_quantize/AsymmQuant/add_1"; +"inception_v3/activation_87/fake_quantize/AsymmQuant/add_1" -> "inception_v3/activation_87/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_87/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_87/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "inception_v3/mixed9_1/concat"; +"inception_v3/activation_88/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/activation_88/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; +"inception_v3/activation_88/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "inception_v3/activation_88/fake_quantize/AsymmQuant/Abs"; +"inception_v3/activation_88/fake_quantize/AsymmQuant/Abs" -> "inception_v3/activation_88/fake_quantize/AsymmQuant/add"; +"inception_v3/activation_88/fake_quantize/AsymmQuant/add/y" -> "inception_v3/activation_88/fake_quantize/AsymmQuant/add"; +"inception_v3/activation_88/fake_quantize/AsymmQuant/add" -> "inception_v3/activation_88/fake_quantize/AsymmQuant/add_1"; +"inception_v3/activation_88/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_88/fake_quantize/AsymmQuant/ReadVariableOp"; +"inception_v3/activation_88/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_88/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"inception_v3/activation_88/fake_quantize/AsymmQuant/ReadVariableOp" -> "inception_v3/activation_88/fake_quantize/AsymmQuant/add_1"; +"inception_v3/activation_88/fake_quantize/AsymmQuant/add_1" -> "inception_v3/activation_88/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_88/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "inception_v3/activation_88/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_88/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "inception_v3/mixed9_1/concat"; +"inception_v3/activation_85/Relu" -> "inception_v3/activation_85/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_85/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/activation_85/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; +"inception_v3/activation_85/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "inception_v3/activation_85/fake_quantize/AsymmQuant/Abs"; +"inception_v3/activation_85/fake_quantize/AsymmQuant/Abs" -> "inception_v3/activation_85/fake_quantize/AsymmQuant/add"; +"inception_v3/activation_85/fake_quantize/AsymmQuant/add/y" -> "inception_v3/activation_85/fake_quantize/AsymmQuant/add"; +"inception_v3/activation_85/fake_quantize/AsymmQuant/add" -> "inception_v3/activation_85/fake_quantize/AsymmQuant/add_1"; +"inception_v3/activation_85/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_85/fake_quantize/AsymmQuant/ReadVariableOp"; +"inception_v3/activation_85/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_85/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"inception_v3/activation_85/fake_quantize/AsymmQuant/ReadVariableOp" -> "inception_v3/activation_85/fake_quantize/AsymmQuant/add_1"; +"inception_v3/activation_85/fake_quantize/AsymmQuant/add_1" -> "inception_v3/activation_85/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_85/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> 
"inception_v3/activation_85/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_85/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "inception_v3/mixed10/concat"; "inception_v3/mixed9_1/concat/axis" -> "inception_v3/mixed9_1/concat"; "inception_v3/mixed9_1/concat" -> "inception_v3/mixed10/concat"; "inception_v3/concatenate_1/concat/axis" -> "inception_v3/concatenate_1/concat"; "inception_v3/concatenate_1/concat" -> "inception_v3/mixed10/concat"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/Abs/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/Abs"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/Abs" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/add"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/add/y" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/add"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/add" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/add_1"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/add_1"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/add_1" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/FakeQuantWithMinMaxVars/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/FakeQuantWithMinMaxVars" -> "inception_v3/mixed10/concat"; +"inception_v3/activation_93/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/activation_93/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; +"inception_v3/activation_93/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "inception_v3/activation_93/fake_quantize/AsymmQuant/Abs"; +"inception_v3/activation_93/fake_quantize/AsymmQuant/Abs" -> "inception_v3/activation_93/fake_quantize/AsymmQuant/add"; +"inception_v3/activation_93/fake_quantize/AsymmQuant/add/y" -> "inception_v3/activation_93/fake_quantize/AsymmQuant/add"; +"inception_v3/activation_93/fake_quantize/AsymmQuant/add" -> "inception_v3/activation_93/fake_quantize/AsymmQuant/add_1"; +"inception_v3/activation_93/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_93/fake_quantize/AsymmQuant/ReadVariableOp"; +"inception_v3/activation_93/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_93/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"inception_v3/activation_93/fake_quantize/AsymmQuant/ReadVariableOp" -> "inception_v3/activation_93/fake_quantize/AsymmQuant/add_1"; +"inception_v3/activation_93/fake_quantize/AsymmQuant/add_1" -> "inception_v3/activation_93/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_93/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "inception_v3/activation_93/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_93/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "inception_v3/mixed10/concat"; "inception_v3/mixed10/concat/axis" -> "inception_v3/mixed10/concat"; "inception_v3/mixed10/concat" -> 
"inception_v3/avg_pool/Mean"; "inception_v3/avg_pool/Mean/reduction_indices" -> "inception_v3/avg_pool/Mean"; diff --git a/tests/tensorflow/data/reference_graphs/2.4/quantized/w_sym_ch_a_asym_t/mask_rcnn.dot b/tests/tensorflow/data/reference_graphs/2.4/quantized/w_sym_ch_a_asym_t/mask_rcnn.dot index 240b1f91919..bf61c63b813 100644 --- a/tests/tensorflow/data/reference_graphs/2.4/quantized/w_sym_ch_a_asym_t/mask_rcnn.dot +++ b/tests/tensorflow/data/reference_graphs/2.4/quantized/w_sym_ch_a_asym_t/mask_rcnn.dot @@ -2217,48 +2217,56 @@ self_1 [op=Placeholder]; "maskrcnn/p2-bn/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; "maskrcnn/p2-bn/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; "maskrcnn/p2-bn/FusedBatchNormV3" [op=FusedBatchNormV3]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant/Abs" [op=Abs]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant/add/y" [op=Const]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant/add" [op=AddV2]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant/add_1" [op=AddV2]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_1/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_1/Abs" [op=Abs]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_1/add/y" [op=Const]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_1/add" [op=AddV2]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_1/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_1/add_1" [op=AddV2]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_2/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_2/Abs" [op=Abs]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_2/add/y" [op=Const]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_2/add" [op=AddV2]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_2/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_2/add_1" [op=AddV2]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_3/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_3/Abs" [op=Abs]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_3/add/y" 
[op=Const]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_3/add" [op=AddV2]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_3/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_3/add_1" [op=AddV2]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_4/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_4/Abs" [op=Abs]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_4/add/y" [op=Const]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_4/add" [op=AddV2]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_4/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_4/add_1" [op=AddV2]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_4/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_4/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/p6-bn/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/p6-bn/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/p6-bn/fake_quantize/AsymmQuant/Abs" [op=Abs]; +"maskrcnn/p6-bn/fake_quantize/AsymmQuant/add/y" [op=Const]; +"maskrcnn/p6-bn/fake_quantize/AsymmQuant/add" [op=AddV2]; +"maskrcnn/p6-bn/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/p6-bn/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/p6-bn/fake_quantize/AsymmQuant/add_1" [op=AddV2]; +"maskrcnn/p6-bn/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/p6-bn/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/p5-bn/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/p5-bn/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/p5-bn/fake_quantize/AsymmQuant/Abs" [op=Abs]; +"maskrcnn/p5-bn/fake_quantize/AsymmQuant/add/y" [op=Const]; +"maskrcnn/p5-bn/fake_quantize/AsymmQuant/add" [op=AddV2]; +"maskrcnn/p5-bn/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/p5-bn/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/p5-bn/fake_quantize/AsymmQuant/add_1" [op=AddV2]; +"maskrcnn/p5-bn/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/p5-bn/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/p4-bn/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/p4-bn/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/p4-bn/fake_quantize/AsymmQuant/Abs" [op=Abs]; +"maskrcnn/p4-bn/fake_quantize/AsymmQuant/add/y" [op=Const]; +"maskrcnn/p4-bn/fake_quantize/AsymmQuant/add" [op=AddV2]; +"maskrcnn/p4-bn/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/p4-bn/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/p4-bn/fake_quantize/AsymmQuant/add_1" [op=AddV2]; +"maskrcnn/p4-bn/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; 
+"maskrcnn/p4-bn/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/p3-bn/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/p3-bn/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/p3-bn/fake_quantize/AsymmQuant/Abs" [op=Abs]; +"maskrcnn/p3-bn/fake_quantize/AsymmQuant/add/y" [op=Const]; +"maskrcnn/p3-bn/fake_quantize/AsymmQuant/add" [op=AddV2]; +"maskrcnn/p3-bn/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/p3-bn/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/p3-bn/fake_quantize/AsymmQuant/add_1" [op=AddV2]; +"maskrcnn/p3-bn/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/p3-bn/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/p2-bn/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/p2-bn/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/p2-bn/fake_quantize/AsymmQuant/Abs" [op=Abs]; +"maskrcnn/p2-bn/fake_quantize/AsymmQuant/add/y" [op=Const]; +"maskrcnn/p2-bn/fake_quantize/AsymmQuant/add" [op=AddV2]; +"maskrcnn/p2-bn/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/p2-bn/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/p2-bn/fake_quantize/AsymmQuant/add_1" [op=AddV2]; +"maskrcnn/p2-bn/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/p2-bn/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; "maskrcnn/rpn/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; "maskrcnn/rpn/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; "maskrcnn/rpn/SymmQuant/Abs" [op=Abs]; @@ -3208,96 +3216,106 @@ self_1 [op=Placeholder]; "maskrcnn/tf.math.subtract/Sub" [op=Sub]; "maskrcnn/tf.expand_dims/ExpandDims/dim" [op=Const]; "maskrcnn/tf.expand_dims/ExpandDims" [op=ExpandDims]; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant/Abs" [op=Abs]; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant/add/y" [op=Const]; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant/add" [op=AddV2]; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant/add_1" [op=AddV2]; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_1/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_1/Abs" [op=Abs]; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_1/add/y" [op=Const]; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_1/add" [op=AddV2]; 
-"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_1/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_1/add_1" [op=AddV2]; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant/Abs" [op=Abs]; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant/add/y" [op=Const]; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant/add" [op=AddV2]; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant/add_1" [op=AddV2]; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_1/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_1/Abs" [op=Abs]; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_1/add/y" [op=Const]; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_1/add" [op=AddV2]; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_1/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_1/add_1" [op=AddV2]; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant/Abs" [op=Abs]; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant/add/y" [op=Const]; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant/add" [op=AddV2]; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant/add_1" [op=AddV2]; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; 
-"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_1/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_1/Abs" [op=Abs]; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_1/add/y" [op=Const]; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_1/add" [op=AddV2]; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_1/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_1/add_1" [op=AddV2]; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant/Abs" [op=Abs]; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant/add/y" [op=Const]; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant/add" [op=AddV2]; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant/add_1" [op=AddV2]; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_1/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_1/Abs" [op=Abs]; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_1/add/y" [op=Const]; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_1/add" [op=AddV2]; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_1/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_1/add_1" [op=AddV2]; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant/Abs" [op=Abs]; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant/add/y" [op=Const]; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant/add" [op=AddV2]; 
-"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant/add_1" [op=AddV2]; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_1/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_1/Abs" [op=Abs]; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_1/add/y" [op=Const]; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_1/add" [op=AddV2]; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_1/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_1/add_1" [op=AddV2]; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.math.subtract_32/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_32/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_32/fake_quantize/AsymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.subtract_32/fake_quantize/AsymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.subtract_32/fake_quantize/AsymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.subtract_32/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_32/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_32/fake_quantize/AsymmQuant/add_1" [op=AddV2]; +"maskrcnn/tf.math.subtract_32/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_32/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.math.subtract_33/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_33/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_33/fake_quantize/AsymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.subtract_33/fake_quantize/AsymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.subtract_33/fake_quantize/AsymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.subtract_33/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_33/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_33/fake_quantize/AsymmQuant/add_1" [op=AddV2]; +"maskrcnn/tf.math.subtract_33/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_33/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.math.subtract_24/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_24/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_24/fake_quantize/AsymmQuant/Abs" [op=Abs]; 
+"maskrcnn/tf.math.subtract_24/fake_quantize/AsymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.subtract_24/fake_quantize/AsymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.subtract_24/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_24/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_24/fake_quantize/AsymmQuant/add_1" [op=AddV2]; +"maskrcnn/tf.math.subtract_24/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_24/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.math.subtract_25/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_25/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_25/fake_quantize/AsymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.subtract_25/fake_quantize/AsymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.subtract_25/fake_quantize/AsymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.subtract_25/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_25/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_25/fake_quantize/AsymmQuant/add_1" [op=AddV2]; +"maskrcnn/tf.math.subtract_25/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_25/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.math.subtract_16/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_16/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_16/fake_quantize/AsymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.subtract_16/fake_quantize/AsymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.subtract_16/fake_quantize/AsymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.subtract_16/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_16/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_16/fake_quantize/AsymmQuant/add_1" [op=AddV2]; +"maskrcnn/tf.math.subtract_16/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_16/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.math.subtract_17/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_17/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_17/fake_quantize/AsymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.subtract_17/fake_quantize/AsymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.subtract_17/fake_quantize/AsymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.subtract_17/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_17/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_17/fake_quantize/AsymmQuant/add_1" [op=AddV2]; +"maskrcnn/tf.math.subtract_17/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_17/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.math.subtract_8/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; 
+"maskrcnn/tf.math.subtract_8/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_8/fake_quantize/AsymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.subtract_8/fake_quantize/AsymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.subtract_8/fake_quantize/AsymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.subtract_8/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_8/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_8/fake_quantize/AsymmQuant/add_1" [op=AddV2]; +"maskrcnn/tf.math.subtract_8/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_8/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.math.subtract_9/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_9/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_9/fake_quantize/AsymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.subtract_9/fake_quantize/AsymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.subtract_9/fake_quantize/AsymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.subtract_9/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_9/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_9/fake_quantize/AsymmQuant/add_1" [op=AddV2]; +"maskrcnn/tf.math.subtract_9/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_9/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.math.subtract/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract/fake_quantize/AsymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.subtract/fake_quantize/AsymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.subtract/fake_quantize/AsymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.subtract/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract/fake_quantize/AsymmQuant/add_1" [op=AddV2]; +"maskrcnn/tf.math.subtract/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.math.subtract_1/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_1/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_1/fake_quantize/AsymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.subtract_1/fake_quantize/AsymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.subtract_1/fake_quantize/AsymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.subtract_1/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_1/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_1/fake_quantize/AsymmQuant/add_1" [op=AddV2]; +"maskrcnn/tf.math.subtract_1/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_1/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; "maskrcnn/tf.__operators__.add_38/AddV2" [op=AddV2]; 
"maskrcnn/tf.__operators__.add_37/AddV2" [op=AddV2]; "maskrcnn/tf.__operators__.add_34/AddV2" [op=AddV2]; @@ -3513,22 +3531,26 @@ self_1 [op=Placeholder]; "maskrcnn/tf.math.subtract_39/fake_quantize/AsymmQuant/add_1" [op=AddV2]; "maskrcnn/tf.math.subtract_39/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; "maskrcnn/tf.math.subtract_39/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_2/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_2/Abs" [op=Abs]; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_2/add/y" [op=Const]; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_2/add" [op=AddV2]; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_2/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_2/add_1" [op=AddV2]; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_3/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_3/Abs" [op=Abs]; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_3/add/y" [op=Const]; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_3/add" [op=AddV2]; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_3/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_3/add_1" [op=AddV2]; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.math.subtract_34/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_34/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_34/fake_quantize/AsymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.subtract_34/fake_quantize/AsymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.subtract_34/fake_quantize/AsymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.subtract_34/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_34/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_34/fake_quantize/AsymmQuant/add_1" [op=AddV2]; +"maskrcnn/tf.math.subtract_34/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_34/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.math.subtract_35/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_35/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_35/fake_quantize/AsymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.subtract_35/fake_quantize/AsymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.subtract_35/fake_quantize/AsymmQuant/add" 
[op=AddV2]; +"maskrcnn/tf.math.subtract_35/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_35/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_35/fake_quantize/AsymmQuant/add_1" [op=AddV2]; +"maskrcnn/tf.math.subtract_35/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_35/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; "maskrcnn/tf.math.subtract_28/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; "maskrcnn/tf.math.subtract_28/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; "maskrcnn/tf.math.subtract_28/fake_quantize/AsymmQuant/Abs" [op=Abs]; @@ -3569,22 +3591,26 @@ self_1 [op=Placeholder]; "maskrcnn/tf.math.subtract_31/fake_quantize/AsymmQuant/add_1" [op=AddV2]; "maskrcnn/tf.math.subtract_31/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; "maskrcnn/tf.math.subtract_31/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_2/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_2/Abs" [op=Abs]; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_2/add/y" [op=Const]; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_2/add" [op=AddV2]; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_2/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_2/add_1" [op=AddV2]; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_3/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_3/Abs" [op=Abs]; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_3/add/y" [op=Const]; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_3/add" [op=AddV2]; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_3/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_3/add_1" [op=AddV2]; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.math.subtract_26/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_26/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_26/fake_quantize/AsymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.subtract_26/fake_quantize/AsymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.subtract_26/fake_quantize/AsymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.subtract_26/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_26/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; 
+"maskrcnn/tf.math.subtract_26/fake_quantize/AsymmQuant/add_1" [op=AddV2]; +"maskrcnn/tf.math.subtract_26/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_26/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.math.subtract_27/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_27/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_27/fake_quantize/AsymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.subtract_27/fake_quantize/AsymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.subtract_27/fake_quantize/AsymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.subtract_27/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_27/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_27/fake_quantize/AsymmQuant/add_1" [op=AddV2]; +"maskrcnn/tf.math.subtract_27/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_27/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; "maskrcnn/tf.math.subtract_20/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; "maskrcnn/tf.math.subtract_20/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; "maskrcnn/tf.math.subtract_20/fake_quantize/AsymmQuant/Abs" [op=Abs]; @@ -3625,22 +3651,26 @@ self_1 [op=Placeholder]; "maskrcnn/tf.math.subtract_23/fake_quantize/AsymmQuant/add_1" [op=AddV2]; "maskrcnn/tf.math.subtract_23/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; "maskrcnn/tf.math.subtract_23/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_2/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_2/Abs" [op=Abs]; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_2/add/y" [op=Const]; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_2/add" [op=AddV2]; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_2/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_2/add_1" [op=AddV2]; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_3/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_3/Abs" [op=Abs]; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_3/add/y" [op=Const]; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_3/add" [op=AddV2]; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_3/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_3/add_1" [op=AddV2]; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; 
-"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.math.subtract_18/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_18/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_18/fake_quantize/AsymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.subtract_18/fake_quantize/AsymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.subtract_18/fake_quantize/AsymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.subtract_18/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_18/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_18/fake_quantize/AsymmQuant/add_1" [op=AddV2]; +"maskrcnn/tf.math.subtract_18/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_18/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.math.subtract_19/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_19/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_19/fake_quantize/AsymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.subtract_19/fake_quantize/AsymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.subtract_19/fake_quantize/AsymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.subtract_19/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_19/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_19/fake_quantize/AsymmQuant/add_1" [op=AddV2]; +"maskrcnn/tf.math.subtract_19/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_19/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; "maskrcnn/tf.math.subtract_12/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; "maskrcnn/tf.math.subtract_12/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; "maskrcnn/tf.math.subtract_12/fake_quantize/AsymmQuant/Abs" [op=Abs]; @@ -3681,22 +3711,26 @@ self_1 [op=Placeholder]; "maskrcnn/tf.math.subtract_15/fake_quantize/AsymmQuant/add_1" [op=AddV2]; "maskrcnn/tf.math.subtract_15/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; "maskrcnn/tf.math.subtract_15/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_2/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_2/Abs" [op=Abs]; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_2/add/y" [op=Const]; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_2/add" [op=AddV2]; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_2/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_2/add_1" [op=AddV2]; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_3/Abs/ReadVariableOp" 
[op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_3/Abs" [op=Abs]; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_3/add/y" [op=Const]; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_3/add" [op=AddV2]; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_3/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_3/add_1" [op=AddV2]; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.math.subtract_10/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_10/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_10/fake_quantize/AsymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.subtract_10/fake_quantize/AsymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.subtract_10/fake_quantize/AsymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.subtract_10/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_10/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_10/fake_quantize/AsymmQuant/add_1" [op=AddV2]; +"maskrcnn/tf.math.subtract_10/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_10/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.math.subtract_11/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_11/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_11/fake_quantize/AsymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.subtract_11/fake_quantize/AsymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.subtract_11/fake_quantize/AsymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.subtract_11/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_11/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_11/fake_quantize/AsymmQuant/add_1" [op=AddV2]; +"maskrcnn/tf.math.subtract_11/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_11/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; "maskrcnn/tf.math.subtract_4/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; "maskrcnn/tf.math.subtract_4/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; "maskrcnn/tf.math.subtract_4/fake_quantize/AsymmQuant/Abs" [op=Abs]; @@ -3737,22 +3771,26 @@ self_1 [op=Placeholder]; "maskrcnn/tf.math.subtract_7/fake_quantize/AsymmQuant/add_1" [op=AddV2]; "maskrcnn/tf.math.subtract_7/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; "maskrcnn/tf.math.subtract_7/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_2/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_2/Abs" [op=Abs]; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_2/add/y" [op=Const]; 
-"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_2/add" [op=AddV2]; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_2/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_2/add_1" [op=AddV2]; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_3/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_3/Abs" [op=Abs]; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_3/add/y" [op=Const]; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_3/add" [op=AddV2]; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_3/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_3/add_1" [op=AddV2]; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.math.subtract_2/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_2/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_2/fake_quantize/AsymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.subtract_2/fake_quantize/AsymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.subtract_2/fake_quantize/AsymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.subtract_2/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_2/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_2/fake_quantize/AsymmQuant/add_1" [op=AddV2]; +"maskrcnn/tf.math.subtract_2/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_2/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.math.subtract_3/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_3/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_3/fake_quantize/AsymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.subtract_3/fake_quantize/AsymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.subtract_3/fake_quantize/AsymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.subtract_3/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_3/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_3/fake_quantize/AsymmQuant/add_1" [op=AddV2]; +"maskrcnn/tf.math.subtract_3/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_3/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; "maskrcnn/tf.concat_4/concat/axis" [op=Const]; "maskrcnn/tf.concat_4/concat" [op=ConcatV2]; "maskrcnn/tf.stack_4/stack" [op=Pack]; @@ -3978,48 +4016,56 @@ self_1 [op=Placeholder]; "maskrcnn/tf.ones/ones/Const" [op=Const]; "maskrcnn/tf.ones/ones" [op=Fill]; "maskrcnn/tf.math.multiply_30/Mul" [op=Mul]; 
-"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant/Abs" [op=Abs]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant/add/y" [op=Const]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant/add" [op=AddV2]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant/add_1" [op=AddV2]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_1/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_1/Abs" [op=Abs]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_1/add/y" [op=Const]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_1/add" [op=AddV2]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_1/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_1/add_1" [op=AddV2]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_2/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_2/Abs" [op=Abs]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_2/add/y" [op=Const]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_2/add" [op=AddV2]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_2/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_2/add_1" [op=AddV2]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_3/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_3/Abs" [op=Abs]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_3/add/y" [op=Const]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_3/add" [op=AddV2]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_3/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_3/add_1" [op=AddV2]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; 
-"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_4/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_4/Abs" [op=Abs]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_4/add/y" [op=Const]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_4/add" [op=AddV2]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_4/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_4/add_1" [op=AddV2]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_4/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_4/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.concat_5/fake_quantize_I0/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.concat_5/fake_quantize_I0/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.concat_5/fake_quantize_I0/AsymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.concat_5/fake_quantize_I0/AsymmQuant/add/y" [op=Const]; +"maskrcnn/tf.concat_5/fake_quantize_I0/AsymmQuant/add" [op=AddV2]; +"maskrcnn/tf.concat_5/fake_quantize_I0/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.concat_5/fake_quantize_I0/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.concat_5/fake_quantize_I0/AsymmQuant/add_1" [op=AddV2]; +"maskrcnn/tf.concat_5/fake_quantize_I0/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.concat_5/fake_quantize_I0/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.concat_5/fake_quantize_I1/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.concat_5/fake_quantize_I1/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.concat_5/fake_quantize_I1/AsymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.concat_5/fake_quantize_I1/AsymmQuant/add/y" [op=Const]; +"maskrcnn/tf.concat_5/fake_quantize_I1/AsymmQuant/add" [op=AddV2]; +"maskrcnn/tf.concat_5/fake_quantize_I1/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.concat_5/fake_quantize_I1/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.concat_5/fake_quantize_I1/AsymmQuant/add_1" [op=AddV2]; +"maskrcnn/tf.concat_5/fake_quantize_I1/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.concat_5/fake_quantize_I1/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.concat_5/fake_quantize_I2/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.concat_5/fake_quantize_I2/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.concat_5/fake_quantize_I2/AsymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.concat_5/fake_quantize_I2/AsymmQuant/add/y" [op=Const]; +"maskrcnn/tf.concat_5/fake_quantize_I2/AsymmQuant/add" [op=AddV2]; +"maskrcnn/tf.concat_5/fake_quantize_I2/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.concat_5/fake_quantize_I2/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.concat_5/fake_quantize_I2/AsymmQuant/add_1" [op=AddV2]; +"maskrcnn/tf.concat_5/fake_quantize_I2/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.concat_5/fake_quantize_I2/AsymmQuant/FakeQuantWithMinMaxVars" 
[op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.concat_5/fake_quantize_I3/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.concat_5/fake_quantize_I3/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.concat_5/fake_quantize_I3/AsymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.concat_5/fake_quantize_I3/AsymmQuant/add/y" [op=Const]; +"maskrcnn/tf.concat_5/fake_quantize_I3/AsymmQuant/add" [op=AddV2]; +"maskrcnn/tf.concat_5/fake_quantize_I3/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.concat_5/fake_quantize_I3/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.concat_5/fake_quantize_I3/AsymmQuant/add_1" [op=AddV2]; +"maskrcnn/tf.concat_5/fake_quantize_I3/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.concat_5/fake_quantize_I3/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.concat_5/fake_quantize_I4/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.concat_5/fake_quantize_I4/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.concat_5/fake_quantize_I4/AsymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.concat_5/fake_quantize_I4/AsymmQuant/add/y" [op=Const]; +"maskrcnn/tf.concat_5/fake_quantize_I4/AsymmQuant/add" [op=AddV2]; +"maskrcnn/tf.concat_5/fake_quantize_I4/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.concat_5/fake_quantize_I4/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.concat_5/fake_quantize_I4/AsymmQuant/add_1" [op=AddV2]; +"maskrcnn/tf.concat_5/fake_quantize_I4/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.concat_5/fake_quantize_I4/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; "maskrcnn/tf.concat_5/concat/axis" [op=Const]; "maskrcnn/tf.concat_5/concat" [op=ConcatV2]; "maskrcnn/tf.stack_5/stack" [op=Pack]; @@ -4146,32 +4192,36 @@ self_1 [op=Placeholder]; "maskrcnn/tf.math.truediv_1/truediv" [op=RealDiv]; "maskrcnn/tf.math.subtract_42/Sub/y" [op=Const]; "maskrcnn/tf.math.subtract_42/Sub" [op=Sub]; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant/Abs" [op=Abs]; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant/add/y" [op=Const]; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant/add" [op=AddV2]; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant/add_1" [op=AddV2]; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant_1/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant_1/Abs" [op=Abs]; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant_1/add/y" [op=Const]; 
-"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant_1/add" [op=AddV2]; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant_1/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant_1/add_1" [op=AddV2]; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant_2/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant_2/Abs" [op=Abs]; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant_2/add/y" [op=Const]; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant_2/add" [op=AddV2]; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant_2/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant_2/add_1" [op=AddV2]; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.math.truediv_2/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.truediv_2/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.truediv_2/fake_quantize/AsymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.truediv_2/fake_quantize/AsymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.truediv_2/fake_quantize/AsymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.truediv_2/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.truediv_2/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.truediv_2/fake_quantize/AsymmQuant/add_1" [op=AddV2]; +"maskrcnn/tf.math.truediv_2/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.truediv_2/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.math.truediv_3/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.truediv_3/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.truediv_3/fake_quantize/AsymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.truediv_3/fake_quantize/AsymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.truediv_3/fake_quantize/AsymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.truediv_3/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.truediv_3/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.truediv_3/fake_quantize/AsymmQuant/add_1" [op=AddV2]; +"maskrcnn/tf.math.truediv_3/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.truediv_3/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.math.truediv_1/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.truediv_1/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.truediv_1/fake_quantize/AsymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.truediv_1/fake_quantize/AsymmQuant/add/y" 
[op=Const]; +"maskrcnn/tf.math.truediv_1/fake_quantize/AsymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.truediv_1/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.truediv_1/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.truediv_1/fake_quantize/AsymmQuant/add_1" [op=AddV2]; +"maskrcnn/tf.math.truediv_1/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.truediv_1/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; "maskrcnn/tf.cast_23/Cast" [op=Cast]; "maskrcnn/tf.__operators__.getitem_27/strided_slice/stack" [op=Const]; "maskrcnn/tf.__operators__.getitem_27/strided_slice/stack_1" [op=Const]; @@ -4589,24 +4639,26 @@ self_1 [op=Placeholder]; "maskrcnn/tf.math.truediv_8/truediv" [op=RealDiv]; "maskrcnn/tf.math.truediv_6/truediv/y" [op=Const]; "maskrcnn/tf.math.truediv_6/truediv" [op=RealDiv]; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/AsymmQuant/Abs" [op=Abs]; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/AsymmQuant/add/y" [op=Const]; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/AsymmQuant/add" [op=AddV2]; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/AsymmQuant/add_1" [op=AddV2]; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/AsymmQuant_1/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/AsymmQuant_1/Abs" [op=Abs]; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/AsymmQuant_1/add/y" [op=Const]; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/AsymmQuant_1/add" [op=AddV2]; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/AsymmQuant_1/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/AsymmQuant_1/add_1" [op=AddV2]; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.math.subtract_44/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_44/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_44/fake_quantize/AsymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.subtract_44/fake_quantize/AsymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.subtract_44/fake_quantize/AsymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.subtract_44/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_44/fake_quantize/AsymmQuant/ReadVariableOp" 
[op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_44/fake_quantize/AsymmQuant/add_1" [op=AddV2]; +"maskrcnn/tf.math.subtract_44/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_44/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.math.subtract_43/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_43/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_43/fake_quantize/AsymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.subtract_43/fake_quantize/AsymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.subtract_43/fake_quantize/AsymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.subtract_43/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_43/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_43/fake_quantize/AsymmQuant/add_1" [op=AddV2]; +"maskrcnn/tf.math.subtract_43/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_43/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; "maskrcnn/tf.__operators__.add_53/AddV2" [op=AddV2]; "maskrcnn/tf.__operators__.add_51/AddV2" [op=AddV2]; "maskrcnn/tf.__operators__.add_49/AddV2" [op=AddV2]; @@ -5599,24 +5651,26 @@ self_1 [op=Placeholder]; "maskrcnn/tf.__operators__.getitem_60/strided_slice/stack_1" [op=Const]; "maskrcnn/tf.__operators__.getitem_60/strided_slice/stack_2" [op=Const]; "maskrcnn/tf.__operators__.getitem_60/strided_slice" [op=StridedSlice]; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant/Abs" [op=Abs]; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant/add/y" [op=Const]; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant/add" [op=AddV2]; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant/add_1" [op=AddV2]; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_1/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_1/Abs" [op=Abs]; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_1/add/y" [op=Const]; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_1/add" [op=AddV2]; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_1/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_1/add_1" [op=AddV2]; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; 
-"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.math.subtract_53/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_53/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_53/fake_quantize/AsymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.subtract_53/fake_quantize/AsymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.subtract_53/fake_quantize/AsymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.subtract_53/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_53/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_53/fake_quantize/AsymmQuant/add_1" [op=AddV2]; +"maskrcnn/tf.math.subtract_53/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_53/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.math.subtract_54/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_54/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_54/fake_quantize/AsymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.subtract_54/fake_quantize/AsymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.subtract_54/fake_quantize/AsymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.subtract_54/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_54/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_54/fake_quantize/AsymmQuant/add_1" [op=AddV2]; +"maskrcnn/tf.math.subtract_54/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_54/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; "maskrcnn/tf.__operators__.add_66/AddV2" [op=AddV2]; "maskrcnn/tf.__operators__.add_65/AddV2" [op=AddV2]; "maskrcnn/tf.unstack_5/unstack" [op=Unpack]; @@ -5692,22 +5746,26 @@ self_1 [op=Placeholder]; "maskrcnn/tf.math.subtract_60/fake_quantize/AsymmQuant/add_1" [op=AddV2]; "maskrcnn/tf.math.subtract_60/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; "maskrcnn/tf.math.subtract_60/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_2/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_2/Abs" [op=Abs]; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_2/add/y" [op=Const]; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_2/add" [op=AddV2]; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_2/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_2/add_1" [op=AddV2]; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_3/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_3/Abs" 
[op=Abs]; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_3/add/y" [op=Const]; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_3/add" [op=AddV2]; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_3/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_3/add_1" [op=AddV2]; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.math.subtract_55/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_55/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_55/fake_quantize/AsymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.subtract_55/fake_quantize/AsymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.subtract_55/fake_quantize/AsymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.subtract_55/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_55/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_55/fake_quantize/AsymmQuant/add_1" [op=AddV2]; +"maskrcnn/tf.math.subtract_55/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_55/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.math.subtract_56/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_56/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_56/fake_quantize/AsymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.subtract_56/fake_quantize/AsymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.subtract_56/fake_quantize/AsymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.subtract_56/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_56/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_56/fake_quantize/AsymmQuant/add_1" [op=AddV2]; +"maskrcnn/tf.math.subtract_56/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_56/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; "maskrcnn/tf.concat_13/concat/axis" [op=Const]; "maskrcnn/tf.concat_13/concat" [op=ConcatV2]; "maskrcnn/tf.stack_15/stack" [op=Pack]; @@ -5903,32 +5961,36 @@ self_1 [op=Placeholder]; "maskrcnn/tf.math.truediv_26/truediv" [op=RealDiv]; "maskrcnn/tf.math.subtract_64/Sub/y" [op=Const]; "maskrcnn/tf.math.subtract_64/Sub" [op=Sub]; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant/Abs" [op=Abs]; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant/add/y" [op=Const]; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant/add" [op=AddV2]; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; 
-"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant/add_1" [op=AddV2]; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant_1/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant_1/Abs" [op=Abs]; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant_1/add/y" [op=Const]; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant_1/add" [op=AddV2]; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant_1/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant_1/add_1" [op=AddV2]; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant_2/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant_2/Abs" [op=Abs]; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant_2/add/y" [op=Const]; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant_2/add" [op=AddV2]; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant_2/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant_2/add_1" [op=AddV2]; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.math.truediv_27/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.truediv_27/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.truediv_27/fake_quantize/AsymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.truediv_27/fake_quantize/AsymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.truediv_27/fake_quantize/AsymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.truediv_27/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.truediv_27/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.truediv_27/fake_quantize/AsymmQuant/add_1" [op=AddV2]; +"maskrcnn/tf.math.truediv_27/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.truediv_27/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.math.truediv_28/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.truediv_28/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.truediv_28/fake_quantize/AsymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.truediv_28/fake_quantize/AsymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.truediv_28/fake_quantize/AsymmQuant/add" [op=AddV2]; 
+"maskrcnn/tf.math.truediv_28/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.truediv_28/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.truediv_28/fake_quantize/AsymmQuant/add_1" [op=AddV2]; +"maskrcnn/tf.math.truediv_28/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.truediv_28/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.math.truediv_26/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.truediv_26/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.truediv_26/fake_quantize/AsymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.truediv_26/fake_quantize/AsymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.truediv_26/fake_quantize/AsymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.truediv_26/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.truediv_26/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.truediv_26/fake_quantize/AsymmQuant/add_1" [op=AddV2]; +"maskrcnn/tf.math.truediv_26/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.truediv_26/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; "maskrcnn/tf.cast_37/Cast" [op=Cast]; "maskrcnn/tf.__operators__.getitem_76/strided_slice/stack" [op=Const]; "maskrcnn/tf.__operators__.getitem_76/strided_slice/stack_1" [op=Const]; @@ -6696,24 +6758,26 @@ self_1 [op=Placeholder]; "maskrcnn/tf.math.truediv_33/truediv" [op=RealDiv]; "maskrcnn/tf.math.truediv_31/truediv/y" [op=Const]; "maskrcnn/tf.math.truediv_31/truediv" [op=RealDiv]; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/AsymmQuant/Abs" [op=Abs]; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/AsymmQuant/add/y" [op=Const]; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/AsymmQuant/add" [op=AddV2]; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/AsymmQuant/add_1" [op=AddV2]; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/AsymmQuant_1/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/AsymmQuant_1/Abs" [op=Abs]; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/AsymmQuant_1/add/y" [op=Const]; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/AsymmQuant_1/add" [op=AddV2]; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/AsymmQuant_1/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/AsymmQuant_1/add_1" [op=AddV2]; 
-"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.math.subtract_66/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_66/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_66/fake_quantize/AsymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.subtract_66/fake_quantize/AsymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.subtract_66/fake_quantize/AsymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.subtract_66/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_66/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_66/fake_quantize/AsymmQuant/add_1" [op=AddV2]; +"maskrcnn/tf.math.subtract_66/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_66/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.math.subtract_65/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_65/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_65/fake_quantize/AsymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.subtract_65/fake_quantize/AsymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.subtract_65/fake_quantize/AsymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.subtract_65/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_65/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_65/fake_quantize/AsymmQuant/add_1" [op=AddV2]; +"maskrcnn/tf.math.subtract_65/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_65/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; "maskrcnn/tf.__operators__.add_96/AddV2" [op=AddV2]; "maskrcnn/tf.__operators__.add_94/AddV2" [op=AddV2]; "maskrcnn/tf.__operators__.add_92/AddV2" [op=AddV2]; @@ -23047,7 +23111,7 @@ self_1 -> "maskrcnn/image_info/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" "maskrcnn/p6-bn/FusedBatchNormV3/ReadVariableOp" -> "maskrcnn/p6-bn/FusedBatchNormV3"; "maskrcnn/p6-bn/FusedBatchNormV3/ReadVariableOp_1/resource" -> "maskrcnn/p6-bn/FusedBatchNormV3/ReadVariableOp_1"; "maskrcnn/p6-bn/FusedBatchNormV3/ReadVariableOp_1" -> "maskrcnn/p6-bn/FusedBatchNormV3"; -"maskrcnn/p6-bn/FusedBatchNormV3" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/p6-bn/FusedBatchNormV3" -> "maskrcnn/p6-bn/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/p5-bn/ReadVariableOp/resource" -> "maskrcnn/p5-bn/ReadVariableOp"; "maskrcnn/p5-bn/ReadVariableOp" -> "maskrcnn/p5-bn/FusedBatchNormV3"; "maskrcnn/p5-bn/ReadVariableOp_1/resource" -> "maskrcnn/p5-bn/ReadVariableOp_1"; @@ -23056,7 +23120,7 @@ self_1 -> "maskrcnn/image_info/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" "maskrcnn/p5-bn/FusedBatchNormV3/ReadVariableOp" -> "maskrcnn/p5-bn/FusedBatchNormV3"; "maskrcnn/p5-bn/FusedBatchNormV3/ReadVariableOp_1/resource" -> "maskrcnn/p5-bn/FusedBatchNormV3/ReadVariableOp_1"; "maskrcnn/p5-bn/FusedBatchNormV3/ReadVariableOp_1" -> "maskrcnn/p5-bn/FusedBatchNormV3"; -"maskrcnn/p5-bn/FusedBatchNormV3" -> 
"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars"; +"maskrcnn/p5-bn/FusedBatchNormV3" -> "maskrcnn/p5-bn/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/p4-bn/ReadVariableOp/resource" -> "maskrcnn/p4-bn/ReadVariableOp"; "maskrcnn/p4-bn/ReadVariableOp" -> "maskrcnn/p4-bn/FusedBatchNormV3"; "maskrcnn/p4-bn/ReadVariableOp_1/resource" -> "maskrcnn/p4-bn/ReadVariableOp_1"; @@ -23065,7 +23129,7 @@ self_1 -> "maskrcnn/image_info/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" "maskrcnn/p4-bn/FusedBatchNormV3/ReadVariableOp" -> "maskrcnn/p4-bn/FusedBatchNormV3"; "maskrcnn/p4-bn/FusedBatchNormV3/ReadVariableOp_1/resource" -> "maskrcnn/p4-bn/FusedBatchNormV3/ReadVariableOp_1"; "maskrcnn/p4-bn/FusedBatchNormV3/ReadVariableOp_1" -> "maskrcnn/p4-bn/FusedBatchNormV3"; -"maskrcnn/p4-bn/FusedBatchNormV3" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars"; +"maskrcnn/p4-bn/FusedBatchNormV3" -> "maskrcnn/p4-bn/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/p3-bn/ReadVariableOp/resource" -> "maskrcnn/p3-bn/ReadVariableOp"; "maskrcnn/p3-bn/ReadVariableOp" -> "maskrcnn/p3-bn/FusedBatchNormV3"; "maskrcnn/p3-bn/ReadVariableOp_1/resource" -> "maskrcnn/p3-bn/ReadVariableOp_1"; @@ -23074,7 +23138,7 @@ self_1 -> "maskrcnn/image_info/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" "maskrcnn/p3-bn/FusedBatchNormV3/ReadVariableOp" -> "maskrcnn/p3-bn/FusedBatchNormV3"; "maskrcnn/p3-bn/FusedBatchNormV3/ReadVariableOp_1/resource" -> "maskrcnn/p3-bn/FusedBatchNormV3/ReadVariableOp_1"; "maskrcnn/p3-bn/FusedBatchNormV3/ReadVariableOp_1" -> "maskrcnn/p3-bn/FusedBatchNormV3"; -"maskrcnn/p3-bn/FusedBatchNormV3" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars"; +"maskrcnn/p3-bn/FusedBatchNormV3" -> "maskrcnn/p3-bn/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/p2-bn/ReadVariableOp/resource" -> "maskrcnn/p2-bn/ReadVariableOp"; "maskrcnn/p2-bn/ReadVariableOp" -> "maskrcnn/p2-bn/FusedBatchNormV3"; "maskrcnn/p2-bn/ReadVariableOp_1/resource" -> "maskrcnn/p2-bn/ReadVariableOp_1"; @@ -23083,72 +23147,72 @@ self_1 -> "maskrcnn/image_info/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" "maskrcnn/p2-bn/FusedBatchNormV3/ReadVariableOp" -> "maskrcnn/p2-bn/FusedBatchNormV3"; "maskrcnn/p2-bn/FusedBatchNormV3/ReadVariableOp_1/resource" -> "maskrcnn/p2-bn/FusedBatchNormV3/ReadVariableOp_1"; "maskrcnn/p2-bn/FusedBatchNormV3/ReadVariableOp_1" -> "maskrcnn/p2-bn/FusedBatchNormV3"; -"maskrcnn/p2-bn/FusedBatchNormV3" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_4/FakeQuantWithMinMaxVars"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_1/Abs/ReadVariableOp"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_2/Abs/ReadVariableOp"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_3/Abs/ReadVariableOp"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_4/Abs/ReadVariableOp"; 
-"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant/Abs"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant/Abs" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant/add"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant/add/y" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant/add"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant/add" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant/add_1"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_1/ReadVariableOp"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars/ReadVariableOp"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_2/ReadVariableOp"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars/ReadVariableOp"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_3/ReadVariableOp"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars/ReadVariableOp"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_4/ReadVariableOp"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_4/FakeQuantWithMinMaxVars/ReadVariableOp"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant/add_1"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant/add_1" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/rpn/Conv2D"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.reshape_18/Reshape"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.reshape_47/Reshape"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_1/Abs/ReadVariableOp" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_1/Abs"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_1/Abs" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_1/add"; 
-"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_1/add/y" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_1/add"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_1/add" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_1/add_1"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_1/ReadVariableOp" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_1/add_1"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_1/add_1" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars" -> "maskrcnn/rpn/Conv2D_1"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.reshape_16/Reshape"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.reshape_45/Reshape"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_2/Abs/ReadVariableOp" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_2/Abs"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_2/Abs" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_2/add"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_2/add/y" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_2/add"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_2/add" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_2/add_1"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_2/ReadVariableOp" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_2/add_1"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_2/add_1" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars" -> "maskrcnn/rpn/Conv2D_2"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.reshape_14/Reshape"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.reshape_43/Reshape"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_3/Abs/ReadVariableOp" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_3/Abs"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_3/Abs" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_3/add"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_3/add/y" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_3/add"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_3/add" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_3/add_1"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_3/ReadVariableOp" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_3/add_1"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_3/add_1" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars"; 
-"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars" -> "maskrcnn/rpn/Conv2D_3"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.reshape_12/Reshape"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.reshape_41/Reshape"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_4/Abs/ReadVariableOp" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_4/Abs"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_4/Abs" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_4/add"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_4/add/y" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_4/add"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_4/add" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_4/add_1"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_4/ReadVariableOp" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_4/add_1"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_4/add_1" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_4/FakeQuantWithMinMaxVars"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_4/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_4/FakeQuantWithMinMaxVars"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_4/FakeQuantWithMinMaxVars" -> "maskrcnn/rpn/Conv2D_4"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_4/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.reshape_10/Reshape"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_4/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.reshape_39/Reshape"; +"maskrcnn/p2-bn/FusedBatchNormV3" -> "maskrcnn/p2-bn/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/p6-bn/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/p6-bn/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; +"maskrcnn/p6-bn/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/p6-bn/fake_quantize/AsymmQuant/Abs"; +"maskrcnn/p6-bn/fake_quantize/AsymmQuant/Abs" -> "maskrcnn/p6-bn/fake_quantize/AsymmQuant/add"; +"maskrcnn/p6-bn/fake_quantize/AsymmQuant/add/y" -> "maskrcnn/p6-bn/fake_quantize/AsymmQuant/add"; +"maskrcnn/p6-bn/fake_quantize/AsymmQuant/add" -> "maskrcnn/p6-bn/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/p6-bn/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/p6-bn/fake_quantize/AsymmQuant/ReadVariableOp"; +"maskrcnn/p6-bn/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/p6-bn/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"maskrcnn/p6-bn/fake_quantize/AsymmQuant/ReadVariableOp" -> "maskrcnn/p6-bn/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/p6-bn/fake_quantize/AsymmQuant/add_1" -> "maskrcnn/p6-bn/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/p6-bn/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/p6-bn/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/p6-bn/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/rpn/Conv2D"; +"maskrcnn/p6-bn/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.reshape_18/Reshape"; 
+"maskrcnn/p6-bn/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.reshape_47/Reshape"; +"maskrcnn/p5-bn/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/p5-bn/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; +"maskrcnn/p5-bn/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/p5-bn/fake_quantize/AsymmQuant/Abs"; +"maskrcnn/p5-bn/fake_quantize/AsymmQuant/Abs" -> "maskrcnn/p5-bn/fake_quantize/AsymmQuant/add"; +"maskrcnn/p5-bn/fake_quantize/AsymmQuant/add/y" -> "maskrcnn/p5-bn/fake_quantize/AsymmQuant/add"; +"maskrcnn/p5-bn/fake_quantize/AsymmQuant/add" -> "maskrcnn/p5-bn/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/p5-bn/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/p5-bn/fake_quantize/AsymmQuant/ReadVariableOp"; +"maskrcnn/p5-bn/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/p5-bn/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"maskrcnn/p5-bn/fake_quantize/AsymmQuant/ReadVariableOp" -> "maskrcnn/p5-bn/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/p5-bn/fake_quantize/AsymmQuant/add_1" -> "maskrcnn/p5-bn/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/p5-bn/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/p5-bn/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/p5-bn/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/rpn/Conv2D_1"; +"maskrcnn/p5-bn/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.reshape_16/Reshape"; +"maskrcnn/p5-bn/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.reshape_45/Reshape"; +"maskrcnn/p4-bn/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/p4-bn/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; +"maskrcnn/p4-bn/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/p4-bn/fake_quantize/AsymmQuant/Abs"; +"maskrcnn/p4-bn/fake_quantize/AsymmQuant/Abs" -> "maskrcnn/p4-bn/fake_quantize/AsymmQuant/add"; +"maskrcnn/p4-bn/fake_quantize/AsymmQuant/add/y" -> "maskrcnn/p4-bn/fake_quantize/AsymmQuant/add"; +"maskrcnn/p4-bn/fake_quantize/AsymmQuant/add" -> "maskrcnn/p4-bn/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/p4-bn/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/p4-bn/fake_quantize/AsymmQuant/ReadVariableOp"; +"maskrcnn/p4-bn/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/p4-bn/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"maskrcnn/p4-bn/fake_quantize/AsymmQuant/ReadVariableOp" -> "maskrcnn/p4-bn/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/p4-bn/fake_quantize/AsymmQuant/add_1" -> "maskrcnn/p4-bn/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/p4-bn/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/p4-bn/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/p4-bn/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/rpn/Conv2D_2"; +"maskrcnn/p4-bn/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.reshape_14/Reshape"; +"maskrcnn/p4-bn/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.reshape_43/Reshape"; +"maskrcnn/p3-bn/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/p3-bn/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; +"maskrcnn/p3-bn/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/p3-bn/fake_quantize/AsymmQuant/Abs"; +"maskrcnn/p3-bn/fake_quantize/AsymmQuant/Abs" -> "maskrcnn/p3-bn/fake_quantize/AsymmQuant/add"; +"maskrcnn/p3-bn/fake_quantize/AsymmQuant/add/y" -> 
"maskrcnn/p3-bn/fake_quantize/AsymmQuant/add"; +"maskrcnn/p3-bn/fake_quantize/AsymmQuant/add" -> "maskrcnn/p3-bn/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/p3-bn/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/p3-bn/fake_quantize/AsymmQuant/ReadVariableOp"; +"maskrcnn/p3-bn/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/p3-bn/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"maskrcnn/p3-bn/fake_quantize/AsymmQuant/ReadVariableOp" -> "maskrcnn/p3-bn/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/p3-bn/fake_quantize/AsymmQuant/add_1" -> "maskrcnn/p3-bn/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/p3-bn/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/p3-bn/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/p3-bn/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/rpn/Conv2D_3"; +"maskrcnn/p3-bn/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.reshape_12/Reshape"; +"maskrcnn/p3-bn/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.reshape_41/Reshape"; +"maskrcnn/p2-bn/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/p2-bn/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; +"maskrcnn/p2-bn/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/p2-bn/fake_quantize/AsymmQuant/Abs"; +"maskrcnn/p2-bn/fake_quantize/AsymmQuant/Abs" -> "maskrcnn/p2-bn/fake_quantize/AsymmQuant/add"; +"maskrcnn/p2-bn/fake_quantize/AsymmQuant/add/y" -> "maskrcnn/p2-bn/fake_quantize/AsymmQuant/add"; +"maskrcnn/p2-bn/fake_quantize/AsymmQuant/add" -> "maskrcnn/p2-bn/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/p2-bn/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/p2-bn/fake_quantize/AsymmQuant/ReadVariableOp"; +"maskrcnn/p2-bn/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/p2-bn/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"maskrcnn/p2-bn/fake_quantize/AsymmQuant/ReadVariableOp" -> "maskrcnn/p2-bn/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/p2-bn/fake_quantize/AsymmQuant/add_1" -> "maskrcnn/p2-bn/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/p2-bn/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/p2-bn/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/p2-bn/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/rpn/Conv2D_4"; +"maskrcnn/p2-bn/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.reshape_10/Reshape"; +"maskrcnn/p2-bn/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.reshape_39/Reshape"; "maskrcnn/rpn/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/rpn/SymmQuant/Abs/ReadVariableOp"; "maskrcnn/rpn/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/rpn/SymmQuant_1/Abs/ReadVariableOp"; "maskrcnn/rpn/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/rpn/SymmQuant_2/Abs/ReadVariableOp"; @@ -24221,176 +24285,146 @@ self_1 -> "maskrcnn/image_info/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" "maskrcnn/tf.math.multiply_4/fake_quantize/AsymmQuant/add_1" -> "maskrcnn/tf.math.multiply_4/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.multiply_4/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.multiply_4/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.multiply_4/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.math.subtract/Sub"; -"maskrcnn/tf.math.subtract_33/Sub" -> 
"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_32/Sub" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_25/Sub" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_24/Sub" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_17/Sub" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_16/Sub" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_9/Sub" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_8/Sub" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_33/Sub" -> "maskrcnn/tf.math.subtract_33/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_32/Sub" -> "maskrcnn/tf.math.subtract_32/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_25/Sub" -> "maskrcnn/tf.math.subtract_25/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_24/Sub" -> "maskrcnn/tf.math.subtract_24/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_17/Sub" -> "maskrcnn/tf.math.subtract_17/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_16/Sub" -> "maskrcnn/tf.math.subtract_16/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_9/Sub" -> "maskrcnn/tf.math.subtract_9/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_8/Sub" -> "maskrcnn/tf.math.subtract_8/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.__operators__.getitem/strided_slice/stack" -> "maskrcnn/tf.__operators__.getitem/strided_slice"; "maskrcnn/tf.__operators__.getitem/strided_slice/stack_1" -> "maskrcnn/tf.__operators__.getitem/strided_slice"; "maskrcnn/tf.__operators__.getitem/strided_slice/stack_2" -> "maskrcnn/tf.__operators__.getitem/strided_slice"; "maskrcnn/tf.__operators__.getitem/strided_slice" -> "maskrcnn/tf.expand_dims/ExpandDims"; -"maskrcnn/tf.math.subtract_1/Sub" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract/Sub" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_1/Sub" -> "maskrcnn/tf.math.subtract_1/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract/Sub" -> "maskrcnn/tf.math.subtract/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.expand_dims/ExpandDims/dim" -> "maskrcnn/tf.expand_dims/ExpandDims"; "maskrcnn/tf.expand_dims/ExpandDims" -> "maskrcnn/tf.unstack_4/unstack"; "maskrcnn/tf.expand_dims/ExpandDims" -> "maskrcnn/tf.unstack_3/unstack"; "maskrcnn/tf.expand_dims/ExpandDims" -> "maskrcnn/tf.unstack_2/unstack"; "maskrcnn/tf.expand_dims/ExpandDims" -> "maskrcnn/tf.unstack_1/unstack"; "maskrcnn/tf.expand_dims/ExpandDims" -> "maskrcnn/tf.unstack/unstack"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" -> 
"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_1/Abs/ReadVariableOp"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_2/Abs/ReadVariableOp"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_3/Abs/ReadVariableOp"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant/Abs"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant/Abs" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant/add"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant/add/y" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant/add"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant/add" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant/add_1"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_1/ReadVariableOp"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars/ReadVariableOp"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_2/ReadVariableOp"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars/ReadVariableOp"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_3/ReadVariableOp"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars/ReadVariableOp"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant/add_1"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant/add_1" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> 
"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.__operators__.add_37/AddV2"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_4/concat"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_1/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_1/Abs"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_1/Abs" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_1/add"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_1/add/y" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_1/add"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_1/add" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_1/add_1"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_1/ReadVariableOp" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_1/add_1"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_1/add_1" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.__operators__.add_38/AddV2"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_4/concat"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_1/Abs/ReadVariableOp"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_2/Abs/ReadVariableOp"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_3/Abs/ReadVariableOp"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant/Abs"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant/Abs" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant/add"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant/add/y" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant/add"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant/add" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant/add_1"; 
-"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_1/ReadVariableOp"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars/ReadVariableOp"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_2/ReadVariableOp"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars/ReadVariableOp"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_3/ReadVariableOp"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars/ReadVariableOp"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant/add_1"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant/add_1" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.__operators__.add_33/AddV2"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_3/concat"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_1/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_1/Abs"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_1/Abs" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_1/add"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_1/add/y" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_1/add"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_1/add" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_1/add_1"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_1/ReadVariableOp" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_1/add_1"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_1/add_1" -> 
"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.__operators__.add_34/AddV2"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_3/concat"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_1/Abs/ReadVariableOp"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_2/Abs/ReadVariableOp"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_3/Abs/ReadVariableOp"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant/Abs"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant/Abs" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant/add"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant/add/y" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant/add"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant/add" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant/add_1"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_1/ReadVariableOp"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars/ReadVariableOp"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_2/ReadVariableOp"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars/ReadVariableOp"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> 
"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_3/ReadVariableOp"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars/ReadVariableOp"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant/add_1"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant/add_1" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.__operators__.add_29/AddV2"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_2/concat"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_1/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_1/Abs"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_1/Abs" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_1/add"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_1/add/y" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_1/add"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_1/add" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_1/add_1"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_1/ReadVariableOp" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_1/add_1"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_1/add_1" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.__operators__.add_30/AddV2"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_2/concat"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_1/Abs/ReadVariableOp"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_2/Abs/ReadVariableOp"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" -> 
"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_3/Abs/ReadVariableOp"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant/Abs"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant/Abs" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant/add"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant/add/y" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant/add"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant/add" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant/add_1"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_1/ReadVariableOp"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars/ReadVariableOp"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_2/ReadVariableOp"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars/ReadVariableOp"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_3/ReadVariableOp"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars/ReadVariableOp"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant/add_1"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant/add_1" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.__operators__.add_25/AddV2"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_1/concat"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_1/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_1/Abs"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_1/Abs" -> 
"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_1/add"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_1/add/y" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_1/add"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_1/add" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_1/add_1"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_1/ReadVariableOp" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_1/add_1"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_1/add_1" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.__operators__.add_26/AddV2"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_1/concat"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_1/Abs/ReadVariableOp"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_2/Abs/ReadVariableOp"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_3/Abs/ReadVariableOp"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant/Abs"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant/Abs" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant/add"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant/add/y" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant/add"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant/add" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant/add_1"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_1/ReadVariableOp"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> 
"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars/ReadVariableOp"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_2/ReadVariableOp"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars/ReadVariableOp"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_3/ReadVariableOp"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars/ReadVariableOp"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant/add_1"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant/add_1" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.__operators__.add_21/AddV2"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat/concat"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_1/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_1/Abs"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_1/Abs" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_1/add"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_1/add/y" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_1/add"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_1/add" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_1/add_1"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_1/ReadVariableOp" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_1/add_1"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_1/add_1" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.__operators__.add_22/AddV2"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat/concat"; +"maskrcnn/tf.math.subtract_32/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_32/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.subtract_32/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> 
"maskrcnn/tf.math.subtract_32/fake_quantize/AsymmQuant/Abs"; +"maskrcnn/tf.math.subtract_32/fake_quantize/AsymmQuant/Abs" -> "maskrcnn/tf.math.subtract_32/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.subtract_32/fake_quantize/AsymmQuant/add/y" -> "maskrcnn/tf.math.subtract_32/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.subtract_32/fake_quantize/AsymmQuant/add" -> "maskrcnn/tf.math.subtract_32/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.subtract_32/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_32/fake_quantize/AsymmQuant/ReadVariableOp"; +"maskrcnn/tf.math.subtract_32/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_32/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"maskrcnn/tf.math.subtract_32/fake_quantize/AsymmQuant/ReadVariableOp" -> "maskrcnn/tf.math.subtract_32/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.subtract_32/fake_quantize/AsymmQuant/add_1" -> "maskrcnn/tf.math.subtract_32/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_32/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.subtract_32/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_32/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.__operators__.add_37/AddV2"; +"maskrcnn/tf.math.subtract_32/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_4/concat"; +"maskrcnn/tf.math.subtract_33/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_33/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.subtract_33/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_33/fake_quantize/AsymmQuant/Abs"; +"maskrcnn/tf.math.subtract_33/fake_quantize/AsymmQuant/Abs" -> "maskrcnn/tf.math.subtract_33/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.subtract_33/fake_quantize/AsymmQuant/add/y" -> "maskrcnn/tf.math.subtract_33/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.subtract_33/fake_quantize/AsymmQuant/add" -> "maskrcnn/tf.math.subtract_33/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.subtract_33/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_33/fake_quantize/AsymmQuant/ReadVariableOp"; +"maskrcnn/tf.math.subtract_33/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_33/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"maskrcnn/tf.math.subtract_33/fake_quantize/AsymmQuant/ReadVariableOp" -> "maskrcnn/tf.math.subtract_33/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.subtract_33/fake_quantize/AsymmQuant/add_1" -> "maskrcnn/tf.math.subtract_33/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_33/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.subtract_33/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_33/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.__operators__.add_38/AddV2"; +"maskrcnn/tf.math.subtract_33/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_4/concat"; +"maskrcnn/tf.math.subtract_24/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_24/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.subtract_24/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_24/fake_quantize/AsymmQuant/Abs"; 
+"maskrcnn/tf.math.subtract_24/fake_quantize/AsymmQuant/Abs" -> "maskrcnn/tf.math.subtract_24/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.subtract_24/fake_quantize/AsymmQuant/add/y" -> "maskrcnn/tf.math.subtract_24/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.subtract_24/fake_quantize/AsymmQuant/add" -> "maskrcnn/tf.math.subtract_24/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.subtract_24/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_24/fake_quantize/AsymmQuant/ReadVariableOp"; +"maskrcnn/tf.math.subtract_24/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_24/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"maskrcnn/tf.math.subtract_24/fake_quantize/AsymmQuant/ReadVariableOp" -> "maskrcnn/tf.math.subtract_24/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.subtract_24/fake_quantize/AsymmQuant/add_1" -> "maskrcnn/tf.math.subtract_24/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_24/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.subtract_24/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_24/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.__operators__.add_33/AddV2"; +"maskrcnn/tf.math.subtract_24/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_3/concat"; +"maskrcnn/tf.math.subtract_25/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_25/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.subtract_25/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_25/fake_quantize/AsymmQuant/Abs"; +"maskrcnn/tf.math.subtract_25/fake_quantize/AsymmQuant/Abs" -> "maskrcnn/tf.math.subtract_25/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.subtract_25/fake_quantize/AsymmQuant/add/y" -> "maskrcnn/tf.math.subtract_25/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.subtract_25/fake_quantize/AsymmQuant/add" -> "maskrcnn/tf.math.subtract_25/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.subtract_25/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_25/fake_quantize/AsymmQuant/ReadVariableOp"; +"maskrcnn/tf.math.subtract_25/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_25/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"maskrcnn/tf.math.subtract_25/fake_quantize/AsymmQuant/ReadVariableOp" -> "maskrcnn/tf.math.subtract_25/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.subtract_25/fake_quantize/AsymmQuant/add_1" -> "maskrcnn/tf.math.subtract_25/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_25/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.subtract_25/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_25/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.__operators__.add_34/AddV2"; +"maskrcnn/tf.math.subtract_25/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_3/concat"; +"maskrcnn/tf.math.subtract_16/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_16/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.subtract_16/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_16/fake_quantize/AsymmQuant/Abs"; +"maskrcnn/tf.math.subtract_16/fake_quantize/AsymmQuant/Abs" -> 
"maskrcnn/tf.math.subtract_16/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.subtract_16/fake_quantize/AsymmQuant/add/y" -> "maskrcnn/tf.math.subtract_16/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.subtract_16/fake_quantize/AsymmQuant/add" -> "maskrcnn/tf.math.subtract_16/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.subtract_16/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_16/fake_quantize/AsymmQuant/ReadVariableOp"; +"maskrcnn/tf.math.subtract_16/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_16/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"maskrcnn/tf.math.subtract_16/fake_quantize/AsymmQuant/ReadVariableOp" -> "maskrcnn/tf.math.subtract_16/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.subtract_16/fake_quantize/AsymmQuant/add_1" -> "maskrcnn/tf.math.subtract_16/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_16/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.subtract_16/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_16/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.__operators__.add_29/AddV2"; +"maskrcnn/tf.math.subtract_16/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_2/concat"; +"maskrcnn/tf.math.subtract_17/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_17/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.subtract_17/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_17/fake_quantize/AsymmQuant/Abs"; +"maskrcnn/tf.math.subtract_17/fake_quantize/AsymmQuant/Abs" -> "maskrcnn/tf.math.subtract_17/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.subtract_17/fake_quantize/AsymmQuant/add/y" -> "maskrcnn/tf.math.subtract_17/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.subtract_17/fake_quantize/AsymmQuant/add" -> "maskrcnn/tf.math.subtract_17/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.subtract_17/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_17/fake_quantize/AsymmQuant/ReadVariableOp"; +"maskrcnn/tf.math.subtract_17/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_17/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"maskrcnn/tf.math.subtract_17/fake_quantize/AsymmQuant/ReadVariableOp" -> "maskrcnn/tf.math.subtract_17/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.subtract_17/fake_quantize/AsymmQuant/add_1" -> "maskrcnn/tf.math.subtract_17/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_17/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.subtract_17/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_17/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.__operators__.add_30/AddV2"; +"maskrcnn/tf.math.subtract_17/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_2/concat"; +"maskrcnn/tf.math.subtract_8/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_8/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.subtract_8/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_8/fake_quantize/AsymmQuant/Abs"; +"maskrcnn/tf.math.subtract_8/fake_quantize/AsymmQuant/Abs" -> "maskrcnn/tf.math.subtract_8/fake_quantize/AsymmQuant/add"; 
+"maskrcnn/tf.math.subtract_8/fake_quantize/AsymmQuant/add/y" -> "maskrcnn/tf.math.subtract_8/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.subtract_8/fake_quantize/AsymmQuant/add" -> "maskrcnn/tf.math.subtract_8/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.subtract_8/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_8/fake_quantize/AsymmQuant/ReadVariableOp"; +"maskrcnn/tf.math.subtract_8/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_8/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"maskrcnn/tf.math.subtract_8/fake_quantize/AsymmQuant/ReadVariableOp" -> "maskrcnn/tf.math.subtract_8/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.subtract_8/fake_quantize/AsymmQuant/add_1" -> "maskrcnn/tf.math.subtract_8/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_8/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.subtract_8/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_8/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.__operators__.add_25/AddV2"; +"maskrcnn/tf.math.subtract_8/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_1/concat"; +"maskrcnn/tf.math.subtract_9/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_9/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.subtract_9/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_9/fake_quantize/AsymmQuant/Abs"; +"maskrcnn/tf.math.subtract_9/fake_quantize/AsymmQuant/Abs" -> "maskrcnn/tf.math.subtract_9/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.subtract_9/fake_quantize/AsymmQuant/add/y" -> "maskrcnn/tf.math.subtract_9/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.subtract_9/fake_quantize/AsymmQuant/add" -> "maskrcnn/tf.math.subtract_9/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.subtract_9/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_9/fake_quantize/AsymmQuant/ReadVariableOp"; +"maskrcnn/tf.math.subtract_9/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_9/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"maskrcnn/tf.math.subtract_9/fake_quantize/AsymmQuant/ReadVariableOp" -> "maskrcnn/tf.math.subtract_9/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.subtract_9/fake_quantize/AsymmQuant/add_1" -> "maskrcnn/tf.math.subtract_9/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_9/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.subtract_9/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_9/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.__operators__.add_26/AddV2"; +"maskrcnn/tf.math.subtract_9/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_1/concat"; +"maskrcnn/tf.math.subtract/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.subtract/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract/fake_quantize/AsymmQuant/Abs"; +"maskrcnn/tf.math.subtract/fake_quantize/AsymmQuant/Abs" -> "maskrcnn/tf.math.subtract/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.subtract/fake_quantize/AsymmQuant/add/y" -> "maskrcnn/tf.math.subtract/fake_quantize/AsymmQuant/add"; 
+"maskrcnn/tf.math.subtract/fake_quantize/AsymmQuant/add" -> "maskrcnn/tf.math.subtract/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.subtract/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract/fake_quantize/AsymmQuant/ReadVariableOp"; +"maskrcnn/tf.math.subtract/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"maskrcnn/tf.math.subtract/fake_quantize/AsymmQuant/ReadVariableOp" -> "maskrcnn/tf.math.subtract/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.subtract/fake_quantize/AsymmQuant/add_1" -> "maskrcnn/tf.math.subtract/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.subtract/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.__operators__.add_21/AddV2"; +"maskrcnn/tf.math.subtract/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat/concat"; +"maskrcnn/tf.math.subtract_1/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_1/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.subtract_1/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_1/fake_quantize/AsymmQuant/Abs"; +"maskrcnn/tf.math.subtract_1/fake_quantize/AsymmQuant/Abs" -> "maskrcnn/tf.math.subtract_1/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.subtract_1/fake_quantize/AsymmQuant/add/y" -> "maskrcnn/tf.math.subtract_1/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.subtract_1/fake_quantize/AsymmQuant/add" -> "maskrcnn/tf.math.subtract_1/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.subtract_1/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_1/fake_quantize/AsymmQuant/ReadVariableOp"; +"maskrcnn/tf.math.subtract_1/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_1/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"maskrcnn/tf.math.subtract_1/fake_quantize/AsymmQuant/ReadVariableOp" -> "maskrcnn/tf.math.subtract_1/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.subtract_1/fake_quantize/AsymmQuant/add_1" -> "maskrcnn/tf.math.subtract_1/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_1/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.subtract_1/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_1/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.__operators__.add_22/AddV2"; +"maskrcnn/tf.math.subtract_1/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat/concat"; "maskrcnn/tf.__operators__.add_38/AddV2" -> "maskrcnn/tf.__operators__.add_38/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.__operators__.add_37/AddV2" -> "maskrcnn/tf.__operators__.add_37/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.__operators__.add_34/AddV2" -> "maskrcnn/tf.__operators__.add_34/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; @@ -24540,9 +24574,9 @@ self_1 -> "maskrcnn/image_info/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" "maskrcnn/tf.math.subtract_36/Sub/y" -> "maskrcnn/tf.math.subtract_36/Sub"; "maskrcnn/tf.math.subtract_36/Sub" -> "maskrcnn/tf.math.subtract_36/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_35/Sub/y" -> 
"maskrcnn/tf.math.subtract_35/Sub"; -"maskrcnn/tf.math.subtract_35/Sub" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_35/Sub" -> "maskrcnn/tf.math.subtract_35/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_34/Sub/y" -> "maskrcnn/tf.math.subtract_34/Sub"; -"maskrcnn/tf.math.subtract_34/Sub" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_34/Sub" -> "maskrcnn/tf.math.subtract_34/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_31/Sub/y" -> "maskrcnn/tf.math.subtract_31/Sub"; "maskrcnn/tf.math.subtract_31/Sub" -> "maskrcnn/tf.math.subtract_31/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_30/Sub/y" -> "maskrcnn/tf.math.subtract_30/Sub"; @@ -24552,9 +24586,9 @@ self_1 -> "maskrcnn/image_info/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" "maskrcnn/tf.math.subtract_28/Sub/y" -> "maskrcnn/tf.math.subtract_28/Sub"; "maskrcnn/tf.math.subtract_28/Sub" -> "maskrcnn/tf.math.subtract_28/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_27/Sub/y" -> "maskrcnn/tf.math.subtract_27/Sub"; -"maskrcnn/tf.math.subtract_27/Sub" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_27/Sub" -> "maskrcnn/tf.math.subtract_27/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_26/Sub/y" -> "maskrcnn/tf.math.subtract_26/Sub"; -"maskrcnn/tf.math.subtract_26/Sub" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_26/Sub" -> "maskrcnn/tf.math.subtract_26/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_23/Sub/y" -> "maskrcnn/tf.math.subtract_23/Sub"; "maskrcnn/tf.math.subtract_23/Sub" -> "maskrcnn/tf.math.subtract_23/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_22/Sub/y" -> "maskrcnn/tf.math.subtract_22/Sub"; @@ -24564,9 +24598,9 @@ self_1 -> "maskrcnn/image_info/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" "maskrcnn/tf.math.subtract_20/Sub/y" -> "maskrcnn/tf.math.subtract_20/Sub"; "maskrcnn/tf.math.subtract_20/Sub" -> "maskrcnn/tf.math.subtract_20/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_19/Sub/y" -> "maskrcnn/tf.math.subtract_19/Sub"; -"maskrcnn/tf.math.subtract_19/Sub" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_19/Sub" -> "maskrcnn/tf.math.subtract_19/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_18/Sub/y" -> "maskrcnn/tf.math.subtract_18/Sub"; -"maskrcnn/tf.math.subtract_18/Sub" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_18/Sub" -> "maskrcnn/tf.math.subtract_18/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_15/Sub/y" -> "maskrcnn/tf.math.subtract_15/Sub"; "maskrcnn/tf.math.subtract_15/Sub" -> "maskrcnn/tf.math.subtract_15/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_14/Sub/y" -> "maskrcnn/tf.math.subtract_14/Sub"; @@ -24576,9 +24610,9 @@ self_1 -> "maskrcnn/image_info/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" "maskrcnn/tf.math.subtract_12/Sub/y" -> 
"maskrcnn/tf.math.subtract_12/Sub"; "maskrcnn/tf.math.subtract_12/Sub" -> "maskrcnn/tf.math.subtract_12/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_11/Sub/y" -> "maskrcnn/tf.math.subtract_11/Sub"; -"maskrcnn/tf.math.subtract_11/Sub" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_11/Sub" -> "maskrcnn/tf.math.subtract_11/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_10/Sub/y" -> "maskrcnn/tf.math.subtract_10/Sub"; -"maskrcnn/tf.math.subtract_10/Sub" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_10/Sub" -> "maskrcnn/tf.math.subtract_10/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_7/Sub/y" -> "maskrcnn/tf.math.subtract_7/Sub"; "maskrcnn/tf.math.subtract_7/Sub" -> "maskrcnn/tf.math.subtract_7/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_6/Sub/y" -> "maskrcnn/tf.math.subtract_6/Sub"; @@ -24588,9 +24622,9 @@ self_1 -> "maskrcnn/image_info/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" "maskrcnn/tf.math.subtract_4/Sub/y" -> "maskrcnn/tf.math.subtract_4/Sub"; "maskrcnn/tf.math.subtract_4/Sub" -> "maskrcnn/tf.math.subtract_4/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_3/Sub/y" -> "maskrcnn/tf.math.subtract_3/Sub"; -"maskrcnn/tf.math.subtract_3/Sub" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_3/Sub" -> "maskrcnn/tf.math.subtract_3/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_2/Sub/y" -> "maskrcnn/tf.math.subtract_2/Sub"; -"maskrcnn/tf.math.subtract_2/Sub" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_2/Sub" -> "maskrcnn/tf.math.subtract_2/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_36/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_36/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; "maskrcnn/tf.math.subtract_36/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_36/fake_quantize/AsymmQuant/Abs"; "maskrcnn/tf.math.subtract_36/fake_quantize/AsymmQuant/Abs" -> "maskrcnn/tf.math.subtract_36/fake_quantize/AsymmQuant/add"; @@ -24635,22 +24669,28 @@ self_1 -> "maskrcnn/image_info/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" "maskrcnn/tf.math.subtract_39/fake_quantize/AsymmQuant/add_1" -> "maskrcnn/tf.math.subtract_39/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_39/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.subtract_39/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_39/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.stack_4/stack"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_2/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_2/Abs"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_2/Abs" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_2/add"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_2/add/y" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_2/add"; 
-"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_2/add" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_2/add_1"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_2/ReadVariableOp" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_2/add_1"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_2/add_1" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_4/concat"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_3/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_3/Abs"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_3/Abs" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_3/add"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_3/add/y" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_3/add"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_3/add" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_3/add_1"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_3/ReadVariableOp" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_3/add_1"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_3/add_1" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_4/concat"; +"maskrcnn/tf.math.subtract_34/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_34/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.subtract_34/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_34/fake_quantize/AsymmQuant/Abs"; +"maskrcnn/tf.math.subtract_34/fake_quantize/AsymmQuant/Abs" -> "maskrcnn/tf.math.subtract_34/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.subtract_34/fake_quantize/AsymmQuant/add/y" -> "maskrcnn/tf.math.subtract_34/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.subtract_34/fake_quantize/AsymmQuant/add" -> "maskrcnn/tf.math.subtract_34/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.subtract_34/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_34/fake_quantize/AsymmQuant/ReadVariableOp"; +"maskrcnn/tf.math.subtract_34/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_34/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"maskrcnn/tf.math.subtract_34/fake_quantize/AsymmQuant/ReadVariableOp" -> "maskrcnn/tf.math.subtract_34/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.subtract_34/fake_quantize/AsymmQuant/add_1" 
-> "maskrcnn/tf.math.subtract_34/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_34/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.subtract_34/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_34/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_4/concat"; +"maskrcnn/tf.math.subtract_35/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_35/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.subtract_35/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_35/fake_quantize/AsymmQuant/Abs"; +"maskrcnn/tf.math.subtract_35/fake_quantize/AsymmQuant/Abs" -> "maskrcnn/tf.math.subtract_35/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.subtract_35/fake_quantize/AsymmQuant/add/y" -> "maskrcnn/tf.math.subtract_35/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.subtract_35/fake_quantize/AsymmQuant/add" -> "maskrcnn/tf.math.subtract_35/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.subtract_35/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_35/fake_quantize/AsymmQuant/ReadVariableOp"; +"maskrcnn/tf.math.subtract_35/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_35/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"maskrcnn/tf.math.subtract_35/fake_quantize/AsymmQuant/ReadVariableOp" -> "maskrcnn/tf.math.subtract_35/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.subtract_35/fake_quantize/AsymmQuant/add_1" -> "maskrcnn/tf.math.subtract_35/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_35/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.subtract_35/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_35/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_4/concat"; "maskrcnn/tf.math.subtract_28/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_28/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; "maskrcnn/tf.math.subtract_28/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_28/fake_quantize/AsymmQuant/Abs"; "maskrcnn/tf.math.subtract_28/fake_quantize/AsymmQuant/Abs" -> "maskrcnn/tf.math.subtract_28/fake_quantize/AsymmQuant/add"; @@ -24695,22 +24735,28 @@ self_1 -> "maskrcnn/image_info/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" "maskrcnn/tf.math.subtract_31/fake_quantize/AsymmQuant/add_1" -> "maskrcnn/tf.math.subtract_31/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_31/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.subtract_31/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_31/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.stack_3/stack"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_2/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_2/Abs"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_2/Abs" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_2/add"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_2/add/y" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_2/add"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_2/add" 
-> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_2/add_1"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_2/ReadVariableOp" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_2/add_1"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_2/add_1" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_3/concat"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_3/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_3/Abs"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_3/Abs" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_3/add"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_3/add/y" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_3/add"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_3/add" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_3/add_1"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_3/ReadVariableOp" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_3/add_1"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_3/add_1" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_3/concat"; +"maskrcnn/tf.math.subtract_26/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_26/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.subtract_26/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_26/fake_quantize/AsymmQuant/Abs"; +"maskrcnn/tf.math.subtract_26/fake_quantize/AsymmQuant/Abs" -> "maskrcnn/tf.math.subtract_26/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.subtract_26/fake_quantize/AsymmQuant/add/y" -> "maskrcnn/tf.math.subtract_26/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.subtract_26/fake_quantize/AsymmQuant/add" -> "maskrcnn/tf.math.subtract_26/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.subtract_26/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_26/fake_quantize/AsymmQuant/ReadVariableOp"; +"maskrcnn/tf.math.subtract_26/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_26/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"maskrcnn/tf.math.subtract_26/fake_quantize/AsymmQuant/ReadVariableOp" -> "maskrcnn/tf.math.subtract_26/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.subtract_26/fake_quantize/AsymmQuant/add_1" -> "maskrcnn/tf.math.subtract_26/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; 
+"maskrcnn/tf.math.subtract_26/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.subtract_26/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_26/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_3/concat"; +"maskrcnn/tf.math.subtract_27/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_27/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.subtract_27/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_27/fake_quantize/AsymmQuant/Abs"; +"maskrcnn/tf.math.subtract_27/fake_quantize/AsymmQuant/Abs" -> "maskrcnn/tf.math.subtract_27/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.subtract_27/fake_quantize/AsymmQuant/add/y" -> "maskrcnn/tf.math.subtract_27/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.subtract_27/fake_quantize/AsymmQuant/add" -> "maskrcnn/tf.math.subtract_27/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.subtract_27/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_27/fake_quantize/AsymmQuant/ReadVariableOp"; +"maskrcnn/tf.math.subtract_27/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_27/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"maskrcnn/tf.math.subtract_27/fake_quantize/AsymmQuant/ReadVariableOp" -> "maskrcnn/tf.math.subtract_27/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.subtract_27/fake_quantize/AsymmQuant/add_1" -> "maskrcnn/tf.math.subtract_27/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_27/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.subtract_27/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_27/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_3/concat"; "maskrcnn/tf.math.subtract_20/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_20/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; "maskrcnn/tf.math.subtract_20/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_20/fake_quantize/AsymmQuant/Abs"; "maskrcnn/tf.math.subtract_20/fake_quantize/AsymmQuant/Abs" -> "maskrcnn/tf.math.subtract_20/fake_quantize/AsymmQuant/add"; @@ -24755,22 +24801,28 @@ self_1 -> "maskrcnn/image_info/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" "maskrcnn/tf.math.subtract_23/fake_quantize/AsymmQuant/add_1" -> "maskrcnn/tf.math.subtract_23/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_23/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.subtract_23/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_23/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.stack_2/stack"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_2/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_2/Abs"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_2/Abs" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_2/add"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_2/add/y" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_2/add"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_2/add" -> 
"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_2/add_1"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_2/ReadVariableOp" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_2/add_1"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_2/add_1" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_2/concat"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_3/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_3/Abs"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_3/Abs" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_3/add"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_3/add/y" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_3/add"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_3/add" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_3/add_1"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_3/ReadVariableOp" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_3/add_1"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_3/add_1" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_2/concat"; +"maskrcnn/tf.math.subtract_18/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_18/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.subtract_18/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_18/fake_quantize/AsymmQuant/Abs"; +"maskrcnn/tf.math.subtract_18/fake_quantize/AsymmQuant/Abs" -> "maskrcnn/tf.math.subtract_18/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.subtract_18/fake_quantize/AsymmQuant/add/y" -> "maskrcnn/tf.math.subtract_18/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.subtract_18/fake_quantize/AsymmQuant/add" -> "maskrcnn/tf.math.subtract_18/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.subtract_18/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_18/fake_quantize/AsymmQuant/ReadVariableOp"; +"maskrcnn/tf.math.subtract_18/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_18/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"maskrcnn/tf.math.subtract_18/fake_quantize/AsymmQuant/ReadVariableOp" -> "maskrcnn/tf.math.subtract_18/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.subtract_18/fake_quantize/AsymmQuant/add_1" -> "maskrcnn/tf.math.subtract_18/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; 
+"maskrcnn/tf.math.subtract_18/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.subtract_18/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_18/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_2/concat"; +"maskrcnn/tf.math.subtract_19/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_19/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.subtract_19/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_19/fake_quantize/AsymmQuant/Abs"; +"maskrcnn/tf.math.subtract_19/fake_quantize/AsymmQuant/Abs" -> "maskrcnn/tf.math.subtract_19/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.subtract_19/fake_quantize/AsymmQuant/add/y" -> "maskrcnn/tf.math.subtract_19/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.subtract_19/fake_quantize/AsymmQuant/add" -> "maskrcnn/tf.math.subtract_19/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.subtract_19/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_19/fake_quantize/AsymmQuant/ReadVariableOp"; +"maskrcnn/tf.math.subtract_19/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_19/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"maskrcnn/tf.math.subtract_19/fake_quantize/AsymmQuant/ReadVariableOp" -> "maskrcnn/tf.math.subtract_19/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.subtract_19/fake_quantize/AsymmQuant/add_1" -> "maskrcnn/tf.math.subtract_19/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_19/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.subtract_19/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_19/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_2/concat"; "maskrcnn/tf.math.subtract_12/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_12/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; "maskrcnn/tf.math.subtract_12/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_12/fake_quantize/AsymmQuant/Abs"; "maskrcnn/tf.math.subtract_12/fake_quantize/AsymmQuant/Abs" -> "maskrcnn/tf.math.subtract_12/fake_quantize/AsymmQuant/add"; @@ -24815,22 +24867,28 @@ self_1 -> "maskrcnn/image_info/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" "maskrcnn/tf.math.subtract_15/fake_quantize/AsymmQuant/add_1" -> "maskrcnn/tf.math.subtract_15/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_15/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.subtract_15/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_15/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.stack_1/stack"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_2/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_2/Abs"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_2/Abs" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_2/add"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_2/add/y" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_2/add"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_2/add" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_2/add_1"; 
-"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_2/ReadVariableOp" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_2/add_1"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_2/add_1" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_1/concat"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_3/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_3/Abs"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_3/Abs" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_3/add"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_3/add/y" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_3/add"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_3/add" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_3/add_1"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_3/ReadVariableOp" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_3/add_1"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_3/add_1" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_1/concat"; +"maskrcnn/tf.math.subtract_10/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_10/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.subtract_10/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_10/fake_quantize/AsymmQuant/Abs"; +"maskrcnn/tf.math.subtract_10/fake_quantize/AsymmQuant/Abs" -> "maskrcnn/tf.math.subtract_10/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.subtract_10/fake_quantize/AsymmQuant/add/y" -> "maskrcnn/tf.math.subtract_10/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.subtract_10/fake_quantize/AsymmQuant/add" -> "maskrcnn/tf.math.subtract_10/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.subtract_10/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_10/fake_quantize/AsymmQuant/ReadVariableOp"; +"maskrcnn/tf.math.subtract_10/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_10/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"maskrcnn/tf.math.subtract_10/fake_quantize/AsymmQuant/ReadVariableOp" -> "maskrcnn/tf.math.subtract_10/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.subtract_10/fake_quantize/AsymmQuant/add_1" -> "maskrcnn/tf.math.subtract_10/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_10/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> 
"maskrcnn/tf.math.subtract_10/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_10/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_1/concat"; +"maskrcnn/tf.math.subtract_11/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_11/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.subtract_11/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_11/fake_quantize/AsymmQuant/Abs"; +"maskrcnn/tf.math.subtract_11/fake_quantize/AsymmQuant/Abs" -> "maskrcnn/tf.math.subtract_11/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.subtract_11/fake_quantize/AsymmQuant/add/y" -> "maskrcnn/tf.math.subtract_11/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.subtract_11/fake_quantize/AsymmQuant/add" -> "maskrcnn/tf.math.subtract_11/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.subtract_11/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_11/fake_quantize/AsymmQuant/ReadVariableOp"; +"maskrcnn/tf.math.subtract_11/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_11/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"maskrcnn/tf.math.subtract_11/fake_quantize/AsymmQuant/ReadVariableOp" -> "maskrcnn/tf.math.subtract_11/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.subtract_11/fake_quantize/AsymmQuant/add_1" -> "maskrcnn/tf.math.subtract_11/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_11/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.subtract_11/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_11/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_1/concat"; "maskrcnn/tf.math.subtract_4/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_4/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; "maskrcnn/tf.math.subtract_4/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_4/fake_quantize/AsymmQuant/Abs"; "maskrcnn/tf.math.subtract_4/fake_quantize/AsymmQuant/Abs" -> "maskrcnn/tf.math.subtract_4/fake_quantize/AsymmQuant/add"; @@ -24875,22 +24933,28 @@ self_1 -> "maskrcnn/image_info/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" "maskrcnn/tf.math.subtract_7/fake_quantize/AsymmQuant/add_1" -> "maskrcnn/tf.math.subtract_7/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_7/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.subtract_7/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_7/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.stack/stack"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_2/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_2/Abs"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_2/Abs" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_2/add"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_2/add/y" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_2/add"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_2/add" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_2/add_1"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_2/ReadVariableOp" -> 
"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_2/add_1"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_2/add_1" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat/concat"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_3/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_3/Abs"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_3/Abs" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_3/add"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_3/add/y" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_3/add"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_3/add" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_3/add_1"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_3/ReadVariableOp" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_3/add_1"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_3/add_1" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat/concat"; +"maskrcnn/tf.math.subtract_2/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_2/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.subtract_2/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_2/fake_quantize/AsymmQuant/Abs"; +"maskrcnn/tf.math.subtract_2/fake_quantize/AsymmQuant/Abs" -> "maskrcnn/tf.math.subtract_2/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.subtract_2/fake_quantize/AsymmQuant/add/y" -> "maskrcnn/tf.math.subtract_2/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.subtract_2/fake_quantize/AsymmQuant/add" -> "maskrcnn/tf.math.subtract_2/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.subtract_2/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_2/fake_quantize/AsymmQuant/ReadVariableOp"; +"maskrcnn/tf.math.subtract_2/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_2/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"maskrcnn/tf.math.subtract_2/fake_quantize/AsymmQuant/ReadVariableOp" -> "maskrcnn/tf.math.subtract_2/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.subtract_2/fake_quantize/AsymmQuant/add_1" -> "maskrcnn/tf.math.subtract_2/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_2/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.subtract_2/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_2/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> 
"maskrcnn/tf.concat/concat"; +"maskrcnn/tf.math.subtract_3/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_3/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.subtract_3/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_3/fake_quantize/AsymmQuant/Abs"; +"maskrcnn/tf.math.subtract_3/fake_quantize/AsymmQuant/Abs" -> "maskrcnn/tf.math.subtract_3/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.subtract_3/fake_quantize/AsymmQuant/add/y" -> "maskrcnn/tf.math.subtract_3/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.subtract_3/fake_quantize/AsymmQuant/add" -> "maskrcnn/tf.math.subtract_3/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.subtract_3/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_3/fake_quantize/AsymmQuant/ReadVariableOp"; +"maskrcnn/tf.math.subtract_3/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_3/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"maskrcnn/tf.math.subtract_3/fake_quantize/AsymmQuant/ReadVariableOp" -> "maskrcnn/tf.math.subtract_3/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.subtract_3/fake_quantize/AsymmQuant/add_1" -> "maskrcnn/tf.math.subtract_3/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_3/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.subtract_3/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_3/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat/concat"; "maskrcnn/tf.concat_4/concat/axis" -> "maskrcnn/tf.concat_4/concat"; "maskrcnn/tf.concat_4/concat" -> "maskrcnn/tf.math.minimum_14/Minimum"; "maskrcnn/tf.stack_4/stack" -> "maskrcnn/tf.math.minimum_14/Minimum"; @@ -25101,31 +25165,31 @@ self_1 -> "maskrcnn/image_info/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" "maskrcnn/tf.image.combined_non_max_suppression/combined_non_max_suppression/CombinedNonMaxSuppression/max_output_size_per_class" -> "maskrcnn/tf.image.combined_non_max_suppression/combined_non_max_suppression/CombinedNonMaxSuppression"; "maskrcnn/tf.image.combined_non_max_suppression/combined_non_max_suppression/CombinedNonMaxSuppression/max_total_size" -> "maskrcnn/tf.image.combined_non_max_suppression/combined_non_max_suppression/CombinedNonMaxSuppression"; "maskrcnn/tf.image.combined_non_max_suppression/combined_non_max_suppression/CombinedNonMaxSuppression" -> "maskrcnn/tf.concat_6/concat"; -"maskrcnn/tf.image.combined_non_max_suppression/combined_non_max_suppression/CombinedNonMaxSuppression" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.image.combined_non_max_suppression/combined_non_max_suppression/CombinedNonMaxSuppression" -> "maskrcnn/tf.concat_5/fake_quantize_I0/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.image.combined_non_max_suppression_1/combined_non_max_suppression/iou_threshold" -> "maskrcnn/tf.image.combined_non_max_suppression_1/combined_non_max_suppression/CombinedNonMaxSuppression"; "maskrcnn/tf.image.combined_non_max_suppression_1/combined_non_max_suppression/score_threshold" -> "maskrcnn/tf.image.combined_non_max_suppression_1/combined_non_max_suppression/CombinedNonMaxSuppression"; "maskrcnn/tf.image.combined_non_max_suppression_1/combined_non_max_suppression/CombinedNonMaxSuppression/max_output_size_per_class" -> 
"maskrcnn/tf.image.combined_non_max_suppression_1/combined_non_max_suppression/CombinedNonMaxSuppression"; "maskrcnn/tf.image.combined_non_max_suppression_1/combined_non_max_suppression/CombinedNonMaxSuppression/max_total_size" -> "maskrcnn/tf.image.combined_non_max_suppression_1/combined_non_max_suppression/CombinedNonMaxSuppression"; "maskrcnn/tf.image.combined_non_max_suppression_1/combined_non_max_suppression/CombinedNonMaxSuppression" -> "maskrcnn/tf.concat_6/concat"; -"maskrcnn/tf.image.combined_non_max_suppression_1/combined_non_max_suppression/CombinedNonMaxSuppression" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.image.combined_non_max_suppression_1/combined_non_max_suppression/CombinedNonMaxSuppression" -> "maskrcnn/tf.concat_5/fake_quantize_I1/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.image.combined_non_max_suppression_2/combined_non_max_suppression/iou_threshold" -> "maskrcnn/tf.image.combined_non_max_suppression_2/combined_non_max_suppression/CombinedNonMaxSuppression"; "maskrcnn/tf.image.combined_non_max_suppression_2/combined_non_max_suppression/score_threshold" -> "maskrcnn/tf.image.combined_non_max_suppression_2/combined_non_max_suppression/CombinedNonMaxSuppression"; "maskrcnn/tf.image.combined_non_max_suppression_2/combined_non_max_suppression/CombinedNonMaxSuppression/max_output_size_per_class" -> "maskrcnn/tf.image.combined_non_max_suppression_2/combined_non_max_suppression/CombinedNonMaxSuppression"; "maskrcnn/tf.image.combined_non_max_suppression_2/combined_non_max_suppression/CombinedNonMaxSuppression/max_total_size" -> "maskrcnn/tf.image.combined_non_max_suppression_2/combined_non_max_suppression/CombinedNonMaxSuppression"; "maskrcnn/tf.image.combined_non_max_suppression_2/combined_non_max_suppression/CombinedNonMaxSuppression" -> "maskrcnn/tf.concat_6/concat"; -"maskrcnn/tf.image.combined_non_max_suppression_2/combined_non_max_suppression/CombinedNonMaxSuppression" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.image.combined_non_max_suppression_2/combined_non_max_suppression/CombinedNonMaxSuppression" -> "maskrcnn/tf.concat_5/fake_quantize_I2/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.image.combined_non_max_suppression_3/combined_non_max_suppression/iou_threshold" -> "maskrcnn/tf.image.combined_non_max_suppression_3/combined_non_max_suppression/CombinedNonMaxSuppression"; "maskrcnn/tf.image.combined_non_max_suppression_3/combined_non_max_suppression/score_threshold" -> "maskrcnn/tf.image.combined_non_max_suppression_3/combined_non_max_suppression/CombinedNonMaxSuppression"; "maskrcnn/tf.image.combined_non_max_suppression_3/combined_non_max_suppression/CombinedNonMaxSuppression/max_output_size_per_class" -> "maskrcnn/tf.image.combined_non_max_suppression_3/combined_non_max_suppression/CombinedNonMaxSuppression"; "maskrcnn/tf.image.combined_non_max_suppression_3/combined_non_max_suppression/CombinedNonMaxSuppression/max_total_size" -> "maskrcnn/tf.image.combined_non_max_suppression_3/combined_non_max_suppression/CombinedNonMaxSuppression"; "maskrcnn/tf.image.combined_non_max_suppression_3/combined_non_max_suppression/CombinedNonMaxSuppression" -> "maskrcnn/tf.concat_6/concat"; -"maskrcnn/tf.image.combined_non_max_suppression_3/combined_non_max_suppression/CombinedNonMaxSuppression" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars"; 
+"maskrcnn/tf.image.combined_non_max_suppression_3/combined_non_max_suppression/CombinedNonMaxSuppression" -> "maskrcnn/tf.concat_5/fake_quantize_I3/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.image.combined_non_max_suppression_4/combined_non_max_suppression/iou_threshold" -> "maskrcnn/tf.image.combined_non_max_suppression_4/combined_non_max_suppression/CombinedNonMaxSuppression"; "maskrcnn/tf.image.combined_non_max_suppression_4/combined_non_max_suppression/score_threshold" -> "maskrcnn/tf.image.combined_non_max_suppression_4/combined_non_max_suppression/CombinedNonMaxSuppression"; "maskrcnn/tf.image.combined_non_max_suppression_4/combined_non_max_suppression/CombinedNonMaxSuppression/max_output_size_per_class" -> "maskrcnn/tf.image.combined_non_max_suppression_4/combined_non_max_suppression/CombinedNonMaxSuppression"; "maskrcnn/tf.image.combined_non_max_suppression_4/combined_non_max_suppression/CombinedNonMaxSuppression/max_total_size" -> "maskrcnn/tf.image.combined_non_max_suppression_4/combined_non_max_suppression/CombinedNonMaxSuppression"; "maskrcnn/tf.image.combined_non_max_suppression_4/combined_non_max_suppression/CombinedNonMaxSuppression" -> "maskrcnn/tf.concat_6/concat"; -"maskrcnn/tf.image.combined_non_max_suppression_4/combined_non_max_suppression/CombinedNonMaxSuppression" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_4/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.image.combined_non_max_suppression_4/combined_non_max_suppression/CombinedNonMaxSuppression" -> "maskrcnn/tf.concat_5/fake_quantize_I4/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.concat_6/concat/axis" -> "maskrcnn/tf.concat_6/concat"; "maskrcnn/tf.concat_6/concat" -> "maskrcnn/tf.math.top_k/TopKV2"; "maskrcnn/tf.math.top_k/TopKV2/k" -> "maskrcnn/tf.math.top_k/TopKV2"; @@ -25154,61 +25218,61 @@ self_1 -> "maskrcnn/image_info/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" "maskrcnn/tf.ones/ones/Const" -> "maskrcnn/tf.ones/ones"; "maskrcnn/tf.ones/ones" -> "maskrcnn/tf.math.multiply_30/Mul"; "maskrcnn/tf.math.multiply_30/Mul" -> "maskrcnn/tf.stack_5/stack"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant/Abs/ReadVariableOp"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_1/Abs/ReadVariableOp"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_2/Abs/ReadVariableOp"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_3/Abs/ReadVariableOp"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_4/Abs/ReadVariableOp"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant/Abs"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant/Abs" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant/add"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant/add/y" -> 
"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant/add"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant/add" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant/add_1"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant/ReadVariableOp"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_1/ReadVariableOp"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars/ReadVariableOp"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_2/ReadVariableOp"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars/ReadVariableOp"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_3/ReadVariableOp"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars/ReadVariableOp"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_4/ReadVariableOp"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_4/FakeQuantWithMinMaxVars/ReadVariableOp"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant/ReadVariableOp" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant/add_1"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant/add_1" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_5/concat"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_1/Abs/ReadVariableOp" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_1/Abs"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_1/Abs" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_1/add"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_1/add/y" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_1/add"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_1/add" -> 
"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_1/add_1"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_1/ReadVariableOp" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_1/add_1"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_1/add_1" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_5/concat"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_2/Abs/ReadVariableOp" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_2/Abs"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_2/Abs" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_2/add"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_2/add/y" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_2/add"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_2/add" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_2/add_1"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_2/ReadVariableOp" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_2/add_1"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_2/add_1" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_5/concat"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_3/Abs/ReadVariableOp" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_3/Abs"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_3/Abs" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_3/add"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_3/add/y" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_3/add"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_3/add" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_3/add_1"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_3/ReadVariableOp" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_3/add_1"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_3/add_1" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_5/concat"; 
-"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_4/Abs/ReadVariableOp" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_4/Abs"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_4/Abs" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_4/add"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_4/add/y" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_4/add"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_4/add" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_4/add_1"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_4/ReadVariableOp" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_4/add_1"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_4/add_1" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_4/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_4/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_4/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_4/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_5/concat"; +"maskrcnn/tf.concat_5/fake_quantize_I0/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.concat_5/fake_quantize_I0/AsymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.concat_5/fake_quantize_I0/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.concat_5/fake_quantize_I0/AsymmQuant/Abs"; +"maskrcnn/tf.concat_5/fake_quantize_I0/AsymmQuant/Abs" -> "maskrcnn/tf.concat_5/fake_quantize_I0/AsymmQuant/add"; +"maskrcnn/tf.concat_5/fake_quantize_I0/AsymmQuant/add/y" -> "maskrcnn/tf.concat_5/fake_quantize_I0/AsymmQuant/add"; +"maskrcnn/tf.concat_5/fake_quantize_I0/AsymmQuant/add" -> "maskrcnn/tf.concat_5/fake_quantize_I0/AsymmQuant/add_1"; +"maskrcnn/tf.concat_5/fake_quantize_I0/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.concat_5/fake_quantize_I0/AsymmQuant/ReadVariableOp"; +"maskrcnn/tf.concat_5/fake_quantize_I0/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.concat_5/fake_quantize_I0/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"maskrcnn/tf.concat_5/fake_quantize_I0/AsymmQuant/ReadVariableOp" -> "maskrcnn/tf.concat_5/fake_quantize_I0/AsymmQuant/add_1"; +"maskrcnn/tf.concat_5/fake_quantize_I0/AsymmQuant/add_1" -> "maskrcnn/tf.concat_5/fake_quantize_I0/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.concat_5/fake_quantize_I0/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.concat_5/fake_quantize_I0/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.concat_5/fake_quantize_I0/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_5/concat"; +"maskrcnn/tf.concat_5/fake_quantize_I1/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.concat_5/fake_quantize_I1/AsymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.concat_5/fake_quantize_I1/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.concat_5/fake_quantize_I1/AsymmQuant/Abs"; +"maskrcnn/tf.concat_5/fake_quantize_I1/AsymmQuant/Abs" -> "maskrcnn/tf.concat_5/fake_quantize_I1/AsymmQuant/add"; +"maskrcnn/tf.concat_5/fake_quantize_I1/AsymmQuant/add/y" -> "maskrcnn/tf.concat_5/fake_quantize_I1/AsymmQuant/add"; +"maskrcnn/tf.concat_5/fake_quantize_I1/AsymmQuant/add" -> "maskrcnn/tf.concat_5/fake_quantize_I1/AsymmQuant/add_1"; 
+"maskrcnn/tf.concat_5/fake_quantize_I1/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.concat_5/fake_quantize_I1/AsymmQuant/ReadVariableOp"; +"maskrcnn/tf.concat_5/fake_quantize_I1/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.concat_5/fake_quantize_I1/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"maskrcnn/tf.concat_5/fake_quantize_I1/AsymmQuant/ReadVariableOp" -> "maskrcnn/tf.concat_5/fake_quantize_I1/AsymmQuant/add_1"; +"maskrcnn/tf.concat_5/fake_quantize_I1/AsymmQuant/add_1" -> "maskrcnn/tf.concat_5/fake_quantize_I1/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.concat_5/fake_quantize_I1/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.concat_5/fake_quantize_I1/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.concat_5/fake_quantize_I1/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_5/concat"; +"maskrcnn/tf.concat_5/fake_quantize_I2/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.concat_5/fake_quantize_I2/AsymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.concat_5/fake_quantize_I2/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.concat_5/fake_quantize_I2/AsymmQuant/Abs"; +"maskrcnn/tf.concat_5/fake_quantize_I2/AsymmQuant/Abs" -> "maskrcnn/tf.concat_5/fake_quantize_I2/AsymmQuant/add"; +"maskrcnn/tf.concat_5/fake_quantize_I2/AsymmQuant/add/y" -> "maskrcnn/tf.concat_5/fake_quantize_I2/AsymmQuant/add"; +"maskrcnn/tf.concat_5/fake_quantize_I2/AsymmQuant/add" -> "maskrcnn/tf.concat_5/fake_quantize_I2/AsymmQuant/add_1"; +"maskrcnn/tf.concat_5/fake_quantize_I2/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.concat_5/fake_quantize_I2/AsymmQuant/ReadVariableOp"; +"maskrcnn/tf.concat_5/fake_quantize_I2/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.concat_5/fake_quantize_I2/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"maskrcnn/tf.concat_5/fake_quantize_I2/AsymmQuant/ReadVariableOp" -> "maskrcnn/tf.concat_5/fake_quantize_I2/AsymmQuant/add_1"; +"maskrcnn/tf.concat_5/fake_quantize_I2/AsymmQuant/add_1" -> "maskrcnn/tf.concat_5/fake_quantize_I2/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.concat_5/fake_quantize_I2/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.concat_5/fake_quantize_I2/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.concat_5/fake_quantize_I2/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_5/concat"; +"maskrcnn/tf.concat_5/fake_quantize_I3/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.concat_5/fake_quantize_I3/AsymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.concat_5/fake_quantize_I3/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.concat_5/fake_quantize_I3/AsymmQuant/Abs"; +"maskrcnn/tf.concat_5/fake_quantize_I3/AsymmQuant/Abs" -> "maskrcnn/tf.concat_5/fake_quantize_I3/AsymmQuant/add"; +"maskrcnn/tf.concat_5/fake_quantize_I3/AsymmQuant/add/y" -> "maskrcnn/tf.concat_5/fake_quantize_I3/AsymmQuant/add"; +"maskrcnn/tf.concat_5/fake_quantize_I3/AsymmQuant/add" -> "maskrcnn/tf.concat_5/fake_quantize_I3/AsymmQuant/add_1"; +"maskrcnn/tf.concat_5/fake_quantize_I3/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.concat_5/fake_quantize_I3/AsymmQuant/ReadVariableOp"; +"maskrcnn/tf.concat_5/fake_quantize_I3/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.concat_5/fake_quantize_I3/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"maskrcnn/tf.concat_5/fake_quantize_I3/AsymmQuant/ReadVariableOp" -> "maskrcnn/tf.concat_5/fake_quantize_I3/AsymmQuant/add_1"; +"maskrcnn/tf.concat_5/fake_quantize_I3/AsymmQuant/add_1" -> 
"maskrcnn/tf.concat_5/fake_quantize_I3/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.concat_5/fake_quantize_I3/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.concat_5/fake_quantize_I3/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.concat_5/fake_quantize_I3/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_5/concat"; +"maskrcnn/tf.concat_5/fake_quantize_I4/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.concat_5/fake_quantize_I4/AsymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.concat_5/fake_quantize_I4/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.concat_5/fake_quantize_I4/AsymmQuant/Abs"; +"maskrcnn/tf.concat_5/fake_quantize_I4/AsymmQuant/Abs" -> "maskrcnn/tf.concat_5/fake_quantize_I4/AsymmQuant/add"; +"maskrcnn/tf.concat_5/fake_quantize_I4/AsymmQuant/add/y" -> "maskrcnn/tf.concat_5/fake_quantize_I4/AsymmQuant/add"; +"maskrcnn/tf.concat_5/fake_quantize_I4/AsymmQuant/add" -> "maskrcnn/tf.concat_5/fake_quantize_I4/AsymmQuant/add_1"; +"maskrcnn/tf.concat_5/fake_quantize_I4/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.concat_5/fake_quantize_I4/AsymmQuant/ReadVariableOp"; +"maskrcnn/tf.concat_5/fake_quantize_I4/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.concat_5/fake_quantize_I4/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"maskrcnn/tf.concat_5/fake_quantize_I4/AsymmQuant/ReadVariableOp" -> "maskrcnn/tf.concat_5/fake_quantize_I4/AsymmQuant/add_1"; +"maskrcnn/tf.concat_5/fake_quantize_I4/AsymmQuant/add_1" -> "maskrcnn/tf.concat_5/fake_quantize_I4/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.concat_5/fake_quantize_I4/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.concat_5/fake_quantize_I4/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.concat_5/fake_quantize_I4/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_5/concat"; "maskrcnn/tf.concat_5/concat/axis" -> "maskrcnn/tf.concat_5/concat"; "maskrcnn/tf.concat_5/concat" -> "maskrcnn/tf.compat.v1.gather_nd/GatherNd"; "maskrcnn/tf.stack_5/stack" -> "maskrcnn/tf.compat.v1.gather_nd/GatherNd"; @@ -25348,46 +25412,46 @@ self_1 -> "maskrcnn/image_info/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" "maskrcnn/tf.math.pow/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.math.truediv_3/truediv"; "maskrcnn/tf.expand_dims_12/ExpandDims/dim" -> "maskrcnn/tf.expand_dims_12/ExpandDims"; "maskrcnn/tf.expand_dims_12/ExpandDims" -> "maskrcnn/tf.math.truediv_1/truediv"; -"maskrcnn/tf.math.truediv_2/truediv" -> "maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.truediv_3/truediv" -> "maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.truediv_1/truediv" -> "maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.truediv_2/truediv" -> "maskrcnn/tf.math.truediv_2/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.truediv_3/truediv" -> "maskrcnn/tf.math.truediv_3/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.truediv_1/truediv" -> "maskrcnn/tf.math.truediv_1/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_42/Sub/y" -> "maskrcnn/tf.math.subtract_42/Sub"; "maskrcnn/tf.math.subtract_42/Sub" -> "maskrcnn/tf.cast_23/Cast"; "maskrcnn/tf.math.subtract_42/Sub" -> "maskrcnn/tf.compat.v1.gather_1/GatherV2"; "maskrcnn/tf.math.subtract_42/Sub" -> "maskrcnn/tf.compat.v1.gather/GatherV2"; 
-"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp"; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant_1/Abs/ReadVariableOp"; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant_2/Abs/ReadVariableOp"; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant/Abs"; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant/Abs" -> "maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant/add"; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant/add/y" -> "maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant/add"; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant/add" -> "maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant/add_1"; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp"; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant_1/ReadVariableOp"; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars/ReadVariableOp"; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant_2/ReadVariableOp"; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars/ReadVariableOp"; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp" -> "maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant/add_1"; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant/add_1" -> "maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.expand_dims_14/ExpandDims"; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant_1/Abs/ReadVariableOp" -> "maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant_1/Abs"; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant_1/Abs" -> 
"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant_1/add"; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant_1/add/y" -> "maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant_1/add"; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant_1/add" -> "maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant_1/add_1"; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant_1/ReadVariableOp" -> "maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant_1/add_1"; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant_1/add_1" -> "maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.expand_dims_13/ExpandDims"; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant_2/Abs/ReadVariableOp" -> "maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant_2/Abs"; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant_2/Abs" -> "maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant_2/add"; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant_2/add/y" -> "maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant_2/add"; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant_2/add" -> "maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant_2/add_1"; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant_2/ReadVariableOp" -> "maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant_2/add_1"; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant_2/add_1" -> "maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.__operators__.getitem_27/strided_slice"; +"maskrcnn/tf.math.truediv_2/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.truediv_2/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.truediv_2/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.truediv_2/fake_quantize/AsymmQuant/Abs"; +"maskrcnn/tf.math.truediv_2/fake_quantize/AsymmQuant/Abs" -> "maskrcnn/tf.math.truediv_2/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.truediv_2/fake_quantize/AsymmQuant/add/y" -> "maskrcnn/tf.math.truediv_2/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.truediv_2/fake_quantize/AsymmQuant/add" -> "maskrcnn/tf.math.truediv_2/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.truediv_2/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.truediv_2/fake_quantize/AsymmQuant/ReadVariableOp"; +"maskrcnn/tf.math.truediv_2/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> 
"maskrcnn/tf.math.truediv_2/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"maskrcnn/tf.math.truediv_2/fake_quantize/AsymmQuant/ReadVariableOp" -> "maskrcnn/tf.math.truediv_2/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.truediv_2/fake_quantize/AsymmQuant/add_1" -> "maskrcnn/tf.math.truediv_2/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.truediv_2/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.truediv_2/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.truediv_2/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.expand_dims_14/ExpandDims"; +"maskrcnn/tf.math.truediv_3/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.truediv_3/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.truediv_3/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.truediv_3/fake_quantize/AsymmQuant/Abs"; +"maskrcnn/tf.math.truediv_3/fake_quantize/AsymmQuant/Abs" -> "maskrcnn/tf.math.truediv_3/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.truediv_3/fake_quantize/AsymmQuant/add/y" -> "maskrcnn/tf.math.truediv_3/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.truediv_3/fake_quantize/AsymmQuant/add" -> "maskrcnn/tf.math.truediv_3/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.truediv_3/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.truediv_3/fake_quantize/AsymmQuant/ReadVariableOp"; +"maskrcnn/tf.math.truediv_3/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.truediv_3/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"maskrcnn/tf.math.truediv_3/fake_quantize/AsymmQuant/ReadVariableOp" -> "maskrcnn/tf.math.truediv_3/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.truediv_3/fake_quantize/AsymmQuant/add_1" -> "maskrcnn/tf.math.truediv_3/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.truediv_3/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.truediv_3/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.truediv_3/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.expand_dims_13/ExpandDims"; +"maskrcnn/tf.math.truediv_1/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.truediv_1/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.truediv_1/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.truediv_1/fake_quantize/AsymmQuant/Abs"; +"maskrcnn/tf.math.truediv_1/fake_quantize/AsymmQuant/Abs" -> "maskrcnn/tf.math.truediv_1/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.truediv_1/fake_quantize/AsymmQuant/add/y" -> "maskrcnn/tf.math.truediv_1/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.truediv_1/fake_quantize/AsymmQuant/add" -> "maskrcnn/tf.math.truediv_1/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.truediv_1/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.truediv_1/fake_quantize/AsymmQuant/ReadVariableOp"; +"maskrcnn/tf.math.truediv_1/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.truediv_1/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"maskrcnn/tf.math.truediv_1/fake_quantize/AsymmQuant/ReadVariableOp" -> "maskrcnn/tf.math.truediv_1/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.truediv_1/fake_quantize/AsymmQuant/add_1" -> "maskrcnn/tf.math.truediv_1/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; 
+"maskrcnn/tf.math.truediv_1/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.truediv_1/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.truediv_1/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.__operators__.getitem_27/strided_slice"; "maskrcnn/tf.cast_23/Cast" -> "maskrcnn/tf.cast_23/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.__operators__.getitem_27/strided_slice/stack" -> "maskrcnn/tf.__operators__.getitem_27/strided_slice"; "maskrcnn/tf.__operators__.getitem_27/strided_slice/stack_1" -> "maskrcnn/tf.__operators__.getitem_27/strided_slice"; @@ -25736,9 +25800,9 @@ self_1 -> "maskrcnn/image_info/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" "maskrcnn/tf.math.multiply_32/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.multiply_32/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.multiply_32/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.math.truediv_6/truediv"; "maskrcnn/tf.math.subtract_44/Sub/y" -> "maskrcnn/tf.math.subtract_44/Sub"; -"maskrcnn/tf.math.subtract_44/Sub" -> "maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_44/Sub" -> "maskrcnn/tf.math.subtract_44/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_43/Sub/y" -> "maskrcnn/tf.math.subtract_43/Sub"; -"maskrcnn/tf.math.subtract_43/Sub" -> "maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_43/Sub" -> "maskrcnn/tf.math.subtract_43/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.__operators__.getitem_54/strided_slice/stack" -> "maskrcnn/tf.__operators__.getitem_54/strided_slice"; "maskrcnn/tf.__operators__.getitem_54/strided_slice/stack_1" -> "maskrcnn/tf.__operators__.getitem_54/strided_slice"; "maskrcnn/tf.__operators__.getitem_54/strided_slice/stack_2" -> "maskrcnn/tf.__operators__.getitem_54/strided_slice"; @@ -25858,28 +25922,28 @@ self_1 -> "maskrcnn/image_info/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" "maskrcnn/tf.math.truediv_8/truediv" -> "maskrcnn/tf.math.truediv_8/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.truediv_6/truediv/y" -> "maskrcnn/tf.math.truediv_6/truediv"; "maskrcnn/tf.math.truediv_6/truediv" -> "maskrcnn/tf.math.truediv_6/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp"; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/AsymmQuant_1/Abs/ReadVariableOp"; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/AsymmQuant/Abs"; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/AsymmQuant/Abs" -> "maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/AsymmQuant/add"; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/AsymmQuant/add/y" -> "maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/AsymmQuant/add"; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/AsymmQuant/add" -> 
"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/AsymmQuant/add_1"; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp"; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/AsymmQuant_1/ReadVariableOp"; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars/ReadVariableOp"; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp" -> "maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/AsymmQuant/add_1"; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/AsymmQuant/add_1" -> "maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.expand_dims_16/ExpandDims"; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/AsymmQuant_1/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/AsymmQuant_1/Abs"; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/AsymmQuant_1/Abs" -> "maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/AsymmQuant_1/add"; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/AsymmQuant_1/add/y" -> "maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/AsymmQuant_1/add"; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/AsymmQuant_1/add" -> "maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/AsymmQuant_1/add_1"; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/AsymmQuant_1/ReadVariableOp" -> "maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/AsymmQuant_1/add_1"; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/AsymmQuant_1/add_1" -> "maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.expand_dims_15/ExpandDims"; +"maskrcnn/tf.math.subtract_44/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_44/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.subtract_44/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_44/fake_quantize/AsymmQuant/Abs"; +"maskrcnn/tf.math.subtract_44/fake_quantize/AsymmQuant/Abs" -> "maskrcnn/tf.math.subtract_44/fake_quantize/AsymmQuant/add"; 
+"maskrcnn/tf.math.subtract_44/fake_quantize/AsymmQuant/add/y" -> "maskrcnn/tf.math.subtract_44/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.subtract_44/fake_quantize/AsymmQuant/add" -> "maskrcnn/tf.math.subtract_44/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.subtract_44/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_44/fake_quantize/AsymmQuant/ReadVariableOp"; +"maskrcnn/tf.math.subtract_44/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_44/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"maskrcnn/tf.math.subtract_44/fake_quantize/AsymmQuant/ReadVariableOp" -> "maskrcnn/tf.math.subtract_44/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.subtract_44/fake_quantize/AsymmQuant/add_1" -> "maskrcnn/tf.math.subtract_44/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_44/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.subtract_44/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_44/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.expand_dims_16/ExpandDims"; +"maskrcnn/tf.math.subtract_43/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_43/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.subtract_43/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_43/fake_quantize/AsymmQuant/Abs"; +"maskrcnn/tf.math.subtract_43/fake_quantize/AsymmQuant/Abs" -> "maskrcnn/tf.math.subtract_43/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.subtract_43/fake_quantize/AsymmQuant/add/y" -> "maskrcnn/tf.math.subtract_43/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.subtract_43/fake_quantize/AsymmQuant/add" -> "maskrcnn/tf.math.subtract_43/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.subtract_43/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_43/fake_quantize/AsymmQuant/ReadVariableOp"; +"maskrcnn/tf.math.subtract_43/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_43/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"maskrcnn/tf.math.subtract_43/fake_quantize/AsymmQuant/ReadVariableOp" -> "maskrcnn/tf.math.subtract_43/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.subtract_43/fake_quantize/AsymmQuant/add_1" -> "maskrcnn/tf.math.subtract_43/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_43/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.subtract_43/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_43/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.expand_dims_15/ExpandDims"; "maskrcnn/tf.__operators__.add_53/AddV2" -> "maskrcnn/tf.__operators__.add_53/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.__operators__.add_51/AddV2" -> "maskrcnn/tf.__operators__.add_51/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.__operators__.add_49/AddV2" -> "maskrcnn/tf.__operators__.add_49/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; @@ -26971,42 +27035,36 @@ self_1 -> "maskrcnn/image_info/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" "maskrcnn/tf.math.multiply_57/fake_quantize/AsymmQuant/add_1" -> "maskrcnn/tf.math.multiply_57/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.multiply_57/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> 
"maskrcnn/tf.math.multiply_57/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.multiply_57/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.math.subtract_53/Sub"; -"maskrcnn/tf.math.subtract_54/Sub" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_53/Sub" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_54/Sub" -> "maskrcnn/tf.math.subtract_54/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_53/Sub" -> "maskrcnn/tf.math.subtract_53/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.__operators__.getitem_60/strided_slice/stack" -> "maskrcnn/tf.__operators__.getitem_60/strided_slice"; "maskrcnn/tf.__operators__.getitem_60/strided_slice/stack_1" -> "maskrcnn/tf.__operators__.getitem_60/strided_slice"; "maskrcnn/tf.__operators__.getitem_60/strided_slice/stack_2" -> "maskrcnn/tf.__operators__.getitem_60/strided_slice"; "maskrcnn/tf.__operators__.getitem_60/strided_slice" -> "maskrcnn/tf.unstack_5/unstack"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_1/Abs/ReadVariableOp"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_2/Abs/ReadVariableOp"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_3/Abs/ReadVariableOp"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant/Abs"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant/Abs" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant/add"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant/add/y" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant/add"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant/add" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant/add_1"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_1/ReadVariableOp"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars/ReadVariableOp"; 
-"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_2/ReadVariableOp"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars/ReadVariableOp"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_3/ReadVariableOp"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars/ReadVariableOp"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant/add_1"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant/add_1" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.__operators__.add_65/AddV2"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_13/concat"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_1/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_1/Abs"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_1/Abs" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_1/add"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_1/add/y" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_1/add"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_1/add" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_1/add_1"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_1/ReadVariableOp" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_1/add_1"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_1/add_1" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.__operators__.add_66/AddV2"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_13/concat"; +"maskrcnn/tf.math.subtract_53/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_53/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.subtract_53/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> 
"maskrcnn/tf.math.subtract_53/fake_quantize/AsymmQuant/Abs"; +"maskrcnn/tf.math.subtract_53/fake_quantize/AsymmQuant/Abs" -> "maskrcnn/tf.math.subtract_53/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.subtract_53/fake_quantize/AsymmQuant/add/y" -> "maskrcnn/tf.math.subtract_53/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.subtract_53/fake_quantize/AsymmQuant/add" -> "maskrcnn/tf.math.subtract_53/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.subtract_53/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_53/fake_quantize/AsymmQuant/ReadVariableOp"; +"maskrcnn/tf.math.subtract_53/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_53/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"maskrcnn/tf.math.subtract_53/fake_quantize/AsymmQuant/ReadVariableOp" -> "maskrcnn/tf.math.subtract_53/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.subtract_53/fake_quantize/AsymmQuant/add_1" -> "maskrcnn/tf.math.subtract_53/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_53/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.subtract_53/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_53/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.__operators__.add_65/AddV2"; +"maskrcnn/tf.math.subtract_53/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_13/concat"; +"maskrcnn/tf.math.subtract_54/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_54/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.subtract_54/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_54/fake_quantize/AsymmQuant/Abs"; +"maskrcnn/tf.math.subtract_54/fake_quantize/AsymmQuant/Abs" -> "maskrcnn/tf.math.subtract_54/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.subtract_54/fake_quantize/AsymmQuant/add/y" -> "maskrcnn/tf.math.subtract_54/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.subtract_54/fake_quantize/AsymmQuant/add" -> "maskrcnn/tf.math.subtract_54/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.subtract_54/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_54/fake_quantize/AsymmQuant/ReadVariableOp"; +"maskrcnn/tf.math.subtract_54/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_54/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"maskrcnn/tf.math.subtract_54/fake_quantize/AsymmQuant/ReadVariableOp" -> "maskrcnn/tf.math.subtract_54/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.subtract_54/fake_quantize/AsymmQuant/add_1" -> "maskrcnn/tf.math.subtract_54/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_54/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.subtract_54/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_54/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.__operators__.add_66/AddV2"; +"maskrcnn/tf.math.subtract_54/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_13/concat"; "maskrcnn/tf.__operators__.add_66/AddV2" -> "maskrcnn/tf.__operators__.add_66/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.__operators__.add_65/AddV2" -> "maskrcnn/tf.__operators__.add_65/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.unstack_5/unstack" -> "maskrcnn/tf.math.subtract_60/Sub"; @@ -27044,9 
+27102,9 @@ self_1 -> "maskrcnn/image_info/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" "maskrcnn/tf.math.subtract_57/Sub/y" -> "maskrcnn/tf.math.subtract_57/Sub"; "maskrcnn/tf.math.subtract_57/Sub" -> "maskrcnn/tf.math.subtract_57/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_56/Sub/y" -> "maskrcnn/tf.math.subtract_56/Sub"; -"maskrcnn/tf.math.subtract_56/Sub" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_56/Sub" -> "maskrcnn/tf.math.subtract_56/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_55/Sub/y" -> "maskrcnn/tf.math.subtract_55/Sub"; -"maskrcnn/tf.math.subtract_55/Sub" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_55/Sub" -> "maskrcnn/tf.math.subtract_55/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_57/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_57/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; "maskrcnn/tf.math.subtract_57/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_57/fake_quantize/AsymmQuant/Abs"; "maskrcnn/tf.math.subtract_57/fake_quantize/AsymmQuant/Abs" -> "maskrcnn/tf.math.subtract_57/fake_quantize/AsymmQuant/add"; @@ -27091,22 +27149,28 @@ self_1 -> "maskrcnn/image_info/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" "maskrcnn/tf.math.subtract_60/fake_quantize/AsymmQuant/add_1" -> "maskrcnn/tf.math.subtract_60/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_60/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.subtract_60/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_60/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.stack_15/stack"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_2/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_2/Abs"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_2/Abs" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_2/add"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_2/add/y" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_2/add"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_2/add" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_2/add_1"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_2/ReadVariableOp" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_2/add_1"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_2/add_1" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_13/concat"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_3/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_3/Abs"; 
-"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_3/Abs" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_3/add"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_3/add/y" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_3/add"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_3/add" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_3/add_1"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_3/ReadVariableOp" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_3/add_1"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_3/add_1" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_13/concat"; +"maskrcnn/tf.math.subtract_55/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_55/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.subtract_55/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_55/fake_quantize/AsymmQuant/Abs"; +"maskrcnn/tf.math.subtract_55/fake_quantize/AsymmQuant/Abs" -> "maskrcnn/tf.math.subtract_55/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.subtract_55/fake_quantize/AsymmQuant/add/y" -> "maskrcnn/tf.math.subtract_55/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.subtract_55/fake_quantize/AsymmQuant/add" -> "maskrcnn/tf.math.subtract_55/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.subtract_55/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_55/fake_quantize/AsymmQuant/ReadVariableOp"; +"maskrcnn/tf.math.subtract_55/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_55/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"maskrcnn/tf.math.subtract_55/fake_quantize/AsymmQuant/ReadVariableOp" -> "maskrcnn/tf.math.subtract_55/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.subtract_55/fake_quantize/AsymmQuant/add_1" -> "maskrcnn/tf.math.subtract_55/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_55/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.subtract_55/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_55/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_13/concat"; +"maskrcnn/tf.math.subtract_56/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_56/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.subtract_56/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_56/fake_quantize/AsymmQuant/Abs"; +"maskrcnn/tf.math.subtract_56/fake_quantize/AsymmQuant/Abs" -> "maskrcnn/tf.math.subtract_56/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.subtract_56/fake_quantize/AsymmQuant/add/y" -> "maskrcnn/tf.math.subtract_56/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.subtract_56/fake_quantize/AsymmQuant/add" -> "maskrcnn/tf.math.subtract_56/fake_quantize/AsymmQuant/add_1"; 
+"maskrcnn/tf.math.subtract_56/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_56/fake_quantize/AsymmQuant/ReadVariableOp"; +"maskrcnn/tf.math.subtract_56/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_56/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"maskrcnn/tf.math.subtract_56/fake_quantize/AsymmQuant/ReadVariableOp" -> "maskrcnn/tf.math.subtract_56/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.subtract_56/fake_quantize/AsymmQuant/add_1" -> "maskrcnn/tf.math.subtract_56/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_56/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.subtract_56/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_56/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_13/concat"; "maskrcnn/tf.concat_13/concat/axis" -> "maskrcnn/tf.concat_13/concat"; "maskrcnn/tf.concat_13/concat" -> "maskrcnn/tf.math.minimum_22/Minimum"; "maskrcnn/tf.stack_15/stack" -> "maskrcnn/tf.math.minimum_22/Minimum"; @@ -27325,46 +27389,46 @@ self_1 -> "maskrcnn/image_info/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" "maskrcnn/tf.math.pow_2/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.math.truediv_28/truediv"; "maskrcnn/tf.expand_dims_24/ExpandDims/dim" -> "maskrcnn/tf.expand_dims_24/ExpandDims"; "maskrcnn/tf.expand_dims_24/ExpandDims" -> "maskrcnn/tf.math.truediv_26/truediv"; -"maskrcnn/tf.math.truediv_27/truediv" -> "maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.truediv_28/truediv" -> "maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.truediv_26/truediv" -> "maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.truediv_27/truediv" -> "maskrcnn/tf.math.truediv_27/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.truediv_28/truediv" -> "maskrcnn/tf.math.truediv_28/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.truediv_26/truediv" -> "maskrcnn/tf.math.truediv_26/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_64/Sub/y" -> "maskrcnn/tf.math.subtract_64/Sub"; "maskrcnn/tf.math.subtract_64/Sub" -> "maskrcnn/tf.cast_37/Cast"; "maskrcnn/tf.math.subtract_64/Sub" -> "maskrcnn/tf.compat.v1.gather_4/GatherV2"; "maskrcnn/tf.math.subtract_64/Sub" -> "maskrcnn/tf.compat.v1.gather_3/GatherV2"; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp"; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant_1/Abs/ReadVariableOp"; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant_2/Abs/ReadVariableOp"; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant/Abs"; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant/Abs" -> 
"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant/add"; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant/add/y" -> "maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant/add"; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant/add" -> "maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant/add_1"; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp"; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant_1/ReadVariableOp"; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars/ReadVariableOp"; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant_2/ReadVariableOp"; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars/ReadVariableOp"; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp" -> "maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant/add_1"; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant/add_1" -> "maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.expand_dims_26/ExpandDims"; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant_1/Abs/ReadVariableOp" -> "maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant_1/Abs"; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant_1/Abs" -> "maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant_1/add"; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant_1/add/y" -> "maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant_1/add"; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant_1/add" -> "maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant_1/add_1"; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant_1/ReadVariableOp" -> "maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant_1/add_1"; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant_1/add_1" -> "maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars/ReadVariableOp" -> 
"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.expand_dims_25/ExpandDims"; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant_2/Abs/ReadVariableOp" -> "maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant_2/Abs"; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant_2/Abs" -> "maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant_2/add"; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant_2/add/y" -> "maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant_2/add"; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant_2/add" -> "maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant_2/add_1"; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant_2/ReadVariableOp" -> "maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant_2/add_1"; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant_2/add_1" -> "maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.__operators__.getitem_76/strided_slice"; +"maskrcnn/tf.math.truediv_27/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.truediv_27/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.truediv_27/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.truediv_27/fake_quantize/AsymmQuant/Abs"; +"maskrcnn/tf.math.truediv_27/fake_quantize/AsymmQuant/Abs" -> "maskrcnn/tf.math.truediv_27/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.truediv_27/fake_quantize/AsymmQuant/add/y" -> "maskrcnn/tf.math.truediv_27/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.truediv_27/fake_quantize/AsymmQuant/add" -> "maskrcnn/tf.math.truediv_27/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.truediv_27/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.truediv_27/fake_quantize/AsymmQuant/ReadVariableOp"; +"maskrcnn/tf.math.truediv_27/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.truediv_27/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"maskrcnn/tf.math.truediv_27/fake_quantize/AsymmQuant/ReadVariableOp" -> "maskrcnn/tf.math.truediv_27/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.truediv_27/fake_quantize/AsymmQuant/add_1" -> "maskrcnn/tf.math.truediv_27/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.truediv_27/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.truediv_27/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.truediv_27/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.expand_dims_26/ExpandDims"; +"maskrcnn/tf.math.truediv_28/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.truediv_28/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.truediv_28/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> 
"maskrcnn/tf.math.truediv_28/fake_quantize/AsymmQuant/Abs"; +"maskrcnn/tf.math.truediv_28/fake_quantize/AsymmQuant/Abs" -> "maskrcnn/tf.math.truediv_28/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.truediv_28/fake_quantize/AsymmQuant/add/y" -> "maskrcnn/tf.math.truediv_28/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.truediv_28/fake_quantize/AsymmQuant/add" -> "maskrcnn/tf.math.truediv_28/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.truediv_28/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.truediv_28/fake_quantize/AsymmQuant/ReadVariableOp"; +"maskrcnn/tf.math.truediv_28/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.truediv_28/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"maskrcnn/tf.math.truediv_28/fake_quantize/AsymmQuant/ReadVariableOp" -> "maskrcnn/tf.math.truediv_28/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.truediv_28/fake_quantize/AsymmQuant/add_1" -> "maskrcnn/tf.math.truediv_28/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.truediv_28/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.truediv_28/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.truediv_28/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.expand_dims_25/ExpandDims"; +"maskrcnn/tf.math.truediv_26/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.truediv_26/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.truediv_26/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.truediv_26/fake_quantize/AsymmQuant/Abs"; +"maskrcnn/tf.math.truediv_26/fake_quantize/AsymmQuant/Abs" -> "maskrcnn/tf.math.truediv_26/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.truediv_26/fake_quantize/AsymmQuant/add/y" -> "maskrcnn/tf.math.truediv_26/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.truediv_26/fake_quantize/AsymmQuant/add" -> "maskrcnn/tf.math.truediv_26/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.truediv_26/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.truediv_26/fake_quantize/AsymmQuant/ReadVariableOp"; +"maskrcnn/tf.math.truediv_26/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.truediv_26/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"maskrcnn/tf.math.truediv_26/fake_quantize/AsymmQuant/ReadVariableOp" -> "maskrcnn/tf.math.truediv_26/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.truediv_26/fake_quantize/AsymmQuant/add_1" -> "maskrcnn/tf.math.truediv_26/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.truediv_26/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.truediv_26/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.truediv_26/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.__operators__.getitem_76/strided_slice"; "maskrcnn/tf.cast_37/Cast" -> "maskrcnn/tf.cast_37/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.__operators__.getitem_76/strided_slice/stack" -> "maskrcnn/tf.__operators__.getitem_76/strided_slice"; "maskrcnn/tf.__operators__.getitem_76/strided_slice/stack_1" -> "maskrcnn/tf.__operators__.getitem_76/strided_slice"; @@ -27993,9 +28057,9 @@ self_1 -> "maskrcnn/image_info/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" "maskrcnn/tf.math.multiply_61/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> 
"maskrcnn/tf.math.multiply_61/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.multiply_61/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.math.truediv_31/truediv"; "maskrcnn/tf.math.subtract_66/Sub/y" -> "maskrcnn/tf.math.subtract_66/Sub"; -"maskrcnn/tf.math.subtract_66/Sub" -> "maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_66/Sub" -> "maskrcnn/tf.math.subtract_66/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_65/Sub/y" -> "maskrcnn/tf.math.subtract_65/Sub"; -"maskrcnn/tf.math.subtract_65/Sub" -> "maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_65/Sub" -> "maskrcnn/tf.math.subtract_65/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.__operators__.getitem_131/strided_slice/stack" -> "maskrcnn/tf.__operators__.getitem_131/strided_slice"; "maskrcnn/tf.__operators__.getitem_131/strided_slice/stack_1" -> "maskrcnn/tf.__operators__.getitem_131/strided_slice"; "maskrcnn/tf.__operators__.getitem_131/strided_slice/stack_2" -> "maskrcnn/tf.__operators__.getitem_131/strided_slice"; @@ -28234,28 +28298,28 @@ self_1 -> "maskrcnn/image_info/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" "maskrcnn/tf.math.truediv_33/truediv" -> "maskrcnn/tf.math.truediv_33/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.truediv_31/truediv/y" -> "maskrcnn/tf.math.truediv_31/truediv"; "maskrcnn/tf.math.truediv_31/truediv" -> "maskrcnn/tf.math.truediv_31/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp"; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/AsymmQuant_1/Abs/ReadVariableOp"; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/AsymmQuant/Abs"; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/AsymmQuant/Abs" -> "maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/AsymmQuant/add"; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/AsymmQuant/add/y" -> "maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/AsymmQuant/add"; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/AsymmQuant/add" -> "maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/AsymmQuant/add_1"; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp"; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/AsymmQuant_1/ReadVariableOp"; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> 
"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars/ReadVariableOp"; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp" -> "maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/AsymmQuant/add_1"; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/AsymmQuant/add_1" -> "maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.expand_dims_28/ExpandDims"; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/AsymmQuant_1/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/AsymmQuant_1/Abs"; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/AsymmQuant_1/Abs" -> "maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/AsymmQuant_1/add"; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/AsymmQuant_1/add/y" -> "maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/AsymmQuant_1/add"; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/AsymmQuant_1/add" -> "maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/AsymmQuant_1/add_1"; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/AsymmQuant_1/ReadVariableOp" -> "maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/AsymmQuant_1/add_1"; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/AsymmQuant_1/add_1" -> "maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.expand_dims_27/ExpandDims"; +"maskrcnn/tf.math.subtract_66/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_66/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.subtract_66/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_66/fake_quantize/AsymmQuant/Abs"; +"maskrcnn/tf.math.subtract_66/fake_quantize/AsymmQuant/Abs" -> "maskrcnn/tf.math.subtract_66/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.subtract_66/fake_quantize/AsymmQuant/add/y" -> "maskrcnn/tf.math.subtract_66/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.subtract_66/fake_quantize/AsymmQuant/add" -> "maskrcnn/tf.math.subtract_66/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.subtract_66/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_66/fake_quantize/AsymmQuant/ReadVariableOp"; +"maskrcnn/tf.math.subtract_66/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_66/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"maskrcnn/tf.math.subtract_66/fake_quantize/AsymmQuant/ReadVariableOp" -> "maskrcnn/tf.math.subtract_66/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.subtract_66/fake_quantize/AsymmQuant/add_1" -> 
"maskrcnn/tf.math.subtract_66/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_66/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.subtract_66/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_66/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.expand_dims_28/ExpandDims"; +"maskrcnn/tf.math.subtract_65/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_65/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.subtract_65/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_65/fake_quantize/AsymmQuant/Abs"; +"maskrcnn/tf.math.subtract_65/fake_quantize/AsymmQuant/Abs" -> "maskrcnn/tf.math.subtract_65/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.subtract_65/fake_quantize/AsymmQuant/add/y" -> "maskrcnn/tf.math.subtract_65/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.subtract_65/fake_quantize/AsymmQuant/add" -> "maskrcnn/tf.math.subtract_65/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.subtract_65/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_65/fake_quantize/AsymmQuant/ReadVariableOp"; +"maskrcnn/tf.math.subtract_65/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_65/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"maskrcnn/tf.math.subtract_65/fake_quantize/AsymmQuant/ReadVariableOp" -> "maskrcnn/tf.math.subtract_65/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.subtract_65/fake_quantize/AsymmQuant/add_1" -> "maskrcnn/tf.math.subtract_65/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_65/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.subtract_65/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_65/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.expand_dims_27/ExpandDims"; "maskrcnn/tf.__operators__.add_96/AddV2" -> "maskrcnn/tf.__operators__.add_96/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.__operators__.add_94/AddV2" -> "maskrcnn/tf.__operators__.add_94/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.__operators__.add_92/AddV2" -> "maskrcnn/tf.__operators__.add_92/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; diff --git a/tests/tensorflow/data/reference_graphs/2.4/quantized/w_sym_ch_a_asym_t/retinanet.pb b/tests/tensorflow/data/reference_graphs/2.4/quantized/w_sym_ch_a_asym_t/retinanet.pb index 933bef9928d..c1f32dabace 100644 --- a/tests/tensorflow/data/reference_graphs/2.4/quantized/w_sym_ch_a_asym_t/retinanet.pb +++ b/tests/tensorflow/data/reference_graphs/2.4/quantized/w_sym_ch_a_asym_t/retinanet.pb @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:16fc75e88a32b0f8cc863de41eb52f9f3462564c02603f9fe77b07491573de66 -size 1409182 +oid sha256:40d4ddb6534e0876183abebe2c031d280a24eefa95f2c4a0914461ff4bc1307b +size 1413729 diff --git a/tests/tensorflow/data/reference_graphs/2.4/quantized/w_sym_ch_a_asym_t/retinanet_quantize_outputs.pb b/tests/tensorflow/data/reference_graphs/2.4/quantized/w_sym_ch_a_asym_t/retinanet_quantize_outputs.pb index 15cef0374d5..1765222fe46 100644 --- a/tests/tensorflow/data/reference_graphs/2.4/quantized/w_sym_ch_a_asym_t/retinanet_quantize_outputs.pb +++ b/tests/tensorflow/data/reference_graphs/2.4/quantized/w_sym_ch_a_asym_t/retinanet_quantize_outputs.pb @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 
-oid sha256:af28ebbc39298c3c2187a6900c2e35cadee2d9c3ba89faf2442519d5e0b93707 -size 1452303 +oid sha256:dbb94add5242244fc8a44adbf2bc7c2aa6af2a6c9a6c602979c4cd93647a2087 +size 1456850 diff --git a/tests/tensorflow/data/reference_graphs/2.4/quantized/w_sym_t_a_sym_t/densenet121.pb b/tests/tensorflow/data/reference_graphs/2.4/quantized/w_sym_t_a_sym_t/densenet121.pb index fd60c20290d..5d8221d8287 100644 --- a/tests/tensorflow/data/reference_graphs/2.4/quantized/w_sym_t_a_sym_t/densenet121.pb +++ b/tests/tensorflow/data/reference_graphs/2.4/quantized/w_sym_t_a_sym_t/densenet121.pb @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:bc211bf3663a28e16d6f56441f323aec60e19cf4267877ac2f3c4aecf245d02c -size 983951 +oid sha256:f4e71d24c330cff878c923e62c63cf33403418b30d40f7fb48b97d1240cdc6c3 +size 985729 diff --git a/tests/tensorflow/data/reference_graphs/2.4/quantized/w_sym_t_a_sym_t/inception_v3.dot b/tests/tensorflow/data/reference_graphs/2.4/quantized/w_sym_t_a_sym_t/inception_v3.dot index ed7680de599..613caefbb22 100644 --- a/tests/tensorflow/data/reference_graphs/2.4/quantized/w_sym_t_a_sym_t/inception_v3.dot +++ b/tests/tensorflow/data/reference_graphs/2.4/quantized/w_sym_t_a_sym_t/inception_v3.dot @@ -2755,18 +2755,6 @@ self [op=Placeholder]; "inception_v3/conv2d_87/SymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; "inception_v3/conv2d_87/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; "inception_v3/conv2d_87/Conv2D" [op=Conv2D]; -"inception_v3/conv2d_85/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; -"inception_v3/conv2d_85/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/conv2d_85/SymmQuant/Abs" [op=Abs]; -"inception_v3/conv2d_85/SymmQuant/add/y" [op=Const]; -"inception_v3/conv2d_85/SymmQuant/add" [op=AddV2]; -"inception_v3/conv2d_85/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; -"inception_v3/conv2d_85/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/conv2d_85/SymmQuant/mul" [op=Mul]; -"inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp/resource" [op=Placeholder]; -"inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"inception_v3/conv2d_85/Conv2D" [op=Conv2D]; "inception_v3/conv2d_93/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; "inception_v3/conv2d_93/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; "inception_v3/conv2d_93/SymmQuant/Abs" [op=Abs]; @@ -2811,14 +2799,18 @@ self [op=Placeholder]; "inception_v3/batch_normalization_87/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; "inception_v3/batch_normalization_87/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; "inception_v3/batch_normalization_87/FusedBatchNormV3" [op=FusedBatchNormV3]; -"inception_v3/batch_normalization_85/scale" [op=Placeholder]; -"inception_v3/batch_normalization_85/ReadVariableOp/resource" [op=Placeholder]; -"inception_v3/batch_normalization_85/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; -"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; -"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; -"inception_v3/batch_normalization_85/FusedBatchNormV3" 
[op=FusedBatchNormV3]; +"inception_v3/conv2d_85/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/conv2d_85/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/conv2d_85/SymmQuant/Abs" [op=Abs]; +"inception_v3/conv2d_85/SymmQuant/add/y" [op=Const]; +"inception_v3/conv2d_85/SymmQuant/add" [op=AddV2]; +"inception_v3/conv2d_85/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/conv2d_85/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/conv2d_85/SymmQuant/mul" [op=Mul]; +"inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"inception_v3/conv2d_85/Conv2D" [op=Conv2D]; "inception_v3/batch_normalization_93/scale" [op=Placeholder]; "inception_v3/batch_normalization_93/ReadVariableOp/resource" [op=Placeholder]; "inception_v3/batch_normalization_93/ReadVariableOp" [op=ReadVariableOp]; @@ -2831,56 +2823,74 @@ self [op=Placeholder]; "inception_v3/activation_91/Relu" [op=Relu]; "inception_v3/activation_88/Relu" [op=Relu]; "inception_v3/activation_87/Relu" [op=Relu]; -"inception_v3/activation_85/Relu" [op=Relu]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant/Abs" [op=Abs]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant/add/y" [op=Const]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant/add" [op=AddV2]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant/mul" [op=Mul]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"inception_v3/batch_normalization_85/scale" [op=Placeholder]; +"inception_v3/batch_normalization_85/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/batch_normalization_85/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_v3/batch_normalization_85/FusedBatchNormV3" [op=FusedBatchNormV3]; "inception_v3/activation_93/Relu" [op=Relu]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_1/Abs/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_1/Abs" [op=Abs]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_1/add/y" [op=Const]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_1/add" [op=AddV2]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_1/mul/ReadVariableOp" [op=ReadVariableOp]; 
-"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_1/mul" [op=Mul]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_2/Abs/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_2/Abs" [op=Abs]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_2/add/y" [op=Const]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_2/add" [op=AddV2]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_2/mul/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_2/mul" [op=Mul]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_2/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_3/Abs/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_3/Abs" [op=Abs]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_3/add/y" [op=Const]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_3/add" [op=AddV2]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_3/mul/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_3/mul" [op=Mul]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_3/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_4/Abs/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_4/Abs" [op=Abs]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_4/add/y" [op=Const]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_4/add" [op=AddV2]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_4/mul/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_4/mul" [op=Mul]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_4/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"inception_v3/activation_91/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/activation_91/fake_quantize/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_91/fake_quantize/SymmQuant/Abs" [op=Abs]; +"inception_v3/activation_91/fake_quantize/SymmQuant/add/y" [op=Const]; +"inception_v3/activation_91/fake_quantize/SymmQuant/add" [op=AddV2]; +"inception_v3/activation_91/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/activation_91/fake_quantize/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_91/fake_quantize/SymmQuant/mul" [op=Mul]; +"inception_v3/activation_91/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"inception_v3/activation_92/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/activation_92/fake_quantize/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_92/fake_quantize/SymmQuant/Abs" [op=Abs]; +"inception_v3/activation_92/fake_quantize/SymmQuant/add/y" [op=Const]; 
+"inception_v3/activation_92/fake_quantize/SymmQuant/add" [op=AddV2]; +"inception_v3/activation_92/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/activation_92/fake_quantize/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_92/fake_quantize/SymmQuant/mul" [op=Mul]; +"inception_v3/activation_92/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"inception_v3/activation_87/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/activation_87/fake_quantize/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_87/fake_quantize/SymmQuant/Abs" [op=Abs]; +"inception_v3/activation_87/fake_quantize/SymmQuant/add/y" [op=Const]; +"inception_v3/activation_87/fake_quantize/SymmQuant/add" [op=AddV2]; +"inception_v3/activation_87/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/activation_87/fake_quantize/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_87/fake_quantize/SymmQuant/mul" [op=Mul]; +"inception_v3/activation_87/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"inception_v3/activation_88/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/activation_88/fake_quantize/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_88/fake_quantize/SymmQuant/Abs" [op=Abs]; +"inception_v3/activation_88/fake_quantize/SymmQuant/add/y" [op=Const]; +"inception_v3/activation_88/fake_quantize/SymmQuant/add" [op=AddV2]; +"inception_v3/activation_88/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/activation_88/fake_quantize/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_88/fake_quantize/SymmQuant/mul" [op=Mul]; +"inception_v3/activation_88/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"inception_v3/activation_85/Relu" [op=Relu]; +"inception_v3/activation_85/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/activation_85/fake_quantize/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_85/fake_quantize/SymmQuant/Abs" [op=Abs]; +"inception_v3/activation_85/fake_quantize/SymmQuant/add/y" [op=Const]; +"inception_v3/activation_85/fake_quantize/SymmQuant/add" [op=AddV2]; +"inception_v3/activation_85/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/activation_85/fake_quantize/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_85/fake_quantize/SymmQuant/mul" [op=Mul]; +"inception_v3/activation_85/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; "inception_v3/mixed9_1/concat/axis" [op=Const]; "inception_v3/mixed9_1/concat" [op=ConcatV2]; "inception_v3/concatenate_1/concat/axis" [op=Const]; "inception_v3/concatenate_1/concat" [op=ConcatV2]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_5/Abs/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_5/Abs" [op=Abs]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_5/add/y" [op=Const]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_5/add" [op=AddV2]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_5/mul/ReadVariableOp" [op=ReadVariableOp]; 
-"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_5/mul" [op=Mul]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_5/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"inception_v3/activation_93/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/activation_93/fake_quantize/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_93/fake_quantize/SymmQuant/Abs" [op=Abs]; +"inception_v3/activation_93/fake_quantize/SymmQuant/add/y" [op=Const]; +"inception_v3/activation_93/fake_quantize/SymmQuant/add" [op=AddV2]; +"inception_v3/activation_93/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/activation_93/fake_quantize/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_93/fake_quantize/SymmQuant/mul" [op=Mul]; +"inception_v3/activation_93/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; "inception_v3/mixed10/concat/axis" [op=Const]; "inception_v3/mixed10/concat" [op=ConcatV2]; "inception_v3/avg_pool/Mean/reduction_indices" [op=Const]; @@ -5952,19 +5962,6 @@ self -> "inception_v3/input_1/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; "inception_v3/conv2d_87/SymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "inception_v3/conv2d_87/SymmQuant/FakeQuantWithMinMaxVars"; "inception_v3/conv2d_87/SymmQuant/FakeQuantWithMinMaxVars" -> "inception_v3/conv2d_87/Conv2D"; "inception_v3/conv2d_87/Conv2D" -> "inception_v3/batch_normalization_87/FusedBatchNormV3"; -"inception_v3/conv2d_85/SymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/conv2d_85/SymmQuant/Abs/ReadVariableOp"; -"inception_v3/conv2d_85/SymmQuant/Abs/ReadVariableOp" -> "inception_v3/conv2d_85/SymmQuant/Abs"; -"inception_v3/conv2d_85/SymmQuant/Abs" -> "inception_v3/conv2d_85/SymmQuant/add"; -"inception_v3/conv2d_85/SymmQuant/add/y" -> "inception_v3/conv2d_85/SymmQuant/add"; -"inception_v3/conv2d_85/SymmQuant/add" -> "inception_v3/conv2d_85/SymmQuant/mul"; -"inception_v3/conv2d_85/SymmQuant/add" -> "inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVars"; -"inception_v3/conv2d_85/SymmQuant/mul/ReadVariableOp/resource" -> "inception_v3/conv2d_85/SymmQuant/mul/ReadVariableOp"; -"inception_v3/conv2d_85/SymmQuant/mul/ReadVariableOp" -> "inception_v3/conv2d_85/SymmQuant/mul"; -"inception_v3/conv2d_85/SymmQuant/mul" -> "inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVars"; -"inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp/resource" -> "inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; -"inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVars"; -"inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVars" -> "inception_v3/conv2d_85/Conv2D"; -"inception_v3/conv2d_85/Conv2D" -> "inception_v3/batch_normalization_85/FusedBatchNormV3"; "inception_v3/conv2d_93/SymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/conv2d_93/SymmQuant/Abs/ReadVariableOp"; "inception_v3/conv2d_93/SymmQuant/Abs/ReadVariableOp" -> "inception_v3/conv2d_93/SymmQuant/Abs"; "inception_v3/conv2d_93/SymmQuant/Abs" -> "inception_v3/conv2d_93/SymmQuant/add"; @@ -6010,14 +6007,19 @@ self -> "inception_v3/input_1/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; "inception_v3/batch_normalization_87/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_v3/batch_normalization_87/FusedBatchNormV3/ReadVariableOp_1"; 
"inception_v3/batch_normalization_87/FusedBatchNormV3/ReadVariableOp_1" -> "inception_v3/batch_normalization_87/FusedBatchNormV3"; "inception_v3/batch_normalization_87/FusedBatchNormV3" -> "inception_v3/activation_87/Relu"; -"inception_v3/batch_normalization_85/scale" -> "inception_v3/batch_normalization_85/FusedBatchNormV3"; -"inception_v3/batch_normalization_85/ReadVariableOp/resource" -> "inception_v3/batch_normalization_85/ReadVariableOp"; -"inception_v3/batch_normalization_85/ReadVariableOp" -> "inception_v3/batch_normalization_85/FusedBatchNormV3"; -"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp"; -"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp" -> "inception_v3/batch_normalization_85/FusedBatchNormV3"; -"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp_1"; -"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp_1" -> "inception_v3/batch_normalization_85/FusedBatchNormV3"; -"inception_v3/batch_normalization_85/FusedBatchNormV3" -> "inception_v3/activation_85/Relu"; +"inception_v3/conv2d_85/SymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/conv2d_85/SymmQuant/Abs/ReadVariableOp"; +"inception_v3/conv2d_85/SymmQuant/Abs/ReadVariableOp" -> "inception_v3/conv2d_85/SymmQuant/Abs"; +"inception_v3/conv2d_85/SymmQuant/Abs" -> "inception_v3/conv2d_85/SymmQuant/add"; +"inception_v3/conv2d_85/SymmQuant/add/y" -> "inception_v3/conv2d_85/SymmQuant/add"; +"inception_v3/conv2d_85/SymmQuant/add" -> "inception_v3/conv2d_85/SymmQuant/mul"; +"inception_v3/conv2d_85/SymmQuant/add" -> "inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/conv2d_85/SymmQuant/mul/ReadVariableOp/resource" -> "inception_v3/conv2d_85/SymmQuant/mul/ReadVariableOp"; +"inception_v3/conv2d_85/SymmQuant/mul/ReadVariableOp" -> "inception_v3/conv2d_85/SymmQuant/mul"; +"inception_v3/conv2d_85/SymmQuant/mul" -> "inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp/resource" -> "inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVars" -> "inception_v3/conv2d_85/Conv2D"; +"inception_v3/conv2d_85/Conv2D" -> "inception_v3/batch_normalization_85/FusedBatchNormV3"; "inception_v3/batch_normalization_93/scale" -> "inception_v3/batch_normalization_93/FusedBatchNormV3"; "inception_v3/batch_normalization_93/ReadVariableOp/resource" -> "inception_v3/batch_normalization_93/ReadVariableOp"; "inception_v3/batch_normalization_93/ReadVariableOp" -> "inception_v3/batch_normalization_93/FusedBatchNormV3"; @@ -6026,76 +6028,84 @@ self -> "inception_v3/input_1/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; "inception_v3/batch_normalization_93/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_v3/batch_normalization_93/FusedBatchNormV3/ReadVariableOp_1"; "inception_v3/batch_normalization_93/FusedBatchNormV3/ReadVariableOp_1" -> "inception_v3/batch_normalization_93/FusedBatchNormV3"; "inception_v3/batch_normalization_93/FusedBatchNormV3" -> "inception_v3/activation_93/Relu"; -"inception_v3/activation_92/Relu" -> 
"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_2/FakeQuantWithMinMaxVars"; -"inception_v3/activation_91/Relu" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars"; -"inception_v3/activation_88/Relu" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_4/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/Relu" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_3/FakeQuantWithMinMaxVars"; -"inception_v3/activation_85/Relu" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_1/Abs/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_2/Abs/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_3/Abs/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_4/Abs/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_5/Abs/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant/Abs"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant/Abs" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant/add"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant/add/y" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant/add"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant/add" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant/mul"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant/add" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_1/mul/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_2/mul/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_3/mul/ReadVariableOp"; 
-"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_4/mul/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_5/mul/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant/mul"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant/mul" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars" -> "inception_v3/mixed10/concat"; -"inception_v3/activation_93/Relu" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_5/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_1/Abs/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_1/Abs"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_1/Abs" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_1/add"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_1/add/y" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_1/add"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_1/add" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_1/mul"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_1/add" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_1/mul/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_1/mul"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_1/mul" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars" -> "inception_v3/concatenate_1/concat"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_2/Abs/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_2/Abs"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_2/Abs" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_2/add"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_2/add/y" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_2/add"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_2/add" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_2/mul"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_2/add" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_2/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_2/mul/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_2/mul"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_2/mul" -> 
"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_2/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_2/FakeQuantWithMinMaxVars" -> "inception_v3/concatenate_1/concat"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_3/Abs/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_3/Abs"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_3/Abs" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_3/add"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_3/add/y" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_3/add"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_3/add" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_3/mul"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_3/add" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_3/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_3/mul/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_3/mul"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_3/mul" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_3/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_3/FakeQuantWithMinMaxVars" -> "inception_v3/mixed9_1/concat"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_4/Abs/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_4/Abs"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_4/Abs" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_4/add"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_4/add/y" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_4/add"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_4/add" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_4/mul"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_4/add" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_4/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_4/mul/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_4/mul"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_4/mul" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_4/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_4/FakeQuantWithMinMaxVars" -> "inception_v3/mixed9_1/concat"; +"inception_v3/activation_92/Relu" -> "inception_v3/activation_92/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_91/Relu" -> "inception_v3/activation_91/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_88/Relu" -> "inception_v3/activation_88/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_87/Relu" -> "inception_v3/activation_87/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/batch_normalization_85/scale" -> "inception_v3/batch_normalization_85/FusedBatchNormV3"; 
+"inception_v3/batch_normalization_85/ReadVariableOp/resource" -> "inception_v3/batch_normalization_85/ReadVariableOp"; +"inception_v3/batch_normalization_85/ReadVariableOp" -> "inception_v3/batch_normalization_85/FusedBatchNormV3"; +"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp"; +"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp" -> "inception_v3/batch_normalization_85/FusedBatchNormV3"; +"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp_1"; +"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp_1" -> "inception_v3/batch_normalization_85/FusedBatchNormV3"; +"inception_v3/batch_normalization_85/FusedBatchNormV3" -> "inception_v3/activation_85/Relu"; +"inception_v3/activation_93/Relu" -> "inception_v3/activation_93/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_91/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/activation_91/fake_quantize/SymmQuant/Abs/ReadVariableOp"; +"inception_v3/activation_91/fake_quantize/SymmQuant/Abs/ReadVariableOp" -> "inception_v3/activation_91/fake_quantize/SymmQuant/Abs"; +"inception_v3/activation_91/fake_quantize/SymmQuant/Abs" -> "inception_v3/activation_91/fake_quantize/SymmQuant/add"; +"inception_v3/activation_91/fake_quantize/SymmQuant/add/y" -> "inception_v3/activation_91/fake_quantize/SymmQuant/add"; +"inception_v3/activation_91/fake_quantize/SymmQuant/add" -> "inception_v3/activation_91/fake_quantize/SymmQuant/mul"; +"inception_v3/activation_91/fake_quantize/SymmQuant/add" -> "inception_v3/activation_91/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_91/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" -> "inception_v3/activation_91/fake_quantize/SymmQuant/mul/ReadVariableOp"; +"inception_v3/activation_91/fake_quantize/SymmQuant/mul/ReadVariableOp" -> "inception_v3/activation_91/fake_quantize/SymmQuant/mul"; +"inception_v3/activation_91/fake_quantize/SymmQuant/mul" -> "inception_v3/activation_91/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_91/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "inception_v3/concatenate_1/concat"; +"inception_v3/activation_92/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/activation_92/fake_quantize/SymmQuant/Abs/ReadVariableOp"; +"inception_v3/activation_92/fake_quantize/SymmQuant/Abs/ReadVariableOp" -> "inception_v3/activation_92/fake_quantize/SymmQuant/Abs"; +"inception_v3/activation_92/fake_quantize/SymmQuant/Abs" -> "inception_v3/activation_92/fake_quantize/SymmQuant/add"; +"inception_v3/activation_92/fake_quantize/SymmQuant/add/y" -> "inception_v3/activation_92/fake_quantize/SymmQuant/add"; +"inception_v3/activation_92/fake_quantize/SymmQuant/add" -> "inception_v3/activation_92/fake_quantize/SymmQuant/mul"; +"inception_v3/activation_92/fake_quantize/SymmQuant/add" -> "inception_v3/activation_92/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_92/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" -> "inception_v3/activation_92/fake_quantize/SymmQuant/mul/ReadVariableOp"; +"inception_v3/activation_92/fake_quantize/SymmQuant/mul/ReadVariableOp" -> "inception_v3/activation_92/fake_quantize/SymmQuant/mul"; +"inception_v3/activation_92/fake_quantize/SymmQuant/mul" -> 
"inception_v3/activation_92/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_92/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "inception_v3/concatenate_1/concat"; +"inception_v3/activation_87/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/SymmQuant/Abs/ReadVariableOp"; +"inception_v3/activation_87/fake_quantize/SymmQuant/Abs/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/SymmQuant/Abs"; +"inception_v3/activation_87/fake_quantize/SymmQuant/Abs" -> "inception_v3/activation_87/fake_quantize/SymmQuant/add"; +"inception_v3/activation_87/fake_quantize/SymmQuant/add/y" -> "inception_v3/activation_87/fake_quantize/SymmQuant/add"; +"inception_v3/activation_87/fake_quantize/SymmQuant/add" -> "inception_v3/activation_87/fake_quantize/SymmQuant/mul"; +"inception_v3/activation_87/fake_quantize/SymmQuant/add" -> "inception_v3/activation_87/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_87/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/SymmQuant/mul/ReadVariableOp"; +"inception_v3/activation_87/fake_quantize/SymmQuant/mul/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/SymmQuant/mul"; +"inception_v3/activation_87/fake_quantize/SymmQuant/mul" -> "inception_v3/activation_87/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_87/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "inception_v3/mixed9_1/concat"; +"inception_v3/activation_88/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/activation_88/fake_quantize/SymmQuant/Abs/ReadVariableOp"; +"inception_v3/activation_88/fake_quantize/SymmQuant/Abs/ReadVariableOp" -> "inception_v3/activation_88/fake_quantize/SymmQuant/Abs"; +"inception_v3/activation_88/fake_quantize/SymmQuant/Abs" -> "inception_v3/activation_88/fake_quantize/SymmQuant/add"; +"inception_v3/activation_88/fake_quantize/SymmQuant/add/y" -> "inception_v3/activation_88/fake_quantize/SymmQuant/add"; +"inception_v3/activation_88/fake_quantize/SymmQuant/add" -> "inception_v3/activation_88/fake_quantize/SymmQuant/mul"; +"inception_v3/activation_88/fake_quantize/SymmQuant/add" -> "inception_v3/activation_88/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_88/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" -> "inception_v3/activation_88/fake_quantize/SymmQuant/mul/ReadVariableOp"; +"inception_v3/activation_88/fake_quantize/SymmQuant/mul/ReadVariableOp" -> "inception_v3/activation_88/fake_quantize/SymmQuant/mul"; +"inception_v3/activation_88/fake_quantize/SymmQuant/mul" -> "inception_v3/activation_88/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_88/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "inception_v3/mixed9_1/concat"; +"inception_v3/activation_85/Relu" -> "inception_v3/activation_85/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_85/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/activation_85/fake_quantize/SymmQuant/Abs/ReadVariableOp"; +"inception_v3/activation_85/fake_quantize/SymmQuant/Abs/ReadVariableOp" -> "inception_v3/activation_85/fake_quantize/SymmQuant/Abs"; +"inception_v3/activation_85/fake_quantize/SymmQuant/Abs" -> "inception_v3/activation_85/fake_quantize/SymmQuant/add"; +"inception_v3/activation_85/fake_quantize/SymmQuant/add/y" -> "inception_v3/activation_85/fake_quantize/SymmQuant/add"; 
+"inception_v3/activation_85/fake_quantize/SymmQuant/add" -> "inception_v3/activation_85/fake_quantize/SymmQuant/mul"; +"inception_v3/activation_85/fake_quantize/SymmQuant/add" -> "inception_v3/activation_85/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_85/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" -> "inception_v3/activation_85/fake_quantize/SymmQuant/mul/ReadVariableOp"; +"inception_v3/activation_85/fake_quantize/SymmQuant/mul/ReadVariableOp" -> "inception_v3/activation_85/fake_quantize/SymmQuant/mul"; +"inception_v3/activation_85/fake_quantize/SymmQuant/mul" -> "inception_v3/activation_85/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_85/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "inception_v3/mixed10/concat"; "inception_v3/mixed9_1/concat/axis" -> "inception_v3/mixed9_1/concat"; "inception_v3/mixed9_1/concat" -> "inception_v3/mixed10/concat"; "inception_v3/concatenate_1/concat/axis" -> "inception_v3/concatenate_1/concat"; "inception_v3/concatenate_1/concat" -> "inception_v3/mixed10/concat"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_5/Abs/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_5/Abs"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_5/Abs" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_5/add"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_5/add/y" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_5/add"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_5/add" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_5/mul"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_5/add" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_5/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_5/mul/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_5/mul"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_5/mul" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_5/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_5/FakeQuantWithMinMaxVars" -> "inception_v3/mixed10/concat"; +"inception_v3/activation_93/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/activation_93/fake_quantize/SymmQuant/Abs/ReadVariableOp"; +"inception_v3/activation_93/fake_quantize/SymmQuant/Abs/ReadVariableOp" -> "inception_v3/activation_93/fake_quantize/SymmQuant/Abs"; +"inception_v3/activation_93/fake_quantize/SymmQuant/Abs" -> "inception_v3/activation_93/fake_quantize/SymmQuant/add"; +"inception_v3/activation_93/fake_quantize/SymmQuant/add/y" -> "inception_v3/activation_93/fake_quantize/SymmQuant/add"; +"inception_v3/activation_93/fake_quantize/SymmQuant/add" -> "inception_v3/activation_93/fake_quantize/SymmQuant/mul"; +"inception_v3/activation_93/fake_quantize/SymmQuant/add" -> "inception_v3/activation_93/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_93/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" -> "inception_v3/activation_93/fake_quantize/SymmQuant/mul/ReadVariableOp"; +"inception_v3/activation_93/fake_quantize/SymmQuant/mul/ReadVariableOp" -> "inception_v3/activation_93/fake_quantize/SymmQuant/mul"; 
+"inception_v3/activation_93/fake_quantize/SymmQuant/mul" -> "inception_v3/activation_93/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_93/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "inception_v3/mixed10/concat"; "inception_v3/mixed10/concat/axis" -> "inception_v3/mixed10/concat"; "inception_v3/mixed10/concat" -> "inception_v3/avg_pool/Mean"; "inception_v3/avg_pool/Mean/reduction_indices" -> "inception_v3/avg_pool/Mean"; diff --git a/tests/tensorflow/data/reference_graphs/2.4/quantized/w_sym_t_a_sym_t/mask_rcnn.dot b/tests/tensorflow/data/reference_graphs/2.4/quantized/w_sym_t_a_sym_t/mask_rcnn.dot index 981632bd79b..5a6b48b69a4 100644 --- a/tests/tensorflow/data/reference_graphs/2.4/quantized/w_sym_t_a_sym_t/mask_rcnn.dot +++ b/tests/tensorflow/data/reference_graphs/2.4/quantized/w_sym_t_a_sym_t/mask_rcnn.dot @@ -2136,43 +2136,51 @@ self_1 [op=Placeholder]; "maskrcnn/p2-bn/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; "maskrcnn/p2-bn/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; "maskrcnn/p2-bn/FusedBatchNormV3" [op=FusedBatchNormV3]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant/Abs" [op=Abs]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant/add/y" [op=Const]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant/add" [op=AddV2]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant/mul" [op=Mul]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_1/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_1/Abs" [op=Abs]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_1/add/y" [op=Const]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_1/add" [op=AddV2]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_1/mul/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_1/mul" [op=Mul]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_2/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_2/Abs" [op=Abs]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_2/add/y" [op=Const]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_2/add" [op=AddV2]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_2/mul/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_2/mul" [op=Mul]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_2/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_3/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_3/Abs" [op=Abs]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_3/add/y" [op=Const]; 
-"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_3/add" [op=AddV2]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_3/mul/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_3/mul" [op=Mul]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_3/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_4/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_4/Abs" [op=Abs]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_4/add/y" [op=Const]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_4/add" [op=AddV2]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_4/mul/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_4/mul" [op=Mul]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_4/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/p6-bn/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/p6-bn/fake_quantize/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/p6-bn/fake_quantize/SymmQuant/Abs" [op=Abs]; +"maskrcnn/p6-bn/fake_quantize/SymmQuant/add/y" [op=Const]; +"maskrcnn/p6-bn/fake_quantize/SymmQuant/add" [op=AddV2]; +"maskrcnn/p6-bn/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/p6-bn/fake_quantize/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/p6-bn/fake_quantize/SymmQuant/mul" [op=Mul]; +"maskrcnn/p6-bn/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/p5-bn/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/p5-bn/fake_quantize/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/p5-bn/fake_quantize/SymmQuant/Abs" [op=Abs]; +"maskrcnn/p5-bn/fake_quantize/SymmQuant/add/y" [op=Const]; +"maskrcnn/p5-bn/fake_quantize/SymmQuant/add" [op=AddV2]; +"maskrcnn/p5-bn/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/p5-bn/fake_quantize/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/p5-bn/fake_quantize/SymmQuant/mul" [op=Mul]; +"maskrcnn/p5-bn/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/p4-bn/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/p4-bn/fake_quantize/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/p4-bn/fake_quantize/SymmQuant/Abs" [op=Abs]; +"maskrcnn/p4-bn/fake_quantize/SymmQuant/add/y" [op=Const]; +"maskrcnn/p4-bn/fake_quantize/SymmQuant/add" [op=AddV2]; +"maskrcnn/p4-bn/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/p4-bn/fake_quantize/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/p4-bn/fake_quantize/SymmQuant/mul" [op=Mul]; +"maskrcnn/p4-bn/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/p3-bn/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/p3-bn/fake_quantize/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/p3-bn/fake_quantize/SymmQuant/Abs" [op=Abs]; +"maskrcnn/p3-bn/fake_quantize/SymmQuant/add/y" [op=Const]; +"maskrcnn/p3-bn/fake_quantize/SymmQuant/add" [op=AddV2]; +"maskrcnn/p3-bn/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/p3-bn/fake_quantize/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; 
+"maskrcnn/p3-bn/fake_quantize/SymmQuant/mul" [op=Mul]; +"maskrcnn/p3-bn/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/p2-bn/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/p2-bn/fake_quantize/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/p2-bn/fake_quantize/SymmQuant/Abs" [op=Abs]; +"maskrcnn/p2-bn/fake_quantize/SymmQuant/add/y" [op=Const]; +"maskrcnn/p2-bn/fake_quantize/SymmQuant/add" [op=AddV2]; +"maskrcnn/p2-bn/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/p2-bn/fake_quantize/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/p2-bn/fake_quantize/SymmQuant/mul" [op=Mul]; +"maskrcnn/p2-bn/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; "maskrcnn/rpn/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; "maskrcnn/rpn/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; "maskrcnn/rpn/SymmQuant/Abs" [op=Abs]; @@ -3061,86 +3069,96 @@ self_1 [op=Placeholder]; "maskrcnn/tf.math.subtract/Sub" [op=Sub]; "maskrcnn/tf.expand_dims/ExpandDims/dim" [op=Const]; "maskrcnn/tf.expand_dims/ExpandDims" [op=ExpandDims]; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant/Abs" [op=Abs]; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant/add/y" [op=Const]; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant/add" [op=AddV2]; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant/mul" [op=Mul]; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_1/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_1/Abs" [op=Abs]; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_1/add/y" [op=Const]; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_1/add" [op=AddV2]; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_1/mul/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_1/mul" [op=Mul]; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant/Abs" [op=Abs]; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant/add/y" [op=Const]; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant/add" [op=AddV2]; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; 
-"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant/mul" [op=Mul]; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_1/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_1/Abs" [op=Abs]; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_1/add/y" [op=Const]; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_1/add" [op=AddV2]; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_1/mul/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_1/mul" [op=Mul]; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant/Abs" [op=Abs]; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant/add/y" [op=Const]; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant/add" [op=AddV2]; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant/mul" [op=Mul]; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_1/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_1/Abs" [op=Abs]; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_1/add/y" [op=Const]; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_1/add" [op=AddV2]; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_1/mul/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_1/mul" [op=Mul]; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant/Abs" [op=Abs]; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant/add/y" [op=Const]; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant/add" [op=AddV2]; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; 
-"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant/mul" [op=Mul]; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_1/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_1/Abs" [op=Abs]; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_1/add/y" [op=Const]; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_1/add" [op=AddV2]; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_1/mul/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_1/mul" [op=Mul]; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant/Abs" [op=Abs]; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant/add/y" [op=Const]; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant/add" [op=AddV2]; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant/mul" [op=Mul]; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_1/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_1/Abs" [op=Abs]; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_1/add/y" [op=Const]; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_1/add" [op=AddV2]; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_1/mul/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_1/mul" [op=Mul]; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.math.subtract_32/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_32/fake_quantize/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_32/fake_quantize/SymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.subtract_32/fake_quantize/SymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.subtract_32/fake_quantize/SymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.subtract_32/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_32/fake_quantize/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_32/fake_quantize/SymmQuant/mul" [op=Mul]; +"maskrcnn/tf.math.subtract_32/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.math.subtract_33/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; 
+"maskrcnn/tf.math.subtract_33/fake_quantize/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_33/fake_quantize/SymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.subtract_33/fake_quantize/SymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.subtract_33/fake_quantize/SymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.subtract_33/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_33/fake_quantize/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_33/fake_quantize/SymmQuant/mul" [op=Mul]; +"maskrcnn/tf.math.subtract_33/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.math.subtract_24/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_24/fake_quantize/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_24/fake_quantize/SymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.subtract_24/fake_quantize/SymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.subtract_24/fake_quantize/SymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.subtract_24/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_24/fake_quantize/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_24/fake_quantize/SymmQuant/mul" [op=Mul]; +"maskrcnn/tf.math.subtract_24/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.math.subtract_25/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_25/fake_quantize/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_25/fake_quantize/SymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.subtract_25/fake_quantize/SymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.subtract_25/fake_quantize/SymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.subtract_25/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_25/fake_quantize/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_25/fake_quantize/SymmQuant/mul" [op=Mul]; +"maskrcnn/tf.math.subtract_25/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.math.subtract_16/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_16/fake_quantize/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_16/fake_quantize/SymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.subtract_16/fake_quantize/SymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.subtract_16/fake_quantize/SymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.subtract_16/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_16/fake_quantize/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_16/fake_quantize/SymmQuant/mul" [op=Mul]; +"maskrcnn/tf.math.subtract_16/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.math.subtract_17/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_17/fake_quantize/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_17/fake_quantize/SymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.subtract_17/fake_quantize/SymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.subtract_17/fake_quantize/SymmQuant/add" [op=AddV2]; 
+"maskrcnn/tf.math.subtract_17/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_17/fake_quantize/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_17/fake_quantize/SymmQuant/mul" [op=Mul]; +"maskrcnn/tf.math.subtract_17/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.math.subtract_8/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_8/fake_quantize/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_8/fake_quantize/SymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.subtract_8/fake_quantize/SymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.subtract_8/fake_quantize/SymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.subtract_8/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_8/fake_quantize/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_8/fake_quantize/SymmQuant/mul" [op=Mul]; +"maskrcnn/tf.math.subtract_8/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.math.subtract_9/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_9/fake_quantize/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_9/fake_quantize/SymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.subtract_9/fake_quantize/SymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.subtract_9/fake_quantize/SymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.subtract_9/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_9/fake_quantize/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_9/fake_quantize/SymmQuant/mul" [op=Mul]; +"maskrcnn/tf.math.subtract_9/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.math.subtract/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract/fake_quantize/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract/fake_quantize/SymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.subtract/fake_quantize/SymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.subtract/fake_quantize/SymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.subtract/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract/fake_quantize/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract/fake_quantize/SymmQuant/mul" [op=Mul]; +"maskrcnn/tf.math.subtract/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.math.subtract_1/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_1/fake_quantize/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_1/fake_quantize/SymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.subtract_1/fake_quantize/SymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.subtract_1/fake_quantize/SymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.subtract_1/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_1/fake_quantize/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_1/fake_quantize/SymmQuant/mul" [op=Mul]; +"maskrcnn/tf.math.subtract_1/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; "maskrcnn/tf.__operators__.add_38/AddV2" [op=AddV2]; 
"maskrcnn/tf.__operators__.add_37/AddV2" [op=AddV2]; "maskrcnn/tf.__operators__.add_34/AddV2" [op=AddV2]; @@ -3342,20 +3360,24 @@ self_1 [op=Placeholder]; "maskrcnn/tf.math.subtract_39/fake_quantize/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; "maskrcnn/tf.math.subtract_39/fake_quantize/SymmQuant/mul" [op=Mul]; "maskrcnn/tf.math.subtract_39/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_2/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_2/Abs" [op=Abs]; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_2/add/y" [op=Const]; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_2/add" [op=AddV2]; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_2/mul/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_2/mul" [op=Mul]; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_2/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_3/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_3/Abs" [op=Abs]; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_3/add/y" [op=Const]; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_3/add" [op=AddV2]; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_3/mul/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_3/mul" [op=Mul]; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_3/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.math.subtract_34/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_34/fake_quantize/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_34/fake_quantize/SymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.subtract_34/fake_quantize/SymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.subtract_34/fake_quantize/SymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.subtract_34/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_34/fake_quantize/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_34/fake_quantize/SymmQuant/mul" [op=Mul]; +"maskrcnn/tf.math.subtract_34/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.math.subtract_35/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_35/fake_quantize/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_35/fake_quantize/SymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.subtract_35/fake_quantize/SymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.subtract_35/fake_quantize/SymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.subtract_35/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_35/fake_quantize/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_35/fake_quantize/SymmQuant/mul" [op=Mul]; +"maskrcnn/tf.math.subtract_35/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; 
"maskrcnn/tf.math.subtract_28/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; "maskrcnn/tf.math.subtract_28/fake_quantize/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; "maskrcnn/tf.math.subtract_28/fake_quantize/SymmQuant/Abs" [op=Abs]; @@ -3392,20 +3414,24 @@ self_1 [op=Placeholder]; "maskrcnn/tf.math.subtract_31/fake_quantize/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; "maskrcnn/tf.math.subtract_31/fake_quantize/SymmQuant/mul" [op=Mul]; "maskrcnn/tf.math.subtract_31/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_2/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_2/Abs" [op=Abs]; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_2/add/y" [op=Const]; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_2/add" [op=AddV2]; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_2/mul/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_2/mul" [op=Mul]; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_2/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_3/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_3/Abs" [op=Abs]; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_3/add/y" [op=Const]; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_3/add" [op=AddV2]; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_3/mul/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_3/mul" [op=Mul]; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_3/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.math.subtract_26/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_26/fake_quantize/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_26/fake_quantize/SymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.subtract_26/fake_quantize/SymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.subtract_26/fake_quantize/SymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.subtract_26/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_26/fake_quantize/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_26/fake_quantize/SymmQuant/mul" [op=Mul]; +"maskrcnn/tf.math.subtract_26/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.math.subtract_27/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_27/fake_quantize/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_27/fake_quantize/SymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.subtract_27/fake_quantize/SymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.subtract_27/fake_quantize/SymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.subtract_27/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_27/fake_quantize/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_27/fake_quantize/SymmQuant/mul" [op=Mul]; 
+"maskrcnn/tf.math.subtract_27/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; "maskrcnn/tf.math.subtract_20/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; "maskrcnn/tf.math.subtract_20/fake_quantize/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; "maskrcnn/tf.math.subtract_20/fake_quantize/SymmQuant/Abs" [op=Abs]; @@ -3442,20 +3468,24 @@ self_1 [op=Placeholder]; "maskrcnn/tf.math.subtract_23/fake_quantize/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; "maskrcnn/tf.math.subtract_23/fake_quantize/SymmQuant/mul" [op=Mul]; "maskrcnn/tf.math.subtract_23/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_2/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_2/Abs" [op=Abs]; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_2/add/y" [op=Const]; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_2/add" [op=AddV2]; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_2/mul/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_2/mul" [op=Mul]; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_2/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_3/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_3/Abs" [op=Abs]; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_3/add/y" [op=Const]; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_3/add" [op=AddV2]; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_3/mul/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_3/mul" [op=Mul]; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_3/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.math.subtract_18/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_18/fake_quantize/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_18/fake_quantize/SymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.subtract_18/fake_quantize/SymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.subtract_18/fake_quantize/SymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.subtract_18/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_18/fake_quantize/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_18/fake_quantize/SymmQuant/mul" [op=Mul]; +"maskrcnn/tf.math.subtract_18/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.math.subtract_19/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_19/fake_quantize/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_19/fake_quantize/SymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.subtract_19/fake_quantize/SymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.subtract_19/fake_quantize/SymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.subtract_19/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; 
+"maskrcnn/tf.math.subtract_19/fake_quantize/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_19/fake_quantize/SymmQuant/mul" [op=Mul]; +"maskrcnn/tf.math.subtract_19/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; "maskrcnn/tf.math.subtract_12/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; "maskrcnn/tf.math.subtract_12/fake_quantize/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; "maskrcnn/tf.math.subtract_12/fake_quantize/SymmQuant/Abs" [op=Abs]; @@ -3492,20 +3522,24 @@ self_1 [op=Placeholder]; "maskrcnn/tf.math.subtract_15/fake_quantize/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; "maskrcnn/tf.math.subtract_15/fake_quantize/SymmQuant/mul" [op=Mul]; "maskrcnn/tf.math.subtract_15/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_2/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_2/Abs" [op=Abs]; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_2/add/y" [op=Const]; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_2/add" [op=AddV2]; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_2/mul/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_2/mul" [op=Mul]; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_2/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_3/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_3/Abs" [op=Abs]; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_3/add/y" [op=Const]; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_3/add" [op=AddV2]; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_3/mul/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_3/mul" [op=Mul]; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_3/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.math.subtract_10/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_10/fake_quantize/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_10/fake_quantize/SymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.subtract_10/fake_quantize/SymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.subtract_10/fake_quantize/SymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.subtract_10/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_10/fake_quantize/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_10/fake_quantize/SymmQuant/mul" [op=Mul]; +"maskrcnn/tf.math.subtract_10/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.math.subtract_11/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_11/fake_quantize/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_11/fake_quantize/SymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.subtract_11/fake_quantize/SymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.subtract_11/fake_quantize/SymmQuant/add" [op=AddV2]; 
+"maskrcnn/tf.math.subtract_11/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_11/fake_quantize/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_11/fake_quantize/SymmQuant/mul" [op=Mul]; +"maskrcnn/tf.math.subtract_11/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; "maskrcnn/tf.math.subtract_4/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; "maskrcnn/tf.math.subtract_4/fake_quantize/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; "maskrcnn/tf.math.subtract_4/fake_quantize/SymmQuant/Abs" [op=Abs]; @@ -3542,20 +3576,24 @@ self_1 [op=Placeholder]; "maskrcnn/tf.math.subtract_7/fake_quantize/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; "maskrcnn/tf.math.subtract_7/fake_quantize/SymmQuant/mul" [op=Mul]; "maskrcnn/tf.math.subtract_7/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_2/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_2/Abs" [op=Abs]; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_2/add/y" [op=Const]; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_2/add" [op=AddV2]; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_2/mul/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_2/mul" [op=Mul]; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_2/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_3/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_3/Abs" [op=Abs]; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_3/add/y" [op=Const]; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_3/add" [op=AddV2]; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_3/mul/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_3/mul" [op=Mul]; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_3/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.math.subtract_2/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_2/fake_quantize/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_2/fake_quantize/SymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.subtract_2/fake_quantize/SymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.subtract_2/fake_quantize/SymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.subtract_2/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_2/fake_quantize/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_2/fake_quantize/SymmQuant/mul" [op=Mul]; +"maskrcnn/tf.math.subtract_2/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.math.subtract_3/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_3/fake_quantize/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_3/fake_quantize/SymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.subtract_3/fake_quantize/SymmQuant/add/y" [op=Const]; 
+"maskrcnn/tf.math.subtract_3/fake_quantize/SymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.subtract_3/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_3/fake_quantize/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_3/fake_quantize/SymmQuant/mul" [op=Mul]; +"maskrcnn/tf.math.subtract_3/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; "maskrcnn/tf.concat_4/concat/axis" [op=Const]; "maskrcnn/tf.concat_4/concat" [op=ConcatV2]; "maskrcnn/tf.stack_4/stack" [op=Pack]; @@ -3776,43 +3814,51 @@ self_1 [op=Placeholder]; "maskrcnn/tf.ones/ones/Const" [op=Const]; "maskrcnn/tf.ones/ones" [op=Fill]; "maskrcnn/tf.math.multiply_30/Mul" [op=Mul]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant/Abs" [op=Abs]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant/add/y" [op=Const]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant/add" [op=AddV2]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant/mul" [op=Mul]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_1/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_1/Abs" [op=Abs]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_1/add/y" [op=Const]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_1/add" [op=AddV2]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_1/mul/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_1/mul" [op=Mul]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_2/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_2/Abs" [op=Abs]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_2/add/y" [op=Const]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_2/add" [op=AddV2]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_2/mul/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_2/mul" [op=Mul]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_2/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_3/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_3/Abs" [op=Abs]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_3/add/y" [op=Const]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_3/add" [op=AddV2]; 
-"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_3/mul/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_3/mul" [op=Mul]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_3/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_4/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_4/Abs" [op=Abs]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_4/add/y" [op=Const]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_4/add" [op=AddV2]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_4/mul/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_4/mul" [op=Mul]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_4/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.concat_5/fake_quantize_I0/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.concat_5/fake_quantize_I0/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.concat_5/fake_quantize_I0/SymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.concat_5/fake_quantize_I0/SymmQuant/add/y" [op=Const]; +"maskrcnn/tf.concat_5/fake_quantize_I0/SymmQuant/add" [op=AddV2]; +"maskrcnn/tf.concat_5/fake_quantize_I0/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.concat_5/fake_quantize_I0/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.concat_5/fake_quantize_I0/SymmQuant/mul" [op=Mul]; +"maskrcnn/tf.concat_5/fake_quantize_I0/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.concat_5/fake_quantize_I1/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.concat_5/fake_quantize_I1/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.concat_5/fake_quantize_I1/SymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.concat_5/fake_quantize_I1/SymmQuant/add/y" [op=Const]; +"maskrcnn/tf.concat_5/fake_quantize_I1/SymmQuant/add" [op=AddV2]; +"maskrcnn/tf.concat_5/fake_quantize_I1/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.concat_5/fake_quantize_I1/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.concat_5/fake_quantize_I1/SymmQuant/mul" [op=Mul]; +"maskrcnn/tf.concat_5/fake_quantize_I1/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.concat_5/fake_quantize_I2/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.concat_5/fake_quantize_I2/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.concat_5/fake_quantize_I2/SymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.concat_5/fake_quantize_I2/SymmQuant/add/y" [op=Const]; +"maskrcnn/tf.concat_5/fake_quantize_I2/SymmQuant/add" [op=AddV2]; +"maskrcnn/tf.concat_5/fake_quantize_I2/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.concat_5/fake_quantize_I2/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.concat_5/fake_quantize_I2/SymmQuant/mul" [op=Mul]; +"maskrcnn/tf.concat_5/fake_quantize_I2/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.concat_5/fake_quantize_I3/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.concat_5/fake_quantize_I3/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.concat_5/fake_quantize_I3/SymmQuant/Abs" [op=Abs]; 
+"maskrcnn/tf.concat_5/fake_quantize_I3/SymmQuant/add/y" [op=Const]; +"maskrcnn/tf.concat_5/fake_quantize_I3/SymmQuant/add" [op=AddV2]; +"maskrcnn/tf.concat_5/fake_quantize_I3/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.concat_5/fake_quantize_I3/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.concat_5/fake_quantize_I3/SymmQuant/mul" [op=Mul]; +"maskrcnn/tf.concat_5/fake_quantize_I3/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.concat_5/fake_quantize_I4/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.concat_5/fake_quantize_I4/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.concat_5/fake_quantize_I4/SymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.concat_5/fake_quantize_I4/SymmQuant/add/y" [op=Const]; +"maskrcnn/tf.concat_5/fake_quantize_I4/SymmQuant/add" [op=AddV2]; +"maskrcnn/tf.concat_5/fake_quantize_I4/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.concat_5/fake_quantize_I4/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.concat_5/fake_quantize_I4/SymmQuant/mul" [op=Mul]; +"maskrcnn/tf.concat_5/fake_quantize_I4/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; "maskrcnn/tf.concat_5/concat/axis" [op=Const]; "maskrcnn/tf.concat_5/concat" [op=ConcatV2]; "maskrcnn/tf.stack_5/stack" [op=Pack]; @@ -3931,29 +3977,33 @@ self_1 [op=Placeholder]; "maskrcnn/tf.math.truediv_1/truediv" [op=RealDiv]; "maskrcnn/tf.math.subtract_42/Sub/y" [op=Const]; "maskrcnn/tf.math.subtract_42/Sub" [op=Sub]; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant/Abs" [op=Abs]; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant/add/y" [op=Const]; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant/add" [op=AddV2]; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant/mul" [op=Mul]; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant_1/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant_1/Abs" [op=Abs]; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant_1/add/y" [op=Const]; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant_1/add" [op=AddV2]; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant_1/mul/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant_1/mul" [op=Mul]; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant_2/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant_2/Abs" [op=Abs]; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant_2/add/y" [op=Const]; 
-"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant_2/add" [op=AddV2]; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant_2/mul/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant_2/mul" [op=Mul]; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant_2/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.math.truediv_2/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.truediv_2/fake_quantize/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.truediv_2/fake_quantize/SymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.truediv_2/fake_quantize/SymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.truediv_2/fake_quantize/SymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.truediv_2/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.truediv_2/fake_quantize/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.truediv_2/fake_quantize/SymmQuant/mul" [op=Mul]; +"maskrcnn/tf.math.truediv_2/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.math.truediv_3/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.truediv_3/fake_quantize/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.truediv_3/fake_quantize/SymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.truediv_3/fake_quantize/SymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.truediv_3/fake_quantize/SymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.truediv_3/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.truediv_3/fake_quantize/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.truediv_3/fake_quantize/SymmQuant/mul" [op=Mul]; +"maskrcnn/tf.math.truediv_3/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.math.truediv_1/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.truediv_1/fake_quantize/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.truediv_1/fake_quantize/SymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.truediv_1/fake_quantize/SymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.truediv_1/fake_quantize/SymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.truediv_1/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.truediv_1/fake_quantize/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.truediv_1/fake_quantize/SymmQuant/mul" [op=Mul]; +"maskrcnn/tf.math.truediv_1/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; "maskrcnn/tf.cast_23/Cast" [op=Cast]; "maskrcnn/tf.__operators__.getitem_27/strided_slice/stack" [op=Const]; "maskrcnn/tf.__operators__.getitem_27/strided_slice/stack_1" [op=Const]; @@ -4346,22 +4396,24 @@ self_1 [op=Placeholder]; "maskrcnn/tf.math.truediv_8/truediv" [op=RealDiv]; "maskrcnn/tf.math.truediv_6/truediv/y" [op=Const]; "maskrcnn/tf.math.truediv_6/truediv" [op=RealDiv]; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/SymmQuant/Abs" [op=Abs]; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/SymmQuant/add/y" 
[op=Const]; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/SymmQuant/add" [op=AddV2]; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/SymmQuant/mul" [op=Mul]; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/SymmQuant_1/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/SymmQuant_1/Abs" [op=Abs]; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/SymmQuant_1/add/y" [op=Const]; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/SymmQuant_1/add" [op=AddV2]; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/SymmQuant_1/mul/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/SymmQuant_1/mul" [op=Mul]; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.math.subtract_44/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_44/fake_quantize/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_44/fake_quantize/SymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.subtract_44/fake_quantize/SymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.subtract_44/fake_quantize/SymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.subtract_44/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_44/fake_quantize/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_44/fake_quantize/SymmQuant/mul" [op=Mul]; +"maskrcnn/tf.math.subtract_44/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.math.subtract_43/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_43/fake_quantize/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_43/fake_quantize/SymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.subtract_43/fake_quantize/SymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.subtract_43/fake_quantize/SymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.subtract_43/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_43/fake_quantize/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_43/fake_quantize/SymmQuant/mul" [op=Mul]; +"maskrcnn/tf.math.subtract_43/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; "maskrcnn/tf.__operators__.add_53/AddV2" [op=AddV2]; "maskrcnn/tf.__operators__.add_51/AddV2" [op=AddV2]; "maskrcnn/tf.__operators__.add_49/AddV2" [op=AddV2]; @@ -5292,22 +5344,24 @@ self_1 [op=Placeholder]; "maskrcnn/tf.__operators__.getitem_60/strided_slice/stack_1" [op=Const]; "maskrcnn/tf.__operators__.getitem_60/strided_slice/stack_2" [op=Const]; "maskrcnn/tf.__operators__.getitem_60/strided_slice" [op=StridedSlice]; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; 
-"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant/Abs" [op=Abs]; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant/add/y" [op=Const]; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant/add" [op=AddV2]; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant/mul" [op=Mul]; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_1/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_1/Abs" [op=Abs]; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_1/add/y" [op=Const]; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_1/add" [op=AddV2]; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_1/mul/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_1/mul" [op=Mul]; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.math.subtract_53/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_53/fake_quantize/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_53/fake_quantize/SymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.subtract_53/fake_quantize/SymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.subtract_53/fake_quantize/SymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.subtract_53/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_53/fake_quantize/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_53/fake_quantize/SymmQuant/mul" [op=Mul]; +"maskrcnn/tf.math.subtract_53/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.math.subtract_54/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_54/fake_quantize/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_54/fake_quantize/SymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.subtract_54/fake_quantize/SymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.subtract_54/fake_quantize/SymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.subtract_54/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_54/fake_quantize/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_54/fake_quantize/SymmQuant/mul" [op=Mul]; +"maskrcnn/tf.math.subtract_54/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; "maskrcnn/tf.__operators__.add_66/AddV2" [op=AddV2]; "maskrcnn/tf.__operators__.add_65/AddV2" [op=AddV2]; "maskrcnn/tf.unstack_5/unstack" [op=Unpack]; @@ -5377,20 +5431,24 @@ self_1 [op=Placeholder]; "maskrcnn/tf.math.subtract_60/fake_quantize/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; "maskrcnn/tf.math.subtract_60/fake_quantize/SymmQuant/mul" [op=Mul]; "maskrcnn/tf.math.subtract_60/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; 
-"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_2/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_2/Abs" [op=Abs]; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_2/add/y" [op=Const]; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_2/add" [op=AddV2]; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_2/mul/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_2/mul" [op=Mul]; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_2/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_3/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_3/Abs" [op=Abs]; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_3/add/y" [op=Const]; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_3/add" [op=AddV2]; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_3/mul/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_3/mul" [op=Mul]; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_3/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.math.subtract_55/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_55/fake_quantize/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_55/fake_quantize/SymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.subtract_55/fake_quantize/SymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.subtract_55/fake_quantize/SymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.subtract_55/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_55/fake_quantize/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_55/fake_quantize/SymmQuant/mul" [op=Mul]; +"maskrcnn/tf.math.subtract_55/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.math.subtract_56/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_56/fake_quantize/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_56/fake_quantize/SymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.subtract_56/fake_quantize/SymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.subtract_56/fake_quantize/SymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.subtract_56/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_56/fake_quantize/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_56/fake_quantize/SymmQuant/mul" [op=Mul]; +"maskrcnn/tf.math.subtract_56/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; "maskrcnn/tf.concat_13/concat/axis" [op=Const]; "maskrcnn/tf.concat_13/concat" [op=ConcatV2]; "maskrcnn/tf.stack_15/stack" [op=Pack]; @@ -5573,29 +5631,33 @@ self_1 [op=Placeholder]; "maskrcnn/tf.math.truediv_26/truediv" [op=RealDiv]; "maskrcnn/tf.math.subtract_64/Sub/y" [op=Const]; "maskrcnn/tf.math.subtract_64/Sub" [op=Sub]; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; 
-"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant/Abs" [op=Abs]; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant/add/y" [op=Const]; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant/add" [op=AddV2]; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant/mul" [op=Mul]; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant_1/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant_1/Abs" [op=Abs]; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant_1/add/y" [op=Const]; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant_1/add" [op=AddV2]; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant_1/mul/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant_1/mul" [op=Mul]; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant_2/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant_2/Abs" [op=Abs]; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant_2/add/y" [op=Const]; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant_2/add" [op=AddV2]; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant_2/mul/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant_2/mul" [op=Mul]; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant_2/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.math.truediv_27/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.truediv_27/fake_quantize/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.truediv_27/fake_quantize/SymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.truediv_27/fake_quantize/SymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.truediv_27/fake_quantize/SymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.truediv_27/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.truediv_27/fake_quantize/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.truediv_27/fake_quantize/SymmQuant/mul" [op=Mul]; +"maskrcnn/tf.math.truediv_27/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.math.truediv_28/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.truediv_28/fake_quantize/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.truediv_28/fake_quantize/SymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.truediv_28/fake_quantize/SymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.truediv_28/fake_quantize/SymmQuant/add" [op=AddV2]; 
+"maskrcnn/tf.math.truediv_28/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.truediv_28/fake_quantize/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.truediv_28/fake_quantize/SymmQuant/mul" [op=Mul]; +"maskrcnn/tf.math.truediv_28/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.math.truediv_26/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.truediv_26/fake_quantize/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.truediv_26/fake_quantize/SymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.truediv_26/fake_quantize/SymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.truediv_26/fake_quantize/SymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.truediv_26/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.truediv_26/fake_quantize/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.truediv_26/fake_quantize/SymmQuant/mul" [op=Mul]; +"maskrcnn/tf.math.truediv_26/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; "maskrcnn/tf.cast_37/Cast" [op=Cast]; "maskrcnn/tf.__operators__.getitem_76/strided_slice/stack" [op=Const]; "maskrcnn/tf.__operators__.getitem_76/strided_slice/stack_1" [op=Const]; @@ -6317,22 +6379,24 @@ self_1 [op=Placeholder]; "maskrcnn/tf.math.truediv_33/truediv" [op=RealDiv]; "maskrcnn/tf.math.truediv_31/truediv/y" [op=Const]; "maskrcnn/tf.math.truediv_31/truediv" [op=RealDiv]; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/SymmQuant/Abs" [op=Abs]; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/SymmQuant/add/y" [op=Const]; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/SymmQuant/add" [op=AddV2]; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/SymmQuant/mul" [op=Mul]; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/SymmQuant_1/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/SymmQuant_1/Abs" [op=Abs]; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/SymmQuant_1/add/y" [op=Const]; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/SymmQuant_1/add" [op=AddV2]; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/SymmQuant_1/mul/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/SymmQuant_1/mul" [op=Mul]; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.math.subtract_66/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_66/fake_quantize/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_66/fake_quantize/SymmQuant/Abs" [op=Abs]; 
+"maskrcnn/tf.math.subtract_66/fake_quantize/SymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.subtract_66/fake_quantize/SymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.subtract_66/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_66/fake_quantize/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_66/fake_quantize/SymmQuant/mul" [op=Mul]; +"maskrcnn/tf.math.subtract_66/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.math.subtract_65/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_65/fake_quantize/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_65/fake_quantize/SymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.subtract_65/fake_quantize/SymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.subtract_65/fake_quantize/SymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.subtract_65/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_65/fake_quantize/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_65/fake_quantize/SymmQuant/mul" [op=Mul]; +"maskrcnn/tf.math.subtract_65/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; "maskrcnn/tf.__operators__.add_96/AddV2" [op=AddV2]; "maskrcnn/tf.__operators__.add_94/AddV2" [op=AddV2]; "maskrcnn/tf.__operators__.add_92/AddV2" [op=AddV2]; @@ -22518,7 +22582,7 @@ self_1 -> "maskrcnn/image_info/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/p6-bn/FusedBatchNormV3/ReadVariableOp" -> "maskrcnn/p6-bn/FusedBatchNormV3"; "maskrcnn/p6-bn/FusedBatchNormV3/ReadVariableOp_1/resource" -> "maskrcnn/p6-bn/FusedBatchNormV3/ReadVariableOp_1"; "maskrcnn/p6-bn/FusedBatchNormV3/ReadVariableOp_1" -> "maskrcnn/p6-bn/FusedBatchNormV3"; -"maskrcnn/p6-bn/FusedBatchNormV3" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/p6-bn/FusedBatchNormV3" -> "maskrcnn/p6-bn/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/p5-bn/ReadVariableOp/resource" -> "maskrcnn/p5-bn/ReadVariableOp"; "maskrcnn/p5-bn/ReadVariableOp" -> "maskrcnn/p5-bn/FusedBatchNormV3"; "maskrcnn/p5-bn/ReadVariableOp_1/resource" -> "maskrcnn/p5-bn/ReadVariableOp_1"; @@ -22527,7 +22591,7 @@ self_1 -> "maskrcnn/image_info/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/p5-bn/FusedBatchNormV3/ReadVariableOp" -> "maskrcnn/p5-bn/FusedBatchNormV3"; "maskrcnn/p5-bn/FusedBatchNormV3/ReadVariableOp_1/resource" -> "maskrcnn/p5-bn/FusedBatchNormV3/ReadVariableOp_1"; "maskrcnn/p5-bn/FusedBatchNormV3/ReadVariableOp_1" -> "maskrcnn/p5-bn/FusedBatchNormV3"; -"maskrcnn/p5-bn/FusedBatchNormV3" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars"; +"maskrcnn/p5-bn/FusedBatchNormV3" -> "maskrcnn/p5-bn/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/p4-bn/ReadVariableOp/resource" -> "maskrcnn/p4-bn/ReadVariableOp"; "maskrcnn/p4-bn/ReadVariableOp" -> "maskrcnn/p4-bn/FusedBatchNormV3"; "maskrcnn/p4-bn/ReadVariableOp_1/resource" -> "maskrcnn/p4-bn/ReadVariableOp_1"; @@ -22536,7 +22600,7 @@ self_1 -> "maskrcnn/image_info/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/p4-bn/FusedBatchNormV3/ReadVariableOp" -> "maskrcnn/p4-bn/FusedBatchNormV3"; "maskrcnn/p4-bn/FusedBatchNormV3/ReadVariableOp_1/resource" -> "maskrcnn/p4-bn/FusedBatchNormV3/ReadVariableOp_1"; "maskrcnn/p4-bn/FusedBatchNormV3/ReadVariableOp_1" -> 
"maskrcnn/p4-bn/FusedBatchNormV3"; -"maskrcnn/p4-bn/FusedBatchNormV3" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_2/FakeQuantWithMinMaxVars"; +"maskrcnn/p4-bn/FusedBatchNormV3" -> "maskrcnn/p4-bn/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/p3-bn/ReadVariableOp/resource" -> "maskrcnn/p3-bn/ReadVariableOp"; "maskrcnn/p3-bn/ReadVariableOp" -> "maskrcnn/p3-bn/FusedBatchNormV3"; "maskrcnn/p3-bn/ReadVariableOp_1/resource" -> "maskrcnn/p3-bn/ReadVariableOp_1"; @@ -22545,7 +22609,7 @@ self_1 -> "maskrcnn/image_info/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/p3-bn/FusedBatchNormV3/ReadVariableOp" -> "maskrcnn/p3-bn/FusedBatchNormV3"; "maskrcnn/p3-bn/FusedBatchNormV3/ReadVariableOp_1/resource" -> "maskrcnn/p3-bn/FusedBatchNormV3/ReadVariableOp_1"; "maskrcnn/p3-bn/FusedBatchNormV3/ReadVariableOp_1" -> "maskrcnn/p3-bn/FusedBatchNormV3"; -"maskrcnn/p3-bn/FusedBatchNormV3" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_3/FakeQuantWithMinMaxVars"; +"maskrcnn/p3-bn/FusedBatchNormV3" -> "maskrcnn/p3-bn/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/p2-bn/ReadVariableOp/resource" -> "maskrcnn/p2-bn/ReadVariableOp"; "maskrcnn/p2-bn/ReadVariableOp" -> "maskrcnn/p2-bn/FusedBatchNormV3"; "maskrcnn/p2-bn/ReadVariableOp_1/resource" -> "maskrcnn/p2-bn/ReadVariableOp_1"; @@ -22554,67 +22618,67 @@ self_1 -> "maskrcnn/image_info/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/p2-bn/FusedBatchNormV3/ReadVariableOp" -> "maskrcnn/p2-bn/FusedBatchNormV3"; "maskrcnn/p2-bn/FusedBatchNormV3/ReadVariableOp_1/resource" -> "maskrcnn/p2-bn/FusedBatchNormV3/ReadVariableOp_1"; "maskrcnn/p2-bn/FusedBatchNormV3/ReadVariableOp_1" -> "maskrcnn/p2-bn/FusedBatchNormV3"; -"maskrcnn/p2-bn/FusedBatchNormV3" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_4/FakeQuantWithMinMaxVars"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_1/Abs/ReadVariableOp"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_2/Abs/ReadVariableOp"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_3/Abs/ReadVariableOp"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_4/Abs/ReadVariableOp"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant/Abs"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant/Abs" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant/add"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant/add/y" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant/add"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant/add" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant/mul"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant/add" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp/resource" -> 
"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_1/mul/ReadVariableOp"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_2/mul/ReadVariableOp"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_3/mul/ReadVariableOp"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_4/mul/ReadVariableOp"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant/mul"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant/mul" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/rpn/Conv2D"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.reshape_18/Reshape"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.reshape_47/Reshape"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_1/Abs/ReadVariableOp" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_1/Abs"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_1/Abs" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_1/add"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_1/add/y" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_1/add"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_1/add" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_1/mul"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_1/add" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_1/mul/ReadVariableOp" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_1/mul"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_1/mul" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars" -> "maskrcnn/rpn/Conv2D_1"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.reshape_16/Reshape"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.reshape_45/Reshape"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_2/Abs/ReadVariableOp" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_2/Abs"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_2/Abs" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_2/add"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_2/add/y" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_2/add"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_2/add" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_2/mul"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_2/add" -> 
"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_2/FakeQuantWithMinMaxVars"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_2/mul/ReadVariableOp" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_2/mul"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_2/mul" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_2/FakeQuantWithMinMaxVars"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_2/FakeQuantWithMinMaxVars" -> "maskrcnn/rpn/Conv2D_2"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_2/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.reshape_14/Reshape"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_2/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.reshape_43/Reshape"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_3/Abs/ReadVariableOp" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_3/Abs"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_3/Abs" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_3/add"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_3/add/y" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_3/add"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_3/add" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_3/mul"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_3/add" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_3/FakeQuantWithMinMaxVars"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_3/mul/ReadVariableOp" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_3/mul"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_3/mul" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_3/FakeQuantWithMinMaxVars"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_3/FakeQuantWithMinMaxVars" -> "maskrcnn/rpn/Conv2D_3"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_3/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.reshape_12/Reshape"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_3/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.reshape_41/Reshape"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_4/Abs/ReadVariableOp" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_4/Abs"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_4/Abs" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_4/add"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_4/add/y" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_4/add"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_4/add" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_4/mul"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_4/add" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_4/FakeQuantWithMinMaxVars"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_4/mul/ReadVariableOp" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_4/mul"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_4/mul" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_4/FakeQuantWithMinMaxVars"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_4/FakeQuantWithMinMaxVars" -> "maskrcnn/rpn/Conv2D_4"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_4/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.reshape_10/Reshape"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_4/FakeQuantWithMinMaxVars" -> 
"maskrcnn/tf.reshape_39/Reshape"; +"maskrcnn/p2-bn/FusedBatchNormV3" -> "maskrcnn/p2-bn/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/p6-bn/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/p6-bn/fake_quantize/SymmQuant/Abs/ReadVariableOp"; +"maskrcnn/p6-bn/fake_quantize/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/p6-bn/fake_quantize/SymmQuant/Abs"; +"maskrcnn/p6-bn/fake_quantize/SymmQuant/Abs" -> "maskrcnn/p6-bn/fake_quantize/SymmQuant/add"; +"maskrcnn/p6-bn/fake_quantize/SymmQuant/add/y" -> "maskrcnn/p6-bn/fake_quantize/SymmQuant/add"; +"maskrcnn/p6-bn/fake_quantize/SymmQuant/add" -> "maskrcnn/p6-bn/fake_quantize/SymmQuant/mul"; +"maskrcnn/p6-bn/fake_quantize/SymmQuant/add" -> "maskrcnn/p6-bn/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/p6-bn/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/p6-bn/fake_quantize/SymmQuant/mul/ReadVariableOp"; +"maskrcnn/p6-bn/fake_quantize/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/p6-bn/fake_quantize/SymmQuant/mul"; +"maskrcnn/p6-bn/fake_quantize/SymmQuant/mul" -> "maskrcnn/p6-bn/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/p6-bn/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/rpn/Conv2D"; +"maskrcnn/p6-bn/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.reshape_18/Reshape"; +"maskrcnn/p6-bn/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.reshape_47/Reshape"; +"maskrcnn/p5-bn/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/p5-bn/fake_quantize/SymmQuant/Abs/ReadVariableOp"; +"maskrcnn/p5-bn/fake_quantize/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/p5-bn/fake_quantize/SymmQuant/Abs"; +"maskrcnn/p5-bn/fake_quantize/SymmQuant/Abs" -> "maskrcnn/p5-bn/fake_quantize/SymmQuant/add"; +"maskrcnn/p5-bn/fake_quantize/SymmQuant/add/y" -> "maskrcnn/p5-bn/fake_quantize/SymmQuant/add"; +"maskrcnn/p5-bn/fake_quantize/SymmQuant/add" -> "maskrcnn/p5-bn/fake_quantize/SymmQuant/mul"; +"maskrcnn/p5-bn/fake_quantize/SymmQuant/add" -> "maskrcnn/p5-bn/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/p5-bn/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/p5-bn/fake_quantize/SymmQuant/mul/ReadVariableOp"; +"maskrcnn/p5-bn/fake_quantize/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/p5-bn/fake_quantize/SymmQuant/mul"; +"maskrcnn/p5-bn/fake_quantize/SymmQuant/mul" -> "maskrcnn/p5-bn/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/p5-bn/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/rpn/Conv2D_1"; +"maskrcnn/p5-bn/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.reshape_16/Reshape"; +"maskrcnn/p5-bn/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.reshape_45/Reshape"; +"maskrcnn/p4-bn/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/p4-bn/fake_quantize/SymmQuant/Abs/ReadVariableOp"; +"maskrcnn/p4-bn/fake_quantize/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/p4-bn/fake_quantize/SymmQuant/Abs"; +"maskrcnn/p4-bn/fake_quantize/SymmQuant/Abs" -> "maskrcnn/p4-bn/fake_quantize/SymmQuant/add"; +"maskrcnn/p4-bn/fake_quantize/SymmQuant/add/y" -> "maskrcnn/p4-bn/fake_quantize/SymmQuant/add"; +"maskrcnn/p4-bn/fake_quantize/SymmQuant/add" -> "maskrcnn/p4-bn/fake_quantize/SymmQuant/mul"; +"maskrcnn/p4-bn/fake_quantize/SymmQuant/add" -> "maskrcnn/p4-bn/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/p4-bn/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/p4-bn/fake_quantize/SymmQuant/mul/ReadVariableOp"; 
+"maskrcnn/p4-bn/fake_quantize/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/p4-bn/fake_quantize/SymmQuant/mul"; +"maskrcnn/p4-bn/fake_quantize/SymmQuant/mul" -> "maskrcnn/p4-bn/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/p4-bn/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/rpn/Conv2D_2"; +"maskrcnn/p4-bn/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.reshape_14/Reshape"; +"maskrcnn/p4-bn/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.reshape_43/Reshape"; +"maskrcnn/p3-bn/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/p3-bn/fake_quantize/SymmQuant/Abs/ReadVariableOp"; +"maskrcnn/p3-bn/fake_quantize/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/p3-bn/fake_quantize/SymmQuant/Abs"; +"maskrcnn/p3-bn/fake_quantize/SymmQuant/Abs" -> "maskrcnn/p3-bn/fake_quantize/SymmQuant/add"; +"maskrcnn/p3-bn/fake_quantize/SymmQuant/add/y" -> "maskrcnn/p3-bn/fake_quantize/SymmQuant/add"; +"maskrcnn/p3-bn/fake_quantize/SymmQuant/add" -> "maskrcnn/p3-bn/fake_quantize/SymmQuant/mul"; +"maskrcnn/p3-bn/fake_quantize/SymmQuant/add" -> "maskrcnn/p3-bn/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/p3-bn/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/p3-bn/fake_quantize/SymmQuant/mul/ReadVariableOp"; +"maskrcnn/p3-bn/fake_quantize/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/p3-bn/fake_quantize/SymmQuant/mul"; +"maskrcnn/p3-bn/fake_quantize/SymmQuant/mul" -> "maskrcnn/p3-bn/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/p3-bn/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/rpn/Conv2D_3"; +"maskrcnn/p3-bn/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.reshape_12/Reshape"; +"maskrcnn/p3-bn/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.reshape_41/Reshape"; +"maskrcnn/p2-bn/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/p2-bn/fake_quantize/SymmQuant/Abs/ReadVariableOp"; +"maskrcnn/p2-bn/fake_quantize/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/p2-bn/fake_quantize/SymmQuant/Abs"; +"maskrcnn/p2-bn/fake_quantize/SymmQuant/Abs" -> "maskrcnn/p2-bn/fake_quantize/SymmQuant/add"; +"maskrcnn/p2-bn/fake_quantize/SymmQuant/add/y" -> "maskrcnn/p2-bn/fake_quantize/SymmQuant/add"; +"maskrcnn/p2-bn/fake_quantize/SymmQuant/add" -> "maskrcnn/p2-bn/fake_quantize/SymmQuant/mul"; +"maskrcnn/p2-bn/fake_quantize/SymmQuant/add" -> "maskrcnn/p2-bn/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/p2-bn/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/p2-bn/fake_quantize/SymmQuant/mul/ReadVariableOp"; +"maskrcnn/p2-bn/fake_quantize/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/p2-bn/fake_quantize/SymmQuant/mul"; +"maskrcnn/p2-bn/fake_quantize/SymmQuant/mul" -> "maskrcnn/p2-bn/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/p2-bn/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/rpn/Conv2D_4"; +"maskrcnn/p2-bn/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.reshape_10/Reshape"; +"maskrcnn/p2-bn/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.reshape_39/Reshape"; "maskrcnn/rpn/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/rpn/SymmQuant/Abs/ReadVariableOp"; "maskrcnn/rpn/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/rpn/SymmQuant_1/Abs/ReadVariableOp"; "maskrcnn/rpn/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/rpn/SymmQuant_2/Abs/ReadVariableOp"; @@ -23626,156 +23690,136 @@ self_1 -> "maskrcnn/image_info/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; 
"maskrcnn/tf.math.multiply_4/fake_quantize/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.math.multiply_4/fake_quantize/SymmQuant/mul"; "maskrcnn/tf.math.multiply_4/fake_quantize/SymmQuant/mul" -> "maskrcnn/tf.math.multiply_4/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.multiply_4/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.math.subtract/Sub"; -"maskrcnn/tf.math.subtract_33/Sub" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_32/Sub" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_25/Sub" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_24/Sub" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_17/Sub" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_16/Sub" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_9/Sub" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_8/Sub" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_33/Sub" -> "maskrcnn/tf.math.subtract_33/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_32/Sub" -> "maskrcnn/tf.math.subtract_32/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_25/Sub" -> "maskrcnn/tf.math.subtract_25/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_24/Sub" -> "maskrcnn/tf.math.subtract_24/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_17/Sub" -> "maskrcnn/tf.math.subtract_17/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_16/Sub" -> "maskrcnn/tf.math.subtract_16/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_9/Sub" -> "maskrcnn/tf.math.subtract_9/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_8/Sub" -> "maskrcnn/tf.math.subtract_8/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.__operators__.getitem/strided_slice/stack" -> "maskrcnn/tf.__operators__.getitem/strided_slice"; "maskrcnn/tf.__operators__.getitem/strided_slice/stack_1" -> "maskrcnn/tf.__operators__.getitem/strided_slice"; "maskrcnn/tf.__operators__.getitem/strided_slice/stack_2" -> "maskrcnn/tf.__operators__.getitem/strided_slice"; "maskrcnn/tf.__operators__.getitem/strided_slice" -> "maskrcnn/tf.expand_dims/ExpandDims"; -"maskrcnn/tf.math.subtract_1/Sub" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract/Sub" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_1/Sub" -> "maskrcnn/tf.math.subtract_1/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract/Sub" -> "maskrcnn/tf.math.subtract/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.expand_dims/ExpandDims/dim" -> "maskrcnn/tf.expand_dims/ExpandDims"; "maskrcnn/tf.expand_dims/ExpandDims" -> "maskrcnn/tf.unstack_4/unstack"; 
"maskrcnn/tf.expand_dims/ExpandDims" -> "maskrcnn/tf.unstack_3/unstack"; "maskrcnn/tf.expand_dims/ExpandDims" -> "maskrcnn/tf.unstack_2/unstack"; "maskrcnn/tf.expand_dims/ExpandDims" -> "maskrcnn/tf.unstack_1/unstack"; "maskrcnn/tf.expand_dims/ExpandDims" -> "maskrcnn/tf.unstack/unstack"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_1/Abs/ReadVariableOp"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_2/Abs/ReadVariableOp"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_3/Abs/ReadVariableOp"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant/Abs"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant/Abs" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant/add"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant/add/y" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant/add"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant/add" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant/mul"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant/add" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_1/mul/ReadVariableOp"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_2/mul/ReadVariableOp"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_3/mul/ReadVariableOp"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant/mul"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant/mul" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.__operators__.add_37/AddV2"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_4/concat"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_1/Abs/ReadVariableOp" -> 
"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_1/Abs"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_1/Abs" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_1/add"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_1/add/y" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_1/add"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_1/add" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_1/mul"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_1/add" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_1/mul/ReadVariableOp" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_1/mul"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_1/mul" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.__operators__.add_38/AddV2"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_4/concat"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_1/Abs/ReadVariableOp"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_2/Abs/ReadVariableOp"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_3/Abs/ReadVariableOp"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant/Abs"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant/Abs" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant/add"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant/add/y" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant/add"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant/add" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant/mul"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant/add" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_1/mul/ReadVariableOp"; 
-"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_2/mul/ReadVariableOp"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_3/mul/ReadVariableOp"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant/mul"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant/mul" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.__operators__.add_33/AddV2"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_3/concat"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_1/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_1/Abs"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_1/Abs" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_1/add"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_1/add/y" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_1/add"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_1/add" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_1/mul"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_1/add" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_1/mul/ReadVariableOp" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_1/mul"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_1/mul" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.__operators__.add_34/AddV2"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_3/concat"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_1/Abs/ReadVariableOp"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_2/Abs/ReadVariableOp"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_3/Abs/ReadVariableOp"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp" -> 
"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant/Abs"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant/Abs" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant/add"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant/add/y" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant/add"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant/add" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant/mul"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant/add" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_1/mul/ReadVariableOp"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_2/mul/ReadVariableOp"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_3/mul/ReadVariableOp"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant/mul"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant/mul" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.__operators__.add_29/AddV2"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_2/concat"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_1/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_1/Abs"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_1/Abs" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_1/add"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_1/add/y" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_1/add"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_1/add" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_1/mul"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_1/add" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_1/mul/ReadVariableOp" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_1/mul"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_1/mul" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars" -> 
"maskrcnn/tf.__operators__.add_30/AddV2"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_2/concat"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_1/Abs/ReadVariableOp"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_2/Abs/ReadVariableOp"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_3/Abs/ReadVariableOp"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant/Abs"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant/Abs" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant/add"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant/add/y" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant/add"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant/add" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant/mul"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant/add" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_1/mul/ReadVariableOp"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_2/mul/ReadVariableOp"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_3/mul/ReadVariableOp"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant/mul"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant/mul" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.__operators__.add_25/AddV2"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_1/concat"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_1/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_1/Abs"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_1/Abs" -> 
"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_1/add"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_1/add/y" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_1/add"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_1/add" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_1/mul"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_1/add" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_1/mul/ReadVariableOp" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_1/mul"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_1/mul" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.__operators__.add_26/AddV2"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_1/concat"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_1/Abs/ReadVariableOp"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_2/Abs/ReadVariableOp"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_3/Abs/ReadVariableOp"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant/Abs"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant/Abs" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant/add"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant/add/y" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant/add"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant/add" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant/mul"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant/add" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_1/mul/ReadVariableOp"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_2/mul/ReadVariableOp"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp/resource" -> 
"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_3/mul/ReadVariableOp"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant/mul"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant/mul" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.__operators__.add_21/AddV2"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat/concat"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_1/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_1/Abs"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_1/Abs" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_1/add"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_1/add/y" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_1/add"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_1/add" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_1/mul"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_1/add" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_1/mul/ReadVariableOp" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_1/mul"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_1/mul" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.__operators__.add_22/AddV2"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat/concat"; +"maskrcnn/tf.math.subtract_32/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_32/fake_quantize/SymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.subtract_32/fake_quantize/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_32/fake_quantize/SymmQuant/Abs"; +"maskrcnn/tf.math.subtract_32/fake_quantize/SymmQuant/Abs" -> "maskrcnn/tf.math.subtract_32/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.subtract_32/fake_quantize/SymmQuant/add/y" -> "maskrcnn/tf.math.subtract_32/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.subtract_32/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.subtract_32/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.subtract_32/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.subtract_32/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_32/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_32/fake_quantize/SymmQuant/mul/ReadVariableOp"; +"maskrcnn/tf.math.subtract_32/fake_quantize/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.math.subtract_32/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.subtract_32/fake_quantize/SymmQuant/mul" -> "maskrcnn/tf.math.subtract_32/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_32/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> 
"maskrcnn/tf.__operators__.add_37/AddV2"; +"maskrcnn/tf.math.subtract_32/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_4/concat"; +"maskrcnn/tf.math.subtract_33/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_33/fake_quantize/SymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.subtract_33/fake_quantize/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_33/fake_quantize/SymmQuant/Abs"; +"maskrcnn/tf.math.subtract_33/fake_quantize/SymmQuant/Abs" -> "maskrcnn/tf.math.subtract_33/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.subtract_33/fake_quantize/SymmQuant/add/y" -> "maskrcnn/tf.math.subtract_33/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.subtract_33/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.subtract_33/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.subtract_33/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.subtract_33/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_33/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_33/fake_quantize/SymmQuant/mul/ReadVariableOp"; +"maskrcnn/tf.math.subtract_33/fake_quantize/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.math.subtract_33/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.subtract_33/fake_quantize/SymmQuant/mul" -> "maskrcnn/tf.math.subtract_33/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_33/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.__operators__.add_38/AddV2"; +"maskrcnn/tf.math.subtract_33/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_4/concat"; +"maskrcnn/tf.math.subtract_24/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_24/fake_quantize/SymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.subtract_24/fake_quantize/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_24/fake_quantize/SymmQuant/Abs"; +"maskrcnn/tf.math.subtract_24/fake_quantize/SymmQuant/Abs" -> "maskrcnn/tf.math.subtract_24/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.subtract_24/fake_quantize/SymmQuant/add/y" -> "maskrcnn/tf.math.subtract_24/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.subtract_24/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.subtract_24/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.subtract_24/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.subtract_24/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_24/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_24/fake_quantize/SymmQuant/mul/ReadVariableOp"; +"maskrcnn/tf.math.subtract_24/fake_quantize/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.math.subtract_24/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.subtract_24/fake_quantize/SymmQuant/mul" -> "maskrcnn/tf.math.subtract_24/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_24/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.__operators__.add_33/AddV2"; +"maskrcnn/tf.math.subtract_24/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_3/concat"; +"maskrcnn/tf.math.subtract_25/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_25/fake_quantize/SymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.subtract_25/fake_quantize/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_25/fake_quantize/SymmQuant/Abs"; +"maskrcnn/tf.math.subtract_25/fake_quantize/SymmQuant/Abs" -> 
"maskrcnn/tf.math.subtract_25/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.subtract_25/fake_quantize/SymmQuant/add/y" -> "maskrcnn/tf.math.subtract_25/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.subtract_25/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.subtract_25/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.subtract_25/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.subtract_25/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_25/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_25/fake_quantize/SymmQuant/mul/ReadVariableOp"; +"maskrcnn/tf.math.subtract_25/fake_quantize/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.math.subtract_25/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.subtract_25/fake_quantize/SymmQuant/mul" -> "maskrcnn/tf.math.subtract_25/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_25/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.__operators__.add_34/AddV2"; +"maskrcnn/tf.math.subtract_25/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_3/concat"; +"maskrcnn/tf.math.subtract_16/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_16/fake_quantize/SymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.subtract_16/fake_quantize/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_16/fake_quantize/SymmQuant/Abs"; +"maskrcnn/tf.math.subtract_16/fake_quantize/SymmQuant/Abs" -> "maskrcnn/tf.math.subtract_16/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.subtract_16/fake_quantize/SymmQuant/add/y" -> "maskrcnn/tf.math.subtract_16/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.subtract_16/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.subtract_16/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.subtract_16/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.subtract_16/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_16/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_16/fake_quantize/SymmQuant/mul/ReadVariableOp"; +"maskrcnn/tf.math.subtract_16/fake_quantize/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.math.subtract_16/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.subtract_16/fake_quantize/SymmQuant/mul" -> "maskrcnn/tf.math.subtract_16/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_16/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.__operators__.add_29/AddV2"; +"maskrcnn/tf.math.subtract_16/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_2/concat"; +"maskrcnn/tf.math.subtract_17/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_17/fake_quantize/SymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.subtract_17/fake_quantize/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_17/fake_quantize/SymmQuant/Abs"; +"maskrcnn/tf.math.subtract_17/fake_quantize/SymmQuant/Abs" -> "maskrcnn/tf.math.subtract_17/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.subtract_17/fake_quantize/SymmQuant/add/y" -> "maskrcnn/tf.math.subtract_17/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.subtract_17/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.subtract_17/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.subtract_17/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.subtract_17/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_17/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" -> 
"maskrcnn/tf.math.subtract_17/fake_quantize/SymmQuant/mul/ReadVariableOp"; +"maskrcnn/tf.math.subtract_17/fake_quantize/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.math.subtract_17/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.subtract_17/fake_quantize/SymmQuant/mul" -> "maskrcnn/tf.math.subtract_17/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_17/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.__operators__.add_30/AddV2"; +"maskrcnn/tf.math.subtract_17/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_2/concat"; +"maskrcnn/tf.math.subtract_8/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_8/fake_quantize/SymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.subtract_8/fake_quantize/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_8/fake_quantize/SymmQuant/Abs"; +"maskrcnn/tf.math.subtract_8/fake_quantize/SymmQuant/Abs" -> "maskrcnn/tf.math.subtract_8/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.subtract_8/fake_quantize/SymmQuant/add/y" -> "maskrcnn/tf.math.subtract_8/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.subtract_8/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.subtract_8/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.subtract_8/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.subtract_8/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_8/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_8/fake_quantize/SymmQuant/mul/ReadVariableOp"; +"maskrcnn/tf.math.subtract_8/fake_quantize/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.math.subtract_8/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.subtract_8/fake_quantize/SymmQuant/mul" -> "maskrcnn/tf.math.subtract_8/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_8/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.__operators__.add_25/AddV2"; +"maskrcnn/tf.math.subtract_8/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_1/concat"; +"maskrcnn/tf.math.subtract_9/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_9/fake_quantize/SymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.subtract_9/fake_quantize/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_9/fake_quantize/SymmQuant/Abs"; +"maskrcnn/tf.math.subtract_9/fake_quantize/SymmQuant/Abs" -> "maskrcnn/tf.math.subtract_9/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.subtract_9/fake_quantize/SymmQuant/add/y" -> "maskrcnn/tf.math.subtract_9/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.subtract_9/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.subtract_9/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.subtract_9/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.subtract_9/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_9/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_9/fake_quantize/SymmQuant/mul/ReadVariableOp"; +"maskrcnn/tf.math.subtract_9/fake_quantize/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.math.subtract_9/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.subtract_9/fake_quantize/SymmQuant/mul" -> "maskrcnn/tf.math.subtract_9/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_9/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.__operators__.add_26/AddV2"; +"maskrcnn/tf.math.subtract_9/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_1/concat"; 
+"maskrcnn/tf.math.subtract/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract/fake_quantize/SymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.subtract/fake_quantize/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract/fake_quantize/SymmQuant/Abs"; +"maskrcnn/tf.math.subtract/fake_quantize/SymmQuant/Abs" -> "maskrcnn/tf.math.subtract/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.subtract/fake_quantize/SymmQuant/add/y" -> "maskrcnn/tf.math.subtract/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.subtract/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.subtract/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.subtract/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.subtract/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract/fake_quantize/SymmQuant/mul/ReadVariableOp"; +"maskrcnn/tf.math.subtract/fake_quantize/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.math.subtract/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.subtract/fake_quantize/SymmQuant/mul" -> "maskrcnn/tf.math.subtract/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.__operators__.add_21/AddV2"; +"maskrcnn/tf.math.subtract/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat/concat"; +"maskrcnn/tf.math.subtract_1/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_1/fake_quantize/SymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.subtract_1/fake_quantize/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_1/fake_quantize/SymmQuant/Abs"; +"maskrcnn/tf.math.subtract_1/fake_quantize/SymmQuant/Abs" -> "maskrcnn/tf.math.subtract_1/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.subtract_1/fake_quantize/SymmQuant/add/y" -> "maskrcnn/tf.math.subtract_1/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.subtract_1/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.subtract_1/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.subtract_1/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.subtract_1/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_1/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_1/fake_quantize/SymmQuant/mul/ReadVariableOp"; +"maskrcnn/tf.math.subtract_1/fake_quantize/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.math.subtract_1/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.subtract_1/fake_quantize/SymmQuant/mul" -> "maskrcnn/tf.math.subtract_1/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_1/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.__operators__.add_22/AddV2"; +"maskrcnn/tf.math.subtract_1/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat/concat"; "maskrcnn/tf.__operators__.add_38/AddV2" -> "maskrcnn/tf.__operators__.add_38/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.__operators__.add_37/AddV2" -> "maskrcnn/tf.__operators__.add_37/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.__operators__.add_34/AddV2" -> "maskrcnn/tf.__operators__.add_34/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; @@ -23915,9 +23959,9 @@ self_1 -> "maskrcnn/image_info/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_36/Sub/y" -> "maskrcnn/tf.math.subtract_36/Sub"; "maskrcnn/tf.math.subtract_36/Sub" -> 
"maskrcnn/tf.math.subtract_36/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_35/Sub/y" -> "maskrcnn/tf.math.subtract_35/Sub"; -"maskrcnn/tf.math.subtract_35/Sub" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_3/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_35/Sub" -> "maskrcnn/tf.math.subtract_35/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_34/Sub/y" -> "maskrcnn/tf.math.subtract_34/Sub"; -"maskrcnn/tf.math.subtract_34/Sub" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_2/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_34/Sub" -> "maskrcnn/tf.math.subtract_34/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_31/Sub/y" -> "maskrcnn/tf.math.subtract_31/Sub"; "maskrcnn/tf.math.subtract_31/Sub" -> "maskrcnn/tf.math.subtract_31/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_30/Sub/y" -> "maskrcnn/tf.math.subtract_30/Sub"; @@ -23927,9 +23971,9 @@ self_1 -> "maskrcnn/image_info/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_28/Sub/y" -> "maskrcnn/tf.math.subtract_28/Sub"; "maskrcnn/tf.math.subtract_28/Sub" -> "maskrcnn/tf.math.subtract_28/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_27/Sub/y" -> "maskrcnn/tf.math.subtract_27/Sub"; -"maskrcnn/tf.math.subtract_27/Sub" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_3/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_27/Sub" -> "maskrcnn/tf.math.subtract_27/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_26/Sub/y" -> "maskrcnn/tf.math.subtract_26/Sub"; -"maskrcnn/tf.math.subtract_26/Sub" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_2/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_26/Sub" -> "maskrcnn/tf.math.subtract_26/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_23/Sub/y" -> "maskrcnn/tf.math.subtract_23/Sub"; "maskrcnn/tf.math.subtract_23/Sub" -> "maskrcnn/tf.math.subtract_23/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_22/Sub/y" -> "maskrcnn/tf.math.subtract_22/Sub"; @@ -23939,9 +23983,9 @@ self_1 -> "maskrcnn/image_info/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_20/Sub/y" -> "maskrcnn/tf.math.subtract_20/Sub"; "maskrcnn/tf.math.subtract_20/Sub" -> "maskrcnn/tf.math.subtract_20/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_19/Sub/y" -> "maskrcnn/tf.math.subtract_19/Sub"; -"maskrcnn/tf.math.subtract_19/Sub" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_3/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_19/Sub" -> "maskrcnn/tf.math.subtract_19/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_18/Sub/y" -> "maskrcnn/tf.math.subtract_18/Sub"; -"maskrcnn/tf.math.subtract_18/Sub" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_2/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_18/Sub" -> "maskrcnn/tf.math.subtract_18/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_15/Sub/y" -> "maskrcnn/tf.math.subtract_15/Sub"; "maskrcnn/tf.math.subtract_15/Sub" -> "maskrcnn/tf.math.subtract_15/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_14/Sub/y" -> "maskrcnn/tf.math.subtract_14/Sub"; @@ -23951,9 +23995,9 @@ self_1 -> 
"maskrcnn/image_info/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_12/Sub/y" -> "maskrcnn/tf.math.subtract_12/Sub"; "maskrcnn/tf.math.subtract_12/Sub" -> "maskrcnn/tf.math.subtract_12/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_11/Sub/y" -> "maskrcnn/tf.math.subtract_11/Sub"; -"maskrcnn/tf.math.subtract_11/Sub" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_3/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_11/Sub" -> "maskrcnn/tf.math.subtract_11/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_10/Sub/y" -> "maskrcnn/tf.math.subtract_10/Sub"; -"maskrcnn/tf.math.subtract_10/Sub" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_2/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_10/Sub" -> "maskrcnn/tf.math.subtract_10/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_7/Sub/y" -> "maskrcnn/tf.math.subtract_7/Sub"; "maskrcnn/tf.math.subtract_7/Sub" -> "maskrcnn/tf.math.subtract_7/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_6/Sub/y" -> "maskrcnn/tf.math.subtract_6/Sub"; @@ -23963,9 +24007,9 @@ self_1 -> "maskrcnn/image_info/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_4/Sub/y" -> "maskrcnn/tf.math.subtract_4/Sub"; "maskrcnn/tf.math.subtract_4/Sub" -> "maskrcnn/tf.math.subtract_4/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_3/Sub/y" -> "maskrcnn/tf.math.subtract_3/Sub"; -"maskrcnn/tf.math.subtract_3/Sub" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_3/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_3/Sub" -> "maskrcnn/tf.math.subtract_3/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_2/Sub/y" -> "maskrcnn/tf.math.subtract_2/Sub"; -"maskrcnn/tf.math.subtract_2/Sub" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_2/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_2/Sub" -> "maskrcnn/tf.math.subtract_2/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_36/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_36/fake_quantize/SymmQuant/Abs/ReadVariableOp"; "maskrcnn/tf.math.subtract_36/fake_quantize/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_36/fake_quantize/SymmQuant/Abs"; "maskrcnn/tf.math.subtract_36/fake_quantize/SymmQuant/Abs" -> "maskrcnn/tf.math.subtract_36/fake_quantize/SymmQuant/add"; @@ -24006,22 +24050,26 @@ self_1 -> "maskrcnn/image_info/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_39/fake_quantize/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.math.subtract_39/fake_quantize/SymmQuant/mul"; "maskrcnn/tf.math.subtract_39/fake_quantize/SymmQuant/mul" -> "maskrcnn/tf.math.subtract_39/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_39/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.stack_4/stack"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_2/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_2/Abs"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_2/Abs" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_2/add"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_2/add/y" -> 
"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_2/add"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_2/add" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_2/mul"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_2/add" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_2/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_2/mul/ReadVariableOp" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_2/mul"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_2/mul" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_2/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_2/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_4/concat"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_3/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_3/Abs"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_3/Abs" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_3/add"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_3/add/y" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_3/add"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_3/add" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_3/mul"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_3/add" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_3/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_3/mul/ReadVariableOp" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_3/mul"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_3/mul" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_3/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_3/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_4/concat"; +"maskrcnn/tf.math.subtract_34/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_34/fake_quantize/SymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.subtract_34/fake_quantize/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_34/fake_quantize/SymmQuant/Abs"; +"maskrcnn/tf.math.subtract_34/fake_quantize/SymmQuant/Abs" -> "maskrcnn/tf.math.subtract_34/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.subtract_34/fake_quantize/SymmQuant/add/y" -> "maskrcnn/tf.math.subtract_34/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.subtract_34/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.subtract_34/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.subtract_34/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.subtract_34/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_34/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_34/fake_quantize/SymmQuant/mul/ReadVariableOp"; +"maskrcnn/tf.math.subtract_34/fake_quantize/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.math.subtract_34/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.subtract_34/fake_quantize/SymmQuant/mul" -> 
"maskrcnn/tf.math.subtract_34/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_34/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_4/concat"; +"maskrcnn/tf.math.subtract_35/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_35/fake_quantize/SymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.subtract_35/fake_quantize/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_35/fake_quantize/SymmQuant/Abs"; +"maskrcnn/tf.math.subtract_35/fake_quantize/SymmQuant/Abs" -> "maskrcnn/tf.math.subtract_35/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.subtract_35/fake_quantize/SymmQuant/add/y" -> "maskrcnn/tf.math.subtract_35/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.subtract_35/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.subtract_35/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.subtract_35/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.subtract_35/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_35/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_35/fake_quantize/SymmQuant/mul/ReadVariableOp"; +"maskrcnn/tf.math.subtract_35/fake_quantize/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.math.subtract_35/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.subtract_35/fake_quantize/SymmQuant/mul" -> "maskrcnn/tf.math.subtract_35/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_35/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_4/concat"; "maskrcnn/tf.math.subtract_28/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_28/fake_quantize/SymmQuant/Abs/ReadVariableOp"; "maskrcnn/tf.math.subtract_28/fake_quantize/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_28/fake_quantize/SymmQuant/Abs"; "maskrcnn/tf.math.subtract_28/fake_quantize/SymmQuant/Abs" -> "maskrcnn/tf.math.subtract_28/fake_quantize/SymmQuant/add"; @@ -24062,22 +24110,26 @@ self_1 -> "maskrcnn/image_info/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_31/fake_quantize/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.math.subtract_31/fake_quantize/SymmQuant/mul"; "maskrcnn/tf.math.subtract_31/fake_quantize/SymmQuant/mul" -> "maskrcnn/tf.math.subtract_31/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_31/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.stack_3/stack"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_2/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_2/Abs"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_2/Abs" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_2/add"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_2/add/y" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_2/add"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_2/add" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_2/mul"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_2/add" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_2/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_2/mul/ReadVariableOp" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_2/mul"; 
-"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_2/mul" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_2/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_2/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_3/concat"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_3/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_3/Abs"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_3/Abs" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_3/add"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_3/add/y" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_3/add"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_3/add" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_3/mul"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_3/add" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_3/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_3/mul/ReadVariableOp" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_3/mul"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_3/mul" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_3/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_3/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_3/concat"; +"maskrcnn/tf.math.subtract_26/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_26/fake_quantize/SymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.subtract_26/fake_quantize/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_26/fake_quantize/SymmQuant/Abs"; +"maskrcnn/tf.math.subtract_26/fake_quantize/SymmQuant/Abs" -> "maskrcnn/tf.math.subtract_26/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.subtract_26/fake_quantize/SymmQuant/add/y" -> "maskrcnn/tf.math.subtract_26/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.subtract_26/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.subtract_26/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.subtract_26/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.subtract_26/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_26/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_26/fake_quantize/SymmQuant/mul/ReadVariableOp"; +"maskrcnn/tf.math.subtract_26/fake_quantize/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.math.subtract_26/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.subtract_26/fake_quantize/SymmQuant/mul" -> "maskrcnn/tf.math.subtract_26/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_26/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_3/concat"; +"maskrcnn/tf.math.subtract_27/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_27/fake_quantize/SymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.subtract_27/fake_quantize/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_27/fake_quantize/SymmQuant/Abs"; +"maskrcnn/tf.math.subtract_27/fake_quantize/SymmQuant/Abs" -> "maskrcnn/tf.math.subtract_27/fake_quantize/SymmQuant/add"; 
+"maskrcnn/tf.math.subtract_27/fake_quantize/SymmQuant/add/y" -> "maskrcnn/tf.math.subtract_27/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.subtract_27/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.subtract_27/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.subtract_27/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.subtract_27/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_27/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_27/fake_quantize/SymmQuant/mul/ReadVariableOp"; +"maskrcnn/tf.math.subtract_27/fake_quantize/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.math.subtract_27/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.subtract_27/fake_quantize/SymmQuant/mul" -> "maskrcnn/tf.math.subtract_27/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_27/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_3/concat"; "maskrcnn/tf.math.subtract_20/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_20/fake_quantize/SymmQuant/Abs/ReadVariableOp"; "maskrcnn/tf.math.subtract_20/fake_quantize/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_20/fake_quantize/SymmQuant/Abs"; "maskrcnn/tf.math.subtract_20/fake_quantize/SymmQuant/Abs" -> "maskrcnn/tf.math.subtract_20/fake_quantize/SymmQuant/add"; @@ -24118,22 +24170,26 @@ self_1 -> "maskrcnn/image_info/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_23/fake_quantize/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.math.subtract_23/fake_quantize/SymmQuant/mul"; "maskrcnn/tf.math.subtract_23/fake_quantize/SymmQuant/mul" -> "maskrcnn/tf.math.subtract_23/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_23/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.stack_2/stack"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_2/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_2/Abs"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_2/Abs" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_2/add"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_2/add/y" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_2/add"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_2/add" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_2/mul"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_2/add" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_2/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_2/mul/ReadVariableOp" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_2/mul"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_2/mul" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_2/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_2/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_2/concat"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_3/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_3/Abs"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_3/Abs" -> 
"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_3/add"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_3/add/y" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_3/add"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_3/add" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_3/mul"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_3/add" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_3/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_3/mul/ReadVariableOp" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_3/mul"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_3/mul" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_3/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_3/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_2/concat"; +"maskrcnn/tf.math.subtract_18/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_18/fake_quantize/SymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.subtract_18/fake_quantize/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_18/fake_quantize/SymmQuant/Abs"; +"maskrcnn/tf.math.subtract_18/fake_quantize/SymmQuant/Abs" -> "maskrcnn/tf.math.subtract_18/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.subtract_18/fake_quantize/SymmQuant/add/y" -> "maskrcnn/tf.math.subtract_18/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.subtract_18/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.subtract_18/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.subtract_18/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.subtract_18/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_18/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_18/fake_quantize/SymmQuant/mul/ReadVariableOp"; +"maskrcnn/tf.math.subtract_18/fake_quantize/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.math.subtract_18/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.subtract_18/fake_quantize/SymmQuant/mul" -> "maskrcnn/tf.math.subtract_18/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_18/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_2/concat"; +"maskrcnn/tf.math.subtract_19/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_19/fake_quantize/SymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.subtract_19/fake_quantize/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_19/fake_quantize/SymmQuant/Abs"; +"maskrcnn/tf.math.subtract_19/fake_quantize/SymmQuant/Abs" -> "maskrcnn/tf.math.subtract_19/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.subtract_19/fake_quantize/SymmQuant/add/y" -> "maskrcnn/tf.math.subtract_19/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.subtract_19/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.subtract_19/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.subtract_19/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.subtract_19/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_19/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_19/fake_quantize/SymmQuant/mul/ReadVariableOp"; +"maskrcnn/tf.math.subtract_19/fake_quantize/SymmQuant/mul/ReadVariableOp" -> 
"maskrcnn/tf.math.subtract_19/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.subtract_19/fake_quantize/SymmQuant/mul" -> "maskrcnn/tf.math.subtract_19/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_19/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_2/concat"; "maskrcnn/tf.math.subtract_12/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_12/fake_quantize/SymmQuant/Abs/ReadVariableOp"; "maskrcnn/tf.math.subtract_12/fake_quantize/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_12/fake_quantize/SymmQuant/Abs"; "maskrcnn/tf.math.subtract_12/fake_quantize/SymmQuant/Abs" -> "maskrcnn/tf.math.subtract_12/fake_quantize/SymmQuant/add"; @@ -24174,22 +24230,26 @@ self_1 -> "maskrcnn/image_info/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_15/fake_quantize/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.math.subtract_15/fake_quantize/SymmQuant/mul"; "maskrcnn/tf.math.subtract_15/fake_quantize/SymmQuant/mul" -> "maskrcnn/tf.math.subtract_15/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_15/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.stack_1/stack"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_2/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_2/Abs"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_2/Abs" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_2/add"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_2/add/y" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_2/add"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_2/add" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_2/mul"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_2/add" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_2/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_2/mul/ReadVariableOp" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_2/mul"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_2/mul" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_2/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_2/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_1/concat"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_3/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_3/Abs"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_3/Abs" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_3/add"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_3/add/y" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_3/add"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_3/add" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_3/mul"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_3/add" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_3/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_3/mul/ReadVariableOp" 
-> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_3/mul"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_3/mul" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_3/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_3/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_1/concat"; +"maskrcnn/tf.math.subtract_10/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_10/fake_quantize/SymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.subtract_10/fake_quantize/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_10/fake_quantize/SymmQuant/Abs"; +"maskrcnn/tf.math.subtract_10/fake_quantize/SymmQuant/Abs" -> "maskrcnn/tf.math.subtract_10/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.subtract_10/fake_quantize/SymmQuant/add/y" -> "maskrcnn/tf.math.subtract_10/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.subtract_10/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.subtract_10/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.subtract_10/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.subtract_10/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_10/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_10/fake_quantize/SymmQuant/mul/ReadVariableOp"; +"maskrcnn/tf.math.subtract_10/fake_quantize/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.math.subtract_10/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.subtract_10/fake_quantize/SymmQuant/mul" -> "maskrcnn/tf.math.subtract_10/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_10/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_1/concat"; +"maskrcnn/tf.math.subtract_11/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_11/fake_quantize/SymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.subtract_11/fake_quantize/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_11/fake_quantize/SymmQuant/Abs"; +"maskrcnn/tf.math.subtract_11/fake_quantize/SymmQuant/Abs" -> "maskrcnn/tf.math.subtract_11/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.subtract_11/fake_quantize/SymmQuant/add/y" -> "maskrcnn/tf.math.subtract_11/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.subtract_11/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.subtract_11/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.subtract_11/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.subtract_11/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_11/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_11/fake_quantize/SymmQuant/mul/ReadVariableOp"; +"maskrcnn/tf.math.subtract_11/fake_quantize/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.math.subtract_11/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.subtract_11/fake_quantize/SymmQuant/mul" -> "maskrcnn/tf.math.subtract_11/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_11/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_1/concat"; "maskrcnn/tf.math.subtract_4/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_4/fake_quantize/SymmQuant/Abs/ReadVariableOp"; "maskrcnn/tf.math.subtract_4/fake_quantize/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_4/fake_quantize/SymmQuant/Abs"; "maskrcnn/tf.math.subtract_4/fake_quantize/SymmQuant/Abs" -> 
"maskrcnn/tf.math.subtract_4/fake_quantize/SymmQuant/add"; @@ -24230,22 +24290,26 @@ self_1 -> "maskrcnn/image_info/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_7/fake_quantize/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.math.subtract_7/fake_quantize/SymmQuant/mul"; "maskrcnn/tf.math.subtract_7/fake_quantize/SymmQuant/mul" -> "maskrcnn/tf.math.subtract_7/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_7/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.stack/stack"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_2/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_2/Abs"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_2/Abs" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_2/add"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_2/add/y" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_2/add"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_2/add" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_2/mul"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_2/add" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_2/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_2/mul/ReadVariableOp" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_2/mul"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_2/mul" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_2/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_2/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat/concat"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_3/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_3/Abs"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_3/Abs" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_3/add"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_3/add/y" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_3/add"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_3/add" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_3/mul"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_3/add" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_3/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_3/mul/ReadVariableOp" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_3/mul"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_3/mul" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_3/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_3/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat/concat"; +"maskrcnn/tf.math.subtract_2/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_2/fake_quantize/SymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.subtract_2/fake_quantize/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_2/fake_quantize/SymmQuant/Abs"; 
+"maskrcnn/tf.math.subtract_2/fake_quantize/SymmQuant/Abs" -> "maskrcnn/tf.math.subtract_2/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.subtract_2/fake_quantize/SymmQuant/add/y" -> "maskrcnn/tf.math.subtract_2/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.subtract_2/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.subtract_2/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.subtract_2/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.subtract_2/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_2/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_2/fake_quantize/SymmQuant/mul/ReadVariableOp"; +"maskrcnn/tf.math.subtract_2/fake_quantize/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.math.subtract_2/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.subtract_2/fake_quantize/SymmQuant/mul" -> "maskrcnn/tf.math.subtract_2/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_2/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat/concat"; +"maskrcnn/tf.math.subtract_3/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_3/fake_quantize/SymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.subtract_3/fake_quantize/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_3/fake_quantize/SymmQuant/Abs"; +"maskrcnn/tf.math.subtract_3/fake_quantize/SymmQuant/Abs" -> "maskrcnn/tf.math.subtract_3/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.subtract_3/fake_quantize/SymmQuant/add/y" -> "maskrcnn/tf.math.subtract_3/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.subtract_3/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.subtract_3/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.subtract_3/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.subtract_3/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_3/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_3/fake_quantize/SymmQuant/mul/ReadVariableOp"; +"maskrcnn/tf.math.subtract_3/fake_quantize/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.math.subtract_3/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.subtract_3/fake_quantize/SymmQuant/mul" -> "maskrcnn/tf.math.subtract_3/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_3/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat/concat"; "maskrcnn/tf.concat_4/concat/axis" -> "maskrcnn/tf.concat_4/concat"; "maskrcnn/tf.concat_4/concat" -> "maskrcnn/tf.math.minimum_14/Minimum"; "maskrcnn/tf.stack_4/stack" -> "maskrcnn/tf.math.minimum_14/Minimum"; @@ -24451,31 +24515,31 @@ self_1 -> "maskrcnn/image_info/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.image.combined_non_max_suppression/combined_non_max_suppression/CombinedNonMaxSuppression/max_output_size_per_class" -> "maskrcnn/tf.image.combined_non_max_suppression/combined_non_max_suppression/CombinedNonMaxSuppression"; "maskrcnn/tf.image.combined_non_max_suppression/combined_non_max_suppression/CombinedNonMaxSuppression/max_total_size" -> "maskrcnn/tf.image.combined_non_max_suppression/combined_non_max_suppression/CombinedNonMaxSuppression"; "maskrcnn/tf.image.combined_non_max_suppression/combined_non_max_suppression/CombinedNonMaxSuppression" -> "maskrcnn/tf.concat_6/concat"; -"maskrcnn/tf.image.combined_non_max_suppression/combined_non_max_suppression/CombinedNonMaxSuppression" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars"; 
+"maskrcnn/tf.image.combined_non_max_suppression/combined_non_max_suppression/CombinedNonMaxSuppression" -> "maskrcnn/tf.concat_5/fake_quantize_I0/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.image.combined_non_max_suppression_1/combined_non_max_suppression/iou_threshold" -> "maskrcnn/tf.image.combined_non_max_suppression_1/combined_non_max_suppression/CombinedNonMaxSuppression"; "maskrcnn/tf.image.combined_non_max_suppression_1/combined_non_max_suppression/score_threshold" -> "maskrcnn/tf.image.combined_non_max_suppression_1/combined_non_max_suppression/CombinedNonMaxSuppression"; "maskrcnn/tf.image.combined_non_max_suppression_1/combined_non_max_suppression/CombinedNonMaxSuppression/max_output_size_per_class" -> "maskrcnn/tf.image.combined_non_max_suppression_1/combined_non_max_suppression/CombinedNonMaxSuppression"; "maskrcnn/tf.image.combined_non_max_suppression_1/combined_non_max_suppression/CombinedNonMaxSuppression/max_total_size" -> "maskrcnn/tf.image.combined_non_max_suppression_1/combined_non_max_suppression/CombinedNonMaxSuppression"; "maskrcnn/tf.image.combined_non_max_suppression_1/combined_non_max_suppression/CombinedNonMaxSuppression" -> "maskrcnn/tf.concat_6/concat"; -"maskrcnn/tf.image.combined_non_max_suppression_1/combined_non_max_suppression/CombinedNonMaxSuppression" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.image.combined_non_max_suppression_1/combined_non_max_suppression/CombinedNonMaxSuppression" -> "maskrcnn/tf.concat_5/fake_quantize_I1/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.image.combined_non_max_suppression_2/combined_non_max_suppression/iou_threshold" -> "maskrcnn/tf.image.combined_non_max_suppression_2/combined_non_max_suppression/CombinedNonMaxSuppression"; "maskrcnn/tf.image.combined_non_max_suppression_2/combined_non_max_suppression/score_threshold" -> "maskrcnn/tf.image.combined_non_max_suppression_2/combined_non_max_suppression/CombinedNonMaxSuppression"; "maskrcnn/tf.image.combined_non_max_suppression_2/combined_non_max_suppression/CombinedNonMaxSuppression/max_output_size_per_class" -> "maskrcnn/tf.image.combined_non_max_suppression_2/combined_non_max_suppression/CombinedNonMaxSuppression"; "maskrcnn/tf.image.combined_non_max_suppression_2/combined_non_max_suppression/CombinedNonMaxSuppression/max_total_size" -> "maskrcnn/tf.image.combined_non_max_suppression_2/combined_non_max_suppression/CombinedNonMaxSuppression"; "maskrcnn/tf.image.combined_non_max_suppression_2/combined_non_max_suppression/CombinedNonMaxSuppression" -> "maskrcnn/tf.concat_6/concat"; -"maskrcnn/tf.image.combined_non_max_suppression_2/combined_non_max_suppression/CombinedNonMaxSuppression" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_2/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.image.combined_non_max_suppression_2/combined_non_max_suppression/CombinedNonMaxSuppression" -> "maskrcnn/tf.concat_5/fake_quantize_I2/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.image.combined_non_max_suppression_3/combined_non_max_suppression/iou_threshold" -> "maskrcnn/tf.image.combined_non_max_suppression_3/combined_non_max_suppression/CombinedNonMaxSuppression"; "maskrcnn/tf.image.combined_non_max_suppression_3/combined_non_max_suppression/score_threshold" -> "maskrcnn/tf.image.combined_non_max_suppression_3/combined_non_max_suppression/CombinedNonMaxSuppression"; "maskrcnn/tf.image.combined_non_max_suppression_3/combined_non_max_suppression/CombinedNonMaxSuppression/max_output_size_per_class" 
-> "maskrcnn/tf.image.combined_non_max_suppression_3/combined_non_max_suppression/CombinedNonMaxSuppression"; "maskrcnn/tf.image.combined_non_max_suppression_3/combined_non_max_suppression/CombinedNonMaxSuppression/max_total_size" -> "maskrcnn/tf.image.combined_non_max_suppression_3/combined_non_max_suppression/CombinedNonMaxSuppression"; "maskrcnn/tf.image.combined_non_max_suppression_3/combined_non_max_suppression/CombinedNonMaxSuppression" -> "maskrcnn/tf.concat_6/concat"; -"maskrcnn/tf.image.combined_non_max_suppression_3/combined_non_max_suppression/CombinedNonMaxSuppression" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_3/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.image.combined_non_max_suppression_3/combined_non_max_suppression/CombinedNonMaxSuppression" -> "maskrcnn/tf.concat_5/fake_quantize_I3/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.image.combined_non_max_suppression_4/combined_non_max_suppression/iou_threshold" -> "maskrcnn/tf.image.combined_non_max_suppression_4/combined_non_max_suppression/CombinedNonMaxSuppression"; "maskrcnn/tf.image.combined_non_max_suppression_4/combined_non_max_suppression/score_threshold" -> "maskrcnn/tf.image.combined_non_max_suppression_4/combined_non_max_suppression/CombinedNonMaxSuppression"; "maskrcnn/tf.image.combined_non_max_suppression_4/combined_non_max_suppression/CombinedNonMaxSuppression/max_output_size_per_class" -> "maskrcnn/tf.image.combined_non_max_suppression_4/combined_non_max_suppression/CombinedNonMaxSuppression"; "maskrcnn/tf.image.combined_non_max_suppression_4/combined_non_max_suppression/CombinedNonMaxSuppression/max_total_size" -> "maskrcnn/tf.image.combined_non_max_suppression_4/combined_non_max_suppression/CombinedNonMaxSuppression"; "maskrcnn/tf.image.combined_non_max_suppression_4/combined_non_max_suppression/CombinedNonMaxSuppression" -> "maskrcnn/tf.concat_6/concat"; -"maskrcnn/tf.image.combined_non_max_suppression_4/combined_non_max_suppression/CombinedNonMaxSuppression" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_4/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.image.combined_non_max_suppression_4/combined_non_max_suppression/CombinedNonMaxSuppression" -> "maskrcnn/tf.concat_5/fake_quantize_I4/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.concat_6/concat/axis" -> "maskrcnn/tf.concat_6/concat"; "maskrcnn/tf.concat_6/concat" -> "maskrcnn/tf.math.top_k/TopKV2"; "maskrcnn/tf.math.top_k/TopKV2/k" -> "maskrcnn/tf.math.top_k/TopKV2"; @@ -24504,56 +24568,56 @@ self_1 -> "maskrcnn/image_info/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.ones/ones/Const" -> "maskrcnn/tf.ones/ones"; "maskrcnn/tf.ones/ones" -> "maskrcnn/tf.math.multiply_30/Mul"; "maskrcnn/tf.math.multiply_30/Mul" -> "maskrcnn/tf.stack_5/stack"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant/Abs/ReadVariableOp"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_1/Abs/ReadVariableOp"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_2/Abs/ReadVariableOp"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant/Abs/ReadVariableOp/resource" -> 
"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_3/Abs/ReadVariableOp"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_4/Abs/ReadVariableOp"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant/Abs"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant/Abs" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant/add"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant/add/y" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant/add"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant/add" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant/mul"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant/add" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant/mul/ReadVariableOp"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_1/mul/ReadVariableOp"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_2/mul/ReadVariableOp"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_3/mul/ReadVariableOp"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_4/mul/ReadVariableOp"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant/mul"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant/mul" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_5/concat"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_1/Abs/ReadVariableOp" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_1/Abs"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_1/Abs" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_1/add"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_1/add/y" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_1/add"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_1/add" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_1/mul"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_1/add" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_1/mul/ReadVariableOp" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_1/mul"; 
-"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_1/mul" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_5/concat"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_2/Abs/ReadVariableOp" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_2/Abs"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_2/Abs" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_2/add"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_2/add/y" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_2/add"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_2/add" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_2/mul"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_2/add" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_2/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_2/mul/ReadVariableOp" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_2/mul"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_2/mul" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_2/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_2/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_5/concat"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_3/Abs/ReadVariableOp" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_3/Abs"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_3/Abs" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_3/add"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_3/add/y" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_3/add"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_3/add" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_3/mul"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_3/add" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_3/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_3/mul/ReadVariableOp" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_3/mul"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_3/mul" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_3/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_3/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_5/concat"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_4/Abs/ReadVariableOp" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_4/Abs"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_4/Abs" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_4/add"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_4/add/y" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_4/add"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_4/add" -> 
"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_4/mul"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_4/add" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_4/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_4/mul/ReadVariableOp" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_4/mul"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_4/mul" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_4/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_4/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_5/concat"; +"maskrcnn/tf.concat_5/fake_quantize_I0/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.concat_5/fake_quantize_I0/SymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.concat_5/fake_quantize_I0/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.concat_5/fake_quantize_I0/SymmQuant/Abs"; +"maskrcnn/tf.concat_5/fake_quantize_I0/SymmQuant/Abs" -> "maskrcnn/tf.concat_5/fake_quantize_I0/SymmQuant/add"; +"maskrcnn/tf.concat_5/fake_quantize_I0/SymmQuant/add/y" -> "maskrcnn/tf.concat_5/fake_quantize_I0/SymmQuant/add"; +"maskrcnn/tf.concat_5/fake_quantize_I0/SymmQuant/add" -> "maskrcnn/tf.concat_5/fake_quantize_I0/SymmQuant/mul"; +"maskrcnn/tf.concat_5/fake_quantize_I0/SymmQuant/add" -> "maskrcnn/tf.concat_5/fake_quantize_I0/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.concat_5/fake_quantize_I0/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.concat_5/fake_quantize_I0/SymmQuant/mul/ReadVariableOp"; +"maskrcnn/tf.concat_5/fake_quantize_I0/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.concat_5/fake_quantize_I0/SymmQuant/mul"; +"maskrcnn/tf.concat_5/fake_quantize_I0/SymmQuant/mul" -> "maskrcnn/tf.concat_5/fake_quantize_I0/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.concat_5/fake_quantize_I0/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_5/concat"; +"maskrcnn/tf.concat_5/fake_quantize_I1/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.concat_5/fake_quantize_I1/SymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.concat_5/fake_quantize_I1/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.concat_5/fake_quantize_I1/SymmQuant/Abs"; +"maskrcnn/tf.concat_5/fake_quantize_I1/SymmQuant/Abs" -> "maskrcnn/tf.concat_5/fake_quantize_I1/SymmQuant/add"; +"maskrcnn/tf.concat_5/fake_quantize_I1/SymmQuant/add/y" -> "maskrcnn/tf.concat_5/fake_quantize_I1/SymmQuant/add"; +"maskrcnn/tf.concat_5/fake_quantize_I1/SymmQuant/add" -> "maskrcnn/tf.concat_5/fake_quantize_I1/SymmQuant/mul"; +"maskrcnn/tf.concat_5/fake_quantize_I1/SymmQuant/add" -> "maskrcnn/tf.concat_5/fake_quantize_I1/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.concat_5/fake_quantize_I1/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.concat_5/fake_quantize_I1/SymmQuant/mul/ReadVariableOp"; +"maskrcnn/tf.concat_5/fake_quantize_I1/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.concat_5/fake_quantize_I1/SymmQuant/mul"; +"maskrcnn/tf.concat_5/fake_quantize_I1/SymmQuant/mul" -> "maskrcnn/tf.concat_5/fake_quantize_I1/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.concat_5/fake_quantize_I1/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_5/concat"; +"maskrcnn/tf.concat_5/fake_quantize_I2/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.concat_5/fake_quantize_I2/SymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.concat_5/fake_quantize_I2/SymmQuant/Abs/ReadVariableOp" -> 
"maskrcnn/tf.concat_5/fake_quantize_I2/SymmQuant/Abs"; +"maskrcnn/tf.concat_5/fake_quantize_I2/SymmQuant/Abs" -> "maskrcnn/tf.concat_5/fake_quantize_I2/SymmQuant/add"; +"maskrcnn/tf.concat_5/fake_quantize_I2/SymmQuant/add/y" -> "maskrcnn/tf.concat_5/fake_quantize_I2/SymmQuant/add"; +"maskrcnn/tf.concat_5/fake_quantize_I2/SymmQuant/add" -> "maskrcnn/tf.concat_5/fake_quantize_I2/SymmQuant/mul"; +"maskrcnn/tf.concat_5/fake_quantize_I2/SymmQuant/add" -> "maskrcnn/tf.concat_5/fake_quantize_I2/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.concat_5/fake_quantize_I2/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.concat_5/fake_quantize_I2/SymmQuant/mul/ReadVariableOp"; +"maskrcnn/tf.concat_5/fake_quantize_I2/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.concat_5/fake_quantize_I2/SymmQuant/mul"; +"maskrcnn/tf.concat_5/fake_quantize_I2/SymmQuant/mul" -> "maskrcnn/tf.concat_5/fake_quantize_I2/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.concat_5/fake_quantize_I2/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_5/concat"; +"maskrcnn/tf.concat_5/fake_quantize_I3/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.concat_5/fake_quantize_I3/SymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.concat_5/fake_quantize_I3/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.concat_5/fake_quantize_I3/SymmQuant/Abs"; +"maskrcnn/tf.concat_5/fake_quantize_I3/SymmQuant/Abs" -> "maskrcnn/tf.concat_5/fake_quantize_I3/SymmQuant/add"; +"maskrcnn/tf.concat_5/fake_quantize_I3/SymmQuant/add/y" -> "maskrcnn/tf.concat_5/fake_quantize_I3/SymmQuant/add"; +"maskrcnn/tf.concat_5/fake_quantize_I3/SymmQuant/add" -> "maskrcnn/tf.concat_5/fake_quantize_I3/SymmQuant/mul"; +"maskrcnn/tf.concat_5/fake_quantize_I3/SymmQuant/add" -> "maskrcnn/tf.concat_5/fake_quantize_I3/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.concat_5/fake_quantize_I3/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.concat_5/fake_quantize_I3/SymmQuant/mul/ReadVariableOp"; +"maskrcnn/tf.concat_5/fake_quantize_I3/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.concat_5/fake_quantize_I3/SymmQuant/mul"; +"maskrcnn/tf.concat_5/fake_quantize_I3/SymmQuant/mul" -> "maskrcnn/tf.concat_5/fake_quantize_I3/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.concat_5/fake_quantize_I3/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_5/concat"; +"maskrcnn/tf.concat_5/fake_quantize_I4/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.concat_5/fake_quantize_I4/SymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.concat_5/fake_quantize_I4/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.concat_5/fake_quantize_I4/SymmQuant/Abs"; +"maskrcnn/tf.concat_5/fake_quantize_I4/SymmQuant/Abs" -> "maskrcnn/tf.concat_5/fake_quantize_I4/SymmQuant/add"; +"maskrcnn/tf.concat_5/fake_quantize_I4/SymmQuant/add/y" -> "maskrcnn/tf.concat_5/fake_quantize_I4/SymmQuant/add"; +"maskrcnn/tf.concat_5/fake_quantize_I4/SymmQuant/add" -> "maskrcnn/tf.concat_5/fake_quantize_I4/SymmQuant/mul"; +"maskrcnn/tf.concat_5/fake_quantize_I4/SymmQuant/add" -> "maskrcnn/tf.concat_5/fake_quantize_I4/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.concat_5/fake_quantize_I4/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.concat_5/fake_quantize_I4/SymmQuant/mul/ReadVariableOp"; +"maskrcnn/tf.concat_5/fake_quantize_I4/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.concat_5/fake_quantize_I4/SymmQuant/mul"; +"maskrcnn/tf.concat_5/fake_quantize_I4/SymmQuant/mul" -> "maskrcnn/tf.concat_5/fake_quantize_I4/SymmQuant/FakeQuantWithMinMaxVars"; 
+"maskrcnn/tf.concat_5/fake_quantize_I4/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_5/concat"; "maskrcnn/tf.concat_5/concat/axis" -> "maskrcnn/tf.concat_5/concat"; "maskrcnn/tf.concat_5/concat" -> "maskrcnn/tf.compat.v1.gather_nd/GatherNd"; "maskrcnn/tf.stack_5/stack" -> "maskrcnn/tf.compat.v1.gather_nd/GatherNd"; @@ -24685,43 +24749,43 @@ self_1 -> "maskrcnn/image_info/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.pow/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.math.truediv_3/truediv"; "maskrcnn/tf.expand_dims_12/ExpandDims/dim" -> "maskrcnn/tf.expand_dims_12/ExpandDims"; "maskrcnn/tf.expand_dims_12/ExpandDims" -> "maskrcnn/tf.math.truediv_1/truediv"; -"maskrcnn/tf.math.truediv_2/truediv" -> "maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.truediv_3/truediv" -> "maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.truediv_1/truediv" -> "maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant_2/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.truediv_2/truediv" -> "maskrcnn/tf.math.truediv_2/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.truediv_3/truediv" -> "maskrcnn/tf.math.truediv_3/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.truediv_1/truediv" -> "maskrcnn/tf.math.truediv_1/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_42/Sub/y" -> "maskrcnn/tf.math.subtract_42/Sub"; "maskrcnn/tf.math.subtract_42/Sub" -> "maskrcnn/tf.cast_23/Cast"; "maskrcnn/tf.math.subtract_42/Sub" -> "maskrcnn/tf.compat.v1.gather_1/GatherV2"; "maskrcnn/tf.math.subtract_42/Sub" -> "maskrcnn/tf.compat.v1.gather/GatherV2"; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp"; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant_1/Abs/ReadVariableOp"; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant_2/Abs/ReadVariableOp"; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant/Abs"; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant/Abs" -> "maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant/add"; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant/add/y" -> "maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant/add"; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant/add" -> "maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant/mul"; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant/add" -> "maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp"; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp/resource" -> 
"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant_1/mul/ReadVariableOp"; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant_2/mul/ReadVariableOp"; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant/mul"; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant/mul" -> "maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.expand_dims_14/ExpandDims"; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant_1/Abs/ReadVariableOp" -> "maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant_1/Abs"; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant_1/Abs" -> "maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant_1/add"; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant_1/add/y" -> "maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant_1/add"; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant_1/add" -> "maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant_1/mul"; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant_1/add" -> "maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant_1/mul/ReadVariableOp" -> "maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant_1/mul"; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant_1/mul" -> "maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.expand_dims_13/ExpandDims"; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant_2/Abs/ReadVariableOp" -> "maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant_2/Abs"; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant_2/Abs" -> "maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant_2/add"; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant_2/add/y" -> "maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant_2/add"; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant_2/add" -> "maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant_2/mul"; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant_2/add" -> "maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant_2/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant_2/mul/ReadVariableOp" -> "maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant_2/mul"; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant_2/mul" -> "maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant_2/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant_2/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.__operators__.getitem_27/strided_slice"; 
+"maskrcnn/tf.math.truediv_2/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.truediv_2/fake_quantize/SymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.truediv_2/fake_quantize/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.truediv_2/fake_quantize/SymmQuant/Abs"; +"maskrcnn/tf.math.truediv_2/fake_quantize/SymmQuant/Abs" -> "maskrcnn/tf.math.truediv_2/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.truediv_2/fake_quantize/SymmQuant/add/y" -> "maskrcnn/tf.math.truediv_2/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.truediv_2/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.truediv_2/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.truediv_2/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.truediv_2/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.truediv_2/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.math.truediv_2/fake_quantize/SymmQuant/mul/ReadVariableOp"; +"maskrcnn/tf.math.truediv_2/fake_quantize/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.math.truediv_2/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.truediv_2/fake_quantize/SymmQuant/mul" -> "maskrcnn/tf.math.truediv_2/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.truediv_2/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.expand_dims_14/ExpandDims"; +"maskrcnn/tf.math.truediv_3/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.truediv_3/fake_quantize/SymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.truediv_3/fake_quantize/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.truediv_3/fake_quantize/SymmQuant/Abs"; +"maskrcnn/tf.math.truediv_3/fake_quantize/SymmQuant/Abs" -> "maskrcnn/tf.math.truediv_3/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.truediv_3/fake_quantize/SymmQuant/add/y" -> "maskrcnn/tf.math.truediv_3/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.truediv_3/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.truediv_3/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.truediv_3/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.truediv_3/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.truediv_3/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.math.truediv_3/fake_quantize/SymmQuant/mul/ReadVariableOp"; +"maskrcnn/tf.math.truediv_3/fake_quantize/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.math.truediv_3/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.truediv_3/fake_quantize/SymmQuant/mul" -> "maskrcnn/tf.math.truediv_3/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.truediv_3/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.expand_dims_13/ExpandDims"; +"maskrcnn/tf.math.truediv_1/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.truediv_1/fake_quantize/SymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.truediv_1/fake_quantize/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.truediv_1/fake_quantize/SymmQuant/Abs"; +"maskrcnn/tf.math.truediv_1/fake_quantize/SymmQuant/Abs" -> "maskrcnn/tf.math.truediv_1/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.truediv_1/fake_quantize/SymmQuant/add/y" -> "maskrcnn/tf.math.truediv_1/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.truediv_1/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.truediv_1/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.truediv_1/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.truediv_1/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; 
+"maskrcnn/tf.math.truediv_1/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.math.truediv_1/fake_quantize/SymmQuant/mul/ReadVariableOp"; +"maskrcnn/tf.math.truediv_1/fake_quantize/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.math.truediv_1/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.truediv_1/fake_quantize/SymmQuant/mul" -> "maskrcnn/tf.math.truediv_1/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.truediv_1/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.__operators__.getitem_27/strided_slice"; "maskrcnn/tf.cast_23/Cast" -> "maskrcnn/tf.cast_23/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.__operators__.getitem_27/strided_slice/stack" -> "maskrcnn/tf.__operators__.getitem_27/strided_slice"; "maskrcnn/tf.__operators__.getitem_27/strided_slice/stack_1" -> "maskrcnn/tf.__operators__.getitem_27/strided_slice"; @@ -25052,9 +25116,9 @@ self_1 -> "maskrcnn/image_info/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.multiply_32/fake_quantize/SymmQuant/mul" -> "maskrcnn/tf.math.multiply_32/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.multiply_32/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.math.truediv_6/truediv"; "maskrcnn/tf.math.subtract_44/Sub/y" -> "maskrcnn/tf.math.subtract_44/Sub"; -"maskrcnn/tf.math.subtract_44/Sub" -> "maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_44/Sub" -> "maskrcnn/tf.math.subtract_44/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_43/Sub/y" -> "maskrcnn/tf.math.subtract_43/Sub"; -"maskrcnn/tf.math.subtract_43/Sub" -> "maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_43/Sub" -> "maskrcnn/tf.math.subtract_43/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.__operators__.getitem_54/strided_slice/stack" -> "maskrcnn/tf.__operators__.getitem_54/strided_slice"; "maskrcnn/tf.__operators__.getitem_54/strided_slice/stack_1" -> "maskrcnn/tf.__operators__.getitem_54/strided_slice"; "maskrcnn/tf.__operators__.getitem_54/strided_slice/stack_2" -> "maskrcnn/tf.__operators__.getitem_54/strided_slice"; @@ -25167,26 +25231,26 @@ self_1 -> "maskrcnn/image_info/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.truediv_8/truediv" -> "maskrcnn/tf.math.truediv_8/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.truediv_6/truediv/y" -> "maskrcnn/tf.math.truediv_6/truediv"; "maskrcnn/tf.math.truediv_6/truediv" -> "maskrcnn/tf.math.truediv_6/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp"; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/SymmQuant_1/Abs/ReadVariableOp"; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/SymmQuant/Abs"; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/SymmQuant/Abs" -> "maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/SymmQuant/add"; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/SymmQuant/add/y" -> 
"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/SymmQuant/add"; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/SymmQuant/add" -> "maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/SymmQuant/mul"; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/SymmQuant/add" -> "maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp"; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/SymmQuant_1/mul/ReadVariableOp"; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/SymmQuant/mul"; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/SymmQuant/mul" -> "maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.expand_dims_16/ExpandDims"; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/SymmQuant_1/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/SymmQuant_1/Abs"; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/SymmQuant_1/Abs" -> "maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/SymmQuant_1/add"; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/SymmQuant_1/add/y" -> "maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/SymmQuant_1/add"; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/SymmQuant_1/add" -> "maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/SymmQuant_1/mul"; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/SymmQuant_1/add" -> "maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/SymmQuant_1/mul/ReadVariableOp" -> "maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/SymmQuant_1/mul"; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/SymmQuant_1/mul" -> "maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.expand_dims_15/ExpandDims"; +"maskrcnn/tf.math.subtract_44/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_44/fake_quantize/SymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.subtract_44/fake_quantize/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_44/fake_quantize/SymmQuant/Abs"; +"maskrcnn/tf.math.subtract_44/fake_quantize/SymmQuant/Abs" -> "maskrcnn/tf.math.subtract_44/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.subtract_44/fake_quantize/SymmQuant/add/y" -> "maskrcnn/tf.math.subtract_44/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.subtract_44/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.subtract_44/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.subtract_44/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.subtract_44/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; 
+"maskrcnn/tf.math.subtract_44/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_44/fake_quantize/SymmQuant/mul/ReadVariableOp"; +"maskrcnn/tf.math.subtract_44/fake_quantize/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.math.subtract_44/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.subtract_44/fake_quantize/SymmQuant/mul" -> "maskrcnn/tf.math.subtract_44/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_44/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.expand_dims_16/ExpandDims"; +"maskrcnn/tf.math.subtract_43/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_43/fake_quantize/SymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.subtract_43/fake_quantize/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_43/fake_quantize/SymmQuant/Abs"; +"maskrcnn/tf.math.subtract_43/fake_quantize/SymmQuant/Abs" -> "maskrcnn/tf.math.subtract_43/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.subtract_43/fake_quantize/SymmQuant/add/y" -> "maskrcnn/tf.math.subtract_43/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.subtract_43/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.subtract_43/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.subtract_43/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.subtract_43/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_43/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_43/fake_quantize/SymmQuant/mul/ReadVariableOp"; +"maskrcnn/tf.math.subtract_43/fake_quantize/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.math.subtract_43/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.subtract_43/fake_quantize/SymmQuant/mul" -> "maskrcnn/tf.math.subtract_43/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_43/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.expand_dims_15/ExpandDims"; "maskrcnn/tf.__operators__.add_53/AddV2" -> "maskrcnn/tf.__operators__.add_53/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.__operators__.add_51/AddV2" -> "maskrcnn/tf.__operators__.add_51/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.__operators__.add_49/AddV2" -> "maskrcnn/tf.__operators__.add_49/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; @@ -26216,38 +26280,34 @@ self_1 -> "maskrcnn/image_info/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.multiply_57/fake_quantize/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.math.multiply_57/fake_quantize/SymmQuant/mul"; "maskrcnn/tf.math.multiply_57/fake_quantize/SymmQuant/mul" -> "maskrcnn/tf.math.multiply_57/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.multiply_57/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.math.subtract_53/Sub"; -"maskrcnn/tf.math.subtract_54/Sub" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_53/Sub" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_54/Sub" -> "maskrcnn/tf.math.subtract_54/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_53/Sub" -> "maskrcnn/tf.math.subtract_53/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.__operators__.getitem_60/strided_slice/stack" -> "maskrcnn/tf.__operators__.getitem_60/strided_slice"; "maskrcnn/tf.__operators__.getitem_60/strided_slice/stack_1" -> 
"maskrcnn/tf.__operators__.getitem_60/strided_slice"; "maskrcnn/tf.__operators__.getitem_60/strided_slice/stack_2" -> "maskrcnn/tf.__operators__.getitem_60/strided_slice"; "maskrcnn/tf.__operators__.getitem_60/strided_slice" -> "maskrcnn/tf.unstack_5/unstack"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_1/Abs/ReadVariableOp"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_2/Abs/ReadVariableOp"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_3/Abs/ReadVariableOp"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant/Abs"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant/Abs" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant/add"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant/add/y" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant/add"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant/add" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant/mul"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant/add" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_1/mul/ReadVariableOp"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_2/mul/ReadVariableOp"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_3/mul/ReadVariableOp"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant/mul"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant/mul" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.__operators__.add_65/AddV2"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_13/concat"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_1/Abs/ReadVariableOp" -> 
"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_1/Abs"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_1/Abs" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_1/add"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_1/add/y" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_1/add"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_1/add" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_1/mul"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_1/add" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_1/mul/ReadVariableOp" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_1/mul"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_1/mul" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.__operators__.add_66/AddV2"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_13/concat"; +"maskrcnn/tf.math.subtract_53/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_53/fake_quantize/SymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.subtract_53/fake_quantize/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_53/fake_quantize/SymmQuant/Abs"; +"maskrcnn/tf.math.subtract_53/fake_quantize/SymmQuant/Abs" -> "maskrcnn/tf.math.subtract_53/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.subtract_53/fake_quantize/SymmQuant/add/y" -> "maskrcnn/tf.math.subtract_53/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.subtract_53/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.subtract_53/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.subtract_53/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.subtract_53/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_53/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_53/fake_quantize/SymmQuant/mul/ReadVariableOp"; +"maskrcnn/tf.math.subtract_53/fake_quantize/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.math.subtract_53/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.subtract_53/fake_quantize/SymmQuant/mul" -> "maskrcnn/tf.math.subtract_53/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_53/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.__operators__.add_65/AddV2"; +"maskrcnn/tf.math.subtract_53/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_13/concat"; +"maskrcnn/tf.math.subtract_54/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_54/fake_quantize/SymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.subtract_54/fake_quantize/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_54/fake_quantize/SymmQuant/Abs"; +"maskrcnn/tf.math.subtract_54/fake_quantize/SymmQuant/Abs" -> "maskrcnn/tf.math.subtract_54/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.subtract_54/fake_quantize/SymmQuant/add/y" -> "maskrcnn/tf.math.subtract_54/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.subtract_54/fake_quantize/SymmQuant/add" -> 
"maskrcnn/tf.math.subtract_54/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.subtract_54/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.subtract_54/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_54/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_54/fake_quantize/SymmQuant/mul/ReadVariableOp"; +"maskrcnn/tf.math.subtract_54/fake_quantize/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.math.subtract_54/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.subtract_54/fake_quantize/SymmQuant/mul" -> "maskrcnn/tf.math.subtract_54/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_54/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.__operators__.add_66/AddV2"; +"maskrcnn/tf.math.subtract_54/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_13/concat"; "maskrcnn/tf.__operators__.add_66/AddV2" -> "maskrcnn/tf.__operators__.add_66/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.__operators__.add_65/AddV2" -> "maskrcnn/tf.__operators__.add_65/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.unstack_5/unstack" -> "maskrcnn/tf.math.subtract_60/Sub"; @@ -26283,9 +26343,9 @@ self_1 -> "maskrcnn/image_info/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_57/Sub/y" -> "maskrcnn/tf.math.subtract_57/Sub"; "maskrcnn/tf.math.subtract_57/Sub" -> "maskrcnn/tf.math.subtract_57/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_56/Sub/y" -> "maskrcnn/tf.math.subtract_56/Sub"; -"maskrcnn/tf.math.subtract_56/Sub" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_3/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_56/Sub" -> "maskrcnn/tf.math.subtract_56/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_55/Sub/y" -> "maskrcnn/tf.math.subtract_55/Sub"; -"maskrcnn/tf.math.subtract_55/Sub" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_2/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_55/Sub" -> "maskrcnn/tf.math.subtract_55/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_57/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_57/fake_quantize/SymmQuant/Abs/ReadVariableOp"; "maskrcnn/tf.math.subtract_57/fake_quantize/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_57/fake_quantize/SymmQuant/Abs"; "maskrcnn/tf.math.subtract_57/fake_quantize/SymmQuant/Abs" -> "maskrcnn/tf.math.subtract_57/fake_quantize/SymmQuant/add"; @@ -26326,22 +26386,26 @@ self_1 -> "maskrcnn/image_info/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_60/fake_quantize/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.math.subtract_60/fake_quantize/SymmQuant/mul"; "maskrcnn/tf.math.subtract_60/fake_quantize/SymmQuant/mul" -> "maskrcnn/tf.math.subtract_60/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_60/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.stack_15/stack"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_2/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_2/Abs"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_2/Abs" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_2/add"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_2/add/y" -> 
"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_2/add"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_2/add" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_2/mul"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_2/add" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_2/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_2/mul/ReadVariableOp" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_2/mul"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_2/mul" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_2/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_2/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_13/concat"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_3/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_3/Abs"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_3/Abs" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_3/add"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_3/add/y" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_3/add"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_3/add" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_3/mul"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_3/add" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_3/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_3/mul/ReadVariableOp" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_3/mul"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_3/mul" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_3/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_3/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_13/concat"; +"maskrcnn/tf.math.subtract_55/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_55/fake_quantize/SymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.subtract_55/fake_quantize/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_55/fake_quantize/SymmQuant/Abs"; +"maskrcnn/tf.math.subtract_55/fake_quantize/SymmQuant/Abs" -> "maskrcnn/tf.math.subtract_55/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.subtract_55/fake_quantize/SymmQuant/add/y" -> "maskrcnn/tf.math.subtract_55/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.subtract_55/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.subtract_55/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.subtract_55/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.subtract_55/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_55/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_55/fake_quantize/SymmQuant/mul/ReadVariableOp"; +"maskrcnn/tf.math.subtract_55/fake_quantize/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.math.subtract_55/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.subtract_55/fake_quantize/SymmQuant/mul" -> 
"maskrcnn/tf.math.subtract_55/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_55/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_13/concat"; +"maskrcnn/tf.math.subtract_56/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_56/fake_quantize/SymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.subtract_56/fake_quantize/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_56/fake_quantize/SymmQuant/Abs"; +"maskrcnn/tf.math.subtract_56/fake_quantize/SymmQuant/Abs" -> "maskrcnn/tf.math.subtract_56/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.subtract_56/fake_quantize/SymmQuant/add/y" -> "maskrcnn/tf.math.subtract_56/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.subtract_56/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.subtract_56/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.subtract_56/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.subtract_56/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_56/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_56/fake_quantize/SymmQuant/mul/ReadVariableOp"; +"maskrcnn/tf.math.subtract_56/fake_quantize/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.math.subtract_56/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.subtract_56/fake_quantize/SymmQuant/mul" -> "maskrcnn/tf.math.subtract_56/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_56/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_13/concat"; "maskrcnn/tf.concat_13/concat/axis" -> "maskrcnn/tf.concat_13/concat"; "maskrcnn/tf.concat_13/concat" -> "maskrcnn/tf.math.minimum_22/Minimum"; "maskrcnn/tf.stack_15/stack" -> "maskrcnn/tf.math.minimum_22/Minimum"; @@ -26547,43 +26611,43 @@ self_1 -> "maskrcnn/image_info/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.pow_2/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.math.truediv_28/truediv"; "maskrcnn/tf.expand_dims_24/ExpandDims/dim" -> "maskrcnn/tf.expand_dims_24/ExpandDims"; "maskrcnn/tf.expand_dims_24/ExpandDims" -> "maskrcnn/tf.math.truediv_26/truediv"; -"maskrcnn/tf.math.truediv_27/truediv" -> "maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.truediv_28/truediv" -> "maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.truediv_26/truediv" -> "maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant_2/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.truediv_27/truediv" -> "maskrcnn/tf.math.truediv_27/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.truediv_28/truediv" -> "maskrcnn/tf.math.truediv_28/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.truediv_26/truediv" -> "maskrcnn/tf.math.truediv_26/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_64/Sub/y" -> "maskrcnn/tf.math.subtract_64/Sub"; "maskrcnn/tf.math.subtract_64/Sub" -> "maskrcnn/tf.cast_37/Cast"; "maskrcnn/tf.math.subtract_64/Sub" -> "maskrcnn/tf.compat.v1.gather_4/GatherV2"; "maskrcnn/tf.math.subtract_64/Sub" -> "maskrcnn/tf.compat.v1.gather_3/GatherV2"; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp"; 
-"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant_1/Abs/ReadVariableOp"; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant_2/Abs/ReadVariableOp"; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant/Abs"; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant/Abs" -> "maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant/add"; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant/add/y" -> "maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant/add"; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant/add" -> "maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant/mul"; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant/add" -> "maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp"; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant_1/mul/ReadVariableOp"; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant_2/mul/ReadVariableOp"; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant/mul"; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant/mul" -> "maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.expand_dims_26/ExpandDims"; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant_1/Abs/ReadVariableOp" -> "maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant_1/Abs"; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant_1/Abs" -> "maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant_1/add"; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant_1/add/y" -> "maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant_1/add"; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant_1/add" -> "maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant_1/mul"; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant_1/add" -> "maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant_1/mul/ReadVariableOp" -> "maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant_1/mul"; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant_1/mul" -> 
"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.expand_dims_25/ExpandDims"; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant_2/Abs/ReadVariableOp" -> "maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant_2/Abs"; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant_2/Abs" -> "maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant_2/add"; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant_2/add/y" -> "maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant_2/add"; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant_2/add" -> "maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant_2/mul"; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant_2/add" -> "maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant_2/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant_2/mul/ReadVariableOp" -> "maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant_2/mul"; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant_2/mul" -> "maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant_2/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant_2/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.__operators__.getitem_76/strided_slice"; +"maskrcnn/tf.math.truediv_27/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.truediv_27/fake_quantize/SymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.truediv_27/fake_quantize/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.truediv_27/fake_quantize/SymmQuant/Abs"; +"maskrcnn/tf.math.truediv_27/fake_quantize/SymmQuant/Abs" -> "maskrcnn/tf.math.truediv_27/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.truediv_27/fake_quantize/SymmQuant/add/y" -> "maskrcnn/tf.math.truediv_27/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.truediv_27/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.truediv_27/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.truediv_27/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.truediv_27/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.truediv_27/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.math.truediv_27/fake_quantize/SymmQuant/mul/ReadVariableOp"; +"maskrcnn/tf.math.truediv_27/fake_quantize/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.math.truediv_27/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.truediv_27/fake_quantize/SymmQuant/mul" -> "maskrcnn/tf.math.truediv_27/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.truediv_27/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.expand_dims_26/ExpandDims"; +"maskrcnn/tf.math.truediv_28/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.truediv_28/fake_quantize/SymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.truediv_28/fake_quantize/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.truediv_28/fake_quantize/SymmQuant/Abs"; +"maskrcnn/tf.math.truediv_28/fake_quantize/SymmQuant/Abs" -> "maskrcnn/tf.math.truediv_28/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.truediv_28/fake_quantize/SymmQuant/add/y" -> "maskrcnn/tf.math.truediv_28/fake_quantize/SymmQuant/add"; 
+"maskrcnn/tf.math.truediv_28/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.truediv_28/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.truediv_28/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.truediv_28/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.truediv_28/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.math.truediv_28/fake_quantize/SymmQuant/mul/ReadVariableOp"; +"maskrcnn/tf.math.truediv_28/fake_quantize/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.math.truediv_28/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.truediv_28/fake_quantize/SymmQuant/mul" -> "maskrcnn/tf.math.truediv_28/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.truediv_28/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.expand_dims_25/ExpandDims"; +"maskrcnn/tf.math.truediv_26/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.truediv_26/fake_quantize/SymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.truediv_26/fake_quantize/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.truediv_26/fake_quantize/SymmQuant/Abs"; +"maskrcnn/tf.math.truediv_26/fake_quantize/SymmQuant/Abs" -> "maskrcnn/tf.math.truediv_26/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.truediv_26/fake_quantize/SymmQuant/add/y" -> "maskrcnn/tf.math.truediv_26/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.truediv_26/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.truediv_26/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.truediv_26/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.truediv_26/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.truediv_26/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.math.truediv_26/fake_quantize/SymmQuant/mul/ReadVariableOp"; +"maskrcnn/tf.math.truediv_26/fake_quantize/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.math.truediv_26/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.truediv_26/fake_quantize/SymmQuant/mul" -> "maskrcnn/tf.math.truediv_26/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.truediv_26/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.__operators__.getitem_76/strided_slice"; "maskrcnn/tf.cast_37/Cast" -> "maskrcnn/tf.cast_37/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.__operators__.getitem_76/strided_slice/stack" -> "maskrcnn/tf.__operators__.getitem_76/strided_slice"; "maskrcnn/tf.__operators__.getitem_76/strided_slice/stack_1" -> "maskrcnn/tf.__operators__.getitem_76/strided_slice"; @@ -27180,9 +27244,9 @@ self_1 -> "maskrcnn/image_info/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.multiply_61/fake_quantize/SymmQuant/mul" -> "maskrcnn/tf.math.multiply_61/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.multiply_61/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.math.truediv_31/truediv"; "maskrcnn/tf.math.subtract_66/Sub/y" -> "maskrcnn/tf.math.subtract_66/Sub"; -"maskrcnn/tf.math.subtract_66/Sub" -> "maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_66/Sub" -> "maskrcnn/tf.math.subtract_66/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_65/Sub/y" -> "maskrcnn/tf.math.subtract_65/Sub"; -"maskrcnn/tf.math.subtract_65/Sub" -> "maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_65/Sub" -> 
"maskrcnn/tf.math.subtract_65/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.__operators__.getitem_131/strided_slice/stack" -> "maskrcnn/tf.__operators__.getitem_131/strided_slice"; "maskrcnn/tf.__operators__.getitem_131/strided_slice/stack_1" -> "maskrcnn/tf.__operators__.getitem_131/strided_slice"; "maskrcnn/tf.__operators__.getitem_131/strided_slice/stack_2" -> "maskrcnn/tf.__operators__.getitem_131/strided_slice"; @@ -27407,26 +27471,26 @@ self_1 -> "maskrcnn/image_info/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.truediv_33/truediv" -> "maskrcnn/tf.math.truediv_33/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.truediv_31/truediv/y" -> "maskrcnn/tf.math.truediv_31/truediv"; "maskrcnn/tf.math.truediv_31/truediv" -> "maskrcnn/tf.math.truediv_31/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp"; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/SymmQuant_1/Abs/ReadVariableOp"; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/SymmQuant/Abs"; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/SymmQuant/Abs" -> "maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/SymmQuant/add"; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/SymmQuant/add/y" -> "maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/SymmQuant/add"; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/SymmQuant/add" -> "maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/SymmQuant/mul"; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/SymmQuant/add" -> "maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp"; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/SymmQuant_1/mul/ReadVariableOp"; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/SymmQuant/mul"; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/SymmQuant/mul" -> "maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.expand_dims_28/ExpandDims"; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/SymmQuant_1/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/SymmQuant_1/Abs"; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/SymmQuant_1/Abs" -> "maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/SymmQuant_1/add"; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/SymmQuant_1/add/y" -> "maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/SymmQuant_1/add"; 
-"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/SymmQuant_1/add" -> "maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/SymmQuant_1/mul"; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/SymmQuant_1/add" -> "maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/SymmQuant_1/mul/ReadVariableOp" -> "maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/SymmQuant_1/mul"; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/SymmQuant_1/mul" -> "maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.expand_dims_27/ExpandDims"; +"maskrcnn/tf.math.subtract_66/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_66/fake_quantize/SymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.subtract_66/fake_quantize/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_66/fake_quantize/SymmQuant/Abs"; +"maskrcnn/tf.math.subtract_66/fake_quantize/SymmQuant/Abs" -> "maskrcnn/tf.math.subtract_66/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.subtract_66/fake_quantize/SymmQuant/add/y" -> "maskrcnn/tf.math.subtract_66/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.subtract_66/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.subtract_66/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.subtract_66/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.subtract_66/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_66/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_66/fake_quantize/SymmQuant/mul/ReadVariableOp"; +"maskrcnn/tf.math.subtract_66/fake_quantize/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.math.subtract_66/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.subtract_66/fake_quantize/SymmQuant/mul" -> "maskrcnn/tf.math.subtract_66/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_66/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.expand_dims_28/ExpandDims"; +"maskrcnn/tf.math.subtract_65/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_65/fake_quantize/SymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.subtract_65/fake_quantize/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_65/fake_quantize/SymmQuant/Abs"; +"maskrcnn/tf.math.subtract_65/fake_quantize/SymmQuant/Abs" -> "maskrcnn/tf.math.subtract_65/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.subtract_65/fake_quantize/SymmQuant/add/y" -> "maskrcnn/tf.math.subtract_65/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.subtract_65/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.subtract_65/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.subtract_65/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.subtract_65/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_65/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_65/fake_quantize/SymmQuant/mul/ReadVariableOp"; +"maskrcnn/tf.math.subtract_65/fake_quantize/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.math.subtract_65/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.subtract_65/fake_quantize/SymmQuant/mul" -> "maskrcnn/tf.math.subtract_65/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; 
+"maskrcnn/tf.math.subtract_65/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.expand_dims_27/ExpandDims"; "maskrcnn/tf.__operators__.add_96/AddV2" -> "maskrcnn/tf.__operators__.add_96/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.__operators__.add_94/AddV2" -> "maskrcnn/tf.__operators__.add_94/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.__operators__.add_92/AddV2" -> "maskrcnn/tf.__operators__.add_92/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; diff --git a/tests/tensorflow/data/reference_graphs/2.4/quantized/w_sym_t_a_sym_t/retinanet.pb b/tests/tensorflow/data/reference_graphs/2.4/quantized/w_sym_t_a_sym_t/retinanet.pb index 5fdb7fd5fdf..c461b6162d2 100644 --- a/tests/tensorflow/data/reference_graphs/2.4/quantized/w_sym_t_a_sym_t/retinanet.pb +++ b/tests/tensorflow/data/reference_graphs/2.4/quantized/w_sym_t_a_sym_t/retinanet.pb @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:e8f2d91709980ea5c520116bdc63cab714e288ba0a8f934746094b01652b663d -size 1245581 +oid sha256:032dba3f3af9a3f82bd282b6e38e1d03516bd6388da68927c14afab30b54e792 +size 1248913 diff --git a/tests/tensorflow/data/reference_graphs/2.4/quantized/w_sym_t_a_sym_t/retinanet_quantize_outputs.pb b/tests/tensorflow/data/reference_graphs/2.4/quantized/w_sym_t_a_sym_t/retinanet_quantize_outputs.pb index e5c826605b9..e5845efe197 100644 --- a/tests/tensorflow/data/reference_graphs/2.4/quantized/w_sym_t_a_sym_t/retinanet_quantize_outputs.pb +++ b/tests/tensorflow/data/reference_graphs/2.4/quantized/w_sym_t_a_sym_t/retinanet_quantize_outputs.pb @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:dfec435b3cd0a152f22e7bb217a9424717cc2b758448b7c07b693bef73b39680 -size 1277200 +oid sha256:0fe813053948828cb561881ba9347077a7b8b68aac4dd50e8852f24b5e62a2eb +size 1280532 diff --git a/tests/tensorflow/data/reference_graphs/2.5/quantized/hw/CPU/inception_v3.dot b/tests/tensorflow/data/reference_graphs/2.5/quantized/hw/CPU/inception_v3.dot index 67419b94043..fd10f116ee0 100644 --- a/tests/tensorflow/data/reference_graphs/2.5/quantized/hw/CPU/inception_v3.dot +++ b/tests/tensorflow/data/reference_graphs/2.5/quantized/hw/CPU/inception_v3.dot @@ -2853,18 +2853,6 @@ args_0 [op=Placeholder]; "inception_v3/conv2d_87/SymmQuant/FakeQuantWithMinMaxVarsPerChannel/ReadVariableOp" [op=ReadVariableOp]; "inception_v3/conv2d_87/SymmQuant/FakeQuantWithMinMaxVarsPerChannel" [op=FakeQuantWithMinMaxVarsPerChannel]; "inception_v3/conv2d_87/Conv2D" [op=Conv2D]; -"inception_v3/conv2d_85/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; -"inception_v3/conv2d_85/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/conv2d_85/SymmQuant/Abs" [op=Abs]; -"inception_v3/conv2d_85/SymmQuant/add/y" [op=Const]; -"inception_v3/conv2d_85/SymmQuant/add" [op=AddV2]; -"inception_v3/conv2d_85/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; -"inception_v3/conv2d_85/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/conv2d_85/SymmQuant/mul" [op=Mul]; -"inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel/ReadVariableOp/resource" [op=Placeholder]; -"inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel" [op=FakeQuantWithMinMaxVarsPerChannel]; -"inception_v3/conv2d_85/Conv2D" [op=Conv2D]; "inception_v3/conv2d_93/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; "inception_v3/conv2d_93/SymmQuant/Abs/ReadVariableOp" 
[op=ReadVariableOp]; "inception_v3/conv2d_93/SymmQuant/Abs" [op=Abs]; @@ -2909,14 +2897,18 @@ args_0 [op=Placeholder]; "inception_v3/batch_normalization_87/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; "inception_v3/batch_normalization_87/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; "inception_v3/batch_normalization_87/FusedBatchNormV3" [op=FusedBatchNormV3]; -"inception_v3/batch_normalization_85/scale" [op=Placeholder]; -"inception_v3/batch_normalization_85/ReadVariableOp/resource" [op=Placeholder]; -"inception_v3/batch_normalization_85/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; -"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; -"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; -"inception_v3/batch_normalization_85/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_v3/conv2d_85/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/conv2d_85/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/conv2d_85/SymmQuant/Abs" [op=Abs]; +"inception_v3/conv2d_85/SymmQuant/add/y" [op=Const]; +"inception_v3/conv2d_85/SymmQuant/add" [op=AddV2]; +"inception_v3/conv2d_85/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/conv2d_85/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/conv2d_85/SymmQuant/mul" [op=Mul]; +"inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel" [op=FakeQuantWithMinMaxVarsPerChannel]; +"inception_v3/conv2d_85/Conv2D" [op=Conv2D]; "inception_v3/batch_normalization_93/scale" [op=Placeholder]; "inception_v3/batch_normalization_93/ReadVariableOp/resource" [op=Placeholder]; "inception_v3/batch_normalization_93/ReadVariableOp" [op=ReadVariableOp]; @@ -2929,62 +2921,80 @@ args_0 [op=Placeholder]; "inception_v3/activation_91/Relu" [op=Relu]; "inception_v3/activation_88/Relu" [op=Relu]; "inception_v3/activation_87/Relu" [op=Relu]; -"inception_v3/activation_85/Relu" [op=Relu]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/Abs" [op=Abs]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/add/y" [op=Const]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/add" [op=AddV2]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/add_1" [op=AddV2]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"inception_v3/batch_normalization_85/scale" 
[op=Placeholder]; +"inception_v3/batch_normalization_85/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/batch_normalization_85/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_v3/batch_normalization_85/FusedBatchNormV3" [op=FusedBatchNormV3]; "inception_v3/activation_93/Relu" [op=Relu]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/Abs/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/Abs" [op=Abs]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/add/y" [op=Const]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/add" [op=AddV2]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/add_1" [op=AddV2]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/Abs/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/Abs" [op=Abs]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/add/y" [op=Const]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/add" [op=AddV2]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/add_1" [op=AddV2]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/Abs/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/Abs" [op=Abs]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/add/y" [op=Const]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/add" [op=AddV2]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/add_1" [op=AddV2]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/Abs/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/Abs" [op=Abs]; 
-"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/add/y" [op=Const]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/add" [op=AddV2]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/add_1" [op=AddV2]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"inception_v3/activation_91/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/activation_91/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_91/fake_quantize/AsymmQuant/Abs" [op=Abs]; +"inception_v3/activation_91/fake_quantize/AsymmQuant/add/y" [op=Const]; +"inception_v3/activation_91/fake_quantize/AsymmQuant/add" [op=AddV2]; +"inception_v3/activation_91/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/activation_91/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_91/fake_quantize/AsymmQuant/add_1" [op=AddV2]; +"inception_v3/activation_91/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_91/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"inception_v3/activation_92/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/activation_92/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_92/fake_quantize/AsymmQuant/Abs" [op=Abs]; +"inception_v3/activation_92/fake_quantize/AsymmQuant/add/y" [op=Const]; +"inception_v3/activation_92/fake_quantize/AsymmQuant/add" [op=AddV2]; +"inception_v3/activation_92/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/activation_92/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_92/fake_quantize/AsymmQuant/add_1" [op=AddV2]; +"inception_v3/activation_92/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_92/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"inception_v3/activation_87/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/activation_87/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_87/fake_quantize/AsymmQuant/Abs" [op=Abs]; +"inception_v3/activation_87/fake_quantize/AsymmQuant/add/y" [op=Const]; +"inception_v3/activation_87/fake_quantize/AsymmQuant/add" [op=AddV2]; +"inception_v3/activation_87/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/activation_87/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_87/fake_quantize/AsymmQuant/add_1" [op=AddV2]; +"inception_v3/activation_87/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_87/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"inception_v3/activation_88/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/activation_88/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; 
+"inception_v3/activation_88/fake_quantize/AsymmQuant/Abs" [op=Abs]; +"inception_v3/activation_88/fake_quantize/AsymmQuant/add/y" [op=Const]; +"inception_v3/activation_88/fake_quantize/AsymmQuant/add" [op=AddV2]; +"inception_v3/activation_88/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/activation_88/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_88/fake_quantize/AsymmQuant/add_1" [op=AddV2]; +"inception_v3/activation_88/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_88/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"inception_v3/activation_85/Relu" [op=Relu]; +"inception_v3/activation_85/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/activation_85/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_85/fake_quantize/AsymmQuant/Abs" [op=Abs]; +"inception_v3/activation_85/fake_quantize/AsymmQuant/add/y" [op=Const]; +"inception_v3/activation_85/fake_quantize/AsymmQuant/add" [op=AddV2]; +"inception_v3/activation_85/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/activation_85/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_85/fake_quantize/AsymmQuant/add_1" [op=AddV2]; +"inception_v3/activation_85/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_85/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; "inception_v3/mixed9_1/concat/axis" [op=Const]; "inception_v3/mixed9_1/concat" [op=ConcatV2]; "inception_v3/concatenate_1/concat/axis" [op=Const]; "inception_v3/concatenate_1/concat" [op=ConcatV2]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/Abs/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/Abs" [op=Abs]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/add/y" [op=Const]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/add" [op=AddV2]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/add_1" [op=AddV2]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"inception_v3/activation_93/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/activation_93/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_93/fake_quantize/AsymmQuant/Abs" [op=Abs]; +"inception_v3/activation_93/fake_quantize/AsymmQuant/add/y" [op=Const]; +"inception_v3/activation_93/fake_quantize/AsymmQuant/add" [op=AddV2]; +"inception_v3/activation_93/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/activation_93/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_93/fake_quantize/AsymmQuant/add_1" [op=AddV2]; +"inception_v3/activation_93/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; 
+"inception_v3/activation_93/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; "inception_v3/mixed10/concat/axis" [op=Const]; "inception_v3/mixed10/concat" [op=ConcatV2]; "inception_v3/avg_pool/Mean/reduction_indices" [op=Const]; @@ -6155,19 +6165,6 @@ args_0 -> "inception_v3/input_1/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars "inception_v3/conv2d_87/SymmQuant/FakeQuantWithMinMaxVarsPerChannel/ReadVariableOp" -> "inception_v3/conv2d_87/SymmQuant/FakeQuantWithMinMaxVarsPerChannel"; "inception_v3/conv2d_87/SymmQuant/FakeQuantWithMinMaxVarsPerChannel" -> "inception_v3/conv2d_87/Conv2D"; "inception_v3/conv2d_87/Conv2D" -> "inception_v3/batch_normalization_87/FusedBatchNormV3"; -"inception_v3/conv2d_85/SymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/conv2d_85/SymmQuant/Abs/ReadVariableOp"; -"inception_v3/conv2d_85/SymmQuant/Abs/ReadVariableOp" -> "inception_v3/conv2d_85/SymmQuant/Abs"; -"inception_v3/conv2d_85/SymmQuant/Abs" -> "inception_v3/conv2d_85/SymmQuant/add"; -"inception_v3/conv2d_85/SymmQuant/add/y" -> "inception_v3/conv2d_85/SymmQuant/add"; -"inception_v3/conv2d_85/SymmQuant/add" -> "inception_v3/conv2d_85/SymmQuant/mul"; -"inception_v3/conv2d_85/SymmQuant/add" -> "inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel"; -"inception_v3/conv2d_85/SymmQuant/mul/ReadVariableOp/resource" -> "inception_v3/conv2d_85/SymmQuant/mul/ReadVariableOp"; -"inception_v3/conv2d_85/SymmQuant/mul/ReadVariableOp" -> "inception_v3/conv2d_85/SymmQuant/mul"; -"inception_v3/conv2d_85/SymmQuant/mul" -> "inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel"; -"inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel/ReadVariableOp/resource" -> "inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel/ReadVariableOp"; -"inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel/ReadVariableOp" -> "inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel"; -"inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel" -> "inception_v3/conv2d_85/Conv2D"; -"inception_v3/conv2d_85/Conv2D" -> "inception_v3/batch_normalization_85/FusedBatchNormV3"; "inception_v3/conv2d_93/SymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/conv2d_93/SymmQuant/Abs/ReadVariableOp"; "inception_v3/conv2d_93/SymmQuant/Abs/ReadVariableOp" -> "inception_v3/conv2d_93/SymmQuant/Abs"; "inception_v3/conv2d_93/SymmQuant/Abs" -> "inception_v3/conv2d_93/SymmQuant/add"; @@ -6213,14 +6210,19 @@ args_0 -> "inception_v3/input_1/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars "inception_v3/batch_normalization_87/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_v3/batch_normalization_87/FusedBatchNormV3/ReadVariableOp_1"; "inception_v3/batch_normalization_87/FusedBatchNormV3/ReadVariableOp_1" -> "inception_v3/batch_normalization_87/FusedBatchNormV3"; "inception_v3/batch_normalization_87/FusedBatchNormV3" -> "inception_v3/activation_87/Relu"; -"inception_v3/batch_normalization_85/scale" -> "inception_v3/batch_normalization_85/FusedBatchNormV3"; -"inception_v3/batch_normalization_85/ReadVariableOp/resource" -> "inception_v3/batch_normalization_85/ReadVariableOp"; -"inception_v3/batch_normalization_85/ReadVariableOp" -> "inception_v3/batch_normalization_85/FusedBatchNormV3"; -"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp"; -"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp" -> 
"inception_v3/batch_normalization_85/FusedBatchNormV3"; -"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp_1"; -"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp_1" -> "inception_v3/batch_normalization_85/FusedBatchNormV3"; -"inception_v3/batch_normalization_85/FusedBatchNormV3" -> "inception_v3/activation_85/Relu"; +"inception_v3/conv2d_85/SymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/conv2d_85/SymmQuant/Abs/ReadVariableOp"; +"inception_v3/conv2d_85/SymmQuant/Abs/ReadVariableOp" -> "inception_v3/conv2d_85/SymmQuant/Abs"; +"inception_v3/conv2d_85/SymmQuant/Abs" -> "inception_v3/conv2d_85/SymmQuant/add"; +"inception_v3/conv2d_85/SymmQuant/add/y" -> "inception_v3/conv2d_85/SymmQuant/add"; +"inception_v3/conv2d_85/SymmQuant/add" -> "inception_v3/conv2d_85/SymmQuant/mul"; +"inception_v3/conv2d_85/SymmQuant/add" -> "inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel"; +"inception_v3/conv2d_85/SymmQuant/mul/ReadVariableOp/resource" -> "inception_v3/conv2d_85/SymmQuant/mul/ReadVariableOp"; +"inception_v3/conv2d_85/SymmQuant/mul/ReadVariableOp" -> "inception_v3/conv2d_85/SymmQuant/mul"; +"inception_v3/conv2d_85/SymmQuant/mul" -> "inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel"; +"inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel/ReadVariableOp/resource" -> "inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel/ReadVariableOp"; +"inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel/ReadVariableOp" -> "inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel"; +"inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel" -> "inception_v3/conv2d_85/Conv2D"; +"inception_v3/conv2d_85/Conv2D" -> "inception_v3/batch_normalization_85/FusedBatchNormV3"; "inception_v3/batch_normalization_93/scale" -> "inception_v3/batch_normalization_93/FusedBatchNormV3"; "inception_v3/batch_normalization_93/ReadVariableOp/resource" -> "inception_v3/batch_normalization_93/ReadVariableOp"; "inception_v3/batch_normalization_93/ReadVariableOp" -> "inception_v3/batch_normalization_93/FusedBatchNormV3"; @@ -6229,82 +6231,90 @@ args_0 -> "inception_v3/input_1/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars "inception_v3/batch_normalization_93/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_v3/batch_normalization_93/FusedBatchNormV3/ReadVariableOp_1"; "inception_v3/batch_normalization_93/FusedBatchNormV3/ReadVariableOp_1" -> "inception_v3/batch_normalization_93/FusedBatchNormV3"; "inception_v3/batch_normalization_93/FusedBatchNormV3" -> "inception_v3/activation_93/Relu"; -"inception_v3/activation_92/Relu" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars"; -"inception_v3/activation_91/Relu" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars"; -"inception_v3/activation_88/Relu" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/Relu" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars"; -"inception_v3/activation_85/Relu" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" -> 
"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/Abs/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/Abs/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/Abs/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/Abs/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/Abs/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/Abs"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/Abs" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/add"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/add/y" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/add"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/add" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/add_1"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> 
"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/FakeQuantWithMinMaxVars/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/FakeQuantWithMinMaxVars/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/add_1"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/add_1" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars" -> "inception_v3/mixed10/concat"; -"inception_v3/activation_93/Relu" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/Abs/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/Abs"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/Abs" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/add"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/add/y" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/add"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/add" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/add_1"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/add_1"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/add_1" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars" -> "inception_v3/concatenate_1/concat"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/Abs/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/Abs"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/Abs" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/add"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/add/y" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/add"; 
-"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/add" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/add_1"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/add_1"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/add_1" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars" -> "inception_v3/concatenate_1/concat"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/Abs/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/Abs"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/Abs" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/add"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/add/y" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/add"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/add" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/add_1"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/add_1"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/add_1" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars" -> "inception_v3/mixed9_1/concat"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/Abs/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/Abs"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/Abs" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/add"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/add/y" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/add"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/add" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/add_1"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/add_1"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/add_1" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/FakeQuantWithMinMaxVars/ReadVariableOp" -> 
"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/FakeQuantWithMinMaxVars" -> "inception_v3/mixed9_1/concat"; +"inception_v3/activation_92/Relu" -> "inception_v3/activation_92/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_91/Relu" -> "inception_v3/activation_91/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_88/Relu" -> "inception_v3/activation_88/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_87/Relu" -> "inception_v3/activation_87/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/batch_normalization_85/scale" -> "inception_v3/batch_normalization_85/FusedBatchNormV3"; +"inception_v3/batch_normalization_85/ReadVariableOp/resource" -> "inception_v3/batch_normalization_85/ReadVariableOp"; +"inception_v3/batch_normalization_85/ReadVariableOp" -> "inception_v3/batch_normalization_85/FusedBatchNormV3"; +"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp"; +"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp" -> "inception_v3/batch_normalization_85/FusedBatchNormV3"; +"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp_1"; +"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp_1" -> "inception_v3/batch_normalization_85/FusedBatchNormV3"; +"inception_v3/batch_normalization_85/FusedBatchNormV3" -> "inception_v3/activation_85/Relu"; +"inception_v3/activation_93/Relu" -> "inception_v3/activation_93/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_91/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/activation_91/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; +"inception_v3/activation_91/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "inception_v3/activation_91/fake_quantize/AsymmQuant/Abs"; +"inception_v3/activation_91/fake_quantize/AsymmQuant/Abs" -> "inception_v3/activation_91/fake_quantize/AsymmQuant/add"; +"inception_v3/activation_91/fake_quantize/AsymmQuant/add/y" -> "inception_v3/activation_91/fake_quantize/AsymmQuant/add"; +"inception_v3/activation_91/fake_quantize/AsymmQuant/add" -> "inception_v3/activation_91/fake_quantize/AsymmQuant/add_1"; +"inception_v3/activation_91/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_91/fake_quantize/AsymmQuant/ReadVariableOp"; +"inception_v3/activation_91/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_91/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"inception_v3/activation_91/fake_quantize/AsymmQuant/ReadVariableOp" -> "inception_v3/activation_91/fake_quantize/AsymmQuant/add_1"; +"inception_v3/activation_91/fake_quantize/AsymmQuant/add_1" -> "inception_v3/activation_91/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_91/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "inception_v3/activation_91/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_91/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "inception_v3/concatenate_1/concat"; +"inception_v3/activation_92/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/activation_92/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; 
+"inception_v3/activation_92/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "inception_v3/activation_92/fake_quantize/AsymmQuant/Abs"; +"inception_v3/activation_92/fake_quantize/AsymmQuant/Abs" -> "inception_v3/activation_92/fake_quantize/AsymmQuant/add"; +"inception_v3/activation_92/fake_quantize/AsymmQuant/add/y" -> "inception_v3/activation_92/fake_quantize/AsymmQuant/add"; +"inception_v3/activation_92/fake_quantize/AsymmQuant/add" -> "inception_v3/activation_92/fake_quantize/AsymmQuant/add_1"; +"inception_v3/activation_92/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_92/fake_quantize/AsymmQuant/ReadVariableOp"; +"inception_v3/activation_92/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_92/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"inception_v3/activation_92/fake_quantize/AsymmQuant/ReadVariableOp" -> "inception_v3/activation_92/fake_quantize/AsymmQuant/add_1"; +"inception_v3/activation_92/fake_quantize/AsymmQuant/add_1" -> "inception_v3/activation_92/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_92/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "inception_v3/activation_92/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_92/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "inception_v3/concatenate_1/concat"; +"inception_v3/activation_87/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; +"inception_v3/activation_87/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/AsymmQuant/Abs"; +"inception_v3/activation_87/fake_quantize/AsymmQuant/Abs" -> "inception_v3/activation_87/fake_quantize/AsymmQuant/add"; +"inception_v3/activation_87/fake_quantize/AsymmQuant/add/y" -> "inception_v3/activation_87/fake_quantize/AsymmQuant/add"; +"inception_v3/activation_87/fake_quantize/AsymmQuant/add" -> "inception_v3/activation_87/fake_quantize/AsymmQuant/add_1"; +"inception_v3/activation_87/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/AsymmQuant/ReadVariableOp"; +"inception_v3/activation_87/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"inception_v3/activation_87/fake_quantize/AsymmQuant/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/AsymmQuant/add_1"; +"inception_v3/activation_87/fake_quantize/AsymmQuant/add_1" -> "inception_v3/activation_87/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_87/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_87/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "inception_v3/mixed9_1/concat"; +"inception_v3/activation_88/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/activation_88/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; +"inception_v3/activation_88/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "inception_v3/activation_88/fake_quantize/AsymmQuant/Abs"; +"inception_v3/activation_88/fake_quantize/AsymmQuant/Abs" -> "inception_v3/activation_88/fake_quantize/AsymmQuant/add"; +"inception_v3/activation_88/fake_quantize/AsymmQuant/add/y" -> "inception_v3/activation_88/fake_quantize/AsymmQuant/add"; 
+"inception_v3/activation_88/fake_quantize/AsymmQuant/add" -> "inception_v3/activation_88/fake_quantize/AsymmQuant/add_1"; +"inception_v3/activation_88/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_88/fake_quantize/AsymmQuant/ReadVariableOp"; +"inception_v3/activation_88/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_88/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"inception_v3/activation_88/fake_quantize/AsymmQuant/ReadVariableOp" -> "inception_v3/activation_88/fake_quantize/AsymmQuant/add_1"; +"inception_v3/activation_88/fake_quantize/AsymmQuant/add_1" -> "inception_v3/activation_88/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_88/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "inception_v3/activation_88/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_88/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "inception_v3/mixed9_1/concat"; +"inception_v3/activation_85/Relu" -> "inception_v3/activation_85/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_85/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/activation_85/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; +"inception_v3/activation_85/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "inception_v3/activation_85/fake_quantize/AsymmQuant/Abs"; +"inception_v3/activation_85/fake_quantize/AsymmQuant/Abs" -> "inception_v3/activation_85/fake_quantize/AsymmQuant/add"; +"inception_v3/activation_85/fake_quantize/AsymmQuant/add/y" -> "inception_v3/activation_85/fake_quantize/AsymmQuant/add"; +"inception_v3/activation_85/fake_quantize/AsymmQuant/add" -> "inception_v3/activation_85/fake_quantize/AsymmQuant/add_1"; +"inception_v3/activation_85/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_85/fake_quantize/AsymmQuant/ReadVariableOp"; +"inception_v3/activation_85/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_85/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"inception_v3/activation_85/fake_quantize/AsymmQuant/ReadVariableOp" -> "inception_v3/activation_85/fake_quantize/AsymmQuant/add_1"; +"inception_v3/activation_85/fake_quantize/AsymmQuant/add_1" -> "inception_v3/activation_85/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_85/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "inception_v3/activation_85/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_85/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "inception_v3/mixed10/concat"; "inception_v3/mixed9_1/concat/axis" -> "inception_v3/mixed9_1/concat"; "inception_v3/mixed9_1/concat" -> "inception_v3/mixed10/concat"; "inception_v3/concatenate_1/concat/axis" -> "inception_v3/concatenate_1/concat"; "inception_v3/concatenate_1/concat" -> "inception_v3/mixed10/concat"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/Abs/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/Abs"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/Abs" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/add"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/add/y" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/add"; 
-"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/add" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/add_1"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/add_1"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/add_1" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/FakeQuantWithMinMaxVars/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/FakeQuantWithMinMaxVars" -> "inception_v3/mixed10/concat"; +"inception_v3/activation_93/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/activation_93/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; +"inception_v3/activation_93/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "inception_v3/activation_93/fake_quantize/AsymmQuant/Abs"; +"inception_v3/activation_93/fake_quantize/AsymmQuant/Abs" -> "inception_v3/activation_93/fake_quantize/AsymmQuant/add"; +"inception_v3/activation_93/fake_quantize/AsymmQuant/add/y" -> "inception_v3/activation_93/fake_quantize/AsymmQuant/add"; +"inception_v3/activation_93/fake_quantize/AsymmQuant/add" -> "inception_v3/activation_93/fake_quantize/AsymmQuant/add_1"; +"inception_v3/activation_93/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_93/fake_quantize/AsymmQuant/ReadVariableOp"; +"inception_v3/activation_93/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_93/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"inception_v3/activation_93/fake_quantize/AsymmQuant/ReadVariableOp" -> "inception_v3/activation_93/fake_quantize/AsymmQuant/add_1"; +"inception_v3/activation_93/fake_quantize/AsymmQuant/add_1" -> "inception_v3/activation_93/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_93/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "inception_v3/activation_93/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_93/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "inception_v3/mixed10/concat"; "inception_v3/mixed10/concat/axis" -> "inception_v3/mixed10/concat"; "inception_v3/mixed10/concat" -> "inception_v3/avg_pool/Mean"; "inception_v3/avg_pool/Mean/reduction_indices" -> "inception_v3/avg_pool/Mean"; diff --git a/tests/tensorflow/data/reference_graphs/2.5/quantized/hw/GPU/inception_v3.dot b/tests/tensorflow/data/reference_graphs/2.5/quantized/hw/GPU/inception_v3.dot index 67419b94043..fd10f116ee0 100644 --- a/tests/tensorflow/data/reference_graphs/2.5/quantized/hw/GPU/inception_v3.dot +++ b/tests/tensorflow/data/reference_graphs/2.5/quantized/hw/GPU/inception_v3.dot @@ -2853,18 +2853,6 @@ args_0 [op=Placeholder]; "inception_v3/conv2d_87/SymmQuant/FakeQuantWithMinMaxVarsPerChannel/ReadVariableOp" [op=ReadVariableOp]; "inception_v3/conv2d_87/SymmQuant/FakeQuantWithMinMaxVarsPerChannel" [op=FakeQuantWithMinMaxVarsPerChannel]; "inception_v3/conv2d_87/Conv2D" [op=Conv2D]; -"inception_v3/conv2d_85/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; -"inception_v3/conv2d_85/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/conv2d_85/SymmQuant/Abs" [op=Abs]; 
-"inception_v3/conv2d_85/SymmQuant/add/y" [op=Const]; -"inception_v3/conv2d_85/SymmQuant/add" [op=AddV2]; -"inception_v3/conv2d_85/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; -"inception_v3/conv2d_85/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/conv2d_85/SymmQuant/mul" [op=Mul]; -"inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel/ReadVariableOp/resource" [op=Placeholder]; -"inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel" [op=FakeQuantWithMinMaxVarsPerChannel]; -"inception_v3/conv2d_85/Conv2D" [op=Conv2D]; "inception_v3/conv2d_93/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; "inception_v3/conv2d_93/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; "inception_v3/conv2d_93/SymmQuant/Abs" [op=Abs]; @@ -2909,14 +2897,18 @@ args_0 [op=Placeholder]; "inception_v3/batch_normalization_87/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; "inception_v3/batch_normalization_87/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; "inception_v3/batch_normalization_87/FusedBatchNormV3" [op=FusedBatchNormV3]; -"inception_v3/batch_normalization_85/scale" [op=Placeholder]; -"inception_v3/batch_normalization_85/ReadVariableOp/resource" [op=Placeholder]; -"inception_v3/batch_normalization_85/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; -"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; -"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; -"inception_v3/batch_normalization_85/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_v3/conv2d_85/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/conv2d_85/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/conv2d_85/SymmQuant/Abs" [op=Abs]; +"inception_v3/conv2d_85/SymmQuant/add/y" [op=Const]; +"inception_v3/conv2d_85/SymmQuant/add" [op=AddV2]; +"inception_v3/conv2d_85/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/conv2d_85/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/conv2d_85/SymmQuant/mul" [op=Mul]; +"inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel" [op=FakeQuantWithMinMaxVarsPerChannel]; +"inception_v3/conv2d_85/Conv2D" [op=Conv2D]; "inception_v3/batch_normalization_93/scale" [op=Placeholder]; "inception_v3/batch_normalization_93/ReadVariableOp/resource" [op=Placeholder]; "inception_v3/batch_normalization_93/ReadVariableOp" [op=ReadVariableOp]; @@ -2929,62 +2921,80 @@ args_0 [op=Placeholder]; "inception_v3/activation_91/Relu" [op=Relu]; "inception_v3/activation_88/Relu" [op=Relu]; "inception_v3/activation_87/Relu" [op=Relu]; -"inception_v3/activation_85/Relu" [op=Relu]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/Abs" 
[op=Abs]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/add/y" [op=Const]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/add" [op=AddV2]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/add_1" [op=AddV2]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"inception_v3/batch_normalization_85/scale" [op=Placeholder]; +"inception_v3/batch_normalization_85/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/batch_normalization_85/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_v3/batch_normalization_85/FusedBatchNormV3" [op=FusedBatchNormV3]; "inception_v3/activation_93/Relu" [op=Relu]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/Abs/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/Abs" [op=Abs]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/add/y" [op=Const]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/add" [op=AddV2]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/add_1" [op=AddV2]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/Abs/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/Abs" [op=Abs]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/add/y" [op=Const]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/add" [op=AddV2]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/add_1" [op=AddV2]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/Abs/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/Abs" [op=Abs]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/add/y" [op=Const]; 
-"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/add" [op=AddV2]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/add_1" [op=AddV2]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/Abs/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/Abs" [op=Abs]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/add/y" [op=Const]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/add" [op=AddV2]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/add_1" [op=AddV2]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"inception_v3/activation_91/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/activation_91/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_91/fake_quantize/AsymmQuant/Abs" [op=Abs]; +"inception_v3/activation_91/fake_quantize/AsymmQuant/add/y" [op=Const]; +"inception_v3/activation_91/fake_quantize/AsymmQuant/add" [op=AddV2]; +"inception_v3/activation_91/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/activation_91/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_91/fake_quantize/AsymmQuant/add_1" [op=AddV2]; +"inception_v3/activation_91/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_91/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"inception_v3/activation_92/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/activation_92/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_92/fake_quantize/AsymmQuant/Abs" [op=Abs]; +"inception_v3/activation_92/fake_quantize/AsymmQuant/add/y" [op=Const]; +"inception_v3/activation_92/fake_quantize/AsymmQuant/add" [op=AddV2]; +"inception_v3/activation_92/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/activation_92/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_92/fake_quantize/AsymmQuant/add_1" [op=AddV2]; +"inception_v3/activation_92/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_92/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"inception_v3/activation_87/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/activation_87/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_87/fake_quantize/AsymmQuant/Abs" [op=Abs]; +"inception_v3/activation_87/fake_quantize/AsymmQuant/add/y" 
[op=Const]; +"inception_v3/activation_87/fake_quantize/AsymmQuant/add" [op=AddV2]; +"inception_v3/activation_87/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/activation_87/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_87/fake_quantize/AsymmQuant/add_1" [op=AddV2]; +"inception_v3/activation_87/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_87/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"inception_v3/activation_88/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/activation_88/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_88/fake_quantize/AsymmQuant/Abs" [op=Abs]; +"inception_v3/activation_88/fake_quantize/AsymmQuant/add/y" [op=Const]; +"inception_v3/activation_88/fake_quantize/AsymmQuant/add" [op=AddV2]; +"inception_v3/activation_88/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/activation_88/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_88/fake_quantize/AsymmQuant/add_1" [op=AddV2]; +"inception_v3/activation_88/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_88/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"inception_v3/activation_85/Relu" [op=Relu]; +"inception_v3/activation_85/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/activation_85/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_85/fake_quantize/AsymmQuant/Abs" [op=Abs]; +"inception_v3/activation_85/fake_quantize/AsymmQuant/add/y" [op=Const]; +"inception_v3/activation_85/fake_quantize/AsymmQuant/add" [op=AddV2]; +"inception_v3/activation_85/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/activation_85/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_85/fake_quantize/AsymmQuant/add_1" [op=AddV2]; +"inception_v3/activation_85/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_85/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; "inception_v3/mixed9_1/concat/axis" [op=Const]; "inception_v3/mixed9_1/concat" [op=ConcatV2]; "inception_v3/concatenate_1/concat/axis" [op=Const]; "inception_v3/concatenate_1/concat" [op=ConcatV2]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/Abs/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/Abs" [op=Abs]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/add/y" [op=Const]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/add" [op=AddV2]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/add_1" [op=AddV2]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"inception_v3/activation_93/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" 
[op=Placeholder]; +"inception_v3/activation_93/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_93/fake_quantize/AsymmQuant/Abs" [op=Abs]; +"inception_v3/activation_93/fake_quantize/AsymmQuant/add/y" [op=Const]; +"inception_v3/activation_93/fake_quantize/AsymmQuant/add" [op=AddV2]; +"inception_v3/activation_93/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/activation_93/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_93/fake_quantize/AsymmQuant/add_1" [op=AddV2]; +"inception_v3/activation_93/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_93/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; "inception_v3/mixed10/concat/axis" [op=Const]; "inception_v3/mixed10/concat" [op=ConcatV2]; "inception_v3/avg_pool/Mean/reduction_indices" [op=Const]; @@ -6155,19 +6165,6 @@ args_0 -> "inception_v3/input_1/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars "inception_v3/conv2d_87/SymmQuant/FakeQuantWithMinMaxVarsPerChannel/ReadVariableOp" -> "inception_v3/conv2d_87/SymmQuant/FakeQuantWithMinMaxVarsPerChannel"; "inception_v3/conv2d_87/SymmQuant/FakeQuantWithMinMaxVarsPerChannel" -> "inception_v3/conv2d_87/Conv2D"; "inception_v3/conv2d_87/Conv2D" -> "inception_v3/batch_normalization_87/FusedBatchNormV3"; -"inception_v3/conv2d_85/SymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/conv2d_85/SymmQuant/Abs/ReadVariableOp"; -"inception_v3/conv2d_85/SymmQuant/Abs/ReadVariableOp" -> "inception_v3/conv2d_85/SymmQuant/Abs"; -"inception_v3/conv2d_85/SymmQuant/Abs" -> "inception_v3/conv2d_85/SymmQuant/add"; -"inception_v3/conv2d_85/SymmQuant/add/y" -> "inception_v3/conv2d_85/SymmQuant/add"; -"inception_v3/conv2d_85/SymmQuant/add" -> "inception_v3/conv2d_85/SymmQuant/mul"; -"inception_v3/conv2d_85/SymmQuant/add" -> "inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel"; -"inception_v3/conv2d_85/SymmQuant/mul/ReadVariableOp/resource" -> "inception_v3/conv2d_85/SymmQuant/mul/ReadVariableOp"; -"inception_v3/conv2d_85/SymmQuant/mul/ReadVariableOp" -> "inception_v3/conv2d_85/SymmQuant/mul"; -"inception_v3/conv2d_85/SymmQuant/mul" -> "inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel"; -"inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel/ReadVariableOp/resource" -> "inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel/ReadVariableOp"; -"inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel/ReadVariableOp" -> "inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel"; -"inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel" -> "inception_v3/conv2d_85/Conv2D"; -"inception_v3/conv2d_85/Conv2D" -> "inception_v3/batch_normalization_85/FusedBatchNormV3"; "inception_v3/conv2d_93/SymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/conv2d_93/SymmQuant/Abs/ReadVariableOp"; "inception_v3/conv2d_93/SymmQuant/Abs/ReadVariableOp" -> "inception_v3/conv2d_93/SymmQuant/Abs"; "inception_v3/conv2d_93/SymmQuant/Abs" -> "inception_v3/conv2d_93/SymmQuant/add"; @@ -6213,14 +6210,19 @@ args_0 -> "inception_v3/input_1/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars "inception_v3/batch_normalization_87/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_v3/batch_normalization_87/FusedBatchNormV3/ReadVariableOp_1"; "inception_v3/batch_normalization_87/FusedBatchNormV3/ReadVariableOp_1" -> 
"inception_v3/batch_normalization_87/FusedBatchNormV3"; "inception_v3/batch_normalization_87/FusedBatchNormV3" -> "inception_v3/activation_87/Relu"; -"inception_v3/batch_normalization_85/scale" -> "inception_v3/batch_normalization_85/FusedBatchNormV3"; -"inception_v3/batch_normalization_85/ReadVariableOp/resource" -> "inception_v3/batch_normalization_85/ReadVariableOp"; -"inception_v3/batch_normalization_85/ReadVariableOp" -> "inception_v3/batch_normalization_85/FusedBatchNormV3"; -"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp"; -"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp" -> "inception_v3/batch_normalization_85/FusedBatchNormV3"; -"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp_1"; -"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp_1" -> "inception_v3/batch_normalization_85/FusedBatchNormV3"; -"inception_v3/batch_normalization_85/FusedBatchNormV3" -> "inception_v3/activation_85/Relu"; +"inception_v3/conv2d_85/SymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/conv2d_85/SymmQuant/Abs/ReadVariableOp"; +"inception_v3/conv2d_85/SymmQuant/Abs/ReadVariableOp" -> "inception_v3/conv2d_85/SymmQuant/Abs"; +"inception_v3/conv2d_85/SymmQuant/Abs" -> "inception_v3/conv2d_85/SymmQuant/add"; +"inception_v3/conv2d_85/SymmQuant/add/y" -> "inception_v3/conv2d_85/SymmQuant/add"; +"inception_v3/conv2d_85/SymmQuant/add" -> "inception_v3/conv2d_85/SymmQuant/mul"; +"inception_v3/conv2d_85/SymmQuant/add" -> "inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel"; +"inception_v3/conv2d_85/SymmQuant/mul/ReadVariableOp/resource" -> "inception_v3/conv2d_85/SymmQuant/mul/ReadVariableOp"; +"inception_v3/conv2d_85/SymmQuant/mul/ReadVariableOp" -> "inception_v3/conv2d_85/SymmQuant/mul"; +"inception_v3/conv2d_85/SymmQuant/mul" -> "inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel"; +"inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel/ReadVariableOp/resource" -> "inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel/ReadVariableOp"; +"inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel/ReadVariableOp" -> "inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel"; +"inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel" -> "inception_v3/conv2d_85/Conv2D"; +"inception_v3/conv2d_85/Conv2D" -> "inception_v3/batch_normalization_85/FusedBatchNormV3"; "inception_v3/batch_normalization_93/scale" -> "inception_v3/batch_normalization_93/FusedBatchNormV3"; "inception_v3/batch_normalization_93/ReadVariableOp/resource" -> "inception_v3/batch_normalization_93/ReadVariableOp"; "inception_v3/batch_normalization_93/ReadVariableOp" -> "inception_v3/batch_normalization_93/FusedBatchNormV3"; @@ -6229,82 +6231,90 @@ args_0 -> "inception_v3/input_1/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars "inception_v3/batch_normalization_93/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_v3/batch_normalization_93/FusedBatchNormV3/ReadVariableOp_1"; "inception_v3/batch_normalization_93/FusedBatchNormV3/ReadVariableOp_1" -> "inception_v3/batch_normalization_93/FusedBatchNormV3"; "inception_v3/batch_normalization_93/FusedBatchNormV3" -> "inception_v3/activation_93/Relu"; -"inception_v3/activation_92/Relu" -> 
"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars"; -"inception_v3/activation_91/Relu" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars"; -"inception_v3/activation_88/Relu" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/Relu" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars"; -"inception_v3/activation_85/Relu" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/Abs/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/Abs/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/Abs/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/Abs/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/Abs/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/Abs"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/Abs" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/add"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/add/y" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/add"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/add" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/add_1"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/ReadVariableOp"; 
-"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/FakeQuantWithMinMaxVars/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/FakeQuantWithMinMaxVars/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/add_1"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/add_1" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars" -> "inception_v3/mixed10/concat"; -"inception_v3/activation_93/Relu" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/Abs/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/Abs"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/Abs" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/add"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/add/y" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/add"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/add" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/add_1"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/add_1"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/add_1" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars/ReadVariableOp" -> 
"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars" -> "inception_v3/concatenate_1/concat"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/Abs/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/Abs"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/Abs" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/add"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/add/y" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/add"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/add" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/add_1"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/add_1"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/add_1" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars" -> "inception_v3/concatenate_1/concat"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/Abs/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/Abs"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/Abs" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/add"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/add/y" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/add"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/add" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/add_1"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/add_1"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/add_1" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars" -> "inception_v3/mixed9_1/concat"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/Abs/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/Abs"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/Abs" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/add"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/add/y" -> 
"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/add"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/add" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/add_1"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/add_1"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/add_1" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/FakeQuantWithMinMaxVars/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/FakeQuantWithMinMaxVars" -> "inception_v3/mixed9_1/concat"; +"inception_v3/activation_92/Relu" -> "inception_v3/activation_92/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_91/Relu" -> "inception_v3/activation_91/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_88/Relu" -> "inception_v3/activation_88/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_87/Relu" -> "inception_v3/activation_87/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/batch_normalization_85/scale" -> "inception_v3/batch_normalization_85/FusedBatchNormV3"; +"inception_v3/batch_normalization_85/ReadVariableOp/resource" -> "inception_v3/batch_normalization_85/ReadVariableOp"; +"inception_v3/batch_normalization_85/ReadVariableOp" -> "inception_v3/batch_normalization_85/FusedBatchNormV3"; +"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp"; +"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp" -> "inception_v3/batch_normalization_85/FusedBatchNormV3"; +"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp_1"; +"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp_1" -> "inception_v3/batch_normalization_85/FusedBatchNormV3"; +"inception_v3/batch_normalization_85/FusedBatchNormV3" -> "inception_v3/activation_85/Relu"; +"inception_v3/activation_93/Relu" -> "inception_v3/activation_93/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_91/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/activation_91/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; +"inception_v3/activation_91/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "inception_v3/activation_91/fake_quantize/AsymmQuant/Abs"; +"inception_v3/activation_91/fake_quantize/AsymmQuant/Abs" -> "inception_v3/activation_91/fake_quantize/AsymmQuant/add"; +"inception_v3/activation_91/fake_quantize/AsymmQuant/add/y" -> "inception_v3/activation_91/fake_quantize/AsymmQuant/add"; +"inception_v3/activation_91/fake_quantize/AsymmQuant/add" -> "inception_v3/activation_91/fake_quantize/AsymmQuant/add_1"; +"inception_v3/activation_91/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_91/fake_quantize/AsymmQuant/ReadVariableOp"; +"inception_v3/activation_91/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_91/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; 
+"inception_v3/activation_91/fake_quantize/AsymmQuant/ReadVariableOp" -> "inception_v3/activation_91/fake_quantize/AsymmQuant/add_1"; +"inception_v3/activation_91/fake_quantize/AsymmQuant/add_1" -> "inception_v3/activation_91/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_91/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "inception_v3/activation_91/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_91/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "inception_v3/concatenate_1/concat"; +"inception_v3/activation_92/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/activation_92/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; +"inception_v3/activation_92/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "inception_v3/activation_92/fake_quantize/AsymmQuant/Abs"; +"inception_v3/activation_92/fake_quantize/AsymmQuant/Abs" -> "inception_v3/activation_92/fake_quantize/AsymmQuant/add"; +"inception_v3/activation_92/fake_quantize/AsymmQuant/add/y" -> "inception_v3/activation_92/fake_quantize/AsymmQuant/add"; +"inception_v3/activation_92/fake_quantize/AsymmQuant/add" -> "inception_v3/activation_92/fake_quantize/AsymmQuant/add_1"; +"inception_v3/activation_92/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_92/fake_quantize/AsymmQuant/ReadVariableOp"; +"inception_v3/activation_92/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_92/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"inception_v3/activation_92/fake_quantize/AsymmQuant/ReadVariableOp" -> "inception_v3/activation_92/fake_quantize/AsymmQuant/add_1"; +"inception_v3/activation_92/fake_quantize/AsymmQuant/add_1" -> "inception_v3/activation_92/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_92/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "inception_v3/activation_92/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_92/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "inception_v3/concatenate_1/concat"; +"inception_v3/activation_87/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; +"inception_v3/activation_87/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/AsymmQuant/Abs"; +"inception_v3/activation_87/fake_quantize/AsymmQuant/Abs" -> "inception_v3/activation_87/fake_quantize/AsymmQuant/add"; +"inception_v3/activation_87/fake_quantize/AsymmQuant/add/y" -> "inception_v3/activation_87/fake_quantize/AsymmQuant/add"; +"inception_v3/activation_87/fake_quantize/AsymmQuant/add" -> "inception_v3/activation_87/fake_quantize/AsymmQuant/add_1"; +"inception_v3/activation_87/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/AsymmQuant/ReadVariableOp"; +"inception_v3/activation_87/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"inception_v3/activation_87/fake_quantize/AsymmQuant/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/AsymmQuant/add_1"; +"inception_v3/activation_87/fake_quantize/AsymmQuant/add_1" -> "inception_v3/activation_87/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_87/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> 
"inception_v3/activation_87/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_87/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "inception_v3/mixed9_1/concat"; +"inception_v3/activation_88/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/activation_88/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; +"inception_v3/activation_88/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "inception_v3/activation_88/fake_quantize/AsymmQuant/Abs"; +"inception_v3/activation_88/fake_quantize/AsymmQuant/Abs" -> "inception_v3/activation_88/fake_quantize/AsymmQuant/add"; +"inception_v3/activation_88/fake_quantize/AsymmQuant/add/y" -> "inception_v3/activation_88/fake_quantize/AsymmQuant/add"; +"inception_v3/activation_88/fake_quantize/AsymmQuant/add" -> "inception_v3/activation_88/fake_quantize/AsymmQuant/add_1"; +"inception_v3/activation_88/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_88/fake_quantize/AsymmQuant/ReadVariableOp"; +"inception_v3/activation_88/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_88/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"inception_v3/activation_88/fake_quantize/AsymmQuant/ReadVariableOp" -> "inception_v3/activation_88/fake_quantize/AsymmQuant/add_1"; +"inception_v3/activation_88/fake_quantize/AsymmQuant/add_1" -> "inception_v3/activation_88/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_88/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "inception_v3/activation_88/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_88/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "inception_v3/mixed9_1/concat"; +"inception_v3/activation_85/Relu" -> "inception_v3/activation_85/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_85/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/activation_85/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; +"inception_v3/activation_85/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "inception_v3/activation_85/fake_quantize/AsymmQuant/Abs"; +"inception_v3/activation_85/fake_quantize/AsymmQuant/Abs" -> "inception_v3/activation_85/fake_quantize/AsymmQuant/add"; +"inception_v3/activation_85/fake_quantize/AsymmQuant/add/y" -> "inception_v3/activation_85/fake_quantize/AsymmQuant/add"; +"inception_v3/activation_85/fake_quantize/AsymmQuant/add" -> "inception_v3/activation_85/fake_quantize/AsymmQuant/add_1"; +"inception_v3/activation_85/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_85/fake_quantize/AsymmQuant/ReadVariableOp"; +"inception_v3/activation_85/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_85/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"inception_v3/activation_85/fake_quantize/AsymmQuant/ReadVariableOp" -> "inception_v3/activation_85/fake_quantize/AsymmQuant/add_1"; +"inception_v3/activation_85/fake_quantize/AsymmQuant/add_1" -> "inception_v3/activation_85/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_85/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "inception_v3/activation_85/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_85/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "inception_v3/mixed10/concat"; "inception_v3/mixed9_1/concat/axis" -> "inception_v3/mixed9_1/concat"; "inception_v3/mixed9_1/concat" -> 
"inception_v3/mixed10/concat"; "inception_v3/concatenate_1/concat/axis" -> "inception_v3/concatenate_1/concat"; "inception_v3/concatenate_1/concat" -> "inception_v3/mixed10/concat"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/Abs/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/Abs"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/Abs" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/add"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/add/y" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/add"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/add" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/add_1"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/add_1"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/add_1" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/FakeQuantWithMinMaxVars/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/FakeQuantWithMinMaxVars" -> "inception_v3/mixed10/concat"; +"inception_v3/activation_93/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/activation_93/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; +"inception_v3/activation_93/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "inception_v3/activation_93/fake_quantize/AsymmQuant/Abs"; +"inception_v3/activation_93/fake_quantize/AsymmQuant/Abs" -> "inception_v3/activation_93/fake_quantize/AsymmQuant/add"; +"inception_v3/activation_93/fake_quantize/AsymmQuant/add/y" -> "inception_v3/activation_93/fake_quantize/AsymmQuant/add"; +"inception_v3/activation_93/fake_quantize/AsymmQuant/add" -> "inception_v3/activation_93/fake_quantize/AsymmQuant/add_1"; +"inception_v3/activation_93/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_93/fake_quantize/AsymmQuant/ReadVariableOp"; +"inception_v3/activation_93/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_93/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"inception_v3/activation_93/fake_quantize/AsymmQuant/ReadVariableOp" -> "inception_v3/activation_93/fake_quantize/AsymmQuant/add_1"; +"inception_v3/activation_93/fake_quantize/AsymmQuant/add_1" -> "inception_v3/activation_93/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_93/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "inception_v3/activation_93/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_93/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "inception_v3/mixed10/concat"; "inception_v3/mixed10/concat/axis" -> "inception_v3/mixed10/concat"; "inception_v3/mixed10/concat" -> "inception_v3/avg_pool/Mean"; "inception_v3/avg_pool/Mean/reduction_indices" -> "inception_v3/avg_pool/Mean"; diff --git a/tests/tensorflow/data/reference_graphs/2.5/quantized/hw/VPU/inception_v3.dot b/tests/tensorflow/data/reference_graphs/2.5/quantized/hw/VPU/inception_v3.dot index 67419b94043..fd10f116ee0 
100644 --- a/tests/tensorflow/data/reference_graphs/2.5/quantized/hw/VPU/inception_v3.dot +++ b/tests/tensorflow/data/reference_graphs/2.5/quantized/hw/VPU/inception_v3.dot @@ -2853,18 +2853,6 @@ args_0 [op=Placeholder]; "inception_v3/conv2d_87/SymmQuant/FakeQuantWithMinMaxVarsPerChannel/ReadVariableOp" [op=ReadVariableOp]; "inception_v3/conv2d_87/SymmQuant/FakeQuantWithMinMaxVarsPerChannel" [op=FakeQuantWithMinMaxVarsPerChannel]; "inception_v3/conv2d_87/Conv2D" [op=Conv2D]; -"inception_v3/conv2d_85/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; -"inception_v3/conv2d_85/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/conv2d_85/SymmQuant/Abs" [op=Abs]; -"inception_v3/conv2d_85/SymmQuant/add/y" [op=Const]; -"inception_v3/conv2d_85/SymmQuant/add" [op=AddV2]; -"inception_v3/conv2d_85/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; -"inception_v3/conv2d_85/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/conv2d_85/SymmQuant/mul" [op=Mul]; -"inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel/ReadVariableOp/resource" [op=Placeholder]; -"inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel" [op=FakeQuantWithMinMaxVarsPerChannel]; -"inception_v3/conv2d_85/Conv2D" [op=Conv2D]; "inception_v3/conv2d_93/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; "inception_v3/conv2d_93/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; "inception_v3/conv2d_93/SymmQuant/Abs" [op=Abs]; @@ -2909,14 +2897,18 @@ args_0 [op=Placeholder]; "inception_v3/batch_normalization_87/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; "inception_v3/batch_normalization_87/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; "inception_v3/batch_normalization_87/FusedBatchNormV3" [op=FusedBatchNormV3]; -"inception_v3/batch_normalization_85/scale" [op=Placeholder]; -"inception_v3/batch_normalization_85/ReadVariableOp/resource" [op=Placeholder]; -"inception_v3/batch_normalization_85/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; -"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; -"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; -"inception_v3/batch_normalization_85/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_v3/conv2d_85/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/conv2d_85/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/conv2d_85/SymmQuant/Abs" [op=Abs]; +"inception_v3/conv2d_85/SymmQuant/add/y" [op=Const]; +"inception_v3/conv2d_85/SymmQuant/add" [op=AddV2]; +"inception_v3/conv2d_85/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/conv2d_85/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/conv2d_85/SymmQuant/mul" [op=Mul]; +"inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel" [op=FakeQuantWithMinMaxVarsPerChannel]; +"inception_v3/conv2d_85/Conv2D" [op=Conv2D]; "inception_v3/batch_normalization_93/scale" [op=Placeholder]; 
"inception_v3/batch_normalization_93/ReadVariableOp/resource" [op=Placeholder]; "inception_v3/batch_normalization_93/ReadVariableOp" [op=ReadVariableOp]; @@ -2929,62 +2921,80 @@ args_0 [op=Placeholder]; "inception_v3/activation_91/Relu" [op=Relu]; "inception_v3/activation_88/Relu" [op=Relu]; "inception_v3/activation_87/Relu" [op=Relu]; -"inception_v3/activation_85/Relu" [op=Relu]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/Abs" [op=Abs]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/add/y" [op=Const]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/add" [op=AddV2]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/add_1" [op=AddV2]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"inception_v3/batch_normalization_85/scale" [op=Placeholder]; +"inception_v3/batch_normalization_85/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/batch_normalization_85/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_v3/batch_normalization_85/FusedBatchNormV3" [op=FusedBatchNormV3]; "inception_v3/activation_93/Relu" [op=Relu]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/Abs/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/Abs" [op=Abs]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/add/y" [op=Const]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/add" [op=AddV2]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/add_1" [op=AddV2]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/Abs/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/Abs" [op=Abs]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/add/y" [op=Const]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/add" [op=AddV2]; 
-"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/add_1" [op=AddV2]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/Abs/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/Abs" [op=Abs]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/add/y" [op=Const]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/add" [op=AddV2]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/add_1" [op=AddV2]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/Abs/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/Abs" [op=Abs]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/add/y" [op=Const]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/add" [op=AddV2]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/add_1" [op=AddV2]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"inception_v3/activation_91/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/activation_91/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_91/fake_quantize/AsymmQuant/Abs" [op=Abs]; +"inception_v3/activation_91/fake_quantize/AsymmQuant/add/y" [op=Const]; +"inception_v3/activation_91/fake_quantize/AsymmQuant/add" [op=AddV2]; +"inception_v3/activation_91/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/activation_91/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_91/fake_quantize/AsymmQuant/add_1" [op=AddV2]; +"inception_v3/activation_91/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_91/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"inception_v3/activation_92/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/activation_92/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_92/fake_quantize/AsymmQuant/Abs" [op=Abs]; +"inception_v3/activation_92/fake_quantize/AsymmQuant/add/y" [op=Const]; +"inception_v3/activation_92/fake_quantize/AsymmQuant/add" [op=AddV2]; 
+"inception_v3/activation_92/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/activation_92/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_92/fake_quantize/AsymmQuant/add_1" [op=AddV2]; +"inception_v3/activation_92/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_92/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"inception_v3/activation_87/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/activation_87/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_87/fake_quantize/AsymmQuant/Abs" [op=Abs]; +"inception_v3/activation_87/fake_quantize/AsymmQuant/add/y" [op=Const]; +"inception_v3/activation_87/fake_quantize/AsymmQuant/add" [op=AddV2]; +"inception_v3/activation_87/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/activation_87/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_87/fake_quantize/AsymmQuant/add_1" [op=AddV2]; +"inception_v3/activation_87/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_87/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"inception_v3/activation_88/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/activation_88/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_88/fake_quantize/AsymmQuant/Abs" [op=Abs]; +"inception_v3/activation_88/fake_quantize/AsymmQuant/add/y" [op=Const]; +"inception_v3/activation_88/fake_quantize/AsymmQuant/add" [op=AddV2]; +"inception_v3/activation_88/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/activation_88/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_88/fake_quantize/AsymmQuant/add_1" [op=AddV2]; +"inception_v3/activation_88/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_88/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"inception_v3/activation_85/Relu" [op=Relu]; +"inception_v3/activation_85/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/activation_85/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_85/fake_quantize/AsymmQuant/Abs" [op=Abs]; +"inception_v3/activation_85/fake_quantize/AsymmQuant/add/y" [op=Const]; +"inception_v3/activation_85/fake_quantize/AsymmQuant/add" [op=AddV2]; +"inception_v3/activation_85/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/activation_85/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_85/fake_quantize/AsymmQuant/add_1" [op=AddV2]; +"inception_v3/activation_85/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_85/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; "inception_v3/mixed9_1/concat/axis" [op=Const]; "inception_v3/mixed9_1/concat" [op=ConcatV2]; "inception_v3/concatenate_1/concat/axis" [op=Const]; "inception_v3/concatenate_1/concat" [op=ConcatV2]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/Abs/ReadVariableOp" [op=ReadVariableOp]; 
-"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/Abs" [op=Abs]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/add/y" [op=Const]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/add" [op=AddV2]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/add_1" [op=AddV2]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"inception_v3/activation_93/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/activation_93/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_93/fake_quantize/AsymmQuant/Abs" [op=Abs]; +"inception_v3/activation_93/fake_quantize/AsymmQuant/add/y" [op=Const]; +"inception_v3/activation_93/fake_quantize/AsymmQuant/add" [op=AddV2]; +"inception_v3/activation_93/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/activation_93/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_93/fake_quantize/AsymmQuant/add_1" [op=AddV2]; +"inception_v3/activation_93/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_93/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; "inception_v3/mixed10/concat/axis" [op=Const]; "inception_v3/mixed10/concat" [op=ConcatV2]; "inception_v3/avg_pool/Mean/reduction_indices" [op=Const]; @@ -6155,19 +6165,6 @@ args_0 -> "inception_v3/input_1/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars "inception_v3/conv2d_87/SymmQuant/FakeQuantWithMinMaxVarsPerChannel/ReadVariableOp" -> "inception_v3/conv2d_87/SymmQuant/FakeQuantWithMinMaxVarsPerChannel"; "inception_v3/conv2d_87/SymmQuant/FakeQuantWithMinMaxVarsPerChannel" -> "inception_v3/conv2d_87/Conv2D"; "inception_v3/conv2d_87/Conv2D" -> "inception_v3/batch_normalization_87/FusedBatchNormV3"; -"inception_v3/conv2d_85/SymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/conv2d_85/SymmQuant/Abs/ReadVariableOp"; -"inception_v3/conv2d_85/SymmQuant/Abs/ReadVariableOp" -> "inception_v3/conv2d_85/SymmQuant/Abs"; -"inception_v3/conv2d_85/SymmQuant/Abs" -> "inception_v3/conv2d_85/SymmQuant/add"; -"inception_v3/conv2d_85/SymmQuant/add/y" -> "inception_v3/conv2d_85/SymmQuant/add"; -"inception_v3/conv2d_85/SymmQuant/add" -> "inception_v3/conv2d_85/SymmQuant/mul"; -"inception_v3/conv2d_85/SymmQuant/add" -> "inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel"; -"inception_v3/conv2d_85/SymmQuant/mul/ReadVariableOp/resource" -> "inception_v3/conv2d_85/SymmQuant/mul/ReadVariableOp"; -"inception_v3/conv2d_85/SymmQuant/mul/ReadVariableOp" -> "inception_v3/conv2d_85/SymmQuant/mul"; -"inception_v3/conv2d_85/SymmQuant/mul" -> "inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel"; -"inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel/ReadVariableOp/resource" -> "inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel/ReadVariableOp"; -"inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel/ReadVariableOp" -> "inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel"; 
-"inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel" -> "inception_v3/conv2d_85/Conv2D"; -"inception_v3/conv2d_85/Conv2D" -> "inception_v3/batch_normalization_85/FusedBatchNormV3"; "inception_v3/conv2d_93/SymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/conv2d_93/SymmQuant/Abs/ReadVariableOp"; "inception_v3/conv2d_93/SymmQuant/Abs/ReadVariableOp" -> "inception_v3/conv2d_93/SymmQuant/Abs"; "inception_v3/conv2d_93/SymmQuant/Abs" -> "inception_v3/conv2d_93/SymmQuant/add"; @@ -6213,14 +6210,19 @@ args_0 -> "inception_v3/input_1/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars "inception_v3/batch_normalization_87/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_v3/batch_normalization_87/FusedBatchNormV3/ReadVariableOp_1"; "inception_v3/batch_normalization_87/FusedBatchNormV3/ReadVariableOp_1" -> "inception_v3/batch_normalization_87/FusedBatchNormV3"; "inception_v3/batch_normalization_87/FusedBatchNormV3" -> "inception_v3/activation_87/Relu"; -"inception_v3/batch_normalization_85/scale" -> "inception_v3/batch_normalization_85/FusedBatchNormV3"; -"inception_v3/batch_normalization_85/ReadVariableOp/resource" -> "inception_v3/batch_normalization_85/ReadVariableOp"; -"inception_v3/batch_normalization_85/ReadVariableOp" -> "inception_v3/batch_normalization_85/FusedBatchNormV3"; -"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp"; -"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp" -> "inception_v3/batch_normalization_85/FusedBatchNormV3"; -"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp_1"; -"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp_1" -> "inception_v3/batch_normalization_85/FusedBatchNormV3"; -"inception_v3/batch_normalization_85/FusedBatchNormV3" -> "inception_v3/activation_85/Relu"; +"inception_v3/conv2d_85/SymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/conv2d_85/SymmQuant/Abs/ReadVariableOp"; +"inception_v3/conv2d_85/SymmQuant/Abs/ReadVariableOp" -> "inception_v3/conv2d_85/SymmQuant/Abs"; +"inception_v3/conv2d_85/SymmQuant/Abs" -> "inception_v3/conv2d_85/SymmQuant/add"; +"inception_v3/conv2d_85/SymmQuant/add/y" -> "inception_v3/conv2d_85/SymmQuant/add"; +"inception_v3/conv2d_85/SymmQuant/add" -> "inception_v3/conv2d_85/SymmQuant/mul"; +"inception_v3/conv2d_85/SymmQuant/add" -> "inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel"; +"inception_v3/conv2d_85/SymmQuant/mul/ReadVariableOp/resource" -> "inception_v3/conv2d_85/SymmQuant/mul/ReadVariableOp"; +"inception_v3/conv2d_85/SymmQuant/mul/ReadVariableOp" -> "inception_v3/conv2d_85/SymmQuant/mul"; +"inception_v3/conv2d_85/SymmQuant/mul" -> "inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel"; +"inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel/ReadVariableOp/resource" -> "inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel/ReadVariableOp"; +"inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel/ReadVariableOp" -> "inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel"; +"inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel" -> "inception_v3/conv2d_85/Conv2D"; +"inception_v3/conv2d_85/Conv2D" -> "inception_v3/batch_normalization_85/FusedBatchNormV3"; "inception_v3/batch_normalization_93/scale" -> "inception_v3/batch_normalization_93/FusedBatchNormV3"; 
"inception_v3/batch_normalization_93/ReadVariableOp/resource" -> "inception_v3/batch_normalization_93/ReadVariableOp"; "inception_v3/batch_normalization_93/ReadVariableOp" -> "inception_v3/batch_normalization_93/FusedBatchNormV3"; @@ -6229,82 +6231,90 @@ args_0 -> "inception_v3/input_1/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars "inception_v3/batch_normalization_93/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_v3/batch_normalization_93/FusedBatchNormV3/ReadVariableOp_1"; "inception_v3/batch_normalization_93/FusedBatchNormV3/ReadVariableOp_1" -> "inception_v3/batch_normalization_93/FusedBatchNormV3"; "inception_v3/batch_normalization_93/FusedBatchNormV3" -> "inception_v3/activation_93/Relu"; -"inception_v3/activation_92/Relu" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars"; -"inception_v3/activation_91/Relu" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars"; -"inception_v3/activation_88/Relu" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/Relu" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars"; -"inception_v3/activation_85/Relu" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/Abs/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/Abs/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/Abs/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/Abs/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/Abs/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/Abs"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/Abs" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/add"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/add/y" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/add"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/add" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/add_1"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp"; 
-"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/FakeQuantWithMinMaxVars/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/FakeQuantWithMinMaxVars/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/add_1"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/add_1" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars" -> "inception_v3/mixed10/concat"; -"inception_v3/activation_93/Relu" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/Abs/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/Abs"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/Abs" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/add"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/add/y" -> 
"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/add"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/add" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/add_1"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/add_1"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/add_1" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars" -> "inception_v3/concatenate_1/concat"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/Abs/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/Abs"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/Abs" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/add"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/add/y" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/add"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/add" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/add_1"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/add_1"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/add_1" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars" -> "inception_v3/concatenate_1/concat"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/Abs/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/Abs"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/Abs" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/add"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/add/y" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/add"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/add" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/add_1"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/add_1"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/add_1" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars/ReadVariableOp" -> 
"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars" -> "inception_v3/mixed9_1/concat"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/Abs/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/Abs"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/Abs" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/add"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/add/y" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/add"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/add" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/add_1"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/add_1"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/add_1" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/FakeQuantWithMinMaxVars/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/FakeQuantWithMinMaxVars" -> "inception_v3/mixed9_1/concat"; +"inception_v3/activation_92/Relu" -> "inception_v3/activation_92/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_91/Relu" -> "inception_v3/activation_91/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_88/Relu" -> "inception_v3/activation_88/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_87/Relu" -> "inception_v3/activation_87/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/batch_normalization_85/scale" -> "inception_v3/batch_normalization_85/FusedBatchNormV3"; +"inception_v3/batch_normalization_85/ReadVariableOp/resource" -> "inception_v3/batch_normalization_85/ReadVariableOp"; +"inception_v3/batch_normalization_85/ReadVariableOp" -> "inception_v3/batch_normalization_85/FusedBatchNormV3"; +"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp"; +"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp" -> "inception_v3/batch_normalization_85/FusedBatchNormV3"; +"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp_1"; +"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp_1" -> "inception_v3/batch_normalization_85/FusedBatchNormV3"; +"inception_v3/batch_normalization_85/FusedBatchNormV3" -> "inception_v3/activation_85/Relu"; +"inception_v3/activation_93/Relu" -> "inception_v3/activation_93/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_91/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/activation_91/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; +"inception_v3/activation_91/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "inception_v3/activation_91/fake_quantize/AsymmQuant/Abs"; 
+"inception_v3/activation_91/fake_quantize/AsymmQuant/Abs" -> "inception_v3/activation_91/fake_quantize/AsymmQuant/add"; +"inception_v3/activation_91/fake_quantize/AsymmQuant/add/y" -> "inception_v3/activation_91/fake_quantize/AsymmQuant/add"; +"inception_v3/activation_91/fake_quantize/AsymmQuant/add" -> "inception_v3/activation_91/fake_quantize/AsymmQuant/add_1"; +"inception_v3/activation_91/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_91/fake_quantize/AsymmQuant/ReadVariableOp"; +"inception_v3/activation_91/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_91/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"inception_v3/activation_91/fake_quantize/AsymmQuant/ReadVariableOp" -> "inception_v3/activation_91/fake_quantize/AsymmQuant/add_1"; +"inception_v3/activation_91/fake_quantize/AsymmQuant/add_1" -> "inception_v3/activation_91/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_91/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "inception_v3/activation_91/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_91/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "inception_v3/concatenate_1/concat"; +"inception_v3/activation_92/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/activation_92/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; +"inception_v3/activation_92/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "inception_v3/activation_92/fake_quantize/AsymmQuant/Abs"; +"inception_v3/activation_92/fake_quantize/AsymmQuant/Abs" -> "inception_v3/activation_92/fake_quantize/AsymmQuant/add"; +"inception_v3/activation_92/fake_quantize/AsymmQuant/add/y" -> "inception_v3/activation_92/fake_quantize/AsymmQuant/add"; +"inception_v3/activation_92/fake_quantize/AsymmQuant/add" -> "inception_v3/activation_92/fake_quantize/AsymmQuant/add_1"; +"inception_v3/activation_92/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_92/fake_quantize/AsymmQuant/ReadVariableOp"; +"inception_v3/activation_92/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_92/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"inception_v3/activation_92/fake_quantize/AsymmQuant/ReadVariableOp" -> "inception_v3/activation_92/fake_quantize/AsymmQuant/add_1"; +"inception_v3/activation_92/fake_quantize/AsymmQuant/add_1" -> "inception_v3/activation_92/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_92/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "inception_v3/activation_92/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_92/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "inception_v3/concatenate_1/concat"; +"inception_v3/activation_87/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; +"inception_v3/activation_87/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/AsymmQuant/Abs"; +"inception_v3/activation_87/fake_quantize/AsymmQuant/Abs" -> "inception_v3/activation_87/fake_quantize/AsymmQuant/add"; +"inception_v3/activation_87/fake_quantize/AsymmQuant/add/y" -> "inception_v3/activation_87/fake_quantize/AsymmQuant/add"; +"inception_v3/activation_87/fake_quantize/AsymmQuant/add" -> "inception_v3/activation_87/fake_quantize/AsymmQuant/add_1"; 
+"inception_v3/activation_87/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/AsymmQuant/ReadVariableOp"; +"inception_v3/activation_87/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"inception_v3/activation_87/fake_quantize/AsymmQuant/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/AsymmQuant/add_1"; +"inception_v3/activation_87/fake_quantize/AsymmQuant/add_1" -> "inception_v3/activation_87/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_87/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_87/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "inception_v3/mixed9_1/concat"; +"inception_v3/activation_88/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/activation_88/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; +"inception_v3/activation_88/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "inception_v3/activation_88/fake_quantize/AsymmQuant/Abs"; +"inception_v3/activation_88/fake_quantize/AsymmQuant/Abs" -> "inception_v3/activation_88/fake_quantize/AsymmQuant/add"; +"inception_v3/activation_88/fake_quantize/AsymmQuant/add/y" -> "inception_v3/activation_88/fake_quantize/AsymmQuant/add"; +"inception_v3/activation_88/fake_quantize/AsymmQuant/add" -> "inception_v3/activation_88/fake_quantize/AsymmQuant/add_1"; +"inception_v3/activation_88/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_88/fake_quantize/AsymmQuant/ReadVariableOp"; +"inception_v3/activation_88/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_88/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"inception_v3/activation_88/fake_quantize/AsymmQuant/ReadVariableOp" -> "inception_v3/activation_88/fake_quantize/AsymmQuant/add_1"; +"inception_v3/activation_88/fake_quantize/AsymmQuant/add_1" -> "inception_v3/activation_88/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_88/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "inception_v3/activation_88/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_88/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "inception_v3/mixed9_1/concat"; +"inception_v3/activation_85/Relu" -> "inception_v3/activation_85/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_85/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/activation_85/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; +"inception_v3/activation_85/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "inception_v3/activation_85/fake_quantize/AsymmQuant/Abs"; +"inception_v3/activation_85/fake_quantize/AsymmQuant/Abs" -> "inception_v3/activation_85/fake_quantize/AsymmQuant/add"; +"inception_v3/activation_85/fake_quantize/AsymmQuant/add/y" -> "inception_v3/activation_85/fake_quantize/AsymmQuant/add"; +"inception_v3/activation_85/fake_quantize/AsymmQuant/add" -> "inception_v3/activation_85/fake_quantize/AsymmQuant/add_1"; +"inception_v3/activation_85/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_85/fake_quantize/AsymmQuant/ReadVariableOp"; +"inception_v3/activation_85/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> 
"inception_v3/activation_85/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"inception_v3/activation_85/fake_quantize/AsymmQuant/ReadVariableOp" -> "inception_v3/activation_85/fake_quantize/AsymmQuant/add_1"; +"inception_v3/activation_85/fake_quantize/AsymmQuant/add_1" -> "inception_v3/activation_85/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_85/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "inception_v3/activation_85/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_85/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "inception_v3/mixed10/concat"; "inception_v3/mixed9_1/concat/axis" -> "inception_v3/mixed9_1/concat"; "inception_v3/mixed9_1/concat" -> "inception_v3/mixed10/concat"; "inception_v3/concatenate_1/concat/axis" -> "inception_v3/concatenate_1/concat"; "inception_v3/concatenate_1/concat" -> "inception_v3/mixed10/concat"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/Abs/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/Abs"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/Abs" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/add"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/add/y" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/add"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/add" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/add_1"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/add_1"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/add_1" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/FakeQuantWithMinMaxVars/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/FakeQuantWithMinMaxVars" -> "inception_v3/mixed10/concat"; +"inception_v3/activation_93/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/activation_93/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; +"inception_v3/activation_93/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "inception_v3/activation_93/fake_quantize/AsymmQuant/Abs"; +"inception_v3/activation_93/fake_quantize/AsymmQuant/Abs" -> "inception_v3/activation_93/fake_quantize/AsymmQuant/add"; +"inception_v3/activation_93/fake_quantize/AsymmQuant/add/y" -> "inception_v3/activation_93/fake_quantize/AsymmQuant/add"; +"inception_v3/activation_93/fake_quantize/AsymmQuant/add" -> "inception_v3/activation_93/fake_quantize/AsymmQuant/add_1"; +"inception_v3/activation_93/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_93/fake_quantize/AsymmQuant/ReadVariableOp"; +"inception_v3/activation_93/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_93/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"inception_v3/activation_93/fake_quantize/AsymmQuant/ReadVariableOp" -> "inception_v3/activation_93/fake_quantize/AsymmQuant/add_1"; +"inception_v3/activation_93/fake_quantize/AsymmQuant/add_1" -> 
"inception_v3/activation_93/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_93/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "inception_v3/activation_93/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_93/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "inception_v3/mixed10/concat"; "inception_v3/mixed10/concat/axis" -> "inception_v3/mixed10/concat"; "inception_v3/mixed10/concat" -> "inception_v3/avg_pool/Mean"; "inception_v3/avg_pool/Mean/reduction_indices" -> "inception_v3/avg_pool/Mean"; diff --git a/tests/tensorflow/data/reference_graphs/2.5/quantized/w_sym_ch_a_asym_t/densenet121.pb b/tests/tensorflow/data/reference_graphs/2.5/quantized/w_sym_ch_a_asym_t/densenet121.pb index bd8bc4ce15d..9c4998af98d 100644 --- a/tests/tensorflow/data/reference_graphs/2.5/quantized/w_sym_ch_a_asym_t/densenet121.pb +++ b/tests/tensorflow/data/reference_graphs/2.5/quantized/w_sym_ch_a_asym_t/densenet121.pb @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:dc8924e11836d2a94fdbd9f96a98b2d2aaee491643fd1c849899ea5b4e4f894a -size 1068267 +oid sha256:2537cec2fb310856230977c22e623fb26588e9fe09ca7e6cc748e899b51d9c1d +size 1067568 diff --git a/tests/tensorflow/data/reference_graphs/2.5/quantized/w_sym_ch_a_asym_t/inception_v3.dot b/tests/tensorflow/data/reference_graphs/2.5/quantized/w_sym_ch_a_asym_t/inception_v3.dot index 67419b94043..fd10f116ee0 100644 --- a/tests/tensorflow/data/reference_graphs/2.5/quantized/w_sym_ch_a_asym_t/inception_v3.dot +++ b/tests/tensorflow/data/reference_graphs/2.5/quantized/w_sym_ch_a_asym_t/inception_v3.dot @@ -2853,18 +2853,6 @@ args_0 [op=Placeholder]; "inception_v3/conv2d_87/SymmQuant/FakeQuantWithMinMaxVarsPerChannel/ReadVariableOp" [op=ReadVariableOp]; "inception_v3/conv2d_87/SymmQuant/FakeQuantWithMinMaxVarsPerChannel" [op=FakeQuantWithMinMaxVarsPerChannel]; "inception_v3/conv2d_87/Conv2D" [op=Conv2D]; -"inception_v3/conv2d_85/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; -"inception_v3/conv2d_85/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/conv2d_85/SymmQuant/Abs" [op=Abs]; -"inception_v3/conv2d_85/SymmQuant/add/y" [op=Const]; -"inception_v3/conv2d_85/SymmQuant/add" [op=AddV2]; -"inception_v3/conv2d_85/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; -"inception_v3/conv2d_85/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/conv2d_85/SymmQuant/mul" [op=Mul]; -"inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel/ReadVariableOp/resource" [op=Placeholder]; -"inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel" [op=FakeQuantWithMinMaxVarsPerChannel]; -"inception_v3/conv2d_85/Conv2D" [op=Conv2D]; "inception_v3/conv2d_93/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; "inception_v3/conv2d_93/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; "inception_v3/conv2d_93/SymmQuant/Abs" [op=Abs]; @@ -2909,14 +2897,18 @@ args_0 [op=Placeholder]; "inception_v3/batch_normalization_87/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; "inception_v3/batch_normalization_87/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; "inception_v3/batch_normalization_87/FusedBatchNormV3" [op=FusedBatchNormV3]; -"inception_v3/batch_normalization_85/scale" [op=Placeholder]; -"inception_v3/batch_normalization_85/ReadVariableOp/resource" [op=Placeholder]; 
-"inception_v3/batch_normalization_85/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; -"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; -"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; -"inception_v3/batch_normalization_85/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_v3/conv2d_85/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/conv2d_85/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/conv2d_85/SymmQuant/Abs" [op=Abs]; +"inception_v3/conv2d_85/SymmQuant/add/y" [op=Const]; +"inception_v3/conv2d_85/SymmQuant/add" [op=AddV2]; +"inception_v3/conv2d_85/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/conv2d_85/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/conv2d_85/SymmQuant/mul" [op=Mul]; +"inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel" [op=FakeQuantWithMinMaxVarsPerChannel]; +"inception_v3/conv2d_85/Conv2D" [op=Conv2D]; "inception_v3/batch_normalization_93/scale" [op=Placeholder]; "inception_v3/batch_normalization_93/ReadVariableOp/resource" [op=Placeholder]; "inception_v3/batch_normalization_93/ReadVariableOp" [op=ReadVariableOp]; @@ -2929,62 +2921,80 @@ args_0 [op=Placeholder]; "inception_v3/activation_91/Relu" [op=Relu]; "inception_v3/activation_88/Relu" [op=Relu]; "inception_v3/activation_87/Relu" [op=Relu]; -"inception_v3/activation_85/Relu" [op=Relu]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/Abs" [op=Abs]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/add/y" [op=Const]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/add" [op=AddV2]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/add_1" [op=AddV2]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"inception_v3/batch_normalization_85/scale" [op=Placeholder]; +"inception_v3/batch_normalization_85/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/batch_normalization_85/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; 
+"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_v3/batch_normalization_85/FusedBatchNormV3" [op=FusedBatchNormV3]; "inception_v3/activation_93/Relu" [op=Relu]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/Abs/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/Abs" [op=Abs]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/add/y" [op=Const]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/add" [op=AddV2]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/add_1" [op=AddV2]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/Abs/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/Abs" [op=Abs]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/add/y" [op=Const]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/add" [op=AddV2]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/add_1" [op=AddV2]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/Abs/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/Abs" [op=Abs]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/add/y" [op=Const]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/add" [op=AddV2]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/add_1" [op=AddV2]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/Abs/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/Abs" [op=Abs]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/add/y" [op=Const]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/add" [op=AddV2]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/add_1" [op=AddV2]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/FakeQuantWithMinMaxVars/ReadVariableOp" 
[op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"inception_v3/activation_91/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/activation_91/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_91/fake_quantize/AsymmQuant/Abs" [op=Abs]; +"inception_v3/activation_91/fake_quantize/AsymmQuant/add/y" [op=Const]; +"inception_v3/activation_91/fake_quantize/AsymmQuant/add" [op=AddV2]; +"inception_v3/activation_91/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/activation_91/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_91/fake_quantize/AsymmQuant/add_1" [op=AddV2]; +"inception_v3/activation_91/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_91/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"inception_v3/activation_92/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/activation_92/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_92/fake_quantize/AsymmQuant/Abs" [op=Abs]; +"inception_v3/activation_92/fake_quantize/AsymmQuant/add/y" [op=Const]; +"inception_v3/activation_92/fake_quantize/AsymmQuant/add" [op=AddV2]; +"inception_v3/activation_92/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/activation_92/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_92/fake_quantize/AsymmQuant/add_1" [op=AddV2]; +"inception_v3/activation_92/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_92/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"inception_v3/activation_87/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/activation_87/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_87/fake_quantize/AsymmQuant/Abs" [op=Abs]; +"inception_v3/activation_87/fake_quantize/AsymmQuant/add/y" [op=Const]; +"inception_v3/activation_87/fake_quantize/AsymmQuant/add" [op=AddV2]; +"inception_v3/activation_87/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/activation_87/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_87/fake_quantize/AsymmQuant/add_1" [op=AddV2]; +"inception_v3/activation_87/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_87/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"inception_v3/activation_88/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/activation_88/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_88/fake_quantize/AsymmQuant/Abs" [op=Abs]; +"inception_v3/activation_88/fake_quantize/AsymmQuant/add/y" [op=Const]; +"inception_v3/activation_88/fake_quantize/AsymmQuant/add" [op=AddV2]; +"inception_v3/activation_88/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/activation_88/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_88/fake_quantize/AsymmQuant/add_1" [op=AddV2]; 
+"inception_v3/activation_88/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_88/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"inception_v3/activation_85/Relu" [op=Relu]; +"inception_v3/activation_85/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/activation_85/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_85/fake_quantize/AsymmQuant/Abs" [op=Abs]; +"inception_v3/activation_85/fake_quantize/AsymmQuant/add/y" [op=Const]; +"inception_v3/activation_85/fake_quantize/AsymmQuant/add" [op=AddV2]; +"inception_v3/activation_85/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/activation_85/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_85/fake_quantize/AsymmQuant/add_1" [op=AddV2]; +"inception_v3/activation_85/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_85/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; "inception_v3/mixed9_1/concat/axis" [op=Const]; "inception_v3/mixed9_1/concat" [op=ConcatV2]; "inception_v3/concatenate_1/concat/axis" [op=Const]; "inception_v3/concatenate_1/concat" [op=ConcatV2]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/Abs/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/Abs" [op=Abs]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/add/y" [op=Const]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/add" [op=AddV2]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/add_1" [op=AddV2]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"inception_v3/activation_93/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/activation_93/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_93/fake_quantize/AsymmQuant/Abs" [op=Abs]; +"inception_v3/activation_93/fake_quantize/AsymmQuant/add/y" [op=Const]; +"inception_v3/activation_93/fake_quantize/AsymmQuant/add" [op=AddV2]; +"inception_v3/activation_93/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/activation_93/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_93/fake_quantize/AsymmQuant/add_1" [op=AddV2]; +"inception_v3/activation_93/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_93/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; "inception_v3/mixed10/concat/axis" [op=Const]; "inception_v3/mixed10/concat" [op=ConcatV2]; "inception_v3/avg_pool/Mean/reduction_indices" [op=Const]; @@ -6155,19 +6165,6 @@ args_0 -> "inception_v3/input_1/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars "inception_v3/conv2d_87/SymmQuant/FakeQuantWithMinMaxVarsPerChannel/ReadVariableOp" -> "inception_v3/conv2d_87/SymmQuant/FakeQuantWithMinMaxVarsPerChannel"; 
"inception_v3/conv2d_87/SymmQuant/FakeQuantWithMinMaxVarsPerChannel" -> "inception_v3/conv2d_87/Conv2D"; "inception_v3/conv2d_87/Conv2D" -> "inception_v3/batch_normalization_87/FusedBatchNormV3"; -"inception_v3/conv2d_85/SymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/conv2d_85/SymmQuant/Abs/ReadVariableOp"; -"inception_v3/conv2d_85/SymmQuant/Abs/ReadVariableOp" -> "inception_v3/conv2d_85/SymmQuant/Abs"; -"inception_v3/conv2d_85/SymmQuant/Abs" -> "inception_v3/conv2d_85/SymmQuant/add"; -"inception_v3/conv2d_85/SymmQuant/add/y" -> "inception_v3/conv2d_85/SymmQuant/add"; -"inception_v3/conv2d_85/SymmQuant/add" -> "inception_v3/conv2d_85/SymmQuant/mul"; -"inception_v3/conv2d_85/SymmQuant/add" -> "inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel"; -"inception_v3/conv2d_85/SymmQuant/mul/ReadVariableOp/resource" -> "inception_v3/conv2d_85/SymmQuant/mul/ReadVariableOp"; -"inception_v3/conv2d_85/SymmQuant/mul/ReadVariableOp" -> "inception_v3/conv2d_85/SymmQuant/mul"; -"inception_v3/conv2d_85/SymmQuant/mul" -> "inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel"; -"inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel/ReadVariableOp/resource" -> "inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel/ReadVariableOp"; -"inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel/ReadVariableOp" -> "inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel"; -"inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel" -> "inception_v3/conv2d_85/Conv2D"; -"inception_v3/conv2d_85/Conv2D" -> "inception_v3/batch_normalization_85/FusedBatchNormV3"; "inception_v3/conv2d_93/SymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/conv2d_93/SymmQuant/Abs/ReadVariableOp"; "inception_v3/conv2d_93/SymmQuant/Abs/ReadVariableOp" -> "inception_v3/conv2d_93/SymmQuant/Abs"; "inception_v3/conv2d_93/SymmQuant/Abs" -> "inception_v3/conv2d_93/SymmQuant/add"; @@ -6213,14 +6210,19 @@ args_0 -> "inception_v3/input_1/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars "inception_v3/batch_normalization_87/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_v3/batch_normalization_87/FusedBatchNormV3/ReadVariableOp_1"; "inception_v3/batch_normalization_87/FusedBatchNormV3/ReadVariableOp_1" -> "inception_v3/batch_normalization_87/FusedBatchNormV3"; "inception_v3/batch_normalization_87/FusedBatchNormV3" -> "inception_v3/activation_87/Relu"; -"inception_v3/batch_normalization_85/scale" -> "inception_v3/batch_normalization_85/FusedBatchNormV3"; -"inception_v3/batch_normalization_85/ReadVariableOp/resource" -> "inception_v3/batch_normalization_85/ReadVariableOp"; -"inception_v3/batch_normalization_85/ReadVariableOp" -> "inception_v3/batch_normalization_85/FusedBatchNormV3"; -"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp"; -"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp" -> "inception_v3/batch_normalization_85/FusedBatchNormV3"; -"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp_1"; -"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp_1" -> "inception_v3/batch_normalization_85/FusedBatchNormV3"; -"inception_v3/batch_normalization_85/FusedBatchNormV3" -> "inception_v3/activation_85/Relu"; +"inception_v3/conv2d_85/SymmQuant/Abs/ReadVariableOp/resource" -> 
"inception_v3/conv2d_85/SymmQuant/Abs/ReadVariableOp"; +"inception_v3/conv2d_85/SymmQuant/Abs/ReadVariableOp" -> "inception_v3/conv2d_85/SymmQuant/Abs"; +"inception_v3/conv2d_85/SymmQuant/Abs" -> "inception_v3/conv2d_85/SymmQuant/add"; +"inception_v3/conv2d_85/SymmQuant/add/y" -> "inception_v3/conv2d_85/SymmQuant/add"; +"inception_v3/conv2d_85/SymmQuant/add" -> "inception_v3/conv2d_85/SymmQuant/mul"; +"inception_v3/conv2d_85/SymmQuant/add" -> "inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel"; +"inception_v3/conv2d_85/SymmQuant/mul/ReadVariableOp/resource" -> "inception_v3/conv2d_85/SymmQuant/mul/ReadVariableOp"; +"inception_v3/conv2d_85/SymmQuant/mul/ReadVariableOp" -> "inception_v3/conv2d_85/SymmQuant/mul"; +"inception_v3/conv2d_85/SymmQuant/mul" -> "inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel"; +"inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel/ReadVariableOp/resource" -> "inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel/ReadVariableOp"; +"inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel/ReadVariableOp" -> "inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel"; +"inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVarsPerChannel" -> "inception_v3/conv2d_85/Conv2D"; +"inception_v3/conv2d_85/Conv2D" -> "inception_v3/batch_normalization_85/FusedBatchNormV3"; "inception_v3/batch_normalization_93/scale" -> "inception_v3/batch_normalization_93/FusedBatchNormV3"; "inception_v3/batch_normalization_93/ReadVariableOp/resource" -> "inception_v3/batch_normalization_93/ReadVariableOp"; "inception_v3/batch_normalization_93/ReadVariableOp" -> "inception_v3/batch_normalization_93/FusedBatchNormV3"; @@ -6229,82 +6231,90 @@ args_0 -> "inception_v3/input_1/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars "inception_v3/batch_normalization_93/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_v3/batch_normalization_93/FusedBatchNormV3/ReadVariableOp_1"; "inception_v3/batch_normalization_93/FusedBatchNormV3/ReadVariableOp_1" -> "inception_v3/batch_normalization_93/FusedBatchNormV3"; "inception_v3/batch_normalization_93/FusedBatchNormV3" -> "inception_v3/activation_93/Relu"; -"inception_v3/activation_92/Relu" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars"; -"inception_v3/activation_91/Relu" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars"; -"inception_v3/activation_88/Relu" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/Relu" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars"; -"inception_v3/activation_85/Relu" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/Abs/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/Abs/ReadVariableOp"; 
-"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/Abs/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/Abs/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/Abs/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/Abs"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/Abs" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/add"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/add/y" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/add"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/add" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/add_1"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/FakeQuantWithMinMaxVars/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/ReadVariableOp"; 
-"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/FakeQuantWithMinMaxVars/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/add_1"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/add_1" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars" -> "inception_v3/mixed10/concat"; -"inception_v3/activation_93/Relu" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/Abs/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/Abs"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/Abs" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/add"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/add/y" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/add"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/add" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/add_1"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/add_1"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/add_1" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars" -> "inception_v3/concatenate_1/concat"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/Abs/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/Abs"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/Abs" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/add"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/add/y" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/add"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/add" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/add_1"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/add_1"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/add_1" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars"; 
-"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars" -> "inception_v3/concatenate_1/concat"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/Abs/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/Abs"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/Abs" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/add"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/add/y" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/add"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/add" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/add_1"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/add_1"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/add_1" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars" -> "inception_v3/mixed9_1/concat"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/Abs/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/Abs"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/Abs" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/add"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/add/y" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/add"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/add" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/add_1"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/add_1"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/add_1" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/FakeQuantWithMinMaxVars/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_4/FakeQuantWithMinMaxVars" -> "inception_v3/mixed9_1/concat"; +"inception_v3/activation_92/Relu" -> "inception_v3/activation_92/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_91/Relu" -> "inception_v3/activation_91/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_88/Relu" -> "inception_v3/activation_88/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_87/Relu" -> 
"inception_v3/activation_87/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/batch_normalization_85/scale" -> "inception_v3/batch_normalization_85/FusedBatchNormV3"; +"inception_v3/batch_normalization_85/ReadVariableOp/resource" -> "inception_v3/batch_normalization_85/ReadVariableOp"; +"inception_v3/batch_normalization_85/ReadVariableOp" -> "inception_v3/batch_normalization_85/FusedBatchNormV3"; +"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp"; +"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp" -> "inception_v3/batch_normalization_85/FusedBatchNormV3"; +"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp_1"; +"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp_1" -> "inception_v3/batch_normalization_85/FusedBatchNormV3"; +"inception_v3/batch_normalization_85/FusedBatchNormV3" -> "inception_v3/activation_85/Relu"; +"inception_v3/activation_93/Relu" -> "inception_v3/activation_93/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_91/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/activation_91/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; +"inception_v3/activation_91/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "inception_v3/activation_91/fake_quantize/AsymmQuant/Abs"; +"inception_v3/activation_91/fake_quantize/AsymmQuant/Abs" -> "inception_v3/activation_91/fake_quantize/AsymmQuant/add"; +"inception_v3/activation_91/fake_quantize/AsymmQuant/add/y" -> "inception_v3/activation_91/fake_quantize/AsymmQuant/add"; +"inception_v3/activation_91/fake_quantize/AsymmQuant/add" -> "inception_v3/activation_91/fake_quantize/AsymmQuant/add_1"; +"inception_v3/activation_91/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_91/fake_quantize/AsymmQuant/ReadVariableOp"; +"inception_v3/activation_91/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_91/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"inception_v3/activation_91/fake_quantize/AsymmQuant/ReadVariableOp" -> "inception_v3/activation_91/fake_quantize/AsymmQuant/add_1"; +"inception_v3/activation_91/fake_quantize/AsymmQuant/add_1" -> "inception_v3/activation_91/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_91/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "inception_v3/activation_91/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_91/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "inception_v3/concatenate_1/concat"; +"inception_v3/activation_92/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/activation_92/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; +"inception_v3/activation_92/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "inception_v3/activation_92/fake_quantize/AsymmQuant/Abs"; +"inception_v3/activation_92/fake_quantize/AsymmQuant/Abs" -> "inception_v3/activation_92/fake_quantize/AsymmQuant/add"; +"inception_v3/activation_92/fake_quantize/AsymmQuant/add/y" -> "inception_v3/activation_92/fake_quantize/AsymmQuant/add"; +"inception_v3/activation_92/fake_quantize/AsymmQuant/add" -> "inception_v3/activation_92/fake_quantize/AsymmQuant/add_1"; +"inception_v3/activation_92/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> 
"inception_v3/activation_92/fake_quantize/AsymmQuant/ReadVariableOp"; +"inception_v3/activation_92/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_92/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"inception_v3/activation_92/fake_quantize/AsymmQuant/ReadVariableOp" -> "inception_v3/activation_92/fake_quantize/AsymmQuant/add_1"; +"inception_v3/activation_92/fake_quantize/AsymmQuant/add_1" -> "inception_v3/activation_92/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_92/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "inception_v3/activation_92/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_92/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "inception_v3/concatenate_1/concat"; +"inception_v3/activation_87/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; +"inception_v3/activation_87/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/AsymmQuant/Abs"; +"inception_v3/activation_87/fake_quantize/AsymmQuant/Abs" -> "inception_v3/activation_87/fake_quantize/AsymmQuant/add"; +"inception_v3/activation_87/fake_quantize/AsymmQuant/add/y" -> "inception_v3/activation_87/fake_quantize/AsymmQuant/add"; +"inception_v3/activation_87/fake_quantize/AsymmQuant/add" -> "inception_v3/activation_87/fake_quantize/AsymmQuant/add_1"; +"inception_v3/activation_87/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/AsymmQuant/ReadVariableOp"; +"inception_v3/activation_87/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"inception_v3/activation_87/fake_quantize/AsymmQuant/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/AsymmQuant/add_1"; +"inception_v3/activation_87/fake_quantize/AsymmQuant/add_1" -> "inception_v3/activation_87/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_87/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_87/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "inception_v3/mixed9_1/concat"; +"inception_v3/activation_88/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/activation_88/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; +"inception_v3/activation_88/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "inception_v3/activation_88/fake_quantize/AsymmQuant/Abs"; +"inception_v3/activation_88/fake_quantize/AsymmQuant/Abs" -> "inception_v3/activation_88/fake_quantize/AsymmQuant/add"; +"inception_v3/activation_88/fake_quantize/AsymmQuant/add/y" -> "inception_v3/activation_88/fake_quantize/AsymmQuant/add"; +"inception_v3/activation_88/fake_quantize/AsymmQuant/add" -> "inception_v3/activation_88/fake_quantize/AsymmQuant/add_1"; +"inception_v3/activation_88/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_88/fake_quantize/AsymmQuant/ReadVariableOp"; +"inception_v3/activation_88/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_88/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"inception_v3/activation_88/fake_quantize/AsymmQuant/ReadVariableOp" -> "inception_v3/activation_88/fake_quantize/AsymmQuant/add_1"; 
+"inception_v3/activation_88/fake_quantize/AsymmQuant/add_1" -> "inception_v3/activation_88/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_88/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "inception_v3/activation_88/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_88/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "inception_v3/mixed9_1/concat"; +"inception_v3/activation_85/Relu" -> "inception_v3/activation_85/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_85/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/activation_85/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; +"inception_v3/activation_85/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "inception_v3/activation_85/fake_quantize/AsymmQuant/Abs"; +"inception_v3/activation_85/fake_quantize/AsymmQuant/Abs" -> "inception_v3/activation_85/fake_quantize/AsymmQuant/add"; +"inception_v3/activation_85/fake_quantize/AsymmQuant/add/y" -> "inception_v3/activation_85/fake_quantize/AsymmQuant/add"; +"inception_v3/activation_85/fake_quantize/AsymmQuant/add" -> "inception_v3/activation_85/fake_quantize/AsymmQuant/add_1"; +"inception_v3/activation_85/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_85/fake_quantize/AsymmQuant/ReadVariableOp"; +"inception_v3/activation_85/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_85/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"inception_v3/activation_85/fake_quantize/AsymmQuant/ReadVariableOp" -> "inception_v3/activation_85/fake_quantize/AsymmQuant/add_1"; +"inception_v3/activation_85/fake_quantize/AsymmQuant/add_1" -> "inception_v3/activation_85/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_85/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "inception_v3/activation_85/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_85/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "inception_v3/mixed10/concat"; "inception_v3/mixed9_1/concat/axis" -> "inception_v3/mixed9_1/concat"; "inception_v3/mixed9_1/concat" -> "inception_v3/mixed10/concat"; "inception_v3/concatenate_1/concat/axis" -> "inception_v3/concatenate_1/concat"; "inception_v3/concatenate_1/concat" -> "inception_v3/mixed10/concat"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/Abs/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/Abs"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/Abs" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/add"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/add/y" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/add"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/add" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/add_1"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/add_1"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/add_1" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/FakeQuantWithMinMaxVars/ReadVariableOp" 
-> "inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/AsymmQuant_5/FakeQuantWithMinMaxVars" -> "inception_v3/mixed10/concat"; +"inception_v3/activation_93/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/activation_93/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; +"inception_v3/activation_93/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "inception_v3/activation_93/fake_quantize/AsymmQuant/Abs"; +"inception_v3/activation_93/fake_quantize/AsymmQuant/Abs" -> "inception_v3/activation_93/fake_quantize/AsymmQuant/add"; +"inception_v3/activation_93/fake_quantize/AsymmQuant/add/y" -> "inception_v3/activation_93/fake_quantize/AsymmQuant/add"; +"inception_v3/activation_93/fake_quantize/AsymmQuant/add" -> "inception_v3/activation_93/fake_quantize/AsymmQuant/add_1"; +"inception_v3/activation_93/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_93/fake_quantize/AsymmQuant/ReadVariableOp"; +"inception_v3/activation_93/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "inception_v3/activation_93/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"inception_v3/activation_93/fake_quantize/AsymmQuant/ReadVariableOp" -> "inception_v3/activation_93/fake_quantize/AsymmQuant/add_1"; +"inception_v3/activation_93/fake_quantize/AsymmQuant/add_1" -> "inception_v3/activation_93/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_93/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "inception_v3/activation_93/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_93/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "inception_v3/mixed10/concat"; "inception_v3/mixed10/concat/axis" -> "inception_v3/mixed10/concat"; "inception_v3/mixed10/concat" -> "inception_v3/avg_pool/Mean"; "inception_v3/avg_pool/Mean/reduction_indices" -> "inception_v3/avg_pool/Mean"; diff --git a/tests/tensorflow/data/reference_graphs/2.5/quantized/w_sym_ch_a_asym_t/mask_rcnn.dot b/tests/tensorflow/data/reference_graphs/2.5/quantized/w_sym_ch_a_asym_t/mask_rcnn.dot index aece6a3e028..05beab755bf 100644 --- a/tests/tensorflow/data/reference_graphs/2.5/quantized/w_sym_ch_a_asym_t/mask_rcnn.dot +++ b/tests/tensorflow/data/reference_graphs/2.5/quantized/w_sym_ch_a_asym_t/mask_rcnn.dot @@ -2205,48 +2205,56 @@ args_0_1 [op=Placeholder]; "maskrcnn/p2-bn/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; "maskrcnn/p2-bn/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; "maskrcnn/p2-bn/FusedBatchNormV3" [op=FusedBatchNormV3]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant/Abs" [op=Abs]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant/add/y" [op=Const]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant/add" [op=AddV2]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant/add_1" [op=AddV2]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; 
-"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_1/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_1/Abs" [op=Abs]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_1/add/y" [op=Const]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_1/add" [op=AddV2]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_1/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_1/add_1" [op=AddV2]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_2/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_2/Abs" [op=Abs]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_2/add/y" [op=Const]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_2/add" [op=AddV2]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_2/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_2/add_1" [op=AddV2]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_3/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_3/Abs" [op=Abs]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_3/add/y" [op=Const]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_3/add" [op=AddV2]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_3/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_3/add_1" [op=AddV2]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_4/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_4/Abs" [op=Abs]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_4/add/y" [op=Const]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_4/add" [op=AddV2]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_4/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_4/add_1" [op=AddV2]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_4/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_4/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/p6-bn/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/p6-bn/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/p6-bn/fake_quantize/AsymmQuant/Abs" [op=Abs]; +"maskrcnn/p6-bn/fake_quantize/AsymmQuant/add/y" [op=Const]; 
+"maskrcnn/p6-bn/fake_quantize/AsymmQuant/add" [op=AddV2]; +"maskrcnn/p6-bn/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/p6-bn/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/p6-bn/fake_quantize/AsymmQuant/add_1" [op=AddV2]; +"maskrcnn/p6-bn/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/p6-bn/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/p5-bn/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/p5-bn/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/p5-bn/fake_quantize/AsymmQuant/Abs" [op=Abs]; +"maskrcnn/p5-bn/fake_quantize/AsymmQuant/add/y" [op=Const]; +"maskrcnn/p5-bn/fake_quantize/AsymmQuant/add" [op=AddV2]; +"maskrcnn/p5-bn/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/p5-bn/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/p5-bn/fake_quantize/AsymmQuant/add_1" [op=AddV2]; +"maskrcnn/p5-bn/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/p5-bn/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/p4-bn/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/p4-bn/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/p4-bn/fake_quantize/AsymmQuant/Abs" [op=Abs]; +"maskrcnn/p4-bn/fake_quantize/AsymmQuant/add/y" [op=Const]; +"maskrcnn/p4-bn/fake_quantize/AsymmQuant/add" [op=AddV2]; +"maskrcnn/p4-bn/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/p4-bn/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/p4-bn/fake_quantize/AsymmQuant/add_1" [op=AddV2]; +"maskrcnn/p4-bn/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/p4-bn/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/p3-bn/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/p3-bn/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/p3-bn/fake_quantize/AsymmQuant/Abs" [op=Abs]; +"maskrcnn/p3-bn/fake_quantize/AsymmQuant/add/y" [op=Const]; +"maskrcnn/p3-bn/fake_quantize/AsymmQuant/add" [op=AddV2]; +"maskrcnn/p3-bn/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/p3-bn/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/p3-bn/fake_quantize/AsymmQuant/add_1" [op=AddV2]; +"maskrcnn/p3-bn/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/p3-bn/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/p2-bn/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/p2-bn/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/p2-bn/fake_quantize/AsymmQuant/Abs" [op=Abs]; +"maskrcnn/p2-bn/fake_quantize/AsymmQuant/add/y" [op=Const]; +"maskrcnn/p2-bn/fake_quantize/AsymmQuant/add" [op=AddV2]; +"maskrcnn/p2-bn/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/p2-bn/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/p2-bn/fake_quantize/AsymmQuant/add_1" [op=AddV2]; +"maskrcnn/p2-bn/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/p2-bn/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" 
[op=FakeQuantWithMinMaxVars]; "maskrcnn/rpn/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; "maskrcnn/rpn/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; "maskrcnn/rpn/SymmQuant/Abs" [op=Abs]; @@ -3186,96 +3194,106 @@ args_0_1 [op=Placeholder]; "maskrcnn/tf.math.subtract/Sub" [op=Sub]; "maskrcnn/tf.expand_dims/ExpandDims/dim" [op=Const]; "maskrcnn/tf.expand_dims/ExpandDims" [op=ExpandDims]; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant/Abs" [op=Abs]; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant/add/y" [op=Const]; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant/add" [op=AddV2]; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant/add_1" [op=AddV2]; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_1/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_1/Abs" [op=Abs]; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_1/add/y" [op=Const]; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_1/add" [op=AddV2]; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_1/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_1/add_1" [op=AddV2]; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant/Abs" [op=Abs]; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant/add/y" [op=Const]; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant/add" [op=AddV2]; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant/add_1" [op=AddV2]; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; 
-"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_1/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_1/Abs" [op=Abs]; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_1/add/y" [op=Const]; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_1/add" [op=AddV2]; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_1/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_1/add_1" [op=AddV2]; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant/Abs" [op=Abs]; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant/add/y" [op=Const]; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant/add" [op=AddV2]; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant/add_1" [op=AddV2]; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_1/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_1/Abs" [op=Abs]; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_1/add/y" [op=Const]; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_1/add" [op=AddV2]; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_1/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_1/add_1" [op=AddV2]; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant/Abs" [op=Abs]; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant/add/y" [op=Const]; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant/add" [op=AddV2]; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; 
-"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant/add_1" [op=AddV2]; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_1/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_1/Abs" [op=Abs]; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_1/add/y" [op=Const]; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_1/add" [op=AddV2]; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_1/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_1/add_1" [op=AddV2]; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant/Abs" [op=Abs]; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant/add/y" [op=Const]; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant/add" [op=AddV2]; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant/add_1" [op=AddV2]; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_1/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_1/Abs" [op=Abs]; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_1/add/y" [op=Const]; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_1/add" [op=AddV2]; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_1/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_1/add_1" [op=AddV2]; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.math.subtract_32/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_32/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; 
+"maskrcnn/tf.math.subtract_32/fake_quantize/AsymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.subtract_32/fake_quantize/AsymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.subtract_32/fake_quantize/AsymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.subtract_32/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_32/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_32/fake_quantize/AsymmQuant/add_1" [op=AddV2]; +"maskrcnn/tf.math.subtract_32/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_32/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.math.subtract_33/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_33/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_33/fake_quantize/AsymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.subtract_33/fake_quantize/AsymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.subtract_33/fake_quantize/AsymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.subtract_33/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_33/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_33/fake_quantize/AsymmQuant/add_1" [op=AddV2]; +"maskrcnn/tf.math.subtract_33/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_33/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.math.subtract_24/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_24/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_24/fake_quantize/AsymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.subtract_24/fake_quantize/AsymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.subtract_24/fake_quantize/AsymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.subtract_24/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_24/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_24/fake_quantize/AsymmQuant/add_1" [op=AddV2]; +"maskrcnn/tf.math.subtract_24/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_24/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.math.subtract_25/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_25/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_25/fake_quantize/AsymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.subtract_25/fake_quantize/AsymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.subtract_25/fake_quantize/AsymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.subtract_25/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_25/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_25/fake_quantize/AsymmQuant/add_1" [op=AddV2]; +"maskrcnn/tf.math.subtract_25/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_25/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.math.subtract_16/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" 
[op=Placeholder]; +"maskrcnn/tf.math.subtract_16/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_16/fake_quantize/AsymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.subtract_16/fake_quantize/AsymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.subtract_16/fake_quantize/AsymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.subtract_16/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_16/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_16/fake_quantize/AsymmQuant/add_1" [op=AddV2]; +"maskrcnn/tf.math.subtract_16/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_16/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.math.subtract_17/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_17/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_17/fake_quantize/AsymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.subtract_17/fake_quantize/AsymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.subtract_17/fake_quantize/AsymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.subtract_17/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_17/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_17/fake_quantize/AsymmQuant/add_1" [op=AddV2]; +"maskrcnn/tf.math.subtract_17/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_17/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.math.subtract_8/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_8/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_8/fake_quantize/AsymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.subtract_8/fake_quantize/AsymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.subtract_8/fake_quantize/AsymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.subtract_8/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_8/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_8/fake_quantize/AsymmQuant/add_1" [op=AddV2]; +"maskrcnn/tf.math.subtract_8/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_8/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.math.subtract_9/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_9/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_9/fake_quantize/AsymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.subtract_9/fake_quantize/AsymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.subtract_9/fake_quantize/AsymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.subtract_9/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_9/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_9/fake_quantize/AsymmQuant/add_1" [op=AddV2]; +"maskrcnn/tf.math.subtract_9/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_9/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; 
+"maskrcnn/tf.math.subtract/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract/fake_quantize/AsymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.subtract/fake_quantize/AsymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.subtract/fake_quantize/AsymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.subtract/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract/fake_quantize/AsymmQuant/add_1" [op=AddV2]; +"maskrcnn/tf.math.subtract/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.math.subtract_1/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_1/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_1/fake_quantize/AsymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.subtract_1/fake_quantize/AsymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.subtract_1/fake_quantize/AsymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.subtract_1/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_1/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_1/fake_quantize/AsymmQuant/add_1" [op=AddV2]; +"maskrcnn/tf.math.subtract_1/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_1/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; "maskrcnn/tf.__operators__.add_38/AddV2" [op=AddV2]; "maskrcnn/tf.__operators__.add_37/AddV2" [op=AddV2]; "maskrcnn/tf.__operators__.add_34/AddV2" [op=AddV2]; @@ -3491,22 +3509,26 @@ args_0_1 [op=Placeholder]; "maskrcnn/tf.math.subtract_39/fake_quantize/AsymmQuant/add_1" [op=AddV2]; "maskrcnn/tf.math.subtract_39/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; "maskrcnn/tf.math.subtract_39/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_2/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_2/Abs" [op=Abs]; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_2/add/y" [op=Const]; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_2/add" [op=AddV2]; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_2/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_2/add_1" [op=AddV2]; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_3/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_3/Abs" [op=Abs]; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_3/add/y" [op=Const]; 
-"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_3/add" [op=AddV2]; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_3/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_3/add_1" [op=AddV2]; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.math.subtract_34/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_34/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_34/fake_quantize/AsymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.subtract_34/fake_quantize/AsymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.subtract_34/fake_quantize/AsymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.subtract_34/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_34/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_34/fake_quantize/AsymmQuant/add_1" [op=AddV2]; +"maskrcnn/tf.math.subtract_34/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_34/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.math.subtract_35/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_35/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_35/fake_quantize/AsymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.subtract_35/fake_quantize/AsymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.subtract_35/fake_quantize/AsymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.subtract_35/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_35/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_35/fake_quantize/AsymmQuant/add_1" [op=AddV2]; +"maskrcnn/tf.math.subtract_35/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_35/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; "maskrcnn/tf.math.subtract_28/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; "maskrcnn/tf.math.subtract_28/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; "maskrcnn/tf.math.subtract_28/fake_quantize/AsymmQuant/Abs" [op=Abs]; @@ -3547,22 +3569,26 @@ args_0_1 [op=Placeholder]; "maskrcnn/tf.math.subtract_31/fake_quantize/AsymmQuant/add_1" [op=AddV2]; "maskrcnn/tf.math.subtract_31/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; "maskrcnn/tf.math.subtract_31/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_2/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_2/Abs" [op=Abs]; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_2/add/y" [op=Const]; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_2/add" [op=AddV2]; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_2/ReadVariableOp" [op=ReadVariableOp]; 
-"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_2/add_1" [op=AddV2]; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_3/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_3/Abs" [op=Abs]; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_3/add/y" [op=Const]; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_3/add" [op=AddV2]; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_3/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_3/add_1" [op=AddV2]; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.math.subtract_26/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_26/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_26/fake_quantize/AsymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.subtract_26/fake_quantize/AsymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.subtract_26/fake_quantize/AsymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.subtract_26/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_26/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_26/fake_quantize/AsymmQuant/add_1" [op=AddV2]; +"maskrcnn/tf.math.subtract_26/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_26/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.math.subtract_27/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_27/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_27/fake_quantize/AsymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.subtract_27/fake_quantize/AsymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.subtract_27/fake_quantize/AsymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.subtract_27/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_27/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_27/fake_quantize/AsymmQuant/add_1" [op=AddV2]; +"maskrcnn/tf.math.subtract_27/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_27/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; "maskrcnn/tf.math.subtract_20/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; "maskrcnn/tf.math.subtract_20/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; "maskrcnn/tf.math.subtract_20/fake_quantize/AsymmQuant/Abs" [op=Abs]; @@ -3603,22 +3629,26 @@ args_0_1 [op=Placeholder]; "maskrcnn/tf.math.subtract_23/fake_quantize/AsymmQuant/add_1" [op=AddV2]; 
"maskrcnn/tf.math.subtract_23/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; "maskrcnn/tf.math.subtract_23/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_2/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_2/Abs" [op=Abs]; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_2/add/y" [op=Const]; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_2/add" [op=AddV2]; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_2/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_2/add_1" [op=AddV2]; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_3/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_3/Abs" [op=Abs]; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_3/add/y" [op=Const]; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_3/add" [op=AddV2]; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_3/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_3/add_1" [op=AddV2]; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.math.subtract_18/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_18/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_18/fake_quantize/AsymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.subtract_18/fake_quantize/AsymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.subtract_18/fake_quantize/AsymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.subtract_18/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_18/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_18/fake_quantize/AsymmQuant/add_1" [op=AddV2]; +"maskrcnn/tf.math.subtract_18/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_18/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.math.subtract_19/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_19/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_19/fake_quantize/AsymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.subtract_19/fake_quantize/AsymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.subtract_19/fake_quantize/AsymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.subtract_19/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_19/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; 
+"maskrcnn/tf.math.subtract_19/fake_quantize/AsymmQuant/add_1" [op=AddV2]; +"maskrcnn/tf.math.subtract_19/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_19/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; "maskrcnn/tf.math.subtract_12/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; "maskrcnn/tf.math.subtract_12/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; "maskrcnn/tf.math.subtract_12/fake_quantize/AsymmQuant/Abs" [op=Abs]; @@ -3659,22 +3689,26 @@ args_0_1 [op=Placeholder]; "maskrcnn/tf.math.subtract_15/fake_quantize/AsymmQuant/add_1" [op=AddV2]; "maskrcnn/tf.math.subtract_15/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; "maskrcnn/tf.math.subtract_15/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_2/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_2/Abs" [op=Abs]; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_2/add/y" [op=Const]; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_2/add" [op=AddV2]; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_2/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_2/add_1" [op=AddV2]; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_3/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_3/Abs" [op=Abs]; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_3/add/y" [op=Const]; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_3/add" [op=AddV2]; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_3/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_3/add_1" [op=AddV2]; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.math.subtract_10/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_10/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_10/fake_quantize/AsymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.subtract_10/fake_quantize/AsymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.subtract_10/fake_quantize/AsymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.subtract_10/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_10/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_10/fake_quantize/AsymmQuant/add_1" [op=AddV2]; +"maskrcnn/tf.math.subtract_10/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; 
+"maskrcnn/tf.math.subtract_10/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.math.subtract_11/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_11/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_11/fake_quantize/AsymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.subtract_11/fake_quantize/AsymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.subtract_11/fake_quantize/AsymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.subtract_11/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_11/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_11/fake_quantize/AsymmQuant/add_1" [op=AddV2]; +"maskrcnn/tf.math.subtract_11/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_11/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; "maskrcnn/tf.math.subtract_4/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; "maskrcnn/tf.math.subtract_4/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; "maskrcnn/tf.math.subtract_4/fake_quantize/AsymmQuant/Abs" [op=Abs]; @@ -3715,22 +3749,26 @@ args_0_1 [op=Placeholder]; "maskrcnn/tf.math.subtract_7/fake_quantize/AsymmQuant/add_1" [op=AddV2]; "maskrcnn/tf.math.subtract_7/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; "maskrcnn/tf.math.subtract_7/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_2/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_2/Abs" [op=Abs]; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_2/add/y" [op=Const]; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_2/add" [op=AddV2]; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_2/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_2/add_1" [op=AddV2]; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_3/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_3/Abs" [op=Abs]; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_3/add/y" [op=Const]; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_3/add" [op=AddV2]; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_3/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_3/add_1" [op=AddV2]; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.math.subtract_2/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_2/fake_quantize/AsymmQuant/Abs/ReadVariableOp" 
[op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_2/fake_quantize/AsymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.subtract_2/fake_quantize/AsymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.subtract_2/fake_quantize/AsymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.subtract_2/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_2/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_2/fake_quantize/AsymmQuant/add_1" [op=AddV2]; +"maskrcnn/tf.math.subtract_2/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_2/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.math.subtract_3/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_3/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_3/fake_quantize/AsymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.subtract_3/fake_quantize/AsymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.subtract_3/fake_quantize/AsymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.subtract_3/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_3/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_3/fake_quantize/AsymmQuant/add_1" [op=AddV2]; +"maskrcnn/tf.math.subtract_3/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_3/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; "maskrcnn/tf.concat_4/concat/axis" [op=Const]; "maskrcnn/tf.concat_4/concat" [op=ConcatV2]; "maskrcnn/tf.stack_4/stack" [op=Pack]; @@ -3955,48 +3993,56 @@ args_0_1 [op=Placeholder]; "maskrcnn/tf.ones/ones/Const" [op=Const]; "maskrcnn/tf.ones/ones" [op=Fill]; "maskrcnn/tf.math.multiply_30/Mul" [op=Mul]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant/Abs" [op=Abs]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant/add/y" [op=Const]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant/add" [op=AddV2]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant/add_1" [op=AddV2]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_1/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_1/Abs" [op=Abs]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_1/add/y" [op=Const]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_1/add" [op=AddV2]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_1/ReadVariableOp" [op=ReadVariableOp]; 
-"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_1/add_1" [op=AddV2]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_2/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_2/Abs" [op=Abs]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_2/add/y" [op=Const]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_2/add" [op=AddV2]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_2/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_2/add_1" [op=AddV2]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_3/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_3/Abs" [op=Abs]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_3/add/y" [op=Const]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_3/add" [op=AddV2]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_3/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_3/add_1" [op=AddV2]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_4/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_4/Abs" [op=Abs]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_4/add/y" [op=Const]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_4/add" [op=AddV2]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_4/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_4/add_1" [op=AddV2]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_4/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_4/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.concat_5/fake_quantize_I0/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.concat_5/fake_quantize_I0/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.concat_5/fake_quantize_I0/AsymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.concat_5/fake_quantize_I0/AsymmQuant/add/y" [op=Const]; +"maskrcnn/tf.concat_5/fake_quantize_I0/AsymmQuant/add" [op=AddV2]; +"maskrcnn/tf.concat_5/fake_quantize_I0/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.concat_5/fake_quantize_I0/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.concat_5/fake_quantize_I0/AsymmQuant/add_1" [op=AddV2]; 
+"maskrcnn/tf.concat_5/fake_quantize_I0/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.concat_5/fake_quantize_I0/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.concat_5/fake_quantize_I1/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.concat_5/fake_quantize_I1/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.concat_5/fake_quantize_I1/AsymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.concat_5/fake_quantize_I1/AsymmQuant/add/y" [op=Const]; +"maskrcnn/tf.concat_5/fake_quantize_I1/AsymmQuant/add" [op=AddV2]; +"maskrcnn/tf.concat_5/fake_quantize_I1/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.concat_5/fake_quantize_I1/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.concat_5/fake_quantize_I1/AsymmQuant/add_1" [op=AddV2]; +"maskrcnn/tf.concat_5/fake_quantize_I1/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.concat_5/fake_quantize_I1/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.concat_5/fake_quantize_I2/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.concat_5/fake_quantize_I2/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.concat_5/fake_quantize_I2/AsymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.concat_5/fake_quantize_I2/AsymmQuant/add/y" [op=Const]; +"maskrcnn/tf.concat_5/fake_quantize_I2/AsymmQuant/add" [op=AddV2]; +"maskrcnn/tf.concat_5/fake_quantize_I2/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.concat_5/fake_quantize_I2/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.concat_5/fake_quantize_I2/AsymmQuant/add_1" [op=AddV2]; +"maskrcnn/tf.concat_5/fake_quantize_I2/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.concat_5/fake_quantize_I2/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.concat_5/fake_quantize_I3/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.concat_5/fake_quantize_I3/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.concat_5/fake_quantize_I3/AsymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.concat_5/fake_quantize_I3/AsymmQuant/add/y" [op=Const]; +"maskrcnn/tf.concat_5/fake_quantize_I3/AsymmQuant/add" [op=AddV2]; +"maskrcnn/tf.concat_5/fake_quantize_I3/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.concat_5/fake_quantize_I3/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.concat_5/fake_quantize_I3/AsymmQuant/add_1" [op=AddV2]; +"maskrcnn/tf.concat_5/fake_quantize_I3/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.concat_5/fake_quantize_I3/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.concat_5/fake_quantize_I4/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.concat_5/fake_quantize_I4/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.concat_5/fake_quantize_I4/AsymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.concat_5/fake_quantize_I4/AsymmQuant/add/y" [op=Const]; +"maskrcnn/tf.concat_5/fake_quantize_I4/AsymmQuant/add" [op=AddV2]; +"maskrcnn/tf.concat_5/fake_quantize_I4/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.concat_5/fake_quantize_I4/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.concat_5/fake_quantize_I4/AsymmQuant/add_1" [op=AddV2]; +"maskrcnn/tf.concat_5/fake_quantize_I4/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" 
[op=ReadVariableOp]; +"maskrcnn/tf.concat_5/fake_quantize_I4/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; "maskrcnn/tf.concat_5/concat/axis" [op=Const]; "maskrcnn/tf.concat_5/concat" [op=ConcatV2]; "maskrcnn/tf.stack_5/stack" [op=Pack]; @@ -4121,32 +4167,36 @@ args_0_1 [op=Placeholder]; "maskrcnn/tf.math.truediv_1/truediv" [op=RealDiv]; "maskrcnn/tf.math.subtract_42/Sub/y" [op=Const]; "maskrcnn/tf.math.subtract_42/Sub" [op=Sub]; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant/Abs" [op=Abs]; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant/add/y" [op=Const]; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant/add" [op=AddV2]; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant/add_1" [op=AddV2]; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant_1/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant_1/Abs" [op=Abs]; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant_1/add/y" [op=Const]; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant_1/add" [op=AddV2]; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant_1/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant_1/add_1" [op=AddV2]; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant_2/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant_2/Abs" [op=Abs]; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant_2/add/y" [op=Const]; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant_2/add" [op=AddV2]; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant_2/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant_2/add_1" [op=AddV2]; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.math.truediv_2/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.truediv_2/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.truediv_2/fake_quantize/AsymmQuant/Abs" 
[op=Abs]; +"maskrcnn/tf.math.truediv_2/fake_quantize/AsymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.truediv_2/fake_quantize/AsymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.truediv_2/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.truediv_2/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.truediv_2/fake_quantize/AsymmQuant/add_1" [op=AddV2]; +"maskrcnn/tf.math.truediv_2/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.truediv_2/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.math.truediv_3/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.truediv_3/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.truediv_3/fake_quantize/AsymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.truediv_3/fake_quantize/AsymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.truediv_3/fake_quantize/AsymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.truediv_3/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.truediv_3/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.truediv_3/fake_quantize/AsymmQuant/add_1" [op=AddV2]; +"maskrcnn/tf.math.truediv_3/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.truediv_3/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.math.truediv_1/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.truediv_1/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.truediv_1/fake_quantize/AsymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.truediv_1/fake_quantize/AsymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.truediv_1/fake_quantize/AsymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.truediv_1/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.truediv_1/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.truediv_1/fake_quantize/AsymmQuant/add_1" [op=AddV2]; +"maskrcnn/tf.math.truediv_1/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.truediv_1/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; "maskrcnn/tf.cast_23/Cast" [op=Cast]; "maskrcnn/tf.__operators__.getitem_27/strided_slice/stack" [op=Const]; "maskrcnn/tf.__operators__.getitem_27/strided_slice/stack_1" [op=Const]; @@ -4547,24 +4597,26 @@ args_0_1 [op=Placeholder]; "maskrcnn/tf.math.truediv_8/truediv" [op=RealDiv]; "maskrcnn/tf.math.truediv_6/truediv/y" [op=Const]; "maskrcnn/tf.math.truediv_6/truediv" [op=RealDiv]; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/AsymmQuant/Abs" [op=Abs]; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/AsymmQuant/add/y" [op=Const]; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/AsymmQuant/add" [op=AddV2]; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp" 
[op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/AsymmQuant/add_1" [op=AddV2]; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/AsymmQuant_1/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/AsymmQuant_1/Abs" [op=Abs]; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/AsymmQuant_1/add/y" [op=Const]; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/AsymmQuant_1/add" [op=AddV2]; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/AsymmQuant_1/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/AsymmQuant_1/add_1" [op=AddV2]; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.math.subtract_44/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_44/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_44/fake_quantize/AsymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.subtract_44/fake_quantize/AsymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.subtract_44/fake_quantize/AsymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.subtract_44/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_44/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_44/fake_quantize/AsymmQuant/add_1" [op=AddV2]; +"maskrcnn/tf.math.subtract_44/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_44/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.math.subtract_43/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_43/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_43/fake_quantize/AsymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.subtract_43/fake_quantize/AsymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.subtract_43/fake_quantize/AsymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.subtract_43/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_43/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_43/fake_quantize/AsymmQuant/add_1" [op=AddV2]; +"maskrcnn/tf.math.subtract_43/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_43/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; "maskrcnn/tf.__operators__.add_53/AddV2" [op=AddV2]; "maskrcnn/tf.__operators__.add_51/AddV2" [op=AddV2]; "maskrcnn/tf.__operators__.add_49/AddV2" [op=AddV2]; @@ -5536,24 +5588,26 @@ args_0_1 [op=Placeholder]; "maskrcnn/tf.__operators__.getitem_60/strided_slice/stack_1" [op=Const]; "maskrcnn/tf.__operators__.getitem_60/strided_slice/stack_2" [op=Const]; "maskrcnn/tf.__operators__.getitem_60/strided_slice" [op=StridedSlice]; 
-"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant/Abs" [op=Abs]; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant/add/y" [op=Const]; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant/add" [op=AddV2]; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant/add_1" [op=AddV2]; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_1/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_1/Abs" [op=Abs]; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_1/add/y" [op=Const]; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_1/add" [op=AddV2]; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_1/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_1/add_1" [op=AddV2]; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.math.subtract_53/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_53/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_53/fake_quantize/AsymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.subtract_53/fake_quantize/AsymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.subtract_53/fake_quantize/AsymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.subtract_53/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_53/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_53/fake_quantize/AsymmQuant/add_1" [op=AddV2]; +"maskrcnn/tf.math.subtract_53/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_53/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.math.subtract_54/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_54/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_54/fake_quantize/AsymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.subtract_54/fake_quantize/AsymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.subtract_54/fake_quantize/AsymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.subtract_54/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_54/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; 
+"maskrcnn/tf.math.subtract_54/fake_quantize/AsymmQuant/add_1" [op=AddV2]; +"maskrcnn/tf.math.subtract_54/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_54/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; "maskrcnn/tf.__operators__.add_66/AddV2" [op=AddV2]; "maskrcnn/tf.__operators__.add_65/AddV2" [op=AddV2]; "maskrcnn/tf.unstack_5/unstack" [op=Unpack]; @@ -5629,22 +5683,26 @@ args_0_1 [op=Placeholder]; "maskrcnn/tf.math.subtract_60/fake_quantize/AsymmQuant/add_1" [op=AddV2]; "maskrcnn/tf.math.subtract_60/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; "maskrcnn/tf.math.subtract_60/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_2/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_2/Abs" [op=Abs]; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_2/add/y" [op=Const]; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_2/add" [op=AddV2]; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_2/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_2/add_1" [op=AddV2]; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_3/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_3/Abs" [op=Abs]; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_3/add/y" [op=Const]; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_3/add" [op=AddV2]; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_3/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_3/add_1" [op=AddV2]; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.math.subtract_55/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_55/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_55/fake_quantize/AsymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.subtract_55/fake_quantize/AsymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.subtract_55/fake_quantize/AsymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.subtract_55/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_55/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_55/fake_quantize/AsymmQuant/add_1" [op=AddV2]; +"maskrcnn/tf.math.subtract_55/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_55/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; 
+"maskrcnn/tf.math.subtract_56/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_56/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_56/fake_quantize/AsymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.subtract_56/fake_quantize/AsymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.subtract_56/fake_quantize/AsymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.subtract_56/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_56/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_56/fake_quantize/AsymmQuant/add_1" [op=AddV2]; +"maskrcnn/tf.math.subtract_56/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_56/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; "maskrcnn/tf.concat_13/concat/axis" [op=Const]; "maskrcnn/tf.concat_13/concat" [op=ConcatV2]; "maskrcnn/tf.stack_15/stack" [op=Pack]; @@ -5837,32 +5895,36 @@ args_0_1 [op=Placeholder]; "maskrcnn/tf.math.truediv_26/truediv" [op=RealDiv]; "maskrcnn/tf.math.subtract_64/Sub/y" [op=Const]; "maskrcnn/tf.math.subtract_64/Sub" [op=Sub]; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant/Abs" [op=Abs]; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant/add/y" [op=Const]; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant/add" [op=AddV2]; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant/add_1" [op=AddV2]; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant_1/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant_1/Abs" [op=Abs]; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant_1/add/y" [op=Const]; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant_1/add" [op=AddV2]; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant_1/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant_1/add_1" [op=AddV2]; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant_2/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant_2/Abs" [op=Abs]; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant_2/add/y" [op=Const]; 
-"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant_2/add" [op=AddV2]; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant_2/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant_2/add_1" [op=AddV2]; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.math.truediv_27/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.truediv_27/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.truediv_27/fake_quantize/AsymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.truediv_27/fake_quantize/AsymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.truediv_27/fake_quantize/AsymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.truediv_27/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.truediv_27/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.truediv_27/fake_quantize/AsymmQuant/add_1" [op=AddV2]; +"maskrcnn/tf.math.truediv_27/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.truediv_27/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.math.truediv_28/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.truediv_28/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.truediv_28/fake_quantize/AsymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.truediv_28/fake_quantize/AsymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.truediv_28/fake_quantize/AsymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.truediv_28/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.truediv_28/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.truediv_28/fake_quantize/AsymmQuant/add_1" [op=AddV2]; +"maskrcnn/tf.math.truediv_28/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.truediv_28/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.math.truediv_26/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.truediv_26/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.truediv_26/fake_quantize/AsymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.truediv_26/fake_quantize/AsymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.truediv_26/fake_quantize/AsymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.truediv_26/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.truediv_26/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.truediv_26/fake_quantize/AsymmQuant/add_1" [op=AddV2]; +"maskrcnn/tf.math.truediv_26/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.truediv_26/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; "maskrcnn/tf.cast_37/Cast" [op=Cast]; "maskrcnn/tf.__operators__.getitem_76/strided_slice/stack" [op=Const]; "maskrcnn/tf.__operators__.getitem_76/strided_slice/stack_1" [op=Const]; @@ -6599,24 +6661,26 @@ args_0_1 [op=Placeholder]; 
"maskrcnn/tf.math.truediv_33/truediv" [op=RealDiv]; "maskrcnn/tf.math.truediv_31/truediv/y" [op=Const]; "maskrcnn/tf.math.truediv_31/truediv" [op=RealDiv]; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/AsymmQuant/Abs" [op=Abs]; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/AsymmQuant/add/y" [op=Const]; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/AsymmQuant/add" [op=AddV2]; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/AsymmQuant/add_1" [op=AddV2]; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/AsymmQuant_1/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/AsymmQuant_1/Abs" [op=Abs]; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/AsymmQuant_1/add/y" [op=Const]; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/AsymmQuant_1/add" [op=AddV2]; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/AsymmQuant_1/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/AsymmQuant_1/add_1" [op=AddV2]; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.math.subtract_66/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_66/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_66/fake_quantize/AsymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.subtract_66/fake_quantize/AsymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.subtract_66/fake_quantize/AsymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.subtract_66/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_66/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_66/fake_quantize/AsymmQuant/add_1" [op=AddV2]; +"maskrcnn/tf.math.subtract_66/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_66/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.math.subtract_65/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_65/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_65/fake_quantize/AsymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.subtract_65/fake_quantize/AsymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.subtract_65/fake_quantize/AsymmQuant/add" [op=AddV2]; 
+"maskrcnn/tf.math.subtract_65/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_65/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_65/fake_quantize/AsymmQuant/add_1" [op=AddV2]; +"maskrcnn/tf.math.subtract_65/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_65/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; "maskrcnn/tf.__operators__.add_96/AddV2" [op=AddV2]; "maskrcnn/tf.__operators__.add_94/AddV2" [op=AddV2]; "maskrcnn/tf.__operators__.add_92/AddV2" [op=AddV2]; @@ -10124,7 +10188,7 @@ args_0_1 -> "maskrcnn/image_info/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVar "maskrcnn/p6-bn/FusedBatchNormV3/ReadVariableOp" -> "maskrcnn/p6-bn/FusedBatchNormV3"; "maskrcnn/p6-bn/FusedBatchNormV3/ReadVariableOp_1/resource" -> "maskrcnn/p6-bn/FusedBatchNormV3/ReadVariableOp_1"; "maskrcnn/p6-bn/FusedBatchNormV3/ReadVariableOp_1" -> "maskrcnn/p6-bn/FusedBatchNormV3"; -"maskrcnn/p6-bn/FusedBatchNormV3" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/p6-bn/FusedBatchNormV3" -> "maskrcnn/p6-bn/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/p5-bn/ReadVariableOp/resource" -> "maskrcnn/p5-bn/ReadVariableOp"; "maskrcnn/p5-bn/ReadVariableOp" -> "maskrcnn/p5-bn/FusedBatchNormV3"; "maskrcnn/p5-bn/ReadVariableOp_1/resource" -> "maskrcnn/p5-bn/ReadVariableOp_1"; @@ -10133,7 +10197,7 @@ args_0_1 -> "maskrcnn/image_info/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVar "maskrcnn/p5-bn/FusedBatchNormV3/ReadVariableOp" -> "maskrcnn/p5-bn/FusedBatchNormV3"; "maskrcnn/p5-bn/FusedBatchNormV3/ReadVariableOp_1/resource" -> "maskrcnn/p5-bn/FusedBatchNormV3/ReadVariableOp_1"; "maskrcnn/p5-bn/FusedBatchNormV3/ReadVariableOp_1" -> "maskrcnn/p5-bn/FusedBatchNormV3"; -"maskrcnn/p5-bn/FusedBatchNormV3" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars"; +"maskrcnn/p5-bn/FusedBatchNormV3" -> "maskrcnn/p5-bn/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/p4-bn/ReadVariableOp/resource" -> "maskrcnn/p4-bn/ReadVariableOp"; "maskrcnn/p4-bn/ReadVariableOp" -> "maskrcnn/p4-bn/FusedBatchNormV3"; "maskrcnn/p4-bn/ReadVariableOp_1/resource" -> "maskrcnn/p4-bn/ReadVariableOp_1"; @@ -10142,7 +10206,7 @@ args_0_1 -> "maskrcnn/image_info/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVar "maskrcnn/p4-bn/FusedBatchNormV3/ReadVariableOp" -> "maskrcnn/p4-bn/FusedBatchNormV3"; "maskrcnn/p4-bn/FusedBatchNormV3/ReadVariableOp_1/resource" -> "maskrcnn/p4-bn/FusedBatchNormV3/ReadVariableOp_1"; "maskrcnn/p4-bn/FusedBatchNormV3/ReadVariableOp_1" -> "maskrcnn/p4-bn/FusedBatchNormV3"; -"maskrcnn/p4-bn/FusedBatchNormV3" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars"; +"maskrcnn/p4-bn/FusedBatchNormV3" -> "maskrcnn/p4-bn/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/p3-bn/ReadVariableOp/resource" -> "maskrcnn/p3-bn/ReadVariableOp"; "maskrcnn/p3-bn/ReadVariableOp" -> "maskrcnn/p3-bn/FusedBatchNormV3"; "maskrcnn/p3-bn/ReadVariableOp_1/resource" -> "maskrcnn/p3-bn/ReadVariableOp_1"; @@ -10151,7 +10215,7 @@ args_0_1 -> "maskrcnn/image_info/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVar "maskrcnn/p3-bn/FusedBatchNormV3/ReadVariableOp" -> "maskrcnn/p3-bn/FusedBatchNormV3"; "maskrcnn/p3-bn/FusedBatchNormV3/ReadVariableOp_1/resource" -> "maskrcnn/p3-bn/FusedBatchNormV3/ReadVariableOp_1"; 
"maskrcnn/p3-bn/FusedBatchNormV3/ReadVariableOp_1" -> "maskrcnn/p3-bn/FusedBatchNormV3"; -"maskrcnn/p3-bn/FusedBatchNormV3" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars"; +"maskrcnn/p3-bn/FusedBatchNormV3" -> "maskrcnn/p3-bn/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/p2-bn/ReadVariableOp/resource" -> "maskrcnn/p2-bn/ReadVariableOp"; "maskrcnn/p2-bn/ReadVariableOp" -> "maskrcnn/p2-bn/FusedBatchNormV3"; "maskrcnn/p2-bn/ReadVariableOp_1/resource" -> "maskrcnn/p2-bn/ReadVariableOp_1"; @@ -10160,72 +10224,72 @@ args_0_1 -> "maskrcnn/image_info/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVar "maskrcnn/p2-bn/FusedBatchNormV3/ReadVariableOp" -> "maskrcnn/p2-bn/FusedBatchNormV3"; "maskrcnn/p2-bn/FusedBatchNormV3/ReadVariableOp_1/resource" -> "maskrcnn/p2-bn/FusedBatchNormV3/ReadVariableOp_1"; "maskrcnn/p2-bn/FusedBatchNormV3/ReadVariableOp_1" -> "maskrcnn/p2-bn/FusedBatchNormV3"; -"maskrcnn/p2-bn/FusedBatchNormV3" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_4/FakeQuantWithMinMaxVars"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_1/Abs/ReadVariableOp"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_2/Abs/ReadVariableOp"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_3/Abs/ReadVariableOp"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_4/Abs/ReadVariableOp"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant/Abs"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant/Abs" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant/add"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant/add/y" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant/add"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant/add" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant/add_1"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_1/ReadVariableOp"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars/ReadVariableOp"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_2/ReadVariableOp"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> 
"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars/ReadVariableOp"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_3/ReadVariableOp"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars/ReadVariableOp"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_4/ReadVariableOp"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_4/FakeQuantWithMinMaxVars/ReadVariableOp"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant/add_1"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant/add_1" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/rpn/Conv2D"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.reshape_18/Reshape"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.reshape_47/Reshape"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_1/Abs/ReadVariableOp" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_1/Abs"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_1/Abs" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_1/add"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_1/add/y" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_1/add"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_1/add" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_1/add_1"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_1/ReadVariableOp" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_1/add_1"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_1/add_1" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars" -> "maskrcnn/rpn/Conv2D_1"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.reshape_16/Reshape"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.reshape_45/Reshape"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_2/Abs/ReadVariableOp" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_2/Abs"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_2/Abs" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_2/add"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_2/add/y" -> 
"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_2/add"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_2/add" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_2/add_1"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_2/ReadVariableOp" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_2/add_1"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_2/add_1" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars" -> "maskrcnn/rpn/Conv2D_2"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.reshape_14/Reshape"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.reshape_43/Reshape"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_3/Abs/ReadVariableOp" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_3/Abs"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_3/Abs" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_3/add"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_3/add/y" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_3/add"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_3/add" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_3/add_1"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_3/ReadVariableOp" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_3/add_1"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_3/add_1" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars" -> "maskrcnn/rpn/Conv2D_3"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.reshape_12/Reshape"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.reshape_41/Reshape"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_4/Abs/ReadVariableOp" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_4/Abs"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_4/Abs" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_4/add"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_4/add/y" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_4/add"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_4/add" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_4/add_1"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_4/ReadVariableOp" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_4/add_1"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_4/add_1" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_4/FakeQuantWithMinMaxVars"; 
-"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_4/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_4/FakeQuantWithMinMaxVars"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_4/FakeQuantWithMinMaxVars" -> "maskrcnn/rpn/Conv2D_4"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_4/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.reshape_10/Reshape"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_4/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.reshape_39/Reshape"; +"maskrcnn/p2-bn/FusedBatchNormV3" -> "maskrcnn/p2-bn/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/p6-bn/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/p6-bn/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; +"maskrcnn/p6-bn/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/p6-bn/fake_quantize/AsymmQuant/Abs"; +"maskrcnn/p6-bn/fake_quantize/AsymmQuant/Abs" -> "maskrcnn/p6-bn/fake_quantize/AsymmQuant/add"; +"maskrcnn/p6-bn/fake_quantize/AsymmQuant/add/y" -> "maskrcnn/p6-bn/fake_quantize/AsymmQuant/add"; +"maskrcnn/p6-bn/fake_quantize/AsymmQuant/add" -> "maskrcnn/p6-bn/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/p6-bn/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/p6-bn/fake_quantize/AsymmQuant/ReadVariableOp"; +"maskrcnn/p6-bn/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/p6-bn/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"maskrcnn/p6-bn/fake_quantize/AsymmQuant/ReadVariableOp" -> "maskrcnn/p6-bn/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/p6-bn/fake_quantize/AsymmQuant/add_1" -> "maskrcnn/p6-bn/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/p6-bn/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/p6-bn/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/p6-bn/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/rpn/Conv2D"; +"maskrcnn/p6-bn/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.reshape_18/Reshape"; +"maskrcnn/p6-bn/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.reshape_47/Reshape"; +"maskrcnn/p5-bn/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/p5-bn/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; +"maskrcnn/p5-bn/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/p5-bn/fake_quantize/AsymmQuant/Abs"; +"maskrcnn/p5-bn/fake_quantize/AsymmQuant/Abs" -> "maskrcnn/p5-bn/fake_quantize/AsymmQuant/add"; +"maskrcnn/p5-bn/fake_quantize/AsymmQuant/add/y" -> "maskrcnn/p5-bn/fake_quantize/AsymmQuant/add"; +"maskrcnn/p5-bn/fake_quantize/AsymmQuant/add" -> "maskrcnn/p5-bn/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/p5-bn/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/p5-bn/fake_quantize/AsymmQuant/ReadVariableOp"; +"maskrcnn/p5-bn/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/p5-bn/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"maskrcnn/p5-bn/fake_quantize/AsymmQuant/ReadVariableOp" -> "maskrcnn/p5-bn/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/p5-bn/fake_quantize/AsymmQuant/add_1" -> "maskrcnn/p5-bn/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/p5-bn/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/p5-bn/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/p5-bn/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/rpn/Conv2D_1"; +"maskrcnn/p5-bn/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> 
"maskrcnn/tf.reshape_16/Reshape"; +"maskrcnn/p5-bn/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.reshape_45/Reshape"; +"maskrcnn/p4-bn/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/p4-bn/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; +"maskrcnn/p4-bn/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/p4-bn/fake_quantize/AsymmQuant/Abs"; +"maskrcnn/p4-bn/fake_quantize/AsymmQuant/Abs" -> "maskrcnn/p4-bn/fake_quantize/AsymmQuant/add"; +"maskrcnn/p4-bn/fake_quantize/AsymmQuant/add/y" -> "maskrcnn/p4-bn/fake_quantize/AsymmQuant/add"; +"maskrcnn/p4-bn/fake_quantize/AsymmQuant/add" -> "maskrcnn/p4-bn/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/p4-bn/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/p4-bn/fake_quantize/AsymmQuant/ReadVariableOp"; +"maskrcnn/p4-bn/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/p4-bn/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"maskrcnn/p4-bn/fake_quantize/AsymmQuant/ReadVariableOp" -> "maskrcnn/p4-bn/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/p4-bn/fake_quantize/AsymmQuant/add_1" -> "maskrcnn/p4-bn/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/p4-bn/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/p4-bn/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/p4-bn/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/rpn/Conv2D_2"; +"maskrcnn/p4-bn/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.reshape_14/Reshape"; +"maskrcnn/p4-bn/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.reshape_43/Reshape"; +"maskrcnn/p3-bn/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/p3-bn/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; +"maskrcnn/p3-bn/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/p3-bn/fake_quantize/AsymmQuant/Abs"; +"maskrcnn/p3-bn/fake_quantize/AsymmQuant/Abs" -> "maskrcnn/p3-bn/fake_quantize/AsymmQuant/add"; +"maskrcnn/p3-bn/fake_quantize/AsymmQuant/add/y" -> "maskrcnn/p3-bn/fake_quantize/AsymmQuant/add"; +"maskrcnn/p3-bn/fake_quantize/AsymmQuant/add" -> "maskrcnn/p3-bn/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/p3-bn/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/p3-bn/fake_quantize/AsymmQuant/ReadVariableOp"; +"maskrcnn/p3-bn/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/p3-bn/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"maskrcnn/p3-bn/fake_quantize/AsymmQuant/ReadVariableOp" -> "maskrcnn/p3-bn/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/p3-bn/fake_quantize/AsymmQuant/add_1" -> "maskrcnn/p3-bn/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/p3-bn/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/p3-bn/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/p3-bn/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/rpn/Conv2D_3"; +"maskrcnn/p3-bn/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.reshape_12/Reshape"; +"maskrcnn/p3-bn/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.reshape_41/Reshape"; +"maskrcnn/p2-bn/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/p2-bn/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; +"maskrcnn/p2-bn/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/p2-bn/fake_quantize/AsymmQuant/Abs"; +"maskrcnn/p2-bn/fake_quantize/AsymmQuant/Abs" -> "maskrcnn/p2-bn/fake_quantize/AsymmQuant/add"; +"maskrcnn/p2-bn/fake_quantize/AsymmQuant/add/y" -> 
"maskrcnn/p2-bn/fake_quantize/AsymmQuant/add"; +"maskrcnn/p2-bn/fake_quantize/AsymmQuant/add" -> "maskrcnn/p2-bn/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/p2-bn/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/p2-bn/fake_quantize/AsymmQuant/ReadVariableOp"; +"maskrcnn/p2-bn/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/p2-bn/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"maskrcnn/p2-bn/fake_quantize/AsymmQuant/ReadVariableOp" -> "maskrcnn/p2-bn/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/p2-bn/fake_quantize/AsymmQuant/add_1" -> "maskrcnn/p2-bn/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/p2-bn/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/p2-bn/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/p2-bn/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/rpn/Conv2D_4"; +"maskrcnn/p2-bn/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.reshape_10/Reshape"; +"maskrcnn/p2-bn/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.reshape_39/Reshape"; "maskrcnn/rpn/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/rpn/SymmQuant/Abs/ReadVariableOp"; "maskrcnn/rpn/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/rpn/SymmQuant_1/Abs/ReadVariableOp"; "maskrcnn/rpn/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/rpn/SymmQuant_2/Abs/ReadVariableOp"; @@ -11288,176 +11352,146 @@ args_0_1 -> "maskrcnn/image_info/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVar "maskrcnn/tf.math.multiply_4/fake_quantize/AsymmQuant/add_1" -> "maskrcnn/tf.math.multiply_4/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.multiply_4/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.multiply_4/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.multiply_4/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.math.subtract/Sub"; -"maskrcnn/tf.math.subtract_33/Sub" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_32/Sub" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_25/Sub" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_24/Sub" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_17/Sub" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_16/Sub" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_9/Sub" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_8/Sub" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_33/Sub" -> "maskrcnn/tf.math.subtract_33/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_32/Sub" -> "maskrcnn/tf.math.subtract_32/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_25/Sub" -> "maskrcnn/tf.math.subtract_25/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_24/Sub" -> 
"maskrcnn/tf.math.subtract_24/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_17/Sub" -> "maskrcnn/tf.math.subtract_17/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_16/Sub" -> "maskrcnn/tf.math.subtract_16/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_9/Sub" -> "maskrcnn/tf.math.subtract_9/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_8/Sub" -> "maskrcnn/tf.math.subtract_8/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.__operators__.getitem/strided_slice/stack" -> "maskrcnn/tf.__operators__.getitem/strided_slice"; "maskrcnn/tf.__operators__.getitem/strided_slice/stack_1" -> "maskrcnn/tf.__operators__.getitem/strided_slice"; "maskrcnn/tf.__operators__.getitem/strided_slice/stack_2" -> "maskrcnn/tf.__operators__.getitem/strided_slice"; "maskrcnn/tf.__operators__.getitem/strided_slice" -> "maskrcnn/tf.expand_dims/ExpandDims"; -"maskrcnn/tf.math.subtract_1/Sub" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract/Sub" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_1/Sub" -> "maskrcnn/tf.math.subtract_1/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract/Sub" -> "maskrcnn/tf.math.subtract/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.expand_dims/ExpandDims/dim" -> "maskrcnn/tf.expand_dims/ExpandDims"; "maskrcnn/tf.expand_dims/ExpandDims" -> "maskrcnn/tf.unstack_4/unstack"; "maskrcnn/tf.expand_dims/ExpandDims" -> "maskrcnn/tf.unstack_3/unstack"; "maskrcnn/tf.expand_dims/ExpandDims" -> "maskrcnn/tf.unstack_2/unstack"; "maskrcnn/tf.expand_dims/ExpandDims" -> "maskrcnn/tf.unstack_1/unstack"; "maskrcnn/tf.expand_dims/ExpandDims" -> "maskrcnn/tf.unstack/unstack"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_1/Abs/ReadVariableOp"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_2/Abs/ReadVariableOp"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_3/Abs/ReadVariableOp"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant/Abs"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant/Abs" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant/add"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant/add/y" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant/add"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant/add" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant/add_1"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> 
"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_1/ReadVariableOp"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars/ReadVariableOp"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_2/ReadVariableOp"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars/ReadVariableOp"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_3/ReadVariableOp"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars/ReadVariableOp"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant/add_1"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant/add_1" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.__operators__.add_37/AddV2"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_4/concat"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_1/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_1/Abs"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_1/Abs" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_1/add"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_1/add/y" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_1/add"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_1/add" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_1/add_1"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_1/ReadVariableOp" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_1/add_1"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_1/add_1" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars"; 
-"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.__operators__.add_38/AddV2"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_4/concat"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_1/Abs/ReadVariableOp"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_2/Abs/ReadVariableOp"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_3/Abs/ReadVariableOp"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant/Abs"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant/Abs" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant/add"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant/add/y" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant/add"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant/add" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant/add_1"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_1/ReadVariableOp"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars/ReadVariableOp"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_2/ReadVariableOp"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars/ReadVariableOp"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_3/ReadVariableOp"; 
-"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars/ReadVariableOp"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant/add_1"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant/add_1" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.__operators__.add_33/AddV2"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_3/concat"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_1/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_1/Abs"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_1/Abs" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_1/add"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_1/add/y" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_1/add"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_1/add" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_1/add_1"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_1/ReadVariableOp" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_1/add_1"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_1/add_1" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.__operators__.add_34/AddV2"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_3/concat"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_1/Abs/ReadVariableOp"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_2/Abs/ReadVariableOp"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_3/Abs/ReadVariableOp"; 
-"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant/Abs"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant/Abs" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant/add"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant/add/y" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant/add"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant/add" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant/add_1"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_1/ReadVariableOp"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars/ReadVariableOp"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_2/ReadVariableOp"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars/ReadVariableOp"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_3/ReadVariableOp"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars/ReadVariableOp"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant/add_1"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant/add_1" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.__operators__.add_29/AddV2"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_2/concat"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_1/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_1/Abs"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_1/Abs" -> 
"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_1/add"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_1/add/y" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_1/add"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_1/add" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_1/add_1"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_1/ReadVariableOp" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_1/add_1"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_1/add_1" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.__operators__.add_30/AddV2"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_2/concat"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_1/Abs/ReadVariableOp"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_2/Abs/ReadVariableOp"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_3/Abs/ReadVariableOp"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant/Abs"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant/Abs" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant/add"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant/add/y" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant/add"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant/add" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant/add_1"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_1/ReadVariableOp"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> 
"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars/ReadVariableOp"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_2/ReadVariableOp"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars/ReadVariableOp"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_3/ReadVariableOp"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars/ReadVariableOp"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant/add_1"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant/add_1" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.__operators__.add_25/AddV2"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_1/concat"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_1/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_1/Abs"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_1/Abs" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_1/add"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_1/add/y" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_1/add"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_1/add" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_1/add_1"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_1/ReadVariableOp" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_1/add_1"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_1/add_1" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.__operators__.add_26/AddV2"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_1/concat"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" -> 
"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_1/Abs/ReadVariableOp"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_2/Abs/ReadVariableOp"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_3/Abs/ReadVariableOp"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant/Abs"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant/Abs" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant/add"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant/add/y" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant/add"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant/add" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant/add_1"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_1/ReadVariableOp"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars/ReadVariableOp"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_2/ReadVariableOp"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars/ReadVariableOp"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_3/ReadVariableOp"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars/ReadVariableOp"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant/add_1"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant/add_1" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars"; 
-"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.__operators__.add_21/AddV2"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat/concat"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_1/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_1/Abs"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_1/Abs" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_1/add"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_1/add/y" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_1/add"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_1/add" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_1/add_1"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_1/ReadVariableOp" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_1/add_1"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_1/add_1" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.__operators__.add_22/AddV2"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat/concat"; +"maskrcnn/tf.math.subtract_32/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_32/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.subtract_32/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_32/fake_quantize/AsymmQuant/Abs"; +"maskrcnn/tf.math.subtract_32/fake_quantize/AsymmQuant/Abs" -> "maskrcnn/tf.math.subtract_32/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.subtract_32/fake_quantize/AsymmQuant/add/y" -> "maskrcnn/tf.math.subtract_32/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.subtract_32/fake_quantize/AsymmQuant/add" -> "maskrcnn/tf.math.subtract_32/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.subtract_32/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_32/fake_quantize/AsymmQuant/ReadVariableOp"; +"maskrcnn/tf.math.subtract_32/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_32/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"maskrcnn/tf.math.subtract_32/fake_quantize/AsymmQuant/ReadVariableOp" -> "maskrcnn/tf.math.subtract_32/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.subtract_32/fake_quantize/AsymmQuant/add_1" -> "maskrcnn/tf.math.subtract_32/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_32/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.subtract_32/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_32/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.__operators__.add_37/AddV2"; +"maskrcnn/tf.math.subtract_32/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_4/concat"; 
+"maskrcnn/tf.math.subtract_33/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_33/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.subtract_33/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_33/fake_quantize/AsymmQuant/Abs"; +"maskrcnn/tf.math.subtract_33/fake_quantize/AsymmQuant/Abs" -> "maskrcnn/tf.math.subtract_33/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.subtract_33/fake_quantize/AsymmQuant/add/y" -> "maskrcnn/tf.math.subtract_33/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.subtract_33/fake_quantize/AsymmQuant/add" -> "maskrcnn/tf.math.subtract_33/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.subtract_33/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_33/fake_quantize/AsymmQuant/ReadVariableOp"; +"maskrcnn/tf.math.subtract_33/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_33/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"maskrcnn/tf.math.subtract_33/fake_quantize/AsymmQuant/ReadVariableOp" -> "maskrcnn/tf.math.subtract_33/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.subtract_33/fake_quantize/AsymmQuant/add_1" -> "maskrcnn/tf.math.subtract_33/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_33/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.subtract_33/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_33/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.__operators__.add_38/AddV2"; +"maskrcnn/tf.math.subtract_33/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_4/concat"; +"maskrcnn/tf.math.subtract_24/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_24/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.subtract_24/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_24/fake_quantize/AsymmQuant/Abs"; +"maskrcnn/tf.math.subtract_24/fake_quantize/AsymmQuant/Abs" -> "maskrcnn/tf.math.subtract_24/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.subtract_24/fake_quantize/AsymmQuant/add/y" -> "maskrcnn/tf.math.subtract_24/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.subtract_24/fake_quantize/AsymmQuant/add" -> "maskrcnn/tf.math.subtract_24/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.subtract_24/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_24/fake_quantize/AsymmQuant/ReadVariableOp"; +"maskrcnn/tf.math.subtract_24/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_24/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"maskrcnn/tf.math.subtract_24/fake_quantize/AsymmQuant/ReadVariableOp" -> "maskrcnn/tf.math.subtract_24/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.subtract_24/fake_quantize/AsymmQuant/add_1" -> "maskrcnn/tf.math.subtract_24/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_24/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.subtract_24/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_24/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.__operators__.add_33/AddV2"; +"maskrcnn/tf.math.subtract_24/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_3/concat"; +"maskrcnn/tf.math.subtract_25/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> 
"maskrcnn/tf.math.subtract_25/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.subtract_25/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_25/fake_quantize/AsymmQuant/Abs"; +"maskrcnn/tf.math.subtract_25/fake_quantize/AsymmQuant/Abs" -> "maskrcnn/tf.math.subtract_25/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.subtract_25/fake_quantize/AsymmQuant/add/y" -> "maskrcnn/tf.math.subtract_25/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.subtract_25/fake_quantize/AsymmQuant/add" -> "maskrcnn/tf.math.subtract_25/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.subtract_25/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_25/fake_quantize/AsymmQuant/ReadVariableOp"; +"maskrcnn/tf.math.subtract_25/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_25/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"maskrcnn/tf.math.subtract_25/fake_quantize/AsymmQuant/ReadVariableOp" -> "maskrcnn/tf.math.subtract_25/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.subtract_25/fake_quantize/AsymmQuant/add_1" -> "maskrcnn/tf.math.subtract_25/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_25/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.subtract_25/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_25/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.__operators__.add_34/AddV2"; +"maskrcnn/tf.math.subtract_25/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_3/concat"; +"maskrcnn/tf.math.subtract_16/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_16/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.subtract_16/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_16/fake_quantize/AsymmQuant/Abs"; +"maskrcnn/tf.math.subtract_16/fake_quantize/AsymmQuant/Abs" -> "maskrcnn/tf.math.subtract_16/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.subtract_16/fake_quantize/AsymmQuant/add/y" -> "maskrcnn/tf.math.subtract_16/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.subtract_16/fake_quantize/AsymmQuant/add" -> "maskrcnn/tf.math.subtract_16/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.subtract_16/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_16/fake_quantize/AsymmQuant/ReadVariableOp"; +"maskrcnn/tf.math.subtract_16/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_16/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"maskrcnn/tf.math.subtract_16/fake_quantize/AsymmQuant/ReadVariableOp" -> "maskrcnn/tf.math.subtract_16/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.subtract_16/fake_quantize/AsymmQuant/add_1" -> "maskrcnn/tf.math.subtract_16/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_16/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.subtract_16/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_16/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.__operators__.add_29/AddV2"; +"maskrcnn/tf.math.subtract_16/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_2/concat"; +"maskrcnn/tf.math.subtract_17/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_17/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; 
+"maskrcnn/tf.math.subtract_17/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_17/fake_quantize/AsymmQuant/Abs"; +"maskrcnn/tf.math.subtract_17/fake_quantize/AsymmQuant/Abs" -> "maskrcnn/tf.math.subtract_17/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.subtract_17/fake_quantize/AsymmQuant/add/y" -> "maskrcnn/tf.math.subtract_17/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.subtract_17/fake_quantize/AsymmQuant/add" -> "maskrcnn/tf.math.subtract_17/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.subtract_17/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_17/fake_quantize/AsymmQuant/ReadVariableOp"; +"maskrcnn/tf.math.subtract_17/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_17/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"maskrcnn/tf.math.subtract_17/fake_quantize/AsymmQuant/ReadVariableOp" -> "maskrcnn/tf.math.subtract_17/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.subtract_17/fake_quantize/AsymmQuant/add_1" -> "maskrcnn/tf.math.subtract_17/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_17/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.subtract_17/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_17/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.__operators__.add_30/AddV2"; +"maskrcnn/tf.math.subtract_17/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_2/concat"; +"maskrcnn/tf.math.subtract_8/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_8/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.subtract_8/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_8/fake_quantize/AsymmQuant/Abs"; +"maskrcnn/tf.math.subtract_8/fake_quantize/AsymmQuant/Abs" -> "maskrcnn/tf.math.subtract_8/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.subtract_8/fake_quantize/AsymmQuant/add/y" -> "maskrcnn/tf.math.subtract_8/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.subtract_8/fake_quantize/AsymmQuant/add" -> "maskrcnn/tf.math.subtract_8/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.subtract_8/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_8/fake_quantize/AsymmQuant/ReadVariableOp"; +"maskrcnn/tf.math.subtract_8/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_8/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"maskrcnn/tf.math.subtract_8/fake_quantize/AsymmQuant/ReadVariableOp" -> "maskrcnn/tf.math.subtract_8/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.subtract_8/fake_quantize/AsymmQuant/add_1" -> "maskrcnn/tf.math.subtract_8/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_8/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.subtract_8/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_8/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.__operators__.add_25/AddV2"; +"maskrcnn/tf.math.subtract_8/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_1/concat"; +"maskrcnn/tf.math.subtract_9/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_9/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.subtract_9/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> 
"maskrcnn/tf.math.subtract_9/fake_quantize/AsymmQuant/Abs"; +"maskrcnn/tf.math.subtract_9/fake_quantize/AsymmQuant/Abs" -> "maskrcnn/tf.math.subtract_9/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.subtract_9/fake_quantize/AsymmQuant/add/y" -> "maskrcnn/tf.math.subtract_9/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.subtract_9/fake_quantize/AsymmQuant/add" -> "maskrcnn/tf.math.subtract_9/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.subtract_9/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_9/fake_quantize/AsymmQuant/ReadVariableOp"; +"maskrcnn/tf.math.subtract_9/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_9/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"maskrcnn/tf.math.subtract_9/fake_quantize/AsymmQuant/ReadVariableOp" -> "maskrcnn/tf.math.subtract_9/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.subtract_9/fake_quantize/AsymmQuant/add_1" -> "maskrcnn/tf.math.subtract_9/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_9/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.subtract_9/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_9/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.__operators__.add_26/AddV2"; +"maskrcnn/tf.math.subtract_9/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_1/concat"; +"maskrcnn/tf.math.subtract/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.subtract/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract/fake_quantize/AsymmQuant/Abs"; +"maskrcnn/tf.math.subtract/fake_quantize/AsymmQuant/Abs" -> "maskrcnn/tf.math.subtract/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.subtract/fake_quantize/AsymmQuant/add/y" -> "maskrcnn/tf.math.subtract/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.subtract/fake_quantize/AsymmQuant/add" -> "maskrcnn/tf.math.subtract/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.subtract/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract/fake_quantize/AsymmQuant/ReadVariableOp"; +"maskrcnn/tf.math.subtract/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"maskrcnn/tf.math.subtract/fake_quantize/AsymmQuant/ReadVariableOp" -> "maskrcnn/tf.math.subtract/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.subtract/fake_quantize/AsymmQuant/add_1" -> "maskrcnn/tf.math.subtract/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.subtract/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.__operators__.add_21/AddV2"; +"maskrcnn/tf.math.subtract/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat/concat"; +"maskrcnn/tf.math.subtract_1/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_1/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.subtract_1/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_1/fake_quantize/AsymmQuant/Abs"; +"maskrcnn/tf.math.subtract_1/fake_quantize/AsymmQuant/Abs" -> "maskrcnn/tf.math.subtract_1/fake_quantize/AsymmQuant/add"; 
+"maskrcnn/tf.math.subtract_1/fake_quantize/AsymmQuant/add/y" -> "maskrcnn/tf.math.subtract_1/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.subtract_1/fake_quantize/AsymmQuant/add" -> "maskrcnn/tf.math.subtract_1/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.subtract_1/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_1/fake_quantize/AsymmQuant/ReadVariableOp"; +"maskrcnn/tf.math.subtract_1/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_1/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"maskrcnn/tf.math.subtract_1/fake_quantize/AsymmQuant/ReadVariableOp" -> "maskrcnn/tf.math.subtract_1/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.subtract_1/fake_quantize/AsymmQuant/add_1" -> "maskrcnn/tf.math.subtract_1/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_1/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.subtract_1/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_1/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.__operators__.add_22/AddV2"; +"maskrcnn/tf.math.subtract_1/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat/concat"; "maskrcnn/tf.__operators__.add_38/AddV2" -> "maskrcnn/tf.__operators__.add_38/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.__operators__.add_37/AddV2" -> "maskrcnn/tf.__operators__.add_37/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.__operators__.add_34/AddV2" -> "maskrcnn/tf.__operators__.add_34/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; @@ -11607,9 +11641,9 @@ args_0_1 -> "maskrcnn/image_info/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVar "maskrcnn/tf.math.subtract_36/Sub/y" -> "maskrcnn/tf.math.subtract_36/Sub"; "maskrcnn/tf.math.subtract_36/Sub" -> "maskrcnn/tf.math.subtract_36/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_35/Sub/y" -> "maskrcnn/tf.math.subtract_35/Sub"; -"maskrcnn/tf.math.subtract_35/Sub" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_35/Sub" -> "maskrcnn/tf.math.subtract_35/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_34/Sub/y" -> "maskrcnn/tf.math.subtract_34/Sub"; -"maskrcnn/tf.math.subtract_34/Sub" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_34/Sub" -> "maskrcnn/tf.math.subtract_34/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_31/Sub/y" -> "maskrcnn/tf.math.subtract_31/Sub"; "maskrcnn/tf.math.subtract_31/Sub" -> "maskrcnn/tf.math.subtract_31/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_30/Sub/y" -> "maskrcnn/tf.math.subtract_30/Sub"; @@ -11619,9 +11653,9 @@ args_0_1 -> "maskrcnn/image_info/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVar "maskrcnn/tf.math.subtract_28/Sub/y" -> "maskrcnn/tf.math.subtract_28/Sub"; "maskrcnn/tf.math.subtract_28/Sub" -> "maskrcnn/tf.math.subtract_28/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_27/Sub/y" -> "maskrcnn/tf.math.subtract_27/Sub"; -"maskrcnn/tf.math.subtract_27/Sub" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_27/Sub" -> "maskrcnn/tf.math.subtract_27/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; 
"maskrcnn/tf.math.subtract_26/Sub/y" -> "maskrcnn/tf.math.subtract_26/Sub"; -"maskrcnn/tf.math.subtract_26/Sub" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_26/Sub" -> "maskrcnn/tf.math.subtract_26/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_23/Sub/y" -> "maskrcnn/tf.math.subtract_23/Sub"; "maskrcnn/tf.math.subtract_23/Sub" -> "maskrcnn/tf.math.subtract_23/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_22/Sub/y" -> "maskrcnn/tf.math.subtract_22/Sub"; @@ -11631,9 +11665,9 @@ args_0_1 -> "maskrcnn/image_info/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVar "maskrcnn/tf.math.subtract_20/Sub/y" -> "maskrcnn/tf.math.subtract_20/Sub"; "maskrcnn/tf.math.subtract_20/Sub" -> "maskrcnn/tf.math.subtract_20/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_19/Sub/y" -> "maskrcnn/tf.math.subtract_19/Sub"; -"maskrcnn/tf.math.subtract_19/Sub" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_19/Sub" -> "maskrcnn/tf.math.subtract_19/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_18/Sub/y" -> "maskrcnn/tf.math.subtract_18/Sub"; -"maskrcnn/tf.math.subtract_18/Sub" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_18/Sub" -> "maskrcnn/tf.math.subtract_18/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_15/Sub/y" -> "maskrcnn/tf.math.subtract_15/Sub"; "maskrcnn/tf.math.subtract_15/Sub" -> "maskrcnn/tf.math.subtract_15/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_14/Sub/y" -> "maskrcnn/tf.math.subtract_14/Sub"; @@ -11643,9 +11677,9 @@ args_0_1 -> "maskrcnn/image_info/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVar "maskrcnn/tf.math.subtract_12/Sub/y" -> "maskrcnn/tf.math.subtract_12/Sub"; "maskrcnn/tf.math.subtract_12/Sub" -> "maskrcnn/tf.math.subtract_12/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_11/Sub/y" -> "maskrcnn/tf.math.subtract_11/Sub"; -"maskrcnn/tf.math.subtract_11/Sub" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_11/Sub" -> "maskrcnn/tf.math.subtract_11/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_10/Sub/y" -> "maskrcnn/tf.math.subtract_10/Sub"; -"maskrcnn/tf.math.subtract_10/Sub" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_10/Sub" -> "maskrcnn/tf.math.subtract_10/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_7/Sub/y" -> "maskrcnn/tf.math.subtract_7/Sub"; "maskrcnn/tf.math.subtract_7/Sub" -> "maskrcnn/tf.math.subtract_7/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_6/Sub/y" -> "maskrcnn/tf.math.subtract_6/Sub"; @@ -11655,9 +11689,9 @@ args_0_1 -> "maskrcnn/image_info/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVar "maskrcnn/tf.math.subtract_4/Sub/y" -> "maskrcnn/tf.math.subtract_4/Sub"; "maskrcnn/tf.math.subtract_4/Sub" -> "maskrcnn/tf.math.subtract_4/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_3/Sub/y" -> "maskrcnn/tf.math.subtract_3/Sub"; -"maskrcnn/tf.math.subtract_3/Sub" -> 
"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_3/Sub" -> "maskrcnn/tf.math.subtract_3/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_2/Sub/y" -> "maskrcnn/tf.math.subtract_2/Sub"; -"maskrcnn/tf.math.subtract_2/Sub" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_2/Sub" -> "maskrcnn/tf.math.subtract_2/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_36/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_36/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; "maskrcnn/tf.math.subtract_36/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_36/fake_quantize/AsymmQuant/Abs"; "maskrcnn/tf.math.subtract_36/fake_quantize/AsymmQuant/Abs" -> "maskrcnn/tf.math.subtract_36/fake_quantize/AsymmQuant/add"; @@ -11702,22 +11736,28 @@ args_0_1 -> "maskrcnn/image_info/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVar "maskrcnn/tf.math.subtract_39/fake_quantize/AsymmQuant/add_1" -> "maskrcnn/tf.math.subtract_39/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_39/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.subtract_39/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_39/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.stack_4/stack"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_2/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_2/Abs"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_2/Abs" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_2/add"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_2/add/y" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_2/add"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_2/add" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_2/add_1"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_2/ReadVariableOp" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_2/add_1"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_2/add_1" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_4/concat"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_3/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_3/Abs"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_3/Abs" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_3/add"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_3/add/y" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_3/add"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_3/add" 
-> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_3/add_1"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_3/ReadVariableOp" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_3/add_1"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_3/add_1" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_4/concat"; +"maskrcnn/tf.math.subtract_34/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_34/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.subtract_34/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_34/fake_quantize/AsymmQuant/Abs"; +"maskrcnn/tf.math.subtract_34/fake_quantize/AsymmQuant/Abs" -> "maskrcnn/tf.math.subtract_34/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.subtract_34/fake_quantize/AsymmQuant/add/y" -> "maskrcnn/tf.math.subtract_34/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.subtract_34/fake_quantize/AsymmQuant/add" -> "maskrcnn/tf.math.subtract_34/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.subtract_34/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_34/fake_quantize/AsymmQuant/ReadVariableOp"; +"maskrcnn/tf.math.subtract_34/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_34/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"maskrcnn/tf.math.subtract_34/fake_quantize/AsymmQuant/ReadVariableOp" -> "maskrcnn/tf.math.subtract_34/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.subtract_34/fake_quantize/AsymmQuant/add_1" -> "maskrcnn/tf.math.subtract_34/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_34/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.subtract_34/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_34/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_4/concat"; +"maskrcnn/tf.math.subtract_35/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_35/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.subtract_35/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_35/fake_quantize/AsymmQuant/Abs"; +"maskrcnn/tf.math.subtract_35/fake_quantize/AsymmQuant/Abs" -> "maskrcnn/tf.math.subtract_35/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.subtract_35/fake_quantize/AsymmQuant/add/y" -> "maskrcnn/tf.math.subtract_35/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.subtract_35/fake_quantize/AsymmQuant/add" -> "maskrcnn/tf.math.subtract_35/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.subtract_35/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_35/fake_quantize/AsymmQuant/ReadVariableOp"; +"maskrcnn/tf.math.subtract_35/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_35/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"maskrcnn/tf.math.subtract_35/fake_quantize/AsymmQuant/ReadVariableOp" -> 
"maskrcnn/tf.math.subtract_35/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.subtract_35/fake_quantize/AsymmQuant/add_1" -> "maskrcnn/tf.math.subtract_35/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_35/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.subtract_35/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_35/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_4/concat"; "maskrcnn/tf.math.subtract_28/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_28/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; "maskrcnn/tf.math.subtract_28/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_28/fake_quantize/AsymmQuant/Abs"; "maskrcnn/tf.math.subtract_28/fake_quantize/AsymmQuant/Abs" -> "maskrcnn/tf.math.subtract_28/fake_quantize/AsymmQuant/add"; @@ -11762,22 +11802,28 @@ args_0_1 -> "maskrcnn/image_info/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVar "maskrcnn/tf.math.subtract_31/fake_quantize/AsymmQuant/add_1" -> "maskrcnn/tf.math.subtract_31/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_31/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.subtract_31/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_31/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.stack_3/stack"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_2/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_2/Abs"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_2/Abs" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_2/add"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_2/add/y" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_2/add"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_2/add" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_2/add_1"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_2/ReadVariableOp" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_2/add_1"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_2/add_1" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_3/concat"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_3/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_3/Abs"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_3/Abs" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_3/add"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_3/add/y" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_3/add"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_3/add" -> 
"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_3/add_1"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_3/ReadVariableOp" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_3/add_1"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_3/add_1" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_3/concat"; +"maskrcnn/tf.math.subtract_26/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_26/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.subtract_26/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_26/fake_quantize/AsymmQuant/Abs"; +"maskrcnn/tf.math.subtract_26/fake_quantize/AsymmQuant/Abs" -> "maskrcnn/tf.math.subtract_26/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.subtract_26/fake_quantize/AsymmQuant/add/y" -> "maskrcnn/tf.math.subtract_26/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.subtract_26/fake_quantize/AsymmQuant/add" -> "maskrcnn/tf.math.subtract_26/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.subtract_26/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_26/fake_quantize/AsymmQuant/ReadVariableOp"; +"maskrcnn/tf.math.subtract_26/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_26/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"maskrcnn/tf.math.subtract_26/fake_quantize/AsymmQuant/ReadVariableOp" -> "maskrcnn/tf.math.subtract_26/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.subtract_26/fake_quantize/AsymmQuant/add_1" -> "maskrcnn/tf.math.subtract_26/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_26/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.subtract_26/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_26/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_3/concat"; +"maskrcnn/tf.math.subtract_27/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_27/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.subtract_27/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_27/fake_quantize/AsymmQuant/Abs"; +"maskrcnn/tf.math.subtract_27/fake_quantize/AsymmQuant/Abs" -> "maskrcnn/tf.math.subtract_27/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.subtract_27/fake_quantize/AsymmQuant/add/y" -> "maskrcnn/tf.math.subtract_27/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.subtract_27/fake_quantize/AsymmQuant/add" -> "maskrcnn/tf.math.subtract_27/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.subtract_27/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_27/fake_quantize/AsymmQuant/ReadVariableOp"; +"maskrcnn/tf.math.subtract_27/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_27/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"maskrcnn/tf.math.subtract_27/fake_quantize/AsymmQuant/ReadVariableOp" -> 
"maskrcnn/tf.math.subtract_27/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.subtract_27/fake_quantize/AsymmQuant/add_1" -> "maskrcnn/tf.math.subtract_27/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_27/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.subtract_27/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_27/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_3/concat"; "maskrcnn/tf.math.subtract_20/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_20/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; "maskrcnn/tf.math.subtract_20/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_20/fake_quantize/AsymmQuant/Abs"; "maskrcnn/tf.math.subtract_20/fake_quantize/AsymmQuant/Abs" -> "maskrcnn/tf.math.subtract_20/fake_quantize/AsymmQuant/add"; @@ -11822,22 +11868,28 @@ args_0_1 -> "maskrcnn/image_info/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVar "maskrcnn/tf.math.subtract_23/fake_quantize/AsymmQuant/add_1" -> "maskrcnn/tf.math.subtract_23/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_23/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.subtract_23/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_23/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.stack_2/stack"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_2/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_2/Abs"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_2/Abs" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_2/add"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_2/add/y" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_2/add"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_2/add" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_2/add_1"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_2/ReadVariableOp" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_2/add_1"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_2/add_1" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_2/concat"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_3/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_3/Abs"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_3/Abs" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_3/add"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_3/add/y" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_3/add"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_3/add" -> 
"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_3/add_1"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_3/ReadVariableOp" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_3/add_1"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_3/add_1" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_2/concat"; +"maskrcnn/tf.math.subtract_18/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_18/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.subtract_18/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_18/fake_quantize/AsymmQuant/Abs"; +"maskrcnn/tf.math.subtract_18/fake_quantize/AsymmQuant/Abs" -> "maskrcnn/tf.math.subtract_18/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.subtract_18/fake_quantize/AsymmQuant/add/y" -> "maskrcnn/tf.math.subtract_18/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.subtract_18/fake_quantize/AsymmQuant/add" -> "maskrcnn/tf.math.subtract_18/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.subtract_18/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_18/fake_quantize/AsymmQuant/ReadVariableOp"; +"maskrcnn/tf.math.subtract_18/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_18/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"maskrcnn/tf.math.subtract_18/fake_quantize/AsymmQuant/ReadVariableOp" -> "maskrcnn/tf.math.subtract_18/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.subtract_18/fake_quantize/AsymmQuant/add_1" -> "maskrcnn/tf.math.subtract_18/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_18/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.subtract_18/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_18/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_2/concat"; +"maskrcnn/tf.math.subtract_19/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_19/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.subtract_19/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_19/fake_quantize/AsymmQuant/Abs"; +"maskrcnn/tf.math.subtract_19/fake_quantize/AsymmQuant/Abs" -> "maskrcnn/tf.math.subtract_19/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.subtract_19/fake_quantize/AsymmQuant/add/y" -> "maskrcnn/tf.math.subtract_19/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.subtract_19/fake_quantize/AsymmQuant/add" -> "maskrcnn/tf.math.subtract_19/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.subtract_19/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_19/fake_quantize/AsymmQuant/ReadVariableOp"; +"maskrcnn/tf.math.subtract_19/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_19/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"maskrcnn/tf.math.subtract_19/fake_quantize/AsymmQuant/ReadVariableOp" -> 
"maskrcnn/tf.math.subtract_19/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.subtract_19/fake_quantize/AsymmQuant/add_1" -> "maskrcnn/tf.math.subtract_19/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_19/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.subtract_19/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_19/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_2/concat"; "maskrcnn/tf.math.subtract_12/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_12/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; "maskrcnn/tf.math.subtract_12/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_12/fake_quantize/AsymmQuant/Abs"; "maskrcnn/tf.math.subtract_12/fake_quantize/AsymmQuant/Abs" -> "maskrcnn/tf.math.subtract_12/fake_quantize/AsymmQuant/add"; @@ -11882,22 +11934,28 @@ args_0_1 -> "maskrcnn/image_info/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVar "maskrcnn/tf.math.subtract_15/fake_quantize/AsymmQuant/add_1" -> "maskrcnn/tf.math.subtract_15/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_15/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.subtract_15/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_15/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.stack_1/stack"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_2/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_2/Abs"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_2/Abs" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_2/add"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_2/add/y" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_2/add"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_2/add" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_2/add_1"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_2/ReadVariableOp" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_2/add_1"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_2/add_1" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_1/concat"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_3/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_3/Abs"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_3/Abs" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_3/add"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_3/add/y" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_3/add"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_3/add" -> 
"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_3/add_1"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_3/ReadVariableOp" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_3/add_1"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_3/add_1" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_1/concat"; +"maskrcnn/tf.math.subtract_10/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_10/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.subtract_10/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_10/fake_quantize/AsymmQuant/Abs"; +"maskrcnn/tf.math.subtract_10/fake_quantize/AsymmQuant/Abs" -> "maskrcnn/tf.math.subtract_10/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.subtract_10/fake_quantize/AsymmQuant/add/y" -> "maskrcnn/tf.math.subtract_10/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.subtract_10/fake_quantize/AsymmQuant/add" -> "maskrcnn/tf.math.subtract_10/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.subtract_10/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_10/fake_quantize/AsymmQuant/ReadVariableOp"; +"maskrcnn/tf.math.subtract_10/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_10/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"maskrcnn/tf.math.subtract_10/fake_quantize/AsymmQuant/ReadVariableOp" -> "maskrcnn/tf.math.subtract_10/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.subtract_10/fake_quantize/AsymmQuant/add_1" -> "maskrcnn/tf.math.subtract_10/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_10/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.subtract_10/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_10/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_1/concat"; +"maskrcnn/tf.math.subtract_11/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_11/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.subtract_11/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_11/fake_quantize/AsymmQuant/Abs"; +"maskrcnn/tf.math.subtract_11/fake_quantize/AsymmQuant/Abs" -> "maskrcnn/tf.math.subtract_11/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.subtract_11/fake_quantize/AsymmQuant/add/y" -> "maskrcnn/tf.math.subtract_11/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.subtract_11/fake_quantize/AsymmQuant/add" -> "maskrcnn/tf.math.subtract_11/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.subtract_11/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_11/fake_quantize/AsymmQuant/ReadVariableOp"; +"maskrcnn/tf.math.subtract_11/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_11/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"maskrcnn/tf.math.subtract_11/fake_quantize/AsymmQuant/ReadVariableOp" -> 
"maskrcnn/tf.math.subtract_11/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.subtract_11/fake_quantize/AsymmQuant/add_1" -> "maskrcnn/tf.math.subtract_11/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_11/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.subtract_11/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_11/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_1/concat"; "maskrcnn/tf.math.subtract_4/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_4/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; "maskrcnn/tf.math.subtract_4/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_4/fake_quantize/AsymmQuant/Abs"; "maskrcnn/tf.math.subtract_4/fake_quantize/AsymmQuant/Abs" -> "maskrcnn/tf.math.subtract_4/fake_quantize/AsymmQuant/add"; @@ -11942,22 +12000,28 @@ args_0_1 -> "maskrcnn/image_info/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVar "maskrcnn/tf.math.subtract_7/fake_quantize/AsymmQuant/add_1" -> "maskrcnn/tf.math.subtract_7/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_7/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.subtract_7/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_7/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.stack/stack"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_2/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_2/Abs"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_2/Abs" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_2/add"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_2/add/y" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_2/add"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_2/add" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_2/add_1"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_2/ReadVariableOp" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_2/add_1"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_2/add_1" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat/concat"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_3/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_3/Abs"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_3/Abs" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_3/add"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_3/add/y" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_3/add"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_3/add" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_3/add_1"; 
-"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_3/ReadVariableOp" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_3/add_1"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_3/add_1" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat/concat"; +"maskrcnn/tf.math.subtract_2/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_2/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.subtract_2/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_2/fake_quantize/AsymmQuant/Abs"; +"maskrcnn/tf.math.subtract_2/fake_quantize/AsymmQuant/Abs" -> "maskrcnn/tf.math.subtract_2/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.subtract_2/fake_quantize/AsymmQuant/add/y" -> "maskrcnn/tf.math.subtract_2/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.subtract_2/fake_quantize/AsymmQuant/add" -> "maskrcnn/tf.math.subtract_2/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.subtract_2/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_2/fake_quantize/AsymmQuant/ReadVariableOp"; +"maskrcnn/tf.math.subtract_2/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_2/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"maskrcnn/tf.math.subtract_2/fake_quantize/AsymmQuant/ReadVariableOp" -> "maskrcnn/tf.math.subtract_2/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.subtract_2/fake_quantize/AsymmQuant/add_1" -> "maskrcnn/tf.math.subtract_2/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_2/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.subtract_2/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_2/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat/concat"; +"maskrcnn/tf.math.subtract_3/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_3/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.subtract_3/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_3/fake_quantize/AsymmQuant/Abs"; +"maskrcnn/tf.math.subtract_3/fake_quantize/AsymmQuant/Abs" -> "maskrcnn/tf.math.subtract_3/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.subtract_3/fake_quantize/AsymmQuant/add/y" -> "maskrcnn/tf.math.subtract_3/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.subtract_3/fake_quantize/AsymmQuant/add" -> "maskrcnn/tf.math.subtract_3/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.subtract_3/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_3/fake_quantize/AsymmQuant/ReadVariableOp"; +"maskrcnn/tf.math.subtract_3/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_3/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"maskrcnn/tf.math.subtract_3/fake_quantize/AsymmQuant/ReadVariableOp" -> "maskrcnn/tf.math.subtract_3/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.subtract_3/fake_quantize/AsymmQuant/add_1" -> 
"maskrcnn/tf.math.subtract_3/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_3/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.subtract_3/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_3/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat/concat"; "maskrcnn/tf.concat_4/concat/axis" -> "maskrcnn/tf.concat_4/concat"; "maskrcnn/tf.concat_4/concat" -> "maskrcnn/tf.math.minimum_14/Minimum"; "maskrcnn/tf.stack_4/stack" -> "maskrcnn/tf.math.minimum_14/Minimum"; @@ -12168,31 +12232,31 @@ args_0_1 -> "maskrcnn/image_info/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVar "maskrcnn/tf.image.combined_non_max_suppression/combined_non_max_suppression/Const" -> "maskrcnn/tf.image.combined_non_max_suppression/combined_non_max_suppression/CombinedNonMaxSuppression"; "maskrcnn/tf.image.combined_non_max_suppression/combined_non_max_suppression/CombinedNonMaxSuppression/max_output_size_per_class" -> "maskrcnn/tf.image.combined_non_max_suppression/combined_non_max_suppression/CombinedNonMaxSuppression"; "maskrcnn/tf.image.combined_non_max_suppression/combined_non_max_suppression/CombinedNonMaxSuppression" -> "maskrcnn/tf.concat_6/concat"; -"maskrcnn/tf.image.combined_non_max_suppression/combined_non_max_suppression/CombinedNonMaxSuppression" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.image.combined_non_max_suppression/combined_non_max_suppression/CombinedNonMaxSuppression" -> "maskrcnn/tf.concat_5/fake_quantize_I0/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.image.combined_non_max_suppression_1/combined_non_max_suppression/iou_threshold" -> "maskrcnn/tf.image.combined_non_max_suppression_1/combined_non_max_suppression/CombinedNonMaxSuppression"; "maskrcnn/tf.image.combined_non_max_suppression_1/combined_non_max_suppression/score_threshold" -> "maskrcnn/tf.image.combined_non_max_suppression_1/combined_non_max_suppression/CombinedNonMaxSuppression"; "maskrcnn/tf.image.combined_non_max_suppression_1/combined_non_max_suppression/Const" -> "maskrcnn/tf.image.combined_non_max_suppression_1/combined_non_max_suppression/CombinedNonMaxSuppression"; "maskrcnn/tf.image.combined_non_max_suppression_1/combined_non_max_suppression/CombinedNonMaxSuppression/max_output_size_per_class" -> "maskrcnn/tf.image.combined_non_max_suppression_1/combined_non_max_suppression/CombinedNonMaxSuppression"; "maskrcnn/tf.image.combined_non_max_suppression_1/combined_non_max_suppression/CombinedNonMaxSuppression" -> "maskrcnn/tf.concat_6/concat"; -"maskrcnn/tf.image.combined_non_max_suppression_1/combined_non_max_suppression/CombinedNonMaxSuppression" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.image.combined_non_max_suppression_1/combined_non_max_suppression/CombinedNonMaxSuppression" -> "maskrcnn/tf.concat_5/fake_quantize_I1/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.image.combined_non_max_suppression_2/combined_non_max_suppression/iou_threshold" -> "maskrcnn/tf.image.combined_non_max_suppression_2/combined_non_max_suppression/CombinedNonMaxSuppression"; "maskrcnn/tf.image.combined_non_max_suppression_2/combined_non_max_suppression/score_threshold" -> "maskrcnn/tf.image.combined_non_max_suppression_2/combined_non_max_suppression/CombinedNonMaxSuppression"; "maskrcnn/tf.image.combined_non_max_suppression_2/combined_non_max_suppression/Const" -> 
"maskrcnn/tf.image.combined_non_max_suppression_2/combined_non_max_suppression/CombinedNonMaxSuppression"; "maskrcnn/tf.image.combined_non_max_suppression_2/combined_non_max_suppression/CombinedNonMaxSuppression/max_output_size_per_class" -> "maskrcnn/tf.image.combined_non_max_suppression_2/combined_non_max_suppression/CombinedNonMaxSuppression"; "maskrcnn/tf.image.combined_non_max_suppression_2/combined_non_max_suppression/CombinedNonMaxSuppression" -> "maskrcnn/tf.concat_6/concat"; -"maskrcnn/tf.image.combined_non_max_suppression_2/combined_non_max_suppression/CombinedNonMaxSuppression" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.image.combined_non_max_suppression_2/combined_non_max_suppression/CombinedNonMaxSuppression" -> "maskrcnn/tf.concat_5/fake_quantize_I2/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.image.combined_non_max_suppression_3/combined_non_max_suppression/iou_threshold" -> "maskrcnn/tf.image.combined_non_max_suppression_3/combined_non_max_suppression/CombinedNonMaxSuppression"; "maskrcnn/tf.image.combined_non_max_suppression_3/combined_non_max_suppression/score_threshold" -> "maskrcnn/tf.image.combined_non_max_suppression_3/combined_non_max_suppression/CombinedNonMaxSuppression"; "maskrcnn/tf.image.combined_non_max_suppression_3/combined_non_max_suppression/Const" -> "maskrcnn/tf.image.combined_non_max_suppression_3/combined_non_max_suppression/CombinedNonMaxSuppression"; "maskrcnn/tf.image.combined_non_max_suppression_3/combined_non_max_suppression/CombinedNonMaxSuppression/max_output_size_per_class" -> "maskrcnn/tf.image.combined_non_max_suppression_3/combined_non_max_suppression/CombinedNonMaxSuppression"; "maskrcnn/tf.image.combined_non_max_suppression_3/combined_non_max_suppression/CombinedNonMaxSuppression" -> "maskrcnn/tf.concat_6/concat"; -"maskrcnn/tf.image.combined_non_max_suppression_3/combined_non_max_suppression/CombinedNonMaxSuppression" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.image.combined_non_max_suppression_3/combined_non_max_suppression/CombinedNonMaxSuppression" -> "maskrcnn/tf.concat_5/fake_quantize_I3/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.image.combined_non_max_suppression_4/combined_non_max_suppression/iou_threshold" -> "maskrcnn/tf.image.combined_non_max_suppression_4/combined_non_max_suppression/CombinedNonMaxSuppression"; "maskrcnn/tf.image.combined_non_max_suppression_4/combined_non_max_suppression/score_threshold" -> "maskrcnn/tf.image.combined_non_max_suppression_4/combined_non_max_suppression/CombinedNonMaxSuppression"; "maskrcnn/tf.image.combined_non_max_suppression_4/combined_non_max_suppression/Const" -> "maskrcnn/tf.image.combined_non_max_suppression_4/combined_non_max_suppression/CombinedNonMaxSuppression"; "maskrcnn/tf.image.combined_non_max_suppression_4/combined_non_max_suppression/CombinedNonMaxSuppression/max_output_size_per_class" -> "maskrcnn/tf.image.combined_non_max_suppression_4/combined_non_max_suppression/CombinedNonMaxSuppression"; "maskrcnn/tf.image.combined_non_max_suppression_4/combined_non_max_suppression/CombinedNonMaxSuppression" -> "maskrcnn/tf.concat_6/concat"; -"maskrcnn/tf.image.combined_non_max_suppression_4/combined_non_max_suppression/CombinedNonMaxSuppression" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_4/FakeQuantWithMinMaxVars"; 
+"maskrcnn/tf.image.combined_non_max_suppression_4/combined_non_max_suppression/CombinedNonMaxSuppression" -> "maskrcnn/tf.concat_5/fake_quantize_I4/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.concat_6/concat/axis" -> "maskrcnn/tf.concat_6/concat"; "maskrcnn/tf.concat_6/concat" -> "maskrcnn/tf.math.top_k/TopKV2"; "maskrcnn/tf.math.top_k/TopKV2/k" -> "maskrcnn/tf.math.top_k/TopKV2"; @@ -12219,61 +12283,61 @@ args_0_1 -> "maskrcnn/image_info/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVar "maskrcnn/tf.ones/ones/Const" -> "maskrcnn/tf.ones/ones"; "maskrcnn/tf.ones/ones" -> "maskrcnn/tf.math.multiply_30/Mul"; "maskrcnn/tf.math.multiply_30/Mul" -> "maskrcnn/tf.stack_5/stack"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant/Abs/ReadVariableOp"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_1/Abs/ReadVariableOp"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_2/Abs/ReadVariableOp"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_3/Abs/ReadVariableOp"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_4/Abs/ReadVariableOp"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant/Abs"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant/Abs" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant/add"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant/add/y" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant/add"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant/add" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant/add_1"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant/ReadVariableOp"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_1/ReadVariableOp"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars/ReadVariableOp"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_2/ReadVariableOp"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars/ReadVariableOp"; 
-"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_3/ReadVariableOp"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars/ReadVariableOp"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_4/ReadVariableOp"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_4/FakeQuantWithMinMaxVars/ReadVariableOp"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant/ReadVariableOp" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant/add_1"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant/add_1" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_5/concat"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_1/Abs/ReadVariableOp" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_1/Abs"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_1/Abs" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_1/add"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_1/add/y" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_1/add"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_1/add" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_1/add_1"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_1/ReadVariableOp" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_1/add_1"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_1/add_1" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_5/concat"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_2/Abs/ReadVariableOp" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_2/Abs"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_2/Abs" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_2/add"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_2/add/y" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_2/add"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_2/add" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_2/add_1"; 
-"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_2/ReadVariableOp" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_2/add_1"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_2/add_1" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_5/concat"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_3/Abs/ReadVariableOp" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_3/Abs"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_3/Abs" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_3/add"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_3/add/y" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_3/add"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_3/add" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_3/add_1"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_3/ReadVariableOp" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_3/add_1"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_3/add_1" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_5/concat"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_4/Abs/ReadVariableOp" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_4/Abs"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_4/Abs" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_4/add"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_4/add/y" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_4/add"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_4/add" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_4/add_1"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_4/ReadVariableOp" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_4/add_1"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_4/add_1" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_4/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_4/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_4/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_4/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_5/concat"; +"maskrcnn/tf.concat_5/fake_quantize_I0/AsymmQuant/Abs/ReadVariableOp/resource" -> 
"maskrcnn/tf.concat_5/fake_quantize_I0/AsymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.concat_5/fake_quantize_I0/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.concat_5/fake_quantize_I0/AsymmQuant/Abs"; +"maskrcnn/tf.concat_5/fake_quantize_I0/AsymmQuant/Abs" -> "maskrcnn/tf.concat_5/fake_quantize_I0/AsymmQuant/add"; +"maskrcnn/tf.concat_5/fake_quantize_I0/AsymmQuant/add/y" -> "maskrcnn/tf.concat_5/fake_quantize_I0/AsymmQuant/add"; +"maskrcnn/tf.concat_5/fake_quantize_I0/AsymmQuant/add" -> "maskrcnn/tf.concat_5/fake_quantize_I0/AsymmQuant/add_1"; +"maskrcnn/tf.concat_5/fake_quantize_I0/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.concat_5/fake_quantize_I0/AsymmQuant/ReadVariableOp"; +"maskrcnn/tf.concat_5/fake_quantize_I0/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.concat_5/fake_quantize_I0/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"maskrcnn/tf.concat_5/fake_quantize_I0/AsymmQuant/ReadVariableOp" -> "maskrcnn/tf.concat_5/fake_quantize_I0/AsymmQuant/add_1"; +"maskrcnn/tf.concat_5/fake_quantize_I0/AsymmQuant/add_1" -> "maskrcnn/tf.concat_5/fake_quantize_I0/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.concat_5/fake_quantize_I0/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.concat_5/fake_quantize_I0/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.concat_5/fake_quantize_I0/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_5/concat"; +"maskrcnn/tf.concat_5/fake_quantize_I1/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.concat_5/fake_quantize_I1/AsymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.concat_5/fake_quantize_I1/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.concat_5/fake_quantize_I1/AsymmQuant/Abs"; +"maskrcnn/tf.concat_5/fake_quantize_I1/AsymmQuant/Abs" -> "maskrcnn/tf.concat_5/fake_quantize_I1/AsymmQuant/add"; +"maskrcnn/tf.concat_5/fake_quantize_I1/AsymmQuant/add/y" -> "maskrcnn/tf.concat_5/fake_quantize_I1/AsymmQuant/add"; +"maskrcnn/tf.concat_5/fake_quantize_I1/AsymmQuant/add" -> "maskrcnn/tf.concat_5/fake_quantize_I1/AsymmQuant/add_1"; +"maskrcnn/tf.concat_5/fake_quantize_I1/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.concat_5/fake_quantize_I1/AsymmQuant/ReadVariableOp"; +"maskrcnn/tf.concat_5/fake_quantize_I1/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.concat_5/fake_quantize_I1/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"maskrcnn/tf.concat_5/fake_quantize_I1/AsymmQuant/ReadVariableOp" -> "maskrcnn/tf.concat_5/fake_quantize_I1/AsymmQuant/add_1"; +"maskrcnn/tf.concat_5/fake_quantize_I1/AsymmQuant/add_1" -> "maskrcnn/tf.concat_5/fake_quantize_I1/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.concat_5/fake_quantize_I1/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.concat_5/fake_quantize_I1/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.concat_5/fake_quantize_I1/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_5/concat"; +"maskrcnn/tf.concat_5/fake_quantize_I2/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.concat_5/fake_quantize_I2/AsymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.concat_5/fake_quantize_I2/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.concat_5/fake_quantize_I2/AsymmQuant/Abs"; +"maskrcnn/tf.concat_5/fake_quantize_I2/AsymmQuant/Abs" -> "maskrcnn/tf.concat_5/fake_quantize_I2/AsymmQuant/add"; +"maskrcnn/tf.concat_5/fake_quantize_I2/AsymmQuant/add/y" -> "maskrcnn/tf.concat_5/fake_quantize_I2/AsymmQuant/add"; +"maskrcnn/tf.concat_5/fake_quantize_I2/AsymmQuant/add" -> "maskrcnn/tf.concat_5/fake_quantize_I2/AsymmQuant/add_1"; 
+"maskrcnn/tf.concat_5/fake_quantize_I2/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.concat_5/fake_quantize_I2/AsymmQuant/ReadVariableOp"; +"maskrcnn/tf.concat_5/fake_quantize_I2/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.concat_5/fake_quantize_I2/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"maskrcnn/tf.concat_5/fake_quantize_I2/AsymmQuant/ReadVariableOp" -> "maskrcnn/tf.concat_5/fake_quantize_I2/AsymmQuant/add_1"; +"maskrcnn/tf.concat_5/fake_quantize_I2/AsymmQuant/add_1" -> "maskrcnn/tf.concat_5/fake_quantize_I2/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.concat_5/fake_quantize_I2/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.concat_5/fake_quantize_I2/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.concat_5/fake_quantize_I2/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_5/concat"; +"maskrcnn/tf.concat_5/fake_quantize_I3/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.concat_5/fake_quantize_I3/AsymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.concat_5/fake_quantize_I3/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.concat_5/fake_quantize_I3/AsymmQuant/Abs"; +"maskrcnn/tf.concat_5/fake_quantize_I3/AsymmQuant/Abs" -> "maskrcnn/tf.concat_5/fake_quantize_I3/AsymmQuant/add"; +"maskrcnn/tf.concat_5/fake_quantize_I3/AsymmQuant/add/y" -> "maskrcnn/tf.concat_5/fake_quantize_I3/AsymmQuant/add"; +"maskrcnn/tf.concat_5/fake_quantize_I3/AsymmQuant/add" -> "maskrcnn/tf.concat_5/fake_quantize_I3/AsymmQuant/add_1"; +"maskrcnn/tf.concat_5/fake_quantize_I3/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.concat_5/fake_quantize_I3/AsymmQuant/ReadVariableOp"; +"maskrcnn/tf.concat_5/fake_quantize_I3/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.concat_5/fake_quantize_I3/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"maskrcnn/tf.concat_5/fake_quantize_I3/AsymmQuant/ReadVariableOp" -> "maskrcnn/tf.concat_5/fake_quantize_I3/AsymmQuant/add_1"; +"maskrcnn/tf.concat_5/fake_quantize_I3/AsymmQuant/add_1" -> "maskrcnn/tf.concat_5/fake_quantize_I3/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.concat_5/fake_quantize_I3/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.concat_5/fake_quantize_I3/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.concat_5/fake_quantize_I3/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_5/concat"; +"maskrcnn/tf.concat_5/fake_quantize_I4/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.concat_5/fake_quantize_I4/AsymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.concat_5/fake_quantize_I4/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.concat_5/fake_quantize_I4/AsymmQuant/Abs"; +"maskrcnn/tf.concat_5/fake_quantize_I4/AsymmQuant/Abs" -> "maskrcnn/tf.concat_5/fake_quantize_I4/AsymmQuant/add"; +"maskrcnn/tf.concat_5/fake_quantize_I4/AsymmQuant/add/y" -> "maskrcnn/tf.concat_5/fake_quantize_I4/AsymmQuant/add"; +"maskrcnn/tf.concat_5/fake_quantize_I4/AsymmQuant/add" -> "maskrcnn/tf.concat_5/fake_quantize_I4/AsymmQuant/add_1"; +"maskrcnn/tf.concat_5/fake_quantize_I4/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.concat_5/fake_quantize_I4/AsymmQuant/ReadVariableOp"; +"maskrcnn/tf.concat_5/fake_quantize_I4/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.concat_5/fake_quantize_I4/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"maskrcnn/tf.concat_5/fake_quantize_I4/AsymmQuant/ReadVariableOp" -> "maskrcnn/tf.concat_5/fake_quantize_I4/AsymmQuant/add_1"; +"maskrcnn/tf.concat_5/fake_quantize_I4/AsymmQuant/add_1" -> 
"maskrcnn/tf.concat_5/fake_quantize_I4/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.concat_5/fake_quantize_I4/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.concat_5/fake_quantize_I4/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.concat_5/fake_quantize_I4/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_5/concat"; "maskrcnn/tf.concat_5/concat/axis" -> "maskrcnn/tf.concat_5/concat"; "maskrcnn/tf.concat_5/concat" -> "maskrcnn/tf.compat.v1.gather_nd/GatherNd"; "maskrcnn/tf.stack_5/stack" -> "maskrcnn/tf.compat.v1.gather_nd/GatherNd"; @@ -12411,46 +12475,46 @@ args_0_1 -> "maskrcnn/image_info/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVar "maskrcnn/tf.math.pow/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.math.truediv_3/truediv"; "maskrcnn/tf.expand_dims_12/ExpandDims/dim" -> "maskrcnn/tf.expand_dims_12/ExpandDims"; "maskrcnn/tf.expand_dims_12/ExpandDims" -> "maskrcnn/tf.math.truediv_1/truediv"; -"maskrcnn/tf.math.truediv_2/truediv" -> "maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.truediv_3/truediv" -> "maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.truediv_1/truediv" -> "maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.truediv_2/truediv" -> "maskrcnn/tf.math.truediv_2/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.truediv_3/truediv" -> "maskrcnn/tf.math.truediv_3/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.truediv_1/truediv" -> "maskrcnn/tf.math.truediv_1/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_42/Sub/y" -> "maskrcnn/tf.math.subtract_42/Sub"; "maskrcnn/tf.math.subtract_42/Sub" -> "maskrcnn/tf.cast_23/Cast"; "maskrcnn/tf.math.subtract_42/Sub" -> "maskrcnn/tf.compat.v1.gather_1/GatherV2"; "maskrcnn/tf.math.subtract_42/Sub" -> "maskrcnn/tf.compat.v1.gather/GatherV2"; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp"; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant_1/Abs/ReadVariableOp"; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant_2/Abs/ReadVariableOp"; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant/Abs"; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant/Abs" -> "maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant/add"; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant/add/y" -> "maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant/add"; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant/add" -> "maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant/add_1"; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp"; 
-"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant_1/ReadVariableOp"; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars/ReadVariableOp"; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant_2/ReadVariableOp"; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars/ReadVariableOp"; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp" -> "maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant/add_1"; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant/add_1" -> "maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.expand_dims_14/ExpandDims"; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant_1/Abs/ReadVariableOp" -> "maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant_1/Abs"; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant_1/Abs" -> "maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant_1/add"; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant_1/add/y" -> "maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant_1/add"; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant_1/add" -> "maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant_1/add_1"; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant_1/ReadVariableOp" -> "maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant_1/add_1"; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant_1/add_1" -> "maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.expand_dims_13/ExpandDims"; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant_2/Abs/ReadVariableOp" -> "maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant_2/Abs"; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant_2/Abs" -> "maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant_2/add"; 
-"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant_2/add/y" -> "maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant_2/add"; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant_2/add" -> "maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant_2/add_1"; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant_2/ReadVariableOp" -> "maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant_2/add_1"; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant_2/add_1" -> "maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.__operators__.getitem_27/strided_slice"; +"maskrcnn/tf.math.truediv_2/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.truediv_2/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.truediv_2/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.truediv_2/fake_quantize/AsymmQuant/Abs"; +"maskrcnn/tf.math.truediv_2/fake_quantize/AsymmQuant/Abs" -> "maskrcnn/tf.math.truediv_2/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.truediv_2/fake_quantize/AsymmQuant/add/y" -> "maskrcnn/tf.math.truediv_2/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.truediv_2/fake_quantize/AsymmQuant/add" -> "maskrcnn/tf.math.truediv_2/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.truediv_2/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.truediv_2/fake_quantize/AsymmQuant/ReadVariableOp"; +"maskrcnn/tf.math.truediv_2/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.truediv_2/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"maskrcnn/tf.math.truediv_2/fake_quantize/AsymmQuant/ReadVariableOp" -> "maskrcnn/tf.math.truediv_2/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.truediv_2/fake_quantize/AsymmQuant/add_1" -> "maskrcnn/tf.math.truediv_2/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.truediv_2/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.truediv_2/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.truediv_2/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.expand_dims_14/ExpandDims"; +"maskrcnn/tf.math.truediv_3/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.truediv_3/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.truediv_3/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.truediv_3/fake_quantize/AsymmQuant/Abs"; +"maskrcnn/tf.math.truediv_3/fake_quantize/AsymmQuant/Abs" -> "maskrcnn/tf.math.truediv_3/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.truediv_3/fake_quantize/AsymmQuant/add/y" -> "maskrcnn/tf.math.truediv_3/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.truediv_3/fake_quantize/AsymmQuant/add" -> "maskrcnn/tf.math.truediv_3/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.truediv_3/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.truediv_3/fake_quantize/AsymmQuant/ReadVariableOp"; +"maskrcnn/tf.math.truediv_3/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> 
"maskrcnn/tf.math.truediv_3/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"maskrcnn/tf.math.truediv_3/fake_quantize/AsymmQuant/ReadVariableOp" -> "maskrcnn/tf.math.truediv_3/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.truediv_3/fake_quantize/AsymmQuant/add_1" -> "maskrcnn/tf.math.truediv_3/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.truediv_3/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.truediv_3/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.truediv_3/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.expand_dims_13/ExpandDims"; +"maskrcnn/tf.math.truediv_1/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.truediv_1/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.truediv_1/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.truediv_1/fake_quantize/AsymmQuant/Abs"; +"maskrcnn/tf.math.truediv_1/fake_quantize/AsymmQuant/Abs" -> "maskrcnn/tf.math.truediv_1/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.truediv_1/fake_quantize/AsymmQuant/add/y" -> "maskrcnn/tf.math.truediv_1/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.truediv_1/fake_quantize/AsymmQuant/add" -> "maskrcnn/tf.math.truediv_1/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.truediv_1/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.truediv_1/fake_quantize/AsymmQuant/ReadVariableOp"; +"maskrcnn/tf.math.truediv_1/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.truediv_1/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"maskrcnn/tf.math.truediv_1/fake_quantize/AsymmQuant/ReadVariableOp" -> "maskrcnn/tf.math.truediv_1/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.truediv_1/fake_quantize/AsymmQuant/add_1" -> "maskrcnn/tf.math.truediv_1/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.truediv_1/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.truediv_1/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.truediv_1/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.__operators__.getitem_27/strided_slice"; "maskrcnn/tf.cast_23/Cast" -> "maskrcnn/tf.cast_23/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.__operators__.getitem_27/strided_slice/stack" -> "maskrcnn/tf.__operators__.getitem_27/strided_slice"; "maskrcnn/tf.__operators__.getitem_27/strided_slice/stack_1" -> "maskrcnn/tf.__operators__.getitem_27/strided_slice"; @@ -12782,9 +12846,9 @@ args_0_1 -> "maskrcnn/image_info/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVar "maskrcnn/tf.math.multiply_32/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.multiply_32/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.multiply_32/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.math.truediv_6/truediv"; "maskrcnn/tf.math.subtract_44/Sub/y" -> "maskrcnn/tf.math.subtract_44/Sub"; -"maskrcnn/tf.math.subtract_44/Sub" -> "maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_44/Sub" -> "maskrcnn/tf.math.subtract_44/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_43/Sub/y" -> "maskrcnn/tf.math.subtract_43/Sub"; -"maskrcnn/tf.math.subtract_43/Sub" -> "maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars"; 
+"maskrcnn/tf.math.subtract_43/Sub" -> "maskrcnn/tf.math.subtract_43/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.__operators__.getitem_54/strided_slice/stack" -> "maskrcnn/tf.__operators__.getitem_54/strided_slice"; "maskrcnn/tf.__operators__.getitem_54/strided_slice/stack_1" -> "maskrcnn/tf.__operators__.getitem_54/strided_slice"; "maskrcnn/tf.__operators__.getitem_54/strided_slice/stack_2" -> "maskrcnn/tf.__operators__.getitem_54/strided_slice"; @@ -12904,28 +12968,28 @@ args_0_1 -> "maskrcnn/image_info/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVar "maskrcnn/tf.math.truediv_8/truediv" -> "maskrcnn/tf.math.truediv_8/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.truediv_6/truediv/y" -> "maskrcnn/tf.math.truediv_6/truediv"; "maskrcnn/tf.math.truediv_6/truediv" -> "maskrcnn/tf.math.truediv_6/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp"; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/AsymmQuant_1/Abs/ReadVariableOp"; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/AsymmQuant/Abs"; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/AsymmQuant/Abs" -> "maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/AsymmQuant/add"; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/AsymmQuant/add/y" -> "maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/AsymmQuant/add"; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/AsymmQuant/add" -> "maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/AsymmQuant/add_1"; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp"; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/AsymmQuant_1/ReadVariableOp"; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars/ReadVariableOp"; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp" -> "maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/AsymmQuant/add_1"; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/AsymmQuant/add_1" -> "maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.expand_dims_16/ExpandDims"; 
-"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/AsymmQuant_1/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/AsymmQuant_1/Abs"; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/AsymmQuant_1/Abs" -> "maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/AsymmQuant_1/add"; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/AsymmQuant_1/add/y" -> "maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/AsymmQuant_1/add"; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/AsymmQuant_1/add" -> "maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/AsymmQuant_1/add_1"; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/AsymmQuant_1/ReadVariableOp" -> "maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/AsymmQuant_1/add_1"; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/AsymmQuant_1/add_1" -> "maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.expand_dims_15/ExpandDims"; +"maskrcnn/tf.math.subtract_44/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_44/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.subtract_44/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_44/fake_quantize/AsymmQuant/Abs"; +"maskrcnn/tf.math.subtract_44/fake_quantize/AsymmQuant/Abs" -> "maskrcnn/tf.math.subtract_44/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.subtract_44/fake_quantize/AsymmQuant/add/y" -> "maskrcnn/tf.math.subtract_44/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.subtract_44/fake_quantize/AsymmQuant/add" -> "maskrcnn/tf.math.subtract_44/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.subtract_44/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_44/fake_quantize/AsymmQuant/ReadVariableOp"; +"maskrcnn/tf.math.subtract_44/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_44/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"maskrcnn/tf.math.subtract_44/fake_quantize/AsymmQuant/ReadVariableOp" -> "maskrcnn/tf.math.subtract_44/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.subtract_44/fake_quantize/AsymmQuant/add_1" -> "maskrcnn/tf.math.subtract_44/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_44/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.subtract_44/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_44/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.expand_dims_16/ExpandDims"; +"maskrcnn/tf.math.subtract_43/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_43/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.subtract_43/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_43/fake_quantize/AsymmQuant/Abs"; +"maskrcnn/tf.math.subtract_43/fake_quantize/AsymmQuant/Abs" -> "maskrcnn/tf.math.subtract_43/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.subtract_43/fake_quantize/AsymmQuant/add/y" -> 
"maskrcnn/tf.math.subtract_43/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.subtract_43/fake_quantize/AsymmQuant/add" -> "maskrcnn/tf.math.subtract_43/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.subtract_43/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_43/fake_quantize/AsymmQuant/ReadVariableOp"; +"maskrcnn/tf.math.subtract_43/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_43/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"maskrcnn/tf.math.subtract_43/fake_quantize/AsymmQuant/ReadVariableOp" -> "maskrcnn/tf.math.subtract_43/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.subtract_43/fake_quantize/AsymmQuant/add_1" -> "maskrcnn/tf.math.subtract_43/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_43/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.subtract_43/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_43/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.expand_dims_15/ExpandDims"; "maskrcnn/tf.__operators__.add_53/AddV2" -> "maskrcnn/tf.__operators__.add_53/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.__operators__.add_51/AddV2" -> "maskrcnn/tf.__operators__.add_51/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.__operators__.add_49/AddV2" -> "maskrcnn/tf.__operators__.add_49/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; @@ -13996,42 +14060,36 @@ args_0_1 -> "maskrcnn/image_info/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVar "maskrcnn/tf.math.multiply_57/fake_quantize/AsymmQuant/add_1" -> "maskrcnn/tf.math.multiply_57/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.multiply_57/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.multiply_57/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.multiply_57/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.math.subtract_53/Sub"; -"maskrcnn/tf.math.subtract_54/Sub" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_53/Sub" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_54/Sub" -> "maskrcnn/tf.math.subtract_54/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_53/Sub" -> "maskrcnn/tf.math.subtract_53/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.__operators__.getitem_60/strided_slice/stack" -> "maskrcnn/tf.__operators__.getitem_60/strided_slice"; "maskrcnn/tf.__operators__.getitem_60/strided_slice/stack_1" -> "maskrcnn/tf.__operators__.getitem_60/strided_slice"; "maskrcnn/tf.__operators__.getitem_60/strided_slice/stack_2" -> "maskrcnn/tf.__operators__.getitem_60/strided_slice"; "maskrcnn/tf.__operators__.getitem_60/strided_slice" -> "maskrcnn/tf.unstack_5/unstack"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_1/Abs/ReadVariableOp"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" -> 
"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_2/Abs/ReadVariableOp"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_3/Abs/ReadVariableOp"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant/Abs"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant/Abs" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant/add"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant/add/y" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant/add"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant/add" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant/add_1"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_1/ReadVariableOp"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars/ReadVariableOp"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_2/ReadVariableOp"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars/ReadVariableOp"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_3/ReadVariableOp"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars/ReadVariableOp"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant/add_1"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant/add_1" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.__operators__.add_65/AddV2"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_13/concat"; 
-"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_1/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_1/Abs"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_1/Abs" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_1/add"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_1/add/y" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_1/add"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_1/add" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_1/add_1"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_1/ReadVariableOp" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_1/add_1"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_1/add_1" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.__operators__.add_66/AddV2"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_13/concat"; +"maskrcnn/tf.math.subtract_53/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_53/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.subtract_53/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_53/fake_quantize/AsymmQuant/Abs"; +"maskrcnn/tf.math.subtract_53/fake_quantize/AsymmQuant/Abs" -> "maskrcnn/tf.math.subtract_53/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.subtract_53/fake_quantize/AsymmQuant/add/y" -> "maskrcnn/tf.math.subtract_53/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.subtract_53/fake_quantize/AsymmQuant/add" -> "maskrcnn/tf.math.subtract_53/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.subtract_53/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_53/fake_quantize/AsymmQuant/ReadVariableOp"; +"maskrcnn/tf.math.subtract_53/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_53/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"maskrcnn/tf.math.subtract_53/fake_quantize/AsymmQuant/ReadVariableOp" -> "maskrcnn/tf.math.subtract_53/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.subtract_53/fake_quantize/AsymmQuant/add_1" -> "maskrcnn/tf.math.subtract_53/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_53/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.subtract_53/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_53/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.__operators__.add_65/AddV2"; +"maskrcnn/tf.math.subtract_53/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_13/concat"; +"maskrcnn/tf.math.subtract_54/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_54/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.subtract_54/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> 
"maskrcnn/tf.math.subtract_54/fake_quantize/AsymmQuant/Abs"; +"maskrcnn/tf.math.subtract_54/fake_quantize/AsymmQuant/Abs" -> "maskrcnn/tf.math.subtract_54/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.subtract_54/fake_quantize/AsymmQuant/add/y" -> "maskrcnn/tf.math.subtract_54/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.subtract_54/fake_quantize/AsymmQuant/add" -> "maskrcnn/tf.math.subtract_54/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.subtract_54/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_54/fake_quantize/AsymmQuant/ReadVariableOp"; +"maskrcnn/tf.math.subtract_54/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_54/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"maskrcnn/tf.math.subtract_54/fake_quantize/AsymmQuant/ReadVariableOp" -> "maskrcnn/tf.math.subtract_54/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.subtract_54/fake_quantize/AsymmQuant/add_1" -> "maskrcnn/tf.math.subtract_54/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_54/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.subtract_54/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_54/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.__operators__.add_66/AddV2"; +"maskrcnn/tf.math.subtract_54/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_13/concat"; "maskrcnn/tf.__operators__.add_66/AddV2" -> "maskrcnn/tf.__operators__.add_66/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.__operators__.add_65/AddV2" -> "maskrcnn/tf.__operators__.add_65/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.unstack_5/unstack" -> "maskrcnn/tf.math.subtract_60/Sub"; @@ -14069,9 +14127,9 @@ args_0_1 -> "maskrcnn/image_info/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVar "maskrcnn/tf.math.subtract_57/Sub/y" -> "maskrcnn/tf.math.subtract_57/Sub"; "maskrcnn/tf.math.subtract_57/Sub" -> "maskrcnn/tf.math.subtract_57/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_56/Sub/y" -> "maskrcnn/tf.math.subtract_56/Sub"; -"maskrcnn/tf.math.subtract_56/Sub" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_56/Sub" -> "maskrcnn/tf.math.subtract_56/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_55/Sub/y" -> "maskrcnn/tf.math.subtract_55/Sub"; -"maskrcnn/tf.math.subtract_55/Sub" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_55/Sub" -> "maskrcnn/tf.math.subtract_55/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_57/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_57/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; "maskrcnn/tf.math.subtract_57/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_57/fake_quantize/AsymmQuant/Abs"; "maskrcnn/tf.math.subtract_57/fake_quantize/AsymmQuant/Abs" -> "maskrcnn/tf.math.subtract_57/fake_quantize/AsymmQuant/add"; @@ -14116,22 +14174,28 @@ args_0_1 -> "maskrcnn/image_info/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVar "maskrcnn/tf.math.subtract_60/fake_quantize/AsymmQuant/add_1" -> "maskrcnn/tf.math.subtract_60/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; 
"maskrcnn/tf.math.subtract_60/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.subtract_60/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_60/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.stack_15/stack"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_2/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_2/Abs"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_2/Abs" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_2/add"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_2/add/y" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_2/add"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_2/add" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_2/add_1"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_2/ReadVariableOp" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_2/add_1"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_2/add_1" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_13/concat"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_3/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_3/Abs"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_3/Abs" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_3/add"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_3/add/y" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_3/add"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_3/add" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_3/add_1"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_3/ReadVariableOp" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_3/add_1"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_3/add_1" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_13/concat"; +"maskrcnn/tf.math.subtract_55/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_55/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.subtract_55/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_55/fake_quantize/AsymmQuant/Abs"; +"maskrcnn/tf.math.subtract_55/fake_quantize/AsymmQuant/Abs" -> 
"maskrcnn/tf.math.subtract_55/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.subtract_55/fake_quantize/AsymmQuant/add/y" -> "maskrcnn/tf.math.subtract_55/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.subtract_55/fake_quantize/AsymmQuant/add" -> "maskrcnn/tf.math.subtract_55/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.subtract_55/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_55/fake_quantize/AsymmQuant/ReadVariableOp"; +"maskrcnn/tf.math.subtract_55/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_55/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"maskrcnn/tf.math.subtract_55/fake_quantize/AsymmQuant/ReadVariableOp" -> "maskrcnn/tf.math.subtract_55/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.subtract_55/fake_quantize/AsymmQuant/add_1" -> "maskrcnn/tf.math.subtract_55/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_55/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.subtract_55/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_55/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_13/concat"; +"maskrcnn/tf.math.subtract_56/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_56/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.subtract_56/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_56/fake_quantize/AsymmQuant/Abs"; +"maskrcnn/tf.math.subtract_56/fake_quantize/AsymmQuant/Abs" -> "maskrcnn/tf.math.subtract_56/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.subtract_56/fake_quantize/AsymmQuant/add/y" -> "maskrcnn/tf.math.subtract_56/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.subtract_56/fake_quantize/AsymmQuant/add" -> "maskrcnn/tf.math.subtract_56/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.subtract_56/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_56/fake_quantize/AsymmQuant/ReadVariableOp"; +"maskrcnn/tf.math.subtract_56/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_56/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"maskrcnn/tf.math.subtract_56/fake_quantize/AsymmQuant/ReadVariableOp" -> "maskrcnn/tf.math.subtract_56/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.subtract_56/fake_quantize/AsymmQuant/add_1" -> "maskrcnn/tf.math.subtract_56/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_56/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.subtract_56/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_56/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_13/concat"; "maskrcnn/tf.concat_13/concat/axis" -> "maskrcnn/tf.concat_13/concat"; "maskrcnn/tf.concat_13/concat" -> "maskrcnn/tf.math.minimum_22/Minimum"; "maskrcnn/tf.stack_15/stack" -> "maskrcnn/tf.math.minimum_22/Minimum"; @@ -14347,46 +14411,46 @@ args_0_1 -> "maskrcnn/image_info/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVar "maskrcnn/tf.math.pow_2/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.math.truediv_28/truediv"; "maskrcnn/tf.expand_dims_24/ExpandDims/dim" -> "maskrcnn/tf.expand_dims_24/ExpandDims"; "maskrcnn/tf.expand_dims_24/ExpandDims" -> "maskrcnn/tf.math.truediv_26/truediv"; -"maskrcnn/tf.math.truediv_27/truediv" -> 
"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.truediv_28/truediv" -> "maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.truediv_26/truediv" -> "maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.truediv_27/truediv" -> "maskrcnn/tf.math.truediv_27/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.truediv_28/truediv" -> "maskrcnn/tf.math.truediv_28/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.truediv_26/truediv" -> "maskrcnn/tf.math.truediv_26/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_64/Sub/y" -> "maskrcnn/tf.math.subtract_64/Sub"; "maskrcnn/tf.math.subtract_64/Sub" -> "maskrcnn/tf.cast_37/Cast"; "maskrcnn/tf.math.subtract_64/Sub" -> "maskrcnn/tf.compat.v1.gather_4/GatherV2"; "maskrcnn/tf.math.subtract_64/Sub" -> "maskrcnn/tf.compat.v1.gather_3/GatherV2"; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp"; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant_1/Abs/ReadVariableOp"; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant_2/Abs/ReadVariableOp"; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant/Abs"; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant/Abs" -> "maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant/add"; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant/add/y" -> "maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant/add"; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant/add" -> "maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant/add_1"; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp"; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant_1/ReadVariableOp"; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars/ReadVariableOp"; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant_2/ReadVariableOp"; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> 
"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars/ReadVariableOp"; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp" -> "maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant/add_1"; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant/add_1" -> "maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.expand_dims_26/ExpandDims"; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant_1/Abs/ReadVariableOp" -> "maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant_1/Abs"; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant_1/Abs" -> "maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant_1/add"; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant_1/add/y" -> "maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant_1/add"; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant_1/add" -> "maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant_1/add_1"; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant_1/ReadVariableOp" -> "maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant_1/add_1"; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant_1/add_1" -> "maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.expand_dims_25/ExpandDims"; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant_2/Abs/ReadVariableOp" -> "maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant_2/Abs"; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant_2/Abs" -> "maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant_2/add"; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant_2/add/y" -> "maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant_2/add"; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant_2/add" -> "maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant_2/add_1"; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant_2/ReadVariableOp" -> "maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant_2/add_1"; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant_2/add_1" -> "maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars"; 
-"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.__operators__.getitem_76/strided_slice"; +"maskrcnn/tf.math.truediv_27/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.truediv_27/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.truediv_27/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.truediv_27/fake_quantize/AsymmQuant/Abs"; +"maskrcnn/tf.math.truediv_27/fake_quantize/AsymmQuant/Abs" -> "maskrcnn/tf.math.truediv_27/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.truediv_27/fake_quantize/AsymmQuant/add/y" -> "maskrcnn/tf.math.truediv_27/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.truediv_27/fake_quantize/AsymmQuant/add" -> "maskrcnn/tf.math.truediv_27/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.truediv_27/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.truediv_27/fake_quantize/AsymmQuant/ReadVariableOp"; +"maskrcnn/tf.math.truediv_27/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.truediv_27/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"maskrcnn/tf.math.truediv_27/fake_quantize/AsymmQuant/ReadVariableOp" -> "maskrcnn/tf.math.truediv_27/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.truediv_27/fake_quantize/AsymmQuant/add_1" -> "maskrcnn/tf.math.truediv_27/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.truediv_27/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.truediv_27/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.truediv_27/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.expand_dims_26/ExpandDims"; +"maskrcnn/tf.math.truediv_28/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.truediv_28/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.truediv_28/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.truediv_28/fake_quantize/AsymmQuant/Abs"; +"maskrcnn/tf.math.truediv_28/fake_quantize/AsymmQuant/Abs" -> "maskrcnn/tf.math.truediv_28/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.truediv_28/fake_quantize/AsymmQuant/add/y" -> "maskrcnn/tf.math.truediv_28/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.truediv_28/fake_quantize/AsymmQuant/add" -> "maskrcnn/tf.math.truediv_28/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.truediv_28/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.truediv_28/fake_quantize/AsymmQuant/ReadVariableOp"; +"maskrcnn/tf.math.truediv_28/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.truediv_28/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"maskrcnn/tf.math.truediv_28/fake_quantize/AsymmQuant/ReadVariableOp" -> "maskrcnn/tf.math.truediv_28/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.truediv_28/fake_quantize/AsymmQuant/add_1" -> "maskrcnn/tf.math.truediv_28/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.truediv_28/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.truediv_28/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.truediv_28/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.expand_dims_25/ExpandDims"; +"maskrcnn/tf.math.truediv_26/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.truediv_26/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; 
+"maskrcnn/tf.math.truediv_26/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.truediv_26/fake_quantize/AsymmQuant/Abs"; +"maskrcnn/tf.math.truediv_26/fake_quantize/AsymmQuant/Abs" -> "maskrcnn/tf.math.truediv_26/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.truediv_26/fake_quantize/AsymmQuant/add/y" -> "maskrcnn/tf.math.truediv_26/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.truediv_26/fake_quantize/AsymmQuant/add" -> "maskrcnn/tf.math.truediv_26/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.truediv_26/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.truediv_26/fake_quantize/AsymmQuant/ReadVariableOp"; +"maskrcnn/tf.math.truediv_26/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.truediv_26/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"maskrcnn/tf.math.truediv_26/fake_quantize/AsymmQuant/ReadVariableOp" -> "maskrcnn/tf.math.truediv_26/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.truediv_26/fake_quantize/AsymmQuant/add_1" -> "maskrcnn/tf.math.truediv_26/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.truediv_26/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.truediv_26/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.truediv_26/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.__operators__.getitem_76/strided_slice"; "maskrcnn/tf.cast_37/Cast" -> "maskrcnn/tf.cast_37/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.__operators__.getitem_76/strided_slice/stack" -> "maskrcnn/tf.__operators__.getitem_76/strided_slice"; "maskrcnn/tf.__operators__.getitem_76/strided_slice/stack_1" -> "maskrcnn/tf.__operators__.getitem_76/strided_slice"; @@ -14984,9 +15048,9 @@ args_0_1 -> "maskrcnn/image_info/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVar "maskrcnn/tf.math.multiply_61/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.multiply_61/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.multiply_61/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.math.truediv_31/truediv"; "maskrcnn/tf.math.subtract_66/Sub/y" -> "maskrcnn/tf.math.subtract_66/Sub"; -"maskrcnn/tf.math.subtract_66/Sub" -> "maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_66/Sub" -> "maskrcnn/tf.math.subtract_66/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_65/Sub/y" -> "maskrcnn/tf.math.subtract_65/Sub"; -"maskrcnn/tf.math.subtract_65/Sub" -> "maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_65/Sub" -> "maskrcnn/tf.math.subtract_65/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.__operators__.getitem_131/strided_slice/stack" -> "maskrcnn/tf.__operators__.getitem_131/strided_slice"; "maskrcnn/tf.__operators__.getitem_131/strided_slice/stack_1" -> "maskrcnn/tf.__operators__.getitem_131/strided_slice"; "maskrcnn/tf.__operators__.getitem_131/strided_slice/stack_2" -> "maskrcnn/tf.__operators__.getitem_131/strided_slice"; @@ -15225,28 +15289,28 @@ args_0_1 -> "maskrcnn/image_info/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVar "maskrcnn/tf.math.truediv_33/truediv" -> "maskrcnn/tf.math.truediv_33/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.truediv_31/truediv/y" -> "maskrcnn/tf.math.truediv_31/truediv"; 
"maskrcnn/tf.math.truediv_31/truediv" -> "maskrcnn/tf.math.truediv_31/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp"; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/AsymmQuant_1/Abs/ReadVariableOp"; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/AsymmQuant/Abs"; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/AsymmQuant/Abs" -> "maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/AsymmQuant/add"; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/AsymmQuant/add/y" -> "maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/AsymmQuant/add"; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/AsymmQuant/add" -> "maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/AsymmQuant/add_1"; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp"; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/AsymmQuant_1/ReadVariableOp"; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars/ReadVariableOp"; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp" -> "maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/AsymmQuant/add_1"; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/AsymmQuant/add_1" -> "maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.expand_dims_28/ExpandDims"; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/AsymmQuant_1/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/AsymmQuant_1/Abs"; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/AsymmQuant_1/Abs" -> "maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/AsymmQuant_1/add"; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/AsymmQuant_1/add/y" -> "maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/AsymmQuant_1/add"; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/AsymmQuant_1/add" -> "maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/AsymmQuant_1/add_1"; 
-"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/AsymmQuant_1/ReadVariableOp" -> "maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/AsymmQuant_1/add_1"; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/AsymmQuant_1/add_1" -> "maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.expand_dims_27/ExpandDims"; +"maskrcnn/tf.math.subtract_66/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_66/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.subtract_66/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_66/fake_quantize/AsymmQuant/Abs"; +"maskrcnn/tf.math.subtract_66/fake_quantize/AsymmQuant/Abs" -> "maskrcnn/tf.math.subtract_66/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.subtract_66/fake_quantize/AsymmQuant/add/y" -> "maskrcnn/tf.math.subtract_66/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.subtract_66/fake_quantize/AsymmQuant/add" -> "maskrcnn/tf.math.subtract_66/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.subtract_66/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_66/fake_quantize/AsymmQuant/ReadVariableOp"; +"maskrcnn/tf.math.subtract_66/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_66/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"maskrcnn/tf.math.subtract_66/fake_quantize/AsymmQuant/ReadVariableOp" -> "maskrcnn/tf.math.subtract_66/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.subtract_66/fake_quantize/AsymmQuant/add_1" -> "maskrcnn/tf.math.subtract_66/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_66/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.subtract_66/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_66/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.expand_dims_28/ExpandDims"; +"maskrcnn/tf.math.subtract_65/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_65/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.subtract_65/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_65/fake_quantize/AsymmQuant/Abs"; +"maskrcnn/tf.math.subtract_65/fake_quantize/AsymmQuant/Abs" -> "maskrcnn/tf.math.subtract_65/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.subtract_65/fake_quantize/AsymmQuant/add/y" -> "maskrcnn/tf.math.subtract_65/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.subtract_65/fake_quantize/AsymmQuant/add" -> "maskrcnn/tf.math.subtract_65/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.subtract_65/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_65/fake_quantize/AsymmQuant/ReadVariableOp"; +"maskrcnn/tf.math.subtract_65/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_65/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"maskrcnn/tf.math.subtract_65/fake_quantize/AsymmQuant/ReadVariableOp" -> "maskrcnn/tf.math.subtract_65/fake_quantize/AsymmQuant/add_1"; 
+"maskrcnn/tf.math.subtract_65/fake_quantize/AsymmQuant/add_1" -> "maskrcnn/tf.math.subtract_65/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_65/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.subtract_65/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_65/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.expand_dims_27/ExpandDims"; "maskrcnn/tf.__operators__.add_96/AddV2" -> "maskrcnn/tf.__operators__.add_96/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.__operators__.add_94/AddV2" -> "maskrcnn/tf.__operators__.add_94/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.__operators__.add_92/AddV2" -> "maskrcnn/tf.__operators__.add_92/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; diff --git a/tests/tensorflow/data/reference_graphs/2.5/quantized/w_sym_ch_a_asym_t/retinanet.pb b/tests/tensorflow/data/reference_graphs/2.5/quantized/w_sym_ch_a_asym_t/retinanet.pb index 81aa9856f1b..e5159260643 100644 --- a/tests/tensorflow/data/reference_graphs/2.5/quantized/w_sym_ch_a_asym_t/retinanet.pb +++ b/tests/tensorflow/data/reference_graphs/2.5/quantized/w_sym_ch_a_asym_t/retinanet.pb @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:0143e323f9b5ae05ff4a20a57731743f34ae5016072983a945104df556a40f61 -size 1407579 +oid sha256:58de305616ea297cd8e1eedbd0a33c19023e529f43e91867dc1cacdfb8e736c2 +size 1412126 diff --git a/tests/tensorflow/data/reference_graphs/2.5/quantized/w_sym_ch_a_asym_t/retinanet_quantize_outputs.pb b/tests/tensorflow/data/reference_graphs/2.5/quantized/w_sym_ch_a_asym_t/retinanet_quantize_outputs.pb index c7a638af22b..0b003f4594a 100644 --- a/tests/tensorflow/data/reference_graphs/2.5/quantized/w_sym_ch_a_asym_t/retinanet_quantize_outputs.pb +++ b/tests/tensorflow/data/reference_graphs/2.5/quantized/w_sym_ch_a_asym_t/retinanet_quantize_outputs.pb @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:ad2926a457d91fa077fef1d9cb80dcbdd8b2817cd691111b14485c32431252d9 -size 1450700 +oid sha256:0467aff14d664581cbdfd79a8ceb311f0d67b950d5872fdff2266719003d5992 +size 1455247 diff --git a/tests/tensorflow/data/reference_graphs/2.5/quantized/w_sym_t_a_sym_t/densenet121.pb b/tests/tensorflow/data/reference_graphs/2.5/quantized/w_sym_t_a_sym_t/densenet121.pb index a7d91095cef..73a0dc04395 100644 --- a/tests/tensorflow/data/reference_graphs/2.5/quantized/w_sym_t_a_sym_t/densenet121.pb +++ b/tests/tensorflow/data/reference_graphs/2.5/quantized/w_sym_t_a_sym_t/densenet121.pb @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:573a0e1bd8822bc7dfa7247d90bd6374105dc3e96dfacb207f4fe23e11126084 -size 983957 +oid sha256:a7f19f2395b8262542522b5bfd204e615827e3449fdfa11c740c3a57c98dfd76 +size 985735 diff --git a/tests/tensorflow/data/reference_graphs/2.5/quantized/w_sym_t_a_sym_t/inception_v3.dot b/tests/tensorflow/data/reference_graphs/2.5/quantized/w_sym_t_a_sym_t/inception_v3.dot index 71d7bc015f9..ce32b42f399 100644 --- a/tests/tensorflow/data/reference_graphs/2.5/quantized/w_sym_t_a_sym_t/inception_v3.dot +++ b/tests/tensorflow/data/reference_graphs/2.5/quantized/w_sym_t_a_sym_t/inception_v3.dot @@ -2755,18 +2755,6 @@ args_0 [op=Placeholder]; "inception_v3/conv2d_87/SymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; "inception_v3/conv2d_87/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; "inception_v3/conv2d_87/Conv2D" [op=Conv2D]; 
-"inception_v3/conv2d_85/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; -"inception_v3/conv2d_85/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/conv2d_85/SymmQuant/Abs" [op=Abs]; -"inception_v3/conv2d_85/SymmQuant/add/y" [op=Const]; -"inception_v3/conv2d_85/SymmQuant/add" [op=AddV2]; -"inception_v3/conv2d_85/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; -"inception_v3/conv2d_85/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/conv2d_85/SymmQuant/mul" [op=Mul]; -"inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp/resource" [op=Placeholder]; -"inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"inception_v3/conv2d_85/Conv2D" [op=Conv2D]; "inception_v3/conv2d_93/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; "inception_v3/conv2d_93/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; "inception_v3/conv2d_93/SymmQuant/Abs" [op=Abs]; @@ -2811,14 +2799,18 @@ args_0 [op=Placeholder]; "inception_v3/batch_normalization_87/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; "inception_v3/batch_normalization_87/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; "inception_v3/batch_normalization_87/FusedBatchNormV3" [op=FusedBatchNormV3]; -"inception_v3/batch_normalization_85/scale" [op=Placeholder]; -"inception_v3/batch_normalization_85/ReadVariableOp/resource" [op=Placeholder]; -"inception_v3/batch_normalization_85/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; -"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; -"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; -"inception_v3/batch_normalization_85/FusedBatchNormV3" [op=FusedBatchNormV3]; +"inception_v3/conv2d_85/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/conv2d_85/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/conv2d_85/SymmQuant/Abs" [op=Abs]; +"inception_v3/conv2d_85/SymmQuant/add/y" [op=Const]; +"inception_v3/conv2d_85/SymmQuant/add" [op=AddV2]; +"inception_v3/conv2d_85/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/conv2d_85/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/conv2d_85/SymmQuant/mul" [op=Mul]; +"inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"inception_v3/conv2d_85/Conv2D" [op=Conv2D]; "inception_v3/batch_normalization_93/scale" [op=Placeholder]; "inception_v3/batch_normalization_93/ReadVariableOp/resource" [op=Placeholder]; "inception_v3/batch_normalization_93/ReadVariableOp" [op=ReadVariableOp]; @@ -2831,56 +2823,74 @@ args_0 [op=Placeholder]; "inception_v3/activation_91/Relu" [op=Relu]; "inception_v3/activation_88/Relu" [op=Relu]; "inception_v3/activation_87/Relu" [op=Relu]; -"inception_v3/activation_85/Relu" [op=Relu]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; 
-"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant/Abs" [op=Abs]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant/add/y" [op=Const]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant/add" [op=AddV2]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant/mul" [op=Mul]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"inception_v3/batch_normalization_85/scale" [op=Placeholder]; +"inception_v3/batch_normalization_85/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/batch_normalization_85/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; +"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; +"inception_v3/batch_normalization_85/FusedBatchNormV3" [op=FusedBatchNormV3]; "inception_v3/activation_93/Relu" [op=Relu]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_1/Abs/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_1/Abs" [op=Abs]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_1/add/y" [op=Const]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_1/add" [op=AddV2]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_1/mul/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_1/mul" [op=Mul]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_2/Abs/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_2/Abs" [op=Abs]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_2/add/y" [op=Const]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_2/add" [op=AddV2]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_2/mul/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_2/mul" [op=Mul]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_2/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_3/Abs/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_3/Abs" [op=Abs]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_3/add/y" [op=Const]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_3/add" [op=AddV2]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_3/mul/ReadVariableOp" [op=ReadVariableOp]; 
-"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_3/mul" [op=Mul]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_3/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_4/Abs/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_4/Abs" [op=Abs]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_4/add/y" [op=Const]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_4/add" [op=AddV2]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_4/mul/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_4/mul" [op=Mul]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_4/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"inception_v3/activation_91/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/activation_91/fake_quantize/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_91/fake_quantize/SymmQuant/Abs" [op=Abs]; +"inception_v3/activation_91/fake_quantize/SymmQuant/add/y" [op=Const]; +"inception_v3/activation_91/fake_quantize/SymmQuant/add" [op=AddV2]; +"inception_v3/activation_91/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/activation_91/fake_quantize/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_91/fake_quantize/SymmQuant/mul" [op=Mul]; +"inception_v3/activation_91/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"inception_v3/activation_92/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/activation_92/fake_quantize/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_92/fake_quantize/SymmQuant/Abs" [op=Abs]; +"inception_v3/activation_92/fake_quantize/SymmQuant/add/y" [op=Const]; +"inception_v3/activation_92/fake_quantize/SymmQuant/add" [op=AddV2]; +"inception_v3/activation_92/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/activation_92/fake_quantize/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_92/fake_quantize/SymmQuant/mul" [op=Mul]; +"inception_v3/activation_92/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"inception_v3/activation_87/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/activation_87/fake_quantize/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_87/fake_quantize/SymmQuant/Abs" [op=Abs]; +"inception_v3/activation_87/fake_quantize/SymmQuant/add/y" [op=Const]; +"inception_v3/activation_87/fake_quantize/SymmQuant/add" [op=AddV2]; +"inception_v3/activation_87/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/activation_87/fake_quantize/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_87/fake_quantize/SymmQuant/mul" [op=Mul]; +"inception_v3/activation_87/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"inception_v3/activation_88/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/activation_88/fake_quantize/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_88/fake_quantize/SymmQuant/Abs" [op=Abs]; 
+"inception_v3/activation_88/fake_quantize/SymmQuant/add/y" [op=Const]; +"inception_v3/activation_88/fake_quantize/SymmQuant/add" [op=AddV2]; +"inception_v3/activation_88/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/activation_88/fake_quantize/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_88/fake_quantize/SymmQuant/mul" [op=Mul]; +"inception_v3/activation_88/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"inception_v3/activation_85/Relu" [op=Relu]; +"inception_v3/activation_85/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/activation_85/fake_quantize/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_85/fake_quantize/SymmQuant/Abs" [op=Abs]; +"inception_v3/activation_85/fake_quantize/SymmQuant/add/y" [op=Const]; +"inception_v3/activation_85/fake_quantize/SymmQuant/add" [op=AddV2]; +"inception_v3/activation_85/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/activation_85/fake_quantize/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_85/fake_quantize/SymmQuant/mul" [op=Mul]; +"inception_v3/activation_85/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; "inception_v3/mixed9_1/concat/axis" [op=Const]; "inception_v3/mixed9_1/concat" [op=ConcatV2]; "inception_v3/concatenate_1/concat/axis" [op=Const]; "inception_v3/concatenate_1/concat" [op=ConcatV2]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_5/Abs/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_5/Abs" [op=Abs]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_5/add/y" [op=Const]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_5/add" [op=AddV2]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_5/mul/ReadVariableOp" [op=ReadVariableOp]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_5/mul" [op=Mul]; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_5/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"inception_v3/activation_93/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/activation_93/fake_quantize/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_93/fake_quantize/SymmQuant/Abs" [op=Abs]; +"inception_v3/activation_93/fake_quantize/SymmQuant/add/y" [op=Const]; +"inception_v3/activation_93/fake_quantize/SymmQuant/add" [op=AddV2]; +"inception_v3/activation_93/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; +"inception_v3/activation_93/fake_quantize/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; +"inception_v3/activation_93/fake_quantize/SymmQuant/mul" [op=Mul]; +"inception_v3/activation_93/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; "inception_v3/mixed10/concat/axis" [op=Const]; "inception_v3/mixed10/concat" [op=ConcatV2]; "inception_v3/avg_pool/Mean/reduction_indices" [op=Const]; @@ -5952,19 +5962,6 @@ args_0 -> "inception_v3/input_1/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" "inception_v3/conv2d_87/SymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "inception_v3/conv2d_87/SymmQuant/FakeQuantWithMinMaxVars"; "inception_v3/conv2d_87/SymmQuant/FakeQuantWithMinMaxVars" -> "inception_v3/conv2d_87/Conv2D"; "inception_v3/conv2d_87/Conv2D" -> 
"inception_v3/batch_normalization_87/FusedBatchNormV3"; -"inception_v3/conv2d_85/SymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/conv2d_85/SymmQuant/Abs/ReadVariableOp"; -"inception_v3/conv2d_85/SymmQuant/Abs/ReadVariableOp" -> "inception_v3/conv2d_85/SymmQuant/Abs"; -"inception_v3/conv2d_85/SymmQuant/Abs" -> "inception_v3/conv2d_85/SymmQuant/add"; -"inception_v3/conv2d_85/SymmQuant/add/y" -> "inception_v3/conv2d_85/SymmQuant/add"; -"inception_v3/conv2d_85/SymmQuant/add" -> "inception_v3/conv2d_85/SymmQuant/mul"; -"inception_v3/conv2d_85/SymmQuant/add" -> "inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVars"; -"inception_v3/conv2d_85/SymmQuant/mul/ReadVariableOp/resource" -> "inception_v3/conv2d_85/SymmQuant/mul/ReadVariableOp"; -"inception_v3/conv2d_85/SymmQuant/mul/ReadVariableOp" -> "inception_v3/conv2d_85/SymmQuant/mul"; -"inception_v3/conv2d_85/SymmQuant/mul" -> "inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVars"; -"inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp/resource" -> "inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; -"inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVars"; -"inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVars" -> "inception_v3/conv2d_85/Conv2D"; -"inception_v3/conv2d_85/Conv2D" -> "inception_v3/batch_normalization_85/FusedBatchNormV3"; "inception_v3/conv2d_93/SymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/conv2d_93/SymmQuant/Abs/ReadVariableOp"; "inception_v3/conv2d_93/SymmQuant/Abs/ReadVariableOp" -> "inception_v3/conv2d_93/SymmQuant/Abs"; "inception_v3/conv2d_93/SymmQuant/Abs" -> "inception_v3/conv2d_93/SymmQuant/add"; @@ -6010,14 +6007,19 @@ args_0 -> "inception_v3/input_1/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" "inception_v3/batch_normalization_87/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_v3/batch_normalization_87/FusedBatchNormV3/ReadVariableOp_1"; "inception_v3/batch_normalization_87/FusedBatchNormV3/ReadVariableOp_1" -> "inception_v3/batch_normalization_87/FusedBatchNormV3"; "inception_v3/batch_normalization_87/FusedBatchNormV3" -> "inception_v3/activation_87/Relu"; -"inception_v3/batch_normalization_85/scale" -> "inception_v3/batch_normalization_85/FusedBatchNormV3"; -"inception_v3/batch_normalization_85/ReadVariableOp/resource" -> "inception_v3/batch_normalization_85/ReadVariableOp"; -"inception_v3/batch_normalization_85/ReadVariableOp" -> "inception_v3/batch_normalization_85/FusedBatchNormV3"; -"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp"; -"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp" -> "inception_v3/batch_normalization_85/FusedBatchNormV3"; -"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp_1"; -"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp_1" -> "inception_v3/batch_normalization_85/FusedBatchNormV3"; -"inception_v3/batch_normalization_85/FusedBatchNormV3" -> "inception_v3/activation_85/Relu"; +"inception_v3/conv2d_85/SymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/conv2d_85/SymmQuant/Abs/ReadVariableOp"; +"inception_v3/conv2d_85/SymmQuant/Abs/ReadVariableOp" -> "inception_v3/conv2d_85/SymmQuant/Abs"; +"inception_v3/conv2d_85/SymmQuant/Abs" -> "inception_v3/conv2d_85/SymmQuant/add"; 
+"inception_v3/conv2d_85/SymmQuant/add/y" -> "inception_v3/conv2d_85/SymmQuant/add"; +"inception_v3/conv2d_85/SymmQuant/add" -> "inception_v3/conv2d_85/SymmQuant/mul"; +"inception_v3/conv2d_85/SymmQuant/add" -> "inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/conv2d_85/SymmQuant/mul/ReadVariableOp/resource" -> "inception_v3/conv2d_85/SymmQuant/mul/ReadVariableOp"; +"inception_v3/conv2d_85/SymmQuant/mul/ReadVariableOp" -> "inception_v3/conv2d_85/SymmQuant/mul"; +"inception_v3/conv2d_85/SymmQuant/mul" -> "inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp/resource" -> "inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/conv2d_85/SymmQuant/FakeQuantWithMinMaxVars" -> "inception_v3/conv2d_85/Conv2D"; +"inception_v3/conv2d_85/Conv2D" -> "inception_v3/batch_normalization_85/FusedBatchNormV3"; "inception_v3/batch_normalization_93/scale" -> "inception_v3/batch_normalization_93/FusedBatchNormV3"; "inception_v3/batch_normalization_93/ReadVariableOp/resource" -> "inception_v3/batch_normalization_93/ReadVariableOp"; "inception_v3/batch_normalization_93/ReadVariableOp" -> "inception_v3/batch_normalization_93/FusedBatchNormV3"; @@ -6026,76 +6028,84 @@ args_0 -> "inception_v3/input_1/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" "inception_v3/batch_normalization_93/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_v3/batch_normalization_93/FusedBatchNormV3/ReadVariableOp_1"; "inception_v3/batch_normalization_93/FusedBatchNormV3/ReadVariableOp_1" -> "inception_v3/batch_normalization_93/FusedBatchNormV3"; "inception_v3/batch_normalization_93/FusedBatchNormV3" -> "inception_v3/activation_93/Relu"; -"inception_v3/activation_92/Relu" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_2/FakeQuantWithMinMaxVars"; -"inception_v3/activation_91/Relu" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars"; -"inception_v3/activation_88/Relu" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_4/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/Relu" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_3/FakeQuantWithMinMaxVars"; -"inception_v3/activation_85/Relu" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_1/Abs/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_2/Abs/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_3/Abs/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp/resource" -> 
"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_4/Abs/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_5/Abs/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant/Abs"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant/Abs" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant/add"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant/add/y" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant/add"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant/add" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant/mul"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant/add" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_1/mul/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_2/mul/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_3/mul/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_4/mul/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_5/mul/ReadVariableOp"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant/mul"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant/mul" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars" -> "inception_v3/mixed10/concat"; -"inception_v3/activation_93/Relu" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_5/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_1/Abs/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_1/Abs"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_1/Abs" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_1/add"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_1/add/y" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_1/add"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_1/add" -> 
"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_1/mul"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_1/add" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_1/mul/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_1/mul"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_1/mul" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars" -> "inception_v3/concatenate_1/concat"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_2/Abs/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_2/Abs"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_2/Abs" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_2/add"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_2/add/y" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_2/add"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_2/add" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_2/mul"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_2/add" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_2/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_2/mul/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_2/mul"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_2/mul" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_2/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_2/FakeQuantWithMinMaxVars" -> "inception_v3/concatenate_1/concat"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_3/Abs/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_3/Abs"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_3/Abs" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_3/add"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_3/add/y" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_3/add"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_3/add" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_3/mul"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_3/add" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_3/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_3/mul/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_3/mul"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_3/mul" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_3/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_3/FakeQuantWithMinMaxVars" -> "inception_v3/mixed9_1/concat"; 
-"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_4/Abs/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_4/Abs"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_4/Abs" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_4/add"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_4/add/y" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_4/add"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_4/add" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_4/mul"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_4/add" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_4/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_4/mul/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_4/mul"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_4/mul" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_4/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_4/FakeQuantWithMinMaxVars" -> "inception_v3/mixed9_1/concat"; +"inception_v3/activation_92/Relu" -> "inception_v3/activation_92/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_91/Relu" -> "inception_v3/activation_91/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_88/Relu" -> "inception_v3/activation_88/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_87/Relu" -> "inception_v3/activation_87/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/batch_normalization_85/scale" -> "inception_v3/batch_normalization_85/FusedBatchNormV3"; +"inception_v3/batch_normalization_85/ReadVariableOp/resource" -> "inception_v3/batch_normalization_85/ReadVariableOp"; +"inception_v3/batch_normalization_85/ReadVariableOp" -> "inception_v3/batch_normalization_85/FusedBatchNormV3"; +"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp/resource" -> "inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp"; +"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp" -> "inception_v3/batch_normalization_85/FusedBatchNormV3"; +"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp_1/resource" -> "inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp_1"; +"inception_v3/batch_normalization_85/FusedBatchNormV3/ReadVariableOp_1" -> "inception_v3/batch_normalization_85/FusedBatchNormV3"; +"inception_v3/batch_normalization_85/FusedBatchNormV3" -> "inception_v3/activation_85/Relu"; +"inception_v3/activation_93/Relu" -> "inception_v3/activation_93/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_91/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/activation_91/fake_quantize/SymmQuant/Abs/ReadVariableOp"; +"inception_v3/activation_91/fake_quantize/SymmQuant/Abs/ReadVariableOp" -> "inception_v3/activation_91/fake_quantize/SymmQuant/Abs"; +"inception_v3/activation_91/fake_quantize/SymmQuant/Abs" -> "inception_v3/activation_91/fake_quantize/SymmQuant/add"; +"inception_v3/activation_91/fake_quantize/SymmQuant/add/y" -> "inception_v3/activation_91/fake_quantize/SymmQuant/add"; +"inception_v3/activation_91/fake_quantize/SymmQuant/add" -> 
"inception_v3/activation_91/fake_quantize/SymmQuant/mul"; +"inception_v3/activation_91/fake_quantize/SymmQuant/add" -> "inception_v3/activation_91/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_91/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" -> "inception_v3/activation_91/fake_quantize/SymmQuant/mul/ReadVariableOp"; +"inception_v3/activation_91/fake_quantize/SymmQuant/mul/ReadVariableOp" -> "inception_v3/activation_91/fake_quantize/SymmQuant/mul"; +"inception_v3/activation_91/fake_quantize/SymmQuant/mul" -> "inception_v3/activation_91/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_91/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "inception_v3/concatenate_1/concat"; +"inception_v3/activation_92/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/activation_92/fake_quantize/SymmQuant/Abs/ReadVariableOp"; +"inception_v3/activation_92/fake_quantize/SymmQuant/Abs/ReadVariableOp" -> "inception_v3/activation_92/fake_quantize/SymmQuant/Abs"; +"inception_v3/activation_92/fake_quantize/SymmQuant/Abs" -> "inception_v3/activation_92/fake_quantize/SymmQuant/add"; +"inception_v3/activation_92/fake_quantize/SymmQuant/add/y" -> "inception_v3/activation_92/fake_quantize/SymmQuant/add"; +"inception_v3/activation_92/fake_quantize/SymmQuant/add" -> "inception_v3/activation_92/fake_quantize/SymmQuant/mul"; +"inception_v3/activation_92/fake_quantize/SymmQuant/add" -> "inception_v3/activation_92/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_92/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" -> "inception_v3/activation_92/fake_quantize/SymmQuant/mul/ReadVariableOp"; +"inception_v3/activation_92/fake_quantize/SymmQuant/mul/ReadVariableOp" -> "inception_v3/activation_92/fake_quantize/SymmQuant/mul"; +"inception_v3/activation_92/fake_quantize/SymmQuant/mul" -> "inception_v3/activation_92/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_92/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "inception_v3/concatenate_1/concat"; +"inception_v3/activation_87/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/SymmQuant/Abs/ReadVariableOp"; +"inception_v3/activation_87/fake_quantize/SymmQuant/Abs/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/SymmQuant/Abs"; +"inception_v3/activation_87/fake_quantize/SymmQuant/Abs" -> "inception_v3/activation_87/fake_quantize/SymmQuant/add"; +"inception_v3/activation_87/fake_quantize/SymmQuant/add/y" -> "inception_v3/activation_87/fake_quantize/SymmQuant/add"; +"inception_v3/activation_87/fake_quantize/SymmQuant/add" -> "inception_v3/activation_87/fake_quantize/SymmQuant/mul"; +"inception_v3/activation_87/fake_quantize/SymmQuant/add" -> "inception_v3/activation_87/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_87/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" -> "inception_v3/activation_87/fake_quantize/SymmQuant/mul/ReadVariableOp"; +"inception_v3/activation_87/fake_quantize/SymmQuant/mul/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/SymmQuant/mul"; +"inception_v3/activation_87/fake_quantize/SymmQuant/mul" -> "inception_v3/activation_87/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_87/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "inception_v3/mixed9_1/concat"; +"inception_v3/activation_88/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" -> 
"inception_v3/activation_88/fake_quantize/SymmQuant/Abs/ReadVariableOp"; +"inception_v3/activation_88/fake_quantize/SymmQuant/Abs/ReadVariableOp" -> "inception_v3/activation_88/fake_quantize/SymmQuant/Abs"; +"inception_v3/activation_88/fake_quantize/SymmQuant/Abs" -> "inception_v3/activation_88/fake_quantize/SymmQuant/add"; +"inception_v3/activation_88/fake_quantize/SymmQuant/add/y" -> "inception_v3/activation_88/fake_quantize/SymmQuant/add"; +"inception_v3/activation_88/fake_quantize/SymmQuant/add" -> "inception_v3/activation_88/fake_quantize/SymmQuant/mul"; +"inception_v3/activation_88/fake_quantize/SymmQuant/add" -> "inception_v3/activation_88/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_88/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" -> "inception_v3/activation_88/fake_quantize/SymmQuant/mul/ReadVariableOp"; +"inception_v3/activation_88/fake_quantize/SymmQuant/mul/ReadVariableOp" -> "inception_v3/activation_88/fake_quantize/SymmQuant/mul"; +"inception_v3/activation_88/fake_quantize/SymmQuant/mul" -> "inception_v3/activation_88/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_88/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "inception_v3/mixed9_1/concat"; +"inception_v3/activation_85/Relu" -> "inception_v3/activation_85/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_85/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/activation_85/fake_quantize/SymmQuant/Abs/ReadVariableOp"; +"inception_v3/activation_85/fake_quantize/SymmQuant/Abs/ReadVariableOp" -> "inception_v3/activation_85/fake_quantize/SymmQuant/Abs"; +"inception_v3/activation_85/fake_quantize/SymmQuant/Abs" -> "inception_v3/activation_85/fake_quantize/SymmQuant/add"; +"inception_v3/activation_85/fake_quantize/SymmQuant/add/y" -> "inception_v3/activation_85/fake_quantize/SymmQuant/add"; +"inception_v3/activation_85/fake_quantize/SymmQuant/add" -> "inception_v3/activation_85/fake_quantize/SymmQuant/mul"; +"inception_v3/activation_85/fake_quantize/SymmQuant/add" -> "inception_v3/activation_85/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_85/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" -> "inception_v3/activation_85/fake_quantize/SymmQuant/mul/ReadVariableOp"; +"inception_v3/activation_85/fake_quantize/SymmQuant/mul/ReadVariableOp" -> "inception_v3/activation_85/fake_quantize/SymmQuant/mul"; +"inception_v3/activation_85/fake_quantize/SymmQuant/mul" -> "inception_v3/activation_85/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_85/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "inception_v3/mixed10/concat"; "inception_v3/mixed9_1/concat/axis" -> "inception_v3/mixed9_1/concat"; "inception_v3/mixed9_1/concat" -> "inception_v3/mixed10/concat"; "inception_v3/concatenate_1/concat/axis" -> "inception_v3/concatenate_1/concat"; "inception_v3/concatenate_1/concat" -> "inception_v3/mixed10/concat"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_5/Abs/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_5/Abs"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_5/Abs" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_5/add"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_5/add/y" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_5/add"; 
-"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_5/add" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_5/mul"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_5/add" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_5/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_5/mul/ReadVariableOp" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_5/mul"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_5/mul" -> "inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_5/FakeQuantWithMinMaxVars"; -"inception_v3/activation_87/fake_quantize/unified_scale_group/SymmQuant_5/FakeQuantWithMinMaxVars" -> "inception_v3/mixed10/concat"; +"inception_v3/activation_93/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" -> "inception_v3/activation_93/fake_quantize/SymmQuant/Abs/ReadVariableOp"; +"inception_v3/activation_93/fake_quantize/SymmQuant/Abs/ReadVariableOp" -> "inception_v3/activation_93/fake_quantize/SymmQuant/Abs"; +"inception_v3/activation_93/fake_quantize/SymmQuant/Abs" -> "inception_v3/activation_93/fake_quantize/SymmQuant/add"; +"inception_v3/activation_93/fake_quantize/SymmQuant/add/y" -> "inception_v3/activation_93/fake_quantize/SymmQuant/add"; +"inception_v3/activation_93/fake_quantize/SymmQuant/add" -> "inception_v3/activation_93/fake_quantize/SymmQuant/mul"; +"inception_v3/activation_93/fake_quantize/SymmQuant/add" -> "inception_v3/activation_93/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_93/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" -> "inception_v3/activation_93/fake_quantize/SymmQuant/mul/ReadVariableOp"; +"inception_v3/activation_93/fake_quantize/SymmQuant/mul/ReadVariableOp" -> "inception_v3/activation_93/fake_quantize/SymmQuant/mul"; +"inception_v3/activation_93/fake_quantize/SymmQuant/mul" -> "inception_v3/activation_93/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"inception_v3/activation_93/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "inception_v3/mixed10/concat"; "inception_v3/mixed10/concat/axis" -> "inception_v3/mixed10/concat"; "inception_v3/mixed10/concat" -> "inception_v3/avg_pool/Mean"; "inception_v3/avg_pool/Mean/reduction_indices" -> "inception_v3/avg_pool/Mean"; diff --git a/tests/tensorflow/data/reference_graphs/2.5/quantized/w_sym_t_a_sym_t/mask_rcnn.dot b/tests/tensorflow/data/reference_graphs/2.5/quantized/w_sym_t_a_sym_t/mask_rcnn.dot index 07d3e4ecb2f..3bcb5b40d29 100644 --- a/tests/tensorflow/data/reference_graphs/2.5/quantized/w_sym_t_a_sym_t/mask_rcnn.dot +++ b/tests/tensorflow/data/reference_graphs/2.5/quantized/w_sym_t_a_sym_t/mask_rcnn.dot @@ -2124,43 +2124,51 @@ args_0_1 [op=Placeholder]; "maskrcnn/p2-bn/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; "maskrcnn/p2-bn/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; "maskrcnn/p2-bn/FusedBatchNormV3" [op=FusedBatchNormV3]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant/Abs" [op=Abs]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant/add/y" [op=Const]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant/add" [op=AddV2]; 
-"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant/mul" [op=Mul]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_1/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_1/Abs" [op=Abs]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_1/add/y" [op=Const]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_1/add" [op=AddV2]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_1/mul/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_1/mul" [op=Mul]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_2/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_2/Abs" [op=Abs]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_2/add/y" [op=Const]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_2/add" [op=AddV2]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_2/mul/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_2/mul" [op=Mul]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_2/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_3/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_3/Abs" [op=Abs]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_3/add/y" [op=Const]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_3/add" [op=AddV2]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_3/mul/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_3/mul" [op=Mul]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_3/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_4/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_4/Abs" [op=Abs]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_4/add/y" [op=Const]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_4/add" [op=AddV2]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_4/mul/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_4/mul" [op=Mul]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_4/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/p6-bn/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/p6-bn/fake_quantize/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/p6-bn/fake_quantize/SymmQuant/Abs" [op=Abs]; +"maskrcnn/p6-bn/fake_quantize/SymmQuant/add/y" [op=Const]; +"maskrcnn/p6-bn/fake_quantize/SymmQuant/add" [op=AddV2]; +"maskrcnn/p6-bn/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/p6-bn/fake_quantize/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/p6-bn/fake_quantize/SymmQuant/mul" [op=Mul]; 
+"maskrcnn/p6-bn/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/p5-bn/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/p5-bn/fake_quantize/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/p5-bn/fake_quantize/SymmQuant/Abs" [op=Abs]; +"maskrcnn/p5-bn/fake_quantize/SymmQuant/add/y" [op=Const]; +"maskrcnn/p5-bn/fake_quantize/SymmQuant/add" [op=AddV2]; +"maskrcnn/p5-bn/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/p5-bn/fake_quantize/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/p5-bn/fake_quantize/SymmQuant/mul" [op=Mul]; +"maskrcnn/p5-bn/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/p4-bn/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/p4-bn/fake_quantize/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/p4-bn/fake_quantize/SymmQuant/Abs" [op=Abs]; +"maskrcnn/p4-bn/fake_quantize/SymmQuant/add/y" [op=Const]; +"maskrcnn/p4-bn/fake_quantize/SymmQuant/add" [op=AddV2]; +"maskrcnn/p4-bn/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/p4-bn/fake_quantize/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/p4-bn/fake_quantize/SymmQuant/mul" [op=Mul]; +"maskrcnn/p4-bn/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/p3-bn/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/p3-bn/fake_quantize/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/p3-bn/fake_quantize/SymmQuant/Abs" [op=Abs]; +"maskrcnn/p3-bn/fake_quantize/SymmQuant/add/y" [op=Const]; +"maskrcnn/p3-bn/fake_quantize/SymmQuant/add" [op=AddV2]; +"maskrcnn/p3-bn/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/p3-bn/fake_quantize/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/p3-bn/fake_quantize/SymmQuant/mul" [op=Mul]; +"maskrcnn/p3-bn/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/p2-bn/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/p2-bn/fake_quantize/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/p2-bn/fake_quantize/SymmQuant/Abs" [op=Abs]; +"maskrcnn/p2-bn/fake_quantize/SymmQuant/add/y" [op=Const]; +"maskrcnn/p2-bn/fake_quantize/SymmQuant/add" [op=AddV2]; +"maskrcnn/p2-bn/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/p2-bn/fake_quantize/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/p2-bn/fake_quantize/SymmQuant/mul" [op=Mul]; +"maskrcnn/p2-bn/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; "maskrcnn/rpn/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; "maskrcnn/rpn/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; "maskrcnn/rpn/SymmQuant/Abs" [op=Abs]; @@ -3039,86 +3047,96 @@ args_0_1 [op=Placeholder]; "maskrcnn/tf.math.subtract/Sub" [op=Sub]; "maskrcnn/tf.expand_dims/ExpandDims/dim" [op=Const]; "maskrcnn/tf.expand_dims/ExpandDims" [op=ExpandDims]; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant/Abs" [op=Abs]; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant/add/y" 
[op=Const]; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant/add" [op=AddV2]; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant/mul" [op=Mul]; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_1/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_1/Abs" [op=Abs]; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_1/add/y" [op=Const]; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_1/add" [op=AddV2]; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_1/mul/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_1/mul" [op=Mul]; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant/Abs" [op=Abs]; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant/add/y" [op=Const]; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant/add" [op=AddV2]; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant/mul" [op=Mul]; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_1/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_1/Abs" [op=Abs]; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_1/add/y" [op=Const]; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_1/add" [op=AddV2]; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_1/mul/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_1/mul" [op=Mul]; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant/Abs" [op=Abs]; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant/add/y" [op=Const]; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant/add" [op=AddV2]; 
-"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant/mul" [op=Mul]; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_1/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_1/Abs" [op=Abs]; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_1/add/y" [op=Const]; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_1/add" [op=AddV2]; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_1/mul/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_1/mul" [op=Mul]; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant/Abs" [op=Abs]; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant/add/y" [op=Const]; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant/add" [op=AddV2]; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant/mul" [op=Mul]; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_1/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_1/Abs" [op=Abs]; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_1/add/y" [op=Const]; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_1/add" [op=AddV2]; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_1/mul/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_1/mul" [op=Mul]; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant/Abs" [op=Abs]; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant/add/y" [op=Const]; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant/add" [op=AddV2]; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; 
-"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant/mul" [op=Mul]; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_1/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_1/Abs" [op=Abs]; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_1/add/y" [op=Const]; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_1/add" [op=AddV2]; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_1/mul/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_1/mul" [op=Mul]; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.math.subtract_32/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_32/fake_quantize/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_32/fake_quantize/SymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.subtract_32/fake_quantize/SymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.subtract_32/fake_quantize/SymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.subtract_32/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_32/fake_quantize/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_32/fake_quantize/SymmQuant/mul" [op=Mul]; +"maskrcnn/tf.math.subtract_32/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.math.subtract_33/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_33/fake_quantize/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_33/fake_quantize/SymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.subtract_33/fake_quantize/SymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.subtract_33/fake_quantize/SymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.subtract_33/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_33/fake_quantize/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_33/fake_quantize/SymmQuant/mul" [op=Mul]; +"maskrcnn/tf.math.subtract_33/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.math.subtract_24/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_24/fake_quantize/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_24/fake_quantize/SymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.subtract_24/fake_quantize/SymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.subtract_24/fake_quantize/SymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.subtract_24/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_24/fake_quantize/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_24/fake_quantize/SymmQuant/mul" [op=Mul]; +"maskrcnn/tf.math.subtract_24/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.math.subtract_25/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; 
+"maskrcnn/tf.math.subtract_25/fake_quantize/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_25/fake_quantize/SymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.subtract_25/fake_quantize/SymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.subtract_25/fake_quantize/SymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.subtract_25/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_25/fake_quantize/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_25/fake_quantize/SymmQuant/mul" [op=Mul]; +"maskrcnn/tf.math.subtract_25/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.math.subtract_16/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_16/fake_quantize/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_16/fake_quantize/SymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.subtract_16/fake_quantize/SymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.subtract_16/fake_quantize/SymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.subtract_16/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_16/fake_quantize/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_16/fake_quantize/SymmQuant/mul" [op=Mul]; +"maskrcnn/tf.math.subtract_16/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.math.subtract_17/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_17/fake_quantize/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_17/fake_quantize/SymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.subtract_17/fake_quantize/SymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.subtract_17/fake_quantize/SymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.subtract_17/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_17/fake_quantize/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_17/fake_quantize/SymmQuant/mul" [op=Mul]; +"maskrcnn/tf.math.subtract_17/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.math.subtract_8/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_8/fake_quantize/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_8/fake_quantize/SymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.subtract_8/fake_quantize/SymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.subtract_8/fake_quantize/SymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.subtract_8/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_8/fake_quantize/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_8/fake_quantize/SymmQuant/mul" [op=Mul]; +"maskrcnn/tf.math.subtract_8/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.math.subtract_9/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_9/fake_quantize/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_9/fake_quantize/SymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.subtract_9/fake_quantize/SymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.subtract_9/fake_quantize/SymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.subtract_9/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" 
[op=Placeholder]; +"maskrcnn/tf.math.subtract_9/fake_quantize/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_9/fake_quantize/SymmQuant/mul" [op=Mul]; +"maskrcnn/tf.math.subtract_9/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.math.subtract/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract/fake_quantize/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract/fake_quantize/SymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.subtract/fake_quantize/SymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.subtract/fake_quantize/SymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.subtract/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract/fake_quantize/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract/fake_quantize/SymmQuant/mul" [op=Mul]; +"maskrcnn/tf.math.subtract/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.math.subtract_1/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_1/fake_quantize/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_1/fake_quantize/SymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.subtract_1/fake_quantize/SymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.subtract_1/fake_quantize/SymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.subtract_1/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_1/fake_quantize/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_1/fake_quantize/SymmQuant/mul" [op=Mul]; +"maskrcnn/tf.math.subtract_1/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; "maskrcnn/tf.__operators__.add_38/AddV2" [op=AddV2]; "maskrcnn/tf.__operators__.add_37/AddV2" [op=AddV2]; "maskrcnn/tf.__operators__.add_34/AddV2" [op=AddV2]; @@ -3320,20 +3338,24 @@ args_0_1 [op=Placeholder]; "maskrcnn/tf.math.subtract_39/fake_quantize/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; "maskrcnn/tf.math.subtract_39/fake_quantize/SymmQuant/mul" [op=Mul]; "maskrcnn/tf.math.subtract_39/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_2/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_2/Abs" [op=Abs]; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_2/add/y" [op=Const]; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_2/add" [op=AddV2]; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_2/mul/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_2/mul" [op=Mul]; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_2/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_3/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_3/Abs" [op=Abs]; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_3/add/y" [op=Const]; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_3/add" [op=AddV2]; 
-"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_3/mul/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_3/mul" [op=Mul]; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_3/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.math.subtract_34/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_34/fake_quantize/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_34/fake_quantize/SymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.subtract_34/fake_quantize/SymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.subtract_34/fake_quantize/SymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.subtract_34/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_34/fake_quantize/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_34/fake_quantize/SymmQuant/mul" [op=Mul]; +"maskrcnn/tf.math.subtract_34/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.math.subtract_35/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_35/fake_quantize/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_35/fake_quantize/SymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.subtract_35/fake_quantize/SymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.subtract_35/fake_quantize/SymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.subtract_35/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_35/fake_quantize/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_35/fake_quantize/SymmQuant/mul" [op=Mul]; +"maskrcnn/tf.math.subtract_35/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; "maskrcnn/tf.math.subtract_28/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; "maskrcnn/tf.math.subtract_28/fake_quantize/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; "maskrcnn/tf.math.subtract_28/fake_quantize/SymmQuant/Abs" [op=Abs]; @@ -3370,20 +3392,24 @@ args_0_1 [op=Placeholder]; "maskrcnn/tf.math.subtract_31/fake_quantize/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; "maskrcnn/tf.math.subtract_31/fake_quantize/SymmQuant/mul" [op=Mul]; "maskrcnn/tf.math.subtract_31/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_2/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_2/Abs" [op=Abs]; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_2/add/y" [op=Const]; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_2/add" [op=AddV2]; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_2/mul/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_2/mul" [op=Mul]; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_2/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_3/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_3/Abs" [op=Abs]; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_3/add/y" 
[op=Const]; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_3/add" [op=AddV2]; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_3/mul/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_3/mul" [op=Mul]; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_3/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.math.subtract_26/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_26/fake_quantize/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_26/fake_quantize/SymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.subtract_26/fake_quantize/SymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.subtract_26/fake_quantize/SymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.subtract_26/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_26/fake_quantize/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_26/fake_quantize/SymmQuant/mul" [op=Mul]; +"maskrcnn/tf.math.subtract_26/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.math.subtract_27/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_27/fake_quantize/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_27/fake_quantize/SymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.subtract_27/fake_quantize/SymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.subtract_27/fake_quantize/SymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.subtract_27/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_27/fake_quantize/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_27/fake_quantize/SymmQuant/mul" [op=Mul]; +"maskrcnn/tf.math.subtract_27/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; "maskrcnn/tf.math.subtract_20/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; "maskrcnn/tf.math.subtract_20/fake_quantize/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; "maskrcnn/tf.math.subtract_20/fake_quantize/SymmQuant/Abs" [op=Abs]; @@ -3420,20 +3446,24 @@ args_0_1 [op=Placeholder]; "maskrcnn/tf.math.subtract_23/fake_quantize/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; "maskrcnn/tf.math.subtract_23/fake_quantize/SymmQuant/mul" [op=Mul]; "maskrcnn/tf.math.subtract_23/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_2/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_2/Abs" [op=Abs]; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_2/add/y" [op=Const]; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_2/add" [op=AddV2]; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_2/mul/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_2/mul" [op=Mul]; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_2/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_3/Abs/ReadVariableOp" [op=ReadVariableOp]; 
-"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_3/Abs" [op=Abs]; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_3/add/y" [op=Const]; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_3/add" [op=AddV2]; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_3/mul/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_3/mul" [op=Mul]; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_3/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.math.subtract_18/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_18/fake_quantize/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_18/fake_quantize/SymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.subtract_18/fake_quantize/SymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.subtract_18/fake_quantize/SymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.subtract_18/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_18/fake_quantize/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_18/fake_quantize/SymmQuant/mul" [op=Mul]; +"maskrcnn/tf.math.subtract_18/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.math.subtract_19/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_19/fake_quantize/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_19/fake_quantize/SymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.subtract_19/fake_quantize/SymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.subtract_19/fake_quantize/SymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.subtract_19/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_19/fake_quantize/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_19/fake_quantize/SymmQuant/mul" [op=Mul]; +"maskrcnn/tf.math.subtract_19/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; "maskrcnn/tf.math.subtract_12/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; "maskrcnn/tf.math.subtract_12/fake_quantize/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; "maskrcnn/tf.math.subtract_12/fake_quantize/SymmQuant/Abs" [op=Abs]; @@ -3470,20 +3500,24 @@ args_0_1 [op=Placeholder]; "maskrcnn/tf.math.subtract_15/fake_quantize/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; "maskrcnn/tf.math.subtract_15/fake_quantize/SymmQuant/mul" [op=Mul]; "maskrcnn/tf.math.subtract_15/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_2/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_2/Abs" [op=Abs]; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_2/add/y" [op=Const]; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_2/add" [op=AddV2]; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_2/mul/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_2/mul" [op=Mul]; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_2/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; 
-"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_3/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_3/Abs" [op=Abs]; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_3/add/y" [op=Const]; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_3/add" [op=AddV2]; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_3/mul/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_3/mul" [op=Mul]; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_3/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.math.subtract_10/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_10/fake_quantize/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_10/fake_quantize/SymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.subtract_10/fake_quantize/SymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.subtract_10/fake_quantize/SymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.subtract_10/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_10/fake_quantize/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_10/fake_quantize/SymmQuant/mul" [op=Mul]; +"maskrcnn/tf.math.subtract_10/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.math.subtract_11/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_11/fake_quantize/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_11/fake_quantize/SymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.subtract_11/fake_quantize/SymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.subtract_11/fake_quantize/SymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.subtract_11/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_11/fake_quantize/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_11/fake_quantize/SymmQuant/mul" [op=Mul]; +"maskrcnn/tf.math.subtract_11/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; "maskrcnn/tf.math.subtract_4/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; "maskrcnn/tf.math.subtract_4/fake_quantize/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; "maskrcnn/tf.math.subtract_4/fake_quantize/SymmQuant/Abs" [op=Abs]; @@ -3520,20 +3554,24 @@ args_0_1 [op=Placeholder]; "maskrcnn/tf.math.subtract_7/fake_quantize/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; "maskrcnn/tf.math.subtract_7/fake_quantize/SymmQuant/mul" [op=Mul]; "maskrcnn/tf.math.subtract_7/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_2/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_2/Abs" [op=Abs]; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_2/add/y" [op=Const]; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_2/add" [op=AddV2]; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_2/mul/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_2/mul" [op=Mul]; 
-"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_2/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_3/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_3/Abs" [op=Abs]; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_3/add/y" [op=Const]; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_3/add" [op=AddV2]; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_3/mul/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_3/mul" [op=Mul]; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_3/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.math.subtract_2/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_2/fake_quantize/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_2/fake_quantize/SymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.subtract_2/fake_quantize/SymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.subtract_2/fake_quantize/SymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.subtract_2/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_2/fake_quantize/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_2/fake_quantize/SymmQuant/mul" [op=Mul]; +"maskrcnn/tf.math.subtract_2/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.math.subtract_3/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_3/fake_quantize/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_3/fake_quantize/SymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.subtract_3/fake_quantize/SymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.subtract_3/fake_quantize/SymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.subtract_3/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_3/fake_quantize/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_3/fake_quantize/SymmQuant/mul" [op=Mul]; +"maskrcnn/tf.math.subtract_3/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; "maskrcnn/tf.concat_4/concat/axis" [op=Const]; "maskrcnn/tf.concat_4/concat" [op=ConcatV2]; "maskrcnn/tf.stack_4/stack" [op=Pack]; @@ -3753,43 +3791,51 @@ args_0_1 [op=Placeholder]; "maskrcnn/tf.ones/ones/Const" [op=Const]; "maskrcnn/tf.ones/ones" [op=Fill]; "maskrcnn/tf.math.multiply_30/Mul" [op=Mul]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant/Abs" [op=Abs]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant/add/y" [op=Const]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant/add" [op=AddV2]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant/mul" [op=Mul]; 
-"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_1/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_1/Abs" [op=Abs]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_1/add/y" [op=Const]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_1/add" [op=AddV2]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_1/mul/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_1/mul" [op=Mul]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_2/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_2/Abs" [op=Abs]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_2/add/y" [op=Const]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_2/add" [op=AddV2]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_2/mul/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_2/mul" [op=Mul]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_2/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_3/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_3/Abs" [op=Abs]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_3/add/y" [op=Const]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_3/add" [op=AddV2]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_3/mul/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_3/mul" [op=Mul]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_3/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_4/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_4/Abs" [op=Abs]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_4/add/y" [op=Const]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_4/add" [op=AddV2]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_4/mul/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_4/mul" [op=Mul]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_4/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.concat_5/fake_quantize_I0/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.concat_5/fake_quantize_I0/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.concat_5/fake_quantize_I0/SymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.concat_5/fake_quantize_I0/SymmQuant/add/y" [op=Const]; +"maskrcnn/tf.concat_5/fake_quantize_I0/SymmQuant/add" [op=AddV2]; +"maskrcnn/tf.concat_5/fake_quantize_I0/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.concat_5/fake_quantize_I0/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; 
+"maskrcnn/tf.concat_5/fake_quantize_I0/SymmQuant/mul" [op=Mul]; +"maskrcnn/tf.concat_5/fake_quantize_I0/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.concat_5/fake_quantize_I1/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.concat_5/fake_quantize_I1/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.concat_5/fake_quantize_I1/SymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.concat_5/fake_quantize_I1/SymmQuant/add/y" [op=Const]; +"maskrcnn/tf.concat_5/fake_quantize_I1/SymmQuant/add" [op=AddV2]; +"maskrcnn/tf.concat_5/fake_quantize_I1/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.concat_5/fake_quantize_I1/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.concat_5/fake_quantize_I1/SymmQuant/mul" [op=Mul]; +"maskrcnn/tf.concat_5/fake_quantize_I1/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.concat_5/fake_quantize_I2/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.concat_5/fake_quantize_I2/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.concat_5/fake_quantize_I2/SymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.concat_5/fake_quantize_I2/SymmQuant/add/y" [op=Const]; +"maskrcnn/tf.concat_5/fake_quantize_I2/SymmQuant/add" [op=AddV2]; +"maskrcnn/tf.concat_5/fake_quantize_I2/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.concat_5/fake_quantize_I2/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.concat_5/fake_quantize_I2/SymmQuant/mul" [op=Mul]; +"maskrcnn/tf.concat_5/fake_quantize_I2/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.concat_5/fake_quantize_I3/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.concat_5/fake_quantize_I3/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.concat_5/fake_quantize_I3/SymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.concat_5/fake_quantize_I3/SymmQuant/add/y" [op=Const]; +"maskrcnn/tf.concat_5/fake_quantize_I3/SymmQuant/add" [op=AddV2]; +"maskrcnn/tf.concat_5/fake_quantize_I3/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.concat_5/fake_quantize_I3/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.concat_5/fake_quantize_I3/SymmQuant/mul" [op=Mul]; +"maskrcnn/tf.concat_5/fake_quantize_I3/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.concat_5/fake_quantize_I4/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.concat_5/fake_quantize_I4/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.concat_5/fake_quantize_I4/SymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.concat_5/fake_quantize_I4/SymmQuant/add/y" [op=Const]; +"maskrcnn/tf.concat_5/fake_quantize_I4/SymmQuant/add" [op=AddV2]; +"maskrcnn/tf.concat_5/fake_quantize_I4/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.concat_5/fake_quantize_I4/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.concat_5/fake_quantize_I4/SymmQuant/mul" [op=Mul]; +"maskrcnn/tf.concat_5/fake_quantize_I4/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; "maskrcnn/tf.concat_5/concat/axis" [op=Const]; "maskrcnn/tf.concat_5/concat" [op=ConcatV2]; "maskrcnn/tf.stack_5/stack" [op=Pack]; @@ -3906,29 +3952,33 @@ args_0_1 [op=Placeholder]; "maskrcnn/tf.math.truediv_1/truediv" [op=RealDiv]; "maskrcnn/tf.math.subtract_42/Sub/y" [op=Const]; "maskrcnn/tf.math.subtract_42/Sub" [op=Sub]; 
-"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant/Abs" [op=Abs]; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant/add/y" [op=Const]; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant/add" [op=AddV2]; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant/mul" [op=Mul]; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant_1/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant_1/Abs" [op=Abs]; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant_1/add/y" [op=Const]; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant_1/add" [op=AddV2]; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant_1/mul/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant_1/mul" [op=Mul]; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant_2/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant_2/Abs" [op=Abs]; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant_2/add/y" [op=Const]; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant_2/add" [op=AddV2]; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant_2/mul/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant_2/mul" [op=Mul]; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant_2/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.math.truediv_2/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.truediv_2/fake_quantize/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.truediv_2/fake_quantize/SymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.truediv_2/fake_quantize/SymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.truediv_2/fake_quantize/SymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.truediv_2/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.truediv_2/fake_quantize/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.truediv_2/fake_quantize/SymmQuant/mul" [op=Mul]; +"maskrcnn/tf.math.truediv_2/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.math.truediv_3/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.truediv_3/fake_quantize/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.truediv_3/fake_quantize/SymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.truediv_3/fake_quantize/SymmQuant/add/y" [op=Const]; 
+"maskrcnn/tf.math.truediv_3/fake_quantize/SymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.truediv_3/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.truediv_3/fake_quantize/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.truediv_3/fake_quantize/SymmQuant/mul" [op=Mul]; +"maskrcnn/tf.math.truediv_3/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.math.truediv_1/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.truediv_1/fake_quantize/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.truediv_1/fake_quantize/SymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.truediv_1/fake_quantize/SymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.truediv_1/fake_quantize/SymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.truediv_1/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.truediv_1/fake_quantize/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.truediv_1/fake_quantize/SymmQuant/mul" [op=Mul]; +"maskrcnn/tf.math.truediv_1/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; "maskrcnn/tf.cast_23/Cast" [op=Cast]; "maskrcnn/tf.__operators__.getitem_27/strided_slice/stack" [op=Const]; "maskrcnn/tf.__operators__.getitem_27/strided_slice/stack_1" [op=Const]; @@ -4304,22 +4354,24 @@ args_0_1 [op=Placeholder]; "maskrcnn/tf.math.truediv_8/truediv" [op=RealDiv]; "maskrcnn/tf.math.truediv_6/truediv/y" [op=Const]; "maskrcnn/tf.math.truediv_6/truediv" [op=RealDiv]; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/SymmQuant/Abs" [op=Abs]; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/SymmQuant/add/y" [op=Const]; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/SymmQuant/add" [op=AddV2]; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/SymmQuant/mul" [op=Mul]; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/SymmQuant_1/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/SymmQuant_1/Abs" [op=Abs]; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/SymmQuant_1/add/y" [op=Const]; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/SymmQuant_1/add" [op=AddV2]; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/SymmQuant_1/mul/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/SymmQuant_1/mul" [op=Mul]; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.math.subtract_44/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_44/fake_quantize/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; 
+"maskrcnn/tf.math.subtract_44/fake_quantize/SymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.subtract_44/fake_quantize/SymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.subtract_44/fake_quantize/SymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.subtract_44/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_44/fake_quantize/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_44/fake_quantize/SymmQuant/mul" [op=Mul]; +"maskrcnn/tf.math.subtract_44/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.math.subtract_43/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_43/fake_quantize/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_43/fake_quantize/SymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.subtract_43/fake_quantize/SymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.subtract_43/fake_quantize/SymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.subtract_43/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_43/fake_quantize/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_43/fake_quantize/SymmQuant/mul" [op=Mul]; +"maskrcnn/tf.math.subtract_43/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; "maskrcnn/tf.__operators__.add_53/AddV2" [op=AddV2]; "maskrcnn/tf.__operators__.add_51/AddV2" [op=AddV2]; "maskrcnn/tf.__operators__.add_49/AddV2" [op=AddV2]; @@ -5229,22 +5281,24 @@ args_0_1 [op=Placeholder]; "maskrcnn/tf.__operators__.getitem_60/strided_slice/stack_1" [op=Const]; "maskrcnn/tf.__operators__.getitem_60/strided_slice/stack_2" [op=Const]; "maskrcnn/tf.__operators__.getitem_60/strided_slice" [op=StridedSlice]; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant/Abs" [op=Abs]; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant/add/y" [op=Const]; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant/add" [op=AddV2]; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant/mul" [op=Mul]; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_1/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_1/Abs" [op=Abs]; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_1/add/y" [op=Const]; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_1/add" [op=AddV2]; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_1/mul/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_1/mul" [op=Mul]; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; 
+"maskrcnn/tf.math.subtract_53/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_53/fake_quantize/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_53/fake_quantize/SymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.subtract_53/fake_quantize/SymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.subtract_53/fake_quantize/SymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.subtract_53/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_53/fake_quantize/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_53/fake_quantize/SymmQuant/mul" [op=Mul]; +"maskrcnn/tf.math.subtract_53/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.math.subtract_54/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_54/fake_quantize/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_54/fake_quantize/SymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.subtract_54/fake_quantize/SymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.subtract_54/fake_quantize/SymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.subtract_54/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_54/fake_quantize/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_54/fake_quantize/SymmQuant/mul" [op=Mul]; +"maskrcnn/tf.math.subtract_54/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; "maskrcnn/tf.__operators__.add_66/AddV2" [op=AddV2]; "maskrcnn/tf.__operators__.add_65/AddV2" [op=AddV2]; "maskrcnn/tf.unstack_5/unstack" [op=Unpack]; @@ -5314,20 +5368,24 @@ args_0_1 [op=Placeholder]; "maskrcnn/tf.math.subtract_60/fake_quantize/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; "maskrcnn/tf.math.subtract_60/fake_quantize/SymmQuant/mul" [op=Mul]; "maskrcnn/tf.math.subtract_60/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_2/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_2/Abs" [op=Abs]; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_2/add/y" [op=Const]; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_2/add" [op=AddV2]; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_2/mul/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_2/mul" [op=Mul]; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_2/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_3/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_3/Abs" [op=Abs]; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_3/add/y" [op=Const]; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_3/add" [op=AddV2]; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_3/mul/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_3/mul" [op=Mul]; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_3/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; 
+"maskrcnn/tf.math.subtract_55/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_55/fake_quantize/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_55/fake_quantize/SymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.subtract_55/fake_quantize/SymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.subtract_55/fake_quantize/SymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.subtract_55/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_55/fake_quantize/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_55/fake_quantize/SymmQuant/mul" [op=Mul]; +"maskrcnn/tf.math.subtract_55/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.math.subtract_56/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_56/fake_quantize/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_56/fake_quantize/SymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.subtract_56/fake_quantize/SymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.subtract_56/fake_quantize/SymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.subtract_56/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_56/fake_quantize/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_56/fake_quantize/SymmQuant/mul" [op=Mul]; +"maskrcnn/tf.math.subtract_56/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; "maskrcnn/tf.concat_13/concat/axis" [op=Const]; "maskrcnn/tf.concat_13/concat" [op=ConcatV2]; "maskrcnn/tf.stack_15/stack" [op=Pack]; @@ -5507,29 +5565,33 @@ args_0_1 [op=Placeholder]; "maskrcnn/tf.math.truediv_26/truediv" [op=RealDiv]; "maskrcnn/tf.math.subtract_64/Sub/y" [op=Const]; "maskrcnn/tf.math.subtract_64/Sub" [op=Sub]; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant/Abs" [op=Abs]; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant/add/y" [op=Const]; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant/add" [op=AddV2]; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant/mul" [op=Mul]; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant_1/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant_1/Abs" [op=Abs]; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant_1/add/y" [op=Const]; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant_1/add" [op=AddV2]; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant_1/mul/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant_1/mul" [op=Mul]; 
-"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant_2/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant_2/Abs" [op=Abs]; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant_2/add/y" [op=Const]; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant_2/add" [op=AddV2]; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant_2/mul/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant_2/mul" [op=Mul]; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant_2/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.math.truediv_27/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.truediv_27/fake_quantize/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.truediv_27/fake_quantize/SymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.truediv_27/fake_quantize/SymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.truediv_27/fake_quantize/SymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.truediv_27/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.truediv_27/fake_quantize/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.truediv_27/fake_quantize/SymmQuant/mul" [op=Mul]; +"maskrcnn/tf.math.truediv_27/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.math.truediv_28/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.truediv_28/fake_quantize/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.truediv_28/fake_quantize/SymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.truediv_28/fake_quantize/SymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.truediv_28/fake_quantize/SymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.truediv_28/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.truediv_28/fake_quantize/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.truediv_28/fake_quantize/SymmQuant/mul" [op=Mul]; +"maskrcnn/tf.math.truediv_28/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.math.truediv_26/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.truediv_26/fake_quantize/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.truediv_26/fake_quantize/SymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.truediv_26/fake_quantize/SymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.truediv_26/fake_quantize/SymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.truediv_26/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.truediv_26/fake_quantize/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.truediv_26/fake_quantize/SymmQuant/mul" [op=Mul]; +"maskrcnn/tf.math.truediv_26/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; "maskrcnn/tf.cast_37/Cast" [op=Cast]; "maskrcnn/tf.__operators__.getitem_76/strided_slice/stack" [op=Const]; "maskrcnn/tf.__operators__.getitem_76/strided_slice/stack_1" [op=Const]; @@ -6220,22 +6282,24 @@ args_0_1 [op=Placeholder]; "maskrcnn/tf.math.truediv_33/truediv" [op=RealDiv]; "maskrcnn/tf.math.truediv_31/truediv/y" [op=Const]; 
"maskrcnn/tf.math.truediv_31/truediv" [op=RealDiv]; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/SymmQuant/Abs" [op=Abs]; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/SymmQuant/add/y" [op=Const]; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/SymmQuant/add" [op=AddV2]; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/SymmQuant/mul" [op=Mul]; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/SymmQuant_1/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/SymmQuant_1/Abs" [op=Abs]; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/SymmQuant_1/add/y" [op=Const]; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/SymmQuant_1/add" [op=AddV2]; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/SymmQuant_1/mul/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/SymmQuant_1/mul" [op=Mul]; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.math.subtract_66/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_66/fake_quantize/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_66/fake_quantize/SymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.subtract_66/fake_quantize/SymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.subtract_66/fake_quantize/SymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.subtract_66/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_66/fake_quantize/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_66/fake_quantize/SymmQuant/mul" [op=Mul]; +"maskrcnn/tf.math.subtract_66/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.math.subtract_65/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_65/fake_quantize/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_65/fake_quantize/SymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.subtract_65/fake_quantize/SymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.subtract_65/fake_quantize/SymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.subtract_65/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_65/fake_quantize/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_65/fake_quantize/SymmQuant/mul" [op=Mul]; +"maskrcnn/tf.math.subtract_65/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; "maskrcnn/tf.__operators__.add_96/AddV2" [op=AddV2]; "maskrcnn/tf.__operators__.add_94/AddV2" [op=AddV2]; "maskrcnn/tf.__operators__.add_92/AddV2" [op=AddV2]; @@ -9595,7 +9659,7 @@ args_0_1 -> 
"maskrcnn/image_info/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars "maskrcnn/p6-bn/FusedBatchNormV3/ReadVariableOp" -> "maskrcnn/p6-bn/FusedBatchNormV3"; "maskrcnn/p6-bn/FusedBatchNormV3/ReadVariableOp_1/resource" -> "maskrcnn/p6-bn/FusedBatchNormV3/ReadVariableOp_1"; "maskrcnn/p6-bn/FusedBatchNormV3/ReadVariableOp_1" -> "maskrcnn/p6-bn/FusedBatchNormV3"; -"maskrcnn/p6-bn/FusedBatchNormV3" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/p6-bn/FusedBatchNormV3" -> "maskrcnn/p6-bn/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/p5-bn/ReadVariableOp/resource" -> "maskrcnn/p5-bn/ReadVariableOp"; "maskrcnn/p5-bn/ReadVariableOp" -> "maskrcnn/p5-bn/FusedBatchNormV3"; "maskrcnn/p5-bn/ReadVariableOp_1/resource" -> "maskrcnn/p5-bn/ReadVariableOp_1"; @@ -9604,7 +9668,7 @@ args_0_1 -> "maskrcnn/image_info/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars "maskrcnn/p5-bn/FusedBatchNormV3/ReadVariableOp" -> "maskrcnn/p5-bn/FusedBatchNormV3"; "maskrcnn/p5-bn/FusedBatchNormV3/ReadVariableOp_1/resource" -> "maskrcnn/p5-bn/FusedBatchNormV3/ReadVariableOp_1"; "maskrcnn/p5-bn/FusedBatchNormV3/ReadVariableOp_1" -> "maskrcnn/p5-bn/FusedBatchNormV3"; -"maskrcnn/p5-bn/FusedBatchNormV3" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars"; +"maskrcnn/p5-bn/FusedBatchNormV3" -> "maskrcnn/p5-bn/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/p4-bn/ReadVariableOp/resource" -> "maskrcnn/p4-bn/ReadVariableOp"; "maskrcnn/p4-bn/ReadVariableOp" -> "maskrcnn/p4-bn/FusedBatchNormV3"; "maskrcnn/p4-bn/ReadVariableOp_1/resource" -> "maskrcnn/p4-bn/ReadVariableOp_1"; @@ -9613,7 +9677,7 @@ args_0_1 -> "maskrcnn/image_info/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars "maskrcnn/p4-bn/FusedBatchNormV3/ReadVariableOp" -> "maskrcnn/p4-bn/FusedBatchNormV3"; "maskrcnn/p4-bn/FusedBatchNormV3/ReadVariableOp_1/resource" -> "maskrcnn/p4-bn/FusedBatchNormV3/ReadVariableOp_1"; "maskrcnn/p4-bn/FusedBatchNormV3/ReadVariableOp_1" -> "maskrcnn/p4-bn/FusedBatchNormV3"; -"maskrcnn/p4-bn/FusedBatchNormV3" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_2/FakeQuantWithMinMaxVars"; +"maskrcnn/p4-bn/FusedBatchNormV3" -> "maskrcnn/p4-bn/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/p3-bn/ReadVariableOp/resource" -> "maskrcnn/p3-bn/ReadVariableOp"; "maskrcnn/p3-bn/ReadVariableOp" -> "maskrcnn/p3-bn/FusedBatchNormV3"; "maskrcnn/p3-bn/ReadVariableOp_1/resource" -> "maskrcnn/p3-bn/ReadVariableOp_1"; @@ -9622,7 +9686,7 @@ args_0_1 -> "maskrcnn/image_info/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars "maskrcnn/p3-bn/FusedBatchNormV3/ReadVariableOp" -> "maskrcnn/p3-bn/FusedBatchNormV3"; "maskrcnn/p3-bn/FusedBatchNormV3/ReadVariableOp_1/resource" -> "maskrcnn/p3-bn/FusedBatchNormV3/ReadVariableOp_1"; "maskrcnn/p3-bn/FusedBatchNormV3/ReadVariableOp_1" -> "maskrcnn/p3-bn/FusedBatchNormV3"; -"maskrcnn/p3-bn/FusedBatchNormV3" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_3/FakeQuantWithMinMaxVars"; +"maskrcnn/p3-bn/FusedBatchNormV3" -> "maskrcnn/p3-bn/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/p2-bn/ReadVariableOp/resource" -> "maskrcnn/p2-bn/ReadVariableOp"; "maskrcnn/p2-bn/ReadVariableOp" -> "maskrcnn/p2-bn/FusedBatchNormV3"; "maskrcnn/p2-bn/ReadVariableOp_1/resource" -> "maskrcnn/p2-bn/ReadVariableOp_1"; @@ -9631,67 +9695,67 @@ args_0_1 -> "maskrcnn/image_info/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars "maskrcnn/p2-bn/FusedBatchNormV3/ReadVariableOp" -> 
"maskrcnn/p2-bn/FusedBatchNormV3"; "maskrcnn/p2-bn/FusedBatchNormV3/ReadVariableOp_1/resource" -> "maskrcnn/p2-bn/FusedBatchNormV3/ReadVariableOp_1"; "maskrcnn/p2-bn/FusedBatchNormV3/ReadVariableOp_1" -> "maskrcnn/p2-bn/FusedBatchNormV3"; -"maskrcnn/p2-bn/FusedBatchNormV3" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_4/FakeQuantWithMinMaxVars"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_1/Abs/ReadVariableOp"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_2/Abs/ReadVariableOp"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_3/Abs/ReadVariableOp"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_4/Abs/ReadVariableOp"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant/Abs"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant/Abs" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant/add"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant/add/y" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant/add"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant/add" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant/mul"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant/add" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_1/mul/ReadVariableOp"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_2/mul/ReadVariableOp"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_3/mul/ReadVariableOp"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_4/mul/ReadVariableOp"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant/mul"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant/mul" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/rpn/Conv2D"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.reshape_18/Reshape"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.reshape_47/Reshape"; 
-"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_1/Abs/ReadVariableOp" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_1/Abs"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_1/Abs" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_1/add"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_1/add/y" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_1/add"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_1/add" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_1/mul"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_1/add" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_1/mul/ReadVariableOp" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_1/mul"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_1/mul" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars" -> "maskrcnn/rpn/Conv2D_1"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.reshape_16/Reshape"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.reshape_45/Reshape"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_2/Abs/ReadVariableOp" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_2/Abs"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_2/Abs" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_2/add"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_2/add/y" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_2/add"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_2/add" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_2/mul"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_2/add" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_2/FakeQuantWithMinMaxVars"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_2/mul/ReadVariableOp" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_2/mul"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_2/mul" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_2/FakeQuantWithMinMaxVars"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_2/FakeQuantWithMinMaxVars" -> "maskrcnn/rpn/Conv2D_2"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_2/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.reshape_14/Reshape"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_2/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.reshape_43/Reshape"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_3/Abs/ReadVariableOp" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_3/Abs"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_3/Abs" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_3/add"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_3/add/y" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_3/add"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_3/add" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_3/mul"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_3/add" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_3/FakeQuantWithMinMaxVars"; 
-"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_3/mul/ReadVariableOp" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_3/mul"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_3/mul" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_3/FakeQuantWithMinMaxVars"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_3/FakeQuantWithMinMaxVars" -> "maskrcnn/rpn/Conv2D_3"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_3/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.reshape_12/Reshape"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_3/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.reshape_41/Reshape"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_4/Abs/ReadVariableOp" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_4/Abs"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_4/Abs" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_4/add"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_4/add/y" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_4/add"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_4/add" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_4/mul"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_4/add" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_4/FakeQuantWithMinMaxVars"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_4/mul/ReadVariableOp" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_4/mul"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_4/mul" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_4/FakeQuantWithMinMaxVars"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_4/FakeQuantWithMinMaxVars" -> "maskrcnn/rpn/Conv2D_4"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_4/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.reshape_10/Reshape"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_4/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.reshape_39/Reshape"; +"maskrcnn/p2-bn/FusedBatchNormV3" -> "maskrcnn/p2-bn/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/p6-bn/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/p6-bn/fake_quantize/SymmQuant/Abs/ReadVariableOp"; +"maskrcnn/p6-bn/fake_quantize/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/p6-bn/fake_quantize/SymmQuant/Abs"; +"maskrcnn/p6-bn/fake_quantize/SymmQuant/Abs" -> "maskrcnn/p6-bn/fake_quantize/SymmQuant/add"; +"maskrcnn/p6-bn/fake_quantize/SymmQuant/add/y" -> "maskrcnn/p6-bn/fake_quantize/SymmQuant/add"; +"maskrcnn/p6-bn/fake_quantize/SymmQuant/add" -> "maskrcnn/p6-bn/fake_quantize/SymmQuant/mul"; +"maskrcnn/p6-bn/fake_quantize/SymmQuant/add" -> "maskrcnn/p6-bn/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/p6-bn/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/p6-bn/fake_quantize/SymmQuant/mul/ReadVariableOp"; +"maskrcnn/p6-bn/fake_quantize/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/p6-bn/fake_quantize/SymmQuant/mul"; +"maskrcnn/p6-bn/fake_quantize/SymmQuant/mul" -> "maskrcnn/p6-bn/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/p6-bn/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/rpn/Conv2D"; +"maskrcnn/p6-bn/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.reshape_18/Reshape"; +"maskrcnn/p6-bn/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.reshape_47/Reshape"; 
+"maskrcnn/p5-bn/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/p5-bn/fake_quantize/SymmQuant/Abs/ReadVariableOp"; +"maskrcnn/p5-bn/fake_quantize/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/p5-bn/fake_quantize/SymmQuant/Abs"; +"maskrcnn/p5-bn/fake_quantize/SymmQuant/Abs" -> "maskrcnn/p5-bn/fake_quantize/SymmQuant/add"; +"maskrcnn/p5-bn/fake_quantize/SymmQuant/add/y" -> "maskrcnn/p5-bn/fake_quantize/SymmQuant/add"; +"maskrcnn/p5-bn/fake_quantize/SymmQuant/add" -> "maskrcnn/p5-bn/fake_quantize/SymmQuant/mul"; +"maskrcnn/p5-bn/fake_quantize/SymmQuant/add" -> "maskrcnn/p5-bn/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/p5-bn/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/p5-bn/fake_quantize/SymmQuant/mul/ReadVariableOp"; +"maskrcnn/p5-bn/fake_quantize/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/p5-bn/fake_quantize/SymmQuant/mul"; +"maskrcnn/p5-bn/fake_quantize/SymmQuant/mul" -> "maskrcnn/p5-bn/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/p5-bn/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/rpn/Conv2D_1"; +"maskrcnn/p5-bn/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.reshape_16/Reshape"; +"maskrcnn/p5-bn/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.reshape_45/Reshape"; +"maskrcnn/p4-bn/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/p4-bn/fake_quantize/SymmQuant/Abs/ReadVariableOp"; +"maskrcnn/p4-bn/fake_quantize/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/p4-bn/fake_quantize/SymmQuant/Abs"; +"maskrcnn/p4-bn/fake_quantize/SymmQuant/Abs" -> "maskrcnn/p4-bn/fake_quantize/SymmQuant/add"; +"maskrcnn/p4-bn/fake_quantize/SymmQuant/add/y" -> "maskrcnn/p4-bn/fake_quantize/SymmQuant/add"; +"maskrcnn/p4-bn/fake_quantize/SymmQuant/add" -> "maskrcnn/p4-bn/fake_quantize/SymmQuant/mul"; +"maskrcnn/p4-bn/fake_quantize/SymmQuant/add" -> "maskrcnn/p4-bn/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/p4-bn/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/p4-bn/fake_quantize/SymmQuant/mul/ReadVariableOp"; +"maskrcnn/p4-bn/fake_quantize/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/p4-bn/fake_quantize/SymmQuant/mul"; +"maskrcnn/p4-bn/fake_quantize/SymmQuant/mul" -> "maskrcnn/p4-bn/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/p4-bn/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/rpn/Conv2D_2"; +"maskrcnn/p4-bn/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.reshape_14/Reshape"; +"maskrcnn/p4-bn/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.reshape_43/Reshape"; +"maskrcnn/p3-bn/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/p3-bn/fake_quantize/SymmQuant/Abs/ReadVariableOp"; +"maskrcnn/p3-bn/fake_quantize/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/p3-bn/fake_quantize/SymmQuant/Abs"; +"maskrcnn/p3-bn/fake_quantize/SymmQuant/Abs" -> "maskrcnn/p3-bn/fake_quantize/SymmQuant/add"; +"maskrcnn/p3-bn/fake_quantize/SymmQuant/add/y" -> "maskrcnn/p3-bn/fake_quantize/SymmQuant/add"; +"maskrcnn/p3-bn/fake_quantize/SymmQuant/add" -> "maskrcnn/p3-bn/fake_quantize/SymmQuant/mul"; +"maskrcnn/p3-bn/fake_quantize/SymmQuant/add" -> "maskrcnn/p3-bn/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/p3-bn/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/p3-bn/fake_quantize/SymmQuant/mul/ReadVariableOp"; +"maskrcnn/p3-bn/fake_quantize/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/p3-bn/fake_quantize/SymmQuant/mul"; +"maskrcnn/p3-bn/fake_quantize/SymmQuant/mul" -> 
"maskrcnn/p3-bn/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/p3-bn/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/rpn/Conv2D_3"; +"maskrcnn/p3-bn/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.reshape_12/Reshape"; +"maskrcnn/p3-bn/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.reshape_41/Reshape"; +"maskrcnn/p2-bn/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/p2-bn/fake_quantize/SymmQuant/Abs/ReadVariableOp"; +"maskrcnn/p2-bn/fake_quantize/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/p2-bn/fake_quantize/SymmQuant/Abs"; +"maskrcnn/p2-bn/fake_quantize/SymmQuant/Abs" -> "maskrcnn/p2-bn/fake_quantize/SymmQuant/add"; +"maskrcnn/p2-bn/fake_quantize/SymmQuant/add/y" -> "maskrcnn/p2-bn/fake_quantize/SymmQuant/add"; +"maskrcnn/p2-bn/fake_quantize/SymmQuant/add" -> "maskrcnn/p2-bn/fake_quantize/SymmQuant/mul"; +"maskrcnn/p2-bn/fake_quantize/SymmQuant/add" -> "maskrcnn/p2-bn/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/p2-bn/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/p2-bn/fake_quantize/SymmQuant/mul/ReadVariableOp"; +"maskrcnn/p2-bn/fake_quantize/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/p2-bn/fake_quantize/SymmQuant/mul"; +"maskrcnn/p2-bn/fake_quantize/SymmQuant/mul" -> "maskrcnn/p2-bn/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/p2-bn/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/rpn/Conv2D_4"; +"maskrcnn/p2-bn/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.reshape_10/Reshape"; +"maskrcnn/p2-bn/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.reshape_39/Reshape"; "maskrcnn/rpn/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/rpn/SymmQuant/Abs/ReadVariableOp"; "maskrcnn/rpn/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/rpn/SymmQuant_1/Abs/ReadVariableOp"; "maskrcnn/rpn/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/rpn/SymmQuant_2/Abs/ReadVariableOp"; @@ -10693,156 +10757,136 @@ args_0_1 -> "maskrcnn/image_info/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars "maskrcnn/tf.math.multiply_4/fake_quantize/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.math.multiply_4/fake_quantize/SymmQuant/mul"; "maskrcnn/tf.math.multiply_4/fake_quantize/SymmQuant/mul" -> "maskrcnn/tf.math.multiply_4/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.multiply_4/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.math.subtract/Sub"; -"maskrcnn/tf.math.subtract_33/Sub" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_32/Sub" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_25/Sub" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_24/Sub" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_17/Sub" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_16/Sub" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_9/Sub" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_8/Sub" -> 
"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_33/Sub" -> "maskrcnn/tf.math.subtract_33/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_32/Sub" -> "maskrcnn/tf.math.subtract_32/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_25/Sub" -> "maskrcnn/tf.math.subtract_25/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_24/Sub" -> "maskrcnn/tf.math.subtract_24/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_17/Sub" -> "maskrcnn/tf.math.subtract_17/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_16/Sub" -> "maskrcnn/tf.math.subtract_16/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_9/Sub" -> "maskrcnn/tf.math.subtract_9/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_8/Sub" -> "maskrcnn/tf.math.subtract_8/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.__operators__.getitem/strided_slice/stack" -> "maskrcnn/tf.__operators__.getitem/strided_slice"; "maskrcnn/tf.__operators__.getitem/strided_slice/stack_1" -> "maskrcnn/tf.__operators__.getitem/strided_slice"; "maskrcnn/tf.__operators__.getitem/strided_slice/stack_2" -> "maskrcnn/tf.__operators__.getitem/strided_slice"; "maskrcnn/tf.__operators__.getitem/strided_slice" -> "maskrcnn/tf.expand_dims/ExpandDims"; -"maskrcnn/tf.math.subtract_1/Sub" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract/Sub" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_1/Sub" -> "maskrcnn/tf.math.subtract_1/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract/Sub" -> "maskrcnn/tf.math.subtract/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.expand_dims/ExpandDims/dim" -> "maskrcnn/tf.expand_dims/ExpandDims"; "maskrcnn/tf.expand_dims/ExpandDims" -> "maskrcnn/tf.unstack_4/unstack"; "maskrcnn/tf.expand_dims/ExpandDims" -> "maskrcnn/tf.unstack_3/unstack"; "maskrcnn/tf.expand_dims/ExpandDims" -> "maskrcnn/tf.unstack_2/unstack"; "maskrcnn/tf.expand_dims/ExpandDims" -> "maskrcnn/tf.unstack_1/unstack"; "maskrcnn/tf.expand_dims/ExpandDims" -> "maskrcnn/tf.unstack/unstack"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_1/Abs/ReadVariableOp"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_2/Abs/ReadVariableOp"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_3/Abs/ReadVariableOp"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant/Abs"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant/Abs" -> 
"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant/add"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant/add/y" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant/add"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant/add" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant/mul"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant/add" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_1/mul/ReadVariableOp"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_2/mul/ReadVariableOp"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_3/mul/ReadVariableOp"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant/mul"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant/mul" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.__operators__.add_37/AddV2"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_4/concat"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_1/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_1/Abs"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_1/Abs" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_1/add"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_1/add/y" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_1/add"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_1/add" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_1/mul"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_1/add" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_1/mul/ReadVariableOp" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_1/mul"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_1/mul" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.__operators__.add_38/AddV2"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars" -> 
"maskrcnn/tf.concat_4/concat"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_1/Abs/ReadVariableOp"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_2/Abs/ReadVariableOp"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_3/Abs/ReadVariableOp"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant/Abs"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant/Abs" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant/add"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant/add/y" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant/add"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant/add" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant/mul"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant/add" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_1/mul/ReadVariableOp"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_2/mul/ReadVariableOp"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_3/mul/ReadVariableOp"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant/mul"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant/mul" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.__operators__.add_33/AddV2"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_3/concat"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_1/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_1/Abs"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_1/Abs" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_1/add"; 
-"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_1/add/y" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_1/add"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_1/add" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_1/mul"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_1/add" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_1/mul/ReadVariableOp" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_1/mul"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_1/mul" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.__operators__.add_34/AddV2"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_3/concat"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_1/Abs/ReadVariableOp"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_2/Abs/ReadVariableOp"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_3/Abs/ReadVariableOp"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant/Abs"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant/Abs" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant/add"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant/add/y" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant/add"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant/add" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant/mul"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant/add" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_1/mul/ReadVariableOp"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_2/mul/ReadVariableOp"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp/resource" 
-> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_3/mul/ReadVariableOp"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant/mul"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant/mul" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.__operators__.add_29/AddV2"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_2/concat"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_1/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_1/Abs"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_1/Abs" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_1/add"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_1/add/y" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_1/add"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_1/add" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_1/mul"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_1/add" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_1/mul/ReadVariableOp" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_1/mul"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_1/mul" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.__operators__.add_30/AddV2"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_2/concat"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_1/Abs/ReadVariableOp"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_2/Abs/ReadVariableOp"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_3/Abs/ReadVariableOp"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant/Abs"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant/Abs" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant/add"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant/add/y" -> 
"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant/add"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant/add" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant/mul"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant/add" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_1/mul/ReadVariableOp"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_2/mul/ReadVariableOp"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_3/mul/ReadVariableOp"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant/mul"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant/mul" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.__operators__.add_25/AddV2"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_1/concat"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_1/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_1/Abs"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_1/Abs" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_1/add"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_1/add/y" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_1/add"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_1/add" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_1/mul"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_1/add" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_1/mul/ReadVariableOp" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_1/mul"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_1/mul" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.__operators__.add_26/AddV2"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_1/concat"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp/resource" -> 
"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_1/Abs/ReadVariableOp"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_2/Abs/ReadVariableOp"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_3/Abs/ReadVariableOp"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant/Abs"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant/Abs" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant/add"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant/add/y" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant/add"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant/add" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant/mul"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant/add" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_1/mul/ReadVariableOp"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_2/mul/ReadVariableOp"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_3/mul/ReadVariableOp"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant/mul"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant/mul" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.__operators__.add_21/AddV2"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat/concat"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_1/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_1/Abs"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_1/Abs" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_1/add"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_1/add/y" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_1/add"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_1/add" -> 
"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_1/mul"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_1/add" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_1/mul/ReadVariableOp" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_1/mul"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_1/mul" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.__operators__.add_22/AddV2"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat/concat"; +"maskrcnn/tf.math.subtract_32/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_32/fake_quantize/SymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.subtract_32/fake_quantize/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_32/fake_quantize/SymmQuant/Abs"; +"maskrcnn/tf.math.subtract_32/fake_quantize/SymmQuant/Abs" -> "maskrcnn/tf.math.subtract_32/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.subtract_32/fake_quantize/SymmQuant/add/y" -> "maskrcnn/tf.math.subtract_32/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.subtract_32/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.subtract_32/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.subtract_32/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.subtract_32/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_32/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_32/fake_quantize/SymmQuant/mul/ReadVariableOp"; +"maskrcnn/tf.math.subtract_32/fake_quantize/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.math.subtract_32/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.subtract_32/fake_quantize/SymmQuant/mul" -> "maskrcnn/tf.math.subtract_32/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_32/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.__operators__.add_37/AddV2"; +"maskrcnn/tf.math.subtract_32/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_4/concat"; +"maskrcnn/tf.math.subtract_33/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_33/fake_quantize/SymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.subtract_33/fake_quantize/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_33/fake_quantize/SymmQuant/Abs"; +"maskrcnn/tf.math.subtract_33/fake_quantize/SymmQuant/Abs" -> "maskrcnn/tf.math.subtract_33/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.subtract_33/fake_quantize/SymmQuant/add/y" -> "maskrcnn/tf.math.subtract_33/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.subtract_33/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.subtract_33/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.subtract_33/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.subtract_33/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_33/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_33/fake_quantize/SymmQuant/mul/ReadVariableOp"; +"maskrcnn/tf.math.subtract_33/fake_quantize/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.math.subtract_33/fake_quantize/SymmQuant/mul"; 
+"maskrcnn/tf.math.subtract_33/fake_quantize/SymmQuant/mul" -> "maskrcnn/tf.math.subtract_33/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_33/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.__operators__.add_38/AddV2"; +"maskrcnn/tf.math.subtract_33/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_4/concat"; +"maskrcnn/tf.math.subtract_24/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_24/fake_quantize/SymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.subtract_24/fake_quantize/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_24/fake_quantize/SymmQuant/Abs"; +"maskrcnn/tf.math.subtract_24/fake_quantize/SymmQuant/Abs" -> "maskrcnn/tf.math.subtract_24/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.subtract_24/fake_quantize/SymmQuant/add/y" -> "maskrcnn/tf.math.subtract_24/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.subtract_24/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.subtract_24/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.subtract_24/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.subtract_24/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_24/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_24/fake_quantize/SymmQuant/mul/ReadVariableOp"; +"maskrcnn/tf.math.subtract_24/fake_quantize/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.math.subtract_24/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.subtract_24/fake_quantize/SymmQuant/mul" -> "maskrcnn/tf.math.subtract_24/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_24/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.__operators__.add_33/AddV2"; +"maskrcnn/tf.math.subtract_24/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_3/concat"; +"maskrcnn/tf.math.subtract_25/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_25/fake_quantize/SymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.subtract_25/fake_quantize/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_25/fake_quantize/SymmQuant/Abs"; +"maskrcnn/tf.math.subtract_25/fake_quantize/SymmQuant/Abs" -> "maskrcnn/tf.math.subtract_25/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.subtract_25/fake_quantize/SymmQuant/add/y" -> "maskrcnn/tf.math.subtract_25/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.subtract_25/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.subtract_25/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.subtract_25/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.subtract_25/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_25/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_25/fake_quantize/SymmQuant/mul/ReadVariableOp"; +"maskrcnn/tf.math.subtract_25/fake_quantize/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.math.subtract_25/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.subtract_25/fake_quantize/SymmQuant/mul" -> "maskrcnn/tf.math.subtract_25/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_25/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.__operators__.add_34/AddV2"; +"maskrcnn/tf.math.subtract_25/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_3/concat"; +"maskrcnn/tf.math.subtract_16/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_16/fake_quantize/SymmQuant/Abs/ReadVariableOp"; 
+"maskrcnn/tf.math.subtract_16/fake_quantize/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_16/fake_quantize/SymmQuant/Abs"; +"maskrcnn/tf.math.subtract_16/fake_quantize/SymmQuant/Abs" -> "maskrcnn/tf.math.subtract_16/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.subtract_16/fake_quantize/SymmQuant/add/y" -> "maskrcnn/tf.math.subtract_16/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.subtract_16/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.subtract_16/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.subtract_16/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.subtract_16/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_16/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_16/fake_quantize/SymmQuant/mul/ReadVariableOp"; +"maskrcnn/tf.math.subtract_16/fake_quantize/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.math.subtract_16/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.subtract_16/fake_quantize/SymmQuant/mul" -> "maskrcnn/tf.math.subtract_16/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_16/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.__operators__.add_29/AddV2"; +"maskrcnn/tf.math.subtract_16/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_2/concat"; +"maskrcnn/tf.math.subtract_17/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_17/fake_quantize/SymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.subtract_17/fake_quantize/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_17/fake_quantize/SymmQuant/Abs"; +"maskrcnn/tf.math.subtract_17/fake_quantize/SymmQuant/Abs" -> "maskrcnn/tf.math.subtract_17/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.subtract_17/fake_quantize/SymmQuant/add/y" -> "maskrcnn/tf.math.subtract_17/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.subtract_17/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.subtract_17/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.subtract_17/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.subtract_17/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_17/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_17/fake_quantize/SymmQuant/mul/ReadVariableOp"; +"maskrcnn/tf.math.subtract_17/fake_quantize/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.math.subtract_17/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.subtract_17/fake_quantize/SymmQuant/mul" -> "maskrcnn/tf.math.subtract_17/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_17/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.__operators__.add_30/AddV2"; +"maskrcnn/tf.math.subtract_17/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_2/concat"; +"maskrcnn/tf.math.subtract_8/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_8/fake_quantize/SymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.subtract_8/fake_quantize/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_8/fake_quantize/SymmQuant/Abs"; +"maskrcnn/tf.math.subtract_8/fake_quantize/SymmQuant/Abs" -> "maskrcnn/tf.math.subtract_8/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.subtract_8/fake_quantize/SymmQuant/add/y" -> "maskrcnn/tf.math.subtract_8/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.subtract_8/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.subtract_8/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.subtract_8/fake_quantize/SymmQuant/add" -> 
"maskrcnn/tf.math.subtract_8/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_8/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_8/fake_quantize/SymmQuant/mul/ReadVariableOp"; +"maskrcnn/tf.math.subtract_8/fake_quantize/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.math.subtract_8/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.subtract_8/fake_quantize/SymmQuant/mul" -> "maskrcnn/tf.math.subtract_8/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_8/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.__operators__.add_25/AddV2"; +"maskrcnn/tf.math.subtract_8/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_1/concat"; +"maskrcnn/tf.math.subtract_9/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_9/fake_quantize/SymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.subtract_9/fake_quantize/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_9/fake_quantize/SymmQuant/Abs"; +"maskrcnn/tf.math.subtract_9/fake_quantize/SymmQuant/Abs" -> "maskrcnn/tf.math.subtract_9/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.subtract_9/fake_quantize/SymmQuant/add/y" -> "maskrcnn/tf.math.subtract_9/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.subtract_9/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.subtract_9/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.subtract_9/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.subtract_9/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_9/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_9/fake_quantize/SymmQuant/mul/ReadVariableOp"; +"maskrcnn/tf.math.subtract_9/fake_quantize/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.math.subtract_9/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.subtract_9/fake_quantize/SymmQuant/mul" -> "maskrcnn/tf.math.subtract_9/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_9/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.__operators__.add_26/AddV2"; +"maskrcnn/tf.math.subtract_9/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_1/concat"; +"maskrcnn/tf.math.subtract/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract/fake_quantize/SymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.subtract/fake_quantize/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract/fake_quantize/SymmQuant/Abs"; +"maskrcnn/tf.math.subtract/fake_quantize/SymmQuant/Abs" -> "maskrcnn/tf.math.subtract/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.subtract/fake_quantize/SymmQuant/add/y" -> "maskrcnn/tf.math.subtract/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.subtract/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.subtract/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.subtract/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.subtract/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract/fake_quantize/SymmQuant/mul/ReadVariableOp"; +"maskrcnn/tf.math.subtract/fake_quantize/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.math.subtract/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.subtract/fake_quantize/SymmQuant/mul" -> "maskrcnn/tf.math.subtract/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.__operators__.add_21/AddV2"; 
+"maskrcnn/tf.math.subtract/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat/concat"; +"maskrcnn/tf.math.subtract_1/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_1/fake_quantize/SymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.subtract_1/fake_quantize/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_1/fake_quantize/SymmQuant/Abs"; +"maskrcnn/tf.math.subtract_1/fake_quantize/SymmQuant/Abs" -> "maskrcnn/tf.math.subtract_1/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.subtract_1/fake_quantize/SymmQuant/add/y" -> "maskrcnn/tf.math.subtract_1/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.subtract_1/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.subtract_1/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.subtract_1/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.subtract_1/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_1/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_1/fake_quantize/SymmQuant/mul/ReadVariableOp"; +"maskrcnn/tf.math.subtract_1/fake_quantize/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.math.subtract_1/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.subtract_1/fake_quantize/SymmQuant/mul" -> "maskrcnn/tf.math.subtract_1/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_1/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.__operators__.add_22/AddV2"; +"maskrcnn/tf.math.subtract_1/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat/concat"; "maskrcnn/tf.__operators__.add_38/AddV2" -> "maskrcnn/tf.__operators__.add_38/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.__operators__.add_37/AddV2" -> "maskrcnn/tf.__operators__.add_37/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.__operators__.add_34/AddV2" -> "maskrcnn/tf.__operators__.add_34/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; @@ -10982,9 +11026,9 @@ args_0_1 -> "maskrcnn/image_info/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars "maskrcnn/tf.math.subtract_36/Sub/y" -> "maskrcnn/tf.math.subtract_36/Sub"; "maskrcnn/tf.math.subtract_36/Sub" -> "maskrcnn/tf.math.subtract_36/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_35/Sub/y" -> "maskrcnn/tf.math.subtract_35/Sub"; -"maskrcnn/tf.math.subtract_35/Sub" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_3/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_35/Sub" -> "maskrcnn/tf.math.subtract_35/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_34/Sub/y" -> "maskrcnn/tf.math.subtract_34/Sub"; -"maskrcnn/tf.math.subtract_34/Sub" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_2/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_34/Sub" -> "maskrcnn/tf.math.subtract_34/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_31/Sub/y" -> "maskrcnn/tf.math.subtract_31/Sub"; "maskrcnn/tf.math.subtract_31/Sub" -> "maskrcnn/tf.math.subtract_31/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_30/Sub/y" -> "maskrcnn/tf.math.subtract_30/Sub"; @@ -10994,9 +11038,9 @@ args_0_1 -> "maskrcnn/image_info/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars "maskrcnn/tf.math.subtract_28/Sub/y" -> "maskrcnn/tf.math.subtract_28/Sub"; "maskrcnn/tf.math.subtract_28/Sub" -> "maskrcnn/tf.math.subtract_28/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_27/Sub/y" -> 
"maskrcnn/tf.math.subtract_27/Sub"; -"maskrcnn/tf.math.subtract_27/Sub" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_3/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_27/Sub" -> "maskrcnn/tf.math.subtract_27/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_26/Sub/y" -> "maskrcnn/tf.math.subtract_26/Sub"; -"maskrcnn/tf.math.subtract_26/Sub" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_2/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_26/Sub" -> "maskrcnn/tf.math.subtract_26/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_23/Sub/y" -> "maskrcnn/tf.math.subtract_23/Sub"; "maskrcnn/tf.math.subtract_23/Sub" -> "maskrcnn/tf.math.subtract_23/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_22/Sub/y" -> "maskrcnn/tf.math.subtract_22/Sub"; @@ -11006,9 +11050,9 @@ args_0_1 -> "maskrcnn/image_info/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars "maskrcnn/tf.math.subtract_20/Sub/y" -> "maskrcnn/tf.math.subtract_20/Sub"; "maskrcnn/tf.math.subtract_20/Sub" -> "maskrcnn/tf.math.subtract_20/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_19/Sub/y" -> "maskrcnn/tf.math.subtract_19/Sub"; -"maskrcnn/tf.math.subtract_19/Sub" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_3/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_19/Sub" -> "maskrcnn/tf.math.subtract_19/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_18/Sub/y" -> "maskrcnn/tf.math.subtract_18/Sub"; -"maskrcnn/tf.math.subtract_18/Sub" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_2/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_18/Sub" -> "maskrcnn/tf.math.subtract_18/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_15/Sub/y" -> "maskrcnn/tf.math.subtract_15/Sub"; "maskrcnn/tf.math.subtract_15/Sub" -> "maskrcnn/tf.math.subtract_15/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_14/Sub/y" -> "maskrcnn/tf.math.subtract_14/Sub"; @@ -11018,9 +11062,9 @@ args_0_1 -> "maskrcnn/image_info/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars "maskrcnn/tf.math.subtract_12/Sub/y" -> "maskrcnn/tf.math.subtract_12/Sub"; "maskrcnn/tf.math.subtract_12/Sub" -> "maskrcnn/tf.math.subtract_12/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_11/Sub/y" -> "maskrcnn/tf.math.subtract_11/Sub"; -"maskrcnn/tf.math.subtract_11/Sub" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_3/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_11/Sub" -> "maskrcnn/tf.math.subtract_11/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_10/Sub/y" -> "maskrcnn/tf.math.subtract_10/Sub"; -"maskrcnn/tf.math.subtract_10/Sub" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_2/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_10/Sub" -> "maskrcnn/tf.math.subtract_10/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_7/Sub/y" -> "maskrcnn/tf.math.subtract_7/Sub"; "maskrcnn/tf.math.subtract_7/Sub" -> "maskrcnn/tf.math.subtract_7/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_6/Sub/y" -> "maskrcnn/tf.math.subtract_6/Sub"; @@ -11030,9 +11074,9 @@ args_0_1 -> "maskrcnn/image_info/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars "maskrcnn/tf.math.subtract_4/Sub/y" -> "maskrcnn/tf.math.subtract_4/Sub"; 
"maskrcnn/tf.math.subtract_4/Sub" -> "maskrcnn/tf.math.subtract_4/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_3/Sub/y" -> "maskrcnn/tf.math.subtract_3/Sub"; -"maskrcnn/tf.math.subtract_3/Sub" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_3/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_3/Sub" -> "maskrcnn/tf.math.subtract_3/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_2/Sub/y" -> "maskrcnn/tf.math.subtract_2/Sub"; -"maskrcnn/tf.math.subtract_2/Sub" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_2/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_2/Sub" -> "maskrcnn/tf.math.subtract_2/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_36/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_36/fake_quantize/SymmQuant/Abs/ReadVariableOp"; "maskrcnn/tf.math.subtract_36/fake_quantize/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_36/fake_quantize/SymmQuant/Abs"; "maskrcnn/tf.math.subtract_36/fake_quantize/SymmQuant/Abs" -> "maskrcnn/tf.math.subtract_36/fake_quantize/SymmQuant/add"; @@ -11073,22 +11117,26 @@ args_0_1 -> "maskrcnn/image_info/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars "maskrcnn/tf.math.subtract_39/fake_quantize/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.math.subtract_39/fake_quantize/SymmQuant/mul"; "maskrcnn/tf.math.subtract_39/fake_quantize/SymmQuant/mul" -> "maskrcnn/tf.math.subtract_39/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_39/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.stack_4/stack"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_2/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_2/Abs"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_2/Abs" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_2/add"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_2/add/y" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_2/add"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_2/add" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_2/mul"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_2/add" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_2/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_2/mul/ReadVariableOp" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_2/mul"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_2/mul" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_2/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_2/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_4/concat"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_3/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_3/Abs"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_3/Abs" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_3/add"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_3/add/y" -> 
"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_3/add"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_3/add" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_3/mul"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_3/add" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_3/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_3/mul/ReadVariableOp" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_3/mul"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_3/mul" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_3/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_3/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_4/concat"; +"maskrcnn/tf.math.subtract_34/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_34/fake_quantize/SymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.subtract_34/fake_quantize/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_34/fake_quantize/SymmQuant/Abs"; +"maskrcnn/tf.math.subtract_34/fake_quantize/SymmQuant/Abs" -> "maskrcnn/tf.math.subtract_34/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.subtract_34/fake_quantize/SymmQuant/add/y" -> "maskrcnn/tf.math.subtract_34/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.subtract_34/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.subtract_34/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.subtract_34/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.subtract_34/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_34/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_34/fake_quantize/SymmQuant/mul/ReadVariableOp"; +"maskrcnn/tf.math.subtract_34/fake_quantize/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.math.subtract_34/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.subtract_34/fake_quantize/SymmQuant/mul" -> "maskrcnn/tf.math.subtract_34/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_34/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_4/concat"; +"maskrcnn/tf.math.subtract_35/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_35/fake_quantize/SymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.subtract_35/fake_quantize/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_35/fake_quantize/SymmQuant/Abs"; +"maskrcnn/tf.math.subtract_35/fake_quantize/SymmQuant/Abs" -> "maskrcnn/tf.math.subtract_35/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.subtract_35/fake_quantize/SymmQuant/add/y" -> "maskrcnn/tf.math.subtract_35/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.subtract_35/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.subtract_35/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.subtract_35/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.subtract_35/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_35/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_35/fake_quantize/SymmQuant/mul/ReadVariableOp"; +"maskrcnn/tf.math.subtract_35/fake_quantize/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.math.subtract_35/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.subtract_35/fake_quantize/SymmQuant/mul" -> 
"maskrcnn/tf.math.subtract_35/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_35/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_4/concat"; "maskrcnn/tf.math.subtract_28/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_28/fake_quantize/SymmQuant/Abs/ReadVariableOp"; "maskrcnn/tf.math.subtract_28/fake_quantize/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_28/fake_quantize/SymmQuant/Abs"; "maskrcnn/tf.math.subtract_28/fake_quantize/SymmQuant/Abs" -> "maskrcnn/tf.math.subtract_28/fake_quantize/SymmQuant/add"; @@ -11129,22 +11177,26 @@ args_0_1 -> "maskrcnn/image_info/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars "maskrcnn/tf.math.subtract_31/fake_quantize/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.math.subtract_31/fake_quantize/SymmQuant/mul"; "maskrcnn/tf.math.subtract_31/fake_quantize/SymmQuant/mul" -> "maskrcnn/tf.math.subtract_31/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_31/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.stack_3/stack"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_2/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_2/Abs"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_2/Abs" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_2/add"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_2/add/y" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_2/add"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_2/add" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_2/mul"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_2/add" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_2/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_2/mul/ReadVariableOp" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_2/mul"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_2/mul" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_2/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_2/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_3/concat"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_3/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_3/Abs"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_3/Abs" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_3/add"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_3/add/y" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_3/add"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_3/add" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_3/mul"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_3/add" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_3/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_3/mul/ReadVariableOp" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_3/mul"; 
-"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_3/mul" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_3/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_3/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_3/concat"; +"maskrcnn/tf.math.subtract_26/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_26/fake_quantize/SymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.subtract_26/fake_quantize/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_26/fake_quantize/SymmQuant/Abs"; +"maskrcnn/tf.math.subtract_26/fake_quantize/SymmQuant/Abs" -> "maskrcnn/tf.math.subtract_26/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.subtract_26/fake_quantize/SymmQuant/add/y" -> "maskrcnn/tf.math.subtract_26/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.subtract_26/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.subtract_26/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.subtract_26/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.subtract_26/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_26/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_26/fake_quantize/SymmQuant/mul/ReadVariableOp"; +"maskrcnn/tf.math.subtract_26/fake_quantize/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.math.subtract_26/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.subtract_26/fake_quantize/SymmQuant/mul" -> "maskrcnn/tf.math.subtract_26/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_26/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_3/concat"; +"maskrcnn/tf.math.subtract_27/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_27/fake_quantize/SymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.subtract_27/fake_quantize/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_27/fake_quantize/SymmQuant/Abs"; +"maskrcnn/tf.math.subtract_27/fake_quantize/SymmQuant/Abs" -> "maskrcnn/tf.math.subtract_27/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.subtract_27/fake_quantize/SymmQuant/add/y" -> "maskrcnn/tf.math.subtract_27/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.subtract_27/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.subtract_27/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.subtract_27/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.subtract_27/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_27/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_27/fake_quantize/SymmQuant/mul/ReadVariableOp"; +"maskrcnn/tf.math.subtract_27/fake_quantize/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.math.subtract_27/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.subtract_27/fake_quantize/SymmQuant/mul" -> "maskrcnn/tf.math.subtract_27/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_27/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_3/concat"; "maskrcnn/tf.math.subtract_20/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_20/fake_quantize/SymmQuant/Abs/ReadVariableOp"; "maskrcnn/tf.math.subtract_20/fake_quantize/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_20/fake_quantize/SymmQuant/Abs"; "maskrcnn/tf.math.subtract_20/fake_quantize/SymmQuant/Abs" -> "maskrcnn/tf.math.subtract_20/fake_quantize/SymmQuant/add"; @@ -11185,22 +11237,26 @@ args_0_1 -> 
"maskrcnn/image_info/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars "maskrcnn/tf.math.subtract_23/fake_quantize/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.math.subtract_23/fake_quantize/SymmQuant/mul"; "maskrcnn/tf.math.subtract_23/fake_quantize/SymmQuant/mul" -> "maskrcnn/tf.math.subtract_23/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_23/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.stack_2/stack"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_2/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_2/Abs"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_2/Abs" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_2/add"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_2/add/y" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_2/add"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_2/add" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_2/mul"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_2/add" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_2/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_2/mul/ReadVariableOp" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_2/mul"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_2/mul" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_2/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_2/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_2/concat"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_3/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_3/Abs"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_3/Abs" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_3/add"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_3/add/y" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_3/add"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_3/add" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_3/mul"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_3/add" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_3/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_3/mul/ReadVariableOp" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_3/mul"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_3/mul" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_3/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_3/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_2/concat"; +"maskrcnn/tf.math.subtract_18/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_18/fake_quantize/SymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.subtract_18/fake_quantize/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_18/fake_quantize/SymmQuant/Abs"; 
+"maskrcnn/tf.math.subtract_18/fake_quantize/SymmQuant/Abs" -> "maskrcnn/tf.math.subtract_18/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.subtract_18/fake_quantize/SymmQuant/add/y" -> "maskrcnn/tf.math.subtract_18/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.subtract_18/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.subtract_18/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.subtract_18/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.subtract_18/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_18/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_18/fake_quantize/SymmQuant/mul/ReadVariableOp"; +"maskrcnn/tf.math.subtract_18/fake_quantize/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.math.subtract_18/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.subtract_18/fake_quantize/SymmQuant/mul" -> "maskrcnn/tf.math.subtract_18/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_18/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_2/concat"; +"maskrcnn/tf.math.subtract_19/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_19/fake_quantize/SymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.subtract_19/fake_quantize/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_19/fake_quantize/SymmQuant/Abs"; +"maskrcnn/tf.math.subtract_19/fake_quantize/SymmQuant/Abs" -> "maskrcnn/tf.math.subtract_19/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.subtract_19/fake_quantize/SymmQuant/add/y" -> "maskrcnn/tf.math.subtract_19/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.subtract_19/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.subtract_19/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.subtract_19/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.subtract_19/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_19/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_19/fake_quantize/SymmQuant/mul/ReadVariableOp"; +"maskrcnn/tf.math.subtract_19/fake_quantize/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.math.subtract_19/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.subtract_19/fake_quantize/SymmQuant/mul" -> "maskrcnn/tf.math.subtract_19/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_19/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_2/concat"; "maskrcnn/tf.math.subtract_12/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_12/fake_quantize/SymmQuant/Abs/ReadVariableOp"; "maskrcnn/tf.math.subtract_12/fake_quantize/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_12/fake_quantize/SymmQuant/Abs"; "maskrcnn/tf.math.subtract_12/fake_quantize/SymmQuant/Abs" -> "maskrcnn/tf.math.subtract_12/fake_quantize/SymmQuant/add"; @@ -11241,22 +11297,26 @@ args_0_1 -> "maskrcnn/image_info/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars "maskrcnn/tf.math.subtract_15/fake_quantize/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.math.subtract_15/fake_quantize/SymmQuant/mul"; "maskrcnn/tf.math.subtract_15/fake_quantize/SymmQuant/mul" -> "maskrcnn/tf.math.subtract_15/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_15/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.stack_1/stack"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_2/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_2/Abs"; 
-"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_2/Abs" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_2/add"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_2/add/y" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_2/add"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_2/add" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_2/mul"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_2/add" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_2/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_2/mul/ReadVariableOp" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_2/mul"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_2/mul" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_2/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_2/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_1/concat"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_3/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_3/Abs"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_3/Abs" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_3/add"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_3/add/y" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_3/add"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_3/add" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_3/mul"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_3/add" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_3/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_3/mul/ReadVariableOp" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_3/mul"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_3/mul" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_3/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_3/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_1/concat"; +"maskrcnn/tf.math.subtract_10/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_10/fake_quantize/SymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.subtract_10/fake_quantize/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_10/fake_quantize/SymmQuant/Abs"; +"maskrcnn/tf.math.subtract_10/fake_quantize/SymmQuant/Abs" -> "maskrcnn/tf.math.subtract_10/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.subtract_10/fake_quantize/SymmQuant/add/y" -> "maskrcnn/tf.math.subtract_10/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.subtract_10/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.subtract_10/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.subtract_10/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.subtract_10/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_10/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_10/fake_quantize/SymmQuant/mul/ReadVariableOp"; 
+"maskrcnn/tf.math.subtract_10/fake_quantize/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.math.subtract_10/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.subtract_10/fake_quantize/SymmQuant/mul" -> "maskrcnn/tf.math.subtract_10/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_10/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_1/concat"; +"maskrcnn/tf.math.subtract_11/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_11/fake_quantize/SymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.subtract_11/fake_quantize/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_11/fake_quantize/SymmQuant/Abs"; +"maskrcnn/tf.math.subtract_11/fake_quantize/SymmQuant/Abs" -> "maskrcnn/tf.math.subtract_11/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.subtract_11/fake_quantize/SymmQuant/add/y" -> "maskrcnn/tf.math.subtract_11/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.subtract_11/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.subtract_11/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.subtract_11/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.subtract_11/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_11/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_11/fake_quantize/SymmQuant/mul/ReadVariableOp"; +"maskrcnn/tf.math.subtract_11/fake_quantize/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.math.subtract_11/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.subtract_11/fake_quantize/SymmQuant/mul" -> "maskrcnn/tf.math.subtract_11/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_11/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_1/concat"; "maskrcnn/tf.math.subtract_4/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_4/fake_quantize/SymmQuant/Abs/ReadVariableOp"; "maskrcnn/tf.math.subtract_4/fake_quantize/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_4/fake_quantize/SymmQuant/Abs"; "maskrcnn/tf.math.subtract_4/fake_quantize/SymmQuant/Abs" -> "maskrcnn/tf.math.subtract_4/fake_quantize/SymmQuant/add"; @@ -11297,22 +11357,26 @@ args_0_1 -> "maskrcnn/image_info/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars "maskrcnn/tf.math.subtract_7/fake_quantize/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.math.subtract_7/fake_quantize/SymmQuant/mul"; "maskrcnn/tf.math.subtract_7/fake_quantize/SymmQuant/mul" -> "maskrcnn/tf.math.subtract_7/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_7/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.stack/stack"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_2/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_2/Abs"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_2/Abs" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_2/add"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_2/add/y" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_2/add"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_2/add" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_2/mul"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_2/add" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_2/FakeQuantWithMinMaxVars"; 
-"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_2/mul/ReadVariableOp" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_2/mul"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_2/mul" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_2/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_2/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat/concat"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_3/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_3/Abs"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_3/Abs" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_3/add"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_3/add/y" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_3/add"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_3/add" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_3/mul"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_3/add" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_3/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_3/mul/ReadVariableOp" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_3/mul"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_3/mul" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_3/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_3/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat/concat"; +"maskrcnn/tf.math.subtract_2/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_2/fake_quantize/SymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.subtract_2/fake_quantize/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_2/fake_quantize/SymmQuant/Abs"; +"maskrcnn/tf.math.subtract_2/fake_quantize/SymmQuant/Abs" -> "maskrcnn/tf.math.subtract_2/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.subtract_2/fake_quantize/SymmQuant/add/y" -> "maskrcnn/tf.math.subtract_2/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.subtract_2/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.subtract_2/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.subtract_2/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.subtract_2/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_2/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_2/fake_quantize/SymmQuant/mul/ReadVariableOp"; +"maskrcnn/tf.math.subtract_2/fake_quantize/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.math.subtract_2/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.subtract_2/fake_quantize/SymmQuant/mul" -> "maskrcnn/tf.math.subtract_2/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_2/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat/concat"; +"maskrcnn/tf.math.subtract_3/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_3/fake_quantize/SymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.subtract_3/fake_quantize/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_3/fake_quantize/SymmQuant/Abs"; +"maskrcnn/tf.math.subtract_3/fake_quantize/SymmQuant/Abs" -> 
"maskrcnn/tf.math.subtract_3/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.subtract_3/fake_quantize/SymmQuant/add/y" -> "maskrcnn/tf.math.subtract_3/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.subtract_3/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.subtract_3/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.subtract_3/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.subtract_3/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_3/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_3/fake_quantize/SymmQuant/mul/ReadVariableOp"; +"maskrcnn/tf.math.subtract_3/fake_quantize/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.math.subtract_3/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.subtract_3/fake_quantize/SymmQuant/mul" -> "maskrcnn/tf.math.subtract_3/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_3/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat/concat"; "maskrcnn/tf.concat_4/concat/axis" -> "maskrcnn/tf.concat_4/concat"; "maskrcnn/tf.concat_4/concat" -> "maskrcnn/tf.math.minimum_14/Minimum"; "maskrcnn/tf.stack_4/stack" -> "maskrcnn/tf.math.minimum_14/Minimum"; @@ -11518,31 +11582,31 @@ args_0_1 -> "maskrcnn/image_info/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars "maskrcnn/tf.image.combined_non_max_suppression/combined_non_max_suppression/Const" -> "maskrcnn/tf.image.combined_non_max_suppression/combined_non_max_suppression/CombinedNonMaxSuppression"; "maskrcnn/tf.image.combined_non_max_suppression/combined_non_max_suppression/CombinedNonMaxSuppression/max_output_size_per_class" -> "maskrcnn/tf.image.combined_non_max_suppression/combined_non_max_suppression/CombinedNonMaxSuppression"; "maskrcnn/tf.image.combined_non_max_suppression/combined_non_max_suppression/CombinedNonMaxSuppression" -> "maskrcnn/tf.concat_6/concat"; -"maskrcnn/tf.image.combined_non_max_suppression/combined_non_max_suppression/CombinedNonMaxSuppression" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.image.combined_non_max_suppression/combined_non_max_suppression/CombinedNonMaxSuppression" -> "maskrcnn/tf.concat_5/fake_quantize_I0/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.image.combined_non_max_suppression_1/combined_non_max_suppression/iou_threshold" -> "maskrcnn/tf.image.combined_non_max_suppression_1/combined_non_max_suppression/CombinedNonMaxSuppression"; "maskrcnn/tf.image.combined_non_max_suppression_1/combined_non_max_suppression/score_threshold" -> "maskrcnn/tf.image.combined_non_max_suppression_1/combined_non_max_suppression/CombinedNonMaxSuppression"; "maskrcnn/tf.image.combined_non_max_suppression_1/combined_non_max_suppression/Const" -> "maskrcnn/tf.image.combined_non_max_suppression_1/combined_non_max_suppression/CombinedNonMaxSuppression"; "maskrcnn/tf.image.combined_non_max_suppression_1/combined_non_max_suppression/CombinedNonMaxSuppression/max_output_size_per_class" -> "maskrcnn/tf.image.combined_non_max_suppression_1/combined_non_max_suppression/CombinedNonMaxSuppression"; "maskrcnn/tf.image.combined_non_max_suppression_1/combined_non_max_suppression/CombinedNonMaxSuppression" -> "maskrcnn/tf.concat_6/concat"; -"maskrcnn/tf.image.combined_non_max_suppression_1/combined_non_max_suppression/CombinedNonMaxSuppression" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars"; 
+"maskrcnn/tf.image.combined_non_max_suppression_1/combined_non_max_suppression/CombinedNonMaxSuppression" -> "maskrcnn/tf.concat_5/fake_quantize_I1/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.image.combined_non_max_suppression_2/combined_non_max_suppression/iou_threshold" -> "maskrcnn/tf.image.combined_non_max_suppression_2/combined_non_max_suppression/CombinedNonMaxSuppression"; "maskrcnn/tf.image.combined_non_max_suppression_2/combined_non_max_suppression/score_threshold" -> "maskrcnn/tf.image.combined_non_max_suppression_2/combined_non_max_suppression/CombinedNonMaxSuppression"; "maskrcnn/tf.image.combined_non_max_suppression_2/combined_non_max_suppression/Const" -> "maskrcnn/tf.image.combined_non_max_suppression_2/combined_non_max_suppression/CombinedNonMaxSuppression"; "maskrcnn/tf.image.combined_non_max_suppression_2/combined_non_max_suppression/CombinedNonMaxSuppression/max_output_size_per_class" -> "maskrcnn/tf.image.combined_non_max_suppression_2/combined_non_max_suppression/CombinedNonMaxSuppression"; "maskrcnn/tf.image.combined_non_max_suppression_2/combined_non_max_suppression/CombinedNonMaxSuppression" -> "maskrcnn/tf.concat_6/concat"; -"maskrcnn/tf.image.combined_non_max_suppression_2/combined_non_max_suppression/CombinedNonMaxSuppression" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_2/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.image.combined_non_max_suppression_2/combined_non_max_suppression/CombinedNonMaxSuppression" -> "maskrcnn/tf.concat_5/fake_quantize_I2/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.image.combined_non_max_suppression_3/combined_non_max_suppression/iou_threshold" -> "maskrcnn/tf.image.combined_non_max_suppression_3/combined_non_max_suppression/CombinedNonMaxSuppression"; "maskrcnn/tf.image.combined_non_max_suppression_3/combined_non_max_suppression/score_threshold" -> "maskrcnn/tf.image.combined_non_max_suppression_3/combined_non_max_suppression/CombinedNonMaxSuppression"; "maskrcnn/tf.image.combined_non_max_suppression_3/combined_non_max_suppression/Const" -> "maskrcnn/tf.image.combined_non_max_suppression_3/combined_non_max_suppression/CombinedNonMaxSuppression"; "maskrcnn/tf.image.combined_non_max_suppression_3/combined_non_max_suppression/CombinedNonMaxSuppression/max_output_size_per_class" -> "maskrcnn/tf.image.combined_non_max_suppression_3/combined_non_max_suppression/CombinedNonMaxSuppression"; "maskrcnn/tf.image.combined_non_max_suppression_3/combined_non_max_suppression/CombinedNonMaxSuppression" -> "maskrcnn/tf.concat_6/concat"; -"maskrcnn/tf.image.combined_non_max_suppression_3/combined_non_max_suppression/CombinedNonMaxSuppression" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_3/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.image.combined_non_max_suppression_3/combined_non_max_suppression/CombinedNonMaxSuppression" -> "maskrcnn/tf.concat_5/fake_quantize_I3/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.image.combined_non_max_suppression_4/combined_non_max_suppression/iou_threshold" -> "maskrcnn/tf.image.combined_non_max_suppression_4/combined_non_max_suppression/CombinedNonMaxSuppression"; "maskrcnn/tf.image.combined_non_max_suppression_4/combined_non_max_suppression/score_threshold" -> "maskrcnn/tf.image.combined_non_max_suppression_4/combined_non_max_suppression/CombinedNonMaxSuppression"; "maskrcnn/tf.image.combined_non_max_suppression_4/combined_non_max_suppression/Const" -> "maskrcnn/tf.image.combined_non_max_suppression_4/combined_non_max_suppression/CombinedNonMaxSuppression"; 
"maskrcnn/tf.image.combined_non_max_suppression_4/combined_non_max_suppression/CombinedNonMaxSuppression/max_output_size_per_class" -> "maskrcnn/tf.image.combined_non_max_suppression_4/combined_non_max_suppression/CombinedNonMaxSuppression"; "maskrcnn/tf.image.combined_non_max_suppression_4/combined_non_max_suppression/CombinedNonMaxSuppression" -> "maskrcnn/tf.concat_6/concat"; -"maskrcnn/tf.image.combined_non_max_suppression_4/combined_non_max_suppression/CombinedNonMaxSuppression" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_4/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.image.combined_non_max_suppression_4/combined_non_max_suppression/CombinedNonMaxSuppression" -> "maskrcnn/tf.concat_5/fake_quantize_I4/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.concat_6/concat/axis" -> "maskrcnn/tf.concat_6/concat"; "maskrcnn/tf.concat_6/concat" -> "maskrcnn/tf.math.top_k/TopKV2"; "maskrcnn/tf.math.top_k/TopKV2/k" -> "maskrcnn/tf.math.top_k/TopKV2"; @@ -11569,56 +11633,56 @@ args_0_1 -> "maskrcnn/image_info/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars "maskrcnn/tf.ones/ones/Const" -> "maskrcnn/tf.ones/ones"; "maskrcnn/tf.ones/ones" -> "maskrcnn/tf.math.multiply_30/Mul"; "maskrcnn/tf.math.multiply_30/Mul" -> "maskrcnn/tf.stack_5/stack"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant/Abs/ReadVariableOp"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_1/Abs/ReadVariableOp"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_2/Abs/ReadVariableOp"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_3/Abs/ReadVariableOp"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_4/Abs/ReadVariableOp"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant/Abs"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant/Abs" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant/add"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant/add/y" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant/add"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant/add" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant/mul"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant/add" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant/mul/ReadVariableOp"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_1/mul/ReadVariableOp"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant/mul/ReadVariableOp/resource" -> 
"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_2/mul/ReadVariableOp"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_3/mul/ReadVariableOp"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_4/mul/ReadVariableOp"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant/mul"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant/mul" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_5/concat"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_1/Abs/ReadVariableOp" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_1/Abs"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_1/Abs" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_1/add"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_1/add/y" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_1/add"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_1/add" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_1/mul"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_1/add" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_1/mul/ReadVariableOp" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_1/mul"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_1/mul" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_5/concat"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_2/Abs/ReadVariableOp" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_2/Abs"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_2/Abs" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_2/add"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_2/add/y" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_2/add"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_2/add" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_2/mul"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_2/add" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_2/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_2/mul/ReadVariableOp" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_2/mul"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_2/mul" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_2/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_2/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_5/concat"; 
-"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_3/Abs/ReadVariableOp" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_3/Abs"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_3/Abs" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_3/add"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_3/add/y" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_3/add"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_3/add" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_3/mul"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_3/add" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_3/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_3/mul/ReadVariableOp" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_3/mul"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_3/mul" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_3/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_3/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_5/concat"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_4/Abs/ReadVariableOp" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_4/Abs"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_4/Abs" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_4/add"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_4/add/y" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_4/add"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_4/add" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_4/mul"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_4/add" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_4/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_4/mul/ReadVariableOp" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_4/mul"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_4/mul" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_4/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_4/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_5/concat"; +"maskrcnn/tf.concat_5/fake_quantize_I0/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.concat_5/fake_quantize_I0/SymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.concat_5/fake_quantize_I0/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.concat_5/fake_quantize_I0/SymmQuant/Abs"; +"maskrcnn/tf.concat_5/fake_quantize_I0/SymmQuant/Abs" -> "maskrcnn/tf.concat_5/fake_quantize_I0/SymmQuant/add"; +"maskrcnn/tf.concat_5/fake_quantize_I0/SymmQuant/add/y" -> "maskrcnn/tf.concat_5/fake_quantize_I0/SymmQuant/add"; +"maskrcnn/tf.concat_5/fake_quantize_I0/SymmQuant/add" -> "maskrcnn/tf.concat_5/fake_quantize_I0/SymmQuant/mul"; +"maskrcnn/tf.concat_5/fake_quantize_I0/SymmQuant/add" -> "maskrcnn/tf.concat_5/fake_quantize_I0/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.concat_5/fake_quantize_I0/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.concat_5/fake_quantize_I0/SymmQuant/mul/ReadVariableOp"; 
+"maskrcnn/tf.concat_5/fake_quantize_I0/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.concat_5/fake_quantize_I0/SymmQuant/mul"; +"maskrcnn/tf.concat_5/fake_quantize_I0/SymmQuant/mul" -> "maskrcnn/tf.concat_5/fake_quantize_I0/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.concat_5/fake_quantize_I0/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_5/concat"; +"maskrcnn/tf.concat_5/fake_quantize_I1/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.concat_5/fake_quantize_I1/SymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.concat_5/fake_quantize_I1/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.concat_5/fake_quantize_I1/SymmQuant/Abs"; +"maskrcnn/tf.concat_5/fake_quantize_I1/SymmQuant/Abs" -> "maskrcnn/tf.concat_5/fake_quantize_I1/SymmQuant/add"; +"maskrcnn/tf.concat_5/fake_quantize_I1/SymmQuant/add/y" -> "maskrcnn/tf.concat_5/fake_quantize_I1/SymmQuant/add"; +"maskrcnn/tf.concat_5/fake_quantize_I1/SymmQuant/add" -> "maskrcnn/tf.concat_5/fake_quantize_I1/SymmQuant/mul"; +"maskrcnn/tf.concat_5/fake_quantize_I1/SymmQuant/add" -> "maskrcnn/tf.concat_5/fake_quantize_I1/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.concat_5/fake_quantize_I1/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.concat_5/fake_quantize_I1/SymmQuant/mul/ReadVariableOp"; +"maskrcnn/tf.concat_5/fake_quantize_I1/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.concat_5/fake_quantize_I1/SymmQuant/mul"; +"maskrcnn/tf.concat_5/fake_quantize_I1/SymmQuant/mul" -> "maskrcnn/tf.concat_5/fake_quantize_I1/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.concat_5/fake_quantize_I1/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_5/concat"; +"maskrcnn/tf.concat_5/fake_quantize_I2/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.concat_5/fake_quantize_I2/SymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.concat_5/fake_quantize_I2/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.concat_5/fake_quantize_I2/SymmQuant/Abs"; +"maskrcnn/tf.concat_5/fake_quantize_I2/SymmQuant/Abs" -> "maskrcnn/tf.concat_5/fake_quantize_I2/SymmQuant/add"; +"maskrcnn/tf.concat_5/fake_quantize_I2/SymmQuant/add/y" -> "maskrcnn/tf.concat_5/fake_quantize_I2/SymmQuant/add"; +"maskrcnn/tf.concat_5/fake_quantize_I2/SymmQuant/add" -> "maskrcnn/tf.concat_5/fake_quantize_I2/SymmQuant/mul"; +"maskrcnn/tf.concat_5/fake_quantize_I2/SymmQuant/add" -> "maskrcnn/tf.concat_5/fake_quantize_I2/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.concat_5/fake_quantize_I2/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.concat_5/fake_quantize_I2/SymmQuant/mul/ReadVariableOp"; +"maskrcnn/tf.concat_5/fake_quantize_I2/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.concat_5/fake_quantize_I2/SymmQuant/mul"; +"maskrcnn/tf.concat_5/fake_quantize_I2/SymmQuant/mul" -> "maskrcnn/tf.concat_5/fake_quantize_I2/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.concat_5/fake_quantize_I2/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_5/concat"; +"maskrcnn/tf.concat_5/fake_quantize_I3/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.concat_5/fake_quantize_I3/SymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.concat_5/fake_quantize_I3/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.concat_5/fake_quantize_I3/SymmQuant/Abs"; +"maskrcnn/tf.concat_5/fake_quantize_I3/SymmQuant/Abs" -> "maskrcnn/tf.concat_5/fake_quantize_I3/SymmQuant/add"; +"maskrcnn/tf.concat_5/fake_quantize_I3/SymmQuant/add/y" -> "maskrcnn/tf.concat_5/fake_quantize_I3/SymmQuant/add"; +"maskrcnn/tf.concat_5/fake_quantize_I3/SymmQuant/add" -> "maskrcnn/tf.concat_5/fake_quantize_I3/SymmQuant/mul"; 
+"maskrcnn/tf.concat_5/fake_quantize_I3/SymmQuant/add" -> "maskrcnn/tf.concat_5/fake_quantize_I3/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.concat_5/fake_quantize_I3/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.concat_5/fake_quantize_I3/SymmQuant/mul/ReadVariableOp"; +"maskrcnn/tf.concat_5/fake_quantize_I3/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.concat_5/fake_quantize_I3/SymmQuant/mul"; +"maskrcnn/tf.concat_5/fake_quantize_I3/SymmQuant/mul" -> "maskrcnn/tf.concat_5/fake_quantize_I3/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.concat_5/fake_quantize_I3/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_5/concat"; +"maskrcnn/tf.concat_5/fake_quantize_I4/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.concat_5/fake_quantize_I4/SymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.concat_5/fake_quantize_I4/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.concat_5/fake_quantize_I4/SymmQuant/Abs"; +"maskrcnn/tf.concat_5/fake_quantize_I4/SymmQuant/Abs" -> "maskrcnn/tf.concat_5/fake_quantize_I4/SymmQuant/add"; +"maskrcnn/tf.concat_5/fake_quantize_I4/SymmQuant/add/y" -> "maskrcnn/tf.concat_5/fake_quantize_I4/SymmQuant/add"; +"maskrcnn/tf.concat_5/fake_quantize_I4/SymmQuant/add" -> "maskrcnn/tf.concat_5/fake_quantize_I4/SymmQuant/mul"; +"maskrcnn/tf.concat_5/fake_quantize_I4/SymmQuant/add" -> "maskrcnn/tf.concat_5/fake_quantize_I4/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.concat_5/fake_quantize_I4/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.concat_5/fake_quantize_I4/SymmQuant/mul/ReadVariableOp"; +"maskrcnn/tf.concat_5/fake_quantize_I4/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.concat_5/fake_quantize_I4/SymmQuant/mul"; +"maskrcnn/tf.concat_5/fake_quantize_I4/SymmQuant/mul" -> "maskrcnn/tf.concat_5/fake_quantize_I4/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.concat_5/fake_quantize_I4/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_5/concat"; "maskrcnn/tf.concat_5/concat/axis" -> "maskrcnn/tf.concat_5/concat"; "maskrcnn/tf.concat_5/concat" -> "maskrcnn/tf.compat.v1.gather_nd/GatherNd"; "maskrcnn/tf.stack_5/stack" -> "maskrcnn/tf.compat.v1.gather_nd/GatherNd"; @@ -11748,43 +11812,43 @@ args_0_1 -> "maskrcnn/image_info/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars "maskrcnn/tf.math.pow/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.math.truediv_3/truediv"; "maskrcnn/tf.expand_dims_12/ExpandDims/dim" -> "maskrcnn/tf.expand_dims_12/ExpandDims"; "maskrcnn/tf.expand_dims_12/ExpandDims" -> "maskrcnn/tf.math.truediv_1/truediv"; -"maskrcnn/tf.math.truediv_2/truediv" -> "maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.truediv_3/truediv" -> "maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.truediv_1/truediv" -> "maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant_2/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.truediv_2/truediv" -> "maskrcnn/tf.math.truediv_2/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.truediv_3/truediv" -> "maskrcnn/tf.math.truediv_3/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.truediv_1/truediv" -> "maskrcnn/tf.math.truediv_1/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_42/Sub/y" -> "maskrcnn/tf.math.subtract_42/Sub"; "maskrcnn/tf.math.subtract_42/Sub" -> "maskrcnn/tf.cast_23/Cast"; "maskrcnn/tf.math.subtract_42/Sub" -> "maskrcnn/tf.compat.v1.gather_1/GatherV2"; 
"maskrcnn/tf.math.subtract_42/Sub" -> "maskrcnn/tf.compat.v1.gather/GatherV2"; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp"; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant_1/Abs/ReadVariableOp"; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant_2/Abs/ReadVariableOp"; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant/Abs"; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant/Abs" -> "maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant/add"; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant/add/y" -> "maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant/add"; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant/add" -> "maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant/mul"; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant/add" -> "maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp"; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant_1/mul/ReadVariableOp"; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant_2/mul/ReadVariableOp"; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant/mul"; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant/mul" -> "maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.expand_dims_14/ExpandDims"; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant_1/Abs/ReadVariableOp" -> "maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant_1/Abs"; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant_1/Abs" -> "maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant_1/add"; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant_1/add/y" -> "maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant_1/add"; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant_1/add" -> "maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant_1/mul"; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant_1/add" -> "maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant_1/mul/ReadVariableOp" -> 
"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant_1/mul"; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant_1/mul" -> "maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.expand_dims_13/ExpandDims"; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant_2/Abs/ReadVariableOp" -> "maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant_2/Abs"; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant_2/Abs" -> "maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant_2/add"; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant_2/add/y" -> "maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant_2/add"; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant_2/add" -> "maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant_2/mul"; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant_2/add" -> "maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant_2/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant_2/mul/ReadVariableOp" -> "maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant_2/mul"; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant_2/mul" -> "maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant_2/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant_2/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.__operators__.getitem_27/strided_slice"; +"maskrcnn/tf.math.truediv_2/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.truediv_2/fake_quantize/SymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.truediv_2/fake_quantize/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.truediv_2/fake_quantize/SymmQuant/Abs"; +"maskrcnn/tf.math.truediv_2/fake_quantize/SymmQuant/Abs" -> "maskrcnn/tf.math.truediv_2/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.truediv_2/fake_quantize/SymmQuant/add/y" -> "maskrcnn/tf.math.truediv_2/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.truediv_2/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.truediv_2/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.truediv_2/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.truediv_2/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.truediv_2/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.math.truediv_2/fake_quantize/SymmQuant/mul/ReadVariableOp"; +"maskrcnn/tf.math.truediv_2/fake_quantize/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.math.truediv_2/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.truediv_2/fake_quantize/SymmQuant/mul" -> "maskrcnn/tf.math.truediv_2/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.truediv_2/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.expand_dims_14/ExpandDims"; +"maskrcnn/tf.math.truediv_3/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.truediv_3/fake_quantize/SymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.truediv_3/fake_quantize/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.truediv_3/fake_quantize/SymmQuant/Abs"; +"maskrcnn/tf.math.truediv_3/fake_quantize/SymmQuant/Abs" -> "maskrcnn/tf.math.truediv_3/fake_quantize/SymmQuant/add"; 
+"maskrcnn/tf.math.truediv_3/fake_quantize/SymmQuant/add/y" -> "maskrcnn/tf.math.truediv_3/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.truediv_3/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.truediv_3/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.truediv_3/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.truediv_3/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.truediv_3/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.math.truediv_3/fake_quantize/SymmQuant/mul/ReadVariableOp"; +"maskrcnn/tf.math.truediv_3/fake_quantize/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.math.truediv_3/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.truediv_3/fake_quantize/SymmQuant/mul" -> "maskrcnn/tf.math.truediv_3/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.truediv_3/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.expand_dims_13/ExpandDims"; +"maskrcnn/tf.math.truediv_1/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.truediv_1/fake_quantize/SymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.truediv_1/fake_quantize/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.truediv_1/fake_quantize/SymmQuant/Abs"; +"maskrcnn/tf.math.truediv_1/fake_quantize/SymmQuant/Abs" -> "maskrcnn/tf.math.truediv_1/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.truediv_1/fake_quantize/SymmQuant/add/y" -> "maskrcnn/tf.math.truediv_1/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.truediv_1/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.truediv_1/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.truediv_1/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.truediv_1/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.truediv_1/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.math.truediv_1/fake_quantize/SymmQuant/mul/ReadVariableOp"; +"maskrcnn/tf.math.truediv_1/fake_quantize/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.math.truediv_1/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.truediv_1/fake_quantize/SymmQuant/mul" -> "maskrcnn/tf.math.truediv_1/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.truediv_1/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.__operators__.getitem_27/strided_slice"; "maskrcnn/tf.cast_23/Cast" -> "maskrcnn/tf.cast_23/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.__operators__.getitem_27/strided_slice/stack" -> "maskrcnn/tf.__operators__.getitem_27/strided_slice"; "maskrcnn/tf.__operators__.getitem_27/strided_slice/stack_1" -> "maskrcnn/tf.__operators__.getitem_27/strided_slice"; @@ -12098,9 +12162,9 @@ args_0_1 -> "maskrcnn/image_info/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars "maskrcnn/tf.math.multiply_32/fake_quantize/SymmQuant/mul" -> "maskrcnn/tf.math.multiply_32/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.multiply_32/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.math.truediv_6/truediv"; "maskrcnn/tf.math.subtract_44/Sub/y" -> "maskrcnn/tf.math.subtract_44/Sub"; -"maskrcnn/tf.math.subtract_44/Sub" -> "maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_44/Sub" -> "maskrcnn/tf.math.subtract_44/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_43/Sub/y" -> "maskrcnn/tf.math.subtract_43/Sub"; -"maskrcnn/tf.math.subtract_43/Sub" -> "maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars"; 
+"maskrcnn/tf.math.subtract_43/Sub" -> "maskrcnn/tf.math.subtract_43/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.__operators__.getitem_54/strided_slice/stack" -> "maskrcnn/tf.__operators__.getitem_54/strided_slice"; "maskrcnn/tf.__operators__.getitem_54/strided_slice/stack_1" -> "maskrcnn/tf.__operators__.getitem_54/strided_slice"; "maskrcnn/tf.__operators__.getitem_54/strided_slice/stack_2" -> "maskrcnn/tf.__operators__.getitem_54/strided_slice"; @@ -12213,26 +12277,26 @@ args_0_1 -> "maskrcnn/image_info/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars "maskrcnn/tf.math.truediv_8/truediv" -> "maskrcnn/tf.math.truediv_8/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.truediv_6/truediv/y" -> "maskrcnn/tf.math.truediv_6/truediv"; "maskrcnn/tf.math.truediv_6/truediv" -> "maskrcnn/tf.math.truediv_6/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp"; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/SymmQuant_1/Abs/ReadVariableOp"; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/SymmQuant/Abs"; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/SymmQuant/Abs" -> "maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/SymmQuant/add"; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/SymmQuant/add/y" -> "maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/SymmQuant/add"; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/SymmQuant/add" -> "maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/SymmQuant/mul"; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/SymmQuant/add" -> "maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp"; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/SymmQuant_1/mul/ReadVariableOp"; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/SymmQuant/mul"; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/SymmQuant/mul" -> "maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.expand_dims_16/ExpandDims"; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/SymmQuant_1/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/SymmQuant_1/Abs"; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/SymmQuant_1/Abs" -> "maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/SymmQuant_1/add"; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/SymmQuant_1/add/y" -> 
"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/SymmQuant_1/add"; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/SymmQuant_1/add" -> "maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/SymmQuant_1/mul"; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/SymmQuant_1/add" -> "maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/SymmQuant_1/mul/ReadVariableOp" -> "maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/SymmQuant_1/mul"; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/SymmQuant_1/mul" -> "maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.expand_dims_15/ExpandDims"; +"maskrcnn/tf.math.subtract_44/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_44/fake_quantize/SymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.subtract_44/fake_quantize/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_44/fake_quantize/SymmQuant/Abs"; +"maskrcnn/tf.math.subtract_44/fake_quantize/SymmQuant/Abs" -> "maskrcnn/tf.math.subtract_44/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.subtract_44/fake_quantize/SymmQuant/add/y" -> "maskrcnn/tf.math.subtract_44/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.subtract_44/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.subtract_44/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.subtract_44/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.subtract_44/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_44/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_44/fake_quantize/SymmQuant/mul/ReadVariableOp"; +"maskrcnn/tf.math.subtract_44/fake_quantize/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.math.subtract_44/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.subtract_44/fake_quantize/SymmQuant/mul" -> "maskrcnn/tf.math.subtract_44/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_44/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.expand_dims_16/ExpandDims"; +"maskrcnn/tf.math.subtract_43/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_43/fake_quantize/SymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.subtract_43/fake_quantize/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_43/fake_quantize/SymmQuant/Abs"; +"maskrcnn/tf.math.subtract_43/fake_quantize/SymmQuant/Abs" -> "maskrcnn/tf.math.subtract_43/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.subtract_43/fake_quantize/SymmQuant/add/y" -> "maskrcnn/tf.math.subtract_43/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.subtract_43/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.subtract_43/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.subtract_43/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.subtract_43/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_43/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_43/fake_quantize/SymmQuant/mul/ReadVariableOp"; +"maskrcnn/tf.math.subtract_43/fake_quantize/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.math.subtract_43/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.subtract_43/fake_quantize/SymmQuant/mul" -> 
"maskrcnn/tf.math.subtract_43/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_43/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.expand_dims_15/ExpandDims"; "maskrcnn/tf.__operators__.add_53/AddV2" -> "maskrcnn/tf.__operators__.add_53/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.__operators__.add_51/AddV2" -> "maskrcnn/tf.__operators__.add_51/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.__operators__.add_49/AddV2" -> "maskrcnn/tf.__operators__.add_49/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; @@ -13241,38 +13305,34 @@ args_0_1 -> "maskrcnn/image_info/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars "maskrcnn/tf.math.multiply_57/fake_quantize/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.math.multiply_57/fake_quantize/SymmQuant/mul"; "maskrcnn/tf.math.multiply_57/fake_quantize/SymmQuant/mul" -> "maskrcnn/tf.math.multiply_57/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.multiply_57/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.math.subtract_53/Sub"; -"maskrcnn/tf.math.subtract_54/Sub" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_53/Sub" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_54/Sub" -> "maskrcnn/tf.math.subtract_54/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_53/Sub" -> "maskrcnn/tf.math.subtract_53/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.__operators__.getitem_60/strided_slice/stack" -> "maskrcnn/tf.__operators__.getitem_60/strided_slice"; "maskrcnn/tf.__operators__.getitem_60/strided_slice/stack_1" -> "maskrcnn/tf.__operators__.getitem_60/strided_slice"; "maskrcnn/tf.__operators__.getitem_60/strided_slice/stack_2" -> "maskrcnn/tf.__operators__.getitem_60/strided_slice"; "maskrcnn/tf.__operators__.getitem_60/strided_slice" -> "maskrcnn/tf.unstack_5/unstack"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_1/Abs/ReadVariableOp"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_2/Abs/ReadVariableOp"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_3/Abs/ReadVariableOp"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant/Abs"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant/Abs" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant/add"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant/add/y" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant/add"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant/add" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant/mul"; 
-"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant/add" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_1/mul/ReadVariableOp"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_2/mul/ReadVariableOp"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_3/mul/ReadVariableOp"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant/mul"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant/mul" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.__operators__.add_65/AddV2"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_13/concat"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_1/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_1/Abs"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_1/Abs" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_1/add"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_1/add/y" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_1/add"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_1/add" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_1/mul"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_1/add" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_1/mul/ReadVariableOp" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_1/mul"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_1/mul" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.__operators__.add_66/AddV2"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_13/concat"; +"maskrcnn/tf.math.subtract_53/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_53/fake_quantize/SymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.subtract_53/fake_quantize/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_53/fake_quantize/SymmQuant/Abs"; +"maskrcnn/tf.math.subtract_53/fake_quantize/SymmQuant/Abs" -> 
"maskrcnn/tf.math.subtract_53/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.subtract_53/fake_quantize/SymmQuant/add/y" -> "maskrcnn/tf.math.subtract_53/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.subtract_53/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.subtract_53/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.subtract_53/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.subtract_53/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_53/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_53/fake_quantize/SymmQuant/mul/ReadVariableOp"; +"maskrcnn/tf.math.subtract_53/fake_quantize/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.math.subtract_53/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.subtract_53/fake_quantize/SymmQuant/mul" -> "maskrcnn/tf.math.subtract_53/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_53/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.__operators__.add_65/AddV2"; +"maskrcnn/tf.math.subtract_53/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_13/concat"; +"maskrcnn/tf.math.subtract_54/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_54/fake_quantize/SymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.subtract_54/fake_quantize/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_54/fake_quantize/SymmQuant/Abs"; +"maskrcnn/tf.math.subtract_54/fake_quantize/SymmQuant/Abs" -> "maskrcnn/tf.math.subtract_54/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.subtract_54/fake_quantize/SymmQuant/add/y" -> "maskrcnn/tf.math.subtract_54/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.subtract_54/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.subtract_54/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.subtract_54/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.subtract_54/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_54/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_54/fake_quantize/SymmQuant/mul/ReadVariableOp"; +"maskrcnn/tf.math.subtract_54/fake_quantize/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.math.subtract_54/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.subtract_54/fake_quantize/SymmQuant/mul" -> "maskrcnn/tf.math.subtract_54/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_54/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.__operators__.add_66/AddV2"; +"maskrcnn/tf.math.subtract_54/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_13/concat"; "maskrcnn/tf.__operators__.add_66/AddV2" -> "maskrcnn/tf.__operators__.add_66/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.__operators__.add_65/AddV2" -> "maskrcnn/tf.__operators__.add_65/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.unstack_5/unstack" -> "maskrcnn/tf.math.subtract_60/Sub"; @@ -13308,9 +13368,9 @@ args_0_1 -> "maskrcnn/image_info/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars "maskrcnn/tf.math.subtract_57/Sub/y" -> "maskrcnn/tf.math.subtract_57/Sub"; "maskrcnn/tf.math.subtract_57/Sub" -> "maskrcnn/tf.math.subtract_57/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_56/Sub/y" -> "maskrcnn/tf.math.subtract_56/Sub"; -"maskrcnn/tf.math.subtract_56/Sub" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_3/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_56/Sub" -> 
"maskrcnn/tf.math.subtract_56/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_55/Sub/y" -> "maskrcnn/tf.math.subtract_55/Sub"; -"maskrcnn/tf.math.subtract_55/Sub" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_2/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_55/Sub" -> "maskrcnn/tf.math.subtract_55/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_57/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_57/fake_quantize/SymmQuant/Abs/ReadVariableOp"; "maskrcnn/tf.math.subtract_57/fake_quantize/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_57/fake_quantize/SymmQuant/Abs"; "maskrcnn/tf.math.subtract_57/fake_quantize/SymmQuant/Abs" -> "maskrcnn/tf.math.subtract_57/fake_quantize/SymmQuant/add"; @@ -13351,22 +13411,26 @@ args_0_1 -> "maskrcnn/image_info/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars "maskrcnn/tf.math.subtract_60/fake_quantize/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.math.subtract_60/fake_quantize/SymmQuant/mul"; "maskrcnn/tf.math.subtract_60/fake_quantize/SymmQuant/mul" -> "maskrcnn/tf.math.subtract_60/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_60/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.stack_15/stack"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_2/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_2/Abs"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_2/Abs" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_2/add"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_2/add/y" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_2/add"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_2/add" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_2/mul"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_2/add" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_2/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_2/mul/ReadVariableOp" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_2/mul"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_2/mul" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_2/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_2/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_13/concat"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_3/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_3/Abs"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_3/Abs" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_3/add"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_3/add/y" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_3/add"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_3/add" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_3/mul"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_3/add" -> 
"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_3/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_3/mul/ReadVariableOp" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_3/mul"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_3/mul" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_3/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_3/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_13/concat"; +"maskrcnn/tf.math.subtract_55/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_55/fake_quantize/SymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.subtract_55/fake_quantize/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_55/fake_quantize/SymmQuant/Abs"; +"maskrcnn/tf.math.subtract_55/fake_quantize/SymmQuant/Abs" -> "maskrcnn/tf.math.subtract_55/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.subtract_55/fake_quantize/SymmQuant/add/y" -> "maskrcnn/tf.math.subtract_55/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.subtract_55/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.subtract_55/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.subtract_55/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.subtract_55/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_55/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_55/fake_quantize/SymmQuant/mul/ReadVariableOp"; +"maskrcnn/tf.math.subtract_55/fake_quantize/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.math.subtract_55/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.subtract_55/fake_quantize/SymmQuant/mul" -> "maskrcnn/tf.math.subtract_55/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_55/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_13/concat"; +"maskrcnn/tf.math.subtract_56/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_56/fake_quantize/SymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.subtract_56/fake_quantize/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_56/fake_quantize/SymmQuant/Abs"; +"maskrcnn/tf.math.subtract_56/fake_quantize/SymmQuant/Abs" -> "maskrcnn/tf.math.subtract_56/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.subtract_56/fake_quantize/SymmQuant/add/y" -> "maskrcnn/tf.math.subtract_56/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.subtract_56/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.subtract_56/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.subtract_56/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.subtract_56/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_56/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_56/fake_quantize/SymmQuant/mul/ReadVariableOp"; +"maskrcnn/tf.math.subtract_56/fake_quantize/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.math.subtract_56/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.subtract_56/fake_quantize/SymmQuant/mul" -> "maskrcnn/tf.math.subtract_56/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_56/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_13/concat"; "maskrcnn/tf.concat_13/concat/axis" -> "maskrcnn/tf.concat_13/concat"; "maskrcnn/tf.concat_13/concat" -> "maskrcnn/tf.math.minimum_22/Minimum"; "maskrcnn/tf.stack_15/stack" -> 
"maskrcnn/tf.math.minimum_22/Minimum"; @@ -13569,43 +13633,43 @@ args_0_1 -> "maskrcnn/image_info/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars "maskrcnn/tf.math.pow_2/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.math.truediv_28/truediv"; "maskrcnn/tf.expand_dims_24/ExpandDims/dim" -> "maskrcnn/tf.expand_dims_24/ExpandDims"; "maskrcnn/tf.expand_dims_24/ExpandDims" -> "maskrcnn/tf.math.truediv_26/truediv"; -"maskrcnn/tf.math.truediv_27/truediv" -> "maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.truediv_28/truediv" -> "maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.truediv_26/truediv" -> "maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant_2/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.truediv_27/truediv" -> "maskrcnn/tf.math.truediv_27/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.truediv_28/truediv" -> "maskrcnn/tf.math.truediv_28/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.truediv_26/truediv" -> "maskrcnn/tf.math.truediv_26/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_64/Sub/y" -> "maskrcnn/tf.math.subtract_64/Sub"; "maskrcnn/tf.math.subtract_64/Sub" -> "maskrcnn/tf.cast_37/Cast"; "maskrcnn/tf.math.subtract_64/Sub" -> "maskrcnn/tf.compat.v1.gather_4/GatherV2"; "maskrcnn/tf.math.subtract_64/Sub" -> "maskrcnn/tf.compat.v1.gather_3/GatherV2"; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp"; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant_1/Abs/ReadVariableOp"; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant_2/Abs/ReadVariableOp"; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant/Abs"; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant/Abs" -> "maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant/add"; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant/add/y" -> "maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant/add"; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant/add" -> "maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant/mul"; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant/add" -> "maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp"; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant_1/mul/ReadVariableOp"; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp/resource" -> 
"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant_2/mul/ReadVariableOp"; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant/mul"; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant/mul" -> "maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.expand_dims_26/ExpandDims"; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant_1/Abs/ReadVariableOp" -> "maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant_1/Abs"; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant_1/Abs" -> "maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant_1/add"; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant_1/add/y" -> "maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant_1/add"; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant_1/add" -> "maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant_1/mul"; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant_1/add" -> "maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant_1/mul/ReadVariableOp" -> "maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant_1/mul"; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant_1/mul" -> "maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.expand_dims_25/ExpandDims"; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant_2/Abs/ReadVariableOp" -> "maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant_2/Abs"; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant_2/Abs" -> "maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant_2/add"; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant_2/add/y" -> "maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant_2/add"; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant_2/add" -> "maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant_2/mul"; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant_2/add" -> "maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant_2/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant_2/mul/ReadVariableOp" -> "maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant_2/mul"; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant_2/mul" -> "maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant_2/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant_2/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.__operators__.getitem_76/strided_slice"; +"maskrcnn/tf.math.truediv_27/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.truediv_27/fake_quantize/SymmQuant/Abs/ReadVariableOp"; 
+"maskrcnn/tf.math.truediv_27/fake_quantize/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.truediv_27/fake_quantize/SymmQuant/Abs"; +"maskrcnn/tf.math.truediv_27/fake_quantize/SymmQuant/Abs" -> "maskrcnn/tf.math.truediv_27/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.truediv_27/fake_quantize/SymmQuant/add/y" -> "maskrcnn/tf.math.truediv_27/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.truediv_27/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.truediv_27/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.truediv_27/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.truediv_27/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.truediv_27/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.math.truediv_27/fake_quantize/SymmQuant/mul/ReadVariableOp"; +"maskrcnn/tf.math.truediv_27/fake_quantize/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.math.truediv_27/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.truediv_27/fake_quantize/SymmQuant/mul" -> "maskrcnn/tf.math.truediv_27/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.truediv_27/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.expand_dims_26/ExpandDims"; +"maskrcnn/tf.math.truediv_28/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.truediv_28/fake_quantize/SymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.truediv_28/fake_quantize/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.truediv_28/fake_quantize/SymmQuant/Abs"; +"maskrcnn/tf.math.truediv_28/fake_quantize/SymmQuant/Abs" -> "maskrcnn/tf.math.truediv_28/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.truediv_28/fake_quantize/SymmQuant/add/y" -> "maskrcnn/tf.math.truediv_28/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.truediv_28/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.truediv_28/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.truediv_28/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.truediv_28/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.truediv_28/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.math.truediv_28/fake_quantize/SymmQuant/mul/ReadVariableOp"; +"maskrcnn/tf.math.truediv_28/fake_quantize/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.math.truediv_28/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.truediv_28/fake_quantize/SymmQuant/mul" -> "maskrcnn/tf.math.truediv_28/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.truediv_28/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.expand_dims_25/ExpandDims"; +"maskrcnn/tf.math.truediv_26/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.truediv_26/fake_quantize/SymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.truediv_26/fake_quantize/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.truediv_26/fake_quantize/SymmQuant/Abs"; +"maskrcnn/tf.math.truediv_26/fake_quantize/SymmQuant/Abs" -> "maskrcnn/tf.math.truediv_26/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.truediv_26/fake_quantize/SymmQuant/add/y" -> "maskrcnn/tf.math.truediv_26/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.truediv_26/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.truediv_26/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.truediv_26/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.truediv_26/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.truediv_26/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.math.truediv_26/fake_quantize/SymmQuant/mul/ReadVariableOp"; 
+"maskrcnn/tf.math.truediv_26/fake_quantize/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.math.truediv_26/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.truediv_26/fake_quantize/SymmQuant/mul" -> "maskrcnn/tf.math.truediv_26/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.truediv_26/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.__operators__.getitem_76/strided_slice"; "maskrcnn/tf.cast_37/Cast" -> "maskrcnn/tf.cast_37/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.__operators__.getitem_76/strided_slice/stack" -> "maskrcnn/tf.__operators__.getitem_76/strided_slice"; "maskrcnn/tf.__operators__.getitem_76/strided_slice/stack_1" -> "maskrcnn/tf.__operators__.getitem_76/strided_slice"; @@ -14171,9 +14235,9 @@ args_0_1 -> "maskrcnn/image_info/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars "maskrcnn/tf.math.multiply_61/fake_quantize/SymmQuant/mul" -> "maskrcnn/tf.math.multiply_61/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.multiply_61/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.math.truediv_31/truediv"; "maskrcnn/tf.math.subtract_66/Sub/y" -> "maskrcnn/tf.math.subtract_66/Sub"; -"maskrcnn/tf.math.subtract_66/Sub" -> "maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_66/Sub" -> "maskrcnn/tf.math.subtract_66/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_65/Sub/y" -> "maskrcnn/tf.math.subtract_65/Sub"; -"maskrcnn/tf.math.subtract_65/Sub" -> "maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_65/Sub" -> "maskrcnn/tf.math.subtract_65/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.__operators__.getitem_131/strided_slice/stack" -> "maskrcnn/tf.__operators__.getitem_131/strided_slice"; "maskrcnn/tf.__operators__.getitem_131/strided_slice/stack_1" -> "maskrcnn/tf.__operators__.getitem_131/strided_slice"; "maskrcnn/tf.__operators__.getitem_131/strided_slice/stack_2" -> "maskrcnn/tf.__operators__.getitem_131/strided_slice"; @@ -14398,26 +14462,26 @@ args_0_1 -> "maskrcnn/image_info/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars "maskrcnn/tf.math.truediv_33/truediv" -> "maskrcnn/tf.math.truediv_33/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.truediv_31/truediv/y" -> "maskrcnn/tf.math.truediv_31/truediv"; "maskrcnn/tf.math.truediv_31/truediv" -> "maskrcnn/tf.math.truediv_31/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp"; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/SymmQuant_1/Abs/ReadVariableOp"; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/SymmQuant/Abs"; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/SymmQuant/Abs" -> "maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/SymmQuant/add"; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/SymmQuant/add/y" -> "maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/SymmQuant/add"; 
-"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/SymmQuant/add" -> "maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/SymmQuant/mul"; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/SymmQuant/add" -> "maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp"; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/SymmQuant_1/mul/ReadVariableOp"; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/SymmQuant/mul"; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/SymmQuant/mul" -> "maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.expand_dims_28/ExpandDims"; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/SymmQuant_1/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/SymmQuant_1/Abs"; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/SymmQuant_1/Abs" -> "maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/SymmQuant_1/add"; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/SymmQuant_1/add/y" -> "maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/SymmQuant_1/add"; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/SymmQuant_1/add" -> "maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/SymmQuant_1/mul"; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/SymmQuant_1/add" -> "maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/SymmQuant_1/mul/ReadVariableOp" -> "maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/SymmQuant_1/mul"; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/SymmQuant_1/mul" -> "maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.expand_dims_27/ExpandDims"; +"maskrcnn/tf.math.subtract_66/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_66/fake_quantize/SymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.subtract_66/fake_quantize/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_66/fake_quantize/SymmQuant/Abs"; +"maskrcnn/tf.math.subtract_66/fake_quantize/SymmQuant/Abs" -> "maskrcnn/tf.math.subtract_66/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.subtract_66/fake_quantize/SymmQuant/add/y" -> "maskrcnn/tf.math.subtract_66/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.subtract_66/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.subtract_66/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.subtract_66/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.subtract_66/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_66/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" -> 
"maskrcnn/tf.math.subtract_66/fake_quantize/SymmQuant/mul/ReadVariableOp"; +"maskrcnn/tf.math.subtract_66/fake_quantize/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.math.subtract_66/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.subtract_66/fake_quantize/SymmQuant/mul" -> "maskrcnn/tf.math.subtract_66/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_66/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.expand_dims_28/ExpandDims"; +"maskrcnn/tf.math.subtract_65/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_65/fake_quantize/SymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.subtract_65/fake_quantize/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_65/fake_quantize/SymmQuant/Abs"; +"maskrcnn/tf.math.subtract_65/fake_quantize/SymmQuant/Abs" -> "maskrcnn/tf.math.subtract_65/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.subtract_65/fake_quantize/SymmQuant/add/y" -> "maskrcnn/tf.math.subtract_65/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.subtract_65/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.subtract_65/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.subtract_65/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.subtract_65/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_65/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_65/fake_quantize/SymmQuant/mul/ReadVariableOp"; +"maskrcnn/tf.math.subtract_65/fake_quantize/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.math.subtract_65/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.subtract_65/fake_quantize/SymmQuant/mul" -> "maskrcnn/tf.math.subtract_65/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_65/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.expand_dims_27/ExpandDims"; "maskrcnn/tf.__operators__.add_96/AddV2" -> "maskrcnn/tf.__operators__.add_96/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.__operators__.add_94/AddV2" -> "maskrcnn/tf.__operators__.add_94/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.__operators__.add_92/AddV2" -> "maskrcnn/tf.__operators__.add_92/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; diff --git a/tests/tensorflow/data/reference_graphs/2.5/quantized/w_sym_t_a_sym_t/retinanet.pb b/tests/tensorflow/data/reference_graphs/2.5/quantized/w_sym_t_a_sym_t/retinanet.pb index 377201c73eb..66376909675 100644 --- a/tests/tensorflow/data/reference_graphs/2.5/quantized/w_sym_t_a_sym_t/retinanet.pb +++ b/tests/tensorflow/data/reference_graphs/2.5/quantized/w_sym_t_a_sym_t/retinanet.pb @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:862c5bdf3b4acbdca4ee40ea160ea47410416979d10bfdfb676c4cbeeb977c7a -size 1243980 +oid sha256:4ee6a8475d7e0de82c8811d0fc2d9050353e072b1d100bb389f79e855aae9f63 +size 1247312 diff --git a/tests/tensorflow/data/reference_graphs/2.5/quantized/w_sym_t_a_sym_t/retinanet_quantize_outputs.pb b/tests/tensorflow/data/reference_graphs/2.5/quantized/w_sym_t_a_sym_t/retinanet_quantize_outputs.pb index cfc22c2c6b9..ddf39474249 100644 --- a/tests/tensorflow/data/reference_graphs/2.5/quantized/w_sym_t_a_sym_t/retinanet_quantize_outputs.pb +++ b/tests/tensorflow/data/reference_graphs/2.5/quantized/w_sym_t_a_sym_t/retinanet_quantize_outputs.pb @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:0aad4bbbcfde49ca6ceab1f01555d2e2cb2c184d9d9db0b7ccb7c51ece0c2734 -size 1275599 +oid 
sha256:0b56d155ebcbb37fa214d655eeffccbe313c6b878850a7eb490077ca82cf2984 +size 1278931 diff --git a/tests/tensorflow/data/reference_graphs/2.8/quantized/w_sym_ch_a_asym_t/mask_rcnn.dot b/tests/tensorflow/data/reference_graphs/2.8/quantized/w_sym_ch_a_asym_t/mask_rcnn.dot index 216b752f7f1..ae44e57b36f 100644 --- a/tests/tensorflow/data/reference_graphs/2.8/quantized/w_sym_ch_a_asym_t/mask_rcnn.dot +++ b/tests/tensorflow/data/reference_graphs/2.8/quantized/w_sym_ch_a_asym_t/mask_rcnn.dot @@ -2205,48 +2205,56 @@ args_0_1 [op=Placeholder]; "maskrcnn/p2-bn/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; "maskrcnn/p2-bn/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; "maskrcnn/p2-bn/FusedBatchNormV3" [op=FusedBatchNormV3]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant/Abs" [op=Abs]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant/add/y" [op=Const]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant/add" [op=AddV2]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant/add_1" [op=AddV2]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_1/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_1/Abs" [op=Abs]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_1/add/y" [op=Const]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_1/add" [op=AddV2]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_1/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_1/add_1" [op=AddV2]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_2/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_2/Abs" [op=Abs]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_2/add/y" [op=Const]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_2/add" [op=AddV2]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_2/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_2/add_1" [op=AddV2]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_3/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_3/Abs" [op=Abs]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_3/add/y" [op=Const]; 
-"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_3/add" [op=AddV2]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_3/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_3/add_1" [op=AddV2]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_4/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_4/Abs" [op=Abs]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_4/add/y" [op=Const]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_4/add" [op=AddV2]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_4/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_4/add_1" [op=AddV2]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_4/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_4/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/p6-bn/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/p6-bn/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/p6-bn/fake_quantize/AsymmQuant/Abs" [op=Abs]; +"maskrcnn/p6-bn/fake_quantize/AsymmQuant/add/y" [op=Const]; +"maskrcnn/p6-bn/fake_quantize/AsymmQuant/add" [op=AddV2]; +"maskrcnn/p6-bn/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/p6-bn/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/p6-bn/fake_quantize/AsymmQuant/add_1" [op=AddV2]; +"maskrcnn/p6-bn/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/p6-bn/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/p5-bn/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/p5-bn/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/p5-bn/fake_quantize/AsymmQuant/Abs" [op=Abs]; +"maskrcnn/p5-bn/fake_quantize/AsymmQuant/add/y" [op=Const]; +"maskrcnn/p5-bn/fake_quantize/AsymmQuant/add" [op=AddV2]; +"maskrcnn/p5-bn/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/p5-bn/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/p5-bn/fake_quantize/AsymmQuant/add_1" [op=AddV2]; +"maskrcnn/p5-bn/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/p5-bn/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/p4-bn/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/p4-bn/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/p4-bn/fake_quantize/AsymmQuant/Abs" [op=Abs]; +"maskrcnn/p4-bn/fake_quantize/AsymmQuant/add/y" [op=Const]; +"maskrcnn/p4-bn/fake_quantize/AsymmQuant/add" [op=AddV2]; +"maskrcnn/p4-bn/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/p4-bn/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/p4-bn/fake_quantize/AsymmQuant/add_1" [op=AddV2]; +"maskrcnn/p4-bn/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; 
+"maskrcnn/p4-bn/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/p3-bn/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/p3-bn/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/p3-bn/fake_quantize/AsymmQuant/Abs" [op=Abs]; +"maskrcnn/p3-bn/fake_quantize/AsymmQuant/add/y" [op=Const]; +"maskrcnn/p3-bn/fake_quantize/AsymmQuant/add" [op=AddV2]; +"maskrcnn/p3-bn/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/p3-bn/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/p3-bn/fake_quantize/AsymmQuant/add_1" [op=AddV2]; +"maskrcnn/p3-bn/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/p3-bn/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/p2-bn/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/p2-bn/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/p2-bn/fake_quantize/AsymmQuant/Abs" [op=Abs]; +"maskrcnn/p2-bn/fake_quantize/AsymmQuant/add/y" [op=Const]; +"maskrcnn/p2-bn/fake_quantize/AsymmQuant/add" [op=AddV2]; +"maskrcnn/p2-bn/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/p2-bn/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/p2-bn/fake_quantize/AsymmQuant/add_1" [op=AddV2]; +"maskrcnn/p2-bn/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/p2-bn/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; "maskrcnn/rpn/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; "maskrcnn/rpn/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; "maskrcnn/rpn/SymmQuant/Abs" [op=Abs]; @@ -2420,56 +2428,56 @@ args_0_1 [op=Placeholder]; "maskrcnn/rpn-box/Conv2D_4" [op=Conv2D]; "maskrcnn/rpn-box/BiasAdd_4/ReadVariableOp" [op=ReadVariableOp]; "maskrcnn/rpn-box/BiasAdd_4" [op=BiasAdd]; -"maskrcnn/rpn-box/fake_quantize_4/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/rpn-box/fake_quantize_4/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/rpn-box/fake_quantize_4/AsymmQuant/Abs" [op=Abs]; -"maskrcnn/rpn-box/fake_quantize_4/AsymmQuant/add/y" [op=Const]; -"maskrcnn/rpn-box/fake_quantize_4/AsymmQuant/add" [op=AddV2]; -"maskrcnn/rpn-box/fake_quantize_4/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/rpn-box/fake_quantize_4/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/rpn-box/fake_quantize_4/AsymmQuant/add_1" [op=AddV2]; -"maskrcnn/rpn-box/fake_quantize_4/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/rpn-box/fake_quantize_4/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/rpn-box/fake_quantize_3/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/rpn-box/fake_quantize_3/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/rpn-box/fake_quantize_3/AsymmQuant/Abs" [op=Abs]; -"maskrcnn/rpn-box/fake_quantize_3/AsymmQuant/add/y" [op=Const]; -"maskrcnn/rpn-box/fake_quantize_3/AsymmQuant/add" [op=AddV2]; -"maskrcnn/rpn-box/fake_quantize_3/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/rpn-box/fake_quantize_3/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/rpn-box/fake_quantize_3/AsymmQuant/add_1" [op=AddV2]; -"maskrcnn/rpn-box/fake_quantize_3/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; 
-"maskrcnn/rpn-box/fake_quantize_3/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/rpn-box/fake_quantize_2/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/rpn-box/fake_quantize_2/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/rpn-box/fake_quantize_2/AsymmQuant/Abs" [op=Abs]; -"maskrcnn/rpn-box/fake_quantize_2/AsymmQuant/add/y" [op=Const]; -"maskrcnn/rpn-box/fake_quantize_2/AsymmQuant/add" [op=AddV2]; -"maskrcnn/rpn-box/fake_quantize_2/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/rpn-box/fake_quantize_2/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/rpn-box/fake_quantize_2/AsymmQuant/add_1" [op=AddV2]; -"maskrcnn/rpn-box/fake_quantize_2/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/rpn-box/fake_quantize_2/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/rpn-box/fake_quantize_1/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/rpn-box/fake_quantize_1/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/rpn-box/fake_quantize_1/AsymmQuant/Abs" [op=Abs]; -"maskrcnn/rpn-box/fake_quantize_1/AsymmQuant/add/y" [op=Const]; -"maskrcnn/rpn-box/fake_quantize_1/AsymmQuant/add" [op=AddV2]; -"maskrcnn/rpn-box/fake_quantize_1/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/rpn-box/fake_quantize_1/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/rpn-box/fake_quantize_1/AsymmQuant/add_1" [op=AddV2]; -"maskrcnn/rpn-box/fake_quantize_1/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/rpn-box/fake_quantize_1/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/rpn-box/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/rpn-box/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/rpn-box/fake_quantize/AsymmQuant/Abs" [op=Abs]; -"maskrcnn/rpn-box/fake_quantize/AsymmQuant/add/y" [op=Const]; -"maskrcnn/rpn-box/fake_quantize/AsymmQuant/add" [op=AddV2]; -"maskrcnn/rpn-box/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/rpn-box/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/rpn-box/fake_quantize/AsymmQuant/add_1" [op=AddV2]; -"maskrcnn/rpn-box/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/rpn-box/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.reshape_9/fake_quantize_I0/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.reshape_9/fake_quantize_I0/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.reshape_9/fake_quantize_I0/AsymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.reshape_9/fake_quantize_I0/AsymmQuant/add/y" [op=Const]; +"maskrcnn/tf.reshape_9/fake_quantize_I0/AsymmQuant/add" [op=AddV2]; +"maskrcnn/tf.reshape_9/fake_quantize_I0/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.reshape_9/fake_quantize_I0/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.reshape_9/fake_quantize_I0/AsymmQuant/add_1" [op=AddV2]; +"maskrcnn/tf.reshape_9/fake_quantize_I0/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.reshape_9/fake_quantize_I0/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.reshape_7/fake_quantize_I0/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; 
+"maskrcnn/tf.reshape_7/fake_quantize_I0/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.reshape_7/fake_quantize_I0/AsymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.reshape_7/fake_quantize_I0/AsymmQuant/add/y" [op=Const]; +"maskrcnn/tf.reshape_7/fake_quantize_I0/AsymmQuant/add" [op=AddV2]; +"maskrcnn/tf.reshape_7/fake_quantize_I0/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.reshape_7/fake_quantize_I0/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.reshape_7/fake_quantize_I0/AsymmQuant/add_1" [op=AddV2]; +"maskrcnn/tf.reshape_7/fake_quantize_I0/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.reshape_7/fake_quantize_I0/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.reshape_5/fake_quantize_I0/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.reshape_5/fake_quantize_I0/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.reshape_5/fake_quantize_I0/AsymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.reshape_5/fake_quantize_I0/AsymmQuant/add/y" [op=Const]; +"maskrcnn/tf.reshape_5/fake_quantize_I0/AsymmQuant/add" [op=AddV2]; +"maskrcnn/tf.reshape_5/fake_quantize_I0/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.reshape_5/fake_quantize_I0/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.reshape_5/fake_quantize_I0/AsymmQuant/add_1" [op=AddV2]; +"maskrcnn/tf.reshape_5/fake_quantize_I0/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.reshape_5/fake_quantize_I0/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.reshape_3/fake_quantize_I0/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.reshape_3/fake_quantize_I0/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.reshape_3/fake_quantize_I0/AsymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.reshape_3/fake_quantize_I0/AsymmQuant/add/y" [op=Const]; +"maskrcnn/tf.reshape_3/fake_quantize_I0/AsymmQuant/add" [op=AddV2]; +"maskrcnn/tf.reshape_3/fake_quantize_I0/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.reshape_3/fake_quantize_I0/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.reshape_3/fake_quantize_I0/AsymmQuant/add_1" [op=AddV2]; +"maskrcnn/tf.reshape_3/fake_quantize_I0/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.reshape_3/fake_quantize_I0/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.reshape_1/fake_quantize_I0/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.reshape_1/fake_quantize_I0/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.reshape_1/fake_quantize_I0/AsymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.reshape_1/fake_quantize_I0/AsymmQuant/add/y" [op=Const]; +"maskrcnn/tf.reshape_1/fake_quantize_I0/AsymmQuant/add" [op=AddV2]; +"maskrcnn/tf.reshape_1/fake_quantize_I0/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.reshape_1/fake_quantize_I0/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.reshape_1/fake_quantize_I0/AsymmQuant/add_1" [op=AddV2]; +"maskrcnn/tf.reshape_1/fake_quantize_I0/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.reshape_1/fake_quantize_I0/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; "maskrcnn/tf.reshape_9/Reshape/shape" [op=Const]; "maskrcnn/tf.reshape_9/Reshape" [op=Reshape]; "maskrcnn/tf.reshape_7/Reshape/shape" [op=Const]; @@ -3186,96 
+3194,106 @@ args_0_1 [op=Placeholder]; "maskrcnn/tf.math.subtract/Sub" [op=Sub]; "maskrcnn/tf.expand_dims/ExpandDims/dim" [op=Const]; "maskrcnn/tf.expand_dims/ExpandDims" [op=ExpandDims]; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant/Abs" [op=Abs]; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant/add/y" [op=Const]; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant/add" [op=AddV2]; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant/add_1" [op=AddV2]; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_1/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_1/Abs" [op=Abs]; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_1/add/y" [op=Const]; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_1/add" [op=AddV2]; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_1/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_1/add_1" [op=AddV2]; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant/Abs" [op=Abs]; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant/add/y" [op=Const]; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant/add" [op=AddV2]; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant/add_1" [op=AddV2]; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_1/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_1/Abs" [op=Abs]; 
-"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_1/add/y" [op=Const]; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_1/add" [op=AddV2]; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_1/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_1/add_1" [op=AddV2]; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant/Abs" [op=Abs]; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant/add/y" [op=Const]; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant/add" [op=AddV2]; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant/add_1" [op=AddV2]; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_1/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_1/Abs" [op=Abs]; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_1/add/y" [op=Const]; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_1/add" [op=AddV2]; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_1/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_1/add_1" [op=AddV2]; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant/Abs" [op=Abs]; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant/add/y" [op=Const]; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant/add" [op=AddV2]; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant/add_1" [op=AddV2]; 
-"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_1/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_1/Abs" [op=Abs]; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_1/add/y" [op=Const]; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_1/add" [op=AddV2]; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_1/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_1/add_1" [op=AddV2]; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant/Abs" [op=Abs]; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant/add/y" [op=Const]; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant/add" [op=AddV2]; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant/add_1" [op=AddV2]; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_1/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_1/Abs" [op=Abs]; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_1/add/y" [op=Const]; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_1/add" [op=AddV2]; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_1/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_1/add_1" [op=AddV2]; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.math.subtract_32/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_32/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_32/fake_quantize/AsymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.subtract_32/fake_quantize/AsymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.subtract_32/fake_quantize/AsymmQuant/add" [op=AddV2]; 
+"maskrcnn/tf.math.subtract_32/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_32/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_32/fake_quantize/AsymmQuant/add_1" [op=AddV2]; +"maskrcnn/tf.math.subtract_32/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_32/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.math.subtract_33/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_33/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_33/fake_quantize/AsymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.subtract_33/fake_quantize/AsymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.subtract_33/fake_quantize/AsymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.subtract_33/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_33/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_33/fake_quantize/AsymmQuant/add_1" [op=AddV2]; +"maskrcnn/tf.math.subtract_33/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_33/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.math.subtract_24/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_24/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_24/fake_quantize/AsymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.subtract_24/fake_quantize/AsymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.subtract_24/fake_quantize/AsymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.subtract_24/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_24/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_24/fake_quantize/AsymmQuant/add_1" [op=AddV2]; +"maskrcnn/tf.math.subtract_24/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_24/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.math.subtract_25/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_25/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_25/fake_quantize/AsymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.subtract_25/fake_quantize/AsymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.subtract_25/fake_quantize/AsymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.subtract_25/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_25/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_25/fake_quantize/AsymmQuant/add_1" [op=AddV2]; +"maskrcnn/tf.math.subtract_25/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_25/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.math.subtract_16/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_16/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_16/fake_quantize/AsymmQuant/Abs" [op=Abs]; 
+"maskrcnn/tf.math.subtract_16/fake_quantize/AsymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.subtract_16/fake_quantize/AsymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.subtract_16/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_16/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_16/fake_quantize/AsymmQuant/add_1" [op=AddV2]; +"maskrcnn/tf.math.subtract_16/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_16/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.math.subtract_17/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_17/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_17/fake_quantize/AsymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.subtract_17/fake_quantize/AsymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.subtract_17/fake_quantize/AsymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.subtract_17/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_17/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_17/fake_quantize/AsymmQuant/add_1" [op=AddV2]; +"maskrcnn/tf.math.subtract_17/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_17/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.math.subtract_8/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_8/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_8/fake_quantize/AsymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.subtract_8/fake_quantize/AsymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.subtract_8/fake_quantize/AsymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.subtract_8/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_8/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_8/fake_quantize/AsymmQuant/add_1" [op=AddV2]; +"maskrcnn/tf.math.subtract_8/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_8/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.math.subtract_9/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_9/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_9/fake_quantize/AsymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.subtract_9/fake_quantize/AsymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.subtract_9/fake_quantize/AsymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.subtract_9/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_9/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_9/fake_quantize/AsymmQuant/add_1" [op=AddV2]; +"maskrcnn/tf.math.subtract_9/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_9/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.math.subtract/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract/fake_quantize/AsymmQuant/Abs/ReadVariableOp" 
[op=ReadVariableOp]; +"maskrcnn/tf.math.subtract/fake_quantize/AsymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.subtract/fake_quantize/AsymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.subtract/fake_quantize/AsymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.subtract/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract/fake_quantize/AsymmQuant/add_1" [op=AddV2]; +"maskrcnn/tf.math.subtract/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.math.subtract_1/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_1/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_1/fake_quantize/AsymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.subtract_1/fake_quantize/AsymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.subtract_1/fake_quantize/AsymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.subtract_1/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_1/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_1/fake_quantize/AsymmQuant/add_1" [op=AddV2]; +"maskrcnn/tf.math.subtract_1/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_1/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; "maskrcnn/tf.__operators__.add_38/AddV2" [op=AddV2]; "maskrcnn/tf.__operators__.add_37/AddV2" [op=AddV2]; "maskrcnn/tf.__operators__.add_34/AddV2" [op=AddV2]; @@ -3491,22 +3509,26 @@ args_0_1 [op=Placeholder]; "maskrcnn/tf.math.subtract_39/fake_quantize/AsymmQuant/add_1" [op=AddV2]; "maskrcnn/tf.math.subtract_39/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; "maskrcnn/tf.math.subtract_39/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_2/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_2/Abs" [op=Abs]; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_2/add/y" [op=Const]; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_2/add" [op=AddV2]; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_2/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_2/add_1" [op=AddV2]; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_3/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_3/Abs" [op=Abs]; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_3/add/y" [op=Const]; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_3/add" [op=AddV2]; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_3/ReadVariableOp" [op=ReadVariableOp]; 
-"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_3/add_1" [op=AddV2]; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.math.subtract_34/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_34/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_34/fake_quantize/AsymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.subtract_34/fake_quantize/AsymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.subtract_34/fake_quantize/AsymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.subtract_34/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_34/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_34/fake_quantize/AsymmQuant/add_1" [op=AddV2]; +"maskrcnn/tf.math.subtract_34/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_34/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.math.subtract_35/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_35/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_35/fake_quantize/AsymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.subtract_35/fake_quantize/AsymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.subtract_35/fake_quantize/AsymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.subtract_35/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_35/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_35/fake_quantize/AsymmQuant/add_1" [op=AddV2]; +"maskrcnn/tf.math.subtract_35/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_35/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; "maskrcnn/tf.math.subtract_28/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; "maskrcnn/tf.math.subtract_28/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; "maskrcnn/tf.math.subtract_28/fake_quantize/AsymmQuant/Abs" [op=Abs]; @@ -3547,22 +3569,26 @@ args_0_1 [op=Placeholder]; "maskrcnn/tf.math.subtract_31/fake_quantize/AsymmQuant/add_1" [op=AddV2]; "maskrcnn/tf.math.subtract_31/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; "maskrcnn/tf.math.subtract_31/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_2/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_2/Abs" [op=Abs]; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_2/add/y" [op=Const]; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_2/add" [op=AddV2]; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_2/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_2/add_1" [op=AddV2]; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars/ReadVariableOp" 
[op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_3/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_3/Abs" [op=Abs]; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_3/add/y" [op=Const]; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_3/add" [op=AddV2]; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_3/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_3/add_1" [op=AddV2]; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.math.subtract_26/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_26/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_26/fake_quantize/AsymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.subtract_26/fake_quantize/AsymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.subtract_26/fake_quantize/AsymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.subtract_26/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_26/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_26/fake_quantize/AsymmQuant/add_1" [op=AddV2]; +"maskrcnn/tf.math.subtract_26/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_26/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.math.subtract_27/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_27/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_27/fake_quantize/AsymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.subtract_27/fake_quantize/AsymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.subtract_27/fake_quantize/AsymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.subtract_27/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_27/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_27/fake_quantize/AsymmQuant/add_1" [op=AddV2]; +"maskrcnn/tf.math.subtract_27/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_27/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; "maskrcnn/tf.math.subtract_20/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; "maskrcnn/tf.math.subtract_20/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; "maskrcnn/tf.math.subtract_20/fake_quantize/AsymmQuant/Abs" [op=Abs]; @@ -3603,22 +3629,26 @@ args_0_1 [op=Placeholder]; "maskrcnn/tf.math.subtract_23/fake_quantize/AsymmQuant/add_1" [op=AddV2]; "maskrcnn/tf.math.subtract_23/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; "maskrcnn/tf.math.subtract_23/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; 
-"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_2/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_2/Abs" [op=Abs]; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_2/add/y" [op=Const]; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_2/add" [op=AddV2]; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_2/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_2/add_1" [op=AddV2]; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_3/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_3/Abs" [op=Abs]; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_3/add/y" [op=Const]; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_3/add" [op=AddV2]; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_3/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_3/add_1" [op=AddV2]; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.math.subtract_18/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_18/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_18/fake_quantize/AsymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.subtract_18/fake_quantize/AsymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.subtract_18/fake_quantize/AsymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.subtract_18/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_18/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_18/fake_quantize/AsymmQuant/add_1" [op=AddV2]; +"maskrcnn/tf.math.subtract_18/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_18/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.math.subtract_19/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_19/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_19/fake_quantize/AsymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.subtract_19/fake_quantize/AsymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.subtract_19/fake_quantize/AsymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.subtract_19/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_19/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_19/fake_quantize/AsymmQuant/add_1" [op=AddV2]; +"maskrcnn/tf.math.subtract_19/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; 
+"maskrcnn/tf.math.subtract_19/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; "maskrcnn/tf.math.subtract_12/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; "maskrcnn/tf.math.subtract_12/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; "maskrcnn/tf.math.subtract_12/fake_quantize/AsymmQuant/Abs" [op=Abs]; @@ -3659,22 +3689,26 @@ args_0_1 [op=Placeholder]; "maskrcnn/tf.math.subtract_15/fake_quantize/AsymmQuant/add_1" [op=AddV2]; "maskrcnn/tf.math.subtract_15/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; "maskrcnn/tf.math.subtract_15/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_2/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_2/Abs" [op=Abs]; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_2/add/y" [op=Const]; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_2/add" [op=AddV2]; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_2/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_2/add_1" [op=AddV2]; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_3/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_3/Abs" [op=Abs]; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_3/add/y" [op=Const]; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_3/add" [op=AddV2]; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_3/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_3/add_1" [op=AddV2]; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.math.subtract_10/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_10/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_10/fake_quantize/AsymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.subtract_10/fake_quantize/AsymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.subtract_10/fake_quantize/AsymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.subtract_10/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_10/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_10/fake_quantize/AsymmQuant/add_1" [op=AddV2]; +"maskrcnn/tf.math.subtract_10/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_10/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.math.subtract_11/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; 
+"maskrcnn/tf.math.subtract_11/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_11/fake_quantize/AsymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.subtract_11/fake_quantize/AsymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.subtract_11/fake_quantize/AsymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.subtract_11/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_11/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_11/fake_quantize/AsymmQuant/add_1" [op=AddV2]; +"maskrcnn/tf.math.subtract_11/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_11/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; "maskrcnn/tf.math.subtract_4/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; "maskrcnn/tf.math.subtract_4/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; "maskrcnn/tf.math.subtract_4/fake_quantize/AsymmQuant/Abs" [op=Abs]; @@ -3715,22 +3749,26 @@ args_0_1 [op=Placeholder]; "maskrcnn/tf.math.subtract_7/fake_quantize/AsymmQuant/add_1" [op=AddV2]; "maskrcnn/tf.math.subtract_7/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; "maskrcnn/tf.math.subtract_7/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_2/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_2/Abs" [op=Abs]; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_2/add/y" [op=Const]; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_2/add" [op=AddV2]; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_2/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_2/add_1" [op=AddV2]; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_3/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_3/Abs" [op=Abs]; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_3/add/y" [op=Const]; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_3/add" [op=AddV2]; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_3/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_3/add_1" [op=AddV2]; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.math.subtract_2/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_2/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_2/fake_quantize/AsymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.subtract_2/fake_quantize/AsymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.subtract_2/fake_quantize/AsymmQuant/add" 
[op=AddV2]; +"maskrcnn/tf.math.subtract_2/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_2/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_2/fake_quantize/AsymmQuant/add_1" [op=AddV2]; +"maskrcnn/tf.math.subtract_2/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_2/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.math.subtract_3/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_3/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_3/fake_quantize/AsymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.subtract_3/fake_quantize/AsymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.subtract_3/fake_quantize/AsymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.subtract_3/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_3/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_3/fake_quantize/AsymmQuant/add_1" [op=AddV2]; +"maskrcnn/tf.math.subtract_3/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_3/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; "maskrcnn/tf.concat_4/concat/axis" [op=Const]; "maskrcnn/tf.concat_4/concat" [op=ConcatV2]; "maskrcnn/tf.stack_4/stack" [op=Pack]; @@ -3952,48 +3990,56 @@ args_0_1 [op=Placeholder]; "maskrcnn/tf.ones/ones/Const" [op=Const]; "maskrcnn/tf.ones/ones" [op=Fill]; "maskrcnn/tf.math.multiply_30/Mul" [op=Mul]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant/Abs" [op=Abs]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant/add/y" [op=Const]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant/add" [op=AddV2]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant/add_1" [op=AddV2]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_1/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_1/Abs" [op=Abs]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_1/add/y" [op=Const]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_1/add" [op=AddV2]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_1/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_1/add_1" [op=AddV2]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; 
-"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_2/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_2/Abs" [op=Abs]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_2/add/y" [op=Const]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_2/add" [op=AddV2]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_2/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_2/add_1" [op=AddV2]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_3/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_3/Abs" [op=Abs]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_3/add/y" [op=Const]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_3/add" [op=AddV2]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_3/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_3/add_1" [op=AddV2]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_4/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_4/Abs" [op=Abs]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_4/add/y" [op=Const]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_4/add" [op=AddV2]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_4/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_4/add_1" [op=AddV2]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_4/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_4/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.concat_5/fake_quantize_I0/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.concat_5/fake_quantize_I0/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.concat_5/fake_quantize_I0/AsymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.concat_5/fake_quantize_I0/AsymmQuant/add/y" [op=Const]; +"maskrcnn/tf.concat_5/fake_quantize_I0/AsymmQuant/add" [op=AddV2]; +"maskrcnn/tf.concat_5/fake_quantize_I0/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.concat_5/fake_quantize_I0/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.concat_5/fake_quantize_I0/AsymmQuant/add_1" [op=AddV2]; +"maskrcnn/tf.concat_5/fake_quantize_I0/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.concat_5/fake_quantize_I0/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; 
+"maskrcnn/tf.concat_5/fake_quantize_I1/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.concat_5/fake_quantize_I1/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.concat_5/fake_quantize_I1/AsymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.concat_5/fake_quantize_I1/AsymmQuant/add/y" [op=Const]; +"maskrcnn/tf.concat_5/fake_quantize_I1/AsymmQuant/add" [op=AddV2]; +"maskrcnn/tf.concat_5/fake_quantize_I1/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.concat_5/fake_quantize_I1/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.concat_5/fake_quantize_I1/AsymmQuant/add_1" [op=AddV2]; +"maskrcnn/tf.concat_5/fake_quantize_I1/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.concat_5/fake_quantize_I1/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.concat_5/fake_quantize_I2/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.concat_5/fake_quantize_I2/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.concat_5/fake_quantize_I2/AsymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.concat_5/fake_quantize_I2/AsymmQuant/add/y" [op=Const]; +"maskrcnn/tf.concat_5/fake_quantize_I2/AsymmQuant/add" [op=AddV2]; +"maskrcnn/tf.concat_5/fake_quantize_I2/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.concat_5/fake_quantize_I2/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.concat_5/fake_quantize_I2/AsymmQuant/add_1" [op=AddV2]; +"maskrcnn/tf.concat_5/fake_quantize_I2/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.concat_5/fake_quantize_I2/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.concat_5/fake_quantize_I3/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.concat_5/fake_quantize_I3/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.concat_5/fake_quantize_I3/AsymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.concat_5/fake_quantize_I3/AsymmQuant/add/y" [op=Const]; +"maskrcnn/tf.concat_5/fake_quantize_I3/AsymmQuant/add" [op=AddV2]; +"maskrcnn/tf.concat_5/fake_quantize_I3/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.concat_5/fake_quantize_I3/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.concat_5/fake_quantize_I3/AsymmQuant/add_1" [op=AddV2]; +"maskrcnn/tf.concat_5/fake_quantize_I3/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.concat_5/fake_quantize_I3/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.concat_5/fake_quantize_I4/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.concat_5/fake_quantize_I4/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.concat_5/fake_quantize_I4/AsymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.concat_5/fake_quantize_I4/AsymmQuant/add/y" [op=Const]; +"maskrcnn/tf.concat_5/fake_quantize_I4/AsymmQuant/add" [op=AddV2]; +"maskrcnn/tf.concat_5/fake_quantize_I4/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.concat_5/fake_quantize_I4/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.concat_5/fake_quantize_I4/AsymmQuant/add_1" [op=AddV2]; +"maskrcnn/tf.concat_5/fake_quantize_I4/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.concat_5/fake_quantize_I4/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; "maskrcnn/tf.concat_5/concat/axis" [op=Const]; "maskrcnn/tf.concat_5/concat" [op=ConcatV2]; 
"maskrcnn/tf.stack_5/stack" [op=Pack]; @@ -4118,32 +4164,36 @@ args_0_1 [op=Placeholder]; "maskrcnn/tf.math.truediv_1/truediv" [op=RealDiv]; "maskrcnn/tf.math.subtract_42/Sub/y" [op=Const]; "maskrcnn/tf.math.subtract_42/Sub" [op=Sub]; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant/Abs" [op=Abs]; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant/add/y" [op=Const]; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant/add" [op=AddV2]; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant/add_1" [op=AddV2]; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant_1/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant_1/Abs" [op=Abs]; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant_1/add/y" [op=Const]; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant_1/add" [op=AddV2]; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant_1/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant_1/add_1" [op=AddV2]; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant_2/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant_2/Abs" [op=Abs]; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant_2/add/y" [op=Const]; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant_2/add" [op=AddV2]; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant_2/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant_2/add_1" [op=AddV2]; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.math.truediv_2/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.truediv_2/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.truediv_2/fake_quantize/AsymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.truediv_2/fake_quantize/AsymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.truediv_2/fake_quantize/AsymmQuant/add" [op=AddV2]; 
+"maskrcnn/tf.math.truediv_2/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.truediv_2/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.truediv_2/fake_quantize/AsymmQuant/add_1" [op=AddV2]; +"maskrcnn/tf.math.truediv_2/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.truediv_2/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.math.truediv_3/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.truediv_3/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.truediv_3/fake_quantize/AsymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.truediv_3/fake_quantize/AsymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.truediv_3/fake_quantize/AsymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.truediv_3/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.truediv_3/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.truediv_3/fake_quantize/AsymmQuant/add_1" [op=AddV2]; +"maskrcnn/tf.math.truediv_3/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.truediv_3/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.math.truediv_1/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.truediv_1/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.truediv_1/fake_quantize/AsymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.truediv_1/fake_quantize/AsymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.truediv_1/fake_quantize/AsymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.truediv_1/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.truediv_1/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.truediv_1/fake_quantize/AsymmQuant/add_1" [op=AddV2]; +"maskrcnn/tf.math.truediv_1/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.truediv_1/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; "maskrcnn/tf.cast_23/Cast" [op=Cast]; "maskrcnn/tf.__operators__.getitem_27/strided_slice/stack" [op=Const]; "maskrcnn/tf.__operators__.getitem_27/strided_slice/stack_1" [op=Const]; @@ -4544,31 +4594,34 @@ args_0_1 [op=Placeholder]; "maskrcnn/tf.math.truediv_8/truediv" [op=RealDiv]; "maskrcnn/tf.math.truediv_6/truediv/y" [op=Const]; "maskrcnn/tf.math.truediv_6/truediv" [op=RealDiv]; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/AsymmQuant/Abs" [op=Abs]; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/AsymmQuant/add/y" [op=Const]; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/AsymmQuant/add" [op=AddV2]; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/AsymmQuant/add_1" [op=AddV2]; 
-"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/AsymmQuant_1/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/AsymmQuant_1/Abs" [op=Abs]; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/AsymmQuant_1/add/y" [op=Const]; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/AsymmQuant_1/add" [op=AddV2]; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/AsymmQuant_1/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/AsymmQuant_1/add_1" [op=AddV2]; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.__operators__.add_53/AddV2" [op=AddV2]; -"maskrcnn/tf.__operators__.add_51/AddV2" [op=AddV2]; -"maskrcnn/tf.__operators__.add_49/AddV2" [op=AddV2]; -"maskrcnn/tf.__operators__.add_47/AddV2" [op=AddV2]; -"maskrcnn/tf.__operators__.add_45/AddV2" [op=AddV2]; -"maskrcnn/tf.__operators__.add_43/AddV2" [op=AddV2]; +"maskrcnn/tf.math.subtract_44/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_44/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_44/fake_quantize/AsymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.subtract_44/fake_quantize/AsymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.subtract_44/fake_quantize/AsymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.subtract_44/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_44/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_44/fake_quantize/AsymmQuant/add_1" [op=AddV2]; +"maskrcnn/tf.math.subtract_44/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_44/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.math.subtract_43/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_43/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_43/fake_quantize/AsymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.subtract_43/fake_quantize/AsymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.subtract_43/fake_quantize/AsymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.subtract_43/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_43/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_43/fake_quantize/AsymmQuant/add_1" [op=AddV2]; +"maskrcnn/tf.math.subtract_43/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_43/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; "maskrcnn/tf.__operators__.add_41/AddV2" [op=AddV2]; +"maskrcnn/tf.__operators__.add_43/AddV2" [op=AddV2]; +"maskrcnn/tf.__operators__.add_45/AddV2" [op=AddV2]; +"maskrcnn/tf.__operators__.add_47/AddV2" [op=AddV2]; 
+"maskrcnn/tf.__operators__.add_49/AddV2" [op=AddV2]; +"maskrcnn/tf.__operators__.add_51/AddV2" [op=AddV2]; +"maskrcnn/tf.__operators__.add_53/AddV2" [op=AddV2]; +"maskrcnn/tf.stack_7/stack" [op=Pack]; "maskrcnn/tf.__operators__.getitem_52/strided_slice/stack" [op=Const]; "maskrcnn/tf.__operators__.getitem_52/strided_slice/stack_1" [op=Const]; "maskrcnn/tf.__operators__.getitem_52/strided_slice/stack_2" [op=Const]; @@ -4671,164 +4724,43 @@ args_0_1 [op=Placeholder]; "maskrcnn/tf.expand_dims_15/ExpandDims" [op=ExpandDims]; "maskrcnn/tf.expand_dims_16/ExpandDims/dim" [op=Const]; "maskrcnn/tf.expand_dims_16/ExpandDims" [op=ExpandDims]; -"maskrcnn/tf.__operators__.add_41/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.__operators__.add_41/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_41/fake_quantize/AsymmQuant/Abs" [op=Abs]; -"maskrcnn/tf.__operators__.add_41/fake_quantize/AsymmQuant/add/y" [op=Const]; -"maskrcnn/tf.__operators__.add_41/fake_quantize/AsymmQuant/add" [op=AddV2]; -"maskrcnn/tf.__operators__.add_41/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.__operators__.add_41/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_41/fake_quantize/AsymmQuant/add_1" [op=AddV2]; -"maskrcnn/tf.__operators__.add_41/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_41/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.__operators__.add_43/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.__operators__.add_43/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_43/fake_quantize/AsymmQuant/Abs" [op=Abs]; -"maskrcnn/tf.__operators__.add_43/fake_quantize/AsymmQuant/add/y" [op=Const]; -"maskrcnn/tf.__operators__.add_43/fake_quantize/AsymmQuant/add" [op=AddV2]; -"maskrcnn/tf.__operators__.add_43/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.__operators__.add_43/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_43/fake_quantize/AsymmQuant/add_1" [op=AddV2]; -"maskrcnn/tf.__operators__.add_43/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_43/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.__operators__.add_45/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.__operators__.add_45/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_45/fake_quantize/AsymmQuant/Abs" [op=Abs]; -"maskrcnn/tf.__operators__.add_45/fake_quantize/AsymmQuant/add/y" [op=Const]; -"maskrcnn/tf.__operators__.add_45/fake_quantize/AsymmQuant/add" [op=AddV2]; -"maskrcnn/tf.__operators__.add_45/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.__operators__.add_45/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_45/fake_quantize/AsymmQuant/add_1" [op=AddV2]; -"maskrcnn/tf.__operators__.add_45/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_45/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; 
-"maskrcnn/tf.__operators__.add_47/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.__operators__.add_47/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_47/fake_quantize/AsymmQuant/Abs" [op=Abs]; -"maskrcnn/tf.__operators__.add_47/fake_quantize/AsymmQuant/add/y" [op=Const]; -"maskrcnn/tf.__operators__.add_47/fake_quantize/AsymmQuant/add" [op=AddV2]; -"maskrcnn/tf.__operators__.add_47/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.__operators__.add_47/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_47/fake_quantize/AsymmQuant/add_1" [op=AddV2]; -"maskrcnn/tf.__operators__.add_47/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_47/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.__operators__.add_49/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.__operators__.add_49/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_49/fake_quantize/AsymmQuant/Abs" [op=Abs]; -"maskrcnn/tf.__operators__.add_49/fake_quantize/AsymmQuant/add/y" [op=Const]; -"maskrcnn/tf.__operators__.add_49/fake_quantize/AsymmQuant/add" [op=AddV2]; -"maskrcnn/tf.__operators__.add_49/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.__operators__.add_49/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_49/fake_quantize/AsymmQuant/add_1" [op=AddV2]; -"maskrcnn/tf.__operators__.add_49/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_49/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.__operators__.add_51/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.__operators__.add_51/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_51/fake_quantize/AsymmQuant/Abs" [op=Abs]; -"maskrcnn/tf.__operators__.add_51/fake_quantize/AsymmQuant/add/y" [op=Const]; -"maskrcnn/tf.__operators__.add_51/fake_quantize/AsymmQuant/add" [op=AddV2]; -"maskrcnn/tf.__operators__.add_51/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.__operators__.add_51/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_51/fake_quantize/AsymmQuant/add_1" [op=AddV2]; -"maskrcnn/tf.__operators__.add_51/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_51/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.__operators__.add_53/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.__operators__.add_53/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_53/fake_quantize/AsymmQuant/Abs" [op=Abs]; -"maskrcnn/tf.__operators__.add_53/fake_quantize/AsymmQuant/add/y" [op=Const]; -"maskrcnn/tf.__operators__.add_53/fake_quantize/AsymmQuant/add" [op=AddV2]; -"maskrcnn/tf.__operators__.add_53/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.__operators__.add_53/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; 
-"maskrcnn/tf.__operators__.add_53/fake_quantize/AsymmQuant/add_1" [op=AddV2]; -"maskrcnn/tf.__operators__.add_53/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_53/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.stack_7/stack" [op=Pack]; -"maskrcnn/tf.__operators__.add_52/AddV2" [op=AddV2]; -"maskrcnn/tf.__operators__.add_50/AddV2" [op=AddV2]; -"maskrcnn/tf.__operators__.add_48/AddV2" [op=AddV2]; -"maskrcnn/tf.__operators__.add_46/AddV2" [op=AddV2]; -"maskrcnn/tf.__operators__.add_44/AddV2" [op=AddV2]; -"maskrcnn/tf.__operators__.add_42/AddV2" [op=AddV2]; "maskrcnn/tf.__operators__.add_40/AddV2" [op=AddV2]; +"maskrcnn/tf.__operators__.add_42/AddV2" [op=AddV2]; +"maskrcnn/tf.__operators__.add_44/AddV2" [op=AddV2]; +"maskrcnn/tf.__operators__.add_46/AddV2" [op=AddV2]; +"maskrcnn/tf.__operators__.add_48/AddV2" [op=AddV2]; +"maskrcnn/tf.__operators__.add_50/AddV2" [op=AddV2]; +"maskrcnn/tf.__operators__.add_52/AddV2" [op=AddV2]; "maskrcnn/tf.concat_12/concat/axis" [op=Const]; "maskrcnn/tf.concat_12/concat" [op=ConcatV2]; -"maskrcnn/tf.__operators__.add_40/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.__operators__.add_40/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_40/fake_quantize/AsymmQuant/Abs" [op=Abs]; -"maskrcnn/tf.__operators__.add_40/fake_quantize/AsymmQuant/add/y" [op=Const]; -"maskrcnn/tf.__operators__.add_40/fake_quantize/AsymmQuant/add" [op=AddV2]; -"maskrcnn/tf.__operators__.add_40/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.__operators__.add_40/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_40/fake_quantize/AsymmQuant/add_1" [op=AddV2]; -"maskrcnn/tf.__operators__.add_40/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_40/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.__operators__.add_42/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.__operators__.add_42/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_42/fake_quantize/AsymmQuant/Abs" [op=Abs]; -"maskrcnn/tf.__operators__.add_42/fake_quantize/AsymmQuant/add/y" [op=Const]; -"maskrcnn/tf.__operators__.add_42/fake_quantize/AsymmQuant/add" [op=AddV2]; -"maskrcnn/tf.__operators__.add_42/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.__operators__.add_42/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_42/fake_quantize/AsymmQuant/add_1" [op=AddV2]; -"maskrcnn/tf.__operators__.add_42/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_42/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.__operators__.add_44/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.__operators__.add_44/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_44/fake_quantize/AsymmQuant/Abs" [op=Abs]; -"maskrcnn/tf.__operators__.add_44/fake_quantize/AsymmQuant/add/y" [op=Const]; -"maskrcnn/tf.__operators__.add_44/fake_quantize/AsymmQuant/add" [op=AddV2]; 
-"maskrcnn/tf.__operators__.add_44/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.__operators__.add_44/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_44/fake_quantize/AsymmQuant/add_1" [op=AddV2]; -"maskrcnn/tf.__operators__.add_44/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_44/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.__operators__.add_46/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.__operators__.add_46/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_46/fake_quantize/AsymmQuant/Abs" [op=Abs]; -"maskrcnn/tf.__operators__.add_46/fake_quantize/AsymmQuant/add/y" [op=Const]; -"maskrcnn/tf.__operators__.add_46/fake_quantize/AsymmQuant/add" [op=AddV2]; -"maskrcnn/tf.__operators__.add_46/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.__operators__.add_46/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_46/fake_quantize/AsymmQuant/add_1" [op=AddV2]; -"maskrcnn/tf.__operators__.add_46/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_46/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.__operators__.add_48/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.__operators__.add_48/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_48/fake_quantize/AsymmQuant/Abs" [op=Abs]; -"maskrcnn/tf.__operators__.add_48/fake_quantize/AsymmQuant/add/y" [op=Const]; -"maskrcnn/tf.__operators__.add_48/fake_quantize/AsymmQuant/add" [op=AddV2]; -"maskrcnn/tf.__operators__.add_48/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.__operators__.add_48/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_48/fake_quantize/AsymmQuant/add_1" [op=AddV2]; -"maskrcnn/tf.__operators__.add_48/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_48/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.__operators__.add_50/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.__operators__.add_50/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_50/fake_quantize/AsymmQuant/Abs" [op=Abs]; -"maskrcnn/tf.__operators__.add_50/fake_quantize/AsymmQuant/add/y" [op=Const]; -"maskrcnn/tf.__operators__.add_50/fake_quantize/AsymmQuant/add" [op=AddV2]; -"maskrcnn/tf.__operators__.add_50/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.__operators__.add_50/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_50/fake_quantize/AsymmQuant/add_1" [op=AddV2]; -"maskrcnn/tf.__operators__.add_50/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_50/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.__operators__.add_52/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; 
-"maskrcnn/tf.__operators__.add_52/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_52/fake_quantize/AsymmQuant/Abs" [op=Abs]; -"maskrcnn/tf.__operators__.add_52/fake_quantize/AsymmQuant/add/y" [op=Const]; -"maskrcnn/tf.__operators__.add_52/fake_quantize/AsymmQuant/add" [op=AddV2]; -"maskrcnn/tf.__operators__.add_52/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.__operators__.add_52/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_52/fake_quantize/AsymmQuant/add_1" [op=AddV2]; -"maskrcnn/tf.__operators__.add_52/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_52/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; "maskrcnn/tf.math.floor/Floor" [op=Floor]; "maskrcnn/tf.stack_6/stack" [op=Pack]; +"maskrcnn/tf.math.floor/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.floor/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.floor/fake_quantize/AsymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.floor/fake_quantize/AsymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.floor/fake_quantize/AsymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.floor/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.floor/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.floor/fake_quantize/AsymmQuant/add_1" [op=AddV2]; +"maskrcnn/tf.math.floor/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.floor/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.math.floor_1/Floor" [op=Floor]; "maskrcnn/tf.__operators__.getitem_58/strided_slice/stack" [op=Const]; "maskrcnn/tf.__operators__.getitem_58/strided_slice/stack_1" [op=Const]; "maskrcnn/tf.__operators__.getitem_58/strided_slice/stack_2" [op=Const]; "maskrcnn/tf.__operators__.getitem_58/strided_slice" [op=StridedSlice]; "maskrcnn/tf.math.maximum_7/Maximum" [op=Maximum]; -"maskrcnn/tf.math.floor_1/Floor" [op=Floor]; +"maskrcnn/tf.math.floor_1/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.floor_1/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.floor_1/fake_quantize/AsymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.floor_1/fake_quantize/AsymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.floor_1/fake_quantize/AsymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.floor_1/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.floor_1/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.floor_1/fake_quantize/AsymmQuant/add_1" [op=AddV2]; +"maskrcnn/tf.math.floor_1/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.floor_1/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; "maskrcnn/tf.math.maximum_7/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; "maskrcnn/tf.math.maximum_7/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; "maskrcnn/tf.math.maximum_7/fake_quantize/AsymmQuant/Abs" [op=Abs]; @@ -4876,6 +4808,16 @@ args_0_1 [op=Placeholder]; "maskrcnn/tf.__operators__.getitem_59/strided_slice" [op=StridedSlice]; "maskrcnn/tf.__operators__.add_55/y" [op=Const]; 
"maskrcnn/tf.__operators__.add_55/AddV2" [op=AddV2]; +"maskrcnn/tf.math.subtract_46/fake_quantize_I0/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_46/fake_quantize_I0/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_46/fake_quantize_I0/AsymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.subtract_46/fake_quantize_I0/AsymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.subtract_46/fake_quantize_I0/AsymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.subtract_46/fake_quantize_I0/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_46/fake_quantize_I0/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_46/fake_quantize_I0/AsymmQuant/add_1" [op=AddV2]; +"maskrcnn/tf.math.subtract_46/fake_quantize_I0/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_46/fake_quantize_I0/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; "maskrcnn/tf.math.subtract_46/fake_quantize_I1/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; "maskrcnn/tf.math.subtract_46/fake_quantize_I1/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; "maskrcnn/tf.math.subtract_46/fake_quantize_I1/AsymmQuant/Abs" [op=Abs]; @@ -4886,6 +4828,16 @@ args_0_1 [op=Placeholder]; "maskrcnn/tf.math.subtract_46/fake_quantize_I1/AsymmQuant/add_1" [op=AddV2]; "maskrcnn/tf.math.subtract_46/fake_quantize_I1/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; "maskrcnn/tf.math.subtract_46/fake_quantize_I1/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.math.subtract_45/fake_quantize_I0/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_45/fake_quantize_I0/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_45/fake_quantize_I0/AsymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.subtract_45/fake_quantize_I0/AsymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.subtract_45/fake_quantize_I0/AsymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.subtract_45/fake_quantize_I0/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_45/fake_quantize_I0/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_45/fake_quantize_I0/AsymmQuant/add_1" [op=AddV2]; +"maskrcnn/tf.math.subtract_45/fake_quantize_I0/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_45/fake_quantize_I0/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; "maskrcnn/tf.math.subtract_45/fake_quantize_I1/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; "maskrcnn/tf.math.subtract_45/fake_quantize_I1/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; "maskrcnn/tf.math.subtract_45/fake_quantize_I1/AsymmQuant/Abs" [op=Abs]; @@ -5214,16 +5166,16 @@ args_0_1 [op=Placeholder]; "maskrcnn/box-predict/BiasAdd" [op=BiasAdd]; "maskrcnn/tf.math.subtract_49/Sub/y" [op=Const]; "maskrcnn/tf.math.subtract_49/Sub" [op=Sub]; -"maskrcnn/box-predict/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/box-predict/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/box-predict/fake_quantize/AsymmQuant/Abs" [op=Abs]; -"maskrcnn/box-predict/fake_quantize/AsymmQuant/add/y" [op=Const]; -"maskrcnn/box-predict/fake_quantize/AsymmQuant/add" [op=AddV2]; -"maskrcnn/box-predict/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; 
-"maskrcnn/box-predict/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/box-predict/fake_quantize/AsymmQuant/add_1" [op=AddV2]; -"maskrcnn/box-predict/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/box-predict/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.reshape_35/fake_quantize_I0/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.reshape_35/fake_quantize_I0/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.reshape_35/fake_quantize_I0/AsymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.reshape_35/fake_quantize_I0/AsymmQuant/add/y" [op=Const]; +"maskrcnn/tf.reshape_35/fake_quantize_I0/AsymmQuant/add" [op=AddV2]; +"maskrcnn/tf.reshape_35/fake_quantize_I0/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.reshape_35/fake_quantize_I0/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.reshape_35/fake_quantize_I0/AsymmQuant/add_1" [op=AddV2]; +"maskrcnn/tf.reshape_35/fake_quantize_I0/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.reshape_35/fake_quantize_I0/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; "maskrcnn/tf.stack_12/stack" [op=Pack]; "maskrcnn/tf.math.multiply_50/Mul" [op=Mul]; "maskrcnn/tf.expand_dims_23/ExpandDims/dim" [op=Const]; @@ -5533,24 +5485,26 @@ args_0_1 [op=Placeholder]; "maskrcnn/tf.__operators__.getitem_60/strided_slice/stack_1" [op=Const]; "maskrcnn/tf.__operators__.getitem_60/strided_slice/stack_2" [op=Const]; "maskrcnn/tf.__operators__.getitem_60/strided_slice" [op=StridedSlice]; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant/Abs" [op=Abs]; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant/add/y" [op=Const]; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant/add" [op=AddV2]; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant/add_1" [op=AddV2]; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_1/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_1/Abs" [op=Abs]; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_1/add/y" [op=Const]; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_1/add" [op=AddV2]; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_1/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_1/add_1" [op=AddV2]; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; 
-"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.math.subtract_53/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_53/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_53/fake_quantize/AsymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.subtract_53/fake_quantize/AsymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.subtract_53/fake_quantize/AsymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.subtract_53/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_53/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_53/fake_quantize/AsymmQuant/add_1" [op=AddV2]; +"maskrcnn/tf.math.subtract_53/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_53/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.math.subtract_54/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_54/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_54/fake_quantize/AsymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.subtract_54/fake_quantize/AsymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.subtract_54/fake_quantize/AsymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.subtract_54/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_54/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_54/fake_quantize/AsymmQuant/add_1" [op=AddV2]; +"maskrcnn/tf.math.subtract_54/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_54/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; "maskrcnn/tf.__operators__.add_66/AddV2" [op=AddV2]; "maskrcnn/tf.__operators__.add_65/AddV2" [op=AddV2]; "maskrcnn/tf.unstack_5/unstack" [op=Unpack]; @@ -5626,22 +5580,26 @@ args_0_1 [op=Placeholder]; "maskrcnn/tf.math.subtract_60/fake_quantize/AsymmQuant/add_1" [op=AddV2]; "maskrcnn/tf.math.subtract_60/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; "maskrcnn/tf.math.subtract_60/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_2/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_2/Abs" [op=Abs]; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_2/add/y" [op=Const]; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_2/add" [op=AddV2]; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_2/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_2/add_1" [op=AddV2]; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_3/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_3/Abs" 
[op=Abs]; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_3/add/y" [op=Const]; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_3/add" [op=AddV2]; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_3/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_3/add_1" [op=AddV2]; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.math.subtract_55/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_55/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_55/fake_quantize/AsymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.subtract_55/fake_quantize/AsymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.subtract_55/fake_quantize/AsymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.subtract_55/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_55/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_55/fake_quantize/AsymmQuant/add_1" [op=AddV2]; +"maskrcnn/tf.math.subtract_55/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_55/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.math.subtract_56/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_56/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_56/fake_quantize/AsymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.subtract_56/fake_quantize/AsymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.subtract_56/fake_quantize/AsymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.subtract_56/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_56/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_56/fake_quantize/AsymmQuant/add_1" [op=AddV2]; +"maskrcnn/tf.math.subtract_56/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_56/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; "maskrcnn/tf.concat_13/concat/axis" [op=Const]; "maskrcnn/tf.concat_13/concat" [op=ConcatV2]; "maskrcnn/tf.stack_15/stack" [op=Pack]; @@ -5834,32 +5792,36 @@ args_0_1 [op=Placeholder]; "maskrcnn/tf.math.truediv_26/truediv" [op=RealDiv]; "maskrcnn/tf.math.subtract_64/Sub/y" [op=Const]; "maskrcnn/tf.math.subtract_64/Sub" [op=Sub]; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant/Abs" [op=Abs]; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant/add/y" [op=Const]; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant/add" [op=AddV2]; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; 
-"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant/add_1" [op=AddV2]; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant_1/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant_1/Abs" [op=Abs]; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant_1/add/y" [op=Const]; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant_1/add" [op=AddV2]; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant_1/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant_1/add_1" [op=AddV2]; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant_2/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant_2/Abs" [op=Abs]; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant_2/add/y" [op=Const]; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant_2/add" [op=AddV2]; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant_2/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant_2/add_1" [op=AddV2]; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.math.truediv_27/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.truediv_27/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.truediv_27/fake_quantize/AsymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.truediv_27/fake_quantize/AsymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.truediv_27/fake_quantize/AsymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.truediv_27/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.truediv_27/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.truediv_27/fake_quantize/AsymmQuant/add_1" [op=AddV2]; +"maskrcnn/tf.math.truediv_27/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.truediv_27/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.math.truediv_28/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.truediv_28/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.truediv_28/fake_quantize/AsymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.truediv_28/fake_quantize/AsymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.truediv_28/fake_quantize/AsymmQuant/add" [op=AddV2]; 
+"maskrcnn/tf.math.truediv_28/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.truediv_28/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.truediv_28/fake_quantize/AsymmQuant/add_1" [op=AddV2]; +"maskrcnn/tf.math.truediv_28/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.truediv_28/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.math.truediv_26/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.truediv_26/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.truediv_26/fake_quantize/AsymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.truediv_26/fake_quantize/AsymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.truediv_26/fake_quantize/AsymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.truediv_26/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.truediv_26/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.truediv_26/fake_quantize/AsymmQuant/add_1" [op=AddV2]; +"maskrcnn/tf.math.truediv_26/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.truediv_26/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; "maskrcnn/tf.cast_37/Cast" [op=Cast]; "maskrcnn/tf.__operators__.getitem_76/strided_slice/stack" [op=Const]; "maskrcnn/tf.__operators__.getitem_76/strided_slice/stack_1" [op=Const]; @@ -6596,38 +6558,41 @@ args_0_1 [op=Placeholder]; "maskrcnn/tf.math.truediv_33/truediv" [op=RealDiv]; "maskrcnn/tf.math.truediv_31/truediv/y" [op=Const]; "maskrcnn/tf.math.truediv_31/truediv" [op=RealDiv]; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/AsymmQuant/Abs" [op=Abs]; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/AsymmQuant/add/y" [op=Const]; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/AsymmQuant/add" [op=AddV2]; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/AsymmQuant/add_1" [op=AddV2]; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/AsymmQuant_1/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/AsymmQuant_1/Abs" [op=Abs]; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/AsymmQuant_1/add/y" [op=Const]; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/AsymmQuant_1/add" [op=AddV2]; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/AsymmQuant_1/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/AsymmQuant_1/add_1" [op=AddV2]; 
-"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.__operators__.add_96/AddV2" [op=AddV2]; -"maskrcnn/tf.__operators__.add_94/AddV2" [op=AddV2]; -"maskrcnn/tf.__operators__.add_92/AddV2" [op=AddV2]; -"maskrcnn/tf.__operators__.add_90/AddV2" [op=AddV2]; -"maskrcnn/tf.__operators__.add_88/AddV2" [op=AddV2]; -"maskrcnn/tf.__operators__.add_86/AddV2" [op=AddV2]; -"maskrcnn/tf.__operators__.add_84/AddV2" [op=AddV2]; -"maskrcnn/tf.__operators__.add_82/AddV2" [op=AddV2]; -"maskrcnn/tf.__operators__.add_80/AddV2" [op=AddV2]; -"maskrcnn/tf.__operators__.add_78/AddV2" [op=AddV2]; -"maskrcnn/tf.__operators__.add_76/AddV2" [op=AddV2]; -"maskrcnn/tf.__operators__.add_74/AddV2" [op=AddV2]; -"maskrcnn/tf.__operators__.add_72/AddV2" [op=AddV2]; +"maskrcnn/tf.math.subtract_66/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_66/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_66/fake_quantize/AsymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.subtract_66/fake_quantize/AsymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.subtract_66/fake_quantize/AsymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.subtract_66/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_66/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_66/fake_quantize/AsymmQuant/add_1" [op=AddV2]; +"maskrcnn/tf.math.subtract_66/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_66/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.math.subtract_65/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_65/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_65/fake_quantize/AsymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.subtract_65/fake_quantize/AsymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.subtract_65/fake_quantize/AsymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.subtract_65/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_65/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_65/fake_quantize/AsymmQuant/add_1" [op=AddV2]; +"maskrcnn/tf.math.subtract_65/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_65/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; "maskrcnn/tf.__operators__.add_70/AddV2" [op=AddV2]; +"maskrcnn/tf.__operators__.add_72/AddV2" [op=AddV2]; +"maskrcnn/tf.__operators__.add_74/AddV2" [op=AddV2]; +"maskrcnn/tf.__operators__.add_76/AddV2" [op=AddV2]; +"maskrcnn/tf.__operators__.add_78/AddV2" [op=AddV2]; +"maskrcnn/tf.__operators__.add_80/AddV2" [op=AddV2]; +"maskrcnn/tf.__operators__.add_82/AddV2" [op=AddV2]; +"maskrcnn/tf.__operators__.add_84/AddV2" [op=AddV2]; +"maskrcnn/tf.__operators__.add_86/AddV2" [op=AddV2]; +"maskrcnn/tf.__operators__.add_88/AddV2" [op=AddV2]; +"maskrcnn/tf.__operators__.add_90/AddV2" [op=AddV2]; +"maskrcnn/tf.__operators__.add_92/AddV2" [op=AddV2]; +"maskrcnn/tf.__operators__.add_94/AddV2" [op=AddV2]; +"maskrcnn/tf.__operators__.add_96/AddV2" 
[op=AddV2]; +"maskrcnn/tf.stack_18/stack" [op=Pack]; "maskrcnn/tf.__operators__.getitem_129/strided_slice/stack" [op=Const]; "maskrcnn/tf.__operators__.getitem_129/strided_slice/stack_1" [op=Const]; "maskrcnn/tf.__operators__.getitem_129/strided_slice/stack_2" [op=Const]; @@ -6828,311 +6793,50 @@ args_0_1 [op=Placeholder]; "maskrcnn/tf.expand_dims_27/ExpandDims" [op=ExpandDims]; "maskrcnn/tf.expand_dims_28/ExpandDims/dim" [op=Const]; "maskrcnn/tf.expand_dims_28/ExpandDims" [op=ExpandDims]; -"maskrcnn/tf.__operators__.add_70/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.__operators__.add_70/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_70/fake_quantize/AsymmQuant/Abs" [op=Abs]; -"maskrcnn/tf.__operators__.add_70/fake_quantize/AsymmQuant/add/y" [op=Const]; -"maskrcnn/tf.__operators__.add_70/fake_quantize/AsymmQuant/add" [op=AddV2]; -"maskrcnn/tf.__operators__.add_70/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.__operators__.add_70/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_70/fake_quantize/AsymmQuant/add_1" [op=AddV2]; -"maskrcnn/tf.__operators__.add_70/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_70/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.__operators__.add_72/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.__operators__.add_72/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_72/fake_quantize/AsymmQuant/Abs" [op=Abs]; -"maskrcnn/tf.__operators__.add_72/fake_quantize/AsymmQuant/add/y" [op=Const]; -"maskrcnn/tf.__operators__.add_72/fake_quantize/AsymmQuant/add" [op=AddV2]; -"maskrcnn/tf.__operators__.add_72/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.__operators__.add_72/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_72/fake_quantize/AsymmQuant/add_1" [op=AddV2]; -"maskrcnn/tf.__operators__.add_72/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_72/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.__operators__.add_74/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.__operators__.add_74/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_74/fake_quantize/AsymmQuant/Abs" [op=Abs]; -"maskrcnn/tf.__operators__.add_74/fake_quantize/AsymmQuant/add/y" [op=Const]; -"maskrcnn/tf.__operators__.add_74/fake_quantize/AsymmQuant/add" [op=AddV2]; -"maskrcnn/tf.__operators__.add_74/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.__operators__.add_74/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_74/fake_quantize/AsymmQuant/add_1" [op=AddV2]; -"maskrcnn/tf.__operators__.add_74/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_74/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.__operators__.add_76/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.__operators__.add_76/fake_quantize/AsymmQuant/Abs/ReadVariableOp" 
[op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_76/fake_quantize/AsymmQuant/Abs" [op=Abs]; -"maskrcnn/tf.__operators__.add_76/fake_quantize/AsymmQuant/add/y" [op=Const]; -"maskrcnn/tf.__operators__.add_76/fake_quantize/AsymmQuant/add" [op=AddV2]; -"maskrcnn/tf.__operators__.add_76/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.__operators__.add_76/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_76/fake_quantize/AsymmQuant/add_1" [op=AddV2]; -"maskrcnn/tf.__operators__.add_76/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_76/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.__operators__.add_78/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.__operators__.add_78/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_78/fake_quantize/AsymmQuant/Abs" [op=Abs]; -"maskrcnn/tf.__operators__.add_78/fake_quantize/AsymmQuant/add/y" [op=Const]; -"maskrcnn/tf.__operators__.add_78/fake_quantize/AsymmQuant/add" [op=AddV2]; -"maskrcnn/tf.__operators__.add_78/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.__operators__.add_78/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_78/fake_quantize/AsymmQuant/add_1" [op=AddV2]; -"maskrcnn/tf.__operators__.add_78/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_78/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.__operators__.add_80/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.__operators__.add_80/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_80/fake_quantize/AsymmQuant/Abs" [op=Abs]; -"maskrcnn/tf.__operators__.add_80/fake_quantize/AsymmQuant/add/y" [op=Const]; -"maskrcnn/tf.__operators__.add_80/fake_quantize/AsymmQuant/add" [op=AddV2]; -"maskrcnn/tf.__operators__.add_80/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.__operators__.add_80/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_80/fake_quantize/AsymmQuant/add_1" [op=AddV2]; -"maskrcnn/tf.__operators__.add_80/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_80/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.__operators__.add_82/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.__operators__.add_82/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_82/fake_quantize/AsymmQuant/Abs" [op=Abs]; -"maskrcnn/tf.__operators__.add_82/fake_quantize/AsymmQuant/add/y" [op=Const]; -"maskrcnn/tf.__operators__.add_82/fake_quantize/AsymmQuant/add" [op=AddV2]; -"maskrcnn/tf.__operators__.add_82/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.__operators__.add_82/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_82/fake_quantize/AsymmQuant/add_1" [op=AddV2]; -"maskrcnn/tf.__operators__.add_82/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; 
-"maskrcnn/tf.__operators__.add_82/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.__operators__.add_84/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.__operators__.add_84/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_84/fake_quantize/AsymmQuant/Abs" [op=Abs]; -"maskrcnn/tf.__operators__.add_84/fake_quantize/AsymmQuant/add/y" [op=Const]; -"maskrcnn/tf.__operators__.add_84/fake_quantize/AsymmQuant/add" [op=AddV2]; -"maskrcnn/tf.__operators__.add_84/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.__operators__.add_84/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_84/fake_quantize/AsymmQuant/add_1" [op=AddV2]; -"maskrcnn/tf.__operators__.add_84/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_84/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.__operators__.add_86/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.__operators__.add_86/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_86/fake_quantize/AsymmQuant/Abs" [op=Abs]; -"maskrcnn/tf.__operators__.add_86/fake_quantize/AsymmQuant/add/y" [op=Const]; -"maskrcnn/tf.__operators__.add_86/fake_quantize/AsymmQuant/add" [op=AddV2]; -"maskrcnn/tf.__operators__.add_86/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.__operators__.add_86/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_86/fake_quantize/AsymmQuant/add_1" [op=AddV2]; -"maskrcnn/tf.__operators__.add_86/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_86/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.__operators__.add_88/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.__operators__.add_88/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_88/fake_quantize/AsymmQuant/Abs" [op=Abs]; -"maskrcnn/tf.__operators__.add_88/fake_quantize/AsymmQuant/add/y" [op=Const]; -"maskrcnn/tf.__operators__.add_88/fake_quantize/AsymmQuant/add" [op=AddV2]; -"maskrcnn/tf.__operators__.add_88/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.__operators__.add_88/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_88/fake_quantize/AsymmQuant/add_1" [op=AddV2]; -"maskrcnn/tf.__operators__.add_88/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_88/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.__operators__.add_90/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.__operators__.add_90/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_90/fake_quantize/AsymmQuant/Abs" [op=Abs]; -"maskrcnn/tf.__operators__.add_90/fake_quantize/AsymmQuant/add/y" [op=Const]; -"maskrcnn/tf.__operators__.add_90/fake_quantize/AsymmQuant/add" [op=AddV2]; -"maskrcnn/tf.__operators__.add_90/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; 
-"maskrcnn/tf.__operators__.add_90/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_90/fake_quantize/AsymmQuant/add_1" [op=AddV2]; -"maskrcnn/tf.__operators__.add_90/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_90/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.__operators__.add_92/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.__operators__.add_92/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_92/fake_quantize/AsymmQuant/Abs" [op=Abs]; -"maskrcnn/tf.__operators__.add_92/fake_quantize/AsymmQuant/add/y" [op=Const]; -"maskrcnn/tf.__operators__.add_92/fake_quantize/AsymmQuant/add" [op=AddV2]; -"maskrcnn/tf.__operators__.add_92/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.__operators__.add_92/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_92/fake_quantize/AsymmQuant/add_1" [op=AddV2]; -"maskrcnn/tf.__operators__.add_92/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_92/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.__operators__.add_94/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.__operators__.add_94/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_94/fake_quantize/AsymmQuant/Abs" [op=Abs]; -"maskrcnn/tf.__operators__.add_94/fake_quantize/AsymmQuant/add/y" [op=Const]; -"maskrcnn/tf.__operators__.add_94/fake_quantize/AsymmQuant/add" [op=AddV2]; -"maskrcnn/tf.__operators__.add_94/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.__operators__.add_94/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_94/fake_quantize/AsymmQuant/add_1" [op=AddV2]; -"maskrcnn/tf.__operators__.add_94/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_94/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.__operators__.add_96/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.__operators__.add_96/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_96/fake_quantize/AsymmQuant/Abs" [op=Abs]; -"maskrcnn/tf.__operators__.add_96/fake_quantize/AsymmQuant/add/y" [op=Const]; -"maskrcnn/tf.__operators__.add_96/fake_quantize/AsymmQuant/add" [op=AddV2]; -"maskrcnn/tf.__operators__.add_96/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.__operators__.add_96/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_96/fake_quantize/AsymmQuant/add_1" [op=AddV2]; -"maskrcnn/tf.__operators__.add_96/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_96/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.stack_18/stack" [op=Pack]; -"maskrcnn/tf.__operators__.add_95/AddV2" [op=AddV2]; -"maskrcnn/tf.__operators__.add_93/AddV2" [op=AddV2]; -"maskrcnn/tf.__operators__.add_91/AddV2" [op=AddV2]; -"maskrcnn/tf.__operators__.add_89/AddV2" [op=AddV2]; 
-"maskrcnn/tf.__operators__.add_87/AddV2" [op=AddV2]; -"maskrcnn/tf.__operators__.add_85/AddV2" [op=AddV2]; -"maskrcnn/tf.__operators__.add_83/AddV2" [op=AddV2]; -"maskrcnn/tf.__operators__.add_81/AddV2" [op=AddV2]; -"maskrcnn/tf.__operators__.add_79/AddV2" [op=AddV2]; -"maskrcnn/tf.__operators__.add_77/AddV2" [op=AddV2]; -"maskrcnn/tf.__operators__.add_75/AddV2" [op=AddV2]; -"maskrcnn/tf.__operators__.add_73/AddV2" [op=AddV2]; -"maskrcnn/tf.__operators__.add_71/AddV2" [op=AddV2]; "maskrcnn/tf.__operators__.add_69/AddV2" [op=AddV2]; +"maskrcnn/tf.__operators__.add_71/AddV2" [op=AddV2]; +"maskrcnn/tf.__operators__.add_73/AddV2" [op=AddV2]; +"maskrcnn/tf.__operators__.add_75/AddV2" [op=AddV2]; +"maskrcnn/tf.__operators__.add_77/AddV2" [op=AddV2]; +"maskrcnn/tf.__operators__.add_79/AddV2" [op=AddV2]; +"maskrcnn/tf.__operators__.add_81/AddV2" [op=AddV2]; +"maskrcnn/tf.__operators__.add_83/AddV2" [op=AddV2]; +"maskrcnn/tf.__operators__.add_85/AddV2" [op=AddV2]; +"maskrcnn/tf.__operators__.add_87/AddV2" [op=AddV2]; +"maskrcnn/tf.__operators__.add_89/AddV2" [op=AddV2]; +"maskrcnn/tf.__operators__.add_91/AddV2" [op=AddV2]; +"maskrcnn/tf.__operators__.add_93/AddV2" [op=AddV2]; +"maskrcnn/tf.__operators__.add_95/AddV2" [op=AddV2]; "maskrcnn/tf.concat_19/concat/axis" [op=Const]; "maskrcnn/tf.concat_19/concat" [op=ConcatV2]; -"maskrcnn/tf.__operators__.add_69/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.__operators__.add_69/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_69/fake_quantize/AsymmQuant/Abs" [op=Abs]; -"maskrcnn/tf.__operators__.add_69/fake_quantize/AsymmQuant/add/y" [op=Const]; -"maskrcnn/tf.__operators__.add_69/fake_quantize/AsymmQuant/add" [op=AddV2]; -"maskrcnn/tf.__operators__.add_69/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.__operators__.add_69/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_69/fake_quantize/AsymmQuant/add_1" [op=AddV2]; -"maskrcnn/tf.__operators__.add_69/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_69/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.__operators__.add_71/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.__operators__.add_71/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_71/fake_quantize/AsymmQuant/Abs" [op=Abs]; -"maskrcnn/tf.__operators__.add_71/fake_quantize/AsymmQuant/add/y" [op=Const]; -"maskrcnn/tf.__operators__.add_71/fake_quantize/AsymmQuant/add" [op=AddV2]; -"maskrcnn/tf.__operators__.add_71/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.__operators__.add_71/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_71/fake_quantize/AsymmQuant/add_1" [op=AddV2]; -"maskrcnn/tf.__operators__.add_71/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_71/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.__operators__.add_73/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.__operators__.add_73/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_73/fake_quantize/AsymmQuant/Abs" [op=Abs]; 
-"maskrcnn/tf.__operators__.add_73/fake_quantize/AsymmQuant/add/y" [op=Const]; -"maskrcnn/tf.__operators__.add_73/fake_quantize/AsymmQuant/add" [op=AddV2]; -"maskrcnn/tf.__operators__.add_73/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.__operators__.add_73/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_73/fake_quantize/AsymmQuant/add_1" [op=AddV2]; -"maskrcnn/tf.__operators__.add_73/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_73/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.__operators__.add_75/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.__operators__.add_75/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_75/fake_quantize/AsymmQuant/Abs" [op=Abs]; -"maskrcnn/tf.__operators__.add_75/fake_quantize/AsymmQuant/add/y" [op=Const]; -"maskrcnn/tf.__operators__.add_75/fake_quantize/AsymmQuant/add" [op=AddV2]; -"maskrcnn/tf.__operators__.add_75/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.__operators__.add_75/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_75/fake_quantize/AsymmQuant/add_1" [op=AddV2]; -"maskrcnn/tf.__operators__.add_75/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_75/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.__operators__.add_77/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.__operators__.add_77/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_77/fake_quantize/AsymmQuant/Abs" [op=Abs]; -"maskrcnn/tf.__operators__.add_77/fake_quantize/AsymmQuant/add/y" [op=Const]; -"maskrcnn/tf.__operators__.add_77/fake_quantize/AsymmQuant/add" [op=AddV2]; -"maskrcnn/tf.__operators__.add_77/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.__operators__.add_77/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_77/fake_quantize/AsymmQuant/add_1" [op=AddV2]; -"maskrcnn/tf.__operators__.add_77/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_77/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.__operators__.add_79/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.__operators__.add_79/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_79/fake_quantize/AsymmQuant/Abs" [op=Abs]; -"maskrcnn/tf.__operators__.add_79/fake_quantize/AsymmQuant/add/y" [op=Const]; -"maskrcnn/tf.__operators__.add_79/fake_quantize/AsymmQuant/add" [op=AddV2]; -"maskrcnn/tf.__operators__.add_79/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.__operators__.add_79/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_79/fake_quantize/AsymmQuant/add_1" [op=AddV2]; -"maskrcnn/tf.__operators__.add_79/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_79/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; 
-"maskrcnn/tf.__operators__.add_81/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.__operators__.add_81/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_81/fake_quantize/AsymmQuant/Abs" [op=Abs]; -"maskrcnn/tf.__operators__.add_81/fake_quantize/AsymmQuant/add/y" [op=Const]; -"maskrcnn/tf.__operators__.add_81/fake_quantize/AsymmQuant/add" [op=AddV2]; -"maskrcnn/tf.__operators__.add_81/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.__operators__.add_81/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_81/fake_quantize/AsymmQuant/add_1" [op=AddV2]; -"maskrcnn/tf.__operators__.add_81/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_81/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.__operators__.add_83/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.__operators__.add_83/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_83/fake_quantize/AsymmQuant/Abs" [op=Abs]; -"maskrcnn/tf.__operators__.add_83/fake_quantize/AsymmQuant/add/y" [op=Const]; -"maskrcnn/tf.__operators__.add_83/fake_quantize/AsymmQuant/add" [op=AddV2]; -"maskrcnn/tf.__operators__.add_83/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.__operators__.add_83/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_83/fake_quantize/AsymmQuant/add_1" [op=AddV2]; -"maskrcnn/tf.__operators__.add_83/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_83/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.__operators__.add_85/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.__operators__.add_85/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_85/fake_quantize/AsymmQuant/Abs" [op=Abs]; -"maskrcnn/tf.__operators__.add_85/fake_quantize/AsymmQuant/add/y" [op=Const]; -"maskrcnn/tf.__operators__.add_85/fake_quantize/AsymmQuant/add" [op=AddV2]; -"maskrcnn/tf.__operators__.add_85/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.__operators__.add_85/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_85/fake_quantize/AsymmQuant/add_1" [op=AddV2]; -"maskrcnn/tf.__operators__.add_85/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_85/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.__operators__.add_87/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.__operators__.add_87/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_87/fake_quantize/AsymmQuant/Abs" [op=Abs]; -"maskrcnn/tf.__operators__.add_87/fake_quantize/AsymmQuant/add/y" [op=Const]; -"maskrcnn/tf.__operators__.add_87/fake_quantize/AsymmQuant/add" [op=AddV2]; -"maskrcnn/tf.__operators__.add_87/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.__operators__.add_87/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; 
-"maskrcnn/tf.__operators__.add_87/fake_quantize/AsymmQuant/add_1" [op=AddV2]; -"maskrcnn/tf.__operators__.add_87/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_87/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.__operators__.add_89/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.__operators__.add_89/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_89/fake_quantize/AsymmQuant/Abs" [op=Abs]; -"maskrcnn/tf.__operators__.add_89/fake_quantize/AsymmQuant/add/y" [op=Const]; -"maskrcnn/tf.__operators__.add_89/fake_quantize/AsymmQuant/add" [op=AddV2]; -"maskrcnn/tf.__operators__.add_89/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.__operators__.add_89/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_89/fake_quantize/AsymmQuant/add_1" [op=AddV2]; -"maskrcnn/tf.__operators__.add_89/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_89/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.__operators__.add_91/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.__operators__.add_91/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_91/fake_quantize/AsymmQuant/Abs" [op=Abs]; -"maskrcnn/tf.__operators__.add_91/fake_quantize/AsymmQuant/add/y" [op=Const]; -"maskrcnn/tf.__operators__.add_91/fake_quantize/AsymmQuant/add" [op=AddV2]; -"maskrcnn/tf.__operators__.add_91/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.__operators__.add_91/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_91/fake_quantize/AsymmQuant/add_1" [op=AddV2]; -"maskrcnn/tf.__operators__.add_91/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_91/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.__operators__.add_93/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.__operators__.add_93/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_93/fake_quantize/AsymmQuant/Abs" [op=Abs]; -"maskrcnn/tf.__operators__.add_93/fake_quantize/AsymmQuant/add/y" [op=Const]; -"maskrcnn/tf.__operators__.add_93/fake_quantize/AsymmQuant/add" [op=AddV2]; -"maskrcnn/tf.__operators__.add_93/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.__operators__.add_93/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_93/fake_quantize/AsymmQuant/add_1" [op=AddV2]; -"maskrcnn/tf.__operators__.add_93/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_93/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.__operators__.add_95/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.__operators__.add_95/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_95/fake_quantize/AsymmQuant/Abs" [op=Abs]; -"maskrcnn/tf.__operators__.add_95/fake_quantize/AsymmQuant/add/y" [op=Const]; 
-"maskrcnn/tf.__operators__.add_95/fake_quantize/AsymmQuant/add" [op=AddV2]; -"maskrcnn/tf.__operators__.add_95/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.__operators__.add_95/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_95/fake_quantize/AsymmQuant/add_1" [op=AddV2]; -"maskrcnn/tf.__operators__.add_95/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_95/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; "maskrcnn/tf.math.floor_2/Floor" [op=Floor]; "maskrcnn/tf.stack_17/stack" [op=Pack]; +"maskrcnn/tf.math.floor_2/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.floor_2/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.floor_2/fake_quantize/AsymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.floor_2/fake_quantize/AsymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.floor_2/fake_quantize/AsymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.floor_2/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.floor_2/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.floor_2/fake_quantize/AsymmQuant/add_1" [op=AddV2]; +"maskrcnn/tf.math.floor_2/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.floor_2/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.math.floor_3/Floor" [op=Floor]; "maskrcnn/tf.__operators__.getitem_135/strided_slice/stack" [op=Const]; "maskrcnn/tf.__operators__.getitem_135/strided_slice/stack_1" [op=Const]; "maskrcnn/tf.__operators__.getitem_135/strided_slice/stack_2" [op=Const]; "maskrcnn/tf.__operators__.getitem_135/strided_slice" [op=StridedSlice]; "maskrcnn/tf.math.maximum_11/Maximum" [op=Maximum]; -"maskrcnn/tf.math.floor_3/Floor" [op=Floor]; +"maskrcnn/tf.math.floor_3/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.floor_3/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.floor_3/fake_quantize/AsymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.floor_3/fake_quantize/AsymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.floor_3/fake_quantize/AsymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.floor_3/fake_quantize/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.floor_3/fake_quantize/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.floor_3/fake_quantize/AsymmQuant/add_1" [op=AddV2]; +"maskrcnn/tf.math.floor_3/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.floor_3/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; "maskrcnn/tf.math.maximum_11/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; "maskrcnn/tf.math.maximum_11/fake_quantize/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; "maskrcnn/tf.math.maximum_11/fake_quantize/AsymmQuant/Abs" [op=Abs]; @@ -7180,6 +6884,16 @@ args_0_1 [op=Placeholder]; "maskrcnn/tf.__operators__.getitem_136/strided_slice" [op=StridedSlice]; "maskrcnn/tf.__operators__.add_98/y" [op=Const]; "maskrcnn/tf.__operators__.add_98/AddV2" [op=AddV2]; +"maskrcnn/tf.math.subtract_68/fake_quantize_I0/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_68/fake_quantize_I0/AsymmQuant/Abs/ReadVariableOp" 
[op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_68/fake_quantize_I0/AsymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.subtract_68/fake_quantize_I0/AsymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.subtract_68/fake_quantize_I0/AsymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.subtract_68/fake_quantize_I0/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_68/fake_quantize_I0/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_68/fake_quantize_I0/AsymmQuant/add_1" [op=AddV2]; +"maskrcnn/tf.math.subtract_68/fake_quantize_I0/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_68/fake_quantize_I0/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; "maskrcnn/tf.math.subtract_68/fake_quantize_I1/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; "maskrcnn/tf.math.subtract_68/fake_quantize_I1/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; "maskrcnn/tf.math.subtract_68/fake_quantize_I1/AsymmQuant/Abs" [op=Abs]; @@ -7190,6 +6904,16 @@ args_0_1 [op=Placeholder]; "maskrcnn/tf.math.subtract_68/fake_quantize_I1/AsymmQuant/add_1" [op=AddV2]; "maskrcnn/tf.math.subtract_68/fake_quantize_I1/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; "maskrcnn/tf.math.subtract_68/fake_quantize_I1/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.math.subtract_67/fake_quantize_I0/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_67/fake_quantize_I0/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_67/fake_quantize_I0/AsymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.subtract_67/fake_quantize_I0/AsymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.subtract_67/fake_quantize_I0/AsymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.subtract_67/fake_quantize_I0/AsymmQuant/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_67/fake_quantize_I0/AsymmQuant/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_67/fake_quantize_I0/AsymmQuant/add_1" [op=AddV2]; +"maskrcnn/tf.math.subtract_67/fake_quantize_I0/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_67/fake_quantize_I0/AsymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; "maskrcnn/tf.math.subtract_67/fake_quantize_I1/AsymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; "maskrcnn/tf.math.subtract_67/fake_quantize_I1/AsymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; "maskrcnn/tf.math.subtract_67/fake_quantize_I1/AsymmQuant/Abs" [op=Abs]; @@ -10121,7 +9845,7 @@ args_0_1 -> "maskrcnn/image_info/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVar "maskrcnn/p6-bn/FusedBatchNormV3/ReadVariableOp" -> "maskrcnn/p6-bn/FusedBatchNormV3"; "maskrcnn/p6-bn/FusedBatchNormV3/ReadVariableOp_1/resource" -> "maskrcnn/p6-bn/FusedBatchNormV3/ReadVariableOp_1"; "maskrcnn/p6-bn/FusedBatchNormV3/ReadVariableOp_1" -> "maskrcnn/p6-bn/FusedBatchNormV3"; -"maskrcnn/p6-bn/FusedBatchNormV3" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/p6-bn/FusedBatchNormV3" -> "maskrcnn/p6-bn/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/p5-bn/ReadVariableOp/resource" -> "maskrcnn/p5-bn/ReadVariableOp"; "maskrcnn/p5-bn/ReadVariableOp" -> "maskrcnn/p5-bn/FusedBatchNormV3"; "maskrcnn/p5-bn/ReadVariableOp_1/resource" -> "maskrcnn/p5-bn/ReadVariableOp_1"; @@ -10130,7 +9854,7 @@ args_0_1 -> 
"maskrcnn/image_info/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVar "maskrcnn/p5-bn/FusedBatchNormV3/ReadVariableOp" -> "maskrcnn/p5-bn/FusedBatchNormV3"; "maskrcnn/p5-bn/FusedBatchNormV3/ReadVariableOp_1/resource" -> "maskrcnn/p5-bn/FusedBatchNormV3/ReadVariableOp_1"; "maskrcnn/p5-bn/FusedBatchNormV3/ReadVariableOp_1" -> "maskrcnn/p5-bn/FusedBatchNormV3"; -"maskrcnn/p5-bn/FusedBatchNormV3" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars"; +"maskrcnn/p5-bn/FusedBatchNormV3" -> "maskrcnn/p5-bn/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/p4-bn/ReadVariableOp/resource" -> "maskrcnn/p4-bn/ReadVariableOp"; "maskrcnn/p4-bn/ReadVariableOp" -> "maskrcnn/p4-bn/FusedBatchNormV3"; "maskrcnn/p4-bn/ReadVariableOp_1/resource" -> "maskrcnn/p4-bn/ReadVariableOp_1"; @@ -10139,7 +9863,7 @@ args_0_1 -> "maskrcnn/image_info/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVar "maskrcnn/p4-bn/FusedBatchNormV3/ReadVariableOp" -> "maskrcnn/p4-bn/FusedBatchNormV3"; "maskrcnn/p4-bn/FusedBatchNormV3/ReadVariableOp_1/resource" -> "maskrcnn/p4-bn/FusedBatchNormV3/ReadVariableOp_1"; "maskrcnn/p4-bn/FusedBatchNormV3/ReadVariableOp_1" -> "maskrcnn/p4-bn/FusedBatchNormV3"; -"maskrcnn/p4-bn/FusedBatchNormV3" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars"; +"maskrcnn/p4-bn/FusedBatchNormV3" -> "maskrcnn/p4-bn/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/p3-bn/ReadVariableOp/resource" -> "maskrcnn/p3-bn/ReadVariableOp"; "maskrcnn/p3-bn/ReadVariableOp" -> "maskrcnn/p3-bn/FusedBatchNormV3"; "maskrcnn/p3-bn/ReadVariableOp_1/resource" -> "maskrcnn/p3-bn/ReadVariableOp_1"; @@ -10148,7 +9872,7 @@ args_0_1 -> "maskrcnn/image_info/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVar "maskrcnn/p3-bn/FusedBatchNormV3/ReadVariableOp" -> "maskrcnn/p3-bn/FusedBatchNormV3"; "maskrcnn/p3-bn/FusedBatchNormV3/ReadVariableOp_1/resource" -> "maskrcnn/p3-bn/FusedBatchNormV3/ReadVariableOp_1"; "maskrcnn/p3-bn/FusedBatchNormV3/ReadVariableOp_1" -> "maskrcnn/p3-bn/FusedBatchNormV3"; -"maskrcnn/p3-bn/FusedBatchNormV3" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars"; +"maskrcnn/p3-bn/FusedBatchNormV3" -> "maskrcnn/p3-bn/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/p2-bn/ReadVariableOp/resource" -> "maskrcnn/p2-bn/ReadVariableOp"; "maskrcnn/p2-bn/ReadVariableOp" -> "maskrcnn/p2-bn/FusedBatchNormV3"; "maskrcnn/p2-bn/ReadVariableOp_1/resource" -> "maskrcnn/p2-bn/ReadVariableOp_1"; @@ -10157,72 +9881,72 @@ args_0_1 -> "maskrcnn/image_info/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVar "maskrcnn/p2-bn/FusedBatchNormV3/ReadVariableOp" -> "maskrcnn/p2-bn/FusedBatchNormV3"; "maskrcnn/p2-bn/FusedBatchNormV3/ReadVariableOp_1/resource" -> "maskrcnn/p2-bn/FusedBatchNormV3/ReadVariableOp_1"; "maskrcnn/p2-bn/FusedBatchNormV3/ReadVariableOp_1" -> "maskrcnn/p2-bn/FusedBatchNormV3"; -"maskrcnn/p2-bn/FusedBatchNormV3" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_4/FakeQuantWithMinMaxVars"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_1/Abs/ReadVariableOp"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" -> 
"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_2/Abs/ReadVariableOp"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_3/Abs/ReadVariableOp"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_4/Abs/ReadVariableOp"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant/Abs"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant/Abs" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant/add"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant/add/y" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant/add"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant/add" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant/add_1"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_1/ReadVariableOp"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars/ReadVariableOp"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_2/ReadVariableOp"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars/ReadVariableOp"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_3/ReadVariableOp"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars/ReadVariableOp"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_4/ReadVariableOp"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_4/FakeQuantWithMinMaxVars/ReadVariableOp"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant/add_1"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant/add_1" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/rpn/Conv2D"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars" -> 
"maskrcnn/tf.reshape_18/Reshape"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.reshape_47/Reshape"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_1/Abs/ReadVariableOp" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_1/Abs"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_1/Abs" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_1/add"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_1/add/y" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_1/add"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_1/add" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_1/add_1"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_1/ReadVariableOp" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_1/add_1"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_1/add_1" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars" -> "maskrcnn/rpn/Conv2D_1"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.reshape_16/Reshape"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.reshape_45/Reshape"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_2/Abs/ReadVariableOp" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_2/Abs"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_2/Abs" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_2/add"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_2/add/y" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_2/add"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_2/add" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_2/add_1"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_2/ReadVariableOp" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_2/add_1"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_2/add_1" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars" -> "maskrcnn/rpn/Conv2D_2"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.reshape_14/Reshape"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.reshape_43/Reshape"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_3/Abs/ReadVariableOp" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_3/Abs"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_3/Abs" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_3/add"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_3/add/y" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_3/add"; 
-"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_3/add" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_3/add_1"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_3/ReadVariableOp" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_3/add_1"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_3/add_1" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars" -> "maskrcnn/rpn/Conv2D_3"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.reshape_12/Reshape"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.reshape_41/Reshape"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_4/Abs/ReadVariableOp" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_4/Abs"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_4/Abs" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_4/add"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_4/add/y" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_4/add"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_4/add" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_4/add_1"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_4/ReadVariableOp" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_4/add_1"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_4/add_1" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_4/FakeQuantWithMinMaxVars"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_4/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_4/FakeQuantWithMinMaxVars"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_4/FakeQuantWithMinMaxVars" -> "maskrcnn/rpn/Conv2D_4"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_4/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.reshape_10/Reshape"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/AsymmQuant_4/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.reshape_39/Reshape"; +"maskrcnn/p2-bn/FusedBatchNormV3" -> "maskrcnn/p2-bn/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/p6-bn/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/p6-bn/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; +"maskrcnn/p6-bn/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/p6-bn/fake_quantize/AsymmQuant/Abs"; +"maskrcnn/p6-bn/fake_quantize/AsymmQuant/Abs" -> "maskrcnn/p6-bn/fake_quantize/AsymmQuant/add"; +"maskrcnn/p6-bn/fake_quantize/AsymmQuant/add/y" -> "maskrcnn/p6-bn/fake_quantize/AsymmQuant/add"; +"maskrcnn/p6-bn/fake_quantize/AsymmQuant/add" -> "maskrcnn/p6-bn/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/p6-bn/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/p6-bn/fake_quantize/AsymmQuant/ReadVariableOp"; +"maskrcnn/p6-bn/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/p6-bn/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"maskrcnn/p6-bn/fake_quantize/AsymmQuant/ReadVariableOp" -> "maskrcnn/p6-bn/fake_quantize/AsymmQuant/add_1"; 
+"maskrcnn/p6-bn/fake_quantize/AsymmQuant/add_1" -> "maskrcnn/p6-bn/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/p6-bn/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/p6-bn/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/p6-bn/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/rpn/Conv2D"; +"maskrcnn/p6-bn/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.reshape_18/Reshape"; +"maskrcnn/p6-bn/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.reshape_47/Reshape"; +"maskrcnn/p5-bn/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/p5-bn/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; +"maskrcnn/p5-bn/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/p5-bn/fake_quantize/AsymmQuant/Abs"; +"maskrcnn/p5-bn/fake_quantize/AsymmQuant/Abs" -> "maskrcnn/p5-bn/fake_quantize/AsymmQuant/add"; +"maskrcnn/p5-bn/fake_quantize/AsymmQuant/add/y" -> "maskrcnn/p5-bn/fake_quantize/AsymmQuant/add"; +"maskrcnn/p5-bn/fake_quantize/AsymmQuant/add" -> "maskrcnn/p5-bn/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/p5-bn/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/p5-bn/fake_quantize/AsymmQuant/ReadVariableOp"; +"maskrcnn/p5-bn/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/p5-bn/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"maskrcnn/p5-bn/fake_quantize/AsymmQuant/ReadVariableOp" -> "maskrcnn/p5-bn/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/p5-bn/fake_quantize/AsymmQuant/add_1" -> "maskrcnn/p5-bn/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/p5-bn/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/p5-bn/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/p5-bn/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/rpn/Conv2D_1"; +"maskrcnn/p5-bn/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.reshape_16/Reshape"; +"maskrcnn/p5-bn/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.reshape_45/Reshape"; +"maskrcnn/p4-bn/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/p4-bn/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; +"maskrcnn/p4-bn/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/p4-bn/fake_quantize/AsymmQuant/Abs"; +"maskrcnn/p4-bn/fake_quantize/AsymmQuant/Abs" -> "maskrcnn/p4-bn/fake_quantize/AsymmQuant/add"; +"maskrcnn/p4-bn/fake_quantize/AsymmQuant/add/y" -> "maskrcnn/p4-bn/fake_quantize/AsymmQuant/add"; +"maskrcnn/p4-bn/fake_quantize/AsymmQuant/add" -> "maskrcnn/p4-bn/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/p4-bn/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/p4-bn/fake_quantize/AsymmQuant/ReadVariableOp"; +"maskrcnn/p4-bn/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/p4-bn/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"maskrcnn/p4-bn/fake_quantize/AsymmQuant/ReadVariableOp" -> "maskrcnn/p4-bn/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/p4-bn/fake_quantize/AsymmQuant/add_1" -> "maskrcnn/p4-bn/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/p4-bn/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/p4-bn/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/p4-bn/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/rpn/Conv2D_2"; +"maskrcnn/p4-bn/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.reshape_14/Reshape"; +"maskrcnn/p4-bn/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> 
"maskrcnn/tf.reshape_43/Reshape"; +"maskrcnn/p3-bn/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/p3-bn/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; +"maskrcnn/p3-bn/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/p3-bn/fake_quantize/AsymmQuant/Abs"; +"maskrcnn/p3-bn/fake_quantize/AsymmQuant/Abs" -> "maskrcnn/p3-bn/fake_quantize/AsymmQuant/add"; +"maskrcnn/p3-bn/fake_quantize/AsymmQuant/add/y" -> "maskrcnn/p3-bn/fake_quantize/AsymmQuant/add"; +"maskrcnn/p3-bn/fake_quantize/AsymmQuant/add" -> "maskrcnn/p3-bn/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/p3-bn/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/p3-bn/fake_quantize/AsymmQuant/ReadVariableOp"; +"maskrcnn/p3-bn/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/p3-bn/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"maskrcnn/p3-bn/fake_quantize/AsymmQuant/ReadVariableOp" -> "maskrcnn/p3-bn/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/p3-bn/fake_quantize/AsymmQuant/add_1" -> "maskrcnn/p3-bn/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/p3-bn/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/p3-bn/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/p3-bn/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/rpn/Conv2D_3"; +"maskrcnn/p3-bn/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.reshape_12/Reshape"; +"maskrcnn/p3-bn/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.reshape_41/Reshape"; +"maskrcnn/p2-bn/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/p2-bn/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; +"maskrcnn/p2-bn/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/p2-bn/fake_quantize/AsymmQuant/Abs"; +"maskrcnn/p2-bn/fake_quantize/AsymmQuant/Abs" -> "maskrcnn/p2-bn/fake_quantize/AsymmQuant/add"; +"maskrcnn/p2-bn/fake_quantize/AsymmQuant/add/y" -> "maskrcnn/p2-bn/fake_quantize/AsymmQuant/add"; +"maskrcnn/p2-bn/fake_quantize/AsymmQuant/add" -> "maskrcnn/p2-bn/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/p2-bn/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/p2-bn/fake_quantize/AsymmQuant/ReadVariableOp"; +"maskrcnn/p2-bn/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/p2-bn/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"maskrcnn/p2-bn/fake_quantize/AsymmQuant/ReadVariableOp" -> "maskrcnn/p2-bn/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/p2-bn/fake_quantize/AsymmQuant/add_1" -> "maskrcnn/p2-bn/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/p2-bn/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/p2-bn/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/p2-bn/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/rpn/Conv2D_4"; +"maskrcnn/p2-bn/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.reshape_10/Reshape"; +"maskrcnn/p2-bn/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.reshape_39/Reshape"; "maskrcnn/rpn/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/rpn/SymmQuant/Abs/ReadVariableOp"; "maskrcnn/rpn/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/rpn/SymmQuant_1/Abs/ReadVariableOp"; "maskrcnn/rpn/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/rpn/SymmQuant_2/Abs/ReadVariableOp"; @@ -10399,7 +10123,8 @@ args_0_1 -> "maskrcnn/image_info/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVar "maskrcnn/rpn-box/BiasAdd/ReadVariableOp/resource" -> 
"maskrcnn/rpn-box/BiasAdd_3/ReadVariableOp"; "maskrcnn/rpn-box/BiasAdd/ReadVariableOp/resource" -> "maskrcnn/rpn-box/BiasAdd_4/ReadVariableOp"; "maskrcnn/rpn-box/BiasAdd/ReadVariableOp" -> "maskrcnn/rpn-box/BiasAdd"; -"maskrcnn/rpn-box/BiasAdd" -> "maskrcnn/rpn-box/fake_quantize_4/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/rpn-box/BiasAdd" -> "maskrcnn/tf.reshape_9/fake_quantize_I0/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/rpn-box/BiasAdd" -> Identity_11; "maskrcnn/rpn-box/SymmQuant_1/Abs/ReadVariableOp" -> "maskrcnn/rpn-box/SymmQuant_1/Abs"; "maskrcnn/rpn-box/SymmQuant_1/Abs" -> "maskrcnn/rpn-box/SymmQuant_1/add"; "maskrcnn/rpn-box/SymmQuant_1/add/y" -> "maskrcnn/rpn-box/SymmQuant_1/add"; @@ -10411,7 +10136,8 @@ args_0_1 -> "maskrcnn/image_info/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVar "maskrcnn/rpn-box/SymmQuant_1/FakeQuantWithMinMaxVarsPerChannel" -> "maskrcnn/rpn-box/Conv2D_1"; "maskrcnn/rpn-box/Conv2D_1" -> "maskrcnn/rpn-box/BiasAdd_1"; "maskrcnn/rpn-box/BiasAdd_1/ReadVariableOp" -> "maskrcnn/rpn-box/BiasAdd_1"; -"maskrcnn/rpn-box/BiasAdd_1" -> "maskrcnn/rpn-box/fake_quantize_3/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/rpn-box/BiasAdd_1" -> "maskrcnn/tf.reshape_7/fake_quantize_I0/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/rpn-box/BiasAdd_1" -> Identity_10; "maskrcnn/rpn-box/SymmQuant_2/Abs/ReadVariableOp" -> "maskrcnn/rpn-box/SymmQuant_2/Abs"; "maskrcnn/rpn-box/SymmQuant_2/Abs" -> "maskrcnn/rpn-box/SymmQuant_2/add"; "maskrcnn/rpn-box/SymmQuant_2/add/y" -> "maskrcnn/rpn-box/SymmQuant_2/add"; @@ -10423,7 +10149,8 @@ args_0_1 -> "maskrcnn/image_info/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVar "maskrcnn/rpn-box/SymmQuant_2/FakeQuantWithMinMaxVarsPerChannel" -> "maskrcnn/rpn-box/Conv2D_2"; "maskrcnn/rpn-box/Conv2D_2" -> "maskrcnn/rpn-box/BiasAdd_2"; "maskrcnn/rpn-box/BiasAdd_2/ReadVariableOp" -> "maskrcnn/rpn-box/BiasAdd_2"; -"maskrcnn/rpn-box/BiasAdd_2" -> "maskrcnn/rpn-box/fake_quantize_2/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/rpn-box/BiasAdd_2" -> "maskrcnn/tf.reshape_5/fake_quantize_I0/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/rpn-box/BiasAdd_2" -> Identity_9; "maskrcnn/rpn-box/SymmQuant_3/Abs/ReadVariableOp" -> "maskrcnn/rpn-box/SymmQuant_3/Abs"; "maskrcnn/rpn-box/SymmQuant_3/Abs" -> "maskrcnn/rpn-box/SymmQuant_3/add"; "maskrcnn/rpn-box/SymmQuant_3/add/y" -> "maskrcnn/rpn-box/SymmQuant_3/add"; @@ -10435,7 +10162,8 @@ args_0_1 -> "maskrcnn/image_info/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVar "maskrcnn/rpn-box/SymmQuant_3/FakeQuantWithMinMaxVarsPerChannel" -> "maskrcnn/rpn-box/Conv2D_3"; "maskrcnn/rpn-box/Conv2D_3" -> "maskrcnn/rpn-box/BiasAdd_3"; "maskrcnn/rpn-box/BiasAdd_3/ReadVariableOp" -> "maskrcnn/rpn-box/BiasAdd_3"; -"maskrcnn/rpn-box/BiasAdd_3" -> "maskrcnn/rpn-box/fake_quantize_1/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/rpn-box/BiasAdd_3" -> "maskrcnn/tf.reshape_3/fake_quantize_I0/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/rpn-box/BiasAdd_3" -> Identity_8; "maskrcnn/rpn-box/SymmQuant_4/Abs/ReadVariableOp" -> "maskrcnn/rpn-box/SymmQuant_4/Abs"; "maskrcnn/rpn-box/SymmQuant_4/Abs" -> "maskrcnn/rpn-box/SymmQuant_4/add"; "maskrcnn/rpn-box/SymmQuant_4/add/y" -> "maskrcnn/rpn-box/SymmQuant_4/add"; @@ -10447,67 +10175,63 @@ args_0_1 -> "maskrcnn/image_info/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVar "maskrcnn/rpn-box/SymmQuant_4/FakeQuantWithMinMaxVarsPerChannel" -> "maskrcnn/rpn-box/Conv2D_4"; "maskrcnn/rpn-box/Conv2D_4" -> "maskrcnn/rpn-box/BiasAdd_4"; "maskrcnn/rpn-box/BiasAdd_4/ReadVariableOp" -> 
"maskrcnn/rpn-box/BiasAdd_4"; -"maskrcnn/rpn-box/BiasAdd_4" -> "maskrcnn/rpn-box/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/rpn-box/fake_quantize_4/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/rpn-box/fake_quantize_4/AsymmQuant/Abs/ReadVariableOp"; -"maskrcnn/rpn-box/fake_quantize_4/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/rpn-box/fake_quantize_4/AsymmQuant/Abs"; -"maskrcnn/rpn-box/fake_quantize_4/AsymmQuant/Abs" -> "maskrcnn/rpn-box/fake_quantize_4/AsymmQuant/add"; -"maskrcnn/rpn-box/fake_quantize_4/AsymmQuant/add/y" -> "maskrcnn/rpn-box/fake_quantize_4/AsymmQuant/add"; -"maskrcnn/rpn-box/fake_quantize_4/AsymmQuant/add" -> "maskrcnn/rpn-box/fake_quantize_4/AsymmQuant/add_1"; -"maskrcnn/rpn-box/fake_quantize_4/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/rpn-box/fake_quantize_4/AsymmQuant/ReadVariableOp"; -"maskrcnn/rpn-box/fake_quantize_4/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/rpn-box/fake_quantize_4/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; -"maskrcnn/rpn-box/fake_quantize_4/AsymmQuant/ReadVariableOp" -> "maskrcnn/rpn-box/fake_quantize_4/AsymmQuant/add_1"; -"maskrcnn/rpn-box/fake_quantize_4/AsymmQuant/add_1" -> "maskrcnn/rpn-box/fake_quantize_4/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/rpn-box/fake_quantize_4/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/rpn-box/fake_quantize_4/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/rpn-box/fake_quantize_4/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.reshape_9/Reshape"; -"maskrcnn/rpn-box/fake_quantize_4/AsymmQuant/FakeQuantWithMinMaxVars" -> Identity_11; -"maskrcnn/rpn-box/fake_quantize_3/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/rpn-box/fake_quantize_3/AsymmQuant/Abs/ReadVariableOp"; -"maskrcnn/rpn-box/fake_quantize_3/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/rpn-box/fake_quantize_3/AsymmQuant/Abs"; -"maskrcnn/rpn-box/fake_quantize_3/AsymmQuant/Abs" -> "maskrcnn/rpn-box/fake_quantize_3/AsymmQuant/add"; -"maskrcnn/rpn-box/fake_quantize_3/AsymmQuant/add/y" -> "maskrcnn/rpn-box/fake_quantize_3/AsymmQuant/add"; -"maskrcnn/rpn-box/fake_quantize_3/AsymmQuant/add" -> "maskrcnn/rpn-box/fake_quantize_3/AsymmQuant/add_1"; -"maskrcnn/rpn-box/fake_quantize_3/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/rpn-box/fake_quantize_3/AsymmQuant/ReadVariableOp"; -"maskrcnn/rpn-box/fake_quantize_3/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/rpn-box/fake_quantize_3/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; -"maskrcnn/rpn-box/fake_quantize_3/AsymmQuant/ReadVariableOp" -> "maskrcnn/rpn-box/fake_quantize_3/AsymmQuant/add_1"; -"maskrcnn/rpn-box/fake_quantize_3/AsymmQuant/add_1" -> "maskrcnn/rpn-box/fake_quantize_3/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/rpn-box/fake_quantize_3/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/rpn-box/fake_quantize_3/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/rpn-box/fake_quantize_3/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.reshape_7/Reshape"; -"maskrcnn/rpn-box/fake_quantize_3/AsymmQuant/FakeQuantWithMinMaxVars" -> Identity_10; -"maskrcnn/rpn-box/fake_quantize_2/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/rpn-box/fake_quantize_2/AsymmQuant/Abs/ReadVariableOp"; -"maskrcnn/rpn-box/fake_quantize_2/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/rpn-box/fake_quantize_2/AsymmQuant/Abs"; -"maskrcnn/rpn-box/fake_quantize_2/AsymmQuant/Abs" -> "maskrcnn/rpn-box/fake_quantize_2/AsymmQuant/add"; -"maskrcnn/rpn-box/fake_quantize_2/AsymmQuant/add/y" -> 
"maskrcnn/rpn-box/fake_quantize_2/AsymmQuant/add"; -"maskrcnn/rpn-box/fake_quantize_2/AsymmQuant/add" -> "maskrcnn/rpn-box/fake_quantize_2/AsymmQuant/add_1"; -"maskrcnn/rpn-box/fake_quantize_2/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/rpn-box/fake_quantize_2/AsymmQuant/ReadVariableOp"; -"maskrcnn/rpn-box/fake_quantize_2/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/rpn-box/fake_quantize_2/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; -"maskrcnn/rpn-box/fake_quantize_2/AsymmQuant/ReadVariableOp" -> "maskrcnn/rpn-box/fake_quantize_2/AsymmQuant/add_1"; -"maskrcnn/rpn-box/fake_quantize_2/AsymmQuant/add_1" -> "maskrcnn/rpn-box/fake_quantize_2/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/rpn-box/fake_quantize_2/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/rpn-box/fake_quantize_2/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/rpn-box/fake_quantize_2/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.reshape_5/Reshape"; -"maskrcnn/rpn-box/fake_quantize_2/AsymmQuant/FakeQuantWithMinMaxVars" -> Identity_9; -"maskrcnn/rpn-box/fake_quantize_1/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/rpn-box/fake_quantize_1/AsymmQuant/Abs/ReadVariableOp"; -"maskrcnn/rpn-box/fake_quantize_1/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/rpn-box/fake_quantize_1/AsymmQuant/Abs"; -"maskrcnn/rpn-box/fake_quantize_1/AsymmQuant/Abs" -> "maskrcnn/rpn-box/fake_quantize_1/AsymmQuant/add"; -"maskrcnn/rpn-box/fake_quantize_1/AsymmQuant/add/y" -> "maskrcnn/rpn-box/fake_quantize_1/AsymmQuant/add"; -"maskrcnn/rpn-box/fake_quantize_1/AsymmQuant/add" -> "maskrcnn/rpn-box/fake_quantize_1/AsymmQuant/add_1"; -"maskrcnn/rpn-box/fake_quantize_1/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/rpn-box/fake_quantize_1/AsymmQuant/ReadVariableOp"; -"maskrcnn/rpn-box/fake_quantize_1/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/rpn-box/fake_quantize_1/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; -"maskrcnn/rpn-box/fake_quantize_1/AsymmQuant/ReadVariableOp" -> "maskrcnn/rpn-box/fake_quantize_1/AsymmQuant/add_1"; -"maskrcnn/rpn-box/fake_quantize_1/AsymmQuant/add_1" -> "maskrcnn/rpn-box/fake_quantize_1/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/rpn-box/fake_quantize_1/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/rpn-box/fake_quantize_1/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/rpn-box/fake_quantize_1/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.reshape_3/Reshape"; -"maskrcnn/rpn-box/fake_quantize_1/AsymmQuant/FakeQuantWithMinMaxVars" -> Identity_8; -"maskrcnn/rpn-box/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/rpn-box/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; -"maskrcnn/rpn-box/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/rpn-box/fake_quantize/AsymmQuant/Abs"; -"maskrcnn/rpn-box/fake_quantize/AsymmQuant/Abs" -> "maskrcnn/rpn-box/fake_quantize/AsymmQuant/add"; -"maskrcnn/rpn-box/fake_quantize/AsymmQuant/add/y" -> "maskrcnn/rpn-box/fake_quantize/AsymmQuant/add"; -"maskrcnn/rpn-box/fake_quantize/AsymmQuant/add" -> "maskrcnn/rpn-box/fake_quantize/AsymmQuant/add_1"; -"maskrcnn/rpn-box/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/rpn-box/fake_quantize/AsymmQuant/ReadVariableOp"; -"maskrcnn/rpn-box/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/rpn-box/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; -"maskrcnn/rpn-box/fake_quantize/AsymmQuant/ReadVariableOp" -> "maskrcnn/rpn-box/fake_quantize/AsymmQuant/add_1"; -"maskrcnn/rpn-box/fake_quantize/AsymmQuant/add_1" -> 
"maskrcnn/rpn-box/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/rpn-box/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/rpn-box/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/rpn-box/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.reshape_1/Reshape"; -"maskrcnn/rpn-box/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> Identity_7; +"maskrcnn/rpn-box/BiasAdd_4" -> "maskrcnn/tf.reshape_1/fake_quantize_I0/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/rpn-box/BiasAdd_4" -> Identity_7; +"maskrcnn/tf.reshape_9/fake_quantize_I0/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.reshape_9/fake_quantize_I0/AsymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.reshape_9/fake_quantize_I0/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.reshape_9/fake_quantize_I0/AsymmQuant/Abs"; +"maskrcnn/tf.reshape_9/fake_quantize_I0/AsymmQuant/Abs" -> "maskrcnn/tf.reshape_9/fake_quantize_I0/AsymmQuant/add"; +"maskrcnn/tf.reshape_9/fake_quantize_I0/AsymmQuant/add/y" -> "maskrcnn/tf.reshape_9/fake_quantize_I0/AsymmQuant/add"; +"maskrcnn/tf.reshape_9/fake_quantize_I0/AsymmQuant/add" -> "maskrcnn/tf.reshape_9/fake_quantize_I0/AsymmQuant/add_1"; +"maskrcnn/tf.reshape_9/fake_quantize_I0/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.reshape_9/fake_quantize_I0/AsymmQuant/ReadVariableOp"; +"maskrcnn/tf.reshape_9/fake_quantize_I0/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.reshape_9/fake_quantize_I0/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"maskrcnn/tf.reshape_9/fake_quantize_I0/AsymmQuant/ReadVariableOp" -> "maskrcnn/tf.reshape_9/fake_quantize_I0/AsymmQuant/add_1"; +"maskrcnn/tf.reshape_9/fake_quantize_I0/AsymmQuant/add_1" -> "maskrcnn/tf.reshape_9/fake_quantize_I0/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.reshape_9/fake_quantize_I0/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.reshape_9/fake_quantize_I0/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.reshape_9/fake_quantize_I0/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.reshape_9/Reshape"; +"maskrcnn/tf.reshape_7/fake_quantize_I0/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.reshape_7/fake_quantize_I0/AsymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.reshape_7/fake_quantize_I0/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.reshape_7/fake_quantize_I0/AsymmQuant/Abs"; +"maskrcnn/tf.reshape_7/fake_quantize_I0/AsymmQuant/Abs" -> "maskrcnn/tf.reshape_7/fake_quantize_I0/AsymmQuant/add"; +"maskrcnn/tf.reshape_7/fake_quantize_I0/AsymmQuant/add/y" -> "maskrcnn/tf.reshape_7/fake_quantize_I0/AsymmQuant/add"; +"maskrcnn/tf.reshape_7/fake_quantize_I0/AsymmQuant/add" -> "maskrcnn/tf.reshape_7/fake_quantize_I0/AsymmQuant/add_1"; +"maskrcnn/tf.reshape_7/fake_quantize_I0/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.reshape_7/fake_quantize_I0/AsymmQuant/ReadVariableOp"; +"maskrcnn/tf.reshape_7/fake_quantize_I0/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.reshape_7/fake_quantize_I0/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"maskrcnn/tf.reshape_7/fake_quantize_I0/AsymmQuant/ReadVariableOp" -> "maskrcnn/tf.reshape_7/fake_quantize_I0/AsymmQuant/add_1"; +"maskrcnn/tf.reshape_7/fake_quantize_I0/AsymmQuant/add_1" -> "maskrcnn/tf.reshape_7/fake_quantize_I0/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.reshape_7/fake_quantize_I0/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.reshape_7/fake_quantize_I0/AsymmQuant/FakeQuantWithMinMaxVars"; 
+"maskrcnn/tf.reshape_7/fake_quantize_I0/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.reshape_7/Reshape"; +"maskrcnn/tf.reshape_5/fake_quantize_I0/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.reshape_5/fake_quantize_I0/AsymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.reshape_5/fake_quantize_I0/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.reshape_5/fake_quantize_I0/AsymmQuant/Abs"; +"maskrcnn/tf.reshape_5/fake_quantize_I0/AsymmQuant/Abs" -> "maskrcnn/tf.reshape_5/fake_quantize_I0/AsymmQuant/add"; +"maskrcnn/tf.reshape_5/fake_quantize_I0/AsymmQuant/add/y" -> "maskrcnn/tf.reshape_5/fake_quantize_I0/AsymmQuant/add"; +"maskrcnn/tf.reshape_5/fake_quantize_I0/AsymmQuant/add" -> "maskrcnn/tf.reshape_5/fake_quantize_I0/AsymmQuant/add_1"; +"maskrcnn/tf.reshape_5/fake_quantize_I0/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.reshape_5/fake_quantize_I0/AsymmQuant/ReadVariableOp"; +"maskrcnn/tf.reshape_5/fake_quantize_I0/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.reshape_5/fake_quantize_I0/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"maskrcnn/tf.reshape_5/fake_quantize_I0/AsymmQuant/ReadVariableOp" -> "maskrcnn/tf.reshape_5/fake_quantize_I0/AsymmQuant/add_1"; +"maskrcnn/tf.reshape_5/fake_quantize_I0/AsymmQuant/add_1" -> "maskrcnn/tf.reshape_5/fake_quantize_I0/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.reshape_5/fake_quantize_I0/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.reshape_5/fake_quantize_I0/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.reshape_5/fake_quantize_I0/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.reshape_5/Reshape"; +"maskrcnn/tf.reshape_3/fake_quantize_I0/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.reshape_3/fake_quantize_I0/AsymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.reshape_3/fake_quantize_I0/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.reshape_3/fake_quantize_I0/AsymmQuant/Abs"; +"maskrcnn/tf.reshape_3/fake_quantize_I0/AsymmQuant/Abs" -> "maskrcnn/tf.reshape_3/fake_quantize_I0/AsymmQuant/add"; +"maskrcnn/tf.reshape_3/fake_quantize_I0/AsymmQuant/add/y" -> "maskrcnn/tf.reshape_3/fake_quantize_I0/AsymmQuant/add"; +"maskrcnn/tf.reshape_3/fake_quantize_I0/AsymmQuant/add" -> "maskrcnn/tf.reshape_3/fake_quantize_I0/AsymmQuant/add_1"; +"maskrcnn/tf.reshape_3/fake_quantize_I0/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.reshape_3/fake_quantize_I0/AsymmQuant/ReadVariableOp"; +"maskrcnn/tf.reshape_3/fake_quantize_I0/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.reshape_3/fake_quantize_I0/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"maskrcnn/tf.reshape_3/fake_quantize_I0/AsymmQuant/ReadVariableOp" -> "maskrcnn/tf.reshape_3/fake_quantize_I0/AsymmQuant/add_1"; +"maskrcnn/tf.reshape_3/fake_quantize_I0/AsymmQuant/add_1" -> "maskrcnn/tf.reshape_3/fake_quantize_I0/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.reshape_3/fake_quantize_I0/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.reshape_3/fake_quantize_I0/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.reshape_3/fake_quantize_I0/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.reshape_3/Reshape"; +"maskrcnn/tf.reshape_1/fake_quantize_I0/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.reshape_1/fake_quantize_I0/AsymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.reshape_1/fake_quantize_I0/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.reshape_1/fake_quantize_I0/AsymmQuant/Abs"; +"maskrcnn/tf.reshape_1/fake_quantize_I0/AsymmQuant/Abs" -> "maskrcnn/tf.reshape_1/fake_quantize_I0/AsymmQuant/add"; 
+"maskrcnn/tf.reshape_1/fake_quantize_I0/AsymmQuant/add/y" -> "maskrcnn/tf.reshape_1/fake_quantize_I0/AsymmQuant/add"; +"maskrcnn/tf.reshape_1/fake_quantize_I0/AsymmQuant/add" -> "maskrcnn/tf.reshape_1/fake_quantize_I0/AsymmQuant/add_1"; +"maskrcnn/tf.reshape_1/fake_quantize_I0/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.reshape_1/fake_quantize_I0/AsymmQuant/ReadVariableOp"; +"maskrcnn/tf.reshape_1/fake_quantize_I0/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.reshape_1/fake_quantize_I0/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"maskrcnn/tf.reshape_1/fake_quantize_I0/AsymmQuant/ReadVariableOp" -> "maskrcnn/tf.reshape_1/fake_quantize_I0/AsymmQuant/add_1"; +"maskrcnn/tf.reshape_1/fake_quantize_I0/AsymmQuant/add_1" -> "maskrcnn/tf.reshape_1/fake_quantize_I0/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.reshape_1/fake_quantize_I0/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.reshape_1/fake_quantize_I0/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.reshape_1/fake_quantize_I0/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.reshape_1/Reshape"; "maskrcnn/tf.reshape_9/Reshape/shape" -> "maskrcnn/tf.reshape_9/Reshape"; "maskrcnn/tf.reshape_9/Reshape" -> "maskrcnn/tf.__operators__.getitem_20/strided_slice"; "maskrcnn/tf.reshape_9/Reshape" -> "maskrcnn/tf.__operators__.getitem_19/strided_slice"; @@ -11285,176 +11009,146 @@ args_0_1 -> "maskrcnn/image_info/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVar "maskrcnn/tf.math.multiply_4/fake_quantize/AsymmQuant/add_1" -> "maskrcnn/tf.math.multiply_4/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.multiply_4/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.multiply_4/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.multiply_4/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.math.subtract/Sub"; -"maskrcnn/tf.math.subtract_33/Sub" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_32/Sub" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_25/Sub" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_24/Sub" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_17/Sub" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_16/Sub" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_9/Sub" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_8/Sub" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_33/Sub" -> "maskrcnn/tf.math.subtract_33/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_32/Sub" -> "maskrcnn/tf.math.subtract_32/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_25/Sub" -> "maskrcnn/tf.math.subtract_25/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_24/Sub" -> "maskrcnn/tf.math.subtract_24/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_17/Sub" -> 
"maskrcnn/tf.math.subtract_17/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_16/Sub" -> "maskrcnn/tf.math.subtract_16/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_9/Sub" -> "maskrcnn/tf.math.subtract_9/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_8/Sub" -> "maskrcnn/tf.math.subtract_8/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.__operators__.getitem/strided_slice/stack" -> "maskrcnn/tf.__operators__.getitem/strided_slice"; "maskrcnn/tf.__operators__.getitem/strided_slice/stack_1" -> "maskrcnn/tf.__operators__.getitem/strided_slice"; "maskrcnn/tf.__operators__.getitem/strided_slice/stack_2" -> "maskrcnn/tf.__operators__.getitem/strided_slice"; "maskrcnn/tf.__operators__.getitem/strided_slice" -> "maskrcnn/tf.expand_dims/ExpandDims"; -"maskrcnn/tf.math.subtract_1/Sub" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract/Sub" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_1/Sub" -> "maskrcnn/tf.math.subtract_1/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract/Sub" -> "maskrcnn/tf.math.subtract/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.expand_dims/ExpandDims/dim" -> "maskrcnn/tf.expand_dims/ExpandDims"; "maskrcnn/tf.expand_dims/ExpandDims" -> "maskrcnn/tf.unstack_4/unstack"; "maskrcnn/tf.expand_dims/ExpandDims" -> "maskrcnn/tf.unstack_3/unstack"; "maskrcnn/tf.expand_dims/ExpandDims" -> "maskrcnn/tf.unstack_2/unstack"; "maskrcnn/tf.expand_dims/ExpandDims" -> "maskrcnn/tf.unstack_1/unstack"; "maskrcnn/tf.expand_dims/ExpandDims" -> "maskrcnn/tf.unstack/unstack"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_1/Abs/ReadVariableOp"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_2/Abs/ReadVariableOp"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_3/Abs/ReadVariableOp"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant/Abs"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant/Abs" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant/add"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant/add/y" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant/add"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant/add" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant/add_1"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp"; 
-"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_1/ReadVariableOp"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars/ReadVariableOp"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_2/ReadVariableOp"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars/ReadVariableOp"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_3/ReadVariableOp"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars/ReadVariableOp"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant/add_1"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant/add_1" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.__operators__.add_37/AddV2"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_4/concat"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_1/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_1/Abs"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_1/Abs" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_1/add"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_1/add/y" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_1/add"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_1/add" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_1/add_1"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_1/ReadVariableOp" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_1/add_1"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_1/add_1" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars/ReadVariableOp" -> 
"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.__operators__.add_38/AddV2"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_4/concat"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_1/Abs/ReadVariableOp"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_2/Abs/ReadVariableOp"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_3/Abs/ReadVariableOp"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant/Abs"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant/Abs" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant/add"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant/add/y" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant/add"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant/add" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant/add_1"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_1/ReadVariableOp"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars/ReadVariableOp"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_2/ReadVariableOp"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars/ReadVariableOp"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_3/ReadVariableOp"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> 
"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars/ReadVariableOp"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant/add_1"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant/add_1" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.__operators__.add_33/AddV2"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_3/concat"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_1/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_1/Abs"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_1/Abs" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_1/add"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_1/add/y" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_1/add"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_1/add" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_1/add_1"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_1/ReadVariableOp" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_1/add_1"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_1/add_1" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.__operators__.add_34/AddV2"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_3/concat"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_1/Abs/ReadVariableOp"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_2/Abs/ReadVariableOp"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_3/Abs/ReadVariableOp"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp" -> 
"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant/Abs"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant/Abs" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant/add"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant/add/y" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant/add"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant/add" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant/add_1"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_1/ReadVariableOp"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars/ReadVariableOp"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_2/ReadVariableOp"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars/ReadVariableOp"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_3/ReadVariableOp"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars/ReadVariableOp"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant/add_1"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant/add_1" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.__operators__.add_29/AddV2"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_2/concat"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_1/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_1/Abs"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_1/Abs" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_1/add"; 
-"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_1/add/y" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_1/add"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_1/add" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_1/add_1"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_1/ReadVariableOp" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_1/add_1"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_1/add_1" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.__operators__.add_30/AddV2"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_2/concat"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_1/Abs/ReadVariableOp"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_2/Abs/ReadVariableOp"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_3/Abs/ReadVariableOp"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant/Abs"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant/Abs" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant/add"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant/add/y" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant/add"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant/add" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant/add_1"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_1/ReadVariableOp"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars/ReadVariableOp"; 
-"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_2/ReadVariableOp"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars/ReadVariableOp"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_3/ReadVariableOp"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars/ReadVariableOp"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant/add_1"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant/add_1" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.__operators__.add_25/AddV2"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_1/concat"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_1/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_1/Abs"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_1/Abs" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_1/add"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_1/add/y" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_1/add"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_1/add" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_1/add_1"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_1/ReadVariableOp" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_1/add_1"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_1/add_1" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.__operators__.add_26/AddV2"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_1/concat"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" -> 
"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_1/Abs/ReadVariableOp"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_2/Abs/ReadVariableOp"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_3/Abs/ReadVariableOp"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant/Abs"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant/Abs" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant/add"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant/add/y" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant/add"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant/add" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant/add_1"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_1/ReadVariableOp"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars/ReadVariableOp"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_2/ReadVariableOp"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars/ReadVariableOp"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_3/ReadVariableOp"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars/ReadVariableOp"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant/add_1"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant/add_1" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.__operators__.add_21/AddV2"; 
-"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat/concat"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_1/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_1/Abs"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_1/Abs" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_1/add"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_1/add/y" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_1/add"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_1/add" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_1/add_1"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_1/ReadVariableOp" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_1/add_1"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_1/add_1" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.__operators__.add_22/AddV2"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat/concat"; +"maskrcnn/tf.math.subtract_32/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_32/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.subtract_32/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_32/fake_quantize/AsymmQuant/Abs"; +"maskrcnn/tf.math.subtract_32/fake_quantize/AsymmQuant/Abs" -> "maskrcnn/tf.math.subtract_32/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.subtract_32/fake_quantize/AsymmQuant/add/y" -> "maskrcnn/tf.math.subtract_32/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.subtract_32/fake_quantize/AsymmQuant/add" -> "maskrcnn/tf.math.subtract_32/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.subtract_32/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_32/fake_quantize/AsymmQuant/ReadVariableOp"; +"maskrcnn/tf.math.subtract_32/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_32/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"maskrcnn/tf.math.subtract_32/fake_quantize/AsymmQuant/ReadVariableOp" -> "maskrcnn/tf.math.subtract_32/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.subtract_32/fake_quantize/AsymmQuant/add_1" -> "maskrcnn/tf.math.subtract_32/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_32/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.subtract_32/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_32/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.__operators__.add_37/AddV2"; +"maskrcnn/tf.math.subtract_32/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_4/concat"; +"maskrcnn/tf.math.subtract_33/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_33/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; 
+"maskrcnn/tf.math.subtract_33/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_33/fake_quantize/AsymmQuant/Abs"; +"maskrcnn/tf.math.subtract_33/fake_quantize/AsymmQuant/Abs" -> "maskrcnn/tf.math.subtract_33/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.subtract_33/fake_quantize/AsymmQuant/add/y" -> "maskrcnn/tf.math.subtract_33/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.subtract_33/fake_quantize/AsymmQuant/add" -> "maskrcnn/tf.math.subtract_33/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.subtract_33/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_33/fake_quantize/AsymmQuant/ReadVariableOp"; +"maskrcnn/tf.math.subtract_33/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_33/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"maskrcnn/tf.math.subtract_33/fake_quantize/AsymmQuant/ReadVariableOp" -> "maskrcnn/tf.math.subtract_33/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.subtract_33/fake_quantize/AsymmQuant/add_1" -> "maskrcnn/tf.math.subtract_33/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_33/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.subtract_33/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_33/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.__operators__.add_38/AddV2"; +"maskrcnn/tf.math.subtract_33/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_4/concat"; +"maskrcnn/tf.math.subtract_24/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_24/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.subtract_24/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_24/fake_quantize/AsymmQuant/Abs"; +"maskrcnn/tf.math.subtract_24/fake_quantize/AsymmQuant/Abs" -> "maskrcnn/tf.math.subtract_24/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.subtract_24/fake_quantize/AsymmQuant/add/y" -> "maskrcnn/tf.math.subtract_24/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.subtract_24/fake_quantize/AsymmQuant/add" -> "maskrcnn/tf.math.subtract_24/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.subtract_24/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_24/fake_quantize/AsymmQuant/ReadVariableOp"; +"maskrcnn/tf.math.subtract_24/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_24/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"maskrcnn/tf.math.subtract_24/fake_quantize/AsymmQuant/ReadVariableOp" -> "maskrcnn/tf.math.subtract_24/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.subtract_24/fake_quantize/AsymmQuant/add_1" -> "maskrcnn/tf.math.subtract_24/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_24/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.subtract_24/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_24/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.__operators__.add_33/AddV2"; +"maskrcnn/tf.math.subtract_24/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_3/concat"; +"maskrcnn/tf.math.subtract_25/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_25/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.subtract_25/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> 
"maskrcnn/tf.math.subtract_25/fake_quantize/AsymmQuant/Abs"; +"maskrcnn/tf.math.subtract_25/fake_quantize/AsymmQuant/Abs" -> "maskrcnn/tf.math.subtract_25/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.subtract_25/fake_quantize/AsymmQuant/add/y" -> "maskrcnn/tf.math.subtract_25/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.subtract_25/fake_quantize/AsymmQuant/add" -> "maskrcnn/tf.math.subtract_25/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.subtract_25/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_25/fake_quantize/AsymmQuant/ReadVariableOp"; +"maskrcnn/tf.math.subtract_25/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_25/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"maskrcnn/tf.math.subtract_25/fake_quantize/AsymmQuant/ReadVariableOp" -> "maskrcnn/tf.math.subtract_25/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.subtract_25/fake_quantize/AsymmQuant/add_1" -> "maskrcnn/tf.math.subtract_25/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_25/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.subtract_25/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_25/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.__operators__.add_34/AddV2"; +"maskrcnn/tf.math.subtract_25/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_3/concat"; +"maskrcnn/tf.math.subtract_16/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_16/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.subtract_16/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_16/fake_quantize/AsymmQuant/Abs"; +"maskrcnn/tf.math.subtract_16/fake_quantize/AsymmQuant/Abs" -> "maskrcnn/tf.math.subtract_16/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.subtract_16/fake_quantize/AsymmQuant/add/y" -> "maskrcnn/tf.math.subtract_16/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.subtract_16/fake_quantize/AsymmQuant/add" -> "maskrcnn/tf.math.subtract_16/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.subtract_16/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_16/fake_quantize/AsymmQuant/ReadVariableOp"; +"maskrcnn/tf.math.subtract_16/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_16/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"maskrcnn/tf.math.subtract_16/fake_quantize/AsymmQuant/ReadVariableOp" -> "maskrcnn/tf.math.subtract_16/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.subtract_16/fake_quantize/AsymmQuant/add_1" -> "maskrcnn/tf.math.subtract_16/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_16/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.subtract_16/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_16/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.__operators__.add_29/AddV2"; +"maskrcnn/tf.math.subtract_16/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_2/concat"; +"maskrcnn/tf.math.subtract_17/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_17/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.subtract_17/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_17/fake_quantize/AsymmQuant/Abs"; 
+"maskrcnn/tf.math.subtract_17/fake_quantize/AsymmQuant/Abs" -> "maskrcnn/tf.math.subtract_17/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.subtract_17/fake_quantize/AsymmQuant/add/y" -> "maskrcnn/tf.math.subtract_17/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.subtract_17/fake_quantize/AsymmQuant/add" -> "maskrcnn/tf.math.subtract_17/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.subtract_17/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_17/fake_quantize/AsymmQuant/ReadVariableOp"; +"maskrcnn/tf.math.subtract_17/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_17/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"maskrcnn/tf.math.subtract_17/fake_quantize/AsymmQuant/ReadVariableOp" -> "maskrcnn/tf.math.subtract_17/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.subtract_17/fake_quantize/AsymmQuant/add_1" -> "maskrcnn/tf.math.subtract_17/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_17/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.subtract_17/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_17/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.__operators__.add_30/AddV2"; +"maskrcnn/tf.math.subtract_17/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_2/concat"; +"maskrcnn/tf.math.subtract_8/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_8/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.subtract_8/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_8/fake_quantize/AsymmQuant/Abs"; +"maskrcnn/tf.math.subtract_8/fake_quantize/AsymmQuant/Abs" -> "maskrcnn/tf.math.subtract_8/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.subtract_8/fake_quantize/AsymmQuant/add/y" -> "maskrcnn/tf.math.subtract_8/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.subtract_8/fake_quantize/AsymmQuant/add" -> "maskrcnn/tf.math.subtract_8/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.subtract_8/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_8/fake_quantize/AsymmQuant/ReadVariableOp"; +"maskrcnn/tf.math.subtract_8/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_8/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"maskrcnn/tf.math.subtract_8/fake_quantize/AsymmQuant/ReadVariableOp" -> "maskrcnn/tf.math.subtract_8/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.subtract_8/fake_quantize/AsymmQuant/add_1" -> "maskrcnn/tf.math.subtract_8/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_8/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.subtract_8/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_8/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.__operators__.add_25/AddV2"; +"maskrcnn/tf.math.subtract_8/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_1/concat"; +"maskrcnn/tf.math.subtract_9/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_9/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.subtract_9/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_9/fake_quantize/AsymmQuant/Abs"; +"maskrcnn/tf.math.subtract_9/fake_quantize/AsymmQuant/Abs" -> "maskrcnn/tf.math.subtract_9/fake_quantize/AsymmQuant/add"; 
+"maskrcnn/tf.math.subtract_9/fake_quantize/AsymmQuant/add/y" -> "maskrcnn/tf.math.subtract_9/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.subtract_9/fake_quantize/AsymmQuant/add" -> "maskrcnn/tf.math.subtract_9/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.subtract_9/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_9/fake_quantize/AsymmQuant/ReadVariableOp"; +"maskrcnn/tf.math.subtract_9/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_9/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"maskrcnn/tf.math.subtract_9/fake_quantize/AsymmQuant/ReadVariableOp" -> "maskrcnn/tf.math.subtract_9/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.subtract_9/fake_quantize/AsymmQuant/add_1" -> "maskrcnn/tf.math.subtract_9/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_9/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.subtract_9/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_9/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.__operators__.add_26/AddV2"; +"maskrcnn/tf.math.subtract_9/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_1/concat"; +"maskrcnn/tf.math.subtract/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.subtract/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract/fake_quantize/AsymmQuant/Abs"; +"maskrcnn/tf.math.subtract/fake_quantize/AsymmQuant/Abs" -> "maskrcnn/tf.math.subtract/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.subtract/fake_quantize/AsymmQuant/add/y" -> "maskrcnn/tf.math.subtract/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.subtract/fake_quantize/AsymmQuant/add" -> "maskrcnn/tf.math.subtract/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.subtract/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract/fake_quantize/AsymmQuant/ReadVariableOp"; +"maskrcnn/tf.math.subtract/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"maskrcnn/tf.math.subtract/fake_quantize/AsymmQuant/ReadVariableOp" -> "maskrcnn/tf.math.subtract/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.subtract/fake_quantize/AsymmQuant/add_1" -> "maskrcnn/tf.math.subtract/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.subtract/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.__operators__.add_21/AddV2"; +"maskrcnn/tf.math.subtract/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat/concat"; +"maskrcnn/tf.math.subtract_1/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_1/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.subtract_1/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_1/fake_quantize/AsymmQuant/Abs"; +"maskrcnn/tf.math.subtract_1/fake_quantize/AsymmQuant/Abs" -> "maskrcnn/tf.math.subtract_1/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.subtract_1/fake_quantize/AsymmQuant/add/y" -> "maskrcnn/tf.math.subtract_1/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.subtract_1/fake_quantize/AsymmQuant/add" -> 
"maskrcnn/tf.math.subtract_1/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.subtract_1/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_1/fake_quantize/AsymmQuant/ReadVariableOp"; +"maskrcnn/tf.math.subtract_1/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_1/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"maskrcnn/tf.math.subtract_1/fake_quantize/AsymmQuant/ReadVariableOp" -> "maskrcnn/tf.math.subtract_1/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.subtract_1/fake_quantize/AsymmQuant/add_1" -> "maskrcnn/tf.math.subtract_1/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_1/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.subtract_1/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_1/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.__operators__.add_22/AddV2"; +"maskrcnn/tf.math.subtract_1/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat/concat"; "maskrcnn/tf.__operators__.add_38/AddV2" -> "maskrcnn/tf.__operators__.add_38/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.__operators__.add_37/AddV2" -> "maskrcnn/tf.__operators__.add_37/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.__operators__.add_34/AddV2" -> "maskrcnn/tf.__operators__.add_34/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; @@ -11604,9 +11298,9 @@ args_0_1 -> "maskrcnn/image_info/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVar "maskrcnn/tf.math.subtract_36/Sub/y" -> "maskrcnn/tf.math.subtract_36/Sub"; "maskrcnn/tf.math.subtract_36/Sub" -> "maskrcnn/tf.math.subtract_36/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_35/Sub/y" -> "maskrcnn/tf.math.subtract_35/Sub"; -"maskrcnn/tf.math.subtract_35/Sub" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_35/Sub" -> "maskrcnn/tf.math.subtract_35/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_34/Sub/y" -> "maskrcnn/tf.math.subtract_34/Sub"; -"maskrcnn/tf.math.subtract_34/Sub" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_34/Sub" -> "maskrcnn/tf.math.subtract_34/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_31/Sub/y" -> "maskrcnn/tf.math.subtract_31/Sub"; "maskrcnn/tf.math.subtract_31/Sub" -> "maskrcnn/tf.math.subtract_31/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_30/Sub/y" -> "maskrcnn/tf.math.subtract_30/Sub"; @@ -11616,9 +11310,9 @@ args_0_1 -> "maskrcnn/image_info/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVar "maskrcnn/tf.math.subtract_28/Sub/y" -> "maskrcnn/tf.math.subtract_28/Sub"; "maskrcnn/tf.math.subtract_28/Sub" -> "maskrcnn/tf.math.subtract_28/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_27/Sub/y" -> "maskrcnn/tf.math.subtract_27/Sub"; -"maskrcnn/tf.math.subtract_27/Sub" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_27/Sub" -> "maskrcnn/tf.math.subtract_27/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_26/Sub/y" -> "maskrcnn/tf.math.subtract_26/Sub"; -"maskrcnn/tf.math.subtract_26/Sub" -> 
"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_26/Sub" -> "maskrcnn/tf.math.subtract_26/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_23/Sub/y" -> "maskrcnn/tf.math.subtract_23/Sub"; "maskrcnn/tf.math.subtract_23/Sub" -> "maskrcnn/tf.math.subtract_23/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_22/Sub/y" -> "maskrcnn/tf.math.subtract_22/Sub"; @@ -11628,9 +11322,9 @@ args_0_1 -> "maskrcnn/image_info/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVar "maskrcnn/tf.math.subtract_20/Sub/y" -> "maskrcnn/tf.math.subtract_20/Sub"; "maskrcnn/tf.math.subtract_20/Sub" -> "maskrcnn/tf.math.subtract_20/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_19/Sub/y" -> "maskrcnn/tf.math.subtract_19/Sub"; -"maskrcnn/tf.math.subtract_19/Sub" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_19/Sub" -> "maskrcnn/tf.math.subtract_19/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_18/Sub/y" -> "maskrcnn/tf.math.subtract_18/Sub"; -"maskrcnn/tf.math.subtract_18/Sub" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_18/Sub" -> "maskrcnn/tf.math.subtract_18/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_15/Sub/y" -> "maskrcnn/tf.math.subtract_15/Sub"; "maskrcnn/tf.math.subtract_15/Sub" -> "maskrcnn/tf.math.subtract_15/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_14/Sub/y" -> "maskrcnn/tf.math.subtract_14/Sub"; @@ -11640,9 +11334,9 @@ args_0_1 -> "maskrcnn/image_info/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVar "maskrcnn/tf.math.subtract_12/Sub/y" -> "maskrcnn/tf.math.subtract_12/Sub"; "maskrcnn/tf.math.subtract_12/Sub" -> "maskrcnn/tf.math.subtract_12/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_11/Sub/y" -> "maskrcnn/tf.math.subtract_11/Sub"; -"maskrcnn/tf.math.subtract_11/Sub" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_11/Sub" -> "maskrcnn/tf.math.subtract_11/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_10/Sub/y" -> "maskrcnn/tf.math.subtract_10/Sub"; -"maskrcnn/tf.math.subtract_10/Sub" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_10/Sub" -> "maskrcnn/tf.math.subtract_10/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_7/Sub/y" -> "maskrcnn/tf.math.subtract_7/Sub"; "maskrcnn/tf.math.subtract_7/Sub" -> "maskrcnn/tf.math.subtract_7/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_6/Sub/y" -> "maskrcnn/tf.math.subtract_6/Sub"; @@ -11652,9 +11346,9 @@ args_0_1 -> "maskrcnn/image_info/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVar "maskrcnn/tf.math.subtract_4/Sub/y" -> "maskrcnn/tf.math.subtract_4/Sub"; "maskrcnn/tf.math.subtract_4/Sub" -> "maskrcnn/tf.math.subtract_4/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_3/Sub/y" -> "maskrcnn/tf.math.subtract_3/Sub"; -"maskrcnn/tf.math.subtract_3/Sub" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_3/Sub" -> 
"maskrcnn/tf.math.subtract_3/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_2/Sub/y" -> "maskrcnn/tf.math.subtract_2/Sub"; -"maskrcnn/tf.math.subtract_2/Sub" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_2/Sub" -> "maskrcnn/tf.math.subtract_2/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_36/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_36/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; "maskrcnn/tf.math.subtract_36/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_36/fake_quantize/AsymmQuant/Abs"; "maskrcnn/tf.math.subtract_36/fake_quantize/AsymmQuant/Abs" -> "maskrcnn/tf.math.subtract_36/fake_quantize/AsymmQuant/add"; @@ -11699,22 +11393,28 @@ args_0_1 -> "maskrcnn/image_info/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVar "maskrcnn/tf.math.subtract_39/fake_quantize/AsymmQuant/add_1" -> "maskrcnn/tf.math.subtract_39/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_39/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.subtract_39/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_39/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.stack_4/stack"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_2/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_2/Abs"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_2/Abs" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_2/add"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_2/add/y" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_2/add"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_2/add" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_2/add_1"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_2/ReadVariableOp" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_2/add_1"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_2/add_1" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_4/concat"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_3/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_3/Abs"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_3/Abs" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_3/add"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_3/add/y" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_3/add"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_3/add" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_3/add_1"; 
-"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_3/ReadVariableOp" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_3/add_1"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_3/add_1" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_4/concat"; +"maskrcnn/tf.math.subtract_34/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_34/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.subtract_34/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_34/fake_quantize/AsymmQuant/Abs"; +"maskrcnn/tf.math.subtract_34/fake_quantize/AsymmQuant/Abs" -> "maskrcnn/tf.math.subtract_34/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.subtract_34/fake_quantize/AsymmQuant/add/y" -> "maskrcnn/tf.math.subtract_34/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.subtract_34/fake_quantize/AsymmQuant/add" -> "maskrcnn/tf.math.subtract_34/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.subtract_34/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_34/fake_quantize/AsymmQuant/ReadVariableOp"; +"maskrcnn/tf.math.subtract_34/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_34/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"maskrcnn/tf.math.subtract_34/fake_quantize/AsymmQuant/ReadVariableOp" -> "maskrcnn/tf.math.subtract_34/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.subtract_34/fake_quantize/AsymmQuant/add_1" -> "maskrcnn/tf.math.subtract_34/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_34/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.subtract_34/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_34/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_4/concat"; +"maskrcnn/tf.math.subtract_35/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_35/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.subtract_35/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_35/fake_quantize/AsymmQuant/Abs"; +"maskrcnn/tf.math.subtract_35/fake_quantize/AsymmQuant/Abs" -> "maskrcnn/tf.math.subtract_35/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.subtract_35/fake_quantize/AsymmQuant/add/y" -> "maskrcnn/tf.math.subtract_35/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.subtract_35/fake_quantize/AsymmQuant/add" -> "maskrcnn/tf.math.subtract_35/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.subtract_35/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_35/fake_quantize/AsymmQuant/ReadVariableOp"; +"maskrcnn/tf.math.subtract_35/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_35/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"maskrcnn/tf.math.subtract_35/fake_quantize/AsymmQuant/ReadVariableOp" -> "maskrcnn/tf.math.subtract_35/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.subtract_35/fake_quantize/AsymmQuant/add_1" 
-> "maskrcnn/tf.math.subtract_35/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_35/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.subtract_35/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_35/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_4/concat"; "maskrcnn/tf.math.subtract_28/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_28/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; "maskrcnn/tf.math.subtract_28/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_28/fake_quantize/AsymmQuant/Abs"; "maskrcnn/tf.math.subtract_28/fake_quantize/AsymmQuant/Abs" -> "maskrcnn/tf.math.subtract_28/fake_quantize/AsymmQuant/add"; @@ -11759,22 +11459,28 @@ args_0_1 -> "maskrcnn/image_info/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVar "maskrcnn/tf.math.subtract_31/fake_quantize/AsymmQuant/add_1" -> "maskrcnn/tf.math.subtract_31/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_31/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.subtract_31/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_31/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.stack_3/stack"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_2/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_2/Abs"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_2/Abs" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_2/add"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_2/add/y" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_2/add"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_2/add" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_2/add_1"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_2/ReadVariableOp" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_2/add_1"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_2/add_1" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_3/concat"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_3/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_3/Abs"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_3/Abs" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_3/add"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_3/add/y" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_3/add"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_3/add" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_3/add_1"; 
-"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_3/ReadVariableOp" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_3/add_1"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_3/add_1" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_3/concat"; +"maskrcnn/tf.math.subtract_26/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_26/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.subtract_26/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_26/fake_quantize/AsymmQuant/Abs"; +"maskrcnn/tf.math.subtract_26/fake_quantize/AsymmQuant/Abs" -> "maskrcnn/tf.math.subtract_26/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.subtract_26/fake_quantize/AsymmQuant/add/y" -> "maskrcnn/tf.math.subtract_26/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.subtract_26/fake_quantize/AsymmQuant/add" -> "maskrcnn/tf.math.subtract_26/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.subtract_26/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_26/fake_quantize/AsymmQuant/ReadVariableOp"; +"maskrcnn/tf.math.subtract_26/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_26/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"maskrcnn/tf.math.subtract_26/fake_quantize/AsymmQuant/ReadVariableOp" -> "maskrcnn/tf.math.subtract_26/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.subtract_26/fake_quantize/AsymmQuant/add_1" -> "maskrcnn/tf.math.subtract_26/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_26/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.subtract_26/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_26/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_3/concat"; +"maskrcnn/tf.math.subtract_27/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_27/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.subtract_27/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_27/fake_quantize/AsymmQuant/Abs"; +"maskrcnn/tf.math.subtract_27/fake_quantize/AsymmQuant/Abs" -> "maskrcnn/tf.math.subtract_27/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.subtract_27/fake_quantize/AsymmQuant/add/y" -> "maskrcnn/tf.math.subtract_27/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.subtract_27/fake_quantize/AsymmQuant/add" -> "maskrcnn/tf.math.subtract_27/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.subtract_27/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_27/fake_quantize/AsymmQuant/ReadVariableOp"; +"maskrcnn/tf.math.subtract_27/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_27/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"maskrcnn/tf.math.subtract_27/fake_quantize/AsymmQuant/ReadVariableOp" -> "maskrcnn/tf.math.subtract_27/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.subtract_27/fake_quantize/AsymmQuant/add_1" 
-> "maskrcnn/tf.math.subtract_27/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_27/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.subtract_27/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_27/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_3/concat"; "maskrcnn/tf.math.subtract_20/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_20/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; "maskrcnn/tf.math.subtract_20/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_20/fake_quantize/AsymmQuant/Abs"; "maskrcnn/tf.math.subtract_20/fake_quantize/AsymmQuant/Abs" -> "maskrcnn/tf.math.subtract_20/fake_quantize/AsymmQuant/add"; @@ -11819,22 +11525,28 @@ args_0_1 -> "maskrcnn/image_info/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVar "maskrcnn/tf.math.subtract_23/fake_quantize/AsymmQuant/add_1" -> "maskrcnn/tf.math.subtract_23/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_23/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.subtract_23/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_23/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.stack_2/stack"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_2/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_2/Abs"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_2/Abs" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_2/add"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_2/add/y" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_2/add"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_2/add" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_2/add_1"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_2/ReadVariableOp" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_2/add_1"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_2/add_1" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_2/concat"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_3/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_3/Abs"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_3/Abs" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_3/add"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_3/add/y" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_3/add"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_3/add" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_3/add_1"; 
-"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_3/ReadVariableOp" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_3/add_1"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_3/add_1" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_2/concat"; +"maskrcnn/tf.math.subtract_18/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_18/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.subtract_18/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_18/fake_quantize/AsymmQuant/Abs"; +"maskrcnn/tf.math.subtract_18/fake_quantize/AsymmQuant/Abs" -> "maskrcnn/tf.math.subtract_18/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.subtract_18/fake_quantize/AsymmQuant/add/y" -> "maskrcnn/tf.math.subtract_18/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.subtract_18/fake_quantize/AsymmQuant/add" -> "maskrcnn/tf.math.subtract_18/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.subtract_18/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_18/fake_quantize/AsymmQuant/ReadVariableOp"; +"maskrcnn/tf.math.subtract_18/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_18/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"maskrcnn/tf.math.subtract_18/fake_quantize/AsymmQuant/ReadVariableOp" -> "maskrcnn/tf.math.subtract_18/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.subtract_18/fake_quantize/AsymmQuant/add_1" -> "maskrcnn/tf.math.subtract_18/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_18/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.subtract_18/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_18/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_2/concat"; +"maskrcnn/tf.math.subtract_19/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_19/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.subtract_19/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_19/fake_quantize/AsymmQuant/Abs"; +"maskrcnn/tf.math.subtract_19/fake_quantize/AsymmQuant/Abs" -> "maskrcnn/tf.math.subtract_19/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.subtract_19/fake_quantize/AsymmQuant/add/y" -> "maskrcnn/tf.math.subtract_19/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.subtract_19/fake_quantize/AsymmQuant/add" -> "maskrcnn/tf.math.subtract_19/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.subtract_19/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_19/fake_quantize/AsymmQuant/ReadVariableOp"; +"maskrcnn/tf.math.subtract_19/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_19/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"maskrcnn/tf.math.subtract_19/fake_quantize/AsymmQuant/ReadVariableOp" -> "maskrcnn/tf.math.subtract_19/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.subtract_19/fake_quantize/AsymmQuant/add_1" 
-> "maskrcnn/tf.math.subtract_19/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_19/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.subtract_19/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_19/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_2/concat"; "maskrcnn/tf.math.subtract_12/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_12/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; "maskrcnn/tf.math.subtract_12/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_12/fake_quantize/AsymmQuant/Abs"; "maskrcnn/tf.math.subtract_12/fake_quantize/AsymmQuant/Abs" -> "maskrcnn/tf.math.subtract_12/fake_quantize/AsymmQuant/add"; @@ -11879,22 +11591,28 @@ args_0_1 -> "maskrcnn/image_info/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVar "maskrcnn/tf.math.subtract_15/fake_quantize/AsymmQuant/add_1" -> "maskrcnn/tf.math.subtract_15/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_15/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.subtract_15/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_15/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.stack_1/stack"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_2/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_2/Abs"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_2/Abs" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_2/add"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_2/add/y" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_2/add"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_2/add" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_2/add_1"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_2/ReadVariableOp" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_2/add_1"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_2/add_1" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_1/concat"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_3/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_3/Abs"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_3/Abs" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_3/add"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_3/add/y" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_3/add"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_3/add" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_3/add_1"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_3/ReadVariableOp" -> 
"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_3/add_1"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_3/add_1" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_1/concat"; +"maskrcnn/tf.math.subtract_10/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_10/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.subtract_10/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_10/fake_quantize/AsymmQuant/Abs"; +"maskrcnn/tf.math.subtract_10/fake_quantize/AsymmQuant/Abs" -> "maskrcnn/tf.math.subtract_10/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.subtract_10/fake_quantize/AsymmQuant/add/y" -> "maskrcnn/tf.math.subtract_10/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.subtract_10/fake_quantize/AsymmQuant/add" -> "maskrcnn/tf.math.subtract_10/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.subtract_10/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_10/fake_quantize/AsymmQuant/ReadVariableOp"; +"maskrcnn/tf.math.subtract_10/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_10/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"maskrcnn/tf.math.subtract_10/fake_quantize/AsymmQuant/ReadVariableOp" -> "maskrcnn/tf.math.subtract_10/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.subtract_10/fake_quantize/AsymmQuant/add_1" -> "maskrcnn/tf.math.subtract_10/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_10/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.subtract_10/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_10/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_1/concat"; +"maskrcnn/tf.math.subtract_11/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_11/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.subtract_11/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_11/fake_quantize/AsymmQuant/Abs"; +"maskrcnn/tf.math.subtract_11/fake_quantize/AsymmQuant/Abs" -> "maskrcnn/tf.math.subtract_11/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.subtract_11/fake_quantize/AsymmQuant/add/y" -> "maskrcnn/tf.math.subtract_11/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.subtract_11/fake_quantize/AsymmQuant/add" -> "maskrcnn/tf.math.subtract_11/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.subtract_11/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_11/fake_quantize/AsymmQuant/ReadVariableOp"; +"maskrcnn/tf.math.subtract_11/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_11/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"maskrcnn/tf.math.subtract_11/fake_quantize/AsymmQuant/ReadVariableOp" -> "maskrcnn/tf.math.subtract_11/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.subtract_11/fake_quantize/AsymmQuant/add_1" -> "maskrcnn/tf.math.subtract_11/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; 
+"maskrcnn/tf.math.subtract_11/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.subtract_11/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_11/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_1/concat"; "maskrcnn/tf.math.subtract_4/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_4/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; "maskrcnn/tf.math.subtract_4/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_4/fake_quantize/AsymmQuant/Abs"; "maskrcnn/tf.math.subtract_4/fake_quantize/AsymmQuant/Abs" -> "maskrcnn/tf.math.subtract_4/fake_quantize/AsymmQuant/add"; @@ -11939,22 +11657,28 @@ args_0_1 -> "maskrcnn/image_info/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVar "maskrcnn/tf.math.subtract_7/fake_quantize/AsymmQuant/add_1" -> "maskrcnn/tf.math.subtract_7/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_7/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.subtract_7/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_7/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.stack/stack"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_2/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_2/Abs"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_2/Abs" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_2/add"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_2/add/y" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_2/add"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_2/add" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_2/add_1"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_2/ReadVariableOp" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_2/add_1"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_2/add_1" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat/concat"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_3/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_3/Abs"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_3/Abs" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_3/add"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_3/add/y" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_3/add"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_3/add" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_3/add_1"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_3/ReadVariableOp" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_3/add_1"; 
-"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_3/add_1" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat/concat"; +"maskrcnn/tf.math.subtract_2/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_2/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.subtract_2/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_2/fake_quantize/AsymmQuant/Abs"; +"maskrcnn/tf.math.subtract_2/fake_quantize/AsymmQuant/Abs" -> "maskrcnn/tf.math.subtract_2/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.subtract_2/fake_quantize/AsymmQuant/add/y" -> "maskrcnn/tf.math.subtract_2/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.subtract_2/fake_quantize/AsymmQuant/add" -> "maskrcnn/tf.math.subtract_2/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.subtract_2/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_2/fake_quantize/AsymmQuant/ReadVariableOp"; +"maskrcnn/tf.math.subtract_2/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_2/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"maskrcnn/tf.math.subtract_2/fake_quantize/AsymmQuant/ReadVariableOp" -> "maskrcnn/tf.math.subtract_2/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.subtract_2/fake_quantize/AsymmQuant/add_1" -> "maskrcnn/tf.math.subtract_2/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_2/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.subtract_2/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_2/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat/concat"; +"maskrcnn/tf.math.subtract_3/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_3/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.subtract_3/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_3/fake_quantize/AsymmQuant/Abs"; +"maskrcnn/tf.math.subtract_3/fake_quantize/AsymmQuant/Abs" -> "maskrcnn/tf.math.subtract_3/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.subtract_3/fake_quantize/AsymmQuant/add/y" -> "maskrcnn/tf.math.subtract_3/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.subtract_3/fake_quantize/AsymmQuant/add" -> "maskrcnn/tf.math.subtract_3/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.subtract_3/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_3/fake_quantize/AsymmQuant/ReadVariableOp"; +"maskrcnn/tf.math.subtract_3/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_3/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"maskrcnn/tf.math.subtract_3/fake_quantize/AsymmQuant/ReadVariableOp" -> "maskrcnn/tf.math.subtract_3/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.subtract_3/fake_quantize/AsymmQuant/add_1" -> "maskrcnn/tf.math.subtract_3/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_3/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> 
"maskrcnn/tf.math.subtract_3/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_3/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat/concat"; "maskrcnn/tf.concat_4/concat/axis" -> "maskrcnn/tf.concat_4/concat"; "maskrcnn/tf.concat_4/concat" -> "maskrcnn/tf.math.minimum_14/Minimum"; "maskrcnn/tf.stack_4/stack" -> "maskrcnn/tf.math.minimum_14/Minimum"; @@ -12165,31 +11889,31 @@ args_0_1 -> "maskrcnn/image_info/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVar "maskrcnn/tf.image.combined_non_max_suppression/combined_non_max_suppression/Const" -> "maskrcnn/tf.image.combined_non_max_suppression/combined_non_max_suppression/CombinedNonMaxSuppression"; "maskrcnn/tf.image.combined_non_max_suppression/combined_non_max_suppression/CombinedNonMaxSuppression/max_output_size_per_class" -> "maskrcnn/tf.image.combined_non_max_suppression/combined_non_max_suppression/CombinedNonMaxSuppression"; "maskrcnn/tf.image.combined_non_max_suppression/combined_non_max_suppression/CombinedNonMaxSuppression" -> "maskrcnn/tf.concat_6/concat"; -"maskrcnn/tf.image.combined_non_max_suppression/combined_non_max_suppression/CombinedNonMaxSuppression" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.image.combined_non_max_suppression/combined_non_max_suppression/CombinedNonMaxSuppression" -> "maskrcnn/tf.concat_5/fake_quantize_I0/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.image.combined_non_max_suppression_1/combined_non_max_suppression/iou_threshold" -> "maskrcnn/tf.image.combined_non_max_suppression_1/combined_non_max_suppression/CombinedNonMaxSuppression"; "maskrcnn/tf.image.combined_non_max_suppression_1/combined_non_max_suppression/score_threshold" -> "maskrcnn/tf.image.combined_non_max_suppression_1/combined_non_max_suppression/CombinedNonMaxSuppression"; "maskrcnn/tf.image.combined_non_max_suppression_1/combined_non_max_suppression/Const" -> "maskrcnn/tf.image.combined_non_max_suppression_1/combined_non_max_suppression/CombinedNonMaxSuppression"; "maskrcnn/tf.image.combined_non_max_suppression_1/combined_non_max_suppression/CombinedNonMaxSuppression/max_output_size_per_class" -> "maskrcnn/tf.image.combined_non_max_suppression_1/combined_non_max_suppression/CombinedNonMaxSuppression"; "maskrcnn/tf.image.combined_non_max_suppression_1/combined_non_max_suppression/CombinedNonMaxSuppression" -> "maskrcnn/tf.concat_6/concat"; -"maskrcnn/tf.image.combined_non_max_suppression_1/combined_non_max_suppression/CombinedNonMaxSuppression" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.image.combined_non_max_suppression_1/combined_non_max_suppression/CombinedNonMaxSuppression" -> "maskrcnn/tf.concat_5/fake_quantize_I1/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.image.combined_non_max_suppression_2/combined_non_max_suppression/iou_threshold" -> "maskrcnn/tf.image.combined_non_max_suppression_2/combined_non_max_suppression/CombinedNonMaxSuppression"; "maskrcnn/tf.image.combined_non_max_suppression_2/combined_non_max_suppression/score_threshold" -> "maskrcnn/tf.image.combined_non_max_suppression_2/combined_non_max_suppression/CombinedNonMaxSuppression"; "maskrcnn/tf.image.combined_non_max_suppression_2/combined_non_max_suppression/Const" -> "maskrcnn/tf.image.combined_non_max_suppression_2/combined_non_max_suppression/CombinedNonMaxSuppression"; 
"maskrcnn/tf.image.combined_non_max_suppression_2/combined_non_max_suppression/CombinedNonMaxSuppression/max_output_size_per_class" -> "maskrcnn/tf.image.combined_non_max_suppression_2/combined_non_max_suppression/CombinedNonMaxSuppression"; "maskrcnn/tf.image.combined_non_max_suppression_2/combined_non_max_suppression/CombinedNonMaxSuppression" -> "maskrcnn/tf.concat_6/concat"; -"maskrcnn/tf.image.combined_non_max_suppression_2/combined_non_max_suppression/CombinedNonMaxSuppression" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.image.combined_non_max_suppression_2/combined_non_max_suppression/CombinedNonMaxSuppression" -> "maskrcnn/tf.concat_5/fake_quantize_I2/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.image.combined_non_max_suppression_3/combined_non_max_suppression/iou_threshold" -> "maskrcnn/tf.image.combined_non_max_suppression_3/combined_non_max_suppression/CombinedNonMaxSuppression"; "maskrcnn/tf.image.combined_non_max_suppression_3/combined_non_max_suppression/score_threshold" -> "maskrcnn/tf.image.combined_non_max_suppression_3/combined_non_max_suppression/CombinedNonMaxSuppression"; "maskrcnn/tf.image.combined_non_max_suppression_3/combined_non_max_suppression/Const" -> "maskrcnn/tf.image.combined_non_max_suppression_3/combined_non_max_suppression/CombinedNonMaxSuppression"; "maskrcnn/tf.image.combined_non_max_suppression_3/combined_non_max_suppression/CombinedNonMaxSuppression/max_output_size_per_class" -> "maskrcnn/tf.image.combined_non_max_suppression_3/combined_non_max_suppression/CombinedNonMaxSuppression"; "maskrcnn/tf.image.combined_non_max_suppression_3/combined_non_max_suppression/CombinedNonMaxSuppression" -> "maskrcnn/tf.concat_6/concat"; -"maskrcnn/tf.image.combined_non_max_suppression_3/combined_non_max_suppression/CombinedNonMaxSuppression" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.image.combined_non_max_suppression_3/combined_non_max_suppression/CombinedNonMaxSuppression" -> "maskrcnn/tf.concat_5/fake_quantize_I3/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.image.combined_non_max_suppression_4/combined_non_max_suppression/iou_threshold" -> "maskrcnn/tf.image.combined_non_max_suppression_4/combined_non_max_suppression/CombinedNonMaxSuppression"; "maskrcnn/tf.image.combined_non_max_suppression_4/combined_non_max_suppression/score_threshold" -> "maskrcnn/tf.image.combined_non_max_suppression_4/combined_non_max_suppression/CombinedNonMaxSuppression"; "maskrcnn/tf.image.combined_non_max_suppression_4/combined_non_max_suppression/Const" -> "maskrcnn/tf.image.combined_non_max_suppression_4/combined_non_max_suppression/CombinedNonMaxSuppression"; "maskrcnn/tf.image.combined_non_max_suppression_4/combined_non_max_suppression/CombinedNonMaxSuppression/max_output_size_per_class" -> "maskrcnn/tf.image.combined_non_max_suppression_4/combined_non_max_suppression/CombinedNonMaxSuppression"; "maskrcnn/tf.image.combined_non_max_suppression_4/combined_non_max_suppression/CombinedNonMaxSuppression" -> "maskrcnn/tf.concat_6/concat"; -"maskrcnn/tf.image.combined_non_max_suppression_4/combined_non_max_suppression/CombinedNonMaxSuppression" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_4/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.image.combined_non_max_suppression_4/combined_non_max_suppression/CombinedNonMaxSuppression" -> "maskrcnn/tf.concat_5/fake_quantize_I4/AsymmQuant/FakeQuantWithMinMaxVars"; 
"maskrcnn/tf.concat_6/concat/axis" -> "maskrcnn/tf.concat_6/concat"; "maskrcnn/tf.concat_6/concat" -> "maskrcnn/tf.math.top_k/TopKV2"; "maskrcnn/tf.math.top_k/TopKV2/k" -> "maskrcnn/tf.math.top_k/TopKV2"; @@ -12213,61 +11937,61 @@ args_0_1 -> "maskrcnn/image_info/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVar "maskrcnn/tf.ones/ones/Const" -> "maskrcnn/tf.ones/ones"; "maskrcnn/tf.ones/ones" -> "maskrcnn/tf.math.multiply_30/Mul"; "maskrcnn/tf.math.multiply_30/Mul" -> "maskrcnn/tf.stack_5/stack"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant/Abs/ReadVariableOp"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_1/Abs/ReadVariableOp"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_2/Abs/ReadVariableOp"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_3/Abs/ReadVariableOp"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_4/Abs/ReadVariableOp"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant/Abs"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant/Abs" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant/add"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant/add/y" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant/add"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant/add" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant/add_1"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant/ReadVariableOp"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_1/ReadVariableOp"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars/ReadVariableOp"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_2/ReadVariableOp"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars/ReadVariableOp"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_3/ReadVariableOp"; 
-"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars/ReadVariableOp"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_4/ReadVariableOp"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_4/FakeQuantWithMinMaxVars/ReadVariableOp"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant/ReadVariableOp" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant/add_1"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant/add_1" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_5/concat"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_1/Abs/ReadVariableOp" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_1/Abs"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_1/Abs" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_1/add"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_1/add/y" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_1/add"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_1/add" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_1/add_1"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_1/ReadVariableOp" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_1/add_1"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_1/add_1" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_5/concat"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_2/Abs/ReadVariableOp" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_2/Abs"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_2/Abs" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_2/add"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_2/add/y" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_2/add"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_2/add" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_2/add_1"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_2/ReadVariableOp" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_2/add_1"; 
-"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_2/add_1" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_5/concat"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_3/Abs/ReadVariableOp" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_3/Abs"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_3/Abs" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_3/add"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_3/add/y" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_3/add"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_3/add" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_3/add_1"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_3/ReadVariableOp" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_3/add_1"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_3/add_1" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_5/concat"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_4/Abs/ReadVariableOp" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_4/Abs"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_4/Abs" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_4/add"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_4/add/y" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_4/add"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_4/add" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_4/add_1"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_4/ReadVariableOp" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_4/add_1"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_4/add_1" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_4/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_4/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_4/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/AsymmQuant_4/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_5/concat"; +"maskrcnn/tf.concat_5/fake_quantize_I0/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.concat_5/fake_quantize_I0/AsymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.concat_5/fake_quantize_I0/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.concat_5/fake_quantize_I0/AsymmQuant/Abs"; 
+"maskrcnn/tf.concat_5/fake_quantize_I0/AsymmQuant/Abs" -> "maskrcnn/tf.concat_5/fake_quantize_I0/AsymmQuant/add"; +"maskrcnn/tf.concat_5/fake_quantize_I0/AsymmQuant/add/y" -> "maskrcnn/tf.concat_5/fake_quantize_I0/AsymmQuant/add"; +"maskrcnn/tf.concat_5/fake_quantize_I0/AsymmQuant/add" -> "maskrcnn/tf.concat_5/fake_quantize_I0/AsymmQuant/add_1"; +"maskrcnn/tf.concat_5/fake_quantize_I0/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.concat_5/fake_quantize_I0/AsymmQuant/ReadVariableOp"; +"maskrcnn/tf.concat_5/fake_quantize_I0/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.concat_5/fake_quantize_I0/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"maskrcnn/tf.concat_5/fake_quantize_I0/AsymmQuant/ReadVariableOp" -> "maskrcnn/tf.concat_5/fake_quantize_I0/AsymmQuant/add_1"; +"maskrcnn/tf.concat_5/fake_quantize_I0/AsymmQuant/add_1" -> "maskrcnn/tf.concat_5/fake_quantize_I0/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.concat_5/fake_quantize_I0/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.concat_5/fake_quantize_I0/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.concat_5/fake_quantize_I0/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_5/concat"; +"maskrcnn/tf.concat_5/fake_quantize_I1/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.concat_5/fake_quantize_I1/AsymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.concat_5/fake_quantize_I1/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.concat_5/fake_quantize_I1/AsymmQuant/Abs"; +"maskrcnn/tf.concat_5/fake_quantize_I1/AsymmQuant/Abs" -> "maskrcnn/tf.concat_5/fake_quantize_I1/AsymmQuant/add"; +"maskrcnn/tf.concat_5/fake_quantize_I1/AsymmQuant/add/y" -> "maskrcnn/tf.concat_5/fake_quantize_I1/AsymmQuant/add"; +"maskrcnn/tf.concat_5/fake_quantize_I1/AsymmQuant/add" -> "maskrcnn/tf.concat_5/fake_quantize_I1/AsymmQuant/add_1"; +"maskrcnn/tf.concat_5/fake_quantize_I1/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.concat_5/fake_quantize_I1/AsymmQuant/ReadVariableOp"; +"maskrcnn/tf.concat_5/fake_quantize_I1/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.concat_5/fake_quantize_I1/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"maskrcnn/tf.concat_5/fake_quantize_I1/AsymmQuant/ReadVariableOp" -> "maskrcnn/tf.concat_5/fake_quantize_I1/AsymmQuant/add_1"; +"maskrcnn/tf.concat_5/fake_quantize_I1/AsymmQuant/add_1" -> "maskrcnn/tf.concat_5/fake_quantize_I1/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.concat_5/fake_quantize_I1/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.concat_5/fake_quantize_I1/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.concat_5/fake_quantize_I1/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_5/concat"; +"maskrcnn/tf.concat_5/fake_quantize_I2/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.concat_5/fake_quantize_I2/AsymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.concat_5/fake_quantize_I2/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.concat_5/fake_quantize_I2/AsymmQuant/Abs"; +"maskrcnn/tf.concat_5/fake_quantize_I2/AsymmQuant/Abs" -> "maskrcnn/tf.concat_5/fake_quantize_I2/AsymmQuant/add"; +"maskrcnn/tf.concat_5/fake_quantize_I2/AsymmQuant/add/y" -> "maskrcnn/tf.concat_5/fake_quantize_I2/AsymmQuant/add"; +"maskrcnn/tf.concat_5/fake_quantize_I2/AsymmQuant/add" -> "maskrcnn/tf.concat_5/fake_quantize_I2/AsymmQuant/add_1"; +"maskrcnn/tf.concat_5/fake_quantize_I2/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.concat_5/fake_quantize_I2/AsymmQuant/ReadVariableOp"; 
+"maskrcnn/tf.concat_5/fake_quantize_I2/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.concat_5/fake_quantize_I2/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"maskrcnn/tf.concat_5/fake_quantize_I2/AsymmQuant/ReadVariableOp" -> "maskrcnn/tf.concat_5/fake_quantize_I2/AsymmQuant/add_1"; +"maskrcnn/tf.concat_5/fake_quantize_I2/AsymmQuant/add_1" -> "maskrcnn/tf.concat_5/fake_quantize_I2/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.concat_5/fake_quantize_I2/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.concat_5/fake_quantize_I2/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.concat_5/fake_quantize_I2/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_5/concat"; +"maskrcnn/tf.concat_5/fake_quantize_I3/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.concat_5/fake_quantize_I3/AsymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.concat_5/fake_quantize_I3/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.concat_5/fake_quantize_I3/AsymmQuant/Abs"; +"maskrcnn/tf.concat_5/fake_quantize_I3/AsymmQuant/Abs" -> "maskrcnn/tf.concat_5/fake_quantize_I3/AsymmQuant/add"; +"maskrcnn/tf.concat_5/fake_quantize_I3/AsymmQuant/add/y" -> "maskrcnn/tf.concat_5/fake_quantize_I3/AsymmQuant/add"; +"maskrcnn/tf.concat_5/fake_quantize_I3/AsymmQuant/add" -> "maskrcnn/tf.concat_5/fake_quantize_I3/AsymmQuant/add_1"; +"maskrcnn/tf.concat_5/fake_quantize_I3/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.concat_5/fake_quantize_I3/AsymmQuant/ReadVariableOp"; +"maskrcnn/tf.concat_5/fake_quantize_I3/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.concat_5/fake_quantize_I3/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"maskrcnn/tf.concat_5/fake_quantize_I3/AsymmQuant/ReadVariableOp" -> "maskrcnn/tf.concat_5/fake_quantize_I3/AsymmQuant/add_1"; +"maskrcnn/tf.concat_5/fake_quantize_I3/AsymmQuant/add_1" -> "maskrcnn/tf.concat_5/fake_quantize_I3/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.concat_5/fake_quantize_I3/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.concat_5/fake_quantize_I3/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.concat_5/fake_quantize_I3/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_5/concat"; +"maskrcnn/tf.concat_5/fake_quantize_I4/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.concat_5/fake_quantize_I4/AsymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.concat_5/fake_quantize_I4/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.concat_5/fake_quantize_I4/AsymmQuant/Abs"; +"maskrcnn/tf.concat_5/fake_quantize_I4/AsymmQuant/Abs" -> "maskrcnn/tf.concat_5/fake_quantize_I4/AsymmQuant/add"; +"maskrcnn/tf.concat_5/fake_quantize_I4/AsymmQuant/add/y" -> "maskrcnn/tf.concat_5/fake_quantize_I4/AsymmQuant/add"; +"maskrcnn/tf.concat_5/fake_quantize_I4/AsymmQuant/add" -> "maskrcnn/tf.concat_5/fake_quantize_I4/AsymmQuant/add_1"; +"maskrcnn/tf.concat_5/fake_quantize_I4/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.concat_5/fake_quantize_I4/AsymmQuant/ReadVariableOp"; +"maskrcnn/tf.concat_5/fake_quantize_I4/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.concat_5/fake_quantize_I4/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"maskrcnn/tf.concat_5/fake_quantize_I4/AsymmQuant/ReadVariableOp" -> "maskrcnn/tf.concat_5/fake_quantize_I4/AsymmQuant/add_1"; +"maskrcnn/tf.concat_5/fake_quantize_I4/AsymmQuant/add_1" -> "maskrcnn/tf.concat_5/fake_quantize_I4/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.concat_5/fake_quantize_I4/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> 
"maskrcnn/tf.concat_5/fake_quantize_I4/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.concat_5/fake_quantize_I4/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_5/concat"; "maskrcnn/tf.concat_5/concat/axis" -> "maskrcnn/tf.concat_5/concat"; "maskrcnn/tf.concat_5/concat" -> "maskrcnn/tf.compat.v1.gather_nd/GatherNd"; "maskrcnn/tf.stack_5/stack" -> "maskrcnn/tf.compat.v1.gather_nd/GatherNd"; @@ -12405,46 +12129,46 @@ args_0_1 -> "maskrcnn/image_info/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVar "maskrcnn/tf.math.pow/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.math.truediv_3/truediv"; "maskrcnn/tf.expand_dims_12/ExpandDims/dim" -> "maskrcnn/tf.expand_dims_12/ExpandDims"; "maskrcnn/tf.expand_dims_12/ExpandDims" -> "maskrcnn/tf.math.truediv_1/truediv"; -"maskrcnn/tf.math.truediv_2/truediv" -> "maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.truediv_3/truediv" -> "maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.truediv_1/truediv" -> "maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.truediv_2/truediv" -> "maskrcnn/tf.math.truediv_2/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.truediv_3/truediv" -> "maskrcnn/tf.math.truediv_3/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.truediv_1/truediv" -> "maskrcnn/tf.math.truediv_1/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_42/Sub/y" -> "maskrcnn/tf.math.subtract_42/Sub"; "maskrcnn/tf.math.subtract_42/Sub" -> "maskrcnn/tf.cast_23/Cast"; "maskrcnn/tf.math.subtract_42/Sub" -> "maskrcnn/tf.compat.v1.gather_1/GatherV2"; "maskrcnn/tf.math.subtract_42/Sub" -> "maskrcnn/tf.compat.v1.gather/GatherV2"; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp"; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant_1/Abs/ReadVariableOp"; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant_2/Abs/ReadVariableOp"; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant/Abs"; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant/Abs" -> "maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant/add"; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant/add/y" -> "maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant/add"; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant/add" -> "maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant/add_1"; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp"; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> 
"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant_1/ReadVariableOp"; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars/ReadVariableOp"; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant_2/ReadVariableOp"; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars/ReadVariableOp"; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp" -> "maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant/add_1"; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant/add_1" -> "maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.expand_dims_14/ExpandDims"; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant_1/Abs/ReadVariableOp" -> "maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant_1/Abs"; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant_1/Abs" -> "maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant_1/add"; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant_1/add/y" -> "maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant_1/add"; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant_1/add" -> "maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant_1/add_1"; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant_1/ReadVariableOp" -> "maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant_1/add_1"; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant_1/add_1" -> "maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.expand_dims_13/ExpandDims"; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant_2/Abs/ReadVariableOp" -> "maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant_2/Abs"; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant_2/Abs" -> "maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant_2/add"; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant_2/add/y" -> 
"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant_2/add"; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant_2/add" -> "maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant_2/add_1"; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant_2/ReadVariableOp" -> "maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant_2/add_1"; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant_2/add_1" -> "maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.__operators__.getitem_27/strided_slice"; +"maskrcnn/tf.math.truediv_2/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.truediv_2/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.truediv_2/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.truediv_2/fake_quantize/AsymmQuant/Abs"; +"maskrcnn/tf.math.truediv_2/fake_quantize/AsymmQuant/Abs" -> "maskrcnn/tf.math.truediv_2/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.truediv_2/fake_quantize/AsymmQuant/add/y" -> "maskrcnn/tf.math.truediv_2/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.truediv_2/fake_quantize/AsymmQuant/add" -> "maskrcnn/tf.math.truediv_2/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.truediv_2/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.truediv_2/fake_quantize/AsymmQuant/ReadVariableOp"; +"maskrcnn/tf.math.truediv_2/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.truediv_2/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"maskrcnn/tf.math.truediv_2/fake_quantize/AsymmQuant/ReadVariableOp" -> "maskrcnn/tf.math.truediv_2/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.truediv_2/fake_quantize/AsymmQuant/add_1" -> "maskrcnn/tf.math.truediv_2/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.truediv_2/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.truediv_2/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.truediv_2/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.expand_dims_14/ExpandDims"; +"maskrcnn/tf.math.truediv_3/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.truediv_3/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.truediv_3/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.truediv_3/fake_quantize/AsymmQuant/Abs"; +"maskrcnn/tf.math.truediv_3/fake_quantize/AsymmQuant/Abs" -> "maskrcnn/tf.math.truediv_3/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.truediv_3/fake_quantize/AsymmQuant/add/y" -> "maskrcnn/tf.math.truediv_3/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.truediv_3/fake_quantize/AsymmQuant/add" -> "maskrcnn/tf.math.truediv_3/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.truediv_3/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.truediv_3/fake_quantize/AsymmQuant/ReadVariableOp"; +"maskrcnn/tf.math.truediv_3/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.truediv_3/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; 
+"maskrcnn/tf.math.truediv_3/fake_quantize/AsymmQuant/ReadVariableOp" -> "maskrcnn/tf.math.truediv_3/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.truediv_3/fake_quantize/AsymmQuant/add_1" -> "maskrcnn/tf.math.truediv_3/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.truediv_3/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.truediv_3/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.truediv_3/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.expand_dims_13/ExpandDims"; +"maskrcnn/tf.math.truediv_1/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.truediv_1/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.truediv_1/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.truediv_1/fake_quantize/AsymmQuant/Abs"; +"maskrcnn/tf.math.truediv_1/fake_quantize/AsymmQuant/Abs" -> "maskrcnn/tf.math.truediv_1/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.truediv_1/fake_quantize/AsymmQuant/add/y" -> "maskrcnn/tf.math.truediv_1/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.truediv_1/fake_quantize/AsymmQuant/add" -> "maskrcnn/tf.math.truediv_1/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.truediv_1/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.truediv_1/fake_quantize/AsymmQuant/ReadVariableOp"; +"maskrcnn/tf.math.truediv_1/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.truediv_1/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"maskrcnn/tf.math.truediv_1/fake_quantize/AsymmQuant/ReadVariableOp" -> "maskrcnn/tf.math.truediv_1/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.truediv_1/fake_quantize/AsymmQuant/add_1" -> "maskrcnn/tf.math.truediv_1/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.truediv_1/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.truediv_1/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.truediv_1/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.__operators__.getitem_27/strided_slice"; "maskrcnn/tf.cast_23/Cast" -> "maskrcnn/tf.cast_23/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.__operators__.getitem_27/strided_slice/stack" -> "maskrcnn/tf.__operators__.getitem_27/strided_slice"; "maskrcnn/tf.__operators__.getitem_27/strided_slice/stack_1" -> "maskrcnn/tf.__operators__.getitem_27/strided_slice"; @@ -12776,9 +12500,9 @@ args_0_1 -> "maskrcnn/image_info/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVar "maskrcnn/tf.math.multiply_32/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.multiply_32/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.multiply_32/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.math.truediv_6/truediv"; "maskrcnn/tf.math.subtract_44/Sub/y" -> "maskrcnn/tf.math.subtract_44/Sub"; -"maskrcnn/tf.math.subtract_44/Sub" -> "maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_44/Sub" -> "maskrcnn/tf.math.subtract_44/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_43/Sub/y" -> "maskrcnn/tf.math.subtract_43/Sub"; -"maskrcnn/tf.math.subtract_43/Sub" -> "maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_43/Sub" -> 
"maskrcnn/tf.math.subtract_43/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.__operators__.getitem_54/strided_slice/stack" -> "maskrcnn/tf.__operators__.getitem_54/strided_slice"; "maskrcnn/tf.__operators__.getitem_54/strided_slice/stack_1" -> "maskrcnn/tf.__operators__.getitem_54/strided_slice"; "maskrcnn/tf.__operators__.getitem_54/strided_slice/stack_2" -> "maskrcnn/tf.__operators__.getitem_54/strided_slice"; @@ -12898,35 +12622,37 @@ args_0_1 -> "maskrcnn/image_info/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVar "maskrcnn/tf.math.truediv_8/truediv" -> "maskrcnn/tf.math.truediv_8/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.truediv_6/truediv/y" -> "maskrcnn/tf.math.truediv_6/truediv"; "maskrcnn/tf.math.truediv_6/truediv" -> "maskrcnn/tf.math.truediv_6/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp"; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/AsymmQuant_1/Abs/ReadVariableOp"; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/AsymmQuant/Abs"; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/AsymmQuant/Abs" -> "maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/AsymmQuant/add"; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/AsymmQuant/add/y" -> "maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/AsymmQuant/add"; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/AsymmQuant/add" -> "maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/AsymmQuant/add_1"; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp"; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/AsymmQuant_1/ReadVariableOp"; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars/ReadVariableOp"; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp" -> "maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/AsymmQuant/add_1"; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/AsymmQuant/add_1" -> "maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.expand_dims_16/ExpandDims"; 
-"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/AsymmQuant_1/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/AsymmQuant_1/Abs"; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/AsymmQuant_1/Abs" -> "maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/AsymmQuant_1/add"; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/AsymmQuant_1/add/y" -> "maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/AsymmQuant_1/add"; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/AsymmQuant_1/add" -> "maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/AsymmQuant_1/add_1"; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/AsymmQuant_1/ReadVariableOp" -> "maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/AsymmQuant_1/add_1"; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/AsymmQuant_1/add_1" -> "maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.expand_dims_15/ExpandDims"; -"maskrcnn/tf.__operators__.add_53/AddV2" -> "maskrcnn/tf.__operators__.add_53/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_51/AddV2" -> "maskrcnn/tf.__operators__.add_51/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_49/AddV2" -> "maskrcnn/tf.__operators__.add_49/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_47/AddV2" -> "maskrcnn/tf.__operators__.add_47/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_45/AddV2" -> "maskrcnn/tf.__operators__.add_45/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_43/AddV2" -> "maskrcnn/tf.__operators__.add_43/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_41/AddV2" -> "maskrcnn/tf.__operators__.add_41/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_44/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_44/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.subtract_44/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_44/fake_quantize/AsymmQuant/Abs"; +"maskrcnn/tf.math.subtract_44/fake_quantize/AsymmQuant/Abs" -> "maskrcnn/tf.math.subtract_44/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.subtract_44/fake_quantize/AsymmQuant/add/y" -> "maskrcnn/tf.math.subtract_44/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.subtract_44/fake_quantize/AsymmQuant/add" -> "maskrcnn/tf.math.subtract_44/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.subtract_44/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_44/fake_quantize/AsymmQuant/ReadVariableOp"; +"maskrcnn/tf.math.subtract_44/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_44/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"maskrcnn/tf.math.subtract_44/fake_quantize/AsymmQuant/ReadVariableOp" -> "maskrcnn/tf.math.subtract_44/fake_quantize/AsymmQuant/add_1"; 
+"maskrcnn/tf.math.subtract_44/fake_quantize/AsymmQuant/add_1" -> "maskrcnn/tf.math.subtract_44/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_44/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.subtract_44/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_44/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.expand_dims_16/ExpandDims"; +"maskrcnn/tf.math.subtract_43/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_43/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.subtract_43/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_43/fake_quantize/AsymmQuant/Abs"; +"maskrcnn/tf.math.subtract_43/fake_quantize/AsymmQuant/Abs" -> "maskrcnn/tf.math.subtract_43/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.subtract_43/fake_quantize/AsymmQuant/add/y" -> "maskrcnn/tf.math.subtract_43/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.subtract_43/fake_quantize/AsymmQuant/add" -> "maskrcnn/tf.math.subtract_43/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.subtract_43/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_43/fake_quantize/AsymmQuant/ReadVariableOp"; +"maskrcnn/tf.math.subtract_43/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_43/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"maskrcnn/tf.math.subtract_43/fake_quantize/AsymmQuant/ReadVariableOp" -> "maskrcnn/tf.math.subtract_43/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.subtract_43/fake_quantize/AsymmQuant/add_1" -> "maskrcnn/tf.math.subtract_43/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_43/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.subtract_43/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_43/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.expand_dims_15/ExpandDims"; +"maskrcnn/tf.__operators__.add_41/AddV2" -> "maskrcnn/tf.stack_7/stack"; +"maskrcnn/tf.__operators__.add_43/AddV2" -> "maskrcnn/tf.stack_7/stack"; +"maskrcnn/tf.__operators__.add_45/AddV2" -> "maskrcnn/tf.stack_7/stack"; +"maskrcnn/tf.__operators__.add_47/AddV2" -> "maskrcnn/tf.stack_7/stack"; +"maskrcnn/tf.__operators__.add_49/AddV2" -> "maskrcnn/tf.stack_7/stack"; +"maskrcnn/tf.__operators__.add_51/AddV2" -> "maskrcnn/tf.stack_7/stack"; +"maskrcnn/tf.__operators__.add_53/AddV2" -> "maskrcnn/tf.stack_7/stack"; +"maskrcnn/tf.stack_7/stack" -> "maskrcnn/tf.math.floor/Floor"; +"maskrcnn/tf.stack_7/stack" -> "maskrcnn/tf.math.subtract_45/fake_quantize_I0/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.__operators__.getitem_52/strided_slice/stack" -> "maskrcnn/tf.__operators__.getitem_52/strided_slice"; "maskrcnn/tf.__operators__.getitem_52/strided_slice/stack_1" -> "maskrcnn/tf.__operators__.getitem_52/strided_slice"; "maskrcnn/tf.__operators__.getitem_52/strided_slice/stack_2" -> "maskrcnn/tf.__operators__.getitem_52/strided_slice"; @@ -13036,183 +12762,49 @@ args_0_1 -> "maskrcnn/image_info/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVar "maskrcnn/tf.expand_dims_15/ExpandDims" -> "maskrcnn/tf.concat_12/concat"; "maskrcnn/tf.expand_dims_16/ExpandDims/dim" -> "maskrcnn/tf.expand_dims_16/ExpandDims"; "maskrcnn/tf.expand_dims_16/ExpandDims" -> "maskrcnn/tf.concat_12/concat"; -"maskrcnn/tf.__operators__.add_41/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> 
"maskrcnn/tf.__operators__.add_41/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; -"maskrcnn/tf.__operators__.add_41/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_41/fake_quantize/AsymmQuant/Abs"; -"maskrcnn/tf.__operators__.add_41/fake_quantize/AsymmQuant/Abs" -> "maskrcnn/tf.__operators__.add_41/fake_quantize/AsymmQuant/add"; -"maskrcnn/tf.__operators__.add_41/fake_quantize/AsymmQuant/add/y" -> "maskrcnn/tf.__operators__.add_41/fake_quantize/AsymmQuant/add"; -"maskrcnn/tf.__operators__.add_41/fake_quantize/AsymmQuant/add" -> "maskrcnn/tf.__operators__.add_41/fake_quantize/AsymmQuant/add_1"; -"maskrcnn/tf.__operators__.add_41/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_41/fake_quantize/AsymmQuant/ReadVariableOp"; -"maskrcnn/tf.__operators__.add_41/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_41/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; -"maskrcnn/tf.__operators__.add_41/fake_quantize/AsymmQuant/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_41/fake_quantize/AsymmQuant/add_1"; -"maskrcnn/tf.__operators__.add_41/fake_quantize/AsymmQuant/add_1" -> "maskrcnn/tf.__operators__.add_41/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_41/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_41/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_41/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.stack_7/stack"; -"maskrcnn/tf.__operators__.add_43/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_43/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; -"maskrcnn/tf.__operators__.add_43/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_43/fake_quantize/AsymmQuant/Abs"; -"maskrcnn/tf.__operators__.add_43/fake_quantize/AsymmQuant/Abs" -> "maskrcnn/tf.__operators__.add_43/fake_quantize/AsymmQuant/add"; -"maskrcnn/tf.__operators__.add_43/fake_quantize/AsymmQuant/add/y" -> "maskrcnn/tf.__operators__.add_43/fake_quantize/AsymmQuant/add"; -"maskrcnn/tf.__operators__.add_43/fake_quantize/AsymmQuant/add" -> "maskrcnn/tf.__operators__.add_43/fake_quantize/AsymmQuant/add_1"; -"maskrcnn/tf.__operators__.add_43/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_43/fake_quantize/AsymmQuant/ReadVariableOp"; -"maskrcnn/tf.__operators__.add_43/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_43/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; -"maskrcnn/tf.__operators__.add_43/fake_quantize/AsymmQuant/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_43/fake_quantize/AsymmQuant/add_1"; -"maskrcnn/tf.__operators__.add_43/fake_quantize/AsymmQuant/add_1" -> "maskrcnn/tf.__operators__.add_43/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_43/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_43/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_43/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.stack_7/stack"; -"maskrcnn/tf.__operators__.add_45/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_45/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; -"maskrcnn/tf.__operators__.add_45/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> 
"maskrcnn/tf.__operators__.add_45/fake_quantize/AsymmQuant/Abs"; -"maskrcnn/tf.__operators__.add_45/fake_quantize/AsymmQuant/Abs" -> "maskrcnn/tf.__operators__.add_45/fake_quantize/AsymmQuant/add"; -"maskrcnn/tf.__operators__.add_45/fake_quantize/AsymmQuant/add/y" -> "maskrcnn/tf.__operators__.add_45/fake_quantize/AsymmQuant/add"; -"maskrcnn/tf.__operators__.add_45/fake_quantize/AsymmQuant/add" -> "maskrcnn/tf.__operators__.add_45/fake_quantize/AsymmQuant/add_1"; -"maskrcnn/tf.__operators__.add_45/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_45/fake_quantize/AsymmQuant/ReadVariableOp"; -"maskrcnn/tf.__operators__.add_45/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_45/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; -"maskrcnn/tf.__operators__.add_45/fake_quantize/AsymmQuant/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_45/fake_quantize/AsymmQuant/add_1"; -"maskrcnn/tf.__operators__.add_45/fake_quantize/AsymmQuant/add_1" -> "maskrcnn/tf.__operators__.add_45/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_45/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_45/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_45/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.stack_7/stack"; -"maskrcnn/tf.__operators__.add_47/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_47/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; -"maskrcnn/tf.__operators__.add_47/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_47/fake_quantize/AsymmQuant/Abs"; -"maskrcnn/tf.__operators__.add_47/fake_quantize/AsymmQuant/Abs" -> "maskrcnn/tf.__operators__.add_47/fake_quantize/AsymmQuant/add"; -"maskrcnn/tf.__operators__.add_47/fake_quantize/AsymmQuant/add/y" -> "maskrcnn/tf.__operators__.add_47/fake_quantize/AsymmQuant/add"; -"maskrcnn/tf.__operators__.add_47/fake_quantize/AsymmQuant/add" -> "maskrcnn/tf.__operators__.add_47/fake_quantize/AsymmQuant/add_1"; -"maskrcnn/tf.__operators__.add_47/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_47/fake_quantize/AsymmQuant/ReadVariableOp"; -"maskrcnn/tf.__operators__.add_47/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_47/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; -"maskrcnn/tf.__operators__.add_47/fake_quantize/AsymmQuant/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_47/fake_quantize/AsymmQuant/add_1"; -"maskrcnn/tf.__operators__.add_47/fake_quantize/AsymmQuant/add_1" -> "maskrcnn/tf.__operators__.add_47/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_47/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_47/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_47/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.stack_7/stack"; -"maskrcnn/tf.__operators__.add_49/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_49/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; -"maskrcnn/tf.__operators__.add_49/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_49/fake_quantize/AsymmQuant/Abs"; -"maskrcnn/tf.__operators__.add_49/fake_quantize/AsymmQuant/Abs" -> 
"maskrcnn/tf.__operators__.add_49/fake_quantize/AsymmQuant/add"; -"maskrcnn/tf.__operators__.add_49/fake_quantize/AsymmQuant/add/y" -> "maskrcnn/tf.__operators__.add_49/fake_quantize/AsymmQuant/add"; -"maskrcnn/tf.__operators__.add_49/fake_quantize/AsymmQuant/add" -> "maskrcnn/tf.__operators__.add_49/fake_quantize/AsymmQuant/add_1"; -"maskrcnn/tf.__operators__.add_49/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_49/fake_quantize/AsymmQuant/ReadVariableOp"; -"maskrcnn/tf.__operators__.add_49/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_49/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; -"maskrcnn/tf.__operators__.add_49/fake_quantize/AsymmQuant/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_49/fake_quantize/AsymmQuant/add_1"; -"maskrcnn/tf.__operators__.add_49/fake_quantize/AsymmQuant/add_1" -> "maskrcnn/tf.__operators__.add_49/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_49/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_49/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_49/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.stack_7/stack"; -"maskrcnn/tf.__operators__.add_51/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_51/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; -"maskrcnn/tf.__operators__.add_51/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_51/fake_quantize/AsymmQuant/Abs"; -"maskrcnn/tf.__operators__.add_51/fake_quantize/AsymmQuant/Abs" -> "maskrcnn/tf.__operators__.add_51/fake_quantize/AsymmQuant/add"; -"maskrcnn/tf.__operators__.add_51/fake_quantize/AsymmQuant/add/y" -> "maskrcnn/tf.__operators__.add_51/fake_quantize/AsymmQuant/add"; -"maskrcnn/tf.__operators__.add_51/fake_quantize/AsymmQuant/add" -> "maskrcnn/tf.__operators__.add_51/fake_quantize/AsymmQuant/add_1"; -"maskrcnn/tf.__operators__.add_51/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_51/fake_quantize/AsymmQuant/ReadVariableOp"; -"maskrcnn/tf.__operators__.add_51/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_51/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; -"maskrcnn/tf.__operators__.add_51/fake_quantize/AsymmQuant/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_51/fake_quantize/AsymmQuant/add_1"; -"maskrcnn/tf.__operators__.add_51/fake_quantize/AsymmQuant/add_1" -> "maskrcnn/tf.__operators__.add_51/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_51/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_51/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_51/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.stack_7/stack"; -"maskrcnn/tf.__operators__.add_53/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_53/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; -"maskrcnn/tf.__operators__.add_53/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_53/fake_quantize/AsymmQuant/Abs"; -"maskrcnn/tf.__operators__.add_53/fake_quantize/AsymmQuant/Abs" -> "maskrcnn/tf.__operators__.add_53/fake_quantize/AsymmQuant/add"; -"maskrcnn/tf.__operators__.add_53/fake_quantize/AsymmQuant/add/y" -> 
"maskrcnn/tf.__operators__.add_53/fake_quantize/AsymmQuant/add"; -"maskrcnn/tf.__operators__.add_53/fake_quantize/AsymmQuant/add" -> "maskrcnn/tf.__operators__.add_53/fake_quantize/AsymmQuant/add_1"; -"maskrcnn/tf.__operators__.add_53/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_53/fake_quantize/AsymmQuant/ReadVariableOp"; -"maskrcnn/tf.__operators__.add_53/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_53/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; -"maskrcnn/tf.__operators__.add_53/fake_quantize/AsymmQuant/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_53/fake_quantize/AsymmQuant/add_1"; -"maskrcnn/tf.__operators__.add_53/fake_quantize/AsymmQuant/add_1" -> "maskrcnn/tf.__operators__.add_53/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_53/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_53/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_53/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.stack_7/stack"; -"maskrcnn/tf.stack_7/stack" -> "maskrcnn/tf.math.floor/Floor"; -"maskrcnn/tf.stack_7/stack" -> "maskrcnn/tf.math.subtract_45/Sub"; -"maskrcnn/tf.__operators__.add_52/AddV2" -> "maskrcnn/tf.__operators__.add_52/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_50/AddV2" -> "maskrcnn/tf.__operators__.add_50/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_48/AddV2" -> "maskrcnn/tf.__operators__.add_48/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_46/AddV2" -> "maskrcnn/tf.__operators__.add_46/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_44/AddV2" -> "maskrcnn/tf.__operators__.add_44/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_42/AddV2" -> "maskrcnn/tf.__operators__.add_42/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_40/AddV2" -> "maskrcnn/tf.__operators__.add_40/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.__operators__.add_40/AddV2" -> "maskrcnn/tf.stack_6/stack"; +"maskrcnn/tf.__operators__.add_42/AddV2" -> "maskrcnn/tf.stack_6/stack"; +"maskrcnn/tf.__operators__.add_44/AddV2" -> "maskrcnn/tf.stack_6/stack"; +"maskrcnn/tf.__operators__.add_46/AddV2" -> "maskrcnn/tf.stack_6/stack"; +"maskrcnn/tf.__operators__.add_48/AddV2" -> "maskrcnn/tf.stack_6/stack"; +"maskrcnn/tf.__operators__.add_50/AddV2" -> "maskrcnn/tf.stack_6/stack"; +"maskrcnn/tf.__operators__.add_52/AddV2" -> "maskrcnn/tf.stack_6/stack"; "maskrcnn/tf.concat_12/concat/axis" -> "maskrcnn/tf.concat_12/concat"; "maskrcnn/tf.concat_12/concat" -> "maskrcnn/tf.__operators__.getitem_58/strided_slice"; "maskrcnn/tf.concat_12/concat" -> "maskrcnn/tf.__operators__.getitem_56/strided_slice"; "maskrcnn/tf.concat_12/concat" -> "maskrcnn/tf.__operators__.getitem_59/strided_slice"; "maskrcnn/tf.concat_12/concat" -> "maskrcnn/tf.__operators__.getitem_57/strided_slice"; -"maskrcnn/tf.__operators__.add_40/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_40/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; -"maskrcnn/tf.__operators__.add_40/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_40/fake_quantize/AsymmQuant/Abs"; -"maskrcnn/tf.__operators__.add_40/fake_quantize/AsymmQuant/Abs" -> 
"maskrcnn/tf.__operators__.add_40/fake_quantize/AsymmQuant/add"; -"maskrcnn/tf.__operators__.add_40/fake_quantize/AsymmQuant/add/y" -> "maskrcnn/tf.__operators__.add_40/fake_quantize/AsymmQuant/add"; -"maskrcnn/tf.__operators__.add_40/fake_quantize/AsymmQuant/add" -> "maskrcnn/tf.__operators__.add_40/fake_quantize/AsymmQuant/add_1"; -"maskrcnn/tf.__operators__.add_40/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_40/fake_quantize/AsymmQuant/ReadVariableOp"; -"maskrcnn/tf.__operators__.add_40/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_40/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; -"maskrcnn/tf.__operators__.add_40/fake_quantize/AsymmQuant/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_40/fake_quantize/AsymmQuant/add_1"; -"maskrcnn/tf.__operators__.add_40/fake_quantize/AsymmQuant/add_1" -> "maskrcnn/tf.__operators__.add_40/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_40/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_40/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_40/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.stack_6/stack"; -"maskrcnn/tf.__operators__.add_42/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_42/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; -"maskrcnn/tf.__operators__.add_42/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_42/fake_quantize/AsymmQuant/Abs"; -"maskrcnn/tf.__operators__.add_42/fake_quantize/AsymmQuant/Abs" -> "maskrcnn/tf.__operators__.add_42/fake_quantize/AsymmQuant/add"; -"maskrcnn/tf.__operators__.add_42/fake_quantize/AsymmQuant/add/y" -> "maskrcnn/tf.__operators__.add_42/fake_quantize/AsymmQuant/add"; -"maskrcnn/tf.__operators__.add_42/fake_quantize/AsymmQuant/add" -> "maskrcnn/tf.__operators__.add_42/fake_quantize/AsymmQuant/add_1"; -"maskrcnn/tf.__operators__.add_42/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_42/fake_quantize/AsymmQuant/ReadVariableOp"; -"maskrcnn/tf.__operators__.add_42/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_42/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; -"maskrcnn/tf.__operators__.add_42/fake_quantize/AsymmQuant/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_42/fake_quantize/AsymmQuant/add_1"; -"maskrcnn/tf.__operators__.add_42/fake_quantize/AsymmQuant/add_1" -> "maskrcnn/tf.__operators__.add_42/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_42/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_42/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_42/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.stack_6/stack"; -"maskrcnn/tf.__operators__.add_44/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_44/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; -"maskrcnn/tf.__operators__.add_44/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_44/fake_quantize/AsymmQuant/Abs"; -"maskrcnn/tf.__operators__.add_44/fake_quantize/AsymmQuant/Abs" -> "maskrcnn/tf.__operators__.add_44/fake_quantize/AsymmQuant/add"; -"maskrcnn/tf.__operators__.add_44/fake_quantize/AsymmQuant/add/y" -> 
"maskrcnn/tf.__operators__.add_44/fake_quantize/AsymmQuant/add"; -"maskrcnn/tf.__operators__.add_44/fake_quantize/AsymmQuant/add" -> "maskrcnn/tf.__operators__.add_44/fake_quantize/AsymmQuant/add_1"; -"maskrcnn/tf.__operators__.add_44/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_44/fake_quantize/AsymmQuant/ReadVariableOp"; -"maskrcnn/tf.__operators__.add_44/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_44/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; -"maskrcnn/tf.__operators__.add_44/fake_quantize/AsymmQuant/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_44/fake_quantize/AsymmQuant/add_1"; -"maskrcnn/tf.__operators__.add_44/fake_quantize/AsymmQuant/add_1" -> "maskrcnn/tf.__operators__.add_44/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_44/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_44/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_44/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.stack_6/stack"; -"maskrcnn/tf.__operators__.add_46/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_46/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; -"maskrcnn/tf.__operators__.add_46/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_46/fake_quantize/AsymmQuant/Abs"; -"maskrcnn/tf.__operators__.add_46/fake_quantize/AsymmQuant/Abs" -> "maskrcnn/tf.__operators__.add_46/fake_quantize/AsymmQuant/add"; -"maskrcnn/tf.__operators__.add_46/fake_quantize/AsymmQuant/add/y" -> "maskrcnn/tf.__operators__.add_46/fake_quantize/AsymmQuant/add"; -"maskrcnn/tf.__operators__.add_46/fake_quantize/AsymmQuant/add" -> "maskrcnn/tf.__operators__.add_46/fake_quantize/AsymmQuant/add_1"; -"maskrcnn/tf.__operators__.add_46/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_46/fake_quantize/AsymmQuant/ReadVariableOp"; -"maskrcnn/tf.__operators__.add_46/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_46/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; -"maskrcnn/tf.__operators__.add_46/fake_quantize/AsymmQuant/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_46/fake_quantize/AsymmQuant/add_1"; -"maskrcnn/tf.__operators__.add_46/fake_quantize/AsymmQuant/add_1" -> "maskrcnn/tf.__operators__.add_46/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_46/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_46/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_46/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.stack_6/stack"; -"maskrcnn/tf.__operators__.add_48/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_48/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; -"maskrcnn/tf.__operators__.add_48/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_48/fake_quantize/AsymmQuant/Abs"; -"maskrcnn/tf.__operators__.add_48/fake_quantize/AsymmQuant/Abs" -> "maskrcnn/tf.__operators__.add_48/fake_quantize/AsymmQuant/add"; -"maskrcnn/tf.__operators__.add_48/fake_quantize/AsymmQuant/add/y" -> "maskrcnn/tf.__operators__.add_48/fake_quantize/AsymmQuant/add"; -"maskrcnn/tf.__operators__.add_48/fake_quantize/AsymmQuant/add" -> 
"maskrcnn/tf.__operators__.add_48/fake_quantize/AsymmQuant/add_1"; -"maskrcnn/tf.__operators__.add_48/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_48/fake_quantize/AsymmQuant/ReadVariableOp"; -"maskrcnn/tf.__operators__.add_48/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_48/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; -"maskrcnn/tf.__operators__.add_48/fake_quantize/AsymmQuant/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_48/fake_quantize/AsymmQuant/add_1"; -"maskrcnn/tf.__operators__.add_48/fake_quantize/AsymmQuant/add_1" -> "maskrcnn/tf.__operators__.add_48/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_48/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_48/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_48/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.stack_6/stack"; -"maskrcnn/tf.__operators__.add_50/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_50/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; -"maskrcnn/tf.__operators__.add_50/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_50/fake_quantize/AsymmQuant/Abs"; -"maskrcnn/tf.__operators__.add_50/fake_quantize/AsymmQuant/Abs" -> "maskrcnn/tf.__operators__.add_50/fake_quantize/AsymmQuant/add"; -"maskrcnn/tf.__operators__.add_50/fake_quantize/AsymmQuant/add/y" -> "maskrcnn/tf.__operators__.add_50/fake_quantize/AsymmQuant/add"; -"maskrcnn/tf.__operators__.add_50/fake_quantize/AsymmQuant/add" -> "maskrcnn/tf.__operators__.add_50/fake_quantize/AsymmQuant/add_1"; -"maskrcnn/tf.__operators__.add_50/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_50/fake_quantize/AsymmQuant/ReadVariableOp"; -"maskrcnn/tf.__operators__.add_50/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_50/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; -"maskrcnn/tf.__operators__.add_50/fake_quantize/AsymmQuant/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_50/fake_quantize/AsymmQuant/add_1"; -"maskrcnn/tf.__operators__.add_50/fake_quantize/AsymmQuant/add_1" -> "maskrcnn/tf.__operators__.add_50/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_50/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_50/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_50/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.stack_6/stack"; -"maskrcnn/tf.__operators__.add_52/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_52/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; -"maskrcnn/tf.__operators__.add_52/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_52/fake_quantize/AsymmQuant/Abs"; -"maskrcnn/tf.__operators__.add_52/fake_quantize/AsymmQuant/Abs" -> "maskrcnn/tf.__operators__.add_52/fake_quantize/AsymmQuant/add"; -"maskrcnn/tf.__operators__.add_52/fake_quantize/AsymmQuant/add/y" -> "maskrcnn/tf.__operators__.add_52/fake_quantize/AsymmQuant/add"; -"maskrcnn/tf.__operators__.add_52/fake_quantize/AsymmQuant/add" -> "maskrcnn/tf.__operators__.add_52/fake_quantize/AsymmQuant/add_1"; -"maskrcnn/tf.__operators__.add_52/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> 
"maskrcnn/tf.__operators__.add_52/fake_quantize/AsymmQuant/ReadVariableOp"; -"maskrcnn/tf.__operators__.add_52/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_52/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; -"maskrcnn/tf.__operators__.add_52/fake_quantize/AsymmQuant/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_52/fake_quantize/AsymmQuant/add_1"; -"maskrcnn/tf.__operators__.add_52/fake_quantize/AsymmQuant/add_1" -> "maskrcnn/tf.__operators__.add_52/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_52/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_52/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_52/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.stack_6/stack"; -"maskrcnn/tf.math.floor/Floor" -> "maskrcnn/tf.math.maximum_7/Maximum"; +"maskrcnn/tf.math.floor/Floor" -> "maskrcnn/tf.math.floor/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.stack_6/stack" -> "maskrcnn/tf.math.floor_1/Floor"; -"maskrcnn/tf.stack_6/stack" -> "maskrcnn/tf.math.subtract_46/Sub"; +"maskrcnn/tf.stack_6/stack" -> "maskrcnn/tf.math.subtract_46/fake_quantize_I0/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.floor/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.floor/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.floor/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.floor/fake_quantize/AsymmQuant/Abs"; +"maskrcnn/tf.math.floor/fake_quantize/AsymmQuant/Abs" -> "maskrcnn/tf.math.floor/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.floor/fake_quantize/AsymmQuant/add/y" -> "maskrcnn/tf.math.floor/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.floor/fake_quantize/AsymmQuant/add" -> "maskrcnn/tf.math.floor/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.floor/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.floor/fake_quantize/AsymmQuant/ReadVariableOp"; +"maskrcnn/tf.math.floor/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.floor/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"maskrcnn/tf.math.floor/fake_quantize/AsymmQuant/ReadVariableOp" -> "maskrcnn/tf.math.floor/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.floor/fake_quantize/AsymmQuant/add_1" -> "maskrcnn/tf.math.floor/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.floor/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.floor/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.floor/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.math.maximum_7/Maximum"; +"maskrcnn/tf.math.floor_1/Floor" -> "maskrcnn/tf.math.floor_1/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.__operators__.getitem_58/strided_slice/stack" -> "maskrcnn/tf.__operators__.getitem_58/strided_slice"; "maskrcnn/tf.__operators__.getitem_58/strided_slice/stack_1" -> "maskrcnn/tf.__operators__.getitem_58/strided_slice"; "maskrcnn/tf.__operators__.getitem_58/strided_slice/stack_2" -> "maskrcnn/tf.__operators__.getitem_58/strided_slice"; "maskrcnn/tf.__operators__.getitem_58/strided_slice" -> "maskrcnn/tf.expand_dims_19/ExpandDims"; "maskrcnn/tf.math.maximum_7/Maximum" -> "maskrcnn/tf.math.maximum_7/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.floor_1/Floor" -> "maskrcnn/tf.math.maximum_6/Maximum"; 
+"maskrcnn/tf.math.floor_1/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.floor_1/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.floor_1/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.floor_1/fake_quantize/AsymmQuant/Abs"; +"maskrcnn/tf.math.floor_1/fake_quantize/AsymmQuant/Abs" -> "maskrcnn/tf.math.floor_1/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.floor_1/fake_quantize/AsymmQuant/add/y" -> "maskrcnn/tf.math.floor_1/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.floor_1/fake_quantize/AsymmQuant/add" -> "maskrcnn/tf.math.floor_1/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.floor_1/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.floor_1/fake_quantize/AsymmQuant/ReadVariableOp"; +"maskrcnn/tf.math.floor_1/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.floor_1/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"maskrcnn/tf.math.floor_1/fake_quantize/AsymmQuant/ReadVariableOp" -> "maskrcnn/tf.math.floor_1/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.floor_1/fake_quantize/AsymmQuant/add_1" -> "maskrcnn/tf.math.floor_1/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.floor_1/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.floor_1/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.floor_1/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.math.maximum_6/Maximum"; "maskrcnn/tf.math.maximum_7/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.maximum_7/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; "maskrcnn/tf.math.maximum_7/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.maximum_7/fake_quantize/AsymmQuant/Abs"; "maskrcnn/tf.math.maximum_7/fake_quantize/AsymmQuant/Abs" -> "maskrcnn/tf.math.maximum_7/fake_quantize/AsymmQuant/add"; @@ -13267,6 +12859,17 @@ args_0_1 -> "maskrcnn/image_info/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVar "maskrcnn/tf.__operators__.getitem_59/strided_slice" -> "maskrcnn/tf.expand_dims_20/ExpandDims"; "maskrcnn/tf.__operators__.add_55/y" -> "maskrcnn/tf.__operators__.add_55/AddV2"; "maskrcnn/tf.__operators__.add_55/AddV2" -> "maskrcnn/tf.__operators__.add_55/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_46/fake_quantize_I0/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_46/fake_quantize_I0/AsymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.subtract_46/fake_quantize_I0/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_46/fake_quantize_I0/AsymmQuant/Abs"; +"maskrcnn/tf.math.subtract_46/fake_quantize_I0/AsymmQuant/Abs" -> "maskrcnn/tf.math.subtract_46/fake_quantize_I0/AsymmQuant/add"; +"maskrcnn/tf.math.subtract_46/fake_quantize_I0/AsymmQuant/add/y" -> "maskrcnn/tf.math.subtract_46/fake_quantize_I0/AsymmQuant/add"; +"maskrcnn/tf.math.subtract_46/fake_quantize_I0/AsymmQuant/add" -> "maskrcnn/tf.math.subtract_46/fake_quantize_I0/AsymmQuant/add_1"; +"maskrcnn/tf.math.subtract_46/fake_quantize_I0/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_46/fake_quantize_I0/AsymmQuant/ReadVariableOp"; +"maskrcnn/tf.math.subtract_46/fake_quantize_I0/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_46/fake_quantize_I0/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"maskrcnn/tf.math.subtract_46/fake_quantize_I0/AsymmQuant/ReadVariableOp" -> "maskrcnn/tf.math.subtract_46/fake_quantize_I0/AsymmQuant/add_1"; 
+"maskrcnn/tf.math.subtract_46/fake_quantize_I0/AsymmQuant/add_1" -> "maskrcnn/tf.math.subtract_46/fake_quantize_I0/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_46/fake_quantize_I0/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.subtract_46/fake_quantize_I0/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_46/fake_quantize_I0/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.math.subtract_46/Sub"; "maskrcnn/tf.math.subtract_46/fake_quantize_I1/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_46/fake_quantize_I1/AsymmQuant/Abs/ReadVariableOp"; "maskrcnn/tf.math.subtract_46/fake_quantize_I1/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_46/fake_quantize_I1/AsymmQuant/Abs"; "maskrcnn/tf.math.subtract_46/fake_quantize_I1/AsymmQuant/Abs" -> "maskrcnn/tf.math.subtract_46/fake_quantize_I1/AsymmQuant/add"; @@ -13278,6 +12881,17 @@ args_0_1 -> "maskrcnn/image_info/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVar "maskrcnn/tf.math.subtract_46/fake_quantize_I1/AsymmQuant/add_1" -> "maskrcnn/tf.math.subtract_46/fake_quantize_I1/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_46/fake_quantize_I1/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.subtract_46/fake_quantize_I1/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_46/fake_quantize_I1/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.math.subtract_46/Sub"; +"maskrcnn/tf.math.subtract_45/fake_quantize_I0/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_45/fake_quantize_I0/AsymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.subtract_45/fake_quantize_I0/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_45/fake_quantize_I0/AsymmQuant/Abs"; +"maskrcnn/tf.math.subtract_45/fake_quantize_I0/AsymmQuant/Abs" -> "maskrcnn/tf.math.subtract_45/fake_quantize_I0/AsymmQuant/add"; +"maskrcnn/tf.math.subtract_45/fake_quantize_I0/AsymmQuant/add/y" -> "maskrcnn/tf.math.subtract_45/fake_quantize_I0/AsymmQuant/add"; +"maskrcnn/tf.math.subtract_45/fake_quantize_I0/AsymmQuant/add" -> "maskrcnn/tf.math.subtract_45/fake_quantize_I0/AsymmQuant/add_1"; +"maskrcnn/tf.math.subtract_45/fake_quantize_I0/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_45/fake_quantize_I0/AsymmQuant/ReadVariableOp"; +"maskrcnn/tf.math.subtract_45/fake_quantize_I0/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_45/fake_quantize_I0/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"maskrcnn/tf.math.subtract_45/fake_quantize_I0/AsymmQuant/ReadVariableOp" -> "maskrcnn/tf.math.subtract_45/fake_quantize_I0/AsymmQuant/add_1"; +"maskrcnn/tf.math.subtract_45/fake_quantize_I0/AsymmQuant/add_1" -> "maskrcnn/tf.math.subtract_45/fake_quantize_I0/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_45/fake_quantize_I0/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.subtract_45/fake_quantize_I0/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_45/fake_quantize_I0/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.math.subtract_45/Sub"; "maskrcnn/tf.math.subtract_45/fake_quantize_I1/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_45/fake_quantize_I1/AsymmQuant/Abs/ReadVariableOp"; "maskrcnn/tf.math.subtract_45/fake_quantize_I1/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_45/fake_quantize_I1/AsymmQuant/Abs"; "maskrcnn/tf.math.subtract_45/fake_quantize_I1/AsymmQuant/Abs" -> 
"maskrcnn/tf.math.subtract_45/fake_quantize_I1/AsymmQuant/add"; @@ -13635,21 +13249,21 @@ args_0_1 -> "maskrcnn/image_info/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVar "maskrcnn/box-predict/Tensordot" -> "maskrcnn/box-predict/BiasAdd"; "maskrcnn/box-predict/BiasAdd/ReadVariableOp/resource" -> "maskrcnn/box-predict/BiasAdd/ReadVariableOp"; "maskrcnn/box-predict/BiasAdd/ReadVariableOp" -> "maskrcnn/box-predict/BiasAdd"; -"maskrcnn/box-predict/BiasAdd" -> "maskrcnn/box-predict/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/box-predict/BiasAdd" -> "maskrcnn/tf.reshape_35/fake_quantize_I0/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/box-predict/BiasAdd" -> Identity; "maskrcnn/tf.math.subtract_49/Sub/y" -> "maskrcnn/tf.math.subtract_49/Sub"; "maskrcnn/tf.math.subtract_49/Sub" -> "maskrcnn/tf.math.multiply_50/Mul"; -"maskrcnn/box-predict/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/box-predict/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; -"maskrcnn/box-predict/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/box-predict/fake_quantize/AsymmQuant/Abs"; -"maskrcnn/box-predict/fake_quantize/AsymmQuant/Abs" -> "maskrcnn/box-predict/fake_quantize/AsymmQuant/add"; -"maskrcnn/box-predict/fake_quantize/AsymmQuant/add/y" -> "maskrcnn/box-predict/fake_quantize/AsymmQuant/add"; -"maskrcnn/box-predict/fake_quantize/AsymmQuant/add" -> "maskrcnn/box-predict/fake_quantize/AsymmQuant/add_1"; -"maskrcnn/box-predict/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/box-predict/fake_quantize/AsymmQuant/ReadVariableOp"; -"maskrcnn/box-predict/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/box-predict/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; -"maskrcnn/box-predict/fake_quantize/AsymmQuant/ReadVariableOp" -> "maskrcnn/box-predict/fake_quantize/AsymmQuant/add_1"; -"maskrcnn/box-predict/fake_quantize/AsymmQuant/add_1" -> "maskrcnn/box-predict/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/box-predict/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/box-predict/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/box-predict/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.reshape_35/Reshape"; -"maskrcnn/box-predict/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> Identity; +"maskrcnn/tf.reshape_35/fake_quantize_I0/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.reshape_35/fake_quantize_I0/AsymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.reshape_35/fake_quantize_I0/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.reshape_35/fake_quantize_I0/AsymmQuant/Abs"; +"maskrcnn/tf.reshape_35/fake_quantize_I0/AsymmQuant/Abs" -> "maskrcnn/tf.reshape_35/fake_quantize_I0/AsymmQuant/add"; +"maskrcnn/tf.reshape_35/fake_quantize_I0/AsymmQuant/add/y" -> "maskrcnn/tf.reshape_35/fake_quantize_I0/AsymmQuant/add"; +"maskrcnn/tf.reshape_35/fake_quantize_I0/AsymmQuant/add" -> "maskrcnn/tf.reshape_35/fake_quantize_I0/AsymmQuant/add_1"; +"maskrcnn/tf.reshape_35/fake_quantize_I0/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.reshape_35/fake_quantize_I0/AsymmQuant/ReadVariableOp"; +"maskrcnn/tf.reshape_35/fake_quantize_I0/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.reshape_35/fake_quantize_I0/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"maskrcnn/tf.reshape_35/fake_quantize_I0/AsymmQuant/ReadVariableOp" -> "maskrcnn/tf.reshape_35/fake_quantize_I0/AsymmQuant/add_1"; +"maskrcnn/tf.reshape_35/fake_quantize_I0/AsymmQuant/add_1" -> 
"maskrcnn/tf.reshape_35/fake_quantize_I0/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.reshape_35/fake_quantize_I0/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.reshape_35/fake_quantize_I0/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.reshape_35/fake_quantize_I0/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.reshape_35/Reshape"; "maskrcnn/tf.stack_12/stack" -> "maskrcnn/tf.reshape_35/Reshape"; "maskrcnn/tf.math.multiply_50/Mul" -> "maskrcnn/tf.stack_14/stack"; "maskrcnn/tf.math.multiply_50/Mul" -> "maskrcnn/tf.stack_13/stack"; @@ -13990,42 +13604,36 @@ args_0_1 -> "maskrcnn/image_info/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVar "maskrcnn/tf.math.multiply_57/fake_quantize/AsymmQuant/add_1" -> "maskrcnn/tf.math.multiply_57/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.multiply_57/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.multiply_57/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.multiply_57/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.math.subtract_53/Sub"; -"maskrcnn/tf.math.subtract_54/Sub" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_53/Sub" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_54/Sub" -> "maskrcnn/tf.math.subtract_54/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_53/Sub" -> "maskrcnn/tf.math.subtract_53/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.__operators__.getitem_60/strided_slice/stack" -> "maskrcnn/tf.__operators__.getitem_60/strided_slice"; "maskrcnn/tf.__operators__.getitem_60/strided_slice/stack_1" -> "maskrcnn/tf.__operators__.getitem_60/strided_slice"; "maskrcnn/tf.__operators__.getitem_60/strided_slice/stack_2" -> "maskrcnn/tf.__operators__.getitem_60/strided_slice"; "maskrcnn/tf.__operators__.getitem_60/strided_slice" -> "maskrcnn/tf.unstack_5/unstack"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_1/Abs/ReadVariableOp"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_2/Abs/ReadVariableOp"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_3/Abs/ReadVariableOp"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant/Abs"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant/Abs" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant/add"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant/add/y" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant/add"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant/add" -> 
"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant/add_1"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_1/ReadVariableOp"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars/ReadVariableOp"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_2/ReadVariableOp"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars/ReadVariableOp"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_3/ReadVariableOp"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars/ReadVariableOp"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant/add_1"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant/add_1" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.__operators__.add_65/AddV2"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_13/concat"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_1/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_1/Abs"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_1/Abs" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_1/add"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_1/add/y" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_1/add"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_1/add" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_1/add_1"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_1/ReadVariableOp" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_1/add_1"; 
-"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_1/add_1" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.__operators__.add_66/AddV2"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_13/concat"; +"maskrcnn/tf.math.subtract_53/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_53/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.subtract_53/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_53/fake_quantize/AsymmQuant/Abs"; +"maskrcnn/tf.math.subtract_53/fake_quantize/AsymmQuant/Abs" -> "maskrcnn/tf.math.subtract_53/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.subtract_53/fake_quantize/AsymmQuant/add/y" -> "maskrcnn/tf.math.subtract_53/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.subtract_53/fake_quantize/AsymmQuant/add" -> "maskrcnn/tf.math.subtract_53/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.subtract_53/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_53/fake_quantize/AsymmQuant/ReadVariableOp"; +"maskrcnn/tf.math.subtract_53/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_53/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"maskrcnn/tf.math.subtract_53/fake_quantize/AsymmQuant/ReadVariableOp" -> "maskrcnn/tf.math.subtract_53/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.subtract_53/fake_quantize/AsymmQuant/add_1" -> "maskrcnn/tf.math.subtract_53/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_53/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.subtract_53/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_53/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.__operators__.add_65/AddV2"; +"maskrcnn/tf.math.subtract_53/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_13/concat"; +"maskrcnn/tf.math.subtract_54/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_54/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.subtract_54/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_54/fake_quantize/AsymmQuant/Abs"; +"maskrcnn/tf.math.subtract_54/fake_quantize/AsymmQuant/Abs" -> "maskrcnn/tf.math.subtract_54/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.subtract_54/fake_quantize/AsymmQuant/add/y" -> "maskrcnn/tf.math.subtract_54/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.subtract_54/fake_quantize/AsymmQuant/add" -> "maskrcnn/tf.math.subtract_54/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.subtract_54/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_54/fake_quantize/AsymmQuant/ReadVariableOp"; +"maskrcnn/tf.math.subtract_54/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_54/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"maskrcnn/tf.math.subtract_54/fake_quantize/AsymmQuant/ReadVariableOp" -> 
"maskrcnn/tf.math.subtract_54/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.subtract_54/fake_quantize/AsymmQuant/add_1" -> "maskrcnn/tf.math.subtract_54/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_54/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.subtract_54/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_54/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.__operators__.add_66/AddV2"; +"maskrcnn/tf.math.subtract_54/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_13/concat"; "maskrcnn/tf.__operators__.add_66/AddV2" -> "maskrcnn/tf.__operators__.add_66/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.__operators__.add_65/AddV2" -> "maskrcnn/tf.__operators__.add_65/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.unstack_5/unstack" -> "maskrcnn/tf.math.subtract_60/Sub"; @@ -14063,9 +13671,9 @@ args_0_1 -> "maskrcnn/image_info/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVar "maskrcnn/tf.math.subtract_57/Sub/y" -> "maskrcnn/tf.math.subtract_57/Sub"; "maskrcnn/tf.math.subtract_57/Sub" -> "maskrcnn/tf.math.subtract_57/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_56/Sub/y" -> "maskrcnn/tf.math.subtract_56/Sub"; -"maskrcnn/tf.math.subtract_56/Sub" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_56/Sub" -> "maskrcnn/tf.math.subtract_56/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_55/Sub/y" -> "maskrcnn/tf.math.subtract_55/Sub"; -"maskrcnn/tf.math.subtract_55/Sub" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_55/Sub" -> "maskrcnn/tf.math.subtract_55/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_57/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_57/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; "maskrcnn/tf.math.subtract_57/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_57/fake_quantize/AsymmQuant/Abs"; "maskrcnn/tf.math.subtract_57/fake_quantize/AsymmQuant/Abs" -> "maskrcnn/tf.math.subtract_57/fake_quantize/AsymmQuant/add"; @@ -14110,22 +13718,28 @@ args_0_1 -> "maskrcnn/image_info/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVar "maskrcnn/tf.math.subtract_60/fake_quantize/AsymmQuant/add_1" -> "maskrcnn/tf.math.subtract_60/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_60/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.subtract_60/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_60/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.stack_15/stack"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_2/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_2/Abs"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_2/Abs" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_2/add"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_2/add/y" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_2/add"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_2/add" -> 
"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_2/add_1"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_2/ReadVariableOp" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_2/add_1"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_2/add_1" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_13/concat"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_3/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_3/Abs"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_3/Abs" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_3/add"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_3/add/y" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_3/add"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_3/add" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_3/add_1"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_3/ReadVariableOp" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_3/add_1"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_3/add_1" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/AsymmQuant_3/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_13/concat"; +"maskrcnn/tf.math.subtract_55/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_55/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.subtract_55/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_55/fake_quantize/AsymmQuant/Abs"; +"maskrcnn/tf.math.subtract_55/fake_quantize/AsymmQuant/Abs" -> "maskrcnn/tf.math.subtract_55/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.subtract_55/fake_quantize/AsymmQuant/add/y" -> "maskrcnn/tf.math.subtract_55/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.subtract_55/fake_quantize/AsymmQuant/add" -> "maskrcnn/tf.math.subtract_55/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.subtract_55/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_55/fake_quantize/AsymmQuant/ReadVariableOp"; +"maskrcnn/tf.math.subtract_55/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_55/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"maskrcnn/tf.math.subtract_55/fake_quantize/AsymmQuant/ReadVariableOp" -> "maskrcnn/tf.math.subtract_55/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.subtract_55/fake_quantize/AsymmQuant/add_1" -> "maskrcnn/tf.math.subtract_55/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; 
+"maskrcnn/tf.math.subtract_55/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.subtract_55/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_55/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_13/concat"; +"maskrcnn/tf.math.subtract_56/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_56/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.subtract_56/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_56/fake_quantize/AsymmQuant/Abs"; +"maskrcnn/tf.math.subtract_56/fake_quantize/AsymmQuant/Abs" -> "maskrcnn/tf.math.subtract_56/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.subtract_56/fake_quantize/AsymmQuant/add/y" -> "maskrcnn/tf.math.subtract_56/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.subtract_56/fake_quantize/AsymmQuant/add" -> "maskrcnn/tf.math.subtract_56/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.subtract_56/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_56/fake_quantize/AsymmQuant/ReadVariableOp"; +"maskrcnn/tf.math.subtract_56/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_56/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"maskrcnn/tf.math.subtract_56/fake_quantize/AsymmQuant/ReadVariableOp" -> "maskrcnn/tf.math.subtract_56/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.subtract_56/fake_quantize/AsymmQuant/add_1" -> "maskrcnn/tf.math.subtract_56/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_56/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.subtract_56/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_56/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_13/concat"; "maskrcnn/tf.concat_13/concat/axis" -> "maskrcnn/tf.concat_13/concat"; "maskrcnn/tf.concat_13/concat" -> "maskrcnn/tf.math.minimum_22/Minimum"; "maskrcnn/tf.stack_15/stack" -> "maskrcnn/tf.math.minimum_22/Minimum"; @@ -14341,46 +13955,46 @@ args_0_1 -> "maskrcnn/image_info/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVar "maskrcnn/tf.math.pow_2/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.math.truediv_28/truediv"; "maskrcnn/tf.expand_dims_24/ExpandDims/dim" -> "maskrcnn/tf.expand_dims_24/ExpandDims"; "maskrcnn/tf.expand_dims_24/ExpandDims" -> "maskrcnn/tf.math.truediv_26/truediv"; -"maskrcnn/tf.math.truediv_27/truediv" -> "maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.truediv_28/truediv" -> "maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.truediv_26/truediv" -> "maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.truediv_27/truediv" -> "maskrcnn/tf.math.truediv_27/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.truediv_28/truediv" -> "maskrcnn/tf.math.truediv_28/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.truediv_26/truediv" -> "maskrcnn/tf.math.truediv_26/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_64/Sub/y" -> "maskrcnn/tf.math.subtract_64/Sub"; "maskrcnn/tf.math.subtract_64/Sub" -> "maskrcnn/tf.cast_37/Cast"; "maskrcnn/tf.math.subtract_64/Sub" -> "maskrcnn/tf.compat.v1.gather_4/GatherV2"; 
"maskrcnn/tf.math.subtract_64/Sub" -> "maskrcnn/tf.compat.v1.gather_3/GatherV2"; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp"; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant_1/Abs/ReadVariableOp"; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant_2/Abs/ReadVariableOp"; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant/Abs"; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant/Abs" -> "maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant/add"; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant/add/y" -> "maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant/add"; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant/add" -> "maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant/add_1"; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp"; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant_1/ReadVariableOp"; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars/ReadVariableOp"; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant_2/ReadVariableOp"; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars/ReadVariableOp"; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp" -> "maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant/add_1"; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant/add_1" -> "maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.expand_dims_26/ExpandDims"; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant_1/Abs/ReadVariableOp" -> "maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant_1/Abs"; 
-"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant_1/Abs" -> "maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant_1/add"; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant_1/add/y" -> "maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant_1/add"; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant_1/add" -> "maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant_1/add_1"; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant_1/ReadVariableOp" -> "maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant_1/add_1"; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant_1/add_1" -> "maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.expand_dims_25/ExpandDims"; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant_2/Abs/ReadVariableOp" -> "maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant_2/Abs"; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant_2/Abs" -> "maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant_2/add"; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant_2/add/y" -> "maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant_2/add"; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant_2/add" -> "maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant_2/add_1"; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant_2/ReadVariableOp" -> "maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant_2/add_1"; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant_2/add_1" -> "maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/AsymmQuant_2/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.__operators__.getitem_76/strided_slice"; +"maskrcnn/tf.math.truediv_27/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.truediv_27/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.truediv_27/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.truediv_27/fake_quantize/AsymmQuant/Abs"; +"maskrcnn/tf.math.truediv_27/fake_quantize/AsymmQuant/Abs" -> "maskrcnn/tf.math.truediv_27/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.truediv_27/fake_quantize/AsymmQuant/add/y" -> "maskrcnn/tf.math.truediv_27/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.truediv_27/fake_quantize/AsymmQuant/add" -> "maskrcnn/tf.math.truediv_27/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.truediv_27/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.truediv_27/fake_quantize/AsymmQuant/ReadVariableOp"; 
+"maskrcnn/tf.math.truediv_27/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.truediv_27/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"maskrcnn/tf.math.truediv_27/fake_quantize/AsymmQuant/ReadVariableOp" -> "maskrcnn/tf.math.truediv_27/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.truediv_27/fake_quantize/AsymmQuant/add_1" -> "maskrcnn/tf.math.truediv_27/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.truediv_27/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.truediv_27/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.truediv_27/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.expand_dims_26/ExpandDims"; +"maskrcnn/tf.math.truediv_28/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.truediv_28/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.truediv_28/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.truediv_28/fake_quantize/AsymmQuant/Abs"; +"maskrcnn/tf.math.truediv_28/fake_quantize/AsymmQuant/Abs" -> "maskrcnn/tf.math.truediv_28/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.truediv_28/fake_quantize/AsymmQuant/add/y" -> "maskrcnn/tf.math.truediv_28/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.truediv_28/fake_quantize/AsymmQuant/add" -> "maskrcnn/tf.math.truediv_28/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.truediv_28/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.truediv_28/fake_quantize/AsymmQuant/ReadVariableOp"; +"maskrcnn/tf.math.truediv_28/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.truediv_28/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"maskrcnn/tf.math.truediv_28/fake_quantize/AsymmQuant/ReadVariableOp" -> "maskrcnn/tf.math.truediv_28/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.truediv_28/fake_quantize/AsymmQuant/add_1" -> "maskrcnn/tf.math.truediv_28/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.truediv_28/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.truediv_28/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.truediv_28/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.expand_dims_25/ExpandDims"; +"maskrcnn/tf.math.truediv_26/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.truediv_26/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.truediv_26/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.truediv_26/fake_quantize/AsymmQuant/Abs"; +"maskrcnn/tf.math.truediv_26/fake_quantize/AsymmQuant/Abs" -> "maskrcnn/tf.math.truediv_26/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.truediv_26/fake_quantize/AsymmQuant/add/y" -> "maskrcnn/tf.math.truediv_26/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.truediv_26/fake_quantize/AsymmQuant/add" -> "maskrcnn/tf.math.truediv_26/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.truediv_26/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.truediv_26/fake_quantize/AsymmQuant/ReadVariableOp"; +"maskrcnn/tf.math.truediv_26/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.truediv_26/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"maskrcnn/tf.math.truediv_26/fake_quantize/AsymmQuant/ReadVariableOp" -> "maskrcnn/tf.math.truediv_26/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.truediv_26/fake_quantize/AsymmQuant/add_1" -> 
"maskrcnn/tf.math.truediv_26/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.truediv_26/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.truediv_26/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.truediv_26/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.__operators__.getitem_76/strided_slice"; "maskrcnn/tf.cast_37/Cast" -> "maskrcnn/tf.cast_37/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.__operators__.getitem_76/strided_slice/stack" -> "maskrcnn/tf.__operators__.getitem_76/strided_slice"; "maskrcnn/tf.__operators__.getitem_76/strided_slice/stack_1" -> "maskrcnn/tf.__operators__.getitem_76/strided_slice"; @@ -14978,9 +14592,9 @@ args_0_1 -> "maskrcnn/image_info/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVar "maskrcnn/tf.math.multiply_61/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.multiply_61/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.multiply_61/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.math.truediv_31/truediv"; "maskrcnn/tf.math.subtract_66/Sub/y" -> "maskrcnn/tf.math.subtract_66/Sub"; -"maskrcnn/tf.math.subtract_66/Sub" -> "maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_66/Sub" -> "maskrcnn/tf.math.subtract_66/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_65/Sub/y" -> "maskrcnn/tf.math.subtract_65/Sub"; -"maskrcnn/tf.math.subtract_65/Sub" -> "maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_65/Sub" -> "maskrcnn/tf.math.subtract_65/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.__operators__.getitem_131/strided_slice/stack" -> "maskrcnn/tf.__operators__.getitem_131/strided_slice"; "maskrcnn/tf.__operators__.getitem_131/strided_slice/stack_1" -> "maskrcnn/tf.__operators__.getitem_131/strided_slice"; "maskrcnn/tf.__operators__.getitem_131/strided_slice/stack_2" -> "maskrcnn/tf.__operators__.getitem_131/strided_slice"; @@ -15219,42 +14833,44 @@ args_0_1 -> "maskrcnn/image_info/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVar "maskrcnn/tf.math.truediv_33/truediv" -> "maskrcnn/tf.math.truediv_33/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.truediv_31/truediv/y" -> "maskrcnn/tf.math.truediv_31/truediv"; "maskrcnn/tf.math.truediv_31/truediv" -> "maskrcnn/tf.math.truediv_31/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp"; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/AsymmQuant_1/Abs/ReadVariableOp"; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/AsymmQuant/Abs"; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/AsymmQuant/Abs" -> "maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/AsymmQuant/add"; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/AsymmQuant/add/y" -> "maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/AsymmQuant/add"; 
-"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/AsymmQuant/add" -> "maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/AsymmQuant/add_1"; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp"; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/AsymmQuant_1/ReadVariableOp"; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars/ReadVariableOp"; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/AsymmQuant/ReadVariableOp" -> "maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/AsymmQuant/add_1"; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/AsymmQuant/add_1" -> "maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.expand_dims_28/ExpandDims"; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/AsymmQuant_1/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/AsymmQuant_1/Abs"; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/AsymmQuant_1/Abs" -> "maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/AsymmQuant_1/add"; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/AsymmQuant_1/add/y" -> "maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/AsymmQuant_1/add"; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/AsymmQuant_1/add" -> "maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/AsymmQuant_1/add_1"; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/AsymmQuant_1/ReadVariableOp" -> "maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/AsymmQuant_1/add_1"; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/AsymmQuant_1/add_1" -> "maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/AsymmQuant_1/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.expand_dims_27/ExpandDims"; -"maskrcnn/tf.__operators__.add_96/AddV2" -> "maskrcnn/tf.__operators__.add_96/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_94/AddV2" -> "maskrcnn/tf.__operators__.add_94/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_92/AddV2" -> 
"maskrcnn/tf.__operators__.add_92/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_90/AddV2" -> "maskrcnn/tf.__operators__.add_90/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_88/AddV2" -> "maskrcnn/tf.__operators__.add_88/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_86/AddV2" -> "maskrcnn/tf.__operators__.add_86/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_84/AddV2" -> "maskrcnn/tf.__operators__.add_84/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_82/AddV2" -> "maskrcnn/tf.__operators__.add_82/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_80/AddV2" -> "maskrcnn/tf.__operators__.add_80/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_78/AddV2" -> "maskrcnn/tf.__operators__.add_78/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_76/AddV2" -> "maskrcnn/tf.__operators__.add_76/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_74/AddV2" -> "maskrcnn/tf.__operators__.add_74/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_72/AddV2" -> "maskrcnn/tf.__operators__.add_72/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_70/AddV2" -> "maskrcnn/tf.__operators__.add_70/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_66/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_66/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.subtract_66/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_66/fake_quantize/AsymmQuant/Abs"; +"maskrcnn/tf.math.subtract_66/fake_quantize/AsymmQuant/Abs" -> "maskrcnn/tf.math.subtract_66/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.subtract_66/fake_quantize/AsymmQuant/add/y" -> "maskrcnn/tf.math.subtract_66/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.subtract_66/fake_quantize/AsymmQuant/add" -> "maskrcnn/tf.math.subtract_66/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.subtract_66/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_66/fake_quantize/AsymmQuant/ReadVariableOp"; +"maskrcnn/tf.math.subtract_66/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_66/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"maskrcnn/tf.math.subtract_66/fake_quantize/AsymmQuant/ReadVariableOp" -> "maskrcnn/tf.math.subtract_66/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.subtract_66/fake_quantize/AsymmQuant/add_1" -> "maskrcnn/tf.math.subtract_66/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_66/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.subtract_66/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_66/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.expand_dims_28/ExpandDims"; +"maskrcnn/tf.math.subtract_65/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_65/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.subtract_65/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_65/fake_quantize/AsymmQuant/Abs"; +"maskrcnn/tf.math.subtract_65/fake_quantize/AsymmQuant/Abs" -> "maskrcnn/tf.math.subtract_65/fake_quantize/AsymmQuant/add"; 
+"maskrcnn/tf.math.subtract_65/fake_quantize/AsymmQuant/add/y" -> "maskrcnn/tf.math.subtract_65/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.subtract_65/fake_quantize/AsymmQuant/add" -> "maskrcnn/tf.math.subtract_65/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.subtract_65/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_65/fake_quantize/AsymmQuant/ReadVariableOp"; +"maskrcnn/tf.math.subtract_65/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_65/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"maskrcnn/tf.math.subtract_65/fake_quantize/AsymmQuant/ReadVariableOp" -> "maskrcnn/tf.math.subtract_65/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.subtract_65/fake_quantize/AsymmQuant/add_1" -> "maskrcnn/tf.math.subtract_65/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_65/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.subtract_65/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_65/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.expand_dims_27/ExpandDims"; +"maskrcnn/tf.__operators__.add_70/AddV2" -> "maskrcnn/tf.stack_18/stack"; +"maskrcnn/tf.__operators__.add_72/AddV2" -> "maskrcnn/tf.stack_18/stack"; +"maskrcnn/tf.__operators__.add_74/AddV2" -> "maskrcnn/tf.stack_18/stack"; +"maskrcnn/tf.__operators__.add_76/AddV2" -> "maskrcnn/tf.stack_18/stack"; +"maskrcnn/tf.__operators__.add_78/AddV2" -> "maskrcnn/tf.stack_18/stack"; +"maskrcnn/tf.__operators__.add_80/AddV2" -> "maskrcnn/tf.stack_18/stack"; +"maskrcnn/tf.__operators__.add_82/AddV2" -> "maskrcnn/tf.stack_18/stack"; +"maskrcnn/tf.__operators__.add_84/AddV2" -> "maskrcnn/tf.stack_18/stack"; +"maskrcnn/tf.__operators__.add_86/AddV2" -> "maskrcnn/tf.stack_18/stack"; +"maskrcnn/tf.__operators__.add_88/AddV2" -> "maskrcnn/tf.stack_18/stack"; +"maskrcnn/tf.__operators__.add_90/AddV2" -> "maskrcnn/tf.stack_18/stack"; +"maskrcnn/tf.__operators__.add_92/AddV2" -> "maskrcnn/tf.stack_18/stack"; +"maskrcnn/tf.__operators__.add_94/AddV2" -> "maskrcnn/tf.stack_18/stack"; +"maskrcnn/tf.__operators__.add_96/AddV2" -> "maskrcnn/tf.stack_18/stack"; +"maskrcnn/tf.stack_18/stack" -> "maskrcnn/tf.math.floor_2/Floor"; +"maskrcnn/tf.stack_18/stack" -> "maskrcnn/tf.math.subtract_67/fake_quantize_I0/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.__operators__.getitem_129/strided_slice/stack" -> "maskrcnn/tf.__operators__.getitem_129/strided_slice"; "maskrcnn/tf.__operators__.getitem_129/strided_slice/stack_1" -> "maskrcnn/tf.__operators__.getitem_129/strided_slice"; "maskrcnn/tf.__operators__.getitem_129/strided_slice/stack_2" -> "maskrcnn/tf.__operators__.getitem_129/strided_slice"; @@ -15469,344 +15085,56 @@ args_0_1 -> "maskrcnn/image_info/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVar "maskrcnn/tf.expand_dims_27/ExpandDims" -> "maskrcnn/tf.concat_19/concat"; "maskrcnn/tf.expand_dims_28/ExpandDims/dim" -> "maskrcnn/tf.expand_dims_28/ExpandDims"; "maskrcnn/tf.expand_dims_28/ExpandDims" -> "maskrcnn/tf.concat_19/concat"; -"maskrcnn/tf.__operators__.add_70/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_70/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; -"maskrcnn/tf.__operators__.add_70/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_70/fake_quantize/AsymmQuant/Abs"; -"maskrcnn/tf.__operators__.add_70/fake_quantize/AsymmQuant/Abs" -> 
"maskrcnn/tf.__operators__.add_70/fake_quantize/AsymmQuant/add"; -"maskrcnn/tf.__operators__.add_70/fake_quantize/AsymmQuant/add/y" -> "maskrcnn/tf.__operators__.add_70/fake_quantize/AsymmQuant/add"; -"maskrcnn/tf.__operators__.add_70/fake_quantize/AsymmQuant/add" -> "maskrcnn/tf.__operators__.add_70/fake_quantize/AsymmQuant/add_1"; -"maskrcnn/tf.__operators__.add_70/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_70/fake_quantize/AsymmQuant/ReadVariableOp"; -"maskrcnn/tf.__operators__.add_70/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_70/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; -"maskrcnn/tf.__operators__.add_70/fake_quantize/AsymmQuant/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_70/fake_quantize/AsymmQuant/add_1"; -"maskrcnn/tf.__operators__.add_70/fake_quantize/AsymmQuant/add_1" -> "maskrcnn/tf.__operators__.add_70/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_70/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_70/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_70/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.stack_18/stack"; -"maskrcnn/tf.__operators__.add_72/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_72/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; -"maskrcnn/tf.__operators__.add_72/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_72/fake_quantize/AsymmQuant/Abs"; -"maskrcnn/tf.__operators__.add_72/fake_quantize/AsymmQuant/Abs" -> "maskrcnn/tf.__operators__.add_72/fake_quantize/AsymmQuant/add"; -"maskrcnn/tf.__operators__.add_72/fake_quantize/AsymmQuant/add/y" -> "maskrcnn/tf.__operators__.add_72/fake_quantize/AsymmQuant/add"; -"maskrcnn/tf.__operators__.add_72/fake_quantize/AsymmQuant/add" -> "maskrcnn/tf.__operators__.add_72/fake_quantize/AsymmQuant/add_1"; -"maskrcnn/tf.__operators__.add_72/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_72/fake_quantize/AsymmQuant/ReadVariableOp"; -"maskrcnn/tf.__operators__.add_72/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_72/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; -"maskrcnn/tf.__operators__.add_72/fake_quantize/AsymmQuant/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_72/fake_quantize/AsymmQuant/add_1"; -"maskrcnn/tf.__operators__.add_72/fake_quantize/AsymmQuant/add_1" -> "maskrcnn/tf.__operators__.add_72/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_72/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_72/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_72/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.stack_18/stack"; -"maskrcnn/tf.__operators__.add_74/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_74/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; -"maskrcnn/tf.__operators__.add_74/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_74/fake_quantize/AsymmQuant/Abs"; -"maskrcnn/tf.__operators__.add_74/fake_quantize/AsymmQuant/Abs" -> "maskrcnn/tf.__operators__.add_74/fake_quantize/AsymmQuant/add"; -"maskrcnn/tf.__operators__.add_74/fake_quantize/AsymmQuant/add/y" -> 
"maskrcnn/tf.__operators__.add_74/fake_quantize/AsymmQuant/add"; -"maskrcnn/tf.__operators__.add_74/fake_quantize/AsymmQuant/add" -> "maskrcnn/tf.__operators__.add_74/fake_quantize/AsymmQuant/add_1"; -"maskrcnn/tf.__operators__.add_74/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_74/fake_quantize/AsymmQuant/ReadVariableOp"; -"maskrcnn/tf.__operators__.add_74/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_74/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; -"maskrcnn/tf.__operators__.add_74/fake_quantize/AsymmQuant/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_74/fake_quantize/AsymmQuant/add_1"; -"maskrcnn/tf.__operators__.add_74/fake_quantize/AsymmQuant/add_1" -> "maskrcnn/tf.__operators__.add_74/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_74/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_74/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_74/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.stack_18/stack"; -"maskrcnn/tf.__operators__.add_76/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_76/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; -"maskrcnn/tf.__operators__.add_76/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_76/fake_quantize/AsymmQuant/Abs"; -"maskrcnn/tf.__operators__.add_76/fake_quantize/AsymmQuant/Abs" -> "maskrcnn/tf.__operators__.add_76/fake_quantize/AsymmQuant/add"; -"maskrcnn/tf.__operators__.add_76/fake_quantize/AsymmQuant/add/y" -> "maskrcnn/tf.__operators__.add_76/fake_quantize/AsymmQuant/add"; -"maskrcnn/tf.__operators__.add_76/fake_quantize/AsymmQuant/add" -> "maskrcnn/tf.__operators__.add_76/fake_quantize/AsymmQuant/add_1"; -"maskrcnn/tf.__operators__.add_76/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_76/fake_quantize/AsymmQuant/ReadVariableOp"; -"maskrcnn/tf.__operators__.add_76/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_76/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; -"maskrcnn/tf.__operators__.add_76/fake_quantize/AsymmQuant/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_76/fake_quantize/AsymmQuant/add_1"; -"maskrcnn/tf.__operators__.add_76/fake_quantize/AsymmQuant/add_1" -> "maskrcnn/tf.__operators__.add_76/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_76/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_76/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_76/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.stack_18/stack"; -"maskrcnn/tf.__operators__.add_78/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_78/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; -"maskrcnn/tf.__operators__.add_78/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_78/fake_quantize/AsymmQuant/Abs"; -"maskrcnn/tf.__operators__.add_78/fake_quantize/AsymmQuant/Abs" -> "maskrcnn/tf.__operators__.add_78/fake_quantize/AsymmQuant/add"; -"maskrcnn/tf.__operators__.add_78/fake_quantize/AsymmQuant/add/y" -> "maskrcnn/tf.__operators__.add_78/fake_quantize/AsymmQuant/add"; -"maskrcnn/tf.__operators__.add_78/fake_quantize/AsymmQuant/add" -> 
"maskrcnn/tf.__operators__.add_78/fake_quantize/AsymmQuant/add_1"; -"maskrcnn/tf.__operators__.add_78/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_78/fake_quantize/AsymmQuant/ReadVariableOp"; -"maskrcnn/tf.__operators__.add_78/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_78/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; -"maskrcnn/tf.__operators__.add_78/fake_quantize/AsymmQuant/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_78/fake_quantize/AsymmQuant/add_1"; -"maskrcnn/tf.__operators__.add_78/fake_quantize/AsymmQuant/add_1" -> "maskrcnn/tf.__operators__.add_78/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_78/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_78/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_78/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.stack_18/stack"; -"maskrcnn/tf.__operators__.add_80/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_80/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; -"maskrcnn/tf.__operators__.add_80/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_80/fake_quantize/AsymmQuant/Abs"; -"maskrcnn/tf.__operators__.add_80/fake_quantize/AsymmQuant/Abs" -> "maskrcnn/tf.__operators__.add_80/fake_quantize/AsymmQuant/add"; -"maskrcnn/tf.__operators__.add_80/fake_quantize/AsymmQuant/add/y" -> "maskrcnn/tf.__operators__.add_80/fake_quantize/AsymmQuant/add"; -"maskrcnn/tf.__operators__.add_80/fake_quantize/AsymmQuant/add" -> "maskrcnn/tf.__operators__.add_80/fake_quantize/AsymmQuant/add_1"; -"maskrcnn/tf.__operators__.add_80/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_80/fake_quantize/AsymmQuant/ReadVariableOp"; -"maskrcnn/tf.__operators__.add_80/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_80/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; -"maskrcnn/tf.__operators__.add_80/fake_quantize/AsymmQuant/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_80/fake_quantize/AsymmQuant/add_1"; -"maskrcnn/tf.__operators__.add_80/fake_quantize/AsymmQuant/add_1" -> "maskrcnn/tf.__operators__.add_80/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_80/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_80/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_80/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.stack_18/stack"; -"maskrcnn/tf.__operators__.add_82/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_82/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; -"maskrcnn/tf.__operators__.add_82/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_82/fake_quantize/AsymmQuant/Abs"; -"maskrcnn/tf.__operators__.add_82/fake_quantize/AsymmQuant/Abs" -> "maskrcnn/tf.__operators__.add_82/fake_quantize/AsymmQuant/add"; -"maskrcnn/tf.__operators__.add_82/fake_quantize/AsymmQuant/add/y" -> "maskrcnn/tf.__operators__.add_82/fake_quantize/AsymmQuant/add"; -"maskrcnn/tf.__operators__.add_82/fake_quantize/AsymmQuant/add" -> "maskrcnn/tf.__operators__.add_82/fake_quantize/AsymmQuant/add_1"; -"maskrcnn/tf.__operators__.add_82/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> 
"maskrcnn/tf.__operators__.add_82/fake_quantize/AsymmQuant/ReadVariableOp"; -"maskrcnn/tf.__operators__.add_82/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_82/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; -"maskrcnn/tf.__operators__.add_82/fake_quantize/AsymmQuant/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_82/fake_quantize/AsymmQuant/add_1"; -"maskrcnn/tf.__operators__.add_82/fake_quantize/AsymmQuant/add_1" -> "maskrcnn/tf.__operators__.add_82/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_82/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_82/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_82/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.stack_18/stack"; -"maskrcnn/tf.__operators__.add_84/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_84/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; -"maskrcnn/tf.__operators__.add_84/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_84/fake_quantize/AsymmQuant/Abs"; -"maskrcnn/tf.__operators__.add_84/fake_quantize/AsymmQuant/Abs" -> "maskrcnn/tf.__operators__.add_84/fake_quantize/AsymmQuant/add"; -"maskrcnn/tf.__operators__.add_84/fake_quantize/AsymmQuant/add/y" -> "maskrcnn/tf.__operators__.add_84/fake_quantize/AsymmQuant/add"; -"maskrcnn/tf.__operators__.add_84/fake_quantize/AsymmQuant/add" -> "maskrcnn/tf.__operators__.add_84/fake_quantize/AsymmQuant/add_1"; -"maskrcnn/tf.__operators__.add_84/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_84/fake_quantize/AsymmQuant/ReadVariableOp"; -"maskrcnn/tf.__operators__.add_84/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_84/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; -"maskrcnn/tf.__operators__.add_84/fake_quantize/AsymmQuant/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_84/fake_quantize/AsymmQuant/add_1"; -"maskrcnn/tf.__operators__.add_84/fake_quantize/AsymmQuant/add_1" -> "maskrcnn/tf.__operators__.add_84/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_84/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_84/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_84/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.stack_18/stack"; -"maskrcnn/tf.__operators__.add_86/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_86/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; -"maskrcnn/tf.__operators__.add_86/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_86/fake_quantize/AsymmQuant/Abs"; -"maskrcnn/tf.__operators__.add_86/fake_quantize/AsymmQuant/Abs" -> "maskrcnn/tf.__operators__.add_86/fake_quantize/AsymmQuant/add"; -"maskrcnn/tf.__operators__.add_86/fake_quantize/AsymmQuant/add/y" -> "maskrcnn/tf.__operators__.add_86/fake_quantize/AsymmQuant/add"; -"maskrcnn/tf.__operators__.add_86/fake_quantize/AsymmQuant/add" -> "maskrcnn/tf.__operators__.add_86/fake_quantize/AsymmQuant/add_1"; -"maskrcnn/tf.__operators__.add_86/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_86/fake_quantize/AsymmQuant/ReadVariableOp"; -"maskrcnn/tf.__operators__.add_86/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> 
"maskrcnn/tf.__operators__.add_86/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; -"maskrcnn/tf.__operators__.add_86/fake_quantize/AsymmQuant/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_86/fake_quantize/AsymmQuant/add_1"; -"maskrcnn/tf.__operators__.add_86/fake_quantize/AsymmQuant/add_1" -> "maskrcnn/tf.__operators__.add_86/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_86/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_86/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_86/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.stack_18/stack"; -"maskrcnn/tf.__operators__.add_88/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_88/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; -"maskrcnn/tf.__operators__.add_88/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_88/fake_quantize/AsymmQuant/Abs"; -"maskrcnn/tf.__operators__.add_88/fake_quantize/AsymmQuant/Abs" -> "maskrcnn/tf.__operators__.add_88/fake_quantize/AsymmQuant/add"; -"maskrcnn/tf.__operators__.add_88/fake_quantize/AsymmQuant/add/y" -> "maskrcnn/tf.__operators__.add_88/fake_quantize/AsymmQuant/add"; -"maskrcnn/tf.__operators__.add_88/fake_quantize/AsymmQuant/add" -> "maskrcnn/tf.__operators__.add_88/fake_quantize/AsymmQuant/add_1"; -"maskrcnn/tf.__operators__.add_88/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_88/fake_quantize/AsymmQuant/ReadVariableOp"; -"maskrcnn/tf.__operators__.add_88/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_88/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; -"maskrcnn/tf.__operators__.add_88/fake_quantize/AsymmQuant/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_88/fake_quantize/AsymmQuant/add_1"; -"maskrcnn/tf.__operators__.add_88/fake_quantize/AsymmQuant/add_1" -> "maskrcnn/tf.__operators__.add_88/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_88/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_88/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_88/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.stack_18/stack"; -"maskrcnn/tf.__operators__.add_90/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_90/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; -"maskrcnn/tf.__operators__.add_90/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_90/fake_quantize/AsymmQuant/Abs"; -"maskrcnn/tf.__operators__.add_90/fake_quantize/AsymmQuant/Abs" -> "maskrcnn/tf.__operators__.add_90/fake_quantize/AsymmQuant/add"; -"maskrcnn/tf.__operators__.add_90/fake_quantize/AsymmQuant/add/y" -> "maskrcnn/tf.__operators__.add_90/fake_quantize/AsymmQuant/add"; -"maskrcnn/tf.__operators__.add_90/fake_quantize/AsymmQuant/add" -> "maskrcnn/tf.__operators__.add_90/fake_quantize/AsymmQuant/add_1"; -"maskrcnn/tf.__operators__.add_90/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_90/fake_quantize/AsymmQuant/ReadVariableOp"; -"maskrcnn/tf.__operators__.add_90/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_90/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; -"maskrcnn/tf.__operators__.add_90/fake_quantize/AsymmQuant/ReadVariableOp" -> 
"maskrcnn/tf.__operators__.add_90/fake_quantize/AsymmQuant/add_1"; -"maskrcnn/tf.__operators__.add_90/fake_quantize/AsymmQuant/add_1" -> "maskrcnn/tf.__operators__.add_90/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_90/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_90/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_90/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.stack_18/stack"; -"maskrcnn/tf.__operators__.add_92/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_92/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; -"maskrcnn/tf.__operators__.add_92/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_92/fake_quantize/AsymmQuant/Abs"; -"maskrcnn/tf.__operators__.add_92/fake_quantize/AsymmQuant/Abs" -> "maskrcnn/tf.__operators__.add_92/fake_quantize/AsymmQuant/add"; -"maskrcnn/tf.__operators__.add_92/fake_quantize/AsymmQuant/add/y" -> "maskrcnn/tf.__operators__.add_92/fake_quantize/AsymmQuant/add"; -"maskrcnn/tf.__operators__.add_92/fake_quantize/AsymmQuant/add" -> "maskrcnn/tf.__operators__.add_92/fake_quantize/AsymmQuant/add_1"; -"maskrcnn/tf.__operators__.add_92/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_92/fake_quantize/AsymmQuant/ReadVariableOp"; -"maskrcnn/tf.__operators__.add_92/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_92/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; -"maskrcnn/tf.__operators__.add_92/fake_quantize/AsymmQuant/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_92/fake_quantize/AsymmQuant/add_1"; -"maskrcnn/tf.__operators__.add_92/fake_quantize/AsymmQuant/add_1" -> "maskrcnn/tf.__operators__.add_92/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_92/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_92/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_92/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.stack_18/stack"; -"maskrcnn/tf.__operators__.add_94/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_94/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; -"maskrcnn/tf.__operators__.add_94/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_94/fake_quantize/AsymmQuant/Abs"; -"maskrcnn/tf.__operators__.add_94/fake_quantize/AsymmQuant/Abs" -> "maskrcnn/tf.__operators__.add_94/fake_quantize/AsymmQuant/add"; -"maskrcnn/tf.__operators__.add_94/fake_quantize/AsymmQuant/add/y" -> "maskrcnn/tf.__operators__.add_94/fake_quantize/AsymmQuant/add"; -"maskrcnn/tf.__operators__.add_94/fake_quantize/AsymmQuant/add" -> "maskrcnn/tf.__operators__.add_94/fake_quantize/AsymmQuant/add_1"; -"maskrcnn/tf.__operators__.add_94/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_94/fake_quantize/AsymmQuant/ReadVariableOp"; -"maskrcnn/tf.__operators__.add_94/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_94/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; -"maskrcnn/tf.__operators__.add_94/fake_quantize/AsymmQuant/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_94/fake_quantize/AsymmQuant/add_1"; -"maskrcnn/tf.__operators__.add_94/fake_quantize/AsymmQuant/add_1" -> 
"maskrcnn/tf.__operators__.add_94/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_94/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_94/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_94/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.stack_18/stack"; -"maskrcnn/tf.__operators__.add_96/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_96/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; -"maskrcnn/tf.__operators__.add_96/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_96/fake_quantize/AsymmQuant/Abs"; -"maskrcnn/tf.__operators__.add_96/fake_quantize/AsymmQuant/Abs" -> "maskrcnn/tf.__operators__.add_96/fake_quantize/AsymmQuant/add"; -"maskrcnn/tf.__operators__.add_96/fake_quantize/AsymmQuant/add/y" -> "maskrcnn/tf.__operators__.add_96/fake_quantize/AsymmQuant/add"; -"maskrcnn/tf.__operators__.add_96/fake_quantize/AsymmQuant/add" -> "maskrcnn/tf.__operators__.add_96/fake_quantize/AsymmQuant/add_1"; -"maskrcnn/tf.__operators__.add_96/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_96/fake_quantize/AsymmQuant/ReadVariableOp"; -"maskrcnn/tf.__operators__.add_96/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_96/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; -"maskrcnn/tf.__operators__.add_96/fake_quantize/AsymmQuant/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_96/fake_quantize/AsymmQuant/add_1"; -"maskrcnn/tf.__operators__.add_96/fake_quantize/AsymmQuant/add_1" -> "maskrcnn/tf.__operators__.add_96/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_96/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_96/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_96/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.stack_18/stack"; -"maskrcnn/tf.stack_18/stack" -> "maskrcnn/tf.math.floor_2/Floor"; -"maskrcnn/tf.stack_18/stack" -> "maskrcnn/tf.math.subtract_67/Sub"; -"maskrcnn/tf.__operators__.add_95/AddV2" -> "maskrcnn/tf.__operators__.add_95/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_93/AddV2" -> "maskrcnn/tf.__operators__.add_93/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_91/AddV2" -> "maskrcnn/tf.__operators__.add_91/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_89/AddV2" -> "maskrcnn/tf.__operators__.add_89/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_87/AddV2" -> "maskrcnn/tf.__operators__.add_87/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_85/AddV2" -> "maskrcnn/tf.__operators__.add_85/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_83/AddV2" -> "maskrcnn/tf.__operators__.add_83/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_81/AddV2" -> "maskrcnn/tf.__operators__.add_81/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_79/AddV2" -> "maskrcnn/tf.__operators__.add_79/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_77/AddV2" -> "maskrcnn/tf.__operators__.add_77/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_75/AddV2" -> 
"maskrcnn/tf.__operators__.add_75/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_73/AddV2" -> "maskrcnn/tf.__operators__.add_73/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_71/AddV2" -> "maskrcnn/tf.__operators__.add_71/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_69/AddV2" -> "maskrcnn/tf.__operators__.add_69/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.__operators__.add_69/AddV2" -> "maskrcnn/tf.stack_17/stack"; +"maskrcnn/tf.__operators__.add_71/AddV2" -> "maskrcnn/tf.stack_17/stack"; +"maskrcnn/tf.__operators__.add_73/AddV2" -> "maskrcnn/tf.stack_17/stack"; +"maskrcnn/tf.__operators__.add_75/AddV2" -> "maskrcnn/tf.stack_17/stack"; +"maskrcnn/tf.__operators__.add_77/AddV2" -> "maskrcnn/tf.stack_17/stack"; +"maskrcnn/tf.__operators__.add_79/AddV2" -> "maskrcnn/tf.stack_17/stack"; +"maskrcnn/tf.__operators__.add_81/AddV2" -> "maskrcnn/tf.stack_17/stack"; +"maskrcnn/tf.__operators__.add_83/AddV2" -> "maskrcnn/tf.stack_17/stack"; +"maskrcnn/tf.__operators__.add_85/AddV2" -> "maskrcnn/tf.stack_17/stack"; +"maskrcnn/tf.__operators__.add_87/AddV2" -> "maskrcnn/tf.stack_17/stack"; +"maskrcnn/tf.__operators__.add_89/AddV2" -> "maskrcnn/tf.stack_17/stack"; +"maskrcnn/tf.__operators__.add_91/AddV2" -> "maskrcnn/tf.stack_17/stack"; +"maskrcnn/tf.__operators__.add_93/AddV2" -> "maskrcnn/tf.stack_17/stack"; +"maskrcnn/tf.__operators__.add_95/AddV2" -> "maskrcnn/tf.stack_17/stack"; "maskrcnn/tf.concat_19/concat/axis" -> "maskrcnn/tf.concat_19/concat"; "maskrcnn/tf.concat_19/concat" -> "maskrcnn/tf.__operators__.getitem_135/strided_slice"; "maskrcnn/tf.concat_19/concat" -> "maskrcnn/tf.__operators__.getitem_133/strided_slice"; "maskrcnn/tf.concat_19/concat" -> "maskrcnn/tf.__operators__.getitem_136/strided_slice"; "maskrcnn/tf.concat_19/concat" -> "maskrcnn/tf.__operators__.getitem_134/strided_slice"; -"maskrcnn/tf.__operators__.add_69/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_69/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; -"maskrcnn/tf.__operators__.add_69/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_69/fake_quantize/AsymmQuant/Abs"; -"maskrcnn/tf.__operators__.add_69/fake_quantize/AsymmQuant/Abs" -> "maskrcnn/tf.__operators__.add_69/fake_quantize/AsymmQuant/add"; -"maskrcnn/tf.__operators__.add_69/fake_quantize/AsymmQuant/add/y" -> "maskrcnn/tf.__operators__.add_69/fake_quantize/AsymmQuant/add"; -"maskrcnn/tf.__operators__.add_69/fake_quantize/AsymmQuant/add" -> "maskrcnn/tf.__operators__.add_69/fake_quantize/AsymmQuant/add_1"; -"maskrcnn/tf.__operators__.add_69/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_69/fake_quantize/AsymmQuant/ReadVariableOp"; -"maskrcnn/tf.__operators__.add_69/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_69/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; -"maskrcnn/tf.__operators__.add_69/fake_quantize/AsymmQuant/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_69/fake_quantize/AsymmQuant/add_1"; -"maskrcnn/tf.__operators__.add_69/fake_quantize/AsymmQuant/add_1" -> "maskrcnn/tf.__operators__.add_69/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_69/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_69/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; 
-"maskrcnn/tf.__operators__.add_69/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.stack_17/stack"; -"maskrcnn/tf.__operators__.add_71/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_71/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; -"maskrcnn/tf.__operators__.add_71/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_71/fake_quantize/AsymmQuant/Abs"; -"maskrcnn/tf.__operators__.add_71/fake_quantize/AsymmQuant/Abs" -> "maskrcnn/tf.__operators__.add_71/fake_quantize/AsymmQuant/add"; -"maskrcnn/tf.__operators__.add_71/fake_quantize/AsymmQuant/add/y" -> "maskrcnn/tf.__operators__.add_71/fake_quantize/AsymmQuant/add"; -"maskrcnn/tf.__operators__.add_71/fake_quantize/AsymmQuant/add" -> "maskrcnn/tf.__operators__.add_71/fake_quantize/AsymmQuant/add_1"; -"maskrcnn/tf.__operators__.add_71/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_71/fake_quantize/AsymmQuant/ReadVariableOp"; -"maskrcnn/tf.__operators__.add_71/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_71/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; -"maskrcnn/tf.__operators__.add_71/fake_quantize/AsymmQuant/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_71/fake_quantize/AsymmQuant/add_1"; -"maskrcnn/tf.__operators__.add_71/fake_quantize/AsymmQuant/add_1" -> "maskrcnn/tf.__operators__.add_71/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_71/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_71/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_71/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.stack_17/stack"; -"maskrcnn/tf.__operators__.add_73/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_73/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; -"maskrcnn/tf.__operators__.add_73/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_73/fake_quantize/AsymmQuant/Abs"; -"maskrcnn/tf.__operators__.add_73/fake_quantize/AsymmQuant/Abs" -> "maskrcnn/tf.__operators__.add_73/fake_quantize/AsymmQuant/add"; -"maskrcnn/tf.__operators__.add_73/fake_quantize/AsymmQuant/add/y" -> "maskrcnn/tf.__operators__.add_73/fake_quantize/AsymmQuant/add"; -"maskrcnn/tf.__operators__.add_73/fake_quantize/AsymmQuant/add" -> "maskrcnn/tf.__operators__.add_73/fake_quantize/AsymmQuant/add_1"; -"maskrcnn/tf.__operators__.add_73/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_73/fake_quantize/AsymmQuant/ReadVariableOp"; -"maskrcnn/tf.__operators__.add_73/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_73/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; -"maskrcnn/tf.__operators__.add_73/fake_quantize/AsymmQuant/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_73/fake_quantize/AsymmQuant/add_1"; -"maskrcnn/tf.__operators__.add_73/fake_quantize/AsymmQuant/add_1" -> "maskrcnn/tf.__operators__.add_73/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_73/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_73/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_73/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.stack_17/stack"; 
-"maskrcnn/tf.__operators__.add_75/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_75/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; -"maskrcnn/tf.__operators__.add_75/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_75/fake_quantize/AsymmQuant/Abs"; -"maskrcnn/tf.__operators__.add_75/fake_quantize/AsymmQuant/Abs" -> "maskrcnn/tf.__operators__.add_75/fake_quantize/AsymmQuant/add"; -"maskrcnn/tf.__operators__.add_75/fake_quantize/AsymmQuant/add/y" -> "maskrcnn/tf.__operators__.add_75/fake_quantize/AsymmQuant/add"; -"maskrcnn/tf.__operators__.add_75/fake_quantize/AsymmQuant/add" -> "maskrcnn/tf.__operators__.add_75/fake_quantize/AsymmQuant/add_1"; -"maskrcnn/tf.__operators__.add_75/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_75/fake_quantize/AsymmQuant/ReadVariableOp"; -"maskrcnn/tf.__operators__.add_75/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_75/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; -"maskrcnn/tf.__operators__.add_75/fake_quantize/AsymmQuant/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_75/fake_quantize/AsymmQuant/add_1"; -"maskrcnn/tf.__operators__.add_75/fake_quantize/AsymmQuant/add_1" -> "maskrcnn/tf.__operators__.add_75/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_75/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_75/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_75/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.stack_17/stack"; -"maskrcnn/tf.__operators__.add_77/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_77/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; -"maskrcnn/tf.__operators__.add_77/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_77/fake_quantize/AsymmQuant/Abs"; -"maskrcnn/tf.__operators__.add_77/fake_quantize/AsymmQuant/Abs" -> "maskrcnn/tf.__operators__.add_77/fake_quantize/AsymmQuant/add"; -"maskrcnn/tf.__operators__.add_77/fake_quantize/AsymmQuant/add/y" -> "maskrcnn/tf.__operators__.add_77/fake_quantize/AsymmQuant/add"; -"maskrcnn/tf.__operators__.add_77/fake_quantize/AsymmQuant/add" -> "maskrcnn/tf.__operators__.add_77/fake_quantize/AsymmQuant/add_1"; -"maskrcnn/tf.__operators__.add_77/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_77/fake_quantize/AsymmQuant/ReadVariableOp"; -"maskrcnn/tf.__operators__.add_77/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_77/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; -"maskrcnn/tf.__operators__.add_77/fake_quantize/AsymmQuant/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_77/fake_quantize/AsymmQuant/add_1"; -"maskrcnn/tf.__operators__.add_77/fake_quantize/AsymmQuant/add_1" -> "maskrcnn/tf.__operators__.add_77/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_77/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_77/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_77/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.stack_17/stack"; -"maskrcnn/tf.__operators__.add_79/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_79/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; 
-"maskrcnn/tf.__operators__.add_79/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_79/fake_quantize/AsymmQuant/Abs"; -"maskrcnn/tf.__operators__.add_79/fake_quantize/AsymmQuant/Abs" -> "maskrcnn/tf.__operators__.add_79/fake_quantize/AsymmQuant/add"; -"maskrcnn/tf.__operators__.add_79/fake_quantize/AsymmQuant/add/y" -> "maskrcnn/tf.__operators__.add_79/fake_quantize/AsymmQuant/add"; -"maskrcnn/tf.__operators__.add_79/fake_quantize/AsymmQuant/add" -> "maskrcnn/tf.__operators__.add_79/fake_quantize/AsymmQuant/add_1"; -"maskrcnn/tf.__operators__.add_79/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_79/fake_quantize/AsymmQuant/ReadVariableOp"; -"maskrcnn/tf.__operators__.add_79/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_79/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; -"maskrcnn/tf.__operators__.add_79/fake_quantize/AsymmQuant/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_79/fake_quantize/AsymmQuant/add_1"; -"maskrcnn/tf.__operators__.add_79/fake_quantize/AsymmQuant/add_1" -> "maskrcnn/tf.__operators__.add_79/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_79/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_79/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_79/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.stack_17/stack"; -"maskrcnn/tf.__operators__.add_81/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_81/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; -"maskrcnn/tf.__operators__.add_81/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_81/fake_quantize/AsymmQuant/Abs"; -"maskrcnn/tf.__operators__.add_81/fake_quantize/AsymmQuant/Abs" -> "maskrcnn/tf.__operators__.add_81/fake_quantize/AsymmQuant/add"; -"maskrcnn/tf.__operators__.add_81/fake_quantize/AsymmQuant/add/y" -> "maskrcnn/tf.__operators__.add_81/fake_quantize/AsymmQuant/add"; -"maskrcnn/tf.__operators__.add_81/fake_quantize/AsymmQuant/add" -> "maskrcnn/tf.__operators__.add_81/fake_quantize/AsymmQuant/add_1"; -"maskrcnn/tf.__operators__.add_81/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_81/fake_quantize/AsymmQuant/ReadVariableOp"; -"maskrcnn/tf.__operators__.add_81/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_81/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; -"maskrcnn/tf.__operators__.add_81/fake_quantize/AsymmQuant/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_81/fake_quantize/AsymmQuant/add_1"; -"maskrcnn/tf.__operators__.add_81/fake_quantize/AsymmQuant/add_1" -> "maskrcnn/tf.__operators__.add_81/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_81/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_81/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_81/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.stack_17/stack"; -"maskrcnn/tf.__operators__.add_83/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_83/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; -"maskrcnn/tf.__operators__.add_83/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_83/fake_quantize/AsymmQuant/Abs"; 
-"maskrcnn/tf.__operators__.add_83/fake_quantize/AsymmQuant/Abs" -> "maskrcnn/tf.__operators__.add_83/fake_quantize/AsymmQuant/add"; -"maskrcnn/tf.__operators__.add_83/fake_quantize/AsymmQuant/add/y" -> "maskrcnn/tf.__operators__.add_83/fake_quantize/AsymmQuant/add"; -"maskrcnn/tf.__operators__.add_83/fake_quantize/AsymmQuant/add" -> "maskrcnn/tf.__operators__.add_83/fake_quantize/AsymmQuant/add_1"; -"maskrcnn/tf.__operators__.add_83/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_83/fake_quantize/AsymmQuant/ReadVariableOp"; -"maskrcnn/tf.__operators__.add_83/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_83/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; -"maskrcnn/tf.__operators__.add_83/fake_quantize/AsymmQuant/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_83/fake_quantize/AsymmQuant/add_1"; -"maskrcnn/tf.__operators__.add_83/fake_quantize/AsymmQuant/add_1" -> "maskrcnn/tf.__operators__.add_83/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_83/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_83/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_83/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.stack_17/stack"; -"maskrcnn/tf.__operators__.add_85/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_85/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; -"maskrcnn/tf.__operators__.add_85/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_85/fake_quantize/AsymmQuant/Abs"; -"maskrcnn/tf.__operators__.add_85/fake_quantize/AsymmQuant/Abs" -> "maskrcnn/tf.__operators__.add_85/fake_quantize/AsymmQuant/add"; -"maskrcnn/tf.__operators__.add_85/fake_quantize/AsymmQuant/add/y" -> "maskrcnn/tf.__operators__.add_85/fake_quantize/AsymmQuant/add"; -"maskrcnn/tf.__operators__.add_85/fake_quantize/AsymmQuant/add" -> "maskrcnn/tf.__operators__.add_85/fake_quantize/AsymmQuant/add_1"; -"maskrcnn/tf.__operators__.add_85/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_85/fake_quantize/AsymmQuant/ReadVariableOp"; -"maskrcnn/tf.__operators__.add_85/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_85/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; -"maskrcnn/tf.__operators__.add_85/fake_quantize/AsymmQuant/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_85/fake_quantize/AsymmQuant/add_1"; -"maskrcnn/tf.__operators__.add_85/fake_quantize/AsymmQuant/add_1" -> "maskrcnn/tf.__operators__.add_85/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_85/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_85/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_85/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.stack_17/stack"; -"maskrcnn/tf.__operators__.add_87/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_87/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; -"maskrcnn/tf.__operators__.add_87/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_87/fake_quantize/AsymmQuant/Abs"; -"maskrcnn/tf.__operators__.add_87/fake_quantize/AsymmQuant/Abs" -> "maskrcnn/tf.__operators__.add_87/fake_quantize/AsymmQuant/add"; 
-"maskrcnn/tf.__operators__.add_87/fake_quantize/AsymmQuant/add/y" -> "maskrcnn/tf.__operators__.add_87/fake_quantize/AsymmQuant/add"; -"maskrcnn/tf.__operators__.add_87/fake_quantize/AsymmQuant/add" -> "maskrcnn/tf.__operators__.add_87/fake_quantize/AsymmQuant/add_1"; -"maskrcnn/tf.__operators__.add_87/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_87/fake_quantize/AsymmQuant/ReadVariableOp"; -"maskrcnn/tf.__operators__.add_87/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_87/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; -"maskrcnn/tf.__operators__.add_87/fake_quantize/AsymmQuant/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_87/fake_quantize/AsymmQuant/add_1"; -"maskrcnn/tf.__operators__.add_87/fake_quantize/AsymmQuant/add_1" -> "maskrcnn/tf.__operators__.add_87/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_87/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_87/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_87/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.stack_17/stack"; -"maskrcnn/tf.__operators__.add_89/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_89/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; -"maskrcnn/tf.__operators__.add_89/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_89/fake_quantize/AsymmQuant/Abs"; -"maskrcnn/tf.__operators__.add_89/fake_quantize/AsymmQuant/Abs" -> "maskrcnn/tf.__operators__.add_89/fake_quantize/AsymmQuant/add"; -"maskrcnn/tf.__operators__.add_89/fake_quantize/AsymmQuant/add/y" -> "maskrcnn/tf.__operators__.add_89/fake_quantize/AsymmQuant/add"; -"maskrcnn/tf.__operators__.add_89/fake_quantize/AsymmQuant/add" -> "maskrcnn/tf.__operators__.add_89/fake_quantize/AsymmQuant/add_1"; -"maskrcnn/tf.__operators__.add_89/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_89/fake_quantize/AsymmQuant/ReadVariableOp"; -"maskrcnn/tf.__operators__.add_89/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_89/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; -"maskrcnn/tf.__operators__.add_89/fake_quantize/AsymmQuant/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_89/fake_quantize/AsymmQuant/add_1"; -"maskrcnn/tf.__operators__.add_89/fake_quantize/AsymmQuant/add_1" -> "maskrcnn/tf.__operators__.add_89/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_89/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_89/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_89/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.stack_17/stack"; -"maskrcnn/tf.__operators__.add_91/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_91/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; -"maskrcnn/tf.__operators__.add_91/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_91/fake_quantize/AsymmQuant/Abs"; -"maskrcnn/tf.__operators__.add_91/fake_quantize/AsymmQuant/Abs" -> "maskrcnn/tf.__operators__.add_91/fake_quantize/AsymmQuant/add"; -"maskrcnn/tf.__operators__.add_91/fake_quantize/AsymmQuant/add/y" -> "maskrcnn/tf.__operators__.add_91/fake_quantize/AsymmQuant/add"; 
-"maskrcnn/tf.__operators__.add_91/fake_quantize/AsymmQuant/add" -> "maskrcnn/tf.__operators__.add_91/fake_quantize/AsymmQuant/add_1"; -"maskrcnn/tf.__operators__.add_91/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_91/fake_quantize/AsymmQuant/ReadVariableOp"; -"maskrcnn/tf.__operators__.add_91/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_91/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; -"maskrcnn/tf.__operators__.add_91/fake_quantize/AsymmQuant/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_91/fake_quantize/AsymmQuant/add_1"; -"maskrcnn/tf.__operators__.add_91/fake_quantize/AsymmQuant/add_1" -> "maskrcnn/tf.__operators__.add_91/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_91/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_91/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_91/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.stack_17/stack"; -"maskrcnn/tf.__operators__.add_93/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_93/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; -"maskrcnn/tf.__operators__.add_93/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_93/fake_quantize/AsymmQuant/Abs"; -"maskrcnn/tf.__operators__.add_93/fake_quantize/AsymmQuant/Abs" -> "maskrcnn/tf.__operators__.add_93/fake_quantize/AsymmQuant/add"; -"maskrcnn/tf.__operators__.add_93/fake_quantize/AsymmQuant/add/y" -> "maskrcnn/tf.__operators__.add_93/fake_quantize/AsymmQuant/add"; -"maskrcnn/tf.__operators__.add_93/fake_quantize/AsymmQuant/add" -> "maskrcnn/tf.__operators__.add_93/fake_quantize/AsymmQuant/add_1"; -"maskrcnn/tf.__operators__.add_93/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_93/fake_quantize/AsymmQuant/ReadVariableOp"; -"maskrcnn/tf.__operators__.add_93/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_93/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; -"maskrcnn/tf.__operators__.add_93/fake_quantize/AsymmQuant/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_93/fake_quantize/AsymmQuant/add_1"; -"maskrcnn/tf.__operators__.add_93/fake_quantize/AsymmQuant/add_1" -> "maskrcnn/tf.__operators__.add_93/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_93/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_93/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_93/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.stack_17/stack"; -"maskrcnn/tf.__operators__.add_95/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_95/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; -"maskrcnn/tf.__operators__.add_95/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_95/fake_quantize/AsymmQuant/Abs"; -"maskrcnn/tf.__operators__.add_95/fake_quantize/AsymmQuant/Abs" -> "maskrcnn/tf.__operators__.add_95/fake_quantize/AsymmQuant/add"; -"maskrcnn/tf.__operators__.add_95/fake_quantize/AsymmQuant/add/y" -> "maskrcnn/tf.__operators__.add_95/fake_quantize/AsymmQuant/add"; -"maskrcnn/tf.__operators__.add_95/fake_quantize/AsymmQuant/add" -> "maskrcnn/tf.__operators__.add_95/fake_quantize/AsymmQuant/add_1"; 
-"maskrcnn/tf.__operators__.add_95/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_95/fake_quantize/AsymmQuant/ReadVariableOp"; -"maskrcnn/tf.__operators__.add_95/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_95/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; -"maskrcnn/tf.__operators__.add_95/fake_quantize/AsymmQuant/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_95/fake_quantize/AsymmQuant/add_1"; -"maskrcnn/tf.__operators__.add_95/fake_quantize/AsymmQuant/add_1" -> "maskrcnn/tf.__operators__.add_95/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_95/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_95/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_95/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.stack_17/stack"; -"maskrcnn/tf.math.floor_2/Floor" -> "maskrcnn/tf.math.maximum_11/Maximum"; +"maskrcnn/tf.math.floor_2/Floor" -> "maskrcnn/tf.math.floor_2/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.stack_17/stack" -> "maskrcnn/tf.math.floor_3/Floor"; -"maskrcnn/tf.stack_17/stack" -> "maskrcnn/tf.math.subtract_68/Sub"; +"maskrcnn/tf.stack_17/stack" -> "maskrcnn/tf.math.subtract_68/fake_quantize_I0/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.floor_2/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.floor_2/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.floor_2/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.floor_2/fake_quantize/AsymmQuant/Abs"; +"maskrcnn/tf.math.floor_2/fake_quantize/AsymmQuant/Abs" -> "maskrcnn/tf.math.floor_2/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.floor_2/fake_quantize/AsymmQuant/add/y" -> "maskrcnn/tf.math.floor_2/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.floor_2/fake_quantize/AsymmQuant/add" -> "maskrcnn/tf.math.floor_2/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.floor_2/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.floor_2/fake_quantize/AsymmQuant/ReadVariableOp"; +"maskrcnn/tf.math.floor_2/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.floor_2/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"maskrcnn/tf.math.floor_2/fake_quantize/AsymmQuant/ReadVariableOp" -> "maskrcnn/tf.math.floor_2/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.floor_2/fake_quantize/AsymmQuant/add_1" -> "maskrcnn/tf.math.floor_2/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.floor_2/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.floor_2/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.floor_2/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.math.maximum_11/Maximum"; +"maskrcnn/tf.math.floor_3/Floor" -> "maskrcnn/tf.math.floor_3/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.__operators__.getitem_135/strided_slice/stack" -> "maskrcnn/tf.__operators__.getitem_135/strided_slice"; "maskrcnn/tf.__operators__.getitem_135/strided_slice/stack_1" -> "maskrcnn/tf.__operators__.getitem_135/strided_slice"; "maskrcnn/tf.__operators__.getitem_135/strided_slice/stack_2" -> "maskrcnn/tf.__operators__.getitem_135/strided_slice"; "maskrcnn/tf.__operators__.getitem_135/strided_slice" -> "maskrcnn/tf.expand_dims_31/ExpandDims"; "maskrcnn/tf.math.maximum_11/Maximum" -> 
"maskrcnn/tf.math.maximum_11/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.floor_3/Floor" -> "maskrcnn/tf.math.maximum_10/Maximum"; +"maskrcnn/tf.math.floor_3/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.floor_3/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.floor_3/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.floor_3/fake_quantize/AsymmQuant/Abs"; +"maskrcnn/tf.math.floor_3/fake_quantize/AsymmQuant/Abs" -> "maskrcnn/tf.math.floor_3/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.floor_3/fake_quantize/AsymmQuant/add/y" -> "maskrcnn/tf.math.floor_3/fake_quantize/AsymmQuant/add"; +"maskrcnn/tf.math.floor_3/fake_quantize/AsymmQuant/add" -> "maskrcnn/tf.math.floor_3/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.floor_3/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.floor_3/fake_quantize/AsymmQuant/ReadVariableOp"; +"maskrcnn/tf.math.floor_3/fake_quantize/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.floor_3/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"maskrcnn/tf.math.floor_3/fake_quantize/AsymmQuant/ReadVariableOp" -> "maskrcnn/tf.math.floor_3/fake_quantize/AsymmQuant/add_1"; +"maskrcnn/tf.math.floor_3/fake_quantize/AsymmQuant/add_1" -> "maskrcnn/tf.math.floor_3/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.floor_3/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.floor_3/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.floor_3/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.math.maximum_10/Maximum"; "maskrcnn/tf.math.maximum_11/fake_quantize/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.maximum_11/fake_quantize/AsymmQuant/Abs/ReadVariableOp"; "maskrcnn/tf.math.maximum_11/fake_quantize/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.maximum_11/fake_quantize/AsymmQuant/Abs"; "maskrcnn/tf.math.maximum_11/fake_quantize/AsymmQuant/Abs" -> "maskrcnn/tf.math.maximum_11/fake_quantize/AsymmQuant/add"; @@ -15861,6 +15189,17 @@ args_0_1 -> "maskrcnn/image_info/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVar "maskrcnn/tf.__operators__.getitem_136/strided_slice" -> "maskrcnn/tf.expand_dims_32/ExpandDims"; "maskrcnn/tf.__operators__.add_98/y" -> "maskrcnn/tf.__operators__.add_98/AddV2"; "maskrcnn/tf.__operators__.add_98/AddV2" -> "maskrcnn/tf.__operators__.add_98/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_68/fake_quantize_I0/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_68/fake_quantize_I0/AsymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.subtract_68/fake_quantize_I0/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_68/fake_quantize_I0/AsymmQuant/Abs"; +"maskrcnn/tf.math.subtract_68/fake_quantize_I0/AsymmQuant/Abs" -> "maskrcnn/tf.math.subtract_68/fake_quantize_I0/AsymmQuant/add"; +"maskrcnn/tf.math.subtract_68/fake_quantize_I0/AsymmQuant/add/y" -> "maskrcnn/tf.math.subtract_68/fake_quantize_I0/AsymmQuant/add"; +"maskrcnn/tf.math.subtract_68/fake_quantize_I0/AsymmQuant/add" -> "maskrcnn/tf.math.subtract_68/fake_quantize_I0/AsymmQuant/add_1"; +"maskrcnn/tf.math.subtract_68/fake_quantize_I0/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_68/fake_quantize_I0/AsymmQuant/ReadVariableOp"; +"maskrcnn/tf.math.subtract_68/fake_quantize_I0/AsymmQuant/ReadVariableOp/resource" -> 
"maskrcnn/tf.math.subtract_68/fake_quantize_I0/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"maskrcnn/tf.math.subtract_68/fake_quantize_I0/AsymmQuant/ReadVariableOp" -> "maskrcnn/tf.math.subtract_68/fake_quantize_I0/AsymmQuant/add_1"; +"maskrcnn/tf.math.subtract_68/fake_quantize_I0/AsymmQuant/add_1" -> "maskrcnn/tf.math.subtract_68/fake_quantize_I0/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_68/fake_quantize_I0/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.subtract_68/fake_quantize_I0/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_68/fake_quantize_I0/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.math.subtract_68/Sub"; "maskrcnn/tf.math.subtract_68/fake_quantize_I1/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_68/fake_quantize_I1/AsymmQuant/Abs/ReadVariableOp"; "maskrcnn/tf.math.subtract_68/fake_quantize_I1/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_68/fake_quantize_I1/AsymmQuant/Abs"; "maskrcnn/tf.math.subtract_68/fake_quantize_I1/AsymmQuant/Abs" -> "maskrcnn/tf.math.subtract_68/fake_quantize_I1/AsymmQuant/add"; @@ -15872,6 +15211,17 @@ args_0_1 -> "maskrcnn/image_info/fake_quantize/AsymmQuant/FakeQuantWithMinMaxVar "maskrcnn/tf.math.subtract_68/fake_quantize_I1/AsymmQuant/add_1" -> "maskrcnn/tf.math.subtract_68/fake_quantize_I1/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_68/fake_quantize_I1/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.subtract_68/fake_quantize_I1/AsymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_68/fake_quantize_I1/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.math.subtract_68/Sub"; +"maskrcnn/tf.math.subtract_67/fake_quantize_I0/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_67/fake_quantize_I0/AsymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.subtract_67/fake_quantize_I0/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_67/fake_quantize_I0/AsymmQuant/Abs"; +"maskrcnn/tf.math.subtract_67/fake_quantize_I0/AsymmQuant/Abs" -> "maskrcnn/tf.math.subtract_67/fake_quantize_I0/AsymmQuant/add"; +"maskrcnn/tf.math.subtract_67/fake_quantize_I0/AsymmQuant/add/y" -> "maskrcnn/tf.math.subtract_67/fake_quantize_I0/AsymmQuant/add"; +"maskrcnn/tf.math.subtract_67/fake_quantize_I0/AsymmQuant/add" -> "maskrcnn/tf.math.subtract_67/fake_quantize_I0/AsymmQuant/add_1"; +"maskrcnn/tf.math.subtract_67/fake_quantize_I0/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_67/fake_quantize_I0/AsymmQuant/ReadVariableOp"; +"maskrcnn/tf.math.subtract_67/fake_quantize_I0/AsymmQuant/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_67/fake_quantize_I0/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp"; +"maskrcnn/tf.math.subtract_67/fake_quantize_I0/AsymmQuant/ReadVariableOp" -> "maskrcnn/tf.math.subtract_67/fake_quantize_I0/AsymmQuant/add_1"; +"maskrcnn/tf.math.subtract_67/fake_quantize_I0/AsymmQuant/add_1" -> "maskrcnn/tf.math.subtract_67/fake_quantize_I0/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_67/fake_quantize_I0/AsymmQuant/FakeQuantWithMinMaxVars/ReadVariableOp" -> "maskrcnn/tf.math.subtract_67/fake_quantize_I0/AsymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_67/fake_quantize_I0/AsymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.math.subtract_67/Sub"; "maskrcnn/tf.math.subtract_67/fake_quantize_I1/AsymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_67/fake_quantize_I1/AsymmQuant/Abs/ReadVariableOp"; 
"maskrcnn/tf.math.subtract_67/fake_quantize_I1/AsymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_67/fake_quantize_I1/AsymmQuant/Abs"; "maskrcnn/tf.math.subtract_67/fake_quantize_I1/AsymmQuant/Abs" -> "maskrcnn/tf.math.subtract_67/fake_quantize_I1/AsymmQuant/add"; diff --git a/tests/tensorflow/data/reference_graphs/2.8/quantized/w_sym_t_a_sym_t/mask_rcnn.dot b/tests/tensorflow/data/reference_graphs/2.8/quantized/w_sym_t_a_sym_t/mask_rcnn.dot index e2d61566ef4..8ee854eb0b4 100644 --- a/tests/tensorflow/data/reference_graphs/2.8/quantized/w_sym_t_a_sym_t/mask_rcnn.dot +++ b/tests/tensorflow/data/reference_graphs/2.8/quantized/w_sym_t_a_sym_t/mask_rcnn.dot @@ -2124,43 +2124,51 @@ args_0_1 [op=Placeholder]; "maskrcnn/p2-bn/FusedBatchNormV3/ReadVariableOp_1/resource" [op=Placeholder]; "maskrcnn/p2-bn/FusedBatchNormV3/ReadVariableOp_1" [op=ReadVariableOp]; "maskrcnn/p2-bn/FusedBatchNormV3" [op=FusedBatchNormV3]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant/Abs" [op=Abs]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant/add/y" [op=Const]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant/add" [op=AddV2]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant/mul" [op=Mul]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_1/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_1/Abs" [op=Abs]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_1/add/y" [op=Const]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_1/add" [op=AddV2]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_1/mul/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_1/mul" [op=Mul]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_2/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_2/Abs" [op=Abs]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_2/add/y" [op=Const]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_2/add" [op=AddV2]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_2/mul/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_2/mul" [op=Mul]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_2/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_3/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_3/Abs" [op=Abs]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_3/add/y" [op=Const]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_3/add" [op=AddV2]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_3/mul/ReadVariableOp" [op=ReadVariableOp]; 
-"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_3/mul" [op=Mul]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_3/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_4/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_4/Abs" [op=Abs]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_4/add/y" [op=Const]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_4/add" [op=AddV2]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_4/mul/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_4/mul" [op=Mul]; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_4/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/p6-bn/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/p6-bn/fake_quantize/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/p6-bn/fake_quantize/SymmQuant/Abs" [op=Abs]; +"maskrcnn/p6-bn/fake_quantize/SymmQuant/add/y" [op=Const]; +"maskrcnn/p6-bn/fake_quantize/SymmQuant/add" [op=AddV2]; +"maskrcnn/p6-bn/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/p6-bn/fake_quantize/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/p6-bn/fake_quantize/SymmQuant/mul" [op=Mul]; +"maskrcnn/p6-bn/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/p5-bn/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/p5-bn/fake_quantize/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/p5-bn/fake_quantize/SymmQuant/Abs" [op=Abs]; +"maskrcnn/p5-bn/fake_quantize/SymmQuant/add/y" [op=Const]; +"maskrcnn/p5-bn/fake_quantize/SymmQuant/add" [op=AddV2]; +"maskrcnn/p5-bn/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/p5-bn/fake_quantize/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/p5-bn/fake_quantize/SymmQuant/mul" [op=Mul]; +"maskrcnn/p5-bn/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/p4-bn/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/p4-bn/fake_quantize/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/p4-bn/fake_quantize/SymmQuant/Abs" [op=Abs]; +"maskrcnn/p4-bn/fake_quantize/SymmQuant/add/y" [op=Const]; +"maskrcnn/p4-bn/fake_quantize/SymmQuant/add" [op=AddV2]; +"maskrcnn/p4-bn/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/p4-bn/fake_quantize/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/p4-bn/fake_quantize/SymmQuant/mul" [op=Mul]; +"maskrcnn/p4-bn/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/p3-bn/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/p3-bn/fake_quantize/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/p3-bn/fake_quantize/SymmQuant/Abs" [op=Abs]; +"maskrcnn/p3-bn/fake_quantize/SymmQuant/add/y" [op=Const]; +"maskrcnn/p3-bn/fake_quantize/SymmQuant/add" [op=AddV2]; +"maskrcnn/p3-bn/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/p3-bn/fake_quantize/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/p3-bn/fake_quantize/SymmQuant/mul" [op=Mul]; +"maskrcnn/p3-bn/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; 
+"maskrcnn/p2-bn/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/p2-bn/fake_quantize/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/p2-bn/fake_quantize/SymmQuant/Abs" [op=Abs]; +"maskrcnn/p2-bn/fake_quantize/SymmQuant/add/y" [op=Const]; +"maskrcnn/p2-bn/fake_quantize/SymmQuant/add" [op=AddV2]; +"maskrcnn/p2-bn/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/p2-bn/fake_quantize/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/p2-bn/fake_quantize/SymmQuant/mul" [op=Mul]; +"maskrcnn/p2-bn/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; "maskrcnn/rpn/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; "maskrcnn/rpn/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; "maskrcnn/rpn/SymmQuant/Abs" [op=Abs]; @@ -2329,51 +2337,51 @@ args_0_1 [op=Placeholder]; "maskrcnn/rpn-box/Conv2D_4" [op=Conv2D]; "maskrcnn/rpn-box/BiasAdd_4/ReadVariableOp" [op=ReadVariableOp]; "maskrcnn/rpn-box/BiasAdd_4" [op=BiasAdd]; -"maskrcnn/rpn-box/fake_quantize_4/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/rpn-box/fake_quantize_4/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/rpn-box/fake_quantize_4/SymmQuant/Abs" [op=Abs]; -"maskrcnn/rpn-box/fake_quantize_4/SymmQuant/add/y" [op=Const]; -"maskrcnn/rpn-box/fake_quantize_4/SymmQuant/add" [op=AddV2]; -"maskrcnn/rpn-box/fake_quantize_4/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/rpn-box/fake_quantize_4/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/rpn-box/fake_quantize_4/SymmQuant/mul" [op=Mul]; -"maskrcnn/rpn-box/fake_quantize_4/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/rpn-box/fake_quantize_3/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/rpn-box/fake_quantize_3/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/rpn-box/fake_quantize_3/SymmQuant/Abs" [op=Abs]; -"maskrcnn/rpn-box/fake_quantize_3/SymmQuant/add/y" [op=Const]; -"maskrcnn/rpn-box/fake_quantize_3/SymmQuant/add" [op=AddV2]; -"maskrcnn/rpn-box/fake_quantize_3/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/rpn-box/fake_quantize_3/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/rpn-box/fake_quantize_3/SymmQuant/mul" [op=Mul]; -"maskrcnn/rpn-box/fake_quantize_3/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/rpn-box/fake_quantize_2/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/rpn-box/fake_quantize_2/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/rpn-box/fake_quantize_2/SymmQuant/Abs" [op=Abs]; -"maskrcnn/rpn-box/fake_quantize_2/SymmQuant/add/y" [op=Const]; -"maskrcnn/rpn-box/fake_quantize_2/SymmQuant/add" [op=AddV2]; -"maskrcnn/rpn-box/fake_quantize_2/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/rpn-box/fake_quantize_2/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/rpn-box/fake_quantize_2/SymmQuant/mul" [op=Mul]; -"maskrcnn/rpn-box/fake_quantize_2/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/rpn-box/fake_quantize_1/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/rpn-box/fake_quantize_1/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/rpn-box/fake_quantize_1/SymmQuant/Abs" [op=Abs]; -"maskrcnn/rpn-box/fake_quantize_1/SymmQuant/add/y" [op=Const]; -"maskrcnn/rpn-box/fake_quantize_1/SymmQuant/add" [op=AddV2]; 
-"maskrcnn/rpn-box/fake_quantize_1/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/rpn-box/fake_quantize_1/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/rpn-box/fake_quantize_1/SymmQuant/mul" [op=Mul]; -"maskrcnn/rpn-box/fake_quantize_1/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/rpn-box/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/rpn-box/fake_quantize/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/rpn-box/fake_quantize/SymmQuant/Abs" [op=Abs]; -"maskrcnn/rpn-box/fake_quantize/SymmQuant/add/y" [op=Const]; -"maskrcnn/rpn-box/fake_quantize/SymmQuant/add" [op=AddV2]; -"maskrcnn/rpn-box/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/rpn-box/fake_quantize/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/rpn-box/fake_quantize/SymmQuant/mul" [op=Mul]; -"maskrcnn/rpn-box/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.reshape_9/fake_quantize_I0/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.reshape_9/fake_quantize_I0/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.reshape_9/fake_quantize_I0/SymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.reshape_9/fake_quantize_I0/SymmQuant/add/y" [op=Const]; +"maskrcnn/tf.reshape_9/fake_quantize_I0/SymmQuant/add" [op=AddV2]; +"maskrcnn/tf.reshape_9/fake_quantize_I0/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.reshape_9/fake_quantize_I0/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.reshape_9/fake_quantize_I0/SymmQuant/mul" [op=Mul]; +"maskrcnn/tf.reshape_9/fake_quantize_I0/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.reshape_7/fake_quantize_I0/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.reshape_7/fake_quantize_I0/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.reshape_7/fake_quantize_I0/SymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.reshape_7/fake_quantize_I0/SymmQuant/add/y" [op=Const]; +"maskrcnn/tf.reshape_7/fake_quantize_I0/SymmQuant/add" [op=AddV2]; +"maskrcnn/tf.reshape_7/fake_quantize_I0/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.reshape_7/fake_quantize_I0/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.reshape_7/fake_quantize_I0/SymmQuant/mul" [op=Mul]; +"maskrcnn/tf.reshape_7/fake_quantize_I0/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.reshape_5/fake_quantize_I0/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.reshape_5/fake_quantize_I0/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.reshape_5/fake_quantize_I0/SymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.reshape_5/fake_quantize_I0/SymmQuant/add/y" [op=Const]; +"maskrcnn/tf.reshape_5/fake_quantize_I0/SymmQuant/add" [op=AddV2]; +"maskrcnn/tf.reshape_5/fake_quantize_I0/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.reshape_5/fake_quantize_I0/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.reshape_5/fake_quantize_I0/SymmQuant/mul" [op=Mul]; +"maskrcnn/tf.reshape_5/fake_quantize_I0/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.reshape_3/fake_quantize_I0/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.reshape_3/fake_quantize_I0/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.reshape_3/fake_quantize_I0/SymmQuant/Abs" [op=Abs]; 
+"maskrcnn/tf.reshape_3/fake_quantize_I0/SymmQuant/add/y" [op=Const]; +"maskrcnn/tf.reshape_3/fake_quantize_I0/SymmQuant/add" [op=AddV2]; +"maskrcnn/tf.reshape_3/fake_quantize_I0/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.reshape_3/fake_quantize_I0/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.reshape_3/fake_quantize_I0/SymmQuant/mul" [op=Mul]; +"maskrcnn/tf.reshape_3/fake_quantize_I0/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.reshape_1/fake_quantize_I0/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.reshape_1/fake_quantize_I0/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.reshape_1/fake_quantize_I0/SymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.reshape_1/fake_quantize_I0/SymmQuant/add/y" [op=Const]; +"maskrcnn/tf.reshape_1/fake_quantize_I0/SymmQuant/add" [op=AddV2]; +"maskrcnn/tf.reshape_1/fake_quantize_I0/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.reshape_1/fake_quantize_I0/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.reshape_1/fake_quantize_I0/SymmQuant/mul" [op=Mul]; +"maskrcnn/tf.reshape_1/fake_quantize_I0/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; "maskrcnn/tf.reshape_9/Reshape/shape" [op=Const]; "maskrcnn/tf.reshape_9/Reshape" [op=Reshape]; "maskrcnn/tf.reshape_7/Reshape/shape" [op=Const]; @@ -3039,86 +3047,96 @@ args_0_1 [op=Placeholder]; "maskrcnn/tf.math.subtract/Sub" [op=Sub]; "maskrcnn/tf.expand_dims/ExpandDims/dim" [op=Const]; "maskrcnn/tf.expand_dims/ExpandDims" [op=ExpandDims]; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant/Abs" [op=Abs]; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant/add/y" [op=Const]; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant/add" [op=AddV2]; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant/mul" [op=Mul]; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_1/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_1/Abs" [op=Abs]; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_1/add/y" [op=Const]; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_1/add" [op=AddV2]; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_1/mul/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_1/mul" [op=Mul]; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; 
-"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant/Abs" [op=Abs]; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant/add/y" [op=Const]; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant/add" [op=AddV2]; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant/mul" [op=Mul]; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_1/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_1/Abs" [op=Abs]; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_1/add/y" [op=Const]; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_1/add" [op=AddV2]; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_1/mul/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_1/mul" [op=Mul]; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant/Abs" [op=Abs]; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant/add/y" [op=Const]; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant/add" [op=AddV2]; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant/mul" [op=Mul]; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_1/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_1/Abs" [op=Abs]; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_1/add/y" [op=Const]; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_1/add" [op=AddV2]; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_1/mul/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_1/mul" [op=Mul]; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant/Abs" [op=Abs]; 
-"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant/add/y" [op=Const]; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant/add" [op=AddV2]; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant/mul" [op=Mul]; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_1/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_1/Abs" [op=Abs]; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_1/add/y" [op=Const]; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_1/add" [op=AddV2]; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_1/mul/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_1/mul" [op=Mul]; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant/Abs" [op=Abs]; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant/add/y" [op=Const]; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant/add" [op=AddV2]; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant/mul" [op=Mul]; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_1/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_1/Abs" [op=Abs]; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_1/add/y" [op=Const]; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_1/add" [op=AddV2]; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_1/mul/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_1/mul" [op=Mul]; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.math.subtract_32/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_32/fake_quantize/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_32/fake_quantize/SymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.subtract_32/fake_quantize/SymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.subtract_32/fake_quantize/SymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.subtract_32/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; 
+"maskrcnn/tf.math.subtract_32/fake_quantize/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_32/fake_quantize/SymmQuant/mul" [op=Mul]; +"maskrcnn/tf.math.subtract_32/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.math.subtract_33/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_33/fake_quantize/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_33/fake_quantize/SymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.subtract_33/fake_quantize/SymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.subtract_33/fake_quantize/SymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.subtract_33/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_33/fake_quantize/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_33/fake_quantize/SymmQuant/mul" [op=Mul]; +"maskrcnn/tf.math.subtract_33/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.math.subtract_24/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_24/fake_quantize/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_24/fake_quantize/SymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.subtract_24/fake_quantize/SymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.subtract_24/fake_quantize/SymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.subtract_24/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_24/fake_quantize/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_24/fake_quantize/SymmQuant/mul" [op=Mul]; +"maskrcnn/tf.math.subtract_24/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.math.subtract_25/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_25/fake_quantize/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_25/fake_quantize/SymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.subtract_25/fake_quantize/SymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.subtract_25/fake_quantize/SymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.subtract_25/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_25/fake_quantize/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_25/fake_quantize/SymmQuant/mul" [op=Mul]; +"maskrcnn/tf.math.subtract_25/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.math.subtract_16/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_16/fake_quantize/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_16/fake_quantize/SymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.subtract_16/fake_quantize/SymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.subtract_16/fake_quantize/SymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.subtract_16/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_16/fake_quantize/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_16/fake_quantize/SymmQuant/mul" [op=Mul]; +"maskrcnn/tf.math.subtract_16/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.math.subtract_17/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; 
+"maskrcnn/tf.math.subtract_17/fake_quantize/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_17/fake_quantize/SymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.subtract_17/fake_quantize/SymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.subtract_17/fake_quantize/SymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.subtract_17/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_17/fake_quantize/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_17/fake_quantize/SymmQuant/mul" [op=Mul]; +"maskrcnn/tf.math.subtract_17/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.math.subtract_8/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_8/fake_quantize/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_8/fake_quantize/SymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.subtract_8/fake_quantize/SymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.subtract_8/fake_quantize/SymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.subtract_8/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_8/fake_quantize/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_8/fake_quantize/SymmQuant/mul" [op=Mul]; +"maskrcnn/tf.math.subtract_8/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.math.subtract_9/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_9/fake_quantize/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_9/fake_quantize/SymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.subtract_9/fake_quantize/SymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.subtract_9/fake_quantize/SymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.subtract_9/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_9/fake_quantize/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_9/fake_quantize/SymmQuant/mul" [op=Mul]; +"maskrcnn/tf.math.subtract_9/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.math.subtract/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract/fake_quantize/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract/fake_quantize/SymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.subtract/fake_quantize/SymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.subtract/fake_quantize/SymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.subtract/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract/fake_quantize/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract/fake_quantize/SymmQuant/mul" [op=Mul]; +"maskrcnn/tf.math.subtract/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.math.subtract_1/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_1/fake_quantize/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_1/fake_quantize/SymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.subtract_1/fake_quantize/SymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.subtract_1/fake_quantize/SymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.subtract_1/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; 
+"maskrcnn/tf.math.subtract_1/fake_quantize/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_1/fake_quantize/SymmQuant/mul" [op=Mul]; +"maskrcnn/tf.math.subtract_1/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; "maskrcnn/tf.__operators__.add_38/AddV2" [op=AddV2]; "maskrcnn/tf.__operators__.add_37/AddV2" [op=AddV2]; "maskrcnn/tf.__operators__.add_34/AddV2" [op=AddV2]; @@ -3320,20 +3338,24 @@ args_0_1 [op=Placeholder]; "maskrcnn/tf.math.subtract_39/fake_quantize/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; "maskrcnn/tf.math.subtract_39/fake_quantize/SymmQuant/mul" [op=Mul]; "maskrcnn/tf.math.subtract_39/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_2/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_2/Abs" [op=Abs]; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_2/add/y" [op=Const]; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_2/add" [op=AddV2]; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_2/mul/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_2/mul" [op=Mul]; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_2/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_3/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_3/Abs" [op=Abs]; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_3/add/y" [op=Const]; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_3/add" [op=AddV2]; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_3/mul/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_3/mul" [op=Mul]; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_3/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.math.subtract_34/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_34/fake_quantize/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_34/fake_quantize/SymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.subtract_34/fake_quantize/SymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.subtract_34/fake_quantize/SymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.subtract_34/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_34/fake_quantize/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_34/fake_quantize/SymmQuant/mul" [op=Mul]; +"maskrcnn/tf.math.subtract_34/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.math.subtract_35/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_35/fake_quantize/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_35/fake_quantize/SymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.subtract_35/fake_quantize/SymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.subtract_35/fake_quantize/SymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.subtract_35/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; 
+"maskrcnn/tf.math.subtract_35/fake_quantize/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_35/fake_quantize/SymmQuant/mul" [op=Mul]; +"maskrcnn/tf.math.subtract_35/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; "maskrcnn/tf.math.subtract_28/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; "maskrcnn/tf.math.subtract_28/fake_quantize/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; "maskrcnn/tf.math.subtract_28/fake_quantize/SymmQuant/Abs" [op=Abs]; @@ -3370,20 +3392,24 @@ args_0_1 [op=Placeholder]; "maskrcnn/tf.math.subtract_31/fake_quantize/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; "maskrcnn/tf.math.subtract_31/fake_quantize/SymmQuant/mul" [op=Mul]; "maskrcnn/tf.math.subtract_31/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_2/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_2/Abs" [op=Abs]; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_2/add/y" [op=Const]; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_2/add" [op=AddV2]; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_2/mul/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_2/mul" [op=Mul]; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_2/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_3/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_3/Abs" [op=Abs]; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_3/add/y" [op=Const]; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_3/add" [op=AddV2]; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_3/mul/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_3/mul" [op=Mul]; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_3/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.math.subtract_26/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_26/fake_quantize/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_26/fake_quantize/SymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.subtract_26/fake_quantize/SymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.subtract_26/fake_quantize/SymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.subtract_26/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_26/fake_quantize/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_26/fake_quantize/SymmQuant/mul" [op=Mul]; +"maskrcnn/tf.math.subtract_26/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.math.subtract_27/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_27/fake_quantize/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_27/fake_quantize/SymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.subtract_27/fake_quantize/SymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.subtract_27/fake_quantize/SymmQuant/add" [op=AddV2]; 
+"maskrcnn/tf.math.subtract_27/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_27/fake_quantize/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_27/fake_quantize/SymmQuant/mul" [op=Mul]; +"maskrcnn/tf.math.subtract_27/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; "maskrcnn/tf.math.subtract_20/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; "maskrcnn/tf.math.subtract_20/fake_quantize/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; "maskrcnn/tf.math.subtract_20/fake_quantize/SymmQuant/Abs" [op=Abs]; @@ -3420,20 +3446,24 @@ args_0_1 [op=Placeholder]; "maskrcnn/tf.math.subtract_23/fake_quantize/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; "maskrcnn/tf.math.subtract_23/fake_quantize/SymmQuant/mul" [op=Mul]; "maskrcnn/tf.math.subtract_23/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_2/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_2/Abs" [op=Abs]; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_2/add/y" [op=Const]; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_2/add" [op=AddV2]; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_2/mul/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_2/mul" [op=Mul]; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_2/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_3/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_3/Abs" [op=Abs]; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_3/add/y" [op=Const]; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_3/add" [op=AddV2]; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_3/mul/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_3/mul" [op=Mul]; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_3/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.math.subtract_18/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_18/fake_quantize/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_18/fake_quantize/SymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.subtract_18/fake_quantize/SymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.subtract_18/fake_quantize/SymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.subtract_18/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_18/fake_quantize/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_18/fake_quantize/SymmQuant/mul" [op=Mul]; +"maskrcnn/tf.math.subtract_18/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.math.subtract_19/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_19/fake_quantize/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_19/fake_quantize/SymmQuant/Abs" [op=Abs]; 
+"maskrcnn/tf.math.subtract_19/fake_quantize/SymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.subtract_19/fake_quantize/SymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.subtract_19/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_19/fake_quantize/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_19/fake_quantize/SymmQuant/mul" [op=Mul]; +"maskrcnn/tf.math.subtract_19/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; "maskrcnn/tf.math.subtract_12/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; "maskrcnn/tf.math.subtract_12/fake_quantize/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; "maskrcnn/tf.math.subtract_12/fake_quantize/SymmQuant/Abs" [op=Abs]; @@ -3470,20 +3500,24 @@ args_0_1 [op=Placeholder]; "maskrcnn/tf.math.subtract_15/fake_quantize/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; "maskrcnn/tf.math.subtract_15/fake_quantize/SymmQuant/mul" [op=Mul]; "maskrcnn/tf.math.subtract_15/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_2/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_2/Abs" [op=Abs]; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_2/add/y" [op=Const]; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_2/add" [op=AddV2]; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_2/mul/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_2/mul" [op=Mul]; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_2/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_3/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_3/Abs" [op=Abs]; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_3/add/y" [op=Const]; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_3/add" [op=AddV2]; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_3/mul/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_3/mul" [op=Mul]; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_3/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.math.subtract_10/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_10/fake_quantize/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_10/fake_quantize/SymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.subtract_10/fake_quantize/SymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.subtract_10/fake_quantize/SymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.subtract_10/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_10/fake_quantize/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_10/fake_quantize/SymmQuant/mul" [op=Mul]; +"maskrcnn/tf.math.subtract_10/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.math.subtract_11/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_11/fake_quantize/SymmQuant/Abs/ReadVariableOp" 
[op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_11/fake_quantize/SymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.subtract_11/fake_quantize/SymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.subtract_11/fake_quantize/SymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.subtract_11/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_11/fake_quantize/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_11/fake_quantize/SymmQuant/mul" [op=Mul]; +"maskrcnn/tf.math.subtract_11/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; "maskrcnn/tf.math.subtract_4/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; "maskrcnn/tf.math.subtract_4/fake_quantize/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; "maskrcnn/tf.math.subtract_4/fake_quantize/SymmQuant/Abs" [op=Abs]; @@ -3520,20 +3554,24 @@ args_0_1 [op=Placeholder]; "maskrcnn/tf.math.subtract_7/fake_quantize/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; "maskrcnn/tf.math.subtract_7/fake_quantize/SymmQuant/mul" [op=Mul]; "maskrcnn/tf.math.subtract_7/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_2/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_2/Abs" [op=Abs]; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_2/add/y" [op=Const]; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_2/add" [op=AddV2]; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_2/mul/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_2/mul" [op=Mul]; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_2/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_3/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_3/Abs" [op=Abs]; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_3/add/y" [op=Const]; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_3/add" [op=AddV2]; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_3/mul/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_3/mul" [op=Mul]; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_3/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.math.subtract_2/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_2/fake_quantize/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_2/fake_quantize/SymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.subtract_2/fake_quantize/SymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.subtract_2/fake_quantize/SymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.subtract_2/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_2/fake_quantize/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_2/fake_quantize/SymmQuant/mul" [op=Mul]; +"maskrcnn/tf.math.subtract_2/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.math.subtract_3/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; 
+"maskrcnn/tf.math.subtract_3/fake_quantize/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_3/fake_quantize/SymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.subtract_3/fake_quantize/SymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.subtract_3/fake_quantize/SymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.subtract_3/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_3/fake_quantize/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_3/fake_quantize/SymmQuant/mul" [op=Mul]; +"maskrcnn/tf.math.subtract_3/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; "maskrcnn/tf.concat_4/concat/axis" [op=Const]; "maskrcnn/tf.concat_4/concat" [op=ConcatV2]; "maskrcnn/tf.stack_4/stack" [op=Pack]; @@ -3750,43 +3788,51 @@ args_0_1 [op=Placeholder]; "maskrcnn/tf.ones/ones/Const" [op=Const]; "maskrcnn/tf.ones/ones" [op=Fill]; "maskrcnn/tf.math.multiply_30/Mul" [op=Mul]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant/Abs" [op=Abs]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant/add/y" [op=Const]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant/add" [op=AddV2]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant/mul" [op=Mul]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_1/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_1/Abs" [op=Abs]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_1/add/y" [op=Const]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_1/add" [op=AddV2]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_1/mul/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_1/mul" [op=Mul]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_2/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_2/Abs" [op=Abs]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_2/add/y" [op=Const]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_2/add" [op=AddV2]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_2/mul/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_2/mul" [op=Mul]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_2/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_3/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_3/Abs" [op=Abs]; 
-"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_3/add/y" [op=Const]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_3/add" [op=AddV2]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_3/mul/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_3/mul" [op=Mul]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_3/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_4/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_4/Abs" [op=Abs]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_4/add/y" [op=Const]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_4/add" [op=AddV2]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_4/mul/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_4/mul" [op=Mul]; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_4/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.concat_5/fake_quantize_I0/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.concat_5/fake_quantize_I0/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.concat_5/fake_quantize_I0/SymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.concat_5/fake_quantize_I0/SymmQuant/add/y" [op=Const]; +"maskrcnn/tf.concat_5/fake_quantize_I0/SymmQuant/add" [op=AddV2]; +"maskrcnn/tf.concat_5/fake_quantize_I0/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.concat_5/fake_quantize_I0/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.concat_5/fake_quantize_I0/SymmQuant/mul" [op=Mul]; +"maskrcnn/tf.concat_5/fake_quantize_I0/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.concat_5/fake_quantize_I1/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.concat_5/fake_quantize_I1/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.concat_5/fake_quantize_I1/SymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.concat_5/fake_quantize_I1/SymmQuant/add/y" [op=Const]; +"maskrcnn/tf.concat_5/fake_quantize_I1/SymmQuant/add" [op=AddV2]; +"maskrcnn/tf.concat_5/fake_quantize_I1/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.concat_5/fake_quantize_I1/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.concat_5/fake_quantize_I1/SymmQuant/mul" [op=Mul]; +"maskrcnn/tf.concat_5/fake_quantize_I1/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.concat_5/fake_quantize_I2/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.concat_5/fake_quantize_I2/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.concat_5/fake_quantize_I2/SymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.concat_5/fake_quantize_I2/SymmQuant/add/y" [op=Const]; +"maskrcnn/tf.concat_5/fake_quantize_I2/SymmQuant/add" [op=AddV2]; +"maskrcnn/tf.concat_5/fake_quantize_I2/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.concat_5/fake_quantize_I2/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.concat_5/fake_quantize_I2/SymmQuant/mul" [op=Mul]; +"maskrcnn/tf.concat_5/fake_quantize_I2/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.concat_5/fake_quantize_I3/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; 
+"maskrcnn/tf.concat_5/fake_quantize_I3/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.concat_5/fake_quantize_I3/SymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.concat_5/fake_quantize_I3/SymmQuant/add/y" [op=Const]; +"maskrcnn/tf.concat_5/fake_quantize_I3/SymmQuant/add" [op=AddV2]; +"maskrcnn/tf.concat_5/fake_quantize_I3/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.concat_5/fake_quantize_I3/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.concat_5/fake_quantize_I3/SymmQuant/mul" [op=Mul]; +"maskrcnn/tf.concat_5/fake_quantize_I3/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.concat_5/fake_quantize_I4/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.concat_5/fake_quantize_I4/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.concat_5/fake_quantize_I4/SymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.concat_5/fake_quantize_I4/SymmQuant/add/y" [op=Const]; +"maskrcnn/tf.concat_5/fake_quantize_I4/SymmQuant/add" [op=AddV2]; +"maskrcnn/tf.concat_5/fake_quantize_I4/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.concat_5/fake_quantize_I4/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.concat_5/fake_quantize_I4/SymmQuant/mul" [op=Mul]; +"maskrcnn/tf.concat_5/fake_quantize_I4/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; "maskrcnn/tf.concat_5/concat/axis" [op=Const]; "maskrcnn/tf.concat_5/concat" [op=ConcatV2]; "maskrcnn/tf.stack_5/stack" [op=Pack]; @@ -3903,29 +3949,33 @@ args_0_1 [op=Placeholder]; "maskrcnn/tf.math.truediv_1/truediv" [op=RealDiv]; "maskrcnn/tf.math.subtract_42/Sub/y" [op=Const]; "maskrcnn/tf.math.subtract_42/Sub" [op=Sub]; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant/Abs" [op=Abs]; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant/add/y" [op=Const]; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant/add" [op=AddV2]; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant/mul" [op=Mul]; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant_1/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant_1/Abs" [op=Abs]; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant_1/add/y" [op=Const]; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant_1/add" [op=AddV2]; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant_1/mul/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant_1/mul" [op=Mul]; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant_2/Abs/ReadVariableOp" [op=ReadVariableOp]; 
-"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant_2/Abs" [op=Abs]; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant_2/add/y" [op=Const]; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant_2/add" [op=AddV2]; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant_2/mul/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant_2/mul" [op=Mul]; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant_2/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.math.truediv_2/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.truediv_2/fake_quantize/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.truediv_2/fake_quantize/SymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.truediv_2/fake_quantize/SymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.truediv_2/fake_quantize/SymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.truediv_2/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.truediv_2/fake_quantize/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.truediv_2/fake_quantize/SymmQuant/mul" [op=Mul]; +"maskrcnn/tf.math.truediv_2/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.math.truediv_3/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.truediv_3/fake_quantize/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.truediv_3/fake_quantize/SymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.truediv_3/fake_quantize/SymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.truediv_3/fake_quantize/SymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.truediv_3/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.truediv_3/fake_quantize/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.truediv_3/fake_quantize/SymmQuant/mul" [op=Mul]; +"maskrcnn/tf.math.truediv_3/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.math.truediv_1/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.truediv_1/fake_quantize/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.truediv_1/fake_quantize/SymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.truediv_1/fake_quantize/SymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.truediv_1/fake_quantize/SymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.truediv_1/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.truediv_1/fake_quantize/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.truediv_1/fake_quantize/SymmQuant/mul" [op=Mul]; +"maskrcnn/tf.math.truediv_1/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; "maskrcnn/tf.cast_23/Cast" [op=Cast]; "maskrcnn/tf.__operators__.getitem_27/strided_slice/stack" [op=Const]; "maskrcnn/tf.__operators__.getitem_27/strided_slice/stack_1" [op=Const]; @@ -4301,29 +4351,32 @@ args_0_1 [op=Placeholder]; "maskrcnn/tf.math.truediv_8/truediv" [op=RealDiv]; "maskrcnn/tf.math.truediv_6/truediv/y" [op=Const]; "maskrcnn/tf.math.truediv_6/truediv" [op=RealDiv]; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp" 
[op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/SymmQuant/Abs" [op=Abs]; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/SymmQuant/add/y" [op=Const]; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/SymmQuant/add" [op=AddV2]; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/SymmQuant/mul" [op=Mul]; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/SymmQuant_1/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/SymmQuant_1/Abs" [op=Abs]; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/SymmQuant_1/add/y" [op=Const]; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/SymmQuant_1/add" [op=AddV2]; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/SymmQuant_1/mul/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/SymmQuant_1/mul" [op=Mul]; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.__operators__.add_53/AddV2" [op=AddV2]; -"maskrcnn/tf.__operators__.add_51/AddV2" [op=AddV2]; -"maskrcnn/tf.__operators__.add_49/AddV2" [op=AddV2]; -"maskrcnn/tf.__operators__.add_47/AddV2" [op=AddV2]; -"maskrcnn/tf.__operators__.add_45/AddV2" [op=AddV2]; -"maskrcnn/tf.__operators__.add_43/AddV2" [op=AddV2]; +"maskrcnn/tf.math.subtract_44/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_44/fake_quantize/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_44/fake_quantize/SymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.subtract_44/fake_quantize/SymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.subtract_44/fake_quantize/SymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.subtract_44/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_44/fake_quantize/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_44/fake_quantize/SymmQuant/mul" [op=Mul]; +"maskrcnn/tf.math.subtract_44/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.math.subtract_43/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_43/fake_quantize/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_43/fake_quantize/SymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.subtract_43/fake_quantize/SymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.subtract_43/fake_quantize/SymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.subtract_43/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_43/fake_quantize/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_43/fake_quantize/SymmQuant/mul" [op=Mul]; +"maskrcnn/tf.math.subtract_43/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; "maskrcnn/tf.__operators__.add_41/AddV2" [op=AddV2]; +"maskrcnn/tf.__operators__.add_43/AddV2" [op=AddV2]; +"maskrcnn/tf.__operators__.add_45/AddV2" 
[op=AddV2]; +"maskrcnn/tf.__operators__.add_47/AddV2" [op=AddV2]; +"maskrcnn/tf.__operators__.add_49/AddV2" [op=AddV2]; +"maskrcnn/tf.__operators__.add_51/AddV2" [op=AddV2]; +"maskrcnn/tf.__operators__.add_53/AddV2" [op=AddV2]; +"maskrcnn/tf.stack_7/stack" [op=Pack]; "maskrcnn/tf.__operators__.getitem_52/strided_slice/stack" [op=Const]; "maskrcnn/tf.__operators__.getitem_52/strided_slice/stack_1" [op=Const]; "maskrcnn/tf.__operators__.getitem_52/strided_slice/stack_2" [op=Const]; @@ -4419,150 +4472,41 @@ args_0_1 [op=Placeholder]; "maskrcnn/tf.expand_dims_15/ExpandDims" [op=ExpandDims]; "maskrcnn/tf.expand_dims_16/ExpandDims/dim" [op=Const]; "maskrcnn/tf.expand_dims_16/ExpandDims" [op=ExpandDims]; -"maskrcnn/tf.__operators__.add_41/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.__operators__.add_41/fake_quantize/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_41/fake_quantize/SymmQuant/Abs" [op=Abs]; -"maskrcnn/tf.__operators__.add_41/fake_quantize/SymmQuant/add/y" [op=Const]; -"maskrcnn/tf.__operators__.add_41/fake_quantize/SymmQuant/add" [op=AddV2]; -"maskrcnn/tf.__operators__.add_41/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.__operators__.add_41/fake_quantize/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_41/fake_quantize/SymmQuant/mul" [op=Mul]; -"maskrcnn/tf.__operators__.add_41/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.__operators__.add_43/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.__operators__.add_43/fake_quantize/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_43/fake_quantize/SymmQuant/Abs" [op=Abs]; -"maskrcnn/tf.__operators__.add_43/fake_quantize/SymmQuant/add/y" [op=Const]; -"maskrcnn/tf.__operators__.add_43/fake_quantize/SymmQuant/add" [op=AddV2]; -"maskrcnn/tf.__operators__.add_43/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.__operators__.add_43/fake_quantize/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_43/fake_quantize/SymmQuant/mul" [op=Mul]; -"maskrcnn/tf.__operators__.add_43/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.__operators__.add_45/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.__operators__.add_45/fake_quantize/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_45/fake_quantize/SymmQuant/Abs" [op=Abs]; -"maskrcnn/tf.__operators__.add_45/fake_quantize/SymmQuant/add/y" [op=Const]; -"maskrcnn/tf.__operators__.add_45/fake_quantize/SymmQuant/add" [op=AddV2]; -"maskrcnn/tf.__operators__.add_45/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.__operators__.add_45/fake_quantize/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_45/fake_quantize/SymmQuant/mul" [op=Mul]; -"maskrcnn/tf.__operators__.add_45/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.__operators__.add_47/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.__operators__.add_47/fake_quantize/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_47/fake_quantize/SymmQuant/Abs" [op=Abs]; -"maskrcnn/tf.__operators__.add_47/fake_quantize/SymmQuant/add/y" [op=Const]; 
-"maskrcnn/tf.__operators__.add_47/fake_quantize/SymmQuant/add" [op=AddV2]; -"maskrcnn/tf.__operators__.add_47/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.__operators__.add_47/fake_quantize/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_47/fake_quantize/SymmQuant/mul" [op=Mul]; -"maskrcnn/tf.__operators__.add_47/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.__operators__.add_49/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.__operators__.add_49/fake_quantize/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_49/fake_quantize/SymmQuant/Abs" [op=Abs]; -"maskrcnn/tf.__operators__.add_49/fake_quantize/SymmQuant/add/y" [op=Const]; -"maskrcnn/tf.__operators__.add_49/fake_quantize/SymmQuant/add" [op=AddV2]; -"maskrcnn/tf.__operators__.add_49/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.__operators__.add_49/fake_quantize/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_49/fake_quantize/SymmQuant/mul" [op=Mul]; -"maskrcnn/tf.__operators__.add_49/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.__operators__.add_51/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.__operators__.add_51/fake_quantize/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_51/fake_quantize/SymmQuant/Abs" [op=Abs]; -"maskrcnn/tf.__operators__.add_51/fake_quantize/SymmQuant/add/y" [op=Const]; -"maskrcnn/tf.__operators__.add_51/fake_quantize/SymmQuant/add" [op=AddV2]; -"maskrcnn/tf.__operators__.add_51/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.__operators__.add_51/fake_quantize/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_51/fake_quantize/SymmQuant/mul" [op=Mul]; -"maskrcnn/tf.__operators__.add_51/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.__operators__.add_53/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.__operators__.add_53/fake_quantize/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_53/fake_quantize/SymmQuant/Abs" [op=Abs]; -"maskrcnn/tf.__operators__.add_53/fake_quantize/SymmQuant/add/y" [op=Const]; -"maskrcnn/tf.__operators__.add_53/fake_quantize/SymmQuant/add" [op=AddV2]; -"maskrcnn/tf.__operators__.add_53/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.__operators__.add_53/fake_quantize/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_53/fake_quantize/SymmQuant/mul" [op=Mul]; -"maskrcnn/tf.__operators__.add_53/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.stack_7/stack" [op=Pack]; -"maskrcnn/tf.__operators__.add_52/AddV2" [op=AddV2]; -"maskrcnn/tf.__operators__.add_50/AddV2" [op=AddV2]; -"maskrcnn/tf.__operators__.add_48/AddV2" [op=AddV2]; -"maskrcnn/tf.__operators__.add_46/AddV2" [op=AddV2]; -"maskrcnn/tf.__operators__.add_44/AddV2" [op=AddV2]; -"maskrcnn/tf.__operators__.add_42/AddV2" [op=AddV2]; "maskrcnn/tf.__operators__.add_40/AddV2" [op=AddV2]; +"maskrcnn/tf.__operators__.add_42/AddV2" [op=AddV2]; +"maskrcnn/tf.__operators__.add_44/AddV2" [op=AddV2]; +"maskrcnn/tf.__operators__.add_46/AddV2" [op=AddV2]; 
+"maskrcnn/tf.__operators__.add_48/AddV2" [op=AddV2]; +"maskrcnn/tf.__operators__.add_50/AddV2" [op=AddV2]; +"maskrcnn/tf.__operators__.add_52/AddV2" [op=AddV2]; "maskrcnn/tf.concat_12/concat/axis" [op=Const]; "maskrcnn/tf.concat_12/concat" [op=ConcatV2]; -"maskrcnn/tf.__operators__.add_40/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.__operators__.add_40/fake_quantize/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_40/fake_quantize/SymmQuant/Abs" [op=Abs]; -"maskrcnn/tf.__operators__.add_40/fake_quantize/SymmQuant/add/y" [op=Const]; -"maskrcnn/tf.__operators__.add_40/fake_quantize/SymmQuant/add" [op=AddV2]; -"maskrcnn/tf.__operators__.add_40/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.__operators__.add_40/fake_quantize/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_40/fake_quantize/SymmQuant/mul" [op=Mul]; -"maskrcnn/tf.__operators__.add_40/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.__operators__.add_42/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.__operators__.add_42/fake_quantize/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_42/fake_quantize/SymmQuant/Abs" [op=Abs]; -"maskrcnn/tf.__operators__.add_42/fake_quantize/SymmQuant/add/y" [op=Const]; -"maskrcnn/tf.__operators__.add_42/fake_quantize/SymmQuant/add" [op=AddV2]; -"maskrcnn/tf.__operators__.add_42/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.__operators__.add_42/fake_quantize/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_42/fake_quantize/SymmQuant/mul" [op=Mul]; -"maskrcnn/tf.__operators__.add_42/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.__operators__.add_44/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.__operators__.add_44/fake_quantize/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_44/fake_quantize/SymmQuant/Abs" [op=Abs]; -"maskrcnn/tf.__operators__.add_44/fake_quantize/SymmQuant/add/y" [op=Const]; -"maskrcnn/tf.__operators__.add_44/fake_quantize/SymmQuant/add" [op=AddV2]; -"maskrcnn/tf.__operators__.add_44/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.__operators__.add_44/fake_quantize/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_44/fake_quantize/SymmQuant/mul" [op=Mul]; -"maskrcnn/tf.__operators__.add_44/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.__operators__.add_46/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.__operators__.add_46/fake_quantize/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_46/fake_quantize/SymmQuant/Abs" [op=Abs]; -"maskrcnn/tf.__operators__.add_46/fake_quantize/SymmQuant/add/y" [op=Const]; -"maskrcnn/tf.__operators__.add_46/fake_quantize/SymmQuant/add" [op=AddV2]; -"maskrcnn/tf.__operators__.add_46/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.__operators__.add_46/fake_quantize/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_46/fake_quantize/SymmQuant/mul" [op=Mul]; -"maskrcnn/tf.__operators__.add_46/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" 
[op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.__operators__.add_48/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.__operators__.add_48/fake_quantize/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_48/fake_quantize/SymmQuant/Abs" [op=Abs]; -"maskrcnn/tf.__operators__.add_48/fake_quantize/SymmQuant/add/y" [op=Const]; -"maskrcnn/tf.__operators__.add_48/fake_quantize/SymmQuant/add" [op=AddV2]; -"maskrcnn/tf.__operators__.add_48/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.__operators__.add_48/fake_quantize/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_48/fake_quantize/SymmQuant/mul" [op=Mul]; -"maskrcnn/tf.__operators__.add_48/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.__operators__.add_50/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.__operators__.add_50/fake_quantize/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_50/fake_quantize/SymmQuant/Abs" [op=Abs]; -"maskrcnn/tf.__operators__.add_50/fake_quantize/SymmQuant/add/y" [op=Const]; -"maskrcnn/tf.__operators__.add_50/fake_quantize/SymmQuant/add" [op=AddV2]; -"maskrcnn/tf.__operators__.add_50/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.__operators__.add_50/fake_quantize/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_50/fake_quantize/SymmQuant/mul" [op=Mul]; -"maskrcnn/tf.__operators__.add_50/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.__operators__.add_52/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.__operators__.add_52/fake_quantize/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_52/fake_quantize/SymmQuant/Abs" [op=Abs]; -"maskrcnn/tf.__operators__.add_52/fake_quantize/SymmQuant/add/y" [op=Const]; -"maskrcnn/tf.__operators__.add_52/fake_quantize/SymmQuant/add" [op=AddV2]; -"maskrcnn/tf.__operators__.add_52/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.__operators__.add_52/fake_quantize/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_52/fake_quantize/SymmQuant/mul" [op=Mul]; -"maskrcnn/tf.__operators__.add_52/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; "maskrcnn/tf.math.floor/Floor" [op=Floor]; "maskrcnn/tf.stack_6/stack" [op=Pack]; +"maskrcnn/tf.math.floor/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.floor/fake_quantize/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.floor/fake_quantize/SymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.floor/fake_quantize/SymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.floor/fake_quantize/SymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.floor/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.floor/fake_quantize/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.floor/fake_quantize/SymmQuant/mul" [op=Mul]; +"maskrcnn/tf.math.floor/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.math.floor_1/Floor" [op=Floor]; "maskrcnn/tf.__operators__.getitem_58/strided_slice/stack" [op=Const]; "maskrcnn/tf.__operators__.getitem_58/strided_slice/stack_1" [op=Const]; 
"maskrcnn/tf.__operators__.getitem_58/strided_slice/stack_2" [op=Const]; "maskrcnn/tf.__operators__.getitem_58/strided_slice" [op=StridedSlice]; "maskrcnn/tf.math.maximum_7/Maximum" [op=Maximum]; -"maskrcnn/tf.math.floor_1/Floor" [op=Floor]; +"maskrcnn/tf.math.floor_1/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.floor_1/fake_quantize/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.floor_1/fake_quantize/SymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.floor_1/fake_quantize/SymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.floor_1/fake_quantize/SymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.floor_1/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.floor_1/fake_quantize/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.floor_1/fake_quantize/SymmQuant/mul" [op=Mul]; +"maskrcnn/tf.math.floor_1/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; "maskrcnn/tf.math.maximum_7/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; "maskrcnn/tf.math.maximum_7/fake_quantize/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; "maskrcnn/tf.math.maximum_7/fake_quantize/SymmQuant/Abs" [op=Abs]; @@ -4607,6 +4551,15 @@ args_0_1 [op=Placeholder]; "maskrcnn/tf.__operators__.getitem_59/strided_slice" [op=StridedSlice]; "maskrcnn/tf.__operators__.add_55/y" [op=Const]; "maskrcnn/tf.__operators__.add_55/AddV2" [op=AddV2]; +"maskrcnn/tf.math.subtract_46/fake_quantize_I0/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_46/fake_quantize_I0/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_46/fake_quantize_I0/SymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.subtract_46/fake_quantize_I0/SymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.subtract_46/fake_quantize_I0/SymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.subtract_46/fake_quantize_I0/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_46/fake_quantize_I0/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_46/fake_quantize_I0/SymmQuant/mul" [op=Mul]; +"maskrcnn/tf.math.subtract_46/fake_quantize_I0/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; "maskrcnn/tf.math.subtract_46/fake_quantize_I1/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; "maskrcnn/tf.math.subtract_46/fake_quantize_I1/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; "maskrcnn/tf.math.subtract_46/fake_quantize_I1/SymmQuant/Abs" [op=Abs]; @@ -4616,6 +4569,15 @@ args_0_1 [op=Placeholder]; "maskrcnn/tf.math.subtract_46/fake_quantize_I1/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; "maskrcnn/tf.math.subtract_46/fake_quantize_I1/SymmQuant/mul" [op=Mul]; "maskrcnn/tf.math.subtract_46/fake_quantize_I1/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.math.subtract_45/fake_quantize_I0/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_45/fake_quantize_I0/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_45/fake_quantize_I0/SymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.subtract_45/fake_quantize_I0/SymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.subtract_45/fake_quantize_I0/SymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.subtract_45/fake_quantize_I0/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_45/fake_quantize_I0/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; 
+"maskrcnn/tf.math.subtract_45/fake_quantize_I0/SymmQuant/mul" [op=Mul]; +"maskrcnn/tf.math.subtract_45/fake_quantize_I0/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; "maskrcnn/tf.math.subtract_45/fake_quantize_I1/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; "maskrcnn/tf.math.subtract_45/fake_quantize_I1/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; "maskrcnn/tf.math.subtract_45/fake_quantize_I1/SymmQuant/Abs" [op=Abs]; @@ -4930,15 +4892,15 @@ args_0_1 [op=Placeholder]; "maskrcnn/box-predict/BiasAdd" [op=BiasAdd]; "maskrcnn/tf.math.subtract_49/Sub/y" [op=Const]; "maskrcnn/tf.math.subtract_49/Sub" [op=Sub]; -"maskrcnn/box-predict/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/box-predict/fake_quantize/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/box-predict/fake_quantize/SymmQuant/Abs" [op=Abs]; -"maskrcnn/box-predict/fake_quantize/SymmQuant/add/y" [op=Const]; -"maskrcnn/box-predict/fake_quantize/SymmQuant/add" [op=AddV2]; -"maskrcnn/box-predict/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/box-predict/fake_quantize/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/box-predict/fake_quantize/SymmQuant/mul" [op=Mul]; -"maskrcnn/box-predict/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.reshape_35/fake_quantize_I0/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.reshape_35/fake_quantize_I0/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.reshape_35/fake_quantize_I0/SymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.reshape_35/fake_quantize_I0/SymmQuant/add/y" [op=Const]; +"maskrcnn/tf.reshape_35/fake_quantize_I0/SymmQuant/add" [op=AddV2]; +"maskrcnn/tf.reshape_35/fake_quantize_I0/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.reshape_35/fake_quantize_I0/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.reshape_35/fake_quantize_I0/SymmQuant/mul" [op=Mul]; +"maskrcnn/tf.reshape_35/fake_quantize_I0/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; "maskrcnn/tf.stack_12/stack" [op=Pack]; "maskrcnn/tf.math.multiply_50/Mul" [op=Mul]; "maskrcnn/tf.expand_dims_23/ExpandDims/dim" [op=Const]; @@ -5226,22 +5188,24 @@ args_0_1 [op=Placeholder]; "maskrcnn/tf.__operators__.getitem_60/strided_slice/stack_1" [op=Const]; "maskrcnn/tf.__operators__.getitem_60/strided_slice/stack_2" [op=Const]; "maskrcnn/tf.__operators__.getitem_60/strided_slice" [op=StridedSlice]; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant/Abs" [op=Abs]; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant/add/y" [op=Const]; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant/add" [op=AddV2]; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant/mul" [op=Mul]; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; 
-"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_1/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_1/Abs" [op=Abs]; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_1/add/y" [op=Const]; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_1/add" [op=AddV2]; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_1/mul/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_1/mul" [op=Mul]; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.math.subtract_53/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_53/fake_quantize/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_53/fake_quantize/SymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.subtract_53/fake_quantize/SymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.subtract_53/fake_quantize/SymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.subtract_53/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_53/fake_quantize/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_53/fake_quantize/SymmQuant/mul" [op=Mul]; +"maskrcnn/tf.math.subtract_53/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.math.subtract_54/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_54/fake_quantize/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_54/fake_quantize/SymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.subtract_54/fake_quantize/SymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.subtract_54/fake_quantize/SymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.subtract_54/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_54/fake_quantize/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_54/fake_quantize/SymmQuant/mul" [op=Mul]; +"maskrcnn/tf.math.subtract_54/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; "maskrcnn/tf.__operators__.add_66/AddV2" [op=AddV2]; "maskrcnn/tf.__operators__.add_65/AddV2" [op=AddV2]; "maskrcnn/tf.unstack_5/unstack" [op=Unpack]; @@ -5311,20 +5275,24 @@ args_0_1 [op=Placeholder]; "maskrcnn/tf.math.subtract_60/fake_quantize/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; "maskrcnn/tf.math.subtract_60/fake_quantize/SymmQuant/mul" [op=Mul]; "maskrcnn/tf.math.subtract_60/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_2/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_2/Abs" [op=Abs]; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_2/add/y" [op=Const]; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_2/add" [op=AddV2]; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_2/mul/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_2/mul" [op=Mul]; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_2/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; 
-"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_3/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_3/Abs" [op=Abs]; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_3/add/y" [op=Const]; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_3/add" [op=AddV2]; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_3/mul/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_3/mul" [op=Mul]; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_3/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.math.subtract_55/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_55/fake_quantize/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_55/fake_quantize/SymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.subtract_55/fake_quantize/SymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.subtract_55/fake_quantize/SymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.subtract_55/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_55/fake_quantize/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_55/fake_quantize/SymmQuant/mul" [op=Mul]; +"maskrcnn/tf.math.subtract_55/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.math.subtract_56/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_56/fake_quantize/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_56/fake_quantize/SymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.subtract_56/fake_quantize/SymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.subtract_56/fake_quantize/SymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.subtract_56/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_56/fake_quantize/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_56/fake_quantize/SymmQuant/mul" [op=Mul]; +"maskrcnn/tf.math.subtract_56/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; "maskrcnn/tf.concat_13/concat/axis" [op=Const]; "maskrcnn/tf.concat_13/concat" [op=ConcatV2]; "maskrcnn/tf.stack_15/stack" [op=Pack]; @@ -5504,29 +5472,33 @@ args_0_1 [op=Placeholder]; "maskrcnn/tf.math.truediv_26/truediv" [op=RealDiv]; "maskrcnn/tf.math.subtract_64/Sub/y" [op=Const]; "maskrcnn/tf.math.subtract_64/Sub" [op=Sub]; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant/Abs" [op=Abs]; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant/add/y" [op=Const]; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant/add" [op=AddV2]; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant/mul" [op=Mul]; 
-"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant_1/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant_1/Abs" [op=Abs]; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant_1/add/y" [op=Const]; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant_1/add" [op=AddV2]; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant_1/mul/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant_1/mul" [op=Mul]; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant_2/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant_2/Abs" [op=Abs]; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant_2/add/y" [op=Const]; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant_2/add" [op=AddV2]; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant_2/mul/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant_2/mul" [op=Mul]; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant_2/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.math.truediv_27/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.truediv_27/fake_quantize/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.truediv_27/fake_quantize/SymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.truediv_27/fake_quantize/SymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.truediv_27/fake_quantize/SymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.truediv_27/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.truediv_27/fake_quantize/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.truediv_27/fake_quantize/SymmQuant/mul" [op=Mul]; +"maskrcnn/tf.math.truediv_27/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.math.truediv_28/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.truediv_28/fake_quantize/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.truediv_28/fake_quantize/SymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.truediv_28/fake_quantize/SymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.truediv_28/fake_quantize/SymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.truediv_28/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.truediv_28/fake_quantize/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.truediv_28/fake_quantize/SymmQuant/mul" [op=Mul]; +"maskrcnn/tf.math.truediv_28/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.math.truediv_26/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.truediv_26/fake_quantize/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.truediv_26/fake_quantize/SymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.truediv_26/fake_quantize/SymmQuant/add/y" [op=Const]; 
+"maskrcnn/tf.math.truediv_26/fake_quantize/SymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.truediv_26/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.truediv_26/fake_quantize/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.truediv_26/fake_quantize/SymmQuant/mul" [op=Mul]; +"maskrcnn/tf.math.truediv_26/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; "maskrcnn/tf.cast_37/Cast" [op=Cast]; "maskrcnn/tf.__operators__.getitem_76/strided_slice/stack" [op=Const]; "maskrcnn/tf.__operators__.getitem_76/strided_slice/stack_1" [op=Const]; @@ -6217,36 +6189,39 @@ args_0_1 [op=Placeholder]; "maskrcnn/tf.math.truediv_33/truediv" [op=RealDiv]; "maskrcnn/tf.math.truediv_31/truediv/y" [op=Const]; "maskrcnn/tf.math.truediv_31/truediv" [op=RealDiv]; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/SymmQuant/Abs" [op=Abs]; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/SymmQuant/add/y" [op=Const]; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/SymmQuant/add" [op=AddV2]; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/SymmQuant/mul" [op=Mul]; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/SymmQuant_1/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/SymmQuant_1/Abs" [op=Abs]; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/SymmQuant_1/add/y" [op=Const]; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/SymmQuant_1/add" [op=AddV2]; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/SymmQuant_1/mul/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/SymmQuant_1/mul" [op=Mul]; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.__operators__.add_96/AddV2" [op=AddV2]; -"maskrcnn/tf.__operators__.add_94/AddV2" [op=AddV2]; -"maskrcnn/tf.__operators__.add_92/AddV2" [op=AddV2]; -"maskrcnn/tf.__operators__.add_90/AddV2" [op=AddV2]; -"maskrcnn/tf.__operators__.add_88/AddV2" [op=AddV2]; -"maskrcnn/tf.__operators__.add_86/AddV2" [op=AddV2]; -"maskrcnn/tf.__operators__.add_84/AddV2" [op=AddV2]; -"maskrcnn/tf.__operators__.add_82/AddV2" [op=AddV2]; -"maskrcnn/tf.__operators__.add_80/AddV2" [op=AddV2]; -"maskrcnn/tf.__operators__.add_78/AddV2" [op=AddV2]; -"maskrcnn/tf.__operators__.add_76/AddV2" [op=AddV2]; -"maskrcnn/tf.__operators__.add_74/AddV2" [op=AddV2]; -"maskrcnn/tf.__operators__.add_72/AddV2" [op=AddV2]; +"maskrcnn/tf.math.subtract_66/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_66/fake_quantize/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_66/fake_quantize/SymmQuant/Abs" [op=Abs]; 
+"maskrcnn/tf.math.subtract_66/fake_quantize/SymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.subtract_66/fake_quantize/SymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.subtract_66/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_66/fake_quantize/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_66/fake_quantize/SymmQuant/mul" [op=Mul]; +"maskrcnn/tf.math.subtract_66/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.math.subtract_65/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_65/fake_quantize/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_65/fake_quantize/SymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.subtract_65/fake_quantize/SymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.subtract_65/fake_quantize/SymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.subtract_65/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_65/fake_quantize/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_65/fake_quantize/SymmQuant/mul" [op=Mul]; +"maskrcnn/tf.math.subtract_65/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; "maskrcnn/tf.__operators__.add_70/AddV2" [op=AddV2]; +"maskrcnn/tf.__operators__.add_72/AddV2" [op=AddV2]; +"maskrcnn/tf.__operators__.add_74/AddV2" [op=AddV2]; +"maskrcnn/tf.__operators__.add_76/AddV2" [op=AddV2]; +"maskrcnn/tf.__operators__.add_78/AddV2" [op=AddV2]; +"maskrcnn/tf.__operators__.add_80/AddV2" [op=AddV2]; +"maskrcnn/tf.__operators__.add_82/AddV2" [op=AddV2]; +"maskrcnn/tf.__operators__.add_84/AddV2" [op=AddV2]; +"maskrcnn/tf.__operators__.add_86/AddV2" [op=AddV2]; +"maskrcnn/tf.__operators__.add_88/AddV2" [op=AddV2]; +"maskrcnn/tf.__operators__.add_90/AddV2" [op=AddV2]; +"maskrcnn/tf.__operators__.add_92/AddV2" [op=AddV2]; +"maskrcnn/tf.__operators__.add_94/AddV2" [op=AddV2]; +"maskrcnn/tf.__operators__.add_96/AddV2" [op=AddV2]; +"maskrcnn/tf.stack_18/stack" [op=Pack]; "maskrcnn/tf.__operators__.getitem_129/strided_slice/stack" [op=Const]; "maskrcnn/tf.__operators__.getitem_129/strided_slice/stack_1" [op=Const]; "maskrcnn/tf.__operators__.getitem_129/strided_slice/stack_2" [op=Const]; @@ -6433,283 +6408,48 @@ args_0_1 [op=Placeholder]; "maskrcnn/tf.expand_dims_27/ExpandDims" [op=ExpandDims]; "maskrcnn/tf.expand_dims_28/ExpandDims/dim" [op=Const]; "maskrcnn/tf.expand_dims_28/ExpandDims" [op=ExpandDims]; -"maskrcnn/tf.__operators__.add_70/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.__operators__.add_70/fake_quantize/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_70/fake_quantize/SymmQuant/Abs" [op=Abs]; -"maskrcnn/tf.__operators__.add_70/fake_quantize/SymmQuant/add/y" [op=Const]; -"maskrcnn/tf.__operators__.add_70/fake_quantize/SymmQuant/add" [op=AddV2]; -"maskrcnn/tf.__operators__.add_70/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.__operators__.add_70/fake_quantize/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_70/fake_quantize/SymmQuant/mul" [op=Mul]; -"maskrcnn/tf.__operators__.add_70/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.__operators__.add_72/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; 
-"maskrcnn/tf.__operators__.add_72/fake_quantize/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_72/fake_quantize/SymmQuant/Abs" [op=Abs]; -"maskrcnn/tf.__operators__.add_72/fake_quantize/SymmQuant/add/y" [op=Const]; -"maskrcnn/tf.__operators__.add_72/fake_quantize/SymmQuant/add" [op=AddV2]; -"maskrcnn/tf.__operators__.add_72/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.__operators__.add_72/fake_quantize/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_72/fake_quantize/SymmQuant/mul" [op=Mul]; -"maskrcnn/tf.__operators__.add_72/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.__operators__.add_74/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.__operators__.add_74/fake_quantize/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_74/fake_quantize/SymmQuant/Abs" [op=Abs]; -"maskrcnn/tf.__operators__.add_74/fake_quantize/SymmQuant/add/y" [op=Const]; -"maskrcnn/tf.__operators__.add_74/fake_quantize/SymmQuant/add" [op=AddV2]; -"maskrcnn/tf.__operators__.add_74/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.__operators__.add_74/fake_quantize/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_74/fake_quantize/SymmQuant/mul" [op=Mul]; -"maskrcnn/tf.__operators__.add_74/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.__operators__.add_76/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.__operators__.add_76/fake_quantize/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_76/fake_quantize/SymmQuant/Abs" [op=Abs]; -"maskrcnn/tf.__operators__.add_76/fake_quantize/SymmQuant/add/y" [op=Const]; -"maskrcnn/tf.__operators__.add_76/fake_quantize/SymmQuant/add" [op=AddV2]; -"maskrcnn/tf.__operators__.add_76/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.__operators__.add_76/fake_quantize/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_76/fake_quantize/SymmQuant/mul" [op=Mul]; -"maskrcnn/tf.__operators__.add_76/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.__operators__.add_78/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.__operators__.add_78/fake_quantize/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_78/fake_quantize/SymmQuant/Abs" [op=Abs]; -"maskrcnn/tf.__operators__.add_78/fake_quantize/SymmQuant/add/y" [op=Const]; -"maskrcnn/tf.__operators__.add_78/fake_quantize/SymmQuant/add" [op=AddV2]; -"maskrcnn/tf.__operators__.add_78/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.__operators__.add_78/fake_quantize/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_78/fake_quantize/SymmQuant/mul" [op=Mul]; -"maskrcnn/tf.__operators__.add_78/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.__operators__.add_80/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.__operators__.add_80/fake_quantize/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_80/fake_quantize/SymmQuant/Abs" [op=Abs]; -"maskrcnn/tf.__operators__.add_80/fake_quantize/SymmQuant/add/y" 
[op=Const]; -"maskrcnn/tf.__operators__.add_80/fake_quantize/SymmQuant/add" [op=AddV2]; -"maskrcnn/tf.__operators__.add_80/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.__operators__.add_80/fake_quantize/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_80/fake_quantize/SymmQuant/mul" [op=Mul]; -"maskrcnn/tf.__operators__.add_80/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.__operators__.add_82/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.__operators__.add_82/fake_quantize/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_82/fake_quantize/SymmQuant/Abs" [op=Abs]; -"maskrcnn/tf.__operators__.add_82/fake_quantize/SymmQuant/add/y" [op=Const]; -"maskrcnn/tf.__operators__.add_82/fake_quantize/SymmQuant/add" [op=AddV2]; -"maskrcnn/tf.__operators__.add_82/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.__operators__.add_82/fake_quantize/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_82/fake_quantize/SymmQuant/mul" [op=Mul]; -"maskrcnn/tf.__operators__.add_82/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.__operators__.add_84/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.__operators__.add_84/fake_quantize/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_84/fake_quantize/SymmQuant/Abs" [op=Abs]; -"maskrcnn/tf.__operators__.add_84/fake_quantize/SymmQuant/add/y" [op=Const]; -"maskrcnn/tf.__operators__.add_84/fake_quantize/SymmQuant/add" [op=AddV2]; -"maskrcnn/tf.__operators__.add_84/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.__operators__.add_84/fake_quantize/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_84/fake_quantize/SymmQuant/mul" [op=Mul]; -"maskrcnn/tf.__operators__.add_84/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.__operators__.add_86/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.__operators__.add_86/fake_quantize/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_86/fake_quantize/SymmQuant/Abs" [op=Abs]; -"maskrcnn/tf.__operators__.add_86/fake_quantize/SymmQuant/add/y" [op=Const]; -"maskrcnn/tf.__operators__.add_86/fake_quantize/SymmQuant/add" [op=AddV2]; -"maskrcnn/tf.__operators__.add_86/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.__operators__.add_86/fake_quantize/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_86/fake_quantize/SymmQuant/mul" [op=Mul]; -"maskrcnn/tf.__operators__.add_86/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.__operators__.add_88/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.__operators__.add_88/fake_quantize/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_88/fake_quantize/SymmQuant/Abs" [op=Abs]; -"maskrcnn/tf.__operators__.add_88/fake_quantize/SymmQuant/add/y" [op=Const]; -"maskrcnn/tf.__operators__.add_88/fake_quantize/SymmQuant/add" [op=AddV2]; -"maskrcnn/tf.__operators__.add_88/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; 
-"maskrcnn/tf.__operators__.add_88/fake_quantize/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_88/fake_quantize/SymmQuant/mul" [op=Mul]; -"maskrcnn/tf.__operators__.add_88/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.__operators__.add_90/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.__operators__.add_90/fake_quantize/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_90/fake_quantize/SymmQuant/Abs" [op=Abs]; -"maskrcnn/tf.__operators__.add_90/fake_quantize/SymmQuant/add/y" [op=Const]; -"maskrcnn/tf.__operators__.add_90/fake_quantize/SymmQuant/add" [op=AddV2]; -"maskrcnn/tf.__operators__.add_90/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.__operators__.add_90/fake_quantize/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_90/fake_quantize/SymmQuant/mul" [op=Mul]; -"maskrcnn/tf.__operators__.add_90/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.__operators__.add_92/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.__operators__.add_92/fake_quantize/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_92/fake_quantize/SymmQuant/Abs" [op=Abs]; -"maskrcnn/tf.__operators__.add_92/fake_quantize/SymmQuant/add/y" [op=Const]; -"maskrcnn/tf.__operators__.add_92/fake_quantize/SymmQuant/add" [op=AddV2]; -"maskrcnn/tf.__operators__.add_92/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.__operators__.add_92/fake_quantize/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_92/fake_quantize/SymmQuant/mul" [op=Mul]; -"maskrcnn/tf.__operators__.add_92/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.__operators__.add_94/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.__operators__.add_94/fake_quantize/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_94/fake_quantize/SymmQuant/Abs" [op=Abs]; -"maskrcnn/tf.__operators__.add_94/fake_quantize/SymmQuant/add/y" [op=Const]; -"maskrcnn/tf.__operators__.add_94/fake_quantize/SymmQuant/add" [op=AddV2]; -"maskrcnn/tf.__operators__.add_94/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.__operators__.add_94/fake_quantize/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_94/fake_quantize/SymmQuant/mul" [op=Mul]; -"maskrcnn/tf.__operators__.add_94/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.__operators__.add_96/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.__operators__.add_96/fake_quantize/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_96/fake_quantize/SymmQuant/Abs" [op=Abs]; -"maskrcnn/tf.__operators__.add_96/fake_quantize/SymmQuant/add/y" [op=Const]; -"maskrcnn/tf.__operators__.add_96/fake_quantize/SymmQuant/add" [op=AddV2]; -"maskrcnn/tf.__operators__.add_96/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.__operators__.add_96/fake_quantize/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_96/fake_quantize/SymmQuant/mul" [op=Mul]; 
-"maskrcnn/tf.__operators__.add_96/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.stack_18/stack" [op=Pack]; -"maskrcnn/tf.__operators__.add_95/AddV2" [op=AddV2]; -"maskrcnn/tf.__operators__.add_93/AddV2" [op=AddV2]; -"maskrcnn/tf.__operators__.add_91/AddV2" [op=AddV2]; -"maskrcnn/tf.__operators__.add_89/AddV2" [op=AddV2]; -"maskrcnn/tf.__operators__.add_87/AddV2" [op=AddV2]; -"maskrcnn/tf.__operators__.add_85/AddV2" [op=AddV2]; -"maskrcnn/tf.__operators__.add_83/AddV2" [op=AddV2]; -"maskrcnn/tf.__operators__.add_81/AddV2" [op=AddV2]; -"maskrcnn/tf.__operators__.add_79/AddV2" [op=AddV2]; -"maskrcnn/tf.__operators__.add_77/AddV2" [op=AddV2]; -"maskrcnn/tf.__operators__.add_75/AddV2" [op=AddV2]; -"maskrcnn/tf.__operators__.add_73/AddV2" [op=AddV2]; -"maskrcnn/tf.__operators__.add_71/AddV2" [op=AddV2]; "maskrcnn/tf.__operators__.add_69/AddV2" [op=AddV2]; +"maskrcnn/tf.__operators__.add_71/AddV2" [op=AddV2]; +"maskrcnn/tf.__operators__.add_73/AddV2" [op=AddV2]; +"maskrcnn/tf.__operators__.add_75/AddV2" [op=AddV2]; +"maskrcnn/tf.__operators__.add_77/AddV2" [op=AddV2]; +"maskrcnn/tf.__operators__.add_79/AddV2" [op=AddV2]; +"maskrcnn/tf.__operators__.add_81/AddV2" [op=AddV2]; +"maskrcnn/tf.__operators__.add_83/AddV2" [op=AddV2]; +"maskrcnn/tf.__operators__.add_85/AddV2" [op=AddV2]; +"maskrcnn/tf.__operators__.add_87/AddV2" [op=AddV2]; +"maskrcnn/tf.__operators__.add_89/AddV2" [op=AddV2]; +"maskrcnn/tf.__operators__.add_91/AddV2" [op=AddV2]; +"maskrcnn/tf.__operators__.add_93/AddV2" [op=AddV2]; +"maskrcnn/tf.__operators__.add_95/AddV2" [op=AddV2]; "maskrcnn/tf.concat_19/concat/axis" [op=Const]; "maskrcnn/tf.concat_19/concat" [op=ConcatV2]; -"maskrcnn/tf.__operators__.add_69/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.__operators__.add_69/fake_quantize/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_69/fake_quantize/SymmQuant/Abs" [op=Abs]; -"maskrcnn/tf.__operators__.add_69/fake_quantize/SymmQuant/add/y" [op=Const]; -"maskrcnn/tf.__operators__.add_69/fake_quantize/SymmQuant/add" [op=AddV2]; -"maskrcnn/tf.__operators__.add_69/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.__operators__.add_69/fake_quantize/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_69/fake_quantize/SymmQuant/mul" [op=Mul]; -"maskrcnn/tf.__operators__.add_69/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.__operators__.add_71/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.__operators__.add_71/fake_quantize/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_71/fake_quantize/SymmQuant/Abs" [op=Abs]; -"maskrcnn/tf.__operators__.add_71/fake_quantize/SymmQuant/add/y" [op=Const]; -"maskrcnn/tf.__operators__.add_71/fake_quantize/SymmQuant/add" [op=AddV2]; -"maskrcnn/tf.__operators__.add_71/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.__operators__.add_71/fake_quantize/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_71/fake_quantize/SymmQuant/mul" [op=Mul]; -"maskrcnn/tf.__operators__.add_71/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.__operators__.add_73/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.__operators__.add_73/fake_quantize/SymmQuant/Abs/ReadVariableOp" 
[op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_73/fake_quantize/SymmQuant/Abs" [op=Abs]; -"maskrcnn/tf.__operators__.add_73/fake_quantize/SymmQuant/add/y" [op=Const]; -"maskrcnn/tf.__operators__.add_73/fake_quantize/SymmQuant/add" [op=AddV2]; -"maskrcnn/tf.__operators__.add_73/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.__operators__.add_73/fake_quantize/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_73/fake_quantize/SymmQuant/mul" [op=Mul]; -"maskrcnn/tf.__operators__.add_73/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.__operators__.add_75/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.__operators__.add_75/fake_quantize/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_75/fake_quantize/SymmQuant/Abs" [op=Abs]; -"maskrcnn/tf.__operators__.add_75/fake_quantize/SymmQuant/add/y" [op=Const]; -"maskrcnn/tf.__operators__.add_75/fake_quantize/SymmQuant/add" [op=AddV2]; -"maskrcnn/tf.__operators__.add_75/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.__operators__.add_75/fake_quantize/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_75/fake_quantize/SymmQuant/mul" [op=Mul]; -"maskrcnn/tf.__operators__.add_75/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.__operators__.add_77/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.__operators__.add_77/fake_quantize/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_77/fake_quantize/SymmQuant/Abs" [op=Abs]; -"maskrcnn/tf.__operators__.add_77/fake_quantize/SymmQuant/add/y" [op=Const]; -"maskrcnn/tf.__operators__.add_77/fake_quantize/SymmQuant/add" [op=AddV2]; -"maskrcnn/tf.__operators__.add_77/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.__operators__.add_77/fake_quantize/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_77/fake_quantize/SymmQuant/mul" [op=Mul]; -"maskrcnn/tf.__operators__.add_77/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.__operators__.add_79/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.__operators__.add_79/fake_quantize/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_79/fake_quantize/SymmQuant/Abs" [op=Abs]; -"maskrcnn/tf.__operators__.add_79/fake_quantize/SymmQuant/add/y" [op=Const]; -"maskrcnn/tf.__operators__.add_79/fake_quantize/SymmQuant/add" [op=AddV2]; -"maskrcnn/tf.__operators__.add_79/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.__operators__.add_79/fake_quantize/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_79/fake_quantize/SymmQuant/mul" [op=Mul]; -"maskrcnn/tf.__operators__.add_79/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.__operators__.add_81/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.__operators__.add_81/fake_quantize/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_81/fake_quantize/SymmQuant/Abs" [op=Abs]; -"maskrcnn/tf.__operators__.add_81/fake_quantize/SymmQuant/add/y" [op=Const]; -"maskrcnn/tf.__operators__.add_81/fake_quantize/SymmQuant/add" 
[op=AddV2]; -"maskrcnn/tf.__operators__.add_81/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.__operators__.add_81/fake_quantize/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_81/fake_quantize/SymmQuant/mul" [op=Mul]; -"maskrcnn/tf.__operators__.add_81/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.__operators__.add_83/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.__operators__.add_83/fake_quantize/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_83/fake_quantize/SymmQuant/Abs" [op=Abs]; -"maskrcnn/tf.__operators__.add_83/fake_quantize/SymmQuant/add/y" [op=Const]; -"maskrcnn/tf.__operators__.add_83/fake_quantize/SymmQuant/add" [op=AddV2]; -"maskrcnn/tf.__operators__.add_83/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.__operators__.add_83/fake_quantize/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_83/fake_quantize/SymmQuant/mul" [op=Mul]; -"maskrcnn/tf.__operators__.add_83/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.__operators__.add_85/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.__operators__.add_85/fake_quantize/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_85/fake_quantize/SymmQuant/Abs" [op=Abs]; -"maskrcnn/tf.__operators__.add_85/fake_quantize/SymmQuant/add/y" [op=Const]; -"maskrcnn/tf.__operators__.add_85/fake_quantize/SymmQuant/add" [op=AddV2]; -"maskrcnn/tf.__operators__.add_85/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.__operators__.add_85/fake_quantize/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_85/fake_quantize/SymmQuant/mul" [op=Mul]; -"maskrcnn/tf.__operators__.add_85/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.__operators__.add_87/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.__operators__.add_87/fake_quantize/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_87/fake_quantize/SymmQuant/Abs" [op=Abs]; -"maskrcnn/tf.__operators__.add_87/fake_quantize/SymmQuant/add/y" [op=Const]; -"maskrcnn/tf.__operators__.add_87/fake_quantize/SymmQuant/add" [op=AddV2]; -"maskrcnn/tf.__operators__.add_87/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.__operators__.add_87/fake_quantize/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_87/fake_quantize/SymmQuant/mul" [op=Mul]; -"maskrcnn/tf.__operators__.add_87/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.__operators__.add_89/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.__operators__.add_89/fake_quantize/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_89/fake_quantize/SymmQuant/Abs" [op=Abs]; -"maskrcnn/tf.__operators__.add_89/fake_quantize/SymmQuant/add/y" [op=Const]; -"maskrcnn/tf.__operators__.add_89/fake_quantize/SymmQuant/add" [op=AddV2]; -"maskrcnn/tf.__operators__.add_89/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.__operators__.add_89/fake_quantize/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; 
-"maskrcnn/tf.__operators__.add_89/fake_quantize/SymmQuant/mul" [op=Mul]; -"maskrcnn/tf.__operators__.add_89/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.__operators__.add_91/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.__operators__.add_91/fake_quantize/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_91/fake_quantize/SymmQuant/Abs" [op=Abs]; -"maskrcnn/tf.__operators__.add_91/fake_quantize/SymmQuant/add/y" [op=Const]; -"maskrcnn/tf.__operators__.add_91/fake_quantize/SymmQuant/add" [op=AddV2]; -"maskrcnn/tf.__operators__.add_91/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.__operators__.add_91/fake_quantize/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_91/fake_quantize/SymmQuant/mul" [op=Mul]; -"maskrcnn/tf.__operators__.add_91/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.__operators__.add_93/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.__operators__.add_93/fake_quantize/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_93/fake_quantize/SymmQuant/Abs" [op=Abs]; -"maskrcnn/tf.__operators__.add_93/fake_quantize/SymmQuant/add/y" [op=Const]; -"maskrcnn/tf.__operators__.add_93/fake_quantize/SymmQuant/add" [op=AddV2]; -"maskrcnn/tf.__operators__.add_93/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.__operators__.add_93/fake_quantize/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_93/fake_quantize/SymmQuant/mul" [op=Mul]; -"maskrcnn/tf.__operators__.add_93/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; -"maskrcnn/tf.__operators__.add_95/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.__operators__.add_95/fake_quantize/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_95/fake_quantize/SymmQuant/Abs" [op=Abs]; -"maskrcnn/tf.__operators__.add_95/fake_quantize/SymmQuant/add/y" [op=Const]; -"maskrcnn/tf.__operators__.add_95/fake_quantize/SymmQuant/add" [op=AddV2]; -"maskrcnn/tf.__operators__.add_95/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; -"maskrcnn/tf.__operators__.add_95/fake_quantize/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; -"maskrcnn/tf.__operators__.add_95/fake_quantize/SymmQuant/mul" [op=Mul]; -"maskrcnn/tf.__operators__.add_95/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; "maskrcnn/tf.math.floor_2/Floor" [op=Floor]; "maskrcnn/tf.stack_17/stack" [op=Pack]; +"maskrcnn/tf.math.floor_2/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.floor_2/fake_quantize/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.floor_2/fake_quantize/SymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.floor_2/fake_quantize/SymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.floor_2/fake_quantize/SymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.floor_2/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.floor_2/fake_quantize/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.floor_2/fake_quantize/SymmQuant/mul" [op=Mul]; +"maskrcnn/tf.math.floor_2/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.math.floor_3/Floor" 
[op=Floor]; "maskrcnn/tf.__operators__.getitem_135/strided_slice/stack" [op=Const]; "maskrcnn/tf.__operators__.getitem_135/strided_slice/stack_1" [op=Const]; "maskrcnn/tf.__operators__.getitem_135/strided_slice/stack_2" [op=Const]; "maskrcnn/tf.__operators__.getitem_135/strided_slice" [op=StridedSlice]; "maskrcnn/tf.math.maximum_11/Maximum" [op=Maximum]; -"maskrcnn/tf.math.floor_3/Floor" [op=Floor]; +"maskrcnn/tf.math.floor_3/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.floor_3/fake_quantize/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.floor_3/fake_quantize/SymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.floor_3/fake_quantize/SymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.floor_3/fake_quantize/SymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.floor_3/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.floor_3/fake_quantize/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.floor_3/fake_quantize/SymmQuant/mul" [op=Mul]; +"maskrcnn/tf.math.floor_3/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; "maskrcnn/tf.math.maximum_11/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; "maskrcnn/tf.math.maximum_11/fake_quantize/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; "maskrcnn/tf.math.maximum_11/fake_quantize/SymmQuant/Abs" [op=Abs]; @@ -6754,6 +6494,15 @@ args_0_1 [op=Placeholder]; "maskrcnn/tf.__operators__.getitem_136/strided_slice" [op=StridedSlice]; "maskrcnn/tf.__operators__.add_98/y" [op=Const]; "maskrcnn/tf.__operators__.add_98/AddV2" [op=AddV2]; +"maskrcnn/tf.math.subtract_68/fake_quantize_I0/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_68/fake_quantize_I0/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_68/fake_quantize_I0/SymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.subtract_68/fake_quantize_I0/SymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.subtract_68/fake_quantize_I0/SymmQuant/add" [op=AddV2]; +"maskrcnn/tf.math.subtract_68/fake_quantize_I0/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_68/fake_quantize_I0/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_68/fake_quantize_I0/SymmQuant/mul" [op=Mul]; +"maskrcnn/tf.math.subtract_68/fake_quantize_I0/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; "maskrcnn/tf.math.subtract_68/fake_quantize_I1/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; "maskrcnn/tf.math.subtract_68/fake_quantize_I1/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; "maskrcnn/tf.math.subtract_68/fake_quantize_I1/SymmQuant/Abs" [op=Abs]; @@ -6763,6 +6512,15 @@ args_0_1 [op=Placeholder]; "maskrcnn/tf.math.subtract_68/fake_quantize_I1/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; "maskrcnn/tf.math.subtract_68/fake_quantize_I1/SymmQuant/mul" [op=Mul]; "maskrcnn/tf.math.subtract_68/fake_quantize_I1/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; +"maskrcnn/tf.math.subtract_67/fake_quantize_I0/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_67/fake_quantize_I0/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_67/fake_quantize_I0/SymmQuant/Abs" [op=Abs]; +"maskrcnn/tf.math.subtract_67/fake_quantize_I0/SymmQuant/add/y" [op=Const]; +"maskrcnn/tf.math.subtract_67/fake_quantize_I0/SymmQuant/add" [op=AddV2]; 
+"maskrcnn/tf.math.subtract_67/fake_quantize_I0/SymmQuant/mul/ReadVariableOp/resource" [op=Placeholder]; +"maskrcnn/tf.math.subtract_67/fake_quantize_I0/SymmQuant/mul/ReadVariableOp" [op=ReadVariableOp]; +"maskrcnn/tf.math.subtract_67/fake_quantize_I0/SymmQuant/mul" [op=Mul]; +"maskrcnn/tf.math.subtract_67/fake_quantize_I0/SymmQuant/FakeQuantWithMinMaxVars" [op=FakeQuantWithMinMaxVars]; "maskrcnn/tf.math.subtract_67/fake_quantize_I1/SymmQuant/Abs/ReadVariableOp/resource" [op=Placeholder]; "maskrcnn/tf.math.subtract_67/fake_quantize_I1/SymmQuant/Abs/ReadVariableOp" [op=ReadVariableOp]; "maskrcnn/tf.math.subtract_67/fake_quantize_I1/SymmQuant/Abs" [op=Abs]; @@ -9592,7 +9350,7 @@ args_0_1 -> "maskrcnn/image_info/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars "maskrcnn/p6-bn/FusedBatchNormV3/ReadVariableOp" -> "maskrcnn/p6-bn/FusedBatchNormV3"; "maskrcnn/p6-bn/FusedBatchNormV3/ReadVariableOp_1/resource" -> "maskrcnn/p6-bn/FusedBatchNormV3/ReadVariableOp_1"; "maskrcnn/p6-bn/FusedBatchNormV3/ReadVariableOp_1" -> "maskrcnn/p6-bn/FusedBatchNormV3"; -"maskrcnn/p6-bn/FusedBatchNormV3" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/p6-bn/FusedBatchNormV3" -> "maskrcnn/p6-bn/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/p5-bn/ReadVariableOp/resource" -> "maskrcnn/p5-bn/ReadVariableOp"; "maskrcnn/p5-bn/ReadVariableOp" -> "maskrcnn/p5-bn/FusedBatchNormV3"; "maskrcnn/p5-bn/ReadVariableOp_1/resource" -> "maskrcnn/p5-bn/ReadVariableOp_1"; @@ -9601,7 +9359,7 @@ args_0_1 -> "maskrcnn/image_info/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars "maskrcnn/p5-bn/FusedBatchNormV3/ReadVariableOp" -> "maskrcnn/p5-bn/FusedBatchNormV3"; "maskrcnn/p5-bn/FusedBatchNormV3/ReadVariableOp_1/resource" -> "maskrcnn/p5-bn/FusedBatchNormV3/ReadVariableOp_1"; "maskrcnn/p5-bn/FusedBatchNormV3/ReadVariableOp_1" -> "maskrcnn/p5-bn/FusedBatchNormV3"; -"maskrcnn/p5-bn/FusedBatchNormV3" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars"; +"maskrcnn/p5-bn/FusedBatchNormV3" -> "maskrcnn/p5-bn/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/p4-bn/ReadVariableOp/resource" -> "maskrcnn/p4-bn/ReadVariableOp"; "maskrcnn/p4-bn/ReadVariableOp" -> "maskrcnn/p4-bn/FusedBatchNormV3"; "maskrcnn/p4-bn/ReadVariableOp_1/resource" -> "maskrcnn/p4-bn/ReadVariableOp_1"; @@ -9610,7 +9368,7 @@ args_0_1 -> "maskrcnn/image_info/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars "maskrcnn/p4-bn/FusedBatchNormV3/ReadVariableOp" -> "maskrcnn/p4-bn/FusedBatchNormV3"; "maskrcnn/p4-bn/FusedBatchNormV3/ReadVariableOp_1/resource" -> "maskrcnn/p4-bn/FusedBatchNormV3/ReadVariableOp_1"; "maskrcnn/p4-bn/FusedBatchNormV3/ReadVariableOp_1" -> "maskrcnn/p4-bn/FusedBatchNormV3"; -"maskrcnn/p4-bn/FusedBatchNormV3" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_2/FakeQuantWithMinMaxVars"; +"maskrcnn/p4-bn/FusedBatchNormV3" -> "maskrcnn/p4-bn/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/p3-bn/ReadVariableOp/resource" -> "maskrcnn/p3-bn/ReadVariableOp"; "maskrcnn/p3-bn/ReadVariableOp" -> "maskrcnn/p3-bn/FusedBatchNormV3"; "maskrcnn/p3-bn/ReadVariableOp_1/resource" -> "maskrcnn/p3-bn/ReadVariableOp_1"; @@ -9619,7 +9377,7 @@ args_0_1 -> "maskrcnn/image_info/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars "maskrcnn/p3-bn/FusedBatchNormV3/ReadVariableOp" -> "maskrcnn/p3-bn/FusedBatchNormV3"; "maskrcnn/p3-bn/FusedBatchNormV3/ReadVariableOp_1/resource" -> "maskrcnn/p3-bn/FusedBatchNormV3/ReadVariableOp_1"; 
"maskrcnn/p3-bn/FusedBatchNormV3/ReadVariableOp_1" -> "maskrcnn/p3-bn/FusedBatchNormV3"; -"maskrcnn/p3-bn/FusedBatchNormV3" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_3/FakeQuantWithMinMaxVars"; +"maskrcnn/p3-bn/FusedBatchNormV3" -> "maskrcnn/p3-bn/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/p2-bn/ReadVariableOp/resource" -> "maskrcnn/p2-bn/ReadVariableOp"; "maskrcnn/p2-bn/ReadVariableOp" -> "maskrcnn/p2-bn/FusedBatchNormV3"; "maskrcnn/p2-bn/ReadVariableOp_1/resource" -> "maskrcnn/p2-bn/ReadVariableOp_1"; @@ -9628,67 +9386,67 @@ args_0_1 -> "maskrcnn/image_info/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars "maskrcnn/p2-bn/FusedBatchNormV3/ReadVariableOp" -> "maskrcnn/p2-bn/FusedBatchNormV3"; "maskrcnn/p2-bn/FusedBatchNormV3/ReadVariableOp_1/resource" -> "maskrcnn/p2-bn/FusedBatchNormV3/ReadVariableOp_1"; "maskrcnn/p2-bn/FusedBatchNormV3/ReadVariableOp_1" -> "maskrcnn/p2-bn/FusedBatchNormV3"; -"maskrcnn/p2-bn/FusedBatchNormV3" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_4/FakeQuantWithMinMaxVars"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_1/Abs/ReadVariableOp"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_2/Abs/ReadVariableOp"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_3/Abs/ReadVariableOp"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_4/Abs/ReadVariableOp"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant/Abs"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant/Abs" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant/add"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant/add/y" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant/add"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant/add" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant/mul"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant/add" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_1/mul/ReadVariableOp"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_2/mul/ReadVariableOp"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_3/mul/ReadVariableOp"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_4/mul/ReadVariableOp"; 
-"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant/mul"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant/mul" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/rpn/Conv2D"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.reshape_18/Reshape"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.reshape_47/Reshape"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_1/Abs/ReadVariableOp" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_1/Abs"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_1/Abs" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_1/add"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_1/add/y" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_1/add"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_1/add" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_1/mul"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_1/add" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_1/mul/ReadVariableOp" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_1/mul"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_1/mul" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars" -> "maskrcnn/rpn/Conv2D_1"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.reshape_16/Reshape"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.reshape_45/Reshape"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_2/Abs/ReadVariableOp" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_2/Abs"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_2/Abs" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_2/add"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_2/add/y" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_2/add"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_2/add" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_2/mul"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_2/add" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_2/FakeQuantWithMinMaxVars"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_2/mul/ReadVariableOp" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_2/mul"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_2/mul" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_2/FakeQuantWithMinMaxVars"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_2/FakeQuantWithMinMaxVars" -> "maskrcnn/rpn/Conv2D_2"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_2/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.reshape_14/Reshape"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_2/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.reshape_43/Reshape"; 
-"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_3/Abs/ReadVariableOp" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_3/Abs"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_3/Abs" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_3/add"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_3/add/y" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_3/add"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_3/add" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_3/mul"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_3/add" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_3/FakeQuantWithMinMaxVars"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_3/mul/ReadVariableOp" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_3/mul"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_3/mul" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_3/FakeQuantWithMinMaxVars"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_3/FakeQuantWithMinMaxVars" -> "maskrcnn/rpn/Conv2D_3"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_3/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.reshape_12/Reshape"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_3/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.reshape_41/Reshape"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_4/Abs/ReadVariableOp" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_4/Abs"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_4/Abs" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_4/add"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_4/add/y" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_4/add"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_4/add" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_4/mul"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_4/add" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_4/FakeQuantWithMinMaxVars"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_4/mul/ReadVariableOp" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_4/mul"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_4/mul" -> "maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_4/FakeQuantWithMinMaxVars"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_4/FakeQuantWithMinMaxVars" -> "maskrcnn/rpn/Conv2D_4"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_4/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.reshape_10/Reshape"; -"maskrcnn/p2-bn/fake_quantize/unified_scale_group/SymmQuant_4/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.reshape_39/Reshape"; +"maskrcnn/p2-bn/FusedBatchNormV3" -> "maskrcnn/p2-bn/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/p6-bn/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/p6-bn/fake_quantize/SymmQuant/Abs/ReadVariableOp"; +"maskrcnn/p6-bn/fake_quantize/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/p6-bn/fake_quantize/SymmQuant/Abs"; +"maskrcnn/p6-bn/fake_quantize/SymmQuant/Abs" -> "maskrcnn/p6-bn/fake_quantize/SymmQuant/add"; +"maskrcnn/p6-bn/fake_quantize/SymmQuant/add/y" -> "maskrcnn/p6-bn/fake_quantize/SymmQuant/add"; +"maskrcnn/p6-bn/fake_quantize/SymmQuant/add" -> "maskrcnn/p6-bn/fake_quantize/SymmQuant/mul"; +"maskrcnn/p6-bn/fake_quantize/SymmQuant/add" -> 
"maskrcnn/p6-bn/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/p6-bn/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/p6-bn/fake_quantize/SymmQuant/mul/ReadVariableOp"; +"maskrcnn/p6-bn/fake_quantize/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/p6-bn/fake_quantize/SymmQuant/mul"; +"maskrcnn/p6-bn/fake_quantize/SymmQuant/mul" -> "maskrcnn/p6-bn/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/p6-bn/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/rpn/Conv2D"; +"maskrcnn/p6-bn/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.reshape_18/Reshape"; +"maskrcnn/p6-bn/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.reshape_47/Reshape"; +"maskrcnn/p5-bn/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/p5-bn/fake_quantize/SymmQuant/Abs/ReadVariableOp"; +"maskrcnn/p5-bn/fake_quantize/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/p5-bn/fake_quantize/SymmQuant/Abs"; +"maskrcnn/p5-bn/fake_quantize/SymmQuant/Abs" -> "maskrcnn/p5-bn/fake_quantize/SymmQuant/add"; +"maskrcnn/p5-bn/fake_quantize/SymmQuant/add/y" -> "maskrcnn/p5-bn/fake_quantize/SymmQuant/add"; +"maskrcnn/p5-bn/fake_quantize/SymmQuant/add" -> "maskrcnn/p5-bn/fake_quantize/SymmQuant/mul"; +"maskrcnn/p5-bn/fake_quantize/SymmQuant/add" -> "maskrcnn/p5-bn/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/p5-bn/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/p5-bn/fake_quantize/SymmQuant/mul/ReadVariableOp"; +"maskrcnn/p5-bn/fake_quantize/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/p5-bn/fake_quantize/SymmQuant/mul"; +"maskrcnn/p5-bn/fake_quantize/SymmQuant/mul" -> "maskrcnn/p5-bn/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/p5-bn/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/rpn/Conv2D_1"; +"maskrcnn/p5-bn/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.reshape_16/Reshape"; +"maskrcnn/p5-bn/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.reshape_45/Reshape"; +"maskrcnn/p4-bn/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/p4-bn/fake_quantize/SymmQuant/Abs/ReadVariableOp"; +"maskrcnn/p4-bn/fake_quantize/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/p4-bn/fake_quantize/SymmQuant/Abs"; +"maskrcnn/p4-bn/fake_quantize/SymmQuant/Abs" -> "maskrcnn/p4-bn/fake_quantize/SymmQuant/add"; +"maskrcnn/p4-bn/fake_quantize/SymmQuant/add/y" -> "maskrcnn/p4-bn/fake_quantize/SymmQuant/add"; +"maskrcnn/p4-bn/fake_quantize/SymmQuant/add" -> "maskrcnn/p4-bn/fake_quantize/SymmQuant/mul"; +"maskrcnn/p4-bn/fake_quantize/SymmQuant/add" -> "maskrcnn/p4-bn/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/p4-bn/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/p4-bn/fake_quantize/SymmQuant/mul/ReadVariableOp"; +"maskrcnn/p4-bn/fake_quantize/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/p4-bn/fake_quantize/SymmQuant/mul"; +"maskrcnn/p4-bn/fake_quantize/SymmQuant/mul" -> "maskrcnn/p4-bn/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/p4-bn/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/rpn/Conv2D_2"; +"maskrcnn/p4-bn/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.reshape_14/Reshape"; +"maskrcnn/p4-bn/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.reshape_43/Reshape"; +"maskrcnn/p3-bn/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/p3-bn/fake_quantize/SymmQuant/Abs/ReadVariableOp"; +"maskrcnn/p3-bn/fake_quantize/SymmQuant/Abs/ReadVariableOp" -> 
"maskrcnn/p3-bn/fake_quantize/SymmQuant/Abs"; +"maskrcnn/p3-bn/fake_quantize/SymmQuant/Abs" -> "maskrcnn/p3-bn/fake_quantize/SymmQuant/add"; +"maskrcnn/p3-bn/fake_quantize/SymmQuant/add/y" -> "maskrcnn/p3-bn/fake_quantize/SymmQuant/add"; +"maskrcnn/p3-bn/fake_quantize/SymmQuant/add" -> "maskrcnn/p3-bn/fake_quantize/SymmQuant/mul"; +"maskrcnn/p3-bn/fake_quantize/SymmQuant/add" -> "maskrcnn/p3-bn/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/p3-bn/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/p3-bn/fake_quantize/SymmQuant/mul/ReadVariableOp"; +"maskrcnn/p3-bn/fake_quantize/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/p3-bn/fake_quantize/SymmQuant/mul"; +"maskrcnn/p3-bn/fake_quantize/SymmQuant/mul" -> "maskrcnn/p3-bn/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/p3-bn/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/rpn/Conv2D_3"; +"maskrcnn/p3-bn/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.reshape_12/Reshape"; +"maskrcnn/p3-bn/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.reshape_41/Reshape"; +"maskrcnn/p2-bn/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/p2-bn/fake_quantize/SymmQuant/Abs/ReadVariableOp"; +"maskrcnn/p2-bn/fake_quantize/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/p2-bn/fake_quantize/SymmQuant/Abs"; +"maskrcnn/p2-bn/fake_quantize/SymmQuant/Abs" -> "maskrcnn/p2-bn/fake_quantize/SymmQuant/add"; +"maskrcnn/p2-bn/fake_quantize/SymmQuant/add/y" -> "maskrcnn/p2-bn/fake_quantize/SymmQuant/add"; +"maskrcnn/p2-bn/fake_quantize/SymmQuant/add" -> "maskrcnn/p2-bn/fake_quantize/SymmQuant/mul"; +"maskrcnn/p2-bn/fake_quantize/SymmQuant/add" -> "maskrcnn/p2-bn/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/p2-bn/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/p2-bn/fake_quantize/SymmQuant/mul/ReadVariableOp"; +"maskrcnn/p2-bn/fake_quantize/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/p2-bn/fake_quantize/SymmQuant/mul"; +"maskrcnn/p2-bn/fake_quantize/SymmQuant/mul" -> "maskrcnn/p2-bn/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/p2-bn/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/rpn/Conv2D_4"; +"maskrcnn/p2-bn/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.reshape_10/Reshape"; +"maskrcnn/p2-bn/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.reshape_39/Reshape"; "maskrcnn/rpn/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/rpn/SymmQuant/Abs/ReadVariableOp"; "maskrcnn/rpn/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/rpn/SymmQuant_1/Abs/ReadVariableOp"; "maskrcnn/rpn/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/rpn/SymmQuant_2/Abs/ReadVariableOp"; @@ -9860,7 +9618,8 @@ args_0_1 -> "maskrcnn/image_info/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars "maskrcnn/rpn-box/BiasAdd/ReadVariableOp/resource" -> "maskrcnn/rpn-box/BiasAdd_3/ReadVariableOp"; "maskrcnn/rpn-box/BiasAdd/ReadVariableOp/resource" -> "maskrcnn/rpn-box/BiasAdd_4/ReadVariableOp"; "maskrcnn/rpn-box/BiasAdd/ReadVariableOp" -> "maskrcnn/rpn-box/BiasAdd"; -"maskrcnn/rpn-box/BiasAdd" -> "maskrcnn/rpn-box/fake_quantize_4/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/rpn-box/BiasAdd" -> "maskrcnn/tf.reshape_9/fake_quantize_I0/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/rpn-box/BiasAdd" -> Identity_11; "maskrcnn/rpn-box/SymmQuant_1/Abs/ReadVariableOp" -> "maskrcnn/rpn-box/SymmQuant_1/Abs"; "maskrcnn/rpn-box/SymmQuant_1/Abs" -> "maskrcnn/rpn-box/SymmQuant_1/add"; "maskrcnn/rpn-box/SymmQuant_1/add/y" -> 
"maskrcnn/rpn-box/SymmQuant_1/add"; @@ -9872,7 +9631,8 @@ args_0_1 -> "maskrcnn/image_info/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars "maskrcnn/rpn-box/SymmQuant_1/FakeQuantWithMinMaxVars" -> "maskrcnn/rpn-box/Conv2D_1"; "maskrcnn/rpn-box/Conv2D_1" -> "maskrcnn/rpn-box/BiasAdd_1"; "maskrcnn/rpn-box/BiasAdd_1/ReadVariableOp" -> "maskrcnn/rpn-box/BiasAdd_1"; -"maskrcnn/rpn-box/BiasAdd_1" -> "maskrcnn/rpn-box/fake_quantize_3/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/rpn-box/BiasAdd_1" -> "maskrcnn/tf.reshape_7/fake_quantize_I0/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/rpn-box/BiasAdd_1" -> Identity_10; "maskrcnn/rpn-box/SymmQuant_2/Abs/ReadVariableOp" -> "maskrcnn/rpn-box/SymmQuant_2/Abs"; "maskrcnn/rpn-box/SymmQuant_2/Abs" -> "maskrcnn/rpn-box/SymmQuant_2/add"; "maskrcnn/rpn-box/SymmQuant_2/add/y" -> "maskrcnn/rpn-box/SymmQuant_2/add"; @@ -9884,7 +9644,8 @@ args_0_1 -> "maskrcnn/image_info/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars "maskrcnn/rpn-box/SymmQuant_2/FakeQuantWithMinMaxVars" -> "maskrcnn/rpn-box/Conv2D_2"; "maskrcnn/rpn-box/Conv2D_2" -> "maskrcnn/rpn-box/BiasAdd_2"; "maskrcnn/rpn-box/BiasAdd_2/ReadVariableOp" -> "maskrcnn/rpn-box/BiasAdd_2"; -"maskrcnn/rpn-box/BiasAdd_2" -> "maskrcnn/rpn-box/fake_quantize_2/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/rpn-box/BiasAdd_2" -> "maskrcnn/tf.reshape_5/fake_quantize_I0/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/rpn-box/BiasAdd_2" -> Identity_9; "maskrcnn/rpn-box/SymmQuant_3/Abs/ReadVariableOp" -> "maskrcnn/rpn-box/SymmQuant_3/Abs"; "maskrcnn/rpn-box/SymmQuant_3/Abs" -> "maskrcnn/rpn-box/SymmQuant_3/add"; "maskrcnn/rpn-box/SymmQuant_3/add/y" -> "maskrcnn/rpn-box/SymmQuant_3/add"; @@ -9896,7 +9657,8 @@ args_0_1 -> "maskrcnn/image_info/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars "maskrcnn/rpn-box/SymmQuant_3/FakeQuantWithMinMaxVars" -> "maskrcnn/rpn-box/Conv2D_3"; "maskrcnn/rpn-box/Conv2D_3" -> "maskrcnn/rpn-box/BiasAdd_3"; "maskrcnn/rpn-box/BiasAdd_3/ReadVariableOp" -> "maskrcnn/rpn-box/BiasAdd_3"; -"maskrcnn/rpn-box/BiasAdd_3" -> "maskrcnn/rpn-box/fake_quantize_1/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/rpn-box/BiasAdd_3" -> "maskrcnn/tf.reshape_3/fake_quantize_I0/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/rpn-box/BiasAdd_3" -> Identity_8; "maskrcnn/rpn-box/SymmQuant_4/Abs/ReadVariableOp" -> "maskrcnn/rpn-box/SymmQuant_4/Abs"; "maskrcnn/rpn-box/SymmQuant_4/Abs" -> "maskrcnn/rpn-box/SymmQuant_4/add"; "maskrcnn/rpn-box/SymmQuant_4/add/y" -> "maskrcnn/rpn-box/SymmQuant_4/add"; @@ -9908,62 +9670,58 @@ args_0_1 -> "maskrcnn/image_info/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars "maskrcnn/rpn-box/SymmQuant_4/FakeQuantWithMinMaxVars" -> "maskrcnn/rpn-box/Conv2D_4"; "maskrcnn/rpn-box/Conv2D_4" -> "maskrcnn/rpn-box/BiasAdd_4"; "maskrcnn/rpn-box/BiasAdd_4/ReadVariableOp" -> "maskrcnn/rpn-box/BiasAdd_4"; -"maskrcnn/rpn-box/BiasAdd_4" -> "maskrcnn/rpn-box/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/rpn-box/fake_quantize_4/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/rpn-box/fake_quantize_4/SymmQuant/Abs/ReadVariableOp"; -"maskrcnn/rpn-box/fake_quantize_4/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/rpn-box/fake_quantize_4/SymmQuant/Abs"; -"maskrcnn/rpn-box/fake_quantize_4/SymmQuant/Abs" -> "maskrcnn/rpn-box/fake_quantize_4/SymmQuant/add"; -"maskrcnn/rpn-box/fake_quantize_4/SymmQuant/add/y" -> "maskrcnn/rpn-box/fake_quantize_4/SymmQuant/add"; -"maskrcnn/rpn-box/fake_quantize_4/SymmQuant/add" -> "maskrcnn/rpn-box/fake_quantize_4/SymmQuant/mul"; 
-"maskrcnn/rpn-box/fake_quantize_4/SymmQuant/add" -> "maskrcnn/rpn-box/fake_quantize_4/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/rpn-box/fake_quantize_4/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/rpn-box/fake_quantize_4/SymmQuant/mul/ReadVariableOp"; -"maskrcnn/rpn-box/fake_quantize_4/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/rpn-box/fake_quantize_4/SymmQuant/mul"; -"maskrcnn/rpn-box/fake_quantize_4/SymmQuant/mul" -> "maskrcnn/rpn-box/fake_quantize_4/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/rpn-box/fake_quantize_4/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.reshape_9/Reshape"; -"maskrcnn/rpn-box/fake_quantize_4/SymmQuant/FakeQuantWithMinMaxVars" -> Identity_11; -"maskrcnn/rpn-box/fake_quantize_3/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/rpn-box/fake_quantize_3/SymmQuant/Abs/ReadVariableOp"; -"maskrcnn/rpn-box/fake_quantize_3/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/rpn-box/fake_quantize_3/SymmQuant/Abs"; -"maskrcnn/rpn-box/fake_quantize_3/SymmQuant/Abs" -> "maskrcnn/rpn-box/fake_quantize_3/SymmQuant/add"; -"maskrcnn/rpn-box/fake_quantize_3/SymmQuant/add/y" -> "maskrcnn/rpn-box/fake_quantize_3/SymmQuant/add"; -"maskrcnn/rpn-box/fake_quantize_3/SymmQuant/add" -> "maskrcnn/rpn-box/fake_quantize_3/SymmQuant/mul"; -"maskrcnn/rpn-box/fake_quantize_3/SymmQuant/add" -> "maskrcnn/rpn-box/fake_quantize_3/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/rpn-box/fake_quantize_3/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/rpn-box/fake_quantize_3/SymmQuant/mul/ReadVariableOp"; -"maskrcnn/rpn-box/fake_quantize_3/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/rpn-box/fake_quantize_3/SymmQuant/mul"; -"maskrcnn/rpn-box/fake_quantize_3/SymmQuant/mul" -> "maskrcnn/rpn-box/fake_quantize_3/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/rpn-box/fake_quantize_3/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.reshape_7/Reshape"; -"maskrcnn/rpn-box/fake_quantize_3/SymmQuant/FakeQuantWithMinMaxVars" -> Identity_10; -"maskrcnn/rpn-box/fake_quantize_2/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/rpn-box/fake_quantize_2/SymmQuant/Abs/ReadVariableOp"; -"maskrcnn/rpn-box/fake_quantize_2/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/rpn-box/fake_quantize_2/SymmQuant/Abs"; -"maskrcnn/rpn-box/fake_quantize_2/SymmQuant/Abs" -> "maskrcnn/rpn-box/fake_quantize_2/SymmQuant/add"; -"maskrcnn/rpn-box/fake_quantize_2/SymmQuant/add/y" -> "maskrcnn/rpn-box/fake_quantize_2/SymmQuant/add"; -"maskrcnn/rpn-box/fake_quantize_2/SymmQuant/add" -> "maskrcnn/rpn-box/fake_quantize_2/SymmQuant/mul"; -"maskrcnn/rpn-box/fake_quantize_2/SymmQuant/add" -> "maskrcnn/rpn-box/fake_quantize_2/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/rpn-box/fake_quantize_2/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/rpn-box/fake_quantize_2/SymmQuant/mul/ReadVariableOp"; -"maskrcnn/rpn-box/fake_quantize_2/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/rpn-box/fake_quantize_2/SymmQuant/mul"; -"maskrcnn/rpn-box/fake_quantize_2/SymmQuant/mul" -> "maskrcnn/rpn-box/fake_quantize_2/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/rpn-box/fake_quantize_2/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.reshape_5/Reshape"; -"maskrcnn/rpn-box/fake_quantize_2/SymmQuant/FakeQuantWithMinMaxVars" -> Identity_9; -"maskrcnn/rpn-box/fake_quantize_1/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/rpn-box/fake_quantize_1/SymmQuant/Abs/ReadVariableOp"; -"maskrcnn/rpn-box/fake_quantize_1/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/rpn-box/fake_quantize_1/SymmQuant/Abs"; -"maskrcnn/rpn-box/fake_quantize_1/SymmQuant/Abs" -> 
"maskrcnn/rpn-box/fake_quantize_1/SymmQuant/add"; -"maskrcnn/rpn-box/fake_quantize_1/SymmQuant/add/y" -> "maskrcnn/rpn-box/fake_quantize_1/SymmQuant/add"; -"maskrcnn/rpn-box/fake_quantize_1/SymmQuant/add" -> "maskrcnn/rpn-box/fake_quantize_1/SymmQuant/mul"; -"maskrcnn/rpn-box/fake_quantize_1/SymmQuant/add" -> "maskrcnn/rpn-box/fake_quantize_1/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/rpn-box/fake_quantize_1/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/rpn-box/fake_quantize_1/SymmQuant/mul/ReadVariableOp"; -"maskrcnn/rpn-box/fake_quantize_1/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/rpn-box/fake_quantize_1/SymmQuant/mul"; -"maskrcnn/rpn-box/fake_quantize_1/SymmQuant/mul" -> "maskrcnn/rpn-box/fake_quantize_1/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/rpn-box/fake_quantize_1/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.reshape_3/Reshape"; -"maskrcnn/rpn-box/fake_quantize_1/SymmQuant/FakeQuantWithMinMaxVars" -> Identity_8; -"maskrcnn/rpn-box/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/rpn-box/fake_quantize/SymmQuant/Abs/ReadVariableOp"; -"maskrcnn/rpn-box/fake_quantize/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/rpn-box/fake_quantize/SymmQuant/Abs"; -"maskrcnn/rpn-box/fake_quantize/SymmQuant/Abs" -> "maskrcnn/rpn-box/fake_quantize/SymmQuant/add"; -"maskrcnn/rpn-box/fake_quantize/SymmQuant/add/y" -> "maskrcnn/rpn-box/fake_quantize/SymmQuant/add"; -"maskrcnn/rpn-box/fake_quantize/SymmQuant/add" -> "maskrcnn/rpn-box/fake_quantize/SymmQuant/mul"; -"maskrcnn/rpn-box/fake_quantize/SymmQuant/add" -> "maskrcnn/rpn-box/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/rpn-box/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/rpn-box/fake_quantize/SymmQuant/mul/ReadVariableOp"; -"maskrcnn/rpn-box/fake_quantize/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/rpn-box/fake_quantize/SymmQuant/mul"; -"maskrcnn/rpn-box/fake_quantize/SymmQuant/mul" -> "maskrcnn/rpn-box/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/rpn-box/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.reshape_1/Reshape"; -"maskrcnn/rpn-box/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> Identity_7; +"maskrcnn/rpn-box/BiasAdd_4" -> "maskrcnn/tf.reshape_1/fake_quantize_I0/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/rpn-box/BiasAdd_4" -> Identity_7; +"maskrcnn/tf.reshape_9/fake_quantize_I0/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.reshape_9/fake_quantize_I0/SymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.reshape_9/fake_quantize_I0/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.reshape_9/fake_quantize_I0/SymmQuant/Abs"; +"maskrcnn/tf.reshape_9/fake_quantize_I0/SymmQuant/Abs" -> "maskrcnn/tf.reshape_9/fake_quantize_I0/SymmQuant/add"; +"maskrcnn/tf.reshape_9/fake_quantize_I0/SymmQuant/add/y" -> "maskrcnn/tf.reshape_9/fake_quantize_I0/SymmQuant/add"; +"maskrcnn/tf.reshape_9/fake_quantize_I0/SymmQuant/add" -> "maskrcnn/tf.reshape_9/fake_quantize_I0/SymmQuant/mul"; +"maskrcnn/tf.reshape_9/fake_quantize_I0/SymmQuant/add" -> "maskrcnn/tf.reshape_9/fake_quantize_I0/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.reshape_9/fake_quantize_I0/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.reshape_9/fake_quantize_I0/SymmQuant/mul/ReadVariableOp"; +"maskrcnn/tf.reshape_9/fake_quantize_I0/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.reshape_9/fake_quantize_I0/SymmQuant/mul"; +"maskrcnn/tf.reshape_9/fake_quantize_I0/SymmQuant/mul" -> "maskrcnn/tf.reshape_9/fake_quantize_I0/SymmQuant/FakeQuantWithMinMaxVars"; 
+"maskrcnn/tf.reshape_9/fake_quantize_I0/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.reshape_9/Reshape"; +"maskrcnn/tf.reshape_7/fake_quantize_I0/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.reshape_7/fake_quantize_I0/SymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.reshape_7/fake_quantize_I0/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.reshape_7/fake_quantize_I0/SymmQuant/Abs"; +"maskrcnn/tf.reshape_7/fake_quantize_I0/SymmQuant/Abs" -> "maskrcnn/tf.reshape_7/fake_quantize_I0/SymmQuant/add"; +"maskrcnn/tf.reshape_7/fake_quantize_I0/SymmQuant/add/y" -> "maskrcnn/tf.reshape_7/fake_quantize_I0/SymmQuant/add"; +"maskrcnn/tf.reshape_7/fake_quantize_I0/SymmQuant/add" -> "maskrcnn/tf.reshape_7/fake_quantize_I0/SymmQuant/mul"; +"maskrcnn/tf.reshape_7/fake_quantize_I0/SymmQuant/add" -> "maskrcnn/tf.reshape_7/fake_quantize_I0/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.reshape_7/fake_quantize_I0/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.reshape_7/fake_quantize_I0/SymmQuant/mul/ReadVariableOp"; +"maskrcnn/tf.reshape_7/fake_quantize_I0/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.reshape_7/fake_quantize_I0/SymmQuant/mul"; +"maskrcnn/tf.reshape_7/fake_quantize_I0/SymmQuant/mul" -> "maskrcnn/tf.reshape_7/fake_quantize_I0/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.reshape_7/fake_quantize_I0/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.reshape_7/Reshape"; +"maskrcnn/tf.reshape_5/fake_quantize_I0/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.reshape_5/fake_quantize_I0/SymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.reshape_5/fake_quantize_I0/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.reshape_5/fake_quantize_I0/SymmQuant/Abs"; +"maskrcnn/tf.reshape_5/fake_quantize_I0/SymmQuant/Abs" -> "maskrcnn/tf.reshape_5/fake_quantize_I0/SymmQuant/add"; +"maskrcnn/tf.reshape_5/fake_quantize_I0/SymmQuant/add/y" -> "maskrcnn/tf.reshape_5/fake_quantize_I0/SymmQuant/add"; +"maskrcnn/tf.reshape_5/fake_quantize_I0/SymmQuant/add" -> "maskrcnn/tf.reshape_5/fake_quantize_I0/SymmQuant/mul"; +"maskrcnn/tf.reshape_5/fake_quantize_I0/SymmQuant/add" -> "maskrcnn/tf.reshape_5/fake_quantize_I0/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.reshape_5/fake_quantize_I0/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.reshape_5/fake_quantize_I0/SymmQuant/mul/ReadVariableOp"; +"maskrcnn/tf.reshape_5/fake_quantize_I0/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.reshape_5/fake_quantize_I0/SymmQuant/mul"; +"maskrcnn/tf.reshape_5/fake_quantize_I0/SymmQuant/mul" -> "maskrcnn/tf.reshape_5/fake_quantize_I0/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.reshape_5/fake_quantize_I0/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.reshape_5/Reshape"; +"maskrcnn/tf.reshape_3/fake_quantize_I0/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.reshape_3/fake_quantize_I0/SymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.reshape_3/fake_quantize_I0/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.reshape_3/fake_quantize_I0/SymmQuant/Abs"; +"maskrcnn/tf.reshape_3/fake_quantize_I0/SymmQuant/Abs" -> "maskrcnn/tf.reshape_3/fake_quantize_I0/SymmQuant/add"; +"maskrcnn/tf.reshape_3/fake_quantize_I0/SymmQuant/add/y" -> "maskrcnn/tf.reshape_3/fake_quantize_I0/SymmQuant/add"; +"maskrcnn/tf.reshape_3/fake_quantize_I0/SymmQuant/add" -> "maskrcnn/tf.reshape_3/fake_quantize_I0/SymmQuant/mul"; +"maskrcnn/tf.reshape_3/fake_quantize_I0/SymmQuant/add" -> "maskrcnn/tf.reshape_3/fake_quantize_I0/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.reshape_3/fake_quantize_I0/SymmQuant/mul/ReadVariableOp/resource" -> 
"maskrcnn/tf.reshape_3/fake_quantize_I0/SymmQuant/mul/ReadVariableOp"; +"maskrcnn/tf.reshape_3/fake_quantize_I0/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.reshape_3/fake_quantize_I0/SymmQuant/mul"; +"maskrcnn/tf.reshape_3/fake_quantize_I0/SymmQuant/mul" -> "maskrcnn/tf.reshape_3/fake_quantize_I0/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.reshape_3/fake_quantize_I0/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.reshape_3/Reshape"; +"maskrcnn/tf.reshape_1/fake_quantize_I0/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.reshape_1/fake_quantize_I0/SymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.reshape_1/fake_quantize_I0/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.reshape_1/fake_quantize_I0/SymmQuant/Abs"; +"maskrcnn/tf.reshape_1/fake_quantize_I0/SymmQuant/Abs" -> "maskrcnn/tf.reshape_1/fake_quantize_I0/SymmQuant/add"; +"maskrcnn/tf.reshape_1/fake_quantize_I0/SymmQuant/add/y" -> "maskrcnn/tf.reshape_1/fake_quantize_I0/SymmQuant/add"; +"maskrcnn/tf.reshape_1/fake_quantize_I0/SymmQuant/add" -> "maskrcnn/tf.reshape_1/fake_quantize_I0/SymmQuant/mul"; +"maskrcnn/tf.reshape_1/fake_quantize_I0/SymmQuant/add" -> "maskrcnn/tf.reshape_1/fake_quantize_I0/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.reshape_1/fake_quantize_I0/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.reshape_1/fake_quantize_I0/SymmQuant/mul/ReadVariableOp"; +"maskrcnn/tf.reshape_1/fake_quantize_I0/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.reshape_1/fake_quantize_I0/SymmQuant/mul"; +"maskrcnn/tf.reshape_1/fake_quantize_I0/SymmQuant/mul" -> "maskrcnn/tf.reshape_1/fake_quantize_I0/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.reshape_1/fake_quantize_I0/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.reshape_1/Reshape"; "maskrcnn/tf.reshape_9/Reshape/shape" -> "maskrcnn/tf.reshape_9/Reshape"; "maskrcnn/tf.reshape_9/Reshape" -> "maskrcnn/tf.__operators__.getitem_20/strided_slice"; "maskrcnn/tf.reshape_9/Reshape" -> "maskrcnn/tf.__operators__.getitem_19/strided_slice"; @@ -10690,156 +10448,136 @@ args_0_1 -> "maskrcnn/image_info/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars "maskrcnn/tf.math.multiply_4/fake_quantize/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.math.multiply_4/fake_quantize/SymmQuant/mul"; "maskrcnn/tf.math.multiply_4/fake_quantize/SymmQuant/mul" -> "maskrcnn/tf.math.multiply_4/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.multiply_4/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.math.subtract/Sub"; -"maskrcnn/tf.math.subtract_33/Sub" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_32/Sub" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_25/Sub" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_24/Sub" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_17/Sub" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_16/Sub" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_9/Sub" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_8/Sub" -> 
"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_33/Sub" -> "maskrcnn/tf.math.subtract_33/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_32/Sub" -> "maskrcnn/tf.math.subtract_32/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_25/Sub" -> "maskrcnn/tf.math.subtract_25/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_24/Sub" -> "maskrcnn/tf.math.subtract_24/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_17/Sub" -> "maskrcnn/tf.math.subtract_17/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_16/Sub" -> "maskrcnn/tf.math.subtract_16/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_9/Sub" -> "maskrcnn/tf.math.subtract_9/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_8/Sub" -> "maskrcnn/tf.math.subtract_8/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.__operators__.getitem/strided_slice/stack" -> "maskrcnn/tf.__operators__.getitem/strided_slice"; "maskrcnn/tf.__operators__.getitem/strided_slice/stack_1" -> "maskrcnn/tf.__operators__.getitem/strided_slice"; "maskrcnn/tf.__operators__.getitem/strided_slice/stack_2" -> "maskrcnn/tf.__operators__.getitem/strided_slice"; "maskrcnn/tf.__operators__.getitem/strided_slice" -> "maskrcnn/tf.expand_dims/ExpandDims"; -"maskrcnn/tf.math.subtract_1/Sub" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract/Sub" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_1/Sub" -> "maskrcnn/tf.math.subtract_1/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract/Sub" -> "maskrcnn/tf.math.subtract/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.expand_dims/ExpandDims/dim" -> "maskrcnn/tf.expand_dims/ExpandDims"; "maskrcnn/tf.expand_dims/ExpandDims" -> "maskrcnn/tf.unstack_4/unstack"; "maskrcnn/tf.expand_dims/ExpandDims" -> "maskrcnn/tf.unstack_3/unstack"; "maskrcnn/tf.expand_dims/ExpandDims" -> "maskrcnn/tf.unstack_2/unstack"; "maskrcnn/tf.expand_dims/ExpandDims" -> "maskrcnn/tf.unstack_1/unstack"; "maskrcnn/tf.expand_dims/ExpandDims" -> "maskrcnn/tf.unstack/unstack"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_1/Abs/ReadVariableOp"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_2/Abs/ReadVariableOp"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_3/Abs/ReadVariableOp"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant/Abs"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant/Abs" -> 
"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant/add"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant/add/y" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant/add"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant/add" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant/mul"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant/add" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_1/mul/ReadVariableOp"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_2/mul/ReadVariableOp"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_3/mul/ReadVariableOp"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant/mul"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant/mul" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.__operators__.add_37/AddV2"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_4/concat"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_1/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_1/Abs"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_1/Abs" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_1/add"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_1/add/y" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_1/add"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_1/add" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_1/mul"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_1/add" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_1/mul/ReadVariableOp" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_1/mul"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_1/mul" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.__operators__.add_38/AddV2"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars" -> 
"maskrcnn/tf.concat_4/concat"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_1/Abs/ReadVariableOp"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_2/Abs/ReadVariableOp"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_3/Abs/ReadVariableOp"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant/Abs"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant/Abs" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant/add"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant/add/y" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant/add"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant/add" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant/mul"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant/add" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_1/mul/ReadVariableOp"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_2/mul/ReadVariableOp"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_3/mul/ReadVariableOp"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant/mul"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant/mul" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.__operators__.add_33/AddV2"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_3/concat"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_1/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_1/Abs"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_1/Abs" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_1/add"; 
-"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_1/add/y" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_1/add"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_1/add" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_1/mul"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_1/add" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_1/mul/ReadVariableOp" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_1/mul"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_1/mul" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.__operators__.add_34/AddV2"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_3/concat"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_1/Abs/ReadVariableOp"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_2/Abs/ReadVariableOp"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_3/Abs/ReadVariableOp"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant/Abs"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant/Abs" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant/add"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant/add/y" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant/add"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant/add" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant/mul"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant/add" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_1/mul/ReadVariableOp"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_2/mul/ReadVariableOp"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp/resource" 
-> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_3/mul/ReadVariableOp"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant/mul"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant/mul" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.__operators__.add_29/AddV2"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_2/concat"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_1/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_1/Abs"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_1/Abs" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_1/add"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_1/add/y" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_1/add"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_1/add" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_1/mul"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_1/add" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_1/mul/ReadVariableOp" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_1/mul"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_1/mul" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.__operators__.add_30/AddV2"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_2/concat"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_1/Abs/ReadVariableOp"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_2/Abs/ReadVariableOp"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_3/Abs/ReadVariableOp"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant/Abs"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant/Abs" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant/add"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant/add/y" -> 
"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant/add"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant/add" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant/mul"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant/add" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_1/mul/ReadVariableOp"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_2/mul/ReadVariableOp"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_3/mul/ReadVariableOp"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant/mul"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant/mul" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.__operators__.add_25/AddV2"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_1/concat"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_1/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_1/Abs"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_1/Abs" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_1/add"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_1/add/y" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_1/add"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_1/add" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_1/mul"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_1/add" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_1/mul/ReadVariableOp" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_1/mul"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_1/mul" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.__operators__.add_26/AddV2"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_1/concat"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp/resource" -> 
"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_1/Abs/ReadVariableOp"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_2/Abs/ReadVariableOp"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_3/Abs/ReadVariableOp"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant/Abs"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant/Abs" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant/add"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant/add/y" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant/add"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant/add" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant/mul"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant/add" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_1/mul/ReadVariableOp"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_2/mul/ReadVariableOp"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_3/mul/ReadVariableOp"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant/mul"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant/mul" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.__operators__.add_21/AddV2"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat/concat"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_1/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_1/Abs"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_1/Abs" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_1/add"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_1/add/y" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_1/add"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_1/add" -> 
"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_1/mul"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_1/add" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_1/mul/ReadVariableOp" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_1/mul"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_1/mul" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.__operators__.add_22/AddV2"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat/concat"; +"maskrcnn/tf.math.subtract_32/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_32/fake_quantize/SymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.subtract_32/fake_quantize/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_32/fake_quantize/SymmQuant/Abs"; +"maskrcnn/tf.math.subtract_32/fake_quantize/SymmQuant/Abs" -> "maskrcnn/tf.math.subtract_32/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.subtract_32/fake_quantize/SymmQuant/add/y" -> "maskrcnn/tf.math.subtract_32/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.subtract_32/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.subtract_32/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.subtract_32/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.subtract_32/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_32/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_32/fake_quantize/SymmQuant/mul/ReadVariableOp"; +"maskrcnn/tf.math.subtract_32/fake_quantize/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.math.subtract_32/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.subtract_32/fake_quantize/SymmQuant/mul" -> "maskrcnn/tf.math.subtract_32/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_32/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.__operators__.add_37/AddV2"; +"maskrcnn/tf.math.subtract_32/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_4/concat"; +"maskrcnn/tf.math.subtract_33/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_33/fake_quantize/SymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.subtract_33/fake_quantize/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_33/fake_quantize/SymmQuant/Abs"; +"maskrcnn/tf.math.subtract_33/fake_quantize/SymmQuant/Abs" -> "maskrcnn/tf.math.subtract_33/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.subtract_33/fake_quantize/SymmQuant/add/y" -> "maskrcnn/tf.math.subtract_33/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.subtract_33/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.subtract_33/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.subtract_33/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.subtract_33/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_33/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_33/fake_quantize/SymmQuant/mul/ReadVariableOp"; +"maskrcnn/tf.math.subtract_33/fake_quantize/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.math.subtract_33/fake_quantize/SymmQuant/mul"; 
+"maskrcnn/tf.math.subtract_33/fake_quantize/SymmQuant/mul" -> "maskrcnn/tf.math.subtract_33/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_33/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.__operators__.add_38/AddV2"; +"maskrcnn/tf.math.subtract_33/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_4/concat"; +"maskrcnn/tf.math.subtract_24/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_24/fake_quantize/SymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.subtract_24/fake_quantize/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_24/fake_quantize/SymmQuant/Abs"; +"maskrcnn/tf.math.subtract_24/fake_quantize/SymmQuant/Abs" -> "maskrcnn/tf.math.subtract_24/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.subtract_24/fake_quantize/SymmQuant/add/y" -> "maskrcnn/tf.math.subtract_24/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.subtract_24/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.subtract_24/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.subtract_24/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.subtract_24/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_24/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_24/fake_quantize/SymmQuant/mul/ReadVariableOp"; +"maskrcnn/tf.math.subtract_24/fake_quantize/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.math.subtract_24/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.subtract_24/fake_quantize/SymmQuant/mul" -> "maskrcnn/tf.math.subtract_24/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_24/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.__operators__.add_33/AddV2"; +"maskrcnn/tf.math.subtract_24/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_3/concat"; +"maskrcnn/tf.math.subtract_25/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_25/fake_quantize/SymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.subtract_25/fake_quantize/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_25/fake_quantize/SymmQuant/Abs"; +"maskrcnn/tf.math.subtract_25/fake_quantize/SymmQuant/Abs" -> "maskrcnn/tf.math.subtract_25/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.subtract_25/fake_quantize/SymmQuant/add/y" -> "maskrcnn/tf.math.subtract_25/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.subtract_25/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.subtract_25/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.subtract_25/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.subtract_25/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_25/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_25/fake_quantize/SymmQuant/mul/ReadVariableOp"; +"maskrcnn/tf.math.subtract_25/fake_quantize/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.math.subtract_25/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.subtract_25/fake_quantize/SymmQuant/mul" -> "maskrcnn/tf.math.subtract_25/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_25/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.__operators__.add_34/AddV2"; +"maskrcnn/tf.math.subtract_25/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_3/concat"; +"maskrcnn/tf.math.subtract_16/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_16/fake_quantize/SymmQuant/Abs/ReadVariableOp"; 
+"maskrcnn/tf.math.subtract_16/fake_quantize/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_16/fake_quantize/SymmQuant/Abs"; +"maskrcnn/tf.math.subtract_16/fake_quantize/SymmQuant/Abs" -> "maskrcnn/tf.math.subtract_16/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.subtract_16/fake_quantize/SymmQuant/add/y" -> "maskrcnn/tf.math.subtract_16/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.subtract_16/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.subtract_16/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.subtract_16/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.subtract_16/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_16/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_16/fake_quantize/SymmQuant/mul/ReadVariableOp"; +"maskrcnn/tf.math.subtract_16/fake_quantize/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.math.subtract_16/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.subtract_16/fake_quantize/SymmQuant/mul" -> "maskrcnn/tf.math.subtract_16/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_16/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.__operators__.add_29/AddV2"; +"maskrcnn/tf.math.subtract_16/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_2/concat"; +"maskrcnn/tf.math.subtract_17/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_17/fake_quantize/SymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.subtract_17/fake_quantize/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_17/fake_quantize/SymmQuant/Abs"; +"maskrcnn/tf.math.subtract_17/fake_quantize/SymmQuant/Abs" -> "maskrcnn/tf.math.subtract_17/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.subtract_17/fake_quantize/SymmQuant/add/y" -> "maskrcnn/tf.math.subtract_17/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.subtract_17/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.subtract_17/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.subtract_17/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.subtract_17/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_17/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_17/fake_quantize/SymmQuant/mul/ReadVariableOp"; +"maskrcnn/tf.math.subtract_17/fake_quantize/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.math.subtract_17/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.subtract_17/fake_quantize/SymmQuant/mul" -> "maskrcnn/tf.math.subtract_17/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_17/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.__operators__.add_30/AddV2"; +"maskrcnn/tf.math.subtract_17/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_2/concat"; +"maskrcnn/tf.math.subtract_8/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_8/fake_quantize/SymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.subtract_8/fake_quantize/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_8/fake_quantize/SymmQuant/Abs"; +"maskrcnn/tf.math.subtract_8/fake_quantize/SymmQuant/Abs" -> "maskrcnn/tf.math.subtract_8/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.subtract_8/fake_quantize/SymmQuant/add/y" -> "maskrcnn/tf.math.subtract_8/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.subtract_8/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.subtract_8/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.subtract_8/fake_quantize/SymmQuant/add" -> 
"maskrcnn/tf.math.subtract_8/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_8/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_8/fake_quantize/SymmQuant/mul/ReadVariableOp"; +"maskrcnn/tf.math.subtract_8/fake_quantize/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.math.subtract_8/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.subtract_8/fake_quantize/SymmQuant/mul" -> "maskrcnn/tf.math.subtract_8/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_8/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.__operators__.add_25/AddV2"; +"maskrcnn/tf.math.subtract_8/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_1/concat"; +"maskrcnn/tf.math.subtract_9/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_9/fake_quantize/SymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.subtract_9/fake_quantize/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_9/fake_quantize/SymmQuant/Abs"; +"maskrcnn/tf.math.subtract_9/fake_quantize/SymmQuant/Abs" -> "maskrcnn/tf.math.subtract_9/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.subtract_9/fake_quantize/SymmQuant/add/y" -> "maskrcnn/tf.math.subtract_9/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.subtract_9/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.subtract_9/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.subtract_9/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.subtract_9/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_9/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_9/fake_quantize/SymmQuant/mul/ReadVariableOp"; +"maskrcnn/tf.math.subtract_9/fake_quantize/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.math.subtract_9/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.subtract_9/fake_quantize/SymmQuant/mul" -> "maskrcnn/tf.math.subtract_9/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_9/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.__operators__.add_26/AddV2"; +"maskrcnn/tf.math.subtract_9/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_1/concat"; +"maskrcnn/tf.math.subtract/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract/fake_quantize/SymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.subtract/fake_quantize/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract/fake_quantize/SymmQuant/Abs"; +"maskrcnn/tf.math.subtract/fake_quantize/SymmQuant/Abs" -> "maskrcnn/tf.math.subtract/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.subtract/fake_quantize/SymmQuant/add/y" -> "maskrcnn/tf.math.subtract/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.subtract/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.subtract/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.subtract/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.subtract/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract/fake_quantize/SymmQuant/mul/ReadVariableOp"; +"maskrcnn/tf.math.subtract/fake_quantize/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.math.subtract/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.subtract/fake_quantize/SymmQuant/mul" -> "maskrcnn/tf.math.subtract/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.__operators__.add_21/AddV2"; 
+"maskrcnn/tf.math.subtract/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat/concat"; +"maskrcnn/tf.math.subtract_1/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_1/fake_quantize/SymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.subtract_1/fake_quantize/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_1/fake_quantize/SymmQuant/Abs"; +"maskrcnn/tf.math.subtract_1/fake_quantize/SymmQuant/Abs" -> "maskrcnn/tf.math.subtract_1/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.subtract_1/fake_quantize/SymmQuant/add/y" -> "maskrcnn/tf.math.subtract_1/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.subtract_1/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.subtract_1/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.subtract_1/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.subtract_1/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_1/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_1/fake_quantize/SymmQuant/mul/ReadVariableOp"; +"maskrcnn/tf.math.subtract_1/fake_quantize/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.math.subtract_1/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.subtract_1/fake_quantize/SymmQuant/mul" -> "maskrcnn/tf.math.subtract_1/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_1/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.__operators__.add_22/AddV2"; +"maskrcnn/tf.math.subtract_1/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat/concat"; "maskrcnn/tf.__operators__.add_38/AddV2" -> "maskrcnn/tf.__operators__.add_38/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.__operators__.add_37/AddV2" -> "maskrcnn/tf.__operators__.add_37/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.__operators__.add_34/AddV2" -> "maskrcnn/tf.__operators__.add_34/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; @@ -10979,9 +10717,9 @@ args_0_1 -> "maskrcnn/image_info/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars "maskrcnn/tf.math.subtract_36/Sub/y" -> "maskrcnn/tf.math.subtract_36/Sub"; "maskrcnn/tf.math.subtract_36/Sub" -> "maskrcnn/tf.math.subtract_36/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_35/Sub/y" -> "maskrcnn/tf.math.subtract_35/Sub"; -"maskrcnn/tf.math.subtract_35/Sub" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_3/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_35/Sub" -> "maskrcnn/tf.math.subtract_35/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_34/Sub/y" -> "maskrcnn/tf.math.subtract_34/Sub"; -"maskrcnn/tf.math.subtract_34/Sub" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_2/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_34/Sub" -> "maskrcnn/tf.math.subtract_34/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_31/Sub/y" -> "maskrcnn/tf.math.subtract_31/Sub"; "maskrcnn/tf.math.subtract_31/Sub" -> "maskrcnn/tf.math.subtract_31/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_30/Sub/y" -> "maskrcnn/tf.math.subtract_30/Sub"; @@ -10991,9 +10729,9 @@ args_0_1 -> "maskrcnn/image_info/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars "maskrcnn/tf.math.subtract_28/Sub/y" -> "maskrcnn/tf.math.subtract_28/Sub"; "maskrcnn/tf.math.subtract_28/Sub" -> "maskrcnn/tf.math.subtract_28/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_27/Sub/y" -> 
"maskrcnn/tf.math.subtract_27/Sub"; -"maskrcnn/tf.math.subtract_27/Sub" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_3/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_27/Sub" -> "maskrcnn/tf.math.subtract_27/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_26/Sub/y" -> "maskrcnn/tf.math.subtract_26/Sub"; -"maskrcnn/tf.math.subtract_26/Sub" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_2/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_26/Sub" -> "maskrcnn/tf.math.subtract_26/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_23/Sub/y" -> "maskrcnn/tf.math.subtract_23/Sub"; "maskrcnn/tf.math.subtract_23/Sub" -> "maskrcnn/tf.math.subtract_23/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_22/Sub/y" -> "maskrcnn/tf.math.subtract_22/Sub"; @@ -11003,9 +10741,9 @@ args_0_1 -> "maskrcnn/image_info/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars "maskrcnn/tf.math.subtract_20/Sub/y" -> "maskrcnn/tf.math.subtract_20/Sub"; "maskrcnn/tf.math.subtract_20/Sub" -> "maskrcnn/tf.math.subtract_20/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_19/Sub/y" -> "maskrcnn/tf.math.subtract_19/Sub"; -"maskrcnn/tf.math.subtract_19/Sub" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_3/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_19/Sub" -> "maskrcnn/tf.math.subtract_19/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_18/Sub/y" -> "maskrcnn/tf.math.subtract_18/Sub"; -"maskrcnn/tf.math.subtract_18/Sub" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_2/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_18/Sub" -> "maskrcnn/tf.math.subtract_18/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_15/Sub/y" -> "maskrcnn/tf.math.subtract_15/Sub"; "maskrcnn/tf.math.subtract_15/Sub" -> "maskrcnn/tf.math.subtract_15/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_14/Sub/y" -> "maskrcnn/tf.math.subtract_14/Sub"; @@ -11015,9 +10753,9 @@ args_0_1 -> "maskrcnn/image_info/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars "maskrcnn/tf.math.subtract_12/Sub/y" -> "maskrcnn/tf.math.subtract_12/Sub"; "maskrcnn/tf.math.subtract_12/Sub" -> "maskrcnn/tf.math.subtract_12/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_11/Sub/y" -> "maskrcnn/tf.math.subtract_11/Sub"; -"maskrcnn/tf.math.subtract_11/Sub" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_3/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_11/Sub" -> "maskrcnn/tf.math.subtract_11/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_10/Sub/y" -> "maskrcnn/tf.math.subtract_10/Sub"; -"maskrcnn/tf.math.subtract_10/Sub" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_2/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_10/Sub" -> "maskrcnn/tf.math.subtract_10/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_7/Sub/y" -> "maskrcnn/tf.math.subtract_7/Sub"; "maskrcnn/tf.math.subtract_7/Sub" -> "maskrcnn/tf.math.subtract_7/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_6/Sub/y" -> "maskrcnn/tf.math.subtract_6/Sub"; @@ -11027,9 +10765,9 @@ args_0_1 -> "maskrcnn/image_info/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars "maskrcnn/tf.math.subtract_4/Sub/y" -> "maskrcnn/tf.math.subtract_4/Sub"; 
"maskrcnn/tf.math.subtract_4/Sub" -> "maskrcnn/tf.math.subtract_4/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_3/Sub/y" -> "maskrcnn/tf.math.subtract_3/Sub"; -"maskrcnn/tf.math.subtract_3/Sub" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_3/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_3/Sub" -> "maskrcnn/tf.math.subtract_3/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_2/Sub/y" -> "maskrcnn/tf.math.subtract_2/Sub"; -"maskrcnn/tf.math.subtract_2/Sub" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_2/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_2/Sub" -> "maskrcnn/tf.math.subtract_2/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_36/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_36/fake_quantize/SymmQuant/Abs/ReadVariableOp"; "maskrcnn/tf.math.subtract_36/fake_quantize/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_36/fake_quantize/SymmQuant/Abs"; "maskrcnn/tf.math.subtract_36/fake_quantize/SymmQuant/Abs" -> "maskrcnn/tf.math.subtract_36/fake_quantize/SymmQuant/add"; @@ -11070,22 +10808,26 @@ args_0_1 -> "maskrcnn/image_info/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars "maskrcnn/tf.math.subtract_39/fake_quantize/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.math.subtract_39/fake_quantize/SymmQuant/mul"; "maskrcnn/tf.math.subtract_39/fake_quantize/SymmQuant/mul" -> "maskrcnn/tf.math.subtract_39/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_39/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.stack_4/stack"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_2/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_2/Abs"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_2/Abs" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_2/add"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_2/add/y" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_2/add"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_2/add" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_2/mul"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_2/add" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_2/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_2/mul/ReadVariableOp" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_2/mul"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_2/mul" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_2/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_2/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_4/concat"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_3/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_3/Abs"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_3/Abs" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_3/add"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_3/add/y" -> 
"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_3/add"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_3/add" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_3/mul"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_3/add" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_3/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_3/mul/ReadVariableOp" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_3/mul"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_3/mul" -> "maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_3/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_32/fake_quantize/unified_scale_group/SymmQuant_3/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_4/concat"; +"maskrcnn/tf.math.subtract_34/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_34/fake_quantize/SymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.subtract_34/fake_quantize/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_34/fake_quantize/SymmQuant/Abs"; +"maskrcnn/tf.math.subtract_34/fake_quantize/SymmQuant/Abs" -> "maskrcnn/tf.math.subtract_34/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.subtract_34/fake_quantize/SymmQuant/add/y" -> "maskrcnn/tf.math.subtract_34/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.subtract_34/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.subtract_34/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.subtract_34/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.subtract_34/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_34/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_34/fake_quantize/SymmQuant/mul/ReadVariableOp"; +"maskrcnn/tf.math.subtract_34/fake_quantize/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.math.subtract_34/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.subtract_34/fake_quantize/SymmQuant/mul" -> "maskrcnn/tf.math.subtract_34/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_34/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_4/concat"; +"maskrcnn/tf.math.subtract_35/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_35/fake_quantize/SymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.subtract_35/fake_quantize/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_35/fake_quantize/SymmQuant/Abs"; +"maskrcnn/tf.math.subtract_35/fake_quantize/SymmQuant/Abs" -> "maskrcnn/tf.math.subtract_35/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.subtract_35/fake_quantize/SymmQuant/add/y" -> "maskrcnn/tf.math.subtract_35/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.subtract_35/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.subtract_35/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.subtract_35/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.subtract_35/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_35/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_35/fake_quantize/SymmQuant/mul/ReadVariableOp"; +"maskrcnn/tf.math.subtract_35/fake_quantize/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.math.subtract_35/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.subtract_35/fake_quantize/SymmQuant/mul" -> 
"maskrcnn/tf.math.subtract_35/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_35/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_4/concat"; "maskrcnn/tf.math.subtract_28/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_28/fake_quantize/SymmQuant/Abs/ReadVariableOp"; "maskrcnn/tf.math.subtract_28/fake_quantize/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_28/fake_quantize/SymmQuant/Abs"; "maskrcnn/tf.math.subtract_28/fake_quantize/SymmQuant/Abs" -> "maskrcnn/tf.math.subtract_28/fake_quantize/SymmQuant/add"; @@ -11126,22 +10868,26 @@ args_0_1 -> "maskrcnn/image_info/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars "maskrcnn/tf.math.subtract_31/fake_quantize/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.math.subtract_31/fake_quantize/SymmQuant/mul"; "maskrcnn/tf.math.subtract_31/fake_quantize/SymmQuant/mul" -> "maskrcnn/tf.math.subtract_31/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_31/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.stack_3/stack"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_2/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_2/Abs"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_2/Abs" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_2/add"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_2/add/y" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_2/add"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_2/add" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_2/mul"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_2/add" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_2/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_2/mul/ReadVariableOp" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_2/mul"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_2/mul" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_2/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_2/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_3/concat"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_3/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_3/Abs"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_3/Abs" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_3/add"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_3/add/y" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_3/add"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_3/add" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_3/mul"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_3/add" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_3/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_3/mul/ReadVariableOp" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_3/mul"; 
-"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_3/mul" -> "maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_3/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_24/fake_quantize/unified_scale_group/SymmQuant_3/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_3/concat"; +"maskrcnn/tf.math.subtract_26/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_26/fake_quantize/SymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.subtract_26/fake_quantize/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_26/fake_quantize/SymmQuant/Abs"; +"maskrcnn/tf.math.subtract_26/fake_quantize/SymmQuant/Abs" -> "maskrcnn/tf.math.subtract_26/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.subtract_26/fake_quantize/SymmQuant/add/y" -> "maskrcnn/tf.math.subtract_26/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.subtract_26/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.subtract_26/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.subtract_26/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.subtract_26/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_26/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_26/fake_quantize/SymmQuant/mul/ReadVariableOp"; +"maskrcnn/tf.math.subtract_26/fake_quantize/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.math.subtract_26/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.subtract_26/fake_quantize/SymmQuant/mul" -> "maskrcnn/tf.math.subtract_26/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_26/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_3/concat"; +"maskrcnn/tf.math.subtract_27/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_27/fake_quantize/SymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.subtract_27/fake_quantize/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_27/fake_quantize/SymmQuant/Abs"; +"maskrcnn/tf.math.subtract_27/fake_quantize/SymmQuant/Abs" -> "maskrcnn/tf.math.subtract_27/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.subtract_27/fake_quantize/SymmQuant/add/y" -> "maskrcnn/tf.math.subtract_27/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.subtract_27/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.subtract_27/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.subtract_27/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.subtract_27/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_27/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_27/fake_quantize/SymmQuant/mul/ReadVariableOp"; +"maskrcnn/tf.math.subtract_27/fake_quantize/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.math.subtract_27/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.subtract_27/fake_quantize/SymmQuant/mul" -> "maskrcnn/tf.math.subtract_27/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_27/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_3/concat"; "maskrcnn/tf.math.subtract_20/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_20/fake_quantize/SymmQuant/Abs/ReadVariableOp"; "maskrcnn/tf.math.subtract_20/fake_quantize/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_20/fake_quantize/SymmQuant/Abs"; "maskrcnn/tf.math.subtract_20/fake_quantize/SymmQuant/Abs" -> "maskrcnn/tf.math.subtract_20/fake_quantize/SymmQuant/add"; @@ -11182,22 +10928,26 @@ args_0_1 -> 
"maskrcnn/image_info/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars "maskrcnn/tf.math.subtract_23/fake_quantize/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.math.subtract_23/fake_quantize/SymmQuant/mul"; "maskrcnn/tf.math.subtract_23/fake_quantize/SymmQuant/mul" -> "maskrcnn/tf.math.subtract_23/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_23/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.stack_2/stack"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_2/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_2/Abs"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_2/Abs" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_2/add"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_2/add/y" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_2/add"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_2/add" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_2/mul"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_2/add" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_2/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_2/mul/ReadVariableOp" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_2/mul"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_2/mul" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_2/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_2/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_2/concat"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_3/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_3/Abs"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_3/Abs" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_3/add"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_3/add/y" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_3/add"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_3/add" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_3/mul"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_3/add" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_3/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_3/mul/ReadVariableOp" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_3/mul"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_3/mul" -> "maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_3/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_16/fake_quantize/unified_scale_group/SymmQuant_3/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_2/concat"; +"maskrcnn/tf.math.subtract_18/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_18/fake_quantize/SymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.subtract_18/fake_quantize/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_18/fake_quantize/SymmQuant/Abs"; 
+"maskrcnn/tf.math.subtract_18/fake_quantize/SymmQuant/Abs" -> "maskrcnn/tf.math.subtract_18/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.subtract_18/fake_quantize/SymmQuant/add/y" -> "maskrcnn/tf.math.subtract_18/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.subtract_18/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.subtract_18/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.subtract_18/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.subtract_18/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_18/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_18/fake_quantize/SymmQuant/mul/ReadVariableOp"; +"maskrcnn/tf.math.subtract_18/fake_quantize/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.math.subtract_18/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.subtract_18/fake_quantize/SymmQuant/mul" -> "maskrcnn/tf.math.subtract_18/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_18/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_2/concat"; +"maskrcnn/tf.math.subtract_19/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_19/fake_quantize/SymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.subtract_19/fake_quantize/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_19/fake_quantize/SymmQuant/Abs"; +"maskrcnn/tf.math.subtract_19/fake_quantize/SymmQuant/Abs" -> "maskrcnn/tf.math.subtract_19/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.subtract_19/fake_quantize/SymmQuant/add/y" -> "maskrcnn/tf.math.subtract_19/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.subtract_19/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.subtract_19/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.subtract_19/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.subtract_19/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_19/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_19/fake_quantize/SymmQuant/mul/ReadVariableOp"; +"maskrcnn/tf.math.subtract_19/fake_quantize/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.math.subtract_19/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.subtract_19/fake_quantize/SymmQuant/mul" -> "maskrcnn/tf.math.subtract_19/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_19/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_2/concat"; "maskrcnn/tf.math.subtract_12/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_12/fake_quantize/SymmQuant/Abs/ReadVariableOp"; "maskrcnn/tf.math.subtract_12/fake_quantize/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_12/fake_quantize/SymmQuant/Abs"; "maskrcnn/tf.math.subtract_12/fake_quantize/SymmQuant/Abs" -> "maskrcnn/tf.math.subtract_12/fake_quantize/SymmQuant/add"; @@ -11238,22 +10988,26 @@ args_0_1 -> "maskrcnn/image_info/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars "maskrcnn/tf.math.subtract_15/fake_quantize/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.math.subtract_15/fake_quantize/SymmQuant/mul"; "maskrcnn/tf.math.subtract_15/fake_quantize/SymmQuant/mul" -> "maskrcnn/tf.math.subtract_15/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_15/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.stack_1/stack"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_2/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_2/Abs"; 
-"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_2/Abs" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_2/add"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_2/add/y" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_2/add"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_2/add" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_2/mul"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_2/add" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_2/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_2/mul/ReadVariableOp" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_2/mul"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_2/mul" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_2/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_2/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_1/concat"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_3/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_3/Abs"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_3/Abs" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_3/add"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_3/add/y" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_3/add"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_3/add" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_3/mul"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_3/add" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_3/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_3/mul/ReadVariableOp" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_3/mul"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_3/mul" -> "maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_3/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_8/fake_quantize/unified_scale_group/SymmQuant_3/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_1/concat"; +"maskrcnn/tf.math.subtract_10/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_10/fake_quantize/SymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.subtract_10/fake_quantize/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_10/fake_quantize/SymmQuant/Abs"; +"maskrcnn/tf.math.subtract_10/fake_quantize/SymmQuant/Abs" -> "maskrcnn/tf.math.subtract_10/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.subtract_10/fake_quantize/SymmQuant/add/y" -> "maskrcnn/tf.math.subtract_10/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.subtract_10/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.subtract_10/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.subtract_10/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.subtract_10/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_10/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_10/fake_quantize/SymmQuant/mul/ReadVariableOp"; 
+"maskrcnn/tf.math.subtract_10/fake_quantize/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.math.subtract_10/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.subtract_10/fake_quantize/SymmQuant/mul" -> "maskrcnn/tf.math.subtract_10/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_10/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_1/concat"; +"maskrcnn/tf.math.subtract_11/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_11/fake_quantize/SymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.subtract_11/fake_quantize/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_11/fake_quantize/SymmQuant/Abs"; +"maskrcnn/tf.math.subtract_11/fake_quantize/SymmQuant/Abs" -> "maskrcnn/tf.math.subtract_11/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.subtract_11/fake_quantize/SymmQuant/add/y" -> "maskrcnn/tf.math.subtract_11/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.subtract_11/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.subtract_11/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.subtract_11/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.subtract_11/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_11/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_11/fake_quantize/SymmQuant/mul/ReadVariableOp"; +"maskrcnn/tf.math.subtract_11/fake_quantize/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.math.subtract_11/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.subtract_11/fake_quantize/SymmQuant/mul" -> "maskrcnn/tf.math.subtract_11/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_11/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_1/concat"; "maskrcnn/tf.math.subtract_4/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_4/fake_quantize/SymmQuant/Abs/ReadVariableOp"; "maskrcnn/tf.math.subtract_4/fake_quantize/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_4/fake_quantize/SymmQuant/Abs"; "maskrcnn/tf.math.subtract_4/fake_quantize/SymmQuant/Abs" -> "maskrcnn/tf.math.subtract_4/fake_quantize/SymmQuant/add"; @@ -11294,22 +11048,26 @@ args_0_1 -> "maskrcnn/image_info/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars "maskrcnn/tf.math.subtract_7/fake_quantize/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.math.subtract_7/fake_quantize/SymmQuant/mul"; "maskrcnn/tf.math.subtract_7/fake_quantize/SymmQuant/mul" -> "maskrcnn/tf.math.subtract_7/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_7/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.stack/stack"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_2/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_2/Abs"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_2/Abs" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_2/add"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_2/add/y" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_2/add"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_2/add" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_2/mul"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_2/add" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_2/FakeQuantWithMinMaxVars"; 
-"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_2/mul/ReadVariableOp" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_2/mul"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_2/mul" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_2/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_2/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat/concat"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_3/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_3/Abs"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_3/Abs" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_3/add"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_3/add/y" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_3/add"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_3/add" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_3/mul"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_3/add" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_3/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_3/mul/ReadVariableOp" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_3/mul"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_3/mul" -> "maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_3/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract/fake_quantize/unified_scale_group/SymmQuant_3/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat/concat"; +"maskrcnn/tf.math.subtract_2/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_2/fake_quantize/SymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.subtract_2/fake_quantize/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_2/fake_quantize/SymmQuant/Abs"; +"maskrcnn/tf.math.subtract_2/fake_quantize/SymmQuant/Abs" -> "maskrcnn/tf.math.subtract_2/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.subtract_2/fake_quantize/SymmQuant/add/y" -> "maskrcnn/tf.math.subtract_2/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.subtract_2/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.subtract_2/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.subtract_2/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.subtract_2/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_2/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_2/fake_quantize/SymmQuant/mul/ReadVariableOp"; +"maskrcnn/tf.math.subtract_2/fake_quantize/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.math.subtract_2/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.subtract_2/fake_quantize/SymmQuant/mul" -> "maskrcnn/tf.math.subtract_2/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_2/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat/concat"; +"maskrcnn/tf.math.subtract_3/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_3/fake_quantize/SymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.subtract_3/fake_quantize/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_3/fake_quantize/SymmQuant/Abs"; +"maskrcnn/tf.math.subtract_3/fake_quantize/SymmQuant/Abs" -> 
"maskrcnn/tf.math.subtract_3/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.subtract_3/fake_quantize/SymmQuant/add/y" -> "maskrcnn/tf.math.subtract_3/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.subtract_3/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.subtract_3/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.subtract_3/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.subtract_3/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_3/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_3/fake_quantize/SymmQuant/mul/ReadVariableOp"; +"maskrcnn/tf.math.subtract_3/fake_quantize/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.math.subtract_3/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.subtract_3/fake_quantize/SymmQuant/mul" -> "maskrcnn/tf.math.subtract_3/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_3/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat/concat"; "maskrcnn/tf.concat_4/concat/axis" -> "maskrcnn/tf.concat_4/concat"; "maskrcnn/tf.concat_4/concat" -> "maskrcnn/tf.math.minimum_14/Minimum"; "maskrcnn/tf.stack_4/stack" -> "maskrcnn/tf.math.minimum_14/Minimum"; @@ -11515,31 +11273,31 @@ args_0_1 -> "maskrcnn/image_info/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars "maskrcnn/tf.image.combined_non_max_suppression/combined_non_max_suppression/Const" -> "maskrcnn/tf.image.combined_non_max_suppression/combined_non_max_suppression/CombinedNonMaxSuppression"; "maskrcnn/tf.image.combined_non_max_suppression/combined_non_max_suppression/CombinedNonMaxSuppression/max_output_size_per_class" -> "maskrcnn/tf.image.combined_non_max_suppression/combined_non_max_suppression/CombinedNonMaxSuppression"; "maskrcnn/tf.image.combined_non_max_suppression/combined_non_max_suppression/CombinedNonMaxSuppression" -> "maskrcnn/tf.concat_6/concat"; -"maskrcnn/tf.image.combined_non_max_suppression/combined_non_max_suppression/CombinedNonMaxSuppression" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.image.combined_non_max_suppression/combined_non_max_suppression/CombinedNonMaxSuppression" -> "maskrcnn/tf.concat_5/fake_quantize_I0/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.image.combined_non_max_suppression_1/combined_non_max_suppression/iou_threshold" -> "maskrcnn/tf.image.combined_non_max_suppression_1/combined_non_max_suppression/CombinedNonMaxSuppression"; "maskrcnn/tf.image.combined_non_max_suppression_1/combined_non_max_suppression/score_threshold" -> "maskrcnn/tf.image.combined_non_max_suppression_1/combined_non_max_suppression/CombinedNonMaxSuppression"; "maskrcnn/tf.image.combined_non_max_suppression_1/combined_non_max_suppression/Const" -> "maskrcnn/tf.image.combined_non_max_suppression_1/combined_non_max_suppression/CombinedNonMaxSuppression"; "maskrcnn/tf.image.combined_non_max_suppression_1/combined_non_max_suppression/CombinedNonMaxSuppression/max_output_size_per_class" -> "maskrcnn/tf.image.combined_non_max_suppression_1/combined_non_max_suppression/CombinedNonMaxSuppression"; "maskrcnn/tf.image.combined_non_max_suppression_1/combined_non_max_suppression/CombinedNonMaxSuppression" -> "maskrcnn/tf.concat_6/concat"; -"maskrcnn/tf.image.combined_non_max_suppression_1/combined_non_max_suppression/CombinedNonMaxSuppression" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars"; 
+"maskrcnn/tf.image.combined_non_max_suppression_1/combined_non_max_suppression/CombinedNonMaxSuppression" -> "maskrcnn/tf.concat_5/fake_quantize_I1/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.image.combined_non_max_suppression_2/combined_non_max_suppression/iou_threshold" -> "maskrcnn/tf.image.combined_non_max_suppression_2/combined_non_max_suppression/CombinedNonMaxSuppression"; "maskrcnn/tf.image.combined_non_max_suppression_2/combined_non_max_suppression/score_threshold" -> "maskrcnn/tf.image.combined_non_max_suppression_2/combined_non_max_suppression/CombinedNonMaxSuppression"; "maskrcnn/tf.image.combined_non_max_suppression_2/combined_non_max_suppression/Const" -> "maskrcnn/tf.image.combined_non_max_suppression_2/combined_non_max_suppression/CombinedNonMaxSuppression"; "maskrcnn/tf.image.combined_non_max_suppression_2/combined_non_max_suppression/CombinedNonMaxSuppression/max_output_size_per_class" -> "maskrcnn/tf.image.combined_non_max_suppression_2/combined_non_max_suppression/CombinedNonMaxSuppression"; "maskrcnn/tf.image.combined_non_max_suppression_2/combined_non_max_suppression/CombinedNonMaxSuppression" -> "maskrcnn/tf.concat_6/concat"; -"maskrcnn/tf.image.combined_non_max_suppression_2/combined_non_max_suppression/CombinedNonMaxSuppression" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_2/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.image.combined_non_max_suppression_2/combined_non_max_suppression/CombinedNonMaxSuppression" -> "maskrcnn/tf.concat_5/fake_quantize_I2/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.image.combined_non_max_suppression_3/combined_non_max_suppression/iou_threshold" -> "maskrcnn/tf.image.combined_non_max_suppression_3/combined_non_max_suppression/CombinedNonMaxSuppression"; "maskrcnn/tf.image.combined_non_max_suppression_3/combined_non_max_suppression/score_threshold" -> "maskrcnn/tf.image.combined_non_max_suppression_3/combined_non_max_suppression/CombinedNonMaxSuppression"; "maskrcnn/tf.image.combined_non_max_suppression_3/combined_non_max_suppression/Const" -> "maskrcnn/tf.image.combined_non_max_suppression_3/combined_non_max_suppression/CombinedNonMaxSuppression"; "maskrcnn/tf.image.combined_non_max_suppression_3/combined_non_max_suppression/CombinedNonMaxSuppression/max_output_size_per_class" -> "maskrcnn/tf.image.combined_non_max_suppression_3/combined_non_max_suppression/CombinedNonMaxSuppression"; "maskrcnn/tf.image.combined_non_max_suppression_3/combined_non_max_suppression/CombinedNonMaxSuppression" -> "maskrcnn/tf.concat_6/concat"; -"maskrcnn/tf.image.combined_non_max_suppression_3/combined_non_max_suppression/CombinedNonMaxSuppression" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_3/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.image.combined_non_max_suppression_3/combined_non_max_suppression/CombinedNonMaxSuppression" -> "maskrcnn/tf.concat_5/fake_quantize_I3/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.image.combined_non_max_suppression_4/combined_non_max_suppression/iou_threshold" -> "maskrcnn/tf.image.combined_non_max_suppression_4/combined_non_max_suppression/CombinedNonMaxSuppression"; "maskrcnn/tf.image.combined_non_max_suppression_4/combined_non_max_suppression/score_threshold" -> "maskrcnn/tf.image.combined_non_max_suppression_4/combined_non_max_suppression/CombinedNonMaxSuppression"; "maskrcnn/tf.image.combined_non_max_suppression_4/combined_non_max_suppression/Const" -> "maskrcnn/tf.image.combined_non_max_suppression_4/combined_non_max_suppression/CombinedNonMaxSuppression"; 
"maskrcnn/tf.image.combined_non_max_suppression_4/combined_non_max_suppression/CombinedNonMaxSuppression/max_output_size_per_class" -> "maskrcnn/tf.image.combined_non_max_suppression_4/combined_non_max_suppression/CombinedNonMaxSuppression"; "maskrcnn/tf.image.combined_non_max_suppression_4/combined_non_max_suppression/CombinedNonMaxSuppression" -> "maskrcnn/tf.concat_6/concat"; -"maskrcnn/tf.image.combined_non_max_suppression_4/combined_non_max_suppression/CombinedNonMaxSuppression" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_4/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.image.combined_non_max_suppression_4/combined_non_max_suppression/CombinedNonMaxSuppression" -> "maskrcnn/tf.concat_5/fake_quantize_I4/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.concat_6/concat/axis" -> "maskrcnn/tf.concat_6/concat"; "maskrcnn/tf.concat_6/concat" -> "maskrcnn/tf.math.top_k/TopKV2"; "maskrcnn/tf.math.top_k/TopKV2/k" -> "maskrcnn/tf.math.top_k/TopKV2"; @@ -11563,56 +11321,56 @@ args_0_1 -> "maskrcnn/image_info/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars "maskrcnn/tf.ones/ones/Const" -> "maskrcnn/tf.ones/ones"; "maskrcnn/tf.ones/ones" -> "maskrcnn/tf.math.multiply_30/Mul"; "maskrcnn/tf.math.multiply_30/Mul" -> "maskrcnn/tf.stack_5/stack"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant/Abs/ReadVariableOp"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_1/Abs/ReadVariableOp"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_2/Abs/ReadVariableOp"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_3/Abs/ReadVariableOp"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_4/Abs/ReadVariableOp"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant/Abs"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant/Abs" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant/add"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant/add/y" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant/add"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant/add" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant/mul"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant/add" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant/mul/ReadVariableOp"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_1/mul/ReadVariableOp"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant/mul/ReadVariableOp/resource" -> 
"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_2/mul/ReadVariableOp"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_3/mul/ReadVariableOp"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_4/mul/ReadVariableOp"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant/mul"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant/mul" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_5/concat"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_1/Abs/ReadVariableOp" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_1/Abs"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_1/Abs" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_1/add"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_1/add/y" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_1/add"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_1/add" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_1/mul"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_1/add" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_1/mul/ReadVariableOp" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_1/mul"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_1/mul" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_5/concat"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_2/Abs/ReadVariableOp" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_2/Abs"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_2/Abs" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_2/add"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_2/add/y" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_2/add"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_2/add" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_2/mul"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_2/add" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_2/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_2/mul/ReadVariableOp" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_2/mul"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_2/mul" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_2/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_2/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_5/concat"; 
-"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_3/Abs/ReadVariableOp" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_3/Abs"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_3/Abs" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_3/add"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_3/add/y" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_3/add"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_3/add" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_3/mul"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_3/add" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_3/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_3/mul/ReadVariableOp" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_3/mul"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_3/mul" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_3/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_3/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_5/concat"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_4/Abs/ReadVariableOp" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_4/Abs"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_4/Abs" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_4/add"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_4/add/y" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_4/add"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_4/add" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_4/mul"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_4/add" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_4/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_4/mul/ReadVariableOp" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_4/mul"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_4/mul" -> "maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_4/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.concat_5/fake_quantize_I0/unified_scale_group/SymmQuant_4/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_5/concat"; +"maskrcnn/tf.concat_5/fake_quantize_I0/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.concat_5/fake_quantize_I0/SymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.concat_5/fake_quantize_I0/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.concat_5/fake_quantize_I0/SymmQuant/Abs"; +"maskrcnn/tf.concat_5/fake_quantize_I0/SymmQuant/Abs" -> "maskrcnn/tf.concat_5/fake_quantize_I0/SymmQuant/add"; +"maskrcnn/tf.concat_5/fake_quantize_I0/SymmQuant/add/y" -> "maskrcnn/tf.concat_5/fake_quantize_I0/SymmQuant/add"; +"maskrcnn/tf.concat_5/fake_quantize_I0/SymmQuant/add" -> "maskrcnn/tf.concat_5/fake_quantize_I0/SymmQuant/mul"; +"maskrcnn/tf.concat_5/fake_quantize_I0/SymmQuant/add" -> "maskrcnn/tf.concat_5/fake_quantize_I0/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.concat_5/fake_quantize_I0/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.concat_5/fake_quantize_I0/SymmQuant/mul/ReadVariableOp"; 
+"maskrcnn/tf.concat_5/fake_quantize_I0/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.concat_5/fake_quantize_I0/SymmQuant/mul"; +"maskrcnn/tf.concat_5/fake_quantize_I0/SymmQuant/mul" -> "maskrcnn/tf.concat_5/fake_quantize_I0/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.concat_5/fake_quantize_I0/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_5/concat"; +"maskrcnn/tf.concat_5/fake_quantize_I1/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.concat_5/fake_quantize_I1/SymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.concat_5/fake_quantize_I1/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.concat_5/fake_quantize_I1/SymmQuant/Abs"; +"maskrcnn/tf.concat_5/fake_quantize_I1/SymmQuant/Abs" -> "maskrcnn/tf.concat_5/fake_quantize_I1/SymmQuant/add"; +"maskrcnn/tf.concat_5/fake_quantize_I1/SymmQuant/add/y" -> "maskrcnn/tf.concat_5/fake_quantize_I1/SymmQuant/add"; +"maskrcnn/tf.concat_5/fake_quantize_I1/SymmQuant/add" -> "maskrcnn/tf.concat_5/fake_quantize_I1/SymmQuant/mul"; +"maskrcnn/tf.concat_5/fake_quantize_I1/SymmQuant/add" -> "maskrcnn/tf.concat_5/fake_quantize_I1/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.concat_5/fake_quantize_I1/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.concat_5/fake_quantize_I1/SymmQuant/mul/ReadVariableOp"; +"maskrcnn/tf.concat_5/fake_quantize_I1/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.concat_5/fake_quantize_I1/SymmQuant/mul"; +"maskrcnn/tf.concat_5/fake_quantize_I1/SymmQuant/mul" -> "maskrcnn/tf.concat_5/fake_quantize_I1/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.concat_5/fake_quantize_I1/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_5/concat"; +"maskrcnn/tf.concat_5/fake_quantize_I2/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.concat_5/fake_quantize_I2/SymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.concat_5/fake_quantize_I2/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.concat_5/fake_quantize_I2/SymmQuant/Abs"; +"maskrcnn/tf.concat_5/fake_quantize_I2/SymmQuant/Abs" -> "maskrcnn/tf.concat_5/fake_quantize_I2/SymmQuant/add"; +"maskrcnn/tf.concat_5/fake_quantize_I2/SymmQuant/add/y" -> "maskrcnn/tf.concat_5/fake_quantize_I2/SymmQuant/add"; +"maskrcnn/tf.concat_5/fake_quantize_I2/SymmQuant/add" -> "maskrcnn/tf.concat_5/fake_quantize_I2/SymmQuant/mul"; +"maskrcnn/tf.concat_5/fake_quantize_I2/SymmQuant/add" -> "maskrcnn/tf.concat_5/fake_quantize_I2/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.concat_5/fake_quantize_I2/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.concat_5/fake_quantize_I2/SymmQuant/mul/ReadVariableOp"; +"maskrcnn/tf.concat_5/fake_quantize_I2/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.concat_5/fake_quantize_I2/SymmQuant/mul"; +"maskrcnn/tf.concat_5/fake_quantize_I2/SymmQuant/mul" -> "maskrcnn/tf.concat_5/fake_quantize_I2/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.concat_5/fake_quantize_I2/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_5/concat"; +"maskrcnn/tf.concat_5/fake_quantize_I3/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.concat_5/fake_quantize_I3/SymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.concat_5/fake_quantize_I3/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.concat_5/fake_quantize_I3/SymmQuant/Abs"; +"maskrcnn/tf.concat_5/fake_quantize_I3/SymmQuant/Abs" -> "maskrcnn/tf.concat_5/fake_quantize_I3/SymmQuant/add"; +"maskrcnn/tf.concat_5/fake_quantize_I3/SymmQuant/add/y" -> "maskrcnn/tf.concat_5/fake_quantize_I3/SymmQuant/add"; +"maskrcnn/tf.concat_5/fake_quantize_I3/SymmQuant/add" -> "maskrcnn/tf.concat_5/fake_quantize_I3/SymmQuant/mul"; 
+"maskrcnn/tf.concat_5/fake_quantize_I3/SymmQuant/add" -> "maskrcnn/tf.concat_5/fake_quantize_I3/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.concat_5/fake_quantize_I3/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.concat_5/fake_quantize_I3/SymmQuant/mul/ReadVariableOp"; +"maskrcnn/tf.concat_5/fake_quantize_I3/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.concat_5/fake_quantize_I3/SymmQuant/mul"; +"maskrcnn/tf.concat_5/fake_quantize_I3/SymmQuant/mul" -> "maskrcnn/tf.concat_5/fake_quantize_I3/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.concat_5/fake_quantize_I3/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_5/concat"; +"maskrcnn/tf.concat_5/fake_quantize_I4/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.concat_5/fake_quantize_I4/SymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.concat_5/fake_quantize_I4/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.concat_5/fake_quantize_I4/SymmQuant/Abs"; +"maskrcnn/tf.concat_5/fake_quantize_I4/SymmQuant/Abs" -> "maskrcnn/tf.concat_5/fake_quantize_I4/SymmQuant/add"; +"maskrcnn/tf.concat_5/fake_quantize_I4/SymmQuant/add/y" -> "maskrcnn/tf.concat_5/fake_quantize_I4/SymmQuant/add"; +"maskrcnn/tf.concat_5/fake_quantize_I4/SymmQuant/add" -> "maskrcnn/tf.concat_5/fake_quantize_I4/SymmQuant/mul"; +"maskrcnn/tf.concat_5/fake_quantize_I4/SymmQuant/add" -> "maskrcnn/tf.concat_5/fake_quantize_I4/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.concat_5/fake_quantize_I4/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.concat_5/fake_quantize_I4/SymmQuant/mul/ReadVariableOp"; +"maskrcnn/tf.concat_5/fake_quantize_I4/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.concat_5/fake_quantize_I4/SymmQuant/mul"; +"maskrcnn/tf.concat_5/fake_quantize_I4/SymmQuant/mul" -> "maskrcnn/tf.concat_5/fake_quantize_I4/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.concat_5/fake_quantize_I4/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_5/concat"; "maskrcnn/tf.concat_5/concat/axis" -> "maskrcnn/tf.concat_5/concat"; "maskrcnn/tf.concat_5/concat" -> "maskrcnn/tf.compat.v1.gather_nd/GatherNd"; "maskrcnn/tf.stack_5/stack" -> "maskrcnn/tf.compat.v1.gather_nd/GatherNd"; @@ -11742,43 +11500,43 @@ args_0_1 -> "maskrcnn/image_info/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars "maskrcnn/tf.math.pow/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.math.truediv_3/truediv"; "maskrcnn/tf.expand_dims_12/ExpandDims/dim" -> "maskrcnn/tf.expand_dims_12/ExpandDims"; "maskrcnn/tf.expand_dims_12/ExpandDims" -> "maskrcnn/tf.math.truediv_1/truediv"; -"maskrcnn/tf.math.truediv_2/truediv" -> "maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.truediv_3/truediv" -> "maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.truediv_1/truediv" -> "maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant_2/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.truediv_2/truediv" -> "maskrcnn/tf.math.truediv_2/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.truediv_3/truediv" -> "maskrcnn/tf.math.truediv_3/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.truediv_1/truediv" -> "maskrcnn/tf.math.truediv_1/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_42/Sub/y" -> "maskrcnn/tf.math.subtract_42/Sub"; "maskrcnn/tf.math.subtract_42/Sub" -> "maskrcnn/tf.cast_23/Cast"; "maskrcnn/tf.math.subtract_42/Sub" -> "maskrcnn/tf.compat.v1.gather_1/GatherV2"; 
"maskrcnn/tf.math.subtract_42/Sub" -> "maskrcnn/tf.compat.v1.gather/GatherV2"; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp"; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant_1/Abs/ReadVariableOp"; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant_2/Abs/ReadVariableOp"; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant/Abs"; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant/Abs" -> "maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant/add"; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant/add/y" -> "maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant/add"; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant/add" -> "maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant/mul"; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant/add" -> "maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp"; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant_1/mul/ReadVariableOp"; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant_2/mul/ReadVariableOp"; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant/mul"; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant/mul" -> "maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.expand_dims_14/ExpandDims"; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant_1/Abs/ReadVariableOp" -> "maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant_1/Abs"; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant_1/Abs" -> "maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant_1/add"; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant_1/add/y" -> "maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant_1/add"; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant_1/add" -> "maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant_1/mul"; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant_1/add" -> "maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant_1/mul/ReadVariableOp" -> 
"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant_1/mul"; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant_1/mul" -> "maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.expand_dims_13/ExpandDims"; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant_2/Abs/ReadVariableOp" -> "maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant_2/Abs"; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant_2/Abs" -> "maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant_2/add"; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant_2/add/y" -> "maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant_2/add"; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant_2/add" -> "maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant_2/mul"; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant_2/add" -> "maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant_2/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant_2/mul/ReadVariableOp" -> "maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant_2/mul"; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant_2/mul" -> "maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant_2/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.truediv_1/fake_quantize/unified_scale_group/SymmQuant_2/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.__operators__.getitem_27/strided_slice"; +"maskrcnn/tf.math.truediv_2/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.truediv_2/fake_quantize/SymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.truediv_2/fake_quantize/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.truediv_2/fake_quantize/SymmQuant/Abs"; +"maskrcnn/tf.math.truediv_2/fake_quantize/SymmQuant/Abs" -> "maskrcnn/tf.math.truediv_2/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.truediv_2/fake_quantize/SymmQuant/add/y" -> "maskrcnn/tf.math.truediv_2/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.truediv_2/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.truediv_2/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.truediv_2/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.truediv_2/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.truediv_2/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.math.truediv_2/fake_quantize/SymmQuant/mul/ReadVariableOp"; +"maskrcnn/tf.math.truediv_2/fake_quantize/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.math.truediv_2/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.truediv_2/fake_quantize/SymmQuant/mul" -> "maskrcnn/tf.math.truediv_2/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.truediv_2/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.expand_dims_14/ExpandDims"; +"maskrcnn/tf.math.truediv_3/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.truediv_3/fake_quantize/SymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.truediv_3/fake_quantize/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.truediv_3/fake_quantize/SymmQuant/Abs"; +"maskrcnn/tf.math.truediv_3/fake_quantize/SymmQuant/Abs" -> "maskrcnn/tf.math.truediv_3/fake_quantize/SymmQuant/add"; 
+"maskrcnn/tf.math.truediv_3/fake_quantize/SymmQuant/add/y" -> "maskrcnn/tf.math.truediv_3/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.truediv_3/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.truediv_3/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.truediv_3/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.truediv_3/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.truediv_3/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.math.truediv_3/fake_quantize/SymmQuant/mul/ReadVariableOp"; +"maskrcnn/tf.math.truediv_3/fake_quantize/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.math.truediv_3/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.truediv_3/fake_quantize/SymmQuant/mul" -> "maskrcnn/tf.math.truediv_3/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.truediv_3/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.expand_dims_13/ExpandDims"; +"maskrcnn/tf.math.truediv_1/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.truediv_1/fake_quantize/SymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.truediv_1/fake_quantize/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.truediv_1/fake_quantize/SymmQuant/Abs"; +"maskrcnn/tf.math.truediv_1/fake_quantize/SymmQuant/Abs" -> "maskrcnn/tf.math.truediv_1/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.truediv_1/fake_quantize/SymmQuant/add/y" -> "maskrcnn/tf.math.truediv_1/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.truediv_1/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.truediv_1/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.truediv_1/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.truediv_1/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.truediv_1/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.math.truediv_1/fake_quantize/SymmQuant/mul/ReadVariableOp"; +"maskrcnn/tf.math.truediv_1/fake_quantize/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.math.truediv_1/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.truediv_1/fake_quantize/SymmQuant/mul" -> "maskrcnn/tf.math.truediv_1/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.truediv_1/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.__operators__.getitem_27/strided_slice"; "maskrcnn/tf.cast_23/Cast" -> "maskrcnn/tf.cast_23/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.__operators__.getitem_27/strided_slice/stack" -> "maskrcnn/tf.__operators__.getitem_27/strided_slice"; "maskrcnn/tf.__operators__.getitem_27/strided_slice/stack_1" -> "maskrcnn/tf.__operators__.getitem_27/strided_slice"; @@ -12092,9 +11850,9 @@ args_0_1 -> "maskrcnn/image_info/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars "maskrcnn/tf.math.multiply_32/fake_quantize/SymmQuant/mul" -> "maskrcnn/tf.math.multiply_32/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.multiply_32/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.math.truediv_6/truediv"; "maskrcnn/tf.math.subtract_44/Sub/y" -> "maskrcnn/tf.math.subtract_44/Sub"; -"maskrcnn/tf.math.subtract_44/Sub" -> "maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_44/Sub" -> "maskrcnn/tf.math.subtract_44/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_43/Sub/y" -> "maskrcnn/tf.math.subtract_43/Sub"; -"maskrcnn/tf.math.subtract_43/Sub" -> "maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars"; 
+"maskrcnn/tf.math.subtract_43/Sub" -> "maskrcnn/tf.math.subtract_43/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.__operators__.getitem_54/strided_slice/stack" -> "maskrcnn/tf.__operators__.getitem_54/strided_slice"; "maskrcnn/tf.__operators__.getitem_54/strided_slice/stack_1" -> "maskrcnn/tf.__operators__.getitem_54/strided_slice"; "maskrcnn/tf.__operators__.getitem_54/strided_slice/stack_2" -> "maskrcnn/tf.__operators__.getitem_54/strided_slice"; @@ -12207,33 +11965,35 @@ args_0_1 -> "maskrcnn/image_info/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars "maskrcnn/tf.math.truediv_8/truediv" -> "maskrcnn/tf.math.truediv_8/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.truediv_6/truediv/y" -> "maskrcnn/tf.math.truediv_6/truediv"; "maskrcnn/tf.math.truediv_6/truediv" -> "maskrcnn/tf.math.truediv_6/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp"; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/SymmQuant_1/Abs/ReadVariableOp"; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/SymmQuant/Abs"; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/SymmQuant/Abs" -> "maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/SymmQuant/add"; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/SymmQuant/add/y" -> "maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/SymmQuant/add"; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/SymmQuant/add" -> "maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/SymmQuant/mul"; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/SymmQuant/add" -> "maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp"; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/SymmQuant_1/mul/ReadVariableOp"; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/SymmQuant/mul"; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/SymmQuant/mul" -> "maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.expand_dims_16/ExpandDims"; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/SymmQuant_1/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/SymmQuant_1/Abs"; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/SymmQuant_1/Abs" -> "maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/SymmQuant_1/add"; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/SymmQuant_1/add/y" -> 
"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/SymmQuant_1/add"; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/SymmQuant_1/add" -> "maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/SymmQuant_1/mul"; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/SymmQuant_1/add" -> "maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/SymmQuant_1/mul/ReadVariableOp" -> "maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/SymmQuant_1/mul"; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/SymmQuant_1/mul" -> "maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_43/fake_quantize/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.expand_dims_15/ExpandDims"; -"maskrcnn/tf.__operators__.add_53/AddV2" -> "maskrcnn/tf.__operators__.add_53/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_51/AddV2" -> "maskrcnn/tf.__operators__.add_51/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_49/AddV2" -> "maskrcnn/tf.__operators__.add_49/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_47/AddV2" -> "maskrcnn/tf.__operators__.add_47/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_45/AddV2" -> "maskrcnn/tf.__operators__.add_45/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_43/AddV2" -> "maskrcnn/tf.__operators__.add_43/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_41/AddV2" -> "maskrcnn/tf.__operators__.add_41/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_44/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_44/fake_quantize/SymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.subtract_44/fake_quantize/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_44/fake_quantize/SymmQuant/Abs"; +"maskrcnn/tf.math.subtract_44/fake_quantize/SymmQuant/Abs" -> "maskrcnn/tf.math.subtract_44/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.subtract_44/fake_quantize/SymmQuant/add/y" -> "maskrcnn/tf.math.subtract_44/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.subtract_44/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.subtract_44/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.subtract_44/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.subtract_44/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_44/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_44/fake_quantize/SymmQuant/mul/ReadVariableOp"; +"maskrcnn/tf.math.subtract_44/fake_quantize/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.math.subtract_44/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.subtract_44/fake_quantize/SymmQuant/mul" -> "maskrcnn/tf.math.subtract_44/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_44/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.expand_dims_16/ExpandDims"; +"maskrcnn/tf.math.subtract_43/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_43/fake_quantize/SymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.subtract_43/fake_quantize/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_43/fake_quantize/SymmQuant/Abs"; 
+"maskrcnn/tf.math.subtract_43/fake_quantize/SymmQuant/Abs" -> "maskrcnn/tf.math.subtract_43/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.subtract_43/fake_quantize/SymmQuant/add/y" -> "maskrcnn/tf.math.subtract_43/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.subtract_43/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.subtract_43/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.subtract_43/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.subtract_43/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_43/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_43/fake_quantize/SymmQuant/mul/ReadVariableOp"; +"maskrcnn/tf.math.subtract_43/fake_quantize/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.math.subtract_43/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.subtract_43/fake_quantize/SymmQuant/mul" -> "maskrcnn/tf.math.subtract_43/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_43/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.expand_dims_15/ExpandDims"; +"maskrcnn/tf.__operators__.add_41/AddV2" -> "maskrcnn/tf.stack_7/stack"; +"maskrcnn/tf.__operators__.add_43/AddV2" -> "maskrcnn/tf.stack_7/stack"; +"maskrcnn/tf.__operators__.add_45/AddV2" -> "maskrcnn/tf.stack_7/stack"; +"maskrcnn/tf.__operators__.add_47/AddV2" -> "maskrcnn/tf.stack_7/stack"; +"maskrcnn/tf.__operators__.add_49/AddV2" -> "maskrcnn/tf.stack_7/stack"; +"maskrcnn/tf.__operators__.add_51/AddV2" -> "maskrcnn/tf.stack_7/stack"; +"maskrcnn/tf.__operators__.add_53/AddV2" -> "maskrcnn/tf.stack_7/stack"; +"maskrcnn/tf.stack_7/stack" -> "maskrcnn/tf.math.floor/Floor"; +"maskrcnn/tf.stack_7/stack" -> "maskrcnn/tf.math.subtract_45/fake_quantize_I0/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.__operators__.getitem_52/strided_slice/stack" -> "maskrcnn/tf.__operators__.getitem_52/strided_slice"; "maskrcnn/tf.__operators__.getitem_52/strided_slice/stack_1" -> "maskrcnn/tf.__operators__.getitem_52/strided_slice"; "maskrcnn/tf.__operators__.getitem_52/strided_slice/stack_2" -> "maskrcnn/tf.__operators__.getitem_52/strided_slice"; @@ -12336,169 +12096,47 @@ args_0_1 -> "maskrcnn/image_info/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars "maskrcnn/tf.expand_dims_15/ExpandDims" -> "maskrcnn/tf.concat_12/concat"; "maskrcnn/tf.expand_dims_16/ExpandDims/dim" -> "maskrcnn/tf.expand_dims_16/ExpandDims"; "maskrcnn/tf.expand_dims_16/ExpandDims" -> "maskrcnn/tf.concat_12/concat"; -"maskrcnn/tf.__operators__.add_41/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_41/fake_quantize/SymmQuant/Abs/ReadVariableOp"; -"maskrcnn/tf.__operators__.add_41/fake_quantize/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_41/fake_quantize/SymmQuant/Abs"; -"maskrcnn/tf.__operators__.add_41/fake_quantize/SymmQuant/Abs" -> "maskrcnn/tf.__operators__.add_41/fake_quantize/SymmQuant/add"; -"maskrcnn/tf.__operators__.add_41/fake_quantize/SymmQuant/add/y" -> "maskrcnn/tf.__operators__.add_41/fake_quantize/SymmQuant/add"; -"maskrcnn/tf.__operators__.add_41/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.__operators__.add_41/fake_quantize/SymmQuant/mul"; -"maskrcnn/tf.__operators__.add_41/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.__operators__.add_41/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_41/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_41/fake_quantize/SymmQuant/mul/ReadVariableOp"; 
-"maskrcnn/tf.__operators__.add_41/fake_quantize/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_41/fake_quantize/SymmQuant/mul"; -"maskrcnn/tf.__operators__.add_41/fake_quantize/SymmQuant/mul" -> "maskrcnn/tf.__operators__.add_41/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_41/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.stack_7/stack"; -"maskrcnn/tf.__operators__.add_43/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_43/fake_quantize/SymmQuant/Abs/ReadVariableOp"; -"maskrcnn/tf.__operators__.add_43/fake_quantize/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_43/fake_quantize/SymmQuant/Abs"; -"maskrcnn/tf.__operators__.add_43/fake_quantize/SymmQuant/Abs" -> "maskrcnn/tf.__operators__.add_43/fake_quantize/SymmQuant/add"; -"maskrcnn/tf.__operators__.add_43/fake_quantize/SymmQuant/add/y" -> "maskrcnn/tf.__operators__.add_43/fake_quantize/SymmQuant/add"; -"maskrcnn/tf.__operators__.add_43/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.__operators__.add_43/fake_quantize/SymmQuant/mul"; -"maskrcnn/tf.__operators__.add_43/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.__operators__.add_43/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_43/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_43/fake_quantize/SymmQuant/mul/ReadVariableOp"; -"maskrcnn/tf.__operators__.add_43/fake_quantize/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_43/fake_quantize/SymmQuant/mul"; -"maskrcnn/tf.__operators__.add_43/fake_quantize/SymmQuant/mul" -> "maskrcnn/tf.__operators__.add_43/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_43/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.stack_7/stack"; -"maskrcnn/tf.__operators__.add_45/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_45/fake_quantize/SymmQuant/Abs/ReadVariableOp"; -"maskrcnn/tf.__operators__.add_45/fake_quantize/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_45/fake_quantize/SymmQuant/Abs"; -"maskrcnn/tf.__operators__.add_45/fake_quantize/SymmQuant/Abs" -> "maskrcnn/tf.__operators__.add_45/fake_quantize/SymmQuant/add"; -"maskrcnn/tf.__operators__.add_45/fake_quantize/SymmQuant/add/y" -> "maskrcnn/tf.__operators__.add_45/fake_quantize/SymmQuant/add"; -"maskrcnn/tf.__operators__.add_45/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.__operators__.add_45/fake_quantize/SymmQuant/mul"; -"maskrcnn/tf.__operators__.add_45/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.__operators__.add_45/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_45/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_45/fake_quantize/SymmQuant/mul/ReadVariableOp"; -"maskrcnn/tf.__operators__.add_45/fake_quantize/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_45/fake_quantize/SymmQuant/mul"; -"maskrcnn/tf.__operators__.add_45/fake_quantize/SymmQuant/mul" -> "maskrcnn/tf.__operators__.add_45/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_45/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.stack_7/stack"; -"maskrcnn/tf.__operators__.add_47/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_47/fake_quantize/SymmQuant/Abs/ReadVariableOp"; -"maskrcnn/tf.__operators__.add_47/fake_quantize/SymmQuant/Abs/ReadVariableOp" -> 
"maskrcnn/tf.__operators__.add_47/fake_quantize/SymmQuant/Abs"; -"maskrcnn/tf.__operators__.add_47/fake_quantize/SymmQuant/Abs" -> "maskrcnn/tf.__operators__.add_47/fake_quantize/SymmQuant/add"; -"maskrcnn/tf.__operators__.add_47/fake_quantize/SymmQuant/add/y" -> "maskrcnn/tf.__operators__.add_47/fake_quantize/SymmQuant/add"; -"maskrcnn/tf.__operators__.add_47/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.__operators__.add_47/fake_quantize/SymmQuant/mul"; -"maskrcnn/tf.__operators__.add_47/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.__operators__.add_47/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_47/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_47/fake_quantize/SymmQuant/mul/ReadVariableOp"; -"maskrcnn/tf.__operators__.add_47/fake_quantize/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_47/fake_quantize/SymmQuant/mul"; -"maskrcnn/tf.__operators__.add_47/fake_quantize/SymmQuant/mul" -> "maskrcnn/tf.__operators__.add_47/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_47/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.stack_7/stack"; -"maskrcnn/tf.__operators__.add_49/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_49/fake_quantize/SymmQuant/Abs/ReadVariableOp"; -"maskrcnn/tf.__operators__.add_49/fake_quantize/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_49/fake_quantize/SymmQuant/Abs"; -"maskrcnn/tf.__operators__.add_49/fake_quantize/SymmQuant/Abs" -> "maskrcnn/tf.__operators__.add_49/fake_quantize/SymmQuant/add"; -"maskrcnn/tf.__operators__.add_49/fake_quantize/SymmQuant/add/y" -> "maskrcnn/tf.__operators__.add_49/fake_quantize/SymmQuant/add"; -"maskrcnn/tf.__operators__.add_49/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.__operators__.add_49/fake_quantize/SymmQuant/mul"; -"maskrcnn/tf.__operators__.add_49/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.__operators__.add_49/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_49/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_49/fake_quantize/SymmQuant/mul/ReadVariableOp"; -"maskrcnn/tf.__operators__.add_49/fake_quantize/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_49/fake_quantize/SymmQuant/mul"; -"maskrcnn/tf.__operators__.add_49/fake_quantize/SymmQuant/mul" -> "maskrcnn/tf.__operators__.add_49/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_49/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.stack_7/stack"; -"maskrcnn/tf.__operators__.add_51/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_51/fake_quantize/SymmQuant/Abs/ReadVariableOp"; -"maskrcnn/tf.__operators__.add_51/fake_quantize/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_51/fake_quantize/SymmQuant/Abs"; -"maskrcnn/tf.__operators__.add_51/fake_quantize/SymmQuant/Abs" -> "maskrcnn/tf.__operators__.add_51/fake_quantize/SymmQuant/add"; -"maskrcnn/tf.__operators__.add_51/fake_quantize/SymmQuant/add/y" -> "maskrcnn/tf.__operators__.add_51/fake_quantize/SymmQuant/add"; -"maskrcnn/tf.__operators__.add_51/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.__operators__.add_51/fake_quantize/SymmQuant/mul"; -"maskrcnn/tf.__operators__.add_51/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.__operators__.add_51/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; 
-"maskrcnn/tf.__operators__.add_51/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_51/fake_quantize/SymmQuant/mul/ReadVariableOp"; -"maskrcnn/tf.__operators__.add_51/fake_quantize/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_51/fake_quantize/SymmQuant/mul"; -"maskrcnn/tf.__operators__.add_51/fake_quantize/SymmQuant/mul" -> "maskrcnn/tf.__operators__.add_51/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_51/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.stack_7/stack"; -"maskrcnn/tf.__operators__.add_53/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_53/fake_quantize/SymmQuant/Abs/ReadVariableOp"; -"maskrcnn/tf.__operators__.add_53/fake_quantize/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_53/fake_quantize/SymmQuant/Abs"; -"maskrcnn/tf.__operators__.add_53/fake_quantize/SymmQuant/Abs" -> "maskrcnn/tf.__operators__.add_53/fake_quantize/SymmQuant/add"; -"maskrcnn/tf.__operators__.add_53/fake_quantize/SymmQuant/add/y" -> "maskrcnn/tf.__operators__.add_53/fake_quantize/SymmQuant/add"; -"maskrcnn/tf.__operators__.add_53/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.__operators__.add_53/fake_quantize/SymmQuant/mul"; -"maskrcnn/tf.__operators__.add_53/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.__operators__.add_53/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_53/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_53/fake_quantize/SymmQuant/mul/ReadVariableOp"; -"maskrcnn/tf.__operators__.add_53/fake_quantize/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_53/fake_quantize/SymmQuant/mul"; -"maskrcnn/tf.__operators__.add_53/fake_quantize/SymmQuant/mul" -> "maskrcnn/tf.__operators__.add_53/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_53/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.stack_7/stack"; -"maskrcnn/tf.stack_7/stack" -> "maskrcnn/tf.math.floor/Floor"; -"maskrcnn/tf.stack_7/stack" -> "maskrcnn/tf.math.subtract_45/Sub"; -"maskrcnn/tf.__operators__.add_52/AddV2" -> "maskrcnn/tf.__operators__.add_52/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_50/AddV2" -> "maskrcnn/tf.__operators__.add_50/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_48/AddV2" -> "maskrcnn/tf.__operators__.add_48/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_46/AddV2" -> "maskrcnn/tf.__operators__.add_46/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_44/AddV2" -> "maskrcnn/tf.__operators__.add_44/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_42/AddV2" -> "maskrcnn/tf.__operators__.add_42/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_40/AddV2" -> "maskrcnn/tf.__operators__.add_40/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.__operators__.add_40/AddV2" -> "maskrcnn/tf.stack_6/stack"; +"maskrcnn/tf.__operators__.add_42/AddV2" -> "maskrcnn/tf.stack_6/stack"; +"maskrcnn/tf.__operators__.add_44/AddV2" -> "maskrcnn/tf.stack_6/stack"; +"maskrcnn/tf.__operators__.add_46/AddV2" -> "maskrcnn/tf.stack_6/stack"; +"maskrcnn/tf.__operators__.add_48/AddV2" -> "maskrcnn/tf.stack_6/stack"; +"maskrcnn/tf.__operators__.add_50/AddV2" -> "maskrcnn/tf.stack_6/stack"; +"maskrcnn/tf.__operators__.add_52/AddV2" -> 
"maskrcnn/tf.stack_6/stack"; "maskrcnn/tf.concat_12/concat/axis" -> "maskrcnn/tf.concat_12/concat"; "maskrcnn/tf.concat_12/concat" -> "maskrcnn/tf.__operators__.getitem_58/strided_slice"; "maskrcnn/tf.concat_12/concat" -> "maskrcnn/tf.__operators__.getitem_56/strided_slice"; "maskrcnn/tf.concat_12/concat" -> "maskrcnn/tf.__operators__.getitem_59/strided_slice"; "maskrcnn/tf.concat_12/concat" -> "maskrcnn/tf.__operators__.getitem_57/strided_slice"; -"maskrcnn/tf.__operators__.add_40/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_40/fake_quantize/SymmQuant/Abs/ReadVariableOp"; -"maskrcnn/tf.__operators__.add_40/fake_quantize/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_40/fake_quantize/SymmQuant/Abs"; -"maskrcnn/tf.__operators__.add_40/fake_quantize/SymmQuant/Abs" -> "maskrcnn/tf.__operators__.add_40/fake_quantize/SymmQuant/add"; -"maskrcnn/tf.__operators__.add_40/fake_quantize/SymmQuant/add/y" -> "maskrcnn/tf.__operators__.add_40/fake_quantize/SymmQuant/add"; -"maskrcnn/tf.__operators__.add_40/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.__operators__.add_40/fake_quantize/SymmQuant/mul"; -"maskrcnn/tf.__operators__.add_40/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.__operators__.add_40/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_40/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_40/fake_quantize/SymmQuant/mul/ReadVariableOp"; -"maskrcnn/tf.__operators__.add_40/fake_quantize/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_40/fake_quantize/SymmQuant/mul"; -"maskrcnn/tf.__operators__.add_40/fake_quantize/SymmQuant/mul" -> "maskrcnn/tf.__operators__.add_40/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_40/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.stack_6/stack"; -"maskrcnn/tf.__operators__.add_42/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_42/fake_quantize/SymmQuant/Abs/ReadVariableOp"; -"maskrcnn/tf.__operators__.add_42/fake_quantize/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_42/fake_quantize/SymmQuant/Abs"; -"maskrcnn/tf.__operators__.add_42/fake_quantize/SymmQuant/Abs" -> "maskrcnn/tf.__operators__.add_42/fake_quantize/SymmQuant/add"; -"maskrcnn/tf.__operators__.add_42/fake_quantize/SymmQuant/add/y" -> "maskrcnn/tf.__operators__.add_42/fake_quantize/SymmQuant/add"; -"maskrcnn/tf.__operators__.add_42/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.__operators__.add_42/fake_quantize/SymmQuant/mul"; -"maskrcnn/tf.__operators__.add_42/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.__operators__.add_42/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_42/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_42/fake_quantize/SymmQuant/mul/ReadVariableOp"; -"maskrcnn/tf.__operators__.add_42/fake_quantize/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_42/fake_quantize/SymmQuant/mul"; -"maskrcnn/tf.__operators__.add_42/fake_quantize/SymmQuant/mul" -> "maskrcnn/tf.__operators__.add_42/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_42/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.stack_6/stack"; -"maskrcnn/tf.__operators__.add_44/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_44/fake_quantize/SymmQuant/Abs/ReadVariableOp"; 
-"maskrcnn/tf.__operators__.add_44/fake_quantize/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_44/fake_quantize/SymmQuant/Abs"; -"maskrcnn/tf.__operators__.add_44/fake_quantize/SymmQuant/Abs" -> "maskrcnn/tf.__operators__.add_44/fake_quantize/SymmQuant/add"; -"maskrcnn/tf.__operators__.add_44/fake_quantize/SymmQuant/add/y" -> "maskrcnn/tf.__operators__.add_44/fake_quantize/SymmQuant/add"; -"maskrcnn/tf.__operators__.add_44/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.__operators__.add_44/fake_quantize/SymmQuant/mul"; -"maskrcnn/tf.__operators__.add_44/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.__operators__.add_44/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_44/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_44/fake_quantize/SymmQuant/mul/ReadVariableOp"; -"maskrcnn/tf.__operators__.add_44/fake_quantize/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_44/fake_quantize/SymmQuant/mul"; -"maskrcnn/tf.__operators__.add_44/fake_quantize/SymmQuant/mul" -> "maskrcnn/tf.__operators__.add_44/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_44/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.stack_6/stack"; -"maskrcnn/tf.__operators__.add_46/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_46/fake_quantize/SymmQuant/Abs/ReadVariableOp"; -"maskrcnn/tf.__operators__.add_46/fake_quantize/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_46/fake_quantize/SymmQuant/Abs"; -"maskrcnn/tf.__operators__.add_46/fake_quantize/SymmQuant/Abs" -> "maskrcnn/tf.__operators__.add_46/fake_quantize/SymmQuant/add"; -"maskrcnn/tf.__operators__.add_46/fake_quantize/SymmQuant/add/y" -> "maskrcnn/tf.__operators__.add_46/fake_quantize/SymmQuant/add"; -"maskrcnn/tf.__operators__.add_46/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.__operators__.add_46/fake_quantize/SymmQuant/mul"; -"maskrcnn/tf.__operators__.add_46/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.__operators__.add_46/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_46/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_46/fake_quantize/SymmQuant/mul/ReadVariableOp"; -"maskrcnn/tf.__operators__.add_46/fake_quantize/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_46/fake_quantize/SymmQuant/mul"; -"maskrcnn/tf.__operators__.add_46/fake_quantize/SymmQuant/mul" -> "maskrcnn/tf.__operators__.add_46/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_46/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.stack_6/stack"; -"maskrcnn/tf.__operators__.add_48/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_48/fake_quantize/SymmQuant/Abs/ReadVariableOp"; -"maskrcnn/tf.__operators__.add_48/fake_quantize/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_48/fake_quantize/SymmQuant/Abs"; -"maskrcnn/tf.__operators__.add_48/fake_quantize/SymmQuant/Abs" -> "maskrcnn/tf.__operators__.add_48/fake_quantize/SymmQuant/add"; -"maskrcnn/tf.__operators__.add_48/fake_quantize/SymmQuant/add/y" -> "maskrcnn/tf.__operators__.add_48/fake_quantize/SymmQuant/add"; -"maskrcnn/tf.__operators__.add_48/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.__operators__.add_48/fake_quantize/SymmQuant/mul"; -"maskrcnn/tf.__operators__.add_48/fake_quantize/SymmQuant/add" -> 
"maskrcnn/tf.__operators__.add_48/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_48/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_48/fake_quantize/SymmQuant/mul/ReadVariableOp"; -"maskrcnn/tf.__operators__.add_48/fake_quantize/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_48/fake_quantize/SymmQuant/mul"; -"maskrcnn/tf.__operators__.add_48/fake_quantize/SymmQuant/mul" -> "maskrcnn/tf.__operators__.add_48/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_48/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.stack_6/stack"; -"maskrcnn/tf.__operators__.add_50/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_50/fake_quantize/SymmQuant/Abs/ReadVariableOp"; -"maskrcnn/tf.__operators__.add_50/fake_quantize/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_50/fake_quantize/SymmQuant/Abs"; -"maskrcnn/tf.__operators__.add_50/fake_quantize/SymmQuant/Abs" -> "maskrcnn/tf.__operators__.add_50/fake_quantize/SymmQuant/add"; -"maskrcnn/tf.__operators__.add_50/fake_quantize/SymmQuant/add/y" -> "maskrcnn/tf.__operators__.add_50/fake_quantize/SymmQuant/add"; -"maskrcnn/tf.__operators__.add_50/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.__operators__.add_50/fake_quantize/SymmQuant/mul"; -"maskrcnn/tf.__operators__.add_50/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.__operators__.add_50/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_50/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_50/fake_quantize/SymmQuant/mul/ReadVariableOp"; -"maskrcnn/tf.__operators__.add_50/fake_quantize/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_50/fake_quantize/SymmQuant/mul"; -"maskrcnn/tf.__operators__.add_50/fake_quantize/SymmQuant/mul" -> "maskrcnn/tf.__operators__.add_50/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_50/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.stack_6/stack"; -"maskrcnn/tf.__operators__.add_52/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_52/fake_quantize/SymmQuant/Abs/ReadVariableOp"; -"maskrcnn/tf.__operators__.add_52/fake_quantize/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_52/fake_quantize/SymmQuant/Abs"; -"maskrcnn/tf.__operators__.add_52/fake_quantize/SymmQuant/Abs" -> "maskrcnn/tf.__operators__.add_52/fake_quantize/SymmQuant/add"; -"maskrcnn/tf.__operators__.add_52/fake_quantize/SymmQuant/add/y" -> "maskrcnn/tf.__operators__.add_52/fake_quantize/SymmQuant/add"; -"maskrcnn/tf.__operators__.add_52/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.__operators__.add_52/fake_quantize/SymmQuant/mul"; -"maskrcnn/tf.__operators__.add_52/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.__operators__.add_52/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_52/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_52/fake_quantize/SymmQuant/mul/ReadVariableOp"; -"maskrcnn/tf.__operators__.add_52/fake_quantize/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_52/fake_quantize/SymmQuant/mul"; -"maskrcnn/tf.__operators__.add_52/fake_quantize/SymmQuant/mul" -> "maskrcnn/tf.__operators__.add_52/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_52/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> 
"maskrcnn/tf.stack_6/stack"; -"maskrcnn/tf.math.floor/Floor" -> "maskrcnn/tf.math.maximum_7/Maximum"; +"maskrcnn/tf.math.floor/Floor" -> "maskrcnn/tf.math.floor/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.stack_6/stack" -> "maskrcnn/tf.math.floor_1/Floor"; -"maskrcnn/tf.stack_6/stack" -> "maskrcnn/tf.math.subtract_46/Sub"; +"maskrcnn/tf.stack_6/stack" -> "maskrcnn/tf.math.subtract_46/fake_quantize_I0/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.floor/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.floor/fake_quantize/SymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.floor/fake_quantize/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.floor/fake_quantize/SymmQuant/Abs"; +"maskrcnn/tf.math.floor/fake_quantize/SymmQuant/Abs" -> "maskrcnn/tf.math.floor/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.floor/fake_quantize/SymmQuant/add/y" -> "maskrcnn/tf.math.floor/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.floor/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.floor/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.floor/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.floor/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.floor/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.math.floor/fake_quantize/SymmQuant/mul/ReadVariableOp"; +"maskrcnn/tf.math.floor/fake_quantize/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.math.floor/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.floor/fake_quantize/SymmQuant/mul" -> "maskrcnn/tf.math.floor/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.floor/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.math.maximum_7/Maximum"; +"maskrcnn/tf.math.floor_1/Floor" -> "maskrcnn/tf.math.floor_1/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.__operators__.getitem_58/strided_slice/stack" -> "maskrcnn/tf.__operators__.getitem_58/strided_slice"; "maskrcnn/tf.__operators__.getitem_58/strided_slice/stack_1" -> "maskrcnn/tf.__operators__.getitem_58/strided_slice"; "maskrcnn/tf.__operators__.getitem_58/strided_slice/stack_2" -> "maskrcnn/tf.__operators__.getitem_58/strided_slice"; "maskrcnn/tf.__operators__.getitem_58/strided_slice" -> "maskrcnn/tf.expand_dims_19/ExpandDims"; "maskrcnn/tf.math.maximum_7/Maximum" -> "maskrcnn/tf.math.maximum_7/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.floor_1/Floor" -> "maskrcnn/tf.math.maximum_6/Maximum"; +"maskrcnn/tf.math.floor_1/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.floor_1/fake_quantize/SymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.floor_1/fake_quantize/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.floor_1/fake_quantize/SymmQuant/Abs"; +"maskrcnn/tf.math.floor_1/fake_quantize/SymmQuant/Abs" -> "maskrcnn/tf.math.floor_1/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.floor_1/fake_quantize/SymmQuant/add/y" -> "maskrcnn/tf.math.floor_1/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.floor_1/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.floor_1/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.floor_1/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.floor_1/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.floor_1/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.math.floor_1/fake_quantize/SymmQuant/mul/ReadVariableOp"; +"maskrcnn/tf.math.floor_1/fake_quantize/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.math.floor_1/fake_quantize/SymmQuant/mul"; 
+"maskrcnn/tf.math.floor_1/fake_quantize/SymmQuant/mul" -> "maskrcnn/tf.math.floor_1/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.floor_1/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.math.maximum_6/Maximum"; "maskrcnn/tf.math.maximum_7/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.maximum_7/fake_quantize/SymmQuant/Abs/ReadVariableOp"; "maskrcnn/tf.math.maximum_7/fake_quantize/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.maximum_7/fake_quantize/SymmQuant/Abs"; "maskrcnn/tf.math.maximum_7/fake_quantize/SymmQuant/Abs" -> "maskrcnn/tf.math.maximum_7/fake_quantize/SymmQuant/add"; @@ -12550,6 +12188,16 @@ args_0_1 -> "maskrcnn/image_info/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars "maskrcnn/tf.__operators__.getitem_59/strided_slice" -> "maskrcnn/tf.expand_dims_20/ExpandDims"; "maskrcnn/tf.__operators__.add_55/y" -> "maskrcnn/tf.__operators__.add_55/AddV2"; "maskrcnn/tf.__operators__.add_55/AddV2" -> "maskrcnn/tf.__operators__.add_55/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_46/fake_quantize_I0/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_46/fake_quantize_I0/SymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.subtract_46/fake_quantize_I0/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_46/fake_quantize_I0/SymmQuant/Abs"; +"maskrcnn/tf.math.subtract_46/fake_quantize_I0/SymmQuant/Abs" -> "maskrcnn/tf.math.subtract_46/fake_quantize_I0/SymmQuant/add"; +"maskrcnn/tf.math.subtract_46/fake_quantize_I0/SymmQuant/add/y" -> "maskrcnn/tf.math.subtract_46/fake_quantize_I0/SymmQuant/add"; +"maskrcnn/tf.math.subtract_46/fake_quantize_I0/SymmQuant/add" -> "maskrcnn/tf.math.subtract_46/fake_quantize_I0/SymmQuant/mul"; +"maskrcnn/tf.math.subtract_46/fake_quantize_I0/SymmQuant/add" -> "maskrcnn/tf.math.subtract_46/fake_quantize_I0/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_46/fake_quantize_I0/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_46/fake_quantize_I0/SymmQuant/mul/ReadVariableOp"; +"maskrcnn/tf.math.subtract_46/fake_quantize_I0/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.math.subtract_46/fake_quantize_I0/SymmQuant/mul"; +"maskrcnn/tf.math.subtract_46/fake_quantize_I0/SymmQuant/mul" -> "maskrcnn/tf.math.subtract_46/fake_quantize_I0/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_46/fake_quantize_I0/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.math.subtract_46/Sub"; "maskrcnn/tf.math.subtract_46/fake_quantize_I1/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_46/fake_quantize_I1/SymmQuant/Abs/ReadVariableOp"; "maskrcnn/tf.math.subtract_46/fake_quantize_I1/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_46/fake_quantize_I1/SymmQuant/Abs"; "maskrcnn/tf.math.subtract_46/fake_quantize_I1/SymmQuant/Abs" -> "maskrcnn/tf.math.subtract_46/fake_quantize_I1/SymmQuant/add"; @@ -12560,6 +12208,16 @@ args_0_1 -> "maskrcnn/image_info/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars "maskrcnn/tf.math.subtract_46/fake_quantize_I1/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.math.subtract_46/fake_quantize_I1/SymmQuant/mul"; "maskrcnn/tf.math.subtract_46/fake_quantize_I1/SymmQuant/mul" -> "maskrcnn/tf.math.subtract_46/fake_quantize_I1/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_46/fake_quantize_I1/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.math.subtract_46/Sub"; +"maskrcnn/tf.math.subtract_45/fake_quantize_I0/SymmQuant/Abs/ReadVariableOp/resource" -> 
"maskrcnn/tf.math.subtract_45/fake_quantize_I0/SymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.subtract_45/fake_quantize_I0/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_45/fake_quantize_I0/SymmQuant/Abs"; +"maskrcnn/tf.math.subtract_45/fake_quantize_I0/SymmQuant/Abs" -> "maskrcnn/tf.math.subtract_45/fake_quantize_I0/SymmQuant/add"; +"maskrcnn/tf.math.subtract_45/fake_quantize_I0/SymmQuant/add/y" -> "maskrcnn/tf.math.subtract_45/fake_quantize_I0/SymmQuant/add"; +"maskrcnn/tf.math.subtract_45/fake_quantize_I0/SymmQuant/add" -> "maskrcnn/tf.math.subtract_45/fake_quantize_I0/SymmQuant/mul"; +"maskrcnn/tf.math.subtract_45/fake_quantize_I0/SymmQuant/add" -> "maskrcnn/tf.math.subtract_45/fake_quantize_I0/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_45/fake_quantize_I0/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_45/fake_quantize_I0/SymmQuant/mul/ReadVariableOp"; +"maskrcnn/tf.math.subtract_45/fake_quantize_I0/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.math.subtract_45/fake_quantize_I0/SymmQuant/mul"; +"maskrcnn/tf.math.subtract_45/fake_quantize_I0/SymmQuant/mul" -> "maskrcnn/tf.math.subtract_45/fake_quantize_I0/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_45/fake_quantize_I0/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.math.subtract_45/Sub"; "maskrcnn/tf.math.subtract_45/fake_quantize_I1/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_45/fake_quantize_I1/SymmQuant/Abs/ReadVariableOp"; "maskrcnn/tf.math.subtract_45/fake_quantize_I1/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_45/fake_quantize_I1/SymmQuant/Abs"; "maskrcnn/tf.math.subtract_45/fake_quantize_I1/SymmQuant/Abs" -> "maskrcnn/tf.math.subtract_45/fake_quantize_I1/SymmQuant/add"; @@ -12903,20 +12561,20 @@ args_0_1 -> "maskrcnn/image_info/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars "maskrcnn/box-predict/Tensordot" -> "maskrcnn/box-predict/BiasAdd"; "maskrcnn/box-predict/BiasAdd/ReadVariableOp/resource" -> "maskrcnn/box-predict/BiasAdd/ReadVariableOp"; "maskrcnn/box-predict/BiasAdd/ReadVariableOp" -> "maskrcnn/box-predict/BiasAdd"; -"maskrcnn/box-predict/BiasAdd" -> "maskrcnn/box-predict/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/box-predict/BiasAdd" -> "maskrcnn/tf.reshape_35/fake_quantize_I0/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/box-predict/BiasAdd" -> Identity; "maskrcnn/tf.math.subtract_49/Sub/y" -> "maskrcnn/tf.math.subtract_49/Sub"; "maskrcnn/tf.math.subtract_49/Sub" -> "maskrcnn/tf.math.multiply_50/Mul"; -"maskrcnn/box-predict/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/box-predict/fake_quantize/SymmQuant/Abs/ReadVariableOp"; -"maskrcnn/box-predict/fake_quantize/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/box-predict/fake_quantize/SymmQuant/Abs"; -"maskrcnn/box-predict/fake_quantize/SymmQuant/Abs" -> "maskrcnn/box-predict/fake_quantize/SymmQuant/add"; -"maskrcnn/box-predict/fake_quantize/SymmQuant/add/y" -> "maskrcnn/box-predict/fake_quantize/SymmQuant/add"; -"maskrcnn/box-predict/fake_quantize/SymmQuant/add" -> "maskrcnn/box-predict/fake_quantize/SymmQuant/mul"; -"maskrcnn/box-predict/fake_quantize/SymmQuant/add" -> "maskrcnn/box-predict/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/box-predict/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/box-predict/fake_quantize/SymmQuant/mul/ReadVariableOp"; -"maskrcnn/box-predict/fake_quantize/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/box-predict/fake_quantize/SymmQuant/mul"; 
-"maskrcnn/box-predict/fake_quantize/SymmQuant/mul" -> "maskrcnn/box-predict/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/box-predict/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.reshape_35/Reshape"; -"maskrcnn/box-predict/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> Identity; +"maskrcnn/tf.reshape_35/fake_quantize_I0/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.reshape_35/fake_quantize_I0/SymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.reshape_35/fake_quantize_I0/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.reshape_35/fake_quantize_I0/SymmQuant/Abs"; +"maskrcnn/tf.reshape_35/fake_quantize_I0/SymmQuant/Abs" -> "maskrcnn/tf.reshape_35/fake_quantize_I0/SymmQuant/add"; +"maskrcnn/tf.reshape_35/fake_quantize_I0/SymmQuant/add/y" -> "maskrcnn/tf.reshape_35/fake_quantize_I0/SymmQuant/add"; +"maskrcnn/tf.reshape_35/fake_quantize_I0/SymmQuant/add" -> "maskrcnn/tf.reshape_35/fake_quantize_I0/SymmQuant/mul"; +"maskrcnn/tf.reshape_35/fake_quantize_I0/SymmQuant/add" -> "maskrcnn/tf.reshape_35/fake_quantize_I0/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.reshape_35/fake_quantize_I0/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.reshape_35/fake_quantize_I0/SymmQuant/mul/ReadVariableOp"; +"maskrcnn/tf.reshape_35/fake_quantize_I0/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.reshape_35/fake_quantize_I0/SymmQuant/mul"; +"maskrcnn/tf.reshape_35/fake_quantize_I0/SymmQuant/mul" -> "maskrcnn/tf.reshape_35/fake_quantize_I0/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.reshape_35/fake_quantize_I0/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.reshape_35/Reshape"; "maskrcnn/tf.stack_12/stack" -> "maskrcnn/tf.reshape_35/Reshape"; "maskrcnn/tf.math.multiply_50/Mul" -> "maskrcnn/tf.stack_14/stack"; "maskrcnn/tf.math.multiply_50/Mul" -> "maskrcnn/tf.stack_13/stack"; @@ -13235,38 +12893,34 @@ args_0_1 -> "maskrcnn/image_info/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars "maskrcnn/tf.math.multiply_57/fake_quantize/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.math.multiply_57/fake_quantize/SymmQuant/mul"; "maskrcnn/tf.math.multiply_57/fake_quantize/SymmQuant/mul" -> "maskrcnn/tf.math.multiply_57/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.multiply_57/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.math.subtract_53/Sub"; -"maskrcnn/tf.math.subtract_54/Sub" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_53/Sub" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_54/Sub" -> "maskrcnn/tf.math.subtract_54/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_53/Sub" -> "maskrcnn/tf.math.subtract_53/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.__operators__.getitem_60/strided_slice/stack" -> "maskrcnn/tf.__operators__.getitem_60/strided_slice"; "maskrcnn/tf.__operators__.getitem_60/strided_slice/stack_1" -> "maskrcnn/tf.__operators__.getitem_60/strided_slice"; "maskrcnn/tf.__operators__.getitem_60/strided_slice/stack_2" -> "maskrcnn/tf.__operators__.getitem_60/strided_slice"; "maskrcnn/tf.__operators__.getitem_60/strided_slice" -> "maskrcnn/tf.unstack_5/unstack"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp"; 
-"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_1/Abs/ReadVariableOp"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_2/Abs/ReadVariableOp"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_3/Abs/ReadVariableOp"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant/Abs"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant/Abs" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant/add"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant/add/y" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant/add"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant/add" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant/mul"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant/add" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_1/mul/ReadVariableOp"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_2/mul/ReadVariableOp"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_3/mul/ReadVariableOp"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant/mul"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant/mul" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.__operators__.add_65/AddV2"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_13/concat"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_1/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_1/Abs"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_1/Abs" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_1/add"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_1/add/y" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_1/add"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_1/add" -> 
"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_1/mul"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_1/add" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_1/mul/ReadVariableOp" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_1/mul"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_1/mul" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.__operators__.add_66/AddV2"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_13/concat"; +"maskrcnn/tf.math.subtract_53/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_53/fake_quantize/SymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.subtract_53/fake_quantize/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_53/fake_quantize/SymmQuant/Abs"; +"maskrcnn/tf.math.subtract_53/fake_quantize/SymmQuant/Abs" -> "maskrcnn/tf.math.subtract_53/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.subtract_53/fake_quantize/SymmQuant/add/y" -> "maskrcnn/tf.math.subtract_53/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.subtract_53/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.subtract_53/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.subtract_53/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.subtract_53/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_53/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_53/fake_quantize/SymmQuant/mul/ReadVariableOp"; +"maskrcnn/tf.math.subtract_53/fake_quantize/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.math.subtract_53/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.subtract_53/fake_quantize/SymmQuant/mul" -> "maskrcnn/tf.math.subtract_53/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_53/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.__operators__.add_65/AddV2"; +"maskrcnn/tf.math.subtract_53/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_13/concat"; +"maskrcnn/tf.math.subtract_54/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_54/fake_quantize/SymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.subtract_54/fake_quantize/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_54/fake_quantize/SymmQuant/Abs"; +"maskrcnn/tf.math.subtract_54/fake_quantize/SymmQuant/Abs" -> "maskrcnn/tf.math.subtract_54/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.subtract_54/fake_quantize/SymmQuant/add/y" -> "maskrcnn/tf.math.subtract_54/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.subtract_54/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.subtract_54/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.subtract_54/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.subtract_54/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_54/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_54/fake_quantize/SymmQuant/mul/ReadVariableOp"; +"maskrcnn/tf.math.subtract_54/fake_quantize/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.math.subtract_54/fake_quantize/SymmQuant/mul"; 
+"maskrcnn/tf.math.subtract_54/fake_quantize/SymmQuant/mul" -> "maskrcnn/tf.math.subtract_54/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_54/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.__operators__.add_66/AddV2"; +"maskrcnn/tf.math.subtract_54/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_13/concat"; "maskrcnn/tf.__operators__.add_66/AddV2" -> "maskrcnn/tf.__operators__.add_66/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.__operators__.add_65/AddV2" -> "maskrcnn/tf.__operators__.add_65/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.unstack_5/unstack" -> "maskrcnn/tf.math.subtract_60/Sub"; @@ -13302,9 +12956,9 @@ args_0_1 -> "maskrcnn/image_info/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars "maskrcnn/tf.math.subtract_57/Sub/y" -> "maskrcnn/tf.math.subtract_57/Sub"; "maskrcnn/tf.math.subtract_57/Sub" -> "maskrcnn/tf.math.subtract_57/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_56/Sub/y" -> "maskrcnn/tf.math.subtract_56/Sub"; -"maskrcnn/tf.math.subtract_56/Sub" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_3/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_56/Sub" -> "maskrcnn/tf.math.subtract_56/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_55/Sub/y" -> "maskrcnn/tf.math.subtract_55/Sub"; -"maskrcnn/tf.math.subtract_55/Sub" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_2/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_55/Sub" -> "maskrcnn/tf.math.subtract_55/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_57/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_57/fake_quantize/SymmQuant/Abs/ReadVariableOp"; "maskrcnn/tf.math.subtract_57/fake_quantize/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_57/fake_quantize/SymmQuant/Abs"; "maskrcnn/tf.math.subtract_57/fake_quantize/SymmQuant/Abs" -> "maskrcnn/tf.math.subtract_57/fake_quantize/SymmQuant/add"; @@ -13345,22 +12999,26 @@ args_0_1 -> "maskrcnn/image_info/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars "maskrcnn/tf.math.subtract_60/fake_quantize/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.math.subtract_60/fake_quantize/SymmQuant/mul"; "maskrcnn/tf.math.subtract_60/fake_quantize/SymmQuant/mul" -> "maskrcnn/tf.math.subtract_60/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_60/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.stack_15/stack"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_2/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_2/Abs"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_2/Abs" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_2/add"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_2/add/y" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_2/add"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_2/add" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_2/mul"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_2/add" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_2/FakeQuantWithMinMaxVars"; 
-"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_2/mul/ReadVariableOp" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_2/mul"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_2/mul" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_2/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_2/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_13/concat"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_3/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_3/Abs"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_3/Abs" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_3/add"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_3/add/y" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_3/add"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_3/add" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_3/mul"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_3/add" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_3/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_3/mul/ReadVariableOp" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_3/mul"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_3/mul" -> "maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_3/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_53/fake_quantize/unified_scale_group/SymmQuant_3/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_13/concat"; +"maskrcnn/tf.math.subtract_55/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_55/fake_quantize/SymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.subtract_55/fake_quantize/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_55/fake_quantize/SymmQuant/Abs"; +"maskrcnn/tf.math.subtract_55/fake_quantize/SymmQuant/Abs" -> "maskrcnn/tf.math.subtract_55/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.subtract_55/fake_quantize/SymmQuant/add/y" -> "maskrcnn/tf.math.subtract_55/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.subtract_55/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.subtract_55/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.subtract_55/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.subtract_55/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_55/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_55/fake_quantize/SymmQuant/mul/ReadVariableOp"; +"maskrcnn/tf.math.subtract_55/fake_quantize/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.math.subtract_55/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.subtract_55/fake_quantize/SymmQuant/mul" -> "maskrcnn/tf.math.subtract_55/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_55/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_13/concat"; +"maskrcnn/tf.math.subtract_56/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_56/fake_quantize/SymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.subtract_56/fake_quantize/SymmQuant/Abs/ReadVariableOp" -> 
"maskrcnn/tf.math.subtract_56/fake_quantize/SymmQuant/Abs"; +"maskrcnn/tf.math.subtract_56/fake_quantize/SymmQuant/Abs" -> "maskrcnn/tf.math.subtract_56/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.subtract_56/fake_quantize/SymmQuant/add/y" -> "maskrcnn/tf.math.subtract_56/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.subtract_56/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.subtract_56/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.subtract_56/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.subtract_56/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_56/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_56/fake_quantize/SymmQuant/mul/ReadVariableOp"; +"maskrcnn/tf.math.subtract_56/fake_quantize/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.math.subtract_56/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.subtract_56/fake_quantize/SymmQuant/mul" -> "maskrcnn/tf.math.subtract_56/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_56/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.concat_13/concat"; "maskrcnn/tf.concat_13/concat/axis" -> "maskrcnn/tf.concat_13/concat"; "maskrcnn/tf.concat_13/concat" -> "maskrcnn/tf.math.minimum_22/Minimum"; "maskrcnn/tf.stack_15/stack" -> "maskrcnn/tf.math.minimum_22/Minimum"; @@ -13563,43 +13221,43 @@ args_0_1 -> "maskrcnn/image_info/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars "maskrcnn/tf.math.pow_2/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.math.truediv_28/truediv"; "maskrcnn/tf.expand_dims_24/ExpandDims/dim" -> "maskrcnn/tf.expand_dims_24/ExpandDims"; "maskrcnn/tf.expand_dims_24/ExpandDims" -> "maskrcnn/tf.math.truediv_26/truediv"; -"maskrcnn/tf.math.truediv_27/truediv" -> "maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.truediv_28/truediv" -> "maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.truediv_26/truediv" -> "maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant_2/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.truediv_27/truediv" -> "maskrcnn/tf.math.truediv_27/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.truediv_28/truediv" -> "maskrcnn/tf.math.truediv_28/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.truediv_26/truediv" -> "maskrcnn/tf.math.truediv_26/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_64/Sub/y" -> "maskrcnn/tf.math.subtract_64/Sub"; "maskrcnn/tf.math.subtract_64/Sub" -> "maskrcnn/tf.cast_37/Cast"; "maskrcnn/tf.math.subtract_64/Sub" -> "maskrcnn/tf.compat.v1.gather_4/GatherV2"; "maskrcnn/tf.math.subtract_64/Sub" -> "maskrcnn/tf.compat.v1.gather_3/GatherV2"; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp"; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant_1/Abs/ReadVariableOp"; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant_2/Abs/ReadVariableOp"; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp" -> 
"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant/Abs"; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant/Abs" -> "maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant/add"; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant/add/y" -> "maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant/add"; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant/add" -> "maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant/mul"; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant/add" -> "maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp"; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant_1/mul/ReadVariableOp"; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant_2/mul/ReadVariableOp"; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant/mul"; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant/mul" -> "maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.expand_dims_26/ExpandDims"; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant_1/Abs/ReadVariableOp" -> "maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant_1/Abs"; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant_1/Abs" -> "maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant_1/add"; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant_1/add/y" -> "maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant_1/add"; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant_1/add" -> "maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant_1/mul"; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant_1/add" -> "maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant_1/mul/ReadVariableOp" -> "maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant_1/mul"; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant_1/mul" -> "maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.expand_dims_25/ExpandDims"; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant_2/Abs/ReadVariableOp" -> "maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant_2/Abs"; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant_2/Abs" -> 
"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant_2/add"; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant_2/add/y" -> "maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant_2/add"; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant_2/add" -> "maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant_2/mul"; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant_2/add" -> "maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant_2/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant_2/mul/ReadVariableOp" -> "maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant_2/mul"; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant_2/mul" -> "maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant_2/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.truediv_26/fake_quantize/unified_scale_group/SymmQuant_2/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.__operators__.getitem_76/strided_slice"; +"maskrcnn/tf.math.truediv_27/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.truediv_27/fake_quantize/SymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.truediv_27/fake_quantize/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.truediv_27/fake_quantize/SymmQuant/Abs"; +"maskrcnn/tf.math.truediv_27/fake_quantize/SymmQuant/Abs" -> "maskrcnn/tf.math.truediv_27/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.truediv_27/fake_quantize/SymmQuant/add/y" -> "maskrcnn/tf.math.truediv_27/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.truediv_27/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.truediv_27/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.truediv_27/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.truediv_27/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.truediv_27/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.math.truediv_27/fake_quantize/SymmQuant/mul/ReadVariableOp"; +"maskrcnn/tf.math.truediv_27/fake_quantize/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.math.truediv_27/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.truediv_27/fake_quantize/SymmQuant/mul" -> "maskrcnn/tf.math.truediv_27/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.truediv_27/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.expand_dims_26/ExpandDims"; +"maskrcnn/tf.math.truediv_28/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.truediv_28/fake_quantize/SymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.truediv_28/fake_quantize/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.truediv_28/fake_quantize/SymmQuant/Abs"; +"maskrcnn/tf.math.truediv_28/fake_quantize/SymmQuant/Abs" -> "maskrcnn/tf.math.truediv_28/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.truediv_28/fake_quantize/SymmQuant/add/y" -> "maskrcnn/tf.math.truediv_28/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.truediv_28/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.truediv_28/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.truediv_28/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.truediv_28/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.truediv_28/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.math.truediv_28/fake_quantize/SymmQuant/mul/ReadVariableOp"; +"maskrcnn/tf.math.truediv_28/fake_quantize/SymmQuant/mul/ReadVariableOp" -> 
"maskrcnn/tf.math.truediv_28/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.truediv_28/fake_quantize/SymmQuant/mul" -> "maskrcnn/tf.math.truediv_28/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.truediv_28/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.expand_dims_25/ExpandDims"; +"maskrcnn/tf.math.truediv_26/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.truediv_26/fake_quantize/SymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.truediv_26/fake_quantize/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.truediv_26/fake_quantize/SymmQuant/Abs"; +"maskrcnn/tf.math.truediv_26/fake_quantize/SymmQuant/Abs" -> "maskrcnn/tf.math.truediv_26/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.truediv_26/fake_quantize/SymmQuant/add/y" -> "maskrcnn/tf.math.truediv_26/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.truediv_26/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.truediv_26/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.truediv_26/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.truediv_26/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.truediv_26/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.math.truediv_26/fake_quantize/SymmQuant/mul/ReadVariableOp"; +"maskrcnn/tf.math.truediv_26/fake_quantize/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.math.truediv_26/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.truediv_26/fake_quantize/SymmQuant/mul" -> "maskrcnn/tf.math.truediv_26/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.truediv_26/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.__operators__.getitem_76/strided_slice"; "maskrcnn/tf.cast_37/Cast" -> "maskrcnn/tf.cast_37/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.__operators__.getitem_76/strided_slice/stack" -> "maskrcnn/tf.__operators__.getitem_76/strided_slice"; "maskrcnn/tf.__operators__.getitem_76/strided_slice/stack_1" -> "maskrcnn/tf.__operators__.getitem_76/strided_slice"; @@ -14165,9 +13823,9 @@ args_0_1 -> "maskrcnn/image_info/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars "maskrcnn/tf.math.multiply_61/fake_quantize/SymmQuant/mul" -> "maskrcnn/tf.math.multiply_61/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.multiply_61/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.math.truediv_31/truediv"; "maskrcnn/tf.math.subtract_66/Sub/y" -> "maskrcnn/tf.math.subtract_66/Sub"; -"maskrcnn/tf.math.subtract_66/Sub" -> "maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_66/Sub" -> "maskrcnn/tf.math.subtract_66/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_65/Sub/y" -> "maskrcnn/tf.math.subtract_65/Sub"; -"maskrcnn/tf.math.subtract_65/Sub" -> "maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_65/Sub" -> "maskrcnn/tf.math.subtract_65/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.__operators__.getitem_131/strided_slice/stack" -> "maskrcnn/tf.__operators__.getitem_131/strided_slice"; "maskrcnn/tf.__operators__.getitem_131/strided_slice/stack_1" -> "maskrcnn/tf.__operators__.getitem_131/strided_slice"; "maskrcnn/tf.__operators__.getitem_131/strided_slice/stack_2" -> "maskrcnn/tf.__operators__.getitem_131/strided_slice"; @@ -14392,40 +14050,42 @@ args_0_1 -> "maskrcnn/image_info/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars 
"maskrcnn/tf.math.truediv_33/truediv" -> "maskrcnn/tf.math.truediv_33/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.truediv_31/truediv/y" -> "maskrcnn/tf.math.truediv_31/truediv"; "maskrcnn/tf.math.truediv_31/truediv" -> "maskrcnn/tf.math.truediv_31/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp"; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/SymmQuant_1/Abs/ReadVariableOp"; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/SymmQuant/Abs"; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/SymmQuant/Abs" -> "maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/SymmQuant/add"; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/SymmQuant/add/y" -> "maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/SymmQuant/add"; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/SymmQuant/add" -> "maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/SymmQuant/mul"; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/SymmQuant/add" -> "maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp"; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/SymmQuant_1/mul/ReadVariableOp"; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/SymmQuant/mul"; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/SymmQuant/mul" -> "maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.expand_dims_28/ExpandDims"; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/SymmQuant_1/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/SymmQuant_1/Abs"; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/SymmQuant_1/Abs" -> "maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/SymmQuant_1/add"; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/SymmQuant_1/add/y" -> "maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/SymmQuant_1/add"; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/SymmQuant_1/add" -> "maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/SymmQuant_1/mul"; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/SymmQuant_1/add" -> "maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/SymmQuant_1/mul/ReadVariableOp" -> "maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/SymmQuant_1/mul"; 
-"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/SymmQuant_1/mul" -> "maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.subtract_65/fake_quantize/unified_scale_group/SymmQuant_1/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.expand_dims_27/ExpandDims"; -"maskrcnn/tf.__operators__.add_96/AddV2" -> "maskrcnn/tf.__operators__.add_96/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_94/AddV2" -> "maskrcnn/tf.__operators__.add_94/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_92/AddV2" -> "maskrcnn/tf.__operators__.add_92/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_90/AddV2" -> "maskrcnn/tf.__operators__.add_90/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_88/AddV2" -> "maskrcnn/tf.__operators__.add_88/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_86/AddV2" -> "maskrcnn/tf.__operators__.add_86/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_84/AddV2" -> "maskrcnn/tf.__operators__.add_84/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_82/AddV2" -> "maskrcnn/tf.__operators__.add_82/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_80/AddV2" -> "maskrcnn/tf.__operators__.add_80/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_78/AddV2" -> "maskrcnn/tf.__operators__.add_78/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_76/AddV2" -> "maskrcnn/tf.__operators__.add_76/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_74/AddV2" -> "maskrcnn/tf.__operators__.add_74/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_72/AddV2" -> "maskrcnn/tf.__operators__.add_72/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_70/AddV2" -> "maskrcnn/tf.__operators__.add_70/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_66/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_66/fake_quantize/SymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.subtract_66/fake_quantize/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_66/fake_quantize/SymmQuant/Abs"; +"maskrcnn/tf.math.subtract_66/fake_quantize/SymmQuant/Abs" -> "maskrcnn/tf.math.subtract_66/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.subtract_66/fake_quantize/SymmQuant/add/y" -> "maskrcnn/tf.math.subtract_66/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.subtract_66/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.subtract_66/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.subtract_66/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.subtract_66/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_66/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_66/fake_quantize/SymmQuant/mul/ReadVariableOp"; +"maskrcnn/tf.math.subtract_66/fake_quantize/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.math.subtract_66/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.subtract_66/fake_quantize/SymmQuant/mul" -> "maskrcnn/tf.math.subtract_66/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_66/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.expand_dims_28/ExpandDims"; 
+"maskrcnn/tf.math.subtract_65/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_65/fake_quantize/SymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.subtract_65/fake_quantize/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_65/fake_quantize/SymmQuant/Abs"; +"maskrcnn/tf.math.subtract_65/fake_quantize/SymmQuant/Abs" -> "maskrcnn/tf.math.subtract_65/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.subtract_65/fake_quantize/SymmQuant/add/y" -> "maskrcnn/tf.math.subtract_65/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.subtract_65/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.subtract_65/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.subtract_65/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.subtract_65/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_65/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_65/fake_quantize/SymmQuant/mul/ReadVariableOp"; +"maskrcnn/tf.math.subtract_65/fake_quantize/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.math.subtract_65/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.subtract_65/fake_quantize/SymmQuant/mul" -> "maskrcnn/tf.math.subtract_65/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_65/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.expand_dims_27/ExpandDims"; +"maskrcnn/tf.__operators__.add_70/AddV2" -> "maskrcnn/tf.stack_18/stack"; +"maskrcnn/tf.__operators__.add_72/AddV2" -> "maskrcnn/tf.stack_18/stack"; +"maskrcnn/tf.__operators__.add_74/AddV2" -> "maskrcnn/tf.stack_18/stack"; +"maskrcnn/tf.__operators__.add_76/AddV2" -> "maskrcnn/tf.stack_18/stack"; +"maskrcnn/tf.__operators__.add_78/AddV2" -> "maskrcnn/tf.stack_18/stack"; +"maskrcnn/tf.__operators__.add_80/AddV2" -> "maskrcnn/tf.stack_18/stack"; +"maskrcnn/tf.__operators__.add_82/AddV2" -> "maskrcnn/tf.stack_18/stack"; +"maskrcnn/tf.__operators__.add_84/AddV2" -> "maskrcnn/tf.stack_18/stack"; +"maskrcnn/tf.__operators__.add_86/AddV2" -> "maskrcnn/tf.stack_18/stack"; +"maskrcnn/tf.__operators__.add_88/AddV2" -> "maskrcnn/tf.stack_18/stack"; +"maskrcnn/tf.__operators__.add_90/AddV2" -> "maskrcnn/tf.stack_18/stack"; +"maskrcnn/tf.__operators__.add_92/AddV2" -> "maskrcnn/tf.stack_18/stack"; +"maskrcnn/tf.__operators__.add_94/AddV2" -> "maskrcnn/tf.stack_18/stack"; +"maskrcnn/tf.__operators__.add_96/AddV2" -> "maskrcnn/tf.stack_18/stack"; +"maskrcnn/tf.stack_18/stack" -> "maskrcnn/tf.math.floor_2/Floor"; +"maskrcnn/tf.stack_18/stack" -> "maskrcnn/tf.math.subtract_67/fake_quantize_I0/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.__operators__.getitem_129/strided_slice/stack" -> "maskrcnn/tf.__operators__.getitem_129/strided_slice"; "maskrcnn/tf.__operators__.getitem_129/strided_slice/stack_1" -> "maskrcnn/tf.__operators__.getitem_129/strided_slice"; "maskrcnn/tf.__operators__.getitem_129/strided_slice/stack_2" -> "maskrcnn/tf.__operators__.getitem_129/strided_slice"; @@ -14626,316 +14286,54 @@ args_0_1 -> "maskrcnn/image_info/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars "maskrcnn/tf.expand_dims_27/ExpandDims" -> "maskrcnn/tf.concat_19/concat"; "maskrcnn/tf.expand_dims_28/ExpandDims/dim" -> "maskrcnn/tf.expand_dims_28/ExpandDims"; "maskrcnn/tf.expand_dims_28/ExpandDims" -> "maskrcnn/tf.concat_19/concat"; -"maskrcnn/tf.__operators__.add_70/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_70/fake_quantize/SymmQuant/Abs/ReadVariableOp"; 
-"maskrcnn/tf.__operators__.add_70/fake_quantize/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_70/fake_quantize/SymmQuant/Abs"; -"maskrcnn/tf.__operators__.add_70/fake_quantize/SymmQuant/Abs" -> "maskrcnn/tf.__operators__.add_70/fake_quantize/SymmQuant/add"; -"maskrcnn/tf.__operators__.add_70/fake_quantize/SymmQuant/add/y" -> "maskrcnn/tf.__operators__.add_70/fake_quantize/SymmQuant/add"; -"maskrcnn/tf.__operators__.add_70/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.__operators__.add_70/fake_quantize/SymmQuant/mul"; -"maskrcnn/tf.__operators__.add_70/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.__operators__.add_70/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_70/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_70/fake_quantize/SymmQuant/mul/ReadVariableOp"; -"maskrcnn/tf.__operators__.add_70/fake_quantize/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_70/fake_quantize/SymmQuant/mul"; -"maskrcnn/tf.__operators__.add_70/fake_quantize/SymmQuant/mul" -> "maskrcnn/tf.__operators__.add_70/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_70/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.stack_18/stack"; -"maskrcnn/tf.__operators__.add_72/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_72/fake_quantize/SymmQuant/Abs/ReadVariableOp"; -"maskrcnn/tf.__operators__.add_72/fake_quantize/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_72/fake_quantize/SymmQuant/Abs"; -"maskrcnn/tf.__operators__.add_72/fake_quantize/SymmQuant/Abs" -> "maskrcnn/tf.__operators__.add_72/fake_quantize/SymmQuant/add"; -"maskrcnn/tf.__operators__.add_72/fake_quantize/SymmQuant/add/y" -> "maskrcnn/tf.__operators__.add_72/fake_quantize/SymmQuant/add"; -"maskrcnn/tf.__operators__.add_72/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.__operators__.add_72/fake_quantize/SymmQuant/mul"; -"maskrcnn/tf.__operators__.add_72/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.__operators__.add_72/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_72/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_72/fake_quantize/SymmQuant/mul/ReadVariableOp"; -"maskrcnn/tf.__operators__.add_72/fake_quantize/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_72/fake_quantize/SymmQuant/mul"; -"maskrcnn/tf.__operators__.add_72/fake_quantize/SymmQuant/mul" -> "maskrcnn/tf.__operators__.add_72/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_72/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.stack_18/stack"; -"maskrcnn/tf.__operators__.add_74/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_74/fake_quantize/SymmQuant/Abs/ReadVariableOp"; -"maskrcnn/tf.__operators__.add_74/fake_quantize/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_74/fake_quantize/SymmQuant/Abs"; -"maskrcnn/tf.__operators__.add_74/fake_quantize/SymmQuant/Abs" -> "maskrcnn/tf.__operators__.add_74/fake_quantize/SymmQuant/add"; -"maskrcnn/tf.__operators__.add_74/fake_quantize/SymmQuant/add/y" -> "maskrcnn/tf.__operators__.add_74/fake_quantize/SymmQuant/add"; -"maskrcnn/tf.__operators__.add_74/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.__operators__.add_74/fake_quantize/SymmQuant/mul"; -"maskrcnn/tf.__operators__.add_74/fake_quantize/SymmQuant/add" -> 
"maskrcnn/tf.__operators__.add_74/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_74/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_74/fake_quantize/SymmQuant/mul/ReadVariableOp"; -"maskrcnn/tf.__operators__.add_74/fake_quantize/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_74/fake_quantize/SymmQuant/mul"; -"maskrcnn/tf.__operators__.add_74/fake_quantize/SymmQuant/mul" -> "maskrcnn/tf.__operators__.add_74/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_74/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.stack_18/stack"; -"maskrcnn/tf.__operators__.add_76/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_76/fake_quantize/SymmQuant/Abs/ReadVariableOp"; -"maskrcnn/tf.__operators__.add_76/fake_quantize/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_76/fake_quantize/SymmQuant/Abs"; -"maskrcnn/tf.__operators__.add_76/fake_quantize/SymmQuant/Abs" -> "maskrcnn/tf.__operators__.add_76/fake_quantize/SymmQuant/add"; -"maskrcnn/tf.__operators__.add_76/fake_quantize/SymmQuant/add/y" -> "maskrcnn/tf.__operators__.add_76/fake_quantize/SymmQuant/add"; -"maskrcnn/tf.__operators__.add_76/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.__operators__.add_76/fake_quantize/SymmQuant/mul"; -"maskrcnn/tf.__operators__.add_76/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.__operators__.add_76/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_76/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_76/fake_quantize/SymmQuant/mul/ReadVariableOp"; -"maskrcnn/tf.__operators__.add_76/fake_quantize/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_76/fake_quantize/SymmQuant/mul"; -"maskrcnn/tf.__operators__.add_76/fake_quantize/SymmQuant/mul" -> "maskrcnn/tf.__operators__.add_76/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_76/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.stack_18/stack"; -"maskrcnn/tf.__operators__.add_78/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_78/fake_quantize/SymmQuant/Abs/ReadVariableOp"; -"maskrcnn/tf.__operators__.add_78/fake_quantize/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_78/fake_quantize/SymmQuant/Abs"; -"maskrcnn/tf.__operators__.add_78/fake_quantize/SymmQuant/Abs" -> "maskrcnn/tf.__operators__.add_78/fake_quantize/SymmQuant/add"; -"maskrcnn/tf.__operators__.add_78/fake_quantize/SymmQuant/add/y" -> "maskrcnn/tf.__operators__.add_78/fake_quantize/SymmQuant/add"; -"maskrcnn/tf.__operators__.add_78/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.__operators__.add_78/fake_quantize/SymmQuant/mul"; -"maskrcnn/tf.__operators__.add_78/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.__operators__.add_78/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_78/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_78/fake_quantize/SymmQuant/mul/ReadVariableOp"; -"maskrcnn/tf.__operators__.add_78/fake_quantize/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_78/fake_quantize/SymmQuant/mul"; -"maskrcnn/tf.__operators__.add_78/fake_quantize/SymmQuant/mul" -> "maskrcnn/tf.__operators__.add_78/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_78/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> 
"maskrcnn/tf.stack_18/stack"; -"maskrcnn/tf.__operators__.add_80/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_80/fake_quantize/SymmQuant/Abs/ReadVariableOp"; -"maskrcnn/tf.__operators__.add_80/fake_quantize/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_80/fake_quantize/SymmQuant/Abs"; -"maskrcnn/tf.__operators__.add_80/fake_quantize/SymmQuant/Abs" -> "maskrcnn/tf.__operators__.add_80/fake_quantize/SymmQuant/add"; -"maskrcnn/tf.__operators__.add_80/fake_quantize/SymmQuant/add/y" -> "maskrcnn/tf.__operators__.add_80/fake_quantize/SymmQuant/add"; -"maskrcnn/tf.__operators__.add_80/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.__operators__.add_80/fake_quantize/SymmQuant/mul"; -"maskrcnn/tf.__operators__.add_80/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.__operators__.add_80/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_80/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_80/fake_quantize/SymmQuant/mul/ReadVariableOp"; -"maskrcnn/tf.__operators__.add_80/fake_quantize/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_80/fake_quantize/SymmQuant/mul"; -"maskrcnn/tf.__operators__.add_80/fake_quantize/SymmQuant/mul" -> "maskrcnn/tf.__operators__.add_80/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_80/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.stack_18/stack"; -"maskrcnn/tf.__operators__.add_82/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_82/fake_quantize/SymmQuant/Abs/ReadVariableOp"; -"maskrcnn/tf.__operators__.add_82/fake_quantize/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_82/fake_quantize/SymmQuant/Abs"; -"maskrcnn/tf.__operators__.add_82/fake_quantize/SymmQuant/Abs" -> "maskrcnn/tf.__operators__.add_82/fake_quantize/SymmQuant/add"; -"maskrcnn/tf.__operators__.add_82/fake_quantize/SymmQuant/add/y" -> "maskrcnn/tf.__operators__.add_82/fake_quantize/SymmQuant/add"; -"maskrcnn/tf.__operators__.add_82/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.__operators__.add_82/fake_quantize/SymmQuant/mul"; -"maskrcnn/tf.__operators__.add_82/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.__operators__.add_82/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_82/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_82/fake_quantize/SymmQuant/mul/ReadVariableOp"; -"maskrcnn/tf.__operators__.add_82/fake_quantize/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_82/fake_quantize/SymmQuant/mul"; -"maskrcnn/tf.__operators__.add_82/fake_quantize/SymmQuant/mul" -> "maskrcnn/tf.__operators__.add_82/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_82/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.stack_18/stack"; -"maskrcnn/tf.__operators__.add_84/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_84/fake_quantize/SymmQuant/Abs/ReadVariableOp"; -"maskrcnn/tf.__operators__.add_84/fake_quantize/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_84/fake_quantize/SymmQuant/Abs"; -"maskrcnn/tf.__operators__.add_84/fake_quantize/SymmQuant/Abs" -> "maskrcnn/tf.__operators__.add_84/fake_quantize/SymmQuant/add"; -"maskrcnn/tf.__operators__.add_84/fake_quantize/SymmQuant/add/y" -> "maskrcnn/tf.__operators__.add_84/fake_quantize/SymmQuant/add"; 
-"maskrcnn/tf.__operators__.add_84/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.__operators__.add_84/fake_quantize/SymmQuant/mul"; -"maskrcnn/tf.__operators__.add_84/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.__operators__.add_84/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_84/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_84/fake_quantize/SymmQuant/mul/ReadVariableOp"; -"maskrcnn/tf.__operators__.add_84/fake_quantize/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_84/fake_quantize/SymmQuant/mul"; -"maskrcnn/tf.__operators__.add_84/fake_quantize/SymmQuant/mul" -> "maskrcnn/tf.__operators__.add_84/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_84/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.stack_18/stack"; -"maskrcnn/tf.__operators__.add_86/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_86/fake_quantize/SymmQuant/Abs/ReadVariableOp"; -"maskrcnn/tf.__operators__.add_86/fake_quantize/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_86/fake_quantize/SymmQuant/Abs"; -"maskrcnn/tf.__operators__.add_86/fake_quantize/SymmQuant/Abs" -> "maskrcnn/tf.__operators__.add_86/fake_quantize/SymmQuant/add"; -"maskrcnn/tf.__operators__.add_86/fake_quantize/SymmQuant/add/y" -> "maskrcnn/tf.__operators__.add_86/fake_quantize/SymmQuant/add"; -"maskrcnn/tf.__operators__.add_86/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.__operators__.add_86/fake_quantize/SymmQuant/mul"; -"maskrcnn/tf.__operators__.add_86/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.__operators__.add_86/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_86/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_86/fake_quantize/SymmQuant/mul/ReadVariableOp"; -"maskrcnn/tf.__operators__.add_86/fake_quantize/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_86/fake_quantize/SymmQuant/mul"; -"maskrcnn/tf.__operators__.add_86/fake_quantize/SymmQuant/mul" -> "maskrcnn/tf.__operators__.add_86/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_86/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.stack_18/stack"; -"maskrcnn/tf.__operators__.add_88/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_88/fake_quantize/SymmQuant/Abs/ReadVariableOp"; -"maskrcnn/tf.__operators__.add_88/fake_quantize/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_88/fake_quantize/SymmQuant/Abs"; -"maskrcnn/tf.__operators__.add_88/fake_quantize/SymmQuant/Abs" -> "maskrcnn/tf.__operators__.add_88/fake_quantize/SymmQuant/add"; -"maskrcnn/tf.__operators__.add_88/fake_quantize/SymmQuant/add/y" -> "maskrcnn/tf.__operators__.add_88/fake_quantize/SymmQuant/add"; -"maskrcnn/tf.__operators__.add_88/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.__operators__.add_88/fake_quantize/SymmQuant/mul"; -"maskrcnn/tf.__operators__.add_88/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.__operators__.add_88/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_88/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_88/fake_quantize/SymmQuant/mul/ReadVariableOp"; -"maskrcnn/tf.__operators__.add_88/fake_quantize/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_88/fake_quantize/SymmQuant/mul"; -"maskrcnn/tf.__operators__.add_88/fake_quantize/SymmQuant/mul" 
-> "maskrcnn/tf.__operators__.add_88/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_88/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.stack_18/stack"; -"maskrcnn/tf.__operators__.add_90/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_90/fake_quantize/SymmQuant/Abs/ReadVariableOp"; -"maskrcnn/tf.__operators__.add_90/fake_quantize/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_90/fake_quantize/SymmQuant/Abs"; -"maskrcnn/tf.__operators__.add_90/fake_quantize/SymmQuant/Abs" -> "maskrcnn/tf.__operators__.add_90/fake_quantize/SymmQuant/add"; -"maskrcnn/tf.__operators__.add_90/fake_quantize/SymmQuant/add/y" -> "maskrcnn/tf.__operators__.add_90/fake_quantize/SymmQuant/add"; -"maskrcnn/tf.__operators__.add_90/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.__operators__.add_90/fake_quantize/SymmQuant/mul"; -"maskrcnn/tf.__operators__.add_90/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.__operators__.add_90/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_90/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_90/fake_quantize/SymmQuant/mul/ReadVariableOp"; -"maskrcnn/tf.__operators__.add_90/fake_quantize/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_90/fake_quantize/SymmQuant/mul"; -"maskrcnn/tf.__operators__.add_90/fake_quantize/SymmQuant/mul" -> "maskrcnn/tf.__operators__.add_90/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_90/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.stack_18/stack"; -"maskrcnn/tf.__operators__.add_92/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_92/fake_quantize/SymmQuant/Abs/ReadVariableOp"; -"maskrcnn/tf.__operators__.add_92/fake_quantize/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_92/fake_quantize/SymmQuant/Abs"; -"maskrcnn/tf.__operators__.add_92/fake_quantize/SymmQuant/Abs" -> "maskrcnn/tf.__operators__.add_92/fake_quantize/SymmQuant/add"; -"maskrcnn/tf.__operators__.add_92/fake_quantize/SymmQuant/add/y" -> "maskrcnn/tf.__operators__.add_92/fake_quantize/SymmQuant/add"; -"maskrcnn/tf.__operators__.add_92/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.__operators__.add_92/fake_quantize/SymmQuant/mul"; -"maskrcnn/tf.__operators__.add_92/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.__operators__.add_92/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_92/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_92/fake_quantize/SymmQuant/mul/ReadVariableOp"; -"maskrcnn/tf.__operators__.add_92/fake_quantize/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_92/fake_quantize/SymmQuant/mul"; -"maskrcnn/tf.__operators__.add_92/fake_quantize/SymmQuant/mul" -> "maskrcnn/tf.__operators__.add_92/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_92/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.stack_18/stack"; -"maskrcnn/tf.__operators__.add_94/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_94/fake_quantize/SymmQuant/Abs/ReadVariableOp"; -"maskrcnn/tf.__operators__.add_94/fake_quantize/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_94/fake_quantize/SymmQuant/Abs"; -"maskrcnn/tf.__operators__.add_94/fake_quantize/SymmQuant/Abs" -> "maskrcnn/tf.__operators__.add_94/fake_quantize/SymmQuant/add"; 
-"maskrcnn/tf.__operators__.add_94/fake_quantize/SymmQuant/add/y" -> "maskrcnn/tf.__operators__.add_94/fake_quantize/SymmQuant/add"; -"maskrcnn/tf.__operators__.add_94/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.__operators__.add_94/fake_quantize/SymmQuant/mul"; -"maskrcnn/tf.__operators__.add_94/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.__operators__.add_94/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_94/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_94/fake_quantize/SymmQuant/mul/ReadVariableOp"; -"maskrcnn/tf.__operators__.add_94/fake_quantize/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_94/fake_quantize/SymmQuant/mul"; -"maskrcnn/tf.__operators__.add_94/fake_quantize/SymmQuant/mul" -> "maskrcnn/tf.__operators__.add_94/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_94/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.stack_18/stack"; -"maskrcnn/tf.__operators__.add_96/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_96/fake_quantize/SymmQuant/Abs/ReadVariableOp"; -"maskrcnn/tf.__operators__.add_96/fake_quantize/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_96/fake_quantize/SymmQuant/Abs"; -"maskrcnn/tf.__operators__.add_96/fake_quantize/SymmQuant/Abs" -> "maskrcnn/tf.__operators__.add_96/fake_quantize/SymmQuant/add"; -"maskrcnn/tf.__operators__.add_96/fake_quantize/SymmQuant/add/y" -> "maskrcnn/tf.__operators__.add_96/fake_quantize/SymmQuant/add"; -"maskrcnn/tf.__operators__.add_96/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.__operators__.add_96/fake_quantize/SymmQuant/mul"; -"maskrcnn/tf.__operators__.add_96/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.__operators__.add_96/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_96/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_96/fake_quantize/SymmQuant/mul/ReadVariableOp"; -"maskrcnn/tf.__operators__.add_96/fake_quantize/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_96/fake_quantize/SymmQuant/mul"; -"maskrcnn/tf.__operators__.add_96/fake_quantize/SymmQuant/mul" -> "maskrcnn/tf.__operators__.add_96/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_96/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.stack_18/stack"; -"maskrcnn/tf.stack_18/stack" -> "maskrcnn/tf.math.floor_2/Floor"; -"maskrcnn/tf.stack_18/stack" -> "maskrcnn/tf.math.subtract_67/Sub"; -"maskrcnn/tf.__operators__.add_95/AddV2" -> "maskrcnn/tf.__operators__.add_95/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_93/AddV2" -> "maskrcnn/tf.__operators__.add_93/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_91/AddV2" -> "maskrcnn/tf.__operators__.add_91/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_89/AddV2" -> "maskrcnn/tf.__operators__.add_89/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_87/AddV2" -> "maskrcnn/tf.__operators__.add_87/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_85/AddV2" -> "maskrcnn/tf.__operators__.add_85/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_83/AddV2" -> "maskrcnn/tf.__operators__.add_83/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_81/AddV2" -> 
"maskrcnn/tf.__operators__.add_81/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_79/AddV2" -> "maskrcnn/tf.__operators__.add_79/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_77/AddV2" -> "maskrcnn/tf.__operators__.add_77/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_75/AddV2" -> "maskrcnn/tf.__operators__.add_75/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_73/AddV2" -> "maskrcnn/tf.__operators__.add_73/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_71/AddV2" -> "maskrcnn/tf.__operators__.add_71/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_69/AddV2" -> "maskrcnn/tf.__operators__.add_69/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.__operators__.add_69/AddV2" -> "maskrcnn/tf.stack_17/stack"; +"maskrcnn/tf.__operators__.add_71/AddV2" -> "maskrcnn/tf.stack_17/stack"; +"maskrcnn/tf.__operators__.add_73/AddV2" -> "maskrcnn/tf.stack_17/stack"; +"maskrcnn/tf.__operators__.add_75/AddV2" -> "maskrcnn/tf.stack_17/stack"; +"maskrcnn/tf.__operators__.add_77/AddV2" -> "maskrcnn/tf.stack_17/stack"; +"maskrcnn/tf.__operators__.add_79/AddV2" -> "maskrcnn/tf.stack_17/stack"; +"maskrcnn/tf.__operators__.add_81/AddV2" -> "maskrcnn/tf.stack_17/stack"; +"maskrcnn/tf.__operators__.add_83/AddV2" -> "maskrcnn/tf.stack_17/stack"; +"maskrcnn/tf.__operators__.add_85/AddV2" -> "maskrcnn/tf.stack_17/stack"; +"maskrcnn/tf.__operators__.add_87/AddV2" -> "maskrcnn/tf.stack_17/stack"; +"maskrcnn/tf.__operators__.add_89/AddV2" -> "maskrcnn/tf.stack_17/stack"; +"maskrcnn/tf.__operators__.add_91/AddV2" -> "maskrcnn/tf.stack_17/stack"; +"maskrcnn/tf.__operators__.add_93/AddV2" -> "maskrcnn/tf.stack_17/stack"; +"maskrcnn/tf.__operators__.add_95/AddV2" -> "maskrcnn/tf.stack_17/stack"; "maskrcnn/tf.concat_19/concat/axis" -> "maskrcnn/tf.concat_19/concat"; "maskrcnn/tf.concat_19/concat" -> "maskrcnn/tf.__operators__.getitem_135/strided_slice"; "maskrcnn/tf.concat_19/concat" -> "maskrcnn/tf.__operators__.getitem_133/strided_slice"; "maskrcnn/tf.concat_19/concat" -> "maskrcnn/tf.__operators__.getitem_136/strided_slice"; "maskrcnn/tf.concat_19/concat" -> "maskrcnn/tf.__operators__.getitem_134/strided_slice"; -"maskrcnn/tf.__operators__.add_69/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_69/fake_quantize/SymmQuant/Abs/ReadVariableOp"; -"maskrcnn/tf.__operators__.add_69/fake_quantize/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_69/fake_quantize/SymmQuant/Abs"; -"maskrcnn/tf.__operators__.add_69/fake_quantize/SymmQuant/Abs" -> "maskrcnn/tf.__operators__.add_69/fake_quantize/SymmQuant/add"; -"maskrcnn/tf.__operators__.add_69/fake_quantize/SymmQuant/add/y" -> "maskrcnn/tf.__operators__.add_69/fake_quantize/SymmQuant/add"; -"maskrcnn/tf.__operators__.add_69/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.__operators__.add_69/fake_quantize/SymmQuant/mul"; -"maskrcnn/tf.__operators__.add_69/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.__operators__.add_69/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_69/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_69/fake_quantize/SymmQuant/mul/ReadVariableOp"; -"maskrcnn/tf.__operators__.add_69/fake_quantize/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_69/fake_quantize/SymmQuant/mul"; 
-"maskrcnn/tf.__operators__.add_69/fake_quantize/SymmQuant/mul" -> "maskrcnn/tf.__operators__.add_69/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_69/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.stack_17/stack"; -"maskrcnn/tf.__operators__.add_71/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_71/fake_quantize/SymmQuant/Abs/ReadVariableOp"; -"maskrcnn/tf.__operators__.add_71/fake_quantize/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_71/fake_quantize/SymmQuant/Abs"; -"maskrcnn/tf.__operators__.add_71/fake_quantize/SymmQuant/Abs" -> "maskrcnn/tf.__operators__.add_71/fake_quantize/SymmQuant/add"; -"maskrcnn/tf.__operators__.add_71/fake_quantize/SymmQuant/add/y" -> "maskrcnn/tf.__operators__.add_71/fake_quantize/SymmQuant/add"; -"maskrcnn/tf.__operators__.add_71/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.__operators__.add_71/fake_quantize/SymmQuant/mul"; -"maskrcnn/tf.__operators__.add_71/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.__operators__.add_71/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_71/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_71/fake_quantize/SymmQuant/mul/ReadVariableOp"; -"maskrcnn/tf.__operators__.add_71/fake_quantize/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_71/fake_quantize/SymmQuant/mul"; -"maskrcnn/tf.__operators__.add_71/fake_quantize/SymmQuant/mul" -> "maskrcnn/tf.__operators__.add_71/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_71/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.stack_17/stack"; -"maskrcnn/tf.__operators__.add_73/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_73/fake_quantize/SymmQuant/Abs/ReadVariableOp"; -"maskrcnn/tf.__operators__.add_73/fake_quantize/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_73/fake_quantize/SymmQuant/Abs"; -"maskrcnn/tf.__operators__.add_73/fake_quantize/SymmQuant/Abs" -> "maskrcnn/tf.__operators__.add_73/fake_quantize/SymmQuant/add"; -"maskrcnn/tf.__operators__.add_73/fake_quantize/SymmQuant/add/y" -> "maskrcnn/tf.__operators__.add_73/fake_quantize/SymmQuant/add"; -"maskrcnn/tf.__operators__.add_73/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.__operators__.add_73/fake_quantize/SymmQuant/mul"; -"maskrcnn/tf.__operators__.add_73/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.__operators__.add_73/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_73/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_73/fake_quantize/SymmQuant/mul/ReadVariableOp"; -"maskrcnn/tf.__operators__.add_73/fake_quantize/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_73/fake_quantize/SymmQuant/mul"; -"maskrcnn/tf.__operators__.add_73/fake_quantize/SymmQuant/mul" -> "maskrcnn/tf.__operators__.add_73/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_73/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.stack_17/stack"; -"maskrcnn/tf.__operators__.add_75/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_75/fake_quantize/SymmQuant/Abs/ReadVariableOp"; -"maskrcnn/tf.__operators__.add_75/fake_quantize/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_75/fake_quantize/SymmQuant/Abs"; -"maskrcnn/tf.__operators__.add_75/fake_quantize/SymmQuant/Abs" -> 
"maskrcnn/tf.__operators__.add_75/fake_quantize/SymmQuant/add"; -"maskrcnn/tf.__operators__.add_75/fake_quantize/SymmQuant/add/y" -> "maskrcnn/tf.__operators__.add_75/fake_quantize/SymmQuant/add"; -"maskrcnn/tf.__operators__.add_75/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.__operators__.add_75/fake_quantize/SymmQuant/mul"; -"maskrcnn/tf.__operators__.add_75/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.__operators__.add_75/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_75/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_75/fake_quantize/SymmQuant/mul/ReadVariableOp"; -"maskrcnn/tf.__operators__.add_75/fake_quantize/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_75/fake_quantize/SymmQuant/mul"; -"maskrcnn/tf.__operators__.add_75/fake_quantize/SymmQuant/mul" -> "maskrcnn/tf.__operators__.add_75/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_75/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.stack_17/stack"; -"maskrcnn/tf.__operators__.add_77/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_77/fake_quantize/SymmQuant/Abs/ReadVariableOp"; -"maskrcnn/tf.__operators__.add_77/fake_quantize/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_77/fake_quantize/SymmQuant/Abs"; -"maskrcnn/tf.__operators__.add_77/fake_quantize/SymmQuant/Abs" -> "maskrcnn/tf.__operators__.add_77/fake_quantize/SymmQuant/add"; -"maskrcnn/tf.__operators__.add_77/fake_quantize/SymmQuant/add/y" -> "maskrcnn/tf.__operators__.add_77/fake_quantize/SymmQuant/add"; -"maskrcnn/tf.__operators__.add_77/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.__operators__.add_77/fake_quantize/SymmQuant/mul"; -"maskrcnn/tf.__operators__.add_77/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.__operators__.add_77/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_77/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_77/fake_quantize/SymmQuant/mul/ReadVariableOp"; -"maskrcnn/tf.__operators__.add_77/fake_quantize/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_77/fake_quantize/SymmQuant/mul"; -"maskrcnn/tf.__operators__.add_77/fake_quantize/SymmQuant/mul" -> "maskrcnn/tf.__operators__.add_77/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_77/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.stack_17/stack"; -"maskrcnn/tf.__operators__.add_79/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_79/fake_quantize/SymmQuant/Abs/ReadVariableOp"; -"maskrcnn/tf.__operators__.add_79/fake_quantize/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_79/fake_quantize/SymmQuant/Abs"; -"maskrcnn/tf.__operators__.add_79/fake_quantize/SymmQuant/Abs" -> "maskrcnn/tf.__operators__.add_79/fake_quantize/SymmQuant/add"; -"maskrcnn/tf.__operators__.add_79/fake_quantize/SymmQuant/add/y" -> "maskrcnn/tf.__operators__.add_79/fake_quantize/SymmQuant/add"; -"maskrcnn/tf.__operators__.add_79/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.__operators__.add_79/fake_quantize/SymmQuant/mul"; -"maskrcnn/tf.__operators__.add_79/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.__operators__.add_79/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_79/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_79/fake_quantize/SymmQuant/mul/ReadVariableOp"; 
-"maskrcnn/tf.__operators__.add_79/fake_quantize/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_79/fake_quantize/SymmQuant/mul"; -"maskrcnn/tf.__operators__.add_79/fake_quantize/SymmQuant/mul" -> "maskrcnn/tf.__operators__.add_79/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_79/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.stack_17/stack"; -"maskrcnn/tf.__operators__.add_81/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_81/fake_quantize/SymmQuant/Abs/ReadVariableOp"; -"maskrcnn/tf.__operators__.add_81/fake_quantize/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_81/fake_quantize/SymmQuant/Abs"; -"maskrcnn/tf.__operators__.add_81/fake_quantize/SymmQuant/Abs" -> "maskrcnn/tf.__operators__.add_81/fake_quantize/SymmQuant/add"; -"maskrcnn/tf.__operators__.add_81/fake_quantize/SymmQuant/add/y" -> "maskrcnn/tf.__operators__.add_81/fake_quantize/SymmQuant/add"; -"maskrcnn/tf.__operators__.add_81/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.__operators__.add_81/fake_quantize/SymmQuant/mul"; -"maskrcnn/tf.__operators__.add_81/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.__operators__.add_81/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_81/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_81/fake_quantize/SymmQuant/mul/ReadVariableOp"; -"maskrcnn/tf.__operators__.add_81/fake_quantize/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_81/fake_quantize/SymmQuant/mul"; -"maskrcnn/tf.__operators__.add_81/fake_quantize/SymmQuant/mul" -> "maskrcnn/tf.__operators__.add_81/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_81/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.stack_17/stack"; -"maskrcnn/tf.__operators__.add_83/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_83/fake_quantize/SymmQuant/Abs/ReadVariableOp"; -"maskrcnn/tf.__operators__.add_83/fake_quantize/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_83/fake_quantize/SymmQuant/Abs"; -"maskrcnn/tf.__operators__.add_83/fake_quantize/SymmQuant/Abs" -> "maskrcnn/tf.__operators__.add_83/fake_quantize/SymmQuant/add"; -"maskrcnn/tf.__operators__.add_83/fake_quantize/SymmQuant/add/y" -> "maskrcnn/tf.__operators__.add_83/fake_quantize/SymmQuant/add"; -"maskrcnn/tf.__operators__.add_83/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.__operators__.add_83/fake_quantize/SymmQuant/mul"; -"maskrcnn/tf.__operators__.add_83/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.__operators__.add_83/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_83/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_83/fake_quantize/SymmQuant/mul/ReadVariableOp"; -"maskrcnn/tf.__operators__.add_83/fake_quantize/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_83/fake_quantize/SymmQuant/mul"; -"maskrcnn/tf.__operators__.add_83/fake_quantize/SymmQuant/mul" -> "maskrcnn/tf.__operators__.add_83/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_83/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.stack_17/stack"; -"maskrcnn/tf.__operators__.add_85/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_85/fake_quantize/SymmQuant/Abs/ReadVariableOp"; -"maskrcnn/tf.__operators__.add_85/fake_quantize/SymmQuant/Abs/ReadVariableOp" 
-> "maskrcnn/tf.__operators__.add_85/fake_quantize/SymmQuant/Abs"; -"maskrcnn/tf.__operators__.add_85/fake_quantize/SymmQuant/Abs" -> "maskrcnn/tf.__operators__.add_85/fake_quantize/SymmQuant/add"; -"maskrcnn/tf.__operators__.add_85/fake_quantize/SymmQuant/add/y" -> "maskrcnn/tf.__operators__.add_85/fake_quantize/SymmQuant/add"; -"maskrcnn/tf.__operators__.add_85/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.__operators__.add_85/fake_quantize/SymmQuant/mul"; -"maskrcnn/tf.__operators__.add_85/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.__operators__.add_85/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_85/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_85/fake_quantize/SymmQuant/mul/ReadVariableOp"; -"maskrcnn/tf.__operators__.add_85/fake_quantize/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_85/fake_quantize/SymmQuant/mul"; -"maskrcnn/tf.__operators__.add_85/fake_quantize/SymmQuant/mul" -> "maskrcnn/tf.__operators__.add_85/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_85/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.stack_17/stack"; -"maskrcnn/tf.__operators__.add_87/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_87/fake_quantize/SymmQuant/Abs/ReadVariableOp"; -"maskrcnn/tf.__operators__.add_87/fake_quantize/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_87/fake_quantize/SymmQuant/Abs"; -"maskrcnn/tf.__operators__.add_87/fake_quantize/SymmQuant/Abs" -> "maskrcnn/tf.__operators__.add_87/fake_quantize/SymmQuant/add"; -"maskrcnn/tf.__operators__.add_87/fake_quantize/SymmQuant/add/y" -> "maskrcnn/tf.__operators__.add_87/fake_quantize/SymmQuant/add"; -"maskrcnn/tf.__operators__.add_87/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.__operators__.add_87/fake_quantize/SymmQuant/mul"; -"maskrcnn/tf.__operators__.add_87/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.__operators__.add_87/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_87/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_87/fake_quantize/SymmQuant/mul/ReadVariableOp"; -"maskrcnn/tf.__operators__.add_87/fake_quantize/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_87/fake_quantize/SymmQuant/mul"; -"maskrcnn/tf.__operators__.add_87/fake_quantize/SymmQuant/mul" -> "maskrcnn/tf.__operators__.add_87/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_87/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.stack_17/stack"; -"maskrcnn/tf.__operators__.add_89/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_89/fake_quantize/SymmQuant/Abs/ReadVariableOp"; -"maskrcnn/tf.__operators__.add_89/fake_quantize/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_89/fake_quantize/SymmQuant/Abs"; -"maskrcnn/tf.__operators__.add_89/fake_quantize/SymmQuant/Abs" -> "maskrcnn/tf.__operators__.add_89/fake_quantize/SymmQuant/add"; -"maskrcnn/tf.__operators__.add_89/fake_quantize/SymmQuant/add/y" -> "maskrcnn/tf.__operators__.add_89/fake_quantize/SymmQuant/add"; -"maskrcnn/tf.__operators__.add_89/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.__operators__.add_89/fake_quantize/SymmQuant/mul"; -"maskrcnn/tf.__operators__.add_89/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.__operators__.add_89/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; 
-"maskrcnn/tf.__operators__.add_89/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_89/fake_quantize/SymmQuant/mul/ReadVariableOp"; -"maskrcnn/tf.__operators__.add_89/fake_quantize/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_89/fake_quantize/SymmQuant/mul"; -"maskrcnn/tf.__operators__.add_89/fake_quantize/SymmQuant/mul" -> "maskrcnn/tf.__operators__.add_89/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_89/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.stack_17/stack"; -"maskrcnn/tf.__operators__.add_91/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_91/fake_quantize/SymmQuant/Abs/ReadVariableOp"; -"maskrcnn/tf.__operators__.add_91/fake_quantize/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_91/fake_quantize/SymmQuant/Abs"; -"maskrcnn/tf.__operators__.add_91/fake_quantize/SymmQuant/Abs" -> "maskrcnn/tf.__operators__.add_91/fake_quantize/SymmQuant/add"; -"maskrcnn/tf.__operators__.add_91/fake_quantize/SymmQuant/add/y" -> "maskrcnn/tf.__operators__.add_91/fake_quantize/SymmQuant/add"; -"maskrcnn/tf.__operators__.add_91/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.__operators__.add_91/fake_quantize/SymmQuant/mul"; -"maskrcnn/tf.__operators__.add_91/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.__operators__.add_91/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_91/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_91/fake_quantize/SymmQuant/mul/ReadVariableOp"; -"maskrcnn/tf.__operators__.add_91/fake_quantize/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_91/fake_quantize/SymmQuant/mul"; -"maskrcnn/tf.__operators__.add_91/fake_quantize/SymmQuant/mul" -> "maskrcnn/tf.__operators__.add_91/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_91/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.stack_17/stack"; -"maskrcnn/tf.__operators__.add_93/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_93/fake_quantize/SymmQuant/Abs/ReadVariableOp"; -"maskrcnn/tf.__operators__.add_93/fake_quantize/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_93/fake_quantize/SymmQuant/Abs"; -"maskrcnn/tf.__operators__.add_93/fake_quantize/SymmQuant/Abs" -> "maskrcnn/tf.__operators__.add_93/fake_quantize/SymmQuant/add"; -"maskrcnn/tf.__operators__.add_93/fake_quantize/SymmQuant/add/y" -> "maskrcnn/tf.__operators__.add_93/fake_quantize/SymmQuant/add"; -"maskrcnn/tf.__operators__.add_93/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.__operators__.add_93/fake_quantize/SymmQuant/mul"; -"maskrcnn/tf.__operators__.add_93/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.__operators__.add_93/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_93/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_93/fake_quantize/SymmQuant/mul/ReadVariableOp"; -"maskrcnn/tf.__operators__.add_93/fake_quantize/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_93/fake_quantize/SymmQuant/mul"; -"maskrcnn/tf.__operators__.add_93/fake_quantize/SymmQuant/mul" -> "maskrcnn/tf.__operators__.add_93/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_93/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.stack_17/stack"; 
-"maskrcnn/tf.__operators__.add_95/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_95/fake_quantize/SymmQuant/Abs/ReadVariableOp"; -"maskrcnn/tf.__operators__.add_95/fake_quantize/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_95/fake_quantize/SymmQuant/Abs"; -"maskrcnn/tf.__operators__.add_95/fake_quantize/SymmQuant/Abs" -> "maskrcnn/tf.__operators__.add_95/fake_quantize/SymmQuant/add"; -"maskrcnn/tf.__operators__.add_95/fake_quantize/SymmQuant/add/y" -> "maskrcnn/tf.__operators__.add_95/fake_quantize/SymmQuant/add"; -"maskrcnn/tf.__operators__.add_95/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.__operators__.add_95/fake_quantize/SymmQuant/mul"; -"maskrcnn/tf.__operators__.add_95/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.__operators__.add_95/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_95/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.__operators__.add_95/fake_quantize/SymmQuant/mul/ReadVariableOp"; -"maskrcnn/tf.__operators__.add_95/fake_quantize/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.__operators__.add_95/fake_quantize/SymmQuant/mul"; -"maskrcnn/tf.__operators__.add_95/fake_quantize/SymmQuant/mul" -> "maskrcnn/tf.__operators__.add_95/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.__operators__.add_95/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.stack_17/stack"; -"maskrcnn/tf.math.floor_2/Floor" -> "maskrcnn/tf.math.maximum_11/Maximum"; +"maskrcnn/tf.math.floor_2/Floor" -> "maskrcnn/tf.math.floor_2/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.stack_17/stack" -> "maskrcnn/tf.math.floor_3/Floor"; -"maskrcnn/tf.stack_17/stack" -> "maskrcnn/tf.math.subtract_68/Sub"; +"maskrcnn/tf.stack_17/stack" -> "maskrcnn/tf.math.subtract_68/fake_quantize_I0/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.floor_2/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.floor_2/fake_quantize/SymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.floor_2/fake_quantize/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.floor_2/fake_quantize/SymmQuant/Abs"; +"maskrcnn/tf.math.floor_2/fake_quantize/SymmQuant/Abs" -> "maskrcnn/tf.math.floor_2/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.floor_2/fake_quantize/SymmQuant/add/y" -> "maskrcnn/tf.math.floor_2/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.floor_2/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.floor_2/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.floor_2/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.floor_2/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.floor_2/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.math.floor_2/fake_quantize/SymmQuant/mul/ReadVariableOp"; +"maskrcnn/tf.math.floor_2/fake_quantize/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.math.floor_2/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.floor_2/fake_quantize/SymmQuant/mul" -> "maskrcnn/tf.math.floor_2/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.floor_2/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.math.maximum_11/Maximum"; +"maskrcnn/tf.math.floor_3/Floor" -> "maskrcnn/tf.math.floor_3/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.__operators__.getitem_135/strided_slice/stack" -> "maskrcnn/tf.__operators__.getitem_135/strided_slice"; "maskrcnn/tf.__operators__.getitem_135/strided_slice/stack_1" -> "maskrcnn/tf.__operators__.getitem_135/strided_slice"; 
"maskrcnn/tf.__operators__.getitem_135/strided_slice/stack_2" -> "maskrcnn/tf.__operators__.getitem_135/strided_slice"; "maskrcnn/tf.__operators__.getitem_135/strided_slice" -> "maskrcnn/tf.expand_dims_31/ExpandDims"; "maskrcnn/tf.math.maximum_11/Maximum" -> "maskrcnn/tf.math.maximum_11/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; -"maskrcnn/tf.math.floor_3/Floor" -> "maskrcnn/tf.math.maximum_10/Maximum"; +"maskrcnn/tf.math.floor_3/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.floor_3/fake_quantize/SymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.floor_3/fake_quantize/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.floor_3/fake_quantize/SymmQuant/Abs"; +"maskrcnn/tf.math.floor_3/fake_quantize/SymmQuant/Abs" -> "maskrcnn/tf.math.floor_3/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.floor_3/fake_quantize/SymmQuant/add/y" -> "maskrcnn/tf.math.floor_3/fake_quantize/SymmQuant/add"; +"maskrcnn/tf.math.floor_3/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.floor_3/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.floor_3/fake_quantize/SymmQuant/add" -> "maskrcnn/tf.math.floor_3/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.floor_3/fake_quantize/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.math.floor_3/fake_quantize/SymmQuant/mul/ReadVariableOp"; +"maskrcnn/tf.math.floor_3/fake_quantize/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.math.floor_3/fake_quantize/SymmQuant/mul"; +"maskrcnn/tf.math.floor_3/fake_quantize/SymmQuant/mul" -> "maskrcnn/tf.math.floor_3/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.floor_3/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.math.maximum_10/Maximum"; "maskrcnn/tf.math.maximum_11/fake_quantize/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.maximum_11/fake_quantize/SymmQuant/Abs/ReadVariableOp"; "maskrcnn/tf.math.maximum_11/fake_quantize/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.maximum_11/fake_quantize/SymmQuant/Abs"; "maskrcnn/tf.math.maximum_11/fake_quantize/SymmQuant/Abs" -> "maskrcnn/tf.math.maximum_11/fake_quantize/SymmQuant/add"; @@ -14987,6 +14385,16 @@ args_0_1 -> "maskrcnn/image_info/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars "maskrcnn/tf.__operators__.getitem_136/strided_slice" -> "maskrcnn/tf.expand_dims_32/ExpandDims"; "maskrcnn/tf.__operators__.add_98/y" -> "maskrcnn/tf.__operators__.add_98/AddV2"; "maskrcnn/tf.__operators__.add_98/AddV2" -> "maskrcnn/tf.__operators__.add_98/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_68/fake_quantize_I0/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_68/fake_quantize_I0/SymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.subtract_68/fake_quantize_I0/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_68/fake_quantize_I0/SymmQuant/Abs"; +"maskrcnn/tf.math.subtract_68/fake_quantize_I0/SymmQuant/Abs" -> "maskrcnn/tf.math.subtract_68/fake_quantize_I0/SymmQuant/add"; +"maskrcnn/tf.math.subtract_68/fake_quantize_I0/SymmQuant/add/y" -> "maskrcnn/tf.math.subtract_68/fake_quantize_I0/SymmQuant/add"; +"maskrcnn/tf.math.subtract_68/fake_quantize_I0/SymmQuant/add" -> "maskrcnn/tf.math.subtract_68/fake_quantize_I0/SymmQuant/mul"; +"maskrcnn/tf.math.subtract_68/fake_quantize_I0/SymmQuant/add" -> "maskrcnn/tf.math.subtract_68/fake_quantize_I0/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_68/fake_quantize_I0/SymmQuant/mul/ReadVariableOp/resource" -> 
"maskrcnn/tf.math.subtract_68/fake_quantize_I0/SymmQuant/mul/ReadVariableOp"; +"maskrcnn/tf.math.subtract_68/fake_quantize_I0/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.math.subtract_68/fake_quantize_I0/SymmQuant/mul"; +"maskrcnn/tf.math.subtract_68/fake_quantize_I0/SymmQuant/mul" -> "maskrcnn/tf.math.subtract_68/fake_quantize_I0/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_68/fake_quantize_I0/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.math.subtract_68/Sub"; "maskrcnn/tf.math.subtract_68/fake_quantize_I1/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_68/fake_quantize_I1/SymmQuant/Abs/ReadVariableOp"; "maskrcnn/tf.math.subtract_68/fake_quantize_I1/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_68/fake_quantize_I1/SymmQuant/Abs"; "maskrcnn/tf.math.subtract_68/fake_quantize_I1/SymmQuant/Abs" -> "maskrcnn/tf.math.subtract_68/fake_quantize_I1/SymmQuant/add"; @@ -14997,6 +14405,16 @@ args_0_1 -> "maskrcnn/image_info/fake_quantize/SymmQuant/FakeQuantWithMinMaxVars "maskrcnn/tf.math.subtract_68/fake_quantize_I1/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.math.subtract_68/fake_quantize_I1/SymmQuant/mul"; "maskrcnn/tf.math.subtract_68/fake_quantize_I1/SymmQuant/mul" -> "maskrcnn/tf.math.subtract_68/fake_quantize_I1/SymmQuant/FakeQuantWithMinMaxVars"; "maskrcnn/tf.math.subtract_68/fake_quantize_I1/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.math.subtract_68/Sub"; +"maskrcnn/tf.math.subtract_67/fake_quantize_I0/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_67/fake_quantize_I0/SymmQuant/Abs/ReadVariableOp"; +"maskrcnn/tf.math.subtract_67/fake_quantize_I0/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_67/fake_quantize_I0/SymmQuant/Abs"; +"maskrcnn/tf.math.subtract_67/fake_quantize_I0/SymmQuant/Abs" -> "maskrcnn/tf.math.subtract_67/fake_quantize_I0/SymmQuant/add"; +"maskrcnn/tf.math.subtract_67/fake_quantize_I0/SymmQuant/add/y" -> "maskrcnn/tf.math.subtract_67/fake_quantize_I0/SymmQuant/add"; +"maskrcnn/tf.math.subtract_67/fake_quantize_I0/SymmQuant/add" -> "maskrcnn/tf.math.subtract_67/fake_quantize_I0/SymmQuant/mul"; +"maskrcnn/tf.math.subtract_67/fake_quantize_I0/SymmQuant/add" -> "maskrcnn/tf.math.subtract_67/fake_quantize_I0/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_67/fake_quantize_I0/SymmQuant/mul/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_67/fake_quantize_I0/SymmQuant/mul/ReadVariableOp"; +"maskrcnn/tf.math.subtract_67/fake_quantize_I0/SymmQuant/mul/ReadVariableOp" -> "maskrcnn/tf.math.subtract_67/fake_quantize_I0/SymmQuant/mul"; +"maskrcnn/tf.math.subtract_67/fake_quantize_I0/SymmQuant/mul" -> "maskrcnn/tf.math.subtract_67/fake_quantize_I0/SymmQuant/FakeQuantWithMinMaxVars"; +"maskrcnn/tf.math.subtract_67/fake_quantize_I0/SymmQuant/FakeQuantWithMinMaxVars" -> "maskrcnn/tf.math.subtract_67/Sub"; "maskrcnn/tf.math.subtract_67/fake_quantize_I1/SymmQuant/Abs/ReadVariableOp/resource" -> "maskrcnn/tf.math.subtract_67/fake_quantize_I1/SymmQuant/Abs/ReadVariableOp"; "maskrcnn/tf.math.subtract_67/fake_quantize_I1/SymmQuant/Abs/ReadVariableOp" -> "maskrcnn/tf.math.subtract_67/fake_quantize_I1/SymmQuant/Abs"; "maskrcnn/tf.math.subtract_67/fake_quantize_I1/SymmQuant/Abs" -> "maskrcnn/tf.math.subtract_67/fake_quantize_I1/SymmQuant/add"; diff --git a/tests/tensorflow/pruning/test_flops_pruning.py b/tests/tensorflow/pruning/test_flops_pruning.py index 43a42a7eccf..893afc9834c 100644 --- a/tests/tensorflow/pruning/test_flops_pruning.py +++ 
b/tests/tensorflow/pruning/test_flops_pruning.py @@ -138,7 +138,7 @@ def test_flops_calulation_for_spec_layers( next_nodes = shape_pruner.get_next_nodes(original_graph, pruning_groups) # Check output_shapes are empty in graph for node in original_graph.get_all_nodes(): - assert node.data["output_shape"] is None + assert node.attributes["output_shape"] is None assert compression_ctrl._calculate_num_of_sparse_elements_by_node() == ref_num_of_sparse diff --git a/tests/tensorflow/pruning/test_tensor_processor.py b/tests/tensorflow/pruning/test_tensor_processor.py index 3b7e72f3e99..0ec75d388ba 100644 --- a/tests/tensorflow/pruning/test_tensor_processor.py +++ b/tests/tensorflow/pruning/test_tensor_processor.py @@ -1,3 +1,14 @@ +# Copyright (c) 2023 Intel Corporation +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + import pytest import tensorflow as tf diff --git a/tests/tensorflow/quantization/test_ptq_params.py b/tests/tensorflow/quantization/test_ptq_params.py new file mode 100644 index 00000000000..d42ff974815 --- /dev/null +++ b/tests/tensorflow/quantization/test_ptq_params.py @@ -0,0 +1,102 @@ +# Copyright (c) 2023 Intel Corporation +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ + +import pytest + +from nncf import NNCFConfig +from nncf.common.quantization.structs import QuantizationPreset +from nncf.parameters import TargetDevice +from nncf.quantization.advanced_parameters import AdvancedQuantizationParameters +from nncf.quantization.advanced_parameters import OverflowFix +from nncf.quantization.advanced_parameters import QuantizationMode +from nncf.quantization.advanced_parameters import QuantizationParameters +from nncf.quantization.range_estimator import RangeEstimatorParametersSet +from nncf.scopes import IgnoredScope +from nncf.tensorflow.quantization.quantize_model import _create_nncf_config + + +@pytest.mark.parametrize( + "params", + ( + { + "preset": QuantizationPreset.MIXED, + "target_device": TargetDevice.ANY, + "subset_size": 1, + "ignored_scope": IgnoredScope(names=["node_1"]), + "advanced_parameters": AdvancedQuantizationParameters( + overflow_fix=OverflowFix.DISABLE, quantize_outputs=True, disable_bias_correction=True + ), + }, + { + "preset": QuantizationPreset.MIXED, + "target_device": TargetDevice.ANY, + "subset_size": 2, + "ignored_scope": None, + "advanced_parameters": AdvancedQuantizationParameters( + overflow_fix=OverflowFix.ENABLE, quantize_outputs=False, disable_bias_correction=False + ), + }, + { + "preset": QuantizationPreset.MIXED, + "target_device": TargetDevice.ANY, + "subset_size": 3, + "ignored_scope": IgnoredScope(names=["node_1"]), + "advanced_parameters": AdvancedQuantizationParameters( + overflow_fix=OverflowFix.FIRST_LAYER, quantize_outputs=True, disable_bias_correction=False + ), + }, + { + "preset": QuantizationPreset.MIXED, + "target_device": TargetDevice.ANY, + "subset_size": 4, + "ignored_scope": IgnoredScope(names=["node_1"]), + "advanced_parameters": AdvancedQuantizationParameters( + overflow_fix=OverflowFix.FIRST_LAYER, + quantize_outputs=True, + disable_bias_correction=False, + activations_quantization_params=QuantizationParameters(num_bits=8, mode=QuantizationMode.SYMMETRIC), + activations_range_estimator_params=RangeEstimatorParametersSet.MEAN_MINMAX, + weights_quantization_params=QuantizationParameters(num_bits=8, mode=QuantizationMode.SYMMETRIC), + weights_range_estimator_params=RangeEstimatorParametersSet.MEAN_MINMAX, + ), + }, + ), +) +def test_create_nncf_config(params): + config = _create_nncf_config(**params) + + assert config["compression"]["overflow_fix"] == params["advanced_parameters"].overflow_fix.value + assert config["compression"]["quantize_outputs"] == params["advanced_parameters"].quantize_outputs + + assert config["compression"]["preset"] == params["preset"].value + + range_config = config["compression"]["initializer"]["range"] + if isinstance(range_config, dict): + assert range_config["num_init_samples"] == params["subset_size"] + assert range_config["type"] == "mean_min_max" + else: + for rc in range_config: + assert rc["num_init_samples"] == params["subset_size"] + assert rc["type"] == "mean_min_max" + + num_bn_samples = config["compression"]["initializer"]["batchnorm_adaptation"]["num_bn_adaptation_samples"] + if params["advanced_parameters"].disable_bias_correction is True: + assert num_bn_samples == 0 + else: + assert num_bn_samples == params["subset_size"] + + ref_scope = params["ignored_scope"].names if params["ignored_scope"] is not None else [] + assert config["compression"].get("ignored_scopes", []) == ref_scope + + # input_info is required to validate the NNCFConfig + config["input_info"] = {"sample_size": [1, 2, 224, 224]} + NNCFConfig.validate(config) diff --git 
a/tests/tensorflow/quantization/test_transform_fn.py b/tests/tensorflow/quantization/test_transform_fn.py new file mode 100644 index 00000000000..e2d6255f2f5 --- /dev/null +++ b/tests/tensorflow/quantization/test_transform_fn.py @@ -0,0 +1,62 @@ +# Copyright (c) 2023 Intel Corporation +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import pytest +import tensorflow as tf + +import nncf +from tests.tensorflow.test_models.sequential_model import SequentialModel as ModelWithSingleInput + + +def ModelWithMultipleInputs(): + input_0 = tf.keras.Input(shape=(32, 32, 3)) + input_1 = tf.keras.Input(shape=(32, 32, 3)) + + output_0 = tf.keras.layers.Conv2D(64, 3)(input_0) + output_1 = tf.keras.layers.Conv2D(64, 3)(input_1) + output = tf.keras.layers.Add()([output_0, output_1]) + return tf.keras.Model([input_0, input_1], output) + + +dataset = [ + { + "input_0": tf.zeros((1, 32, 32, 3), dtype=tf.float32), + "input_1": tf.zeros((1, 32, 32, 3), dtype=tf.float32), + } +] + + +def single_input_transform_fn(data_item): + return data_item["input_0"] + + +def multiple_inputs_transform_fn(data_item): + return data_item["input_0"], data_item["input_1"] + + +@pytest.mark.parametrize( + "model,transform_fn", + [ + [ModelWithSingleInput(input_shape=(32, 32, 3)), single_input_transform_fn], + [ModelWithMultipleInputs(), multiple_inputs_transform_fn], + ], + ids=[ + "single_input", + "multiple_inputs", + ], +) +def test_transform_fn(model, transform_fn): + # Check the transformation function + _ = model(transform_fn(next(iter(dataset)))) + + # Start quantization + calibration_dataset = nncf.Dataset(dataset, transform_fn) + _ = nncf.quantize(model, calibration_dataset) diff --git a/tests/tensorflow/quantization/test_unified_scales.py b/tests/tensorflow/quantization/test_unified_scales.py index 35d544dd67d..dd35e981b98 100644 --- a/tests/tensorflow/quantization/test_unified_scales.py +++ b/tests/tensorflow/quantization/test_unified_scales.py @@ -144,11 +144,15 @@ def test_shared_op_unified_scales(target_device): nncf_config = get_basic_quantization_config() nncf_config["target_device"] = target_device + non_weight_quantizers_ref = 8 + if target_device == "VPU": + non_weight_quantizers_ref = 5 + model = get_shared_conv_test_model() compressed_model, _ = create_compressed_model_and_algo_for_test(model, nncf_config, force_no_init=True) non_weight_quantizers = len(collect_fake_quantize_layers(compressed_model)) - assert non_weight_quantizers == 5 + assert non_weight_quantizers == non_weight_quantizers_ref total_quantizations = get_total_quantizations(compressed_model) assert total_quantizations == 8 diff --git a/tests/tensorflow/requirements.txt b/tests/tensorflow/requirements.txt index 6303d10db49..f8add682a62 100644 --- a/tests/tensorflow/requirements.txt +++ b/tests/tensorflow/requirements.txt @@ -1,6 +1,7 @@ PyYAML -tensorflow_addons~=0.19.0 +tensorflow_addons~=0.20.0 pytest +pytest-cov pytest-mock pytest-dependency yattag>=1.14.0 diff --git a/tests/tensorflow/sota_checkpoints_eval.json 
b/tests/tensorflow/sota_checkpoints_eval.json index b969f28a525..39a4c2a5257 100644 --- a/tests/tensorflow/sota_checkpoints_eval.json +++ b/tests/tensorflow/sota_checkpoints_eval.json @@ -205,7 +205,8 @@ "compression_description": "INT8", "mean_value": "[103.939,116.779,123.68]", "diff_fp32_min": -1, - "diff_fp32_max": 0.1 + "diff_fp32_max": 0.1, + "diff_target_max": 0.15 }, "resnet50_imagenet_rb_sparsity_int8": { "config": "examples/tensorflow/classification/configs/sparsity_quantization/resnet50_imagenet_rb_sparsity_int8.json", @@ -292,7 +293,8 @@ "batch_per_gpu": 15, "mean_value": "[123.675,116.28,103.53]", "scale_value": "[58.395,57.12,57.375]", - "reverse_input_channels": true + "reverse_input_channels": true, + "diff_target_max": 0.15 }, "retinanet_coco_magnitude_sparsity": { "config": "examples/tensorflow/object_detection/configs/sparsity/retinanet_coco_magnitude_sparsity.json", diff --git a/tests/tensorflow/sparsity/rb/utils.py b/tests/tensorflow/sparsity/rb/utils.py index 97856775370..738c35274f0 100644 --- a/tests/tensorflow/sparsity/rb/utils.py +++ b/tests/tensorflow/sparsity/rb/utils.py @@ -1,3 +1,14 @@ +# Copyright (c) 2023 Intel Corporation +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + import tensorflow as tf from nncf.tensorflow.functions import logit diff --git a/tests/tensorflow/test_compressed_graph.py b/tests/tensorflow/test_compressed_graph.py index a53d651e2da..45dca492610 100644 --- a/tests/tensorflow/test_compressed_graph.py +++ b/tests/tensorflow/test_compressed_graph.py @@ -15,7 +15,7 @@ import networkx as nx import pytest import tensorflow as tf -from pkg_resources import parse_version +from packaging import version from nncf import NNCFConfig from nncf.common.hardware.config import HWConfigType @@ -356,7 +356,7 @@ def prepare_and_check_nx_graph( def check_model_graph(compressed_model, ref_graph_filename, ref_graph_dir, rename_resource_nodes): - tf_version = parse_version(tf.__version__).base_version + tf_version = version.parse(tf.__version__).base_version tf_version_major, tf_version_minor = tuple(map(int, tf_version.split(".")))[:2] data_dir = os.path.join( os.path.dirname(os.path.abspath(__file__)), "data", "reference_graphs", f"{tf_version_major}.{tf_version_minor}" diff --git a/tests/tensorflow/test_ignored_scopes.py b/tests/tensorflow/test_ignored_scopes.py index 7bbc31b1713..367be75ebe8 100644 --- a/tests/tensorflow/test_ignored_scopes.py +++ b/tests/tensorflow/test_ignored_scopes.py @@ -14,6 +14,7 @@ import tensorflow as tf from tensorflow.keras import layers +from nncf.config.schemata.defaults import VALIDATE_SCOPES from nncf.tensorflow.algorithm_selector import TF_COMPRESSION_ALGORITHMS from nncf.tensorflow.layers.wrapper import NNCFWrapper from nncf.tensorflow.quantization import FakeQuantize @@ -61,11 +62,20 @@ def test_ignored_scopes(): @pytest.mark.parametrize("algo_name", TF_COMPRESSION_ALGORITHMS.registry_dict.keys() - NOT_SUPPORT_SCOPES_ALGO) -def test_raise_runtimeerror_for_not_matched_scope_names(algo_name): +@pytest.mark.parametrize("validate_scopes", 
(True, False, None)) +def test_raise_runtimeerror_for_not_matched_scope_names(algo_name, validate_scopes): model = get_mock_model() config = get_empty_config() - config["compression"] = {"algorithm": algo_name, "ignored_scopes": ["unknown"]} + config["compression"] = { + "algorithm": algo_name, + "ignored_scopes": ["unknown"], + } - with pytest.raises(RuntimeError) as exc_info: + if validate_scopes is not None: + config["compression"]["validate_scopes"] = validate_scopes + + if validate_scopes or (validate_scopes is None and VALIDATE_SCOPES is True): + with pytest.raises(RuntimeError, match="scope definitions"): + create_compressed_model_and_algo_for_test(model, config) + else: create_compressed_model_and_algo_for_test(model, config) - assert "No match has been found among the model" in str(exc_info.value) diff --git a/tests/tensorflow/test_models/inception_resnet_v2.py b/tests/tensorflow/test_models/inception_resnet_v2.py index bde9c633aba..be4025ea630 100644 --- a/tests/tensorflow/test_models/inception_resnet_v2.py +++ b/tests/tensorflow/test_models/inception_resnet_v2.py @@ -10,6 +10,7 @@ # limitations under the License. import tensorflow as tf +from keras import layers as keras_layers from nncf.tensorflow.tf_internals import backend from nncf.tensorflow.tf_internals import imagenet_utils @@ -142,12 +143,21 @@ def inception_resnet_block(x, scale, block_type, block_idx, activation="relu"): mixed, backend.int_shape(x)[channel_axis], 1, activation=None, use_bias=True, name=block_name + "_conv" ) - x = layers.Lambda( - lambda inputs, scale: inputs[0] + inputs[1] * scale, - output_shape=backend.int_shape(x)[1:], - arguments={"scale": scale}, - name=block_name, - )([x, up]) + x = CustomScaleLayer(scale)([x, up]) if activation is not None: x = layers.Activation(activation, name=block_name + "_ac")(x) return x + + +class CustomScaleLayer(keras_layers.Layer): + def __init__(self, scale, **kwargs): + super().__init__(**kwargs) + self.scale = scale + + def get_config(self): + config = super().get_config() + config.update({"scale": self.scale}) + return config + + def call(self, inputs): + return inputs[0] + inputs[1] * self.scale diff --git a/tests/tensorflow/test_transformations.py b/tests/tensorflow/test_transformations.py index 64dd6fdf67d..40811afd1cc 100644 --- a/tests/tensorflow/test_transformations.py +++ b/tests/tensorflow/test_transformations.py @@ -13,7 +13,7 @@ import pytest import tensorflow as tf -from pkg_resources import parse_version +from packaging import version from tensorflow.keras import layers from tensorflow.keras import models @@ -523,7 +523,7 @@ def apply_insert_before(model): def check_graphs(model, ref_graph_filename): - tf_version = parse_version(tf.__version__).base_version + tf_version = version.parse(tf.__version__).base_version tf_version_major, tf_version_minor = tuple(map(int, tf_version.split(".")))[:2] data_dir = os.path.join( os.path.dirname(os.path.abspath(__file__)), "data", "model_transormer", f"{tf_version_major}.{tf_version_minor}" diff --git a/tests/torch/accuracy_aware_training/test_accuracy_aware_config.py b/tests/torch/accuracy_aware_training/test_accuracy_aware_config.py index 47969968663..76e02294032 100644 --- a/tests/torch/accuracy_aware_training/test_accuracy_aware_config.py +++ b/tests/torch/accuracy_aware_training/test_accuracy_aware_config.py @@ -104,6 +104,6 @@ def mock_validate_fn(model): if must_raise: with pytest.raises(RuntimeError): - _ = create_accuracy_aware_training_loop(config, compression_ctrl, dump_checkpoints=False) + _ = 
create_accuracy_aware_training_loop(config, compression_ctrl, 0, dump_checkpoints=False) else: - _ = create_accuracy_aware_training_loop(config, compression_ctrl, dump_checkpoints=False) + _ = create_accuracy_aware_training_loop(config, compression_ctrl, 0, dump_checkpoints=False) diff --git a/tests/torch/accuracy_aware_training/test_training_loop.py b/tests/torch/accuracy_aware_training/test_training_loop.py index 3c295eeca40..30e51d56a3f 100644 --- a/tests/torch/accuracy_aware_training/test_training_loop.py +++ b/tests/torch/accuracy_aware_training/test_training_loop.py @@ -33,8 +33,8 @@ from tests.torch.sparsity.magnitude.test_helpers import get_basic_magnitude_sparsity_config -@pytest.fixture(scope="module") -def finetuned_master_lenet(): +@pytest.fixture(scope="module", name="finetuned_master_lenet") +def fixture_finetuned_master_lenet(): learning_rate = 1e-3 finetuning_steps = 10 with set_torch_seed(): @@ -56,8 +56,8 @@ def finetuned_master_lenet(): return model, train_loader -@pytest.fixture -def finetuned_lenet(finetuned_master_lenet): +@pytest.fixture(name="finetuned_lenet") +def fixture_finetuned_lenet(finetuned_master_lenet): model, loader = finetuned_master_lenet return deepcopy(model), loader diff --git a/tests/torch/binarization/test_timeout_extension_loader.py b/tests/torch/binarization/test_timeout_extension_loader.py new file mode 100644 index 00000000000..7f74c511b0b --- /dev/null +++ b/tests/torch/binarization/test_timeout_extension_loader.py @@ -0,0 +1,45 @@ +# Copyright (c) 2023 Intel Corporation +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import os +from pathlib import Path + +import pytest +import torch + +from nncf.torch.binarization.extensions import BinarizedFunctionsCPU +from nncf.torch.binarization.extensions import BinarizedFunctionsCUDA +from nncf.torch.extensions import EXTENSION_LOAD_TIMEOUT_ENV_VAR +from nncf.torch.extensions import ExtensionLoaderTimeoutException +from tests.shared.isolation_runner import ISOLATION_RUN_ENV_VAR +from tests.shared.isolation_runner import run_pytest_case_function_in_separate_process + + +@pytest.mark.skipif(ISOLATION_RUN_ENV_VAR not in os.environ, reason="Should be run via isolation proxy") +def test_timeout_extension_loader_isolated(tmp_path, use_cuda): + if not torch.cuda.is_available() and use_cuda is True: + pytest.skip("Skipping CUDA test cases for CPU only setups") + + quant_func = BinarizedFunctionsCUDA if use_cuda else BinarizedFunctionsCPU + + os.environ[EXTENSION_LOAD_TIMEOUT_ENV_VAR] = "1" + os.environ["TORCH_EXTENSIONS_DIR"] = tmp_path.as_posix() + + # pylint: disable=protected-access + build_dir = Path(quant_func._loader.get_build_dir()) + lock_file = build_dir / "lock" + lock_file.touch() + with pytest.raises(ExtensionLoaderTimeoutException): + quant_func.get("ActivationBinarize_forward") + + +def test_timeout_extension_loader(): + run_pytest_case_function_in_separate_process(test_timeout_extension_loader_isolated) diff --git a/tests/torch/composite/test_sparsity_quantization.py b/tests/torch/composite/test_sparsity_quantization.py index 53cf124bdb9..dc95253b652 100644 --- a/tests/torch/composite/test_sparsity_quantization.py +++ b/tests/torch/composite/test_sparsity_quantization.py @@ -1,3 +1,14 @@ +# Copyright (c) 2023 Intel Corporation +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ from nncf.config import NNCFConfig from nncf.torch.composite_compression import CompositeCompressionAlgorithmController from nncf.torch.module_operations import UpdateWeight diff --git a/tests/torch/data/reference_graphs/nas/efficient_net_b0_depth.dot b/tests/torch/data/reference_graphs/nas/efficient_net_b0_depth.dot index 8f7095b5749..67aa12dce45 100644 --- a/tests/torch/data/reference_graphs/nas/efficient_net_b0_depth.dot +++ b/tests/torch/data/reference_graphs/nas/efficient_net_b0_depth.dot @@ -133,104 +133,95 @@ strict digraph { "131 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/MemoryEfficientSwish[_swish]/sigmoid_0" [id=131, label="sigmoid_IW672_OW672_#170", style=filled, type=sigmoid]; "132 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/MemoryEfficientSwish[_swish]/__mul___0" [id=132, label="__mul___IW[672, 672]_OW672_#171", style=filled, type=__mul__]; "133 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateInputs[1]/__getitem___0" [id=133, label="__getitem___#133", style=filled, type=__getitem__]; -"134 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateWeight[3]/ElasticKernelConv2DOp[op]/linear_0" [id=134, label="linear_#134", style=filled, type=linear]; -"135 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateWeight[3]/ElasticKernelConv2DOp[op]/view_0" [id=135, label="view_#135", style=filled, type=view]; -"136 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateWeight[3]/ElasticKernelConv2DOp[op]/view_1" [id=136, label="view_#136", style=filled, type=view]; -"137 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ZeroPad2d[static_padding]/pad_0" [id=137, label="pad_IW672_OW672_#172", style=filled, type=pad]; -"138 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/conv2d_0" [color=purple, id=138, label="DW_conv2d_IW672_OW672_G22_#173", style=filled, type=conv2d]; -"139 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFBatchNorm2d[_bn1]/batch_norm_0" [id=139, label="batch_norm_IW672_OW672_#174", style=filled, type=batch_norm]; -"140 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/MemoryEfficientSwish[_swish]/sigmoid_1" [id=140, label="sigmoid_IW672_OW672_#175", style=filled, type=sigmoid]; -"141 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/MemoryEfficientSwish[_swish]/__mul___1" [id=141, label="__mul___IW[672, 672]_OW672_#176", style=filled, type=__mul__]; -"142 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/adaptive_avg_pool2d_0" [id=142, label="adaptive_avg_pool2d_IW672_OW672_#177", style=filled, type=adaptive_avg_pool2d]; -"143 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" [color=lightblue, id=143, label="conv2d_IW672_OW28_G21_#178", style=filled, type=conv2d]; -"144 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/MemoryEfficientSwish[_swish]/sigmoid_2" [id=144, label="sigmoid_IW28_OW28_#179", style=filled, type=sigmoid]; -"145 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/MemoryEfficientSwish[_swish]/__mul___2" [id=145, label="__mul___IW[28, 28]_OW28_#180", style=filled, type=__mul__]; -"146 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFUserConv2dStaticSamePadding[_se_expand]/conv2d_0" [color=lightblue, id=146, 
label="conv2d_IW28_OW672_G22_#181", style=filled, type=conv2d]; -"147 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/sigmoid_0" [id=147, label="sigmoid_IW672_OW672_#182", style=filled, type=sigmoid]; -"148 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/__mul___0" [id=148, label="__mul___IW[672, 672]_OW672_#183", style=filled, type=__mul__]; -"149 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFUserConv2dStaticSamePadding[_project_conv]/conv2d_0" [color=lightblue, id=149, label="conv2d_IW672_OW112_G25_#184", style=filled, type=conv2d]; -"150 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFBatchNorm2d[_bn2]/batch_norm_0" [id=150, label="batch_norm_IW112_OW112_#185", style=filled, type=batch_norm]; -"151 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/__add___0" [id=151, label="__add___IW[112, 112]_OW112_#186", style=filled, type=__add__]; -"152 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFUserConv2dStaticSamePadding[_expand_conv]/conv2d_0" [color=lightblue, id=152, label="conv2d_IW112_OW672_G27_#206", style=filled, type=conv2d]; -"153 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFBatchNorm2d[_bn0]/batch_norm_0" [id=153, label="batch_norm_IW672_OW672_#207", style=filled, type=batch_norm]; -"154 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/MemoryEfficientSwish[_swish]/sigmoid_0" [id=154, label="sigmoid_IW672_OW672_#208", style=filled, type=sigmoid]; -"155 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/MemoryEfficientSwish[_swish]/__mul___0" [id=155, label="__mul___IW[672, 672]_OW672_#209", style=filled, type=__mul__]; -"156 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateInputs[1]/__getitem___0" [id=156, label="__getitem___#156", style=filled, type=__getitem__]; -"157 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateWeight[3]/ElasticKernelConv2DOp[op]/linear_0" [id=157, label="linear_#157", style=filled, type=linear]; -"158 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateWeight[3]/ElasticKernelConv2DOp[op]/view_0" [id=158, label="view_#158", style=filled, type=view]; -"159 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateWeight[3]/ElasticKernelConv2DOp[op]/view_1" [id=159, label="view_#159", style=filled, type=view]; -"160 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ZeroPad2d[static_padding]/pad_0" [id=160, label="pad_IW672_OW672_#210", style=filled, type=pad]; -"161 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/conv2d_0" [color=purple, id=161, label="DW_conv2d_IW672_OW672_G27_#211", style=filled, type=conv2d]; -"162 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFBatchNorm2d[_bn1]/batch_norm_0" [id=162, label="batch_norm_IW672_OW672_#212", style=filled, type=batch_norm]; -"163 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/MemoryEfficientSwish[_swish]/sigmoid_1" [id=163, label="sigmoid_IW672_OW672_#213", style=filled, type=sigmoid]; -"164 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/MemoryEfficientSwish[_swish]/__mul___1" [id=164, label="__mul___IW[672, 672]_OW672_#214", style=filled, type=__mul__]; -"165 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/adaptive_avg_pool2d_0" [id=165, label="adaptive_avg_pool2d_IW672_OW672_#215", style=filled, 
type=adaptive_avg_pool2d]; -"166 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" [color=lightblue, id=166, label="conv2d_IW672_OW28_G26_#216", style=filled, type=conv2d]; -"167 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/MemoryEfficientSwish[_swish]/sigmoid_2" [id=167, label="sigmoid_IW28_OW28_#217", style=filled, type=sigmoid]; -"168 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/MemoryEfficientSwish[_swish]/__mul___2" [id=168, label="__mul___IW[28, 28]_OW28_#218", style=filled, type=__mul__]; -"169 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFUserConv2dStaticSamePadding[_se_expand]/conv2d_0" [color=lightblue, id=169, label="conv2d_IW28_OW672_G27_#219", style=filled, type=conv2d]; -"170 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/sigmoid_0" [id=170, label="sigmoid_IW672_OW672_#220", style=filled, type=sigmoid]; -"171 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/__mul___0" [id=171, label="__mul___IW[672, 672]_OW672_#221", style=filled, type=__mul__]; -"172 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFUserConv2dStaticSamePadding[_project_conv]/conv2d_0" [color=lightblue, id=172, label="conv2d_IW672_OW192_G34_#222", style=filled, type=conv2d]; -"173 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFBatchNorm2d[_bn2]/batch_norm_0" [id=173, label="batch_norm_IW192_OW192_#223", style=filled, type=batch_norm]; -"174 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFUserConv2dStaticSamePadding[_expand_conv]/conv2d_0" [color=lightblue, id=174, label="conv2d_IW192_OW1152_G29_#224", style=filled, type=conv2d]; -"175 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFBatchNorm2d[_bn0]/batch_norm_0" [id=175, label="batch_norm_IW1152_OW1152_#225", style=filled, type=batch_norm]; -"176 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/MemoryEfficientSwish[_swish]/sigmoid_0" [id=176, label="sigmoid_IW1152_OW1152_#226", style=filled, type=sigmoid]; -"177 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/MemoryEfficientSwish[_swish]/__mul___0" [id=177, label="__mul___IW[1152, 1152]_OW1152_#227", style=filled, type=__mul__]; -"178 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateInputs[1]/__getitem___0" [id=178, label="__getitem___#178", style=filled, type=__getitem__]; -"179 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ZeroPad2d[static_padding]/pad_0" [id=179, label="pad_IW1152_OW1152_#228", style=filled, type=pad]; -"180 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/conv2d_0" [color=purple, id=180, label="DW_conv2d_IW1152_OW1152_G29_#229", style=filled, type=conv2d]; -"181 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFBatchNorm2d[_bn1]/batch_norm_0" [id=181, label="batch_norm_IW1152_OW1152_#230", style=filled, type=batch_norm]; -"182 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/MemoryEfficientSwish[_swish]/sigmoid_1" [id=182, label="sigmoid_IW1152_OW1152_#231", style=filled, type=sigmoid]; -"183 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/MemoryEfficientSwish[_swish]/__mul___1" [id=183, label="__mul___IW[1152, 1152]_OW1152_#232", style=filled, type=__mul__]; -"184 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/adaptive_avg_pool2d_0" [id=184, label="adaptive_avg_pool2d_IW1152_OW1152_#233", style=filled, type=adaptive_avg_pool2d]; -"185 
EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" [color=lightblue, id=185, label="conv2d_IW1152_OW48_G28_#234", style=filled, type=conv2d]; -"186 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/MemoryEfficientSwish[_swish]/sigmoid_2" [id=186, label="sigmoid_IW48_OW48_#235", style=filled, type=sigmoid]; -"187 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/MemoryEfficientSwish[_swish]/__mul___2" [id=187, label="__mul___IW[48, 48]_OW48_#236", style=filled, type=__mul__]; -"188 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFUserConv2dStaticSamePadding[_se_expand]/conv2d_0" [color=lightblue, id=188, label="conv2d_IW48_OW1152_G29_#237", style=filled, type=conv2d]; -"189 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/sigmoid_0" [id=189, label="sigmoid_IW1152_OW1152_#238", style=filled, type=sigmoid]; -"190 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/__mul___0" [id=190, label="__mul___IW[1152, 1152]_OW1152_#239", style=filled, type=__mul__]; -"191 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFUserConv2dStaticSamePadding[_project_conv]/conv2d_0" [color=lightblue, id=191, label="conv2d_IW1152_OW192_G34_#240", style=filled, type=conv2d]; -"192 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFBatchNorm2d[_bn2]/batch_norm_0" [id=192, label="batch_norm_IW192_OW192_#241", style=filled, type=batch_norm]; -"193 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/__add___0" [id=193, label="__add___IW[192, 192]_OW192_#242", style=filled, type=__add__]; -"194 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFUserConv2dStaticSamePadding[_expand_conv]/conv2d_0" [color=lightblue, id=194, label="conv2d_IW192_OW1152_G31_#243", style=filled, type=conv2d]; -"195 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFBatchNorm2d[_bn0]/batch_norm_0" [id=195, label="batch_norm_IW1152_OW1152_#244", style=filled, type=batch_norm]; -"196 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/MemoryEfficientSwish[_swish]/sigmoid_0" [id=196, label="sigmoid_IW1152_OW1152_#245", style=filled, type=sigmoid]; -"197 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/MemoryEfficientSwish[_swish]/__mul___0" [id=197, label="__mul___IW[1152, 1152]_OW1152_#246", style=filled, type=__mul__]; -"198 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateInputs[1]/__getitem___0" [id=198, label="__getitem___#198", style=filled, type=__getitem__]; -"199 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateWeight[3]/ElasticKernelConv2DOp[op]/linear_0" [id=199, label="linear_#199", style=filled, type=linear]; -"200 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateWeight[3]/ElasticKernelConv2DOp[op]/view_0" [id=200, label="view_#200", style=filled, type=view]; -"201 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateWeight[3]/ElasticKernelConv2DOp[op]/view_1" [id=201, label="view_#201", style=filled, type=view]; -"202 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ZeroPad2d[static_padding]/pad_0" [id=202, label="pad_IW1152_OW1152_#247", style=filled, type=pad]; -"203 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/conv2d_0" [color=purple, id=203, label="DW_conv2d_IW1152_OW1152_G31_#248", 
style=filled, type=conv2d]; -"204 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFBatchNorm2d[_bn1]/batch_norm_0" [id=204, label="batch_norm_IW1152_OW1152_#249", style=filled, type=batch_norm]; -"205 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/MemoryEfficientSwish[_swish]/sigmoid_1" [id=205, label="sigmoid_IW1152_OW1152_#250", style=filled, type=sigmoid]; -"206 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/MemoryEfficientSwish[_swish]/__mul___1" [id=206, label="__mul___IW[1152, 1152]_OW1152_#251", style=filled, type=__mul__]; -"207 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/adaptive_avg_pool2d_0" [id=207, label="adaptive_avg_pool2d_IW1152_OW1152_#252", style=filled, type=adaptive_avg_pool2d]; -"208 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" [color=lightblue, id=208, label="conv2d_IW1152_OW48_G30_#253", style=filled, type=conv2d]; -"209 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/MemoryEfficientSwish[_swish]/sigmoid_2" [id=209, label="sigmoid_IW48_OW48_#254", style=filled, type=sigmoid]; -"210 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/MemoryEfficientSwish[_swish]/__mul___2" [id=210, label="__mul___IW[48, 48]_OW48_#255", style=filled, type=__mul__]; -"211 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFUserConv2dStaticSamePadding[_se_expand]/conv2d_0" [color=lightblue, id=211, label="conv2d_IW48_OW1152_G31_#256", style=filled, type=conv2d]; -"212 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/sigmoid_0" [id=212, label="sigmoid_IW1152_OW1152_#257", style=filled, type=sigmoid]; -"213 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/__mul___0" [id=213, label="__mul___IW[1152, 1152]_OW1152_#258", style=filled, type=__mul__]; -"214 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFUserConv2dStaticSamePadding[_project_conv]/conv2d_0" [color=lightblue, id=214, label="conv2d_IW1152_OW192_G34_#259", style=filled, type=conv2d]; -"215 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFBatchNorm2d[_bn2]/batch_norm_0" [id=215, label="batch_norm_IW192_OW192_#260", style=filled, type=batch_norm]; -"216 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/__add___0" [id=216, label="__add___IW[192, 192]_OW192_#261", style=filled, type=__add__]; -"217 EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/NNCFUserConv2dStaticSamePadding[_expand_conv]/conv2d_0" [color=lightblue, id=217, label="conv2d_IW192_OW1152_G36_#281", style=filled, type=conv2d]; -"218 EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/NNCFBatchNorm2d[_bn0]/batch_norm_0" [id=218, label="batch_norm_IW1152_OW1152_#282", style=filled, type=batch_norm]; -"219 EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/MemoryEfficientSwish[_swish]/sigmoid_0" [id=219, label="sigmoid_IW1152_OW1152_#283", style=filled, type=sigmoid]; -"220 EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/MemoryEfficientSwish[_swish]/__mul___0" [id=220, label="__mul___IW[1152, 1152]_OW1152_#284", style=filled, type=__mul__]; -"221 EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/NNCFUserConv2dStaticSamePadding[_project_conv]/conv2d_0" [color=lightblue, id=221, label="conv2d_IW1152_OW320_G39_#297", style=filled, type=conv2d]; -"222 EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/NNCFBatchNorm2d[_bn2]/batch_norm_0" [id=222, label="batch_norm_IW320_OW320_#298", style=filled, type=batch_norm]; -"223 EfficientNet/NNCFUserConv2dStaticSamePadding[_conv_head]/conv2d_0" [color=lightblue, id=223, label="conv2d_IW320_OW1280_G37_#299", style=filled, type=conv2d]; -"224 
EfficientNet/NNCFBatchNorm2d[_bn1]/batch_norm_0" [id=224, label="batch_norm_IW1280_OW1280_#300", style=filled, type=batch_norm]; -"225 EfficientNet/MemoryEfficientSwish[_swish]/sigmoid_1" [id=225, label="sigmoid_IW1280_OW1280_#301", style=filled, type=sigmoid]; -"226 EfficientNet/MemoryEfficientSwish[_swish]/__mul___1" [id=226, label="__mul___IW[1280, 1280]_OW1280_#302", style=filled, type=__mul__]; -"227 EfficientNet/AdaptiveAvgPool2d[_avg_pooling]/adaptive_avg_pool2d_0" [id=227, label="adaptive_avg_pool2d_IW1280_OW1280_#303", style=filled, type=adaptive_avg_pool2d]; -"228 EfficientNet/flatten_0" [id=228, label="flatten_IW1280_OW1280_#304", style=filled, type=flatten]; -"229 EfficientNet/Dropout[_dropout]/dropout_0" [id=229, label="dropout_IW1280_OW1280_#305", style=filled, type=dropout]; -"230 EfficientNet/NNCFLinear[_fc]/linear_0" [id=230, label="linear_IW1280_#306", style=filled, type=linear]; -"231 /nncf_model_output_0" [id=231, label="nncf_model_output_#307", style=filled, type=nncf_model_output]; +"134 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ZeroPad2d[static_padding]/pad_0" [id=134, label="pad_IW672_OW672_#172", style=filled, type=pad]; +"135 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/conv2d_0" [color=purple, id=135, label="DW_conv2d_IW672_OW672_G22_#173", style=filled, type=conv2d]; +"136 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFBatchNorm2d[_bn1]/batch_norm_0" [id=136, label="batch_norm_IW672_OW672_#174", style=filled, type=batch_norm]; +"137 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/MemoryEfficientSwish[_swish]/sigmoid_1" [id=137, label="sigmoid_IW672_OW672_#175", style=filled, type=sigmoid]; +"138 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/MemoryEfficientSwish[_swish]/__mul___1" [id=138, label="__mul___IW[672, 672]_OW672_#176", style=filled, type=__mul__]; +"139 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/adaptive_avg_pool2d_0" [id=139, label="adaptive_avg_pool2d_IW672_OW672_#177", style=filled, type=adaptive_avg_pool2d]; +"140 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" [color=lightblue, id=140, label="conv2d_IW672_OW28_G21_#178", style=filled, type=conv2d]; +"141 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/MemoryEfficientSwish[_swish]/sigmoid_2" [id=141, label="sigmoid_IW28_OW28_#179", style=filled, type=sigmoid]; +"142 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/MemoryEfficientSwish[_swish]/__mul___2" [id=142, label="__mul___IW[28, 28]_OW28_#180", style=filled, type=__mul__]; +"143 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFUserConv2dStaticSamePadding[_se_expand]/conv2d_0" [color=lightblue, id=143, label="conv2d_IW28_OW672_G22_#181", style=filled, type=conv2d]; +"144 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/sigmoid_0" [id=144, label="sigmoid_IW672_OW672_#182", style=filled, type=sigmoid]; +"145 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/__mul___0" [id=145, label="__mul___IW[672, 672]_OW672_#183", style=filled, type=__mul__]; +"146 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFUserConv2dStaticSamePadding[_project_conv]/conv2d_0" [color=lightblue, id=146, label="conv2d_IW672_OW112_G25_#184", style=filled, type=conv2d]; +"147 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFBatchNorm2d[_bn2]/batch_norm_0" [id=147, label="batch_norm_IW112_OW112_#185", style=filled, type=batch_norm]; +"148 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/__add___0" 
[id=148, label="__add___IW[112, 112]_OW112_#186", style=filled, type=__add__]; +"149 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFUserConv2dStaticSamePadding[_expand_conv]/conv2d_0" [color=lightblue, id=149, label="conv2d_IW112_OW672_G27_#206", style=filled, type=conv2d]; +"150 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFBatchNorm2d[_bn0]/batch_norm_0" [id=150, label="batch_norm_IW672_OW672_#207", style=filled, type=batch_norm]; +"151 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/MemoryEfficientSwish[_swish]/sigmoid_0" [id=151, label="sigmoid_IW672_OW672_#208", style=filled, type=sigmoid]; +"152 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/MemoryEfficientSwish[_swish]/__mul___0" [id=152, label="__mul___IW[672, 672]_OW672_#209", style=filled, type=__mul__]; +"153 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateInputs[1]/__getitem___0" [id=153, label="__getitem___#153", style=filled, type=__getitem__]; +"154 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ZeroPad2d[static_padding]/pad_0" [id=154, label="pad_IW672_OW672_#210", style=filled, type=pad]; +"155 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/conv2d_0" [color=purple, id=155, label="DW_conv2d_IW672_OW672_G27_#211", style=filled, type=conv2d]; +"156 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFBatchNorm2d[_bn1]/batch_norm_0" [id=156, label="batch_norm_IW672_OW672_#212", style=filled, type=batch_norm]; +"157 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/MemoryEfficientSwish[_swish]/sigmoid_1" [id=157, label="sigmoid_IW672_OW672_#213", style=filled, type=sigmoid]; +"158 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/MemoryEfficientSwish[_swish]/__mul___1" [id=158, label="__mul___IW[672, 672]_OW672_#214", style=filled, type=__mul__]; +"159 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/adaptive_avg_pool2d_0" [id=159, label="adaptive_avg_pool2d_IW672_OW672_#215", style=filled, type=adaptive_avg_pool2d]; +"160 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" [color=lightblue, id=160, label="conv2d_IW672_OW28_G26_#216", style=filled, type=conv2d]; +"161 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/MemoryEfficientSwish[_swish]/sigmoid_2" [id=161, label="sigmoid_IW28_OW28_#217", style=filled, type=sigmoid]; +"162 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/MemoryEfficientSwish[_swish]/__mul___2" [id=162, label="__mul___IW[28, 28]_OW28_#218", style=filled, type=__mul__]; +"163 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFUserConv2dStaticSamePadding[_se_expand]/conv2d_0" [color=lightblue, id=163, label="conv2d_IW28_OW672_G27_#219", style=filled, type=conv2d]; +"164 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/sigmoid_0" [id=164, label="sigmoid_IW672_OW672_#220", style=filled, type=sigmoid]; +"165 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/__mul___0" [id=165, label="__mul___IW[672, 672]_OW672_#221", style=filled, type=__mul__]; +"166 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFUserConv2dStaticSamePadding[_project_conv]/conv2d_0" [color=lightblue, id=166, label="conv2d_IW672_OW192_G34_#222", style=filled, type=conv2d]; +"167 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFBatchNorm2d[_bn2]/batch_norm_0" [id=167, label="batch_norm_IW192_OW192_#223", style=filled, type=batch_norm]; +"168 
EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFUserConv2dStaticSamePadding[_expand_conv]/conv2d_0" [color=lightblue, id=168, label="conv2d_IW192_OW1152_G29_#224", style=filled, type=conv2d]; +"169 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFBatchNorm2d[_bn0]/batch_norm_0" [id=169, label="batch_norm_IW1152_OW1152_#225", style=filled, type=batch_norm]; +"170 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/MemoryEfficientSwish[_swish]/sigmoid_0" [id=170, label="sigmoid_IW1152_OW1152_#226", style=filled, type=sigmoid]; +"171 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/MemoryEfficientSwish[_swish]/__mul___0" [id=171, label="__mul___IW[1152, 1152]_OW1152_#227", style=filled, type=__mul__]; +"172 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateInputs[1]/__getitem___0" [id=172, label="__getitem___#172", style=filled, type=__getitem__]; +"173 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ZeroPad2d[static_padding]/pad_0" [id=173, label="pad_IW1152_OW1152_#228", style=filled, type=pad]; +"174 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/conv2d_0" [color=purple, id=174, label="DW_conv2d_IW1152_OW1152_G29_#229", style=filled, type=conv2d]; +"175 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFBatchNorm2d[_bn1]/batch_norm_0" [id=175, label="batch_norm_IW1152_OW1152_#230", style=filled, type=batch_norm]; +"176 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/MemoryEfficientSwish[_swish]/sigmoid_1" [id=176, label="sigmoid_IW1152_OW1152_#231", style=filled, type=sigmoid]; +"177 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/MemoryEfficientSwish[_swish]/__mul___1" [id=177, label="__mul___IW[1152, 1152]_OW1152_#232", style=filled, type=__mul__]; +"178 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/adaptive_avg_pool2d_0" [id=178, label="adaptive_avg_pool2d_IW1152_OW1152_#233", style=filled, type=adaptive_avg_pool2d]; +"179 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" [color=lightblue, id=179, label="conv2d_IW1152_OW48_G28_#234", style=filled, type=conv2d]; +"180 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/MemoryEfficientSwish[_swish]/sigmoid_2" [id=180, label="sigmoid_IW48_OW48_#235", style=filled, type=sigmoid]; +"181 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/MemoryEfficientSwish[_swish]/__mul___2" [id=181, label="__mul___IW[48, 48]_OW48_#236", style=filled, type=__mul__]; +"182 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFUserConv2dStaticSamePadding[_se_expand]/conv2d_0" [color=lightblue, id=182, label="conv2d_IW48_OW1152_G29_#237", style=filled, type=conv2d]; +"183 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/sigmoid_0" [id=183, label="sigmoid_IW1152_OW1152_#238", style=filled, type=sigmoid]; +"184 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/__mul___0" [id=184, label="__mul___IW[1152, 1152]_OW1152_#239", style=filled, type=__mul__]; +"185 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFUserConv2dStaticSamePadding[_project_conv]/conv2d_0" [color=lightblue, id=185, label="conv2d_IW1152_OW192_G34_#240", style=filled, type=conv2d]; +"186 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFBatchNorm2d[_bn2]/batch_norm_0" [id=186, label="batch_norm_IW192_OW192_#241", style=filled, type=batch_norm]; +"187 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/__add___0" [id=187, label="__add___IW[192, 192]_OW192_#242", 
style=filled, type=__add__]; +"188 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFUserConv2dStaticSamePadding[_expand_conv]/conv2d_0" [color=lightblue, id=188, label="conv2d_IW192_OW1152_G31_#243", style=filled, type=conv2d]; +"189 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFBatchNorm2d[_bn0]/batch_norm_0" [id=189, label="batch_norm_IW1152_OW1152_#244", style=filled, type=batch_norm]; +"190 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/MemoryEfficientSwish[_swish]/sigmoid_0" [id=190, label="sigmoid_IW1152_OW1152_#245", style=filled, type=sigmoid]; +"191 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/MemoryEfficientSwish[_swish]/__mul___0" [id=191, label="__mul___IW[1152, 1152]_OW1152_#246", style=filled, type=__mul__]; +"192 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateInputs[1]/__getitem___0" [id=192, label="__getitem___#192", style=filled, type=__getitem__]; +"193 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ZeroPad2d[static_padding]/pad_0" [id=193, label="pad_IW1152_OW1152_#247", style=filled, type=pad]; +"194 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/conv2d_0" [color=purple, id=194, label="DW_conv2d_IW1152_OW1152_G31_#248", style=filled, type=conv2d]; +"195 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFBatchNorm2d[_bn1]/batch_norm_0" [id=195, label="batch_norm_IW1152_OW1152_#249", style=filled, type=batch_norm]; +"196 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/MemoryEfficientSwish[_swish]/sigmoid_1" [id=196, label="sigmoid_IW1152_OW1152_#250", style=filled, type=sigmoid]; +"197 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/MemoryEfficientSwish[_swish]/__mul___1" [id=197, label="__mul___IW[1152, 1152]_OW1152_#251", style=filled, type=__mul__]; +"198 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/adaptive_avg_pool2d_0" [id=198, label="adaptive_avg_pool2d_IW1152_OW1152_#252", style=filled, type=adaptive_avg_pool2d]; +"199 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" [color=lightblue, id=199, label="conv2d_IW1152_OW48_G30_#253", style=filled, type=conv2d]; +"200 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/MemoryEfficientSwish[_swish]/sigmoid_2" [id=200, label="sigmoid_IW48_OW48_#254", style=filled, type=sigmoid]; +"201 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/MemoryEfficientSwish[_swish]/__mul___2" [id=201, label="__mul___IW[48, 48]_OW48_#255", style=filled, type=__mul__]; +"202 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFUserConv2dStaticSamePadding[_se_expand]/conv2d_0" [color=lightblue, id=202, label="conv2d_IW48_OW1152_G31_#256", style=filled, type=conv2d]; +"203 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/sigmoid_0" [id=203, label="sigmoid_IW1152_OW1152_#257", style=filled, type=sigmoid]; +"204 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/__mul___0" [id=204, label="__mul___IW[1152, 1152]_OW1152_#258", style=filled, type=__mul__]; +"205 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFUserConv2dStaticSamePadding[_project_conv]/conv2d_0" [color=lightblue, id=205, label="conv2d_IW1152_OW192_G34_#259", style=filled, type=conv2d]; +"206 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFBatchNorm2d[_bn2]/batch_norm_0" [id=206, label="batch_norm_IW192_OW192_#260", style=filled, type=batch_norm]; +"207 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/__add___0" [id=207, 
label="__add___IW[192, 192]_OW192_#261", style=filled, type=__add__]; +"208 EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/NNCFUserConv2dStaticSamePadding[_expand_conv]/conv2d_0" [color=lightblue, id=208, label="conv2d_IW192_OW1152_G36_#281", style=filled, type=conv2d]; +"209 EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/NNCFBatchNorm2d[_bn0]/batch_norm_0" [id=209, label="batch_norm_IW1152_OW1152_#282", style=filled, type=batch_norm]; +"210 EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/MemoryEfficientSwish[_swish]/sigmoid_0" [id=210, label="sigmoid_IW1152_OW1152_#283", style=filled, type=sigmoid]; +"211 EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/MemoryEfficientSwish[_swish]/__mul___0" [id=211, label="__mul___IW[1152, 1152]_OW1152_#284", style=filled, type=__mul__]; +"212 EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/NNCFUserConv2dStaticSamePadding[_project_conv]/conv2d_0" [color=lightblue, id=212, label="conv2d_IW1152_OW320_G39_#297", style=filled, type=conv2d]; +"213 EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/NNCFBatchNorm2d[_bn2]/batch_norm_0" [id=213, label="batch_norm_IW320_OW320_#298", style=filled, type=batch_norm]; +"214 EfficientNet/NNCFUserConv2dStaticSamePadding[_conv_head]/conv2d_0" [color=lightblue, id=214, label="conv2d_IW320_OW1280_G37_#299", style=filled, type=conv2d]; +"215 EfficientNet/NNCFBatchNorm2d[_bn1]/batch_norm_0" [id=215, label="batch_norm_IW1280_OW1280_#300", style=filled, type=batch_norm]; +"216 EfficientNet/MemoryEfficientSwish[_swish]/sigmoid_1" [id=216, label="sigmoid_IW1280_OW1280_#301", style=filled, type=sigmoid]; +"217 EfficientNet/MemoryEfficientSwish[_swish]/__mul___1" [id=217, label="__mul___IW[1280, 1280]_OW1280_#302", style=filled, type=__mul__]; +"218 EfficientNet/AdaptiveAvgPool2d[_avg_pooling]/adaptive_avg_pool2d_0" [id=218, label="adaptive_avg_pool2d_IW1280_OW1280_#303", style=filled, type=adaptive_avg_pool2d]; +"219 EfficientNet/flatten_0" [id=219, label="flatten_IW1280_OW1280_#304", style=filled, type=flatten]; +"220 EfficientNet/Dropout[_dropout]/dropout_0" [id=220, label="dropout_IW1280_OW1280_#305", style=filled, type=dropout]; +"221 EfficientNet/NNCFLinear[_fc]/linear_0" [id=221, label="linear_IW1280_#306", style=filled, type=linear]; +"222 /nncf_model_output_0" [id=222, label="nncf_model_output_#307", style=filled, type=nncf_model_output]; "0 /nncf_model_input_0" -> "1 EfficientNet/NNCFUserConv2dStaticSamePadding[_conv_stem]/ModuleDict[pre_ops]/UpdateInputs[1]/__getitem___0" [label="(1, 3, 240, 240)", style=solid]; "1 EfficientNet/NNCFUserConv2dStaticSamePadding[_conv_stem]/ModuleDict[pre_ops]/UpdateInputs[1]/__getitem___0" -> "2 EfficientNet/NNCFUserConv2dStaticSamePadding[_conv_stem]/ZeroPad2d[static_padding]/pad_0" [label="(1, 3, 240, 240)", style=solid]; "2 EfficientNet/NNCFUserConv2dStaticSamePadding[_conv_stem]/ZeroPad2d[static_padding]/pad_0" -> "3 EfficientNet/NNCFUserConv2dStaticSamePadding[_conv_stem]/conv2d_0" [label="(1, 3, 241, 241)", style=solid]; @@ -388,127 +379,118 @@ strict digraph { "126 EfficientNet/ModuleList[_blocks]/MBConvBlock[8]/MemoryEfficientSwish[_swish]/__mul___0" -> "127 EfficientNet/ModuleList[_blocks]/MBConvBlock[8]/NNCFUserConv2dStaticSamePadding[_project_conv]/conv2d_0" [label="(1, 480, 15, 15)", style=solid]; "127 EfficientNet/ModuleList[_blocks]/MBConvBlock[8]/NNCFUserConv2dStaticSamePadding[_project_conv]/conv2d_0" -> "128 EfficientNet/ModuleList[_blocks]/MBConvBlock[8]/NNCFBatchNorm2d[_bn2]/batch_norm_0" [label="(1, 112, 15, 15)", style=solid]; "128 
EfficientNet/ModuleList[_blocks]/MBConvBlock[8]/NNCFBatchNorm2d[_bn2]/batch_norm_0" -> "129 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFUserConv2dStaticSamePadding[_expand_conv]/conv2d_0" [label="(1, 112, 15, 15)", style=solid]; -"128 EfficientNet/ModuleList[_blocks]/MBConvBlock[8]/NNCFBatchNorm2d[_bn2]/batch_norm_0" -> "151 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/__add___0" [label="(1, 112, 15, 15)", style=solid]; +"128 EfficientNet/ModuleList[_blocks]/MBConvBlock[8]/NNCFBatchNorm2d[_bn2]/batch_norm_0" -> "148 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/__add___0" [label="(1, 112, 15, 15)", style=solid]; "129 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFUserConv2dStaticSamePadding[_expand_conv]/conv2d_0" -> "130 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFBatchNorm2d[_bn0]/batch_norm_0" [label="(1, 672, 15, 15)", style=solid]; "130 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFBatchNorm2d[_bn0]/batch_norm_0" -> "131 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/MemoryEfficientSwish[_swish]/sigmoid_0" [label="(1, 672, 15, 15)", style=solid]; "130 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFBatchNorm2d[_bn0]/batch_norm_0" -> "132 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/MemoryEfficientSwish[_swish]/__mul___0" [label="(1, 672, 15, 15)", style=solid]; "131 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/MemoryEfficientSwish[_swish]/sigmoid_0" -> "132 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/MemoryEfficientSwish[_swish]/__mul___0" [label="(1, 672, 15, 15)", style=solid]; "132 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/MemoryEfficientSwish[_swish]/__mul___0" -> "133 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateInputs[1]/__getitem___0" [label="(1, 672, 15, 15)", style=solid]; -"133 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateInputs[1]/__getitem___0" -> "137 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ZeroPad2d[static_padding]/pad_0" [label="(1, 672, 13, 13)", style=solid]; -"134 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateWeight[3]/ElasticKernelConv2DOp[op]/linear_0" -> "135 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateWeight[3]/ElasticKernelConv2DOp[op]/view_0" [label="(672, 9)", style=solid]; -"135 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateWeight[3]/ElasticKernelConv2DOp[op]/view_0" -> "136 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateWeight[3]/ElasticKernelConv2DOp[op]/view_1" [label="(672, 1, 9)", style=solid]; -"136 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateWeight[3]/ElasticKernelConv2DOp[op]/view_1" -> "138 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/conv2d_0" [label="(672, 1, 3, 3)", style=solid]; -"137 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ZeroPad2d[static_padding]/pad_0" -> "138 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/conv2d_0" [label="(1, 672, 17, 17)", 
style=solid]; -"138 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/conv2d_0" -> "139 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFBatchNorm2d[_bn1]/batch_norm_0" [label="(1, 672, 15, 15)", style=solid]; -"139 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFBatchNorm2d[_bn1]/batch_norm_0" -> "140 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/MemoryEfficientSwish[_swish]/sigmoid_1" [label="(1, 672, 15, 15)", style=solid]; -"139 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFBatchNorm2d[_bn1]/batch_norm_0" -> "141 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/MemoryEfficientSwish[_swish]/__mul___1" [label="(1, 672, 15, 15)", style=solid]; -"140 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/MemoryEfficientSwish[_swish]/sigmoid_1" -> "141 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/MemoryEfficientSwish[_swish]/__mul___1" [label="(1, 672, 15, 15)", style=solid]; -"141 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/MemoryEfficientSwish[_swish]/__mul___1" -> "142 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/adaptive_avg_pool2d_0" [label="(1, 672, 15, 15)", style=solid]; -"141 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/MemoryEfficientSwish[_swish]/__mul___1" -> "148 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/__mul___0" [label="(1, 672, 15, 15)", style=solid]; -"142 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/adaptive_avg_pool2d_0" -> "143 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" [label="(1, 672, 1, 1)", style=solid]; -"143 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" -> "144 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/MemoryEfficientSwish[_swish]/sigmoid_2" [label="(1, 28, 1, 1)", style=solid]; -"143 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" -> "145 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/MemoryEfficientSwish[_swish]/__mul___2" [label="(1, 28, 1, 1)", style=solid]; -"144 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/MemoryEfficientSwish[_swish]/sigmoid_2" -> "145 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/MemoryEfficientSwish[_swish]/__mul___2" [label="(1, 28, 1, 1)", style=solid]; -"145 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/MemoryEfficientSwish[_swish]/__mul___2" -> "146 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFUserConv2dStaticSamePadding[_se_expand]/conv2d_0" [label="(1, 28, 1, 1)", style=solid]; -"146 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFUserConv2dStaticSamePadding[_se_expand]/conv2d_0" -> "147 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/sigmoid_0" [label="(1, 672, 1, 1)", style=solid]; -"147 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/sigmoid_0" -> "148 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/__mul___0" [label="(1, 672, 1, 1)", style=solid]; -"148 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/__mul___0" -> "149 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFUserConv2dStaticSamePadding[_project_conv]/conv2d_0" [label="(1, 672, 15, 15)", style=solid]; -"149 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFUserConv2dStaticSamePadding[_project_conv]/conv2d_0" -> "150 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFBatchNorm2d[_bn2]/batch_norm_0" [label="(1, 112, 15, 15)", style=solid]; -"150 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFBatchNorm2d[_bn2]/batch_norm_0" -> "151 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/__add___0" 
[label="(1, 112, 15, 15)", style=solid]; -"151 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/__add___0" -> "152 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFUserConv2dStaticSamePadding[_expand_conv]/conv2d_0" [label="(1, 112, 15, 15)", style=solid]; -"152 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFUserConv2dStaticSamePadding[_expand_conv]/conv2d_0" -> "153 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFBatchNorm2d[_bn0]/batch_norm_0" [label="(1, 672, 15, 15)", style=solid]; -"153 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFBatchNorm2d[_bn0]/batch_norm_0" -> "154 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/MemoryEfficientSwish[_swish]/sigmoid_0" [label="(1, 672, 15, 15)", style=solid]; -"153 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFBatchNorm2d[_bn0]/batch_norm_0" -> "155 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/MemoryEfficientSwish[_swish]/__mul___0" [label="(1, 672, 15, 15)", style=solid]; -"154 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/MemoryEfficientSwish[_swish]/sigmoid_0" -> "155 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/MemoryEfficientSwish[_swish]/__mul___0" [label="(1, 672, 15, 15)", style=solid]; -"155 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/MemoryEfficientSwish[_swish]/__mul___0" -> "156 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateInputs[1]/__getitem___0" [label="(1, 672, 15, 15)", style=solid]; -"156 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateInputs[1]/__getitem___0" -> "160 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ZeroPad2d[static_padding]/pad_0" [label="(1, 672, 13, 13)", style=solid]; -"157 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateWeight[3]/ElasticKernelConv2DOp[op]/linear_0" -> "158 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateWeight[3]/ElasticKernelConv2DOp[op]/view_0" [label="(672, 9)", style=solid]; -"158 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateWeight[3]/ElasticKernelConv2DOp[op]/view_0" -> "159 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateWeight[3]/ElasticKernelConv2DOp[op]/view_1" [label="(672, 1, 9)", style=solid]; -"159 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateWeight[3]/ElasticKernelConv2DOp[op]/view_1" -> "161 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/conv2d_0" [label="(672, 1, 3, 3)", style=solid]; -"160 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ZeroPad2d[static_padding]/pad_0" -> "161 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/conv2d_0" [label="(1, 672, 16, 16)", style=solid]; -"161 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/conv2d_0" -> "162 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFBatchNorm2d[_bn1]/batch_norm_0" [label="(1, 672, 7, 7)", style=solid]; -"162 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFBatchNorm2d[_bn1]/batch_norm_0" 
-> "163 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/MemoryEfficientSwish[_swish]/sigmoid_1" [label="(1, 672, 7, 7)", style=solid]; -"162 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFBatchNorm2d[_bn1]/batch_norm_0" -> "164 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/MemoryEfficientSwish[_swish]/__mul___1" [label="(1, 672, 7, 7)", style=solid]; -"163 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/MemoryEfficientSwish[_swish]/sigmoid_1" -> "164 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/MemoryEfficientSwish[_swish]/__mul___1" [label="(1, 672, 7, 7)", style=solid]; -"164 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/MemoryEfficientSwish[_swish]/__mul___1" -> "165 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/adaptive_avg_pool2d_0" [label="(1, 672, 7, 7)", style=solid]; -"164 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/MemoryEfficientSwish[_swish]/__mul___1" -> "171 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/__mul___0" [label="(1, 672, 7, 7)", style=solid]; -"165 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/adaptive_avg_pool2d_0" -> "166 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" [label="(1, 672, 1, 1)", style=solid]; -"166 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" -> "167 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/MemoryEfficientSwish[_swish]/sigmoid_2" [label="(1, 28, 1, 1)", style=solid]; -"166 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" -> "168 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/MemoryEfficientSwish[_swish]/__mul___2" [label="(1, 28, 1, 1)", style=solid]; -"167 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/MemoryEfficientSwish[_swish]/sigmoid_2" -> "168 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/MemoryEfficientSwish[_swish]/__mul___2" [label="(1, 28, 1, 1)", style=solid]; -"168 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/MemoryEfficientSwish[_swish]/__mul___2" -> "169 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFUserConv2dStaticSamePadding[_se_expand]/conv2d_0" [label="(1, 28, 1, 1)", style=solid]; -"169 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFUserConv2dStaticSamePadding[_se_expand]/conv2d_0" -> "170 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/sigmoid_0" [label="(1, 672, 1, 1)", style=solid]; -"170 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/sigmoid_0" -> "171 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/__mul___0" [label="(1, 672, 1, 1)", style=solid]; -"171 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/__mul___0" -> "172 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFUserConv2dStaticSamePadding[_project_conv]/conv2d_0" [label="(1, 672, 7, 7)", style=solid]; -"172 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFUserConv2dStaticSamePadding[_project_conv]/conv2d_0" -> "173 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFBatchNorm2d[_bn2]/batch_norm_0" [label="(1, 192, 7, 7)", style=solid]; -"173 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFBatchNorm2d[_bn2]/batch_norm_0" -> "174 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFUserConv2dStaticSamePadding[_expand_conv]/conv2d_0" [label="(1, 192, 7, 7)", style=solid]; -"173 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFBatchNorm2d[_bn2]/batch_norm_0" -> "193 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/__add___0" [label="(1, 192, 7, 7)", style=solid]; -"174 
EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFUserConv2dStaticSamePadding[_expand_conv]/conv2d_0" -> "175 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFBatchNorm2d[_bn0]/batch_norm_0" [label="(1, 1152, 7, 7)", style=solid]; -"175 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFBatchNorm2d[_bn0]/batch_norm_0" -> "176 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/MemoryEfficientSwish[_swish]/sigmoid_0" [label="(1, 1152, 7, 7)", style=solid]; -"175 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFBatchNorm2d[_bn0]/batch_norm_0" -> "177 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/MemoryEfficientSwish[_swish]/__mul___0" [label="(1, 1152, 7, 7)", style=solid]; -"176 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/MemoryEfficientSwish[_swish]/sigmoid_0" -> "177 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/MemoryEfficientSwish[_swish]/__mul___0" [label="(1, 1152, 7, 7)", style=solid]; -"177 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/MemoryEfficientSwish[_swish]/__mul___0" -> "178 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateInputs[1]/__getitem___0" [label="(1, 1152, 7, 7)", style=solid]; -"178 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateInputs[1]/__getitem___0" -> "179 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ZeroPad2d[static_padding]/pad_0" [label="(1, 1152, 7, 7)", style=solid]; -"179 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ZeroPad2d[static_padding]/pad_0" -> "180 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/conv2d_0" [label="(1, 1152, 11, 11)", style=solid]; -"180 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/conv2d_0" -> "181 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFBatchNorm2d[_bn1]/batch_norm_0" [label="(1, 1152, 7, 7)", style=solid]; -"181 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFBatchNorm2d[_bn1]/batch_norm_0" -> "182 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/MemoryEfficientSwish[_swish]/sigmoid_1" [label="(1, 1152, 7, 7)", style=solid]; -"181 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFBatchNorm2d[_bn1]/batch_norm_0" -> "183 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/MemoryEfficientSwish[_swish]/__mul___1" [label="(1, 1152, 7, 7)", style=solid]; -"182 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/MemoryEfficientSwish[_swish]/sigmoid_1" -> "183 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/MemoryEfficientSwish[_swish]/__mul___1" [label="(1, 1152, 7, 7)", style=solid]; -"183 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/MemoryEfficientSwish[_swish]/__mul___1" -> "184 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/adaptive_avg_pool2d_0" [label="(1, 1152, 7, 7)", style=solid]; -"183 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/MemoryEfficientSwish[_swish]/__mul___1" -> "190 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/__mul___0" [label="(1, 1152, 7, 7)", style=solid]; -"184 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/adaptive_avg_pool2d_0" -> "185 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" [label="(1, 1152, 1, 1)", style=solid]; -"185 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" -> "186 
EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/MemoryEfficientSwish[_swish]/sigmoid_2" [label="(1, 48, 1, 1)", style=solid]; -"185 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" -> "187 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/MemoryEfficientSwish[_swish]/__mul___2" [label="(1, 48, 1, 1)", style=solid]; -"186 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/MemoryEfficientSwish[_swish]/sigmoid_2" -> "187 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/MemoryEfficientSwish[_swish]/__mul___2" [label="(1, 48, 1, 1)", style=solid]; -"187 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/MemoryEfficientSwish[_swish]/__mul___2" -> "188 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFUserConv2dStaticSamePadding[_se_expand]/conv2d_0" [label="(1, 48, 1, 1)", style=solid]; -"188 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFUserConv2dStaticSamePadding[_se_expand]/conv2d_0" -> "189 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/sigmoid_0" [label="(1, 1152, 1, 1)", style=solid]; -"189 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/sigmoid_0" -> "190 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/__mul___0" [label="(1, 1152, 1, 1)", style=solid]; -"190 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/__mul___0" -> "191 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFUserConv2dStaticSamePadding[_project_conv]/conv2d_0" [label="(1, 1152, 7, 7)", style=solid]; -"191 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFUserConv2dStaticSamePadding[_project_conv]/conv2d_0" -> "192 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFBatchNorm2d[_bn2]/batch_norm_0" [label="(1, 192, 7, 7)", style=solid]; -"192 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFBatchNorm2d[_bn2]/batch_norm_0" -> "193 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/__add___0" [label="(1, 192, 7, 7)", style=solid]; -"193 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/__add___0" -> "194 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFUserConv2dStaticSamePadding[_expand_conv]/conv2d_0" [label="(1, 192, 7, 7)", style=solid]; -"193 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/__add___0" -> "216 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/__add___0" [label="(1, 192, 7, 7)", style=solid]; -"194 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFUserConv2dStaticSamePadding[_expand_conv]/conv2d_0" -> "195 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFBatchNorm2d[_bn0]/batch_norm_0" [label="(1, 1152, 7, 7)", style=solid]; -"195 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFBatchNorm2d[_bn0]/batch_norm_0" -> "196 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/MemoryEfficientSwish[_swish]/sigmoid_0" [label="(1, 1152, 7, 7)", style=solid]; -"195 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFBatchNorm2d[_bn0]/batch_norm_0" -> "197 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/MemoryEfficientSwish[_swish]/__mul___0" [label="(1, 1152, 7, 7)", style=solid]; -"196 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/MemoryEfficientSwish[_swish]/sigmoid_0" -> "197 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/MemoryEfficientSwish[_swish]/__mul___0" [label="(1, 1152, 7, 7)", style=solid]; -"197 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/MemoryEfficientSwish[_swish]/__mul___0" -> "198 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateInputs[1]/__getitem___0" [label="(1, 1152, 7, 7)", style=solid]; -"198 
EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateInputs[1]/__getitem___0" -> "202 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ZeroPad2d[static_padding]/pad_0" [label="(1, 1152, 5, 5)", style=solid]; -"199 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateWeight[3]/ElasticKernelConv2DOp[op]/linear_0" -> "200 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateWeight[3]/ElasticKernelConv2DOp[op]/view_0" [label="(1152, 9)", style=solid]; -"200 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateWeight[3]/ElasticKernelConv2DOp[op]/view_0" -> "201 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateWeight[3]/ElasticKernelConv2DOp[op]/view_1" [label="(1152, 1, 9)", style=solid]; -"201 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateWeight[3]/ElasticKernelConv2DOp[op]/view_1" -> "203 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/conv2d_0" [label="(1152, 1, 3, 3)", style=solid]; -"202 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ZeroPad2d[static_padding]/pad_0" -> "203 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/conv2d_0" [label="(1, 1152, 9, 9)", style=solid]; -"203 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/conv2d_0" -> "204 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFBatchNorm2d[_bn1]/batch_norm_0" [label="(1, 1152, 7, 7)", style=solid]; -"204 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFBatchNorm2d[_bn1]/batch_norm_0" -> "205 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/MemoryEfficientSwish[_swish]/sigmoid_1" [label="(1, 1152, 7, 7)", style=solid]; -"204 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFBatchNorm2d[_bn1]/batch_norm_0" -> "206 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/MemoryEfficientSwish[_swish]/__mul___1" [label="(1, 1152, 7, 7)", style=solid]; -"205 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/MemoryEfficientSwish[_swish]/sigmoid_1" -> "206 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/MemoryEfficientSwish[_swish]/__mul___1" [label="(1, 1152, 7, 7)", style=solid]; -"206 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/MemoryEfficientSwish[_swish]/__mul___1" -> "207 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/adaptive_avg_pool2d_0" [label="(1, 1152, 7, 7)", style=solid]; -"206 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/MemoryEfficientSwish[_swish]/__mul___1" -> "213 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/__mul___0" [label="(1, 1152, 7, 7)", style=solid]; -"207 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/adaptive_avg_pool2d_0" -> "208 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" [label="(1, 1152, 1, 1)", style=solid]; -"208 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" -> "209 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/MemoryEfficientSwish[_swish]/sigmoid_2" [label="(1, 48, 1, 1)", style=solid]; 
-"208 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" -> "210 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/MemoryEfficientSwish[_swish]/__mul___2" [label="(1, 48, 1, 1)", style=solid]; -"209 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/MemoryEfficientSwish[_swish]/sigmoid_2" -> "210 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/MemoryEfficientSwish[_swish]/__mul___2" [label="(1, 48, 1, 1)", style=solid]; -"210 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/MemoryEfficientSwish[_swish]/__mul___2" -> "211 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFUserConv2dStaticSamePadding[_se_expand]/conv2d_0" [label="(1, 48, 1, 1)", style=solid]; -"211 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFUserConv2dStaticSamePadding[_se_expand]/conv2d_0" -> "212 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/sigmoid_0" [label="(1, 1152, 1, 1)", style=solid]; -"212 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/sigmoid_0" -> "213 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/__mul___0" [label="(1, 1152, 1, 1)", style=solid]; -"213 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/__mul___0" -> "214 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFUserConv2dStaticSamePadding[_project_conv]/conv2d_0" [label="(1, 1152, 7, 7)", style=solid]; -"214 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFUserConv2dStaticSamePadding[_project_conv]/conv2d_0" -> "215 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFBatchNorm2d[_bn2]/batch_norm_0" [label="(1, 192, 7, 7)", style=solid]; -"215 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFBatchNorm2d[_bn2]/batch_norm_0" -> "216 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/__add___0" [label="(1, 192, 7, 7)", style=solid]; -"216 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/__add___0" -> "217 EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/NNCFUserConv2dStaticSamePadding[_expand_conv]/conv2d_0" [label="(1, 192, 7, 7)", style=solid]; -"217 EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/NNCFUserConv2dStaticSamePadding[_expand_conv]/conv2d_0" -> "218 EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/NNCFBatchNorm2d[_bn0]/batch_norm_0" [label="(1, 1152, 7, 7)", style=solid]; -"218 EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/NNCFBatchNorm2d[_bn0]/batch_norm_0" -> "219 EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/MemoryEfficientSwish[_swish]/sigmoid_0" [label="(1, 1152, 7, 7)", style=solid]; -"218 EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/NNCFBatchNorm2d[_bn0]/batch_norm_0" -> "220 EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/MemoryEfficientSwish[_swish]/__mul___0" [label="(1, 1152, 7, 7)", style=solid]; -"219 EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/MemoryEfficientSwish[_swish]/sigmoid_0" -> "220 EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/MemoryEfficientSwish[_swish]/__mul___0" [label="(1, 1152, 7, 7)", style=solid]; -"220 EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/MemoryEfficientSwish[_swish]/__mul___0" -> "221 EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/NNCFUserConv2dStaticSamePadding[_project_conv]/conv2d_0" [label="(1, 1152, 7, 7)", style=solid]; -"221 EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/NNCFUserConv2dStaticSamePadding[_project_conv]/conv2d_0" -> "222 EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/NNCFBatchNorm2d[_bn2]/batch_norm_0" [label="(1, 320, 7, 7)", style=solid]; -"222 EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/NNCFBatchNorm2d[_bn2]/batch_norm_0" -> "223 
EfficientNet/NNCFUserConv2dStaticSamePadding[_conv_head]/conv2d_0" [label="(1, 320, 7, 7)", style=solid]; -"223 EfficientNet/NNCFUserConv2dStaticSamePadding[_conv_head]/conv2d_0" -> "224 EfficientNet/NNCFBatchNorm2d[_bn1]/batch_norm_0" [label="(1, 1280, 7, 7)", style=solid]; -"224 EfficientNet/NNCFBatchNorm2d[_bn1]/batch_norm_0" -> "225 EfficientNet/MemoryEfficientSwish[_swish]/sigmoid_1" [label="(1, 1280, 7, 7)", style=solid]; -"224 EfficientNet/NNCFBatchNorm2d[_bn1]/batch_norm_0" -> "226 EfficientNet/MemoryEfficientSwish[_swish]/__mul___1" [label="(1, 1280, 7, 7)", style=solid]; -"225 EfficientNet/MemoryEfficientSwish[_swish]/sigmoid_1" -> "226 EfficientNet/MemoryEfficientSwish[_swish]/__mul___1" [label="(1, 1280, 7, 7)", style=solid]; -"226 EfficientNet/MemoryEfficientSwish[_swish]/__mul___1" -> "227 EfficientNet/AdaptiveAvgPool2d[_avg_pooling]/adaptive_avg_pool2d_0" [label="(1, 1280, 7, 7)", style=solid]; -"227 EfficientNet/AdaptiveAvgPool2d[_avg_pooling]/adaptive_avg_pool2d_0" -> "228 EfficientNet/flatten_0" [label="(1, 1280, 1, 1)", style=solid]; -"228 EfficientNet/flatten_0" -> "229 EfficientNet/Dropout[_dropout]/dropout_0" [label="(1, 1280)", style=solid]; -"229 EfficientNet/Dropout[_dropout]/dropout_0" -> "230 EfficientNet/NNCFLinear[_fc]/linear_0" [label="(1, 1280)", style=solid]; -"230 EfficientNet/NNCFLinear[_fc]/linear_0" -> "231 /nncf_model_output_0" [label="(1, 1000)", style=solid]; +"133 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateInputs[1]/__getitem___0" -> "134 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ZeroPad2d[static_padding]/pad_0" [label="(1, 672, 15, 15)", style=solid]; +"134 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ZeroPad2d[static_padding]/pad_0" -> "135 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/conv2d_0" [label="(1, 672, 19, 19)", style=solid]; +"135 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/conv2d_0" -> "136 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFBatchNorm2d[_bn1]/batch_norm_0" [label="(1, 672, 15, 15)", style=solid]; +"136 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFBatchNorm2d[_bn1]/batch_norm_0" -> "137 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/MemoryEfficientSwish[_swish]/sigmoid_1" [label="(1, 672, 15, 15)", style=solid]; +"136 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFBatchNorm2d[_bn1]/batch_norm_0" -> "138 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/MemoryEfficientSwish[_swish]/__mul___1" [label="(1, 672, 15, 15)", style=solid]; +"137 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/MemoryEfficientSwish[_swish]/sigmoid_1" -> "138 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/MemoryEfficientSwish[_swish]/__mul___1" [label="(1, 672, 15, 15)", style=solid]; +"138 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/MemoryEfficientSwish[_swish]/__mul___1" -> "139 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/adaptive_avg_pool2d_0" [label="(1, 672, 15, 15)", style=solid]; +"138 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/MemoryEfficientSwish[_swish]/__mul___1" -> "145 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/__mul___0" [label="(1, 672, 15, 15)", style=solid]; +"139 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/adaptive_avg_pool2d_0" -> "140 
EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" [label="(1, 672, 1, 1)", style=solid]; +"140 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" -> "141 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/MemoryEfficientSwish[_swish]/sigmoid_2" [label="(1, 28, 1, 1)", style=solid]; +"140 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" -> "142 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/MemoryEfficientSwish[_swish]/__mul___2" [label="(1, 28, 1, 1)", style=solid]; +"141 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/MemoryEfficientSwish[_swish]/sigmoid_2" -> "142 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/MemoryEfficientSwish[_swish]/__mul___2" [label="(1, 28, 1, 1)", style=solid]; +"142 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/MemoryEfficientSwish[_swish]/__mul___2" -> "143 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFUserConv2dStaticSamePadding[_se_expand]/conv2d_0" [label="(1, 28, 1, 1)", style=solid]; +"143 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFUserConv2dStaticSamePadding[_se_expand]/conv2d_0" -> "144 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/sigmoid_0" [label="(1, 672, 1, 1)", style=solid]; +"144 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/sigmoid_0" -> "145 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/__mul___0" [label="(1, 672, 1, 1)", style=solid]; +"145 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/__mul___0" -> "146 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFUserConv2dStaticSamePadding[_project_conv]/conv2d_0" [label="(1, 672, 15, 15)", style=solid]; +"146 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFUserConv2dStaticSamePadding[_project_conv]/conv2d_0" -> "147 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFBatchNorm2d[_bn2]/batch_norm_0" [label="(1, 112, 15, 15)", style=solid]; +"147 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFBatchNorm2d[_bn2]/batch_norm_0" -> "148 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/__add___0" [label="(1, 112, 15, 15)", style=solid]; +"148 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/__add___0" -> "149 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFUserConv2dStaticSamePadding[_expand_conv]/conv2d_0" [label="(1, 112, 15, 15)", style=solid]; +"149 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFUserConv2dStaticSamePadding[_expand_conv]/conv2d_0" -> "150 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFBatchNorm2d[_bn0]/batch_norm_0" [label="(1, 672, 15, 15)", style=solid]; +"150 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFBatchNorm2d[_bn0]/batch_norm_0" -> "151 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/MemoryEfficientSwish[_swish]/sigmoid_0" [label="(1, 672, 15, 15)", style=solid]; +"150 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFBatchNorm2d[_bn0]/batch_norm_0" -> "152 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/MemoryEfficientSwish[_swish]/__mul___0" [label="(1, 672, 15, 15)", style=solid]; +"151 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/MemoryEfficientSwish[_swish]/sigmoid_0" -> "152 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/MemoryEfficientSwish[_swish]/__mul___0" [label="(1, 672, 15, 15)", style=solid]; +"152 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/MemoryEfficientSwish[_swish]/__mul___0" -> "153 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateInputs[1]/__getitem___0" 
[label="(1, 672, 15, 15)", style=solid]; +"153 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateInputs[1]/__getitem___0" -> "154 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ZeroPad2d[static_padding]/pad_0" [label="(1, 672, 15, 15)", style=solid]; +"154 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ZeroPad2d[static_padding]/pad_0" -> "155 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/conv2d_0" [label="(1, 672, 18, 18)", style=solid]; +"155 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/conv2d_0" -> "156 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFBatchNorm2d[_bn1]/batch_norm_0" [label="(1, 672, 7, 7)", style=solid]; +"156 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFBatchNorm2d[_bn1]/batch_norm_0" -> "157 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/MemoryEfficientSwish[_swish]/sigmoid_1" [label="(1, 672, 7, 7)", style=solid]; +"156 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFBatchNorm2d[_bn1]/batch_norm_0" -> "158 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/MemoryEfficientSwish[_swish]/__mul___1" [label="(1, 672, 7, 7)", style=solid]; +"157 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/MemoryEfficientSwish[_swish]/sigmoid_1" -> "158 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/MemoryEfficientSwish[_swish]/__mul___1" [label="(1, 672, 7, 7)", style=solid]; +"158 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/MemoryEfficientSwish[_swish]/__mul___1" -> "159 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/adaptive_avg_pool2d_0" [label="(1, 672, 7, 7)", style=solid]; +"158 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/MemoryEfficientSwish[_swish]/__mul___1" -> "165 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/__mul___0" [label="(1, 672, 7, 7)", style=solid]; +"159 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/adaptive_avg_pool2d_0" -> "160 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" [label="(1, 672, 1, 1)", style=solid]; +"160 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" -> "161 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/MemoryEfficientSwish[_swish]/sigmoid_2" [label="(1, 28, 1, 1)", style=solid]; +"160 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" -> "162 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/MemoryEfficientSwish[_swish]/__mul___2" [label="(1, 28, 1, 1)", style=solid]; +"161 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/MemoryEfficientSwish[_swish]/sigmoid_2" -> "162 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/MemoryEfficientSwish[_swish]/__mul___2" [label="(1, 28, 1, 1)", style=solid]; +"162 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/MemoryEfficientSwish[_swish]/__mul___2" -> "163 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFUserConv2dStaticSamePadding[_se_expand]/conv2d_0" [label="(1, 28, 1, 1)", style=solid]; +"163 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFUserConv2dStaticSamePadding[_se_expand]/conv2d_0" -> "164 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/sigmoid_0" [label="(1, 672, 1, 1)", style=solid]; +"164 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/sigmoid_0" -> "165 
EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/__mul___0" [label="(1, 672, 1, 1)", style=solid]; +"165 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/__mul___0" -> "166 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFUserConv2dStaticSamePadding[_project_conv]/conv2d_0" [label="(1, 672, 7, 7)", style=solid]; +"166 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFUserConv2dStaticSamePadding[_project_conv]/conv2d_0" -> "167 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFBatchNorm2d[_bn2]/batch_norm_0" [label="(1, 192, 7, 7)", style=solid]; +"167 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFBatchNorm2d[_bn2]/batch_norm_0" -> "168 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFUserConv2dStaticSamePadding[_expand_conv]/conv2d_0" [label="(1, 192, 7, 7)", style=solid]; +"167 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFBatchNorm2d[_bn2]/batch_norm_0" -> "187 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/__add___0" [label="(1, 192, 7, 7)", style=solid]; +"168 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFUserConv2dStaticSamePadding[_expand_conv]/conv2d_0" -> "169 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFBatchNorm2d[_bn0]/batch_norm_0" [label="(1, 1152, 7, 7)", style=solid]; +"169 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFBatchNorm2d[_bn0]/batch_norm_0" -> "170 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/MemoryEfficientSwish[_swish]/sigmoid_0" [label="(1, 1152, 7, 7)", style=solid]; +"169 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFBatchNorm2d[_bn0]/batch_norm_0" -> "171 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/MemoryEfficientSwish[_swish]/__mul___0" [label="(1, 1152, 7, 7)", style=solid]; +"170 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/MemoryEfficientSwish[_swish]/sigmoid_0" -> "171 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/MemoryEfficientSwish[_swish]/__mul___0" [label="(1, 1152, 7, 7)", style=solid]; +"171 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/MemoryEfficientSwish[_swish]/__mul___0" -> "172 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateInputs[1]/__getitem___0" [label="(1, 1152, 7, 7)", style=solid]; +"172 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateInputs[1]/__getitem___0" -> "173 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ZeroPad2d[static_padding]/pad_0" [label="(1, 1152, 7, 7)", style=solid]; +"173 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ZeroPad2d[static_padding]/pad_0" -> "174 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/conv2d_0" [label="(1, 1152, 11, 11)", style=solid]; +"174 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/conv2d_0" -> "175 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFBatchNorm2d[_bn1]/batch_norm_0" [label="(1, 1152, 7, 7)", style=solid]; +"175 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFBatchNorm2d[_bn1]/batch_norm_0" -> "176 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/MemoryEfficientSwish[_swish]/sigmoid_1" [label="(1, 1152, 7, 7)", style=solid]; +"175 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFBatchNorm2d[_bn1]/batch_norm_0" -> "177 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/MemoryEfficientSwish[_swish]/__mul___1" [label="(1, 1152, 7, 7)", 
style=solid]; +"176 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/MemoryEfficientSwish[_swish]/sigmoid_1" -> "177 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/MemoryEfficientSwish[_swish]/__mul___1" [label="(1, 1152, 7, 7)", style=solid]; +"177 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/MemoryEfficientSwish[_swish]/__mul___1" -> "178 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/adaptive_avg_pool2d_0" [label="(1, 1152, 7, 7)", style=solid]; +"177 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/MemoryEfficientSwish[_swish]/__mul___1" -> "184 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/__mul___0" [label="(1, 1152, 7, 7)", style=solid]; +"178 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/adaptive_avg_pool2d_0" -> "179 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" [label="(1, 1152, 1, 1)", style=solid]; +"179 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" -> "180 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/MemoryEfficientSwish[_swish]/sigmoid_2" [label="(1, 48, 1, 1)", style=solid]; +"179 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" -> "181 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/MemoryEfficientSwish[_swish]/__mul___2" [label="(1, 48, 1, 1)", style=solid]; +"180 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/MemoryEfficientSwish[_swish]/sigmoid_2" -> "181 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/MemoryEfficientSwish[_swish]/__mul___2" [label="(1, 48, 1, 1)", style=solid]; +"181 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/MemoryEfficientSwish[_swish]/__mul___2" -> "182 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFUserConv2dStaticSamePadding[_se_expand]/conv2d_0" [label="(1, 48, 1, 1)", style=solid]; +"182 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFUserConv2dStaticSamePadding[_se_expand]/conv2d_0" -> "183 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/sigmoid_0" [label="(1, 1152, 1, 1)", style=solid]; +"183 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/sigmoid_0" -> "184 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/__mul___0" [label="(1, 1152, 1, 1)", style=solid]; +"184 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/__mul___0" -> "185 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFUserConv2dStaticSamePadding[_project_conv]/conv2d_0" [label="(1, 1152, 7, 7)", style=solid]; +"185 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFUserConv2dStaticSamePadding[_project_conv]/conv2d_0" -> "186 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFBatchNorm2d[_bn2]/batch_norm_0" [label="(1, 192, 7, 7)", style=solid]; +"186 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFBatchNorm2d[_bn2]/batch_norm_0" -> "187 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/__add___0" [label="(1, 192, 7, 7)", style=solid]; +"187 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/__add___0" -> "188 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFUserConv2dStaticSamePadding[_expand_conv]/conv2d_0" [label="(1, 192, 7, 7)", style=solid]; +"187 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/__add___0" -> "207 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/__add___0" [label="(1, 192, 7, 7)", style=solid]; +"188 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFUserConv2dStaticSamePadding[_expand_conv]/conv2d_0" -> "189 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFBatchNorm2d[_bn0]/batch_norm_0" [label="(1, 1152, 7, 7)", style=solid]; +"189 
EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFBatchNorm2d[_bn0]/batch_norm_0" -> "190 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/MemoryEfficientSwish[_swish]/sigmoid_0" [label="(1, 1152, 7, 7)", style=solid]; +"189 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFBatchNorm2d[_bn0]/batch_norm_0" -> "191 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/MemoryEfficientSwish[_swish]/__mul___0" [label="(1, 1152, 7, 7)", style=solid]; +"190 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/MemoryEfficientSwish[_swish]/sigmoid_0" -> "191 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/MemoryEfficientSwish[_swish]/__mul___0" [label="(1, 1152, 7, 7)", style=solid]; +"191 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/MemoryEfficientSwish[_swish]/__mul___0" -> "192 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateInputs[1]/__getitem___0" [label="(1, 1152, 7, 7)", style=solid]; +"192 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateInputs[1]/__getitem___0" -> "193 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ZeroPad2d[static_padding]/pad_0" [label="(1, 1152, 7, 7)", style=solid]; +"193 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ZeroPad2d[static_padding]/pad_0" -> "194 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/conv2d_0" [label="(1, 1152, 11, 11)", style=solid]; +"194 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/conv2d_0" -> "195 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFBatchNorm2d[_bn1]/batch_norm_0" [label="(1, 1152, 7, 7)", style=solid]; +"195 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFBatchNorm2d[_bn1]/batch_norm_0" -> "196 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/MemoryEfficientSwish[_swish]/sigmoid_1" [label="(1, 1152, 7, 7)", style=solid]; +"195 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFBatchNorm2d[_bn1]/batch_norm_0" -> "197 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/MemoryEfficientSwish[_swish]/__mul___1" [label="(1, 1152, 7, 7)", style=solid]; +"196 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/MemoryEfficientSwish[_swish]/sigmoid_1" -> "197 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/MemoryEfficientSwish[_swish]/__mul___1" [label="(1, 1152, 7, 7)", style=solid]; +"197 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/MemoryEfficientSwish[_swish]/__mul___1" -> "198 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/adaptive_avg_pool2d_0" [label="(1, 1152, 7, 7)", style=solid]; +"197 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/MemoryEfficientSwish[_swish]/__mul___1" -> "204 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/__mul___0" [label="(1, 1152, 7, 7)", style=solid]; +"198 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/adaptive_avg_pool2d_0" -> "199 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" [label="(1, 1152, 1, 1)", style=solid]; +"199 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" -> "200 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/MemoryEfficientSwish[_swish]/sigmoid_2" [label="(1, 48, 1, 1)", style=solid]; +"199 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" -> "201 
EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/MemoryEfficientSwish[_swish]/__mul___2" [label="(1, 48, 1, 1)", style=solid]; +"200 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/MemoryEfficientSwish[_swish]/sigmoid_2" -> "201 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/MemoryEfficientSwish[_swish]/__mul___2" [label="(1, 48, 1, 1)", style=solid]; +"201 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/MemoryEfficientSwish[_swish]/__mul___2" -> "202 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFUserConv2dStaticSamePadding[_se_expand]/conv2d_0" [label="(1, 48, 1, 1)", style=solid]; +"202 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFUserConv2dStaticSamePadding[_se_expand]/conv2d_0" -> "203 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/sigmoid_0" [label="(1, 1152, 1, 1)", style=solid]; +"203 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/sigmoid_0" -> "204 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/__mul___0" [label="(1, 1152, 1, 1)", style=solid]; +"204 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/__mul___0" -> "205 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFUserConv2dStaticSamePadding[_project_conv]/conv2d_0" [label="(1, 1152, 7, 7)", style=solid]; +"205 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFUserConv2dStaticSamePadding[_project_conv]/conv2d_0" -> "206 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFBatchNorm2d[_bn2]/batch_norm_0" [label="(1, 192, 7, 7)", style=solid]; +"206 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFBatchNorm2d[_bn2]/batch_norm_0" -> "207 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/__add___0" [label="(1, 192, 7, 7)", style=solid]; +"207 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/__add___0" -> "208 EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/NNCFUserConv2dStaticSamePadding[_expand_conv]/conv2d_0" [label="(1, 192, 7, 7)", style=solid]; +"208 EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/NNCFUserConv2dStaticSamePadding[_expand_conv]/conv2d_0" -> "209 EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/NNCFBatchNorm2d[_bn0]/batch_norm_0" [label="(1, 1152, 7, 7)", style=solid]; +"209 EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/NNCFBatchNorm2d[_bn0]/batch_norm_0" -> "210 EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/MemoryEfficientSwish[_swish]/sigmoid_0" [label="(1, 1152, 7, 7)", style=solid]; +"209 EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/NNCFBatchNorm2d[_bn0]/batch_norm_0" -> "211 EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/MemoryEfficientSwish[_swish]/__mul___0" [label="(1, 1152, 7, 7)", style=solid]; +"210 EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/MemoryEfficientSwish[_swish]/sigmoid_0" -> "211 EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/MemoryEfficientSwish[_swish]/__mul___0" [label="(1, 1152, 7, 7)", style=solid]; +"211 EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/MemoryEfficientSwish[_swish]/__mul___0" -> "212 EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/NNCFUserConv2dStaticSamePadding[_project_conv]/conv2d_0" [label="(1, 1152, 7, 7)", style=solid]; +"212 EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/NNCFUserConv2dStaticSamePadding[_project_conv]/conv2d_0" -> "213 EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/NNCFBatchNorm2d[_bn2]/batch_norm_0" [label="(1, 320, 7, 7)", style=solid]; +"213 EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/NNCFBatchNorm2d[_bn2]/batch_norm_0" -> "214 EfficientNet/NNCFUserConv2dStaticSamePadding[_conv_head]/conv2d_0" [label="(1, 320, 7, 7)", style=solid]; +"214 
EfficientNet/NNCFUserConv2dStaticSamePadding[_conv_head]/conv2d_0" -> "215 EfficientNet/NNCFBatchNorm2d[_bn1]/batch_norm_0" [label="(1, 1280, 7, 7)", style=solid]; +"215 EfficientNet/NNCFBatchNorm2d[_bn1]/batch_norm_0" -> "216 EfficientNet/MemoryEfficientSwish[_swish]/sigmoid_1" [label="(1, 1280, 7, 7)", style=solid]; +"215 EfficientNet/NNCFBatchNorm2d[_bn1]/batch_norm_0" -> "217 EfficientNet/MemoryEfficientSwish[_swish]/__mul___1" [label="(1, 1280, 7, 7)", style=solid]; +"216 EfficientNet/MemoryEfficientSwish[_swish]/sigmoid_1" -> "217 EfficientNet/MemoryEfficientSwish[_swish]/__mul___1" [label="(1, 1280, 7, 7)", style=solid]; +"217 EfficientNet/MemoryEfficientSwish[_swish]/__mul___1" -> "218 EfficientNet/AdaptiveAvgPool2d[_avg_pooling]/adaptive_avg_pool2d_0" [label="(1, 1280, 7, 7)", style=solid]; +"218 EfficientNet/AdaptiveAvgPool2d[_avg_pooling]/adaptive_avg_pool2d_0" -> "219 EfficientNet/flatten_0" [label="(1, 1280, 1, 1)", style=solid]; +"219 EfficientNet/flatten_0" -> "220 EfficientNet/Dropout[_dropout]/dropout_0" [label="(1, 1280)", style=solid]; +"220 EfficientNet/Dropout[_dropout]/dropout_0" -> "221 EfficientNet/NNCFLinear[_fc]/linear_0" [label="(1, 1280)", style=solid]; +"221 EfficientNet/NNCFLinear[_fc]/linear_0" -> "222 /nncf_model_output_0" [label="(1, 1000)", style=solid]; } diff --git a/tests/torch/data/reference_graphs/nas/efficient_net_b0_kernel.dot b/tests/torch/data/reference_graphs/nas/efficient_net_b0_kernel.dot index ca9e035006f..a3fc728d0c9 100644 --- a/tests/torch/data/reference_graphs/nas/efficient_net_b0_kernel.dot +++ b/tests/torch/data/reference_graphs/nas/efficient_net_b0_kernel.dot @@ -65,274 +65,265 @@ strict digraph { "63 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/MemoryEfficientSwish[_swish]/sigmoid_0" [id=63, label="sigmoid_IW144_OW144_#59", style=filled, type=sigmoid]; "64 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/MemoryEfficientSwish[_swish]/__mul___0" [id=64, label="__mul___IW[144, 144]_OW144_#60", style=filled, type=__mul__]; "65 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateInputs[1]/__getitem___0" [id=65, label="__getitem___#65", style=filled, type=__getitem__]; -"66 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateWeight[3]/ElasticKernelConv2DOp[op]/linear_0" [id=66, label="linear_#66", style=filled, type=linear]; -"67 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateWeight[3]/ElasticKernelConv2DOp[op]/view_0" [id=67, label="view_#67", style=filled, type=view]; -"68 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateWeight[3]/ElasticKernelConv2DOp[op]/view_1" [id=68, label="view_#68", style=filled, type=view]; -"69 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ZeroPad2d[static_padding]/pad_0" [id=69, label="pad_IW144_OW144_#61", style=filled, type=pad]; -"70 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/conv2d_0" [color=purple, id=70, label="DW_conv2d_IW144_OW144_G8_#62", style=filled, type=conv2d]; -"71 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/NNCFBatchNorm2d[_bn1]/batch_norm_0" [id=71, label="batch_norm_IW144_OW144_#63", style=filled, type=batch_norm]; -"72 
EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/MemoryEfficientSwish[_swish]/sigmoid_1" [id=72, label="sigmoid_IW144_OW144_#64", style=filled, type=sigmoid]; -"73 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/MemoryEfficientSwish[_swish]/__mul___1" [id=73, label="__mul___IW[144, 144]_OW144_#65", style=filled, type=__mul__]; -"74 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/adaptive_avg_pool2d_0" [id=74, label="adaptive_avg_pool2d_IW144_OW144_#66", style=filled, type=adaptive_avg_pool2d]; -"75 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" [color=lightblue, id=75, label="conv2d_IW144_OW6_G7_#67", style=filled, type=conv2d]; -"76 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/MemoryEfficientSwish[_swish]/sigmoid_2" [id=76, label="sigmoid_IW6_OW6_#68", style=filled, type=sigmoid]; -"77 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/MemoryEfficientSwish[_swish]/__mul___2" [id=77, label="__mul___IW[6, 6]_OW6_#69", style=filled, type=__mul__]; -"78 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/NNCFUserConv2dStaticSamePadding[_se_expand]/conv2d_0" [color=lightblue, id=78, label="conv2d_IW6_OW144_G8_#70", style=filled, type=conv2d]; -"79 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/sigmoid_0" [id=79, label="sigmoid_IW144_OW144_#71", style=filled, type=sigmoid]; -"80 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/__mul___0" [id=80, label="__mul___IW[144, 144]_OW144_#72", style=filled, type=__mul__]; -"81 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/NNCFUserConv2dStaticSamePadding[_project_conv]/conv2d_0" [color=lightblue, id=81, label="conv2d_IW144_OW40_G11_#73", style=filled, type=conv2d]; -"82 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/NNCFBatchNorm2d[_bn2]/batch_norm_0" [id=82, label="batch_norm_IW40_OW40_#74", style=filled, type=batch_norm]; -"83 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/NNCFUserConv2dStaticSamePadding[_expand_conv]/conv2d_0" [color=lightblue, id=83, label="conv2d_IW40_OW240_G10_#75", style=filled, type=conv2d]; -"84 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/NNCFBatchNorm2d[_bn0]/batch_norm_0" [id=84, label="batch_norm_IW240_OW240_#76", style=filled, type=batch_norm]; -"85 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/MemoryEfficientSwish[_swish]/sigmoid_0" [id=85, label="sigmoid_IW240_OW240_#77", style=filled, type=sigmoid]; -"86 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/MemoryEfficientSwish[_swish]/__mul___0" [id=86, label="__mul___IW[240, 240]_OW240_#78", style=filled, type=__mul__]; -"87 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateInputs[1]/__getitem___0" [id=87, label="__getitem___#87", style=filled, type=__getitem__]; -"88 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ZeroPad2d[static_padding]/pad_0" [id=88, label="pad_IW240_OW240_#79", style=filled, type=pad]; -"89 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/conv2d_0" [color=purple, id=89, label="DW_conv2d_IW240_OW240_G10_#80", style=filled, type=conv2d]; -"90 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/NNCFBatchNorm2d[_bn1]/batch_norm_0" [id=90, label="batch_norm_IW240_OW240_#81", style=filled, type=batch_norm]; -"91 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/MemoryEfficientSwish[_swish]/sigmoid_1" [id=91, label="sigmoid_IW240_OW240_#82", style=filled, type=sigmoid]; -"92 
EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/MemoryEfficientSwish[_swish]/__mul___1" [id=92, label="__mul___IW[240, 240]_OW240_#83", style=filled, type=__mul__]; -"93 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/adaptive_avg_pool2d_0" [id=93, label="adaptive_avg_pool2d_IW240_OW240_#84", style=filled, type=adaptive_avg_pool2d]; -"94 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" [color=lightblue, id=94, label="conv2d_IW240_OW10_G9_#85", style=filled, type=conv2d]; -"95 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/MemoryEfficientSwish[_swish]/sigmoid_2" [id=95, label="sigmoid_IW10_OW10_#86", style=filled, type=sigmoid]; -"96 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/MemoryEfficientSwish[_swish]/__mul___2" [id=96, label="__mul___IW[10, 10]_OW10_#87", style=filled, type=__mul__]; -"97 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/NNCFUserConv2dStaticSamePadding[_se_expand]/conv2d_0" [color=lightblue, id=97, label="conv2d_IW10_OW240_G10_#88", style=filled, type=conv2d]; -"98 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/sigmoid_0" [id=98, label="sigmoid_IW240_OW240_#89", style=filled, type=sigmoid]; -"99 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/__mul___0" [id=99, label="__mul___IW[240, 240]_OW240_#90", style=filled, type=__mul__]; -"100 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/NNCFUserConv2dStaticSamePadding[_project_conv]/conv2d_0" [color=lightblue, id=100, label="conv2d_IW240_OW40_G11_#91", style=filled, type=conv2d]; -"101 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/NNCFBatchNorm2d[_bn2]/batch_norm_0" [id=101, label="batch_norm_IW40_OW40_#92", style=filled, type=batch_norm]; -"102 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/__add___0" [id=102, label="__add___IW[40, 40]_OW40_#93", style=filled, type=__add__]; -"103 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/NNCFUserConv2dStaticSamePadding[_expand_conv]/conv2d_0" [color=lightblue, id=103, label="conv2d_IW40_OW240_G13_#94", style=filled, type=conv2d]; -"104 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/NNCFBatchNorm2d[_bn0]/batch_norm_0" [id=104, label="batch_norm_IW240_OW240_#95", style=filled, type=batch_norm]; -"105 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/MemoryEfficientSwish[_swish]/sigmoid_0" [id=105, label="sigmoid_IW240_OW240_#96", style=filled, type=sigmoid]; -"106 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/MemoryEfficientSwish[_swish]/__mul___0" [id=106, label="__mul___IW[240, 240]_OW240_#97", style=filled, type=__mul__]; -"107 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateInputs[1]/__getitem___0" [id=107, label="__getitem___#107", style=filled, type=__getitem__]; -"108 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ZeroPad2d[static_padding]/pad_0" [id=108, label="pad_IW240_OW240_#98", style=filled, type=pad]; -"109 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/conv2d_0" [color=purple, id=109, label="DW_conv2d_IW240_OW240_G13_#99", style=filled, type=conv2d]; -"110 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/NNCFBatchNorm2d[_bn1]/batch_norm_0" [id=110, label="batch_norm_IW240_OW240_#100", style=filled, type=batch_norm]; -"111 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/MemoryEfficientSwish[_swish]/sigmoid_1" [id=111, label="sigmoid_IW240_OW240_#101", style=filled, type=sigmoid]; -"112 
EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/MemoryEfficientSwish[_swish]/__mul___1" [id=112, label="__mul___IW[240, 240]_OW240_#102", style=filled, type=__mul__]; -"113 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/adaptive_avg_pool2d_0" [id=113, label="adaptive_avg_pool2d_IW240_OW240_#103", style=filled, type=adaptive_avg_pool2d]; -"114 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" [color=lightblue, id=114, label="conv2d_IW240_OW10_G12_#104", style=filled, type=conv2d]; -"115 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/MemoryEfficientSwish[_swish]/sigmoid_2" [id=115, label="sigmoid_IW10_OW10_#105", style=filled, type=sigmoid]; -"116 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/MemoryEfficientSwish[_swish]/__mul___2" [id=116, label="__mul___IW[10, 10]_OW10_#106", style=filled, type=__mul__]; -"117 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/NNCFUserConv2dStaticSamePadding[_se_expand]/conv2d_0" [color=lightblue, id=117, label="conv2d_IW10_OW240_G13_#107", style=filled, type=conv2d]; -"118 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/sigmoid_0" [id=118, label="sigmoid_IW240_OW240_#108", style=filled, type=sigmoid]; -"119 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/__mul___0" [id=119, label="__mul___IW[240, 240]_OW240_#109", style=filled, type=__mul__]; -"120 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/NNCFUserConv2dStaticSamePadding[_project_conv]/conv2d_0" [color=lightblue, id=120, label="conv2d_IW240_OW80_G18_#110", style=filled, type=conv2d]; -"121 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/NNCFBatchNorm2d[_bn2]/batch_norm_0" [id=121, label="batch_norm_IW80_OW80_#111", style=filled, type=batch_norm]; -"122 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/NNCFUserConv2dStaticSamePadding[_expand_conv]/conv2d_0" [color=lightblue, id=122, label="conv2d_IW80_OW480_G15_#112", style=filled, type=conv2d]; -"123 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/NNCFBatchNorm2d[_bn0]/batch_norm_0" [id=123, label="batch_norm_IW480_OW480_#113", style=filled, type=batch_norm]; -"124 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/MemoryEfficientSwish[_swish]/sigmoid_0" [id=124, label="sigmoid_IW480_OW480_#114", style=filled, type=sigmoid]; -"125 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/MemoryEfficientSwish[_swish]/__mul___0" [id=125, label="__mul___IW[480, 480]_OW480_#115", style=filled, type=__mul__]; -"126 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateInputs[1]/__getitem___0" [id=126, label="__getitem___#126", style=filled, type=__getitem__]; -"127 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ZeroPad2d[static_padding]/pad_0" [id=127, label="pad_IW480_OW480_#116", style=filled, type=pad]; -"128 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/conv2d_0" [color=purple, id=128, label="DW_conv2d_IW480_OW480_G15_#117", style=filled, type=conv2d]; -"129 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/NNCFBatchNorm2d[_bn1]/batch_norm_0" [id=129, label="batch_norm_IW480_OW480_#118", style=filled, type=batch_norm]; -"130 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/MemoryEfficientSwish[_swish]/sigmoid_1" [id=130, label="sigmoid_IW480_OW480_#119", style=filled, type=sigmoid]; -"131 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/MemoryEfficientSwish[_swish]/__mul___1" [id=131, label="__mul___IW[480, 480]_OW480_#120", style=filled, 
type=__mul__]; -"132 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/adaptive_avg_pool2d_0" [id=132, label="adaptive_avg_pool2d_IW480_OW480_#121", style=filled, type=adaptive_avg_pool2d]; -"133 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" [color=lightblue, id=133, label="conv2d_IW480_OW20_G14_#122", style=filled, type=conv2d]; -"134 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/MemoryEfficientSwish[_swish]/sigmoid_2" [id=134, label="sigmoid_IW20_OW20_#123", style=filled, type=sigmoid]; -"135 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/MemoryEfficientSwish[_swish]/__mul___2" [id=135, label="__mul___IW[20, 20]_OW20_#124", style=filled, type=__mul__]; -"136 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/NNCFUserConv2dStaticSamePadding[_se_expand]/conv2d_0" [color=lightblue, id=136, label="conv2d_IW20_OW480_G15_#125", style=filled, type=conv2d]; -"137 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/sigmoid_0" [id=137, label="sigmoid_IW480_OW480_#126", style=filled, type=sigmoid]; -"138 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/__mul___0" [id=138, label="__mul___IW[480, 480]_OW480_#127", style=filled, type=__mul__]; -"139 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/NNCFUserConv2dStaticSamePadding[_project_conv]/conv2d_0" [color=lightblue, id=139, label="conv2d_IW480_OW80_G18_#128", style=filled, type=conv2d]; -"140 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/NNCFBatchNorm2d[_bn2]/batch_norm_0" [id=140, label="batch_norm_IW80_OW80_#129", style=filled, type=batch_norm]; -"141 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/__add___0" [id=141, label="__add___IW[80, 80]_OW80_#130", style=filled, type=__add__]; -"142 EfficientNet/ModuleList[_blocks]/MBConvBlock[7]/NNCFUserConv2dStaticSamePadding[_expand_conv]/conv2d_0" [color=lightblue, id=142, label="conv2d_IW80_OW480_G17_#131", style=filled, type=conv2d]; -"143 EfficientNet/ModuleList[_blocks]/MBConvBlock[7]/NNCFBatchNorm2d[_bn0]/batch_norm_0" [id=143, label="batch_norm_IW480_OW480_#132", style=filled, type=batch_norm]; -"144 EfficientNet/ModuleList[_blocks]/MBConvBlock[7]/MemoryEfficientSwish[_swish]/sigmoid_0" [id=144, label="sigmoid_IW480_OW480_#133", style=filled, type=sigmoid]; -"145 EfficientNet/ModuleList[_blocks]/MBConvBlock[7]/MemoryEfficientSwish[_swish]/__mul___0" [id=145, label="__mul___IW[480, 480]_OW480_#134", style=filled, type=__mul__]; -"146 EfficientNet/ModuleList[_blocks]/MBConvBlock[7]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateInputs[1]/__getitem___0" [id=146, label="__getitem___#146", style=filled, type=__getitem__]; -"147 EfficientNet/ModuleList[_blocks]/MBConvBlock[7]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ZeroPad2d[static_padding]/pad_0" [id=147, label="pad_IW480_OW480_#135", style=filled, type=pad]; -"148 EfficientNet/ModuleList[_blocks]/MBConvBlock[7]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/conv2d_0" [color=purple, id=148, label="DW_conv2d_IW480_OW480_G17_#136", style=filled, type=conv2d]; -"149 EfficientNet/ModuleList[_blocks]/MBConvBlock[7]/NNCFBatchNorm2d[_bn1]/batch_norm_0" [id=149, label="batch_norm_IW480_OW480_#137", style=filled, type=batch_norm]; -"150 EfficientNet/ModuleList[_blocks]/MBConvBlock[7]/MemoryEfficientSwish[_swish]/sigmoid_1" [id=150, label="sigmoid_IW480_OW480_#138", style=filled, type=sigmoid]; -"151 EfficientNet/ModuleList[_blocks]/MBConvBlock[7]/MemoryEfficientSwish[_swish]/__mul___1" [id=151, label="__mul___IW[480, 480]_OW480_#139", style=filled, type=__mul__]; -"152 
EfficientNet/ModuleList[_blocks]/MBConvBlock[7]/adaptive_avg_pool2d_0" [id=152, label="adaptive_avg_pool2d_IW480_OW480_#140", style=filled, type=adaptive_avg_pool2d]; -"153 EfficientNet/ModuleList[_blocks]/MBConvBlock[7]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" [color=lightblue, id=153, label="conv2d_IW480_OW20_G16_#141", style=filled, type=conv2d]; -"154 EfficientNet/ModuleList[_blocks]/MBConvBlock[7]/MemoryEfficientSwish[_swish]/sigmoid_2" [id=154, label="sigmoid_IW20_OW20_#142", style=filled, type=sigmoid]; -"155 EfficientNet/ModuleList[_blocks]/MBConvBlock[7]/MemoryEfficientSwish[_swish]/__mul___2" [id=155, label="__mul___IW[20, 20]_OW20_#143", style=filled, type=__mul__]; -"156 EfficientNet/ModuleList[_blocks]/MBConvBlock[7]/NNCFUserConv2dStaticSamePadding[_se_expand]/conv2d_0" [color=lightblue, id=156, label="conv2d_IW20_OW480_G17_#144", style=filled, type=conv2d]; -"157 EfficientNet/ModuleList[_blocks]/MBConvBlock[7]/sigmoid_0" [id=157, label="sigmoid_IW480_OW480_#145", style=filled, type=sigmoid]; -"158 EfficientNet/ModuleList[_blocks]/MBConvBlock[7]/__mul___0" [id=158, label="__mul___IW[480, 480]_OW480_#146", style=filled, type=__mul__]; -"159 EfficientNet/ModuleList[_blocks]/MBConvBlock[7]/NNCFUserConv2dStaticSamePadding[_project_conv]/conv2d_0" [color=lightblue, id=159, label="conv2d_IW480_OW80_G18_#147", style=filled, type=conv2d]; -"160 EfficientNet/ModuleList[_blocks]/MBConvBlock[7]/NNCFBatchNorm2d[_bn2]/batch_norm_0" [id=160, label="batch_norm_IW80_OW80_#148", style=filled, type=batch_norm]; -"161 EfficientNet/ModuleList[_blocks]/MBConvBlock[7]/__add___0" [id=161, label="__add___IW[80, 80]_OW80_#149", style=filled, type=__add__]; -"162 EfficientNet/ModuleList[_blocks]/MBConvBlock[8]/NNCFUserConv2dStaticSamePadding[_expand_conv]/conv2d_0" [color=lightblue, id=162, label="conv2d_IW80_OW480_G20_#150", style=filled, type=conv2d]; -"163 EfficientNet/ModuleList[_blocks]/MBConvBlock[8]/NNCFBatchNorm2d[_bn0]/batch_norm_0" [id=163, label="batch_norm_IW480_OW480_#151", style=filled, type=batch_norm]; -"164 EfficientNet/ModuleList[_blocks]/MBConvBlock[8]/MemoryEfficientSwish[_swish]/sigmoid_0" [id=164, label="sigmoid_IW480_OW480_#152", style=filled, type=sigmoid]; -"165 EfficientNet/ModuleList[_blocks]/MBConvBlock[8]/MemoryEfficientSwish[_swish]/__mul___0" [id=165, label="__mul___IW[480, 480]_OW480_#153", style=filled, type=__mul__]; -"166 EfficientNet/ModuleList[_blocks]/MBConvBlock[8]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateInputs[1]/__getitem___0" [id=166, label="__getitem___#166", style=filled, type=__getitem__]; -"167 EfficientNet/ModuleList[_blocks]/MBConvBlock[8]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ZeroPad2d[static_padding]/pad_0" [id=167, label="pad_IW480_OW480_#154", style=filled, type=pad]; -"168 EfficientNet/ModuleList[_blocks]/MBConvBlock[8]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/conv2d_0" [color=purple, id=168, label="DW_conv2d_IW480_OW480_G20_#155", style=filled, type=conv2d]; -"169 EfficientNet/ModuleList[_blocks]/MBConvBlock[8]/NNCFBatchNorm2d[_bn1]/batch_norm_0" [id=169, label="batch_norm_IW480_OW480_#156", style=filled, type=batch_norm]; -"170 EfficientNet/ModuleList[_blocks]/MBConvBlock[8]/MemoryEfficientSwish[_swish]/sigmoid_1" [id=170, label="sigmoid_IW480_OW480_#157", style=filled, type=sigmoid]; -"171 EfficientNet/ModuleList[_blocks]/MBConvBlock[8]/MemoryEfficientSwish[_swish]/__mul___1" [id=171, label="__mul___IW[480, 480]_OW480_#158", style=filled, type=__mul__]; -"172 
EfficientNet/ModuleList[_blocks]/MBConvBlock[8]/adaptive_avg_pool2d_0" [id=172, label="adaptive_avg_pool2d_IW480_OW480_#159", style=filled, type=adaptive_avg_pool2d]; -"173 EfficientNet/ModuleList[_blocks]/MBConvBlock[8]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" [color=lightblue, id=173, label="conv2d_IW480_OW20_G19_#160", style=filled, type=conv2d]; -"174 EfficientNet/ModuleList[_blocks]/MBConvBlock[8]/MemoryEfficientSwish[_swish]/sigmoid_2" [id=174, label="sigmoid_IW20_OW20_#161", style=filled, type=sigmoid]; -"175 EfficientNet/ModuleList[_blocks]/MBConvBlock[8]/MemoryEfficientSwish[_swish]/__mul___2" [id=175, label="__mul___IW[20, 20]_OW20_#162", style=filled, type=__mul__]; -"176 EfficientNet/ModuleList[_blocks]/MBConvBlock[8]/NNCFUserConv2dStaticSamePadding[_se_expand]/conv2d_0" [color=lightblue, id=176, label="conv2d_IW20_OW480_G20_#163", style=filled, type=conv2d]; -"177 EfficientNet/ModuleList[_blocks]/MBConvBlock[8]/sigmoid_0" [id=177, label="sigmoid_IW480_OW480_#164", style=filled, type=sigmoid]; -"178 EfficientNet/ModuleList[_blocks]/MBConvBlock[8]/__mul___0" [id=178, label="__mul___IW[480, 480]_OW480_#165", style=filled, type=__mul__]; -"179 EfficientNet/ModuleList[_blocks]/MBConvBlock[8]/NNCFUserConv2dStaticSamePadding[_project_conv]/conv2d_0" [color=lightblue, id=179, label="conv2d_IW480_OW112_G25_#166", style=filled, type=conv2d]; -"180 EfficientNet/ModuleList[_blocks]/MBConvBlock[8]/NNCFBatchNorm2d[_bn2]/batch_norm_0" [id=180, label="batch_norm_IW112_OW112_#167", style=filled, type=batch_norm]; -"181 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFUserConv2dStaticSamePadding[_expand_conv]/conv2d_0" [color=lightblue, id=181, label="conv2d_IW112_OW672_G22_#168", style=filled, type=conv2d]; -"182 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFBatchNorm2d[_bn0]/batch_norm_0" [id=182, label="batch_norm_IW672_OW672_#169", style=filled, type=batch_norm]; -"183 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/MemoryEfficientSwish[_swish]/sigmoid_0" [id=183, label="sigmoid_IW672_OW672_#170", style=filled, type=sigmoid]; -"184 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/MemoryEfficientSwish[_swish]/__mul___0" [id=184, label="__mul___IW[672, 672]_OW672_#171", style=filled, type=__mul__]; -"185 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateInputs[1]/__getitem___0" [id=185, label="__getitem___#185", style=filled, type=__getitem__]; -"186 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateWeight[3]/ElasticKernelConv2DOp[op]/linear_0" [id=186, label="linear_#186", style=filled, type=linear]; -"187 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateWeight[3]/ElasticKernelConv2DOp[op]/view_0" [id=187, label="view_#187", style=filled, type=view]; -"188 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateWeight[3]/ElasticKernelConv2DOp[op]/view_1" [id=188, label="view_#188", style=filled, type=view]; -"189 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ZeroPad2d[static_padding]/pad_0" [id=189, label="pad_IW672_OW672_#172", style=filled, type=pad]; -"190 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/conv2d_0" [color=purple, id=190, label="DW_conv2d_IW672_OW672_G22_#173", style=filled, 
type=conv2d]; -"191 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFBatchNorm2d[_bn1]/batch_norm_0" [id=191, label="batch_norm_IW672_OW672_#174", style=filled, type=batch_norm]; -"192 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/MemoryEfficientSwish[_swish]/sigmoid_1" [id=192, label="sigmoid_IW672_OW672_#175", style=filled, type=sigmoid]; -"193 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/MemoryEfficientSwish[_swish]/__mul___1" [id=193, label="__mul___IW[672, 672]_OW672_#176", style=filled, type=__mul__]; -"194 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/adaptive_avg_pool2d_0" [id=194, label="adaptive_avg_pool2d_IW672_OW672_#177", style=filled, type=adaptive_avg_pool2d]; -"195 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" [color=lightblue, id=195, label="conv2d_IW672_OW28_G21_#178", style=filled, type=conv2d]; -"196 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/MemoryEfficientSwish[_swish]/sigmoid_2" [id=196, label="sigmoid_IW28_OW28_#179", style=filled, type=sigmoid]; -"197 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/MemoryEfficientSwish[_swish]/__mul___2" [id=197, label="__mul___IW[28, 28]_OW28_#180", style=filled, type=__mul__]; -"198 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFUserConv2dStaticSamePadding[_se_expand]/conv2d_0" [color=lightblue, id=198, label="conv2d_IW28_OW672_G22_#181", style=filled, type=conv2d]; -"199 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/sigmoid_0" [id=199, label="sigmoid_IW672_OW672_#182", style=filled, type=sigmoid]; -"200 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/__mul___0" [id=200, label="__mul___IW[672, 672]_OW672_#183", style=filled, type=__mul__]; -"201 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFUserConv2dStaticSamePadding[_project_conv]/conv2d_0" [color=lightblue, id=201, label="conv2d_IW672_OW112_G25_#184", style=filled, type=conv2d]; -"202 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFBatchNorm2d[_bn2]/batch_norm_0" [id=202, label="batch_norm_IW112_OW112_#185", style=filled, type=batch_norm]; -"203 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/__add___0" [id=203, label="__add___IW[112, 112]_OW112_#186", style=filled, type=__add__]; -"204 EfficientNet/ModuleList[_blocks]/MBConvBlock[10]/NNCFUserConv2dStaticSamePadding[_expand_conv]/conv2d_0" [color=lightblue, id=204, label="conv2d_IW112_OW672_G24_#187", style=filled, type=conv2d]; -"205 EfficientNet/ModuleList[_blocks]/MBConvBlock[10]/NNCFBatchNorm2d[_bn0]/batch_norm_0" [id=205, label="batch_norm_IW672_OW672_#188", style=filled, type=batch_norm]; -"206 EfficientNet/ModuleList[_blocks]/MBConvBlock[10]/MemoryEfficientSwish[_swish]/sigmoid_0" [id=206, label="sigmoid_IW672_OW672_#189", style=filled, type=sigmoid]; -"207 EfficientNet/ModuleList[_blocks]/MBConvBlock[10]/MemoryEfficientSwish[_swish]/__mul___0" [id=207, label="__mul___IW[672, 672]_OW672_#190", style=filled, type=__mul__]; -"208 EfficientNet/ModuleList[_blocks]/MBConvBlock[10]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateInputs[1]/__getitem___0" [id=208, label="__getitem___#208", style=filled, type=__getitem__]; -"209 EfficientNet/ModuleList[_blocks]/MBConvBlock[10]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ZeroPad2d[static_padding]/pad_0" [id=209, label="pad_IW672_OW672_#191", style=filled, type=pad]; -"210 EfficientNet/ModuleList[_blocks]/MBConvBlock[10]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/conv2d_0" [color=purple, id=210, label="DW_conv2d_IW672_OW672_G24_#192", style=filled, 
type=conv2d]; -"211 EfficientNet/ModuleList[_blocks]/MBConvBlock[10]/NNCFBatchNorm2d[_bn1]/batch_norm_0" [id=211, label="batch_norm_IW672_OW672_#193", style=filled, type=batch_norm]; -"212 EfficientNet/ModuleList[_blocks]/MBConvBlock[10]/MemoryEfficientSwish[_swish]/sigmoid_1" [id=212, label="sigmoid_IW672_OW672_#194", style=filled, type=sigmoid]; -"213 EfficientNet/ModuleList[_blocks]/MBConvBlock[10]/MemoryEfficientSwish[_swish]/__mul___1" [id=213, label="__mul___IW[672, 672]_OW672_#195", style=filled, type=__mul__]; -"214 EfficientNet/ModuleList[_blocks]/MBConvBlock[10]/adaptive_avg_pool2d_0" [id=214, label="adaptive_avg_pool2d_IW672_OW672_#196", style=filled, type=adaptive_avg_pool2d]; -"215 EfficientNet/ModuleList[_blocks]/MBConvBlock[10]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" [color=lightblue, id=215, label="conv2d_IW672_OW28_G23_#197", style=filled, type=conv2d]; -"216 EfficientNet/ModuleList[_blocks]/MBConvBlock[10]/MemoryEfficientSwish[_swish]/sigmoid_2" [id=216, label="sigmoid_IW28_OW28_#198", style=filled, type=sigmoid]; -"217 EfficientNet/ModuleList[_blocks]/MBConvBlock[10]/MemoryEfficientSwish[_swish]/__mul___2" [id=217, label="__mul___IW[28, 28]_OW28_#199", style=filled, type=__mul__]; -"218 EfficientNet/ModuleList[_blocks]/MBConvBlock[10]/NNCFUserConv2dStaticSamePadding[_se_expand]/conv2d_0" [color=lightblue, id=218, label="conv2d_IW28_OW672_G24_#200", style=filled, type=conv2d]; -"219 EfficientNet/ModuleList[_blocks]/MBConvBlock[10]/sigmoid_0" [id=219, label="sigmoid_IW672_OW672_#201", style=filled, type=sigmoid]; -"220 EfficientNet/ModuleList[_blocks]/MBConvBlock[10]/__mul___0" [id=220, label="__mul___IW[672, 672]_OW672_#202", style=filled, type=__mul__]; -"221 EfficientNet/ModuleList[_blocks]/MBConvBlock[10]/NNCFUserConv2dStaticSamePadding[_project_conv]/conv2d_0" [color=lightblue, id=221, label="conv2d_IW672_OW112_G25_#203", style=filled, type=conv2d]; -"222 EfficientNet/ModuleList[_blocks]/MBConvBlock[10]/NNCFBatchNorm2d[_bn2]/batch_norm_0" [id=222, label="batch_norm_IW112_OW112_#204", style=filled, type=batch_norm]; -"223 EfficientNet/ModuleList[_blocks]/MBConvBlock[10]/__add___0" [id=223, label="__add___IW[112, 112]_OW112_#205", style=filled, type=__add__]; -"224 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFUserConv2dStaticSamePadding[_expand_conv]/conv2d_0" [color=lightblue, id=224, label="conv2d_IW112_OW672_G27_#206", style=filled, type=conv2d]; -"225 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFBatchNorm2d[_bn0]/batch_norm_0" [id=225, label="batch_norm_IW672_OW672_#207", style=filled, type=batch_norm]; -"226 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/MemoryEfficientSwish[_swish]/sigmoid_0" [id=226, label="sigmoid_IW672_OW672_#208", style=filled, type=sigmoid]; -"227 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/MemoryEfficientSwish[_swish]/__mul___0" [id=227, label="__mul___IW[672, 672]_OW672_#209", style=filled, type=__mul__]; -"228 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateInputs[1]/__getitem___0" [id=228, label="__getitem___#228", style=filled, type=__getitem__]; -"229 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ZeroPad2d[static_padding]/pad_0" [id=229, label="pad_IW672_OW672_#210", style=filled, type=pad]; -"230 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/conv2d_0" [color=purple, id=230, label="DW_conv2d_IW672_OW672_G27_#211", 
style=filled, type=conv2d]; -"231 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFBatchNorm2d[_bn1]/batch_norm_0" [id=231, label="batch_norm_IW672_OW672_#212", style=filled, type=batch_norm]; -"232 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/MemoryEfficientSwish[_swish]/sigmoid_1" [id=232, label="sigmoid_IW672_OW672_#213", style=filled, type=sigmoid]; -"233 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/MemoryEfficientSwish[_swish]/__mul___1" [id=233, label="__mul___IW[672, 672]_OW672_#214", style=filled, type=__mul__]; -"234 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/adaptive_avg_pool2d_0" [id=234, label="adaptive_avg_pool2d_IW672_OW672_#215", style=filled, type=adaptive_avg_pool2d]; -"235 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" [color=lightblue, id=235, label="conv2d_IW672_OW28_G26_#216", style=filled, type=conv2d]; -"236 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/MemoryEfficientSwish[_swish]/sigmoid_2" [id=236, label="sigmoid_IW28_OW28_#217", style=filled, type=sigmoid]; -"237 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/MemoryEfficientSwish[_swish]/__mul___2" [id=237, label="__mul___IW[28, 28]_OW28_#218", style=filled, type=__mul__]; -"238 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFUserConv2dStaticSamePadding[_se_expand]/conv2d_0" [color=lightblue, id=238, label="conv2d_IW28_OW672_G27_#219", style=filled, type=conv2d]; -"239 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/sigmoid_0" [id=239, label="sigmoid_IW672_OW672_#220", style=filled, type=sigmoid]; -"240 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/__mul___0" [id=240, label="__mul___IW[672, 672]_OW672_#221", style=filled, type=__mul__]; -"241 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFUserConv2dStaticSamePadding[_project_conv]/conv2d_0" [color=lightblue, id=241, label="conv2d_IW672_OW192_G34_#222", style=filled, type=conv2d]; -"242 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFBatchNorm2d[_bn2]/batch_norm_0" [id=242, label="batch_norm_IW192_OW192_#223", style=filled, type=batch_norm]; -"243 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFUserConv2dStaticSamePadding[_expand_conv]/conv2d_0" [color=lightblue, id=243, label="conv2d_IW192_OW1152_G29_#224", style=filled, type=conv2d]; -"244 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFBatchNorm2d[_bn0]/batch_norm_0" [id=244, label="batch_norm_IW1152_OW1152_#225", style=filled, type=batch_norm]; -"245 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/MemoryEfficientSwish[_swish]/sigmoid_0" [id=245, label="sigmoid_IW1152_OW1152_#226", style=filled, type=sigmoid]; -"246 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/MemoryEfficientSwish[_swish]/__mul___0" [id=246, label="__mul___IW[1152, 1152]_OW1152_#227", style=filled, type=__mul__]; -"247 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateInputs[1]/__getitem___0" [id=247, label="__getitem___#247", style=filled, type=__getitem__]; -"248 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ZeroPad2d[static_padding]/pad_0" [id=248, label="pad_IW1152_OW1152_#228", style=filled, type=pad]; -"249 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/conv2d_0" [color=purple, id=249, label="DW_conv2d_IW1152_OW1152_G29_#229", style=filled, type=conv2d]; -"250 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFBatchNorm2d[_bn1]/batch_norm_0" [id=250, 
label="batch_norm_IW1152_OW1152_#230", style=filled, type=batch_norm]; -"251 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/MemoryEfficientSwish[_swish]/sigmoid_1" [id=251, label="sigmoid_IW1152_OW1152_#231", style=filled, type=sigmoid]; -"252 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/MemoryEfficientSwish[_swish]/__mul___1" [id=252, label="__mul___IW[1152, 1152]_OW1152_#232", style=filled, type=__mul__]; -"253 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/adaptive_avg_pool2d_0" [id=253, label="adaptive_avg_pool2d_IW1152_OW1152_#233", style=filled, type=adaptive_avg_pool2d]; -"254 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" [color=lightblue, id=254, label="conv2d_IW1152_OW48_G28_#234", style=filled, type=conv2d]; -"255 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/MemoryEfficientSwish[_swish]/sigmoid_2" [id=255, label="sigmoid_IW48_OW48_#235", style=filled, type=sigmoid]; -"256 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/MemoryEfficientSwish[_swish]/__mul___2" [id=256, label="__mul___IW[48, 48]_OW48_#236", style=filled, type=__mul__]; -"257 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFUserConv2dStaticSamePadding[_se_expand]/conv2d_0" [color=lightblue, id=257, label="conv2d_IW48_OW1152_G29_#237", style=filled, type=conv2d]; -"258 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/sigmoid_0" [id=258, label="sigmoid_IW1152_OW1152_#238", style=filled, type=sigmoid]; -"259 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/__mul___0" [id=259, label="__mul___IW[1152, 1152]_OW1152_#239", style=filled, type=__mul__]; -"260 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFUserConv2dStaticSamePadding[_project_conv]/conv2d_0" [color=lightblue, id=260, label="conv2d_IW1152_OW192_G34_#240", style=filled, type=conv2d]; -"261 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFBatchNorm2d[_bn2]/batch_norm_0" [id=261, label="batch_norm_IW192_OW192_#241", style=filled, type=batch_norm]; -"262 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/__add___0" [id=262, label="__add___IW[192, 192]_OW192_#242", style=filled, type=__add__]; -"263 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFUserConv2dStaticSamePadding[_expand_conv]/conv2d_0" [color=lightblue, id=263, label="conv2d_IW192_OW1152_G31_#243", style=filled, type=conv2d]; -"264 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFBatchNorm2d[_bn0]/batch_norm_0" [id=264, label="batch_norm_IW1152_OW1152_#244", style=filled, type=batch_norm]; -"265 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/MemoryEfficientSwish[_swish]/sigmoid_0" [id=265, label="sigmoid_IW1152_OW1152_#245", style=filled, type=sigmoid]; -"266 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/MemoryEfficientSwish[_swish]/__mul___0" [id=266, label="__mul___IW[1152, 1152]_OW1152_#246", style=filled, type=__mul__]; -"267 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateInputs[1]/__getitem___0" [id=267, label="__getitem___#267", style=filled, type=__getitem__]; -"268 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateWeight[3]/ElasticKernelConv2DOp[op]/linear_0" [id=268, label="linear_#268", style=filled, type=linear]; -"269 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateWeight[3]/ElasticKernelConv2DOp[op]/view_0" [id=269, label="view_#269", style=filled, type=view]; -"270 
EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateWeight[3]/ElasticKernelConv2DOp[op]/view_1" [id=270, label="view_#270", style=filled, type=view]; -"271 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ZeroPad2d[static_padding]/pad_0" [id=271, label="pad_IW1152_OW1152_#247", style=filled, type=pad]; -"272 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/conv2d_0" [color=purple, id=272, label="DW_conv2d_IW1152_OW1152_G31_#248", style=filled, type=conv2d]; -"273 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFBatchNorm2d[_bn1]/batch_norm_0" [id=273, label="batch_norm_IW1152_OW1152_#249", style=filled, type=batch_norm]; -"274 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/MemoryEfficientSwish[_swish]/sigmoid_1" [id=274, label="sigmoid_IW1152_OW1152_#250", style=filled, type=sigmoid]; -"275 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/MemoryEfficientSwish[_swish]/__mul___1" [id=275, label="__mul___IW[1152, 1152]_OW1152_#251", style=filled, type=__mul__]; -"276 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/adaptive_avg_pool2d_0" [id=276, label="adaptive_avg_pool2d_IW1152_OW1152_#252", style=filled, type=adaptive_avg_pool2d]; -"277 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" [color=lightblue, id=277, label="conv2d_IW1152_OW48_G30_#253", style=filled, type=conv2d]; -"278 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/MemoryEfficientSwish[_swish]/sigmoid_2" [id=278, label="sigmoid_IW48_OW48_#254", style=filled, type=sigmoid]; -"279 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/MemoryEfficientSwish[_swish]/__mul___2" [id=279, label="__mul___IW[48, 48]_OW48_#255", style=filled, type=__mul__]; -"280 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFUserConv2dStaticSamePadding[_se_expand]/conv2d_0" [color=lightblue, id=280, label="conv2d_IW48_OW1152_G31_#256", style=filled, type=conv2d]; -"281 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/sigmoid_0" [id=281, label="sigmoid_IW1152_OW1152_#257", style=filled, type=sigmoid]; -"282 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/__mul___0" [id=282, label="__mul___IW[1152, 1152]_OW1152_#258", style=filled, type=__mul__]; -"283 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFUserConv2dStaticSamePadding[_project_conv]/conv2d_0" [color=lightblue, id=283, label="conv2d_IW1152_OW192_G34_#259", style=filled, type=conv2d]; -"284 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFBatchNorm2d[_bn2]/batch_norm_0" [id=284, label="batch_norm_IW192_OW192_#260", style=filled, type=batch_norm]; -"285 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/__add___0" [id=285, label="__add___IW[192, 192]_OW192_#261", style=filled, type=__add__]; -"286 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/NNCFUserConv2dStaticSamePadding[_expand_conv]/conv2d_0" [color=lightblue, id=286, label="conv2d_IW192_OW1152_G33_#262", style=filled, type=conv2d]; -"287 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/NNCFBatchNorm2d[_bn0]/batch_norm_0" [id=287, label="batch_norm_IW1152_OW1152_#263", style=filled, type=batch_norm]; -"288 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/MemoryEfficientSwish[_swish]/sigmoid_0" [id=288, label="sigmoid_IW1152_OW1152_#264", style=filled, type=sigmoid]; -"289 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/MemoryEfficientSwish[_swish]/__mul___0" [id=289, label="__mul___IW[1152, 
1152]_OW1152_#265", style=filled, type=__mul__]; -"290 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateInputs[1]/__getitem___0" [id=290, label="__getitem___#290", style=filled, type=__getitem__]; -"291 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ZeroPad2d[static_padding]/pad_0" [id=291, label="pad_IW1152_OW1152_#266", style=filled, type=pad]; -"292 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/conv2d_0" [color=purple, id=292, label="DW_conv2d_IW1152_OW1152_G33_#267", style=filled, type=conv2d]; -"293 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/NNCFBatchNorm2d[_bn1]/batch_norm_0" [id=293, label="batch_norm_IW1152_OW1152_#268", style=filled, type=batch_norm]; -"294 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/MemoryEfficientSwish[_swish]/sigmoid_1" [id=294, label="sigmoid_IW1152_OW1152_#269", style=filled, type=sigmoid]; -"295 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/MemoryEfficientSwish[_swish]/__mul___1" [id=295, label="__mul___IW[1152, 1152]_OW1152_#270", style=filled, type=__mul__]; -"296 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/adaptive_avg_pool2d_0" [id=296, label="adaptive_avg_pool2d_IW1152_OW1152_#271", style=filled, type=adaptive_avg_pool2d]; -"297 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" [color=lightblue, id=297, label="conv2d_IW1152_OW48_G32_#272", style=filled, type=conv2d]; -"298 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/MemoryEfficientSwish[_swish]/sigmoid_2" [id=298, label="sigmoid_IW48_OW48_#273", style=filled, type=sigmoid]; -"299 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/MemoryEfficientSwish[_swish]/__mul___2" [id=299, label="__mul___IW[48, 48]_OW48_#274", style=filled, type=__mul__]; -"300 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/NNCFUserConv2dStaticSamePadding[_se_expand]/conv2d_0" [color=lightblue, id=300, label="conv2d_IW48_OW1152_G33_#275", style=filled, type=conv2d]; -"301 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/sigmoid_0" [id=301, label="sigmoid_IW1152_OW1152_#276", style=filled, type=sigmoid]; -"302 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/__mul___0" [id=302, label="__mul___IW[1152, 1152]_OW1152_#277", style=filled, type=__mul__]; -"303 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/NNCFUserConv2dStaticSamePadding[_project_conv]/conv2d_0" [color=lightblue, id=303, label="conv2d_IW1152_OW192_G34_#278", style=filled, type=conv2d]; -"304 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/NNCFBatchNorm2d[_bn2]/batch_norm_0" [id=304, label="batch_norm_IW192_OW192_#279", style=filled, type=batch_norm]; -"305 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/__add___0" [id=305, label="__add___IW[192, 192]_OW192_#280", style=filled, type=__add__]; -"306 EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/NNCFUserConv2dStaticSamePadding[_expand_conv]/conv2d_0" [color=lightblue, id=306, label="conv2d_IW192_OW1152_G36_#281", style=filled, type=conv2d]; -"307 EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/NNCFBatchNorm2d[_bn0]/batch_norm_0" [id=307, label="batch_norm_IW1152_OW1152_#282", style=filled, type=batch_norm]; -"308 EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/MemoryEfficientSwish[_swish]/sigmoid_0" [id=308, label="sigmoid_IW1152_OW1152_#283", style=filled, type=sigmoid]; -"309 EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/MemoryEfficientSwish[_swish]/__mul___0" 
[id=309, label="__mul___IW[1152, 1152]_OW1152_#284", style=filled, type=__mul__]; -"310 EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateInputs[1]/__getitem___0" [id=310, label="__getitem___#310", style=filled, type=__getitem__]; -"311 EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ZeroPad2d[static_padding]/pad_0" [id=311, label="pad_IW1152_OW1152_#285", style=filled, type=pad]; -"312 EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/conv2d_0" [color=purple, id=312, label="DW_conv2d_IW1152_OW1152_G36_#286", style=filled, type=conv2d]; -"313 EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/NNCFBatchNorm2d[_bn1]/batch_norm_0" [id=313, label="batch_norm_IW1152_OW1152_#287", style=filled, type=batch_norm]; -"314 EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/MemoryEfficientSwish[_swish]/sigmoid_1" [id=314, label="sigmoid_IW1152_OW1152_#288", style=filled, type=sigmoid]; -"315 EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/MemoryEfficientSwish[_swish]/__mul___1" [id=315, label="__mul___IW[1152, 1152]_OW1152_#289", style=filled, type=__mul__]; -"316 EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/adaptive_avg_pool2d_0" [id=316, label="adaptive_avg_pool2d_IW1152_OW1152_#290", style=filled, type=adaptive_avg_pool2d]; -"317 EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" [color=lightblue, id=317, label="conv2d_IW1152_OW48_G35_#291", style=filled, type=conv2d]; -"318 EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/MemoryEfficientSwish[_swish]/sigmoid_2" [id=318, label="sigmoid_IW48_OW48_#292", style=filled, type=sigmoid]; -"319 EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/MemoryEfficientSwish[_swish]/__mul___2" [id=319, label="__mul___IW[48, 48]_OW48_#293", style=filled, type=__mul__]; -"320 EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/NNCFUserConv2dStaticSamePadding[_se_expand]/conv2d_0" [color=lightblue, id=320, label="conv2d_IW48_OW1152_G36_#294", style=filled, type=conv2d]; -"321 EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/sigmoid_0" [id=321, label="sigmoid_IW1152_OW1152_#295", style=filled, type=sigmoid]; -"322 EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/__mul___0" [id=322, label="__mul___IW[1152, 1152]_OW1152_#296", style=filled, type=__mul__]; -"323 EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/NNCFUserConv2dStaticSamePadding[_project_conv]/conv2d_0" [color=lightblue, id=323, label="conv2d_IW1152_OW320_G39_#297", style=filled, type=conv2d]; -"324 EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/NNCFBatchNorm2d[_bn2]/batch_norm_0" [id=324, label="batch_norm_IW320_OW320_#298", style=filled, type=batch_norm]; -"325 EfficientNet/NNCFUserConv2dStaticSamePadding[_conv_head]/conv2d_0" [color=lightblue, id=325, label="conv2d_IW320_OW1280_G37_#299", style=filled, type=conv2d]; -"326 EfficientNet/NNCFBatchNorm2d[_bn1]/batch_norm_0" [id=326, label="batch_norm_IW1280_OW1280_#300", style=filled, type=batch_norm]; -"327 EfficientNet/MemoryEfficientSwish[_swish]/sigmoid_1" [id=327, label="sigmoid_IW1280_OW1280_#301", style=filled, type=sigmoid]; -"328 EfficientNet/MemoryEfficientSwish[_swish]/__mul___1" [id=328, label="__mul___IW[1280, 1280]_OW1280_#302", style=filled, type=__mul__]; -"329 EfficientNet/AdaptiveAvgPool2d[_avg_pooling]/adaptive_avg_pool2d_0" [id=329, label="adaptive_avg_pool2d_IW1280_OW1280_#303", style=filled, type=adaptive_avg_pool2d]; 
-"330 EfficientNet/flatten_0" [id=330, label="flatten_IW1280_OW1280_#304", style=filled, type=flatten]; -"331 EfficientNet/Dropout[_dropout]/dropout_0" [id=331, label="dropout_IW1280_OW1280_#305", style=filled, type=dropout]; -"332 EfficientNet/NNCFLinear[_fc]/linear_0" [id=332, label="linear_IW1280_#306", style=filled, type=linear]; -"333 /nncf_model_output_0" [id=333, label="nncf_model_output_#307", style=filled, type=nncf_model_output]; +"66 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ZeroPad2d[static_padding]/pad_0" [id=66, label="pad_IW144_OW144_#61", style=filled, type=pad]; +"67 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/conv2d_0" [color=purple, id=67, label="DW_conv2d_IW144_OW144_G8_#62", style=filled, type=conv2d]; +"68 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/NNCFBatchNorm2d[_bn1]/batch_norm_0" [id=68, label="batch_norm_IW144_OW144_#63", style=filled, type=batch_norm]; +"69 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/MemoryEfficientSwish[_swish]/sigmoid_1" [id=69, label="sigmoid_IW144_OW144_#64", style=filled, type=sigmoid]; +"70 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/MemoryEfficientSwish[_swish]/__mul___1" [id=70, label="__mul___IW[144, 144]_OW144_#65", style=filled, type=__mul__]; +"71 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/adaptive_avg_pool2d_0" [id=71, label="adaptive_avg_pool2d_IW144_OW144_#66", style=filled, type=adaptive_avg_pool2d]; +"72 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" [color=lightblue, id=72, label="conv2d_IW144_OW6_G7_#67", style=filled, type=conv2d]; +"73 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/MemoryEfficientSwish[_swish]/sigmoid_2" [id=73, label="sigmoid_IW6_OW6_#68", style=filled, type=sigmoid]; +"74 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/MemoryEfficientSwish[_swish]/__mul___2" [id=74, label="__mul___IW[6, 6]_OW6_#69", style=filled, type=__mul__]; +"75 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/NNCFUserConv2dStaticSamePadding[_se_expand]/conv2d_0" [color=lightblue, id=75, label="conv2d_IW6_OW144_G8_#70", style=filled, type=conv2d]; +"76 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/sigmoid_0" [id=76, label="sigmoid_IW144_OW144_#71", style=filled, type=sigmoid]; +"77 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/__mul___0" [id=77, label="__mul___IW[144, 144]_OW144_#72", style=filled, type=__mul__]; +"78 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/NNCFUserConv2dStaticSamePadding[_project_conv]/conv2d_0" [color=lightblue, id=78, label="conv2d_IW144_OW40_G11_#73", style=filled, type=conv2d]; +"79 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/NNCFBatchNorm2d[_bn2]/batch_norm_0" [id=79, label="batch_norm_IW40_OW40_#74", style=filled, type=batch_norm]; +"80 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/NNCFUserConv2dStaticSamePadding[_expand_conv]/conv2d_0" [color=lightblue, id=80, label="conv2d_IW40_OW240_G10_#75", style=filled, type=conv2d]; +"81 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/NNCFBatchNorm2d[_bn0]/batch_norm_0" [id=81, label="batch_norm_IW240_OW240_#76", style=filled, type=batch_norm]; +"82 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/MemoryEfficientSwish[_swish]/sigmoid_0" [id=82, label="sigmoid_IW240_OW240_#77", style=filled, type=sigmoid]; +"83 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/MemoryEfficientSwish[_swish]/__mul___0" [id=83, label="__mul___IW[240, 240]_OW240_#78", style=filled, type=__mul__]; +"84 
EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateInputs[1]/__getitem___0" [id=84, label="__getitem___#84", style=filled, type=__getitem__]; +"85 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ZeroPad2d[static_padding]/pad_0" [id=85, label="pad_IW240_OW240_#79", style=filled, type=pad]; +"86 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/conv2d_0" [color=purple, id=86, label="DW_conv2d_IW240_OW240_G10_#80", style=filled, type=conv2d]; +"87 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/NNCFBatchNorm2d[_bn1]/batch_norm_0" [id=87, label="batch_norm_IW240_OW240_#81", style=filled, type=batch_norm]; +"88 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/MemoryEfficientSwish[_swish]/sigmoid_1" [id=88, label="sigmoid_IW240_OW240_#82", style=filled, type=sigmoid]; +"89 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/MemoryEfficientSwish[_swish]/__mul___1" [id=89, label="__mul___IW[240, 240]_OW240_#83", style=filled, type=__mul__]; +"90 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/adaptive_avg_pool2d_0" [id=90, label="adaptive_avg_pool2d_IW240_OW240_#84", style=filled, type=adaptive_avg_pool2d]; +"91 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" [color=lightblue, id=91, label="conv2d_IW240_OW10_G9_#85", style=filled, type=conv2d]; +"92 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/MemoryEfficientSwish[_swish]/sigmoid_2" [id=92, label="sigmoid_IW10_OW10_#86", style=filled, type=sigmoid]; +"93 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/MemoryEfficientSwish[_swish]/__mul___2" [id=93, label="__mul___IW[10, 10]_OW10_#87", style=filled, type=__mul__]; +"94 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/NNCFUserConv2dStaticSamePadding[_se_expand]/conv2d_0" [color=lightblue, id=94, label="conv2d_IW10_OW240_G10_#88", style=filled, type=conv2d]; +"95 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/sigmoid_0" [id=95, label="sigmoid_IW240_OW240_#89", style=filled, type=sigmoid]; +"96 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/__mul___0" [id=96, label="__mul___IW[240, 240]_OW240_#90", style=filled, type=__mul__]; +"97 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/NNCFUserConv2dStaticSamePadding[_project_conv]/conv2d_0" [color=lightblue, id=97, label="conv2d_IW240_OW40_G11_#91", style=filled, type=conv2d]; +"98 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/NNCFBatchNorm2d[_bn2]/batch_norm_0" [id=98, label="batch_norm_IW40_OW40_#92", style=filled, type=batch_norm]; +"99 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/__add___0" [id=99, label="__add___IW[40, 40]_OW40_#93", style=filled, type=__add__]; +"100 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/NNCFUserConv2dStaticSamePadding[_expand_conv]/conv2d_0" [color=lightblue, id=100, label="conv2d_IW40_OW240_G13_#94", style=filled, type=conv2d]; +"101 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/NNCFBatchNorm2d[_bn0]/batch_norm_0" [id=101, label="batch_norm_IW240_OW240_#95", style=filled, type=batch_norm]; +"102 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/MemoryEfficientSwish[_swish]/sigmoid_0" [id=102, label="sigmoid_IW240_OW240_#96", style=filled, type=sigmoid]; +"103 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/MemoryEfficientSwish[_swish]/__mul___0" [id=103, label="__mul___IW[240, 240]_OW240_#97", style=filled, type=__mul__]; +"104 
EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateInputs[1]/__getitem___0" [id=104, label="__getitem___#104", style=filled, type=__getitem__]; +"105 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ZeroPad2d[static_padding]/pad_0" [id=105, label="pad_IW240_OW240_#98", style=filled, type=pad]; +"106 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/conv2d_0" [color=purple, id=106, label="DW_conv2d_IW240_OW240_G13_#99", style=filled, type=conv2d]; +"107 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/NNCFBatchNorm2d[_bn1]/batch_norm_0" [id=107, label="batch_norm_IW240_OW240_#100", style=filled, type=batch_norm]; +"108 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/MemoryEfficientSwish[_swish]/sigmoid_1" [id=108, label="sigmoid_IW240_OW240_#101", style=filled, type=sigmoid]; +"109 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/MemoryEfficientSwish[_swish]/__mul___1" [id=109, label="__mul___IW[240, 240]_OW240_#102", style=filled, type=__mul__]; +"110 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/adaptive_avg_pool2d_0" [id=110, label="adaptive_avg_pool2d_IW240_OW240_#103", style=filled, type=adaptive_avg_pool2d]; +"111 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" [color=lightblue, id=111, label="conv2d_IW240_OW10_G12_#104", style=filled, type=conv2d]; +"112 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/MemoryEfficientSwish[_swish]/sigmoid_2" [id=112, label="sigmoid_IW10_OW10_#105", style=filled, type=sigmoid]; +"113 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/MemoryEfficientSwish[_swish]/__mul___2" [id=113, label="__mul___IW[10, 10]_OW10_#106", style=filled, type=__mul__]; +"114 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/NNCFUserConv2dStaticSamePadding[_se_expand]/conv2d_0" [color=lightblue, id=114, label="conv2d_IW10_OW240_G13_#107", style=filled, type=conv2d]; +"115 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/sigmoid_0" [id=115, label="sigmoid_IW240_OW240_#108", style=filled, type=sigmoid]; +"116 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/__mul___0" [id=116, label="__mul___IW[240, 240]_OW240_#109", style=filled, type=__mul__]; +"117 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/NNCFUserConv2dStaticSamePadding[_project_conv]/conv2d_0" [color=lightblue, id=117, label="conv2d_IW240_OW80_G18_#110", style=filled, type=conv2d]; +"118 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/NNCFBatchNorm2d[_bn2]/batch_norm_0" [id=118, label="batch_norm_IW80_OW80_#111", style=filled, type=batch_norm]; +"119 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/NNCFUserConv2dStaticSamePadding[_expand_conv]/conv2d_0" [color=lightblue, id=119, label="conv2d_IW80_OW480_G15_#112", style=filled, type=conv2d]; +"120 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/NNCFBatchNorm2d[_bn0]/batch_norm_0" [id=120, label="batch_norm_IW480_OW480_#113", style=filled, type=batch_norm]; +"121 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/MemoryEfficientSwish[_swish]/sigmoid_0" [id=121, label="sigmoid_IW480_OW480_#114", style=filled, type=sigmoid]; +"122 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/MemoryEfficientSwish[_swish]/__mul___0" [id=122, label="__mul___IW[480, 480]_OW480_#115", style=filled, type=__mul__]; +"123 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateInputs[1]/__getitem___0" [id=123, 
label="__getitem___#123", style=filled, type=__getitem__]; +"124 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ZeroPad2d[static_padding]/pad_0" [id=124, label="pad_IW480_OW480_#116", style=filled, type=pad]; +"125 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/conv2d_0" [color=purple, id=125, label="DW_conv2d_IW480_OW480_G15_#117", style=filled, type=conv2d]; +"126 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/NNCFBatchNorm2d[_bn1]/batch_norm_0" [id=126, label="batch_norm_IW480_OW480_#118", style=filled, type=batch_norm]; +"127 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/MemoryEfficientSwish[_swish]/sigmoid_1" [id=127, label="sigmoid_IW480_OW480_#119", style=filled, type=sigmoid]; +"128 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/MemoryEfficientSwish[_swish]/__mul___1" [id=128, label="__mul___IW[480, 480]_OW480_#120", style=filled, type=__mul__]; +"129 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/adaptive_avg_pool2d_0" [id=129, label="adaptive_avg_pool2d_IW480_OW480_#121", style=filled, type=adaptive_avg_pool2d]; +"130 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" [color=lightblue, id=130, label="conv2d_IW480_OW20_G14_#122", style=filled, type=conv2d]; +"131 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/MemoryEfficientSwish[_swish]/sigmoid_2" [id=131, label="sigmoid_IW20_OW20_#123", style=filled, type=sigmoid]; +"132 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/MemoryEfficientSwish[_swish]/__mul___2" [id=132, label="__mul___IW[20, 20]_OW20_#124", style=filled, type=__mul__]; +"133 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/NNCFUserConv2dStaticSamePadding[_se_expand]/conv2d_0" [color=lightblue, id=133, label="conv2d_IW20_OW480_G15_#125", style=filled, type=conv2d]; +"134 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/sigmoid_0" [id=134, label="sigmoid_IW480_OW480_#126", style=filled, type=sigmoid]; +"135 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/__mul___0" [id=135, label="__mul___IW[480, 480]_OW480_#127", style=filled, type=__mul__]; +"136 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/NNCFUserConv2dStaticSamePadding[_project_conv]/conv2d_0" [color=lightblue, id=136, label="conv2d_IW480_OW80_G18_#128", style=filled, type=conv2d]; +"137 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/NNCFBatchNorm2d[_bn2]/batch_norm_0" [id=137, label="batch_norm_IW80_OW80_#129", style=filled, type=batch_norm]; +"138 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/__add___0" [id=138, label="__add___IW[80, 80]_OW80_#130", style=filled, type=__add__]; +"139 EfficientNet/ModuleList[_blocks]/MBConvBlock[7]/NNCFUserConv2dStaticSamePadding[_expand_conv]/conv2d_0" [color=lightblue, id=139, label="conv2d_IW80_OW480_G17_#131", style=filled, type=conv2d]; +"140 EfficientNet/ModuleList[_blocks]/MBConvBlock[7]/NNCFBatchNorm2d[_bn0]/batch_norm_0" [id=140, label="batch_norm_IW480_OW480_#132", style=filled, type=batch_norm]; +"141 EfficientNet/ModuleList[_blocks]/MBConvBlock[7]/MemoryEfficientSwish[_swish]/sigmoid_0" [id=141, label="sigmoid_IW480_OW480_#133", style=filled, type=sigmoid]; +"142 EfficientNet/ModuleList[_blocks]/MBConvBlock[7]/MemoryEfficientSwish[_swish]/__mul___0" [id=142, label="__mul___IW[480, 480]_OW480_#134", style=filled, type=__mul__]; +"143 EfficientNet/ModuleList[_blocks]/MBConvBlock[7]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateInputs[1]/__getitem___0" [id=143, 
label="__getitem___#143", style=filled, type=__getitem__]; +"144 EfficientNet/ModuleList[_blocks]/MBConvBlock[7]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ZeroPad2d[static_padding]/pad_0" [id=144, label="pad_IW480_OW480_#135", style=filled, type=pad]; +"145 EfficientNet/ModuleList[_blocks]/MBConvBlock[7]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/conv2d_0" [color=purple, id=145, label="DW_conv2d_IW480_OW480_G17_#136", style=filled, type=conv2d]; +"146 EfficientNet/ModuleList[_blocks]/MBConvBlock[7]/NNCFBatchNorm2d[_bn1]/batch_norm_0" [id=146, label="batch_norm_IW480_OW480_#137", style=filled, type=batch_norm]; +"147 EfficientNet/ModuleList[_blocks]/MBConvBlock[7]/MemoryEfficientSwish[_swish]/sigmoid_1" [id=147, label="sigmoid_IW480_OW480_#138", style=filled, type=sigmoid]; +"148 EfficientNet/ModuleList[_blocks]/MBConvBlock[7]/MemoryEfficientSwish[_swish]/__mul___1" [id=148, label="__mul___IW[480, 480]_OW480_#139", style=filled, type=__mul__]; +"149 EfficientNet/ModuleList[_blocks]/MBConvBlock[7]/adaptive_avg_pool2d_0" [id=149, label="adaptive_avg_pool2d_IW480_OW480_#140", style=filled, type=adaptive_avg_pool2d]; +"150 EfficientNet/ModuleList[_blocks]/MBConvBlock[7]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" [color=lightblue, id=150, label="conv2d_IW480_OW20_G16_#141", style=filled, type=conv2d]; +"151 EfficientNet/ModuleList[_blocks]/MBConvBlock[7]/MemoryEfficientSwish[_swish]/sigmoid_2" [id=151, label="sigmoid_IW20_OW20_#142", style=filled, type=sigmoid]; +"152 EfficientNet/ModuleList[_blocks]/MBConvBlock[7]/MemoryEfficientSwish[_swish]/__mul___2" [id=152, label="__mul___IW[20, 20]_OW20_#143", style=filled, type=__mul__]; +"153 EfficientNet/ModuleList[_blocks]/MBConvBlock[7]/NNCFUserConv2dStaticSamePadding[_se_expand]/conv2d_0" [color=lightblue, id=153, label="conv2d_IW20_OW480_G17_#144", style=filled, type=conv2d]; +"154 EfficientNet/ModuleList[_blocks]/MBConvBlock[7]/sigmoid_0" [id=154, label="sigmoid_IW480_OW480_#145", style=filled, type=sigmoid]; +"155 EfficientNet/ModuleList[_blocks]/MBConvBlock[7]/__mul___0" [id=155, label="__mul___IW[480, 480]_OW480_#146", style=filled, type=__mul__]; +"156 EfficientNet/ModuleList[_blocks]/MBConvBlock[7]/NNCFUserConv2dStaticSamePadding[_project_conv]/conv2d_0" [color=lightblue, id=156, label="conv2d_IW480_OW80_G18_#147", style=filled, type=conv2d]; +"157 EfficientNet/ModuleList[_blocks]/MBConvBlock[7]/NNCFBatchNorm2d[_bn2]/batch_norm_0" [id=157, label="batch_norm_IW80_OW80_#148", style=filled, type=batch_norm]; +"158 EfficientNet/ModuleList[_blocks]/MBConvBlock[7]/__add___0" [id=158, label="__add___IW[80, 80]_OW80_#149", style=filled, type=__add__]; +"159 EfficientNet/ModuleList[_blocks]/MBConvBlock[8]/NNCFUserConv2dStaticSamePadding[_expand_conv]/conv2d_0" [color=lightblue, id=159, label="conv2d_IW80_OW480_G20_#150", style=filled, type=conv2d]; +"160 EfficientNet/ModuleList[_blocks]/MBConvBlock[8]/NNCFBatchNorm2d[_bn0]/batch_norm_0" [id=160, label="batch_norm_IW480_OW480_#151", style=filled, type=batch_norm]; +"161 EfficientNet/ModuleList[_blocks]/MBConvBlock[8]/MemoryEfficientSwish[_swish]/sigmoid_0" [id=161, label="sigmoid_IW480_OW480_#152", style=filled, type=sigmoid]; +"162 EfficientNet/ModuleList[_blocks]/MBConvBlock[8]/MemoryEfficientSwish[_swish]/__mul___0" [id=162, label="__mul___IW[480, 480]_OW480_#153", style=filled, type=__mul__]; +"163 EfficientNet/ModuleList[_blocks]/MBConvBlock[8]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateInputs[1]/__getitem___0" [id=163, 
label="__getitem___#163", style=filled, type=__getitem__]; +"164 EfficientNet/ModuleList[_blocks]/MBConvBlock[8]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ZeroPad2d[static_padding]/pad_0" [id=164, label="pad_IW480_OW480_#154", style=filled, type=pad]; +"165 EfficientNet/ModuleList[_blocks]/MBConvBlock[8]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/conv2d_0" [color=purple, id=165, label="DW_conv2d_IW480_OW480_G20_#155", style=filled, type=conv2d]; +"166 EfficientNet/ModuleList[_blocks]/MBConvBlock[8]/NNCFBatchNorm2d[_bn1]/batch_norm_0" [id=166, label="batch_norm_IW480_OW480_#156", style=filled, type=batch_norm]; +"167 EfficientNet/ModuleList[_blocks]/MBConvBlock[8]/MemoryEfficientSwish[_swish]/sigmoid_1" [id=167, label="sigmoid_IW480_OW480_#157", style=filled, type=sigmoid]; +"168 EfficientNet/ModuleList[_blocks]/MBConvBlock[8]/MemoryEfficientSwish[_swish]/__mul___1" [id=168, label="__mul___IW[480, 480]_OW480_#158", style=filled, type=__mul__]; +"169 EfficientNet/ModuleList[_blocks]/MBConvBlock[8]/adaptive_avg_pool2d_0" [id=169, label="adaptive_avg_pool2d_IW480_OW480_#159", style=filled, type=adaptive_avg_pool2d]; +"170 EfficientNet/ModuleList[_blocks]/MBConvBlock[8]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" [color=lightblue, id=170, label="conv2d_IW480_OW20_G19_#160", style=filled, type=conv2d]; +"171 EfficientNet/ModuleList[_blocks]/MBConvBlock[8]/MemoryEfficientSwish[_swish]/sigmoid_2" [id=171, label="sigmoid_IW20_OW20_#161", style=filled, type=sigmoid]; +"172 EfficientNet/ModuleList[_blocks]/MBConvBlock[8]/MemoryEfficientSwish[_swish]/__mul___2" [id=172, label="__mul___IW[20, 20]_OW20_#162", style=filled, type=__mul__]; +"173 EfficientNet/ModuleList[_blocks]/MBConvBlock[8]/NNCFUserConv2dStaticSamePadding[_se_expand]/conv2d_0" [color=lightblue, id=173, label="conv2d_IW20_OW480_G20_#163", style=filled, type=conv2d]; +"174 EfficientNet/ModuleList[_blocks]/MBConvBlock[8]/sigmoid_0" [id=174, label="sigmoid_IW480_OW480_#164", style=filled, type=sigmoid]; +"175 EfficientNet/ModuleList[_blocks]/MBConvBlock[8]/__mul___0" [id=175, label="__mul___IW[480, 480]_OW480_#165", style=filled, type=__mul__]; +"176 EfficientNet/ModuleList[_blocks]/MBConvBlock[8]/NNCFUserConv2dStaticSamePadding[_project_conv]/conv2d_0" [color=lightblue, id=176, label="conv2d_IW480_OW112_G25_#166", style=filled, type=conv2d]; +"177 EfficientNet/ModuleList[_blocks]/MBConvBlock[8]/NNCFBatchNorm2d[_bn2]/batch_norm_0" [id=177, label="batch_norm_IW112_OW112_#167", style=filled, type=batch_norm]; +"178 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFUserConv2dStaticSamePadding[_expand_conv]/conv2d_0" [color=lightblue, id=178, label="conv2d_IW112_OW672_G22_#168", style=filled, type=conv2d]; +"179 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFBatchNorm2d[_bn0]/batch_norm_0" [id=179, label="batch_norm_IW672_OW672_#169", style=filled, type=batch_norm]; +"180 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/MemoryEfficientSwish[_swish]/sigmoid_0" [id=180, label="sigmoid_IW672_OW672_#170", style=filled, type=sigmoid]; +"181 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/MemoryEfficientSwish[_swish]/__mul___0" [id=181, label="__mul___IW[672, 672]_OW672_#171", style=filled, type=__mul__]; +"182 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateInputs[1]/__getitem___0" [id=182, label="__getitem___#182", style=filled, type=__getitem__]; +"183 
EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ZeroPad2d[static_padding]/pad_0" [id=183, label="pad_IW672_OW672_#172", style=filled, type=pad]; +"184 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/conv2d_0" [color=purple, id=184, label="DW_conv2d_IW672_OW672_G22_#173", style=filled, type=conv2d]; +"185 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFBatchNorm2d[_bn1]/batch_norm_0" [id=185, label="batch_norm_IW672_OW672_#174", style=filled, type=batch_norm]; +"186 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/MemoryEfficientSwish[_swish]/sigmoid_1" [id=186, label="sigmoid_IW672_OW672_#175", style=filled, type=sigmoid]; +"187 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/MemoryEfficientSwish[_swish]/__mul___1" [id=187, label="__mul___IW[672, 672]_OW672_#176", style=filled, type=__mul__]; +"188 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/adaptive_avg_pool2d_0" [id=188, label="adaptive_avg_pool2d_IW672_OW672_#177", style=filled, type=adaptive_avg_pool2d]; +"189 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" [color=lightblue, id=189, label="conv2d_IW672_OW28_G21_#178", style=filled, type=conv2d]; +"190 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/MemoryEfficientSwish[_swish]/sigmoid_2" [id=190, label="sigmoid_IW28_OW28_#179", style=filled, type=sigmoid]; +"191 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/MemoryEfficientSwish[_swish]/__mul___2" [id=191, label="__mul___IW[28, 28]_OW28_#180", style=filled, type=__mul__]; +"192 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFUserConv2dStaticSamePadding[_se_expand]/conv2d_0" [color=lightblue, id=192, label="conv2d_IW28_OW672_G22_#181", style=filled, type=conv2d]; +"193 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/sigmoid_0" [id=193, label="sigmoid_IW672_OW672_#182", style=filled, type=sigmoid]; +"194 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/__mul___0" [id=194, label="__mul___IW[672, 672]_OW672_#183", style=filled, type=__mul__]; +"195 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFUserConv2dStaticSamePadding[_project_conv]/conv2d_0" [color=lightblue, id=195, label="conv2d_IW672_OW112_G25_#184", style=filled, type=conv2d]; +"196 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFBatchNorm2d[_bn2]/batch_norm_0" [id=196, label="batch_norm_IW112_OW112_#185", style=filled, type=batch_norm]; +"197 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/__add___0" [id=197, label="__add___IW[112, 112]_OW112_#186", style=filled, type=__add__]; +"198 EfficientNet/ModuleList[_blocks]/MBConvBlock[10]/NNCFUserConv2dStaticSamePadding[_expand_conv]/conv2d_0" [color=lightblue, id=198, label="conv2d_IW112_OW672_G24_#187", style=filled, type=conv2d]; +"199 EfficientNet/ModuleList[_blocks]/MBConvBlock[10]/NNCFBatchNorm2d[_bn0]/batch_norm_0" [id=199, label="batch_norm_IW672_OW672_#188", style=filled, type=batch_norm]; +"200 EfficientNet/ModuleList[_blocks]/MBConvBlock[10]/MemoryEfficientSwish[_swish]/sigmoid_0" [id=200, label="sigmoid_IW672_OW672_#189", style=filled, type=sigmoid]; +"201 EfficientNet/ModuleList[_blocks]/MBConvBlock[10]/MemoryEfficientSwish[_swish]/__mul___0" [id=201, label="__mul___IW[672, 672]_OW672_#190", style=filled, type=__mul__]; +"202 EfficientNet/ModuleList[_blocks]/MBConvBlock[10]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateInputs[1]/__getitem___0" [id=202, label="__getitem___#202", style=filled, type=__getitem__]; +"203 
EfficientNet/ModuleList[_blocks]/MBConvBlock[10]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ZeroPad2d[static_padding]/pad_0" [id=203, label="pad_IW672_OW672_#191", style=filled, type=pad]; +"204 EfficientNet/ModuleList[_blocks]/MBConvBlock[10]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/conv2d_0" [color=purple, id=204, label="DW_conv2d_IW672_OW672_G24_#192", style=filled, type=conv2d]; +"205 EfficientNet/ModuleList[_blocks]/MBConvBlock[10]/NNCFBatchNorm2d[_bn1]/batch_norm_0" [id=205, label="batch_norm_IW672_OW672_#193", style=filled, type=batch_norm]; +"206 EfficientNet/ModuleList[_blocks]/MBConvBlock[10]/MemoryEfficientSwish[_swish]/sigmoid_1" [id=206, label="sigmoid_IW672_OW672_#194", style=filled, type=sigmoid]; +"207 EfficientNet/ModuleList[_blocks]/MBConvBlock[10]/MemoryEfficientSwish[_swish]/__mul___1" [id=207, label="__mul___IW[672, 672]_OW672_#195", style=filled, type=__mul__]; +"208 EfficientNet/ModuleList[_blocks]/MBConvBlock[10]/adaptive_avg_pool2d_0" [id=208, label="adaptive_avg_pool2d_IW672_OW672_#196", style=filled, type=adaptive_avg_pool2d]; +"209 EfficientNet/ModuleList[_blocks]/MBConvBlock[10]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" [color=lightblue, id=209, label="conv2d_IW672_OW28_G23_#197", style=filled, type=conv2d]; +"210 EfficientNet/ModuleList[_blocks]/MBConvBlock[10]/MemoryEfficientSwish[_swish]/sigmoid_2" [id=210, label="sigmoid_IW28_OW28_#198", style=filled, type=sigmoid]; +"211 EfficientNet/ModuleList[_blocks]/MBConvBlock[10]/MemoryEfficientSwish[_swish]/__mul___2" [id=211, label="__mul___IW[28, 28]_OW28_#199", style=filled, type=__mul__]; +"212 EfficientNet/ModuleList[_blocks]/MBConvBlock[10]/NNCFUserConv2dStaticSamePadding[_se_expand]/conv2d_0" [color=lightblue, id=212, label="conv2d_IW28_OW672_G24_#200", style=filled, type=conv2d]; +"213 EfficientNet/ModuleList[_blocks]/MBConvBlock[10]/sigmoid_0" [id=213, label="sigmoid_IW672_OW672_#201", style=filled, type=sigmoid]; +"214 EfficientNet/ModuleList[_blocks]/MBConvBlock[10]/__mul___0" [id=214, label="__mul___IW[672, 672]_OW672_#202", style=filled, type=__mul__]; +"215 EfficientNet/ModuleList[_blocks]/MBConvBlock[10]/NNCFUserConv2dStaticSamePadding[_project_conv]/conv2d_0" [color=lightblue, id=215, label="conv2d_IW672_OW112_G25_#203", style=filled, type=conv2d]; +"216 EfficientNet/ModuleList[_blocks]/MBConvBlock[10]/NNCFBatchNorm2d[_bn2]/batch_norm_0" [id=216, label="batch_norm_IW112_OW112_#204", style=filled, type=batch_norm]; +"217 EfficientNet/ModuleList[_blocks]/MBConvBlock[10]/__add___0" [id=217, label="__add___IW[112, 112]_OW112_#205", style=filled, type=__add__]; +"218 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFUserConv2dStaticSamePadding[_expand_conv]/conv2d_0" [color=lightblue, id=218, label="conv2d_IW112_OW672_G27_#206", style=filled, type=conv2d]; +"219 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFBatchNorm2d[_bn0]/batch_norm_0" [id=219, label="batch_norm_IW672_OW672_#207", style=filled, type=batch_norm]; +"220 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/MemoryEfficientSwish[_swish]/sigmoid_0" [id=220, label="sigmoid_IW672_OW672_#208", style=filled, type=sigmoid]; +"221 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/MemoryEfficientSwish[_swish]/__mul___0" [id=221, label="__mul___IW[672, 672]_OW672_#209", style=filled, type=__mul__]; +"222 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateInputs[1]/__getitem___0" [id=222, label="__getitem___#222", style=filled, type=__getitem__]; 
+"223 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ZeroPad2d[static_padding]/pad_0" [id=223, label="pad_IW672_OW672_#210", style=filled, type=pad]; +"224 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/conv2d_0" [color=purple, id=224, label="DW_conv2d_IW672_OW672_G27_#211", style=filled, type=conv2d]; +"225 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFBatchNorm2d[_bn1]/batch_norm_0" [id=225, label="batch_norm_IW672_OW672_#212", style=filled, type=batch_norm]; +"226 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/MemoryEfficientSwish[_swish]/sigmoid_1" [id=226, label="sigmoid_IW672_OW672_#213", style=filled, type=sigmoid]; +"227 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/MemoryEfficientSwish[_swish]/__mul___1" [id=227, label="__mul___IW[672, 672]_OW672_#214", style=filled, type=__mul__]; +"228 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/adaptive_avg_pool2d_0" [id=228, label="adaptive_avg_pool2d_IW672_OW672_#215", style=filled, type=adaptive_avg_pool2d]; +"229 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" [color=lightblue, id=229, label="conv2d_IW672_OW28_G26_#216", style=filled, type=conv2d]; +"230 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/MemoryEfficientSwish[_swish]/sigmoid_2" [id=230, label="sigmoid_IW28_OW28_#217", style=filled, type=sigmoid]; +"231 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/MemoryEfficientSwish[_swish]/__mul___2" [id=231, label="__mul___IW[28, 28]_OW28_#218", style=filled, type=__mul__]; +"232 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFUserConv2dStaticSamePadding[_se_expand]/conv2d_0" [color=lightblue, id=232, label="conv2d_IW28_OW672_G27_#219", style=filled, type=conv2d]; +"233 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/sigmoid_0" [id=233, label="sigmoid_IW672_OW672_#220", style=filled, type=sigmoid]; +"234 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/__mul___0" [id=234, label="__mul___IW[672, 672]_OW672_#221", style=filled, type=__mul__]; +"235 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFUserConv2dStaticSamePadding[_project_conv]/conv2d_0" [color=lightblue, id=235, label="conv2d_IW672_OW192_G34_#222", style=filled, type=conv2d]; +"236 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFBatchNorm2d[_bn2]/batch_norm_0" [id=236, label="batch_norm_IW192_OW192_#223", style=filled, type=batch_norm]; +"237 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFUserConv2dStaticSamePadding[_expand_conv]/conv2d_0" [color=lightblue, id=237, label="conv2d_IW192_OW1152_G29_#224", style=filled, type=conv2d]; +"238 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFBatchNorm2d[_bn0]/batch_norm_0" [id=238, label="batch_norm_IW1152_OW1152_#225", style=filled, type=batch_norm]; +"239 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/MemoryEfficientSwish[_swish]/sigmoid_0" [id=239, label="sigmoid_IW1152_OW1152_#226", style=filled, type=sigmoid]; +"240 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/MemoryEfficientSwish[_swish]/__mul___0" [id=240, label="__mul___IW[1152, 1152]_OW1152_#227", style=filled, type=__mul__]; +"241 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateInputs[1]/__getitem___0" [id=241, label="__getitem___#241", style=filled, type=__getitem__]; +"242 
EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ZeroPad2d[static_padding]/pad_0" [id=242, label="pad_IW1152_OW1152_#228", style=filled, type=pad]; +"243 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/conv2d_0" [color=purple, id=243, label="DW_conv2d_IW1152_OW1152_G29_#229", style=filled, type=conv2d]; +"244 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFBatchNorm2d[_bn1]/batch_norm_0" [id=244, label="batch_norm_IW1152_OW1152_#230", style=filled, type=batch_norm]; +"245 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/MemoryEfficientSwish[_swish]/sigmoid_1" [id=245, label="sigmoid_IW1152_OW1152_#231", style=filled, type=sigmoid]; +"246 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/MemoryEfficientSwish[_swish]/__mul___1" [id=246, label="__mul___IW[1152, 1152]_OW1152_#232", style=filled, type=__mul__]; +"247 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/adaptive_avg_pool2d_0" [id=247, label="adaptive_avg_pool2d_IW1152_OW1152_#233", style=filled, type=adaptive_avg_pool2d]; +"248 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" [color=lightblue, id=248, label="conv2d_IW1152_OW48_G28_#234", style=filled, type=conv2d]; +"249 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/MemoryEfficientSwish[_swish]/sigmoid_2" [id=249, label="sigmoid_IW48_OW48_#235", style=filled, type=sigmoid]; +"250 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/MemoryEfficientSwish[_swish]/__mul___2" [id=250, label="__mul___IW[48, 48]_OW48_#236", style=filled, type=__mul__]; +"251 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFUserConv2dStaticSamePadding[_se_expand]/conv2d_0" [color=lightblue, id=251, label="conv2d_IW48_OW1152_G29_#237", style=filled, type=conv2d]; +"252 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/sigmoid_0" [id=252, label="sigmoid_IW1152_OW1152_#238", style=filled, type=sigmoid]; +"253 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/__mul___0" [id=253, label="__mul___IW[1152, 1152]_OW1152_#239", style=filled, type=__mul__]; +"254 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFUserConv2dStaticSamePadding[_project_conv]/conv2d_0" [color=lightblue, id=254, label="conv2d_IW1152_OW192_G34_#240", style=filled, type=conv2d]; +"255 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFBatchNorm2d[_bn2]/batch_norm_0" [id=255, label="batch_norm_IW192_OW192_#241", style=filled, type=batch_norm]; +"256 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/__add___0" [id=256, label="__add___IW[192, 192]_OW192_#242", style=filled, type=__add__]; +"257 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFUserConv2dStaticSamePadding[_expand_conv]/conv2d_0" [color=lightblue, id=257, label="conv2d_IW192_OW1152_G31_#243", style=filled, type=conv2d]; +"258 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFBatchNorm2d[_bn0]/batch_norm_0" [id=258, label="batch_norm_IW1152_OW1152_#244", style=filled, type=batch_norm]; +"259 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/MemoryEfficientSwish[_swish]/sigmoid_0" [id=259, label="sigmoid_IW1152_OW1152_#245", style=filled, type=sigmoid]; +"260 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/MemoryEfficientSwish[_swish]/__mul___0" [id=260, label="__mul___IW[1152, 1152]_OW1152_#246", style=filled, type=__mul__]; +"261 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateInputs[1]/__getitem___0" [id=261, label="__getitem___#261", 
style=filled, type=__getitem__]; +"262 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ZeroPad2d[static_padding]/pad_0" [id=262, label="pad_IW1152_OW1152_#247", style=filled, type=pad]; +"263 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/conv2d_0" [color=purple, id=263, label="DW_conv2d_IW1152_OW1152_G31_#248", style=filled, type=conv2d]; +"264 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFBatchNorm2d[_bn1]/batch_norm_0" [id=264, label="batch_norm_IW1152_OW1152_#249", style=filled, type=batch_norm]; +"265 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/MemoryEfficientSwish[_swish]/sigmoid_1" [id=265, label="sigmoid_IW1152_OW1152_#250", style=filled, type=sigmoid]; +"266 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/MemoryEfficientSwish[_swish]/__mul___1" [id=266, label="__mul___IW[1152, 1152]_OW1152_#251", style=filled, type=__mul__]; +"267 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/adaptive_avg_pool2d_0" [id=267, label="adaptive_avg_pool2d_IW1152_OW1152_#252", style=filled, type=adaptive_avg_pool2d]; +"268 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" [color=lightblue, id=268, label="conv2d_IW1152_OW48_G30_#253", style=filled, type=conv2d]; +"269 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/MemoryEfficientSwish[_swish]/sigmoid_2" [id=269, label="sigmoid_IW48_OW48_#254", style=filled, type=sigmoid]; +"270 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/MemoryEfficientSwish[_swish]/__mul___2" [id=270, label="__mul___IW[48, 48]_OW48_#255", style=filled, type=__mul__]; +"271 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFUserConv2dStaticSamePadding[_se_expand]/conv2d_0" [color=lightblue, id=271, label="conv2d_IW48_OW1152_G31_#256", style=filled, type=conv2d]; +"272 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/sigmoid_0" [id=272, label="sigmoid_IW1152_OW1152_#257", style=filled, type=sigmoid]; +"273 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/__mul___0" [id=273, label="__mul___IW[1152, 1152]_OW1152_#258", style=filled, type=__mul__]; +"274 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFUserConv2dStaticSamePadding[_project_conv]/conv2d_0" [color=lightblue, id=274, label="conv2d_IW1152_OW192_G34_#259", style=filled, type=conv2d]; +"275 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFBatchNorm2d[_bn2]/batch_norm_0" [id=275, label="batch_norm_IW192_OW192_#260", style=filled, type=batch_norm]; +"276 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/__add___0" [id=276, label="__add___IW[192, 192]_OW192_#261", style=filled, type=__add__]; +"277 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/NNCFUserConv2dStaticSamePadding[_expand_conv]/conv2d_0" [color=lightblue, id=277, label="conv2d_IW192_OW1152_G33_#262", style=filled, type=conv2d]; +"278 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/NNCFBatchNorm2d[_bn0]/batch_norm_0" [id=278, label="batch_norm_IW1152_OW1152_#263", style=filled, type=batch_norm]; +"279 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/MemoryEfficientSwish[_swish]/sigmoid_0" [id=279, label="sigmoid_IW1152_OW1152_#264", style=filled, type=sigmoid]; +"280 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/MemoryEfficientSwish[_swish]/__mul___0" [id=280, label="__mul___IW[1152, 1152]_OW1152_#265", style=filled, type=__mul__]; +"281 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateInputs[1]/__getitem___0" 
[id=281, label="__getitem___#281", style=filled, type=__getitem__]; +"282 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ZeroPad2d[static_padding]/pad_0" [id=282, label="pad_IW1152_OW1152_#266", style=filled, type=pad]; +"283 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/conv2d_0" [color=purple, id=283, label="DW_conv2d_IW1152_OW1152_G33_#267", style=filled, type=conv2d]; +"284 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/NNCFBatchNorm2d[_bn1]/batch_norm_0" [id=284, label="batch_norm_IW1152_OW1152_#268", style=filled, type=batch_norm]; +"285 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/MemoryEfficientSwish[_swish]/sigmoid_1" [id=285, label="sigmoid_IW1152_OW1152_#269", style=filled, type=sigmoid]; +"286 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/MemoryEfficientSwish[_swish]/__mul___1" [id=286, label="__mul___IW[1152, 1152]_OW1152_#270", style=filled, type=__mul__]; +"287 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/adaptive_avg_pool2d_0" [id=287, label="adaptive_avg_pool2d_IW1152_OW1152_#271", style=filled, type=adaptive_avg_pool2d]; +"288 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" [color=lightblue, id=288, label="conv2d_IW1152_OW48_G32_#272", style=filled, type=conv2d]; +"289 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/MemoryEfficientSwish[_swish]/sigmoid_2" [id=289, label="sigmoid_IW48_OW48_#273", style=filled, type=sigmoid]; +"290 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/MemoryEfficientSwish[_swish]/__mul___2" [id=290, label="__mul___IW[48, 48]_OW48_#274", style=filled, type=__mul__]; +"291 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/NNCFUserConv2dStaticSamePadding[_se_expand]/conv2d_0" [color=lightblue, id=291, label="conv2d_IW48_OW1152_G33_#275", style=filled, type=conv2d]; +"292 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/sigmoid_0" [id=292, label="sigmoid_IW1152_OW1152_#276", style=filled, type=sigmoid]; +"293 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/__mul___0" [id=293, label="__mul___IW[1152, 1152]_OW1152_#277", style=filled, type=__mul__]; +"294 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/NNCFUserConv2dStaticSamePadding[_project_conv]/conv2d_0" [color=lightblue, id=294, label="conv2d_IW1152_OW192_G34_#278", style=filled, type=conv2d]; +"295 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/NNCFBatchNorm2d[_bn2]/batch_norm_0" [id=295, label="batch_norm_IW192_OW192_#279", style=filled, type=batch_norm]; +"296 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/__add___0" [id=296, label="__add___IW[192, 192]_OW192_#280", style=filled, type=__add__]; +"297 EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/NNCFUserConv2dStaticSamePadding[_expand_conv]/conv2d_0" [color=lightblue, id=297, label="conv2d_IW192_OW1152_G36_#281", style=filled, type=conv2d]; +"298 EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/NNCFBatchNorm2d[_bn0]/batch_norm_0" [id=298, label="batch_norm_IW1152_OW1152_#282", style=filled, type=batch_norm]; +"299 EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/MemoryEfficientSwish[_swish]/sigmoid_0" [id=299, label="sigmoid_IW1152_OW1152_#283", style=filled, type=sigmoid]; +"300 EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/MemoryEfficientSwish[_swish]/__mul___0" [id=300, label="__mul___IW[1152, 1152]_OW1152_#284", style=filled, type=__mul__]; +"301 
EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateInputs[1]/__getitem___0" [id=301, label="__getitem___#301", style=filled, type=__getitem__]; +"302 EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ZeroPad2d[static_padding]/pad_0" [id=302, label="pad_IW1152_OW1152_#285", style=filled, type=pad]; +"303 EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/conv2d_0" [color=purple, id=303, label="DW_conv2d_IW1152_OW1152_G36_#286", style=filled, type=conv2d]; +"304 EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/NNCFBatchNorm2d[_bn1]/batch_norm_0" [id=304, label="batch_norm_IW1152_OW1152_#287", style=filled, type=batch_norm]; +"305 EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/MemoryEfficientSwish[_swish]/sigmoid_1" [id=305, label="sigmoid_IW1152_OW1152_#288", style=filled, type=sigmoid]; +"306 EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/MemoryEfficientSwish[_swish]/__mul___1" [id=306, label="__mul___IW[1152, 1152]_OW1152_#289", style=filled, type=__mul__]; +"307 EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/adaptive_avg_pool2d_0" [id=307, label="adaptive_avg_pool2d_IW1152_OW1152_#290", style=filled, type=adaptive_avg_pool2d]; +"308 EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" [color=lightblue, id=308, label="conv2d_IW1152_OW48_G35_#291", style=filled, type=conv2d]; +"309 EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/MemoryEfficientSwish[_swish]/sigmoid_2" [id=309, label="sigmoid_IW48_OW48_#292", style=filled, type=sigmoid]; +"310 EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/MemoryEfficientSwish[_swish]/__mul___2" [id=310, label="__mul___IW[48, 48]_OW48_#293", style=filled, type=__mul__]; +"311 EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/NNCFUserConv2dStaticSamePadding[_se_expand]/conv2d_0" [color=lightblue, id=311, label="conv2d_IW48_OW1152_G36_#294", style=filled, type=conv2d]; +"312 EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/sigmoid_0" [id=312, label="sigmoid_IW1152_OW1152_#295", style=filled, type=sigmoid]; +"313 EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/__mul___0" [id=313, label="__mul___IW[1152, 1152]_OW1152_#296", style=filled, type=__mul__]; +"314 EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/NNCFUserConv2dStaticSamePadding[_project_conv]/conv2d_0" [color=lightblue, id=314, label="conv2d_IW1152_OW320_G39_#297", style=filled, type=conv2d]; +"315 EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/NNCFBatchNorm2d[_bn2]/batch_norm_0" [id=315, label="batch_norm_IW320_OW320_#298", style=filled, type=batch_norm]; +"316 EfficientNet/NNCFUserConv2dStaticSamePadding[_conv_head]/conv2d_0" [color=lightblue, id=316, label="conv2d_IW320_OW1280_G37_#299", style=filled, type=conv2d]; +"317 EfficientNet/NNCFBatchNorm2d[_bn1]/batch_norm_0" [id=317, label="batch_norm_IW1280_OW1280_#300", style=filled, type=batch_norm]; +"318 EfficientNet/MemoryEfficientSwish[_swish]/sigmoid_1" [id=318, label="sigmoid_IW1280_OW1280_#301", style=filled, type=sigmoid]; +"319 EfficientNet/MemoryEfficientSwish[_swish]/__mul___1" [id=319, label="__mul___IW[1280, 1280]_OW1280_#302", style=filled, type=__mul__]; +"320 EfficientNet/AdaptiveAvgPool2d[_avg_pooling]/adaptive_avg_pool2d_0" [id=320, label="adaptive_avg_pool2d_IW1280_OW1280_#303", style=filled, type=adaptive_avg_pool2d]; +"321 EfficientNet/flatten_0" [id=321, label="flatten_IW1280_OW1280_#304", style=filled, 
type=flatten]; +"322 EfficientNet/Dropout[_dropout]/dropout_0" [id=322, label="dropout_IW1280_OW1280_#305", style=filled, type=dropout]; +"323 EfficientNet/NNCFLinear[_fc]/linear_0" [id=323, label="linear_IW1280_#306", style=filled, type=linear]; +"324 /nncf_model_output_0" [id=324, label="nncf_model_output_#307", style=filled, type=nncf_model_output]; "0 /nncf_model_input_0" -> "1 EfficientNet/NNCFUserConv2dStaticSamePadding[_conv_stem]/ModuleDict[pre_ops]/UpdateInputs[1]/__getitem___0" [label="(1, 3, 240, 240)", style=solid]; "1 EfficientNet/NNCFUserConv2dStaticSamePadding[_conv_stem]/ModuleDict[pre_ops]/UpdateInputs[1]/__getitem___0" -> "2 EfficientNet/NNCFUserConv2dStaticSamePadding[_conv_stem]/ZeroPad2d[static_padding]/pad_0" [label="(1, 3, 240, 240)", style=solid]; "2 EfficientNet/NNCFUserConv2dStaticSamePadding[_conv_stem]/ZeroPad2d[static_padding]/pad_0" -> "3 EfficientNet/NNCFUserConv2dStaticSamePadding[_conv_stem]/conv2d_0" [label="(1, 3, 241, 241)", style=solid]; @@ -412,332 +403,323 @@ strict digraph { "62 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/NNCFBatchNorm2d[_bn0]/batch_norm_0" -> "64 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/MemoryEfficientSwish[_swish]/__mul___0" [label="(1, 144, 60, 60)", style=solid]; "63 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/MemoryEfficientSwish[_swish]/sigmoid_0" -> "64 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/MemoryEfficientSwish[_swish]/__mul___0" [label="(1, 144, 60, 60)", style=solid]; "64 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/MemoryEfficientSwish[_swish]/__mul___0" -> "65 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateInputs[1]/__getitem___0" [label="(1, 144, 60, 60)", style=solid]; -"65 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateInputs[1]/__getitem___0" -> "69 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ZeroPad2d[static_padding]/pad_0" [label="(1, 144, 58, 58)", style=solid]; -"66 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateWeight[3]/ElasticKernelConv2DOp[op]/linear_0" -> "67 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateWeight[3]/ElasticKernelConv2DOp[op]/view_0" [label="(144, 9)", style=solid]; -"67 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateWeight[3]/ElasticKernelConv2DOp[op]/view_0" -> "68 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateWeight[3]/ElasticKernelConv2DOp[op]/view_1" [label="(144, 1, 9)", style=solid]; -"68 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateWeight[3]/ElasticKernelConv2DOp[op]/view_1" -> "70 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/conv2d_0" [label="(144, 1, 3, 3)", style=solid]; -"69 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ZeroPad2d[static_padding]/pad_0" -> "70 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/conv2d_0" [label="(1, 144, 61, 61)", style=solid]; -"70 
EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/conv2d_0" -> "71 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/NNCFBatchNorm2d[_bn1]/batch_norm_0" [label="(1, 144, 30, 30)", style=solid]; -"71 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/NNCFBatchNorm2d[_bn1]/batch_norm_0" -> "72 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/MemoryEfficientSwish[_swish]/sigmoid_1" [label="(1, 144, 30, 30)", style=solid]; -"71 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/NNCFBatchNorm2d[_bn1]/batch_norm_0" -> "73 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/MemoryEfficientSwish[_swish]/__mul___1" [label="(1, 144, 30, 30)", style=solid]; -"72 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/MemoryEfficientSwish[_swish]/sigmoid_1" -> "73 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/MemoryEfficientSwish[_swish]/__mul___1" [label="(1, 144, 30, 30)", style=solid]; -"73 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/MemoryEfficientSwish[_swish]/__mul___1" -> "74 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/adaptive_avg_pool2d_0" [label="(1, 144, 30, 30)", style=solid]; -"73 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/MemoryEfficientSwish[_swish]/__mul___1" -> "80 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/__mul___0" [label="(1, 144, 30, 30)", style=solid]; -"74 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/adaptive_avg_pool2d_0" -> "75 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" [label="(1, 144, 1, 1)", style=solid]; -"75 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" -> "76 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/MemoryEfficientSwish[_swish]/sigmoid_2" [label="(1, 6, 1, 1)", style=solid]; -"75 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" -> "77 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/MemoryEfficientSwish[_swish]/__mul___2" [label="(1, 6, 1, 1)", style=solid]; -"76 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/MemoryEfficientSwish[_swish]/sigmoid_2" -> "77 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/MemoryEfficientSwish[_swish]/__mul___2" [label="(1, 6, 1, 1)", style=solid]; -"77 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/MemoryEfficientSwish[_swish]/__mul___2" -> "78 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/NNCFUserConv2dStaticSamePadding[_se_expand]/conv2d_0" [label="(1, 6, 1, 1)", style=solid]; -"78 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/NNCFUserConv2dStaticSamePadding[_se_expand]/conv2d_0" -> "79 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/sigmoid_0" [label="(1, 144, 1, 1)", style=solid]; -"79 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/sigmoid_0" -> "80 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/__mul___0" [label="(1, 144, 1, 1)", style=solid]; -"80 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/__mul___0" -> "81 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/NNCFUserConv2dStaticSamePadding[_project_conv]/conv2d_0" [label="(1, 144, 30, 30)", style=solid]; -"81 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/NNCFUserConv2dStaticSamePadding[_project_conv]/conv2d_0" -> "82 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/NNCFBatchNorm2d[_bn2]/batch_norm_0" [label="(1, 40, 30, 30)", style=solid]; -"82 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/NNCFBatchNorm2d[_bn2]/batch_norm_0" -> "83 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/NNCFUserConv2dStaticSamePadding[_expand_conv]/conv2d_0" [label="(1, 40, 30, 
30)", style=solid]; -"82 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/NNCFBatchNorm2d[_bn2]/batch_norm_0" -> "102 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/__add___0" [label="(1, 40, 30, 30)", style=solid]; -"83 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/NNCFUserConv2dStaticSamePadding[_expand_conv]/conv2d_0" -> "84 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/NNCFBatchNorm2d[_bn0]/batch_norm_0" [label="(1, 240, 30, 30)", style=solid]; -"84 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/NNCFBatchNorm2d[_bn0]/batch_norm_0" -> "85 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/MemoryEfficientSwish[_swish]/sigmoid_0" [label="(1, 240, 30, 30)", style=solid]; -"84 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/NNCFBatchNorm2d[_bn0]/batch_norm_0" -> "86 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/MemoryEfficientSwish[_swish]/__mul___0" [label="(1, 240, 30, 30)", style=solid]; -"85 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/MemoryEfficientSwish[_swish]/sigmoid_0" -> "86 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/MemoryEfficientSwish[_swish]/__mul___0" [label="(1, 240, 30, 30)", style=solid]; -"86 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/MemoryEfficientSwish[_swish]/__mul___0" -> "87 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateInputs[1]/__getitem___0" [label="(1, 240, 30, 30)", style=solid]; -"87 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateInputs[1]/__getitem___0" -> "88 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ZeroPad2d[static_padding]/pad_0" [label="(1, 240, 30, 30)", style=solid]; -"88 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ZeroPad2d[static_padding]/pad_0" -> "89 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/conv2d_0" [label="(1, 240, 34, 34)", style=solid]; -"89 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/conv2d_0" -> "90 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/NNCFBatchNorm2d[_bn1]/batch_norm_0" [label="(1, 240, 30, 30)", style=solid]; -"90 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/NNCFBatchNorm2d[_bn1]/batch_norm_0" -> "91 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/MemoryEfficientSwish[_swish]/sigmoid_1" [label="(1, 240, 30, 30)", style=solid]; -"90 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/NNCFBatchNorm2d[_bn1]/batch_norm_0" -> "92 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/MemoryEfficientSwish[_swish]/__mul___1" [label="(1, 240, 30, 30)", style=solid]; -"91 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/MemoryEfficientSwish[_swish]/sigmoid_1" -> "92 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/MemoryEfficientSwish[_swish]/__mul___1" [label="(1, 240, 30, 30)", style=solid]; -"92 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/MemoryEfficientSwish[_swish]/__mul___1" -> "93 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/adaptive_avg_pool2d_0" [label="(1, 240, 30, 30)", style=solid]; -"92 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/MemoryEfficientSwish[_swish]/__mul___1" -> "99 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/__mul___0" [label="(1, 240, 30, 30)", style=solid]; -"93 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/adaptive_avg_pool2d_0" -> "94 
EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" [label="(1, 240, 1, 1)", style=solid]; -"94 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" -> "95 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/MemoryEfficientSwish[_swish]/sigmoid_2" [label="(1, 10, 1, 1)", style=solid]; -"94 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" -> "96 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/MemoryEfficientSwish[_swish]/__mul___2" [label="(1, 10, 1, 1)", style=solid]; -"95 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/MemoryEfficientSwish[_swish]/sigmoid_2" -> "96 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/MemoryEfficientSwish[_swish]/__mul___2" [label="(1, 10, 1, 1)", style=solid]; -"96 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/MemoryEfficientSwish[_swish]/__mul___2" -> "97 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/NNCFUserConv2dStaticSamePadding[_se_expand]/conv2d_0" [label="(1, 10, 1, 1)", style=solid]; -"97 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/NNCFUserConv2dStaticSamePadding[_se_expand]/conv2d_0" -> "98 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/sigmoid_0" [label="(1, 240, 1, 1)", style=solid]; -"98 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/sigmoid_0" -> "99 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/__mul___0" [label="(1, 240, 1, 1)", style=solid]; -"99 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/__mul___0" -> "100 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/NNCFUserConv2dStaticSamePadding[_project_conv]/conv2d_0" [label="(1, 240, 30, 30)", style=solid]; -"100 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/NNCFUserConv2dStaticSamePadding[_project_conv]/conv2d_0" -> "101 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/NNCFBatchNorm2d[_bn2]/batch_norm_0" [label="(1, 40, 30, 30)", style=solid]; -"101 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/NNCFBatchNorm2d[_bn2]/batch_norm_0" -> "102 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/__add___0" [label="(1, 40, 30, 30)", style=solid]; -"102 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/__add___0" -> "103 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/NNCFUserConv2dStaticSamePadding[_expand_conv]/conv2d_0" [label="(1, 40, 30, 30)", style=solid]; -"103 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/NNCFUserConv2dStaticSamePadding[_expand_conv]/conv2d_0" -> "104 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/NNCFBatchNorm2d[_bn0]/batch_norm_0" [label="(1, 240, 30, 30)", style=solid]; -"104 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/NNCFBatchNorm2d[_bn0]/batch_norm_0" -> "105 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/MemoryEfficientSwish[_swish]/sigmoid_0" [label="(1, 240, 30, 30)", style=solid]; -"104 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/NNCFBatchNorm2d[_bn0]/batch_norm_0" -> "106 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/MemoryEfficientSwish[_swish]/__mul___0" [label="(1, 240, 30, 30)", style=solid]; -"105 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/MemoryEfficientSwish[_swish]/sigmoid_0" -> "106 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/MemoryEfficientSwish[_swish]/__mul___0" [label="(1, 240, 30, 30)", style=solid]; -"106 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/MemoryEfficientSwish[_swish]/__mul___0" -> "107 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateInputs[1]/__getitem___0" [label="(1, 240, 30, 30)", 
style=solid]; -"107 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateInputs[1]/__getitem___0" -> "108 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ZeroPad2d[static_padding]/pad_0" [label="(1, 240, 30, 30)", style=solid]; -"108 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ZeroPad2d[static_padding]/pad_0" -> "109 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/conv2d_0" [label="(1, 240, 31, 31)", style=solid]; -"109 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/conv2d_0" -> "110 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/NNCFBatchNorm2d[_bn1]/batch_norm_0" [label="(1, 240, 15, 15)", style=solid]; -"110 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/NNCFBatchNorm2d[_bn1]/batch_norm_0" -> "111 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/MemoryEfficientSwish[_swish]/sigmoid_1" [label="(1, 240, 15, 15)", style=solid]; -"110 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/NNCFBatchNorm2d[_bn1]/batch_norm_0" -> "112 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/MemoryEfficientSwish[_swish]/__mul___1" [label="(1, 240, 15, 15)", style=solid]; -"111 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/MemoryEfficientSwish[_swish]/sigmoid_1" -> "112 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/MemoryEfficientSwish[_swish]/__mul___1" [label="(1, 240, 15, 15)", style=solid]; -"112 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/MemoryEfficientSwish[_swish]/__mul___1" -> "113 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/adaptive_avg_pool2d_0" [label="(1, 240, 15, 15)", style=solid]; -"112 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/MemoryEfficientSwish[_swish]/__mul___1" -> "119 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/__mul___0" [label="(1, 240, 15, 15)", style=solid]; -"113 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/adaptive_avg_pool2d_0" -> "114 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" [label="(1, 240, 1, 1)", style=solid]; -"114 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" -> "115 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/MemoryEfficientSwish[_swish]/sigmoid_2" [label="(1, 10, 1, 1)", style=solid]; -"114 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" -> "116 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/MemoryEfficientSwish[_swish]/__mul___2" [label="(1, 10, 1, 1)", style=solid]; -"115 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/MemoryEfficientSwish[_swish]/sigmoid_2" -> "116 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/MemoryEfficientSwish[_swish]/__mul___2" [label="(1, 10, 1, 1)", style=solid]; -"116 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/MemoryEfficientSwish[_swish]/__mul___2" -> "117 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/NNCFUserConv2dStaticSamePadding[_se_expand]/conv2d_0" [label="(1, 10, 1, 1)", style=solid]; -"117 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/NNCFUserConv2dStaticSamePadding[_se_expand]/conv2d_0" -> "118 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/sigmoid_0" [label="(1, 240, 1, 1)", style=solid]; -"118 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/sigmoid_0" -> "119 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/__mul___0" [label="(1, 240, 1, 1)", style=solid]; 
-"119 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/__mul___0" -> "120 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/NNCFUserConv2dStaticSamePadding[_project_conv]/conv2d_0" [label="(1, 240, 15, 15)", style=solid]; -"120 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/NNCFUserConv2dStaticSamePadding[_project_conv]/conv2d_0" -> "121 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/NNCFBatchNorm2d[_bn2]/batch_norm_0" [label="(1, 80, 15, 15)", style=solid]; -"121 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/NNCFBatchNorm2d[_bn2]/batch_norm_0" -> "122 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/NNCFUserConv2dStaticSamePadding[_expand_conv]/conv2d_0" [label="(1, 80, 15, 15)", style=solid]; -"121 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/NNCFBatchNorm2d[_bn2]/batch_norm_0" -> "141 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/__add___0" [label="(1, 80, 15, 15)", style=solid]; -"122 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/NNCFUserConv2dStaticSamePadding[_expand_conv]/conv2d_0" -> "123 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/NNCFBatchNorm2d[_bn0]/batch_norm_0" [label="(1, 480, 15, 15)", style=solid]; -"123 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/NNCFBatchNorm2d[_bn0]/batch_norm_0" -> "124 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/MemoryEfficientSwish[_swish]/sigmoid_0" [label="(1, 480, 15, 15)", style=solid]; -"123 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/NNCFBatchNorm2d[_bn0]/batch_norm_0" -> "125 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/MemoryEfficientSwish[_swish]/__mul___0" [label="(1, 480, 15, 15)", style=solid]; -"124 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/MemoryEfficientSwish[_swish]/sigmoid_0" -> "125 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/MemoryEfficientSwish[_swish]/__mul___0" [label="(1, 480, 15, 15)", style=solid]; -"125 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/MemoryEfficientSwish[_swish]/__mul___0" -> "126 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateInputs[1]/__getitem___0" [label="(1, 480, 15, 15)", style=solid]; -"126 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateInputs[1]/__getitem___0" -> "127 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ZeroPad2d[static_padding]/pad_0" [label="(1, 480, 15, 15)", style=solid]; -"127 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ZeroPad2d[static_padding]/pad_0" -> "128 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/conv2d_0" [label="(1, 480, 17, 17)", style=solid]; -"128 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/conv2d_0" -> "129 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/NNCFBatchNorm2d[_bn1]/batch_norm_0" [label="(1, 480, 15, 15)", style=solid]; -"129 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/NNCFBatchNorm2d[_bn1]/batch_norm_0" -> "130 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/MemoryEfficientSwish[_swish]/sigmoid_1" [label="(1, 480, 15, 15)", style=solid]; -"129 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/NNCFBatchNorm2d[_bn1]/batch_norm_0" -> "131 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/MemoryEfficientSwish[_swish]/__mul___1" [label="(1, 480, 15, 15)", style=solid]; -"130 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/MemoryEfficientSwish[_swish]/sigmoid_1" -> "131 
EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/MemoryEfficientSwish[_swish]/__mul___1" [label="(1, 480, 15, 15)", style=solid]; -"131 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/MemoryEfficientSwish[_swish]/__mul___1" -> "132 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/adaptive_avg_pool2d_0" [label="(1, 480, 15, 15)", style=solid]; -"131 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/MemoryEfficientSwish[_swish]/__mul___1" -> "138 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/__mul___0" [label="(1, 480, 15, 15)", style=solid]; -"132 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/adaptive_avg_pool2d_0" -> "133 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" [label="(1, 480, 1, 1)", style=solid]; -"133 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" -> "134 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/MemoryEfficientSwish[_swish]/sigmoid_2" [label="(1, 20, 1, 1)", style=solid]; -"133 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" -> "135 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/MemoryEfficientSwish[_swish]/__mul___2" [label="(1, 20, 1, 1)", style=solid]; -"134 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/MemoryEfficientSwish[_swish]/sigmoid_2" -> "135 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/MemoryEfficientSwish[_swish]/__mul___2" [label="(1, 20, 1, 1)", style=solid]; -"135 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/MemoryEfficientSwish[_swish]/__mul___2" -> "136 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/NNCFUserConv2dStaticSamePadding[_se_expand]/conv2d_0" [label="(1, 20, 1, 1)", style=solid]; -"136 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/NNCFUserConv2dStaticSamePadding[_se_expand]/conv2d_0" -> "137 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/sigmoid_0" [label="(1, 480, 1, 1)", style=solid]; -"137 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/sigmoid_0" -> "138 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/__mul___0" [label="(1, 480, 1, 1)", style=solid]; -"138 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/__mul___0" -> "139 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/NNCFUserConv2dStaticSamePadding[_project_conv]/conv2d_0" [label="(1, 480, 15, 15)", style=solid]; -"139 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/NNCFUserConv2dStaticSamePadding[_project_conv]/conv2d_0" -> "140 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/NNCFBatchNorm2d[_bn2]/batch_norm_0" [label="(1, 80, 15, 15)", style=solid]; -"140 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/NNCFBatchNorm2d[_bn2]/batch_norm_0" -> "141 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/__add___0" [label="(1, 80, 15, 15)", style=solid]; -"141 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/__add___0" -> "142 EfficientNet/ModuleList[_blocks]/MBConvBlock[7]/NNCFUserConv2dStaticSamePadding[_expand_conv]/conv2d_0" [label="(1, 80, 15, 15)", style=solid]; -"141 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/__add___0" -> "161 EfficientNet/ModuleList[_blocks]/MBConvBlock[7]/__add___0" [label="(1, 80, 15, 15)", style=solid]; -"142 EfficientNet/ModuleList[_blocks]/MBConvBlock[7]/NNCFUserConv2dStaticSamePadding[_expand_conv]/conv2d_0" -> "143 EfficientNet/ModuleList[_blocks]/MBConvBlock[7]/NNCFBatchNorm2d[_bn0]/batch_norm_0" [label="(1, 480, 15, 15)", style=solid]; -"143 EfficientNet/ModuleList[_blocks]/MBConvBlock[7]/NNCFBatchNorm2d[_bn0]/batch_norm_0" -> "144 
EfficientNet/ModuleList[_blocks]/MBConvBlock[7]/MemoryEfficientSwish[_swish]/sigmoid_0" [label="(1, 480, 15, 15)", style=solid]; -"143 EfficientNet/ModuleList[_blocks]/MBConvBlock[7]/NNCFBatchNorm2d[_bn0]/batch_norm_0" -> "145 EfficientNet/ModuleList[_blocks]/MBConvBlock[7]/MemoryEfficientSwish[_swish]/__mul___0" [label="(1, 480, 15, 15)", style=solid]; -"144 EfficientNet/ModuleList[_blocks]/MBConvBlock[7]/MemoryEfficientSwish[_swish]/sigmoid_0" -> "145 EfficientNet/ModuleList[_blocks]/MBConvBlock[7]/MemoryEfficientSwish[_swish]/__mul___0" [label="(1, 480, 15, 15)", style=solid]; -"145 EfficientNet/ModuleList[_blocks]/MBConvBlock[7]/MemoryEfficientSwish[_swish]/__mul___0" -> "146 EfficientNet/ModuleList[_blocks]/MBConvBlock[7]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateInputs[1]/__getitem___0" [label="(1, 480, 15, 15)", style=solid]; -"146 EfficientNet/ModuleList[_blocks]/MBConvBlock[7]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateInputs[1]/__getitem___0" -> "147 EfficientNet/ModuleList[_blocks]/MBConvBlock[7]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ZeroPad2d[static_padding]/pad_0" [label="(1, 480, 15, 15)", style=solid]; -"147 EfficientNet/ModuleList[_blocks]/MBConvBlock[7]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ZeroPad2d[static_padding]/pad_0" -> "148 EfficientNet/ModuleList[_blocks]/MBConvBlock[7]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/conv2d_0" [label="(1, 480, 17, 17)", style=solid]; -"148 EfficientNet/ModuleList[_blocks]/MBConvBlock[7]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/conv2d_0" -> "149 EfficientNet/ModuleList[_blocks]/MBConvBlock[7]/NNCFBatchNorm2d[_bn1]/batch_norm_0" [label="(1, 480, 15, 15)", style=solid]; -"149 EfficientNet/ModuleList[_blocks]/MBConvBlock[7]/NNCFBatchNorm2d[_bn1]/batch_norm_0" -> "150 EfficientNet/ModuleList[_blocks]/MBConvBlock[7]/MemoryEfficientSwish[_swish]/sigmoid_1" [label="(1, 480, 15, 15)", style=solid]; -"149 EfficientNet/ModuleList[_blocks]/MBConvBlock[7]/NNCFBatchNorm2d[_bn1]/batch_norm_0" -> "151 EfficientNet/ModuleList[_blocks]/MBConvBlock[7]/MemoryEfficientSwish[_swish]/__mul___1" [label="(1, 480, 15, 15)", style=solid]; -"150 EfficientNet/ModuleList[_blocks]/MBConvBlock[7]/MemoryEfficientSwish[_swish]/sigmoid_1" -> "151 EfficientNet/ModuleList[_blocks]/MBConvBlock[7]/MemoryEfficientSwish[_swish]/__mul___1" [label="(1, 480, 15, 15)", style=solid]; -"151 EfficientNet/ModuleList[_blocks]/MBConvBlock[7]/MemoryEfficientSwish[_swish]/__mul___1" -> "152 EfficientNet/ModuleList[_blocks]/MBConvBlock[7]/adaptive_avg_pool2d_0" [label="(1, 480, 15, 15)", style=solid]; -"151 EfficientNet/ModuleList[_blocks]/MBConvBlock[7]/MemoryEfficientSwish[_swish]/__mul___1" -> "158 EfficientNet/ModuleList[_blocks]/MBConvBlock[7]/__mul___0" [label="(1, 480, 15, 15)", style=solid]; -"152 EfficientNet/ModuleList[_blocks]/MBConvBlock[7]/adaptive_avg_pool2d_0" -> "153 EfficientNet/ModuleList[_blocks]/MBConvBlock[7]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" [label="(1, 480, 1, 1)", style=solid]; -"153 EfficientNet/ModuleList[_blocks]/MBConvBlock[7]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" -> "154 EfficientNet/ModuleList[_blocks]/MBConvBlock[7]/MemoryEfficientSwish[_swish]/sigmoid_2" [label="(1, 20, 1, 1)", style=solid]; -"153 EfficientNet/ModuleList[_blocks]/MBConvBlock[7]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" -> "155 EfficientNet/ModuleList[_blocks]/MBConvBlock[7]/MemoryEfficientSwish[_swish]/__mul___2" [label="(1, 20, 1, 1)", 
style=solid]; -"154 EfficientNet/ModuleList[_blocks]/MBConvBlock[7]/MemoryEfficientSwish[_swish]/sigmoid_2" -> "155 EfficientNet/ModuleList[_blocks]/MBConvBlock[7]/MemoryEfficientSwish[_swish]/__mul___2" [label="(1, 20, 1, 1)", style=solid]; -"155 EfficientNet/ModuleList[_blocks]/MBConvBlock[7]/MemoryEfficientSwish[_swish]/__mul___2" -> "156 EfficientNet/ModuleList[_blocks]/MBConvBlock[7]/NNCFUserConv2dStaticSamePadding[_se_expand]/conv2d_0" [label="(1, 20, 1, 1)", style=solid]; -"156 EfficientNet/ModuleList[_blocks]/MBConvBlock[7]/NNCFUserConv2dStaticSamePadding[_se_expand]/conv2d_0" -> "157 EfficientNet/ModuleList[_blocks]/MBConvBlock[7]/sigmoid_0" [label="(1, 480, 1, 1)", style=solid]; -"157 EfficientNet/ModuleList[_blocks]/MBConvBlock[7]/sigmoid_0" -> "158 EfficientNet/ModuleList[_blocks]/MBConvBlock[7]/__mul___0" [label="(1, 480, 1, 1)", style=solid]; -"158 EfficientNet/ModuleList[_blocks]/MBConvBlock[7]/__mul___0" -> "159 EfficientNet/ModuleList[_blocks]/MBConvBlock[7]/NNCFUserConv2dStaticSamePadding[_project_conv]/conv2d_0" [label="(1, 480, 15, 15)", style=solid]; -"159 EfficientNet/ModuleList[_blocks]/MBConvBlock[7]/NNCFUserConv2dStaticSamePadding[_project_conv]/conv2d_0" -> "160 EfficientNet/ModuleList[_blocks]/MBConvBlock[7]/NNCFBatchNorm2d[_bn2]/batch_norm_0" [label="(1, 80, 15, 15)", style=solid]; -"160 EfficientNet/ModuleList[_blocks]/MBConvBlock[7]/NNCFBatchNorm2d[_bn2]/batch_norm_0" -> "161 EfficientNet/ModuleList[_blocks]/MBConvBlock[7]/__add___0" [label="(1, 80, 15, 15)", style=solid]; -"161 EfficientNet/ModuleList[_blocks]/MBConvBlock[7]/__add___0" -> "162 EfficientNet/ModuleList[_blocks]/MBConvBlock[8]/NNCFUserConv2dStaticSamePadding[_expand_conv]/conv2d_0" [label="(1, 80, 15, 15)", style=solid]; -"162 EfficientNet/ModuleList[_blocks]/MBConvBlock[8]/NNCFUserConv2dStaticSamePadding[_expand_conv]/conv2d_0" -> "163 EfficientNet/ModuleList[_blocks]/MBConvBlock[8]/NNCFBatchNorm2d[_bn0]/batch_norm_0" [label="(1, 480, 15, 15)", style=solid]; -"163 EfficientNet/ModuleList[_blocks]/MBConvBlock[8]/NNCFBatchNorm2d[_bn0]/batch_norm_0" -> "164 EfficientNet/ModuleList[_blocks]/MBConvBlock[8]/MemoryEfficientSwish[_swish]/sigmoid_0" [label="(1, 480, 15, 15)", style=solid]; -"163 EfficientNet/ModuleList[_blocks]/MBConvBlock[8]/NNCFBatchNorm2d[_bn0]/batch_norm_0" -> "165 EfficientNet/ModuleList[_blocks]/MBConvBlock[8]/MemoryEfficientSwish[_swish]/__mul___0" [label="(1, 480, 15, 15)", style=solid]; -"164 EfficientNet/ModuleList[_blocks]/MBConvBlock[8]/MemoryEfficientSwish[_swish]/sigmoid_0" -> "165 EfficientNet/ModuleList[_blocks]/MBConvBlock[8]/MemoryEfficientSwish[_swish]/__mul___0" [label="(1, 480, 15, 15)", style=solid]; -"165 EfficientNet/ModuleList[_blocks]/MBConvBlock[8]/MemoryEfficientSwish[_swish]/__mul___0" -> "166 EfficientNet/ModuleList[_blocks]/MBConvBlock[8]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateInputs[1]/__getitem___0" [label="(1, 480, 15, 15)", style=solid]; -"166 EfficientNet/ModuleList[_blocks]/MBConvBlock[8]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateInputs[1]/__getitem___0" -> "167 EfficientNet/ModuleList[_blocks]/MBConvBlock[8]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ZeroPad2d[static_padding]/pad_0" [label="(1, 480, 15, 15)", style=solid]; -"167 EfficientNet/ModuleList[_blocks]/MBConvBlock[8]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ZeroPad2d[static_padding]/pad_0" -> "168 EfficientNet/ModuleList[_blocks]/MBConvBlock[8]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/conv2d_0" 
[label="(1, 480, 19, 19)", style=solid]; -"168 EfficientNet/ModuleList[_blocks]/MBConvBlock[8]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/conv2d_0" -> "169 EfficientNet/ModuleList[_blocks]/MBConvBlock[8]/NNCFBatchNorm2d[_bn1]/batch_norm_0" [label="(1, 480, 15, 15)", style=solid]; -"169 EfficientNet/ModuleList[_blocks]/MBConvBlock[8]/NNCFBatchNorm2d[_bn1]/batch_norm_0" -> "170 EfficientNet/ModuleList[_blocks]/MBConvBlock[8]/MemoryEfficientSwish[_swish]/sigmoid_1" [label="(1, 480, 15, 15)", style=solid]; -"169 EfficientNet/ModuleList[_blocks]/MBConvBlock[8]/NNCFBatchNorm2d[_bn1]/batch_norm_0" -> "171 EfficientNet/ModuleList[_blocks]/MBConvBlock[8]/MemoryEfficientSwish[_swish]/__mul___1" [label="(1, 480, 15, 15)", style=solid]; -"170 EfficientNet/ModuleList[_blocks]/MBConvBlock[8]/MemoryEfficientSwish[_swish]/sigmoid_1" -> "171 EfficientNet/ModuleList[_blocks]/MBConvBlock[8]/MemoryEfficientSwish[_swish]/__mul___1" [label="(1, 480, 15, 15)", style=solid]; -"171 EfficientNet/ModuleList[_blocks]/MBConvBlock[8]/MemoryEfficientSwish[_swish]/__mul___1" -> "172 EfficientNet/ModuleList[_blocks]/MBConvBlock[8]/adaptive_avg_pool2d_0" [label="(1, 480, 15, 15)", style=solid]; -"171 EfficientNet/ModuleList[_blocks]/MBConvBlock[8]/MemoryEfficientSwish[_swish]/__mul___1" -> "178 EfficientNet/ModuleList[_blocks]/MBConvBlock[8]/__mul___0" [label="(1, 480, 15, 15)", style=solid]; -"172 EfficientNet/ModuleList[_blocks]/MBConvBlock[8]/adaptive_avg_pool2d_0" -> "173 EfficientNet/ModuleList[_blocks]/MBConvBlock[8]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" [label="(1, 480, 1, 1)", style=solid]; -"173 EfficientNet/ModuleList[_blocks]/MBConvBlock[8]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" -> "174 EfficientNet/ModuleList[_blocks]/MBConvBlock[8]/MemoryEfficientSwish[_swish]/sigmoid_2" [label="(1, 20, 1, 1)", style=solid]; -"173 EfficientNet/ModuleList[_blocks]/MBConvBlock[8]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" -> "175 EfficientNet/ModuleList[_blocks]/MBConvBlock[8]/MemoryEfficientSwish[_swish]/__mul___2" [label="(1, 20, 1, 1)", style=solid]; -"174 EfficientNet/ModuleList[_blocks]/MBConvBlock[8]/MemoryEfficientSwish[_swish]/sigmoid_2" -> "175 EfficientNet/ModuleList[_blocks]/MBConvBlock[8]/MemoryEfficientSwish[_swish]/__mul___2" [label="(1, 20, 1, 1)", style=solid]; -"175 EfficientNet/ModuleList[_blocks]/MBConvBlock[8]/MemoryEfficientSwish[_swish]/__mul___2" -> "176 EfficientNet/ModuleList[_blocks]/MBConvBlock[8]/NNCFUserConv2dStaticSamePadding[_se_expand]/conv2d_0" [label="(1, 20, 1, 1)", style=solid]; -"176 EfficientNet/ModuleList[_blocks]/MBConvBlock[8]/NNCFUserConv2dStaticSamePadding[_se_expand]/conv2d_0" -> "177 EfficientNet/ModuleList[_blocks]/MBConvBlock[8]/sigmoid_0" [label="(1, 480, 1, 1)", style=solid]; -"177 EfficientNet/ModuleList[_blocks]/MBConvBlock[8]/sigmoid_0" -> "178 EfficientNet/ModuleList[_blocks]/MBConvBlock[8]/__mul___0" [label="(1, 480, 1, 1)", style=solid]; -"178 EfficientNet/ModuleList[_blocks]/MBConvBlock[8]/__mul___0" -> "179 EfficientNet/ModuleList[_blocks]/MBConvBlock[8]/NNCFUserConv2dStaticSamePadding[_project_conv]/conv2d_0" [label="(1, 480, 15, 15)", style=solid]; -"179 EfficientNet/ModuleList[_blocks]/MBConvBlock[8]/NNCFUserConv2dStaticSamePadding[_project_conv]/conv2d_0" -> "180 EfficientNet/ModuleList[_blocks]/MBConvBlock[8]/NNCFBatchNorm2d[_bn2]/batch_norm_0" [label="(1, 112, 15, 15)", style=solid]; -"180 EfficientNet/ModuleList[_blocks]/MBConvBlock[8]/NNCFBatchNorm2d[_bn2]/batch_norm_0" -> "181 
EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFUserConv2dStaticSamePadding[_expand_conv]/conv2d_0" [label="(1, 112, 15, 15)", style=solid]; -"180 EfficientNet/ModuleList[_blocks]/MBConvBlock[8]/NNCFBatchNorm2d[_bn2]/batch_norm_0" -> "203 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/__add___0" [label="(1, 112, 15, 15)", style=solid]; -"181 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFUserConv2dStaticSamePadding[_expand_conv]/conv2d_0" -> "182 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFBatchNorm2d[_bn0]/batch_norm_0" [label="(1, 672, 15, 15)", style=solid]; -"182 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFBatchNorm2d[_bn0]/batch_norm_0" -> "183 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/MemoryEfficientSwish[_swish]/sigmoid_0" [label="(1, 672, 15, 15)", style=solid]; -"182 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFBatchNorm2d[_bn0]/batch_norm_0" -> "184 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/MemoryEfficientSwish[_swish]/__mul___0" [label="(1, 672, 15, 15)", style=solid]; -"183 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/MemoryEfficientSwish[_swish]/sigmoid_0" -> "184 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/MemoryEfficientSwish[_swish]/__mul___0" [label="(1, 672, 15, 15)", style=solid]; -"184 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/MemoryEfficientSwish[_swish]/__mul___0" -> "185 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateInputs[1]/__getitem___0" [label="(1, 672, 15, 15)", style=solid]; -"185 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateInputs[1]/__getitem___0" -> "189 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ZeroPad2d[static_padding]/pad_0" [label="(1, 672, 13, 13)", style=solid]; -"186 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateWeight[3]/ElasticKernelConv2DOp[op]/linear_0" -> "187 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateWeight[3]/ElasticKernelConv2DOp[op]/view_0" [label="(672, 9)", style=solid]; -"187 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateWeight[3]/ElasticKernelConv2DOp[op]/view_0" -> "188 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateWeight[3]/ElasticKernelConv2DOp[op]/view_1" [label="(672, 1, 9)", style=solid]; -"188 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateWeight[3]/ElasticKernelConv2DOp[op]/view_1" -> "190 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/conv2d_0" [label="(672, 1, 3, 3)", style=solid]; -"189 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ZeroPad2d[static_padding]/pad_0" -> "190 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/conv2d_0" [label="(1, 672, 17, 17)", style=solid]; -"190 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/conv2d_0" -> "191 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFBatchNorm2d[_bn1]/batch_norm_0" [label="(1, 672, 15, 15)", style=solid]; -"191 
EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFBatchNorm2d[_bn1]/batch_norm_0" -> "192 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/MemoryEfficientSwish[_swish]/sigmoid_1" [label="(1, 672, 15, 15)", style=solid]; -"191 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFBatchNorm2d[_bn1]/batch_norm_0" -> "193 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/MemoryEfficientSwish[_swish]/__mul___1" [label="(1, 672, 15, 15)", style=solid]; -"192 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/MemoryEfficientSwish[_swish]/sigmoid_1" -> "193 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/MemoryEfficientSwish[_swish]/__mul___1" [label="(1, 672, 15, 15)", style=solid]; -"193 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/MemoryEfficientSwish[_swish]/__mul___1" -> "194 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/adaptive_avg_pool2d_0" [label="(1, 672, 15, 15)", style=solid]; -"193 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/MemoryEfficientSwish[_swish]/__mul___1" -> "200 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/__mul___0" [label="(1, 672, 15, 15)", style=solid]; -"194 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/adaptive_avg_pool2d_0" -> "195 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" [label="(1, 672, 1, 1)", style=solid]; -"195 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" -> "196 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/MemoryEfficientSwish[_swish]/sigmoid_2" [label="(1, 28, 1, 1)", style=solid]; -"195 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" -> "197 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/MemoryEfficientSwish[_swish]/__mul___2" [label="(1, 28, 1, 1)", style=solid]; -"196 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/MemoryEfficientSwish[_swish]/sigmoid_2" -> "197 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/MemoryEfficientSwish[_swish]/__mul___2" [label="(1, 28, 1, 1)", style=solid]; -"197 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/MemoryEfficientSwish[_swish]/__mul___2" -> "198 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFUserConv2dStaticSamePadding[_se_expand]/conv2d_0" [label="(1, 28, 1, 1)", style=solid]; -"198 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFUserConv2dStaticSamePadding[_se_expand]/conv2d_0" -> "199 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/sigmoid_0" [label="(1, 672, 1, 1)", style=solid]; -"199 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/sigmoid_0" -> "200 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/__mul___0" [label="(1, 672, 1, 1)", style=solid]; -"200 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/__mul___0" -> "201 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFUserConv2dStaticSamePadding[_project_conv]/conv2d_0" [label="(1, 672, 15, 15)", style=solid]; -"201 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFUserConv2dStaticSamePadding[_project_conv]/conv2d_0" -> "202 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFBatchNorm2d[_bn2]/batch_norm_0" [label="(1, 112, 15, 15)", style=solid]; -"202 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFBatchNorm2d[_bn2]/batch_norm_0" -> "203 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/__add___0" [label="(1, 112, 15, 15)", style=solid]; -"203 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/__add___0" -> "204 EfficientNet/ModuleList[_blocks]/MBConvBlock[10]/NNCFUserConv2dStaticSamePadding[_expand_conv]/conv2d_0" [label="(1, 112, 15, 15)", style=solid]; -"203 
EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/__add___0" -> "223 EfficientNet/ModuleList[_blocks]/MBConvBlock[10]/__add___0" [label="(1, 112, 15, 15)", style=solid]; -"204 EfficientNet/ModuleList[_blocks]/MBConvBlock[10]/NNCFUserConv2dStaticSamePadding[_expand_conv]/conv2d_0" -> "205 EfficientNet/ModuleList[_blocks]/MBConvBlock[10]/NNCFBatchNorm2d[_bn0]/batch_norm_0" [label="(1, 672, 15, 15)", style=solid]; -"205 EfficientNet/ModuleList[_blocks]/MBConvBlock[10]/NNCFBatchNorm2d[_bn0]/batch_norm_0" -> "206 EfficientNet/ModuleList[_blocks]/MBConvBlock[10]/MemoryEfficientSwish[_swish]/sigmoid_0" [label="(1, 672, 15, 15)", style=solid]; -"205 EfficientNet/ModuleList[_blocks]/MBConvBlock[10]/NNCFBatchNorm2d[_bn0]/batch_norm_0" -> "207 EfficientNet/ModuleList[_blocks]/MBConvBlock[10]/MemoryEfficientSwish[_swish]/__mul___0" [label="(1, 672, 15, 15)", style=solid]; -"206 EfficientNet/ModuleList[_blocks]/MBConvBlock[10]/MemoryEfficientSwish[_swish]/sigmoid_0" -> "207 EfficientNet/ModuleList[_blocks]/MBConvBlock[10]/MemoryEfficientSwish[_swish]/__mul___0" [label="(1, 672, 15, 15)", style=solid]; -"207 EfficientNet/ModuleList[_blocks]/MBConvBlock[10]/MemoryEfficientSwish[_swish]/__mul___0" -> "208 EfficientNet/ModuleList[_blocks]/MBConvBlock[10]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateInputs[1]/__getitem___0" [label="(1, 672, 15, 15)", style=solid]; -"208 EfficientNet/ModuleList[_blocks]/MBConvBlock[10]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateInputs[1]/__getitem___0" -> "209 EfficientNet/ModuleList[_blocks]/MBConvBlock[10]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ZeroPad2d[static_padding]/pad_0" [label="(1, 672, 15, 15)", style=solid]; -"209 EfficientNet/ModuleList[_blocks]/MBConvBlock[10]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ZeroPad2d[static_padding]/pad_0" -> "210 EfficientNet/ModuleList[_blocks]/MBConvBlock[10]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/conv2d_0" [label="(1, 672, 19, 19)", style=solid]; -"210 EfficientNet/ModuleList[_blocks]/MBConvBlock[10]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/conv2d_0" -> "211 EfficientNet/ModuleList[_blocks]/MBConvBlock[10]/NNCFBatchNorm2d[_bn1]/batch_norm_0" [label="(1, 672, 15, 15)", style=solid]; -"211 EfficientNet/ModuleList[_blocks]/MBConvBlock[10]/NNCFBatchNorm2d[_bn1]/batch_norm_0" -> "212 EfficientNet/ModuleList[_blocks]/MBConvBlock[10]/MemoryEfficientSwish[_swish]/sigmoid_1" [label="(1, 672, 15, 15)", style=solid]; -"211 EfficientNet/ModuleList[_blocks]/MBConvBlock[10]/NNCFBatchNorm2d[_bn1]/batch_norm_0" -> "213 EfficientNet/ModuleList[_blocks]/MBConvBlock[10]/MemoryEfficientSwish[_swish]/__mul___1" [label="(1, 672, 15, 15)", style=solid]; -"212 EfficientNet/ModuleList[_blocks]/MBConvBlock[10]/MemoryEfficientSwish[_swish]/sigmoid_1" -> "213 EfficientNet/ModuleList[_blocks]/MBConvBlock[10]/MemoryEfficientSwish[_swish]/__mul___1" [label="(1, 672, 15, 15)", style=solid]; -"213 EfficientNet/ModuleList[_blocks]/MBConvBlock[10]/MemoryEfficientSwish[_swish]/__mul___1" -> "214 EfficientNet/ModuleList[_blocks]/MBConvBlock[10]/adaptive_avg_pool2d_0" [label="(1, 672, 15, 15)", style=solid]; -"213 EfficientNet/ModuleList[_blocks]/MBConvBlock[10]/MemoryEfficientSwish[_swish]/__mul___1" -> "220 EfficientNet/ModuleList[_blocks]/MBConvBlock[10]/__mul___0" [label="(1, 672, 15, 15)", style=solid]; -"214 EfficientNet/ModuleList[_blocks]/MBConvBlock[10]/adaptive_avg_pool2d_0" -> "215 
EfficientNet/ModuleList[_blocks]/MBConvBlock[10]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" [label="(1, 672, 1, 1)", style=solid]; -"215 EfficientNet/ModuleList[_blocks]/MBConvBlock[10]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" -> "216 EfficientNet/ModuleList[_blocks]/MBConvBlock[10]/MemoryEfficientSwish[_swish]/sigmoid_2" [label="(1, 28, 1, 1)", style=solid]; -"215 EfficientNet/ModuleList[_blocks]/MBConvBlock[10]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" -> "217 EfficientNet/ModuleList[_blocks]/MBConvBlock[10]/MemoryEfficientSwish[_swish]/__mul___2" [label="(1, 28, 1, 1)", style=solid]; -"216 EfficientNet/ModuleList[_blocks]/MBConvBlock[10]/MemoryEfficientSwish[_swish]/sigmoid_2" -> "217 EfficientNet/ModuleList[_blocks]/MBConvBlock[10]/MemoryEfficientSwish[_swish]/__mul___2" [label="(1, 28, 1, 1)", style=solid]; -"217 EfficientNet/ModuleList[_blocks]/MBConvBlock[10]/MemoryEfficientSwish[_swish]/__mul___2" -> "218 EfficientNet/ModuleList[_blocks]/MBConvBlock[10]/NNCFUserConv2dStaticSamePadding[_se_expand]/conv2d_0" [label="(1, 28, 1, 1)", style=solid]; -"218 EfficientNet/ModuleList[_blocks]/MBConvBlock[10]/NNCFUserConv2dStaticSamePadding[_se_expand]/conv2d_0" -> "219 EfficientNet/ModuleList[_blocks]/MBConvBlock[10]/sigmoid_0" [label="(1, 672, 1, 1)", style=solid]; -"219 EfficientNet/ModuleList[_blocks]/MBConvBlock[10]/sigmoid_0" -> "220 EfficientNet/ModuleList[_blocks]/MBConvBlock[10]/__mul___0" [label="(1, 672, 1, 1)", style=solid]; -"220 EfficientNet/ModuleList[_blocks]/MBConvBlock[10]/__mul___0" -> "221 EfficientNet/ModuleList[_blocks]/MBConvBlock[10]/NNCFUserConv2dStaticSamePadding[_project_conv]/conv2d_0" [label="(1, 672, 15, 15)", style=solid]; -"221 EfficientNet/ModuleList[_blocks]/MBConvBlock[10]/NNCFUserConv2dStaticSamePadding[_project_conv]/conv2d_0" -> "222 EfficientNet/ModuleList[_blocks]/MBConvBlock[10]/NNCFBatchNorm2d[_bn2]/batch_norm_0" [label="(1, 112, 15, 15)", style=solid]; -"222 EfficientNet/ModuleList[_blocks]/MBConvBlock[10]/NNCFBatchNorm2d[_bn2]/batch_norm_0" -> "223 EfficientNet/ModuleList[_blocks]/MBConvBlock[10]/__add___0" [label="(1, 112, 15, 15)", style=solid]; -"223 EfficientNet/ModuleList[_blocks]/MBConvBlock[10]/__add___0" -> "224 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFUserConv2dStaticSamePadding[_expand_conv]/conv2d_0" [label="(1, 112, 15, 15)", style=solid]; -"224 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFUserConv2dStaticSamePadding[_expand_conv]/conv2d_0" -> "225 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFBatchNorm2d[_bn0]/batch_norm_0" [label="(1, 672, 15, 15)", style=solid]; -"225 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFBatchNorm2d[_bn0]/batch_norm_0" -> "226 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/MemoryEfficientSwish[_swish]/sigmoid_0" [label="(1, 672, 15, 15)", style=solid]; -"225 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFBatchNorm2d[_bn0]/batch_norm_0" -> "227 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/MemoryEfficientSwish[_swish]/__mul___0" [label="(1, 672, 15, 15)", style=solid]; -"226 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/MemoryEfficientSwish[_swish]/sigmoid_0" -> "227 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/MemoryEfficientSwish[_swish]/__mul___0" [label="(1, 672, 15, 15)", style=solid]; -"227 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/MemoryEfficientSwish[_swish]/__mul___0" -> "228 
EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateInputs[1]/__getitem___0" [label="(1, 672, 15, 15)", style=solid]; -"228 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateInputs[1]/__getitem___0" -> "229 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ZeroPad2d[static_padding]/pad_0" [label="(1, 672, 15, 15)", style=solid]; -"229 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ZeroPad2d[static_padding]/pad_0" -> "230 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/conv2d_0" [label="(1, 672, 18, 18)", style=solid]; -"230 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/conv2d_0" -> "231 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFBatchNorm2d[_bn1]/batch_norm_0" [label="(1, 672, 7, 7)", style=solid]; -"231 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFBatchNorm2d[_bn1]/batch_norm_0" -> "232 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/MemoryEfficientSwish[_swish]/sigmoid_1" [label="(1, 672, 7, 7)", style=solid]; -"231 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFBatchNorm2d[_bn1]/batch_norm_0" -> "233 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/MemoryEfficientSwish[_swish]/__mul___1" [label="(1, 672, 7, 7)", style=solid]; -"232 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/MemoryEfficientSwish[_swish]/sigmoid_1" -> "233 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/MemoryEfficientSwish[_swish]/__mul___1" [label="(1, 672, 7, 7)", style=solid]; -"233 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/MemoryEfficientSwish[_swish]/__mul___1" -> "234 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/adaptive_avg_pool2d_0" [label="(1, 672, 7, 7)", style=solid]; -"233 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/MemoryEfficientSwish[_swish]/__mul___1" -> "240 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/__mul___0" [label="(1, 672, 7, 7)", style=solid]; -"234 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/adaptive_avg_pool2d_0" -> "235 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" [label="(1, 672, 1, 1)", style=solid]; -"235 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" -> "236 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/MemoryEfficientSwish[_swish]/sigmoid_2" [label="(1, 28, 1, 1)", style=solid]; -"235 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" -> "237 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/MemoryEfficientSwish[_swish]/__mul___2" [label="(1, 28, 1, 1)", style=solid]; -"236 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/MemoryEfficientSwish[_swish]/sigmoid_2" -> "237 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/MemoryEfficientSwish[_swish]/__mul___2" [label="(1, 28, 1, 1)", style=solid]; -"237 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/MemoryEfficientSwish[_swish]/__mul___2" -> "238 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFUserConv2dStaticSamePadding[_se_expand]/conv2d_0" [label="(1, 28, 1, 1)", style=solid]; -"238 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFUserConv2dStaticSamePadding[_se_expand]/conv2d_0" -> "239 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/sigmoid_0" [label="(1, 672, 1, 
1)", style=solid]; -"239 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/sigmoid_0" -> "240 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/__mul___0" [label="(1, 672, 1, 1)", style=solid]; -"240 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/__mul___0" -> "241 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFUserConv2dStaticSamePadding[_project_conv]/conv2d_0" [label="(1, 672, 7, 7)", style=solid]; -"241 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFUserConv2dStaticSamePadding[_project_conv]/conv2d_0" -> "242 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFBatchNorm2d[_bn2]/batch_norm_0" [label="(1, 192, 7, 7)", style=solid]; -"242 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFBatchNorm2d[_bn2]/batch_norm_0" -> "243 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFUserConv2dStaticSamePadding[_expand_conv]/conv2d_0" [label="(1, 192, 7, 7)", style=solid]; -"242 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFBatchNorm2d[_bn2]/batch_norm_0" -> "262 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/__add___0" [label="(1, 192, 7, 7)", style=solid]; -"243 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFUserConv2dStaticSamePadding[_expand_conv]/conv2d_0" -> "244 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFBatchNorm2d[_bn0]/batch_norm_0" [label="(1, 1152, 7, 7)", style=solid]; -"244 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFBatchNorm2d[_bn0]/batch_norm_0" -> "245 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/MemoryEfficientSwish[_swish]/sigmoid_0" [label="(1, 1152, 7, 7)", style=solid]; -"244 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFBatchNorm2d[_bn0]/batch_norm_0" -> "246 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/MemoryEfficientSwish[_swish]/__mul___0" [label="(1, 1152, 7, 7)", style=solid]; -"245 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/MemoryEfficientSwish[_swish]/sigmoid_0" -> "246 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/MemoryEfficientSwish[_swish]/__mul___0" [label="(1, 1152, 7, 7)", style=solid]; -"246 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/MemoryEfficientSwish[_swish]/__mul___0" -> "247 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateInputs[1]/__getitem___0" [label="(1, 1152, 7, 7)", style=solid]; -"247 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateInputs[1]/__getitem___0" -> "248 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ZeroPad2d[static_padding]/pad_0" [label="(1, 1152, 7, 7)", style=solid]; -"248 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ZeroPad2d[static_padding]/pad_0" -> "249 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/conv2d_0" [label="(1, 1152, 11, 11)", style=solid]; -"249 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/conv2d_0" -> "250 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFBatchNorm2d[_bn1]/batch_norm_0" [label="(1, 1152, 7, 7)", style=solid]; -"250 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFBatchNorm2d[_bn1]/batch_norm_0" -> "251 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/MemoryEfficientSwish[_swish]/sigmoid_1" [label="(1, 1152, 7, 7)", style=solid]; -"250 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFBatchNorm2d[_bn1]/batch_norm_0" -> "252 
EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/MemoryEfficientSwish[_swish]/__mul___1" [label="(1, 1152, 7, 7)", style=solid]; -"251 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/MemoryEfficientSwish[_swish]/sigmoid_1" -> "252 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/MemoryEfficientSwish[_swish]/__mul___1" [label="(1, 1152, 7, 7)", style=solid]; -"252 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/MemoryEfficientSwish[_swish]/__mul___1" -> "253 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/adaptive_avg_pool2d_0" [label="(1, 1152, 7, 7)", style=solid]; -"252 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/MemoryEfficientSwish[_swish]/__mul___1" -> "259 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/__mul___0" [label="(1, 1152, 7, 7)", style=solid]; -"253 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/adaptive_avg_pool2d_0" -> "254 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" [label="(1, 1152, 1, 1)", style=solid]; -"254 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" -> "255 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/MemoryEfficientSwish[_swish]/sigmoid_2" [label="(1, 48, 1, 1)", style=solid]; -"254 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" -> "256 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/MemoryEfficientSwish[_swish]/__mul___2" [label="(1, 48, 1, 1)", style=solid]; -"255 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/MemoryEfficientSwish[_swish]/sigmoid_2" -> "256 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/MemoryEfficientSwish[_swish]/__mul___2" [label="(1, 48, 1, 1)", style=solid]; -"256 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/MemoryEfficientSwish[_swish]/__mul___2" -> "257 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFUserConv2dStaticSamePadding[_se_expand]/conv2d_0" [label="(1, 48, 1, 1)", style=solid]; -"257 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFUserConv2dStaticSamePadding[_se_expand]/conv2d_0" -> "258 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/sigmoid_0" [label="(1, 1152, 1, 1)", style=solid]; -"258 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/sigmoid_0" -> "259 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/__mul___0" [label="(1, 1152, 1, 1)", style=solid]; -"259 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/__mul___0" -> "260 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFUserConv2dStaticSamePadding[_project_conv]/conv2d_0" [label="(1, 1152, 7, 7)", style=solid]; -"260 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFUserConv2dStaticSamePadding[_project_conv]/conv2d_0" -> "261 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFBatchNorm2d[_bn2]/batch_norm_0" [label="(1, 192, 7, 7)", style=solid]; -"261 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFBatchNorm2d[_bn2]/batch_norm_0" -> "262 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/__add___0" [label="(1, 192, 7, 7)", style=solid]; -"262 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/__add___0" -> "263 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFUserConv2dStaticSamePadding[_expand_conv]/conv2d_0" [label="(1, 192, 7, 7)", style=solid]; -"262 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/__add___0" -> "285 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/__add___0" [label="(1, 192, 7, 7)", style=solid]; -"263 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFUserConv2dStaticSamePadding[_expand_conv]/conv2d_0" -> "264 
EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFBatchNorm2d[_bn0]/batch_norm_0" [label="(1, 1152, 7, 7)", style=solid]; -"264 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFBatchNorm2d[_bn0]/batch_norm_0" -> "265 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/MemoryEfficientSwish[_swish]/sigmoid_0" [label="(1, 1152, 7, 7)", style=solid]; -"264 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFBatchNorm2d[_bn0]/batch_norm_0" -> "266 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/MemoryEfficientSwish[_swish]/__mul___0" [label="(1, 1152, 7, 7)", style=solid]; -"265 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/MemoryEfficientSwish[_swish]/sigmoid_0" -> "266 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/MemoryEfficientSwish[_swish]/__mul___0" [label="(1, 1152, 7, 7)", style=solid]; -"266 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/MemoryEfficientSwish[_swish]/__mul___0" -> "267 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateInputs[1]/__getitem___0" [label="(1, 1152, 7, 7)", style=solid]; -"267 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateInputs[1]/__getitem___0" -> "271 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ZeroPad2d[static_padding]/pad_0" [label="(1, 1152, 5, 5)", style=solid]; -"268 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateWeight[3]/ElasticKernelConv2DOp[op]/linear_0" -> "269 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateWeight[3]/ElasticKernelConv2DOp[op]/view_0" [label="(1152, 9)", style=solid]; -"269 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateWeight[3]/ElasticKernelConv2DOp[op]/view_0" -> "270 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateWeight[3]/ElasticKernelConv2DOp[op]/view_1" [label="(1152, 1, 9)", style=solid]; -"270 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateWeight[3]/ElasticKernelConv2DOp[op]/view_1" -> "272 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/conv2d_0" [label="(1152, 1, 3, 3)", style=solid]; -"271 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ZeroPad2d[static_padding]/pad_0" -> "272 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/conv2d_0" [label="(1, 1152, 9, 9)", style=solid]; -"272 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/conv2d_0" -> "273 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFBatchNorm2d[_bn1]/batch_norm_0" [label="(1, 1152, 7, 7)", style=solid]; -"273 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFBatchNorm2d[_bn1]/batch_norm_0" -> "274 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/MemoryEfficientSwish[_swish]/sigmoid_1" [label="(1, 1152, 7, 7)", style=solid]; -"273 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFBatchNorm2d[_bn1]/batch_norm_0" -> "275 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/MemoryEfficientSwish[_swish]/__mul___1" [label="(1, 1152, 7, 7)", style=solid]; -"274 
EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/MemoryEfficientSwish[_swish]/sigmoid_1" -> "275 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/MemoryEfficientSwish[_swish]/__mul___1" [label="(1, 1152, 7, 7)", style=solid]; -"275 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/MemoryEfficientSwish[_swish]/__mul___1" -> "276 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/adaptive_avg_pool2d_0" [label="(1, 1152, 7, 7)", style=solid]; -"275 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/MemoryEfficientSwish[_swish]/__mul___1" -> "282 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/__mul___0" [label="(1, 1152, 7, 7)", style=solid]; -"276 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/adaptive_avg_pool2d_0" -> "277 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" [label="(1, 1152, 1, 1)", style=solid]; -"277 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" -> "278 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/MemoryEfficientSwish[_swish]/sigmoid_2" [label="(1, 48, 1, 1)", style=solid]; -"277 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" -> "279 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/MemoryEfficientSwish[_swish]/__mul___2" [label="(1, 48, 1, 1)", style=solid]; -"278 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/MemoryEfficientSwish[_swish]/sigmoid_2" -> "279 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/MemoryEfficientSwish[_swish]/__mul___2" [label="(1, 48, 1, 1)", style=solid]; -"279 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/MemoryEfficientSwish[_swish]/__mul___2" -> "280 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFUserConv2dStaticSamePadding[_se_expand]/conv2d_0" [label="(1, 48, 1, 1)", style=solid]; -"280 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFUserConv2dStaticSamePadding[_se_expand]/conv2d_0" -> "281 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/sigmoid_0" [label="(1, 1152, 1, 1)", style=solid]; -"281 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/sigmoid_0" -> "282 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/__mul___0" [label="(1, 1152, 1, 1)", style=solid]; -"282 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/__mul___0" -> "283 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFUserConv2dStaticSamePadding[_project_conv]/conv2d_0" [label="(1, 1152, 7, 7)", style=solid]; -"283 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFUserConv2dStaticSamePadding[_project_conv]/conv2d_0" -> "284 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFBatchNorm2d[_bn2]/batch_norm_0" [label="(1, 192, 7, 7)", style=solid]; -"284 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFBatchNorm2d[_bn2]/batch_norm_0" -> "285 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/__add___0" [label="(1, 192, 7, 7)", style=solid]; -"285 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/__add___0" -> "286 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/NNCFUserConv2dStaticSamePadding[_expand_conv]/conv2d_0" [label="(1, 192, 7, 7)", style=solid]; -"285 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/__add___0" -> "305 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/__add___0" [label="(1, 192, 7, 7)", style=solid]; -"286 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/NNCFUserConv2dStaticSamePadding[_expand_conv]/conv2d_0" -> "287 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/NNCFBatchNorm2d[_bn0]/batch_norm_0" [label="(1, 1152, 7, 7)", style=solid]; -"287 
EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/NNCFBatchNorm2d[_bn0]/batch_norm_0" -> "288 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/MemoryEfficientSwish[_swish]/sigmoid_0" [label="(1, 1152, 7, 7)", style=solid]; -"287 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/NNCFBatchNorm2d[_bn0]/batch_norm_0" -> "289 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/MemoryEfficientSwish[_swish]/__mul___0" [label="(1, 1152, 7, 7)", style=solid]; -"288 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/MemoryEfficientSwish[_swish]/sigmoid_0" -> "289 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/MemoryEfficientSwish[_swish]/__mul___0" [label="(1, 1152, 7, 7)", style=solid]; -"289 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/MemoryEfficientSwish[_swish]/__mul___0" -> "290 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateInputs[1]/__getitem___0" [label="(1, 1152, 7, 7)", style=solid]; -"290 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateInputs[1]/__getitem___0" -> "291 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ZeroPad2d[static_padding]/pad_0" [label="(1, 1152, 7, 7)", style=solid]; -"291 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ZeroPad2d[static_padding]/pad_0" -> "292 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/conv2d_0" [label="(1, 1152, 11, 11)", style=solid]; -"292 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/conv2d_0" -> "293 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/NNCFBatchNorm2d[_bn1]/batch_norm_0" [label="(1, 1152, 7, 7)", style=solid]; -"293 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/NNCFBatchNorm2d[_bn1]/batch_norm_0" -> "294 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/MemoryEfficientSwish[_swish]/sigmoid_1" [label="(1, 1152, 7, 7)", style=solid]; -"293 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/NNCFBatchNorm2d[_bn1]/batch_norm_0" -> "295 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/MemoryEfficientSwish[_swish]/__mul___1" [label="(1, 1152, 7, 7)", style=solid]; -"294 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/MemoryEfficientSwish[_swish]/sigmoid_1" -> "295 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/MemoryEfficientSwish[_swish]/__mul___1" [label="(1, 1152, 7, 7)", style=solid]; -"295 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/MemoryEfficientSwish[_swish]/__mul___1" -> "296 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/adaptive_avg_pool2d_0" [label="(1, 1152, 7, 7)", style=solid]; -"295 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/MemoryEfficientSwish[_swish]/__mul___1" -> "302 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/__mul___0" [label="(1, 1152, 7, 7)", style=solid]; -"296 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/adaptive_avg_pool2d_0" -> "297 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" [label="(1, 1152, 1, 1)", style=solid]; -"297 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" -> "298 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/MemoryEfficientSwish[_swish]/sigmoid_2" [label="(1, 48, 1, 1)", style=solid]; -"297 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" -> "299 
EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/MemoryEfficientSwish[_swish]/__mul___2" [label="(1, 48, 1, 1)", style=solid]; -"298 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/MemoryEfficientSwish[_swish]/sigmoid_2" -> "299 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/MemoryEfficientSwish[_swish]/__mul___2" [label="(1, 48, 1, 1)", style=solid]; -"299 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/MemoryEfficientSwish[_swish]/__mul___2" -> "300 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/NNCFUserConv2dStaticSamePadding[_se_expand]/conv2d_0" [label="(1, 48, 1, 1)", style=solid]; -"300 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/NNCFUserConv2dStaticSamePadding[_se_expand]/conv2d_0" -> "301 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/sigmoid_0" [label="(1, 1152, 1, 1)", style=solid]; -"301 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/sigmoid_0" -> "302 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/__mul___0" [label="(1, 1152, 1, 1)", style=solid]; -"302 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/__mul___0" -> "303 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/NNCFUserConv2dStaticSamePadding[_project_conv]/conv2d_0" [label="(1, 1152, 7, 7)", style=solid]; -"303 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/NNCFUserConv2dStaticSamePadding[_project_conv]/conv2d_0" -> "304 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/NNCFBatchNorm2d[_bn2]/batch_norm_0" [label="(1, 192, 7, 7)", style=solid]; -"304 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/NNCFBatchNorm2d[_bn2]/batch_norm_0" -> "305 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/__add___0" [label="(1, 192, 7, 7)", style=solid]; -"305 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/__add___0" -> "306 EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/NNCFUserConv2dStaticSamePadding[_expand_conv]/conv2d_0" [label="(1, 192, 7, 7)", style=solid]; -"306 EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/NNCFUserConv2dStaticSamePadding[_expand_conv]/conv2d_0" -> "307 EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/NNCFBatchNorm2d[_bn0]/batch_norm_0" [label="(1, 1152, 7, 7)", style=solid]; -"307 EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/NNCFBatchNorm2d[_bn0]/batch_norm_0" -> "308 EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/MemoryEfficientSwish[_swish]/sigmoid_0" [label="(1, 1152, 7, 7)", style=solid]; -"307 EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/NNCFBatchNorm2d[_bn0]/batch_norm_0" -> "309 EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/MemoryEfficientSwish[_swish]/__mul___0" [label="(1, 1152, 7, 7)", style=solid]; -"308 EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/MemoryEfficientSwish[_swish]/sigmoid_0" -> "309 EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/MemoryEfficientSwish[_swish]/__mul___0" [label="(1, 1152, 7, 7)", style=solid]; -"309 EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/MemoryEfficientSwish[_swish]/__mul___0" -> "310 EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateInputs[1]/__getitem___0" [label="(1, 1152, 7, 7)", style=solid]; -"310 EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateInputs[1]/__getitem___0" -> "311 EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ZeroPad2d[static_padding]/pad_0" [label="(1, 1152, 7, 7)", style=solid]; -"311 
EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ZeroPad2d[static_padding]/pad_0" -> "312 EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/conv2d_0" [label="(1, 1152, 9, 9)", style=solid]; -"312 EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/conv2d_0" -> "313 EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/NNCFBatchNorm2d[_bn1]/batch_norm_0" [label="(1, 1152, 7, 7)", style=solid]; -"313 EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/NNCFBatchNorm2d[_bn1]/batch_norm_0" -> "314 EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/MemoryEfficientSwish[_swish]/sigmoid_1" [label="(1, 1152, 7, 7)", style=solid]; -"313 EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/NNCFBatchNorm2d[_bn1]/batch_norm_0" -> "315 EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/MemoryEfficientSwish[_swish]/__mul___1" [label="(1, 1152, 7, 7)", style=solid]; -"314 EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/MemoryEfficientSwish[_swish]/sigmoid_1" -> "315 EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/MemoryEfficientSwish[_swish]/__mul___1" [label="(1, 1152, 7, 7)", style=solid]; -"315 EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/MemoryEfficientSwish[_swish]/__mul___1" -> "316 EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/adaptive_avg_pool2d_0" [label="(1, 1152, 7, 7)", style=solid]; -"315 EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/MemoryEfficientSwish[_swish]/__mul___1" -> "322 EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/__mul___0" [label="(1, 1152, 7, 7)", style=solid]; -"316 EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/adaptive_avg_pool2d_0" -> "317 EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" [label="(1, 1152, 1, 1)", style=solid]; -"317 EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" -> "318 EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/MemoryEfficientSwish[_swish]/sigmoid_2" [label="(1, 48, 1, 1)", style=solid]; -"317 EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" -> "319 EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/MemoryEfficientSwish[_swish]/__mul___2" [label="(1, 48, 1, 1)", style=solid]; -"318 EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/MemoryEfficientSwish[_swish]/sigmoid_2" -> "319 EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/MemoryEfficientSwish[_swish]/__mul___2" [label="(1, 48, 1, 1)", style=solid]; -"319 EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/MemoryEfficientSwish[_swish]/__mul___2" -> "320 EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/NNCFUserConv2dStaticSamePadding[_se_expand]/conv2d_0" [label="(1, 48, 1, 1)", style=solid]; -"320 EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/NNCFUserConv2dStaticSamePadding[_se_expand]/conv2d_0" -> "321 EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/sigmoid_0" [label="(1, 1152, 1, 1)", style=solid]; -"321 EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/sigmoid_0" -> "322 EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/__mul___0" [label="(1, 1152, 1, 1)", style=solid]; -"322 EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/__mul___0" -> "323 EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/NNCFUserConv2dStaticSamePadding[_project_conv]/conv2d_0" [label="(1, 1152, 7, 7)", style=solid]; -"323 EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/NNCFUserConv2dStaticSamePadding[_project_conv]/conv2d_0" 
-> "324 EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/NNCFBatchNorm2d[_bn2]/batch_norm_0" [label="(1, 320, 7, 7)", style=solid]; -"324 EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/NNCFBatchNorm2d[_bn2]/batch_norm_0" -> "325 EfficientNet/NNCFUserConv2dStaticSamePadding[_conv_head]/conv2d_0" [label="(1, 320, 7, 7)", style=solid]; -"325 EfficientNet/NNCFUserConv2dStaticSamePadding[_conv_head]/conv2d_0" -> "326 EfficientNet/NNCFBatchNorm2d[_bn1]/batch_norm_0" [label="(1, 1280, 7, 7)", style=solid]; -"326 EfficientNet/NNCFBatchNorm2d[_bn1]/batch_norm_0" -> "327 EfficientNet/MemoryEfficientSwish[_swish]/sigmoid_1" [label="(1, 1280, 7, 7)", style=solid]; -"326 EfficientNet/NNCFBatchNorm2d[_bn1]/batch_norm_0" -> "328 EfficientNet/MemoryEfficientSwish[_swish]/__mul___1" [label="(1, 1280, 7, 7)", style=solid]; -"327 EfficientNet/MemoryEfficientSwish[_swish]/sigmoid_1" -> "328 EfficientNet/MemoryEfficientSwish[_swish]/__mul___1" [label="(1, 1280, 7, 7)", style=solid]; -"328 EfficientNet/MemoryEfficientSwish[_swish]/__mul___1" -> "329 EfficientNet/AdaptiveAvgPool2d[_avg_pooling]/adaptive_avg_pool2d_0" [label="(1, 1280, 7, 7)", style=solid]; -"329 EfficientNet/AdaptiveAvgPool2d[_avg_pooling]/adaptive_avg_pool2d_0" -> "330 EfficientNet/flatten_0" [label="(1, 1280, 1, 1)", style=solid]; -"330 EfficientNet/flatten_0" -> "331 EfficientNet/Dropout[_dropout]/dropout_0" [label="(1, 1280)", style=solid]; -"331 EfficientNet/Dropout[_dropout]/dropout_0" -> "332 EfficientNet/NNCFLinear[_fc]/linear_0" [label="(1, 1280)", style=solid]; -"332 EfficientNet/NNCFLinear[_fc]/linear_0" -> "333 /nncf_model_output_0" [label="(1, 1000)", style=solid]; +"65 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateInputs[1]/__getitem___0" -> "66 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ZeroPad2d[static_padding]/pad_0" [label="(1, 144, 60, 60)", style=solid]; +"66 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ZeroPad2d[static_padding]/pad_0" -> "67 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/conv2d_0" [label="(1, 144, 63, 63)", style=solid]; +"67 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/conv2d_0" -> "68 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/NNCFBatchNorm2d[_bn1]/batch_norm_0" [label="(1, 144, 30, 30)", style=solid]; +"68 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/NNCFBatchNorm2d[_bn1]/batch_norm_0" -> "69 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/MemoryEfficientSwish[_swish]/sigmoid_1" [label="(1, 144, 30, 30)", style=solid]; +"68 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/NNCFBatchNorm2d[_bn1]/batch_norm_0" -> "70 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/MemoryEfficientSwish[_swish]/__mul___1" [label="(1, 144, 30, 30)", style=solid]; +"69 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/MemoryEfficientSwish[_swish]/sigmoid_1" -> "70 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/MemoryEfficientSwish[_swish]/__mul___1" [label="(1, 144, 30, 30)", style=solid]; +"70 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/MemoryEfficientSwish[_swish]/__mul___1" -> "71 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/adaptive_avg_pool2d_0" [label="(1, 144, 30, 30)", style=solid]; +"70 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/MemoryEfficientSwish[_swish]/__mul___1" -> "77 
EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/__mul___0" [label="(1, 144, 30, 30)", style=solid]; +"71 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/adaptive_avg_pool2d_0" -> "72 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" [label="(1, 144, 1, 1)", style=solid]; +"72 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" -> "73 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/MemoryEfficientSwish[_swish]/sigmoid_2" [label="(1, 6, 1, 1)", style=solid]; +"72 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" -> "74 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/MemoryEfficientSwish[_swish]/__mul___2" [label="(1, 6, 1, 1)", style=solid]; +"73 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/MemoryEfficientSwish[_swish]/sigmoid_2" -> "74 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/MemoryEfficientSwish[_swish]/__mul___2" [label="(1, 6, 1, 1)", style=solid]; +"74 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/MemoryEfficientSwish[_swish]/__mul___2" -> "75 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/NNCFUserConv2dStaticSamePadding[_se_expand]/conv2d_0" [label="(1, 6, 1, 1)", style=solid]; +"75 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/NNCFUserConv2dStaticSamePadding[_se_expand]/conv2d_0" -> "76 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/sigmoid_0" [label="(1, 144, 1, 1)", style=solid]; +"76 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/sigmoid_0" -> "77 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/__mul___0" [label="(1, 144, 1, 1)", style=solid]; +"77 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/__mul___0" -> "78 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/NNCFUserConv2dStaticSamePadding[_project_conv]/conv2d_0" [label="(1, 144, 30, 30)", style=solid]; +"78 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/NNCFUserConv2dStaticSamePadding[_project_conv]/conv2d_0" -> "79 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/NNCFBatchNorm2d[_bn2]/batch_norm_0" [label="(1, 40, 30, 30)", style=solid]; +"79 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/NNCFBatchNorm2d[_bn2]/batch_norm_0" -> "80 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/NNCFUserConv2dStaticSamePadding[_expand_conv]/conv2d_0" [label="(1, 40, 30, 30)", style=solid]; +"79 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/NNCFBatchNorm2d[_bn2]/batch_norm_0" -> "99 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/__add___0" [label="(1, 40, 30, 30)", style=solid]; +"80 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/NNCFUserConv2dStaticSamePadding[_expand_conv]/conv2d_0" -> "81 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/NNCFBatchNorm2d[_bn0]/batch_norm_0" [label="(1, 240, 30, 30)", style=solid]; +"81 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/NNCFBatchNorm2d[_bn0]/batch_norm_0" -> "82 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/MemoryEfficientSwish[_swish]/sigmoid_0" [label="(1, 240, 30, 30)", style=solid]; +"81 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/NNCFBatchNorm2d[_bn0]/batch_norm_0" -> "83 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/MemoryEfficientSwish[_swish]/__mul___0" [label="(1, 240, 30, 30)", style=solid]; +"82 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/MemoryEfficientSwish[_swish]/sigmoid_0" -> "83 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/MemoryEfficientSwish[_swish]/__mul___0" [label="(1, 240, 30, 30)", style=solid]; +"83 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/MemoryEfficientSwish[_swish]/__mul___0" 
-> "84 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateInputs[1]/__getitem___0" [label="(1, 240, 30, 30)", style=solid]; +"84 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateInputs[1]/__getitem___0" -> "85 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ZeroPad2d[static_padding]/pad_0" [label="(1, 240, 30, 30)", style=solid]; +"85 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ZeroPad2d[static_padding]/pad_0" -> "86 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/conv2d_0" [label="(1, 240, 34, 34)", style=solid]; +"86 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/conv2d_0" -> "87 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/NNCFBatchNorm2d[_bn1]/batch_norm_0" [label="(1, 240, 30, 30)", style=solid]; +"87 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/NNCFBatchNorm2d[_bn1]/batch_norm_0" -> "88 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/MemoryEfficientSwish[_swish]/sigmoid_1" [label="(1, 240, 30, 30)", style=solid]; +"87 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/NNCFBatchNorm2d[_bn1]/batch_norm_0" -> "89 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/MemoryEfficientSwish[_swish]/__mul___1" [label="(1, 240, 30, 30)", style=solid]; +"88 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/MemoryEfficientSwish[_swish]/sigmoid_1" -> "89 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/MemoryEfficientSwish[_swish]/__mul___1" [label="(1, 240, 30, 30)", style=solid]; +"89 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/MemoryEfficientSwish[_swish]/__mul___1" -> "90 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/adaptive_avg_pool2d_0" [label="(1, 240, 30, 30)", style=solid]; +"89 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/MemoryEfficientSwish[_swish]/__mul___1" -> "96 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/__mul___0" [label="(1, 240, 30, 30)", style=solid]; +"90 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/adaptive_avg_pool2d_0" -> "91 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" [label="(1, 240, 1, 1)", style=solid]; +"91 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" -> "92 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/MemoryEfficientSwish[_swish]/sigmoid_2" [label="(1, 10, 1, 1)", style=solid]; +"91 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" -> "93 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/MemoryEfficientSwish[_swish]/__mul___2" [label="(1, 10, 1, 1)", style=solid]; +"92 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/MemoryEfficientSwish[_swish]/sigmoid_2" -> "93 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/MemoryEfficientSwish[_swish]/__mul___2" [label="(1, 10, 1, 1)", style=solid]; +"93 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/MemoryEfficientSwish[_swish]/__mul___2" -> "94 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/NNCFUserConv2dStaticSamePadding[_se_expand]/conv2d_0" [label="(1, 10, 1, 1)", style=solid]; +"94 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/NNCFUserConv2dStaticSamePadding[_se_expand]/conv2d_0" -> "95 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/sigmoid_0" [label="(1, 240, 1, 1)", style=solid]; +"95 
EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/sigmoid_0" -> "96 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/__mul___0" [label="(1, 240, 1, 1)", style=solid]; +"96 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/__mul___0" -> "97 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/NNCFUserConv2dStaticSamePadding[_project_conv]/conv2d_0" [label="(1, 240, 30, 30)", style=solid]; +"97 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/NNCFUserConv2dStaticSamePadding[_project_conv]/conv2d_0" -> "98 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/NNCFBatchNorm2d[_bn2]/batch_norm_0" [label="(1, 40, 30, 30)", style=solid]; +"98 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/NNCFBatchNorm2d[_bn2]/batch_norm_0" -> "99 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/__add___0" [label="(1, 40, 30, 30)", style=solid]; +"99 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/__add___0" -> "100 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/NNCFUserConv2dStaticSamePadding[_expand_conv]/conv2d_0" [label="(1, 40, 30, 30)", style=solid]; +"100 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/NNCFUserConv2dStaticSamePadding[_expand_conv]/conv2d_0" -> "101 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/NNCFBatchNorm2d[_bn0]/batch_norm_0" [label="(1, 240, 30, 30)", style=solid]; +"101 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/NNCFBatchNorm2d[_bn0]/batch_norm_0" -> "102 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/MemoryEfficientSwish[_swish]/sigmoid_0" [label="(1, 240, 30, 30)", style=solid]; +"101 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/NNCFBatchNorm2d[_bn0]/batch_norm_0" -> "103 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/MemoryEfficientSwish[_swish]/__mul___0" [label="(1, 240, 30, 30)", style=solid]; +"102 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/MemoryEfficientSwish[_swish]/sigmoid_0" -> "103 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/MemoryEfficientSwish[_swish]/__mul___0" [label="(1, 240, 30, 30)", style=solid]; +"103 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/MemoryEfficientSwish[_swish]/__mul___0" -> "104 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateInputs[1]/__getitem___0" [label="(1, 240, 30, 30)", style=solid]; +"104 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateInputs[1]/__getitem___0" -> "105 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ZeroPad2d[static_padding]/pad_0" [label="(1, 240, 30, 30)", style=solid]; +"105 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ZeroPad2d[static_padding]/pad_0" -> "106 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/conv2d_0" [label="(1, 240, 31, 31)", style=solid]; +"106 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/conv2d_0" -> "107 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/NNCFBatchNorm2d[_bn1]/batch_norm_0" [label="(1, 240, 15, 15)", style=solid]; +"107 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/NNCFBatchNorm2d[_bn1]/batch_norm_0" -> "108 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/MemoryEfficientSwish[_swish]/sigmoid_1" [label="(1, 240, 15, 15)", style=solid]; +"107 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/NNCFBatchNorm2d[_bn1]/batch_norm_0" -> "109 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/MemoryEfficientSwish[_swish]/__mul___1" [label="(1, 240, 
15, 15)", style=solid]; +"108 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/MemoryEfficientSwish[_swish]/sigmoid_1" -> "109 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/MemoryEfficientSwish[_swish]/__mul___1" [label="(1, 240, 15, 15)", style=solid]; +"109 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/MemoryEfficientSwish[_swish]/__mul___1" -> "110 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/adaptive_avg_pool2d_0" [label="(1, 240, 15, 15)", style=solid]; +"109 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/MemoryEfficientSwish[_swish]/__mul___1" -> "116 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/__mul___0" [label="(1, 240, 15, 15)", style=solid]; +"110 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/adaptive_avg_pool2d_0" -> "111 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" [label="(1, 240, 1, 1)", style=solid]; +"111 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" -> "112 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/MemoryEfficientSwish[_swish]/sigmoid_2" [label="(1, 10, 1, 1)", style=solid]; +"111 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" -> "113 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/MemoryEfficientSwish[_swish]/__mul___2" [label="(1, 10, 1, 1)", style=solid]; +"112 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/MemoryEfficientSwish[_swish]/sigmoid_2" -> "113 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/MemoryEfficientSwish[_swish]/__mul___2" [label="(1, 10, 1, 1)", style=solid]; +"113 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/MemoryEfficientSwish[_swish]/__mul___2" -> "114 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/NNCFUserConv2dStaticSamePadding[_se_expand]/conv2d_0" [label="(1, 10, 1, 1)", style=solid]; +"114 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/NNCFUserConv2dStaticSamePadding[_se_expand]/conv2d_0" -> "115 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/sigmoid_0" [label="(1, 240, 1, 1)", style=solid]; +"115 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/sigmoid_0" -> "116 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/__mul___0" [label="(1, 240, 1, 1)", style=solid]; +"116 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/__mul___0" -> "117 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/NNCFUserConv2dStaticSamePadding[_project_conv]/conv2d_0" [label="(1, 240, 15, 15)", style=solid]; +"117 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/NNCFUserConv2dStaticSamePadding[_project_conv]/conv2d_0" -> "118 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/NNCFBatchNorm2d[_bn2]/batch_norm_0" [label="(1, 80, 15, 15)", style=solid]; +"118 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/NNCFBatchNorm2d[_bn2]/batch_norm_0" -> "119 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/NNCFUserConv2dStaticSamePadding[_expand_conv]/conv2d_0" [label="(1, 80, 15, 15)", style=solid]; +"118 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/NNCFBatchNorm2d[_bn2]/batch_norm_0" -> "138 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/__add___0" [label="(1, 80, 15, 15)", style=solid]; +"119 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/NNCFUserConv2dStaticSamePadding[_expand_conv]/conv2d_0" -> "120 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/NNCFBatchNorm2d[_bn0]/batch_norm_0" [label="(1, 480, 15, 15)", style=solid]; +"120 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/NNCFBatchNorm2d[_bn0]/batch_norm_0" -> "121 
EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/MemoryEfficientSwish[_swish]/sigmoid_0" [label="(1, 480, 15, 15)", style=solid]; +"120 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/NNCFBatchNorm2d[_bn0]/batch_norm_0" -> "122 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/MemoryEfficientSwish[_swish]/__mul___0" [label="(1, 480, 15, 15)", style=solid]; +"121 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/MemoryEfficientSwish[_swish]/sigmoid_0" -> "122 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/MemoryEfficientSwish[_swish]/__mul___0" [label="(1, 480, 15, 15)", style=solid]; +"122 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/MemoryEfficientSwish[_swish]/__mul___0" -> "123 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateInputs[1]/__getitem___0" [label="(1, 480, 15, 15)", style=solid]; +"123 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateInputs[1]/__getitem___0" -> "124 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ZeroPad2d[static_padding]/pad_0" [label="(1, 480, 15, 15)", style=solid]; +"124 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ZeroPad2d[static_padding]/pad_0" -> "125 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/conv2d_0" [label="(1, 480, 17, 17)", style=solid]; +"125 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/conv2d_0" -> "126 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/NNCFBatchNorm2d[_bn1]/batch_norm_0" [label="(1, 480, 15, 15)", style=solid]; +"126 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/NNCFBatchNorm2d[_bn1]/batch_norm_0" -> "127 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/MemoryEfficientSwish[_swish]/sigmoid_1" [label="(1, 480, 15, 15)", style=solid]; +"126 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/NNCFBatchNorm2d[_bn1]/batch_norm_0" -> "128 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/MemoryEfficientSwish[_swish]/__mul___1" [label="(1, 480, 15, 15)", style=solid]; +"127 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/MemoryEfficientSwish[_swish]/sigmoid_1" -> "128 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/MemoryEfficientSwish[_swish]/__mul___1" [label="(1, 480, 15, 15)", style=solid]; +"128 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/MemoryEfficientSwish[_swish]/__mul___1" -> "129 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/adaptive_avg_pool2d_0" [label="(1, 480, 15, 15)", style=solid]; +"128 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/MemoryEfficientSwish[_swish]/__mul___1" -> "135 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/__mul___0" [label="(1, 480, 15, 15)", style=solid]; +"129 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/adaptive_avg_pool2d_0" -> "130 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" [label="(1, 480, 1, 1)", style=solid]; +"130 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" -> "131 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/MemoryEfficientSwish[_swish]/sigmoid_2" [label="(1, 20, 1, 1)", style=solid]; +"130 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" -> "132 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/MemoryEfficientSwish[_swish]/__mul___2" [label="(1, 20, 1, 1)", 
style=solid]; +"131 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/MemoryEfficientSwish[_swish]/sigmoid_2" -> "132 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/MemoryEfficientSwish[_swish]/__mul___2" [label="(1, 20, 1, 1)", style=solid]; +"132 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/MemoryEfficientSwish[_swish]/__mul___2" -> "133 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/NNCFUserConv2dStaticSamePadding[_se_expand]/conv2d_0" [label="(1, 20, 1, 1)", style=solid]; +"133 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/NNCFUserConv2dStaticSamePadding[_se_expand]/conv2d_0" -> "134 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/sigmoid_0" [label="(1, 480, 1, 1)", style=solid]; +"134 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/sigmoid_0" -> "135 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/__mul___0" [label="(1, 480, 1, 1)", style=solid]; +"135 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/__mul___0" -> "136 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/NNCFUserConv2dStaticSamePadding[_project_conv]/conv2d_0" [label="(1, 480, 15, 15)", style=solid]; +"136 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/NNCFUserConv2dStaticSamePadding[_project_conv]/conv2d_0" -> "137 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/NNCFBatchNorm2d[_bn2]/batch_norm_0" [label="(1, 80, 15, 15)", style=solid]; +"137 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/NNCFBatchNorm2d[_bn2]/batch_norm_0" -> "138 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/__add___0" [label="(1, 80, 15, 15)", style=solid]; +"138 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/__add___0" -> "139 EfficientNet/ModuleList[_blocks]/MBConvBlock[7]/NNCFUserConv2dStaticSamePadding[_expand_conv]/conv2d_0" [label="(1, 80, 15, 15)", style=solid]; +"138 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/__add___0" -> "158 EfficientNet/ModuleList[_blocks]/MBConvBlock[7]/__add___0" [label="(1, 80, 15, 15)", style=solid]; +"139 EfficientNet/ModuleList[_blocks]/MBConvBlock[7]/NNCFUserConv2dStaticSamePadding[_expand_conv]/conv2d_0" -> "140 EfficientNet/ModuleList[_blocks]/MBConvBlock[7]/NNCFBatchNorm2d[_bn0]/batch_norm_0" [label="(1, 480, 15, 15)", style=solid]; +"140 EfficientNet/ModuleList[_blocks]/MBConvBlock[7]/NNCFBatchNorm2d[_bn0]/batch_norm_0" -> "141 EfficientNet/ModuleList[_blocks]/MBConvBlock[7]/MemoryEfficientSwish[_swish]/sigmoid_0" [label="(1, 480, 15, 15)", style=solid]; +"140 EfficientNet/ModuleList[_blocks]/MBConvBlock[7]/NNCFBatchNorm2d[_bn0]/batch_norm_0" -> "142 EfficientNet/ModuleList[_blocks]/MBConvBlock[7]/MemoryEfficientSwish[_swish]/__mul___0" [label="(1, 480, 15, 15)", style=solid]; +"141 EfficientNet/ModuleList[_blocks]/MBConvBlock[7]/MemoryEfficientSwish[_swish]/sigmoid_0" -> "142 EfficientNet/ModuleList[_blocks]/MBConvBlock[7]/MemoryEfficientSwish[_swish]/__mul___0" [label="(1, 480, 15, 15)", style=solid]; +"142 EfficientNet/ModuleList[_blocks]/MBConvBlock[7]/MemoryEfficientSwish[_swish]/__mul___0" -> "143 EfficientNet/ModuleList[_blocks]/MBConvBlock[7]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateInputs[1]/__getitem___0" [label="(1, 480, 15, 15)", style=solid]; +"143 EfficientNet/ModuleList[_blocks]/MBConvBlock[7]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateInputs[1]/__getitem___0" -> "144 EfficientNet/ModuleList[_blocks]/MBConvBlock[7]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ZeroPad2d[static_padding]/pad_0" [label="(1, 480, 15, 15)", style=solid]; +"144 
EfficientNet/ModuleList[_blocks]/MBConvBlock[7]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ZeroPad2d[static_padding]/pad_0" -> "145 EfficientNet/ModuleList[_blocks]/MBConvBlock[7]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/conv2d_0" [label="(1, 480, 17, 17)", style=solid]; +"145 EfficientNet/ModuleList[_blocks]/MBConvBlock[7]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/conv2d_0" -> "146 EfficientNet/ModuleList[_blocks]/MBConvBlock[7]/NNCFBatchNorm2d[_bn1]/batch_norm_0" [label="(1, 480, 15, 15)", style=solid]; +"146 EfficientNet/ModuleList[_blocks]/MBConvBlock[7]/NNCFBatchNorm2d[_bn1]/batch_norm_0" -> "147 EfficientNet/ModuleList[_blocks]/MBConvBlock[7]/MemoryEfficientSwish[_swish]/sigmoid_1" [label="(1, 480, 15, 15)", style=solid]; +"146 EfficientNet/ModuleList[_blocks]/MBConvBlock[7]/NNCFBatchNorm2d[_bn1]/batch_norm_0" -> "148 EfficientNet/ModuleList[_blocks]/MBConvBlock[7]/MemoryEfficientSwish[_swish]/__mul___1" [label="(1, 480, 15, 15)", style=solid]; +"147 EfficientNet/ModuleList[_blocks]/MBConvBlock[7]/MemoryEfficientSwish[_swish]/sigmoid_1" -> "148 EfficientNet/ModuleList[_blocks]/MBConvBlock[7]/MemoryEfficientSwish[_swish]/__mul___1" [label="(1, 480, 15, 15)", style=solid]; +"148 EfficientNet/ModuleList[_blocks]/MBConvBlock[7]/MemoryEfficientSwish[_swish]/__mul___1" -> "149 EfficientNet/ModuleList[_blocks]/MBConvBlock[7]/adaptive_avg_pool2d_0" [label="(1, 480, 15, 15)", style=solid]; +"148 EfficientNet/ModuleList[_blocks]/MBConvBlock[7]/MemoryEfficientSwish[_swish]/__mul___1" -> "155 EfficientNet/ModuleList[_blocks]/MBConvBlock[7]/__mul___0" [label="(1, 480, 15, 15)", style=solid]; +"149 EfficientNet/ModuleList[_blocks]/MBConvBlock[7]/adaptive_avg_pool2d_0" -> "150 EfficientNet/ModuleList[_blocks]/MBConvBlock[7]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" [label="(1, 480, 1, 1)", style=solid]; +"150 EfficientNet/ModuleList[_blocks]/MBConvBlock[7]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" -> "151 EfficientNet/ModuleList[_blocks]/MBConvBlock[7]/MemoryEfficientSwish[_swish]/sigmoid_2" [label="(1, 20, 1, 1)", style=solid]; +"150 EfficientNet/ModuleList[_blocks]/MBConvBlock[7]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" -> "152 EfficientNet/ModuleList[_blocks]/MBConvBlock[7]/MemoryEfficientSwish[_swish]/__mul___2" [label="(1, 20, 1, 1)", style=solid]; +"151 EfficientNet/ModuleList[_blocks]/MBConvBlock[7]/MemoryEfficientSwish[_swish]/sigmoid_2" -> "152 EfficientNet/ModuleList[_blocks]/MBConvBlock[7]/MemoryEfficientSwish[_swish]/__mul___2" [label="(1, 20, 1, 1)", style=solid]; +"152 EfficientNet/ModuleList[_blocks]/MBConvBlock[7]/MemoryEfficientSwish[_swish]/__mul___2" -> "153 EfficientNet/ModuleList[_blocks]/MBConvBlock[7]/NNCFUserConv2dStaticSamePadding[_se_expand]/conv2d_0" [label="(1, 20, 1, 1)", style=solid]; +"153 EfficientNet/ModuleList[_blocks]/MBConvBlock[7]/NNCFUserConv2dStaticSamePadding[_se_expand]/conv2d_0" -> "154 EfficientNet/ModuleList[_blocks]/MBConvBlock[7]/sigmoid_0" [label="(1, 480, 1, 1)", style=solid]; +"154 EfficientNet/ModuleList[_blocks]/MBConvBlock[7]/sigmoid_0" -> "155 EfficientNet/ModuleList[_blocks]/MBConvBlock[7]/__mul___0" [label="(1, 480, 1, 1)", style=solid]; +"155 EfficientNet/ModuleList[_blocks]/MBConvBlock[7]/__mul___0" -> "156 EfficientNet/ModuleList[_blocks]/MBConvBlock[7]/NNCFUserConv2dStaticSamePadding[_project_conv]/conv2d_0" [label="(1, 480, 15, 15)", style=solid]; +"156 EfficientNet/ModuleList[_blocks]/MBConvBlock[7]/NNCFUserConv2dStaticSamePadding[_project_conv]/conv2d_0" -> "157 
EfficientNet/ModuleList[_blocks]/MBConvBlock[7]/NNCFBatchNorm2d[_bn2]/batch_norm_0" [label="(1, 80, 15, 15)", style=solid]; +"157 EfficientNet/ModuleList[_blocks]/MBConvBlock[7]/NNCFBatchNorm2d[_bn2]/batch_norm_0" -> "158 EfficientNet/ModuleList[_blocks]/MBConvBlock[7]/__add___0" [label="(1, 80, 15, 15)", style=solid]; +"158 EfficientNet/ModuleList[_blocks]/MBConvBlock[7]/__add___0" -> "159 EfficientNet/ModuleList[_blocks]/MBConvBlock[8]/NNCFUserConv2dStaticSamePadding[_expand_conv]/conv2d_0" [label="(1, 80, 15, 15)", style=solid]; +"159 EfficientNet/ModuleList[_blocks]/MBConvBlock[8]/NNCFUserConv2dStaticSamePadding[_expand_conv]/conv2d_0" -> "160 EfficientNet/ModuleList[_blocks]/MBConvBlock[8]/NNCFBatchNorm2d[_bn0]/batch_norm_0" [label="(1, 480, 15, 15)", style=solid]; +"160 EfficientNet/ModuleList[_blocks]/MBConvBlock[8]/NNCFBatchNorm2d[_bn0]/batch_norm_0" -> "161 EfficientNet/ModuleList[_blocks]/MBConvBlock[8]/MemoryEfficientSwish[_swish]/sigmoid_0" [label="(1, 480, 15, 15)", style=solid]; +"160 EfficientNet/ModuleList[_blocks]/MBConvBlock[8]/NNCFBatchNorm2d[_bn0]/batch_norm_0" -> "162 EfficientNet/ModuleList[_blocks]/MBConvBlock[8]/MemoryEfficientSwish[_swish]/__mul___0" [label="(1, 480, 15, 15)", style=solid]; +"161 EfficientNet/ModuleList[_blocks]/MBConvBlock[8]/MemoryEfficientSwish[_swish]/sigmoid_0" -> "162 EfficientNet/ModuleList[_blocks]/MBConvBlock[8]/MemoryEfficientSwish[_swish]/__mul___0" [label="(1, 480, 15, 15)", style=solid]; +"162 EfficientNet/ModuleList[_blocks]/MBConvBlock[8]/MemoryEfficientSwish[_swish]/__mul___0" -> "163 EfficientNet/ModuleList[_blocks]/MBConvBlock[8]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateInputs[1]/__getitem___0" [label="(1, 480, 15, 15)", style=solid]; +"163 EfficientNet/ModuleList[_blocks]/MBConvBlock[8]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateInputs[1]/__getitem___0" -> "164 EfficientNet/ModuleList[_blocks]/MBConvBlock[8]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ZeroPad2d[static_padding]/pad_0" [label="(1, 480, 15, 15)", style=solid]; +"164 EfficientNet/ModuleList[_blocks]/MBConvBlock[8]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ZeroPad2d[static_padding]/pad_0" -> "165 EfficientNet/ModuleList[_blocks]/MBConvBlock[8]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/conv2d_0" [label="(1, 480, 19, 19)", style=solid]; +"165 EfficientNet/ModuleList[_blocks]/MBConvBlock[8]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/conv2d_0" -> "166 EfficientNet/ModuleList[_blocks]/MBConvBlock[8]/NNCFBatchNorm2d[_bn1]/batch_norm_0" [label="(1, 480, 15, 15)", style=solid]; +"166 EfficientNet/ModuleList[_blocks]/MBConvBlock[8]/NNCFBatchNorm2d[_bn1]/batch_norm_0" -> "167 EfficientNet/ModuleList[_blocks]/MBConvBlock[8]/MemoryEfficientSwish[_swish]/sigmoid_1" [label="(1, 480, 15, 15)", style=solid]; +"166 EfficientNet/ModuleList[_blocks]/MBConvBlock[8]/NNCFBatchNorm2d[_bn1]/batch_norm_0" -> "168 EfficientNet/ModuleList[_blocks]/MBConvBlock[8]/MemoryEfficientSwish[_swish]/__mul___1" [label="(1, 480, 15, 15)", style=solid]; +"167 EfficientNet/ModuleList[_blocks]/MBConvBlock[8]/MemoryEfficientSwish[_swish]/sigmoid_1" -> "168 EfficientNet/ModuleList[_blocks]/MBConvBlock[8]/MemoryEfficientSwish[_swish]/__mul___1" [label="(1, 480, 15, 15)", style=solid]; +"168 EfficientNet/ModuleList[_blocks]/MBConvBlock[8]/MemoryEfficientSwish[_swish]/__mul___1" -> "169 EfficientNet/ModuleList[_blocks]/MBConvBlock[8]/adaptive_avg_pool2d_0" [label="(1, 480, 15, 15)", style=solid]; +"168 
EfficientNet/ModuleList[_blocks]/MBConvBlock[8]/MemoryEfficientSwish[_swish]/__mul___1" -> "175 EfficientNet/ModuleList[_blocks]/MBConvBlock[8]/__mul___0" [label="(1, 480, 15, 15)", style=solid]; +"169 EfficientNet/ModuleList[_blocks]/MBConvBlock[8]/adaptive_avg_pool2d_0" -> "170 EfficientNet/ModuleList[_blocks]/MBConvBlock[8]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" [label="(1, 480, 1, 1)", style=solid]; +"170 EfficientNet/ModuleList[_blocks]/MBConvBlock[8]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" -> "171 EfficientNet/ModuleList[_blocks]/MBConvBlock[8]/MemoryEfficientSwish[_swish]/sigmoid_2" [label="(1, 20, 1, 1)", style=solid]; +"170 EfficientNet/ModuleList[_blocks]/MBConvBlock[8]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" -> "172 EfficientNet/ModuleList[_blocks]/MBConvBlock[8]/MemoryEfficientSwish[_swish]/__mul___2" [label="(1, 20, 1, 1)", style=solid]; +"171 EfficientNet/ModuleList[_blocks]/MBConvBlock[8]/MemoryEfficientSwish[_swish]/sigmoid_2" -> "172 EfficientNet/ModuleList[_blocks]/MBConvBlock[8]/MemoryEfficientSwish[_swish]/__mul___2" [label="(1, 20, 1, 1)", style=solid]; +"172 EfficientNet/ModuleList[_blocks]/MBConvBlock[8]/MemoryEfficientSwish[_swish]/__mul___2" -> "173 EfficientNet/ModuleList[_blocks]/MBConvBlock[8]/NNCFUserConv2dStaticSamePadding[_se_expand]/conv2d_0" [label="(1, 20, 1, 1)", style=solid]; +"173 EfficientNet/ModuleList[_blocks]/MBConvBlock[8]/NNCFUserConv2dStaticSamePadding[_se_expand]/conv2d_0" -> "174 EfficientNet/ModuleList[_blocks]/MBConvBlock[8]/sigmoid_0" [label="(1, 480, 1, 1)", style=solid]; +"174 EfficientNet/ModuleList[_blocks]/MBConvBlock[8]/sigmoid_0" -> "175 EfficientNet/ModuleList[_blocks]/MBConvBlock[8]/__mul___0" [label="(1, 480, 1, 1)", style=solid]; +"175 EfficientNet/ModuleList[_blocks]/MBConvBlock[8]/__mul___0" -> "176 EfficientNet/ModuleList[_blocks]/MBConvBlock[8]/NNCFUserConv2dStaticSamePadding[_project_conv]/conv2d_0" [label="(1, 480, 15, 15)", style=solid]; +"176 EfficientNet/ModuleList[_blocks]/MBConvBlock[8]/NNCFUserConv2dStaticSamePadding[_project_conv]/conv2d_0" -> "177 EfficientNet/ModuleList[_blocks]/MBConvBlock[8]/NNCFBatchNorm2d[_bn2]/batch_norm_0" [label="(1, 112, 15, 15)", style=solid]; +"177 EfficientNet/ModuleList[_blocks]/MBConvBlock[8]/NNCFBatchNorm2d[_bn2]/batch_norm_0" -> "178 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFUserConv2dStaticSamePadding[_expand_conv]/conv2d_0" [label="(1, 112, 15, 15)", style=solid]; +"177 EfficientNet/ModuleList[_blocks]/MBConvBlock[8]/NNCFBatchNorm2d[_bn2]/batch_norm_0" -> "197 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/__add___0" [label="(1, 112, 15, 15)", style=solid]; +"178 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFUserConv2dStaticSamePadding[_expand_conv]/conv2d_0" -> "179 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFBatchNorm2d[_bn0]/batch_norm_0" [label="(1, 672, 15, 15)", style=solid]; +"179 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFBatchNorm2d[_bn0]/batch_norm_0" -> "180 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/MemoryEfficientSwish[_swish]/sigmoid_0" [label="(1, 672, 15, 15)", style=solid]; +"179 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFBatchNorm2d[_bn0]/batch_norm_0" -> "181 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/MemoryEfficientSwish[_swish]/__mul___0" [label="(1, 672, 15, 15)", style=solid]; +"180 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/MemoryEfficientSwish[_swish]/sigmoid_0" -> "181 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/MemoryEfficientSwish[_swish]/__mul___0" 
[label="(1, 672, 15, 15)", style=solid]; +"181 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/MemoryEfficientSwish[_swish]/__mul___0" -> "182 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateInputs[1]/__getitem___0" [label="(1, 672, 15, 15)", style=solid]; +"182 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateInputs[1]/__getitem___0" -> "183 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ZeroPad2d[static_padding]/pad_0" [label="(1, 672, 15, 15)", style=solid]; +"183 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ZeroPad2d[static_padding]/pad_0" -> "184 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/conv2d_0" [label="(1, 672, 19, 19)", style=solid]; +"184 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/conv2d_0" -> "185 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFBatchNorm2d[_bn1]/batch_norm_0" [label="(1, 672, 15, 15)", style=solid]; +"185 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFBatchNorm2d[_bn1]/batch_norm_0" -> "186 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/MemoryEfficientSwish[_swish]/sigmoid_1" [label="(1, 672, 15, 15)", style=solid]; +"185 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFBatchNorm2d[_bn1]/batch_norm_0" -> "187 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/MemoryEfficientSwish[_swish]/__mul___1" [label="(1, 672, 15, 15)", style=solid]; +"186 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/MemoryEfficientSwish[_swish]/sigmoid_1" -> "187 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/MemoryEfficientSwish[_swish]/__mul___1" [label="(1, 672, 15, 15)", style=solid]; +"187 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/MemoryEfficientSwish[_swish]/__mul___1" -> "188 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/adaptive_avg_pool2d_0" [label="(1, 672, 15, 15)", style=solid]; +"187 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/MemoryEfficientSwish[_swish]/__mul___1" -> "194 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/__mul___0" [label="(1, 672, 15, 15)", style=solid]; +"188 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/adaptive_avg_pool2d_0" -> "189 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" [label="(1, 672, 1, 1)", style=solid]; +"189 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" -> "190 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/MemoryEfficientSwish[_swish]/sigmoid_2" [label="(1, 28, 1, 1)", style=solid]; +"189 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" -> "191 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/MemoryEfficientSwish[_swish]/__mul___2" [label="(1, 28, 1, 1)", style=solid]; +"190 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/MemoryEfficientSwish[_swish]/sigmoid_2" -> "191 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/MemoryEfficientSwish[_swish]/__mul___2" [label="(1, 28, 1, 1)", style=solid]; +"191 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/MemoryEfficientSwish[_swish]/__mul___2" -> "192 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFUserConv2dStaticSamePadding[_se_expand]/conv2d_0" [label="(1, 28, 1, 1)", style=solid]; +"192 
EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFUserConv2dStaticSamePadding[_se_expand]/conv2d_0" -> "193 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/sigmoid_0" [label="(1, 672, 1, 1)", style=solid]; +"193 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/sigmoid_0" -> "194 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/__mul___0" [label="(1, 672, 1, 1)", style=solid]; +"194 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/__mul___0" -> "195 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFUserConv2dStaticSamePadding[_project_conv]/conv2d_0" [label="(1, 672, 15, 15)", style=solid]; +"195 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFUserConv2dStaticSamePadding[_project_conv]/conv2d_0" -> "196 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFBatchNorm2d[_bn2]/batch_norm_0" [label="(1, 112, 15, 15)", style=solid]; +"196 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFBatchNorm2d[_bn2]/batch_norm_0" -> "197 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/__add___0" [label="(1, 112, 15, 15)", style=solid]; +"197 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/__add___0" -> "198 EfficientNet/ModuleList[_blocks]/MBConvBlock[10]/NNCFUserConv2dStaticSamePadding[_expand_conv]/conv2d_0" [label="(1, 112, 15, 15)", style=solid]; +"197 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/__add___0" -> "217 EfficientNet/ModuleList[_blocks]/MBConvBlock[10]/__add___0" [label="(1, 112, 15, 15)", style=solid]; +"198 EfficientNet/ModuleList[_blocks]/MBConvBlock[10]/NNCFUserConv2dStaticSamePadding[_expand_conv]/conv2d_0" -> "199 EfficientNet/ModuleList[_blocks]/MBConvBlock[10]/NNCFBatchNorm2d[_bn0]/batch_norm_0" [label="(1, 672, 15, 15)", style=solid]; +"199 EfficientNet/ModuleList[_blocks]/MBConvBlock[10]/NNCFBatchNorm2d[_bn0]/batch_norm_0" -> "200 EfficientNet/ModuleList[_blocks]/MBConvBlock[10]/MemoryEfficientSwish[_swish]/sigmoid_0" [label="(1, 672, 15, 15)", style=solid]; +"199 EfficientNet/ModuleList[_blocks]/MBConvBlock[10]/NNCFBatchNorm2d[_bn0]/batch_norm_0" -> "201 EfficientNet/ModuleList[_blocks]/MBConvBlock[10]/MemoryEfficientSwish[_swish]/__mul___0" [label="(1, 672, 15, 15)", style=solid]; +"200 EfficientNet/ModuleList[_blocks]/MBConvBlock[10]/MemoryEfficientSwish[_swish]/sigmoid_0" -> "201 EfficientNet/ModuleList[_blocks]/MBConvBlock[10]/MemoryEfficientSwish[_swish]/__mul___0" [label="(1, 672, 15, 15)", style=solid]; +"201 EfficientNet/ModuleList[_blocks]/MBConvBlock[10]/MemoryEfficientSwish[_swish]/__mul___0" -> "202 EfficientNet/ModuleList[_blocks]/MBConvBlock[10]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateInputs[1]/__getitem___0" [label="(1, 672, 15, 15)", style=solid]; +"202 EfficientNet/ModuleList[_blocks]/MBConvBlock[10]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateInputs[1]/__getitem___0" -> "203 EfficientNet/ModuleList[_blocks]/MBConvBlock[10]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ZeroPad2d[static_padding]/pad_0" [label="(1, 672, 15, 15)", style=solid]; +"203 EfficientNet/ModuleList[_blocks]/MBConvBlock[10]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ZeroPad2d[static_padding]/pad_0" -> "204 EfficientNet/ModuleList[_blocks]/MBConvBlock[10]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/conv2d_0" [label="(1, 672, 19, 19)", style=solid]; +"204 EfficientNet/ModuleList[_blocks]/MBConvBlock[10]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/conv2d_0" -> "205 EfficientNet/ModuleList[_blocks]/MBConvBlock[10]/NNCFBatchNorm2d[_bn1]/batch_norm_0" [label="(1, 672, 15, 15)", style=solid]; +"205 
EfficientNet/ModuleList[_blocks]/MBConvBlock[10]/NNCFBatchNorm2d[_bn1]/batch_norm_0" -> "206 EfficientNet/ModuleList[_blocks]/MBConvBlock[10]/MemoryEfficientSwish[_swish]/sigmoid_1" [label="(1, 672, 15, 15)", style=solid]; +"205 EfficientNet/ModuleList[_blocks]/MBConvBlock[10]/NNCFBatchNorm2d[_bn1]/batch_norm_0" -> "207 EfficientNet/ModuleList[_blocks]/MBConvBlock[10]/MemoryEfficientSwish[_swish]/__mul___1" [label="(1, 672, 15, 15)", style=solid]; +"206 EfficientNet/ModuleList[_blocks]/MBConvBlock[10]/MemoryEfficientSwish[_swish]/sigmoid_1" -> "207 EfficientNet/ModuleList[_blocks]/MBConvBlock[10]/MemoryEfficientSwish[_swish]/__mul___1" [label="(1, 672, 15, 15)", style=solid]; +"207 EfficientNet/ModuleList[_blocks]/MBConvBlock[10]/MemoryEfficientSwish[_swish]/__mul___1" -> "208 EfficientNet/ModuleList[_blocks]/MBConvBlock[10]/adaptive_avg_pool2d_0" [label="(1, 672, 15, 15)", style=solid]; +"207 EfficientNet/ModuleList[_blocks]/MBConvBlock[10]/MemoryEfficientSwish[_swish]/__mul___1" -> "214 EfficientNet/ModuleList[_blocks]/MBConvBlock[10]/__mul___0" [label="(1, 672, 15, 15)", style=solid]; +"208 EfficientNet/ModuleList[_blocks]/MBConvBlock[10]/adaptive_avg_pool2d_0" -> "209 EfficientNet/ModuleList[_blocks]/MBConvBlock[10]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" [label="(1, 672, 1, 1)", style=solid]; +"209 EfficientNet/ModuleList[_blocks]/MBConvBlock[10]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" -> "210 EfficientNet/ModuleList[_blocks]/MBConvBlock[10]/MemoryEfficientSwish[_swish]/sigmoid_2" [label="(1, 28, 1, 1)", style=solid]; +"209 EfficientNet/ModuleList[_blocks]/MBConvBlock[10]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" -> "211 EfficientNet/ModuleList[_blocks]/MBConvBlock[10]/MemoryEfficientSwish[_swish]/__mul___2" [label="(1, 28, 1, 1)", style=solid]; +"210 EfficientNet/ModuleList[_blocks]/MBConvBlock[10]/MemoryEfficientSwish[_swish]/sigmoid_2" -> "211 EfficientNet/ModuleList[_blocks]/MBConvBlock[10]/MemoryEfficientSwish[_swish]/__mul___2" [label="(1, 28, 1, 1)", style=solid]; +"211 EfficientNet/ModuleList[_blocks]/MBConvBlock[10]/MemoryEfficientSwish[_swish]/__mul___2" -> "212 EfficientNet/ModuleList[_blocks]/MBConvBlock[10]/NNCFUserConv2dStaticSamePadding[_se_expand]/conv2d_0" [label="(1, 28, 1, 1)", style=solid]; +"212 EfficientNet/ModuleList[_blocks]/MBConvBlock[10]/NNCFUserConv2dStaticSamePadding[_se_expand]/conv2d_0" -> "213 EfficientNet/ModuleList[_blocks]/MBConvBlock[10]/sigmoid_0" [label="(1, 672, 1, 1)", style=solid]; +"213 EfficientNet/ModuleList[_blocks]/MBConvBlock[10]/sigmoid_0" -> "214 EfficientNet/ModuleList[_blocks]/MBConvBlock[10]/__mul___0" [label="(1, 672, 1, 1)", style=solid]; +"214 EfficientNet/ModuleList[_blocks]/MBConvBlock[10]/__mul___0" -> "215 EfficientNet/ModuleList[_blocks]/MBConvBlock[10]/NNCFUserConv2dStaticSamePadding[_project_conv]/conv2d_0" [label="(1, 672, 15, 15)", style=solid]; +"215 EfficientNet/ModuleList[_blocks]/MBConvBlock[10]/NNCFUserConv2dStaticSamePadding[_project_conv]/conv2d_0" -> "216 EfficientNet/ModuleList[_blocks]/MBConvBlock[10]/NNCFBatchNorm2d[_bn2]/batch_norm_0" [label="(1, 112, 15, 15)", style=solid]; +"216 EfficientNet/ModuleList[_blocks]/MBConvBlock[10]/NNCFBatchNorm2d[_bn2]/batch_norm_0" -> "217 EfficientNet/ModuleList[_blocks]/MBConvBlock[10]/__add___0" [label="(1, 112, 15, 15)", style=solid]; +"217 EfficientNet/ModuleList[_blocks]/MBConvBlock[10]/__add___0" -> "218 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFUserConv2dStaticSamePadding[_expand_conv]/conv2d_0" [label="(1, 112, 15, 
15)", style=solid]; +"218 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFUserConv2dStaticSamePadding[_expand_conv]/conv2d_0" -> "219 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFBatchNorm2d[_bn0]/batch_norm_0" [label="(1, 672, 15, 15)", style=solid]; +"219 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFBatchNorm2d[_bn0]/batch_norm_0" -> "220 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/MemoryEfficientSwish[_swish]/sigmoid_0" [label="(1, 672, 15, 15)", style=solid]; +"219 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFBatchNorm2d[_bn0]/batch_norm_0" -> "221 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/MemoryEfficientSwish[_swish]/__mul___0" [label="(1, 672, 15, 15)", style=solid]; +"220 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/MemoryEfficientSwish[_swish]/sigmoid_0" -> "221 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/MemoryEfficientSwish[_swish]/__mul___0" [label="(1, 672, 15, 15)", style=solid]; +"221 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/MemoryEfficientSwish[_swish]/__mul___0" -> "222 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateInputs[1]/__getitem___0" [label="(1, 672, 15, 15)", style=solid]; +"222 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateInputs[1]/__getitem___0" -> "223 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ZeroPad2d[static_padding]/pad_0" [label="(1, 672, 15, 15)", style=solid]; +"223 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ZeroPad2d[static_padding]/pad_0" -> "224 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/conv2d_0" [label="(1, 672, 18, 18)", style=solid]; +"224 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/conv2d_0" -> "225 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFBatchNorm2d[_bn1]/batch_norm_0" [label="(1, 672, 7, 7)", style=solid]; +"225 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFBatchNorm2d[_bn1]/batch_norm_0" -> "226 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/MemoryEfficientSwish[_swish]/sigmoid_1" [label="(1, 672, 7, 7)", style=solid]; +"225 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFBatchNorm2d[_bn1]/batch_norm_0" -> "227 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/MemoryEfficientSwish[_swish]/__mul___1" [label="(1, 672, 7, 7)", style=solid]; +"226 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/MemoryEfficientSwish[_swish]/sigmoid_1" -> "227 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/MemoryEfficientSwish[_swish]/__mul___1" [label="(1, 672, 7, 7)", style=solid]; +"227 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/MemoryEfficientSwish[_swish]/__mul___1" -> "228 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/adaptive_avg_pool2d_0" [label="(1, 672, 7, 7)", style=solid]; +"227 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/MemoryEfficientSwish[_swish]/__mul___1" -> "234 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/__mul___0" [label="(1, 672, 7, 7)", style=solid]; +"228 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/adaptive_avg_pool2d_0" -> "229 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" [label="(1, 672, 1, 1)", style=solid]; +"229 
EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" -> "230 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/MemoryEfficientSwish[_swish]/sigmoid_2" [label="(1, 28, 1, 1)", style=solid]; +"229 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" -> "231 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/MemoryEfficientSwish[_swish]/__mul___2" [label="(1, 28, 1, 1)", style=solid]; +"230 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/MemoryEfficientSwish[_swish]/sigmoid_2" -> "231 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/MemoryEfficientSwish[_swish]/__mul___2" [label="(1, 28, 1, 1)", style=solid]; +"231 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/MemoryEfficientSwish[_swish]/__mul___2" -> "232 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFUserConv2dStaticSamePadding[_se_expand]/conv2d_0" [label="(1, 28, 1, 1)", style=solid]; +"232 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFUserConv2dStaticSamePadding[_se_expand]/conv2d_0" -> "233 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/sigmoid_0" [label="(1, 672, 1, 1)", style=solid]; +"233 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/sigmoid_0" -> "234 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/__mul___0" [label="(1, 672, 1, 1)", style=solid]; +"234 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/__mul___0" -> "235 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFUserConv2dStaticSamePadding[_project_conv]/conv2d_0" [label="(1, 672, 7, 7)", style=solid]; +"235 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFUserConv2dStaticSamePadding[_project_conv]/conv2d_0" -> "236 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFBatchNorm2d[_bn2]/batch_norm_0" [label="(1, 192, 7, 7)", style=solid]; +"236 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFBatchNorm2d[_bn2]/batch_norm_0" -> "237 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFUserConv2dStaticSamePadding[_expand_conv]/conv2d_0" [label="(1, 192, 7, 7)", style=solid]; +"236 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFBatchNorm2d[_bn2]/batch_norm_0" -> "256 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/__add___0" [label="(1, 192, 7, 7)", style=solid]; +"237 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFUserConv2dStaticSamePadding[_expand_conv]/conv2d_0" -> "238 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFBatchNorm2d[_bn0]/batch_norm_0" [label="(1, 1152, 7, 7)", style=solid]; +"238 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFBatchNorm2d[_bn0]/batch_norm_0" -> "239 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/MemoryEfficientSwish[_swish]/sigmoid_0" [label="(1, 1152, 7, 7)", style=solid]; +"238 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFBatchNorm2d[_bn0]/batch_norm_0" -> "240 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/MemoryEfficientSwish[_swish]/__mul___0" [label="(1, 1152, 7, 7)", style=solid]; +"239 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/MemoryEfficientSwish[_swish]/sigmoid_0" -> "240 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/MemoryEfficientSwish[_swish]/__mul___0" [label="(1, 1152, 7, 7)", style=solid]; +"240 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/MemoryEfficientSwish[_swish]/__mul___0" -> "241 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateInputs[1]/__getitem___0" [label="(1, 1152, 7, 7)", style=solid]; +"241 
EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateInputs[1]/__getitem___0" -> "242 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ZeroPad2d[static_padding]/pad_0" [label="(1, 1152, 7, 7)", style=solid]; +"242 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ZeroPad2d[static_padding]/pad_0" -> "243 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/conv2d_0" [label="(1, 1152, 11, 11)", style=solid]; +"243 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/conv2d_0" -> "244 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFBatchNorm2d[_bn1]/batch_norm_0" [label="(1, 1152, 7, 7)", style=solid]; +"244 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFBatchNorm2d[_bn1]/batch_norm_0" -> "245 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/MemoryEfficientSwish[_swish]/sigmoid_1" [label="(1, 1152, 7, 7)", style=solid]; +"244 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFBatchNorm2d[_bn1]/batch_norm_0" -> "246 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/MemoryEfficientSwish[_swish]/__mul___1" [label="(1, 1152, 7, 7)", style=solid]; +"245 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/MemoryEfficientSwish[_swish]/sigmoid_1" -> "246 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/MemoryEfficientSwish[_swish]/__mul___1" [label="(1, 1152, 7, 7)", style=solid]; +"246 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/MemoryEfficientSwish[_swish]/__mul___1" -> "247 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/adaptive_avg_pool2d_0" [label="(1, 1152, 7, 7)", style=solid]; +"246 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/MemoryEfficientSwish[_swish]/__mul___1" -> "253 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/__mul___0" [label="(1, 1152, 7, 7)", style=solid]; +"247 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/adaptive_avg_pool2d_0" -> "248 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" [label="(1, 1152, 1, 1)", style=solid]; +"248 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" -> "249 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/MemoryEfficientSwish[_swish]/sigmoid_2" [label="(1, 48, 1, 1)", style=solid]; +"248 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" -> "250 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/MemoryEfficientSwish[_swish]/__mul___2" [label="(1, 48, 1, 1)", style=solid]; +"249 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/MemoryEfficientSwish[_swish]/sigmoid_2" -> "250 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/MemoryEfficientSwish[_swish]/__mul___2" [label="(1, 48, 1, 1)", style=solid]; +"250 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/MemoryEfficientSwish[_swish]/__mul___2" -> "251 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFUserConv2dStaticSamePadding[_se_expand]/conv2d_0" [label="(1, 48, 1, 1)", style=solid]; +"251 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFUserConv2dStaticSamePadding[_se_expand]/conv2d_0" -> "252 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/sigmoid_0" [label="(1, 1152, 1, 1)", style=solid]; +"252 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/sigmoid_0" -> "253 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/__mul___0" [label="(1, 1152, 1, 1)", 
style=solid]; +"253 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/__mul___0" -> "254 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFUserConv2dStaticSamePadding[_project_conv]/conv2d_0" [label="(1, 1152, 7, 7)", style=solid]; +"254 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFUserConv2dStaticSamePadding[_project_conv]/conv2d_0" -> "255 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFBatchNorm2d[_bn2]/batch_norm_0" [label="(1, 192, 7, 7)", style=solid]; +"255 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFBatchNorm2d[_bn2]/batch_norm_0" -> "256 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/__add___0" [label="(1, 192, 7, 7)", style=solid]; +"256 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/__add___0" -> "257 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFUserConv2dStaticSamePadding[_expand_conv]/conv2d_0" [label="(1, 192, 7, 7)", style=solid]; +"256 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/__add___0" -> "276 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/__add___0" [label="(1, 192, 7, 7)", style=solid]; +"257 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFUserConv2dStaticSamePadding[_expand_conv]/conv2d_0" -> "258 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFBatchNorm2d[_bn0]/batch_norm_0" [label="(1, 1152, 7, 7)", style=solid]; +"258 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFBatchNorm2d[_bn0]/batch_norm_0" -> "259 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/MemoryEfficientSwish[_swish]/sigmoid_0" [label="(1, 1152, 7, 7)", style=solid]; +"258 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFBatchNorm2d[_bn0]/batch_norm_0" -> "260 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/MemoryEfficientSwish[_swish]/__mul___0" [label="(1, 1152, 7, 7)", style=solid]; +"259 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/MemoryEfficientSwish[_swish]/sigmoid_0" -> "260 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/MemoryEfficientSwish[_swish]/__mul___0" [label="(1, 1152, 7, 7)", style=solid]; +"260 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/MemoryEfficientSwish[_swish]/__mul___0" -> "261 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateInputs[1]/__getitem___0" [label="(1, 1152, 7, 7)", style=solid]; +"261 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateInputs[1]/__getitem___0" -> "262 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ZeroPad2d[static_padding]/pad_0" [label="(1, 1152, 7, 7)", style=solid]; +"262 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ZeroPad2d[static_padding]/pad_0" -> "263 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/conv2d_0" [label="(1, 1152, 11, 11)", style=solid]; +"263 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/conv2d_0" -> "264 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFBatchNorm2d[_bn1]/batch_norm_0" [label="(1, 1152, 7, 7)", style=solid]; +"264 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFBatchNorm2d[_bn1]/batch_norm_0" -> "265 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/MemoryEfficientSwish[_swish]/sigmoid_1" [label="(1, 1152, 7, 7)", style=solid]; +"264 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFBatchNorm2d[_bn1]/batch_norm_0" -> "266 
EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/MemoryEfficientSwish[_swish]/__mul___1" [label="(1, 1152, 7, 7)", style=solid]; +"265 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/MemoryEfficientSwish[_swish]/sigmoid_1" -> "266 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/MemoryEfficientSwish[_swish]/__mul___1" [label="(1, 1152, 7, 7)", style=solid]; +"266 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/MemoryEfficientSwish[_swish]/__mul___1" -> "267 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/adaptive_avg_pool2d_0" [label="(1, 1152, 7, 7)", style=solid]; +"266 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/MemoryEfficientSwish[_swish]/__mul___1" -> "273 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/__mul___0" [label="(1, 1152, 7, 7)", style=solid]; +"267 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/adaptive_avg_pool2d_0" -> "268 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" [label="(1, 1152, 1, 1)", style=solid]; +"268 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" -> "269 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/MemoryEfficientSwish[_swish]/sigmoid_2" [label="(1, 48, 1, 1)", style=solid]; +"268 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" -> "270 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/MemoryEfficientSwish[_swish]/__mul___2" [label="(1, 48, 1, 1)", style=solid]; +"269 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/MemoryEfficientSwish[_swish]/sigmoid_2" -> "270 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/MemoryEfficientSwish[_swish]/__mul___2" [label="(1, 48, 1, 1)", style=solid]; +"270 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/MemoryEfficientSwish[_swish]/__mul___2" -> "271 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFUserConv2dStaticSamePadding[_se_expand]/conv2d_0" [label="(1, 48, 1, 1)", style=solid]; +"271 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFUserConv2dStaticSamePadding[_se_expand]/conv2d_0" -> "272 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/sigmoid_0" [label="(1, 1152, 1, 1)", style=solid]; +"272 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/sigmoid_0" -> "273 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/__mul___0" [label="(1, 1152, 1, 1)", style=solid]; +"273 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/__mul___0" -> "274 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFUserConv2dStaticSamePadding[_project_conv]/conv2d_0" [label="(1, 1152, 7, 7)", style=solid]; +"274 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFUserConv2dStaticSamePadding[_project_conv]/conv2d_0" -> "275 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFBatchNorm2d[_bn2]/batch_norm_0" [label="(1, 192, 7, 7)", style=solid]; +"275 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFBatchNorm2d[_bn2]/batch_norm_0" -> "276 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/__add___0" [label="(1, 192, 7, 7)", style=solid]; +"276 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/__add___0" -> "277 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/NNCFUserConv2dStaticSamePadding[_expand_conv]/conv2d_0" [label="(1, 192, 7, 7)", style=solid]; +"276 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/__add___0" -> "296 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/__add___0" [label="(1, 192, 7, 7)", style=solid]; +"277 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/NNCFUserConv2dStaticSamePadding[_expand_conv]/conv2d_0" -> "278 
EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/NNCFBatchNorm2d[_bn0]/batch_norm_0" [label="(1, 1152, 7, 7)", style=solid]; +"278 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/NNCFBatchNorm2d[_bn0]/batch_norm_0" -> "279 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/MemoryEfficientSwish[_swish]/sigmoid_0" [label="(1, 1152, 7, 7)", style=solid]; +"278 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/NNCFBatchNorm2d[_bn0]/batch_norm_0" -> "280 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/MemoryEfficientSwish[_swish]/__mul___0" [label="(1, 1152, 7, 7)", style=solid]; +"279 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/MemoryEfficientSwish[_swish]/sigmoid_0" -> "280 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/MemoryEfficientSwish[_swish]/__mul___0" [label="(1, 1152, 7, 7)", style=solid]; +"280 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/MemoryEfficientSwish[_swish]/__mul___0" -> "281 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateInputs[1]/__getitem___0" [label="(1, 1152, 7, 7)", style=solid]; +"281 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateInputs[1]/__getitem___0" -> "282 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ZeroPad2d[static_padding]/pad_0" [label="(1, 1152, 7, 7)", style=solid]; +"282 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ZeroPad2d[static_padding]/pad_0" -> "283 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/conv2d_0" [label="(1, 1152, 11, 11)", style=solid]; +"283 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/conv2d_0" -> "284 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/NNCFBatchNorm2d[_bn1]/batch_norm_0" [label="(1, 1152, 7, 7)", style=solid]; +"284 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/NNCFBatchNorm2d[_bn1]/batch_norm_0" -> "285 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/MemoryEfficientSwish[_swish]/sigmoid_1" [label="(1, 1152, 7, 7)", style=solid]; +"284 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/NNCFBatchNorm2d[_bn1]/batch_norm_0" -> "286 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/MemoryEfficientSwish[_swish]/__mul___1" [label="(1, 1152, 7, 7)", style=solid]; +"285 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/MemoryEfficientSwish[_swish]/sigmoid_1" -> "286 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/MemoryEfficientSwish[_swish]/__mul___1" [label="(1, 1152, 7, 7)", style=solid]; +"286 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/MemoryEfficientSwish[_swish]/__mul___1" -> "287 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/adaptive_avg_pool2d_0" [label="(1, 1152, 7, 7)", style=solid]; +"286 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/MemoryEfficientSwish[_swish]/__mul___1" -> "293 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/__mul___0" [label="(1, 1152, 7, 7)", style=solid]; +"287 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/adaptive_avg_pool2d_0" -> "288 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" [label="(1, 1152, 1, 1)", style=solid]; +"288 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" -> "289 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/MemoryEfficientSwish[_swish]/sigmoid_2" [label="(1, 48, 1, 1)", 
style=solid]; +"288 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" -> "290 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/MemoryEfficientSwish[_swish]/__mul___2" [label="(1, 48, 1, 1)", style=solid]; +"289 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/MemoryEfficientSwish[_swish]/sigmoid_2" -> "290 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/MemoryEfficientSwish[_swish]/__mul___2" [label="(1, 48, 1, 1)", style=solid]; +"290 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/MemoryEfficientSwish[_swish]/__mul___2" -> "291 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/NNCFUserConv2dStaticSamePadding[_se_expand]/conv2d_0" [label="(1, 48, 1, 1)", style=solid]; +"291 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/NNCFUserConv2dStaticSamePadding[_se_expand]/conv2d_0" -> "292 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/sigmoid_0" [label="(1, 1152, 1, 1)", style=solid]; +"292 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/sigmoid_0" -> "293 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/__mul___0" [label="(1, 1152, 1, 1)", style=solid]; +"293 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/__mul___0" -> "294 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/NNCFUserConv2dStaticSamePadding[_project_conv]/conv2d_0" [label="(1, 1152, 7, 7)", style=solid]; +"294 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/NNCFUserConv2dStaticSamePadding[_project_conv]/conv2d_0" -> "295 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/NNCFBatchNorm2d[_bn2]/batch_norm_0" [label="(1, 192, 7, 7)", style=solid]; +"295 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/NNCFBatchNorm2d[_bn2]/batch_norm_0" -> "296 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/__add___0" [label="(1, 192, 7, 7)", style=solid]; +"296 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/__add___0" -> "297 EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/NNCFUserConv2dStaticSamePadding[_expand_conv]/conv2d_0" [label="(1, 192, 7, 7)", style=solid]; +"297 EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/NNCFUserConv2dStaticSamePadding[_expand_conv]/conv2d_0" -> "298 EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/NNCFBatchNorm2d[_bn0]/batch_norm_0" [label="(1, 1152, 7, 7)", style=solid]; +"298 EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/NNCFBatchNorm2d[_bn0]/batch_norm_0" -> "299 EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/MemoryEfficientSwish[_swish]/sigmoid_0" [label="(1, 1152, 7, 7)", style=solid]; +"298 EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/NNCFBatchNorm2d[_bn0]/batch_norm_0" -> "300 EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/MemoryEfficientSwish[_swish]/__mul___0" [label="(1, 1152, 7, 7)", style=solid]; +"299 EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/MemoryEfficientSwish[_swish]/sigmoid_0" -> "300 EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/MemoryEfficientSwish[_swish]/__mul___0" [label="(1, 1152, 7, 7)", style=solid]; +"300 EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/MemoryEfficientSwish[_swish]/__mul___0" -> "301 EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateInputs[1]/__getitem___0" [label="(1, 1152, 7, 7)", style=solid]; +"301 EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateInputs[1]/__getitem___0" -> "302 EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ZeroPad2d[static_padding]/pad_0" [label="(1, 1152, 7, 7)", 
style=solid]; +"302 EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ZeroPad2d[static_padding]/pad_0" -> "303 EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/conv2d_0" [label="(1, 1152, 9, 9)", style=solid]; +"303 EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/conv2d_0" -> "304 EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/NNCFBatchNorm2d[_bn1]/batch_norm_0" [label="(1, 1152, 7, 7)", style=solid]; +"304 EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/NNCFBatchNorm2d[_bn1]/batch_norm_0" -> "305 EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/MemoryEfficientSwish[_swish]/sigmoid_1" [label="(1, 1152, 7, 7)", style=solid]; +"304 EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/NNCFBatchNorm2d[_bn1]/batch_norm_0" -> "306 EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/MemoryEfficientSwish[_swish]/__mul___1" [label="(1, 1152, 7, 7)", style=solid]; +"305 EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/MemoryEfficientSwish[_swish]/sigmoid_1" -> "306 EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/MemoryEfficientSwish[_swish]/__mul___1" [label="(1, 1152, 7, 7)", style=solid]; +"306 EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/MemoryEfficientSwish[_swish]/__mul___1" -> "307 EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/adaptive_avg_pool2d_0" [label="(1, 1152, 7, 7)", style=solid]; +"306 EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/MemoryEfficientSwish[_swish]/__mul___1" -> "313 EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/__mul___0" [label="(1, 1152, 7, 7)", style=solid]; +"307 EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/adaptive_avg_pool2d_0" -> "308 EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" [label="(1, 1152, 1, 1)", style=solid]; +"308 EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" -> "309 EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/MemoryEfficientSwish[_swish]/sigmoid_2" [label="(1, 48, 1, 1)", style=solid]; +"308 EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" -> "310 EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/MemoryEfficientSwish[_swish]/__mul___2" [label="(1, 48, 1, 1)", style=solid]; +"309 EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/MemoryEfficientSwish[_swish]/sigmoid_2" -> "310 EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/MemoryEfficientSwish[_swish]/__mul___2" [label="(1, 48, 1, 1)", style=solid]; +"310 EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/MemoryEfficientSwish[_swish]/__mul___2" -> "311 EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/NNCFUserConv2dStaticSamePadding[_se_expand]/conv2d_0" [label="(1, 48, 1, 1)", style=solid]; +"311 EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/NNCFUserConv2dStaticSamePadding[_se_expand]/conv2d_0" -> "312 EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/sigmoid_0" [label="(1, 1152, 1, 1)", style=solid]; +"312 EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/sigmoid_0" -> "313 EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/__mul___0" [label="(1, 1152, 1, 1)", style=solid]; +"313 EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/__mul___0" -> "314 EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/NNCFUserConv2dStaticSamePadding[_project_conv]/conv2d_0" [label="(1, 1152, 7, 7)", style=solid]; +"314 
EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/NNCFUserConv2dStaticSamePadding[_project_conv]/conv2d_0" -> "315 EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/NNCFBatchNorm2d[_bn2]/batch_norm_0" [label="(1, 320, 7, 7)", style=solid]; +"315 EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/NNCFBatchNorm2d[_bn2]/batch_norm_0" -> "316 EfficientNet/NNCFUserConv2dStaticSamePadding[_conv_head]/conv2d_0" [label="(1, 320, 7, 7)", style=solid]; +"316 EfficientNet/NNCFUserConv2dStaticSamePadding[_conv_head]/conv2d_0" -> "317 EfficientNet/NNCFBatchNorm2d[_bn1]/batch_norm_0" [label="(1, 1280, 7, 7)", style=solid]; +"317 EfficientNet/NNCFBatchNorm2d[_bn1]/batch_norm_0" -> "318 EfficientNet/MemoryEfficientSwish[_swish]/sigmoid_1" [label="(1, 1280, 7, 7)", style=solid]; +"317 EfficientNet/NNCFBatchNorm2d[_bn1]/batch_norm_0" -> "319 EfficientNet/MemoryEfficientSwish[_swish]/__mul___1" [label="(1, 1280, 7, 7)", style=solid]; +"318 EfficientNet/MemoryEfficientSwish[_swish]/sigmoid_1" -> "319 EfficientNet/MemoryEfficientSwish[_swish]/__mul___1" [label="(1, 1280, 7, 7)", style=solid]; +"319 EfficientNet/MemoryEfficientSwish[_swish]/__mul___1" -> "320 EfficientNet/AdaptiveAvgPool2d[_avg_pooling]/adaptive_avg_pool2d_0" [label="(1, 1280, 7, 7)", style=solid]; +"320 EfficientNet/AdaptiveAvgPool2d[_avg_pooling]/adaptive_avg_pool2d_0" -> "321 EfficientNet/flatten_0" [label="(1, 1280, 1, 1)", style=solid]; +"321 EfficientNet/flatten_0" -> "322 EfficientNet/Dropout[_dropout]/dropout_0" [label="(1, 1280)", style=solid]; +"322 EfficientNet/Dropout[_dropout]/dropout_0" -> "323 EfficientNet/NNCFLinear[_fc]/linear_0" [label="(1, 1280)", style=solid]; +"323 EfficientNet/NNCFLinear[_fc]/linear_0" -> "324 /nncf_model_output_0" [label="(1, 1000)", style=solid]; } diff --git a/tests/torch/data/reference_graphs/nas/efficient_net_b0_width.dot b/tests/torch/data/reference_graphs/nas/efficient_net_b0_width.dot index 5426d41ed62..f9096e25429 100644 --- a/tests/torch/data/reference_graphs/nas/efficient_net_b0_width.dot +++ b/tests/torch/data/reference_graphs/nas/efficient_net_b0_width.dot @@ -52,217 +52,199 @@ strict digraph { "50 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/MemoryEfficientSwish[_swish]/sigmoid_0" [id=50, label="sigmoid_IW144_OW144_#59", style=filled, type=sigmoid]; "51 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/MemoryEfficientSwish[_swish]/__mul___0" [id=51, label="__mul___IW[144, 144]_OW144_#60", style=filled, type=__mul__]; "52 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateInputs[1]/__getitem___0" [id=52, label="__getitem___#52", style=filled, type=__getitem__]; -"53 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateWeight[3]/ElasticKernelConv2DOp[op]/linear_0" [id=53, label="linear_#53", style=filled, type=linear]; -"54 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateWeight[3]/ElasticKernelConv2DOp[op]/view_0" [id=54, label="view_#54", style=filled, type=view]; -"55 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateWeight[3]/ElasticKernelConv2DOp[op]/view_1" [id=55, label="view_#55", style=filled, type=view]; -"56 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ZeroPad2d[static_padding]/pad_0" [id=56, label="pad_IW144_OW144_#61", style=filled, type=pad]; -"57 
EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/conv2d_0" [color=purple, id=57, label="DW_conv2d_IW144_OW144_G8_#62", style=filled, type=conv2d]; -"58 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/NNCFBatchNorm2d[_bn1]/batch_norm_0" [id=58, label="batch_norm_IW144_OW144_#63", style=filled, type=batch_norm]; -"59 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/MemoryEfficientSwish[_swish]/sigmoid_1" [id=59, label="sigmoid_IW144_OW144_#64", style=filled, type=sigmoid]; -"60 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/MemoryEfficientSwish[_swish]/__mul___1" [id=60, label="__mul___IW[144, 144]_OW144_#65", style=filled, type=__mul__]; -"61 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/adaptive_avg_pool2d_0" [id=61, label="adaptive_avg_pool2d_IW144_OW144_#66", style=filled, type=adaptive_avg_pool2d]; -"62 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" [color=lightblue, id=62, label="conv2d_IW144_OW6_G7_#67", style=filled, type=conv2d]; -"63 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/MemoryEfficientSwish[_swish]/sigmoid_2" [id=63, label="sigmoid_IW6_OW6_#68", style=filled, type=sigmoid]; -"64 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/MemoryEfficientSwish[_swish]/__mul___2" [id=64, label="__mul___IW[6, 6]_OW6_#69", style=filled, type=__mul__]; -"65 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/NNCFUserConv2dStaticSamePadding[_se_expand]/conv2d_0" [color=lightblue, id=65, label="conv2d_IW6_OW144_G8_#70", style=filled, type=conv2d]; -"66 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/sigmoid_0" [id=66, label="sigmoid_IW144_OW144_#71", style=filled, type=sigmoid]; -"67 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/__mul___0" [id=67, label="__mul___IW[144, 144]_OW144_#72", style=filled, type=__mul__]; -"68 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/NNCFUserConv2dStaticSamePadding[_project_conv]/conv2d_0" [color=lightblue, id=68, label="conv2d_IW144_OW40_G11_#73", style=filled, type=conv2d]; -"69 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/NNCFBatchNorm2d[_bn2]/batch_norm_0" [id=69, label="batch_norm_IW40_OW40_#74", style=filled, type=batch_norm]; -"70 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/NNCFUserConv2dStaticSamePadding[_expand_conv]/conv2d_0" [color=lightblue, id=70, label="conv2d_IW40_OW240_G10_#75", style=filled, type=conv2d]; -"71 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/NNCFBatchNorm2d[_bn0]/batch_norm_0" [id=71, label="batch_norm_IW240_OW240_#76", style=filled, type=batch_norm]; -"72 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/MemoryEfficientSwish[_swish]/sigmoid_0" [id=72, label="sigmoid_IW240_OW240_#77", style=filled, type=sigmoid]; -"73 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/MemoryEfficientSwish[_swish]/__mul___0" [id=73, label="__mul___IW[240, 240]_OW240_#78", style=filled, type=__mul__]; -"74 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateInputs[1]/__getitem___0" [id=74, label="__getitem___#74", style=filled, type=__getitem__]; -"75 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateWeight[3]/ElasticKernelConv2DOp[op]/linear_0" [id=75, label="linear_#75", style=filled, type=linear]; -"76 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateWeight[3]/ElasticKernelConv2DOp[op]/view_0" [id=76, label="view_#76", 
style=filled, type=view]; -"77 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateWeight[3]/ElasticKernelConv2DOp[op]/view_1" [id=77, label="view_#77", style=filled, type=view]; -"78 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ZeroPad2d[static_padding]/pad_0" [id=78, label="pad_IW240_OW240_#79", style=filled, type=pad]; -"79 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/conv2d_0" [color=purple, id=79, label="DW_conv2d_IW240_OW240_G10_#80", style=filled, type=conv2d]; -"80 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/NNCFBatchNorm2d[_bn1]/batch_norm_0" [id=80, label="batch_norm_IW240_OW240_#81", style=filled, type=batch_norm]; -"81 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/MemoryEfficientSwish[_swish]/sigmoid_1" [id=81, label="sigmoid_IW240_OW240_#82", style=filled, type=sigmoid]; -"82 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/MemoryEfficientSwish[_swish]/__mul___1" [id=82, label="__mul___IW[240, 240]_OW240_#83", style=filled, type=__mul__]; -"83 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/adaptive_avg_pool2d_0" [id=83, label="adaptive_avg_pool2d_IW240_OW240_#84", style=filled, type=adaptive_avg_pool2d]; -"84 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" [color=lightblue, id=84, label="conv2d_IW240_OW10_G9_#85", style=filled, type=conv2d]; -"85 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/MemoryEfficientSwish[_swish]/sigmoid_2" [id=85, label="sigmoid_IW10_OW10_#86", style=filled, type=sigmoid]; -"86 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/MemoryEfficientSwish[_swish]/__mul___2" [id=86, label="__mul___IW[10, 10]_OW10_#87", style=filled, type=__mul__]; -"87 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/NNCFUserConv2dStaticSamePadding[_se_expand]/conv2d_0" [color=lightblue, id=87, label="conv2d_IW10_OW240_G10_#88", style=filled, type=conv2d]; -"88 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/sigmoid_0" [id=88, label="sigmoid_IW240_OW240_#89", style=filled, type=sigmoid]; -"89 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/__mul___0" [id=89, label="__mul___IW[240, 240]_OW240_#90", style=filled, type=__mul__]; -"90 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/NNCFUserConv2dStaticSamePadding[_project_conv]/conv2d_0" [color=lightblue, id=90, label="conv2d_IW240_OW40_G11_#91", style=filled, type=conv2d]; -"91 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/NNCFBatchNorm2d[_bn2]/batch_norm_0" [id=91, label="batch_norm_IW40_OW40_#92", style=filled, type=batch_norm]; -"92 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/__add___0" [id=92, label="__add___IW[40, 40]_OW40_#93", style=filled, type=__add__]; -"93 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/NNCFUserConv2dStaticSamePadding[_expand_conv]/conv2d_0" [color=lightblue, id=93, label="conv2d_IW40_OW240_G13_#94", style=filled, type=conv2d]; -"94 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/NNCFBatchNorm2d[_bn0]/batch_norm_0" [id=94, label="batch_norm_IW240_OW240_#95", style=filled, type=batch_norm]; -"95 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/MemoryEfficientSwish[_swish]/sigmoid_0" [id=95, label="sigmoid_IW240_OW240_#96", style=filled, type=sigmoid]; -"96 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/MemoryEfficientSwish[_swish]/__mul___0" [id=96, label="__mul___IW[240, 240]_OW240_#97", style=filled, type=__mul__]; -"97 
EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateInputs[1]/__getitem___0" [id=97, label="__getitem___#97", style=filled, type=__getitem__]; -"98 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ZeroPad2d[static_padding]/pad_0" [id=98, label="pad_IW240_OW240_#98", style=filled, type=pad]; -"99 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/conv2d_0" [color=purple, id=99, label="DW_conv2d_IW240_OW240_G13_#99", style=filled, type=conv2d]; -"100 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/NNCFBatchNorm2d[_bn1]/batch_norm_0" [id=100, label="batch_norm_IW240_OW240_#100", style=filled, type=batch_norm]; -"101 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/MemoryEfficientSwish[_swish]/sigmoid_1" [id=101, label="sigmoid_IW240_OW240_#101", style=filled, type=sigmoid]; -"102 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/MemoryEfficientSwish[_swish]/__mul___1" [id=102, label="__mul___IW[240, 240]_OW240_#102", style=filled, type=__mul__]; -"103 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/adaptive_avg_pool2d_0" [id=103, label="adaptive_avg_pool2d_IW240_OW240_#103", style=filled, type=adaptive_avg_pool2d]; -"104 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" [color=lightblue, id=104, label="conv2d_IW240_OW10_G12_#104", style=filled, type=conv2d]; -"105 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/MemoryEfficientSwish[_swish]/sigmoid_2" [id=105, label="sigmoid_IW10_OW10_#105", style=filled, type=sigmoid]; -"106 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/MemoryEfficientSwish[_swish]/__mul___2" [id=106, label="__mul___IW[10, 10]_OW10_#106", style=filled, type=__mul__]; -"107 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/NNCFUserConv2dStaticSamePadding[_se_expand]/conv2d_0" [color=lightblue, id=107, label="conv2d_IW10_OW240_G13_#107", style=filled, type=conv2d]; -"108 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/sigmoid_0" [id=108, label="sigmoid_IW240_OW240_#108", style=filled, type=sigmoid]; -"109 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/__mul___0" [id=109, label="__mul___IW[240, 240]_OW240_#109", style=filled, type=__mul__]; -"110 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/NNCFUserConv2dStaticSamePadding[_project_conv]/conv2d_0" [color=lightblue, id=110, label="conv2d_IW240_OW80_G18_#110", style=filled, type=conv2d]; -"111 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/NNCFBatchNorm2d[_bn2]/batch_norm_0" [id=111, label="batch_norm_IW80_OW80_#111", style=filled, type=batch_norm]; -"112 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/NNCFUserConv2dStaticSamePadding[_expand_conv]/conv2d_0" [color=lightblue, id=112, label="conv2d_IW80_OW480_G15_#112", style=filled, type=conv2d]; -"113 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/NNCFBatchNorm2d[_bn0]/batch_norm_0" [id=113, label="batch_norm_IW480_OW480_#113", style=filled, type=batch_norm]; -"114 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/MemoryEfficientSwish[_swish]/sigmoid_0" [id=114, label="sigmoid_IW480_OW480_#114", style=filled, type=sigmoid]; -"115 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/MemoryEfficientSwish[_swish]/__mul___0" [id=115, label="__mul___IW[480, 480]_OW480_#115", style=filled, type=__mul__]; -"116 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateInputs[1]/__getitem___0" [id=116, 
label="__getitem___#116", style=filled, type=__getitem__]; -"117 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ZeroPad2d[static_padding]/pad_0" [id=117, label="pad_IW480_OW480_#116", style=filled, type=pad]; -"118 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/conv2d_0" [color=purple, id=118, label="DW_conv2d_IW480_OW480_G15_#117", style=filled, type=conv2d]; -"119 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/NNCFBatchNorm2d[_bn1]/batch_norm_0" [id=119, label="batch_norm_IW480_OW480_#118", style=filled, type=batch_norm]; -"120 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/MemoryEfficientSwish[_swish]/sigmoid_1" [id=120, label="sigmoid_IW480_OW480_#119", style=filled, type=sigmoid]; -"121 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/MemoryEfficientSwish[_swish]/__mul___1" [id=121, label="__mul___IW[480, 480]_OW480_#120", style=filled, type=__mul__]; -"122 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/adaptive_avg_pool2d_0" [id=122, label="adaptive_avg_pool2d_IW480_OW480_#121", style=filled, type=adaptive_avg_pool2d]; -"123 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" [color=lightblue, id=123, label="conv2d_IW480_OW20_G14_#122", style=filled, type=conv2d]; -"124 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/MemoryEfficientSwish[_swish]/sigmoid_2" [id=124, label="sigmoid_IW20_OW20_#123", style=filled, type=sigmoid]; -"125 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/MemoryEfficientSwish[_swish]/__mul___2" [id=125, label="__mul___IW[20, 20]_OW20_#124", style=filled, type=__mul__]; -"126 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/NNCFUserConv2dStaticSamePadding[_se_expand]/conv2d_0" [color=lightblue, id=126, label="conv2d_IW20_OW480_G15_#125", style=filled, type=conv2d]; -"127 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/sigmoid_0" [id=127, label="sigmoid_IW480_OW480_#126", style=filled, type=sigmoid]; -"128 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/__mul___0" [id=128, label="__mul___IW[480, 480]_OW480_#127", style=filled, type=__mul__]; -"129 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/NNCFUserConv2dStaticSamePadding[_project_conv]/conv2d_0" [color=lightblue, id=129, label="conv2d_IW480_OW80_G18_#128", style=filled, type=conv2d]; -"130 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/NNCFBatchNorm2d[_bn2]/batch_norm_0" [id=130, label="batch_norm_IW80_OW80_#129", style=filled, type=batch_norm]; -"131 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/__add___0" [id=131, label="__add___IW[80, 80]_OW80_#130", style=filled, type=__add__]; -"132 EfficientNet/ModuleList[_blocks]/MBConvBlock[8]/NNCFUserConv2dStaticSamePadding[_expand_conv]/conv2d_0" [color=lightblue, id=132, label="conv2d_IW80_OW480_G20_#150", style=filled, type=conv2d]; -"133 EfficientNet/ModuleList[_blocks]/MBConvBlock[8]/NNCFBatchNorm2d[_bn0]/batch_norm_0" [id=133, label="batch_norm_IW480_OW480_#151", style=filled, type=batch_norm]; -"134 EfficientNet/ModuleList[_blocks]/MBConvBlock[8]/MemoryEfficientSwish[_swish]/sigmoid_0" [id=134, label="sigmoid_IW480_OW480_#152", style=filled, type=sigmoid]; -"135 EfficientNet/ModuleList[_blocks]/MBConvBlock[8]/MemoryEfficientSwish[_swish]/__mul___0" [id=135, label="__mul___IW[480, 480]_OW480_#153", style=filled, type=__mul__]; -"136 EfficientNet/ModuleList[_blocks]/MBConvBlock[8]/NNCFUserConv2dStaticSamePadding[_project_conv]/conv2d_0" [color=lightblue, id=136, label="conv2d_IW480_OW112_G25_#166", 
style=filled, type=conv2d]; -"137 EfficientNet/ModuleList[_blocks]/MBConvBlock[8]/NNCFBatchNorm2d[_bn2]/batch_norm_0" [id=137, label="batch_norm_IW112_OW112_#167", style=filled, type=batch_norm]; -"138 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFUserConv2dStaticSamePadding[_expand_conv]/conv2d_0" [color=lightblue, id=138, label="conv2d_IW112_OW672_G22_#168", style=filled, type=conv2d]; -"139 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFBatchNorm2d[_bn0]/batch_norm_0" [id=139, label="batch_norm_IW672_OW672_#169", style=filled, type=batch_norm]; -"140 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/MemoryEfficientSwish[_swish]/sigmoid_0" [id=140, label="sigmoid_IW672_OW672_#170", style=filled, type=sigmoid]; -"141 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/MemoryEfficientSwish[_swish]/__mul___0" [id=141, label="__mul___IW[672, 672]_OW672_#171", style=filled, type=__mul__]; -"142 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateInputs[1]/__getitem___0" [id=142, label="__getitem___#142", style=filled, type=__getitem__]; -"143 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateWeight[3]/ElasticKernelConv2DOp[op]/linear_0" [id=143, label="linear_#143", style=filled, type=linear]; -"144 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateWeight[3]/ElasticKernelConv2DOp[op]/view_0" [id=144, label="view_#144", style=filled, type=view]; -"145 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateWeight[3]/ElasticKernelConv2DOp[op]/view_1" [id=145, label="view_#145", style=filled, type=view]; -"146 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ZeroPad2d[static_padding]/pad_0" [id=146, label="pad_IW672_OW672_#172", style=filled, type=pad]; -"147 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/conv2d_0" [color=purple, id=147, label="DW_conv2d_IW672_OW672_G22_#173", style=filled, type=conv2d]; -"148 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFBatchNorm2d[_bn1]/batch_norm_0" [id=148, label="batch_norm_IW672_OW672_#174", style=filled, type=batch_norm]; -"149 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/MemoryEfficientSwish[_swish]/sigmoid_1" [id=149, label="sigmoid_IW672_OW672_#175", style=filled, type=sigmoid]; -"150 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/MemoryEfficientSwish[_swish]/__mul___1" [id=150, label="__mul___IW[672, 672]_OW672_#176", style=filled, type=__mul__]; -"151 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/adaptive_avg_pool2d_0" [id=151, label="adaptive_avg_pool2d_IW672_OW672_#177", style=filled, type=adaptive_avg_pool2d]; -"152 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" [color=lightblue, id=152, label="conv2d_IW672_OW28_G21_#178", style=filled, type=conv2d]; -"153 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/MemoryEfficientSwish[_swish]/sigmoid_2" [id=153, label="sigmoid_IW28_OW28_#179", style=filled, type=sigmoid]; -"154 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/MemoryEfficientSwish[_swish]/__mul___2" [id=154, label="__mul___IW[28, 28]_OW28_#180", style=filled, type=__mul__]; -"155 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFUserConv2dStaticSamePadding[_se_expand]/conv2d_0" [color=lightblue, 
id=155, label="conv2d_IW28_OW672_G22_#181", style=filled, type=conv2d]; -"156 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/sigmoid_0" [id=156, label="sigmoid_IW672_OW672_#182", style=filled, type=sigmoid]; -"157 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/__mul___0" [id=157, label="__mul___IW[672, 672]_OW672_#183", style=filled, type=__mul__]; -"158 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFUserConv2dStaticSamePadding[_project_conv]/conv2d_0" [color=lightblue, id=158, label="conv2d_IW672_OW112_G25_#184", style=filled, type=conv2d]; -"159 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFBatchNorm2d[_bn2]/batch_norm_0" [id=159, label="batch_norm_IW112_OW112_#185", style=filled, type=batch_norm]; -"160 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/__add___0" [id=160, label="__add___IW[112, 112]_OW112_#186", style=filled, type=__add__]; -"161 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFUserConv2dStaticSamePadding[_expand_conv]/conv2d_0" [color=lightblue, id=161, label="conv2d_IW112_OW672_G27_#206", style=filled, type=conv2d]; -"162 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFBatchNorm2d[_bn0]/batch_norm_0" [id=162, label="batch_norm_IW672_OW672_#207", style=filled, type=batch_norm]; -"163 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/MemoryEfficientSwish[_swish]/sigmoid_0" [id=163, label="sigmoid_IW672_OW672_#208", style=filled, type=sigmoid]; -"164 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/MemoryEfficientSwish[_swish]/__mul___0" [id=164, label="__mul___IW[672, 672]_OW672_#209", style=filled, type=__mul__]; -"165 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateInputs[1]/__getitem___0" [id=165, label="__getitem___#165", style=filled, type=__getitem__]; -"166 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateWeight[3]/ElasticKernelConv2DOp[op]/linear_0" [id=166, label="linear_#166", style=filled, type=linear]; -"167 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateWeight[3]/ElasticKernelConv2DOp[op]/view_0" [id=167, label="view_#167", style=filled, type=view]; -"168 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateWeight[3]/ElasticKernelConv2DOp[op]/view_1" [id=168, label="view_#168", style=filled, type=view]; -"169 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ZeroPad2d[static_padding]/pad_0" [id=169, label="pad_IW672_OW672_#210", style=filled, type=pad]; -"170 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/conv2d_0" [color=purple, id=170, label="DW_conv2d_IW672_OW672_G27_#211", style=filled, type=conv2d]; -"171 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFBatchNorm2d[_bn1]/batch_norm_0" [id=171, label="batch_norm_IW672_OW672_#212", style=filled, type=batch_norm]; -"172 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/MemoryEfficientSwish[_swish]/sigmoid_1" [id=172, label="sigmoid_IW672_OW672_#213", style=filled, type=sigmoid]; -"173 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/MemoryEfficientSwish[_swish]/__mul___1" [id=173, label="__mul___IW[672, 672]_OW672_#214", style=filled, type=__mul__]; -"174 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/adaptive_avg_pool2d_0" [id=174, label="adaptive_avg_pool2d_IW672_OW672_#215", style=filled, 
type=adaptive_avg_pool2d]; -"175 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" [color=lightblue, id=175, label="conv2d_IW672_OW28_G26_#216", style=filled, type=conv2d]; -"176 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/MemoryEfficientSwish[_swish]/sigmoid_2" [id=176, label="sigmoid_IW28_OW28_#217", style=filled, type=sigmoid]; -"177 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/MemoryEfficientSwish[_swish]/__mul___2" [id=177, label="__mul___IW[28, 28]_OW28_#218", style=filled, type=__mul__]; -"178 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFUserConv2dStaticSamePadding[_se_expand]/conv2d_0" [color=lightblue, id=178, label="conv2d_IW28_OW672_G27_#219", style=filled, type=conv2d]; -"179 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/sigmoid_0" [id=179, label="sigmoid_IW672_OW672_#220", style=filled, type=sigmoid]; -"180 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/__mul___0" [id=180, label="__mul___IW[672, 672]_OW672_#221", style=filled, type=__mul__]; -"181 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFUserConv2dStaticSamePadding[_project_conv]/conv2d_0" [color=lightblue, id=181, label="conv2d_IW672_OW192_G34_#222", style=filled, type=conv2d]; -"182 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFBatchNorm2d[_bn2]/batch_norm_0" [id=182, label="batch_norm_IW192_OW192_#223", style=filled, type=batch_norm]; -"183 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFUserConv2dStaticSamePadding[_expand_conv]/conv2d_0" [color=lightblue, id=183, label="conv2d_IW192_OW1152_G29_#224", style=filled, type=conv2d]; -"184 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFBatchNorm2d[_bn0]/batch_norm_0" [id=184, label="batch_norm_IW1152_OW1152_#225", style=filled, type=batch_norm]; -"185 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/MemoryEfficientSwish[_swish]/sigmoid_0" [id=185, label="sigmoid_IW1152_OW1152_#226", style=filled, type=sigmoid]; -"186 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/MemoryEfficientSwish[_swish]/__mul___0" [id=186, label="__mul___IW[1152, 1152]_OW1152_#227", style=filled, type=__mul__]; -"187 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateInputs[1]/__getitem___0" [id=187, label="__getitem___#187", style=filled, type=__getitem__]; -"188 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateWeight[3]/ElasticKernelConv2DOp[op]/linear_0" [id=188, label="linear_#188", style=filled, type=linear]; -"189 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateWeight[3]/ElasticKernelConv2DOp[op]/view_0" [id=189, label="view_#189", style=filled, type=view]; -"190 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateWeight[3]/ElasticKernelConv2DOp[op]/view_1" [id=190, label="view_#190", style=filled, type=view]; -"191 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ZeroPad2d[static_padding]/pad_0" [id=191, label="pad_IW1152_OW1152_#228", style=filled, type=pad]; -"192 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/conv2d_0" [color=purple, id=192, label="DW_conv2d_IW1152_OW1152_G29_#229", style=filled, type=conv2d]; -"193 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFBatchNorm2d[_bn1]/batch_norm_0" 
[id=193, label="batch_norm_IW1152_OW1152_#230", style=filled, type=batch_norm]; -"194 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/MemoryEfficientSwish[_swish]/sigmoid_1" [id=194, label="sigmoid_IW1152_OW1152_#231", style=filled, type=sigmoid]; -"195 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/MemoryEfficientSwish[_swish]/__mul___1" [id=195, label="__mul___IW[1152, 1152]_OW1152_#232", style=filled, type=__mul__]; -"196 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/adaptive_avg_pool2d_0" [id=196, label="adaptive_avg_pool2d_IW1152_OW1152_#233", style=filled, type=adaptive_avg_pool2d]; -"197 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" [color=lightblue, id=197, label="conv2d_IW1152_OW48_G28_#234", style=filled, type=conv2d]; -"198 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/MemoryEfficientSwish[_swish]/sigmoid_2" [id=198, label="sigmoid_IW48_OW48_#235", style=filled, type=sigmoid]; -"199 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/MemoryEfficientSwish[_swish]/__mul___2" [id=199, label="__mul___IW[48, 48]_OW48_#236", style=filled, type=__mul__]; -"200 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFUserConv2dStaticSamePadding[_se_expand]/conv2d_0" [color=lightblue, id=200, label="conv2d_IW48_OW1152_G29_#237", style=filled, type=conv2d]; -"201 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/sigmoid_0" [id=201, label="sigmoid_IW1152_OW1152_#238", style=filled, type=sigmoid]; -"202 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/__mul___0" [id=202, label="__mul___IW[1152, 1152]_OW1152_#239", style=filled, type=__mul__]; -"203 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFUserConv2dStaticSamePadding[_project_conv]/conv2d_0" [color=lightblue, id=203, label="conv2d_IW1152_OW192_G34_#240", style=filled, type=conv2d]; -"204 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFBatchNorm2d[_bn2]/batch_norm_0" [id=204, label="batch_norm_IW192_OW192_#241", style=filled, type=batch_norm]; -"205 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/__add___0" [id=205, label="__add___IW[192, 192]_OW192_#242", style=filled, type=__add__]; -"206 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFUserConv2dStaticSamePadding[_expand_conv]/conv2d_0" [color=lightblue, id=206, label="conv2d_IW192_OW1152_G31_#243", style=filled, type=conv2d]; -"207 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFBatchNorm2d[_bn0]/batch_norm_0" [id=207, label="batch_norm_IW1152_OW1152_#244", style=filled, type=batch_norm]; -"208 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/MemoryEfficientSwish[_swish]/sigmoid_0" [id=208, label="sigmoid_IW1152_OW1152_#245", style=filled, type=sigmoid]; -"209 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/MemoryEfficientSwish[_swish]/__mul___0" [id=209, label="__mul___IW[1152, 1152]_OW1152_#246", style=filled, type=__mul__]; -"210 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateInputs[1]/__getitem___0" [id=210, label="__getitem___#210", style=filled, type=__getitem__]; -"211 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ZeroPad2d[static_padding]/pad_0" [id=211, label="pad_IW1152_OW1152_#247", style=filled, type=pad]; -"212 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/conv2d_0" [color=purple, id=212, label="DW_conv2d_IW1152_OW1152_G31_#248", style=filled, type=conv2d]; -"213 
EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFBatchNorm2d[_bn1]/batch_norm_0" [id=213, label="batch_norm_IW1152_OW1152_#249", style=filled, type=batch_norm]; -"214 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/MemoryEfficientSwish[_swish]/sigmoid_1" [id=214, label="sigmoid_IW1152_OW1152_#250", style=filled, type=sigmoid]; -"215 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/MemoryEfficientSwish[_swish]/__mul___1" [id=215, label="__mul___IW[1152, 1152]_OW1152_#251", style=filled, type=__mul__]; -"216 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/adaptive_avg_pool2d_0" [id=216, label="adaptive_avg_pool2d_IW1152_OW1152_#252", style=filled, type=adaptive_avg_pool2d]; -"217 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" [color=lightblue, id=217, label="conv2d_IW1152_OW48_G30_#253", style=filled, type=conv2d]; -"218 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/MemoryEfficientSwish[_swish]/sigmoid_2" [id=218, label="sigmoid_IW48_OW48_#254", style=filled, type=sigmoid]; -"219 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/MemoryEfficientSwish[_swish]/__mul___2" [id=219, label="__mul___IW[48, 48]_OW48_#255", style=filled, type=__mul__]; -"220 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFUserConv2dStaticSamePadding[_se_expand]/conv2d_0" [color=lightblue, id=220, label="conv2d_IW48_OW1152_G31_#256", style=filled, type=conv2d]; -"221 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/sigmoid_0" [id=221, label="sigmoid_IW1152_OW1152_#257", style=filled, type=sigmoid]; -"222 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/__mul___0" [id=222, label="__mul___IW[1152, 1152]_OW1152_#258", style=filled, type=__mul__]; -"223 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFUserConv2dStaticSamePadding[_project_conv]/conv2d_0" [color=lightblue, id=223, label="conv2d_IW1152_OW192_G34_#259", style=filled, type=conv2d]; -"224 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFBatchNorm2d[_bn2]/batch_norm_0" [id=224, label="batch_norm_IW192_OW192_#260", style=filled, type=batch_norm]; -"225 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/__add___0" [id=225, label="__add___IW[192, 192]_OW192_#261", style=filled, type=__add__]; -"226 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/NNCFUserConv2dStaticSamePadding[_expand_conv]/conv2d_0" [color=lightblue, id=226, label="conv2d_IW192_OW1152_G33_#262", style=filled, type=conv2d]; -"227 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/NNCFBatchNorm2d[_bn0]/batch_norm_0" [id=227, label="batch_norm_IW1152_OW1152_#263", style=filled, type=batch_norm]; -"228 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/MemoryEfficientSwish[_swish]/sigmoid_0" [id=228, label="sigmoid_IW1152_OW1152_#264", style=filled, type=sigmoid]; -"229 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/MemoryEfficientSwish[_swish]/__mul___0" [id=229, label="__mul___IW[1152, 1152]_OW1152_#265", style=filled, type=__mul__]; -"230 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateInputs[1]/__getitem___0" [id=230, label="__getitem___#230", style=filled, type=__getitem__]; -"231 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateWeight[3]/ElasticKernelConv2DOp[op]/linear_0" [id=231, label="linear_#231", style=filled, type=linear]; -"232 
EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateWeight[3]/ElasticKernelConv2DOp[op]/view_0" [id=232, label="view_#232", style=filled, type=view]; -"233 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateWeight[3]/ElasticKernelConv2DOp[op]/view_1" [id=233, label="view_#233", style=filled, type=view]; -"234 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ZeroPad2d[static_padding]/pad_0" [id=234, label="pad_IW1152_OW1152_#266", style=filled, type=pad]; -"235 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/conv2d_0" [color=purple, id=235, label="DW_conv2d_IW1152_OW1152_G33_#267", style=filled, type=conv2d]; -"236 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/NNCFBatchNorm2d[_bn1]/batch_norm_0" [id=236, label="batch_norm_IW1152_OW1152_#268", style=filled, type=batch_norm]; -"237 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/MemoryEfficientSwish[_swish]/sigmoid_1" [id=237, label="sigmoid_IW1152_OW1152_#269", style=filled, type=sigmoid]; -"238 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/MemoryEfficientSwish[_swish]/__mul___1" [id=238, label="__mul___IW[1152, 1152]_OW1152_#270", style=filled, type=__mul__]; -"239 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/adaptive_avg_pool2d_0" [id=239, label="adaptive_avg_pool2d_IW1152_OW1152_#271", style=filled, type=adaptive_avg_pool2d]; -"240 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" [color=lightblue, id=240, label="conv2d_IW1152_OW48_G32_#272", style=filled, type=conv2d]; -"241 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/MemoryEfficientSwish[_swish]/sigmoid_2" [id=241, label="sigmoid_IW48_OW48_#273", style=filled, type=sigmoid]; -"242 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/MemoryEfficientSwish[_swish]/__mul___2" [id=242, label="__mul___IW[48, 48]_OW48_#274", style=filled, type=__mul__]; -"243 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/NNCFUserConv2dStaticSamePadding[_se_expand]/conv2d_0" [color=lightblue, id=243, label="conv2d_IW48_OW1152_G33_#275", style=filled, type=conv2d]; -"244 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/sigmoid_0" [id=244, label="sigmoid_IW1152_OW1152_#276", style=filled, type=sigmoid]; -"245 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/__mul___0" [id=245, label="__mul___IW[1152, 1152]_OW1152_#277", style=filled, type=__mul__]; -"246 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/NNCFUserConv2dStaticSamePadding[_project_conv]/conv2d_0" [color=lightblue, id=246, label="conv2d_IW1152_OW192_G34_#278", style=filled, type=conv2d]; -"247 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/NNCFBatchNorm2d[_bn2]/batch_norm_0" [id=247, label="batch_norm_IW192_OW192_#279", style=filled, type=batch_norm]; -"248 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/__add___0" [id=248, label="__add___IW[192, 192]_OW192_#280", style=filled, type=__add__]; -"249 EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/NNCFUserConv2dStaticSamePadding[_expand_conv]/conv2d_0" [color=lightblue, id=249, label="conv2d_IW192_OW1152_G36_#281", style=filled, type=conv2d]; -"250 EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/NNCFBatchNorm2d[_bn0]/batch_norm_0" [id=250, label="batch_norm_IW1152_OW1152_#282", style=filled, type=batch_norm]; -"251 
EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/MemoryEfficientSwish[_swish]/sigmoid_0" [id=251, label="sigmoid_IW1152_OW1152_#283", style=filled, type=sigmoid]; -"252 EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/MemoryEfficientSwish[_swish]/__mul___0" [id=252, label="__mul___IW[1152, 1152]_OW1152_#284", style=filled, type=__mul__]; -"253 EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/NNCFUserConv2dStaticSamePadding[_project_conv]/conv2d_0" [color=lightblue, id=253, label="conv2d_IW1152_OW320_G39_#297", style=filled, type=conv2d]; -"254 EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/NNCFBatchNorm2d[_bn2]/batch_norm_0" [id=254, label="batch_norm_IW320_OW320_#298", style=filled, type=batch_norm]; -"255 EfficientNet/NNCFUserConv2dStaticSamePadding[_conv_head]/conv2d_0" [color=lightblue, id=255, label="conv2d_IW320_OW1280_G37_#299", style=filled, type=conv2d]; -"256 EfficientNet/NNCFBatchNorm2d[_bn1]/batch_norm_0" [id=256, label="batch_norm_IW1280_OW1280_#300", style=filled, type=batch_norm]; -"257 EfficientNet/MemoryEfficientSwish[_swish]/sigmoid_1" [id=257, label="sigmoid_IW1280_OW1280_#301", style=filled, type=sigmoid]; -"258 EfficientNet/MemoryEfficientSwish[_swish]/__mul___1" [id=258, label="__mul___IW[1280, 1280]_OW1280_#302", style=filled, type=__mul__]; -"259 EfficientNet/AdaptiveAvgPool2d[_avg_pooling]/adaptive_avg_pool2d_0" [id=259, label="adaptive_avg_pool2d_IW1280_OW1280_#303", style=filled, type=adaptive_avg_pool2d]; -"260 EfficientNet/flatten_0" [id=260, label="flatten_IW1280_OW1280_#304", style=filled, type=flatten]; -"261 EfficientNet/Dropout[_dropout]/dropout_0" [id=261, label="dropout_IW1280_OW1280_#305", style=filled, type=dropout]; -"262 EfficientNet/NNCFLinear[_fc]/linear_0" [id=262, label="linear_IW1280_#306", style=filled, type=linear]; -"263 /nncf_model_output_0" [id=263, label="nncf_model_output_#307", style=filled, type=nncf_model_output]; +"53 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ZeroPad2d[static_padding]/pad_0" [id=53, label="pad_IW144_OW144_#61", style=filled, type=pad]; +"54 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/conv2d_0" [color=purple, id=54, label="DW_conv2d_IW144_OW144_G8_#62", style=filled, type=conv2d]; +"55 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/NNCFBatchNorm2d[_bn1]/batch_norm_0" [id=55, label="batch_norm_IW144_OW144_#63", style=filled, type=batch_norm]; +"56 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/MemoryEfficientSwish[_swish]/sigmoid_1" [id=56, label="sigmoid_IW144_OW144_#64", style=filled, type=sigmoid]; +"57 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/MemoryEfficientSwish[_swish]/__mul___1" [id=57, label="__mul___IW[144, 144]_OW144_#65", style=filled, type=__mul__]; +"58 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/adaptive_avg_pool2d_0" [id=58, label="adaptive_avg_pool2d_IW144_OW144_#66", style=filled, type=adaptive_avg_pool2d]; +"59 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" [color=lightblue, id=59, label="conv2d_IW144_OW6_G7_#67", style=filled, type=conv2d]; +"60 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/MemoryEfficientSwish[_swish]/sigmoid_2" [id=60, label="sigmoid_IW6_OW6_#68", style=filled, type=sigmoid]; +"61 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/MemoryEfficientSwish[_swish]/__mul___2" [id=61, label="__mul___IW[6, 6]_OW6_#69", style=filled, type=__mul__]; +"62 
EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/NNCFUserConv2dStaticSamePadding[_se_expand]/conv2d_0" [color=lightblue, id=62, label="conv2d_IW6_OW144_G8_#70", style=filled, type=conv2d]; +"63 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/sigmoid_0" [id=63, label="sigmoid_IW144_OW144_#71", style=filled, type=sigmoid]; +"64 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/__mul___0" [id=64, label="__mul___IW[144, 144]_OW144_#72", style=filled, type=__mul__]; +"65 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/NNCFUserConv2dStaticSamePadding[_project_conv]/conv2d_0" [color=lightblue, id=65, label="conv2d_IW144_OW40_G11_#73", style=filled, type=conv2d]; +"66 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/NNCFBatchNorm2d[_bn2]/batch_norm_0" [id=66, label="batch_norm_IW40_OW40_#74", style=filled, type=batch_norm]; +"67 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/NNCFUserConv2dStaticSamePadding[_expand_conv]/conv2d_0" [color=lightblue, id=67, label="conv2d_IW40_OW240_G10_#75", style=filled, type=conv2d]; +"68 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/NNCFBatchNorm2d[_bn0]/batch_norm_0" [id=68, label="batch_norm_IW240_OW240_#76", style=filled, type=batch_norm]; +"69 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/MemoryEfficientSwish[_swish]/sigmoid_0" [id=69, label="sigmoid_IW240_OW240_#77", style=filled, type=sigmoid]; +"70 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/MemoryEfficientSwish[_swish]/__mul___0" [id=70, label="__mul___IW[240, 240]_OW240_#78", style=filled, type=__mul__]; +"71 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateInputs[1]/__getitem___0" [id=71, label="__getitem___#71", style=filled, type=__getitem__]; +"72 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ZeroPad2d[static_padding]/pad_0" [id=72, label="pad_IW240_OW240_#79", style=filled, type=pad]; +"73 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/conv2d_0" [color=purple, id=73, label="DW_conv2d_IW240_OW240_G10_#80", style=filled, type=conv2d]; +"74 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/NNCFBatchNorm2d[_bn1]/batch_norm_0" [id=74, label="batch_norm_IW240_OW240_#81", style=filled, type=batch_norm]; +"75 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/MemoryEfficientSwish[_swish]/sigmoid_1" [id=75, label="sigmoid_IW240_OW240_#82", style=filled, type=sigmoid]; +"76 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/MemoryEfficientSwish[_swish]/__mul___1" [id=76, label="__mul___IW[240, 240]_OW240_#83", style=filled, type=__mul__]; +"77 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/adaptive_avg_pool2d_0" [id=77, label="adaptive_avg_pool2d_IW240_OW240_#84", style=filled, type=adaptive_avg_pool2d]; +"78 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" [color=lightblue, id=78, label="conv2d_IW240_OW10_G9_#85", style=filled, type=conv2d]; +"79 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/MemoryEfficientSwish[_swish]/sigmoid_2" [id=79, label="sigmoid_IW10_OW10_#86", style=filled, type=sigmoid]; +"80 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/MemoryEfficientSwish[_swish]/__mul___2" [id=80, label="__mul___IW[10, 10]_OW10_#87", style=filled, type=__mul__]; +"81 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/NNCFUserConv2dStaticSamePadding[_se_expand]/conv2d_0" [color=lightblue, id=81, label="conv2d_IW10_OW240_G10_#88", style=filled, type=conv2d]; +"82 
EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/sigmoid_0" [id=82, label="sigmoid_IW240_OW240_#89", style=filled, type=sigmoid]; +"83 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/__mul___0" [id=83, label="__mul___IW[240, 240]_OW240_#90", style=filled, type=__mul__]; +"84 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/NNCFUserConv2dStaticSamePadding[_project_conv]/conv2d_0" [color=lightblue, id=84, label="conv2d_IW240_OW40_G11_#91", style=filled, type=conv2d]; +"85 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/NNCFBatchNorm2d[_bn2]/batch_norm_0" [id=85, label="batch_norm_IW40_OW40_#92", style=filled, type=batch_norm]; +"86 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/__add___0" [id=86, label="__add___IW[40, 40]_OW40_#93", style=filled, type=__add__]; +"87 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/NNCFUserConv2dStaticSamePadding[_expand_conv]/conv2d_0" [color=lightblue, id=87, label="conv2d_IW40_OW240_G13_#94", style=filled, type=conv2d]; +"88 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/NNCFBatchNorm2d[_bn0]/batch_norm_0" [id=88, label="batch_norm_IW240_OW240_#95", style=filled, type=batch_norm]; +"89 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/MemoryEfficientSwish[_swish]/sigmoid_0" [id=89, label="sigmoid_IW240_OW240_#96", style=filled, type=sigmoid]; +"90 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/MemoryEfficientSwish[_swish]/__mul___0" [id=90, label="__mul___IW[240, 240]_OW240_#97", style=filled, type=__mul__]; +"91 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateInputs[1]/__getitem___0" [id=91, label="__getitem___#91", style=filled, type=__getitem__]; +"92 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ZeroPad2d[static_padding]/pad_0" [id=92, label="pad_IW240_OW240_#98", style=filled, type=pad]; +"93 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/conv2d_0" [color=purple, id=93, label="DW_conv2d_IW240_OW240_G13_#99", style=filled, type=conv2d]; +"94 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/NNCFBatchNorm2d[_bn1]/batch_norm_0" [id=94, label="batch_norm_IW240_OW240_#100", style=filled, type=batch_norm]; +"95 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/MemoryEfficientSwish[_swish]/sigmoid_1" [id=95, label="sigmoid_IW240_OW240_#101", style=filled, type=sigmoid]; +"96 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/MemoryEfficientSwish[_swish]/__mul___1" [id=96, label="__mul___IW[240, 240]_OW240_#102", style=filled, type=__mul__]; +"97 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/adaptive_avg_pool2d_0" [id=97, label="adaptive_avg_pool2d_IW240_OW240_#103", style=filled, type=adaptive_avg_pool2d]; +"98 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" [color=lightblue, id=98, label="conv2d_IW240_OW10_G12_#104", style=filled, type=conv2d]; +"99 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/MemoryEfficientSwish[_swish]/sigmoid_2" [id=99, label="sigmoid_IW10_OW10_#105", style=filled, type=sigmoid]; +"100 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/MemoryEfficientSwish[_swish]/__mul___2" [id=100, label="__mul___IW[10, 10]_OW10_#106", style=filled, type=__mul__]; +"101 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/NNCFUserConv2dStaticSamePadding[_se_expand]/conv2d_0" [color=lightblue, id=101, label="conv2d_IW10_OW240_G13_#107", style=filled, type=conv2d]; +"102 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/sigmoid_0" [id=102, 
label="sigmoid_IW240_OW240_#108", style=filled, type=sigmoid]; +"103 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/__mul___0" [id=103, label="__mul___IW[240, 240]_OW240_#109", style=filled, type=__mul__]; +"104 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/NNCFUserConv2dStaticSamePadding[_project_conv]/conv2d_0" [color=lightblue, id=104, label="conv2d_IW240_OW80_G18_#110", style=filled, type=conv2d]; +"105 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/NNCFBatchNorm2d[_bn2]/batch_norm_0" [id=105, label="batch_norm_IW80_OW80_#111", style=filled, type=batch_norm]; +"106 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/NNCFUserConv2dStaticSamePadding[_expand_conv]/conv2d_0" [color=lightblue, id=106, label="conv2d_IW80_OW480_G15_#112", style=filled, type=conv2d]; +"107 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/NNCFBatchNorm2d[_bn0]/batch_norm_0" [id=107, label="batch_norm_IW480_OW480_#113", style=filled, type=batch_norm]; +"108 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/MemoryEfficientSwish[_swish]/sigmoid_0" [id=108, label="sigmoid_IW480_OW480_#114", style=filled, type=sigmoid]; +"109 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/MemoryEfficientSwish[_swish]/__mul___0" [id=109, label="__mul___IW[480, 480]_OW480_#115", style=filled, type=__mul__]; +"110 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateInputs[1]/__getitem___0" [id=110, label="__getitem___#110", style=filled, type=__getitem__]; +"111 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ZeroPad2d[static_padding]/pad_0" [id=111, label="pad_IW480_OW480_#116", style=filled, type=pad]; +"112 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/conv2d_0" [color=purple, id=112, label="DW_conv2d_IW480_OW480_G15_#117", style=filled, type=conv2d]; +"113 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/NNCFBatchNorm2d[_bn1]/batch_norm_0" [id=113, label="batch_norm_IW480_OW480_#118", style=filled, type=batch_norm]; +"114 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/MemoryEfficientSwish[_swish]/sigmoid_1" [id=114, label="sigmoid_IW480_OW480_#119", style=filled, type=sigmoid]; +"115 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/MemoryEfficientSwish[_swish]/__mul___1" [id=115, label="__mul___IW[480, 480]_OW480_#120", style=filled, type=__mul__]; +"116 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/adaptive_avg_pool2d_0" [id=116, label="adaptive_avg_pool2d_IW480_OW480_#121", style=filled, type=adaptive_avg_pool2d]; +"117 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" [color=lightblue, id=117, label="conv2d_IW480_OW20_G14_#122", style=filled, type=conv2d]; +"118 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/MemoryEfficientSwish[_swish]/sigmoid_2" [id=118, label="sigmoid_IW20_OW20_#123", style=filled, type=sigmoid]; +"119 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/MemoryEfficientSwish[_swish]/__mul___2" [id=119, label="__mul___IW[20, 20]_OW20_#124", style=filled, type=__mul__]; +"120 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/NNCFUserConv2dStaticSamePadding[_se_expand]/conv2d_0" [color=lightblue, id=120, label="conv2d_IW20_OW480_G15_#125", style=filled, type=conv2d]; +"121 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/sigmoid_0" [id=121, label="sigmoid_IW480_OW480_#126", style=filled, type=sigmoid]; +"122 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/__mul___0" [id=122, label="__mul___IW[480, 
480]_OW480_#127", style=filled, type=__mul__]; +"123 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/NNCFUserConv2dStaticSamePadding[_project_conv]/conv2d_0" [color=lightblue, id=123, label="conv2d_IW480_OW80_G18_#128", style=filled, type=conv2d]; +"124 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/NNCFBatchNorm2d[_bn2]/batch_norm_0" [id=124, label="batch_norm_IW80_OW80_#129", style=filled, type=batch_norm]; +"125 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/__add___0" [id=125, label="__add___IW[80, 80]_OW80_#130", style=filled, type=__add__]; +"126 EfficientNet/ModuleList[_blocks]/MBConvBlock[8]/NNCFUserConv2dStaticSamePadding[_expand_conv]/conv2d_0" [color=lightblue, id=126, label="conv2d_IW80_OW480_G20_#150", style=filled, type=conv2d]; +"127 EfficientNet/ModuleList[_blocks]/MBConvBlock[8]/NNCFBatchNorm2d[_bn0]/batch_norm_0" [id=127, label="batch_norm_IW480_OW480_#151", style=filled, type=batch_norm]; +"128 EfficientNet/ModuleList[_blocks]/MBConvBlock[8]/MemoryEfficientSwish[_swish]/sigmoid_0" [id=128, label="sigmoid_IW480_OW480_#152", style=filled, type=sigmoid]; +"129 EfficientNet/ModuleList[_blocks]/MBConvBlock[8]/MemoryEfficientSwish[_swish]/__mul___0" [id=129, label="__mul___IW[480, 480]_OW480_#153", style=filled, type=__mul__]; +"130 EfficientNet/ModuleList[_blocks]/MBConvBlock[8]/NNCFUserConv2dStaticSamePadding[_project_conv]/conv2d_0" [color=lightblue, id=130, label="conv2d_IW480_OW112_G25_#166", style=filled, type=conv2d]; +"131 EfficientNet/ModuleList[_blocks]/MBConvBlock[8]/NNCFBatchNorm2d[_bn2]/batch_norm_0" [id=131, label="batch_norm_IW112_OW112_#167", style=filled, type=batch_norm]; +"132 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFUserConv2dStaticSamePadding[_expand_conv]/conv2d_0" [color=lightblue, id=132, label="conv2d_IW112_OW672_G22_#168", style=filled, type=conv2d]; +"133 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFBatchNorm2d[_bn0]/batch_norm_0" [id=133, label="batch_norm_IW672_OW672_#169", style=filled, type=batch_norm]; +"134 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/MemoryEfficientSwish[_swish]/sigmoid_0" [id=134, label="sigmoid_IW672_OW672_#170", style=filled, type=sigmoid]; +"135 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/MemoryEfficientSwish[_swish]/__mul___0" [id=135, label="__mul___IW[672, 672]_OW672_#171", style=filled, type=__mul__]; +"136 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateInputs[1]/__getitem___0" [id=136, label="__getitem___#136", style=filled, type=__getitem__]; +"137 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ZeroPad2d[static_padding]/pad_0" [id=137, label="pad_IW672_OW672_#172", style=filled, type=pad]; +"138 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/conv2d_0" [color=purple, id=138, label="DW_conv2d_IW672_OW672_G22_#173", style=filled, type=conv2d]; +"139 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFBatchNorm2d[_bn1]/batch_norm_0" [id=139, label="batch_norm_IW672_OW672_#174", style=filled, type=batch_norm]; +"140 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/MemoryEfficientSwish[_swish]/sigmoid_1" [id=140, label="sigmoid_IW672_OW672_#175", style=filled, type=sigmoid]; +"141 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/MemoryEfficientSwish[_swish]/__mul___1" [id=141, label="__mul___IW[672, 672]_OW672_#176", style=filled, type=__mul__]; +"142 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/adaptive_avg_pool2d_0" 
[id=142, label="adaptive_avg_pool2d_IW672_OW672_#177", style=filled, type=adaptive_avg_pool2d]; +"143 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" [color=lightblue, id=143, label="conv2d_IW672_OW28_G21_#178", style=filled, type=conv2d]; +"144 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/MemoryEfficientSwish[_swish]/sigmoid_2" [id=144, label="sigmoid_IW28_OW28_#179", style=filled, type=sigmoid]; +"145 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/MemoryEfficientSwish[_swish]/__mul___2" [id=145, label="__mul___IW[28, 28]_OW28_#180", style=filled, type=__mul__]; +"146 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFUserConv2dStaticSamePadding[_se_expand]/conv2d_0" [color=lightblue, id=146, label="conv2d_IW28_OW672_G22_#181", style=filled, type=conv2d]; +"147 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/sigmoid_0" [id=147, label="sigmoid_IW672_OW672_#182", style=filled, type=sigmoid]; +"148 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/__mul___0" [id=148, label="__mul___IW[672, 672]_OW672_#183", style=filled, type=__mul__]; +"149 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFUserConv2dStaticSamePadding[_project_conv]/conv2d_0" [color=lightblue, id=149, label="conv2d_IW672_OW112_G25_#184", style=filled, type=conv2d]; +"150 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFBatchNorm2d[_bn2]/batch_norm_0" [id=150, label="batch_norm_IW112_OW112_#185", style=filled, type=batch_norm]; +"151 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/__add___0" [id=151, label="__add___IW[112, 112]_OW112_#186", style=filled, type=__add__]; +"152 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFUserConv2dStaticSamePadding[_expand_conv]/conv2d_0" [color=lightblue, id=152, label="conv2d_IW112_OW672_G27_#206", style=filled, type=conv2d]; +"153 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFBatchNorm2d[_bn0]/batch_norm_0" [id=153, label="batch_norm_IW672_OW672_#207", style=filled, type=batch_norm]; +"154 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/MemoryEfficientSwish[_swish]/sigmoid_0" [id=154, label="sigmoid_IW672_OW672_#208", style=filled, type=sigmoid]; +"155 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/MemoryEfficientSwish[_swish]/__mul___0" [id=155, label="__mul___IW[672, 672]_OW672_#209", style=filled, type=__mul__]; +"156 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateInputs[1]/__getitem___0" [id=156, label="__getitem___#156", style=filled, type=__getitem__]; +"157 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ZeroPad2d[static_padding]/pad_0" [id=157, label="pad_IW672_OW672_#210", style=filled, type=pad]; +"158 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/conv2d_0" [color=purple, id=158, label="DW_conv2d_IW672_OW672_G27_#211", style=filled, type=conv2d]; +"159 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFBatchNorm2d[_bn1]/batch_norm_0" [id=159, label="batch_norm_IW672_OW672_#212", style=filled, type=batch_norm]; +"160 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/MemoryEfficientSwish[_swish]/sigmoid_1" [id=160, label="sigmoid_IW672_OW672_#213", style=filled, type=sigmoid]; +"161 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/MemoryEfficientSwish[_swish]/__mul___1" [id=161, label="__mul___IW[672, 672]_OW672_#214", style=filled, type=__mul__]; +"162 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/adaptive_avg_pool2d_0" 
[id=162, label="adaptive_avg_pool2d_IW672_OW672_#215", style=filled, type=adaptive_avg_pool2d]; +"163 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" [color=lightblue, id=163, label="conv2d_IW672_OW28_G26_#216", style=filled, type=conv2d]; +"164 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/MemoryEfficientSwish[_swish]/sigmoid_2" [id=164, label="sigmoid_IW28_OW28_#217", style=filled, type=sigmoid]; +"165 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/MemoryEfficientSwish[_swish]/__mul___2" [id=165, label="__mul___IW[28, 28]_OW28_#218", style=filled, type=__mul__]; +"166 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFUserConv2dStaticSamePadding[_se_expand]/conv2d_0" [color=lightblue, id=166, label="conv2d_IW28_OW672_G27_#219", style=filled, type=conv2d]; +"167 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/sigmoid_0" [id=167, label="sigmoid_IW672_OW672_#220", style=filled, type=sigmoid]; +"168 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/__mul___0" [id=168, label="__mul___IW[672, 672]_OW672_#221", style=filled, type=__mul__]; +"169 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFUserConv2dStaticSamePadding[_project_conv]/conv2d_0" [color=lightblue, id=169, label="conv2d_IW672_OW192_G34_#222", style=filled, type=conv2d]; +"170 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFBatchNorm2d[_bn2]/batch_norm_0" [id=170, label="batch_norm_IW192_OW192_#223", style=filled, type=batch_norm]; +"171 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFUserConv2dStaticSamePadding[_expand_conv]/conv2d_0" [color=lightblue, id=171, label="conv2d_IW192_OW1152_G29_#224", style=filled, type=conv2d]; +"172 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFBatchNorm2d[_bn0]/batch_norm_0" [id=172, label="batch_norm_IW1152_OW1152_#225", style=filled, type=batch_norm]; +"173 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/MemoryEfficientSwish[_swish]/sigmoid_0" [id=173, label="sigmoid_IW1152_OW1152_#226", style=filled, type=sigmoid]; +"174 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/MemoryEfficientSwish[_swish]/__mul___0" [id=174, label="__mul___IW[1152, 1152]_OW1152_#227", style=filled, type=__mul__]; +"175 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateInputs[1]/__getitem___0" [id=175, label="__getitem___#175", style=filled, type=__getitem__]; +"176 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ZeroPad2d[static_padding]/pad_0" [id=176, label="pad_IW1152_OW1152_#228", style=filled, type=pad]; +"177 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/conv2d_0" [color=purple, id=177, label="DW_conv2d_IW1152_OW1152_G29_#229", style=filled, type=conv2d]; +"178 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFBatchNorm2d[_bn1]/batch_norm_0" [id=178, label="batch_norm_IW1152_OW1152_#230", style=filled, type=batch_norm]; +"179 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/MemoryEfficientSwish[_swish]/sigmoid_1" [id=179, label="sigmoid_IW1152_OW1152_#231", style=filled, type=sigmoid]; +"180 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/MemoryEfficientSwish[_swish]/__mul___1" [id=180, label="__mul___IW[1152, 1152]_OW1152_#232", style=filled, type=__mul__]; +"181 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/adaptive_avg_pool2d_0" [id=181, label="adaptive_avg_pool2d_IW1152_OW1152_#233", style=filled, type=adaptive_avg_pool2d]; +"182 
EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" [color=lightblue, id=182, label="conv2d_IW1152_OW48_G28_#234", style=filled, type=conv2d]; +"183 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/MemoryEfficientSwish[_swish]/sigmoid_2" [id=183, label="sigmoid_IW48_OW48_#235", style=filled, type=sigmoid]; +"184 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/MemoryEfficientSwish[_swish]/__mul___2" [id=184, label="__mul___IW[48, 48]_OW48_#236", style=filled, type=__mul__]; +"185 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFUserConv2dStaticSamePadding[_se_expand]/conv2d_0" [color=lightblue, id=185, label="conv2d_IW48_OW1152_G29_#237", style=filled, type=conv2d]; +"186 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/sigmoid_0" [id=186, label="sigmoid_IW1152_OW1152_#238", style=filled, type=sigmoid]; +"187 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/__mul___0" [id=187, label="__mul___IW[1152, 1152]_OW1152_#239", style=filled, type=__mul__]; +"188 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFUserConv2dStaticSamePadding[_project_conv]/conv2d_0" [color=lightblue, id=188, label="conv2d_IW1152_OW192_G34_#240", style=filled, type=conv2d]; +"189 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFBatchNorm2d[_bn2]/batch_norm_0" [id=189, label="batch_norm_IW192_OW192_#241", style=filled, type=batch_norm]; +"190 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/__add___0" [id=190, label="__add___IW[192, 192]_OW192_#242", style=filled, type=__add__]; +"191 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFUserConv2dStaticSamePadding[_expand_conv]/conv2d_0" [color=lightblue, id=191, label="conv2d_IW192_OW1152_G31_#243", style=filled, type=conv2d]; +"192 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFBatchNorm2d[_bn0]/batch_norm_0" [id=192, label="batch_norm_IW1152_OW1152_#244", style=filled, type=batch_norm]; +"193 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/MemoryEfficientSwish[_swish]/sigmoid_0" [id=193, label="sigmoid_IW1152_OW1152_#245", style=filled, type=sigmoid]; +"194 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/MemoryEfficientSwish[_swish]/__mul___0" [id=194, label="__mul___IW[1152, 1152]_OW1152_#246", style=filled, type=__mul__]; +"195 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateInputs[1]/__getitem___0" [id=195, label="__getitem___#195", style=filled, type=__getitem__]; +"196 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ZeroPad2d[static_padding]/pad_0" [id=196, label="pad_IW1152_OW1152_#247", style=filled, type=pad]; +"197 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/conv2d_0" [color=purple, id=197, label="DW_conv2d_IW1152_OW1152_G31_#248", style=filled, type=conv2d]; +"198 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFBatchNorm2d[_bn1]/batch_norm_0" [id=198, label="batch_norm_IW1152_OW1152_#249", style=filled, type=batch_norm]; +"199 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/MemoryEfficientSwish[_swish]/sigmoid_1" [id=199, label="sigmoid_IW1152_OW1152_#250", style=filled, type=sigmoid]; +"200 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/MemoryEfficientSwish[_swish]/__mul___1" [id=200, label="__mul___IW[1152, 1152]_OW1152_#251", style=filled, type=__mul__]; +"201 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/adaptive_avg_pool2d_0" [id=201, label="adaptive_avg_pool2d_IW1152_OW1152_#252", style=filled, 
type=adaptive_avg_pool2d]; +"202 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" [color=lightblue, id=202, label="conv2d_IW1152_OW48_G30_#253", style=filled, type=conv2d]; +"203 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/MemoryEfficientSwish[_swish]/sigmoid_2" [id=203, label="sigmoid_IW48_OW48_#254", style=filled, type=sigmoid]; +"204 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/MemoryEfficientSwish[_swish]/__mul___2" [id=204, label="__mul___IW[48, 48]_OW48_#255", style=filled, type=__mul__]; +"205 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFUserConv2dStaticSamePadding[_se_expand]/conv2d_0" [color=lightblue, id=205, label="conv2d_IW48_OW1152_G31_#256", style=filled, type=conv2d]; +"206 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/sigmoid_0" [id=206, label="sigmoid_IW1152_OW1152_#257", style=filled, type=sigmoid]; +"207 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/__mul___0" [id=207, label="__mul___IW[1152, 1152]_OW1152_#258", style=filled, type=__mul__]; +"208 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFUserConv2dStaticSamePadding[_project_conv]/conv2d_0" [color=lightblue, id=208, label="conv2d_IW1152_OW192_G34_#259", style=filled, type=conv2d]; +"209 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFBatchNorm2d[_bn2]/batch_norm_0" [id=209, label="batch_norm_IW192_OW192_#260", style=filled, type=batch_norm]; +"210 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/__add___0" [id=210, label="__add___IW[192, 192]_OW192_#261", style=filled, type=__add__]; +"211 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/NNCFUserConv2dStaticSamePadding[_expand_conv]/conv2d_0" [color=lightblue, id=211, label="conv2d_IW192_OW1152_G33_#262", style=filled, type=conv2d]; +"212 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/NNCFBatchNorm2d[_bn0]/batch_norm_0" [id=212, label="batch_norm_IW1152_OW1152_#263", style=filled, type=batch_norm]; +"213 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/MemoryEfficientSwish[_swish]/sigmoid_0" [id=213, label="sigmoid_IW1152_OW1152_#264", style=filled, type=sigmoid]; +"214 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/MemoryEfficientSwish[_swish]/__mul___0" [id=214, label="__mul___IW[1152, 1152]_OW1152_#265", style=filled, type=__mul__]; +"215 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateInputs[1]/__getitem___0" [id=215, label="__getitem___#215", style=filled, type=__getitem__]; +"216 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ZeroPad2d[static_padding]/pad_0" [id=216, label="pad_IW1152_OW1152_#266", style=filled, type=pad]; +"217 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/conv2d_0" [color=purple, id=217, label="DW_conv2d_IW1152_OW1152_G33_#267", style=filled, type=conv2d]; +"218 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/NNCFBatchNorm2d[_bn1]/batch_norm_0" [id=218, label="batch_norm_IW1152_OW1152_#268", style=filled, type=batch_norm]; +"219 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/MemoryEfficientSwish[_swish]/sigmoid_1" [id=219, label="sigmoid_IW1152_OW1152_#269", style=filled, type=sigmoid]; +"220 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/MemoryEfficientSwish[_swish]/__mul___1" [id=220, label="__mul___IW[1152, 1152]_OW1152_#270", style=filled, type=__mul__]; +"221 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/adaptive_avg_pool2d_0" [id=221, 
label="adaptive_avg_pool2d_IW1152_OW1152_#271", style=filled, type=adaptive_avg_pool2d]; +"222 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" [color=lightblue, id=222, label="conv2d_IW1152_OW48_G32_#272", style=filled, type=conv2d]; +"223 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/MemoryEfficientSwish[_swish]/sigmoid_2" [id=223, label="sigmoid_IW48_OW48_#273", style=filled, type=sigmoid]; +"224 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/MemoryEfficientSwish[_swish]/__mul___2" [id=224, label="__mul___IW[48, 48]_OW48_#274", style=filled, type=__mul__]; +"225 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/NNCFUserConv2dStaticSamePadding[_se_expand]/conv2d_0" [color=lightblue, id=225, label="conv2d_IW48_OW1152_G33_#275", style=filled, type=conv2d]; +"226 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/sigmoid_0" [id=226, label="sigmoid_IW1152_OW1152_#276", style=filled, type=sigmoid]; +"227 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/__mul___0" [id=227, label="__mul___IW[1152, 1152]_OW1152_#277", style=filled, type=__mul__]; +"228 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/NNCFUserConv2dStaticSamePadding[_project_conv]/conv2d_0" [color=lightblue, id=228, label="conv2d_IW1152_OW192_G34_#278", style=filled, type=conv2d]; +"229 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/NNCFBatchNorm2d[_bn2]/batch_norm_0" [id=229, label="batch_norm_IW192_OW192_#279", style=filled, type=batch_norm]; +"230 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/__add___0" [id=230, label="__add___IW[192, 192]_OW192_#280", style=filled, type=__add__]; +"231 EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/NNCFUserConv2dStaticSamePadding[_expand_conv]/conv2d_0" [color=lightblue, id=231, label="conv2d_IW192_OW1152_G36_#281", style=filled, type=conv2d]; +"232 EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/NNCFBatchNorm2d[_bn0]/batch_norm_0" [id=232, label="batch_norm_IW1152_OW1152_#282", style=filled, type=batch_norm]; +"233 EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/MemoryEfficientSwish[_swish]/sigmoid_0" [id=233, label="sigmoid_IW1152_OW1152_#283", style=filled, type=sigmoid]; +"234 EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/MemoryEfficientSwish[_swish]/__mul___0" [id=234, label="__mul___IW[1152, 1152]_OW1152_#284", style=filled, type=__mul__]; +"235 EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/NNCFUserConv2dStaticSamePadding[_project_conv]/conv2d_0" [color=lightblue, id=235, label="conv2d_IW1152_OW320_G39_#297", style=filled, type=conv2d]; +"236 EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/NNCFBatchNorm2d[_bn2]/batch_norm_0" [id=236, label="batch_norm_IW320_OW320_#298", style=filled, type=batch_norm]; +"237 EfficientNet/NNCFUserConv2dStaticSamePadding[_conv_head]/conv2d_0" [color=lightblue, id=237, label="conv2d_IW320_OW1280_G37_#299", style=filled, type=conv2d]; +"238 EfficientNet/NNCFBatchNorm2d[_bn1]/batch_norm_0" [id=238, label="batch_norm_IW1280_OW1280_#300", style=filled, type=batch_norm]; +"239 EfficientNet/MemoryEfficientSwish[_swish]/sigmoid_1" [id=239, label="sigmoid_IW1280_OW1280_#301", style=filled, type=sigmoid]; +"240 EfficientNet/MemoryEfficientSwish[_swish]/__mul___1" [id=240, label="__mul___IW[1280, 1280]_OW1280_#302", style=filled, type=__mul__]; +"241 EfficientNet/AdaptiveAvgPool2d[_avg_pooling]/adaptive_avg_pool2d_0" [id=241, label="adaptive_avg_pool2d_IW1280_OW1280_#303", style=filled, type=adaptive_avg_pool2d]; +"242 EfficientNet/flatten_0" [id=242, label="flatten_IW1280_OW1280_#304", 
style=filled, type=flatten]; +"243 EfficientNet/Dropout[_dropout]/dropout_0" [id=243, label="dropout_IW1280_OW1280_#305", style=filled, type=dropout]; +"244 EfficientNet/NNCFLinear[_fc]/linear_0" [id=244, label="linear_IW1280_#306", style=filled, type=linear]; +"245 /nncf_model_output_0" [id=245, label="nncf_model_output_#307", style=filled, type=nncf_model_output]; "0 /nncf_model_input_0" -> "1 EfficientNet/NNCFUserConv2dStaticSamePadding[_conv_stem]/ModuleDict[pre_ops]/UpdateInputs[1]/__getitem___0" [label="(1, 3, 240, 240)", style=solid]; "1 EfficientNet/NNCFUserConv2dStaticSamePadding[_conv_stem]/ModuleDict[pre_ops]/UpdateInputs[1]/__getitem___0" -> "2 EfficientNet/NNCFUserConv2dStaticSamePadding[_conv_stem]/ZeroPad2d[static_padding]/pad_0" [label="(1, 3, 240, 240)", style=solid]; "2 EfficientNet/NNCFUserConv2dStaticSamePadding[_conv_stem]/ZeroPad2d[static_padding]/pad_0" -> "3 EfficientNet/NNCFUserConv2dStaticSamePadding[_conv_stem]/conv2d_0" [label="(1, 3, 241, 241)", style=solid]; @@ -326,259 +308,241 @@ strict digraph { "49 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/NNCFBatchNorm2d[_bn0]/batch_norm_0" -> "51 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/MemoryEfficientSwish[_swish]/__mul___0" [label="(1, 144, 60, 60)", style=solid]; "50 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/MemoryEfficientSwish[_swish]/sigmoid_0" -> "51 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/MemoryEfficientSwish[_swish]/__mul___0" [label="(1, 144, 60, 60)", style=solid]; "51 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/MemoryEfficientSwish[_swish]/__mul___0" -> "52 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateInputs[1]/__getitem___0" [label="(1, 144, 60, 60)", style=solid]; -"52 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateInputs[1]/__getitem___0" -> "56 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ZeroPad2d[static_padding]/pad_0" [label="(1, 144, 58, 58)", style=solid]; -"53 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateWeight[3]/ElasticKernelConv2DOp[op]/linear_0" -> "54 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateWeight[3]/ElasticKernelConv2DOp[op]/view_0" [label="(144, 9)", style=solid]; -"54 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateWeight[3]/ElasticKernelConv2DOp[op]/view_0" -> "55 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateWeight[3]/ElasticKernelConv2DOp[op]/view_1" [label="(144, 1, 9)", style=solid]; -"55 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateWeight[3]/ElasticKernelConv2DOp[op]/view_1" -> "57 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/conv2d_0" [label="(144, 1, 3, 3)", style=solid]; -"56 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ZeroPad2d[static_padding]/pad_0" -> "57 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/conv2d_0" [label="(1, 144, 61, 61)", style=solid]; -"57 
EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/conv2d_0" -> "58 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/NNCFBatchNorm2d[_bn1]/batch_norm_0" [label="(1, 144, 30, 30)", style=solid]; -"58 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/NNCFBatchNorm2d[_bn1]/batch_norm_0" -> "59 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/MemoryEfficientSwish[_swish]/sigmoid_1" [label="(1, 144, 30, 30)", style=solid]; -"58 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/NNCFBatchNorm2d[_bn1]/batch_norm_0" -> "60 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/MemoryEfficientSwish[_swish]/__mul___1" [label="(1, 144, 30, 30)", style=solid]; -"59 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/MemoryEfficientSwish[_swish]/sigmoid_1" -> "60 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/MemoryEfficientSwish[_swish]/__mul___1" [label="(1, 144, 30, 30)", style=solid]; -"60 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/MemoryEfficientSwish[_swish]/__mul___1" -> "61 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/adaptive_avg_pool2d_0" [label="(1, 144, 30, 30)", style=solid]; -"60 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/MemoryEfficientSwish[_swish]/__mul___1" -> "67 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/__mul___0" [label="(1, 144, 30, 30)", style=solid]; -"61 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/adaptive_avg_pool2d_0" -> "62 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" [label="(1, 144, 1, 1)", style=solid]; -"62 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" -> "63 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/MemoryEfficientSwish[_swish]/sigmoid_2" [label="(1, 6, 1, 1)", style=solid]; -"62 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" -> "64 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/MemoryEfficientSwish[_swish]/__mul___2" [label="(1, 6, 1, 1)", style=solid]; -"63 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/MemoryEfficientSwish[_swish]/sigmoid_2" -> "64 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/MemoryEfficientSwish[_swish]/__mul___2" [label="(1, 6, 1, 1)", style=solid]; -"64 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/MemoryEfficientSwish[_swish]/__mul___2" -> "65 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/NNCFUserConv2dStaticSamePadding[_se_expand]/conv2d_0" [label="(1, 6, 1, 1)", style=solid]; -"65 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/NNCFUserConv2dStaticSamePadding[_se_expand]/conv2d_0" -> "66 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/sigmoid_0" [label="(1, 144, 1, 1)", style=solid]; -"66 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/sigmoid_0" -> "67 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/__mul___0" [label="(1, 144, 1, 1)", style=solid]; -"67 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/__mul___0" -> "68 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/NNCFUserConv2dStaticSamePadding[_project_conv]/conv2d_0" [label="(1, 144, 30, 30)", style=solid]; -"68 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/NNCFUserConv2dStaticSamePadding[_project_conv]/conv2d_0" -> "69 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/NNCFBatchNorm2d[_bn2]/batch_norm_0" [label="(1, 40, 30, 30)", style=solid]; -"69 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/NNCFBatchNorm2d[_bn2]/batch_norm_0" -> "70 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/NNCFUserConv2dStaticSamePadding[_expand_conv]/conv2d_0" [label="(1, 40, 30, 
30)", style=solid]; -"69 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/NNCFBatchNorm2d[_bn2]/batch_norm_0" -> "92 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/__add___0" [label="(1, 40, 30, 30)", style=solid]; -"70 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/NNCFUserConv2dStaticSamePadding[_expand_conv]/conv2d_0" -> "71 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/NNCFBatchNorm2d[_bn0]/batch_norm_0" [label="(1, 240, 30, 30)", style=solid]; -"71 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/NNCFBatchNorm2d[_bn0]/batch_norm_0" -> "72 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/MemoryEfficientSwish[_swish]/sigmoid_0" [label="(1, 240, 30, 30)", style=solid]; -"71 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/NNCFBatchNorm2d[_bn0]/batch_norm_0" -> "73 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/MemoryEfficientSwish[_swish]/__mul___0" [label="(1, 240, 30, 30)", style=solid]; -"72 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/MemoryEfficientSwish[_swish]/sigmoid_0" -> "73 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/MemoryEfficientSwish[_swish]/__mul___0" [label="(1, 240, 30, 30)", style=solid]; -"73 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/MemoryEfficientSwish[_swish]/__mul___0" -> "74 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateInputs[1]/__getitem___0" [label="(1, 240, 30, 30)", style=solid]; -"74 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateInputs[1]/__getitem___0" -> "78 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ZeroPad2d[static_padding]/pad_0" [label="(1, 240, 28, 28)", style=solid]; -"75 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateWeight[3]/ElasticKernelConv2DOp[op]/linear_0" -> "76 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateWeight[3]/ElasticKernelConv2DOp[op]/view_0" [label="(240, 9)", style=solid]; -"76 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateWeight[3]/ElasticKernelConv2DOp[op]/view_0" -> "77 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateWeight[3]/ElasticKernelConv2DOp[op]/view_1" [label="(240, 1, 9)", style=solid]; -"77 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateWeight[3]/ElasticKernelConv2DOp[op]/view_1" -> "79 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/conv2d_0" [label="(240, 1, 3, 3)", style=solid]; -"78 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ZeroPad2d[static_padding]/pad_0" -> "79 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/conv2d_0" [label="(1, 240, 32, 32)", style=solid]; -"79 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/conv2d_0" -> "80 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/NNCFBatchNorm2d[_bn1]/batch_norm_0" [label="(1, 240, 30, 30)", style=solid]; -"80 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/NNCFBatchNorm2d[_bn1]/batch_norm_0" -> "81 
EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/MemoryEfficientSwish[_swish]/sigmoid_1" [label="(1, 240, 30, 30)", style=solid]; -"80 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/NNCFBatchNorm2d[_bn1]/batch_norm_0" -> "82 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/MemoryEfficientSwish[_swish]/__mul___1" [label="(1, 240, 30, 30)", style=solid]; -"81 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/MemoryEfficientSwish[_swish]/sigmoid_1" -> "82 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/MemoryEfficientSwish[_swish]/__mul___1" [label="(1, 240, 30, 30)", style=solid]; -"82 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/MemoryEfficientSwish[_swish]/__mul___1" -> "83 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/adaptive_avg_pool2d_0" [label="(1, 240, 30, 30)", style=solid]; -"82 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/MemoryEfficientSwish[_swish]/__mul___1" -> "89 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/__mul___0" [label="(1, 240, 30, 30)", style=solid]; -"83 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/adaptive_avg_pool2d_0" -> "84 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" [label="(1, 240, 1, 1)", style=solid]; -"84 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" -> "85 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/MemoryEfficientSwish[_swish]/sigmoid_2" [label="(1, 10, 1, 1)", style=solid]; -"84 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" -> "86 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/MemoryEfficientSwish[_swish]/__mul___2" [label="(1, 10, 1, 1)", style=solid]; -"85 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/MemoryEfficientSwish[_swish]/sigmoid_2" -> "86 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/MemoryEfficientSwish[_swish]/__mul___2" [label="(1, 10, 1, 1)", style=solid]; -"86 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/MemoryEfficientSwish[_swish]/__mul___2" -> "87 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/NNCFUserConv2dStaticSamePadding[_se_expand]/conv2d_0" [label="(1, 10, 1, 1)", style=solid]; -"87 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/NNCFUserConv2dStaticSamePadding[_se_expand]/conv2d_0" -> "88 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/sigmoid_0" [label="(1, 240, 1, 1)", style=solid]; -"88 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/sigmoid_0" -> "89 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/__mul___0" [label="(1, 240, 1, 1)", style=solid]; -"89 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/__mul___0" -> "90 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/NNCFUserConv2dStaticSamePadding[_project_conv]/conv2d_0" [label="(1, 240, 30, 30)", style=solid]; -"90 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/NNCFUserConv2dStaticSamePadding[_project_conv]/conv2d_0" -> "91 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/NNCFBatchNorm2d[_bn2]/batch_norm_0" [label="(1, 40, 30, 30)", style=solid]; -"91 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/NNCFBatchNorm2d[_bn2]/batch_norm_0" -> "92 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/__add___0" [label="(1, 40, 30, 30)", style=solid]; -"92 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/__add___0" -> "93 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/NNCFUserConv2dStaticSamePadding[_expand_conv]/conv2d_0" [label="(1, 40, 30, 30)", style=solid]; -"93 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/NNCFUserConv2dStaticSamePadding[_expand_conv]/conv2d_0" -> "94 
EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/NNCFBatchNorm2d[_bn0]/batch_norm_0" [label="(1, 240, 30, 30)", style=solid]; -"94 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/NNCFBatchNorm2d[_bn0]/batch_norm_0" -> "95 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/MemoryEfficientSwish[_swish]/sigmoid_0" [label="(1, 240, 30, 30)", style=solid]; -"94 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/NNCFBatchNorm2d[_bn0]/batch_norm_0" -> "96 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/MemoryEfficientSwish[_swish]/__mul___0" [label="(1, 240, 30, 30)", style=solid]; -"95 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/MemoryEfficientSwish[_swish]/sigmoid_0" -> "96 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/MemoryEfficientSwish[_swish]/__mul___0" [label="(1, 240, 30, 30)", style=solid]; -"96 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/MemoryEfficientSwish[_swish]/__mul___0" -> "97 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateInputs[1]/__getitem___0" [label="(1, 240, 30, 30)", style=solid]; -"97 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateInputs[1]/__getitem___0" -> "98 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ZeroPad2d[static_padding]/pad_0" [label="(1, 240, 30, 30)", style=solid]; -"98 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ZeroPad2d[static_padding]/pad_0" -> "99 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/conv2d_0" [label="(1, 240, 31, 31)", style=solid]; -"99 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/conv2d_0" -> "100 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/NNCFBatchNorm2d[_bn1]/batch_norm_0" [label="(1, 240, 15, 15)", style=solid]; -"100 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/NNCFBatchNorm2d[_bn1]/batch_norm_0" -> "101 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/MemoryEfficientSwish[_swish]/sigmoid_1" [label="(1, 240, 15, 15)", style=solid]; -"100 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/NNCFBatchNorm2d[_bn1]/batch_norm_0" -> "102 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/MemoryEfficientSwish[_swish]/__mul___1" [label="(1, 240, 15, 15)", style=solid]; -"101 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/MemoryEfficientSwish[_swish]/sigmoid_1" -> "102 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/MemoryEfficientSwish[_swish]/__mul___1" [label="(1, 240, 15, 15)", style=solid]; -"102 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/MemoryEfficientSwish[_swish]/__mul___1" -> "103 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/adaptive_avg_pool2d_0" [label="(1, 240, 15, 15)", style=solid]; -"102 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/MemoryEfficientSwish[_swish]/__mul___1" -> "109 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/__mul___0" [label="(1, 240, 15, 15)", style=solid]; -"103 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/adaptive_avg_pool2d_0" -> "104 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" [label="(1, 240, 1, 1)", style=solid]; -"104 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" -> "105 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/MemoryEfficientSwish[_swish]/sigmoid_2" [label="(1, 10, 1, 1)", style=solid]; -"104 
EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" -> "106 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/MemoryEfficientSwish[_swish]/__mul___2" [label="(1, 10, 1, 1)", style=solid]; -"105 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/MemoryEfficientSwish[_swish]/sigmoid_2" -> "106 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/MemoryEfficientSwish[_swish]/__mul___2" [label="(1, 10, 1, 1)", style=solid]; -"106 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/MemoryEfficientSwish[_swish]/__mul___2" -> "107 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/NNCFUserConv2dStaticSamePadding[_se_expand]/conv2d_0" [label="(1, 10, 1, 1)", style=solid]; -"107 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/NNCFUserConv2dStaticSamePadding[_se_expand]/conv2d_0" -> "108 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/sigmoid_0" [label="(1, 240, 1, 1)", style=solid]; -"108 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/sigmoid_0" -> "109 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/__mul___0" [label="(1, 240, 1, 1)", style=solid]; -"109 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/__mul___0" -> "110 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/NNCFUserConv2dStaticSamePadding[_project_conv]/conv2d_0" [label="(1, 240, 15, 15)", style=solid]; -"110 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/NNCFUserConv2dStaticSamePadding[_project_conv]/conv2d_0" -> "111 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/NNCFBatchNorm2d[_bn2]/batch_norm_0" [label="(1, 80, 15, 15)", style=solid]; -"111 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/NNCFBatchNorm2d[_bn2]/batch_norm_0" -> "112 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/NNCFUserConv2dStaticSamePadding[_expand_conv]/conv2d_0" [label="(1, 80, 15, 15)", style=solid]; -"111 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/NNCFBatchNorm2d[_bn2]/batch_norm_0" -> "131 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/__add___0" [label="(1, 80, 15, 15)", style=solid]; -"112 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/NNCFUserConv2dStaticSamePadding[_expand_conv]/conv2d_0" -> "113 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/NNCFBatchNorm2d[_bn0]/batch_norm_0" [label="(1, 480, 15, 15)", style=solid]; -"113 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/NNCFBatchNorm2d[_bn0]/batch_norm_0" -> "114 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/MemoryEfficientSwish[_swish]/sigmoid_0" [label="(1, 480, 15, 15)", style=solid]; -"113 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/NNCFBatchNorm2d[_bn0]/batch_norm_0" -> "115 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/MemoryEfficientSwish[_swish]/__mul___0" [label="(1, 480, 15, 15)", style=solid]; -"114 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/MemoryEfficientSwish[_swish]/sigmoid_0" -> "115 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/MemoryEfficientSwish[_swish]/__mul___0" [label="(1, 480, 15, 15)", style=solid]; -"115 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/MemoryEfficientSwish[_swish]/__mul___0" -> "116 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateInputs[1]/__getitem___0" [label="(1, 480, 15, 15)", style=solid]; -"116 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateInputs[1]/__getitem___0" -> "117 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ZeroPad2d[static_padding]/pad_0" [label="(1, 480, 15, 15)", style=solid]; -"117 
EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ZeroPad2d[static_padding]/pad_0" -> "118 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/conv2d_0" [label="(1, 480, 17, 17)", style=solid]; -"118 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/conv2d_0" -> "119 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/NNCFBatchNorm2d[_bn1]/batch_norm_0" [label="(1, 480, 15, 15)", style=solid]; -"119 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/NNCFBatchNorm2d[_bn1]/batch_norm_0" -> "120 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/MemoryEfficientSwish[_swish]/sigmoid_1" [label="(1, 480, 15, 15)", style=solid]; -"119 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/NNCFBatchNorm2d[_bn1]/batch_norm_0" -> "121 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/MemoryEfficientSwish[_swish]/__mul___1" [label="(1, 480, 15, 15)", style=solid]; -"120 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/MemoryEfficientSwish[_swish]/sigmoid_1" -> "121 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/MemoryEfficientSwish[_swish]/__mul___1" [label="(1, 480, 15, 15)", style=solid]; -"121 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/MemoryEfficientSwish[_swish]/__mul___1" -> "122 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/adaptive_avg_pool2d_0" [label="(1, 480, 15, 15)", style=solid]; -"121 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/MemoryEfficientSwish[_swish]/__mul___1" -> "128 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/__mul___0" [label="(1, 480, 15, 15)", style=solid]; -"122 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/adaptive_avg_pool2d_0" -> "123 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" [label="(1, 480, 1, 1)", style=solid]; -"123 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" -> "124 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/MemoryEfficientSwish[_swish]/sigmoid_2" [label="(1, 20, 1, 1)", style=solid]; -"123 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" -> "125 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/MemoryEfficientSwish[_swish]/__mul___2" [label="(1, 20, 1, 1)", style=solid]; -"124 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/MemoryEfficientSwish[_swish]/sigmoid_2" -> "125 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/MemoryEfficientSwish[_swish]/__mul___2" [label="(1, 20, 1, 1)", style=solid]; -"125 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/MemoryEfficientSwish[_swish]/__mul___2" -> "126 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/NNCFUserConv2dStaticSamePadding[_se_expand]/conv2d_0" [label="(1, 20, 1, 1)", style=solid]; -"126 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/NNCFUserConv2dStaticSamePadding[_se_expand]/conv2d_0" -> "127 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/sigmoid_0" [label="(1, 480, 1, 1)", style=solid]; -"127 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/sigmoid_0" -> "128 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/__mul___0" [label="(1, 480, 1, 1)", style=solid]; -"128 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/__mul___0" -> "129 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/NNCFUserConv2dStaticSamePadding[_project_conv]/conv2d_0" [label="(1, 480, 15, 15)", style=solid]; -"129 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/NNCFUserConv2dStaticSamePadding[_project_conv]/conv2d_0" -> "130 
EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/NNCFBatchNorm2d[_bn2]/batch_norm_0" [label="(1, 80, 15, 15)", style=solid]; -"130 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/NNCFBatchNorm2d[_bn2]/batch_norm_0" -> "131 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/__add___0" [label="(1, 80, 15, 15)", style=solid]; -"131 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/__add___0" -> "132 EfficientNet/ModuleList[_blocks]/MBConvBlock[8]/NNCFUserConv2dStaticSamePadding[_expand_conv]/conv2d_0" [label="(1, 80, 15, 15)", style=solid]; -"132 EfficientNet/ModuleList[_blocks]/MBConvBlock[8]/NNCFUserConv2dStaticSamePadding[_expand_conv]/conv2d_0" -> "133 EfficientNet/ModuleList[_blocks]/MBConvBlock[8]/NNCFBatchNorm2d[_bn0]/batch_norm_0" [label="(1, 480, 15, 15)", style=solid]; -"133 EfficientNet/ModuleList[_blocks]/MBConvBlock[8]/NNCFBatchNorm2d[_bn0]/batch_norm_0" -> "134 EfficientNet/ModuleList[_blocks]/MBConvBlock[8]/MemoryEfficientSwish[_swish]/sigmoid_0" [label="(1, 480, 15, 15)", style=solid]; -"133 EfficientNet/ModuleList[_blocks]/MBConvBlock[8]/NNCFBatchNorm2d[_bn0]/batch_norm_0" -> "135 EfficientNet/ModuleList[_blocks]/MBConvBlock[8]/MemoryEfficientSwish[_swish]/__mul___0" [label="(1, 480, 15, 15)", style=solid]; -"134 EfficientNet/ModuleList[_blocks]/MBConvBlock[8]/MemoryEfficientSwish[_swish]/sigmoid_0" -> "135 EfficientNet/ModuleList[_blocks]/MBConvBlock[8]/MemoryEfficientSwish[_swish]/__mul___0" [label="(1, 480, 15, 15)", style=solid]; -"135 EfficientNet/ModuleList[_blocks]/MBConvBlock[8]/MemoryEfficientSwish[_swish]/__mul___0" -> "136 EfficientNet/ModuleList[_blocks]/MBConvBlock[8]/NNCFUserConv2dStaticSamePadding[_project_conv]/conv2d_0" [label="(1, 480, 15, 15)", style=solid]; -"136 EfficientNet/ModuleList[_blocks]/MBConvBlock[8]/NNCFUserConv2dStaticSamePadding[_project_conv]/conv2d_0" -> "137 EfficientNet/ModuleList[_blocks]/MBConvBlock[8]/NNCFBatchNorm2d[_bn2]/batch_norm_0" [label="(1, 112, 15, 15)", style=solid]; -"137 EfficientNet/ModuleList[_blocks]/MBConvBlock[8]/NNCFBatchNorm2d[_bn2]/batch_norm_0" -> "138 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFUserConv2dStaticSamePadding[_expand_conv]/conv2d_0" [label="(1, 112, 15, 15)", style=solid]; -"137 EfficientNet/ModuleList[_blocks]/MBConvBlock[8]/NNCFBatchNorm2d[_bn2]/batch_norm_0" -> "160 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/__add___0" [label="(1, 112, 15, 15)", style=solid]; -"138 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFUserConv2dStaticSamePadding[_expand_conv]/conv2d_0" -> "139 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFBatchNorm2d[_bn0]/batch_norm_0" [label="(1, 672, 15, 15)", style=solid]; -"139 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFBatchNorm2d[_bn0]/batch_norm_0" -> "140 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/MemoryEfficientSwish[_swish]/sigmoid_0" [label="(1, 672, 15, 15)", style=solid]; -"139 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFBatchNorm2d[_bn0]/batch_norm_0" -> "141 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/MemoryEfficientSwish[_swish]/__mul___0" [label="(1, 672, 15, 15)", style=solid]; -"140 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/MemoryEfficientSwish[_swish]/sigmoid_0" -> "141 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/MemoryEfficientSwish[_swish]/__mul___0" [label="(1, 672, 15, 15)", style=solid]; -"141 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/MemoryEfficientSwish[_swish]/__mul___0" -> "142 
EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateInputs[1]/__getitem___0" [label="(1, 672, 15, 15)", style=solid]; -"142 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateInputs[1]/__getitem___0" -> "146 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ZeroPad2d[static_padding]/pad_0" [label="(1, 672, 13, 13)", style=solid]; -"143 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateWeight[3]/ElasticKernelConv2DOp[op]/linear_0" -> "144 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateWeight[3]/ElasticKernelConv2DOp[op]/view_0" [label="(672, 9)", style=solid]; -"144 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateWeight[3]/ElasticKernelConv2DOp[op]/view_0" -> "145 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateWeight[3]/ElasticKernelConv2DOp[op]/view_1" [label="(672, 1, 9)", style=solid]; -"145 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateWeight[3]/ElasticKernelConv2DOp[op]/view_1" -> "147 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/conv2d_0" [label="(672, 1, 3, 3)", style=solid]; -"146 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ZeroPad2d[static_padding]/pad_0" -> "147 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/conv2d_0" [label="(1, 672, 17, 17)", style=solid]; -"147 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/conv2d_0" -> "148 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFBatchNorm2d[_bn1]/batch_norm_0" [label="(1, 672, 15, 15)", style=solid]; -"148 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFBatchNorm2d[_bn1]/batch_norm_0" -> "149 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/MemoryEfficientSwish[_swish]/sigmoid_1" [label="(1, 672, 15, 15)", style=solid]; -"148 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFBatchNorm2d[_bn1]/batch_norm_0" -> "150 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/MemoryEfficientSwish[_swish]/__mul___1" [label="(1, 672, 15, 15)", style=solid]; -"149 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/MemoryEfficientSwish[_swish]/sigmoid_1" -> "150 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/MemoryEfficientSwish[_swish]/__mul___1" [label="(1, 672, 15, 15)", style=solid]; -"150 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/MemoryEfficientSwish[_swish]/__mul___1" -> "151 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/adaptive_avg_pool2d_0" [label="(1, 672, 15, 15)", style=solid]; -"150 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/MemoryEfficientSwish[_swish]/__mul___1" -> "157 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/__mul___0" [label="(1, 672, 15, 15)", style=solid]; -"151 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/adaptive_avg_pool2d_0" -> "152 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" [label="(1, 672, 1, 1)", style=solid]; -"152 
EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" -> "153 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/MemoryEfficientSwish[_swish]/sigmoid_2" [label="(1, 28, 1, 1)", style=solid]; -"152 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" -> "154 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/MemoryEfficientSwish[_swish]/__mul___2" [label="(1, 28, 1, 1)", style=solid]; -"153 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/MemoryEfficientSwish[_swish]/sigmoid_2" -> "154 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/MemoryEfficientSwish[_swish]/__mul___2" [label="(1, 28, 1, 1)", style=solid]; -"154 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/MemoryEfficientSwish[_swish]/__mul___2" -> "155 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFUserConv2dStaticSamePadding[_se_expand]/conv2d_0" [label="(1, 28, 1, 1)", style=solid]; -"155 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFUserConv2dStaticSamePadding[_se_expand]/conv2d_0" -> "156 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/sigmoid_0" [label="(1, 672, 1, 1)", style=solid]; -"156 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/sigmoid_0" -> "157 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/__mul___0" [label="(1, 672, 1, 1)", style=solid]; -"157 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/__mul___0" -> "158 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFUserConv2dStaticSamePadding[_project_conv]/conv2d_0" [label="(1, 672, 15, 15)", style=solid]; -"158 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFUserConv2dStaticSamePadding[_project_conv]/conv2d_0" -> "159 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFBatchNorm2d[_bn2]/batch_norm_0" [label="(1, 112, 15, 15)", style=solid]; -"159 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFBatchNorm2d[_bn2]/batch_norm_0" -> "160 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/__add___0" [label="(1, 112, 15, 15)", style=solid]; -"160 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/__add___0" -> "161 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFUserConv2dStaticSamePadding[_expand_conv]/conv2d_0" [label="(1, 112, 15, 15)", style=solid]; -"161 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFUserConv2dStaticSamePadding[_expand_conv]/conv2d_0" -> "162 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFBatchNorm2d[_bn0]/batch_norm_0" [label="(1, 672, 15, 15)", style=solid]; -"162 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFBatchNorm2d[_bn0]/batch_norm_0" -> "163 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/MemoryEfficientSwish[_swish]/sigmoid_0" [label="(1, 672, 15, 15)", style=solid]; -"162 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFBatchNorm2d[_bn0]/batch_norm_0" -> "164 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/MemoryEfficientSwish[_swish]/__mul___0" [label="(1, 672, 15, 15)", style=solid]; -"163 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/MemoryEfficientSwish[_swish]/sigmoid_0" -> "164 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/MemoryEfficientSwish[_swish]/__mul___0" [label="(1, 672, 15, 15)", style=solid]; -"164 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/MemoryEfficientSwish[_swish]/__mul___0" -> "165 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateInputs[1]/__getitem___0" [label="(1, 672, 15, 15)", style=solid]; -"165 
EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateInputs[1]/__getitem___0" -> "169 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ZeroPad2d[static_padding]/pad_0" [label="(1, 672, 13, 13)", style=solid]; -"166 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateWeight[3]/ElasticKernelConv2DOp[op]/linear_0" -> "167 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateWeight[3]/ElasticKernelConv2DOp[op]/view_0" [label="(672, 9)", style=solid]; -"167 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateWeight[3]/ElasticKernelConv2DOp[op]/view_0" -> "168 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateWeight[3]/ElasticKernelConv2DOp[op]/view_1" [label="(672, 1, 9)", style=solid]; -"168 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateWeight[3]/ElasticKernelConv2DOp[op]/view_1" -> "170 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/conv2d_0" [label="(672, 1, 3, 3)", style=solid]; -"169 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ZeroPad2d[static_padding]/pad_0" -> "170 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/conv2d_0" [label="(1, 672, 16, 16)", style=solid]; -"170 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/conv2d_0" -> "171 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFBatchNorm2d[_bn1]/batch_norm_0" [label="(1, 672, 7, 7)", style=solid]; -"171 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFBatchNorm2d[_bn1]/batch_norm_0" -> "172 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/MemoryEfficientSwish[_swish]/sigmoid_1" [label="(1, 672, 7, 7)", style=solid]; -"171 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFBatchNorm2d[_bn1]/batch_norm_0" -> "173 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/MemoryEfficientSwish[_swish]/__mul___1" [label="(1, 672, 7, 7)", style=solid]; -"172 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/MemoryEfficientSwish[_swish]/sigmoid_1" -> "173 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/MemoryEfficientSwish[_swish]/__mul___1" [label="(1, 672, 7, 7)", style=solid]; -"173 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/MemoryEfficientSwish[_swish]/__mul___1" -> "174 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/adaptive_avg_pool2d_0" [label="(1, 672, 7, 7)", style=solid]; -"173 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/MemoryEfficientSwish[_swish]/__mul___1" -> "180 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/__mul___0" [label="(1, 672, 7, 7)", style=solid]; -"174 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/adaptive_avg_pool2d_0" -> "175 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" [label="(1, 672, 1, 1)", style=solid]; -"175 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" -> "176 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/MemoryEfficientSwish[_swish]/sigmoid_2" [label="(1, 28, 1, 1)", style=solid]; -"175 
EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" -> "177 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/MemoryEfficientSwish[_swish]/__mul___2" [label="(1, 28, 1, 1)", style=solid]; -"176 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/MemoryEfficientSwish[_swish]/sigmoid_2" -> "177 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/MemoryEfficientSwish[_swish]/__mul___2" [label="(1, 28, 1, 1)", style=solid]; -"177 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/MemoryEfficientSwish[_swish]/__mul___2" -> "178 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFUserConv2dStaticSamePadding[_se_expand]/conv2d_0" [label="(1, 28, 1, 1)", style=solid]; -"178 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFUserConv2dStaticSamePadding[_se_expand]/conv2d_0" -> "179 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/sigmoid_0" [label="(1, 672, 1, 1)", style=solid]; -"179 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/sigmoid_0" -> "180 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/__mul___0" [label="(1, 672, 1, 1)", style=solid]; -"180 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/__mul___0" -> "181 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFUserConv2dStaticSamePadding[_project_conv]/conv2d_0" [label="(1, 672, 7, 7)", style=solid]; -"181 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFUserConv2dStaticSamePadding[_project_conv]/conv2d_0" -> "182 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFBatchNorm2d[_bn2]/batch_norm_0" [label="(1, 192, 7, 7)", style=solid]; -"182 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFBatchNorm2d[_bn2]/batch_norm_0" -> "183 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFUserConv2dStaticSamePadding[_expand_conv]/conv2d_0" [label="(1, 192, 7, 7)", style=solid]; -"182 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFBatchNorm2d[_bn2]/batch_norm_0" -> "205 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/__add___0" [label="(1, 192, 7, 7)", style=solid]; -"183 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFUserConv2dStaticSamePadding[_expand_conv]/conv2d_0" -> "184 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFBatchNorm2d[_bn0]/batch_norm_0" [label="(1, 1152, 7, 7)", style=solid]; -"184 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFBatchNorm2d[_bn0]/batch_norm_0" -> "185 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/MemoryEfficientSwish[_swish]/sigmoid_0" [label="(1, 1152, 7, 7)", style=solid]; -"184 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFBatchNorm2d[_bn0]/batch_norm_0" -> "186 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/MemoryEfficientSwish[_swish]/__mul___0" [label="(1, 1152, 7, 7)", style=solid]; -"185 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/MemoryEfficientSwish[_swish]/sigmoid_0" -> "186 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/MemoryEfficientSwish[_swish]/__mul___0" [label="(1, 1152, 7, 7)", style=solid]; -"186 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/MemoryEfficientSwish[_swish]/__mul___0" -> "187 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateInputs[1]/__getitem___0" [label="(1, 1152, 7, 7)", style=solid]; -"187 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateInputs[1]/__getitem___0" -> "191 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ZeroPad2d[static_padding]/pad_0" [label="(1, 1152, 5, 5)", 
style=solid]; -"188 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateWeight[3]/ElasticKernelConv2DOp[op]/linear_0" -> "189 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateWeight[3]/ElasticKernelConv2DOp[op]/view_0" [label="(1152, 9)", style=solid]; -"189 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateWeight[3]/ElasticKernelConv2DOp[op]/view_0" -> "190 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateWeight[3]/ElasticKernelConv2DOp[op]/view_1" [label="(1152, 1, 9)", style=solid]; -"190 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateWeight[3]/ElasticKernelConv2DOp[op]/view_1" -> "192 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/conv2d_0" [label="(1152, 1, 3, 3)", style=solid]; -"191 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ZeroPad2d[static_padding]/pad_0" -> "192 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/conv2d_0" [label="(1, 1152, 9, 9)", style=solid]; -"192 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/conv2d_0" -> "193 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFBatchNorm2d[_bn1]/batch_norm_0" [label="(1, 1152, 7, 7)", style=solid]; -"193 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFBatchNorm2d[_bn1]/batch_norm_0" -> "194 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/MemoryEfficientSwish[_swish]/sigmoid_1" [label="(1, 1152, 7, 7)", style=solid]; -"193 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFBatchNorm2d[_bn1]/batch_norm_0" -> "195 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/MemoryEfficientSwish[_swish]/__mul___1" [label="(1, 1152, 7, 7)", style=solid]; -"194 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/MemoryEfficientSwish[_swish]/sigmoid_1" -> "195 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/MemoryEfficientSwish[_swish]/__mul___1" [label="(1, 1152, 7, 7)", style=solid]; -"195 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/MemoryEfficientSwish[_swish]/__mul___1" -> "196 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/adaptive_avg_pool2d_0" [label="(1, 1152, 7, 7)", style=solid]; -"195 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/MemoryEfficientSwish[_swish]/__mul___1" -> "202 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/__mul___0" [label="(1, 1152, 7, 7)", style=solid]; -"196 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/adaptive_avg_pool2d_0" -> "197 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" [label="(1, 1152, 1, 1)", style=solid]; -"197 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" -> "198 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/MemoryEfficientSwish[_swish]/sigmoid_2" [label="(1, 48, 1, 1)", style=solid]; -"197 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" -> "199 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/MemoryEfficientSwish[_swish]/__mul___2" [label="(1, 48, 1, 1)", style=solid]; -"198 
EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/MemoryEfficientSwish[_swish]/sigmoid_2" -> "199 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/MemoryEfficientSwish[_swish]/__mul___2" [label="(1, 48, 1, 1)", style=solid]; -"199 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/MemoryEfficientSwish[_swish]/__mul___2" -> "200 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFUserConv2dStaticSamePadding[_se_expand]/conv2d_0" [label="(1, 48, 1, 1)", style=solid]; -"200 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFUserConv2dStaticSamePadding[_se_expand]/conv2d_0" -> "201 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/sigmoid_0" [label="(1, 1152, 1, 1)", style=solid]; -"201 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/sigmoid_0" -> "202 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/__mul___0" [label="(1, 1152, 1, 1)", style=solid]; -"202 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/__mul___0" -> "203 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFUserConv2dStaticSamePadding[_project_conv]/conv2d_0" [label="(1, 1152, 7, 7)", style=solid]; -"203 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFUserConv2dStaticSamePadding[_project_conv]/conv2d_0" -> "204 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFBatchNorm2d[_bn2]/batch_norm_0" [label="(1, 192, 7, 7)", style=solid]; -"204 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFBatchNorm2d[_bn2]/batch_norm_0" -> "205 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/__add___0" [label="(1, 192, 7, 7)", style=solid]; -"205 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/__add___0" -> "206 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFUserConv2dStaticSamePadding[_expand_conv]/conv2d_0" [label="(1, 192, 7, 7)", style=solid]; -"205 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/__add___0" -> "225 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/__add___0" [label="(1, 192, 7, 7)", style=solid]; -"206 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFUserConv2dStaticSamePadding[_expand_conv]/conv2d_0" -> "207 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFBatchNorm2d[_bn0]/batch_norm_0" [label="(1, 1152, 7, 7)", style=solid]; -"207 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFBatchNorm2d[_bn0]/batch_norm_0" -> "208 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/MemoryEfficientSwish[_swish]/sigmoid_0" [label="(1, 1152, 7, 7)", style=solid]; -"207 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFBatchNorm2d[_bn0]/batch_norm_0" -> "209 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/MemoryEfficientSwish[_swish]/__mul___0" [label="(1, 1152, 7, 7)", style=solid]; -"208 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/MemoryEfficientSwish[_swish]/sigmoid_0" -> "209 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/MemoryEfficientSwish[_swish]/__mul___0" [label="(1, 1152, 7, 7)", style=solid]; -"209 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/MemoryEfficientSwish[_swish]/__mul___0" -> "210 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateInputs[1]/__getitem___0" [label="(1, 1152, 7, 7)", style=solid]; -"210 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateInputs[1]/__getitem___0" -> "211 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ZeroPad2d[static_padding]/pad_0" [label="(1, 1152, 7, 7)", style=solid]; -"211 
EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ZeroPad2d[static_padding]/pad_0" -> "212 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/conv2d_0" [label="(1, 1152, 11, 11)", style=solid]; -"212 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/conv2d_0" -> "213 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFBatchNorm2d[_bn1]/batch_norm_0" [label="(1, 1152, 7, 7)", style=solid]; -"213 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFBatchNorm2d[_bn1]/batch_norm_0" -> "214 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/MemoryEfficientSwish[_swish]/sigmoid_1" [label="(1, 1152, 7, 7)", style=solid]; -"213 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFBatchNorm2d[_bn1]/batch_norm_0" -> "215 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/MemoryEfficientSwish[_swish]/__mul___1" [label="(1, 1152, 7, 7)", style=solid]; -"214 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/MemoryEfficientSwish[_swish]/sigmoid_1" -> "215 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/MemoryEfficientSwish[_swish]/__mul___1" [label="(1, 1152, 7, 7)", style=solid]; -"215 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/MemoryEfficientSwish[_swish]/__mul___1" -> "216 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/adaptive_avg_pool2d_0" [label="(1, 1152, 7, 7)", style=solid]; -"215 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/MemoryEfficientSwish[_swish]/__mul___1" -> "222 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/__mul___0" [label="(1, 1152, 7, 7)", style=solid]; -"216 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/adaptive_avg_pool2d_0" -> "217 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" [label="(1, 1152, 1, 1)", style=solid]; -"217 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" -> "218 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/MemoryEfficientSwish[_swish]/sigmoid_2" [label="(1, 48, 1, 1)", style=solid]; -"217 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" -> "219 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/MemoryEfficientSwish[_swish]/__mul___2" [label="(1, 48, 1, 1)", style=solid]; -"218 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/MemoryEfficientSwish[_swish]/sigmoid_2" -> "219 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/MemoryEfficientSwish[_swish]/__mul___2" [label="(1, 48, 1, 1)", style=solid]; -"219 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/MemoryEfficientSwish[_swish]/__mul___2" -> "220 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFUserConv2dStaticSamePadding[_se_expand]/conv2d_0" [label="(1, 48, 1, 1)", style=solid]; -"220 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFUserConv2dStaticSamePadding[_se_expand]/conv2d_0" -> "221 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/sigmoid_0" [label="(1, 1152, 1, 1)", style=solid]; -"221 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/sigmoid_0" -> "222 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/__mul___0" [label="(1, 1152, 1, 1)", style=solid]; -"222 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/__mul___0" -> "223 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFUserConv2dStaticSamePadding[_project_conv]/conv2d_0" [label="(1, 1152, 7, 7)", style=solid]; -"223 
EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFUserConv2dStaticSamePadding[_project_conv]/conv2d_0" -> "224 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFBatchNorm2d[_bn2]/batch_norm_0" [label="(1, 192, 7, 7)", style=solid]; -"224 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFBatchNorm2d[_bn2]/batch_norm_0" -> "225 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/__add___0" [label="(1, 192, 7, 7)", style=solid]; -"225 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/__add___0" -> "226 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/NNCFUserConv2dStaticSamePadding[_expand_conv]/conv2d_0" [label="(1, 192, 7, 7)", style=solid]; -"225 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/__add___0" -> "248 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/__add___0" [label="(1, 192, 7, 7)", style=solid]; -"226 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/NNCFUserConv2dStaticSamePadding[_expand_conv]/conv2d_0" -> "227 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/NNCFBatchNorm2d[_bn0]/batch_norm_0" [label="(1, 1152, 7, 7)", style=solid]; -"227 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/NNCFBatchNorm2d[_bn0]/batch_norm_0" -> "228 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/MemoryEfficientSwish[_swish]/sigmoid_0" [label="(1, 1152, 7, 7)", style=solid]; -"227 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/NNCFBatchNorm2d[_bn0]/batch_norm_0" -> "229 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/MemoryEfficientSwish[_swish]/__mul___0" [label="(1, 1152, 7, 7)", style=solid]; -"228 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/MemoryEfficientSwish[_swish]/sigmoid_0" -> "229 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/MemoryEfficientSwish[_swish]/__mul___0" [label="(1, 1152, 7, 7)", style=solid]; -"229 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/MemoryEfficientSwish[_swish]/__mul___0" -> "230 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateInputs[1]/__getitem___0" [label="(1, 1152, 7, 7)", style=solid]; -"230 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateInputs[1]/__getitem___0" -> "234 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ZeroPad2d[static_padding]/pad_0" [label="(1, 1152, 5, 5)", style=solid]; -"231 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateWeight[3]/ElasticKernelConv2DOp[op]/linear_0" -> "232 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateWeight[3]/ElasticKernelConv2DOp[op]/view_0" [label="(1152, 9)", style=solid]; -"232 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateWeight[3]/ElasticKernelConv2DOp[op]/view_0" -> "233 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateWeight[3]/ElasticKernelConv2DOp[op]/view_1" [label="(1152, 1, 9)", style=solid]; -"233 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateWeight[3]/ElasticKernelConv2DOp[op]/view_1" -> "235 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/conv2d_0" [label="(1152, 1, 3, 3)", style=solid]; -"234 
EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ZeroPad2d[static_padding]/pad_0" -> "235 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/conv2d_0" [label="(1, 1152, 9, 9)", style=solid]; -"235 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/conv2d_0" -> "236 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/NNCFBatchNorm2d[_bn1]/batch_norm_0" [label="(1, 1152, 7, 7)", style=solid]; -"236 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/NNCFBatchNorm2d[_bn1]/batch_norm_0" -> "237 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/MemoryEfficientSwish[_swish]/sigmoid_1" [label="(1, 1152, 7, 7)", style=solid]; -"236 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/NNCFBatchNorm2d[_bn1]/batch_norm_0" -> "238 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/MemoryEfficientSwish[_swish]/__mul___1" [label="(1, 1152, 7, 7)", style=solid]; -"237 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/MemoryEfficientSwish[_swish]/sigmoid_1" -> "238 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/MemoryEfficientSwish[_swish]/__mul___1" [label="(1, 1152, 7, 7)", style=solid]; -"238 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/MemoryEfficientSwish[_swish]/__mul___1" -> "239 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/adaptive_avg_pool2d_0" [label="(1, 1152, 7, 7)", style=solid]; -"238 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/MemoryEfficientSwish[_swish]/__mul___1" -> "245 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/__mul___0" [label="(1, 1152, 7, 7)", style=solid]; -"239 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/adaptive_avg_pool2d_0" -> "240 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" [label="(1, 1152, 1, 1)", style=solid]; -"240 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" -> "241 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/MemoryEfficientSwish[_swish]/sigmoid_2" [label="(1, 48, 1, 1)", style=solid]; -"240 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" -> "242 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/MemoryEfficientSwish[_swish]/__mul___2" [label="(1, 48, 1, 1)", style=solid]; -"241 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/MemoryEfficientSwish[_swish]/sigmoid_2" -> "242 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/MemoryEfficientSwish[_swish]/__mul___2" [label="(1, 48, 1, 1)", style=solid]; -"242 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/MemoryEfficientSwish[_swish]/__mul___2" -> "243 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/NNCFUserConv2dStaticSamePadding[_se_expand]/conv2d_0" [label="(1, 48, 1, 1)", style=solid]; -"243 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/NNCFUserConv2dStaticSamePadding[_se_expand]/conv2d_0" -> "244 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/sigmoid_0" [label="(1, 1152, 1, 1)", style=solid]; -"244 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/sigmoid_0" -> "245 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/__mul___0" [label="(1, 1152, 1, 1)", style=solid]; -"245 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/__mul___0" -> "246 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/NNCFUserConv2dStaticSamePadding[_project_conv]/conv2d_0" [label="(1, 1152, 7, 7)", style=solid]; -"246 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/NNCFUserConv2dStaticSamePadding[_project_conv]/conv2d_0" 
-> "247 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/NNCFBatchNorm2d[_bn2]/batch_norm_0" [label="(1, 192, 7, 7)", style=solid]; -"247 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/NNCFBatchNorm2d[_bn2]/batch_norm_0" -> "248 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/__add___0" [label="(1, 192, 7, 7)", style=solid]; -"248 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/__add___0" -> "249 EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/NNCFUserConv2dStaticSamePadding[_expand_conv]/conv2d_0" [label="(1, 192, 7, 7)", style=solid]; -"249 EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/NNCFUserConv2dStaticSamePadding[_expand_conv]/conv2d_0" -> "250 EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/NNCFBatchNorm2d[_bn0]/batch_norm_0" [label="(1, 1152, 7, 7)", style=solid]; -"250 EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/NNCFBatchNorm2d[_bn0]/batch_norm_0" -> "251 EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/MemoryEfficientSwish[_swish]/sigmoid_0" [label="(1, 1152, 7, 7)", style=solid]; -"250 EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/NNCFBatchNorm2d[_bn0]/batch_norm_0" -> "252 EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/MemoryEfficientSwish[_swish]/__mul___0" [label="(1, 1152, 7, 7)", style=solid]; -"251 EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/MemoryEfficientSwish[_swish]/sigmoid_0" -> "252 EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/MemoryEfficientSwish[_swish]/__mul___0" [label="(1, 1152, 7, 7)", style=solid]; -"252 EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/MemoryEfficientSwish[_swish]/__mul___0" -> "253 EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/NNCFUserConv2dStaticSamePadding[_project_conv]/conv2d_0" [label="(1, 1152, 7, 7)", style=solid]; -"253 EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/NNCFUserConv2dStaticSamePadding[_project_conv]/conv2d_0" -> "254 EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/NNCFBatchNorm2d[_bn2]/batch_norm_0" [label="(1, 320, 7, 7)", style=solid]; -"254 EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/NNCFBatchNorm2d[_bn2]/batch_norm_0" -> "255 EfficientNet/NNCFUserConv2dStaticSamePadding[_conv_head]/conv2d_0" [label="(1, 320, 7, 7)", style=solid]; -"255 EfficientNet/NNCFUserConv2dStaticSamePadding[_conv_head]/conv2d_0" -> "256 EfficientNet/NNCFBatchNorm2d[_bn1]/batch_norm_0" [label="(1, 1280, 7, 7)", style=solid]; -"256 EfficientNet/NNCFBatchNorm2d[_bn1]/batch_norm_0" -> "257 EfficientNet/MemoryEfficientSwish[_swish]/sigmoid_1" [label="(1, 1280, 7, 7)", style=solid]; -"256 EfficientNet/NNCFBatchNorm2d[_bn1]/batch_norm_0" -> "258 EfficientNet/MemoryEfficientSwish[_swish]/__mul___1" [label="(1, 1280, 7, 7)", style=solid]; -"257 EfficientNet/MemoryEfficientSwish[_swish]/sigmoid_1" -> "258 EfficientNet/MemoryEfficientSwish[_swish]/__mul___1" [label="(1, 1280, 7, 7)", style=solid]; -"258 EfficientNet/MemoryEfficientSwish[_swish]/__mul___1" -> "259 EfficientNet/AdaptiveAvgPool2d[_avg_pooling]/adaptive_avg_pool2d_0" [label="(1, 1280, 7, 7)", style=solid]; -"259 EfficientNet/AdaptiveAvgPool2d[_avg_pooling]/adaptive_avg_pool2d_0" -> "260 EfficientNet/flatten_0" [label="(1, 1280, 1, 1)", style=solid]; -"260 EfficientNet/flatten_0" -> "261 EfficientNet/Dropout[_dropout]/dropout_0" [label="(1, 1280)", style=solid]; -"261 EfficientNet/Dropout[_dropout]/dropout_0" -> "262 EfficientNet/NNCFLinear[_fc]/linear_0" [label="(1, 1280)", style=solid]; -"262 EfficientNet/NNCFLinear[_fc]/linear_0" -> "263 /nncf_model_output_0" [label="(1, 1000)", style=solid]; +"52 
EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateInputs[1]/__getitem___0" -> "53 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ZeroPad2d[static_padding]/pad_0" [label="(1, 144, 60, 60)", style=solid]; +"53 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ZeroPad2d[static_padding]/pad_0" -> "54 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/conv2d_0" [label="(1, 144, 63, 63)", style=solid]; +"54 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/conv2d_0" -> "55 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/NNCFBatchNorm2d[_bn1]/batch_norm_0" [label="(1, 144, 30, 30)", style=solid]; +"55 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/NNCFBatchNorm2d[_bn1]/batch_norm_0" -> "56 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/MemoryEfficientSwish[_swish]/sigmoid_1" [label="(1, 144, 30, 30)", style=solid]; +"55 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/NNCFBatchNorm2d[_bn1]/batch_norm_0" -> "57 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/MemoryEfficientSwish[_swish]/__mul___1" [label="(1, 144, 30, 30)", style=solid]; +"56 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/MemoryEfficientSwish[_swish]/sigmoid_1" -> "57 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/MemoryEfficientSwish[_swish]/__mul___1" [label="(1, 144, 30, 30)", style=solid]; +"57 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/MemoryEfficientSwish[_swish]/__mul___1" -> "58 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/adaptive_avg_pool2d_0" [label="(1, 144, 30, 30)", style=solid]; +"57 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/MemoryEfficientSwish[_swish]/__mul___1" -> "64 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/__mul___0" [label="(1, 144, 30, 30)", style=solid]; +"58 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/adaptive_avg_pool2d_0" -> "59 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" [label="(1, 144, 1, 1)", style=solid]; +"59 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" -> "60 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/MemoryEfficientSwish[_swish]/sigmoid_2" [label="(1, 6, 1, 1)", style=solid]; +"59 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" -> "61 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/MemoryEfficientSwish[_swish]/__mul___2" [label="(1, 6, 1, 1)", style=solid]; +"60 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/MemoryEfficientSwish[_swish]/sigmoid_2" -> "61 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/MemoryEfficientSwish[_swish]/__mul___2" [label="(1, 6, 1, 1)", style=solid]; +"61 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/MemoryEfficientSwish[_swish]/__mul___2" -> "62 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/NNCFUserConv2dStaticSamePadding[_se_expand]/conv2d_0" [label="(1, 6, 1, 1)", style=solid]; +"62 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/NNCFUserConv2dStaticSamePadding[_se_expand]/conv2d_0" -> "63 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/sigmoid_0" [label="(1, 144, 1, 1)", style=solid]; +"63 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/sigmoid_0" -> "64 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/__mul___0" [label="(1, 144, 1, 1)", style=solid]; +"64 
EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/__mul___0" -> "65 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/NNCFUserConv2dStaticSamePadding[_project_conv]/conv2d_0" [label="(1, 144, 30, 30)", style=solid]; +"65 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/NNCFUserConv2dStaticSamePadding[_project_conv]/conv2d_0" -> "66 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/NNCFBatchNorm2d[_bn2]/batch_norm_0" [label="(1, 40, 30, 30)", style=solid]; +"66 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/NNCFBatchNorm2d[_bn2]/batch_norm_0" -> "67 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/NNCFUserConv2dStaticSamePadding[_expand_conv]/conv2d_0" [label="(1, 40, 30, 30)", style=solid]; +"66 EfficientNet/ModuleList[_blocks]/MBConvBlock[3]/NNCFBatchNorm2d[_bn2]/batch_norm_0" -> "86 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/__add___0" [label="(1, 40, 30, 30)", style=solid]; +"67 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/NNCFUserConv2dStaticSamePadding[_expand_conv]/conv2d_0" -> "68 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/NNCFBatchNorm2d[_bn0]/batch_norm_0" [label="(1, 240, 30, 30)", style=solid]; +"68 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/NNCFBatchNorm2d[_bn0]/batch_norm_0" -> "69 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/MemoryEfficientSwish[_swish]/sigmoid_0" [label="(1, 240, 30, 30)", style=solid]; +"68 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/NNCFBatchNorm2d[_bn0]/batch_norm_0" -> "70 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/MemoryEfficientSwish[_swish]/__mul___0" [label="(1, 240, 30, 30)", style=solid]; +"69 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/MemoryEfficientSwish[_swish]/sigmoid_0" -> "70 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/MemoryEfficientSwish[_swish]/__mul___0" [label="(1, 240, 30, 30)", style=solid]; +"70 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/MemoryEfficientSwish[_swish]/__mul___0" -> "71 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateInputs[1]/__getitem___0" [label="(1, 240, 30, 30)", style=solid]; +"71 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateInputs[1]/__getitem___0" -> "72 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ZeroPad2d[static_padding]/pad_0" [label="(1, 240, 30, 30)", style=solid]; +"72 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ZeroPad2d[static_padding]/pad_0" -> "73 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/conv2d_0" [label="(1, 240, 34, 34)", style=solid]; +"73 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/conv2d_0" -> "74 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/NNCFBatchNorm2d[_bn1]/batch_norm_0" [label="(1, 240, 30, 30)", style=solid]; +"74 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/NNCFBatchNorm2d[_bn1]/batch_norm_0" -> "75 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/MemoryEfficientSwish[_swish]/sigmoid_1" [label="(1, 240, 30, 30)", style=solid]; +"74 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/NNCFBatchNorm2d[_bn1]/batch_norm_0" -> "76 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/MemoryEfficientSwish[_swish]/__mul___1" [label="(1, 240, 30, 30)", style=solid]; +"75 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/MemoryEfficientSwish[_swish]/sigmoid_1" -> "76 
EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/MemoryEfficientSwish[_swish]/__mul___1" [label="(1, 240, 30, 30)", style=solid]; +"76 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/MemoryEfficientSwish[_swish]/__mul___1" -> "77 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/adaptive_avg_pool2d_0" [label="(1, 240, 30, 30)", style=solid]; +"76 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/MemoryEfficientSwish[_swish]/__mul___1" -> "83 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/__mul___0" [label="(1, 240, 30, 30)", style=solid]; +"77 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/adaptive_avg_pool2d_0" -> "78 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" [label="(1, 240, 1, 1)", style=solid]; +"78 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" -> "79 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/MemoryEfficientSwish[_swish]/sigmoid_2" [label="(1, 10, 1, 1)", style=solid]; +"78 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" -> "80 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/MemoryEfficientSwish[_swish]/__mul___2" [label="(1, 10, 1, 1)", style=solid]; +"79 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/MemoryEfficientSwish[_swish]/sigmoid_2" -> "80 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/MemoryEfficientSwish[_swish]/__mul___2" [label="(1, 10, 1, 1)", style=solid]; +"80 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/MemoryEfficientSwish[_swish]/__mul___2" -> "81 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/NNCFUserConv2dStaticSamePadding[_se_expand]/conv2d_0" [label="(1, 10, 1, 1)", style=solid]; +"81 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/NNCFUserConv2dStaticSamePadding[_se_expand]/conv2d_0" -> "82 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/sigmoid_0" [label="(1, 240, 1, 1)", style=solid]; +"82 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/sigmoid_0" -> "83 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/__mul___0" [label="(1, 240, 1, 1)", style=solid]; +"83 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/__mul___0" -> "84 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/NNCFUserConv2dStaticSamePadding[_project_conv]/conv2d_0" [label="(1, 240, 30, 30)", style=solid]; +"84 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/NNCFUserConv2dStaticSamePadding[_project_conv]/conv2d_0" -> "85 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/NNCFBatchNorm2d[_bn2]/batch_norm_0" [label="(1, 40, 30, 30)", style=solid]; +"85 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/NNCFBatchNorm2d[_bn2]/batch_norm_0" -> "86 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/__add___0" [label="(1, 40, 30, 30)", style=solid]; +"86 EfficientNet/ModuleList[_blocks]/MBConvBlock[4]/__add___0" -> "87 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/NNCFUserConv2dStaticSamePadding[_expand_conv]/conv2d_0" [label="(1, 40, 30, 30)", style=solid]; +"87 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/NNCFUserConv2dStaticSamePadding[_expand_conv]/conv2d_0" -> "88 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/NNCFBatchNorm2d[_bn0]/batch_norm_0" [label="(1, 240, 30, 30)", style=solid]; +"88 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/NNCFBatchNorm2d[_bn0]/batch_norm_0" -> "89 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/MemoryEfficientSwish[_swish]/sigmoid_0" [label="(1, 240, 30, 30)", style=solid]; +"88 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/NNCFBatchNorm2d[_bn0]/batch_norm_0" -> "90 
EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/MemoryEfficientSwish[_swish]/__mul___0" [label="(1, 240, 30, 30)", style=solid]; +"89 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/MemoryEfficientSwish[_swish]/sigmoid_0" -> "90 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/MemoryEfficientSwish[_swish]/__mul___0" [label="(1, 240, 30, 30)", style=solid]; +"90 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/MemoryEfficientSwish[_swish]/__mul___0" -> "91 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateInputs[1]/__getitem___0" [label="(1, 240, 30, 30)", style=solid]; +"91 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateInputs[1]/__getitem___0" -> "92 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ZeroPad2d[static_padding]/pad_0" [label="(1, 240, 30, 30)", style=solid]; +"92 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ZeroPad2d[static_padding]/pad_0" -> "93 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/conv2d_0" [label="(1, 240, 31, 31)", style=solid]; +"93 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/conv2d_0" -> "94 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/NNCFBatchNorm2d[_bn1]/batch_norm_0" [label="(1, 240, 15, 15)", style=solid]; +"94 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/NNCFBatchNorm2d[_bn1]/batch_norm_0" -> "95 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/MemoryEfficientSwish[_swish]/sigmoid_1" [label="(1, 240, 15, 15)", style=solid]; +"94 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/NNCFBatchNorm2d[_bn1]/batch_norm_0" -> "96 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/MemoryEfficientSwish[_swish]/__mul___1" [label="(1, 240, 15, 15)", style=solid]; +"95 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/MemoryEfficientSwish[_swish]/sigmoid_1" -> "96 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/MemoryEfficientSwish[_swish]/__mul___1" [label="(1, 240, 15, 15)", style=solid]; +"96 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/MemoryEfficientSwish[_swish]/__mul___1" -> "97 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/adaptive_avg_pool2d_0" [label="(1, 240, 15, 15)", style=solid]; +"96 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/MemoryEfficientSwish[_swish]/__mul___1" -> "103 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/__mul___0" [label="(1, 240, 15, 15)", style=solid]; +"97 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/adaptive_avg_pool2d_0" -> "98 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" [label="(1, 240, 1, 1)", style=solid]; +"98 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" -> "99 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/MemoryEfficientSwish[_swish]/sigmoid_2" [label="(1, 10, 1, 1)", style=solid]; +"98 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" -> "100 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/MemoryEfficientSwish[_swish]/__mul___2" [label="(1, 10, 1, 1)", style=solid]; +"99 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/MemoryEfficientSwish[_swish]/sigmoid_2" -> "100 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/MemoryEfficientSwish[_swish]/__mul___2" [label="(1, 10, 1, 1)", style=solid]; +"100 
EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/MemoryEfficientSwish[_swish]/__mul___2" -> "101 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/NNCFUserConv2dStaticSamePadding[_se_expand]/conv2d_0" [label="(1, 10, 1, 1)", style=solid]; +"101 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/NNCFUserConv2dStaticSamePadding[_se_expand]/conv2d_0" -> "102 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/sigmoid_0" [label="(1, 240, 1, 1)", style=solid]; +"102 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/sigmoid_0" -> "103 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/__mul___0" [label="(1, 240, 1, 1)", style=solid]; +"103 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/__mul___0" -> "104 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/NNCFUserConv2dStaticSamePadding[_project_conv]/conv2d_0" [label="(1, 240, 15, 15)", style=solid]; +"104 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/NNCFUserConv2dStaticSamePadding[_project_conv]/conv2d_0" -> "105 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/NNCFBatchNorm2d[_bn2]/batch_norm_0" [label="(1, 80, 15, 15)", style=solid]; +"105 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/NNCFBatchNorm2d[_bn2]/batch_norm_0" -> "106 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/NNCFUserConv2dStaticSamePadding[_expand_conv]/conv2d_0" [label="(1, 80, 15, 15)", style=solid]; +"105 EfficientNet/ModuleList[_blocks]/MBConvBlock[5]/NNCFBatchNorm2d[_bn2]/batch_norm_0" -> "125 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/__add___0" [label="(1, 80, 15, 15)", style=solid]; +"106 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/NNCFUserConv2dStaticSamePadding[_expand_conv]/conv2d_0" -> "107 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/NNCFBatchNorm2d[_bn0]/batch_norm_0" [label="(1, 480, 15, 15)", style=solid]; +"107 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/NNCFBatchNorm2d[_bn0]/batch_norm_0" -> "108 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/MemoryEfficientSwish[_swish]/sigmoid_0" [label="(1, 480, 15, 15)", style=solid]; +"107 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/NNCFBatchNorm2d[_bn0]/batch_norm_0" -> "109 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/MemoryEfficientSwish[_swish]/__mul___0" [label="(1, 480, 15, 15)", style=solid]; +"108 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/MemoryEfficientSwish[_swish]/sigmoid_0" -> "109 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/MemoryEfficientSwish[_swish]/__mul___0" [label="(1, 480, 15, 15)", style=solid]; +"109 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/MemoryEfficientSwish[_swish]/__mul___0" -> "110 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateInputs[1]/__getitem___0" [label="(1, 480, 15, 15)", style=solid]; +"110 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateInputs[1]/__getitem___0" -> "111 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ZeroPad2d[static_padding]/pad_0" [label="(1, 480, 15, 15)", style=solid]; +"111 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ZeroPad2d[static_padding]/pad_0" -> "112 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/conv2d_0" [label="(1, 480, 17, 17)", style=solid]; +"112 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/conv2d_0" -> "113 
EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/NNCFBatchNorm2d[_bn1]/batch_norm_0" [label="(1, 480, 15, 15)", style=solid]; +"113 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/NNCFBatchNorm2d[_bn1]/batch_norm_0" -> "114 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/MemoryEfficientSwish[_swish]/sigmoid_1" [label="(1, 480, 15, 15)", style=solid]; +"113 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/NNCFBatchNorm2d[_bn1]/batch_norm_0" -> "115 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/MemoryEfficientSwish[_swish]/__mul___1" [label="(1, 480, 15, 15)", style=solid]; +"114 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/MemoryEfficientSwish[_swish]/sigmoid_1" -> "115 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/MemoryEfficientSwish[_swish]/__mul___1" [label="(1, 480, 15, 15)", style=solid]; +"115 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/MemoryEfficientSwish[_swish]/__mul___1" -> "116 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/adaptive_avg_pool2d_0" [label="(1, 480, 15, 15)", style=solid]; +"115 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/MemoryEfficientSwish[_swish]/__mul___1" -> "122 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/__mul___0" [label="(1, 480, 15, 15)", style=solid]; +"116 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/adaptive_avg_pool2d_0" -> "117 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" [label="(1, 480, 1, 1)", style=solid]; +"117 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" -> "118 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/MemoryEfficientSwish[_swish]/sigmoid_2" [label="(1, 20, 1, 1)", style=solid]; +"117 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" -> "119 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/MemoryEfficientSwish[_swish]/__mul___2" [label="(1, 20, 1, 1)", style=solid]; +"118 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/MemoryEfficientSwish[_swish]/sigmoid_2" -> "119 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/MemoryEfficientSwish[_swish]/__mul___2" [label="(1, 20, 1, 1)", style=solid]; +"119 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/MemoryEfficientSwish[_swish]/__mul___2" -> "120 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/NNCFUserConv2dStaticSamePadding[_se_expand]/conv2d_0" [label="(1, 20, 1, 1)", style=solid]; +"120 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/NNCFUserConv2dStaticSamePadding[_se_expand]/conv2d_0" -> "121 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/sigmoid_0" [label="(1, 480, 1, 1)", style=solid]; +"121 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/sigmoid_0" -> "122 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/__mul___0" [label="(1, 480, 1, 1)", style=solid]; +"122 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/__mul___0" -> "123 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/NNCFUserConv2dStaticSamePadding[_project_conv]/conv2d_0" [label="(1, 480, 15, 15)", style=solid]; +"123 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/NNCFUserConv2dStaticSamePadding[_project_conv]/conv2d_0" -> "124 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/NNCFBatchNorm2d[_bn2]/batch_norm_0" [label="(1, 80, 15, 15)", style=solid]; +"124 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/NNCFBatchNorm2d[_bn2]/batch_norm_0" -> "125 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/__add___0" [label="(1, 80, 15, 15)", style=solid]; +"125 EfficientNet/ModuleList[_blocks]/MBConvBlock[6]/__add___0" -> "126 
EfficientNet/ModuleList[_blocks]/MBConvBlock[8]/NNCFUserConv2dStaticSamePadding[_expand_conv]/conv2d_0" [label="(1, 80, 15, 15)", style=solid]; +"126 EfficientNet/ModuleList[_blocks]/MBConvBlock[8]/NNCFUserConv2dStaticSamePadding[_expand_conv]/conv2d_0" -> "127 EfficientNet/ModuleList[_blocks]/MBConvBlock[8]/NNCFBatchNorm2d[_bn0]/batch_norm_0" [label="(1, 480, 15, 15)", style=solid]; +"127 EfficientNet/ModuleList[_blocks]/MBConvBlock[8]/NNCFBatchNorm2d[_bn0]/batch_norm_0" -> "128 EfficientNet/ModuleList[_blocks]/MBConvBlock[8]/MemoryEfficientSwish[_swish]/sigmoid_0" [label="(1, 480, 15, 15)", style=solid]; +"127 EfficientNet/ModuleList[_blocks]/MBConvBlock[8]/NNCFBatchNorm2d[_bn0]/batch_norm_0" -> "129 EfficientNet/ModuleList[_blocks]/MBConvBlock[8]/MemoryEfficientSwish[_swish]/__mul___0" [label="(1, 480, 15, 15)", style=solid]; +"128 EfficientNet/ModuleList[_blocks]/MBConvBlock[8]/MemoryEfficientSwish[_swish]/sigmoid_0" -> "129 EfficientNet/ModuleList[_blocks]/MBConvBlock[8]/MemoryEfficientSwish[_swish]/__mul___0" [label="(1, 480, 15, 15)", style=solid]; +"129 EfficientNet/ModuleList[_blocks]/MBConvBlock[8]/MemoryEfficientSwish[_swish]/__mul___0" -> "130 EfficientNet/ModuleList[_blocks]/MBConvBlock[8]/NNCFUserConv2dStaticSamePadding[_project_conv]/conv2d_0" [label="(1, 480, 15, 15)", style=solid]; +"130 EfficientNet/ModuleList[_blocks]/MBConvBlock[8]/NNCFUserConv2dStaticSamePadding[_project_conv]/conv2d_0" -> "131 EfficientNet/ModuleList[_blocks]/MBConvBlock[8]/NNCFBatchNorm2d[_bn2]/batch_norm_0" [label="(1, 112, 15, 15)", style=solid]; +"131 EfficientNet/ModuleList[_blocks]/MBConvBlock[8]/NNCFBatchNorm2d[_bn2]/batch_norm_0" -> "132 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFUserConv2dStaticSamePadding[_expand_conv]/conv2d_0" [label="(1, 112, 15, 15)", style=solid]; +"131 EfficientNet/ModuleList[_blocks]/MBConvBlock[8]/NNCFBatchNorm2d[_bn2]/batch_norm_0" -> "151 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/__add___0" [label="(1, 112, 15, 15)", style=solid]; +"132 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFUserConv2dStaticSamePadding[_expand_conv]/conv2d_0" -> "133 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFBatchNorm2d[_bn0]/batch_norm_0" [label="(1, 672, 15, 15)", style=solid]; +"133 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFBatchNorm2d[_bn0]/batch_norm_0" -> "134 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/MemoryEfficientSwish[_swish]/sigmoid_0" [label="(1, 672, 15, 15)", style=solid]; +"133 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFBatchNorm2d[_bn0]/batch_norm_0" -> "135 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/MemoryEfficientSwish[_swish]/__mul___0" [label="(1, 672, 15, 15)", style=solid]; +"134 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/MemoryEfficientSwish[_swish]/sigmoid_0" -> "135 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/MemoryEfficientSwish[_swish]/__mul___0" [label="(1, 672, 15, 15)", style=solid]; +"135 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/MemoryEfficientSwish[_swish]/__mul___0" -> "136 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateInputs[1]/__getitem___0" [label="(1, 672, 15, 15)", style=solid]; +"136 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateInputs[1]/__getitem___0" -> "137 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ZeroPad2d[static_padding]/pad_0" [label="(1, 672, 15, 15)", 
style=solid]; +"137 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ZeroPad2d[static_padding]/pad_0" -> "138 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/conv2d_0" [label="(1, 672, 19, 19)", style=solid]; +"138 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/conv2d_0" -> "139 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFBatchNorm2d[_bn1]/batch_norm_0" [label="(1, 672, 15, 15)", style=solid]; +"139 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFBatchNorm2d[_bn1]/batch_norm_0" -> "140 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/MemoryEfficientSwish[_swish]/sigmoid_1" [label="(1, 672, 15, 15)", style=solid]; +"139 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFBatchNorm2d[_bn1]/batch_norm_0" -> "141 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/MemoryEfficientSwish[_swish]/__mul___1" [label="(1, 672, 15, 15)", style=solid]; +"140 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/MemoryEfficientSwish[_swish]/sigmoid_1" -> "141 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/MemoryEfficientSwish[_swish]/__mul___1" [label="(1, 672, 15, 15)", style=solid]; +"141 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/MemoryEfficientSwish[_swish]/__mul___1" -> "142 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/adaptive_avg_pool2d_0" [label="(1, 672, 15, 15)", style=solid]; +"141 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/MemoryEfficientSwish[_swish]/__mul___1" -> "148 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/__mul___0" [label="(1, 672, 15, 15)", style=solid]; +"142 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/adaptive_avg_pool2d_0" -> "143 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" [label="(1, 672, 1, 1)", style=solid]; +"143 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" -> "144 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/MemoryEfficientSwish[_swish]/sigmoid_2" [label="(1, 28, 1, 1)", style=solid]; +"143 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" -> "145 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/MemoryEfficientSwish[_swish]/__mul___2" [label="(1, 28, 1, 1)", style=solid]; +"144 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/MemoryEfficientSwish[_swish]/sigmoid_2" -> "145 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/MemoryEfficientSwish[_swish]/__mul___2" [label="(1, 28, 1, 1)", style=solid]; +"145 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/MemoryEfficientSwish[_swish]/__mul___2" -> "146 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFUserConv2dStaticSamePadding[_se_expand]/conv2d_0" [label="(1, 28, 1, 1)", style=solid]; +"146 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFUserConv2dStaticSamePadding[_se_expand]/conv2d_0" -> "147 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/sigmoid_0" [label="(1, 672, 1, 1)", style=solid]; +"147 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/sigmoid_0" -> "148 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/__mul___0" [label="(1, 672, 1, 1)", style=solid]; +"148 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/__mul___0" -> "149 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFUserConv2dStaticSamePadding[_project_conv]/conv2d_0" [label="(1, 672, 15, 15)", style=solid]; +"149 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFUserConv2dStaticSamePadding[_project_conv]/conv2d_0" -> 
"150 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFBatchNorm2d[_bn2]/batch_norm_0" [label="(1, 112, 15, 15)", style=solid]; +"150 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/NNCFBatchNorm2d[_bn2]/batch_norm_0" -> "151 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/__add___0" [label="(1, 112, 15, 15)", style=solid]; +"151 EfficientNet/ModuleList[_blocks]/MBConvBlock[9]/__add___0" -> "152 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFUserConv2dStaticSamePadding[_expand_conv]/conv2d_0" [label="(1, 112, 15, 15)", style=solid]; +"152 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFUserConv2dStaticSamePadding[_expand_conv]/conv2d_0" -> "153 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFBatchNorm2d[_bn0]/batch_norm_0" [label="(1, 672, 15, 15)", style=solid]; +"153 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFBatchNorm2d[_bn0]/batch_norm_0" -> "154 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/MemoryEfficientSwish[_swish]/sigmoid_0" [label="(1, 672, 15, 15)", style=solid]; +"153 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFBatchNorm2d[_bn0]/batch_norm_0" -> "155 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/MemoryEfficientSwish[_swish]/__mul___0" [label="(1, 672, 15, 15)", style=solid]; +"154 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/MemoryEfficientSwish[_swish]/sigmoid_0" -> "155 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/MemoryEfficientSwish[_swish]/__mul___0" [label="(1, 672, 15, 15)", style=solid]; +"155 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/MemoryEfficientSwish[_swish]/__mul___0" -> "156 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateInputs[1]/__getitem___0" [label="(1, 672, 15, 15)", style=solid]; +"156 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateInputs[1]/__getitem___0" -> "157 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ZeroPad2d[static_padding]/pad_0" [label="(1, 672, 15, 15)", style=solid]; +"157 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ZeroPad2d[static_padding]/pad_0" -> "158 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/conv2d_0" [label="(1, 672, 18, 18)", style=solid]; +"158 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/conv2d_0" -> "159 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFBatchNorm2d[_bn1]/batch_norm_0" [label="(1, 672, 7, 7)", style=solid]; +"159 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFBatchNorm2d[_bn1]/batch_norm_0" -> "160 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/MemoryEfficientSwish[_swish]/sigmoid_1" [label="(1, 672, 7, 7)", style=solid]; +"159 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFBatchNorm2d[_bn1]/batch_norm_0" -> "161 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/MemoryEfficientSwish[_swish]/__mul___1" [label="(1, 672, 7, 7)", style=solid]; +"160 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/MemoryEfficientSwish[_swish]/sigmoid_1" -> "161 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/MemoryEfficientSwish[_swish]/__mul___1" [label="(1, 672, 7, 7)", style=solid]; +"161 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/MemoryEfficientSwish[_swish]/__mul___1" -> "162 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/adaptive_avg_pool2d_0" [label="(1, 672, 7, 7)", style=solid]; 
+"161 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/MemoryEfficientSwish[_swish]/__mul___1" -> "168 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/__mul___0" [label="(1, 672, 7, 7)", style=solid]; +"162 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/adaptive_avg_pool2d_0" -> "163 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" [label="(1, 672, 1, 1)", style=solid]; +"163 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" -> "164 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/MemoryEfficientSwish[_swish]/sigmoid_2" [label="(1, 28, 1, 1)", style=solid]; +"163 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" -> "165 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/MemoryEfficientSwish[_swish]/__mul___2" [label="(1, 28, 1, 1)", style=solid]; +"164 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/MemoryEfficientSwish[_swish]/sigmoid_2" -> "165 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/MemoryEfficientSwish[_swish]/__mul___2" [label="(1, 28, 1, 1)", style=solid]; +"165 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/MemoryEfficientSwish[_swish]/__mul___2" -> "166 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFUserConv2dStaticSamePadding[_se_expand]/conv2d_0" [label="(1, 28, 1, 1)", style=solid]; +"166 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFUserConv2dStaticSamePadding[_se_expand]/conv2d_0" -> "167 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/sigmoid_0" [label="(1, 672, 1, 1)", style=solid]; +"167 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/sigmoid_0" -> "168 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/__mul___0" [label="(1, 672, 1, 1)", style=solid]; +"168 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/__mul___0" -> "169 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFUserConv2dStaticSamePadding[_project_conv]/conv2d_0" [label="(1, 672, 7, 7)", style=solid]; +"169 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFUserConv2dStaticSamePadding[_project_conv]/conv2d_0" -> "170 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFBatchNorm2d[_bn2]/batch_norm_0" [label="(1, 192, 7, 7)", style=solid]; +"170 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFBatchNorm2d[_bn2]/batch_norm_0" -> "171 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFUserConv2dStaticSamePadding[_expand_conv]/conv2d_0" [label="(1, 192, 7, 7)", style=solid]; +"170 EfficientNet/ModuleList[_blocks]/MBConvBlock[11]/NNCFBatchNorm2d[_bn2]/batch_norm_0" -> "190 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/__add___0" [label="(1, 192, 7, 7)", style=solid]; +"171 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFUserConv2dStaticSamePadding[_expand_conv]/conv2d_0" -> "172 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFBatchNorm2d[_bn0]/batch_norm_0" [label="(1, 1152, 7, 7)", style=solid]; +"172 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFBatchNorm2d[_bn0]/batch_norm_0" -> "173 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/MemoryEfficientSwish[_swish]/sigmoid_0" [label="(1, 1152, 7, 7)", style=solid]; +"172 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFBatchNorm2d[_bn0]/batch_norm_0" -> "174 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/MemoryEfficientSwish[_swish]/__mul___0" [label="(1, 1152, 7, 7)", style=solid]; +"173 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/MemoryEfficientSwish[_swish]/sigmoid_0" -> "174 
EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/MemoryEfficientSwish[_swish]/__mul___0" [label="(1, 1152, 7, 7)", style=solid]; +"174 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/MemoryEfficientSwish[_swish]/__mul___0" -> "175 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateInputs[1]/__getitem___0" [label="(1, 1152, 7, 7)", style=solid]; +"175 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateInputs[1]/__getitem___0" -> "176 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ZeroPad2d[static_padding]/pad_0" [label="(1, 1152, 7, 7)", style=solid]; +"176 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ZeroPad2d[static_padding]/pad_0" -> "177 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/conv2d_0" [label="(1, 1152, 11, 11)", style=solid]; +"177 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/conv2d_0" -> "178 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFBatchNorm2d[_bn1]/batch_norm_0" [label="(1, 1152, 7, 7)", style=solid]; +"178 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFBatchNorm2d[_bn1]/batch_norm_0" -> "179 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/MemoryEfficientSwish[_swish]/sigmoid_1" [label="(1, 1152, 7, 7)", style=solid]; +"178 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFBatchNorm2d[_bn1]/batch_norm_0" -> "180 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/MemoryEfficientSwish[_swish]/__mul___1" [label="(1, 1152, 7, 7)", style=solid]; +"179 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/MemoryEfficientSwish[_swish]/sigmoid_1" -> "180 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/MemoryEfficientSwish[_swish]/__mul___1" [label="(1, 1152, 7, 7)", style=solid]; +"180 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/MemoryEfficientSwish[_swish]/__mul___1" -> "181 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/adaptive_avg_pool2d_0" [label="(1, 1152, 7, 7)", style=solid]; +"180 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/MemoryEfficientSwish[_swish]/__mul___1" -> "187 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/__mul___0" [label="(1, 1152, 7, 7)", style=solid]; +"181 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/adaptive_avg_pool2d_0" -> "182 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" [label="(1, 1152, 1, 1)", style=solid]; +"182 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" -> "183 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/MemoryEfficientSwish[_swish]/sigmoid_2" [label="(1, 48, 1, 1)", style=solid]; +"182 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" -> "184 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/MemoryEfficientSwish[_swish]/__mul___2" [label="(1, 48, 1, 1)", style=solid]; +"183 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/MemoryEfficientSwish[_swish]/sigmoid_2" -> "184 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/MemoryEfficientSwish[_swish]/__mul___2" [label="(1, 48, 1, 1)", style=solid]; +"184 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/MemoryEfficientSwish[_swish]/__mul___2" -> "185 
EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFUserConv2dStaticSamePadding[_se_expand]/conv2d_0" [label="(1, 48, 1, 1)", style=solid]; +"185 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFUserConv2dStaticSamePadding[_se_expand]/conv2d_0" -> "186 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/sigmoid_0" [label="(1, 1152, 1, 1)", style=solid]; +"186 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/sigmoid_0" -> "187 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/__mul___0" [label="(1, 1152, 1, 1)", style=solid]; +"187 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/__mul___0" -> "188 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFUserConv2dStaticSamePadding[_project_conv]/conv2d_0" [label="(1, 1152, 7, 7)", style=solid]; +"188 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFUserConv2dStaticSamePadding[_project_conv]/conv2d_0" -> "189 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFBatchNorm2d[_bn2]/batch_norm_0" [label="(1, 192, 7, 7)", style=solid]; +"189 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/NNCFBatchNorm2d[_bn2]/batch_norm_0" -> "190 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/__add___0" [label="(1, 192, 7, 7)", style=solid]; +"190 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/__add___0" -> "191 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFUserConv2dStaticSamePadding[_expand_conv]/conv2d_0" [label="(1, 192, 7, 7)", style=solid]; +"190 EfficientNet/ModuleList[_blocks]/MBConvBlock[12]/__add___0" -> "210 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/__add___0" [label="(1, 192, 7, 7)", style=solid]; +"191 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFUserConv2dStaticSamePadding[_expand_conv]/conv2d_0" -> "192 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFBatchNorm2d[_bn0]/batch_norm_0" [label="(1, 1152, 7, 7)", style=solid]; +"192 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFBatchNorm2d[_bn0]/batch_norm_0" -> "193 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/MemoryEfficientSwish[_swish]/sigmoid_0" [label="(1, 1152, 7, 7)", style=solid]; +"192 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFBatchNorm2d[_bn0]/batch_norm_0" -> "194 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/MemoryEfficientSwish[_swish]/__mul___0" [label="(1, 1152, 7, 7)", style=solid]; +"193 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/MemoryEfficientSwish[_swish]/sigmoid_0" -> "194 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/MemoryEfficientSwish[_swish]/__mul___0" [label="(1, 1152, 7, 7)", style=solid]; +"194 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/MemoryEfficientSwish[_swish]/__mul___0" -> "195 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateInputs[1]/__getitem___0" [label="(1, 1152, 7, 7)", style=solid]; +"195 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateInputs[1]/__getitem___0" -> "196 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ZeroPad2d[static_padding]/pad_0" [label="(1, 1152, 7, 7)", style=solid]; +"196 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ZeroPad2d[static_padding]/pad_0" -> "197 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/conv2d_0" [label="(1, 1152, 11, 11)", style=solid]; +"197 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/conv2d_0" -> 
"198 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFBatchNorm2d[_bn1]/batch_norm_0" [label="(1, 1152, 7, 7)", style=solid]; +"198 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFBatchNorm2d[_bn1]/batch_norm_0" -> "199 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/MemoryEfficientSwish[_swish]/sigmoid_1" [label="(1, 1152, 7, 7)", style=solid]; +"198 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFBatchNorm2d[_bn1]/batch_norm_0" -> "200 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/MemoryEfficientSwish[_swish]/__mul___1" [label="(1, 1152, 7, 7)", style=solid]; +"199 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/MemoryEfficientSwish[_swish]/sigmoid_1" -> "200 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/MemoryEfficientSwish[_swish]/__mul___1" [label="(1, 1152, 7, 7)", style=solid]; +"200 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/MemoryEfficientSwish[_swish]/__mul___1" -> "201 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/adaptive_avg_pool2d_0" [label="(1, 1152, 7, 7)", style=solid]; +"200 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/MemoryEfficientSwish[_swish]/__mul___1" -> "207 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/__mul___0" [label="(1, 1152, 7, 7)", style=solid]; +"201 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/adaptive_avg_pool2d_0" -> "202 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" [label="(1, 1152, 1, 1)", style=solid]; +"202 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" -> "203 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/MemoryEfficientSwish[_swish]/sigmoid_2" [label="(1, 48, 1, 1)", style=solid]; +"202 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" -> "204 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/MemoryEfficientSwish[_swish]/__mul___2" [label="(1, 48, 1, 1)", style=solid]; +"203 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/MemoryEfficientSwish[_swish]/sigmoid_2" -> "204 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/MemoryEfficientSwish[_swish]/__mul___2" [label="(1, 48, 1, 1)", style=solid]; +"204 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/MemoryEfficientSwish[_swish]/__mul___2" -> "205 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFUserConv2dStaticSamePadding[_se_expand]/conv2d_0" [label="(1, 48, 1, 1)", style=solid]; +"205 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFUserConv2dStaticSamePadding[_se_expand]/conv2d_0" -> "206 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/sigmoid_0" [label="(1, 1152, 1, 1)", style=solid]; +"206 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/sigmoid_0" -> "207 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/__mul___0" [label="(1, 1152, 1, 1)", style=solid]; +"207 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/__mul___0" -> "208 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFUserConv2dStaticSamePadding[_project_conv]/conv2d_0" [label="(1, 1152, 7, 7)", style=solid]; +"208 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFUserConv2dStaticSamePadding[_project_conv]/conv2d_0" -> "209 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFBatchNorm2d[_bn2]/batch_norm_0" [label="(1, 192, 7, 7)", style=solid]; +"209 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/NNCFBatchNorm2d[_bn2]/batch_norm_0" -> "210 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/__add___0" [label="(1, 192, 7, 7)", style=solid]; +"210 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/__add___0" -> "211 
EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/NNCFUserConv2dStaticSamePadding[_expand_conv]/conv2d_0" [label="(1, 192, 7, 7)", style=solid]; +"210 EfficientNet/ModuleList[_blocks]/MBConvBlock[13]/__add___0" -> "230 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/__add___0" [label="(1, 192, 7, 7)", style=solid]; +"211 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/NNCFUserConv2dStaticSamePadding[_expand_conv]/conv2d_0" -> "212 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/NNCFBatchNorm2d[_bn0]/batch_norm_0" [label="(1, 1152, 7, 7)", style=solid]; +"212 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/NNCFBatchNorm2d[_bn0]/batch_norm_0" -> "213 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/MemoryEfficientSwish[_swish]/sigmoid_0" [label="(1, 1152, 7, 7)", style=solid]; +"212 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/NNCFBatchNorm2d[_bn0]/batch_norm_0" -> "214 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/MemoryEfficientSwish[_swish]/__mul___0" [label="(1, 1152, 7, 7)", style=solid]; +"213 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/MemoryEfficientSwish[_swish]/sigmoid_0" -> "214 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/MemoryEfficientSwish[_swish]/__mul___0" [label="(1, 1152, 7, 7)", style=solid]; +"214 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/MemoryEfficientSwish[_swish]/__mul___0" -> "215 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateInputs[1]/__getitem___0" [label="(1, 1152, 7, 7)", style=solid]; +"215 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ModuleDict[pre_ops]/UpdateInputs[1]/__getitem___0" -> "216 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ZeroPad2d[static_padding]/pad_0" [label="(1, 1152, 7, 7)", style=solid]; +"216 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/ZeroPad2d[static_padding]/pad_0" -> "217 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/conv2d_0" [label="(1, 1152, 11, 11)", style=solid]; +"217 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/NNCFUserConv2dStaticSamePadding[_depthwise_conv]/conv2d_0" -> "218 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/NNCFBatchNorm2d[_bn1]/batch_norm_0" [label="(1, 1152, 7, 7)", style=solid]; +"218 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/NNCFBatchNorm2d[_bn1]/batch_norm_0" -> "219 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/MemoryEfficientSwish[_swish]/sigmoid_1" [label="(1, 1152, 7, 7)", style=solid]; +"218 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/NNCFBatchNorm2d[_bn1]/batch_norm_0" -> "220 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/MemoryEfficientSwish[_swish]/__mul___1" [label="(1, 1152, 7, 7)", style=solid]; +"219 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/MemoryEfficientSwish[_swish]/sigmoid_1" -> "220 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/MemoryEfficientSwish[_swish]/__mul___1" [label="(1, 1152, 7, 7)", style=solid]; +"220 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/MemoryEfficientSwish[_swish]/__mul___1" -> "221 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/adaptive_avg_pool2d_0" [label="(1, 1152, 7, 7)", style=solid]; +"220 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/MemoryEfficientSwish[_swish]/__mul___1" -> "227 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/__mul___0" [label="(1, 1152, 7, 7)", style=solid]; +"221 
EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/adaptive_avg_pool2d_0" -> "222 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" [label="(1, 1152, 1, 1)", style=solid]; +"222 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" -> "223 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/MemoryEfficientSwish[_swish]/sigmoid_2" [label="(1, 48, 1, 1)", style=solid]; +"222 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/NNCFUserConv2dStaticSamePadding[_se_reduce]/conv2d_0" -> "224 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/MemoryEfficientSwish[_swish]/__mul___2" [label="(1, 48, 1, 1)", style=solid]; +"223 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/MemoryEfficientSwish[_swish]/sigmoid_2" -> "224 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/MemoryEfficientSwish[_swish]/__mul___2" [label="(1, 48, 1, 1)", style=solid]; +"224 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/MemoryEfficientSwish[_swish]/__mul___2" -> "225 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/NNCFUserConv2dStaticSamePadding[_se_expand]/conv2d_0" [label="(1, 48, 1, 1)", style=solid]; +"225 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/NNCFUserConv2dStaticSamePadding[_se_expand]/conv2d_0" -> "226 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/sigmoid_0" [label="(1, 1152, 1, 1)", style=solid]; +"226 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/sigmoid_0" -> "227 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/__mul___0" [label="(1, 1152, 1, 1)", style=solid]; +"227 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/__mul___0" -> "228 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/NNCFUserConv2dStaticSamePadding[_project_conv]/conv2d_0" [label="(1, 1152, 7, 7)", style=solid]; +"228 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/NNCFUserConv2dStaticSamePadding[_project_conv]/conv2d_0" -> "229 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/NNCFBatchNorm2d[_bn2]/batch_norm_0" [label="(1, 192, 7, 7)", style=solid]; +"229 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/NNCFBatchNorm2d[_bn2]/batch_norm_0" -> "230 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/__add___0" [label="(1, 192, 7, 7)", style=solid]; +"230 EfficientNet/ModuleList[_blocks]/MBConvBlock[14]/__add___0" -> "231 EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/NNCFUserConv2dStaticSamePadding[_expand_conv]/conv2d_0" [label="(1, 192, 7, 7)", style=solid]; +"231 EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/NNCFUserConv2dStaticSamePadding[_expand_conv]/conv2d_0" -> "232 EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/NNCFBatchNorm2d[_bn0]/batch_norm_0" [label="(1, 1152, 7, 7)", style=solid]; +"232 EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/NNCFBatchNorm2d[_bn0]/batch_norm_0" -> "233 EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/MemoryEfficientSwish[_swish]/sigmoid_0" [label="(1, 1152, 7, 7)", style=solid]; +"232 EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/NNCFBatchNorm2d[_bn0]/batch_norm_0" -> "234 EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/MemoryEfficientSwish[_swish]/__mul___0" [label="(1, 1152, 7, 7)", style=solid]; +"233 EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/MemoryEfficientSwish[_swish]/sigmoid_0" -> "234 EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/MemoryEfficientSwish[_swish]/__mul___0" [label="(1, 1152, 7, 7)", style=solid]; +"234 EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/MemoryEfficientSwish[_swish]/__mul___0" -> "235 
EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/NNCFUserConv2dStaticSamePadding[_project_conv]/conv2d_0" [label="(1, 1152, 7, 7)", style=solid]; +"235 EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/NNCFUserConv2dStaticSamePadding[_project_conv]/conv2d_0" -> "236 EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/NNCFBatchNorm2d[_bn2]/batch_norm_0" [label="(1, 320, 7, 7)", style=solid]; +"236 EfficientNet/ModuleList[_blocks]/MBConvBlock[15]/NNCFBatchNorm2d[_bn2]/batch_norm_0" -> "237 EfficientNet/NNCFUserConv2dStaticSamePadding[_conv_head]/conv2d_0" [label="(1, 320, 7, 7)", style=solid]; +"237 EfficientNet/NNCFUserConv2dStaticSamePadding[_conv_head]/conv2d_0" -> "238 EfficientNet/NNCFBatchNorm2d[_bn1]/batch_norm_0" [label="(1, 1280, 7, 7)", style=solid]; +"238 EfficientNet/NNCFBatchNorm2d[_bn1]/batch_norm_0" -> "239 EfficientNet/MemoryEfficientSwish[_swish]/sigmoid_1" [label="(1, 1280, 7, 7)", style=solid]; +"238 EfficientNet/NNCFBatchNorm2d[_bn1]/batch_norm_0" -> "240 EfficientNet/MemoryEfficientSwish[_swish]/__mul___1" [label="(1, 1280, 7, 7)", style=solid]; +"239 EfficientNet/MemoryEfficientSwish[_swish]/sigmoid_1" -> "240 EfficientNet/MemoryEfficientSwish[_swish]/__mul___1" [label="(1, 1280, 7, 7)", style=solid]; +"240 EfficientNet/MemoryEfficientSwish[_swish]/__mul___1" -> "241 EfficientNet/AdaptiveAvgPool2d[_avg_pooling]/adaptive_avg_pool2d_0" [label="(1, 1280, 7, 7)", style=solid]; +"241 EfficientNet/AdaptiveAvgPool2d[_avg_pooling]/adaptive_avg_pool2d_0" -> "242 EfficientNet/flatten_0" [label="(1, 1280, 1, 1)", style=solid]; +"242 EfficientNet/flatten_0" -> "243 EfficientNet/Dropout[_dropout]/dropout_0" [label="(1, 1280)", style=solid]; +"243 EfficientNet/Dropout[_dropout]/dropout_0" -> "244 EfficientNet/NNCFLinear[_fc]/linear_0" [label="(1, 1280)", style=solid]; +"244 EfficientNet/NNCFLinear[_fc]/linear_0" -> "245 /nncf_model_output_0" [label="(1, 1000)", style=solid]; } diff --git a/tests/torch/data/reference_graphs/nas/squeezenet1_0_depth.dot b/tests/torch/data/reference_graphs/nas/squeezenet1_0_depth.dot index 3289d60451a..e84cfdce968 100644 --- a/tests/torch/data/reference_graphs/nas/squeezenet1_0_depth.dot +++ b/tests/torch/data/reference_graphs/nas/squeezenet1_0_depth.dot @@ -1,131 +1,96 @@ strict digraph { "0 /nncf_model_input_0" [id=0, label="nncf_model_input_#0", style=filled, type=nncf_model_input]; -"1 SqueezeNet/Sequential[features]/NNCFConv2d[0]/ModuleDict[pre_ops]/UpdateWeight[3]/ElasticKernelConv2DOp[op]/linear_0" [id=1, label="linear_#1", style=filled, type=linear]; -"2 SqueezeNet/Sequential[features]/NNCFConv2d[0]/ModuleDict[pre_ops]/UpdateWeight[3]/ElasticKernelConv2DOp[op]/view_0" [id=2, label="view_#2", style=filled, type=view]; -"3 SqueezeNet/Sequential[features]/NNCFConv2d[0]/ModuleDict[pre_ops]/UpdateWeight[3]/ElasticKernelConv2DOp[op]/view_1" [id=3, label="view_#3", style=filled, type=view]; -"4 SqueezeNet/Sequential[features]/NNCFConv2d[0]/ModuleDict[pre_ops]/UpdateWeight[3]/ElasticKernelConv2DOp[op]/__getitem___0" [id=4, label="__getitem___#4", style=filled, type=__getitem__]; -"5 SqueezeNet/Sequential[features]/NNCFConv2d[0]/ModuleDict[pre_ops]/UpdateWeight[3]/ElasticKernelConv2DOp[op]/contiguous_0" [id=5, label="contiguous_#5", style=filled, type=contiguous]; -"6 SqueezeNet/Sequential[features]/NNCFConv2d[0]/ModuleDict[pre_ops]/UpdateWeight[3]/ElasticKernelConv2DOp[op]/view_2" [id=6, label="view_#6", style=filled, type=view]; -"7 SqueezeNet/Sequential[features]/NNCFConv2d[0]/ModuleDict[pre_ops]/UpdateWeight[3]/ElasticKernelConv2DOp[op]/view_3" [id=7, 
label="view_#7", style=filled, type=view]; -"8 SqueezeNet/Sequential[features]/NNCFConv2d[0]/ModuleDict[pre_ops]/UpdateWeight[3]/ElasticKernelConv2DOp[op]/linear_1" [id=8, label="linear_#8", style=filled, type=linear]; -"9 SqueezeNet/Sequential[features]/NNCFConv2d[0]/ModuleDict[pre_ops]/UpdateWeight[3]/ElasticKernelConv2DOp[op]/view_4" [id=9, label="view_#9", style=filled, type=view]; -"10 SqueezeNet/Sequential[features]/NNCFConv2d[0]/ModuleDict[pre_ops]/UpdateWeight[3]/ElasticKernelConv2DOp[op]/view_5" [id=10, label="view_#10", style=filled, type=view]; -"11 SqueezeNet/Sequential[features]/NNCFConv2d[0]/conv2d_0" [color=lightblue, id=11, label="conv2d_OW96_G0_#1", style=filled, type=conv2d]; -"12 SqueezeNet/Sequential[features]/ReLU[1]/relu__0" [id=12, label="relu__IW96_OW96_#2", style=filled, type=relu_]; -"13 SqueezeNet/Sequential[features]/MaxPool2d[2]/max_pool2d_0" [id=13, label="max_pool2d_IW96_OW96_#3", style=filled, type=max_pool2d]; -"14 SqueezeNet/Sequential[features]/Fire[3]/NNCFConv2d[squeeze]/conv2d_0" [color=lightblue, id=14, label="conv2d_IW96_OW16_G1_#4", style=filled, type=conv2d]; -"15 SqueezeNet/Sequential[features]/Fire[4]/NNCFConv2d[expand1x1]/conv2d_0" [color=lightblue, id=15, label="conv2d_IW16_OW64_G5_#13", style=filled, type=conv2d]; -"16 SqueezeNet/Sequential[features]/Fire[4]/ReLU[expand1x1_activation]/relu__0" [id=16, label="relu__IW64_OW64_#14", style=filled, type=relu_]; -"17 SqueezeNet/Sequential[features]/Fire[4]/NNCFConv2d[expand3x3]/conv2d_0" [color=lightblue, id=17, label="conv2d_IW16_OW64_G6_#15", style=filled, type=conv2d]; -"18 SqueezeNet/Sequential[features]/Fire[4]/ReLU[expand3x3_activation]/relu__0" [id=18, label="relu__IW64_OW64_#16", style=filled, type=relu_]; -"19 SqueezeNet/Sequential[features]/Fire[4]/cat_0" [id=19, label="cat_IW[64, 64]_OW128_#17", style=filled, type=cat]; -"20 SqueezeNet/Sequential[features]/Fire[5]/NNCFConv2d[squeeze]/conv2d_0" [color=lightblue, id=20, label="conv2d_IW128_OW32_G7_#18", style=filled, type=conv2d]; -"21 SqueezeNet/Sequential[features]/Fire[5]/ReLU[squeeze_activation]/relu__0" [id=21, label="relu__IW32_OW32_#19", style=filled, type=relu_]; -"22 SqueezeNet/Sequential[features]/Fire[5]/NNCFConv2d[expand1x1]/conv2d_0" [color=lightblue, id=22, label="conv2d_IW32_OW128_G8_#20", style=filled, type=conv2d]; -"23 SqueezeNet/Sequential[features]/Fire[5]/ReLU[expand1x1_activation]/relu__0" [id=23, label="relu__IW128_OW128_#21", style=filled, type=relu_]; -"24 SqueezeNet/Sequential[features]/Fire[5]/NNCFConv2d[expand3x3]/conv2d_0" [color=lightblue, id=24, label="conv2d_IW32_OW128_G9_#22", style=filled, type=conv2d]; -"25 SqueezeNet/Sequential[features]/Fire[5]/ReLU[expand3x3_activation]/relu__0" [id=25, label="relu__IW128_OW128_#23", style=filled, type=relu_]; -"26 SqueezeNet/Sequential[features]/Fire[5]/cat_0" [id=26, label="cat_IW[128, 128]_OW256_#24", style=filled, type=cat]; -"27 SqueezeNet/Sequential[features]/MaxPool2d[6]/max_pool2d_0" [id=27, label="max_pool2d_IW256_OW256_#25", style=filled, type=max_pool2d]; -"28 SqueezeNet/Sequential[features]/Fire[7]/NNCFConv2d[squeeze]/conv2d_0" [color=lightblue, id=28, label="conv2d_IW256_OW32_G10_#26", style=filled, type=conv2d]; -"29 SqueezeNet/Sequential[features]/Fire[7]/ReLU[squeeze_activation]/relu__0" [id=29, label="relu__IW32_OW32_#27", style=filled, type=relu_]; -"30 SqueezeNet/Sequential[features]/Fire[7]/NNCFConv2d[expand1x1]/conv2d_0" [color=lightblue, id=30, label="conv2d_IW32_OW128_G11_#28", style=filled, type=conv2d]; -"31 
SqueezeNet/Sequential[features]/Fire[7]/ReLU[expand1x1_activation]/relu__0" [id=31, label="relu__IW128_OW128_#29", style=filled, type=relu_]; -"32 SqueezeNet/Sequential[features]/Fire[7]/NNCFConv2d[expand3x3]/conv2d_0" [color=lightblue, id=32, label="conv2d_IW32_OW128_G12_#30", style=filled, type=conv2d]; -"33 SqueezeNet/Sequential[features]/Fire[7]/ReLU[expand3x3_activation]/relu__0" [id=33, label="relu__IW128_OW128_#31", style=filled, type=relu_]; -"34 SqueezeNet/Sequential[features]/Fire[7]/cat_0" [id=34, label="cat_IW[128, 128]_OW256_#32", style=filled, type=cat]; -"35 SqueezeNet/Sequential[features]/Fire[8]/NNCFConv2d[squeeze]/conv2d_0" [color=lightblue, id=35, label="conv2d_IW256_OW48_G13_#33", style=filled, type=conv2d]; -"36 SqueezeNet/Sequential[features]/Fire[9]/NNCFConv2d[expand1x1]/conv2d_0" [color=lightblue, id=36, label="conv2d_IW48_OW192_G17_#42", style=filled, type=conv2d]; -"37 SqueezeNet/Sequential[features]/Fire[9]/ReLU[expand1x1_activation]/relu__0" [id=37, label="relu__IW192_OW192_#43", style=filled, type=relu_]; -"38 SqueezeNet/Sequential[features]/Fire[9]/NNCFConv2d[expand3x3]/conv2d_0" [color=lightblue, id=38, label="conv2d_IW48_OW192_G18_#44", style=filled, type=conv2d]; -"39 SqueezeNet/Sequential[features]/Fire[9]/ReLU[expand3x3_activation]/relu__0" [id=39, label="relu__IW192_OW192_#45", style=filled, type=relu_]; -"40 SqueezeNet/Sequential[features]/Fire[9]/cat_0" [id=40, label="cat_IW[192, 192]_OW384_#46", style=filled, type=cat]; -"41 SqueezeNet/Sequential[features]/Fire[10]/NNCFConv2d[squeeze]/conv2d_0" [color=lightblue, id=41, label="conv2d_IW384_OW64_G19_#47", style=filled, type=conv2d]; -"42 SqueezeNet/Sequential[features]/Fire[10]/ReLU[squeeze_activation]/relu__0" [id=42, label="relu__IW64_OW64_#48", style=filled, type=relu_]; -"43 SqueezeNet/Sequential[features]/Fire[10]/NNCFConv2d[expand1x1]/conv2d_0" [color=lightblue, id=43, label="conv2d_IW64_OW256_G20_#49", style=filled, type=conv2d]; -"44 SqueezeNet/Sequential[features]/Fire[10]/ReLU[expand1x1_activation]/relu__0" [id=44, label="relu__IW256_OW256_#50", style=filled, type=relu_]; -"45 SqueezeNet/Sequential[features]/Fire[10]/NNCFConv2d[expand3x3]/conv2d_0" [color=lightblue, id=45, label="conv2d_IW64_OW256_G21_#51", style=filled, type=conv2d]; -"46 SqueezeNet/Sequential[features]/Fire[10]/ReLU[expand3x3_activation]/relu__0" [id=46, label="relu__IW256_OW256_#52", style=filled, type=relu_]; -"47 SqueezeNet/Sequential[features]/Fire[10]/cat_0" [id=47, label="cat_IW[256, 256]_OW512_#53", style=filled, type=cat]; -"48 SqueezeNet/Sequential[features]/MaxPool2d[11]/max_pool2d_0" [id=48, label="max_pool2d_IW512_OW512_#54", style=filled, type=max_pool2d]; -"49 SqueezeNet/Sequential[features]/Fire[12]/NNCFConv2d[squeeze]/conv2d_0" [color=lightblue, id=49, label="conv2d_IW512_OW64_G22_#55", style=filled, type=conv2d]; -"50 SqueezeNet/Sequential[features]/Fire[12]/ReLU[squeeze_activation]/relu__0" [id=50, label="relu__IW64_OW64_#56", style=filled, type=relu_]; -"51 SqueezeNet/Sequential[features]/Fire[12]/NNCFConv2d[expand1x1]/conv2d_0" [color=lightblue, id=51, label="conv2d_IW64_OW256_G23_#57", style=filled, type=conv2d]; -"52 SqueezeNet/Sequential[features]/Fire[12]/ReLU[expand1x1_activation]/relu__0" [id=52, label="relu__IW256_OW256_#58", style=filled, type=relu_]; -"53 SqueezeNet/Sequential[features]/Fire[12]/NNCFConv2d[expand3x3]/conv2d_0" [color=lightblue, id=53, label="conv2d_IW64_OW256_G24_#59", style=filled, type=conv2d]; -"54 SqueezeNet/Sequential[features]/Fire[12]/ReLU[expand3x3_activation]/relu__0" 
[id=54, label="relu__IW256_OW256_#60", style=filled, type=relu_]; -"55 SqueezeNet/Sequential[features]/Fire[12]/cat_0" [id=55, label="cat_IW[256, 256]_OW512_#61", style=filled, type=cat]; -"56 SqueezeNet/Sequential[classifier]/Dropout[0]/dropout_0" [id=56, label="dropout_IW512_OW512_#62", style=filled, type=dropout]; -"57 SqueezeNet/Sequential[classifier]/NNCFConv2d[1]/conv2d_0" [color=lightblue, id=57, label="conv2d_IW512_#63", style=filled, type=conv2d]; -"58 SqueezeNet/Sequential[classifier]/ReLU[2]/relu__0" [id=58, label="relu__#64", style=filled, type=relu_]; -"59 SqueezeNet/Sequential[classifier]/AdaptiveAvgPool2d[3]/adaptive_avg_pool2d_0" [id=59, label="adaptive_avg_pool2d_#65", style=filled, type=adaptive_avg_pool2d]; -"60 SqueezeNet/view_0" [id=60, label="view_#66", style=filled, type=view]; -"61 /nncf_model_output_0" [id=61, label="nncf_model_output_#67", style=filled, type=nncf_model_output]; -"0 /nncf_model_input_0" -> "11 SqueezeNet/Sequential[features]/NNCFConv2d[0]/conv2d_0" [label="(1, 3, 32, 32)", style=solid]; -"1 SqueezeNet/Sequential[features]/NNCFConv2d[0]/ModuleDict[pre_ops]/UpdateWeight[3]/ElasticKernelConv2DOp[op]/linear_0" -> "2 SqueezeNet/Sequential[features]/NNCFConv2d[0]/ModuleDict[pre_ops]/UpdateWeight[3]/ElasticKernelConv2DOp[op]/view_0" [label="(288, 25)", style=solid]; -"2 SqueezeNet/Sequential[features]/NNCFConv2d[0]/ModuleDict[pre_ops]/UpdateWeight[3]/ElasticKernelConv2DOp[op]/view_0" -> "3 SqueezeNet/Sequential[features]/NNCFConv2d[0]/ModuleDict[pre_ops]/UpdateWeight[3]/ElasticKernelConv2DOp[op]/view_1" [label="(96, 3, 25)", style=solid]; -"3 SqueezeNet/Sequential[features]/NNCFConv2d[0]/ModuleDict[pre_ops]/UpdateWeight[3]/ElasticKernelConv2DOp[op]/view_1" -> "4 SqueezeNet/Sequential[features]/NNCFConv2d[0]/ModuleDict[pre_ops]/UpdateWeight[3]/ElasticKernelConv2DOp[op]/__getitem___0" [label="(96, 3, 5, 5)", style=solid]; -"4 SqueezeNet/Sequential[features]/NNCFConv2d[0]/ModuleDict[pre_ops]/UpdateWeight[3]/ElasticKernelConv2DOp[op]/__getitem___0" -> "5 SqueezeNet/Sequential[features]/NNCFConv2d[0]/ModuleDict[pre_ops]/UpdateWeight[3]/ElasticKernelConv2DOp[op]/contiguous_0" [label="(96, 3, 3, 3)", style=solid]; -"5 SqueezeNet/Sequential[features]/NNCFConv2d[0]/ModuleDict[pre_ops]/UpdateWeight[3]/ElasticKernelConv2DOp[op]/contiguous_0" -> "6 SqueezeNet/Sequential[features]/NNCFConv2d[0]/ModuleDict[pre_ops]/UpdateWeight[3]/ElasticKernelConv2DOp[op]/view_2" [label="(96, 3, 3, 3)", style=solid]; -"6 SqueezeNet/Sequential[features]/NNCFConv2d[0]/ModuleDict[pre_ops]/UpdateWeight[3]/ElasticKernelConv2DOp[op]/view_2" -> "7 SqueezeNet/Sequential[features]/NNCFConv2d[0]/ModuleDict[pre_ops]/UpdateWeight[3]/ElasticKernelConv2DOp[op]/view_3" [label="(96, 3, 9)", style=solid]; -"7 SqueezeNet/Sequential[features]/NNCFConv2d[0]/ModuleDict[pre_ops]/UpdateWeight[3]/ElasticKernelConv2DOp[op]/view_3" -> "8 SqueezeNet/Sequential[features]/NNCFConv2d[0]/ModuleDict[pre_ops]/UpdateWeight[3]/ElasticKernelConv2DOp[op]/linear_1" [label="(288, 9)", style=solid]; -"8 SqueezeNet/Sequential[features]/NNCFConv2d[0]/ModuleDict[pre_ops]/UpdateWeight[3]/ElasticKernelConv2DOp[op]/linear_1" -> "9 SqueezeNet/Sequential[features]/NNCFConv2d[0]/ModuleDict[pre_ops]/UpdateWeight[3]/ElasticKernelConv2DOp[op]/view_4" [label="(288, 9)", style=solid]; -"9 SqueezeNet/Sequential[features]/NNCFConv2d[0]/ModuleDict[pre_ops]/UpdateWeight[3]/ElasticKernelConv2DOp[op]/view_4" -> "10 SqueezeNet/Sequential[features]/NNCFConv2d[0]/ModuleDict[pre_ops]/UpdateWeight[3]/ElasticKernelConv2DOp[op]/view_5" [label="(96, 
3, 9)", style=solid]; -"10 SqueezeNet/Sequential[features]/NNCFConv2d[0]/ModuleDict[pre_ops]/UpdateWeight[3]/ElasticKernelConv2DOp[op]/view_5" -> "11 SqueezeNet/Sequential[features]/NNCFConv2d[0]/conv2d_0" [label="(96, 3, 3, 3)", style=solid]; -"11 SqueezeNet/Sequential[features]/NNCFConv2d[0]/conv2d_0" -> "12 SqueezeNet/Sequential[features]/ReLU[1]/relu__0" [label="(1, 96, 16, 16)", style=solid]; -"12 SqueezeNet/Sequential[features]/ReLU[1]/relu__0" -> "13 SqueezeNet/Sequential[features]/MaxPool2d[2]/max_pool2d_0" [label="(1, 96, 16, 16)", style=solid]; -"13 SqueezeNet/Sequential[features]/MaxPool2d[2]/max_pool2d_0" -> "14 SqueezeNet/Sequential[features]/Fire[3]/NNCFConv2d[squeeze]/conv2d_0" [label="(1, 96, 8, 8)", style=solid]; -"14 SqueezeNet/Sequential[features]/Fire[3]/NNCFConv2d[squeeze]/conv2d_0" -> "15 SqueezeNet/Sequential[features]/Fire[4]/NNCFConv2d[expand1x1]/conv2d_0" [label="(1, 16, 8, 8)", style=solid]; -"14 SqueezeNet/Sequential[features]/Fire[3]/NNCFConv2d[squeeze]/conv2d_0" -> "17 SqueezeNet/Sequential[features]/Fire[4]/NNCFConv2d[expand3x3]/conv2d_0" [label="(1, 16, 8, 8)", style=solid]; -"15 SqueezeNet/Sequential[features]/Fire[4]/NNCFConv2d[expand1x1]/conv2d_0" -> "16 SqueezeNet/Sequential[features]/Fire[4]/ReLU[expand1x1_activation]/relu__0" [label="(1, 64, 8, 8)", style=solid]; -"16 SqueezeNet/Sequential[features]/Fire[4]/ReLU[expand1x1_activation]/relu__0" -> "19 SqueezeNet/Sequential[features]/Fire[4]/cat_0" [label="(1, 64, 8, 8)", style=solid]; -"17 SqueezeNet/Sequential[features]/Fire[4]/NNCFConv2d[expand3x3]/conv2d_0" -> "18 SqueezeNet/Sequential[features]/Fire[4]/ReLU[expand3x3_activation]/relu__0" [label="(1, 64, 8, 8)", style=solid]; -"18 SqueezeNet/Sequential[features]/Fire[4]/ReLU[expand3x3_activation]/relu__0" -> "19 SqueezeNet/Sequential[features]/Fire[4]/cat_0" [label="(1, 64, 8, 8)", style=solid]; -"19 SqueezeNet/Sequential[features]/Fire[4]/cat_0" -> "20 SqueezeNet/Sequential[features]/Fire[5]/NNCFConv2d[squeeze]/conv2d_0" [label="(1, 128, 8, 8)", style=solid]; -"20 SqueezeNet/Sequential[features]/Fire[5]/NNCFConv2d[squeeze]/conv2d_0" -> "21 SqueezeNet/Sequential[features]/Fire[5]/ReLU[squeeze_activation]/relu__0" [label="(1, 32, 8, 8)", style=solid]; -"21 SqueezeNet/Sequential[features]/Fire[5]/ReLU[squeeze_activation]/relu__0" -> "22 SqueezeNet/Sequential[features]/Fire[5]/NNCFConv2d[expand1x1]/conv2d_0" [label="(1, 32, 8, 8)", style=solid]; -"21 SqueezeNet/Sequential[features]/Fire[5]/ReLU[squeeze_activation]/relu__0" -> "24 SqueezeNet/Sequential[features]/Fire[5]/NNCFConv2d[expand3x3]/conv2d_0" [label="(1, 32, 8, 8)", style=solid]; -"22 SqueezeNet/Sequential[features]/Fire[5]/NNCFConv2d[expand1x1]/conv2d_0" -> "23 SqueezeNet/Sequential[features]/Fire[5]/ReLU[expand1x1_activation]/relu__0" [label="(1, 128, 8, 8)", style=solid]; -"23 SqueezeNet/Sequential[features]/Fire[5]/ReLU[expand1x1_activation]/relu__0" -> "26 SqueezeNet/Sequential[features]/Fire[5]/cat_0" [label="(1, 128, 8, 8)", style=solid]; -"24 SqueezeNet/Sequential[features]/Fire[5]/NNCFConv2d[expand3x3]/conv2d_0" -> "25 SqueezeNet/Sequential[features]/Fire[5]/ReLU[expand3x3_activation]/relu__0" [label="(1, 128, 8, 8)", style=solid]; -"25 SqueezeNet/Sequential[features]/Fire[5]/ReLU[expand3x3_activation]/relu__0" -> "26 SqueezeNet/Sequential[features]/Fire[5]/cat_0" [label="(1, 128, 8, 8)", style=solid]; -"26 SqueezeNet/Sequential[features]/Fire[5]/cat_0" -> "27 SqueezeNet/Sequential[features]/MaxPool2d[6]/max_pool2d_0" [label="(1, 256, 8, 8)", style=solid]; -"27 
SqueezeNet/Sequential[features]/MaxPool2d[6]/max_pool2d_0" -> "28 SqueezeNet/Sequential[features]/Fire[7]/NNCFConv2d[squeeze]/conv2d_0" [label="(1, 256, 4, 4)", style=solid]; -"28 SqueezeNet/Sequential[features]/Fire[7]/NNCFConv2d[squeeze]/conv2d_0" -> "29 SqueezeNet/Sequential[features]/Fire[7]/ReLU[squeeze_activation]/relu__0" [label="(1, 32, 4, 4)", style=solid]; -"29 SqueezeNet/Sequential[features]/Fire[7]/ReLU[squeeze_activation]/relu__0" -> "30 SqueezeNet/Sequential[features]/Fire[7]/NNCFConv2d[expand1x1]/conv2d_0" [label="(1, 32, 4, 4)", style=solid]; -"29 SqueezeNet/Sequential[features]/Fire[7]/ReLU[squeeze_activation]/relu__0" -> "32 SqueezeNet/Sequential[features]/Fire[7]/NNCFConv2d[expand3x3]/conv2d_0" [label="(1, 32, 4, 4)", style=solid]; -"30 SqueezeNet/Sequential[features]/Fire[7]/NNCFConv2d[expand1x1]/conv2d_0" -> "31 SqueezeNet/Sequential[features]/Fire[7]/ReLU[expand1x1_activation]/relu__0" [label="(1, 128, 4, 4)", style=solid]; -"31 SqueezeNet/Sequential[features]/Fire[7]/ReLU[expand1x1_activation]/relu__0" -> "34 SqueezeNet/Sequential[features]/Fire[7]/cat_0" [label="(1, 128, 4, 4)", style=solid]; -"32 SqueezeNet/Sequential[features]/Fire[7]/NNCFConv2d[expand3x3]/conv2d_0" -> "33 SqueezeNet/Sequential[features]/Fire[7]/ReLU[expand3x3_activation]/relu__0" [label="(1, 128, 4, 4)", style=solid]; -"33 SqueezeNet/Sequential[features]/Fire[7]/ReLU[expand3x3_activation]/relu__0" -> "34 SqueezeNet/Sequential[features]/Fire[7]/cat_0" [label="(1, 128, 4, 4)", style=solid]; -"34 SqueezeNet/Sequential[features]/Fire[7]/cat_0" -> "35 SqueezeNet/Sequential[features]/Fire[8]/NNCFConv2d[squeeze]/conv2d_0" [label="(1, 256, 4, 4)", style=solid]; -"35 SqueezeNet/Sequential[features]/Fire[8]/NNCFConv2d[squeeze]/conv2d_0" -> "36 SqueezeNet/Sequential[features]/Fire[9]/NNCFConv2d[expand1x1]/conv2d_0" [label="(1, 48, 4, 4)", style=solid]; -"35 SqueezeNet/Sequential[features]/Fire[8]/NNCFConv2d[squeeze]/conv2d_0" -> "38 SqueezeNet/Sequential[features]/Fire[9]/NNCFConv2d[expand3x3]/conv2d_0" [label="(1, 48, 4, 4)", style=solid]; -"36 SqueezeNet/Sequential[features]/Fire[9]/NNCFConv2d[expand1x1]/conv2d_0" -> "37 SqueezeNet/Sequential[features]/Fire[9]/ReLU[expand1x1_activation]/relu__0" [label="(1, 192, 4, 4)", style=solid]; -"37 SqueezeNet/Sequential[features]/Fire[9]/ReLU[expand1x1_activation]/relu__0" -> "40 SqueezeNet/Sequential[features]/Fire[9]/cat_0" [label="(1, 192, 4, 4)", style=solid]; -"38 SqueezeNet/Sequential[features]/Fire[9]/NNCFConv2d[expand3x3]/conv2d_0" -> "39 SqueezeNet/Sequential[features]/Fire[9]/ReLU[expand3x3_activation]/relu__0" [label="(1, 192, 4, 4)", style=solid]; -"39 SqueezeNet/Sequential[features]/Fire[9]/ReLU[expand3x3_activation]/relu__0" -> "40 SqueezeNet/Sequential[features]/Fire[9]/cat_0" [label="(1, 192, 4, 4)", style=solid]; -"40 SqueezeNet/Sequential[features]/Fire[9]/cat_0" -> "41 SqueezeNet/Sequential[features]/Fire[10]/NNCFConv2d[squeeze]/conv2d_0" [label="(1, 384, 4, 4)", style=solid]; -"41 SqueezeNet/Sequential[features]/Fire[10]/NNCFConv2d[squeeze]/conv2d_0" -> "42 SqueezeNet/Sequential[features]/Fire[10]/ReLU[squeeze_activation]/relu__0" [label="(1, 64, 4, 4)", style=solid]; -"42 SqueezeNet/Sequential[features]/Fire[10]/ReLU[squeeze_activation]/relu__0" -> "43 SqueezeNet/Sequential[features]/Fire[10]/NNCFConv2d[expand1x1]/conv2d_0" [label="(1, 64, 4, 4)", style=solid]; -"42 SqueezeNet/Sequential[features]/Fire[10]/ReLU[squeeze_activation]/relu__0" -> "45 SqueezeNet/Sequential[features]/Fire[10]/NNCFConv2d[expand3x3]/conv2d_0" [label="(1, 64, 4, 4)", 
style=solid]; -"43 SqueezeNet/Sequential[features]/Fire[10]/NNCFConv2d[expand1x1]/conv2d_0" -> "44 SqueezeNet/Sequential[features]/Fire[10]/ReLU[expand1x1_activation]/relu__0" [label="(1, 256, 4, 4)", style=solid]; -"44 SqueezeNet/Sequential[features]/Fire[10]/ReLU[expand1x1_activation]/relu__0" -> "47 SqueezeNet/Sequential[features]/Fire[10]/cat_0" [label="(1, 256, 4, 4)", style=solid]; -"45 SqueezeNet/Sequential[features]/Fire[10]/NNCFConv2d[expand3x3]/conv2d_0" -> "46 SqueezeNet/Sequential[features]/Fire[10]/ReLU[expand3x3_activation]/relu__0" [label="(1, 256, 4, 4)", style=solid]; -"46 SqueezeNet/Sequential[features]/Fire[10]/ReLU[expand3x3_activation]/relu__0" -> "47 SqueezeNet/Sequential[features]/Fire[10]/cat_0" [label="(1, 256, 4, 4)", style=solid]; -"47 SqueezeNet/Sequential[features]/Fire[10]/cat_0" -> "48 SqueezeNet/Sequential[features]/MaxPool2d[11]/max_pool2d_0" [label="(1, 512, 4, 4)", style=solid]; -"48 SqueezeNet/Sequential[features]/MaxPool2d[11]/max_pool2d_0" -> "49 SqueezeNet/Sequential[features]/Fire[12]/NNCFConv2d[squeeze]/conv2d_0" [label="(1, 512, 2, 2)", style=solid]; -"49 SqueezeNet/Sequential[features]/Fire[12]/NNCFConv2d[squeeze]/conv2d_0" -> "50 SqueezeNet/Sequential[features]/Fire[12]/ReLU[squeeze_activation]/relu__0" [label="(1, 64, 2, 2)", style=solid]; -"50 SqueezeNet/Sequential[features]/Fire[12]/ReLU[squeeze_activation]/relu__0" -> "51 SqueezeNet/Sequential[features]/Fire[12]/NNCFConv2d[expand1x1]/conv2d_0" [label="(1, 64, 2, 2)", style=solid]; -"50 SqueezeNet/Sequential[features]/Fire[12]/ReLU[squeeze_activation]/relu__0" -> "53 SqueezeNet/Sequential[features]/Fire[12]/NNCFConv2d[expand3x3]/conv2d_0" [label="(1, 64, 2, 2)", style=solid]; -"51 SqueezeNet/Sequential[features]/Fire[12]/NNCFConv2d[expand1x1]/conv2d_0" -> "52 SqueezeNet/Sequential[features]/Fire[12]/ReLU[expand1x1_activation]/relu__0" [label="(1, 256, 2, 2)", style=solid]; -"52 SqueezeNet/Sequential[features]/Fire[12]/ReLU[expand1x1_activation]/relu__0" -> "55 SqueezeNet/Sequential[features]/Fire[12]/cat_0" [label="(1, 256, 2, 2)", style=solid]; -"53 SqueezeNet/Sequential[features]/Fire[12]/NNCFConv2d[expand3x3]/conv2d_0" -> "54 SqueezeNet/Sequential[features]/Fire[12]/ReLU[expand3x3_activation]/relu__0" [label="(1, 256, 2, 2)", style=solid]; -"54 SqueezeNet/Sequential[features]/Fire[12]/ReLU[expand3x3_activation]/relu__0" -> "55 SqueezeNet/Sequential[features]/Fire[12]/cat_0" [label="(1, 256, 2, 2)", style=solid]; -"55 SqueezeNet/Sequential[features]/Fire[12]/cat_0" -> "56 SqueezeNet/Sequential[classifier]/Dropout[0]/dropout_0" [label="(1, 512, 2, 2)", style=solid]; -"56 SqueezeNet/Sequential[classifier]/Dropout[0]/dropout_0" -> "57 SqueezeNet/Sequential[classifier]/NNCFConv2d[1]/conv2d_0" [label="(1, 512, 2, 2)", style=solid]; -"57 SqueezeNet/Sequential[classifier]/NNCFConv2d[1]/conv2d_0" -> "58 SqueezeNet/Sequential[classifier]/ReLU[2]/relu__0" [label="(1, 1000, 2, 2)", style=solid]; -"58 SqueezeNet/Sequential[classifier]/ReLU[2]/relu__0" -> "59 SqueezeNet/Sequential[classifier]/AdaptiveAvgPool2d[3]/adaptive_avg_pool2d_0" [label="(1, 1000, 2, 2)", style=solid]; -"59 SqueezeNet/Sequential[classifier]/AdaptiveAvgPool2d[3]/adaptive_avg_pool2d_0" -> "60 SqueezeNet/view_0" [label="(1, 1000, 1, 1)", style=solid]; -"60 SqueezeNet/view_0" -> "61 /nncf_model_output_0" [label="(1, 1000)", style=solid]; +"1 SqueezeNet/Sequential[features]/NNCFConv2d[0]/conv2d_0" [color=lightblue, id=1, label="conv2d_OW96_G0_#1", style=filled, type=conv2d]; +"2 SqueezeNet/Sequential[features]/ReLU[1]/relu__0" [id=2, 
label="relu__IW96_OW96_#2", style=filled, type=relu_]; +"3 SqueezeNet/Sequential[features]/MaxPool2d[2]/max_pool2d_0" [id=3, label="max_pool2d_IW96_OW96_#3", style=filled, type=max_pool2d]; +"4 SqueezeNet/Sequential[features]/Fire[3]/NNCFConv2d[squeeze]/conv2d_0" [color=lightblue, id=4, label="conv2d_IW96_OW16_G1_#4", style=filled, type=conv2d]; +"5 SqueezeNet/Sequential[features]/Fire[4]/NNCFConv2d[expand1x1]/conv2d_0" [color=lightblue, id=5, label="conv2d_IW16_OW64_G5_#13", style=filled, type=conv2d]; +"6 SqueezeNet/Sequential[features]/Fire[4]/ReLU[expand1x1_activation]/relu__0" [id=6, label="relu__IW64_OW64_#14", style=filled, type=relu_]; +"7 SqueezeNet/Sequential[features]/Fire[4]/NNCFConv2d[expand3x3]/conv2d_0" [color=lightblue, id=7, label="conv2d_IW16_OW64_G6_#15", style=filled, type=conv2d]; +"8 SqueezeNet/Sequential[features]/Fire[4]/ReLU[expand3x3_activation]/relu__0" [id=8, label="relu__IW64_OW64_#16", style=filled, type=relu_]; +"9 SqueezeNet/Sequential[features]/Fire[4]/cat_0" [id=9, label="cat_IW[64, 64]_OW128_#17", style=filled, type=cat]; +"10 SqueezeNet/Sequential[features]/Fire[5]/NNCFConv2d[squeeze]/conv2d_0" [color=lightblue, id=10, label="conv2d_IW128_OW32_G7_#18", style=filled, type=conv2d]; +"11 SqueezeNet/Sequential[features]/Fire[5]/ReLU[squeeze_activation]/relu__0" [id=11, label="relu__IW32_OW32_#19", style=filled, type=relu_]; +"12 SqueezeNet/Sequential[features]/Fire[5]/NNCFConv2d[expand1x1]/conv2d_0" [color=lightblue, id=12, label="conv2d_IW32_OW128_G8_#20", style=filled, type=conv2d]; +"13 SqueezeNet/Sequential[features]/Fire[5]/ReLU[expand1x1_activation]/relu__0" [id=13, label="relu__IW128_OW128_#21", style=filled, type=relu_]; +"14 SqueezeNet/Sequential[features]/Fire[5]/NNCFConv2d[expand3x3]/conv2d_0" [color=lightblue, id=14, label="conv2d_IW32_OW128_G9_#22", style=filled, type=conv2d]; +"15 SqueezeNet/Sequential[features]/Fire[5]/ReLU[expand3x3_activation]/relu__0" [id=15, label="relu__IW128_OW128_#23", style=filled, type=relu_]; +"16 SqueezeNet/Sequential[features]/Fire[5]/cat_0" [id=16, label="cat_IW[128, 128]_OW256_#24", style=filled, type=cat]; +"17 SqueezeNet/Sequential[features]/MaxPool2d[6]/max_pool2d_0" [id=17, label="max_pool2d_IW256_OW256_#25", style=filled, type=max_pool2d]; +"18 SqueezeNet/Sequential[features]/Fire[7]/NNCFConv2d[squeeze]/conv2d_0" [color=lightblue, id=18, label="conv2d_IW256_OW32_G10_#26", style=filled, type=conv2d]; +"19 SqueezeNet/Sequential[features]/Fire[7]/ReLU[squeeze_activation]/relu__0" [id=19, label="relu__IW32_OW32_#27", style=filled, type=relu_]; +"20 SqueezeNet/Sequential[features]/Fire[7]/NNCFConv2d[expand1x1]/conv2d_0" [color=lightblue, id=20, label="conv2d_IW32_OW128_G11_#28", style=filled, type=conv2d]; +"21 SqueezeNet/Sequential[features]/Fire[7]/ReLU[expand1x1_activation]/relu__0" [id=21, label="relu__IW128_OW128_#29", style=filled, type=relu_]; +"22 SqueezeNet/Sequential[features]/Fire[7]/NNCFConv2d[expand3x3]/conv2d_0" [color=lightblue, id=22, label="conv2d_IW32_OW128_G12_#30", style=filled, type=conv2d]; +"23 SqueezeNet/Sequential[features]/Fire[7]/ReLU[expand3x3_activation]/relu__0" [id=23, label="relu__IW128_OW128_#31", style=filled, type=relu_]; +"24 SqueezeNet/Sequential[features]/Fire[7]/cat_0" [id=24, label="cat_IW[128, 128]_OW256_#32", style=filled, type=cat]; +"25 SqueezeNet/Sequential[features]/Fire[8]/NNCFConv2d[squeeze]/conv2d_0" [color=lightblue, id=25, label="conv2d_IW256_OW48_G13_#33", style=filled, type=conv2d]; +"26 SqueezeNet/Sequential[features]/Fire[9]/NNCFConv2d[expand1x1]/conv2d_0" 
[color=lightblue, id=26, label="conv2d_IW48_OW192_G17_#42", style=filled, type=conv2d]; +"27 SqueezeNet/Sequential[features]/Fire[9]/ReLU[expand1x1_activation]/relu__0" [id=27, label="relu__IW192_OW192_#43", style=filled, type=relu_]; +"28 SqueezeNet/Sequential[features]/Fire[9]/NNCFConv2d[expand3x3]/conv2d_0" [color=lightblue, id=28, label="conv2d_IW48_OW192_G18_#44", style=filled, type=conv2d]; +"29 SqueezeNet/Sequential[features]/Fire[9]/ReLU[expand3x3_activation]/relu__0" [id=29, label="relu__IW192_OW192_#45", style=filled, type=relu_]; +"30 SqueezeNet/Sequential[features]/Fire[9]/cat_0" [id=30, label="cat_IW[192, 192]_OW384_#46", style=filled, type=cat]; +"31 SqueezeNet/Sequential[features]/Fire[10]/NNCFConv2d[squeeze]/conv2d_0" [color=lightblue, id=31, label="conv2d_IW384_OW64_G19_#47", style=filled, type=conv2d]; +"32 SqueezeNet/Sequential[features]/Fire[10]/ReLU[squeeze_activation]/relu__0" [id=32, label="relu__IW64_OW64_#48", style=filled, type=relu_]; +"33 SqueezeNet/Sequential[features]/Fire[10]/NNCFConv2d[expand1x1]/conv2d_0" [color=lightblue, id=33, label="conv2d_IW64_OW256_G20_#49", style=filled, type=conv2d]; +"34 SqueezeNet/Sequential[features]/Fire[10]/ReLU[expand1x1_activation]/relu__0" [id=34, label="relu__IW256_OW256_#50", style=filled, type=relu_]; +"35 SqueezeNet/Sequential[features]/Fire[10]/NNCFConv2d[expand3x3]/conv2d_0" [color=lightblue, id=35, label="conv2d_IW64_OW256_G21_#51", style=filled, type=conv2d]; +"36 SqueezeNet/Sequential[features]/Fire[10]/ReLU[expand3x3_activation]/relu__0" [id=36, label="relu__IW256_OW256_#52", style=filled, type=relu_]; +"37 SqueezeNet/Sequential[features]/Fire[10]/cat_0" [id=37, label="cat_IW[256, 256]_OW512_#53", style=filled, type=cat]; +"38 SqueezeNet/Sequential[features]/MaxPool2d[11]/max_pool2d_0" [id=38, label="max_pool2d_IW512_OW512_#54", style=filled, type=max_pool2d]; +"39 SqueezeNet/Sequential[classifier]/Dropout[0]/dropout_0" [id=39, label="dropout_IW512_OW512_#62", style=filled, type=dropout]; +"40 SqueezeNet/Sequential[classifier]/NNCFConv2d[1]/conv2d_0" [color=lightblue, id=40, label="conv2d_IW512_#63", style=filled, type=conv2d]; +"41 SqueezeNet/Sequential[classifier]/ReLU[2]/relu__0" [id=41, label="relu__#64", style=filled, type=relu_]; +"42 SqueezeNet/Sequential[classifier]/AdaptiveAvgPool2d[3]/adaptive_avg_pool2d_0" [id=42, label="adaptive_avg_pool2d_#65", style=filled, type=adaptive_avg_pool2d]; +"43 SqueezeNet/view_0" [id=43, label="view_#66", style=filled, type=view]; +"44 /nncf_model_output_0" [id=44, label="nncf_model_output_#67", style=filled, type=nncf_model_output]; +"0 /nncf_model_input_0" -> "1 SqueezeNet/Sequential[features]/NNCFConv2d[0]/conv2d_0" [label="(1, 3, 32, 32)", style=solid]; +"1 SqueezeNet/Sequential[features]/NNCFConv2d[0]/conv2d_0" -> "2 SqueezeNet/Sequential[features]/ReLU[1]/relu__0" [label="(1, 96, 13, 13)", style=solid]; +"2 SqueezeNet/Sequential[features]/ReLU[1]/relu__0" -> "3 SqueezeNet/Sequential[features]/MaxPool2d[2]/max_pool2d_0" [label="(1, 96, 13, 13)", style=solid]; +"3 SqueezeNet/Sequential[features]/MaxPool2d[2]/max_pool2d_0" -> "4 SqueezeNet/Sequential[features]/Fire[3]/NNCFConv2d[squeeze]/conv2d_0" [label="(1, 96, 6, 6)", style=solid]; +"4 SqueezeNet/Sequential[features]/Fire[3]/NNCFConv2d[squeeze]/conv2d_0" -> "5 SqueezeNet/Sequential[features]/Fire[4]/NNCFConv2d[expand1x1]/conv2d_0" [label="(1, 16, 6, 6)", style=solid]; +"4 SqueezeNet/Sequential[features]/Fire[3]/NNCFConv2d[squeeze]/conv2d_0" -> "7 SqueezeNet/Sequential[features]/Fire[4]/NNCFConv2d[expand3x3]/conv2d_0" 
[label="(1, 16, 6, 6)", style=solid]; +"5 SqueezeNet/Sequential[features]/Fire[4]/NNCFConv2d[expand1x1]/conv2d_0" -> "6 SqueezeNet/Sequential[features]/Fire[4]/ReLU[expand1x1_activation]/relu__0" [label="(1, 64, 6, 6)", style=solid]; +"6 SqueezeNet/Sequential[features]/Fire[4]/ReLU[expand1x1_activation]/relu__0" -> "9 SqueezeNet/Sequential[features]/Fire[4]/cat_0" [label="(1, 64, 6, 6)", style=solid]; +"7 SqueezeNet/Sequential[features]/Fire[4]/NNCFConv2d[expand3x3]/conv2d_0" -> "8 SqueezeNet/Sequential[features]/Fire[4]/ReLU[expand3x3_activation]/relu__0" [label="(1, 64, 6, 6)", style=solid]; +"8 SqueezeNet/Sequential[features]/Fire[4]/ReLU[expand3x3_activation]/relu__0" -> "9 SqueezeNet/Sequential[features]/Fire[4]/cat_0" [label="(1, 64, 6, 6)", style=solid]; +"9 SqueezeNet/Sequential[features]/Fire[4]/cat_0" -> "10 SqueezeNet/Sequential[features]/Fire[5]/NNCFConv2d[squeeze]/conv2d_0" [label="(1, 128, 6, 6)", style=solid]; +"10 SqueezeNet/Sequential[features]/Fire[5]/NNCFConv2d[squeeze]/conv2d_0" -> "11 SqueezeNet/Sequential[features]/Fire[5]/ReLU[squeeze_activation]/relu__0" [label="(1, 32, 6, 6)", style=solid]; +"11 SqueezeNet/Sequential[features]/Fire[5]/ReLU[squeeze_activation]/relu__0" -> "12 SqueezeNet/Sequential[features]/Fire[5]/NNCFConv2d[expand1x1]/conv2d_0" [label="(1, 32, 6, 6)", style=solid]; +"11 SqueezeNet/Sequential[features]/Fire[5]/ReLU[squeeze_activation]/relu__0" -> "14 SqueezeNet/Sequential[features]/Fire[5]/NNCFConv2d[expand3x3]/conv2d_0" [label="(1, 32, 6, 6)", style=solid]; +"12 SqueezeNet/Sequential[features]/Fire[5]/NNCFConv2d[expand1x1]/conv2d_0" -> "13 SqueezeNet/Sequential[features]/Fire[5]/ReLU[expand1x1_activation]/relu__0" [label="(1, 128, 6, 6)", style=solid]; +"13 SqueezeNet/Sequential[features]/Fire[5]/ReLU[expand1x1_activation]/relu__0" -> "16 SqueezeNet/Sequential[features]/Fire[5]/cat_0" [label="(1, 128, 6, 6)", style=solid]; +"14 SqueezeNet/Sequential[features]/Fire[5]/NNCFConv2d[expand3x3]/conv2d_0" -> "15 SqueezeNet/Sequential[features]/Fire[5]/ReLU[expand3x3_activation]/relu__0" [label="(1, 128, 6, 6)", style=solid]; +"15 SqueezeNet/Sequential[features]/Fire[5]/ReLU[expand3x3_activation]/relu__0" -> "16 SqueezeNet/Sequential[features]/Fire[5]/cat_0" [label="(1, 128, 6, 6)", style=solid]; +"16 SqueezeNet/Sequential[features]/Fire[5]/cat_0" -> "17 SqueezeNet/Sequential[features]/MaxPool2d[6]/max_pool2d_0" [label="(1, 256, 6, 6)", style=solid]; +"17 SqueezeNet/Sequential[features]/MaxPool2d[6]/max_pool2d_0" -> "18 SqueezeNet/Sequential[features]/Fire[7]/NNCFConv2d[squeeze]/conv2d_0" [label="(1, 256, 3, 3)", style=solid]; +"18 SqueezeNet/Sequential[features]/Fire[7]/NNCFConv2d[squeeze]/conv2d_0" -> "19 SqueezeNet/Sequential[features]/Fire[7]/ReLU[squeeze_activation]/relu__0" [label="(1, 32, 3, 3)", style=solid]; +"19 SqueezeNet/Sequential[features]/Fire[7]/ReLU[squeeze_activation]/relu__0" -> "20 SqueezeNet/Sequential[features]/Fire[7]/NNCFConv2d[expand1x1]/conv2d_0" [label="(1, 32, 3, 3)", style=solid]; +"19 SqueezeNet/Sequential[features]/Fire[7]/ReLU[squeeze_activation]/relu__0" -> "22 SqueezeNet/Sequential[features]/Fire[7]/NNCFConv2d[expand3x3]/conv2d_0" [label="(1, 32, 3, 3)", style=solid]; +"20 SqueezeNet/Sequential[features]/Fire[7]/NNCFConv2d[expand1x1]/conv2d_0" -> "21 SqueezeNet/Sequential[features]/Fire[7]/ReLU[expand1x1_activation]/relu__0" [label="(1, 128, 3, 3)", style=solid]; +"21 SqueezeNet/Sequential[features]/Fire[7]/ReLU[expand1x1_activation]/relu__0" -> "24 SqueezeNet/Sequential[features]/Fire[7]/cat_0" [label="(1, 128, 3, 3)", 
style=solid]; +"22 SqueezeNet/Sequential[features]/Fire[7]/NNCFConv2d[expand3x3]/conv2d_0" -> "23 SqueezeNet/Sequential[features]/Fire[7]/ReLU[expand3x3_activation]/relu__0" [label="(1, 128, 3, 3)", style=solid]; +"23 SqueezeNet/Sequential[features]/Fire[7]/ReLU[expand3x3_activation]/relu__0" -> "24 SqueezeNet/Sequential[features]/Fire[7]/cat_0" [label="(1, 128, 3, 3)", style=solid]; +"24 SqueezeNet/Sequential[features]/Fire[7]/cat_0" -> "25 SqueezeNet/Sequential[features]/Fire[8]/NNCFConv2d[squeeze]/conv2d_0" [label="(1, 256, 3, 3)", style=solid]; +"25 SqueezeNet/Sequential[features]/Fire[8]/NNCFConv2d[squeeze]/conv2d_0" -> "26 SqueezeNet/Sequential[features]/Fire[9]/NNCFConv2d[expand1x1]/conv2d_0" [label="(1, 48, 3, 3)", style=solid]; +"25 SqueezeNet/Sequential[features]/Fire[8]/NNCFConv2d[squeeze]/conv2d_0" -> "28 SqueezeNet/Sequential[features]/Fire[9]/NNCFConv2d[expand3x3]/conv2d_0" [label="(1, 48, 3, 3)", style=solid]; +"26 SqueezeNet/Sequential[features]/Fire[9]/NNCFConv2d[expand1x1]/conv2d_0" -> "27 SqueezeNet/Sequential[features]/Fire[9]/ReLU[expand1x1_activation]/relu__0" [label="(1, 192, 3, 3)", style=solid]; +"27 SqueezeNet/Sequential[features]/Fire[9]/ReLU[expand1x1_activation]/relu__0" -> "30 SqueezeNet/Sequential[features]/Fire[9]/cat_0" [label="(1, 192, 3, 3)", style=solid]; +"28 SqueezeNet/Sequential[features]/Fire[9]/NNCFConv2d[expand3x3]/conv2d_0" -> "29 SqueezeNet/Sequential[features]/Fire[9]/ReLU[expand3x3_activation]/relu__0" [label="(1, 192, 3, 3)", style=solid]; +"29 SqueezeNet/Sequential[features]/Fire[9]/ReLU[expand3x3_activation]/relu__0" -> "30 SqueezeNet/Sequential[features]/Fire[9]/cat_0" [label="(1, 192, 3, 3)", style=solid]; +"30 SqueezeNet/Sequential[features]/Fire[9]/cat_0" -> "31 SqueezeNet/Sequential[features]/Fire[10]/NNCFConv2d[squeeze]/conv2d_0" [label="(1, 384, 3, 3)", style=solid]; +"31 SqueezeNet/Sequential[features]/Fire[10]/NNCFConv2d[squeeze]/conv2d_0" -> "32 SqueezeNet/Sequential[features]/Fire[10]/ReLU[squeeze_activation]/relu__0" [label="(1, 64, 3, 3)", style=solid]; +"32 SqueezeNet/Sequential[features]/Fire[10]/ReLU[squeeze_activation]/relu__0" -> "33 SqueezeNet/Sequential[features]/Fire[10]/NNCFConv2d[expand1x1]/conv2d_0" [label="(1, 64, 3, 3)", style=solid]; +"32 SqueezeNet/Sequential[features]/Fire[10]/ReLU[squeeze_activation]/relu__0" -> "35 SqueezeNet/Sequential[features]/Fire[10]/NNCFConv2d[expand3x3]/conv2d_0" [label="(1, 64, 3, 3)", style=solid]; +"33 SqueezeNet/Sequential[features]/Fire[10]/NNCFConv2d[expand1x1]/conv2d_0" -> "34 SqueezeNet/Sequential[features]/Fire[10]/ReLU[expand1x1_activation]/relu__0" [label="(1, 256, 3, 3)", style=solid]; +"34 SqueezeNet/Sequential[features]/Fire[10]/ReLU[expand1x1_activation]/relu__0" -> "37 SqueezeNet/Sequential[features]/Fire[10]/cat_0" [label="(1, 256, 3, 3)", style=solid]; +"35 SqueezeNet/Sequential[features]/Fire[10]/NNCFConv2d[expand3x3]/conv2d_0" -> "36 SqueezeNet/Sequential[features]/Fire[10]/ReLU[expand3x3_activation]/relu__0" [label="(1, 256, 3, 3)", style=solid]; +"36 SqueezeNet/Sequential[features]/Fire[10]/ReLU[expand3x3_activation]/relu__0" -> "37 SqueezeNet/Sequential[features]/Fire[10]/cat_0" [label="(1, 256, 3, 3)", style=solid]; +"37 SqueezeNet/Sequential[features]/Fire[10]/cat_0" -> "38 SqueezeNet/Sequential[features]/MaxPool2d[11]/max_pool2d_0" [label="(1, 512, 3, 3)", style=solid]; +"38 SqueezeNet/Sequential[features]/MaxPool2d[11]/max_pool2d_0" -> "39 SqueezeNet/Sequential[classifier]/Dropout[0]/dropout_0" [label="(1, 512, 1, 1)", style=solid]; +"39 
SqueezeNet/Sequential[classifier]/Dropout[0]/dropout_0" -> "40 SqueezeNet/Sequential[classifier]/NNCFConv2d[1]/conv2d_0" [label="(1, 512, 1, 1)", style=solid]; +"40 SqueezeNet/Sequential[classifier]/NNCFConv2d[1]/conv2d_0" -> "41 SqueezeNet/Sequential[classifier]/ReLU[2]/relu__0" [label="(1, 1000, 1, 1)", style=solid]; +"41 SqueezeNet/Sequential[classifier]/ReLU[2]/relu__0" -> "42 SqueezeNet/Sequential[classifier]/AdaptiveAvgPool2d[3]/adaptive_avg_pool2d_0" [label="(1, 1000, 1, 1)", style=solid]; +"42 SqueezeNet/Sequential[classifier]/AdaptiveAvgPool2d[3]/adaptive_avg_pool2d_0" -> "43 SqueezeNet/view_0" [label="(1, 1000, 1, 1)", style=solid]; +"43 SqueezeNet/view_0" -> "44 /nncf_model_output_0" [label="(1, 1000)", style=solid]; } diff --git a/tests/torch/data/reference_graphs/nas/squeezenet1_0_kernel.dot b/tests/torch/data/reference_graphs/nas/squeezenet1_0_kernel.dot index 17e0a83099e..5e767d0857d 100644 --- a/tests/torch/data/reference_graphs/nas/squeezenet1_0_kernel.dot +++ b/tests/torch/data/reference_graphs/nas/squeezenet1_0_kernel.dot @@ -1,151 +1,145 @@ strict digraph { "0 /nncf_model_input_0" [id=0, label="nncf_model_input_#0", style=filled, type=nncf_model_input]; -"1 SqueezeNet/Sequential[features]/NNCFConv2d[0]/ModuleDict[pre_ops]/UpdateWeight[3]/ElasticKernelConv2DOp[op]/linear_0" [id=1, label="linear_#1", style=filled, type=linear]; -"2 SqueezeNet/Sequential[features]/NNCFConv2d[0]/ModuleDict[pre_ops]/UpdateWeight[3]/ElasticKernelConv2DOp[op]/view_0" [id=2, label="view_#2", style=filled, type=view]; -"3 SqueezeNet/Sequential[features]/NNCFConv2d[0]/ModuleDict[pre_ops]/UpdateWeight[3]/ElasticKernelConv2DOp[op]/view_1" [id=3, label="view_#3", style=filled, type=view]; -"4 SqueezeNet/Sequential[features]/NNCFConv2d[0]/conv2d_0" [color=lightblue, id=4, label="conv2d_OW96_G0_#1", style=filled, type=conv2d]; -"5 SqueezeNet/Sequential[features]/ReLU[1]/relu__0" [id=5, label="relu__IW96_OW96_#2", style=filled, type=relu_]; -"6 SqueezeNet/Sequential[features]/MaxPool2d[2]/max_pool2d_0" [id=6, label="max_pool2d_IW96_OW96_#3", style=filled, type=max_pool2d]; -"7 SqueezeNet/Sequential[features]/Fire[3]/NNCFConv2d[squeeze]/conv2d_0" [color=lightblue, id=7, label="conv2d_IW96_OW16_G1_#4", style=filled, type=conv2d]; -"8 SqueezeNet/Sequential[features]/Fire[3]/ReLU[squeeze_activation]/relu__0" [id=8, label="relu__IW16_OW16_#5", style=filled, type=relu_]; -"9 SqueezeNet/Sequential[features]/Fire[3]/NNCFConv2d[expand1x1]/conv2d_0" [color=lightblue, id=9, label="conv2d_IW16_OW64_G2_#6", style=filled, type=conv2d]; -"10 SqueezeNet/Sequential[features]/Fire[3]/ReLU[expand1x1_activation]/relu__0" [id=10, label="relu__IW64_OW64_#7", style=filled, type=relu_]; -"11 SqueezeNet/Sequential[features]/Fire[3]/NNCFConv2d[expand3x3]/conv2d_0" [color=lightblue, id=11, label="conv2d_IW16_OW64_G3_#8", style=filled, type=conv2d]; -"12 SqueezeNet/Sequential[features]/Fire[3]/ReLU[expand3x3_activation]/relu__0" [id=12, label="relu__IW64_OW64_#9", style=filled, type=relu_]; -"13 SqueezeNet/Sequential[features]/Fire[3]/cat_0" [id=13, label="cat_IW[64, 64]_OW128_#10", style=filled, type=cat]; -"14 SqueezeNet/Sequential[features]/Fire[4]/NNCFConv2d[squeeze]/conv2d_0" [color=lightblue, id=14, label="conv2d_IW128_OW16_G4_#11", style=filled, type=conv2d]; -"15 SqueezeNet/Sequential[features]/Fire[4]/ReLU[squeeze_activation]/relu__0" [id=15, label="relu__IW16_OW16_#12", style=filled, type=relu_]; -"16 SqueezeNet/Sequential[features]/Fire[4]/NNCFConv2d[expand1x1]/conv2d_0" [color=lightblue, id=16, 
label="conv2d_IW16_OW64_G5_#13", style=filled, type=conv2d]; -"17 SqueezeNet/Sequential[features]/Fire[4]/ReLU[expand1x1_activation]/relu__0" [id=17, label="relu__IW64_OW64_#14", style=filled, type=relu_]; -"18 SqueezeNet/Sequential[features]/Fire[4]/NNCFConv2d[expand3x3]/conv2d_0" [color=lightblue, id=18, label="conv2d_IW16_OW64_G6_#15", style=filled, type=conv2d]; -"19 SqueezeNet/Sequential[features]/Fire[4]/ReLU[expand3x3_activation]/relu__0" [id=19, label="relu__IW64_OW64_#16", style=filled, type=relu_]; -"20 SqueezeNet/Sequential[features]/Fire[4]/cat_0" [id=20, label="cat_IW[64, 64]_OW128_#17", style=filled, type=cat]; -"21 SqueezeNet/Sequential[features]/Fire[5]/NNCFConv2d[squeeze]/conv2d_0" [color=lightblue, id=21, label="conv2d_IW128_OW32_G7_#18", style=filled, type=conv2d]; -"22 SqueezeNet/Sequential[features]/Fire[5]/ReLU[squeeze_activation]/relu__0" [id=22, label="relu__IW32_OW32_#19", style=filled, type=relu_]; -"23 SqueezeNet/Sequential[features]/Fire[5]/NNCFConv2d[expand1x1]/conv2d_0" [color=lightblue, id=23, label="conv2d_IW32_OW128_G8_#20", style=filled, type=conv2d]; -"24 SqueezeNet/Sequential[features]/Fire[5]/ReLU[expand1x1_activation]/relu__0" [id=24, label="relu__IW128_OW128_#21", style=filled, type=relu_]; -"25 SqueezeNet/Sequential[features]/Fire[5]/NNCFConv2d[expand3x3]/conv2d_0" [color=lightblue, id=25, label="conv2d_IW32_OW128_G9_#22", style=filled, type=conv2d]; -"26 SqueezeNet/Sequential[features]/Fire[5]/ReLU[expand3x3_activation]/relu__0" [id=26, label="relu__IW128_OW128_#23", style=filled, type=relu_]; -"27 SqueezeNet/Sequential[features]/Fire[5]/cat_0" [id=27, label="cat_IW[128, 128]_OW256_#24", style=filled, type=cat]; -"28 SqueezeNet/Sequential[features]/MaxPool2d[6]/max_pool2d_0" [id=28, label="max_pool2d_IW256_OW256_#25", style=filled, type=max_pool2d]; -"29 SqueezeNet/Sequential[features]/Fire[7]/NNCFConv2d[squeeze]/conv2d_0" [color=lightblue, id=29, label="conv2d_IW256_OW32_G10_#26", style=filled, type=conv2d]; -"30 SqueezeNet/Sequential[features]/Fire[7]/ReLU[squeeze_activation]/relu__0" [id=30, label="relu__IW32_OW32_#27", style=filled, type=relu_]; -"31 SqueezeNet/Sequential[features]/Fire[7]/NNCFConv2d[expand1x1]/conv2d_0" [color=lightblue, id=31, label="conv2d_IW32_OW128_G11_#28", style=filled, type=conv2d]; -"32 SqueezeNet/Sequential[features]/Fire[7]/ReLU[expand1x1_activation]/relu__0" [id=32, label="relu__IW128_OW128_#29", style=filled, type=relu_]; -"33 SqueezeNet/Sequential[features]/Fire[7]/NNCFConv2d[expand3x3]/conv2d_0" [color=lightblue, id=33, label="conv2d_IW32_OW128_G12_#30", style=filled, type=conv2d]; -"34 SqueezeNet/Sequential[features]/Fire[7]/ReLU[expand3x3_activation]/relu__0" [id=34, label="relu__IW128_OW128_#31", style=filled, type=relu_]; -"35 SqueezeNet/Sequential[features]/Fire[7]/cat_0" [id=35, label="cat_IW[128, 128]_OW256_#32", style=filled, type=cat]; -"36 SqueezeNet/Sequential[features]/Fire[8]/NNCFConv2d[squeeze]/conv2d_0" [color=lightblue, id=36, label="conv2d_IW256_OW48_G13_#33", style=filled, type=conv2d]; -"37 SqueezeNet/Sequential[features]/Fire[8]/ReLU[squeeze_activation]/relu__0" [id=37, label="relu__IW48_OW48_#34", style=filled, type=relu_]; -"38 SqueezeNet/Sequential[features]/Fire[8]/NNCFConv2d[expand1x1]/conv2d_0" [color=lightblue, id=38, label="conv2d_IW48_OW192_G14_#35", style=filled, type=conv2d]; -"39 SqueezeNet/Sequential[features]/Fire[8]/ReLU[expand1x1_activation]/relu__0" [id=39, label="relu__IW192_OW192_#36", style=filled, type=relu_]; -"40 
SqueezeNet/Sequential[features]/Fire[8]/NNCFConv2d[expand3x3]/conv2d_0" [color=lightblue, id=40, label="conv2d_IW48_OW192_G15_#37", style=filled, type=conv2d]; -"41 SqueezeNet/Sequential[features]/Fire[8]/ReLU[expand3x3_activation]/relu__0" [id=41, label="relu__IW192_OW192_#38", style=filled, type=relu_]; -"42 SqueezeNet/Sequential[features]/Fire[8]/cat_0" [id=42, label="cat_IW[192, 192]_OW384_#39", style=filled, type=cat]; -"43 SqueezeNet/Sequential[features]/Fire[9]/NNCFConv2d[squeeze]/conv2d_0" [color=lightblue, id=43, label="conv2d_IW384_OW48_G16_#40", style=filled, type=conv2d]; -"44 SqueezeNet/Sequential[features]/Fire[9]/ReLU[squeeze_activation]/relu__0" [id=44, label="relu__IW48_OW48_#41", style=filled, type=relu_]; -"45 SqueezeNet/Sequential[features]/Fire[9]/NNCFConv2d[expand1x1]/conv2d_0" [color=lightblue, id=45, label="conv2d_IW48_OW192_G17_#42", style=filled, type=conv2d]; -"46 SqueezeNet/Sequential[features]/Fire[9]/ReLU[expand1x1_activation]/relu__0" [id=46, label="relu__IW192_OW192_#43", style=filled, type=relu_]; -"47 SqueezeNet/Sequential[features]/Fire[9]/NNCFConv2d[expand3x3]/conv2d_0" [color=lightblue, id=47, label="conv2d_IW48_OW192_G18_#44", style=filled, type=conv2d]; -"48 SqueezeNet/Sequential[features]/Fire[9]/ReLU[expand3x3_activation]/relu__0" [id=48, label="relu__IW192_OW192_#45", style=filled, type=relu_]; -"49 SqueezeNet/Sequential[features]/Fire[9]/cat_0" [id=49, label="cat_IW[192, 192]_OW384_#46", style=filled, type=cat]; -"50 SqueezeNet/Sequential[features]/Fire[10]/NNCFConv2d[squeeze]/conv2d_0" [color=lightblue, id=50, label="conv2d_IW384_OW64_G19_#47", style=filled, type=conv2d]; -"51 SqueezeNet/Sequential[features]/Fire[10]/ReLU[squeeze_activation]/relu__0" [id=51, label="relu__IW64_OW64_#48", style=filled, type=relu_]; -"52 SqueezeNet/Sequential[features]/Fire[10]/NNCFConv2d[expand1x1]/conv2d_0" [color=lightblue, id=52, label="conv2d_IW64_OW256_G20_#49", style=filled, type=conv2d]; -"53 SqueezeNet/Sequential[features]/Fire[10]/ReLU[expand1x1_activation]/relu__0" [id=53, label="relu__IW256_OW256_#50", style=filled, type=relu_]; -"54 SqueezeNet/Sequential[features]/Fire[10]/NNCFConv2d[expand3x3]/conv2d_0" [color=lightblue, id=54, label="conv2d_IW64_OW256_G21_#51", style=filled, type=conv2d]; -"55 SqueezeNet/Sequential[features]/Fire[10]/ReLU[expand3x3_activation]/relu__0" [id=55, label="relu__IW256_OW256_#52", style=filled, type=relu_]; -"56 SqueezeNet/Sequential[features]/Fire[10]/cat_0" [id=56, label="cat_IW[256, 256]_OW512_#53", style=filled, type=cat]; -"57 SqueezeNet/Sequential[features]/MaxPool2d[11]/max_pool2d_0" [id=57, label="max_pool2d_IW512_OW512_#54", style=filled, type=max_pool2d]; -"58 SqueezeNet/Sequential[features]/Fire[12]/NNCFConv2d[squeeze]/conv2d_0" [color=lightblue, id=58, label="conv2d_IW512_OW64_G22_#55", style=filled, type=conv2d]; -"59 SqueezeNet/Sequential[features]/Fire[12]/ReLU[squeeze_activation]/relu__0" [id=59, label="relu__IW64_OW64_#56", style=filled, type=relu_]; -"60 SqueezeNet/Sequential[features]/Fire[12]/NNCFConv2d[expand1x1]/conv2d_0" [color=lightblue, id=60, label="conv2d_IW64_OW256_G23_#57", style=filled, type=conv2d]; -"61 SqueezeNet/Sequential[features]/Fire[12]/ReLU[expand1x1_activation]/relu__0" [id=61, label="relu__IW256_OW256_#58", style=filled, type=relu_]; -"62 SqueezeNet/Sequential[features]/Fire[12]/NNCFConv2d[expand3x3]/conv2d_0" [color=lightblue, id=62, label="conv2d_IW64_OW256_G24_#59", style=filled, type=conv2d]; -"63 SqueezeNet/Sequential[features]/Fire[12]/ReLU[expand3x3_activation]/relu__0" 
[id=63, label="relu__IW256_OW256_#60", style=filled, type=relu_]; -"64 SqueezeNet/Sequential[features]/Fire[12]/cat_0" [id=64, label="cat_IW[256, 256]_OW512_#61", style=filled, type=cat]; -"65 SqueezeNet/Sequential[classifier]/Dropout[0]/dropout_0" [id=65, label="dropout_IW512_OW512_#62", style=filled, type=dropout]; -"66 SqueezeNet/Sequential[classifier]/NNCFConv2d[1]/conv2d_0" [color=lightblue, id=66, label="conv2d_IW512_#63", style=filled, type=conv2d]; -"67 SqueezeNet/Sequential[classifier]/ReLU[2]/relu__0" [id=67, label="relu__#64", style=filled, type=relu_]; -"68 SqueezeNet/Sequential[classifier]/AdaptiveAvgPool2d[3]/adaptive_avg_pool2d_0" [id=68, label="adaptive_avg_pool2d_#65", style=filled, type=adaptive_avg_pool2d]; -"69 SqueezeNet/view_0" [id=69, label="view_#66", style=filled, type=view]; -"70 /nncf_model_output_0" [id=70, label="nncf_model_output_#67", style=filled, type=nncf_model_output]; -"0 /nncf_model_input_0" -> "4 SqueezeNet/Sequential[features]/NNCFConv2d[0]/conv2d_0" [label="(1, 3, 32, 32)", style=solid]; -"1 SqueezeNet/Sequential[features]/NNCFConv2d[0]/ModuleDict[pre_ops]/UpdateWeight[3]/ElasticKernelConv2DOp[op]/linear_0" -> "2 SqueezeNet/Sequential[features]/NNCFConv2d[0]/ModuleDict[pre_ops]/UpdateWeight[3]/ElasticKernelConv2DOp[op]/view_0" [label="(288, 25)", style=solid]; -"2 SqueezeNet/Sequential[features]/NNCFConv2d[0]/ModuleDict[pre_ops]/UpdateWeight[3]/ElasticKernelConv2DOp[op]/view_0" -> "3 SqueezeNet/Sequential[features]/NNCFConv2d[0]/ModuleDict[pre_ops]/UpdateWeight[3]/ElasticKernelConv2DOp[op]/view_1" [label="(96, 3, 25)", style=solid]; -"3 SqueezeNet/Sequential[features]/NNCFConv2d[0]/ModuleDict[pre_ops]/UpdateWeight[3]/ElasticKernelConv2DOp[op]/view_1" -> "4 SqueezeNet/Sequential[features]/NNCFConv2d[0]/conv2d_0" [label="(96, 3, 5, 5)", style=solid]; -"4 SqueezeNet/Sequential[features]/NNCFConv2d[0]/conv2d_0" -> "5 SqueezeNet/Sequential[features]/ReLU[1]/relu__0" [label="(1, 96, 16, 16)", style=solid]; -"5 SqueezeNet/Sequential[features]/ReLU[1]/relu__0" -> "6 SqueezeNet/Sequential[features]/MaxPool2d[2]/max_pool2d_0" [label="(1, 96, 16, 16)", style=solid]; -"6 SqueezeNet/Sequential[features]/MaxPool2d[2]/max_pool2d_0" -> "7 SqueezeNet/Sequential[features]/Fire[3]/NNCFConv2d[squeeze]/conv2d_0" [label="(1, 96, 8, 8)", style=solid]; -"7 SqueezeNet/Sequential[features]/Fire[3]/NNCFConv2d[squeeze]/conv2d_0" -> "8 SqueezeNet/Sequential[features]/Fire[3]/ReLU[squeeze_activation]/relu__0" [label="(1, 16, 8, 8)", style=solid]; -"8 SqueezeNet/Sequential[features]/Fire[3]/ReLU[squeeze_activation]/relu__0" -> "9 SqueezeNet/Sequential[features]/Fire[3]/NNCFConv2d[expand1x1]/conv2d_0" [label="(1, 16, 8, 8)", style=solid]; -"8 SqueezeNet/Sequential[features]/Fire[3]/ReLU[squeeze_activation]/relu__0" -> "11 SqueezeNet/Sequential[features]/Fire[3]/NNCFConv2d[expand3x3]/conv2d_0" [label="(1, 16, 8, 8)", style=solid]; -"9 SqueezeNet/Sequential[features]/Fire[3]/NNCFConv2d[expand1x1]/conv2d_0" -> "10 SqueezeNet/Sequential[features]/Fire[3]/ReLU[expand1x1_activation]/relu__0" [label="(1, 64, 8, 8)", style=solid]; -"10 SqueezeNet/Sequential[features]/Fire[3]/ReLU[expand1x1_activation]/relu__0" -> "13 SqueezeNet/Sequential[features]/Fire[3]/cat_0" [label="(1, 64, 8, 8)", style=solid]; -"11 SqueezeNet/Sequential[features]/Fire[3]/NNCFConv2d[expand3x3]/conv2d_0" -> "12 SqueezeNet/Sequential[features]/Fire[3]/ReLU[expand3x3_activation]/relu__0" [label="(1, 64, 8, 8)", style=solid]; -"12 SqueezeNet/Sequential[features]/Fire[3]/ReLU[expand3x3_activation]/relu__0" -> "13 
SqueezeNet/Sequential[features]/Fire[3]/cat_0" [label="(1, 64, 8, 8)", style=solid]; -"13 SqueezeNet/Sequential[features]/Fire[3]/cat_0" -> "14 SqueezeNet/Sequential[features]/Fire[4]/NNCFConv2d[squeeze]/conv2d_0" [label="(1, 128, 8, 8)", style=solid]; -"14 SqueezeNet/Sequential[features]/Fire[4]/NNCFConv2d[squeeze]/conv2d_0" -> "15 SqueezeNet/Sequential[features]/Fire[4]/ReLU[squeeze_activation]/relu__0" [label="(1, 16, 8, 8)", style=solid]; -"15 SqueezeNet/Sequential[features]/Fire[4]/ReLU[squeeze_activation]/relu__0" -> "16 SqueezeNet/Sequential[features]/Fire[4]/NNCFConv2d[expand1x1]/conv2d_0" [label="(1, 16, 8, 8)", style=solid]; -"15 SqueezeNet/Sequential[features]/Fire[4]/ReLU[squeeze_activation]/relu__0" -> "18 SqueezeNet/Sequential[features]/Fire[4]/NNCFConv2d[expand3x3]/conv2d_0" [label="(1, 16, 8, 8)", style=solid]; -"16 SqueezeNet/Sequential[features]/Fire[4]/NNCFConv2d[expand1x1]/conv2d_0" -> "17 SqueezeNet/Sequential[features]/Fire[4]/ReLU[expand1x1_activation]/relu__0" [label="(1, 64, 8, 8)", style=solid]; -"17 SqueezeNet/Sequential[features]/Fire[4]/ReLU[expand1x1_activation]/relu__0" -> "20 SqueezeNet/Sequential[features]/Fire[4]/cat_0" [label="(1, 64, 8, 8)", style=solid]; -"18 SqueezeNet/Sequential[features]/Fire[4]/NNCFConv2d[expand3x3]/conv2d_0" -> "19 SqueezeNet/Sequential[features]/Fire[4]/ReLU[expand3x3_activation]/relu__0" [label="(1, 64, 8, 8)", style=solid]; -"19 SqueezeNet/Sequential[features]/Fire[4]/ReLU[expand3x3_activation]/relu__0" -> "20 SqueezeNet/Sequential[features]/Fire[4]/cat_0" [label="(1, 64, 8, 8)", style=solid]; -"20 SqueezeNet/Sequential[features]/Fire[4]/cat_0" -> "21 SqueezeNet/Sequential[features]/Fire[5]/NNCFConv2d[squeeze]/conv2d_0" [label="(1, 128, 8, 8)", style=solid]; -"21 SqueezeNet/Sequential[features]/Fire[5]/NNCFConv2d[squeeze]/conv2d_0" -> "22 SqueezeNet/Sequential[features]/Fire[5]/ReLU[squeeze_activation]/relu__0" [label="(1, 32, 8, 8)", style=solid]; -"22 SqueezeNet/Sequential[features]/Fire[5]/ReLU[squeeze_activation]/relu__0" -> "23 SqueezeNet/Sequential[features]/Fire[5]/NNCFConv2d[expand1x1]/conv2d_0" [label="(1, 32, 8, 8)", style=solid]; -"22 SqueezeNet/Sequential[features]/Fire[5]/ReLU[squeeze_activation]/relu__0" -> "25 SqueezeNet/Sequential[features]/Fire[5]/NNCFConv2d[expand3x3]/conv2d_0" [label="(1, 32, 8, 8)", style=solid]; -"23 SqueezeNet/Sequential[features]/Fire[5]/NNCFConv2d[expand1x1]/conv2d_0" -> "24 SqueezeNet/Sequential[features]/Fire[5]/ReLU[expand1x1_activation]/relu__0" [label="(1, 128, 8, 8)", style=solid]; -"24 SqueezeNet/Sequential[features]/Fire[5]/ReLU[expand1x1_activation]/relu__0" -> "27 SqueezeNet/Sequential[features]/Fire[5]/cat_0" [label="(1, 128, 8, 8)", style=solid]; -"25 SqueezeNet/Sequential[features]/Fire[5]/NNCFConv2d[expand3x3]/conv2d_0" -> "26 SqueezeNet/Sequential[features]/Fire[5]/ReLU[expand3x3_activation]/relu__0" [label="(1, 128, 8, 8)", style=solid]; -"26 SqueezeNet/Sequential[features]/Fire[5]/ReLU[expand3x3_activation]/relu__0" -> "27 SqueezeNet/Sequential[features]/Fire[5]/cat_0" [label="(1, 128, 8, 8)", style=solid]; -"27 SqueezeNet/Sequential[features]/Fire[5]/cat_0" -> "28 SqueezeNet/Sequential[features]/MaxPool2d[6]/max_pool2d_0" [label="(1, 256, 8, 8)", style=solid]; -"28 SqueezeNet/Sequential[features]/MaxPool2d[6]/max_pool2d_0" -> "29 SqueezeNet/Sequential[features]/Fire[7]/NNCFConv2d[squeeze]/conv2d_0" [label="(1, 256, 4, 4)", style=solid]; -"29 SqueezeNet/Sequential[features]/Fire[7]/NNCFConv2d[squeeze]/conv2d_0" -> "30 
SqueezeNet/Sequential[features]/Fire[7]/ReLU[squeeze_activation]/relu__0" [label="(1, 32, 4, 4)", style=solid]; -"30 SqueezeNet/Sequential[features]/Fire[7]/ReLU[squeeze_activation]/relu__0" -> "31 SqueezeNet/Sequential[features]/Fire[7]/NNCFConv2d[expand1x1]/conv2d_0" [label="(1, 32, 4, 4)", style=solid]; -"30 SqueezeNet/Sequential[features]/Fire[7]/ReLU[squeeze_activation]/relu__0" -> "33 SqueezeNet/Sequential[features]/Fire[7]/NNCFConv2d[expand3x3]/conv2d_0" [label="(1, 32, 4, 4)", style=solid]; -"31 SqueezeNet/Sequential[features]/Fire[7]/NNCFConv2d[expand1x1]/conv2d_0" -> "32 SqueezeNet/Sequential[features]/Fire[7]/ReLU[expand1x1_activation]/relu__0" [label="(1, 128, 4, 4)", style=solid]; -"32 SqueezeNet/Sequential[features]/Fire[7]/ReLU[expand1x1_activation]/relu__0" -> "35 SqueezeNet/Sequential[features]/Fire[7]/cat_0" [label="(1, 128, 4, 4)", style=solid]; -"33 SqueezeNet/Sequential[features]/Fire[7]/NNCFConv2d[expand3x3]/conv2d_0" -> "34 SqueezeNet/Sequential[features]/Fire[7]/ReLU[expand3x3_activation]/relu__0" [label="(1, 128, 4, 4)", style=solid]; -"34 SqueezeNet/Sequential[features]/Fire[7]/ReLU[expand3x3_activation]/relu__0" -> "35 SqueezeNet/Sequential[features]/Fire[7]/cat_0" [label="(1, 128, 4, 4)", style=solid]; -"35 SqueezeNet/Sequential[features]/Fire[7]/cat_0" -> "36 SqueezeNet/Sequential[features]/Fire[8]/NNCFConv2d[squeeze]/conv2d_0" [label="(1, 256, 4, 4)", style=solid]; -"36 SqueezeNet/Sequential[features]/Fire[8]/NNCFConv2d[squeeze]/conv2d_0" -> "37 SqueezeNet/Sequential[features]/Fire[8]/ReLU[squeeze_activation]/relu__0" [label="(1, 48, 4, 4)", style=solid]; -"37 SqueezeNet/Sequential[features]/Fire[8]/ReLU[squeeze_activation]/relu__0" -> "38 SqueezeNet/Sequential[features]/Fire[8]/NNCFConv2d[expand1x1]/conv2d_0" [label="(1, 48, 4, 4)", style=solid]; -"37 SqueezeNet/Sequential[features]/Fire[8]/ReLU[squeeze_activation]/relu__0" -> "40 SqueezeNet/Sequential[features]/Fire[8]/NNCFConv2d[expand3x3]/conv2d_0" [label="(1, 48, 4, 4)", style=solid]; -"38 SqueezeNet/Sequential[features]/Fire[8]/NNCFConv2d[expand1x1]/conv2d_0" -> "39 SqueezeNet/Sequential[features]/Fire[8]/ReLU[expand1x1_activation]/relu__0" [label="(1, 192, 4, 4)", style=solid]; -"39 SqueezeNet/Sequential[features]/Fire[8]/ReLU[expand1x1_activation]/relu__0" -> "42 SqueezeNet/Sequential[features]/Fire[8]/cat_0" [label="(1, 192, 4, 4)", style=solid]; -"40 SqueezeNet/Sequential[features]/Fire[8]/NNCFConv2d[expand3x3]/conv2d_0" -> "41 SqueezeNet/Sequential[features]/Fire[8]/ReLU[expand3x3_activation]/relu__0" [label="(1, 192, 4, 4)", style=solid]; -"41 SqueezeNet/Sequential[features]/Fire[8]/ReLU[expand3x3_activation]/relu__0" -> "42 SqueezeNet/Sequential[features]/Fire[8]/cat_0" [label="(1, 192, 4, 4)", style=solid]; -"42 SqueezeNet/Sequential[features]/Fire[8]/cat_0" -> "43 SqueezeNet/Sequential[features]/Fire[9]/NNCFConv2d[squeeze]/conv2d_0" [label="(1, 384, 4, 4)", style=solid]; -"43 SqueezeNet/Sequential[features]/Fire[9]/NNCFConv2d[squeeze]/conv2d_0" -> "44 SqueezeNet/Sequential[features]/Fire[9]/ReLU[squeeze_activation]/relu__0" [label="(1, 48, 4, 4)", style=solid]; -"44 SqueezeNet/Sequential[features]/Fire[9]/ReLU[squeeze_activation]/relu__0" -> "45 SqueezeNet/Sequential[features]/Fire[9]/NNCFConv2d[expand1x1]/conv2d_0" [label="(1, 48, 4, 4)", style=solid]; -"44 SqueezeNet/Sequential[features]/Fire[9]/ReLU[squeeze_activation]/relu__0" -> "47 SqueezeNet/Sequential[features]/Fire[9]/NNCFConv2d[expand3x3]/conv2d_0" [label="(1, 48, 4, 4)", style=solid]; -"45 
SqueezeNet/Sequential[features]/Fire[9]/NNCFConv2d[expand1x1]/conv2d_0" -> "46 SqueezeNet/Sequential[features]/Fire[9]/ReLU[expand1x1_activation]/relu__0" [label="(1, 192, 4, 4)", style=solid]; -"46 SqueezeNet/Sequential[features]/Fire[9]/ReLU[expand1x1_activation]/relu__0" -> "49 SqueezeNet/Sequential[features]/Fire[9]/cat_0" [label="(1, 192, 4, 4)", style=solid]; -"47 SqueezeNet/Sequential[features]/Fire[9]/NNCFConv2d[expand3x3]/conv2d_0" -> "48 SqueezeNet/Sequential[features]/Fire[9]/ReLU[expand3x3_activation]/relu__0" [label="(1, 192, 4, 4)", style=solid]; -"48 SqueezeNet/Sequential[features]/Fire[9]/ReLU[expand3x3_activation]/relu__0" -> "49 SqueezeNet/Sequential[features]/Fire[9]/cat_0" [label="(1, 192, 4, 4)", style=solid]; -"49 SqueezeNet/Sequential[features]/Fire[9]/cat_0" -> "50 SqueezeNet/Sequential[features]/Fire[10]/NNCFConv2d[squeeze]/conv2d_0" [label="(1, 384, 4, 4)", style=solid]; -"50 SqueezeNet/Sequential[features]/Fire[10]/NNCFConv2d[squeeze]/conv2d_0" -> "51 SqueezeNet/Sequential[features]/Fire[10]/ReLU[squeeze_activation]/relu__0" [label="(1, 64, 4, 4)", style=solid]; -"51 SqueezeNet/Sequential[features]/Fire[10]/ReLU[squeeze_activation]/relu__0" -> "52 SqueezeNet/Sequential[features]/Fire[10]/NNCFConv2d[expand1x1]/conv2d_0" [label="(1, 64, 4, 4)", style=solid]; -"51 SqueezeNet/Sequential[features]/Fire[10]/ReLU[squeeze_activation]/relu__0" -> "54 SqueezeNet/Sequential[features]/Fire[10]/NNCFConv2d[expand3x3]/conv2d_0" [label="(1, 64, 4, 4)", style=solid]; -"52 SqueezeNet/Sequential[features]/Fire[10]/NNCFConv2d[expand1x1]/conv2d_0" -> "53 SqueezeNet/Sequential[features]/Fire[10]/ReLU[expand1x1_activation]/relu__0" [label="(1, 256, 4, 4)", style=solid]; -"53 SqueezeNet/Sequential[features]/Fire[10]/ReLU[expand1x1_activation]/relu__0" -> "56 SqueezeNet/Sequential[features]/Fire[10]/cat_0" [label="(1, 256, 4, 4)", style=solid]; -"54 SqueezeNet/Sequential[features]/Fire[10]/NNCFConv2d[expand3x3]/conv2d_0" -> "55 SqueezeNet/Sequential[features]/Fire[10]/ReLU[expand3x3_activation]/relu__0" [label="(1, 256, 4, 4)", style=solid]; -"55 SqueezeNet/Sequential[features]/Fire[10]/ReLU[expand3x3_activation]/relu__0" -> "56 SqueezeNet/Sequential[features]/Fire[10]/cat_0" [label="(1, 256, 4, 4)", style=solid]; -"56 SqueezeNet/Sequential[features]/Fire[10]/cat_0" -> "57 SqueezeNet/Sequential[features]/MaxPool2d[11]/max_pool2d_0" [label="(1, 512, 4, 4)", style=solid]; -"57 SqueezeNet/Sequential[features]/MaxPool2d[11]/max_pool2d_0" -> "58 SqueezeNet/Sequential[features]/Fire[12]/NNCFConv2d[squeeze]/conv2d_0" [label="(1, 512, 2, 2)", style=solid]; -"58 SqueezeNet/Sequential[features]/Fire[12]/NNCFConv2d[squeeze]/conv2d_0" -> "59 SqueezeNet/Sequential[features]/Fire[12]/ReLU[squeeze_activation]/relu__0" [label="(1, 64, 2, 2)", style=solid]; -"59 SqueezeNet/Sequential[features]/Fire[12]/ReLU[squeeze_activation]/relu__0" -> "60 SqueezeNet/Sequential[features]/Fire[12]/NNCFConv2d[expand1x1]/conv2d_0" [label="(1, 64, 2, 2)", style=solid]; -"59 SqueezeNet/Sequential[features]/Fire[12]/ReLU[squeeze_activation]/relu__0" -> "62 SqueezeNet/Sequential[features]/Fire[12]/NNCFConv2d[expand3x3]/conv2d_0" [label="(1, 64, 2, 2)", style=solid]; -"60 SqueezeNet/Sequential[features]/Fire[12]/NNCFConv2d[expand1x1]/conv2d_0" -> "61 SqueezeNet/Sequential[features]/Fire[12]/ReLU[expand1x1_activation]/relu__0" [label="(1, 256, 2, 2)", style=solid]; -"61 SqueezeNet/Sequential[features]/Fire[12]/ReLU[expand1x1_activation]/relu__0" -> "64 SqueezeNet/Sequential[features]/Fire[12]/cat_0" [label="(1, 256, 2, 2)", 
style=solid]; -"62 SqueezeNet/Sequential[features]/Fire[12]/NNCFConv2d[expand3x3]/conv2d_0" -> "63 SqueezeNet/Sequential[features]/Fire[12]/ReLU[expand3x3_activation]/relu__0" [label="(1, 256, 2, 2)", style=solid]; -"63 SqueezeNet/Sequential[features]/Fire[12]/ReLU[expand3x3_activation]/relu__0" -> "64 SqueezeNet/Sequential[features]/Fire[12]/cat_0" [label="(1, 256, 2, 2)", style=solid]; -"64 SqueezeNet/Sequential[features]/Fire[12]/cat_0" -> "65 SqueezeNet/Sequential[classifier]/Dropout[0]/dropout_0" [label="(1, 512, 2, 2)", style=solid]; -"65 SqueezeNet/Sequential[classifier]/Dropout[0]/dropout_0" -> "66 SqueezeNet/Sequential[classifier]/NNCFConv2d[1]/conv2d_0" [label="(1, 512, 2, 2)", style=solid]; -"66 SqueezeNet/Sequential[classifier]/NNCFConv2d[1]/conv2d_0" -> "67 SqueezeNet/Sequential[classifier]/ReLU[2]/relu__0" [label="(1, 1000, 2, 2)", style=solid]; -"67 SqueezeNet/Sequential[classifier]/ReLU[2]/relu__0" -> "68 SqueezeNet/Sequential[classifier]/AdaptiveAvgPool2d[3]/adaptive_avg_pool2d_0" [label="(1, 1000, 2, 2)", style=solid]; -"68 SqueezeNet/Sequential[classifier]/AdaptiveAvgPool2d[3]/adaptive_avg_pool2d_0" -> "69 SqueezeNet/view_0" [label="(1, 1000, 1, 1)", style=solid]; -"69 SqueezeNet/view_0" -> "70 /nncf_model_output_0" [label="(1, 1000)", style=solid]; +"1 SqueezeNet/Sequential[features]/NNCFConv2d[0]/conv2d_0" [color=lightblue, id=1, label="conv2d_OW96_G0_#1", style=filled, type=conv2d]; +"2 SqueezeNet/Sequential[features]/ReLU[1]/relu__0" [id=2, label="relu__IW96_OW96_#2", style=filled, type=relu_]; +"3 SqueezeNet/Sequential[features]/MaxPool2d[2]/max_pool2d_0" [id=3, label="max_pool2d_IW96_OW96_#3", style=filled, type=max_pool2d]; +"4 SqueezeNet/Sequential[features]/Fire[3]/NNCFConv2d[squeeze]/conv2d_0" [color=lightblue, id=4, label="conv2d_IW96_OW16_G1_#4", style=filled, type=conv2d]; +"5 SqueezeNet/Sequential[features]/Fire[3]/ReLU[squeeze_activation]/relu__0" [id=5, label="relu__IW16_OW16_#5", style=filled, type=relu_]; +"6 SqueezeNet/Sequential[features]/Fire[3]/NNCFConv2d[expand1x1]/conv2d_0" [color=lightblue, id=6, label="conv2d_IW16_OW64_G2_#6", style=filled, type=conv2d]; +"7 SqueezeNet/Sequential[features]/Fire[3]/ReLU[expand1x1_activation]/relu__0" [id=7, label="relu__IW64_OW64_#7", style=filled, type=relu_]; +"8 SqueezeNet/Sequential[features]/Fire[3]/NNCFConv2d[expand3x3]/conv2d_0" [color=lightblue, id=8, label="conv2d_IW16_OW64_G3_#8", style=filled, type=conv2d]; +"9 SqueezeNet/Sequential[features]/Fire[3]/ReLU[expand3x3_activation]/relu__0" [id=9, label="relu__IW64_OW64_#9", style=filled, type=relu_]; +"10 SqueezeNet/Sequential[features]/Fire[3]/cat_0" [id=10, label="cat_IW[64, 64]_OW128_#10", style=filled, type=cat]; +"11 SqueezeNet/Sequential[features]/Fire[4]/NNCFConv2d[squeeze]/conv2d_0" [color=lightblue, id=11, label="conv2d_IW128_OW16_G4_#11", style=filled, type=conv2d]; +"12 SqueezeNet/Sequential[features]/Fire[4]/ReLU[squeeze_activation]/relu__0" [id=12, label="relu__IW16_OW16_#12", style=filled, type=relu_]; +"13 SqueezeNet/Sequential[features]/Fire[4]/NNCFConv2d[expand1x1]/conv2d_0" [color=lightblue, id=13, label="conv2d_IW16_OW64_G5_#13", style=filled, type=conv2d]; +"14 SqueezeNet/Sequential[features]/Fire[4]/ReLU[expand1x1_activation]/relu__0" [id=14, label="relu__IW64_OW64_#14", style=filled, type=relu_]; +"15 SqueezeNet/Sequential[features]/Fire[4]/NNCFConv2d[expand3x3]/conv2d_0" [color=lightblue, id=15, label="conv2d_IW16_OW64_G6_#15", style=filled, type=conv2d]; +"16 
SqueezeNet/Sequential[features]/Fire[4]/ReLU[expand3x3_activation]/relu__0" [id=16, label="relu__IW64_OW64_#16", style=filled, type=relu_]; +"17 SqueezeNet/Sequential[features]/Fire[4]/cat_0" [id=17, label="cat_IW[64, 64]_OW128_#17", style=filled, type=cat]; +"18 SqueezeNet/Sequential[features]/Fire[5]/NNCFConv2d[squeeze]/conv2d_0" [color=lightblue, id=18, label="conv2d_IW128_OW32_G7_#18", style=filled, type=conv2d]; +"19 SqueezeNet/Sequential[features]/Fire[5]/ReLU[squeeze_activation]/relu__0" [id=19, label="relu__IW32_OW32_#19", style=filled, type=relu_]; +"20 SqueezeNet/Sequential[features]/Fire[5]/NNCFConv2d[expand1x1]/conv2d_0" [color=lightblue, id=20, label="conv2d_IW32_OW128_G8_#20", style=filled, type=conv2d]; +"21 SqueezeNet/Sequential[features]/Fire[5]/ReLU[expand1x1_activation]/relu__0" [id=21, label="relu__IW128_OW128_#21", style=filled, type=relu_]; +"22 SqueezeNet/Sequential[features]/Fire[5]/NNCFConv2d[expand3x3]/conv2d_0" [color=lightblue, id=22, label="conv2d_IW32_OW128_G9_#22", style=filled, type=conv2d]; +"23 SqueezeNet/Sequential[features]/Fire[5]/ReLU[expand3x3_activation]/relu__0" [id=23, label="relu__IW128_OW128_#23", style=filled, type=relu_]; +"24 SqueezeNet/Sequential[features]/Fire[5]/cat_0" [id=24, label="cat_IW[128, 128]_OW256_#24", style=filled, type=cat]; +"25 SqueezeNet/Sequential[features]/MaxPool2d[6]/max_pool2d_0" [id=25, label="max_pool2d_IW256_OW256_#25", style=filled, type=max_pool2d]; +"26 SqueezeNet/Sequential[features]/Fire[7]/NNCFConv2d[squeeze]/conv2d_0" [color=lightblue, id=26, label="conv2d_IW256_OW32_G10_#26", style=filled, type=conv2d]; +"27 SqueezeNet/Sequential[features]/Fire[7]/ReLU[squeeze_activation]/relu__0" [id=27, label="relu__IW32_OW32_#27", style=filled, type=relu_]; +"28 SqueezeNet/Sequential[features]/Fire[7]/NNCFConv2d[expand1x1]/conv2d_0" [color=lightblue, id=28, label="conv2d_IW32_OW128_G11_#28", style=filled, type=conv2d]; +"29 SqueezeNet/Sequential[features]/Fire[7]/ReLU[expand1x1_activation]/relu__0" [id=29, label="relu__IW128_OW128_#29", style=filled, type=relu_]; +"30 SqueezeNet/Sequential[features]/Fire[7]/NNCFConv2d[expand3x3]/conv2d_0" [color=lightblue, id=30, label="conv2d_IW32_OW128_G12_#30", style=filled, type=conv2d]; +"31 SqueezeNet/Sequential[features]/Fire[7]/ReLU[expand3x3_activation]/relu__0" [id=31, label="relu__IW128_OW128_#31", style=filled, type=relu_]; +"32 SqueezeNet/Sequential[features]/Fire[7]/cat_0" [id=32, label="cat_IW[128, 128]_OW256_#32", style=filled, type=cat]; +"33 SqueezeNet/Sequential[features]/Fire[8]/NNCFConv2d[squeeze]/conv2d_0" [color=lightblue, id=33, label="conv2d_IW256_OW48_G13_#33", style=filled, type=conv2d]; +"34 SqueezeNet/Sequential[features]/Fire[8]/ReLU[squeeze_activation]/relu__0" [id=34, label="relu__IW48_OW48_#34", style=filled, type=relu_]; +"35 SqueezeNet/Sequential[features]/Fire[8]/NNCFConv2d[expand1x1]/conv2d_0" [color=lightblue, id=35, label="conv2d_IW48_OW192_G14_#35", style=filled, type=conv2d]; +"36 SqueezeNet/Sequential[features]/Fire[8]/ReLU[expand1x1_activation]/relu__0" [id=36, label="relu__IW192_OW192_#36", style=filled, type=relu_]; +"37 SqueezeNet/Sequential[features]/Fire[8]/NNCFConv2d[expand3x3]/conv2d_0" [color=lightblue, id=37, label="conv2d_IW48_OW192_G15_#37", style=filled, type=conv2d]; +"38 SqueezeNet/Sequential[features]/Fire[8]/ReLU[expand3x3_activation]/relu__0" [id=38, label="relu__IW192_OW192_#38", style=filled, type=relu_]; +"39 SqueezeNet/Sequential[features]/Fire[8]/cat_0" [id=39, label="cat_IW[192, 192]_OW384_#39", style=filled, type=cat]; +"40 
SqueezeNet/Sequential[features]/Fire[9]/NNCFConv2d[squeeze]/conv2d_0" [color=lightblue, id=40, label="conv2d_IW384_OW48_G16_#40", style=filled, type=conv2d]; +"41 SqueezeNet/Sequential[features]/Fire[9]/ReLU[squeeze_activation]/relu__0" [id=41, label="relu__IW48_OW48_#41", style=filled, type=relu_]; +"42 SqueezeNet/Sequential[features]/Fire[9]/NNCFConv2d[expand1x1]/conv2d_0" [color=lightblue, id=42, label="conv2d_IW48_OW192_G17_#42", style=filled, type=conv2d]; +"43 SqueezeNet/Sequential[features]/Fire[9]/ReLU[expand1x1_activation]/relu__0" [id=43, label="relu__IW192_OW192_#43", style=filled, type=relu_]; +"44 SqueezeNet/Sequential[features]/Fire[9]/NNCFConv2d[expand3x3]/conv2d_0" [color=lightblue, id=44, label="conv2d_IW48_OW192_G18_#44", style=filled, type=conv2d]; +"45 SqueezeNet/Sequential[features]/Fire[9]/ReLU[expand3x3_activation]/relu__0" [id=45, label="relu__IW192_OW192_#45", style=filled, type=relu_]; +"46 SqueezeNet/Sequential[features]/Fire[9]/cat_0" [id=46, label="cat_IW[192, 192]_OW384_#46", style=filled, type=cat]; +"47 SqueezeNet/Sequential[features]/Fire[10]/NNCFConv2d[squeeze]/conv2d_0" [color=lightblue, id=47, label="conv2d_IW384_OW64_G19_#47", style=filled, type=conv2d]; +"48 SqueezeNet/Sequential[features]/Fire[10]/ReLU[squeeze_activation]/relu__0" [id=48, label="relu__IW64_OW64_#48", style=filled, type=relu_]; +"49 SqueezeNet/Sequential[features]/Fire[10]/NNCFConv2d[expand1x1]/conv2d_0" [color=lightblue, id=49, label="conv2d_IW64_OW256_G20_#49", style=filled, type=conv2d]; +"50 SqueezeNet/Sequential[features]/Fire[10]/ReLU[expand1x1_activation]/relu__0" [id=50, label="relu__IW256_OW256_#50", style=filled, type=relu_]; +"51 SqueezeNet/Sequential[features]/Fire[10]/NNCFConv2d[expand3x3]/conv2d_0" [color=lightblue, id=51, label="conv2d_IW64_OW256_G21_#51", style=filled, type=conv2d]; +"52 SqueezeNet/Sequential[features]/Fire[10]/ReLU[expand3x3_activation]/relu__0" [id=52, label="relu__IW256_OW256_#52", style=filled, type=relu_]; +"53 SqueezeNet/Sequential[features]/Fire[10]/cat_0" [id=53, label="cat_IW[256, 256]_OW512_#53", style=filled, type=cat]; +"54 SqueezeNet/Sequential[features]/MaxPool2d[11]/max_pool2d_0" [id=54, label="max_pool2d_IW512_OW512_#54", style=filled, type=max_pool2d]; +"55 SqueezeNet/Sequential[features]/Fire[12]/NNCFConv2d[squeeze]/conv2d_0" [color=lightblue, id=55, label="conv2d_IW512_OW64_G22_#55", style=filled, type=conv2d]; +"56 SqueezeNet/Sequential[features]/Fire[12]/ReLU[squeeze_activation]/relu__0" [id=56, label="relu__IW64_OW64_#56", style=filled, type=relu_]; +"57 SqueezeNet/Sequential[features]/Fire[12]/NNCFConv2d[expand1x1]/conv2d_0" [color=lightblue, id=57, label="conv2d_IW64_OW256_G23_#57", style=filled, type=conv2d]; +"58 SqueezeNet/Sequential[features]/Fire[12]/ReLU[expand1x1_activation]/relu__0" [id=58, label="relu__IW256_OW256_#58", style=filled, type=relu_]; +"59 SqueezeNet/Sequential[features]/Fire[12]/NNCFConv2d[expand3x3]/conv2d_0" [color=lightblue, id=59, label="conv2d_IW64_OW256_G24_#59", style=filled, type=conv2d]; +"60 SqueezeNet/Sequential[features]/Fire[12]/ReLU[expand3x3_activation]/relu__0" [id=60, label="relu__IW256_OW256_#60", style=filled, type=relu_]; +"61 SqueezeNet/Sequential[features]/Fire[12]/cat_0" [id=61, label="cat_IW[256, 256]_OW512_#61", style=filled, type=cat]; +"62 SqueezeNet/Sequential[classifier]/Dropout[0]/dropout_0" [id=62, label="dropout_IW512_OW512_#62", style=filled, type=dropout]; +"63 SqueezeNet/Sequential[classifier]/NNCFConv2d[1]/conv2d_0" [color=lightblue, id=63, label="conv2d_IW512_#63", 
style=filled, type=conv2d]; +"64 SqueezeNet/Sequential[classifier]/ReLU[2]/relu__0" [id=64, label="relu__#64", style=filled, type=relu_]; +"65 SqueezeNet/Sequential[classifier]/AdaptiveAvgPool2d[3]/adaptive_avg_pool2d_0" [id=65, label="adaptive_avg_pool2d_#65", style=filled, type=adaptive_avg_pool2d]; +"66 SqueezeNet/view_0" [id=66, label="view_#66", style=filled, type=view]; +"67 /nncf_model_output_0" [id=67, label="nncf_model_output_#67", style=filled, type=nncf_model_output]; +"0 /nncf_model_input_0" -> "1 SqueezeNet/Sequential[features]/NNCFConv2d[0]/conv2d_0" [label="(1, 3, 32, 32)", style=solid]; +"1 SqueezeNet/Sequential[features]/NNCFConv2d[0]/conv2d_0" -> "2 SqueezeNet/Sequential[features]/ReLU[1]/relu__0" [label="(1, 96, 13, 13)", style=solid]; +"2 SqueezeNet/Sequential[features]/ReLU[1]/relu__0" -> "3 SqueezeNet/Sequential[features]/MaxPool2d[2]/max_pool2d_0" [label="(1, 96, 13, 13)", style=solid]; +"3 SqueezeNet/Sequential[features]/MaxPool2d[2]/max_pool2d_0" -> "4 SqueezeNet/Sequential[features]/Fire[3]/NNCFConv2d[squeeze]/conv2d_0" [label="(1, 96, 6, 6)", style=solid]; +"4 SqueezeNet/Sequential[features]/Fire[3]/NNCFConv2d[squeeze]/conv2d_0" -> "5 SqueezeNet/Sequential[features]/Fire[3]/ReLU[squeeze_activation]/relu__0" [label="(1, 16, 6, 6)", style=solid]; +"5 SqueezeNet/Sequential[features]/Fire[3]/ReLU[squeeze_activation]/relu__0" -> "6 SqueezeNet/Sequential[features]/Fire[3]/NNCFConv2d[expand1x1]/conv2d_0" [label="(1, 16, 6, 6)", style=solid]; +"5 SqueezeNet/Sequential[features]/Fire[3]/ReLU[squeeze_activation]/relu__0" -> "8 SqueezeNet/Sequential[features]/Fire[3]/NNCFConv2d[expand3x3]/conv2d_0" [label="(1, 16, 6, 6)", style=solid]; +"6 SqueezeNet/Sequential[features]/Fire[3]/NNCFConv2d[expand1x1]/conv2d_0" -> "7 SqueezeNet/Sequential[features]/Fire[3]/ReLU[expand1x1_activation]/relu__0" [label="(1, 64, 6, 6)", style=solid]; +"7 SqueezeNet/Sequential[features]/Fire[3]/ReLU[expand1x1_activation]/relu__0" -> "10 SqueezeNet/Sequential[features]/Fire[3]/cat_0" [label="(1, 64, 6, 6)", style=solid]; +"8 SqueezeNet/Sequential[features]/Fire[3]/NNCFConv2d[expand3x3]/conv2d_0" -> "9 SqueezeNet/Sequential[features]/Fire[3]/ReLU[expand3x3_activation]/relu__0" [label="(1, 64, 6, 6)", style=solid]; +"9 SqueezeNet/Sequential[features]/Fire[3]/ReLU[expand3x3_activation]/relu__0" -> "10 SqueezeNet/Sequential[features]/Fire[3]/cat_0" [label="(1, 64, 6, 6)", style=solid]; +"10 SqueezeNet/Sequential[features]/Fire[3]/cat_0" -> "11 SqueezeNet/Sequential[features]/Fire[4]/NNCFConv2d[squeeze]/conv2d_0" [label="(1, 128, 6, 6)", style=solid]; +"11 SqueezeNet/Sequential[features]/Fire[4]/NNCFConv2d[squeeze]/conv2d_0" -> "12 SqueezeNet/Sequential[features]/Fire[4]/ReLU[squeeze_activation]/relu__0" [label="(1, 16, 6, 6)", style=solid]; +"12 SqueezeNet/Sequential[features]/Fire[4]/ReLU[squeeze_activation]/relu__0" -> "13 SqueezeNet/Sequential[features]/Fire[4]/NNCFConv2d[expand1x1]/conv2d_0" [label="(1, 16, 6, 6)", style=solid]; +"12 SqueezeNet/Sequential[features]/Fire[4]/ReLU[squeeze_activation]/relu__0" -> "15 SqueezeNet/Sequential[features]/Fire[4]/NNCFConv2d[expand3x3]/conv2d_0" [label="(1, 16, 6, 6)", style=solid]; +"13 SqueezeNet/Sequential[features]/Fire[4]/NNCFConv2d[expand1x1]/conv2d_0" -> "14 SqueezeNet/Sequential[features]/Fire[4]/ReLU[expand1x1_activation]/relu__0" [label="(1, 64, 6, 6)", style=solid]; +"14 SqueezeNet/Sequential[features]/Fire[4]/ReLU[expand1x1_activation]/relu__0" -> "17 SqueezeNet/Sequential[features]/Fire[4]/cat_0" [label="(1, 64, 6, 6)", style=solid]; +"15 
SqueezeNet/Sequential[features]/Fire[4]/NNCFConv2d[expand3x3]/conv2d_0" -> "16 SqueezeNet/Sequential[features]/Fire[4]/ReLU[expand3x3_activation]/relu__0" [label="(1, 64, 6, 6)", style=solid]; +"16 SqueezeNet/Sequential[features]/Fire[4]/ReLU[expand3x3_activation]/relu__0" -> "17 SqueezeNet/Sequential[features]/Fire[4]/cat_0" [label="(1, 64, 6, 6)", style=solid]; +"17 SqueezeNet/Sequential[features]/Fire[4]/cat_0" -> "18 SqueezeNet/Sequential[features]/Fire[5]/NNCFConv2d[squeeze]/conv2d_0" [label="(1, 128, 6, 6)", style=solid]; +"18 SqueezeNet/Sequential[features]/Fire[5]/NNCFConv2d[squeeze]/conv2d_0" -> "19 SqueezeNet/Sequential[features]/Fire[5]/ReLU[squeeze_activation]/relu__0" [label="(1, 32, 6, 6)", style=solid]; +"19 SqueezeNet/Sequential[features]/Fire[5]/ReLU[squeeze_activation]/relu__0" -> "20 SqueezeNet/Sequential[features]/Fire[5]/NNCFConv2d[expand1x1]/conv2d_0" [label="(1, 32, 6, 6)", style=solid]; +"19 SqueezeNet/Sequential[features]/Fire[5]/ReLU[squeeze_activation]/relu__0" -> "22 SqueezeNet/Sequential[features]/Fire[5]/NNCFConv2d[expand3x3]/conv2d_0" [label="(1, 32, 6, 6)", style=solid]; +"20 SqueezeNet/Sequential[features]/Fire[5]/NNCFConv2d[expand1x1]/conv2d_0" -> "21 SqueezeNet/Sequential[features]/Fire[5]/ReLU[expand1x1_activation]/relu__0" [label="(1, 128, 6, 6)", style=solid]; +"21 SqueezeNet/Sequential[features]/Fire[5]/ReLU[expand1x1_activation]/relu__0" -> "24 SqueezeNet/Sequential[features]/Fire[5]/cat_0" [label="(1, 128, 6, 6)", style=solid]; +"22 SqueezeNet/Sequential[features]/Fire[5]/NNCFConv2d[expand3x3]/conv2d_0" -> "23 SqueezeNet/Sequential[features]/Fire[5]/ReLU[expand3x3_activation]/relu__0" [label="(1, 128, 6, 6)", style=solid]; +"23 SqueezeNet/Sequential[features]/Fire[5]/ReLU[expand3x3_activation]/relu__0" -> "24 SqueezeNet/Sequential[features]/Fire[5]/cat_0" [label="(1, 128, 6, 6)", style=solid]; +"24 SqueezeNet/Sequential[features]/Fire[5]/cat_0" -> "25 SqueezeNet/Sequential[features]/MaxPool2d[6]/max_pool2d_0" [label="(1, 256, 6, 6)", style=solid]; +"25 SqueezeNet/Sequential[features]/MaxPool2d[6]/max_pool2d_0" -> "26 SqueezeNet/Sequential[features]/Fire[7]/NNCFConv2d[squeeze]/conv2d_0" [label="(1, 256, 3, 3)", style=solid]; +"26 SqueezeNet/Sequential[features]/Fire[7]/NNCFConv2d[squeeze]/conv2d_0" -> "27 SqueezeNet/Sequential[features]/Fire[7]/ReLU[squeeze_activation]/relu__0" [label="(1, 32, 3, 3)", style=solid]; +"27 SqueezeNet/Sequential[features]/Fire[7]/ReLU[squeeze_activation]/relu__0" -> "28 SqueezeNet/Sequential[features]/Fire[7]/NNCFConv2d[expand1x1]/conv2d_0" [label="(1, 32, 3, 3)", style=solid]; +"27 SqueezeNet/Sequential[features]/Fire[7]/ReLU[squeeze_activation]/relu__0" -> "30 SqueezeNet/Sequential[features]/Fire[7]/NNCFConv2d[expand3x3]/conv2d_0" [label="(1, 32, 3, 3)", style=solid]; +"28 SqueezeNet/Sequential[features]/Fire[7]/NNCFConv2d[expand1x1]/conv2d_0" -> "29 SqueezeNet/Sequential[features]/Fire[7]/ReLU[expand1x1_activation]/relu__0" [label="(1, 128, 3, 3)", style=solid]; +"29 SqueezeNet/Sequential[features]/Fire[7]/ReLU[expand1x1_activation]/relu__0" -> "32 SqueezeNet/Sequential[features]/Fire[7]/cat_0" [label="(1, 128, 3, 3)", style=solid]; +"30 SqueezeNet/Sequential[features]/Fire[7]/NNCFConv2d[expand3x3]/conv2d_0" -> "31 SqueezeNet/Sequential[features]/Fire[7]/ReLU[expand3x3_activation]/relu__0" [label="(1, 128, 3, 3)", style=solid]; +"31 SqueezeNet/Sequential[features]/Fire[7]/ReLU[expand3x3_activation]/relu__0" -> "32 SqueezeNet/Sequential[features]/Fire[7]/cat_0" [label="(1, 128, 3, 3)", style=solid]; +"32 
SqueezeNet/Sequential[features]/Fire[7]/cat_0" -> "33 SqueezeNet/Sequential[features]/Fire[8]/NNCFConv2d[squeeze]/conv2d_0" [label="(1, 256, 3, 3)", style=solid]; +"33 SqueezeNet/Sequential[features]/Fire[8]/NNCFConv2d[squeeze]/conv2d_0" -> "34 SqueezeNet/Sequential[features]/Fire[8]/ReLU[squeeze_activation]/relu__0" [label="(1, 48, 3, 3)", style=solid]; +"34 SqueezeNet/Sequential[features]/Fire[8]/ReLU[squeeze_activation]/relu__0" -> "35 SqueezeNet/Sequential[features]/Fire[8]/NNCFConv2d[expand1x1]/conv2d_0" [label="(1, 48, 3, 3)", style=solid]; +"34 SqueezeNet/Sequential[features]/Fire[8]/ReLU[squeeze_activation]/relu__0" -> "37 SqueezeNet/Sequential[features]/Fire[8]/NNCFConv2d[expand3x3]/conv2d_0" [label="(1, 48, 3, 3)", style=solid]; +"35 SqueezeNet/Sequential[features]/Fire[8]/NNCFConv2d[expand1x1]/conv2d_0" -> "36 SqueezeNet/Sequential[features]/Fire[8]/ReLU[expand1x1_activation]/relu__0" [label="(1, 192, 3, 3)", style=solid]; +"36 SqueezeNet/Sequential[features]/Fire[8]/ReLU[expand1x1_activation]/relu__0" -> "39 SqueezeNet/Sequential[features]/Fire[8]/cat_0" [label="(1, 192, 3, 3)", style=solid]; +"37 SqueezeNet/Sequential[features]/Fire[8]/NNCFConv2d[expand3x3]/conv2d_0" -> "38 SqueezeNet/Sequential[features]/Fire[8]/ReLU[expand3x3_activation]/relu__0" [label="(1, 192, 3, 3)", style=solid]; +"38 SqueezeNet/Sequential[features]/Fire[8]/ReLU[expand3x3_activation]/relu__0" -> "39 SqueezeNet/Sequential[features]/Fire[8]/cat_0" [label="(1, 192, 3, 3)", style=solid]; +"39 SqueezeNet/Sequential[features]/Fire[8]/cat_0" -> "40 SqueezeNet/Sequential[features]/Fire[9]/NNCFConv2d[squeeze]/conv2d_0" [label="(1, 384, 3, 3)", style=solid]; +"40 SqueezeNet/Sequential[features]/Fire[9]/NNCFConv2d[squeeze]/conv2d_0" -> "41 SqueezeNet/Sequential[features]/Fire[9]/ReLU[squeeze_activation]/relu__0" [label="(1, 48, 3, 3)", style=solid]; +"41 SqueezeNet/Sequential[features]/Fire[9]/ReLU[squeeze_activation]/relu__0" -> "42 SqueezeNet/Sequential[features]/Fire[9]/NNCFConv2d[expand1x1]/conv2d_0" [label="(1, 48, 3, 3)", style=solid]; +"41 SqueezeNet/Sequential[features]/Fire[9]/ReLU[squeeze_activation]/relu__0" -> "44 SqueezeNet/Sequential[features]/Fire[9]/NNCFConv2d[expand3x3]/conv2d_0" [label="(1, 48, 3, 3)", style=solid]; +"42 SqueezeNet/Sequential[features]/Fire[9]/NNCFConv2d[expand1x1]/conv2d_0" -> "43 SqueezeNet/Sequential[features]/Fire[9]/ReLU[expand1x1_activation]/relu__0" [label="(1, 192, 3, 3)", style=solid]; +"43 SqueezeNet/Sequential[features]/Fire[9]/ReLU[expand1x1_activation]/relu__0" -> "46 SqueezeNet/Sequential[features]/Fire[9]/cat_0" [label="(1, 192, 3, 3)", style=solid]; +"44 SqueezeNet/Sequential[features]/Fire[9]/NNCFConv2d[expand3x3]/conv2d_0" -> "45 SqueezeNet/Sequential[features]/Fire[9]/ReLU[expand3x3_activation]/relu__0" [label="(1, 192, 3, 3)", style=solid]; +"45 SqueezeNet/Sequential[features]/Fire[9]/ReLU[expand3x3_activation]/relu__0" -> "46 SqueezeNet/Sequential[features]/Fire[9]/cat_0" [label="(1, 192, 3, 3)", style=solid]; +"46 SqueezeNet/Sequential[features]/Fire[9]/cat_0" -> "47 SqueezeNet/Sequential[features]/Fire[10]/NNCFConv2d[squeeze]/conv2d_0" [label="(1, 384, 3, 3)", style=solid]; +"47 SqueezeNet/Sequential[features]/Fire[10]/NNCFConv2d[squeeze]/conv2d_0" -> "48 SqueezeNet/Sequential[features]/Fire[10]/ReLU[squeeze_activation]/relu__0" [label="(1, 64, 3, 3)", style=solid]; +"48 SqueezeNet/Sequential[features]/Fire[10]/ReLU[squeeze_activation]/relu__0" -> "49 SqueezeNet/Sequential[features]/Fire[10]/NNCFConv2d[expand1x1]/conv2d_0" [label="(1, 64, 3, 3)", 
style=solid]; +"48 SqueezeNet/Sequential[features]/Fire[10]/ReLU[squeeze_activation]/relu__0" -> "51 SqueezeNet/Sequential[features]/Fire[10]/NNCFConv2d[expand3x3]/conv2d_0" [label="(1, 64, 3, 3)", style=solid]; +"49 SqueezeNet/Sequential[features]/Fire[10]/NNCFConv2d[expand1x1]/conv2d_0" -> "50 SqueezeNet/Sequential[features]/Fire[10]/ReLU[expand1x1_activation]/relu__0" [label="(1, 256, 3, 3)", style=solid]; +"50 SqueezeNet/Sequential[features]/Fire[10]/ReLU[expand1x1_activation]/relu__0" -> "53 SqueezeNet/Sequential[features]/Fire[10]/cat_0" [label="(1, 256, 3, 3)", style=solid]; +"51 SqueezeNet/Sequential[features]/Fire[10]/NNCFConv2d[expand3x3]/conv2d_0" -> "52 SqueezeNet/Sequential[features]/Fire[10]/ReLU[expand3x3_activation]/relu__0" [label="(1, 256, 3, 3)", style=solid]; +"52 SqueezeNet/Sequential[features]/Fire[10]/ReLU[expand3x3_activation]/relu__0" -> "53 SqueezeNet/Sequential[features]/Fire[10]/cat_0" [label="(1, 256, 3, 3)", style=solid]; +"53 SqueezeNet/Sequential[features]/Fire[10]/cat_0" -> "54 SqueezeNet/Sequential[features]/MaxPool2d[11]/max_pool2d_0" [label="(1, 512, 3, 3)", style=solid]; +"54 SqueezeNet/Sequential[features]/MaxPool2d[11]/max_pool2d_0" -> "55 SqueezeNet/Sequential[features]/Fire[12]/NNCFConv2d[squeeze]/conv2d_0" [label="(1, 512, 1, 1)", style=solid]; +"55 SqueezeNet/Sequential[features]/Fire[12]/NNCFConv2d[squeeze]/conv2d_0" -> "56 SqueezeNet/Sequential[features]/Fire[12]/ReLU[squeeze_activation]/relu__0" [label="(1, 64, 1, 1)", style=solid]; +"56 SqueezeNet/Sequential[features]/Fire[12]/ReLU[squeeze_activation]/relu__0" -> "57 SqueezeNet/Sequential[features]/Fire[12]/NNCFConv2d[expand1x1]/conv2d_0" [label="(1, 64, 1, 1)", style=solid]; +"56 SqueezeNet/Sequential[features]/Fire[12]/ReLU[squeeze_activation]/relu__0" -> "59 SqueezeNet/Sequential[features]/Fire[12]/NNCFConv2d[expand3x3]/conv2d_0" [label="(1, 64, 1, 1)", style=solid]; +"57 SqueezeNet/Sequential[features]/Fire[12]/NNCFConv2d[expand1x1]/conv2d_0" -> "58 SqueezeNet/Sequential[features]/Fire[12]/ReLU[expand1x1_activation]/relu__0" [label="(1, 256, 1, 1)", style=solid]; +"58 SqueezeNet/Sequential[features]/Fire[12]/ReLU[expand1x1_activation]/relu__0" -> "61 SqueezeNet/Sequential[features]/Fire[12]/cat_0" [label="(1, 256, 1, 1)", style=solid]; +"59 SqueezeNet/Sequential[features]/Fire[12]/NNCFConv2d[expand3x3]/conv2d_0" -> "60 SqueezeNet/Sequential[features]/Fire[12]/ReLU[expand3x3_activation]/relu__0" [label="(1, 256, 1, 1)", style=solid]; +"60 SqueezeNet/Sequential[features]/Fire[12]/ReLU[expand3x3_activation]/relu__0" -> "61 SqueezeNet/Sequential[features]/Fire[12]/cat_0" [label="(1, 256, 1, 1)", style=solid]; +"61 SqueezeNet/Sequential[features]/Fire[12]/cat_0" -> "62 SqueezeNet/Sequential[classifier]/Dropout[0]/dropout_0" [label="(1, 512, 1, 1)", style=solid]; +"62 SqueezeNet/Sequential[classifier]/Dropout[0]/dropout_0" -> "63 SqueezeNet/Sequential[classifier]/NNCFConv2d[1]/conv2d_0" [label="(1, 512, 1, 1)", style=solid]; +"63 SqueezeNet/Sequential[classifier]/NNCFConv2d[1]/conv2d_0" -> "64 SqueezeNet/Sequential[classifier]/ReLU[2]/relu__0" [label="(1, 1000, 1, 1)", style=solid]; +"64 SqueezeNet/Sequential[classifier]/ReLU[2]/relu__0" -> "65 SqueezeNet/Sequential[classifier]/AdaptiveAvgPool2d[3]/adaptive_avg_pool2d_0" [label="(1, 1000, 1, 1)", style=solid]; +"65 SqueezeNet/Sequential[classifier]/AdaptiveAvgPool2d[3]/adaptive_avg_pool2d_0" -> "66 SqueezeNet/view_0" [label="(1, 1000, 1, 1)", style=solid]; +"66 SqueezeNet/view_0" -> "67 /nncf_model_output_0" [label="(1, 1000)", style=solid]; } 
diff --git a/tests/torch/data/reference_graphs/nas/squeezenet1_0_width.dot b/tests/torch/data/reference_graphs/nas/squeezenet1_0_width.dot index 326f330b20c..cca7677294e 100644 --- a/tests/torch/data/reference_graphs/nas/squeezenet1_0_width.dot +++ b/tests/torch/data/reference_graphs/nas/squeezenet1_0_width.dot @@ -1,148 +1,115 @@ strict digraph { "0 /nncf_model_input_0" [id=0, label="nncf_model_input_#0", style=filled, type=nncf_model_input]; -"1 SqueezeNet/Sequential[features]/NNCFConv2d[0]/ModuleDict[pre_ops]/UpdateWeight[3]/ElasticKernelConv2DOp[op]/linear_0" [id=1, label="linear_#1", style=filled, type=linear]; -"2 SqueezeNet/Sequential[features]/NNCFConv2d[0]/ModuleDict[pre_ops]/UpdateWeight[3]/ElasticKernelConv2DOp[op]/view_0" [id=2, label="view_#2", style=filled, type=view]; -"3 SqueezeNet/Sequential[features]/NNCFConv2d[0]/ModuleDict[pre_ops]/UpdateWeight[3]/ElasticKernelConv2DOp[op]/view_1" [id=3, label="view_#3", style=filled, type=view]; -"4 SqueezeNet/Sequential[features]/NNCFConv2d[0]/ModuleDict[pre_ops]/UpdateWeight[3]/ElasticKernelConv2DOp[op]/__getitem___0" [id=4, label="__getitem___#4", style=filled, type=__getitem__]; -"5 SqueezeNet/Sequential[features]/NNCFConv2d[0]/ModuleDict[pre_ops]/UpdateWeight[3]/ElasticKernelConv2DOp[op]/contiguous_0" [id=5, label="contiguous_#5", style=filled, type=contiguous]; -"6 SqueezeNet/Sequential[features]/NNCFConv2d[0]/ModuleDict[pre_ops]/UpdateWeight[3]/ElasticKernelConv2DOp[op]/view_2" [id=6, label="view_#6", style=filled, type=view]; -"7 SqueezeNet/Sequential[features]/NNCFConv2d[0]/ModuleDict[pre_ops]/UpdateWeight[3]/ElasticKernelConv2DOp[op]/view_3" [id=7, label="view_#7", style=filled, type=view]; -"8 SqueezeNet/Sequential[features]/NNCFConv2d[0]/ModuleDict[pre_ops]/UpdateWeight[3]/ElasticKernelConv2DOp[op]/linear_1" [id=8, label="linear_#8", style=filled, type=linear]; -"9 SqueezeNet/Sequential[features]/NNCFConv2d[0]/ModuleDict[pre_ops]/UpdateWeight[3]/ElasticKernelConv2DOp[op]/view_4" [id=9, label="view_#9", style=filled, type=view]; -"10 SqueezeNet/Sequential[features]/NNCFConv2d[0]/ModuleDict[pre_ops]/UpdateWeight[3]/ElasticKernelConv2DOp[op]/view_5" [id=10, label="view_#10", style=filled, type=view]; -"11 SqueezeNet/Sequential[features]/NNCFConv2d[0]/conv2d_0" [color=lightblue, id=11, label="conv2d_OW96_G0_#1", style=filled, type=conv2d]; -"12 SqueezeNet/Sequential[features]/ReLU[1]/relu__0" [id=12, label="relu__IW96_OW96_#2", style=filled, type=relu_]; -"13 SqueezeNet/Sequential[features]/MaxPool2d[2]/max_pool2d_0" [id=13, label="max_pool2d_IW96_OW96_#3", style=filled, type=max_pool2d]; -"14 SqueezeNet/Sequential[features]/Fire[3]/NNCFConv2d[squeeze]/conv2d_0" [color=lightblue, id=14, label="conv2d_IW96_OW16_G1_#4", style=filled, type=conv2d]; -"15 SqueezeNet/Sequential[features]/Fire[3]/ReLU[squeeze_activation]/relu__0" [id=15, label="relu__IW16_OW16_#5", style=filled, type=relu_]; -"16 SqueezeNet/Sequential[features]/Fire[3]/NNCFConv2d[expand1x1]/conv2d_0" [color=lightblue, id=16, label="conv2d_IW16_OW64_G2_#6", style=filled, type=conv2d]; -"17 SqueezeNet/Sequential[features]/Fire[3]/ReLU[expand1x1_activation]/relu__0" [id=17, label="relu__IW64_OW64_#7", style=filled, type=relu_]; -"18 SqueezeNet/Sequential[features]/Fire[3]/NNCFConv2d[expand3x3]/conv2d_0" [color=lightblue, id=18, label="conv2d_IW16_OW64_G3_#8", style=filled, type=conv2d]; -"19 SqueezeNet/Sequential[features]/Fire[3]/ReLU[expand3x3_activation]/relu__0" [id=19, label="relu__IW64_OW64_#9", style=filled, type=relu_]; -"20 
SqueezeNet/Sequential[features]/Fire[3]/cat_0" [id=20, label="cat_IW[64, 64]_OW128_#10", style=filled, type=cat]; -"21 SqueezeNet/Sequential[features]/Fire[4]/NNCFConv2d[squeeze]/conv2d_0" [color=lightblue, id=21, label="conv2d_IW128_OW16_G4_#11", style=filled, type=conv2d]; -"22 SqueezeNet/Sequential[features]/Fire[4]/ReLU[squeeze_activation]/relu__0" [id=22, label="relu__IW16_OW16_#12", style=filled, type=relu_]; -"23 SqueezeNet/Sequential[features]/Fire[4]/NNCFConv2d[expand1x1]/conv2d_0" [color=lightblue, id=23, label="conv2d_IW16_OW64_G5_#13", style=filled, type=conv2d]; -"24 SqueezeNet/Sequential[features]/Fire[4]/ReLU[expand1x1_activation]/relu__0" [id=24, label="relu__IW64_OW64_#14", style=filled, type=relu_]; -"25 SqueezeNet/Sequential[features]/Fire[4]/NNCFConv2d[expand3x3]/conv2d_0" [color=lightblue, id=25, label="conv2d_IW16_OW64_G6_#15", style=filled, type=conv2d]; -"26 SqueezeNet/Sequential[features]/Fire[4]/ReLU[expand3x3_activation]/relu__0" [id=26, label="relu__IW64_OW64_#16", style=filled, type=relu_]; -"27 SqueezeNet/Sequential[features]/Fire[4]/cat_0" [id=27, label="cat_IW[64, 64]_OW128_#17", style=filled, type=cat]; -"28 SqueezeNet/Sequential[features]/Fire[5]/NNCFConv2d[squeeze]/conv2d_0" [color=lightblue, id=28, label="conv2d_IW128_OW32_G7_#18", style=filled, type=conv2d]; -"29 SqueezeNet/Sequential[features]/Fire[5]/ReLU[squeeze_activation]/relu__0" [id=29, label="relu__IW32_OW32_#19", style=filled, type=relu_]; -"30 SqueezeNet/Sequential[features]/Fire[5]/NNCFConv2d[expand1x1]/conv2d_0" [color=lightblue, id=30, label="conv2d_IW32_OW128_G8_#20", style=filled, type=conv2d]; -"31 SqueezeNet/Sequential[features]/Fire[5]/ReLU[expand1x1_activation]/relu__0" [id=31, label="relu__IW128_OW128_#21", style=filled, type=relu_]; -"32 SqueezeNet/Sequential[features]/Fire[5]/NNCFConv2d[expand3x3]/conv2d_0" [color=lightblue, id=32, label="conv2d_IW32_OW128_G9_#22", style=filled, type=conv2d]; -"33 SqueezeNet/Sequential[features]/Fire[5]/ReLU[expand3x3_activation]/relu__0" [id=33, label="relu__IW128_OW128_#23", style=filled, type=relu_]; -"34 SqueezeNet/Sequential[features]/Fire[5]/cat_0" [id=34, label="cat_IW[128, 128]_OW256_#24", style=filled, type=cat]; -"35 SqueezeNet/Sequential[features]/MaxPool2d[6]/max_pool2d_0" [id=35, label="max_pool2d_IW256_OW256_#25", style=filled, type=max_pool2d]; -"36 SqueezeNet/Sequential[features]/Fire[7]/NNCFConv2d[squeeze]/conv2d_0" [color=lightblue, id=36, label="conv2d_IW256_OW32_G10_#26", style=filled, type=conv2d]; -"37 SqueezeNet/Sequential[features]/Fire[7]/ReLU[squeeze_activation]/relu__0" [id=37, label="relu__IW32_OW32_#27", style=filled, type=relu_]; -"38 SqueezeNet/Sequential[features]/Fire[7]/NNCFConv2d[expand1x1]/conv2d_0" [color=lightblue, id=38, label="conv2d_IW32_OW128_G11_#28", style=filled, type=conv2d]; -"39 SqueezeNet/Sequential[features]/Fire[7]/ReLU[expand1x1_activation]/relu__0" [id=39, label="relu__IW128_OW128_#29", style=filled, type=relu_]; -"40 SqueezeNet/Sequential[features]/Fire[7]/NNCFConv2d[expand3x3]/conv2d_0" [color=lightblue, id=40, label="conv2d_IW32_OW128_G12_#30", style=filled, type=conv2d]; -"41 SqueezeNet/Sequential[features]/Fire[7]/ReLU[expand3x3_activation]/relu__0" [id=41, label="relu__IW128_OW128_#31", style=filled, type=relu_]; -"42 SqueezeNet/Sequential[features]/Fire[7]/cat_0" [id=42, label="cat_IW[128, 128]_OW256_#32", style=filled, type=cat]; -"43 SqueezeNet/Sequential[features]/Fire[8]/NNCFConv2d[squeeze]/conv2d_0" [color=lightblue, id=43, label="conv2d_IW256_OW48_G13_#33", style=filled, 
type=conv2d]; -"44 SqueezeNet/Sequential[features]/Fire[9]/NNCFConv2d[expand1x1]/conv2d_0" [color=lightblue, id=44, label="conv2d_IW48_OW192_G17_#42", style=filled, type=conv2d]; -"45 SqueezeNet/Sequential[features]/Fire[9]/ReLU[expand1x1_activation]/relu__0" [id=45, label="relu__IW192_OW192_#43", style=filled, type=relu_]; -"46 SqueezeNet/Sequential[features]/Fire[9]/NNCFConv2d[expand3x3]/conv2d_0" [color=lightblue, id=46, label="conv2d_IW48_OW192_G18_#44", style=filled, type=conv2d]; -"47 SqueezeNet/Sequential[features]/Fire[9]/ReLU[expand3x3_activation]/relu__0" [id=47, label="relu__IW192_OW192_#45", style=filled, type=relu_]; -"48 SqueezeNet/Sequential[features]/Fire[9]/cat_0" [id=48, label="cat_IW[192, 192]_OW384_#46", style=filled, type=cat]; -"49 SqueezeNet/Sequential[features]/Fire[10]/NNCFConv2d[squeeze]/conv2d_0" [color=lightblue, id=49, label="conv2d_IW384_OW64_G19_#47", style=filled, type=conv2d]; -"50 SqueezeNet/Sequential[features]/Fire[10]/ReLU[squeeze_activation]/relu__0" [id=50, label="relu__IW64_OW64_#48", style=filled, type=relu_]; -"51 SqueezeNet/Sequential[features]/Fire[10]/NNCFConv2d[expand1x1]/conv2d_0" [color=lightblue, id=51, label="conv2d_IW64_OW256_G20_#49", style=filled, type=conv2d]; -"52 SqueezeNet/Sequential[features]/Fire[10]/ReLU[expand1x1_activation]/relu__0" [id=52, label="relu__IW256_OW256_#50", style=filled, type=relu_]; -"53 SqueezeNet/Sequential[features]/Fire[10]/NNCFConv2d[expand3x3]/conv2d_0" [color=lightblue, id=53, label="conv2d_IW64_OW256_G21_#51", style=filled, type=conv2d]; -"54 SqueezeNet/Sequential[features]/Fire[10]/ReLU[expand3x3_activation]/relu__0" [id=54, label="relu__IW256_OW256_#52", style=filled, type=relu_]; -"55 SqueezeNet/Sequential[features]/Fire[10]/cat_0" [id=55, label="cat_IW[256, 256]_OW512_#53", style=filled, type=cat]; -"56 SqueezeNet/Sequential[features]/MaxPool2d[11]/max_pool2d_0" [id=56, label="max_pool2d_IW512_OW512_#54", style=filled, type=max_pool2d]; -"57 SqueezeNet/Sequential[features]/Fire[12]/NNCFConv2d[squeeze]/conv2d_0" [color=lightblue, id=57, label="conv2d_IW512_OW64_G22_#55", style=filled, type=conv2d]; -"58 SqueezeNet/Sequential[features]/Fire[12]/ReLU[squeeze_activation]/relu__0" [id=58, label="relu__IW64_OW64_#56", style=filled, type=relu_]; -"59 SqueezeNet/Sequential[features]/Fire[12]/NNCFConv2d[expand1x1]/conv2d_0" [color=lightblue, id=59, label="conv2d_IW64_OW256_G23_#57", style=filled, type=conv2d]; -"60 SqueezeNet/Sequential[features]/Fire[12]/ReLU[expand1x1_activation]/relu__0" [id=60, label="relu__IW256_OW256_#58", style=filled, type=relu_]; -"61 SqueezeNet/Sequential[features]/Fire[12]/NNCFConv2d[expand3x3]/conv2d_0" [color=lightblue, id=61, label="conv2d_IW64_OW256_G24_#59", style=filled, type=conv2d]; -"62 SqueezeNet/Sequential[features]/Fire[12]/ReLU[expand3x3_activation]/relu__0" [id=62, label="relu__IW256_OW256_#60", style=filled, type=relu_]; -"63 SqueezeNet/Sequential[features]/Fire[12]/cat_0" [id=63, label="cat_IW[256, 256]_OW512_#61", style=filled, type=cat]; -"64 SqueezeNet/Sequential[classifier]/Dropout[0]/dropout_0" [id=64, label="dropout_IW512_OW512_#62", style=filled, type=dropout]; -"65 SqueezeNet/Sequential[classifier]/NNCFConv2d[1]/conv2d_0" [color=lightblue, id=65, label="conv2d_IW512_#63", style=filled, type=conv2d]; -"66 SqueezeNet/Sequential[classifier]/ReLU[2]/relu__0" [id=66, label="relu__#64", style=filled, type=relu_]; -"67 SqueezeNet/Sequential[classifier]/AdaptiveAvgPool2d[3]/adaptive_avg_pool2d_0" [id=67, label="adaptive_avg_pool2d_#65", style=filled, 
type=adaptive_avg_pool2d]; -"68 SqueezeNet/view_0" [id=68, label="view_#66", style=filled, type=view]; -"69 /nncf_model_output_0" [id=69, label="nncf_model_output_#67", style=filled, type=nncf_model_output]; -"0 /nncf_model_input_0" -> "11 SqueezeNet/Sequential[features]/NNCFConv2d[0]/conv2d_0" [label="(1, 3, 32, 32)", style=solid]; -"1 SqueezeNet/Sequential[features]/NNCFConv2d[0]/ModuleDict[pre_ops]/UpdateWeight[3]/ElasticKernelConv2DOp[op]/linear_0" -> "2 SqueezeNet/Sequential[features]/NNCFConv2d[0]/ModuleDict[pre_ops]/UpdateWeight[3]/ElasticKernelConv2DOp[op]/view_0" [label="(288, 25)", style=solid]; -"2 SqueezeNet/Sequential[features]/NNCFConv2d[0]/ModuleDict[pre_ops]/UpdateWeight[3]/ElasticKernelConv2DOp[op]/view_0" -> "3 SqueezeNet/Sequential[features]/NNCFConv2d[0]/ModuleDict[pre_ops]/UpdateWeight[3]/ElasticKernelConv2DOp[op]/view_1" [label="(96, 3, 25)", style=solid]; -"3 SqueezeNet/Sequential[features]/NNCFConv2d[0]/ModuleDict[pre_ops]/UpdateWeight[3]/ElasticKernelConv2DOp[op]/view_1" -> "4 SqueezeNet/Sequential[features]/NNCFConv2d[0]/ModuleDict[pre_ops]/UpdateWeight[3]/ElasticKernelConv2DOp[op]/__getitem___0" [label="(96, 3, 5, 5)", style=solid]; -"4 SqueezeNet/Sequential[features]/NNCFConv2d[0]/ModuleDict[pre_ops]/UpdateWeight[3]/ElasticKernelConv2DOp[op]/__getitem___0" -> "5 SqueezeNet/Sequential[features]/NNCFConv2d[0]/ModuleDict[pre_ops]/UpdateWeight[3]/ElasticKernelConv2DOp[op]/contiguous_0" [label="(96, 3, 3, 3)", style=solid]; -"5 SqueezeNet/Sequential[features]/NNCFConv2d[0]/ModuleDict[pre_ops]/UpdateWeight[3]/ElasticKernelConv2DOp[op]/contiguous_0" -> "6 SqueezeNet/Sequential[features]/NNCFConv2d[0]/ModuleDict[pre_ops]/UpdateWeight[3]/ElasticKernelConv2DOp[op]/view_2" [label="(96, 3, 3, 3)", style=solid]; -"6 SqueezeNet/Sequential[features]/NNCFConv2d[0]/ModuleDict[pre_ops]/UpdateWeight[3]/ElasticKernelConv2DOp[op]/view_2" -> "7 SqueezeNet/Sequential[features]/NNCFConv2d[0]/ModuleDict[pre_ops]/UpdateWeight[3]/ElasticKernelConv2DOp[op]/view_3" [label="(96, 3, 9)", style=solid]; -"7 SqueezeNet/Sequential[features]/NNCFConv2d[0]/ModuleDict[pre_ops]/UpdateWeight[3]/ElasticKernelConv2DOp[op]/view_3" -> "8 SqueezeNet/Sequential[features]/NNCFConv2d[0]/ModuleDict[pre_ops]/UpdateWeight[3]/ElasticKernelConv2DOp[op]/linear_1" [label="(288, 9)", style=solid]; -"8 SqueezeNet/Sequential[features]/NNCFConv2d[0]/ModuleDict[pre_ops]/UpdateWeight[3]/ElasticKernelConv2DOp[op]/linear_1" -> "9 SqueezeNet/Sequential[features]/NNCFConv2d[0]/ModuleDict[pre_ops]/UpdateWeight[3]/ElasticKernelConv2DOp[op]/view_4" [label="(288, 9)", style=solid]; -"9 SqueezeNet/Sequential[features]/NNCFConv2d[0]/ModuleDict[pre_ops]/UpdateWeight[3]/ElasticKernelConv2DOp[op]/view_4" -> "10 SqueezeNet/Sequential[features]/NNCFConv2d[0]/ModuleDict[pre_ops]/UpdateWeight[3]/ElasticKernelConv2DOp[op]/view_5" [label="(96, 3, 9)", style=solid]; -"10 SqueezeNet/Sequential[features]/NNCFConv2d[0]/ModuleDict[pre_ops]/UpdateWeight[3]/ElasticKernelConv2DOp[op]/view_5" -> "11 SqueezeNet/Sequential[features]/NNCFConv2d[0]/conv2d_0" [label="(96, 3, 3, 3)", style=solid]; -"11 SqueezeNet/Sequential[features]/NNCFConv2d[0]/conv2d_0" -> "12 SqueezeNet/Sequential[features]/ReLU[1]/relu__0" [label="(1, 96, 16, 16)", style=solid]; -"12 SqueezeNet/Sequential[features]/ReLU[1]/relu__0" -> "13 SqueezeNet/Sequential[features]/MaxPool2d[2]/max_pool2d_0" [label="(1, 96, 16, 16)", style=solid]; -"13 SqueezeNet/Sequential[features]/MaxPool2d[2]/max_pool2d_0" -> "14 SqueezeNet/Sequential[features]/Fire[3]/NNCFConv2d[squeeze]/conv2d_0" 
[label="(1, 96, 8, 8)", style=solid]; -"14 SqueezeNet/Sequential[features]/Fire[3]/NNCFConv2d[squeeze]/conv2d_0" -> "15 SqueezeNet/Sequential[features]/Fire[3]/ReLU[squeeze_activation]/relu__0" [label="(1, 16, 8, 8)", style=solid]; -"15 SqueezeNet/Sequential[features]/Fire[3]/ReLU[squeeze_activation]/relu__0" -> "16 SqueezeNet/Sequential[features]/Fire[3]/NNCFConv2d[expand1x1]/conv2d_0" [label="(1, 16, 8, 8)", style=solid]; -"15 SqueezeNet/Sequential[features]/Fire[3]/ReLU[squeeze_activation]/relu__0" -> "18 SqueezeNet/Sequential[features]/Fire[3]/NNCFConv2d[expand3x3]/conv2d_0" [label="(1, 16, 8, 8)", style=solid]; -"16 SqueezeNet/Sequential[features]/Fire[3]/NNCFConv2d[expand1x1]/conv2d_0" -> "17 SqueezeNet/Sequential[features]/Fire[3]/ReLU[expand1x1_activation]/relu__0" [label="(1, 64, 8, 8)", style=solid]; -"17 SqueezeNet/Sequential[features]/Fire[3]/ReLU[expand1x1_activation]/relu__0" -> "20 SqueezeNet/Sequential[features]/Fire[3]/cat_0" [label="(1, 64, 8, 8)", style=solid]; -"18 SqueezeNet/Sequential[features]/Fire[3]/NNCFConv2d[expand3x3]/conv2d_0" -> "19 SqueezeNet/Sequential[features]/Fire[3]/ReLU[expand3x3_activation]/relu__0" [label="(1, 64, 8, 8)", style=solid]; -"19 SqueezeNet/Sequential[features]/Fire[3]/ReLU[expand3x3_activation]/relu__0" -> "20 SqueezeNet/Sequential[features]/Fire[3]/cat_0" [label="(1, 64, 8, 8)", style=solid]; -"20 SqueezeNet/Sequential[features]/Fire[3]/cat_0" -> "21 SqueezeNet/Sequential[features]/Fire[4]/NNCFConv2d[squeeze]/conv2d_0" [label="(1, 128, 8, 8)", style=solid]; -"21 SqueezeNet/Sequential[features]/Fire[4]/NNCFConv2d[squeeze]/conv2d_0" -> "22 SqueezeNet/Sequential[features]/Fire[4]/ReLU[squeeze_activation]/relu__0" [label="(1, 16, 8, 8)", style=solid]; -"22 SqueezeNet/Sequential[features]/Fire[4]/ReLU[squeeze_activation]/relu__0" -> "23 SqueezeNet/Sequential[features]/Fire[4]/NNCFConv2d[expand1x1]/conv2d_0" [label="(1, 16, 8, 8)", style=solid]; -"22 SqueezeNet/Sequential[features]/Fire[4]/ReLU[squeeze_activation]/relu__0" -> "25 SqueezeNet/Sequential[features]/Fire[4]/NNCFConv2d[expand3x3]/conv2d_0" [label="(1, 16, 8, 8)", style=solid]; -"23 SqueezeNet/Sequential[features]/Fire[4]/NNCFConv2d[expand1x1]/conv2d_0" -> "24 SqueezeNet/Sequential[features]/Fire[4]/ReLU[expand1x1_activation]/relu__0" [label="(1, 64, 8, 8)", style=solid]; -"24 SqueezeNet/Sequential[features]/Fire[4]/ReLU[expand1x1_activation]/relu__0" -> "27 SqueezeNet/Sequential[features]/Fire[4]/cat_0" [label="(1, 64, 8, 8)", style=solid]; -"25 SqueezeNet/Sequential[features]/Fire[4]/NNCFConv2d[expand3x3]/conv2d_0" -> "26 SqueezeNet/Sequential[features]/Fire[4]/ReLU[expand3x3_activation]/relu__0" [label="(1, 64, 8, 8)", style=solid]; -"26 SqueezeNet/Sequential[features]/Fire[4]/ReLU[expand3x3_activation]/relu__0" -> "27 SqueezeNet/Sequential[features]/Fire[4]/cat_0" [label="(1, 64, 8, 8)", style=solid]; -"27 SqueezeNet/Sequential[features]/Fire[4]/cat_0" -> "28 SqueezeNet/Sequential[features]/Fire[5]/NNCFConv2d[squeeze]/conv2d_0" [label="(1, 128, 8, 8)", style=solid]; -"28 SqueezeNet/Sequential[features]/Fire[5]/NNCFConv2d[squeeze]/conv2d_0" -> "29 SqueezeNet/Sequential[features]/Fire[5]/ReLU[squeeze_activation]/relu__0" [label="(1, 32, 8, 8)", style=solid]; -"29 SqueezeNet/Sequential[features]/Fire[5]/ReLU[squeeze_activation]/relu__0" -> "30 SqueezeNet/Sequential[features]/Fire[5]/NNCFConv2d[expand1x1]/conv2d_0" [label="(1, 32, 8, 8)", style=solid]; -"29 SqueezeNet/Sequential[features]/Fire[5]/ReLU[squeeze_activation]/relu__0" -> "32 
SqueezeNet/Sequential[features]/Fire[5]/NNCFConv2d[expand3x3]/conv2d_0" [label="(1, 32, 8, 8)", style=solid]; -"30 SqueezeNet/Sequential[features]/Fire[5]/NNCFConv2d[expand1x1]/conv2d_0" -> "31 SqueezeNet/Sequential[features]/Fire[5]/ReLU[expand1x1_activation]/relu__0" [label="(1, 128, 8, 8)", style=solid]; -"31 SqueezeNet/Sequential[features]/Fire[5]/ReLU[expand1x1_activation]/relu__0" -> "34 SqueezeNet/Sequential[features]/Fire[5]/cat_0" [label="(1, 128, 8, 8)", style=solid]; -"32 SqueezeNet/Sequential[features]/Fire[5]/NNCFConv2d[expand3x3]/conv2d_0" -> "33 SqueezeNet/Sequential[features]/Fire[5]/ReLU[expand3x3_activation]/relu__0" [label="(1, 128, 8, 8)", style=solid]; -"33 SqueezeNet/Sequential[features]/Fire[5]/ReLU[expand3x3_activation]/relu__0" -> "34 SqueezeNet/Sequential[features]/Fire[5]/cat_0" [label="(1, 128, 8, 8)", style=solid]; -"34 SqueezeNet/Sequential[features]/Fire[5]/cat_0" -> "35 SqueezeNet/Sequential[features]/MaxPool2d[6]/max_pool2d_0" [label="(1, 256, 8, 8)", style=solid]; -"35 SqueezeNet/Sequential[features]/MaxPool2d[6]/max_pool2d_0" -> "36 SqueezeNet/Sequential[features]/Fire[7]/NNCFConv2d[squeeze]/conv2d_0" [label="(1, 256, 4, 4)", style=solid]; -"36 SqueezeNet/Sequential[features]/Fire[7]/NNCFConv2d[squeeze]/conv2d_0" -> "37 SqueezeNet/Sequential[features]/Fire[7]/ReLU[squeeze_activation]/relu__0" [label="(1, 32, 4, 4)", style=solid]; -"37 SqueezeNet/Sequential[features]/Fire[7]/ReLU[squeeze_activation]/relu__0" -> "38 SqueezeNet/Sequential[features]/Fire[7]/NNCFConv2d[expand1x1]/conv2d_0" [label="(1, 32, 4, 4)", style=solid]; -"37 SqueezeNet/Sequential[features]/Fire[7]/ReLU[squeeze_activation]/relu__0" -> "40 SqueezeNet/Sequential[features]/Fire[7]/NNCFConv2d[expand3x3]/conv2d_0" [label="(1, 32, 4, 4)", style=solid]; -"38 SqueezeNet/Sequential[features]/Fire[7]/NNCFConv2d[expand1x1]/conv2d_0" -> "39 SqueezeNet/Sequential[features]/Fire[7]/ReLU[expand1x1_activation]/relu__0" [label="(1, 128, 4, 4)", style=solid]; -"39 SqueezeNet/Sequential[features]/Fire[7]/ReLU[expand1x1_activation]/relu__0" -> "42 SqueezeNet/Sequential[features]/Fire[7]/cat_0" [label="(1, 128, 4, 4)", style=solid]; -"40 SqueezeNet/Sequential[features]/Fire[7]/NNCFConv2d[expand3x3]/conv2d_0" -> "41 SqueezeNet/Sequential[features]/Fire[7]/ReLU[expand3x3_activation]/relu__0" [label="(1, 128, 4, 4)", style=solid]; -"41 SqueezeNet/Sequential[features]/Fire[7]/ReLU[expand3x3_activation]/relu__0" -> "42 SqueezeNet/Sequential[features]/Fire[7]/cat_0" [label="(1, 128, 4, 4)", style=solid]; -"42 SqueezeNet/Sequential[features]/Fire[7]/cat_0" -> "43 SqueezeNet/Sequential[features]/Fire[8]/NNCFConv2d[squeeze]/conv2d_0" [label="(1, 256, 4, 4)", style=solid]; -"43 SqueezeNet/Sequential[features]/Fire[8]/NNCFConv2d[squeeze]/conv2d_0" -> "44 SqueezeNet/Sequential[features]/Fire[9]/NNCFConv2d[expand1x1]/conv2d_0" [label="(1, 48, 4, 4)", style=solid]; -"43 SqueezeNet/Sequential[features]/Fire[8]/NNCFConv2d[squeeze]/conv2d_0" -> "46 SqueezeNet/Sequential[features]/Fire[9]/NNCFConv2d[expand3x3]/conv2d_0" [label="(1, 48, 4, 4)", style=solid]; -"44 SqueezeNet/Sequential[features]/Fire[9]/NNCFConv2d[expand1x1]/conv2d_0" -> "45 SqueezeNet/Sequential[features]/Fire[9]/ReLU[expand1x1_activation]/relu__0" [label="(1, 192, 4, 4)", style=solid]; -"45 SqueezeNet/Sequential[features]/Fire[9]/ReLU[expand1x1_activation]/relu__0" -> "48 SqueezeNet/Sequential[features]/Fire[9]/cat_0" [label="(1, 192, 4, 4)", style=solid]; -"46 SqueezeNet/Sequential[features]/Fire[9]/NNCFConv2d[expand3x3]/conv2d_0" -> "47 
SqueezeNet/Sequential[features]/Fire[9]/ReLU[expand3x3_activation]/relu__0" [label="(1, 192, 4, 4)", style=solid]; -"47 SqueezeNet/Sequential[features]/Fire[9]/ReLU[expand3x3_activation]/relu__0" -> "48 SqueezeNet/Sequential[features]/Fire[9]/cat_0" [label="(1, 192, 4, 4)", style=solid]; -"48 SqueezeNet/Sequential[features]/Fire[9]/cat_0" -> "49 SqueezeNet/Sequential[features]/Fire[10]/NNCFConv2d[squeeze]/conv2d_0" [label="(1, 384, 4, 4)", style=solid]; -"49 SqueezeNet/Sequential[features]/Fire[10]/NNCFConv2d[squeeze]/conv2d_0" -> "50 SqueezeNet/Sequential[features]/Fire[10]/ReLU[squeeze_activation]/relu__0" [label="(1, 64, 4, 4)", style=solid]; -"50 SqueezeNet/Sequential[features]/Fire[10]/ReLU[squeeze_activation]/relu__0" -> "51 SqueezeNet/Sequential[features]/Fire[10]/NNCFConv2d[expand1x1]/conv2d_0" [label="(1, 64, 4, 4)", style=solid]; -"50 SqueezeNet/Sequential[features]/Fire[10]/ReLU[squeeze_activation]/relu__0" -> "53 SqueezeNet/Sequential[features]/Fire[10]/NNCFConv2d[expand3x3]/conv2d_0" [label="(1, 64, 4, 4)", style=solid]; -"51 SqueezeNet/Sequential[features]/Fire[10]/NNCFConv2d[expand1x1]/conv2d_0" -> "52 SqueezeNet/Sequential[features]/Fire[10]/ReLU[expand1x1_activation]/relu__0" [label="(1, 256, 4, 4)", style=solid]; -"52 SqueezeNet/Sequential[features]/Fire[10]/ReLU[expand1x1_activation]/relu__0" -> "55 SqueezeNet/Sequential[features]/Fire[10]/cat_0" [label="(1, 256, 4, 4)", style=solid]; -"53 SqueezeNet/Sequential[features]/Fire[10]/NNCFConv2d[expand3x3]/conv2d_0" -> "54 SqueezeNet/Sequential[features]/Fire[10]/ReLU[expand3x3_activation]/relu__0" [label="(1, 256, 4, 4)", style=solid]; -"54 SqueezeNet/Sequential[features]/Fire[10]/ReLU[expand3x3_activation]/relu__0" -> "55 SqueezeNet/Sequential[features]/Fire[10]/cat_0" [label="(1, 256, 4, 4)", style=solid]; -"55 SqueezeNet/Sequential[features]/Fire[10]/cat_0" -> "56 SqueezeNet/Sequential[features]/MaxPool2d[11]/max_pool2d_0" [label="(1, 512, 4, 4)", style=solid]; -"56 SqueezeNet/Sequential[features]/MaxPool2d[11]/max_pool2d_0" -> "57 SqueezeNet/Sequential[features]/Fire[12]/NNCFConv2d[squeeze]/conv2d_0" [label="(1, 512, 2, 2)", style=solid]; -"57 SqueezeNet/Sequential[features]/Fire[12]/NNCFConv2d[squeeze]/conv2d_0" -> "58 SqueezeNet/Sequential[features]/Fire[12]/ReLU[squeeze_activation]/relu__0" [label="(1, 64, 2, 2)", style=solid]; -"58 SqueezeNet/Sequential[features]/Fire[12]/ReLU[squeeze_activation]/relu__0" -> "59 SqueezeNet/Sequential[features]/Fire[12]/NNCFConv2d[expand1x1]/conv2d_0" [label="(1, 64, 2, 2)", style=solid]; -"58 SqueezeNet/Sequential[features]/Fire[12]/ReLU[squeeze_activation]/relu__0" -> "61 SqueezeNet/Sequential[features]/Fire[12]/NNCFConv2d[expand3x3]/conv2d_0" [label="(1, 64, 2, 2)", style=solid]; -"59 SqueezeNet/Sequential[features]/Fire[12]/NNCFConv2d[expand1x1]/conv2d_0" -> "60 SqueezeNet/Sequential[features]/Fire[12]/ReLU[expand1x1_activation]/relu__0" [label="(1, 256, 2, 2)", style=solid]; -"60 SqueezeNet/Sequential[features]/Fire[12]/ReLU[expand1x1_activation]/relu__0" -> "63 SqueezeNet/Sequential[features]/Fire[12]/cat_0" [label="(1, 256, 2, 2)", style=solid]; -"61 SqueezeNet/Sequential[features]/Fire[12]/NNCFConv2d[expand3x3]/conv2d_0" -> "62 SqueezeNet/Sequential[features]/Fire[12]/ReLU[expand3x3_activation]/relu__0" [label="(1, 256, 2, 2)", style=solid]; -"62 SqueezeNet/Sequential[features]/Fire[12]/ReLU[expand3x3_activation]/relu__0" -> "63 SqueezeNet/Sequential[features]/Fire[12]/cat_0" [label="(1, 256, 2, 2)", style=solid]; -"63 SqueezeNet/Sequential[features]/Fire[12]/cat_0" -> "64 
SqueezeNet/Sequential[classifier]/Dropout[0]/dropout_0" [label="(1, 512, 2, 2)", style=solid]; -"64 SqueezeNet/Sequential[classifier]/Dropout[0]/dropout_0" -> "65 SqueezeNet/Sequential[classifier]/NNCFConv2d[1]/conv2d_0" [label="(1, 512, 2, 2)", style=solid]; -"65 SqueezeNet/Sequential[classifier]/NNCFConv2d[1]/conv2d_0" -> "66 SqueezeNet/Sequential[classifier]/ReLU[2]/relu__0" [label="(1, 1000, 2, 2)", style=solid]; -"66 SqueezeNet/Sequential[classifier]/ReLU[2]/relu__0" -> "67 SqueezeNet/Sequential[classifier]/AdaptiveAvgPool2d[3]/adaptive_avg_pool2d_0" [label="(1, 1000, 2, 2)", style=solid]; -"67 SqueezeNet/Sequential[classifier]/AdaptiveAvgPool2d[3]/adaptive_avg_pool2d_0" -> "68 SqueezeNet/view_0" [label="(1, 1000, 1, 1)", style=solid]; -"68 SqueezeNet/view_0" -> "69 /nncf_model_output_0" [label="(1, 1000)", style=solid]; +"1 SqueezeNet/Sequential[features]/NNCFConv2d[0]/conv2d_0" [color=lightblue, id=1, label="conv2d_OW96_G0_#1", style=filled, type=conv2d]; +"2 SqueezeNet/Sequential[features]/ReLU[1]/relu__0" [id=2, label="relu__IW96_OW96_#2", style=filled, type=relu_]; +"3 SqueezeNet/Sequential[features]/MaxPool2d[2]/max_pool2d_0" [id=3, label="max_pool2d_IW96_OW96_#3", style=filled, type=max_pool2d]; +"4 SqueezeNet/Sequential[features]/Fire[3]/NNCFConv2d[squeeze]/conv2d_0" [color=lightblue, id=4, label="conv2d_IW96_OW16_G1_#4", style=filled, type=conv2d]; +"5 SqueezeNet/Sequential[features]/Fire[3]/ReLU[squeeze_activation]/relu__0" [id=5, label="relu__IW16_OW16_#5", style=filled, type=relu_]; +"6 SqueezeNet/Sequential[features]/Fire[3]/NNCFConv2d[expand1x1]/conv2d_0" [color=lightblue, id=6, label="conv2d_IW16_OW64_G2_#6", style=filled, type=conv2d]; +"7 SqueezeNet/Sequential[features]/Fire[3]/ReLU[expand1x1_activation]/relu__0" [id=7, label="relu__IW64_OW64_#7", style=filled, type=relu_]; +"8 SqueezeNet/Sequential[features]/Fire[3]/NNCFConv2d[expand3x3]/conv2d_0" [color=lightblue, id=8, label="conv2d_IW16_OW64_G3_#8", style=filled, type=conv2d]; +"9 SqueezeNet/Sequential[features]/Fire[3]/ReLU[expand3x3_activation]/relu__0" [id=9, label="relu__IW64_OW64_#9", style=filled, type=relu_]; +"10 SqueezeNet/Sequential[features]/Fire[3]/cat_0" [id=10, label="cat_IW[64, 64]_OW128_#10", style=filled, type=cat]; +"11 SqueezeNet/Sequential[features]/Fire[4]/NNCFConv2d[squeeze]/conv2d_0" [color=lightblue, id=11, label="conv2d_IW128_OW16_G4_#11", style=filled, type=conv2d]; +"12 SqueezeNet/Sequential[features]/Fire[4]/ReLU[squeeze_activation]/relu__0" [id=12, label="relu__IW16_OW16_#12", style=filled, type=relu_]; +"13 SqueezeNet/Sequential[features]/Fire[4]/NNCFConv2d[expand1x1]/conv2d_0" [color=lightblue, id=13, label="conv2d_IW16_OW64_G5_#13", style=filled, type=conv2d]; +"14 SqueezeNet/Sequential[features]/Fire[4]/ReLU[expand1x1_activation]/relu__0" [id=14, label="relu__IW64_OW64_#14", style=filled, type=relu_]; +"15 SqueezeNet/Sequential[features]/Fire[4]/NNCFConv2d[expand3x3]/conv2d_0" [color=lightblue, id=15, label="conv2d_IW16_OW64_G6_#15", style=filled, type=conv2d]; +"16 SqueezeNet/Sequential[features]/Fire[4]/ReLU[expand3x3_activation]/relu__0" [id=16, label="relu__IW64_OW64_#16", style=filled, type=relu_]; +"17 SqueezeNet/Sequential[features]/Fire[4]/cat_0" [id=17, label="cat_IW[64, 64]_OW128_#17", style=filled, type=cat]; +"18 SqueezeNet/Sequential[features]/Fire[5]/NNCFConv2d[squeeze]/conv2d_0" [color=lightblue, id=18, label="conv2d_IW128_OW32_G7_#18", style=filled, type=conv2d]; +"19 SqueezeNet/Sequential[features]/Fire[5]/ReLU[squeeze_activation]/relu__0" [id=19, 
label="relu__IW32_OW32_#19", style=filled, type=relu_]; +"20 SqueezeNet/Sequential[features]/Fire[5]/NNCFConv2d[expand1x1]/conv2d_0" [color=lightblue, id=20, label="conv2d_IW32_OW128_G8_#20", style=filled, type=conv2d]; +"21 SqueezeNet/Sequential[features]/Fire[5]/ReLU[expand1x1_activation]/relu__0" [id=21, label="relu__IW128_OW128_#21", style=filled, type=relu_]; +"22 SqueezeNet/Sequential[features]/Fire[5]/NNCFConv2d[expand3x3]/conv2d_0" [color=lightblue, id=22, label="conv2d_IW32_OW128_G9_#22", style=filled, type=conv2d]; +"23 SqueezeNet/Sequential[features]/Fire[5]/ReLU[expand3x3_activation]/relu__0" [id=23, label="relu__IW128_OW128_#23", style=filled, type=relu_]; +"24 SqueezeNet/Sequential[features]/Fire[5]/cat_0" [id=24, label="cat_IW[128, 128]_OW256_#24", style=filled, type=cat]; +"25 SqueezeNet/Sequential[features]/MaxPool2d[6]/max_pool2d_0" [id=25, label="max_pool2d_IW256_OW256_#25", style=filled, type=max_pool2d]; +"26 SqueezeNet/Sequential[features]/Fire[8]/NNCFConv2d[squeeze]/conv2d_0" [color=lightblue, id=26, label="conv2d_IW256_OW48_G13_#33", style=filled, type=conv2d]; +"27 SqueezeNet/Sequential[features]/Fire[8]/ReLU[squeeze_activation]/relu__0" [id=27, label="relu__IW48_OW48_#34", style=filled, type=relu_]; +"28 SqueezeNet/Sequential[features]/Fire[8]/NNCFConv2d[expand1x1]/conv2d_0" [color=lightblue, id=28, label="conv2d_IW48_OW192_G14_#35", style=filled, type=conv2d]; +"29 SqueezeNet/Sequential[features]/Fire[8]/ReLU[expand1x1_activation]/relu__0" [id=29, label="relu__IW192_OW192_#36", style=filled, type=relu_]; +"30 SqueezeNet/Sequential[features]/Fire[8]/NNCFConv2d[expand3x3]/conv2d_0" [color=lightblue, id=30, label="conv2d_IW48_OW192_G15_#37", style=filled, type=conv2d]; +"31 SqueezeNet/Sequential[features]/Fire[8]/ReLU[expand3x3_activation]/relu__0" [id=31, label="relu__IW192_OW192_#38", style=filled, type=relu_]; +"32 SqueezeNet/Sequential[features]/Fire[8]/cat_0" [id=32, label="cat_IW[192, 192]_OW384_#39", style=filled, type=cat]; +"33 SqueezeNet/Sequential[features]/Fire[9]/NNCFConv2d[squeeze]/conv2d_0" [color=lightblue, id=33, label="conv2d_IW384_OW48_G16_#40", style=filled, type=conv2d]; +"34 SqueezeNet/Sequential[features]/Fire[9]/ReLU[squeeze_activation]/relu__0" [id=34, label="relu__IW48_OW48_#41", style=filled, type=relu_]; +"35 SqueezeNet/Sequential[features]/Fire[9]/NNCFConv2d[expand1x1]/conv2d_0" [color=lightblue, id=35, label="conv2d_IW48_OW192_G17_#42", style=filled, type=conv2d]; +"36 SqueezeNet/Sequential[features]/Fire[9]/ReLU[expand1x1_activation]/relu__0" [id=36, label="relu__IW192_OW192_#43", style=filled, type=relu_]; +"37 SqueezeNet/Sequential[features]/Fire[9]/NNCFConv2d[expand3x3]/conv2d_0" [color=lightblue, id=37, label="conv2d_IW48_OW192_G18_#44", style=filled, type=conv2d]; +"38 SqueezeNet/Sequential[features]/Fire[9]/ReLU[expand3x3_activation]/relu__0" [id=38, label="relu__IW192_OW192_#45", style=filled, type=relu_]; +"39 SqueezeNet/Sequential[features]/Fire[9]/cat_0" [id=39, label="cat_IW[192, 192]_OW384_#46", style=filled, type=cat]; +"40 SqueezeNet/Sequential[features]/Fire[10]/NNCFConv2d[squeeze]/conv2d_0" [color=lightblue, id=40, label="conv2d_IW384_OW64_G19_#47", style=filled, type=conv2d]; +"41 SqueezeNet/Sequential[features]/Fire[10]/ReLU[squeeze_activation]/relu__0" [id=41, label="relu__IW64_OW64_#48", style=filled, type=relu_]; +"42 SqueezeNet/Sequential[features]/Fire[10]/NNCFConv2d[expand1x1]/conv2d_0" [color=lightblue, id=42, label="conv2d_IW64_OW256_G20_#49", style=filled, type=conv2d]; +"43 
SqueezeNet/Sequential[features]/Fire[10]/ReLU[expand1x1_activation]/relu__0" [id=43, label="relu__IW256_OW256_#50", style=filled, type=relu_]; +"44 SqueezeNet/Sequential[features]/Fire[10]/NNCFConv2d[expand3x3]/conv2d_0" [color=lightblue, id=44, label="conv2d_IW64_OW256_G21_#51", style=filled, type=conv2d]; +"45 SqueezeNet/Sequential[features]/Fire[10]/ReLU[expand3x3_activation]/relu__0" [id=45, label="relu__IW256_OW256_#52", style=filled, type=relu_]; +"46 SqueezeNet/Sequential[features]/Fire[10]/cat_0" [id=46, label="cat_IW[256, 256]_OW512_#53", style=filled, type=cat]; +"47 SqueezeNet/Sequential[features]/MaxPool2d[11]/max_pool2d_0" [id=47, label="max_pool2d_IW512_OW512_#54", style=filled, type=max_pool2d]; +"48 SqueezeNet/Sequential[classifier]/Dropout[0]/dropout_0" [id=48, label="dropout_IW512_OW512_#62", style=filled, type=dropout]; +"49 SqueezeNet/Sequential[classifier]/NNCFConv2d[1]/conv2d_0" [color=lightblue, id=49, label="conv2d_IW512_#63", style=filled, type=conv2d]; +"50 SqueezeNet/Sequential[classifier]/ReLU[2]/relu__0" [id=50, label="relu__#64", style=filled, type=relu_]; +"51 SqueezeNet/Sequential[classifier]/AdaptiveAvgPool2d[3]/adaptive_avg_pool2d_0" [id=51, label="adaptive_avg_pool2d_#65", style=filled, type=adaptive_avg_pool2d]; +"52 SqueezeNet/view_0" [id=52, label="view_#66", style=filled, type=view]; +"53 /nncf_model_output_0" [id=53, label="nncf_model_output_#67", style=filled, type=nncf_model_output]; +"0 /nncf_model_input_0" -> "1 SqueezeNet/Sequential[features]/NNCFConv2d[0]/conv2d_0" [label="(1, 3, 32, 32)", style=solid]; +"1 SqueezeNet/Sequential[features]/NNCFConv2d[0]/conv2d_0" -> "2 SqueezeNet/Sequential[features]/ReLU[1]/relu__0" [label="(1, 96, 13, 13)", style=solid]; +"2 SqueezeNet/Sequential[features]/ReLU[1]/relu__0" -> "3 SqueezeNet/Sequential[features]/MaxPool2d[2]/max_pool2d_0" [label="(1, 96, 13, 13)", style=solid]; +"3 SqueezeNet/Sequential[features]/MaxPool2d[2]/max_pool2d_0" -> "4 SqueezeNet/Sequential[features]/Fire[3]/NNCFConv2d[squeeze]/conv2d_0" [label="(1, 96, 6, 6)", style=solid]; +"4 SqueezeNet/Sequential[features]/Fire[3]/NNCFConv2d[squeeze]/conv2d_0" -> "5 SqueezeNet/Sequential[features]/Fire[3]/ReLU[squeeze_activation]/relu__0" [label="(1, 16, 6, 6)", style=solid]; +"5 SqueezeNet/Sequential[features]/Fire[3]/ReLU[squeeze_activation]/relu__0" -> "6 SqueezeNet/Sequential[features]/Fire[3]/NNCFConv2d[expand1x1]/conv2d_0" [label="(1, 16, 6, 6)", style=solid]; +"5 SqueezeNet/Sequential[features]/Fire[3]/ReLU[squeeze_activation]/relu__0" -> "8 SqueezeNet/Sequential[features]/Fire[3]/NNCFConv2d[expand3x3]/conv2d_0" [label="(1, 16, 6, 6)", style=solid]; +"6 SqueezeNet/Sequential[features]/Fire[3]/NNCFConv2d[expand1x1]/conv2d_0" -> "7 SqueezeNet/Sequential[features]/Fire[3]/ReLU[expand1x1_activation]/relu__0" [label="(1, 64, 6, 6)", style=solid]; +"7 SqueezeNet/Sequential[features]/Fire[3]/ReLU[expand1x1_activation]/relu__0" -> "10 SqueezeNet/Sequential[features]/Fire[3]/cat_0" [label="(1, 64, 6, 6)", style=solid]; +"8 SqueezeNet/Sequential[features]/Fire[3]/NNCFConv2d[expand3x3]/conv2d_0" -> "9 SqueezeNet/Sequential[features]/Fire[3]/ReLU[expand3x3_activation]/relu__0" [label="(1, 64, 6, 6)", style=solid]; +"9 SqueezeNet/Sequential[features]/Fire[3]/ReLU[expand3x3_activation]/relu__0" -> "10 SqueezeNet/Sequential[features]/Fire[3]/cat_0" [label="(1, 64, 6, 6)", style=solid]; +"10 SqueezeNet/Sequential[features]/Fire[3]/cat_0" -> "11 SqueezeNet/Sequential[features]/Fire[4]/NNCFConv2d[squeeze]/conv2d_0" [label="(1, 128, 6, 6)", style=solid]; +"11 
SqueezeNet/Sequential[features]/Fire[4]/NNCFConv2d[squeeze]/conv2d_0" -> "12 SqueezeNet/Sequential[features]/Fire[4]/ReLU[squeeze_activation]/relu__0" [label="(1, 16, 6, 6)", style=solid]; +"12 SqueezeNet/Sequential[features]/Fire[4]/ReLU[squeeze_activation]/relu__0" -> "13 SqueezeNet/Sequential[features]/Fire[4]/NNCFConv2d[expand1x1]/conv2d_0" [label="(1, 16, 6, 6)", style=solid]; +"12 SqueezeNet/Sequential[features]/Fire[4]/ReLU[squeeze_activation]/relu__0" -> "15 SqueezeNet/Sequential[features]/Fire[4]/NNCFConv2d[expand3x3]/conv2d_0" [label="(1, 16, 6, 6)", style=solid]; +"13 SqueezeNet/Sequential[features]/Fire[4]/NNCFConv2d[expand1x1]/conv2d_0" -> "14 SqueezeNet/Sequential[features]/Fire[4]/ReLU[expand1x1_activation]/relu__0" [label="(1, 64, 6, 6)", style=solid]; +"14 SqueezeNet/Sequential[features]/Fire[4]/ReLU[expand1x1_activation]/relu__0" -> "17 SqueezeNet/Sequential[features]/Fire[4]/cat_0" [label="(1, 64, 6, 6)", style=solid]; +"15 SqueezeNet/Sequential[features]/Fire[4]/NNCFConv2d[expand3x3]/conv2d_0" -> "16 SqueezeNet/Sequential[features]/Fire[4]/ReLU[expand3x3_activation]/relu__0" [label="(1, 64, 6, 6)", style=solid]; +"16 SqueezeNet/Sequential[features]/Fire[4]/ReLU[expand3x3_activation]/relu__0" -> "17 SqueezeNet/Sequential[features]/Fire[4]/cat_0" [label="(1, 64, 6, 6)", style=solid]; +"17 SqueezeNet/Sequential[features]/Fire[4]/cat_0" -> "18 SqueezeNet/Sequential[features]/Fire[5]/NNCFConv2d[squeeze]/conv2d_0" [label="(1, 128, 6, 6)", style=solid]; +"18 SqueezeNet/Sequential[features]/Fire[5]/NNCFConv2d[squeeze]/conv2d_0" -> "19 SqueezeNet/Sequential[features]/Fire[5]/ReLU[squeeze_activation]/relu__0" [label="(1, 32, 6, 6)", style=solid]; +"19 SqueezeNet/Sequential[features]/Fire[5]/ReLU[squeeze_activation]/relu__0" -> "20 SqueezeNet/Sequential[features]/Fire[5]/NNCFConv2d[expand1x1]/conv2d_0" [label="(1, 32, 6, 6)", style=solid]; +"19 SqueezeNet/Sequential[features]/Fire[5]/ReLU[squeeze_activation]/relu__0" -> "22 SqueezeNet/Sequential[features]/Fire[5]/NNCFConv2d[expand3x3]/conv2d_0" [label="(1, 32, 6, 6)", style=solid]; +"20 SqueezeNet/Sequential[features]/Fire[5]/NNCFConv2d[expand1x1]/conv2d_0" -> "21 SqueezeNet/Sequential[features]/Fire[5]/ReLU[expand1x1_activation]/relu__0" [label="(1, 128, 6, 6)", style=solid]; +"21 SqueezeNet/Sequential[features]/Fire[5]/ReLU[expand1x1_activation]/relu__0" -> "24 SqueezeNet/Sequential[features]/Fire[5]/cat_0" [label="(1, 128, 6, 6)", style=solid]; +"22 SqueezeNet/Sequential[features]/Fire[5]/NNCFConv2d[expand3x3]/conv2d_0" -> "23 SqueezeNet/Sequential[features]/Fire[5]/ReLU[expand3x3_activation]/relu__0" [label="(1, 128, 6, 6)", style=solid]; +"23 SqueezeNet/Sequential[features]/Fire[5]/ReLU[expand3x3_activation]/relu__0" -> "24 SqueezeNet/Sequential[features]/Fire[5]/cat_0" [label="(1, 128, 6, 6)", style=solid]; +"24 SqueezeNet/Sequential[features]/Fire[5]/cat_0" -> "25 SqueezeNet/Sequential[features]/MaxPool2d[6]/max_pool2d_0" [label="(1, 256, 6, 6)", style=solid]; +"25 SqueezeNet/Sequential[features]/MaxPool2d[6]/max_pool2d_0" -> "26 SqueezeNet/Sequential[features]/Fire[8]/NNCFConv2d[squeeze]/conv2d_0" [label="(1, 256, 3, 3)", style=solid]; +"26 SqueezeNet/Sequential[features]/Fire[8]/NNCFConv2d[squeeze]/conv2d_0" -> "27 SqueezeNet/Sequential[features]/Fire[8]/ReLU[squeeze_activation]/relu__0" [label="(1, 48, 3, 3)", style=solid]; +"27 SqueezeNet/Sequential[features]/Fire[8]/ReLU[squeeze_activation]/relu__0" -> "28 SqueezeNet/Sequential[features]/Fire[8]/NNCFConv2d[expand1x1]/conv2d_0" [label="(1, 48, 3, 3)", style=solid]; +"27 
SqueezeNet/Sequential[features]/Fire[8]/ReLU[squeeze_activation]/relu__0" -> "30 SqueezeNet/Sequential[features]/Fire[8]/NNCFConv2d[expand3x3]/conv2d_0" [label="(1, 48, 3, 3)", style=solid]; +"28 SqueezeNet/Sequential[features]/Fire[8]/NNCFConv2d[expand1x1]/conv2d_0" -> "29 SqueezeNet/Sequential[features]/Fire[8]/ReLU[expand1x1_activation]/relu__0" [label="(1, 192, 3, 3)", style=solid]; +"29 SqueezeNet/Sequential[features]/Fire[8]/ReLU[expand1x1_activation]/relu__0" -> "32 SqueezeNet/Sequential[features]/Fire[8]/cat_0" [label="(1, 192, 3, 3)", style=solid]; +"30 SqueezeNet/Sequential[features]/Fire[8]/NNCFConv2d[expand3x3]/conv2d_0" -> "31 SqueezeNet/Sequential[features]/Fire[8]/ReLU[expand3x3_activation]/relu__0" [label="(1, 192, 3, 3)", style=solid]; +"31 SqueezeNet/Sequential[features]/Fire[8]/ReLU[expand3x3_activation]/relu__0" -> "32 SqueezeNet/Sequential[features]/Fire[8]/cat_0" [label="(1, 192, 3, 3)", style=solid]; +"32 SqueezeNet/Sequential[features]/Fire[8]/cat_0" -> "33 SqueezeNet/Sequential[features]/Fire[9]/NNCFConv2d[squeeze]/conv2d_0" [label="(1, 384, 3, 3)", style=solid]; +"33 SqueezeNet/Sequential[features]/Fire[9]/NNCFConv2d[squeeze]/conv2d_0" -> "34 SqueezeNet/Sequential[features]/Fire[9]/ReLU[squeeze_activation]/relu__0" [label="(1, 48, 3, 3)", style=solid]; +"34 SqueezeNet/Sequential[features]/Fire[9]/ReLU[squeeze_activation]/relu__0" -> "35 SqueezeNet/Sequential[features]/Fire[9]/NNCFConv2d[expand1x1]/conv2d_0" [label="(1, 48, 3, 3)", style=solid]; +"34 SqueezeNet/Sequential[features]/Fire[9]/ReLU[squeeze_activation]/relu__0" -> "37 SqueezeNet/Sequential[features]/Fire[9]/NNCFConv2d[expand3x3]/conv2d_0" [label="(1, 48, 3, 3)", style=solid]; +"35 SqueezeNet/Sequential[features]/Fire[9]/NNCFConv2d[expand1x1]/conv2d_0" -> "36 SqueezeNet/Sequential[features]/Fire[9]/ReLU[expand1x1_activation]/relu__0" [label="(1, 192, 3, 3)", style=solid]; +"36 SqueezeNet/Sequential[features]/Fire[9]/ReLU[expand1x1_activation]/relu__0" -> "39 SqueezeNet/Sequential[features]/Fire[9]/cat_0" [label="(1, 192, 3, 3)", style=solid]; +"37 SqueezeNet/Sequential[features]/Fire[9]/NNCFConv2d[expand3x3]/conv2d_0" -> "38 SqueezeNet/Sequential[features]/Fire[9]/ReLU[expand3x3_activation]/relu__0" [label="(1, 192, 3, 3)", style=solid]; +"38 SqueezeNet/Sequential[features]/Fire[9]/ReLU[expand3x3_activation]/relu__0" -> "39 SqueezeNet/Sequential[features]/Fire[9]/cat_0" [label="(1, 192, 3, 3)", style=solid]; +"39 SqueezeNet/Sequential[features]/Fire[9]/cat_0" -> "40 SqueezeNet/Sequential[features]/Fire[10]/NNCFConv2d[squeeze]/conv2d_0" [label="(1, 384, 3, 3)", style=solid]; +"40 SqueezeNet/Sequential[features]/Fire[10]/NNCFConv2d[squeeze]/conv2d_0" -> "41 SqueezeNet/Sequential[features]/Fire[10]/ReLU[squeeze_activation]/relu__0" [label="(1, 64, 3, 3)", style=solid]; +"41 SqueezeNet/Sequential[features]/Fire[10]/ReLU[squeeze_activation]/relu__0" -> "42 SqueezeNet/Sequential[features]/Fire[10]/NNCFConv2d[expand1x1]/conv2d_0" [label="(1, 64, 3, 3)", style=solid]; +"41 SqueezeNet/Sequential[features]/Fire[10]/ReLU[squeeze_activation]/relu__0" -> "44 SqueezeNet/Sequential[features]/Fire[10]/NNCFConv2d[expand3x3]/conv2d_0" [label="(1, 64, 3, 3)", style=solid]; +"42 SqueezeNet/Sequential[features]/Fire[10]/NNCFConv2d[expand1x1]/conv2d_0" -> "43 SqueezeNet/Sequential[features]/Fire[10]/ReLU[expand1x1_activation]/relu__0" [label="(1, 256, 3, 3)", style=solid]; +"43 SqueezeNet/Sequential[features]/Fire[10]/ReLU[expand1x1_activation]/relu__0" -> "46 SqueezeNet/Sequential[features]/Fire[10]/cat_0" [label="(1, 256, 
3, 3)", style=solid]; +"44 SqueezeNet/Sequential[features]/Fire[10]/NNCFConv2d[expand3x3]/conv2d_0" -> "45 SqueezeNet/Sequential[features]/Fire[10]/ReLU[expand3x3_activation]/relu__0" [label="(1, 256, 3, 3)", style=solid]; +"45 SqueezeNet/Sequential[features]/Fire[10]/ReLU[expand3x3_activation]/relu__0" -> "46 SqueezeNet/Sequential[features]/Fire[10]/cat_0" [label="(1, 256, 3, 3)", style=solid]; +"46 SqueezeNet/Sequential[features]/Fire[10]/cat_0" -> "47 SqueezeNet/Sequential[features]/MaxPool2d[11]/max_pool2d_0" [label="(1, 512, 3, 3)", style=solid]; +"47 SqueezeNet/Sequential[features]/MaxPool2d[11]/max_pool2d_0" -> "48 SqueezeNet/Sequential[classifier]/Dropout[0]/dropout_0" [label="(1, 512, 1, 1)", style=solid]; +"48 SqueezeNet/Sequential[classifier]/Dropout[0]/dropout_0" -> "49 SqueezeNet/Sequential[classifier]/NNCFConv2d[1]/conv2d_0" [label="(1, 512, 1, 1)", style=solid]; +"49 SqueezeNet/Sequential[classifier]/NNCFConv2d[1]/conv2d_0" -> "50 SqueezeNet/Sequential[classifier]/ReLU[2]/relu__0" [label="(1, 1000, 1, 1)", style=solid]; +"50 SqueezeNet/Sequential[classifier]/ReLU[2]/relu__0" -> "51 SqueezeNet/Sequential[classifier]/AdaptiveAvgPool2d[3]/adaptive_avg_pool2d_0" [label="(1, 1000, 1, 1)", style=solid]; +"51 SqueezeNet/Sequential[classifier]/AdaptiveAvgPool2d[3]/adaptive_avg_pool2d_0" -> "52 SqueezeNet/view_0" [label="(1, 1000, 1, 1)", style=solid]; +"52 SqueezeNet/view_0" -> "53 /nncf_model_output_0" [label="(1, 1000)", style=solid]; } diff --git a/tests/torch/data/reference_graphs/quantized/asymmetric/lstm_cell.dot b/tests/torch/data/reference_graphs/quantized/asymmetric/lstm_cell.dot index 18c4bb70dd3..1d4382423ac 100644 --- a/tests/torch/data/reference_graphs/quantized/asymmetric/lstm_cell.dot +++ b/tests/torch/data/reference_graphs/quantized/asymmetric/lstm_cell.dot @@ -56,8 +56,8 @@ strict digraph { "21 LSTMCellNNCF/LSTMCellForwardNNCF[cell]/__mul___1" -> "22 LSTMCellNNCF/LSTMCellForwardNNCF[cell]/AsymmetricQuantizer/asymmetric_quantize_6"; "22 LSTMCellNNCF/LSTMCellForwardNNCF[cell]/AsymmetricQuantizer/asymmetric_quantize_6" -> "23 LSTMCellNNCF/LSTMCellForwardNNCF[cell]/__add___1"; "23 LSTMCellNNCF/LSTMCellForwardNNCF[cell]/__add___1" -> "24 LSTMCellNNCF/LSTMCellForwardNNCF[cell]/AsymmetricQuantizer/asymmetric_quantize_7"; +"23 LSTMCellNNCF/LSTMCellForwardNNCF[cell]/__add___1" -> "29 /nncf_model_output_1"; "24 LSTMCellNNCF/LSTMCellForwardNNCF[cell]/AsymmetricQuantizer/asymmetric_quantize_7" -> "25 LSTMCellNNCF/LSTMCellForwardNNCF[cell]/tanh_1"; -"24 LSTMCellNNCF/LSTMCellForwardNNCF[cell]/AsymmetricQuantizer/asymmetric_quantize_7" -> "29 /nncf_model_output_1"; "25 LSTMCellNNCF/LSTMCellForwardNNCF[cell]/tanh_1" -> "26 LSTMCellNNCF/LSTMCellForwardNNCF[cell]/AsymmetricQuantizer/asymmetric_quantize_8"; "26 LSTMCellNNCF/LSTMCellForwardNNCF[cell]/AsymmetricQuantizer/asymmetric_quantize_8" -> "27 LSTMCellNNCF/LSTMCellForwardNNCF[cell]/__mul___2"; "27 LSTMCellNNCF/LSTMCellForwardNNCF[cell]/__mul___2" -> "28 /nncf_model_output_0"; diff --git a/tests/torch/data/reference_graphs/quantized/ptq/symmetric/embedding_model.dot b/tests/torch/data/reference_graphs/quantized/ptq/symmetric/embedding_model.dot new file mode 100644 index 00000000000..1e471670ca5 --- /dev/null +++ b/tests/torch/data/reference_graphs/quantized/ptq/symmetric/embedding_model.dot @@ -0,0 +1,15 @@ +strict digraph { +"0 /nncf_model_input_0" [id=0, type=nncf_model_input]; +"1 EmbeddingModel/type_0" [id=1, type=type]; +"2 
EmbeddingModel/NNCFEmbedding[embedding]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" [id=2, type=symmetric_quantize]; +"3 EmbeddingModel/NNCFEmbedding[embedding]/embedding_0" [id=3, type=embedding]; +"4 EmbeddingModel/NNCFLinear[matmul]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" [id=4, type=symmetric_quantize]; +"5 EmbeddingModel/NNCFLinear[matmul]/linear_0" [id=5, type=linear]; +"6 /nncf_model_output_0" [id=6, type=nncf_model_output]; +"0 /nncf_model_input_0" -> "1 EmbeddingModel/type_0"; +"1 EmbeddingModel/type_0" -> "3 EmbeddingModel/NNCFEmbedding[embedding]/embedding_0"; +"2 EmbeddingModel/NNCFEmbedding[embedding]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "3 EmbeddingModel/NNCFEmbedding[embedding]/embedding_0"; +"3 EmbeddingModel/NNCFEmbedding[embedding]/embedding_0" -> "5 EmbeddingModel/NNCFLinear[matmul]/linear_0"; +"4 EmbeddingModel/NNCFLinear[matmul]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "5 EmbeddingModel/NNCFLinear[matmul]/linear_0"; +"5 EmbeddingModel/NNCFLinear[matmul]/linear_0" -> "6 /nncf_model_output_0"; +} diff --git a/tests/torch/data/reference_graphs/quantized/ptq/symmetric/resnet50.dot b/tests/torch/data/reference_graphs/quantized/ptq/symmetric/resnet50.dot deleted file mode 100644 index 58b4a0e1748..00000000000 --- a/tests/torch/data/reference_graphs/quantized/ptq/symmetric/resnet50.dot +++ /dev/null @@ -1,621 +0,0 @@ -strict digraph { -"0 /nncf_model_input_0" [id=0, type=nncf_model_input]; -"1 SymmetricQuantizer/symmetric_quantize_0" [id=1, type=symmetric_quantize]; -"2 ResNet/NNCFConv2d[conv1]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" [id=2, type=symmetric_quantize]; -"3 ResNet/NNCFConv2d[conv1]/conv2d_0" [id=3, type=conv2d]; -"4 ResNet/NNCFBatchNorm2d[bn1]/batch_norm_0" [id=4, type=batch_norm]; -"5 ResNet/relu_0" [id=5, type=relu]; -"6 ResNet/SymmetricQuantizer/symmetric_quantize_0" [id=6, type=symmetric_quantize]; -"7 ResNet/MaxPool2d[maxpool]/max_pool2d_0" [id=7, type=max_pool2d]; -"8 ResNet/Sequential[layer1]/Bottleneck[0]/NNCFConv2d[conv1]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" [id=8, type=symmetric_quantize]; -"9 ResNet/Sequential[layer1]/Bottleneck[0]/NNCFConv2d[conv1]/conv2d_0" [id=9, type=conv2d]; -"10 ResNet/Sequential[layer1]/Bottleneck[0]/NNCFBatchNorm2d[bn1]/batch_norm_0" [id=10, type=batch_norm]; -"11 ResNet/Sequential[layer1]/Bottleneck[0]/relu_0" [id=11, type=relu]; -"12 ResNet/Sequential[layer1]/Bottleneck[0]/SymmetricQuantizer/symmetric_quantize_0" [id=12, type=symmetric_quantize]; -"13 ResNet/Sequential[layer1]/Bottleneck[0]/NNCFConv2d[conv2]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" [id=13, type=symmetric_quantize]; -"14 ResNet/Sequential[layer1]/Bottleneck[0]/NNCFConv2d[conv2]/conv2d_0" [id=14, type=conv2d]; -"15 ResNet/Sequential[layer1]/Bottleneck[0]/NNCFBatchNorm2d[bn2]/batch_norm_0" [id=15, type=batch_norm]; -"16 ResNet/Sequential[layer1]/Bottleneck[0]/relu_1" [id=16, type=relu]; -"17 ResNet/Sequential[layer1]/Bottleneck[0]/SymmetricQuantizer/symmetric_quantize_1" [id=17, type=symmetric_quantize]; -"18 ResNet/Sequential[layer1]/Bottleneck[0]/NNCFConv2d[conv3]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" [id=18, type=symmetric_quantize]; -"19 ResNet/Sequential[layer1]/Bottleneck[0]/NNCFConv2d[conv3]/conv2d_0" [id=19, type=conv2d]; 
-"20 ResNet/Sequential[layer1]/Bottleneck[0]/NNCFBatchNorm2d[bn3]/batch_norm_0" [id=20, type=batch_norm]; -"21 ResNet/Sequential[layer1]/Bottleneck[0]/NNCFBatchNorm2d[bn3]/SymmetricQuantizer/symmetric_quantize_0" [id=21, type=symmetric_quantize]; -"22 ResNet/Sequential[layer1]/Bottleneck[0]/Sequential[shortcut]/NNCFConv2d[0]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" [id=22, type=symmetric_quantize]; -"23 ResNet/Sequential[layer1]/Bottleneck[0]/Sequential[shortcut]/NNCFConv2d[0]/conv2d_0" [id=23, type=conv2d]; -"24 ResNet/Sequential[layer1]/Bottleneck[0]/Sequential[shortcut]/NNCFBatchNorm2d[1]/batch_norm_0" [id=24, type=batch_norm]; -"25 ResNet/Sequential[layer1]/Bottleneck[0]/Sequential[shortcut]/NNCFBatchNorm2d[1]/SymmetricQuantizer/symmetric_quantize_0" [id=25, type=symmetric_quantize]; -"26 ResNet/Sequential[layer1]/Bottleneck[0]/__iadd___0" [id=26, type=__iadd__]; -"27 ResNet/Sequential[layer1]/Bottleneck[0]/relu_2" [id=27, type=relu]; -"28 ResNet/Sequential[layer1]/Bottleneck[0]/SymmetricQuantizer/symmetric_quantize_2" [id=28, type=symmetric_quantize]; -"29 ResNet/Sequential[layer1]/Bottleneck[1]/NNCFConv2d[conv1]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" [id=29, type=symmetric_quantize]; -"30 ResNet/Sequential[layer1]/Bottleneck[1]/NNCFConv2d[conv1]/conv2d_0" [id=30, type=conv2d]; -"31 ResNet/Sequential[layer1]/Bottleneck[1]/NNCFBatchNorm2d[bn1]/batch_norm_0" [id=31, type=batch_norm]; -"32 ResNet/Sequential[layer1]/Bottleneck[1]/relu_0" [id=32, type=relu]; -"33 ResNet/Sequential[layer1]/Bottleneck[1]/SymmetricQuantizer/symmetric_quantize_0" [id=33, type=symmetric_quantize]; -"34 ResNet/Sequential[layer1]/Bottleneck[1]/NNCFConv2d[conv2]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" [id=34, type=symmetric_quantize]; -"35 ResNet/Sequential[layer1]/Bottleneck[1]/NNCFConv2d[conv2]/conv2d_0" [id=35, type=conv2d]; -"36 ResNet/Sequential[layer1]/Bottleneck[1]/NNCFBatchNorm2d[bn2]/batch_norm_0" [id=36, type=batch_norm]; -"37 ResNet/Sequential[layer1]/Bottleneck[1]/relu_1" [id=37, type=relu]; -"38 ResNet/Sequential[layer1]/Bottleneck[1]/SymmetricQuantizer/symmetric_quantize_1" [id=38, type=symmetric_quantize]; -"39 ResNet/Sequential[layer1]/Bottleneck[1]/NNCFConv2d[conv3]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" [id=39, type=symmetric_quantize]; -"40 ResNet/Sequential[layer1]/Bottleneck[1]/NNCFConv2d[conv3]/conv2d_0" [id=40, type=conv2d]; -"41 ResNet/Sequential[layer1]/Bottleneck[1]/NNCFBatchNorm2d[bn3]/batch_norm_0" [id=41, type=batch_norm]; -"42 ResNet/Sequential[layer1]/Bottleneck[1]/NNCFBatchNorm2d[bn3]/SymmetricQuantizer/symmetric_quantize_0" [id=42, type=symmetric_quantize]; -"43 ResNet/Sequential[layer1]/Bottleneck[1]/__iadd___0" [id=43, type=__iadd__]; -"44 ResNet/Sequential[layer1]/Bottleneck[1]/relu_2" [id=44, type=relu]; -"45 ResNet/Sequential[layer1]/Bottleneck[1]/SymmetricQuantizer/symmetric_quantize_2" [id=45, type=symmetric_quantize]; -"46 ResNet/Sequential[layer1]/Bottleneck[2]/NNCFConv2d[conv1]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" [id=46, type=symmetric_quantize]; -"47 ResNet/Sequential[layer1]/Bottleneck[2]/NNCFConv2d[conv1]/conv2d_0" [id=47, type=conv2d]; -"48 ResNet/Sequential[layer1]/Bottleneck[2]/NNCFBatchNorm2d[bn1]/batch_norm_0" [id=48, type=batch_norm]; -"49 ResNet/Sequential[layer1]/Bottleneck[2]/relu_0" [id=49, type=relu]; -"50 
ResNet/Sequential[layer1]/Bottleneck[2]/SymmetricQuantizer/symmetric_quantize_0" [id=50, type=symmetric_quantize]; -"51 ResNet/Sequential[layer1]/Bottleneck[2]/NNCFConv2d[conv2]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" [id=51, type=symmetric_quantize]; -"52 ResNet/Sequential[layer1]/Bottleneck[2]/NNCFConv2d[conv2]/conv2d_0" [id=52, type=conv2d]; -"53 ResNet/Sequential[layer1]/Bottleneck[2]/NNCFBatchNorm2d[bn2]/batch_norm_0" [id=53, type=batch_norm]; -"54 ResNet/Sequential[layer1]/Bottleneck[2]/relu_1" [id=54, type=relu]; -"55 ResNet/Sequential[layer1]/Bottleneck[2]/SymmetricQuantizer/symmetric_quantize_1" [id=55, type=symmetric_quantize]; -"56 ResNet/Sequential[layer1]/Bottleneck[2]/NNCFConv2d[conv3]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" [id=56, type=symmetric_quantize]; -"57 ResNet/Sequential[layer1]/Bottleneck[2]/NNCFConv2d[conv3]/conv2d_0" [id=57, type=conv2d]; -"58 ResNet/Sequential[layer1]/Bottleneck[2]/NNCFBatchNorm2d[bn3]/batch_norm_0" [id=58, type=batch_norm]; -"59 ResNet/Sequential[layer1]/Bottleneck[2]/NNCFBatchNorm2d[bn3]/SymmetricQuantizer/symmetric_quantize_0" [id=59, type=symmetric_quantize]; -"60 ResNet/Sequential[layer1]/Bottleneck[2]/__iadd___0" [id=60, type=__iadd__]; -"61 ResNet/Sequential[layer1]/Bottleneck[2]/relu_2" [id=61, type=relu]; -"62 ResNet/Sequential[layer1]/Bottleneck[2]/SymmetricQuantizer/symmetric_quantize_2" [id=62, type=symmetric_quantize]; -"63 ResNet/Sequential[layer2]/Bottleneck[0]/NNCFConv2d[conv1]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" [id=63, type=symmetric_quantize]; -"64 ResNet/Sequential[layer2]/Bottleneck[0]/NNCFConv2d[conv1]/conv2d_0" [id=64, type=conv2d]; -"65 ResNet/Sequential[layer2]/Bottleneck[0]/NNCFBatchNorm2d[bn1]/batch_norm_0" [id=65, type=batch_norm]; -"66 ResNet/Sequential[layer2]/Bottleneck[0]/relu_0" [id=66, type=relu]; -"67 ResNet/Sequential[layer2]/Bottleneck[0]/SymmetricQuantizer/symmetric_quantize_0" [id=67, type=symmetric_quantize]; -"68 ResNet/Sequential[layer2]/Bottleneck[0]/NNCFConv2d[conv2]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" [id=68, type=symmetric_quantize]; -"69 ResNet/Sequential[layer2]/Bottleneck[0]/NNCFConv2d[conv2]/conv2d_0" [id=69, type=conv2d]; -"70 ResNet/Sequential[layer2]/Bottleneck[0]/NNCFBatchNorm2d[bn2]/batch_norm_0" [id=70, type=batch_norm]; -"71 ResNet/Sequential[layer2]/Bottleneck[0]/relu_1" [id=71, type=relu]; -"72 ResNet/Sequential[layer2]/Bottleneck[0]/SymmetricQuantizer/symmetric_quantize_1" [id=72, type=symmetric_quantize]; -"73 ResNet/Sequential[layer2]/Bottleneck[0]/NNCFConv2d[conv3]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" [id=73, type=symmetric_quantize]; -"74 ResNet/Sequential[layer2]/Bottleneck[0]/NNCFConv2d[conv3]/conv2d_0" [id=74, type=conv2d]; -"75 ResNet/Sequential[layer2]/Bottleneck[0]/NNCFBatchNorm2d[bn3]/batch_norm_0" [id=75, type=batch_norm]; -"76 ResNet/Sequential[layer2]/Bottleneck[0]/NNCFBatchNorm2d[bn3]/SymmetricQuantizer/symmetric_quantize_0" [id=76, type=symmetric_quantize]; -"77 ResNet/Sequential[layer2]/Bottleneck[0]/Sequential[shortcut]/NNCFConv2d[0]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" [id=77, type=symmetric_quantize]; -"78 ResNet/Sequential[layer2]/Bottleneck[0]/Sequential[shortcut]/NNCFConv2d[0]/conv2d_0" [id=78, type=conv2d]; -"79 ResNet/Sequential[layer2]/Bottleneck[0]/Sequential[shortcut]/NNCFBatchNorm2d[1]/batch_norm_0" 
[id=79, type=batch_norm]; -"80 ResNet/Sequential[layer2]/Bottleneck[0]/Sequential[shortcut]/NNCFBatchNorm2d[1]/SymmetricQuantizer/symmetric_quantize_0" [id=80, type=symmetric_quantize]; -"81 ResNet/Sequential[layer2]/Bottleneck[0]/__iadd___0" [id=81, type=__iadd__]; -"82 ResNet/Sequential[layer2]/Bottleneck[0]/relu_2" [id=82, type=relu]; -"83 ResNet/Sequential[layer2]/Bottleneck[0]/SymmetricQuantizer/symmetric_quantize_2" [id=83, type=symmetric_quantize]; -"84 ResNet/Sequential[layer2]/Bottleneck[1]/NNCFConv2d[conv1]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" [id=84, type=symmetric_quantize]; -"85 ResNet/Sequential[layer2]/Bottleneck[1]/NNCFConv2d[conv1]/conv2d_0" [id=85, type=conv2d]; -"86 ResNet/Sequential[layer2]/Bottleneck[1]/NNCFBatchNorm2d[bn1]/batch_norm_0" [id=86, type=batch_norm]; -"87 ResNet/Sequential[layer2]/Bottleneck[1]/relu_0" [id=87, type=relu]; -"88 ResNet/Sequential[layer2]/Bottleneck[1]/SymmetricQuantizer/symmetric_quantize_0" [id=88, type=symmetric_quantize]; -"89 ResNet/Sequential[layer2]/Bottleneck[1]/NNCFConv2d[conv2]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" [id=89, type=symmetric_quantize]; -"90 ResNet/Sequential[layer2]/Bottleneck[1]/NNCFConv2d[conv2]/conv2d_0" [id=90, type=conv2d]; -"91 ResNet/Sequential[layer2]/Bottleneck[1]/NNCFBatchNorm2d[bn2]/batch_norm_0" [id=91, type=batch_norm]; -"92 ResNet/Sequential[layer2]/Bottleneck[1]/relu_1" [id=92, type=relu]; -"93 ResNet/Sequential[layer2]/Bottleneck[1]/SymmetricQuantizer/symmetric_quantize_1" [id=93, type=symmetric_quantize]; -"94 ResNet/Sequential[layer2]/Bottleneck[1]/NNCFConv2d[conv3]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" [id=94, type=symmetric_quantize]; -"95 ResNet/Sequential[layer2]/Bottleneck[1]/NNCFConv2d[conv3]/conv2d_0" [id=95, type=conv2d]; -"96 ResNet/Sequential[layer2]/Bottleneck[1]/NNCFBatchNorm2d[bn3]/batch_norm_0" [id=96, type=batch_norm]; -"97 ResNet/Sequential[layer2]/Bottleneck[1]/NNCFBatchNorm2d[bn3]/SymmetricQuantizer/symmetric_quantize_0" [id=97, type=symmetric_quantize]; -"98 ResNet/Sequential[layer2]/Bottleneck[1]/__iadd___0" [id=98, type=__iadd__]; -"99 ResNet/Sequential[layer2]/Bottleneck[1]/relu_2" [id=99, type=relu]; -"100 ResNet/Sequential[layer2]/Bottleneck[1]/SymmetricQuantizer/symmetric_quantize_2" [id=100, type=symmetric_quantize]; -"101 ResNet/Sequential[layer2]/Bottleneck[2]/NNCFConv2d[conv1]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" [id=101, type=symmetric_quantize]; -"102 ResNet/Sequential[layer2]/Bottleneck[2]/NNCFConv2d[conv1]/conv2d_0" [id=102, type=conv2d]; -"103 ResNet/Sequential[layer2]/Bottleneck[2]/NNCFBatchNorm2d[bn1]/batch_norm_0" [id=103, type=batch_norm]; -"104 ResNet/Sequential[layer2]/Bottleneck[2]/relu_0" [id=104, type=relu]; -"105 ResNet/Sequential[layer2]/Bottleneck[2]/SymmetricQuantizer/symmetric_quantize_0" [id=105, type=symmetric_quantize]; -"106 ResNet/Sequential[layer2]/Bottleneck[2]/NNCFConv2d[conv2]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" [id=106, type=symmetric_quantize]; -"107 ResNet/Sequential[layer2]/Bottleneck[2]/NNCFConv2d[conv2]/conv2d_0" [id=107, type=conv2d]; -"108 ResNet/Sequential[layer2]/Bottleneck[2]/NNCFBatchNorm2d[bn2]/batch_norm_0" [id=108, type=batch_norm]; -"109 ResNet/Sequential[layer2]/Bottleneck[2]/relu_1" [id=109, type=relu]; -"110 ResNet/Sequential[layer2]/Bottleneck[2]/SymmetricQuantizer/symmetric_quantize_1" [id=110, 
type=symmetric_quantize]; -"111 ResNet/Sequential[layer2]/Bottleneck[2]/NNCFConv2d[conv3]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" [id=111, type=symmetric_quantize]; -"112 ResNet/Sequential[layer2]/Bottleneck[2]/NNCFConv2d[conv3]/conv2d_0" [id=112, type=conv2d]; -"113 ResNet/Sequential[layer2]/Bottleneck[2]/NNCFBatchNorm2d[bn3]/batch_norm_0" [id=113, type=batch_norm]; -"114 ResNet/Sequential[layer2]/Bottleneck[2]/NNCFBatchNorm2d[bn3]/SymmetricQuantizer/symmetric_quantize_0" [id=114, type=symmetric_quantize]; -"115 ResNet/Sequential[layer2]/Bottleneck[2]/__iadd___0" [id=115, type=__iadd__]; -"116 ResNet/Sequential[layer2]/Bottleneck[2]/relu_2" [id=116, type=relu]; -"117 ResNet/Sequential[layer2]/Bottleneck[2]/SymmetricQuantizer/symmetric_quantize_2" [id=117, type=symmetric_quantize]; -"118 ResNet/Sequential[layer2]/Bottleneck[3]/NNCFConv2d[conv1]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" [id=118, type=symmetric_quantize]; -"119 ResNet/Sequential[layer2]/Bottleneck[3]/NNCFConv2d[conv1]/conv2d_0" [id=119, type=conv2d]; -"120 ResNet/Sequential[layer2]/Bottleneck[3]/NNCFBatchNorm2d[bn1]/batch_norm_0" [id=120, type=batch_norm]; -"121 ResNet/Sequential[layer2]/Bottleneck[3]/relu_0" [id=121, type=relu]; -"122 ResNet/Sequential[layer2]/Bottleneck[3]/SymmetricQuantizer/symmetric_quantize_0" [id=122, type=symmetric_quantize]; -"123 ResNet/Sequential[layer2]/Bottleneck[3]/NNCFConv2d[conv2]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" [id=123, type=symmetric_quantize]; -"124 ResNet/Sequential[layer2]/Bottleneck[3]/NNCFConv2d[conv2]/conv2d_0" [id=124, type=conv2d]; -"125 ResNet/Sequential[layer2]/Bottleneck[3]/NNCFBatchNorm2d[bn2]/batch_norm_0" [id=125, type=batch_norm]; -"126 ResNet/Sequential[layer2]/Bottleneck[3]/relu_1" [id=126, type=relu]; -"127 ResNet/Sequential[layer2]/Bottleneck[3]/SymmetricQuantizer/symmetric_quantize_1" [id=127, type=symmetric_quantize]; -"128 ResNet/Sequential[layer2]/Bottleneck[3]/NNCFConv2d[conv3]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" [id=128, type=symmetric_quantize]; -"129 ResNet/Sequential[layer2]/Bottleneck[3]/NNCFConv2d[conv3]/conv2d_0" [id=129, type=conv2d]; -"130 ResNet/Sequential[layer2]/Bottleneck[3]/NNCFBatchNorm2d[bn3]/batch_norm_0" [id=130, type=batch_norm]; -"131 ResNet/Sequential[layer2]/Bottleneck[3]/NNCFBatchNorm2d[bn3]/SymmetricQuantizer/symmetric_quantize_0" [id=131, type=symmetric_quantize]; -"132 ResNet/Sequential[layer2]/Bottleneck[3]/__iadd___0" [id=132, type=__iadd__]; -"133 ResNet/Sequential[layer2]/Bottleneck[3]/relu_2" [id=133, type=relu]; -"134 ResNet/Sequential[layer2]/Bottleneck[3]/SymmetricQuantizer/symmetric_quantize_2" [id=134, type=symmetric_quantize]; -"135 ResNet/Sequential[layer3]/Bottleneck[0]/NNCFConv2d[conv1]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" [id=135, type=symmetric_quantize]; -"136 ResNet/Sequential[layer3]/Bottleneck[0]/NNCFConv2d[conv1]/conv2d_0" [id=136, type=conv2d]; -"137 ResNet/Sequential[layer3]/Bottleneck[0]/NNCFBatchNorm2d[bn1]/batch_norm_0" [id=137, type=batch_norm]; -"138 ResNet/Sequential[layer3]/Bottleneck[0]/relu_0" [id=138, type=relu]; -"139 ResNet/Sequential[layer3]/Bottleneck[0]/SymmetricQuantizer/symmetric_quantize_0" [id=139, type=symmetric_quantize]; -"140 ResNet/Sequential[layer3]/Bottleneck[0]/NNCFConv2d[conv2]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" [id=140, 
type=symmetric_quantize]; -"141 ResNet/Sequential[layer3]/Bottleneck[0]/NNCFConv2d[conv2]/conv2d_0" [id=141, type=conv2d]; -"142 ResNet/Sequential[layer3]/Bottleneck[0]/NNCFBatchNorm2d[bn2]/batch_norm_0" [id=142, type=batch_norm]; -"143 ResNet/Sequential[layer3]/Bottleneck[0]/relu_1" [id=143, type=relu]; -"144 ResNet/Sequential[layer3]/Bottleneck[0]/SymmetricQuantizer/symmetric_quantize_1" [id=144, type=symmetric_quantize]; -"145 ResNet/Sequential[layer3]/Bottleneck[0]/NNCFConv2d[conv3]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" [id=145, type=symmetric_quantize]; -"146 ResNet/Sequential[layer3]/Bottleneck[0]/NNCFConv2d[conv3]/conv2d_0" [id=146, type=conv2d]; -"147 ResNet/Sequential[layer3]/Bottleneck[0]/NNCFBatchNorm2d[bn3]/batch_norm_0" [id=147, type=batch_norm]; -"148 ResNet/Sequential[layer3]/Bottleneck[0]/NNCFBatchNorm2d[bn3]/SymmetricQuantizer/symmetric_quantize_0" [id=148, type=symmetric_quantize]; -"149 ResNet/Sequential[layer3]/Bottleneck[0]/Sequential[shortcut]/NNCFConv2d[0]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" [id=149, type=symmetric_quantize]; -"150 ResNet/Sequential[layer3]/Bottleneck[0]/Sequential[shortcut]/NNCFConv2d[0]/conv2d_0" [id=150, type=conv2d]; -"151 ResNet/Sequential[layer3]/Bottleneck[0]/Sequential[shortcut]/NNCFBatchNorm2d[1]/batch_norm_0" [id=151, type=batch_norm]; -"152 ResNet/Sequential[layer3]/Bottleneck[0]/Sequential[shortcut]/NNCFBatchNorm2d[1]/SymmetricQuantizer/symmetric_quantize_0" [id=152, type=symmetric_quantize]; -"153 ResNet/Sequential[layer3]/Bottleneck[0]/__iadd___0" [id=153, type=__iadd__]; -"154 ResNet/Sequential[layer3]/Bottleneck[0]/relu_2" [id=154, type=relu]; -"155 ResNet/Sequential[layer3]/Bottleneck[0]/SymmetricQuantizer/symmetric_quantize_2" [id=155, type=symmetric_quantize]; -"156 ResNet/Sequential[layer3]/Bottleneck[1]/NNCFConv2d[conv1]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" [id=156, type=symmetric_quantize]; -"157 ResNet/Sequential[layer3]/Bottleneck[1]/NNCFConv2d[conv1]/conv2d_0" [id=157, type=conv2d]; -"158 ResNet/Sequential[layer3]/Bottleneck[1]/NNCFBatchNorm2d[bn1]/batch_norm_0" [id=158, type=batch_norm]; -"159 ResNet/Sequential[layer3]/Bottleneck[1]/relu_0" [id=159, type=relu]; -"160 ResNet/Sequential[layer3]/Bottleneck[1]/SymmetricQuantizer/symmetric_quantize_0" [id=160, type=symmetric_quantize]; -"161 ResNet/Sequential[layer3]/Bottleneck[1]/NNCFConv2d[conv2]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" [id=161, type=symmetric_quantize]; -"162 ResNet/Sequential[layer3]/Bottleneck[1]/NNCFConv2d[conv2]/conv2d_0" [id=162, type=conv2d]; -"163 ResNet/Sequential[layer3]/Bottleneck[1]/NNCFBatchNorm2d[bn2]/batch_norm_0" [id=163, type=batch_norm]; -"164 ResNet/Sequential[layer3]/Bottleneck[1]/relu_1" [id=164, type=relu]; -"165 ResNet/Sequential[layer3]/Bottleneck[1]/SymmetricQuantizer/symmetric_quantize_1" [id=165, type=symmetric_quantize]; -"166 ResNet/Sequential[layer3]/Bottleneck[1]/NNCFConv2d[conv3]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" [id=166, type=symmetric_quantize]; -"167 ResNet/Sequential[layer3]/Bottleneck[1]/NNCFConv2d[conv3]/conv2d_0" [id=167, type=conv2d]; -"168 ResNet/Sequential[layer3]/Bottleneck[1]/NNCFBatchNorm2d[bn3]/batch_norm_0" [id=168, type=batch_norm]; -"169 ResNet/Sequential[layer3]/Bottleneck[1]/NNCFBatchNorm2d[bn3]/SymmetricQuantizer/symmetric_quantize_0" [id=169, type=symmetric_quantize]; -"170 
ResNet/Sequential[layer3]/Bottleneck[1]/__iadd___0" [id=170, type=__iadd__]; -"171 ResNet/Sequential[layer3]/Bottleneck[1]/relu_2" [id=171, type=relu]; -"172 ResNet/Sequential[layer3]/Bottleneck[1]/SymmetricQuantizer/symmetric_quantize_2" [id=172, type=symmetric_quantize]; -"173 ResNet/Sequential[layer3]/Bottleneck[2]/NNCFConv2d[conv1]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" [id=173, type=symmetric_quantize]; -"174 ResNet/Sequential[layer3]/Bottleneck[2]/NNCFConv2d[conv1]/conv2d_0" [id=174, type=conv2d]; -"175 ResNet/Sequential[layer3]/Bottleneck[2]/NNCFBatchNorm2d[bn1]/batch_norm_0" [id=175, type=batch_norm]; -"176 ResNet/Sequential[layer3]/Bottleneck[2]/relu_0" [id=176, type=relu]; -"177 ResNet/Sequential[layer3]/Bottleneck[2]/SymmetricQuantizer/symmetric_quantize_0" [id=177, type=symmetric_quantize]; -"178 ResNet/Sequential[layer3]/Bottleneck[2]/NNCFConv2d[conv2]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" [id=178, type=symmetric_quantize]; -"179 ResNet/Sequential[layer3]/Bottleneck[2]/NNCFConv2d[conv2]/conv2d_0" [id=179, type=conv2d]; -"180 ResNet/Sequential[layer3]/Bottleneck[2]/NNCFBatchNorm2d[bn2]/batch_norm_0" [id=180, type=batch_norm]; -"181 ResNet/Sequential[layer3]/Bottleneck[2]/relu_1" [id=181, type=relu]; -"182 ResNet/Sequential[layer3]/Bottleneck[2]/SymmetricQuantizer/symmetric_quantize_1" [id=182, type=symmetric_quantize]; -"183 ResNet/Sequential[layer3]/Bottleneck[2]/NNCFConv2d[conv3]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" [id=183, type=symmetric_quantize]; -"184 ResNet/Sequential[layer3]/Bottleneck[2]/NNCFConv2d[conv3]/conv2d_0" [id=184, type=conv2d]; -"185 ResNet/Sequential[layer3]/Bottleneck[2]/NNCFBatchNorm2d[bn3]/batch_norm_0" [id=185, type=batch_norm]; -"186 ResNet/Sequential[layer3]/Bottleneck[2]/NNCFBatchNorm2d[bn3]/SymmetricQuantizer/symmetric_quantize_0" [id=186, type=symmetric_quantize]; -"187 ResNet/Sequential[layer3]/Bottleneck[2]/__iadd___0" [id=187, type=__iadd__]; -"188 ResNet/Sequential[layer3]/Bottleneck[2]/relu_2" [id=188, type=relu]; -"189 ResNet/Sequential[layer3]/Bottleneck[2]/SymmetricQuantizer/symmetric_quantize_2" [id=189, type=symmetric_quantize]; -"190 ResNet/Sequential[layer3]/Bottleneck[3]/NNCFConv2d[conv1]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" [id=190, type=symmetric_quantize]; -"191 ResNet/Sequential[layer3]/Bottleneck[3]/NNCFConv2d[conv1]/conv2d_0" [id=191, type=conv2d]; -"192 ResNet/Sequential[layer3]/Bottleneck[3]/NNCFBatchNorm2d[bn1]/batch_norm_0" [id=192, type=batch_norm]; -"193 ResNet/Sequential[layer3]/Bottleneck[3]/relu_0" [id=193, type=relu]; -"194 ResNet/Sequential[layer3]/Bottleneck[3]/SymmetricQuantizer/symmetric_quantize_0" [id=194, type=symmetric_quantize]; -"195 ResNet/Sequential[layer3]/Bottleneck[3]/NNCFConv2d[conv2]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" [id=195, type=symmetric_quantize]; -"196 ResNet/Sequential[layer3]/Bottleneck[3]/NNCFConv2d[conv2]/conv2d_0" [id=196, type=conv2d]; -"197 ResNet/Sequential[layer3]/Bottleneck[3]/NNCFBatchNorm2d[bn2]/batch_norm_0" [id=197, type=batch_norm]; -"198 ResNet/Sequential[layer3]/Bottleneck[3]/relu_1" [id=198, type=relu]; -"199 ResNet/Sequential[layer3]/Bottleneck[3]/SymmetricQuantizer/symmetric_quantize_1" [id=199, type=symmetric_quantize]; -"200 ResNet/Sequential[layer3]/Bottleneck[3]/NNCFConv2d[conv3]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" 
[id=200, type=symmetric_quantize]; -"201 ResNet/Sequential[layer3]/Bottleneck[3]/NNCFConv2d[conv3]/conv2d_0" [id=201, type=conv2d]; -"202 ResNet/Sequential[layer3]/Bottleneck[3]/NNCFBatchNorm2d[bn3]/batch_norm_0" [id=202, type=batch_norm]; -"203 ResNet/Sequential[layer3]/Bottleneck[3]/NNCFBatchNorm2d[bn3]/SymmetricQuantizer/symmetric_quantize_0" [id=203, type=symmetric_quantize]; -"204 ResNet/Sequential[layer3]/Bottleneck[3]/__iadd___0" [id=204, type=__iadd__]; -"205 ResNet/Sequential[layer3]/Bottleneck[3]/relu_2" [id=205, type=relu]; -"206 ResNet/Sequential[layer3]/Bottleneck[3]/SymmetricQuantizer/symmetric_quantize_2" [id=206, type=symmetric_quantize]; -"207 ResNet/Sequential[layer3]/Bottleneck[4]/NNCFConv2d[conv1]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" [id=207, type=symmetric_quantize]; -"208 ResNet/Sequential[layer3]/Bottleneck[4]/NNCFConv2d[conv1]/conv2d_0" [id=208, type=conv2d]; -"209 ResNet/Sequential[layer3]/Bottleneck[4]/NNCFBatchNorm2d[bn1]/batch_norm_0" [id=209, type=batch_norm]; -"210 ResNet/Sequential[layer3]/Bottleneck[4]/relu_0" [id=210, type=relu]; -"211 ResNet/Sequential[layer3]/Bottleneck[4]/SymmetricQuantizer/symmetric_quantize_0" [id=211, type=symmetric_quantize]; -"212 ResNet/Sequential[layer3]/Bottleneck[4]/NNCFConv2d[conv2]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" [id=212, type=symmetric_quantize]; -"213 ResNet/Sequential[layer3]/Bottleneck[4]/NNCFConv2d[conv2]/conv2d_0" [id=213, type=conv2d]; -"214 ResNet/Sequential[layer3]/Bottleneck[4]/NNCFBatchNorm2d[bn2]/batch_norm_0" [id=214, type=batch_norm]; -"215 ResNet/Sequential[layer3]/Bottleneck[4]/relu_1" [id=215, type=relu]; -"216 ResNet/Sequential[layer3]/Bottleneck[4]/SymmetricQuantizer/symmetric_quantize_1" [id=216, type=symmetric_quantize]; -"217 ResNet/Sequential[layer3]/Bottleneck[4]/NNCFConv2d[conv3]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" [id=217, type=symmetric_quantize]; -"218 ResNet/Sequential[layer3]/Bottleneck[4]/NNCFConv2d[conv3]/conv2d_0" [id=218, type=conv2d]; -"219 ResNet/Sequential[layer3]/Bottleneck[4]/NNCFBatchNorm2d[bn3]/batch_norm_0" [id=219, type=batch_norm]; -"220 ResNet/Sequential[layer3]/Bottleneck[4]/NNCFBatchNorm2d[bn3]/SymmetricQuantizer/symmetric_quantize_0" [id=220, type=symmetric_quantize]; -"221 ResNet/Sequential[layer3]/Bottleneck[4]/__iadd___0" [id=221, type=__iadd__]; -"222 ResNet/Sequential[layer3]/Bottleneck[4]/relu_2" [id=222, type=relu]; -"223 ResNet/Sequential[layer3]/Bottleneck[4]/SymmetricQuantizer/symmetric_quantize_2" [id=223, type=symmetric_quantize]; -"224 ResNet/Sequential[layer3]/Bottleneck[5]/NNCFConv2d[conv1]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" [id=224, type=symmetric_quantize]; -"225 ResNet/Sequential[layer3]/Bottleneck[5]/NNCFConv2d[conv1]/conv2d_0" [id=225, type=conv2d]; -"226 ResNet/Sequential[layer3]/Bottleneck[5]/NNCFBatchNorm2d[bn1]/batch_norm_0" [id=226, type=batch_norm]; -"227 ResNet/Sequential[layer3]/Bottleneck[5]/relu_0" [id=227, type=relu]; -"228 ResNet/Sequential[layer3]/Bottleneck[5]/SymmetricQuantizer/symmetric_quantize_0" [id=228, type=symmetric_quantize]; -"229 ResNet/Sequential[layer3]/Bottleneck[5]/NNCFConv2d[conv2]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" [id=229, type=symmetric_quantize]; -"230 ResNet/Sequential[layer3]/Bottleneck[5]/NNCFConv2d[conv2]/conv2d_0" [id=230, type=conv2d]; -"231 
ResNet/Sequential[layer3]/Bottleneck[5]/NNCFBatchNorm2d[bn2]/batch_norm_0" [id=231, type=batch_norm]; -"232 ResNet/Sequential[layer3]/Bottleneck[5]/relu_1" [id=232, type=relu]; -"233 ResNet/Sequential[layer3]/Bottleneck[5]/SymmetricQuantizer/symmetric_quantize_1" [id=233, type=symmetric_quantize]; -"234 ResNet/Sequential[layer3]/Bottleneck[5]/NNCFConv2d[conv3]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" [id=234, type=symmetric_quantize]; -"235 ResNet/Sequential[layer3]/Bottleneck[5]/NNCFConv2d[conv3]/conv2d_0" [id=235, type=conv2d]; -"236 ResNet/Sequential[layer3]/Bottleneck[5]/NNCFBatchNorm2d[bn3]/batch_norm_0" [id=236, type=batch_norm]; -"237 ResNet/Sequential[layer3]/Bottleneck[5]/NNCFBatchNorm2d[bn3]/SymmetricQuantizer/symmetric_quantize_0" [id=237, type=symmetric_quantize]; -"238 ResNet/Sequential[layer3]/Bottleneck[5]/__iadd___0" [id=238, type=__iadd__]; -"239 ResNet/Sequential[layer3]/Bottleneck[5]/relu_2" [id=239, type=relu]; -"240 ResNet/Sequential[layer3]/Bottleneck[5]/SymmetricQuantizer/symmetric_quantize_2" [id=240, type=symmetric_quantize]; -"241 ResNet/Sequential[layer4]/Bottleneck[0]/NNCFConv2d[conv1]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" [id=241, type=symmetric_quantize]; -"242 ResNet/Sequential[layer4]/Bottleneck[0]/NNCFConv2d[conv1]/conv2d_0" [id=242, type=conv2d]; -"243 ResNet/Sequential[layer4]/Bottleneck[0]/NNCFBatchNorm2d[bn1]/batch_norm_0" [id=243, type=batch_norm]; -"244 ResNet/Sequential[layer4]/Bottleneck[0]/relu_0" [id=244, type=relu]; -"245 ResNet/Sequential[layer4]/Bottleneck[0]/SymmetricQuantizer/symmetric_quantize_0" [id=245, type=symmetric_quantize]; -"246 ResNet/Sequential[layer4]/Bottleneck[0]/NNCFConv2d[conv2]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" [id=246, type=symmetric_quantize]; -"247 ResNet/Sequential[layer4]/Bottleneck[0]/NNCFConv2d[conv2]/conv2d_0" [id=247, type=conv2d]; -"248 ResNet/Sequential[layer4]/Bottleneck[0]/NNCFBatchNorm2d[bn2]/batch_norm_0" [id=248, type=batch_norm]; -"249 ResNet/Sequential[layer4]/Bottleneck[0]/relu_1" [id=249, type=relu]; -"250 ResNet/Sequential[layer4]/Bottleneck[0]/SymmetricQuantizer/symmetric_quantize_1" [id=250, type=symmetric_quantize]; -"251 ResNet/Sequential[layer4]/Bottleneck[0]/NNCFConv2d[conv3]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" [id=251, type=symmetric_quantize]; -"252 ResNet/Sequential[layer4]/Bottleneck[0]/NNCFConv2d[conv3]/conv2d_0" [id=252, type=conv2d]; -"253 ResNet/Sequential[layer4]/Bottleneck[0]/NNCFBatchNorm2d[bn3]/batch_norm_0" [id=253, type=batch_norm]; -"254 ResNet/Sequential[layer4]/Bottleneck[0]/NNCFBatchNorm2d[bn3]/SymmetricQuantizer/symmetric_quantize_0" [id=254, type=symmetric_quantize]; -"255 ResNet/Sequential[layer4]/Bottleneck[0]/Sequential[shortcut]/NNCFConv2d[0]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" [id=255, type=symmetric_quantize]; -"256 ResNet/Sequential[layer4]/Bottleneck[0]/Sequential[shortcut]/NNCFConv2d[0]/conv2d_0" [id=256, type=conv2d]; -"257 ResNet/Sequential[layer4]/Bottleneck[0]/Sequential[shortcut]/NNCFBatchNorm2d[1]/batch_norm_0" [id=257, type=batch_norm]; -"258 ResNet/Sequential[layer4]/Bottleneck[0]/Sequential[shortcut]/NNCFBatchNorm2d[1]/SymmetricQuantizer/symmetric_quantize_0" [id=258, type=symmetric_quantize]; -"259 ResNet/Sequential[layer4]/Bottleneck[0]/__iadd___0" [id=259, type=__iadd__]; -"260 ResNet/Sequential[layer4]/Bottleneck[0]/relu_2" [id=260, 
type=relu]; -"261 ResNet/Sequential[layer4]/Bottleneck[0]/SymmetricQuantizer/symmetric_quantize_2" [id=261, type=symmetric_quantize]; -"262 ResNet/Sequential[layer4]/Bottleneck[1]/NNCFConv2d[conv1]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" [id=262, type=symmetric_quantize]; -"263 ResNet/Sequential[layer4]/Bottleneck[1]/NNCFConv2d[conv1]/conv2d_0" [id=263, type=conv2d]; -"264 ResNet/Sequential[layer4]/Bottleneck[1]/NNCFBatchNorm2d[bn1]/batch_norm_0" [id=264, type=batch_norm]; -"265 ResNet/Sequential[layer4]/Bottleneck[1]/relu_0" [id=265, type=relu]; -"266 ResNet/Sequential[layer4]/Bottleneck[1]/SymmetricQuantizer/symmetric_quantize_0" [id=266, type=symmetric_quantize]; -"267 ResNet/Sequential[layer4]/Bottleneck[1]/NNCFConv2d[conv2]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" [id=267, type=symmetric_quantize]; -"268 ResNet/Sequential[layer4]/Bottleneck[1]/NNCFConv2d[conv2]/conv2d_0" [id=268, type=conv2d]; -"269 ResNet/Sequential[layer4]/Bottleneck[1]/NNCFBatchNorm2d[bn2]/batch_norm_0" [id=269, type=batch_norm]; -"270 ResNet/Sequential[layer4]/Bottleneck[1]/relu_1" [id=270, type=relu]; -"271 ResNet/Sequential[layer4]/Bottleneck[1]/SymmetricQuantizer/symmetric_quantize_1" [id=271, type=symmetric_quantize]; -"272 ResNet/Sequential[layer4]/Bottleneck[1]/NNCFConv2d[conv3]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" [id=272, type=symmetric_quantize]; -"273 ResNet/Sequential[layer4]/Bottleneck[1]/NNCFConv2d[conv3]/conv2d_0" [id=273, type=conv2d]; -"274 ResNet/Sequential[layer4]/Bottleneck[1]/NNCFBatchNorm2d[bn3]/batch_norm_0" [id=274, type=batch_norm]; -"275 ResNet/Sequential[layer4]/Bottleneck[1]/NNCFBatchNorm2d[bn3]/SymmetricQuantizer/symmetric_quantize_0" [id=275, type=symmetric_quantize]; -"276 ResNet/Sequential[layer4]/Bottleneck[1]/__iadd___0" [id=276, type=__iadd__]; -"277 ResNet/Sequential[layer4]/Bottleneck[1]/relu_2" [id=277, type=relu]; -"278 ResNet/Sequential[layer4]/Bottleneck[1]/SymmetricQuantizer/symmetric_quantize_2" [id=278, type=symmetric_quantize]; -"279 ResNet/Sequential[layer4]/Bottleneck[2]/NNCFConv2d[conv1]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" [id=279, type=symmetric_quantize]; -"280 ResNet/Sequential[layer4]/Bottleneck[2]/NNCFConv2d[conv1]/conv2d_0" [id=280, type=conv2d]; -"281 ResNet/Sequential[layer4]/Bottleneck[2]/NNCFBatchNorm2d[bn1]/batch_norm_0" [id=281, type=batch_norm]; -"282 ResNet/Sequential[layer4]/Bottleneck[2]/relu_0" [id=282, type=relu]; -"283 ResNet/Sequential[layer4]/Bottleneck[2]/SymmetricQuantizer/symmetric_quantize_0" [id=283, type=symmetric_quantize]; -"284 ResNet/Sequential[layer4]/Bottleneck[2]/NNCFConv2d[conv2]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" [id=284, type=symmetric_quantize]; -"285 ResNet/Sequential[layer4]/Bottleneck[2]/NNCFConv2d[conv2]/conv2d_0" [id=285, type=conv2d]; -"286 ResNet/Sequential[layer4]/Bottleneck[2]/NNCFBatchNorm2d[bn2]/batch_norm_0" [id=286, type=batch_norm]; -"287 ResNet/Sequential[layer4]/Bottleneck[2]/relu_1" [id=287, type=relu]; -"288 ResNet/Sequential[layer4]/Bottleneck[2]/SymmetricQuantizer/symmetric_quantize_1" [id=288, type=symmetric_quantize]; -"289 ResNet/Sequential[layer4]/Bottleneck[2]/NNCFConv2d[conv3]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" [id=289, type=symmetric_quantize]; -"290 ResNet/Sequential[layer4]/Bottleneck[2]/NNCFConv2d[conv3]/conv2d_0" [id=290, type=conv2d]; -"291 
ResNet/Sequential[layer4]/Bottleneck[2]/NNCFBatchNorm2d[bn3]/batch_norm_0" [id=291, type=batch_norm]; -"292 ResNet/Sequential[layer4]/Bottleneck[2]/NNCFBatchNorm2d[bn3]/SymmetricQuantizer/symmetric_quantize_0" [id=292, type=symmetric_quantize]; -"293 ResNet/Sequential[layer4]/Bottleneck[2]/__iadd___0" [id=293, type=__iadd__]; -"294 ResNet/Sequential[layer4]/Bottleneck[2]/relu_2" [id=294, type=relu]; -"295 ResNet/Sequential[layer4]/Bottleneck[2]/SymmetricQuantizer/symmetric_quantize_2" [id=295, type=symmetric_quantize]; -"296 ResNet/AdaptiveAvgPool2d[avgpool]/adaptive_avg_pool2d_0" [id=296, type=adaptive_avg_pool2d]; -"297 ResNet/AdaptiveAvgPool2d[avgpool]/SymmetricQuantizer/symmetric_quantize_0" [id=297, type=symmetric_quantize]; -"298 ResNet/flatten_0" [id=298, type=flatten]; -"299 ResNet/NNCFLinear[linear]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" [id=299, type=symmetric_quantize]; -"300 ResNet/NNCFLinear[linear]/linear_0" [id=300, type=linear]; -"301 /nncf_model_output_0" [id=301, type=nncf_model_output]; -"0 /nncf_model_input_0" -> "1 SymmetricQuantizer/symmetric_quantize_0"; -"1 SymmetricQuantizer/symmetric_quantize_0" -> "3 ResNet/NNCFConv2d[conv1]/conv2d_0"; -"2 ResNet/NNCFConv2d[conv1]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "3 ResNet/NNCFConv2d[conv1]/conv2d_0"; -"3 ResNet/NNCFConv2d[conv1]/conv2d_0" -> "4 ResNet/NNCFBatchNorm2d[bn1]/batch_norm_0"; -"4 ResNet/NNCFBatchNorm2d[bn1]/batch_norm_0" -> "5 ResNet/relu_0"; -"5 ResNet/relu_0" -> "6 ResNet/SymmetricQuantizer/symmetric_quantize_0"; -"6 ResNet/SymmetricQuantizer/symmetric_quantize_0" -> "7 ResNet/MaxPool2d[maxpool]/max_pool2d_0"; -"7 ResNet/MaxPool2d[maxpool]/max_pool2d_0" -> "9 ResNet/Sequential[layer1]/Bottleneck[0]/NNCFConv2d[conv1]/conv2d_0"; -"7 ResNet/MaxPool2d[maxpool]/max_pool2d_0" -> "23 ResNet/Sequential[layer1]/Bottleneck[0]/Sequential[shortcut]/NNCFConv2d[0]/conv2d_0"; -"8 ResNet/Sequential[layer1]/Bottleneck[0]/NNCFConv2d[conv1]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "9 ResNet/Sequential[layer1]/Bottleneck[0]/NNCFConv2d[conv1]/conv2d_0"; -"9 ResNet/Sequential[layer1]/Bottleneck[0]/NNCFConv2d[conv1]/conv2d_0" -> "10 ResNet/Sequential[layer1]/Bottleneck[0]/NNCFBatchNorm2d[bn1]/batch_norm_0"; -"10 ResNet/Sequential[layer1]/Bottleneck[0]/NNCFBatchNorm2d[bn1]/batch_norm_0" -> "11 ResNet/Sequential[layer1]/Bottleneck[0]/relu_0"; -"11 ResNet/Sequential[layer1]/Bottleneck[0]/relu_0" -> "12 ResNet/Sequential[layer1]/Bottleneck[0]/SymmetricQuantizer/symmetric_quantize_0"; -"12 ResNet/Sequential[layer1]/Bottleneck[0]/SymmetricQuantizer/symmetric_quantize_0" -> "14 ResNet/Sequential[layer1]/Bottleneck[0]/NNCFConv2d[conv2]/conv2d_0"; -"13 ResNet/Sequential[layer1]/Bottleneck[0]/NNCFConv2d[conv2]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "14 ResNet/Sequential[layer1]/Bottleneck[0]/NNCFConv2d[conv2]/conv2d_0"; -"14 ResNet/Sequential[layer1]/Bottleneck[0]/NNCFConv2d[conv2]/conv2d_0" -> "15 ResNet/Sequential[layer1]/Bottleneck[0]/NNCFBatchNorm2d[bn2]/batch_norm_0"; -"15 ResNet/Sequential[layer1]/Bottleneck[0]/NNCFBatchNorm2d[bn2]/batch_norm_0" -> "16 ResNet/Sequential[layer1]/Bottleneck[0]/relu_1"; -"16 ResNet/Sequential[layer1]/Bottleneck[0]/relu_1" -> "17 ResNet/Sequential[layer1]/Bottleneck[0]/SymmetricQuantizer/symmetric_quantize_1"; -"17 ResNet/Sequential[layer1]/Bottleneck[0]/SymmetricQuantizer/symmetric_quantize_1" -> "19 
ResNet/Sequential[layer1]/Bottleneck[0]/NNCFConv2d[conv3]/conv2d_0"; -"18 ResNet/Sequential[layer1]/Bottleneck[0]/NNCFConv2d[conv3]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "19 ResNet/Sequential[layer1]/Bottleneck[0]/NNCFConv2d[conv3]/conv2d_0"; -"19 ResNet/Sequential[layer1]/Bottleneck[0]/NNCFConv2d[conv3]/conv2d_0" -> "20 ResNet/Sequential[layer1]/Bottleneck[0]/NNCFBatchNorm2d[bn3]/batch_norm_0"; -"20 ResNet/Sequential[layer1]/Bottleneck[0]/NNCFBatchNorm2d[bn3]/batch_norm_0" -> "21 ResNet/Sequential[layer1]/Bottleneck[0]/NNCFBatchNorm2d[bn3]/SymmetricQuantizer/symmetric_quantize_0"; -"21 ResNet/Sequential[layer1]/Bottleneck[0]/NNCFBatchNorm2d[bn3]/SymmetricQuantizer/symmetric_quantize_0" -> "26 ResNet/Sequential[layer1]/Bottleneck[0]/__iadd___0"; -"22 ResNet/Sequential[layer1]/Bottleneck[0]/Sequential[shortcut]/NNCFConv2d[0]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "23 ResNet/Sequential[layer1]/Bottleneck[0]/Sequential[shortcut]/NNCFConv2d[0]/conv2d_0"; -"23 ResNet/Sequential[layer1]/Bottleneck[0]/Sequential[shortcut]/NNCFConv2d[0]/conv2d_0" -> "24 ResNet/Sequential[layer1]/Bottleneck[0]/Sequential[shortcut]/NNCFBatchNorm2d[1]/batch_norm_0"; -"24 ResNet/Sequential[layer1]/Bottleneck[0]/Sequential[shortcut]/NNCFBatchNorm2d[1]/batch_norm_0" -> "25 ResNet/Sequential[layer1]/Bottleneck[0]/Sequential[shortcut]/NNCFBatchNorm2d[1]/SymmetricQuantizer/symmetric_quantize_0"; -"25 ResNet/Sequential[layer1]/Bottleneck[0]/Sequential[shortcut]/NNCFBatchNorm2d[1]/SymmetricQuantizer/symmetric_quantize_0" -> "26 ResNet/Sequential[layer1]/Bottleneck[0]/__iadd___0"; -"26 ResNet/Sequential[layer1]/Bottleneck[0]/__iadd___0" -> "27 ResNet/Sequential[layer1]/Bottleneck[0]/relu_2"; -"27 ResNet/Sequential[layer1]/Bottleneck[0]/relu_2" -> "28 ResNet/Sequential[layer1]/Bottleneck[0]/SymmetricQuantizer/symmetric_quantize_2"; -"28 ResNet/Sequential[layer1]/Bottleneck[0]/SymmetricQuantizer/symmetric_quantize_2" -> "30 ResNet/Sequential[layer1]/Bottleneck[1]/NNCFConv2d[conv1]/conv2d_0"; -"28 ResNet/Sequential[layer1]/Bottleneck[0]/SymmetricQuantizer/symmetric_quantize_2" -> "43 ResNet/Sequential[layer1]/Bottleneck[1]/__iadd___0"; -"29 ResNet/Sequential[layer1]/Bottleneck[1]/NNCFConv2d[conv1]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "30 ResNet/Sequential[layer1]/Bottleneck[1]/NNCFConv2d[conv1]/conv2d_0"; -"30 ResNet/Sequential[layer1]/Bottleneck[1]/NNCFConv2d[conv1]/conv2d_0" -> "31 ResNet/Sequential[layer1]/Bottleneck[1]/NNCFBatchNorm2d[bn1]/batch_norm_0"; -"31 ResNet/Sequential[layer1]/Bottleneck[1]/NNCFBatchNorm2d[bn1]/batch_norm_0" -> "32 ResNet/Sequential[layer1]/Bottleneck[1]/relu_0"; -"32 ResNet/Sequential[layer1]/Bottleneck[1]/relu_0" -> "33 ResNet/Sequential[layer1]/Bottleneck[1]/SymmetricQuantizer/symmetric_quantize_0"; -"33 ResNet/Sequential[layer1]/Bottleneck[1]/SymmetricQuantizer/symmetric_quantize_0" -> "35 ResNet/Sequential[layer1]/Bottleneck[1]/NNCFConv2d[conv2]/conv2d_0"; -"34 ResNet/Sequential[layer1]/Bottleneck[1]/NNCFConv2d[conv2]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "35 ResNet/Sequential[layer1]/Bottleneck[1]/NNCFConv2d[conv2]/conv2d_0"; -"35 ResNet/Sequential[layer1]/Bottleneck[1]/NNCFConv2d[conv2]/conv2d_0" -> "36 ResNet/Sequential[layer1]/Bottleneck[1]/NNCFBatchNorm2d[bn2]/batch_norm_0"; -"36 ResNet/Sequential[layer1]/Bottleneck[1]/NNCFBatchNorm2d[bn2]/batch_norm_0" -> "37 ResNet/Sequential[layer1]/Bottleneck[1]/relu_1"; 
-"37 ResNet/Sequential[layer1]/Bottleneck[1]/relu_1" -> "38 ResNet/Sequential[layer1]/Bottleneck[1]/SymmetricQuantizer/symmetric_quantize_1"; -"38 ResNet/Sequential[layer1]/Bottleneck[1]/SymmetricQuantizer/symmetric_quantize_1" -> "40 ResNet/Sequential[layer1]/Bottleneck[1]/NNCFConv2d[conv3]/conv2d_0"; -"39 ResNet/Sequential[layer1]/Bottleneck[1]/NNCFConv2d[conv3]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "40 ResNet/Sequential[layer1]/Bottleneck[1]/NNCFConv2d[conv3]/conv2d_0"; -"40 ResNet/Sequential[layer1]/Bottleneck[1]/NNCFConv2d[conv3]/conv2d_0" -> "41 ResNet/Sequential[layer1]/Bottleneck[1]/NNCFBatchNorm2d[bn3]/batch_norm_0"; -"41 ResNet/Sequential[layer1]/Bottleneck[1]/NNCFBatchNorm2d[bn3]/batch_norm_0" -> "42 ResNet/Sequential[layer1]/Bottleneck[1]/NNCFBatchNorm2d[bn3]/SymmetricQuantizer/symmetric_quantize_0"; -"42 ResNet/Sequential[layer1]/Bottleneck[1]/NNCFBatchNorm2d[bn3]/SymmetricQuantizer/symmetric_quantize_0" -> "43 ResNet/Sequential[layer1]/Bottleneck[1]/__iadd___0"; -"43 ResNet/Sequential[layer1]/Bottleneck[1]/__iadd___0" -> "44 ResNet/Sequential[layer1]/Bottleneck[1]/relu_2"; -"44 ResNet/Sequential[layer1]/Bottleneck[1]/relu_2" -> "45 ResNet/Sequential[layer1]/Bottleneck[1]/SymmetricQuantizer/symmetric_quantize_2"; -"45 ResNet/Sequential[layer1]/Bottleneck[1]/SymmetricQuantizer/symmetric_quantize_2" -> "47 ResNet/Sequential[layer1]/Bottleneck[2]/NNCFConv2d[conv1]/conv2d_0"; -"45 ResNet/Sequential[layer1]/Bottleneck[1]/SymmetricQuantizer/symmetric_quantize_2" -> "60 ResNet/Sequential[layer1]/Bottleneck[2]/__iadd___0"; -"46 ResNet/Sequential[layer1]/Bottleneck[2]/NNCFConv2d[conv1]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "47 ResNet/Sequential[layer1]/Bottleneck[2]/NNCFConv2d[conv1]/conv2d_0"; -"47 ResNet/Sequential[layer1]/Bottleneck[2]/NNCFConv2d[conv1]/conv2d_0" -> "48 ResNet/Sequential[layer1]/Bottleneck[2]/NNCFBatchNorm2d[bn1]/batch_norm_0"; -"48 ResNet/Sequential[layer1]/Bottleneck[2]/NNCFBatchNorm2d[bn1]/batch_norm_0" -> "49 ResNet/Sequential[layer1]/Bottleneck[2]/relu_0"; -"49 ResNet/Sequential[layer1]/Bottleneck[2]/relu_0" -> "50 ResNet/Sequential[layer1]/Bottleneck[2]/SymmetricQuantizer/symmetric_quantize_0"; -"50 ResNet/Sequential[layer1]/Bottleneck[2]/SymmetricQuantizer/symmetric_quantize_0" -> "52 ResNet/Sequential[layer1]/Bottleneck[2]/NNCFConv2d[conv2]/conv2d_0"; -"51 ResNet/Sequential[layer1]/Bottleneck[2]/NNCFConv2d[conv2]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "52 ResNet/Sequential[layer1]/Bottleneck[2]/NNCFConv2d[conv2]/conv2d_0"; -"52 ResNet/Sequential[layer1]/Bottleneck[2]/NNCFConv2d[conv2]/conv2d_0" -> "53 ResNet/Sequential[layer1]/Bottleneck[2]/NNCFBatchNorm2d[bn2]/batch_norm_0"; -"53 ResNet/Sequential[layer1]/Bottleneck[2]/NNCFBatchNorm2d[bn2]/batch_norm_0" -> "54 ResNet/Sequential[layer1]/Bottleneck[2]/relu_1"; -"54 ResNet/Sequential[layer1]/Bottleneck[2]/relu_1" -> "55 ResNet/Sequential[layer1]/Bottleneck[2]/SymmetricQuantizer/symmetric_quantize_1"; -"55 ResNet/Sequential[layer1]/Bottleneck[2]/SymmetricQuantizer/symmetric_quantize_1" -> "57 ResNet/Sequential[layer1]/Bottleneck[2]/NNCFConv2d[conv3]/conv2d_0"; -"56 ResNet/Sequential[layer1]/Bottleneck[2]/NNCFConv2d[conv3]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "57 ResNet/Sequential[layer1]/Bottleneck[2]/NNCFConv2d[conv3]/conv2d_0"; -"57 ResNet/Sequential[layer1]/Bottleneck[2]/NNCFConv2d[conv3]/conv2d_0" -> "58 
ResNet/Sequential[layer1]/Bottleneck[2]/NNCFBatchNorm2d[bn3]/batch_norm_0"; -"58 ResNet/Sequential[layer1]/Bottleneck[2]/NNCFBatchNorm2d[bn3]/batch_norm_0" -> "59 ResNet/Sequential[layer1]/Bottleneck[2]/NNCFBatchNorm2d[bn3]/SymmetricQuantizer/symmetric_quantize_0"; -"59 ResNet/Sequential[layer1]/Bottleneck[2]/NNCFBatchNorm2d[bn3]/SymmetricQuantizer/symmetric_quantize_0" -> "60 ResNet/Sequential[layer1]/Bottleneck[2]/__iadd___0"; -"60 ResNet/Sequential[layer1]/Bottleneck[2]/__iadd___0" -> "61 ResNet/Sequential[layer1]/Bottleneck[2]/relu_2"; -"61 ResNet/Sequential[layer1]/Bottleneck[2]/relu_2" -> "62 ResNet/Sequential[layer1]/Bottleneck[2]/SymmetricQuantizer/symmetric_quantize_2"; -"62 ResNet/Sequential[layer1]/Bottleneck[2]/SymmetricQuantizer/symmetric_quantize_2" -> "64 ResNet/Sequential[layer2]/Bottleneck[0]/NNCFConv2d[conv1]/conv2d_0"; -"62 ResNet/Sequential[layer1]/Bottleneck[2]/SymmetricQuantizer/symmetric_quantize_2" -> "78 ResNet/Sequential[layer2]/Bottleneck[0]/Sequential[shortcut]/NNCFConv2d[0]/conv2d_0"; -"63 ResNet/Sequential[layer2]/Bottleneck[0]/NNCFConv2d[conv1]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "64 ResNet/Sequential[layer2]/Bottleneck[0]/NNCFConv2d[conv1]/conv2d_0"; -"64 ResNet/Sequential[layer2]/Bottleneck[0]/NNCFConv2d[conv1]/conv2d_0" -> "65 ResNet/Sequential[layer2]/Bottleneck[0]/NNCFBatchNorm2d[bn1]/batch_norm_0"; -"65 ResNet/Sequential[layer2]/Bottleneck[0]/NNCFBatchNorm2d[bn1]/batch_norm_0" -> "66 ResNet/Sequential[layer2]/Bottleneck[0]/relu_0"; -"66 ResNet/Sequential[layer2]/Bottleneck[0]/relu_0" -> "67 ResNet/Sequential[layer2]/Bottleneck[0]/SymmetricQuantizer/symmetric_quantize_0"; -"67 ResNet/Sequential[layer2]/Bottleneck[0]/SymmetricQuantizer/symmetric_quantize_0" -> "69 ResNet/Sequential[layer2]/Bottleneck[0]/NNCFConv2d[conv2]/conv2d_0"; -"68 ResNet/Sequential[layer2]/Bottleneck[0]/NNCFConv2d[conv2]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "69 ResNet/Sequential[layer2]/Bottleneck[0]/NNCFConv2d[conv2]/conv2d_0"; -"69 ResNet/Sequential[layer2]/Bottleneck[0]/NNCFConv2d[conv2]/conv2d_0" -> "70 ResNet/Sequential[layer2]/Bottleneck[0]/NNCFBatchNorm2d[bn2]/batch_norm_0"; -"70 ResNet/Sequential[layer2]/Bottleneck[0]/NNCFBatchNorm2d[bn2]/batch_norm_0" -> "71 ResNet/Sequential[layer2]/Bottleneck[0]/relu_1"; -"71 ResNet/Sequential[layer2]/Bottleneck[0]/relu_1" -> "72 ResNet/Sequential[layer2]/Bottleneck[0]/SymmetricQuantizer/symmetric_quantize_1"; -"72 ResNet/Sequential[layer2]/Bottleneck[0]/SymmetricQuantizer/symmetric_quantize_1" -> "74 ResNet/Sequential[layer2]/Bottleneck[0]/NNCFConv2d[conv3]/conv2d_0"; -"73 ResNet/Sequential[layer2]/Bottleneck[0]/NNCFConv2d[conv3]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "74 ResNet/Sequential[layer2]/Bottleneck[0]/NNCFConv2d[conv3]/conv2d_0"; -"74 ResNet/Sequential[layer2]/Bottleneck[0]/NNCFConv2d[conv3]/conv2d_0" -> "75 ResNet/Sequential[layer2]/Bottleneck[0]/NNCFBatchNorm2d[bn3]/batch_norm_0"; -"75 ResNet/Sequential[layer2]/Bottleneck[0]/NNCFBatchNorm2d[bn3]/batch_norm_0" -> "76 ResNet/Sequential[layer2]/Bottleneck[0]/NNCFBatchNorm2d[bn3]/SymmetricQuantizer/symmetric_quantize_0"; -"76 ResNet/Sequential[layer2]/Bottleneck[0]/NNCFBatchNorm2d[bn3]/SymmetricQuantizer/symmetric_quantize_0" -> "81 ResNet/Sequential[layer2]/Bottleneck[0]/__iadd___0"; -"77 ResNet/Sequential[layer2]/Bottleneck[0]/Sequential[shortcut]/NNCFConv2d[0]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" 
-> "78 ResNet/Sequential[layer2]/Bottleneck[0]/Sequential[shortcut]/NNCFConv2d[0]/conv2d_0"; -"78 ResNet/Sequential[layer2]/Bottleneck[0]/Sequential[shortcut]/NNCFConv2d[0]/conv2d_0" -> "79 ResNet/Sequential[layer2]/Bottleneck[0]/Sequential[shortcut]/NNCFBatchNorm2d[1]/batch_norm_0"; -"79 ResNet/Sequential[layer2]/Bottleneck[0]/Sequential[shortcut]/NNCFBatchNorm2d[1]/batch_norm_0" -> "80 ResNet/Sequential[layer2]/Bottleneck[0]/Sequential[shortcut]/NNCFBatchNorm2d[1]/SymmetricQuantizer/symmetric_quantize_0"; -"80 ResNet/Sequential[layer2]/Bottleneck[0]/Sequential[shortcut]/NNCFBatchNorm2d[1]/SymmetricQuantizer/symmetric_quantize_0" -> "81 ResNet/Sequential[layer2]/Bottleneck[0]/__iadd___0"; -"81 ResNet/Sequential[layer2]/Bottleneck[0]/__iadd___0" -> "82 ResNet/Sequential[layer2]/Bottleneck[0]/relu_2"; -"82 ResNet/Sequential[layer2]/Bottleneck[0]/relu_2" -> "83 ResNet/Sequential[layer2]/Bottleneck[0]/SymmetricQuantizer/symmetric_quantize_2"; -"83 ResNet/Sequential[layer2]/Bottleneck[0]/SymmetricQuantizer/symmetric_quantize_2" -> "85 ResNet/Sequential[layer2]/Bottleneck[1]/NNCFConv2d[conv1]/conv2d_0"; -"83 ResNet/Sequential[layer2]/Bottleneck[0]/SymmetricQuantizer/symmetric_quantize_2" -> "98 ResNet/Sequential[layer2]/Bottleneck[1]/__iadd___0"; -"84 ResNet/Sequential[layer2]/Bottleneck[1]/NNCFConv2d[conv1]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "85 ResNet/Sequential[layer2]/Bottleneck[1]/NNCFConv2d[conv1]/conv2d_0"; -"85 ResNet/Sequential[layer2]/Bottleneck[1]/NNCFConv2d[conv1]/conv2d_0" -> "86 ResNet/Sequential[layer2]/Bottleneck[1]/NNCFBatchNorm2d[bn1]/batch_norm_0"; -"86 ResNet/Sequential[layer2]/Bottleneck[1]/NNCFBatchNorm2d[bn1]/batch_norm_0" -> "87 ResNet/Sequential[layer2]/Bottleneck[1]/relu_0"; -"87 ResNet/Sequential[layer2]/Bottleneck[1]/relu_0" -> "88 ResNet/Sequential[layer2]/Bottleneck[1]/SymmetricQuantizer/symmetric_quantize_0"; -"88 ResNet/Sequential[layer2]/Bottleneck[1]/SymmetricQuantizer/symmetric_quantize_0" -> "90 ResNet/Sequential[layer2]/Bottleneck[1]/NNCFConv2d[conv2]/conv2d_0"; -"89 ResNet/Sequential[layer2]/Bottleneck[1]/NNCFConv2d[conv2]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "90 ResNet/Sequential[layer2]/Bottleneck[1]/NNCFConv2d[conv2]/conv2d_0"; -"90 ResNet/Sequential[layer2]/Bottleneck[1]/NNCFConv2d[conv2]/conv2d_0" -> "91 ResNet/Sequential[layer2]/Bottleneck[1]/NNCFBatchNorm2d[bn2]/batch_norm_0"; -"91 ResNet/Sequential[layer2]/Bottleneck[1]/NNCFBatchNorm2d[bn2]/batch_norm_0" -> "92 ResNet/Sequential[layer2]/Bottleneck[1]/relu_1"; -"92 ResNet/Sequential[layer2]/Bottleneck[1]/relu_1" -> "93 ResNet/Sequential[layer2]/Bottleneck[1]/SymmetricQuantizer/symmetric_quantize_1"; -"93 ResNet/Sequential[layer2]/Bottleneck[1]/SymmetricQuantizer/symmetric_quantize_1" -> "95 ResNet/Sequential[layer2]/Bottleneck[1]/NNCFConv2d[conv3]/conv2d_0"; -"94 ResNet/Sequential[layer2]/Bottleneck[1]/NNCFConv2d[conv3]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "95 ResNet/Sequential[layer2]/Bottleneck[1]/NNCFConv2d[conv3]/conv2d_0"; -"95 ResNet/Sequential[layer2]/Bottleneck[1]/NNCFConv2d[conv3]/conv2d_0" -> "96 ResNet/Sequential[layer2]/Bottleneck[1]/NNCFBatchNorm2d[bn3]/batch_norm_0"; -"96 ResNet/Sequential[layer2]/Bottleneck[1]/NNCFBatchNorm2d[bn3]/batch_norm_0" -> "97 ResNet/Sequential[layer2]/Bottleneck[1]/NNCFBatchNorm2d[bn3]/SymmetricQuantizer/symmetric_quantize_0"; -"97 
ResNet/Sequential[layer2]/Bottleneck[1]/NNCFBatchNorm2d[bn3]/SymmetricQuantizer/symmetric_quantize_0" -> "98 ResNet/Sequential[layer2]/Bottleneck[1]/__iadd___0"; -"98 ResNet/Sequential[layer2]/Bottleneck[1]/__iadd___0" -> "99 ResNet/Sequential[layer2]/Bottleneck[1]/relu_2"; -"99 ResNet/Sequential[layer2]/Bottleneck[1]/relu_2" -> "100 ResNet/Sequential[layer2]/Bottleneck[1]/SymmetricQuantizer/symmetric_quantize_2"; -"100 ResNet/Sequential[layer2]/Bottleneck[1]/SymmetricQuantizer/symmetric_quantize_2" -> "102 ResNet/Sequential[layer2]/Bottleneck[2]/NNCFConv2d[conv1]/conv2d_0"; -"100 ResNet/Sequential[layer2]/Bottleneck[1]/SymmetricQuantizer/symmetric_quantize_2" -> "115 ResNet/Sequential[layer2]/Bottleneck[2]/__iadd___0"; -"101 ResNet/Sequential[layer2]/Bottleneck[2]/NNCFConv2d[conv1]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "102 ResNet/Sequential[layer2]/Bottleneck[2]/NNCFConv2d[conv1]/conv2d_0"; -"102 ResNet/Sequential[layer2]/Bottleneck[2]/NNCFConv2d[conv1]/conv2d_0" -> "103 ResNet/Sequential[layer2]/Bottleneck[2]/NNCFBatchNorm2d[bn1]/batch_norm_0"; -"103 ResNet/Sequential[layer2]/Bottleneck[2]/NNCFBatchNorm2d[bn1]/batch_norm_0" -> "104 ResNet/Sequential[layer2]/Bottleneck[2]/relu_0"; -"104 ResNet/Sequential[layer2]/Bottleneck[2]/relu_0" -> "105 ResNet/Sequential[layer2]/Bottleneck[2]/SymmetricQuantizer/symmetric_quantize_0"; -"105 ResNet/Sequential[layer2]/Bottleneck[2]/SymmetricQuantizer/symmetric_quantize_0" -> "107 ResNet/Sequential[layer2]/Bottleneck[2]/NNCFConv2d[conv2]/conv2d_0"; -"106 ResNet/Sequential[layer2]/Bottleneck[2]/NNCFConv2d[conv2]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "107 ResNet/Sequential[layer2]/Bottleneck[2]/NNCFConv2d[conv2]/conv2d_0"; -"107 ResNet/Sequential[layer2]/Bottleneck[2]/NNCFConv2d[conv2]/conv2d_0" -> "108 ResNet/Sequential[layer2]/Bottleneck[2]/NNCFBatchNorm2d[bn2]/batch_norm_0"; -"108 ResNet/Sequential[layer2]/Bottleneck[2]/NNCFBatchNorm2d[bn2]/batch_norm_0" -> "109 ResNet/Sequential[layer2]/Bottleneck[2]/relu_1"; -"109 ResNet/Sequential[layer2]/Bottleneck[2]/relu_1" -> "110 ResNet/Sequential[layer2]/Bottleneck[2]/SymmetricQuantizer/symmetric_quantize_1"; -"110 ResNet/Sequential[layer2]/Bottleneck[2]/SymmetricQuantizer/symmetric_quantize_1" -> "112 ResNet/Sequential[layer2]/Bottleneck[2]/NNCFConv2d[conv3]/conv2d_0"; -"111 ResNet/Sequential[layer2]/Bottleneck[2]/NNCFConv2d[conv3]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "112 ResNet/Sequential[layer2]/Bottleneck[2]/NNCFConv2d[conv3]/conv2d_0"; -"112 ResNet/Sequential[layer2]/Bottleneck[2]/NNCFConv2d[conv3]/conv2d_0" -> "113 ResNet/Sequential[layer2]/Bottleneck[2]/NNCFBatchNorm2d[bn3]/batch_norm_0"; -"113 ResNet/Sequential[layer2]/Bottleneck[2]/NNCFBatchNorm2d[bn3]/batch_norm_0" -> "114 ResNet/Sequential[layer2]/Bottleneck[2]/NNCFBatchNorm2d[bn3]/SymmetricQuantizer/symmetric_quantize_0"; -"114 ResNet/Sequential[layer2]/Bottleneck[2]/NNCFBatchNorm2d[bn3]/SymmetricQuantizer/symmetric_quantize_0" -> "115 ResNet/Sequential[layer2]/Bottleneck[2]/__iadd___0"; -"115 ResNet/Sequential[layer2]/Bottleneck[2]/__iadd___0" -> "116 ResNet/Sequential[layer2]/Bottleneck[2]/relu_2"; -"116 ResNet/Sequential[layer2]/Bottleneck[2]/relu_2" -> "117 ResNet/Sequential[layer2]/Bottleneck[2]/SymmetricQuantizer/symmetric_quantize_2"; -"117 ResNet/Sequential[layer2]/Bottleneck[2]/SymmetricQuantizer/symmetric_quantize_2" -> "119 ResNet/Sequential[layer2]/Bottleneck[3]/NNCFConv2d[conv1]/conv2d_0"; -"117 
ResNet/Sequential[layer2]/Bottleneck[2]/SymmetricQuantizer/symmetric_quantize_2" -> "132 ResNet/Sequential[layer2]/Bottleneck[3]/__iadd___0"; -"118 ResNet/Sequential[layer2]/Bottleneck[3]/NNCFConv2d[conv1]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "119 ResNet/Sequential[layer2]/Bottleneck[3]/NNCFConv2d[conv1]/conv2d_0"; -"119 ResNet/Sequential[layer2]/Bottleneck[3]/NNCFConv2d[conv1]/conv2d_0" -> "120 ResNet/Sequential[layer2]/Bottleneck[3]/NNCFBatchNorm2d[bn1]/batch_norm_0"; -"120 ResNet/Sequential[layer2]/Bottleneck[3]/NNCFBatchNorm2d[bn1]/batch_norm_0" -> "121 ResNet/Sequential[layer2]/Bottleneck[3]/relu_0"; -"121 ResNet/Sequential[layer2]/Bottleneck[3]/relu_0" -> "122 ResNet/Sequential[layer2]/Bottleneck[3]/SymmetricQuantizer/symmetric_quantize_0"; -"122 ResNet/Sequential[layer2]/Bottleneck[3]/SymmetricQuantizer/symmetric_quantize_0" -> "124 ResNet/Sequential[layer2]/Bottleneck[3]/NNCFConv2d[conv2]/conv2d_0"; -"123 ResNet/Sequential[layer2]/Bottleneck[3]/NNCFConv2d[conv2]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "124 ResNet/Sequential[layer2]/Bottleneck[3]/NNCFConv2d[conv2]/conv2d_0"; -"124 ResNet/Sequential[layer2]/Bottleneck[3]/NNCFConv2d[conv2]/conv2d_0" -> "125 ResNet/Sequential[layer2]/Bottleneck[3]/NNCFBatchNorm2d[bn2]/batch_norm_0"; -"125 ResNet/Sequential[layer2]/Bottleneck[3]/NNCFBatchNorm2d[bn2]/batch_norm_0" -> "126 ResNet/Sequential[layer2]/Bottleneck[3]/relu_1"; -"126 ResNet/Sequential[layer2]/Bottleneck[3]/relu_1" -> "127 ResNet/Sequential[layer2]/Bottleneck[3]/SymmetricQuantizer/symmetric_quantize_1"; -"127 ResNet/Sequential[layer2]/Bottleneck[3]/SymmetricQuantizer/symmetric_quantize_1" -> "129 ResNet/Sequential[layer2]/Bottleneck[3]/NNCFConv2d[conv3]/conv2d_0"; -"128 ResNet/Sequential[layer2]/Bottleneck[3]/NNCFConv2d[conv3]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "129 ResNet/Sequential[layer2]/Bottleneck[3]/NNCFConv2d[conv3]/conv2d_0"; -"129 ResNet/Sequential[layer2]/Bottleneck[3]/NNCFConv2d[conv3]/conv2d_0" -> "130 ResNet/Sequential[layer2]/Bottleneck[3]/NNCFBatchNorm2d[bn3]/batch_norm_0"; -"130 ResNet/Sequential[layer2]/Bottleneck[3]/NNCFBatchNorm2d[bn3]/batch_norm_0" -> "131 ResNet/Sequential[layer2]/Bottleneck[3]/NNCFBatchNorm2d[bn3]/SymmetricQuantizer/symmetric_quantize_0"; -"131 ResNet/Sequential[layer2]/Bottleneck[3]/NNCFBatchNorm2d[bn3]/SymmetricQuantizer/symmetric_quantize_0" -> "132 ResNet/Sequential[layer2]/Bottleneck[3]/__iadd___0"; -"132 ResNet/Sequential[layer2]/Bottleneck[3]/__iadd___0" -> "133 ResNet/Sequential[layer2]/Bottleneck[3]/relu_2"; -"133 ResNet/Sequential[layer2]/Bottleneck[3]/relu_2" -> "134 ResNet/Sequential[layer2]/Bottleneck[3]/SymmetricQuantizer/symmetric_quantize_2"; -"134 ResNet/Sequential[layer2]/Bottleneck[3]/SymmetricQuantizer/symmetric_quantize_2" -> "136 ResNet/Sequential[layer3]/Bottleneck[0]/NNCFConv2d[conv1]/conv2d_0"; -"134 ResNet/Sequential[layer2]/Bottleneck[3]/SymmetricQuantizer/symmetric_quantize_2" -> "150 ResNet/Sequential[layer3]/Bottleneck[0]/Sequential[shortcut]/NNCFConv2d[0]/conv2d_0"; -"135 ResNet/Sequential[layer3]/Bottleneck[0]/NNCFConv2d[conv1]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "136 ResNet/Sequential[layer3]/Bottleneck[0]/NNCFConv2d[conv1]/conv2d_0"; -"136 ResNet/Sequential[layer3]/Bottleneck[0]/NNCFConv2d[conv1]/conv2d_0" -> "137 ResNet/Sequential[layer3]/Bottleneck[0]/NNCFBatchNorm2d[bn1]/batch_norm_0"; -"137 
ResNet/Sequential[layer3]/Bottleneck[0]/NNCFBatchNorm2d[bn1]/batch_norm_0" -> "138 ResNet/Sequential[layer3]/Bottleneck[0]/relu_0"; -"138 ResNet/Sequential[layer3]/Bottleneck[0]/relu_0" -> "139 ResNet/Sequential[layer3]/Bottleneck[0]/SymmetricQuantizer/symmetric_quantize_0"; -"139 ResNet/Sequential[layer3]/Bottleneck[0]/SymmetricQuantizer/symmetric_quantize_0" -> "141 ResNet/Sequential[layer3]/Bottleneck[0]/NNCFConv2d[conv2]/conv2d_0"; -"140 ResNet/Sequential[layer3]/Bottleneck[0]/NNCFConv2d[conv2]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "141 ResNet/Sequential[layer3]/Bottleneck[0]/NNCFConv2d[conv2]/conv2d_0"; -"141 ResNet/Sequential[layer3]/Bottleneck[0]/NNCFConv2d[conv2]/conv2d_0" -> "142 ResNet/Sequential[layer3]/Bottleneck[0]/NNCFBatchNorm2d[bn2]/batch_norm_0"; -"142 ResNet/Sequential[layer3]/Bottleneck[0]/NNCFBatchNorm2d[bn2]/batch_norm_0" -> "143 ResNet/Sequential[layer3]/Bottleneck[0]/relu_1"; -"143 ResNet/Sequential[layer3]/Bottleneck[0]/relu_1" -> "144 ResNet/Sequential[layer3]/Bottleneck[0]/SymmetricQuantizer/symmetric_quantize_1"; -"144 ResNet/Sequential[layer3]/Bottleneck[0]/SymmetricQuantizer/symmetric_quantize_1" -> "146 ResNet/Sequential[layer3]/Bottleneck[0]/NNCFConv2d[conv3]/conv2d_0"; -"145 ResNet/Sequential[layer3]/Bottleneck[0]/NNCFConv2d[conv3]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "146 ResNet/Sequential[layer3]/Bottleneck[0]/NNCFConv2d[conv3]/conv2d_0"; -"146 ResNet/Sequential[layer3]/Bottleneck[0]/NNCFConv2d[conv3]/conv2d_0" -> "147 ResNet/Sequential[layer3]/Bottleneck[0]/NNCFBatchNorm2d[bn3]/batch_norm_0"; -"147 ResNet/Sequential[layer3]/Bottleneck[0]/NNCFBatchNorm2d[bn3]/batch_norm_0" -> "148 ResNet/Sequential[layer3]/Bottleneck[0]/NNCFBatchNorm2d[bn3]/SymmetricQuantizer/symmetric_quantize_0"; -"148 ResNet/Sequential[layer3]/Bottleneck[0]/NNCFBatchNorm2d[bn3]/SymmetricQuantizer/symmetric_quantize_0" -> "153 ResNet/Sequential[layer3]/Bottleneck[0]/__iadd___0"; -"149 ResNet/Sequential[layer3]/Bottleneck[0]/Sequential[shortcut]/NNCFConv2d[0]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "150 ResNet/Sequential[layer3]/Bottleneck[0]/Sequential[shortcut]/NNCFConv2d[0]/conv2d_0"; -"150 ResNet/Sequential[layer3]/Bottleneck[0]/Sequential[shortcut]/NNCFConv2d[0]/conv2d_0" -> "151 ResNet/Sequential[layer3]/Bottleneck[0]/Sequential[shortcut]/NNCFBatchNorm2d[1]/batch_norm_0"; -"151 ResNet/Sequential[layer3]/Bottleneck[0]/Sequential[shortcut]/NNCFBatchNorm2d[1]/batch_norm_0" -> "152 ResNet/Sequential[layer3]/Bottleneck[0]/Sequential[shortcut]/NNCFBatchNorm2d[1]/SymmetricQuantizer/symmetric_quantize_0"; -"152 ResNet/Sequential[layer3]/Bottleneck[0]/Sequential[shortcut]/NNCFBatchNorm2d[1]/SymmetricQuantizer/symmetric_quantize_0" -> "153 ResNet/Sequential[layer3]/Bottleneck[0]/__iadd___0"; -"153 ResNet/Sequential[layer3]/Bottleneck[0]/__iadd___0" -> "154 ResNet/Sequential[layer3]/Bottleneck[0]/relu_2"; -"154 ResNet/Sequential[layer3]/Bottleneck[0]/relu_2" -> "155 ResNet/Sequential[layer3]/Bottleneck[0]/SymmetricQuantizer/symmetric_quantize_2"; -"155 ResNet/Sequential[layer3]/Bottleneck[0]/SymmetricQuantizer/symmetric_quantize_2" -> "157 ResNet/Sequential[layer3]/Bottleneck[1]/NNCFConv2d[conv1]/conv2d_0"; -"155 ResNet/Sequential[layer3]/Bottleneck[0]/SymmetricQuantizer/symmetric_quantize_2" -> "170 ResNet/Sequential[layer3]/Bottleneck[1]/__iadd___0"; -"156 
ResNet/Sequential[layer3]/Bottleneck[1]/NNCFConv2d[conv1]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "157 ResNet/Sequential[layer3]/Bottleneck[1]/NNCFConv2d[conv1]/conv2d_0"; -"157 ResNet/Sequential[layer3]/Bottleneck[1]/NNCFConv2d[conv1]/conv2d_0" -> "158 ResNet/Sequential[layer3]/Bottleneck[1]/NNCFBatchNorm2d[bn1]/batch_norm_0"; -"158 ResNet/Sequential[layer3]/Bottleneck[1]/NNCFBatchNorm2d[bn1]/batch_norm_0" -> "159 ResNet/Sequential[layer3]/Bottleneck[1]/relu_0"; -"159 ResNet/Sequential[layer3]/Bottleneck[1]/relu_0" -> "160 ResNet/Sequential[layer3]/Bottleneck[1]/SymmetricQuantizer/symmetric_quantize_0"; -"160 ResNet/Sequential[layer3]/Bottleneck[1]/SymmetricQuantizer/symmetric_quantize_0" -> "162 ResNet/Sequential[layer3]/Bottleneck[1]/NNCFConv2d[conv2]/conv2d_0"; -"161 ResNet/Sequential[layer3]/Bottleneck[1]/NNCFConv2d[conv2]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "162 ResNet/Sequential[layer3]/Bottleneck[1]/NNCFConv2d[conv2]/conv2d_0"; -"162 ResNet/Sequential[layer3]/Bottleneck[1]/NNCFConv2d[conv2]/conv2d_0" -> "163 ResNet/Sequential[layer3]/Bottleneck[1]/NNCFBatchNorm2d[bn2]/batch_norm_0"; -"163 ResNet/Sequential[layer3]/Bottleneck[1]/NNCFBatchNorm2d[bn2]/batch_norm_0" -> "164 ResNet/Sequential[layer3]/Bottleneck[1]/relu_1"; -"164 ResNet/Sequential[layer3]/Bottleneck[1]/relu_1" -> "165 ResNet/Sequential[layer3]/Bottleneck[1]/SymmetricQuantizer/symmetric_quantize_1"; -"165 ResNet/Sequential[layer3]/Bottleneck[1]/SymmetricQuantizer/symmetric_quantize_1" -> "167 ResNet/Sequential[layer3]/Bottleneck[1]/NNCFConv2d[conv3]/conv2d_0"; -"166 ResNet/Sequential[layer3]/Bottleneck[1]/NNCFConv2d[conv3]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "167 ResNet/Sequential[layer3]/Bottleneck[1]/NNCFConv2d[conv3]/conv2d_0"; -"167 ResNet/Sequential[layer3]/Bottleneck[1]/NNCFConv2d[conv3]/conv2d_0" -> "168 ResNet/Sequential[layer3]/Bottleneck[1]/NNCFBatchNorm2d[bn3]/batch_norm_0"; -"168 ResNet/Sequential[layer3]/Bottleneck[1]/NNCFBatchNorm2d[bn3]/batch_norm_0" -> "169 ResNet/Sequential[layer3]/Bottleneck[1]/NNCFBatchNorm2d[bn3]/SymmetricQuantizer/symmetric_quantize_0"; -"169 ResNet/Sequential[layer3]/Bottleneck[1]/NNCFBatchNorm2d[bn3]/SymmetricQuantizer/symmetric_quantize_0" -> "170 ResNet/Sequential[layer3]/Bottleneck[1]/__iadd___0"; -"170 ResNet/Sequential[layer3]/Bottleneck[1]/__iadd___0" -> "171 ResNet/Sequential[layer3]/Bottleneck[1]/relu_2"; -"171 ResNet/Sequential[layer3]/Bottleneck[1]/relu_2" -> "172 ResNet/Sequential[layer3]/Bottleneck[1]/SymmetricQuantizer/symmetric_quantize_2"; -"172 ResNet/Sequential[layer3]/Bottleneck[1]/SymmetricQuantizer/symmetric_quantize_2" -> "174 ResNet/Sequential[layer3]/Bottleneck[2]/NNCFConv2d[conv1]/conv2d_0"; -"172 ResNet/Sequential[layer3]/Bottleneck[1]/SymmetricQuantizer/symmetric_quantize_2" -> "187 ResNet/Sequential[layer3]/Bottleneck[2]/__iadd___0"; -"173 ResNet/Sequential[layer3]/Bottleneck[2]/NNCFConv2d[conv1]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "174 ResNet/Sequential[layer3]/Bottleneck[2]/NNCFConv2d[conv1]/conv2d_0"; -"174 ResNet/Sequential[layer3]/Bottleneck[2]/NNCFConv2d[conv1]/conv2d_0" -> "175 ResNet/Sequential[layer3]/Bottleneck[2]/NNCFBatchNorm2d[bn1]/batch_norm_0"; -"175 ResNet/Sequential[layer3]/Bottleneck[2]/NNCFBatchNorm2d[bn1]/batch_norm_0" -> "176 ResNet/Sequential[layer3]/Bottleneck[2]/relu_0"; -"176 ResNet/Sequential[layer3]/Bottleneck[2]/relu_0" -> "177 
ResNet/Sequential[layer3]/Bottleneck[2]/SymmetricQuantizer/symmetric_quantize_0"; -"177 ResNet/Sequential[layer3]/Bottleneck[2]/SymmetricQuantizer/symmetric_quantize_0" -> "179 ResNet/Sequential[layer3]/Bottleneck[2]/NNCFConv2d[conv2]/conv2d_0"; -"178 ResNet/Sequential[layer3]/Bottleneck[2]/NNCFConv2d[conv2]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "179 ResNet/Sequential[layer3]/Bottleneck[2]/NNCFConv2d[conv2]/conv2d_0"; -"179 ResNet/Sequential[layer3]/Bottleneck[2]/NNCFConv2d[conv2]/conv2d_0" -> "180 ResNet/Sequential[layer3]/Bottleneck[2]/NNCFBatchNorm2d[bn2]/batch_norm_0"; -"180 ResNet/Sequential[layer3]/Bottleneck[2]/NNCFBatchNorm2d[bn2]/batch_norm_0" -> "181 ResNet/Sequential[layer3]/Bottleneck[2]/relu_1"; -"181 ResNet/Sequential[layer3]/Bottleneck[2]/relu_1" -> "182 ResNet/Sequential[layer3]/Bottleneck[2]/SymmetricQuantizer/symmetric_quantize_1"; -"182 ResNet/Sequential[layer3]/Bottleneck[2]/SymmetricQuantizer/symmetric_quantize_1" -> "184 ResNet/Sequential[layer3]/Bottleneck[2]/NNCFConv2d[conv3]/conv2d_0"; -"183 ResNet/Sequential[layer3]/Bottleneck[2]/NNCFConv2d[conv3]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "184 ResNet/Sequential[layer3]/Bottleneck[2]/NNCFConv2d[conv3]/conv2d_0"; -"184 ResNet/Sequential[layer3]/Bottleneck[2]/NNCFConv2d[conv3]/conv2d_0" -> "185 ResNet/Sequential[layer3]/Bottleneck[2]/NNCFBatchNorm2d[bn3]/batch_norm_0"; -"185 ResNet/Sequential[layer3]/Bottleneck[2]/NNCFBatchNorm2d[bn3]/batch_norm_0" -> "186 ResNet/Sequential[layer3]/Bottleneck[2]/NNCFBatchNorm2d[bn3]/SymmetricQuantizer/symmetric_quantize_0"; -"186 ResNet/Sequential[layer3]/Bottleneck[2]/NNCFBatchNorm2d[bn3]/SymmetricQuantizer/symmetric_quantize_0" -> "187 ResNet/Sequential[layer3]/Bottleneck[2]/__iadd___0"; -"187 ResNet/Sequential[layer3]/Bottleneck[2]/__iadd___0" -> "188 ResNet/Sequential[layer3]/Bottleneck[2]/relu_2"; -"188 ResNet/Sequential[layer3]/Bottleneck[2]/relu_2" -> "189 ResNet/Sequential[layer3]/Bottleneck[2]/SymmetricQuantizer/symmetric_quantize_2"; -"189 ResNet/Sequential[layer3]/Bottleneck[2]/SymmetricQuantizer/symmetric_quantize_2" -> "191 ResNet/Sequential[layer3]/Bottleneck[3]/NNCFConv2d[conv1]/conv2d_0"; -"189 ResNet/Sequential[layer3]/Bottleneck[2]/SymmetricQuantizer/symmetric_quantize_2" -> "204 ResNet/Sequential[layer3]/Bottleneck[3]/__iadd___0"; -"190 ResNet/Sequential[layer3]/Bottleneck[3]/NNCFConv2d[conv1]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "191 ResNet/Sequential[layer3]/Bottleneck[3]/NNCFConv2d[conv1]/conv2d_0"; -"191 ResNet/Sequential[layer3]/Bottleneck[3]/NNCFConv2d[conv1]/conv2d_0" -> "192 ResNet/Sequential[layer3]/Bottleneck[3]/NNCFBatchNorm2d[bn1]/batch_norm_0"; -"192 ResNet/Sequential[layer3]/Bottleneck[3]/NNCFBatchNorm2d[bn1]/batch_norm_0" -> "193 ResNet/Sequential[layer3]/Bottleneck[3]/relu_0"; -"193 ResNet/Sequential[layer3]/Bottleneck[3]/relu_0" -> "194 ResNet/Sequential[layer3]/Bottleneck[3]/SymmetricQuantizer/symmetric_quantize_0"; -"194 ResNet/Sequential[layer3]/Bottleneck[3]/SymmetricQuantizer/symmetric_quantize_0" -> "196 ResNet/Sequential[layer3]/Bottleneck[3]/NNCFConv2d[conv2]/conv2d_0"; -"195 ResNet/Sequential[layer3]/Bottleneck[3]/NNCFConv2d[conv2]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "196 ResNet/Sequential[layer3]/Bottleneck[3]/NNCFConv2d[conv2]/conv2d_0"; -"196 ResNet/Sequential[layer3]/Bottleneck[3]/NNCFConv2d[conv2]/conv2d_0" -> "197 
ResNet/Sequential[layer3]/Bottleneck[3]/NNCFBatchNorm2d[bn2]/batch_norm_0"; -"197 ResNet/Sequential[layer3]/Bottleneck[3]/NNCFBatchNorm2d[bn2]/batch_norm_0" -> "198 ResNet/Sequential[layer3]/Bottleneck[3]/relu_1"; -"198 ResNet/Sequential[layer3]/Bottleneck[3]/relu_1" -> "199 ResNet/Sequential[layer3]/Bottleneck[3]/SymmetricQuantizer/symmetric_quantize_1"; -"199 ResNet/Sequential[layer3]/Bottleneck[3]/SymmetricQuantizer/symmetric_quantize_1" -> "201 ResNet/Sequential[layer3]/Bottleneck[3]/NNCFConv2d[conv3]/conv2d_0"; -"200 ResNet/Sequential[layer3]/Bottleneck[3]/NNCFConv2d[conv3]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "201 ResNet/Sequential[layer3]/Bottleneck[3]/NNCFConv2d[conv3]/conv2d_0"; -"201 ResNet/Sequential[layer3]/Bottleneck[3]/NNCFConv2d[conv3]/conv2d_0" -> "202 ResNet/Sequential[layer3]/Bottleneck[3]/NNCFBatchNorm2d[bn3]/batch_norm_0"; -"202 ResNet/Sequential[layer3]/Bottleneck[3]/NNCFBatchNorm2d[bn3]/batch_norm_0" -> "203 ResNet/Sequential[layer3]/Bottleneck[3]/NNCFBatchNorm2d[bn3]/SymmetricQuantizer/symmetric_quantize_0"; -"203 ResNet/Sequential[layer3]/Bottleneck[3]/NNCFBatchNorm2d[bn3]/SymmetricQuantizer/symmetric_quantize_0" -> "204 ResNet/Sequential[layer3]/Bottleneck[3]/__iadd___0"; -"204 ResNet/Sequential[layer3]/Bottleneck[3]/__iadd___0" -> "205 ResNet/Sequential[layer3]/Bottleneck[3]/relu_2"; -"205 ResNet/Sequential[layer3]/Bottleneck[3]/relu_2" -> "206 ResNet/Sequential[layer3]/Bottleneck[3]/SymmetricQuantizer/symmetric_quantize_2"; -"206 ResNet/Sequential[layer3]/Bottleneck[3]/SymmetricQuantizer/symmetric_quantize_2" -> "208 ResNet/Sequential[layer3]/Bottleneck[4]/NNCFConv2d[conv1]/conv2d_0"; -"206 ResNet/Sequential[layer3]/Bottleneck[3]/SymmetricQuantizer/symmetric_quantize_2" -> "221 ResNet/Sequential[layer3]/Bottleneck[4]/__iadd___0"; -"207 ResNet/Sequential[layer3]/Bottleneck[4]/NNCFConv2d[conv1]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "208 ResNet/Sequential[layer3]/Bottleneck[4]/NNCFConv2d[conv1]/conv2d_0"; -"208 ResNet/Sequential[layer3]/Bottleneck[4]/NNCFConv2d[conv1]/conv2d_0" -> "209 ResNet/Sequential[layer3]/Bottleneck[4]/NNCFBatchNorm2d[bn1]/batch_norm_0"; -"209 ResNet/Sequential[layer3]/Bottleneck[4]/NNCFBatchNorm2d[bn1]/batch_norm_0" -> "210 ResNet/Sequential[layer3]/Bottleneck[4]/relu_0"; -"210 ResNet/Sequential[layer3]/Bottleneck[4]/relu_0" -> "211 ResNet/Sequential[layer3]/Bottleneck[4]/SymmetricQuantizer/symmetric_quantize_0"; -"211 ResNet/Sequential[layer3]/Bottleneck[4]/SymmetricQuantizer/symmetric_quantize_0" -> "213 ResNet/Sequential[layer3]/Bottleneck[4]/NNCFConv2d[conv2]/conv2d_0"; -"212 ResNet/Sequential[layer3]/Bottleneck[4]/NNCFConv2d[conv2]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "213 ResNet/Sequential[layer3]/Bottleneck[4]/NNCFConv2d[conv2]/conv2d_0"; -"213 ResNet/Sequential[layer3]/Bottleneck[4]/NNCFConv2d[conv2]/conv2d_0" -> "214 ResNet/Sequential[layer3]/Bottleneck[4]/NNCFBatchNorm2d[bn2]/batch_norm_0"; -"214 ResNet/Sequential[layer3]/Bottleneck[4]/NNCFBatchNorm2d[bn2]/batch_norm_0" -> "215 ResNet/Sequential[layer3]/Bottleneck[4]/relu_1"; -"215 ResNet/Sequential[layer3]/Bottleneck[4]/relu_1" -> "216 ResNet/Sequential[layer3]/Bottleneck[4]/SymmetricQuantizer/symmetric_quantize_1"; -"216 ResNet/Sequential[layer3]/Bottleneck[4]/SymmetricQuantizer/symmetric_quantize_1" -> "218 ResNet/Sequential[layer3]/Bottleneck[4]/NNCFConv2d[conv3]/conv2d_0"; -"217 
ResNet/Sequential[layer3]/Bottleneck[4]/NNCFConv2d[conv3]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "218 ResNet/Sequential[layer3]/Bottleneck[4]/NNCFConv2d[conv3]/conv2d_0"; -"218 ResNet/Sequential[layer3]/Bottleneck[4]/NNCFConv2d[conv3]/conv2d_0" -> "219 ResNet/Sequential[layer3]/Bottleneck[4]/NNCFBatchNorm2d[bn3]/batch_norm_0"; -"219 ResNet/Sequential[layer3]/Bottleneck[4]/NNCFBatchNorm2d[bn3]/batch_norm_0" -> "220 ResNet/Sequential[layer3]/Bottleneck[4]/NNCFBatchNorm2d[bn3]/SymmetricQuantizer/symmetric_quantize_0"; -"220 ResNet/Sequential[layer3]/Bottleneck[4]/NNCFBatchNorm2d[bn3]/SymmetricQuantizer/symmetric_quantize_0" -> "221 ResNet/Sequential[layer3]/Bottleneck[4]/__iadd___0"; -"221 ResNet/Sequential[layer3]/Bottleneck[4]/__iadd___0" -> "222 ResNet/Sequential[layer3]/Bottleneck[4]/relu_2"; -"222 ResNet/Sequential[layer3]/Bottleneck[4]/relu_2" -> "223 ResNet/Sequential[layer3]/Bottleneck[4]/SymmetricQuantizer/symmetric_quantize_2"; -"223 ResNet/Sequential[layer3]/Bottleneck[4]/SymmetricQuantizer/symmetric_quantize_2" -> "225 ResNet/Sequential[layer3]/Bottleneck[5]/NNCFConv2d[conv1]/conv2d_0"; -"223 ResNet/Sequential[layer3]/Bottleneck[4]/SymmetricQuantizer/symmetric_quantize_2" -> "238 ResNet/Sequential[layer3]/Bottleneck[5]/__iadd___0"; -"224 ResNet/Sequential[layer3]/Bottleneck[5]/NNCFConv2d[conv1]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "225 ResNet/Sequential[layer3]/Bottleneck[5]/NNCFConv2d[conv1]/conv2d_0"; -"225 ResNet/Sequential[layer3]/Bottleneck[5]/NNCFConv2d[conv1]/conv2d_0" -> "226 ResNet/Sequential[layer3]/Bottleneck[5]/NNCFBatchNorm2d[bn1]/batch_norm_0"; -"226 ResNet/Sequential[layer3]/Bottleneck[5]/NNCFBatchNorm2d[bn1]/batch_norm_0" -> "227 ResNet/Sequential[layer3]/Bottleneck[5]/relu_0"; -"227 ResNet/Sequential[layer3]/Bottleneck[5]/relu_0" -> "228 ResNet/Sequential[layer3]/Bottleneck[5]/SymmetricQuantizer/symmetric_quantize_0"; -"228 ResNet/Sequential[layer3]/Bottleneck[5]/SymmetricQuantizer/symmetric_quantize_0" -> "230 ResNet/Sequential[layer3]/Bottleneck[5]/NNCFConv2d[conv2]/conv2d_0"; -"229 ResNet/Sequential[layer3]/Bottleneck[5]/NNCFConv2d[conv2]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "230 ResNet/Sequential[layer3]/Bottleneck[5]/NNCFConv2d[conv2]/conv2d_0"; -"230 ResNet/Sequential[layer3]/Bottleneck[5]/NNCFConv2d[conv2]/conv2d_0" -> "231 ResNet/Sequential[layer3]/Bottleneck[5]/NNCFBatchNorm2d[bn2]/batch_norm_0"; -"231 ResNet/Sequential[layer3]/Bottleneck[5]/NNCFBatchNorm2d[bn2]/batch_norm_0" -> "232 ResNet/Sequential[layer3]/Bottleneck[5]/relu_1"; -"232 ResNet/Sequential[layer3]/Bottleneck[5]/relu_1" -> "233 ResNet/Sequential[layer3]/Bottleneck[5]/SymmetricQuantizer/symmetric_quantize_1"; -"233 ResNet/Sequential[layer3]/Bottleneck[5]/SymmetricQuantizer/symmetric_quantize_1" -> "235 ResNet/Sequential[layer3]/Bottleneck[5]/NNCFConv2d[conv3]/conv2d_0"; -"234 ResNet/Sequential[layer3]/Bottleneck[5]/NNCFConv2d[conv3]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "235 ResNet/Sequential[layer3]/Bottleneck[5]/NNCFConv2d[conv3]/conv2d_0"; -"235 ResNet/Sequential[layer3]/Bottleneck[5]/NNCFConv2d[conv3]/conv2d_0" -> "236 ResNet/Sequential[layer3]/Bottleneck[5]/NNCFBatchNorm2d[bn3]/batch_norm_0"; -"236 ResNet/Sequential[layer3]/Bottleneck[5]/NNCFBatchNorm2d[bn3]/batch_norm_0" -> "237 ResNet/Sequential[layer3]/Bottleneck[5]/NNCFBatchNorm2d[bn3]/SymmetricQuantizer/symmetric_quantize_0"; -"237 
ResNet/Sequential[layer3]/Bottleneck[5]/NNCFBatchNorm2d[bn3]/SymmetricQuantizer/symmetric_quantize_0" -> "238 ResNet/Sequential[layer3]/Bottleneck[5]/__iadd___0"; -"238 ResNet/Sequential[layer3]/Bottleneck[5]/__iadd___0" -> "239 ResNet/Sequential[layer3]/Bottleneck[5]/relu_2"; -"239 ResNet/Sequential[layer3]/Bottleneck[5]/relu_2" -> "240 ResNet/Sequential[layer3]/Bottleneck[5]/SymmetricQuantizer/symmetric_quantize_2"; -"240 ResNet/Sequential[layer3]/Bottleneck[5]/SymmetricQuantizer/symmetric_quantize_2" -> "242 ResNet/Sequential[layer4]/Bottleneck[0]/NNCFConv2d[conv1]/conv2d_0"; -"240 ResNet/Sequential[layer3]/Bottleneck[5]/SymmetricQuantizer/symmetric_quantize_2" -> "256 ResNet/Sequential[layer4]/Bottleneck[0]/Sequential[shortcut]/NNCFConv2d[0]/conv2d_0"; -"241 ResNet/Sequential[layer4]/Bottleneck[0]/NNCFConv2d[conv1]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "242 ResNet/Sequential[layer4]/Bottleneck[0]/NNCFConv2d[conv1]/conv2d_0"; -"242 ResNet/Sequential[layer4]/Bottleneck[0]/NNCFConv2d[conv1]/conv2d_0" -> "243 ResNet/Sequential[layer4]/Bottleneck[0]/NNCFBatchNorm2d[bn1]/batch_norm_0"; -"243 ResNet/Sequential[layer4]/Bottleneck[0]/NNCFBatchNorm2d[bn1]/batch_norm_0" -> "244 ResNet/Sequential[layer4]/Bottleneck[0]/relu_0"; -"244 ResNet/Sequential[layer4]/Bottleneck[0]/relu_0" -> "245 ResNet/Sequential[layer4]/Bottleneck[0]/SymmetricQuantizer/symmetric_quantize_0"; -"245 ResNet/Sequential[layer4]/Bottleneck[0]/SymmetricQuantizer/symmetric_quantize_0" -> "247 ResNet/Sequential[layer4]/Bottleneck[0]/NNCFConv2d[conv2]/conv2d_0"; -"246 ResNet/Sequential[layer4]/Bottleneck[0]/NNCFConv2d[conv2]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "247 ResNet/Sequential[layer4]/Bottleneck[0]/NNCFConv2d[conv2]/conv2d_0"; -"247 ResNet/Sequential[layer4]/Bottleneck[0]/NNCFConv2d[conv2]/conv2d_0" -> "248 ResNet/Sequential[layer4]/Bottleneck[0]/NNCFBatchNorm2d[bn2]/batch_norm_0"; -"248 ResNet/Sequential[layer4]/Bottleneck[0]/NNCFBatchNorm2d[bn2]/batch_norm_0" -> "249 ResNet/Sequential[layer4]/Bottleneck[0]/relu_1"; -"249 ResNet/Sequential[layer4]/Bottleneck[0]/relu_1" -> "250 ResNet/Sequential[layer4]/Bottleneck[0]/SymmetricQuantizer/symmetric_quantize_1"; -"250 ResNet/Sequential[layer4]/Bottleneck[0]/SymmetricQuantizer/symmetric_quantize_1" -> "252 ResNet/Sequential[layer4]/Bottleneck[0]/NNCFConv2d[conv3]/conv2d_0"; -"251 ResNet/Sequential[layer4]/Bottleneck[0]/NNCFConv2d[conv3]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "252 ResNet/Sequential[layer4]/Bottleneck[0]/NNCFConv2d[conv3]/conv2d_0"; -"252 ResNet/Sequential[layer4]/Bottleneck[0]/NNCFConv2d[conv3]/conv2d_0" -> "253 ResNet/Sequential[layer4]/Bottleneck[0]/NNCFBatchNorm2d[bn3]/batch_norm_0"; -"253 ResNet/Sequential[layer4]/Bottleneck[0]/NNCFBatchNorm2d[bn3]/batch_norm_0" -> "254 ResNet/Sequential[layer4]/Bottleneck[0]/NNCFBatchNorm2d[bn3]/SymmetricQuantizer/symmetric_quantize_0"; -"254 ResNet/Sequential[layer4]/Bottleneck[0]/NNCFBatchNorm2d[bn3]/SymmetricQuantizer/symmetric_quantize_0" -> "259 ResNet/Sequential[layer4]/Bottleneck[0]/__iadd___0"; -"255 ResNet/Sequential[layer4]/Bottleneck[0]/Sequential[shortcut]/NNCFConv2d[0]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "256 ResNet/Sequential[layer4]/Bottleneck[0]/Sequential[shortcut]/NNCFConv2d[0]/conv2d_0"; -"256 ResNet/Sequential[layer4]/Bottleneck[0]/Sequential[shortcut]/NNCFConv2d[0]/conv2d_0" -> "257 
ResNet/Sequential[layer4]/Bottleneck[0]/Sequential[shortcut]/NNCFBatchNorm2d[1]/batch_norm_0"; -"257 ResNet/Sequential[layer4]/Bottleneck[0]/Sequential[shortcut]/NNCFBatchNorm2d[1]/batch_norm_0" -> "258 ResNet/Sequential[layer4]/Bottleneck[0]/Sequential[shortcut]/NNCFBatchNorm2d[1]/SymmetricQuantizer/symmetric_quantize_0"; -"258 ResNet/Sequential[layer4]/Bottleneck[0]/Sequential[shortcut]/NNCFBatchNorm2d[1]/SymmetricQuantizer/symmetric_quantize_0" -> "259 ResNet/Sequential[layer4]/Bottleneck[0]/__iadd___0"; -"259 ResNet/Sequential[layer4]/Bottleneck[0]/__iadd___0" -> "260 ResNet/Sequential[layer4]/Bottleneck[0]/relu_2"; -"260 ResNet/Sequential[layer4]/Bottleneck[0]/relu_2" -> "261 ResNet/Sequential[layer4]/Bottleneck[0]/SymmetricQuantizer/symmetric_quantize_2"; -"261 ResNet/Sequential[layer4]/Bottleneck[0]/SymmetricQuantizer/symmetric_quantize_2" -> "263 ResNet/Sequential[layer4]/Bottleneck[1]/NNCFConv2d[conv1]/conv2d_0"; -"261 ResNet/Sequential[layer4]/Bottleneck[0]/SymmetricQuantizer/symmetric_quantize_2" -> "276 ResNet/Sequential[layer4]/Bottleneck[1]/__iadd___0"; -"262 ResNet/Sequential[layer4]/Bottleneck[1]/NNCFConv2d[conv1]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "263 ResNet/Sequential[layer4]/Bottleneck[1]/NNCFConv2d[conv1]/conv2d_0"; -"263 ResNet/Sequential[layer4]/Bottleneck[1]/NNCFConv2d[conv1]/conv2d_0" -> "264 ResNet/Sequential[layer4]/Bottleneck[1]/NNCFBatchNorm2d[bn1]/batch_norm_0"; -"264 ResNet/Sequential[layer4]/Bottleneck[1]/NNCFBatchNorm2d[bn1]/batch_norm_0" -> "265 ResNet/Sequential[layer4]/Bottleneck[1]/relu_0"; -"265 ResNet/Sequential[layer4]/Bottleneck[1]/relu_0" -> "266 ResNet/Sequential[layer4]/Bottleneck[1]/SymmetricQuantizer/symmetric_quantize_0"; -"266 ResNet/Sequential[layer4]/Bottleneck[1]/SymmetricQuantizer/symmetric_quantize_0" -> "268 ResNet/Sequential[layer4]/Bottleneck[1]/NNCFConv2d[conv2]/conv2d_0"; -"267 ResNet/Sequential[layer4]/Bottleneck[1]/NNCFConv2d[conv2]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "268 ResNet/Sequential[layer4]/Bottleneck[1]/NNCFConv2d[conv2]/conv2d_0"; -"268 ResNet/Sequential[layer4]/Bottleneck[1]/NNCFConv2d[conv2]/conv2d_0" -> "269 ResNet/Sequential[layer4]/Bottleneck[1]/NNCFBatchNorm2d[bn2]/batch_norm_0"; -"269 ResNet/Sequential[layer4]/Bottleneck[1]/NNCFBatchNorm2d[bn2]/batch_norm_0" -> "270 ResNet/Sequential[layer4]/Bottleneck[1]/relu_1"; -"270 ResNet/Sequential[layer4]/Bottleneck[1]/relu_1" -> "271 ResNet/Sequential[layer4]/Bottleneck[1]/SymmetricQuantizer/symmetric_quantize_1"; -"271 ResNet/Sequential[layer4]/Bottleneck[1]/SymmetricQuantizer/symmetric_quantize_1" -> "273 ResNet/Sequential[layer4]/Bottleneck[1]/NNCFConv2d[conv3]/conv2d_0"; -"272 ResNet/Sequential[layer4]/Bottleneck[1]/NNCFConv2d[conv3]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "273 ResNet/Sequential[layer4]/Bottleneck[1]/NNCFConv2d[conv3]/conv2d_0"; -"273 ResNet/Sequential[layer4]/Bottleneck[1]/NNCFConv2d[conv3]/conv2d_0" -> "274 ResNet/Sequential[layer4]/Bottleneck[1]/NNCFBatchNorm2d[bn3]/batch_norm_0"; -"274 ResNet/Sequential[layer4]/Bottleneck[1]/NNCFBatchNorm2d[bn3]/batch_norm_0" -> "275 ResNet/Sequential[layer4]/Bottleneck[1]/NNCFBatchNorm2d[bn3]/SymmetricQuantizer/symmetric_quantize_0"; -"275 ResNet/Sequential[layer4]/Bottleneck[1]/NNCFBatchNorm2d[bn3]/SymmetricQuantizer/symmetric_quantize_0" -> "276 ResNet/Sequential[layer4]/Bottleneck[1]/__iadd___0"; -"276 ResNet/Sequential[layer4]/Bottleneck[1]/__iadd___0" -> "277 
ResNet/Sequential[layer4]/Bottleneck[1]/relu_2"; -"277 ResNet/Sequential[layer4]/Bottleneck[1]/relu_2" -> "278 ResNet/Sequential[layer4]/Bottleneck[1]/SymmetricQuantizer/symmetric_quantize_2"; -"278 ResNet/Sequential[layer4]/Bottleneck[1]/SymmetricQuantizer/symmetric_quantize_2" -> "280 ResNet/Sequential[layer4]/Bottleneck[2]/NNCFConv2d[conv1]/conv2d_0"; -"278 ResNet/Sequential[layer4]/Bottleneck[1]/SymmetricQuantizer/symmetric_quantize_2" -> "293 ResNet/Sequential[layer4]/Bottleneck[2]/__iadd___0"; -"279 ResNet/Sequential[layer4]/Bottleneck[2]/NNCFConv2d[conv1]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "280 ResNet/Sequential[layer4]/Bottleneck[2]/NNCFConv2d[conv1]/conv2d_0"; -"280 ResNet/Sequential[layer4]/Bottleneck[2]/NNCFConv2d[conv1]/conv2d_0" -> "281 ResNet/Sequential[layer4]/Bottleneck[2]/NNCFBatchNorm2d[bn1]/batch_norm_0"; -"281 ResNet/Sequential[layer4]/Bottleneck[2]/NNCFBatchNorm2d[bn1]/batch_norm_0" -> "282 ResNet/Sequential[layer4]/Bottleneck[2]/relu_0"; -"282 ResNet/Sequential[layer4]/Bottleneck[2]/relu_0" -> "283 ResNet/Sequential[layer4]/Bottleneck[2]/SymmetricQuantizer/symmetric_quantize_0"; -"283 ResNet/Sequential[layer4]/Bottleneck[2]/SymmetricQuantizer/symmetric_quantize_0" -> "285 ResNet/Sequential[layer4]/Bottleneck[2]/NNCFConv2d[conv2]/conv2d_0"; -"284 ResNet/Sequential[layer4]/Bottleneck[2]/NNCFConv2d[conv2]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "285 ResNet/Sequential[layer4]/Bottleneck[2]/NNCFConv2d[conv2]/conv2d_0"; -"285 ResNet/Sequential[layer4]/Bottleneck[2]/NNCFConv2d[conv2]/conv2d_0" -> "286 ResNet/Sequential[layer4]/Bottleneck[2]/NNCFBatchNorm2d[bn2]/batch_norm_0"; -"286 ResNet/Sequential[layer4]/Bottleneck[2]/NNCFBatchNorm2d[bn2]/batch_norm_0" -> "287 ResNet/Sequential[layer4]/Bottleneck[2]/relu_1"; -"287 ResNet/Sequential[layer4]/Bottleneck[2]/relu_1" -> "288 ResNet/Sequential[layer4]/Bottleneck[2]/SymmetricQuantizer/symmetric_quantize_1"; -"288 ResNet/Sequential[layer4]/Bottleneck[2]/SymmetricQuantizer/symmetric_quantize_1" -> "290 ResNet/Sequential[layer4]/Bottleneck[2]/NNCFConv2d[conv3]/conv2d_0"; -"289 ResNet/Sequential[layer4]/Bottleneck[2]/NNCFConv2d[conv3]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "290 ResNet/Sequential[layer4]/Bottleneck[2]/NNCFConv2d[conv3]/conv2d_0"; -"290 ResNet/Sequential[layer4]/Bottleneck[2]/NNCFConv2d[conv3]/conv2d_0" -> "291 ResNet/Sequential[layer4]/Bottleneck[2]/NNCFBatchNorm2d[bn3]/batch_norm_0"; -"291 ResNet/Sequential[layer4]/Bottleneck[2]/NNCFBatchNorm2d[bn3]/batch_norm_0" -> "292 ResNet/Sequential[layer4]/Bottleneck[2]/NNCFBatchNorm2d[bn3]/SymmetricQuantizer/symmetric_quantize_0"; -"292 ResNet/Sequential[layer4]/Bottleneck[2]/NNCFBatchNorm2d[bn3]/SymmetricQuantizer/symmetric_quantize_0" -> "293 ResNet/Sequential[layer4]/Bottleneck[2]/__iadd___0"; -"293 ResNet/Sequential[layer4]/Bottleneck[2]/__iadd___0" -> "294 ResNet/Sequential[layer4]/Bottleneck[2]/relu_2"; -"294 ResNet/Sequential[layer4]/Bottleneck[2]/relu_2" -> "295 ResNet/Sequential[layer4]/Bottleneck[2]/SymmetricQuantizer/symmetric_quantize_2"; -"295 ResNet/Sequential[layer4]/Bottleneck[2]/SymmetricQuantizer/symmetric_quantize_2" -> "296 ResNet/AdaptiveAvgPool2d[avgpool]/adaptive_avg_pool2d_0"; -"296 ResNet/AdaptiveAvgPool2d[avgpool]/adaptive_avg_pool2d_0" -> "297 ResNet/AdaptiveAvgPool2d[avgpool]/SymmetricQuantizer/symmetric_quantize_0"; -"297 ResNet/AdaptiveAvgPool2d[avgpool]/SymmetricQuantizer/symmetric_quantize_0" -> "298 
ResNet/flatten_0"; -"298 ResNet/flatten_0" -> "300 ResNet/NNCFLinear[linear]/linear_0"; -"299 ResNet/NNCFLinear[linear]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "300 ResNet/NNCFLinear[linear]/linear_0"; -"300 ResNet/NNCFLinear[linear]/linear_0" -> "301 /nncf_model_output_0"; -} diff --git a/tests/torch/data/reference_graphs/quantized/ptq/symmetric/resnet50_cpu_spr.dot b/tests/torch/data/reference_graphs/quantized/ptq/symmetric/resnet50_cpu_spr.dot new file mode 100644 index 00000000000..be69a7c2d68 --- /dev/null +++ b/tests/torch/data/reference_graphs/quantized/ptq/symmetric/resnet50_cpu_spr.dot @@ -0,0 +1,589 @@ +strict digraph { +"0 /nncf_model_input_0" [id=0, type=nncf_model_input]; +"1 SymmetricQuantizer/symmetric_quantize_0" [id=1, type=symmetric_quantize]; +"2 ResNet/NNCFConv2d[conv1]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" [id=2, type=symmetric_quantize]; +"3 ResNet/NNCFConv2d[conv1]/conv2d_0" [id=3, type=conv2d]; +"4 ResNet/NNCFBatchNorm2d[bn1]/batch_norm_0" [id=4, type=batch_norm]; +"5 ResNet/relu_0" [id=5, type=relu]; +"6 ResNet/SymmetricQuantizer/symmetric_quantize_0" [id=6, type=symmetric_quantize]; +"7 ResNet/MaxPool2d[maxpool]/max_pool2d_0" [id=7, type=max_pool2d]; +"8 ResNet/Sequential[layer1]/Bottleneck[0]/NNCFConv2d[conv1]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" [id=8, type=symmetric_quantize]; +"9 ResNet/Sequential[layer1]/Bottleneck[0]/NNCFConv2d[conv1]/conv2d_0" [id=9, type=conv2d]; +"10 ResNet/Sequential[layer1]/Bottleneck[0]/NNCFBatchNorm2d[bn1]/batch_norm_0" [id=10, type=batch_norm]; +"11 ResNet/Sequential[layer1]/Bottleneck[0]/relu_0" [id=11, type=relu]; +"12 ResNet/Sequential[layer1]/Bottleneck[0]/SymmetricQuantizer/symmetric_quantize_0" [id=12, type=symmetric_quantize]; +"13 ResNet/Sequential[layer1]/Bottleneck[0]/NNCFConv2d[conv2]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" [id=13, type=symmetric_quantize]; +"14 ResNet/Sequential[layer1]/Bottleneck[0]/NNCFConv2d[conv2]/conv2d_0" [id=14, type=conv2d]; +"15 ResNet/Sequential[layer1]/Bottleneck[0]/NNCFBatchNorm2d[bn2]/batch_norm_0" [id=15, type=batch_norm]; +"16 ResNet/Sequential[layer1]/Bottleneck[0]/relu_1" [id=16, type=relu]; +"17 ResNet/Sequential[layer1]/Bottleneck[0]/SymmetricQuantizer/symmetric_quantize_1" [id=17, type=symmetric_quantize]; +"18 ResNet/Sequential[layer1]/Bottleneck[0]/NNCFConv2d[conv3]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" [id=18, type=symmetric_quantize]; +"19 ResNet/Sequential[layer1]/Bottleneck[0]/NNCFConv2d[conv3]/conv2d_0" [id=19, type=conv2d]; +"20 ResNet/Sequential[layer1]/Bottleneck[0]/NNCFBatchNorm2d[bn3]/batch_norm_0" [id=20, type=batch_norm]; +"21 ResNet/Sequential[layer1]/Bottleneck[0]/Sequential[shortcut]/NNCFConv2d[0]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" [id=21, type=symmetric_quantize]; +"22 ResNet/Sequential[layer1]/Bottleneck[0]/Sequential[shortcut]/NNCFConv2d[0]/conv2d_0" [id=22, type=conv2d]; +"23 ResNet/Sequential[layer1]/Bottleneck[0]/Sequential[shortcut]/NNCFBatchNorm2d[1]/batch_norm_0" [id=23, type=batch_norm]; +"24 ResNet/Sequential[layer1]/Bottleneck[0]/Sequential[shortcut]/NNCFBatchNorm2d[1]/SymmetricQuantizer/symmetric_quantize_0" [id=24, type=symmetric_quantize]; +"25 ResNet/Sequential[layer1]/Bottleneck[0]/__iadd___0" [id=25, type=__iadd__]; +"26 ResNet/Sequential[layer1]/Bottleneck[0]/relu_2" [id=26, type=relu]; +"27 
ResNet/Sequential[layer1]/Bottleneck[0]/SymmetricQuantizer/symmetric_quantize_2" [id=27, type=symmetric_quantize]; +"28 ResNet/Sequential[layer1]/Bottleneck[1]/NNCFConv2d[conv1]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" [id=28, type=symmetric_quantize]; +"29 ResNet/Sequential[layer1]/Bottleneck[1]/NNCFConv2d[conv1]/conv2d_0" [id=29, type=conv2d]; +"30 ResNet/Sequential[layer1]/Bottleneck[1]/NNCFBatchNorm2d[bn1]/batch_norm_0" [id=30, type=batch_norm]; +"31 ResNet/Sequential[layer1]/Bottleneck[1]/relu_0" [id=31, type=relu]; +"32 ResNet/Sequential[layer1]/Bottleneck[1]/SymmetricQuantizer/symmetric_quantize_0" [id=32, type=symmetric_quantize]; +"33 ResNet/Sequential[layer1]/Bottleneck[1]/NNCFConv2d[conv2]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" [id=33, type=symmetric_quantize]; +"34 ResNet/Sequential[layer1]/Bottleneck[1]/NNCFConv2d[conv2]/conv2d_0" [id=34, type=conv2d]; +"35 ResNet/Sequential[layer1]/Bottleneck[1]/NNCFBatchNorm2d[bn2]/batch_norm_0" [id=35, type=batch_norm]; +"36 ResNet/Sequential[layer1]/Bottleneck[1]/relu_1" [id=36, type=relu]; +"37 ResNet/Sequential[layer1]/Bottleneck[1]/SymmetricQuantizer/symmetric_quantize_1" [id=37, type=symmetric_quantize]; +"38 ResNet/Sequential[layer1]/Bottleneck[1]/NNCFConv2d[conv3]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" [id=38, type=symmetric_quantize]; +"39 ResNet/Sequential[layer1]/Bottleneck[1]/NNCFConv2d[conv3]/conv2d_0" [id=39, type=conv2d]; +"40 ResNet/Sequential[layer1]/Bottleneck[1]/NNCFBatchNorm2d[bn3]/batch_norm_0" [id=40, type=batch_norm]; +"41 ResNet/Sequential[layer1]/Bottleneck[1]/__iadd___0" [id=41, type=__iadd__]; +"42 ResNet/Sequential[layer1]/Bottleneck[1]/relu_2" [id=42, type=relu]; +"43 ResNet/Sequential[layer1]/Bottleneck[1]/SymmetricQuantizer/symmetric_quantize_2" [id=43, type=symmetric_quantize]; +"44 ResNet/Sequential[layer1]/Bottleneck[2]/NNCFConv2d[conv1]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" [id=44, type=symmetric_quantize]; +"45 ResNet/Sequential[layer1]/Bottleneck[2]/NNCFConv2d[conv1]/conv2d_0" [id=45, type=conv2d]; +"46 ResNet/Sequential[layer1]/Bottleneck[2]/NNCFBatchNorm2d[bn1]/batch_norm_0" [id=46, type=batch_norm]; +"47 ResNet/Sequential[layer1]/Bottleneck[2]/relu_0" [id=47, type=relu]; +"48 ResNet/Sequential[layer1]/Bottleneck[2]/SymmetricQuantizer/symmetric_quantize_0" [id=48, type=symmetric_quantize]; +"49 ResNet/Sequential[layer1]/Bottleneck[2]/NNCFConv2d[conv2]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" [id=49, type=symmetric_quantize]; +"50 ResNet/Sequential[layer1]/Bottleneck[2]/NNCFConv2d[conv2]/conv2d_0" [id=50, type=conv2d]; +"51 ResNet/Sequential[layer1]/Bottleneck[2]/NNCFBatchNorm2d[bn2]/batch_norm_0" [id=51, type=batch_norm]; +"52 ResNet/Sequential[layer1]/Bottleneck[2]/relu_1" [id=52, type=relu]; +"53 ResNet/Sequential[layer1]/Bottleneck[2]/SymmetricQuantizer/symmetric_quantize_1" [id=53, type=symmetric_quantize]; +"54 ResNet/Sequential[layer1]/Bottleneck[2]/NNCFConv2d[conv3]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" [id=54, type=symmetric_quantize]; +"55 ResNet/Sequential[layer1]/Bottleneck[2]/NNCFConv2d[conv3]/conv2d_0" [id=55, type=conv2d]; +"56 ResNet/Sequential[layer1]/Bottleneck[2]/NNCFBatchNorm2d[bn3]/batch_norm_0" [id=56, type=batch_norm]; +"57 ResNet/Sequential[layer1]/Bottleneck[2]/__iadd___0" [id=57, type=__iadd__]; +"58 
ResNet/Sequential[layer1]/Bottleneck[2]/relu_2" [id=58, type=relu]; +"59 ResNet/Sequential[layer1]/Bottleneck[2]/SymmetricQuantizer/symmetric_quantize_2" [id=59, type=symmetric_quantize]; +"60 ResNet/Sequential[layer2]/Bottleneck[0]/NNCFConv2d[conv1]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" [id=60, type=symmetric_quantize]; +"61 ResNet/Sequential[layer2]/Bottleneck[0]/NNCFConv2d[conv1]/conv2d_0" [id=61, type=conv2d]; +"62 ResNet/Sequential[layer2]/Bottleneck[0]/NNCFBatchNorm2d[bn1]/batch_norm_0" [id=62, type=batch_norm]; +"63 ResNet/Sequential[layer2]/Bottleneck[0]/relu_0" [id=63, type=relu]; +"64 ResNet/Sequential[layer2]/Bottleneck[0]/SymmetricQuantizer/symmetric_quantize_0" [id=64, type=symmetric_quantize]; +"65 ResNet/Sequential[layer2]/Bottleneck[0]/NNCFConv2d[conv2]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" [id=65, type=symmetric_quantize]; +"66 ResNet/Sequential[layer2]/Bottleneck[0]/NNCFConv2d[conv2]/conv2d_0" [id=66, type=conv2d]; +"67 ResNet/Sequential[layer2]/Bottleneck[0]/NNCFBatchNorm2d[bn2]/batch_norm_0" [id=67, type=batch_norm]; +"68 ResNet/Sequential[layer2]/Bottleneck[0]/relu_1" [id=68, type=relu]; +"69 ResNet/Sequential[layer2]/Bottleneck[0]/SymmetricQuantizer/symmetric_quantize_1" [id=69, type=symmetric_quantize]; +"70 ResNet/Sequential[layer2]/Bottleneck[0]/NNCFConv2d[conv3]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" [id=70, type=symmetric_quantize]; +"71 ResNet/Sequential[layer2]/Bottleneck[0]/NNCFConv2d[conv3]/conv2d_0" [id=71, type=conv2d]; +"72 ResNet/Sequential[layer2]/Bottleneck[0]/NNCFBatchNorm2d[bn3]/batch_norm_0" [id=72, type=batch_norm]; +"73 ResNet/Sequential[layer2]/Bottleneck[0]/Sequential[shortcut]/NNCFConv2d[0]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" [id=73, type=symmetric_quantize]; +"74 ResNet/Sequential[layer2]/Bottleneck[0]/Sequential[shortcut]/NNCFConv2d[0]/conv2d_0" [id=74, type=conv2d]; +"75 ResNet/Sequential[layer2]/Bottleneck[0]/Sequential[shortcut]/NNCFBatchNorm2d[1]/batch_norm_0" [id=75, type=batch_norm]; +"76 ResNet/Sequential[layer2]/Bottleneck[0]/Sequential[shortcut]/NNCFBatchNorm2d[1]/SymmetricQuantizer/symmetric_quantize_0" [id=76, type=symmetric_quantize]; +"77 ResNet/Sequential[layer2]/Bottleneck[0]/__iadd___0" [id=77, type=__iadd__]; +"78 ResNet/Sequential[layer2]/Bottleneck[0]/relu_2" [id=78, type=relu]; +"79 ResNet/Sequential[layer2]/Bottleneck[0]/SymmetricQuantizer/symmetric_quantize_2" [id=79, type=symmetric_quantize]; +"80 ResNet/Sequential[layer2]/Bottleneck[1]/NNCFConv2d[conv1]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" [id=80, type=symmetric_quantize]; +"81 ResNet/Sequential[layer2]/Bottleneck[1]/NNCFConv2d[conv1]/conv2d_0" [id=81, type=conv2d]; +"82 ResNet/Sequential[layer2]/Bottleneck[1]/NNCFBatchNorm2d[bn1]/batch_norm_0" [id=82, type=batch_norm]; +"83 ResNet/Sequential[layer2]/Bottleneck[1]/relu_0" [id=83, type=relu]; +"84 ResNet/Sequential[layer2]/Bottleneck[1]/SymmetricQuantizer/symmetric_quantize_0" [id=84, type=symmetric_quantize]; +"85 ResNet/Sequential[layer2]/Bottleneck[1]/NNCFConv2d[conv2]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" [id=85, type=symmetric_quantize]; +"86 ResNet/Sequential[layer2]/Bottleneck[1]/NNCFConv2d[conv2]/conv2d_0" [id=86, type=conv2d]; +"87 ResNet/Sequential[layer2]/Bottleneck[1]/NNCFBatchNorm2d[bn2]/batch_norm_0" [id=87, type=batch_norm]; +"88 
ResNet/Sequential[layer2]/Bottleneck[1]/relu_1" [id=88, type=relu]; +"89 ResNet/Sequential[layer2]/Bottleneck[1]/SymmetricQuantizer/symmetric_quantize_1" [id=89, type=symmetric_quantize]; +"90 ResNet/Sequential[layer2]/Bottleneck[1]/NNCFConv2d[conv3]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" [id=90, type=symmetric_quantize]; +"91 ResNet/Sequential[layer2]/Bottleneck[1]/NNCFConv2d[conv3]/conv2d_0" [id=91, type=conv2d]; +"92 ResNet/Sequential[layer2]/Bottleneck[1]/NNCFBatchNorm2d[bn3]/batch_norm_0" [id=92, type=batch_norm]; +"93 ResNet/Sequential[layer2]/Bottleneck[1]/__iadd___0" [id=93, type=__iadd__]; +"94 ResNet/Sequential[layer2]/Bottleneck[1]/relu_2" [id=94, type=relu]; +"95 ResNet/Sequential[layer2]/Bottleneck[1]/SymmetricQuantizer/symmetric_quantize_2" [id=95, type=symmetric_quantize]; +"96 ResNet/Sequential[layer2]/Bottleneck[2]/NNCFConv2d[conv1]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" [id=96, type=symmetric_quantize]; +"97 ResNet/Sequential[layer2]/Bottleneck[2]/NNCFConv2d[conv1]/conv2d_0" [id=97, type=conv2d]; +"98 ResNet/Sequential[layer2]/Bottleneck[2]/NNCFBatchNorm2d[bn1]/batch_norm_0" [id=98, type=batch_norm]; +"99 ResNet/Sequential[layer2]/Bottleneck[2]/relu_0" [id=99, type=relu]; +"100 ResNet/Sequential[layer2]/Bottleneck[2]/SymmetricQuantizer/symmetric_quantize_0" [id=100, type=symmetric_quantize]; +"101 ResNet/Sequential[layer2]/Bottleneck[2]/NNCFConv2d[conv2]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" [id=101, type=symmetric_quantize]; +"102 ResNet/Sequential[layer2]/Bottleneck[2]/NNCFConv2d[conv2]/conv2d_0" [id=102, type=conv2d]; +"103 ResNet/Sequential[layer2]/Bottleneck[2]/NNCFBatchNorm2d[bn2]/batch_norm_0" [id=103, type=batch_norm]; +"104 ResNet/Sequential[layer2]/Bottleneck[2]/relu_1" [id=104, type=relu]; +"105 ResNet/Sequential[layer2]/Bottleneck[2]/SymmetricQuantizer/symmetric_quantize_1" [id=105, type=symmetric_quantize]; +"106 ResNet/Sequential[layer2]/Bottleneck[2]/NNCFConv2d[conv3]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" [id=106, type=symmetric_quantize]; +"107 ResNet/Sequential[layer2]/Bottleneck[2]/NNCFConv2d[conv3]/conv2d_0" [id=107, type=conv2d]; +"108 ResNet/Sequential[layer2]/Bottleneck[2]/NNCFBatchNorm2d[bn3]/batch_norm_0" [id=108, type=batch_norm]; +"109 ResNet/Sequential[layer2]/Bottleneck[2]/__iadd___0" [id=109, type=__iadd__]; +"110 ResNet/Sequential[layer2]/Bottleneck[2]/relu_2" [id=110, type=relu]; +"111 ResNet/Sequential[layer2]/Bottleneck[2]/SymmetricQuantizer/symmetric_quantize_2" [id=111, type=symmetric_quantize]; +"112 ResNet/Sequential[layer2]/Bottleneck[3]/NNCFConv2d[conv1]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" [id=112, type=symmetric_quantize]; +"113 ResNet/Sequential[layer2]/Bottleneck[3]/NNCFConv2d[conv1]/conv2d_0" [id=113, type=conv2d]; +"114 ResNet/Sequential[layer2]/Bottleneck[3]/NNCFBatchNorm2d[bn1]/batch_norm_0" [id=114, type=batch_norm]; +"115 ResNet/Sequential[layer2]/Bottleneck[3]/relu_0" [id=115, type=relu]; +"116 ResNet/Sequential[layer2]/Bottleneck[3]/SymmetricQuantizer/symmetric_quantize_0" [id=116, type=symmetric_quantize]; +"117 ResNet/Sequential[layer2]/Bottleneck[3]/NNCFConv2d[conv2]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" [id=117, type=symmetric_quantize]; +"118 ResNet/Sequential[layer2]/Bottleneck[3]/NNCFConv2d[conv2]/conv2d_0" [id=118, type=conv2d]; +"119 
ResNet/Sequential[layer2]/Bottleneck[3]/NNCFBatchNorm2d[bn2]/batch_norm_0" [id=119, type=batch_norm]; +"120 ResNet/Sequential[layer2]/Bottleneck[3]/relu_1" [id=120, type=relu]; +"121 ResNet/Sequential[layer2]/Bottleneck[3]/SymmetricQuantizer/symmetric_quantize_1" [id=121, type=symmetric_quantize]; +"122 ResNet/Sequential[layer2]/Bottleneck[3]/NNCFConv2d[conv3]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" [id=122, type=symmetric_quantize]; +"123 ResNet/Sequential[layer2]/Bottleneck[3]/NNCFConv2d[conv3]/conv2d_0" [id=123, type=conv2d]; +"124 ResNet/Sequential[layer2]/Bottleneck[3]/NNCFBatchNorm2d[bn3]/batch_norm_0" [id=124, type=batch_norm]; +"125 ResNet/Sequential[layer2]/Bottleneck[3]/__iadd___0" [id=125, type=__iadd__]; +"126 ResNet/Sequential[layer2]/Bottleneck[3]/relu_2" [id=126, type=relu]; +"127 ResNet/Sequential[layer2]/Bottleneck[3]/SymmetricQuantizer/symmetric_quantize_2" [id=127, type=symmetric_quantize]; +"128 ResNet/Sequential[layer3]/Bottleneck[0]/NNCFConv2d[conv1]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" [id=128, type=symmetric_quantize]; +"129 ResNet/Sequential[layer3]/Bottleneck[0]/NNCFConv2d[conv1]/conv2d_0" [id=129, type=conv2d]; +"130 ResNet/Sequential[layer3]/Bottleneck[0]/NNCFBatchNorm2d[bn1]/batch_norm_0" [id=130, type=batch_norm]; +"131 ResNet/Sequential[layer3]/Bottleneck[0]/relu_0" [id=131, type=relu]; +"132 ResNet/Sequential[layer3]/Bottleneck[0]/SymmetricQuantizer/symmetric_quantize_0" [id=132, type=symmetric_quantize]; +"133 ResNet/Sequential[layer3]/Bottleneck[0]/NNCFConv2d[conv2]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" [id=133, type=symmetric_quantize]; +"134 ResNet/Sequential[layer3]/Bottleneck[0]/NNCFConv2d[conv2]/conv2d_0" [id=134, type=conv2d]; +"135 ResNet/Sequential[layer3]/Bottleneck[0]/NNCFBatchNorm2d[bn2]/batch_norm_0" [id=135, type=batch_norm]; +"136 ResNet/Sequential[layer3]/Bottleneck[0]/relu_1" [id=136, type=relu]; +"137 ResNet/Sequential[layer3]/Bottleneck[0]/SymmetricQuantizer/symmetric_quantize_1" [id=137, type=symmetric_quantize]; +"138 ResNet/Sequential[layer3]/Bottleneck[0]/NNCFConv2d[conv3]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" [id=138, type=symmetric_quantize]; +"139 ResNet/Sequential[layer3]/Bottleneck[0]/NNCFConv2d[conv3]/conv2d_0" [id=139, type=conv2d]; +"140 ResNet/Sequential[layer3]/Bottleneck[0]/NNCFBatchNorm2d[bn3]/batch_norm_0" [id=140, type=batch_norm]; +"141 ResNet/Sequential[layer3]/Bottleneck[0]/Sequential[shortcut]/NNCFConv2d[0]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" [id=141, type=symmetric_quantize]; +"142 ResNet/Sequential[layer3]/Bottleneck[0]/Sequential[shortcut]/NNCFConv2d[0]/conv2d_0" [id=142, type=conv2d]; +"143 ResNet/Sequential[layer3]/Bottleneck[0]/Sequential[shortcut]/NNCFBatchNorm2d[1]/batch_norm_0" [id=143, type=batch_norm]; +"144 ResNet/Sequential[layer3]/Bottleneck[0]/Sequential[shortcut]/NNCFBatchNorm2d[1]/SymmetricQuantizer/symmetric_quantize_0" [id=144, type=symmetric_quantize]; +"145 ResNet/Sequential[layer3]/Bottleneck[0]/__iadd___0" [id=145, type=__iadd__]; +"146 ResNet/Sequential[layer3]/Bottleneck[0]/relu_2" [id=146, type=relu]; +"147 ResNet/Sequential[layer3]/Bottleneck[0]/SymmetricQuantizer/symmetric_quantize_2" [id=147, type=symmetric_quantize]; +"148 ResNet/Sequential[layer3]/Bottleneck[1]/NNCFConv2d[conv1]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" [id=148, 
type=symmetric_quantize]; +"149 ResNet/Sequential[layer3]/Bottleneck[1]/NNCFConv2d[conv1]/conv2d_0" [id=149, type=conv2d]; +"150 ResNet/Sequential[layer3]/Bottleneck[1]/NNCFBatchNorm2d[bn1]/batch_norm_0" [id=150, type=batch_norm]; +"151 ResNet/Sequential[layer3]/Bottleneck[1]/relu_0" [id=151, type=relu]; +"152 ResNet/Sequential[layer3]/Bottleneck[1]/SymmetricQuantizer/symmetric_quantize_0" [id=152, type=symmetric_quantize]; +"153 ResNet/Sequential[layer3]/Bottleneck[1]/NNCFConv2d[conv2]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" [id=153, type=symmetric_quantize]; +"154 ResNet/Sequential[layer3]/Bottleneck[1]/NNCFConv2d[conv2]/conv2d_0" [id=154, type=conv2d]; +"155 ResNet/Sequential[layer3]/Bottleneck[1]/NNCFBatchNorm2d[bn2]/batch_norm_0" [id=155, type=batch_norm]; +"156 ResNet/Sequential[layer3]/Bottleneck[1]/relu_1" [id=156, type=relu]; +"157 ResNet/Sequential[layer3]/Bottleneck[1]/SymmetricQuantizer/symmetric_quantize_1" [id=157, type=symmetric_quantize]; +"158 ResNet/Sequential[layer3]/Bottleneck[1]/NNCFConv2d[conv3]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" [id=158, type=symmetric_quantize]; +"159 ResNet/Sequential[layer3]/Bottleneck[1]/NNCFConv2d[conv3]/conv2d_0" [id=159, type=conv2d]; +"160 ResNet/Sequential[layer3]/Bottleneck[1]/NNCFBatchNorm2d[bn3]/batch_norm_0" [id=160, type=batch_norm]; +"161 ResNet/Sequential[layer3]/Bottleneck[1]/__iadd___0" [id=161, type=__iadd__]; +"162 ResNet/Sequential[layer3]/Bottleneck[1]/relu_2" [id=162, type=relu]; +"163 ResNet/Sequential[layer3]/Bottleneck[1]/SymmetricQuantizer/symmetric_quantize_2" [id=163, type=symmetric_quantize]; +"164 ResNet/Sequential[layer3]/Bottleneck[2]/NNCFConv2d[conv1]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" [id=164, type=symmetric_quantize]; +"165 ResNet/Sequential[layer3]/Bottleneck[2]/NNCFConv2d[conv1]/conv2d_0" [id=165, type=conv2d]; +"166 ResNet/Sequential[layer3]/Bottleneck[2]/NNCFBatchNorm2d[bn1]/batch_norm_0" [id=166, type=batch_norm]; +"167 ResNet/Sequential[layer3]/Bottleneck[2]/relu_0" [id=167, type=relu]; +"168 ResNet/Sequential[layer3]/Bottleneck[2]/SymmetricQuantizer/symmetric_quantize_0" [id=168, type=symmetric_quantize]; +"169 ResNet/Sequential[layer3]/Bottleneck[2]/NNCFConv2d[conv2]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" [id=169, type=symmetric_quantize]; +"170 ResNet/Sequential[layer3]/Bottleneck[2]/NNCFConv2d[conv2]/conv2d_0" [id=170, type=conv2d]; +"171 ResNet/Sequential[layer3]/Bottleneck[2]/NNCFBatchNorm2d[bn2]/batch_norm_0" [id=171, type=batch_norm]; +"172 ResNet/Sequential[layer3]/Bottleneck[2]/relu_1" [id=172, type=relu]; +"173 ResNet/Sequential[layer3]/Bottleneck[2]/SymmetricQuantizer/symmetric_quantize_1" [id=173, type=symmetric_quantize]; +"174 ResNet/Sequential[layer3]/Bottleneck[2]/NNCFConv2d[conv3]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" [id=174, type=symmetric_quantize]; +"175 ResNet/Sequential[layer3]/Bottleneck[2]/NNCFConv2d[conv3]/conv2d_0" [id=175, type=conv2d]; +"176 ResNet/Sequential[layer3]/Bottleneck[2]/NNCFBatchNorm2d[bn3]/batch_norm_0" [id=176, type=batch_norm]; +"177 ResNet/Sequential[layer3]/Bottleneck[2]/__iadd___0" [id=177, type=__iadd__]; +"178 ResNet/Sequential[layer3]/Bottleneck[2]/relu_2" [id=178, type=relu]; +"179 ResNet/Sequential[layer3]/Bottleneck[2]/SymmetricQuantizer/symmetric_quantize_2" [id=179, type=symmetric_quantize]; +"180 
ResNet/Sequential[layer3]/Bottleneck[3]/NNCFConv2d[conv1]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" [id=180, type=symmetric_quantize]; +"181 ResNet/Sequential[layer3]/Bottleneck[3]/NNCFConv2d[conv1]/conv2d_0" [id=181, type=conv2d]; +"182 ResNet/Sequential[layer3]/Bottleneck[3]/NNCFBatchNorm2d[bn1]/batch_norm_0" [id=182, type=batch_norm]; +"183 ResNet/Sequential[layer3]/Bottleneck[3]/relu_0" [id=183, type=relu]; +"184 ResNet/Sequential[layer3]/Bottleneck[3]/SymmetricQuantizer/symmetric_quantize_0" [id=184, type=symmetric_quantize]; +"185 ResNet/Sequential[layer3]/Bottleneck[3]/NNCFConv2d[conv2]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" [id=185, type=symmetric_quantize]; +"186 ResNet/Sequential[layer3]/Bottleneck[3]/NNCFConv2d[conv2]/conv2d_0" [id=186, type=conv2d]; +"187 ResNet/Sequential[layer3]/Bottleneck[3]/NNCFBatchNorm2d[bn2]/batch_norm_0" [id=187, type=batch_norm]; +"188 ResNet/Sequential[layer3]/Bottleneck[3]/relu_1" [id=188, type=relu]; +"189 ResNet/Sequential[layer3]/Bottleneck[3]/SymmetricQuantizer/symmetric_quantize_1" [id=189, type=symmetric_quantize]; +"190 ResNet/Sequential[layer3]/Bottleneck[3]/NNCFConv2d[conv3]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" [id=190, type=symmetric_quantize]; +"191 ResNet/Sequential[layer3]/Bottleneck[3]/NNCFConv2d[conv3]/conv2d_0" [id=191, type=conv2d]; +"192 ResNet/Sequential[layer3]/Bottleneck[3]/NNCFBatchNorm2d[bn3]/batch_norm_0" [id=192, type=batch_norm]; +"193 ResNet/Sequential[layer3]/Bottleneck[3]/__iadd___0" [id=193, type=__iadd__]; +"194 ResNet/Sequential[layer3]/Bottleneck[3]/relu_2" [id=194, type=relu]; +"195 ResNet/Sequential[layer3]/Bottleneck[3]/SymmetricQuantizer/symmetric_quantize_2" [id=195, type=symmetric_quantize]; +"196 ResNet/Sequential[layer3]/Bottleneck[4]/NNCFConv2d[conv1]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" [id=196, type=symmetric_quantize]; +"197 ResNet/Sequential[layer3]/Bottleneck[4]/NNCFConv2d[conv1]/conv2d_0" [id=197, type=conv2d]; +"198 ResNet/Sequential[layer3]/Bottleneck[4]/NNCFBatchNorm2d[bn1]/batch_norm_0" [id=198, type=batch_norm]; +"199 ResNet/Sequential[layer3]/Bottleneck[4]/relu_0" [id=199, type=relu]; +"200 ResNet/Sequential[layer3]/Bottleneck[4]/SymmetricQuantizer/symmetric_quantize_0" [id=200, type=symmetric_quantize]; +"201 ResNet/Sequential[layer3]/Bottleneck[4]/NNCFConv2d[conv2]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" [id=201, type=symmetric_quantize]; +"202 ResNet/Sequential[layer3]/Bottleneck[4]/NNCFConv2d[conv2]/conv2d_0" [id=202, type=conv2d]; +"203 ResNet/Sequential[layer3]/Bottleneck[4]/NNCFBatchNorm2d[bn2]/batch_norm_0" [id=203, type=batch_norm]; +"204 ResNet/Sequential[layer3]/Bottleneck[4]/relu_1" [id=204, type=relu]; +"205 ResNet/Sequential[layer3]/Bottleneck[4]/SymmetricQuantizer/symmetric_quantize_1" [id=205, type=symmetric_quantize]; +"206 ResNet/Sequential[layer3]/Bottleneck[4]/NNCFConv2d[conv3]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" [id=206, type=symmetric_quantize]; +"207 ResNet/Sequential[layer3]/Bottleneck[4]/NNCFConv2d[conv3]/conv2d_0" [id=207, type=conv2d]; +"208 ResNet/Sequential[layer3]/Bottleneck[4]/NNCFBatchNorm2d[bn3]/batch_norm_0" [id=208, type=batch_norm]; +"209 ResNet/Sequential[layer3]/Bottleneck[4]/__iadd___0" [id=209, type=__iadd__]; +"210 ResNet/Sequential[layer3]/Bottleneck[4]/relu_2" [id=210, type=relu]; +"211 
ResNet/Sequential[layer3]/Bottleneck[4]/SymmetricQuantizer/symmetric_quantize_2" [id=211, type=symmetric_quantize]; +"212 ResNet/Sequential[layer3]/Bottleneck[5]/NNCFConv2d[conv1]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" [id=212, type=symmetric_quantize]; +"213 ResNet/Sequential[layer3]/Bottleneck[5]/NNCFConv2d[conv1]/conv2d_0" [id=213, type=conv2d]; +"214 ResNet/Sequential[layer3]/Bottleneck[5]/NNCFBatchNorm2d[bn1]/batch_norm_0" [id=214, type=batch_norm]; +"215 ResNet/Sequential[layer3]/Bottleneck[5]/relu_0" [id=215, type=relu]; +"216 ResNet/Sequential[layer3]/Bottleneck[5]/SymmetricQuantizer/symmetric_quantize_0" [id=216, type=symmetric_quantize]; +"217 ResNet/Sequential[layer3]/Bottleneck[5]/NNCFConv2d[conv2]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" [id=217, type=symmetric_quantize]; +"218 ResNet/Sequential[layer3]/Bottleneck[5]/NNCFConv2d[conv2]/conv2d_0" [id=218, type=conv2d]; +"219 ResNet/Sequential[layer3]/Bottleneck[5]/NNCFBatchNorm2d[bn2]/batch_norm_0" [id=219, type=batch_norm]; +"220 ResNet/Sequential[layer3]/Bottleneck[5]/relu_1" [id=220, type=relu]; +"221 ResNet/Sequential[layer3]/Bottleneck[5]/SymmetricQuantizer/symmetric_quantize_1" [id=221, type=symmetric_quantize]; +"222 ResNet/Sequential[layer3]/Bottleneck[5]/NNCFConv2d[conv3]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" [id=222, type=symmetric_quantize]; +"223 ResNet/Sequential[layer3]/Bottleneck[5]/NNCFConv2d[conv3]/conv2d_0" [id=223, type=conv2d]; +"224 ResNet/Sequential[layer3]/Bottleneck[5]/NNCFBatchNorm2d[bn3]/batch_norm_0" [id=224, type=batch_norm]; +"225 ResNet/Sequential[layer3]/Bottleneck[5]/__iadd___0" [id=225, type=__iadd__]; +"226 ResNet/Sequential[layer3]/Bottleneck[5]/relu_2" [id=226, type=relu]; +"227 ResNet/Sequential[layer3]/Bottleneck[5]/SymmetricQuantizer/symmetric_quantize_2" [id=227, type=symmetric_quantize]; +"228 ResNet/Sequential[layer4]/Bottleneck[0]/NNCFConv2d[conv1]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" [id=228, type=symmetric_quantize]; +"229 ResNet/Sequential[layer4]/Bottleneck[0]/NNCFConv2d[conv1]/conv2d_0" [id=229, type=conv2d]; +"230 ResNet/Sequential[layer4]/Bottleneck[0]/NNCFBatchNorm2d[bn1]/batch_norm_0" [id=230, type=batch_norm]; +"231 ResNet/Sequential[layer4]/Bottleneck[0]/relu_0" [id=231, type=relu]; +"232 ResNet/Sequential[layer4]/Bottleneck[0]/SymmetricQuantizer/symmetric_quantize_0" [id=232, type=symmetric_quantize]; +"233 ResNet/Sequential[layer4]/Bottleneck[0]/NNCFConv2d[conv2]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" [id=233, type=symmetric_quantize]; +"234 ResNet/Sequential[layer4]/Bottleneck[0]/NNCFConv2d[conv2]/conv2d_0" [id=234, type=conv2d]; +"235 ResNet/Sequential[layer4]/Bottleneck[0]/NNCFBatchNorm2d[bn2]/batch_norm_0" [id=235, type=batch_norm]; +"236 ResNet/Sequential[layer4]/Bottleneck[0]/relu_1" [id=236, type=relu]; +"237 ResNet/Sequential[layer4]/Bottleneck[0]/SymmetricQuantizer/symmetric_quantize_1" [id=237, type=symmetric_quantize]; +"238 ResNet/Sequential[layer4]/Bottleneck[0]/NNCFConv2d[conv3]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" [id=238, type=symmetric_quantize]; +"239 ResNet/Sequential[layer4]/Bottleneck[0]/NNCFConv2d[conv3]/conv2d_0" [id=239, type=conv2d]; +"240 ResNet/Sequential[layer4]/Bottleneck[0]/NNCFBatchNorm2d[bn3]/batch_norm_0" [id=240, type=batch_norm]; +"241 
ResNet/Sequential[layer4]/Bottleneck[0]/Sequential[shortcut]/NNCFConv2d[0]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" [id=241, type=symmetric_quantize]; +"242 ResNet/Sequential[layer4]/Bottleneck[0]/Sequential[shortcut]/NNCFConv2d[0]/conv2d_0" [id=242, type=conv2d]; +"243 ResNet/Sequential[layer4]/Bottleneck[0]/Sequential[shortcut]/NNCFBatchNorm2d[1]/batch_norm_0" [id=243, type=batch_norm]; +"244 ResNet/Sequential[layer4]/Bottleneck[0]/Sequential[shortcut]/NNCFBatchNorm2d[1]/SymmetricQuantizer/symmetric_quantize_0" [id=244, type=symmetric_quantize]; +"245 ResNet/Sequential[layer4]/Bottleneck[0]/__iadd___0" [id=245, type=__iadd__]; +"246 ResNet/Sequential[layer4]/Bottleneck[0]/relu_2" [id=246, type=relu]; +"247 ResNet/Sequential[layer4]/Bottleneck[0]/SymmetricQuantizer/symmetric_quantize_2" [id=247, type=symmetric_quantize]; +"248 ResNet/Sequential[layer4]/Bottleneck[1]/NNCFConv2d[conv1]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" [id=248, type=symmetric_quantize]; +"249 ResNet/Sequential[layer4]/Bottleneck[1]/NNCFConv2d[conv1]/conv2d_0" [id=249, type=conv2d]; +"250 ResNet/Sequential[layer4]/Bottleneck[1]/NNCFBatchNorm2d[bn1]/batch_norm_0" [id=250, type=batch_norm]; +"251 ResNet/Sequential[layer4]/Bottleneck[1]/relu_0" [id=251, type=relu]; +"252 ResNet/Sequential[layer4]/Bottleneck[1]/SymmetricQuantizer/symmetric_quantize_0" [id=252, type=symmetric_quantize]; +"253 ResNet/Sequential[layer4]/Bottleneck[1]/NNCFConv2d[conv2]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" [id=253, type=symmetric_quantize]; +"254 ResNet/Sequential[layer4]/Bottleneck[1]/NNCFConv2d[conv2]/conv2d_0" [id=254, type=conv2d]; +"255 ResNet/Sequential[layer4]/Bottleneck[1]/NNCFBatchNorm2d[bn2]/batch_norm_0" [id=255, type=batch_norm]; +"256 ResNet/Sequential[layer4]/Bottleneck[1]/relu_1" [id=256, type=relu]; +"257 ResNet/Sequential[layer4]/Bottleneck[1]/SymmetricQuantizer/symmetric_quantize_1" [id=257, type=symmetric_quantize]; +"258 ResNet/Sequential[layer4]/Bottleneck[1]/NNCFConv2d[conv3]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" [id=258, type=symmetric_quantize]; +"259 ResNet/Sequential[layer4]/Bottleneck[1]/NNCFConv2d[conv3]/conv2d_0" [id=259, type=conv2d]; +"260 ResNet/Sequential[layer4]/Bottleneck[1]/NNCFBatchNorm2d[bn3]/batch_norm_0" [id=260, type=batch_norm]; +"261 ResNet/Sequential[layer4]/Bottleneck[1]/__iadd___0" [id=261, type=__iadd__]; +"262 ResNet/Sequential[layer4]/Bottleneck[1]/relu_2" [id=262, type=relu]; +"263 ResNet/Sequential[layer4]/Bottleneck[1]/SymmetricQuantizer/symmetric_quantize_2" [id=263, type=symmetric_quantize]; +"264 ResNet/Sequential[layer4]/Bottleneck[2]/NNCFConv2d[conv1]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" [id=264, type=symmetric_quantize]; +"265 ResNet/Sequential[layer4]/Bottleneck[2]/NNCFConv2d[conv1]/conv2d_0" [id=265, type=conv2d]; +"266 ResNet/Sequential[layer4]/Bottleneck[2]/NNCFBatchNorm2d[bn1]/batch_norm_0" [id=266, type=batch_norm]; +"267 ResNet/Sequential[layer4]/Bottleneck[2]/relu_0" [id=267, type=relu]; +"268 ResNet/Sequential[layer4]/Bottleneck[2]/SymmetricQuantizer/symmetric_quantize_0" [id=268, type=symmetric_quantize]; +"269 ResNet/Sequential[layer4]/Bottleneck[2]/NNCFConv2d[conv2]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" [id=269, type=symmetric_quantize]; +"270 ResNet/Sequential[layer4]/Bottleneck[2]/NNCFConv2d[conv2]/conv2d_0" [id=270, 
type=conv2d]; +"271 ResNet/Sequential[layer4]/Bottleneck[2]/NNCFBatchNorm2d[bn2]/batch_norm_0" [id=271, type=batch_norm]; +"272 ResNet/Sequential[layer4]/Bottleneck[2]/relu_1" [id=272, type=relu]; +"273 ResNet/Sequential[layer4]/Bottleneck[2]/SymmetricQuantizer/symmetric_quantize_1" [id=273, type=symmetric_quantize]; +"274 ResNet/Sequential[layer4]/Bottleneck[2]/NNCFConv2d[conv3]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" [id=274, type=symmetric_quantize]; +"275 ResNet/Sequential[layer4]/Bottleneck[2]/NNCFConv2d[conv3]/conv2d_0" [id=275, type=conv2d]; +"276 ResNet/Sequential[layer4]/Bottleneck[2]/NNCFBatchNorm2d[bn3]/batch_norm_0" [id=276, type=batch_norm]; +"277 ResNet/Sequential[layer4]/Bottleneck[2]/__iadd___0" [id=277, type=__iadd__]; +"278 ResNet/Sequential[layer4]/Bottleneck[2]/relu_2" [id=278, type=relu]; +"279 ResNet/Sequential[layer4]/Bottleneck[2]/SymmetricQuantizer/symmetric_quantize_2" [id=279, type=symmetric_quantize]; +"280 ResNet/AdaptiveAvgPool2d[avgpool]/adaptive_avg_pool2d_0" [id=280, type=adaptive_avg_pool2d]; +"281 ResNet/AdaptiveAvgPool2d[avgpool]/SymmetricQuantizer/symmetric_quantize_0" [id=281, type=symmetric_quantize]; +"282 ResNet/flatten_0" [id=282, type=flatten]; +"283 ResNet/NNCFLinear[linear]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" [id=283, type=symmetric_quantize]; +"284 ResNet/NNCFLinear[linear]/linear_0" [id=284, type=linear]; +"285 /nncf_model_output_0" [id=285, type=nncf_model_output]; +"0 /nncf_model_input_0" -> "1 SymmetricQuantizer/symmetric_quantize_0"; +"1 SymmetricQuantizer/symmetric_quantize_0" -> "3 ResNet/NNCFConv2d[conv1]/conv2d_0"; +"2 ResNet/NNCFConv2d[conv1]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "3 ResNet/NNCFConv2d[conv1]/conv2d_0"; +"3 ResNet/NNCFConv2d[conv1]/conv2d_0" -> "4 ResNet/NNCFBatchNorm2d[bn1]/batch_norm_0"; +"4 ResNet/NNCFBatchNorm2d[bn1]/batch_norm_0" -> "5 ResNet/relu_0"; +"5 ResNet/relu_0" -> "6 ResNet/SymmetricQuantizer/symmetric_quantize_0"; +"6 ResNet/SymmetricQuantizer/symmetric_quantize_0" -> "7 ResNet/MaxPool2d[maxpool]/max_pool2d_0"; +"7 ResNet/MaxPool2d[maxpool]/max_pool2d_0" -> "9 ResNet/Sequential[layer1]/Bottleneck[0]/NNCFConv2d[conv1]/conv2d_0"; +"7 ResNet/MaxPool2d[maxpool]/max_pool2d_0" -> "22 ResNet/Sequential[layer1]/Bottleneck[0]/Sequential[shortcut]/NNCFConv2d[0]/conv2d_0"; +"8 ResNet/Sequential[layer1]/Bottleneck[0]/NNCFConv2d[conv1]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "9 ResNet/Sequential[layer1]/Bottleneck[0]/NNCFConv2d[conv1]/conv2d_0"; +"9 ResNet/Sequential[layer1]/Bottleneck[0]/NNCFConv2d[conv1]/conv2d_0" -> "10 ResNet/Sequential[layer1]/Bottleneck[0]/NNCFBatchNorm2d[bn1]/batch_norm_0"; +"10 ResNet/Sequential[layer1]/Bottleneck[0]/NNCFBatchNorm2d[bn1]/batch_norm_0" -> "11 ResNet/Sequential[layer1]/Bottleneck[0]/relu_0"; +"11 ResNet/Sequential[layer1]/Bottleneck[0]/relu_0" -> "12 ResNet/Sequential[layer1]/Bottleneck[0]/SymmetricQuantizer/symmetric_quantize_0"; +"12 ResNet/Sequential[layer1]/Bottleneck[0]/SymmetricQuantizer/symmetric_quantize_0" -> "14 ResNet/Sequential[layer1]/Bottleneck[0]/NNCFConv2d[conv2]/conv2d_0"; +"13 ResNet/Sequential[layer1]/Bottleneck[0]/NNCFConv2d[conv2]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "14 ResNet/Sequential[layer1]/Bottleneck[0]/NNCFConv2d[conv2]/conv2d_0"; +"14 ResNet/Sequential[layer1]/Bottleneck[0]/NNCFConv2d[conv2]/conv2d_0" -> "15 
ResNet/Sequential[layer1]/Bottleneck[0]/NNCFBatchNorm2d[bn2]/batch_norm_0"; +"15 ResNet/Sequential[layer1]/Bottleneck[0]/NNCFBatchNorm2d[bn2]/batch_norm_0" -> "16 ResNet/Sequential[layer1]/Bottleneck[0]/relu_1"; +"16 ResNet/Sequential[layer1]/Bottleneck[0]/relu_1" -> "17 ResNet/Sequential[layer1]/Bottleneck[0]/SymmetricQuantizer/symmetric_quantize_1"; +"17 ResNet/Sequential[layer1]/Bottleneck[0]/SymmetricQuantizer/symmetric_quantize_1" -> "19 ResNet/Sequential[layer1]/Bottleneck[0]/NNCFConv2d[conv3]/conv2d_0"; +"18 ResNet/Sequential[layer1]/Bottleneck[0]/NNCFConv2d[conv3]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "19 ResNet/Sequential[layer1]/Bottleneck[0]/NNCFConv2d[conv3]/conv2d_0"; +"19 ResNet/Sequential[layer1]/Bottleneck[0]/NNCFConv2d[conv3]/conv2d_0" -> "20 ResNet/Sequential[layer1]/Bottleneck[0]/NNCFBatchNorm2d[bn3]/batch_norm_0"; +"20 ResNet/Sequential[layer1]/Bottleneck[0]/NNCFBatchNorm2d[bn3]/batch_norm_0" -> "25 ResNet/Sequential[layer1]/Bottleneck[0]/__iadd___0"; +"21 ResNet/Sequential[layer1]/Bottleneck[0]/Sequential[shortcut]/NNCFConv2d[0]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "22 ResNet/Sequential[layer1]/Bottleneck[0]/Sequential[shortcut]/NNCFConv2d[0]/conv2d_0"; +"22 ResNet/Sequential[layer1]/Bottleneck[0]/Sequential[shortcut]/NNCFConv2d[0]/conv2d_0" -> "23 ResNet/Sequential[layer1]/Bottleneck[0]/Sequential[shortcut]/NNCFBatchNorm2d[1]/batch_norm_0"; +"23 ResNet/Sequential[layer1]/Bottleneck[0]/Sequential[shortcut]/NNCFBatchNorm2d[1]/batch_norm_0" -> "24 ResNet/Sequential[layer1]/Bottleneck[0]/Sequential[shortcut]/NNCFBatchNorm2d[1]/SymmetricQuantizer/symmetric_quantize_0"; +"24 ResNet/Sequential[layer1]/Bottleneck[0]/Sequential[shortcut]/NNCFBatchNorm2d[1]/SymmetricQuantizer/symmetric_quantize_0" -> "25 ResNet/Sequential[layer1]/Bottleneck[0]/__iadd___0"; +"25 ResNet/Sequential[layer1]/Bottleneck[0]/__iadd___0" -> "26 ResNet/Sequential[layer1]/Bottleneck[0]/relu_2"; +"26 ResNet/Sequential[layer1]/Bottleneck[0]/relu_2" -> "27 ResNet/Sequential[layer1]/Bottleneck[0]/SymmetricQuantizer/symmetric_quantize_2"; +"27 ResNet/Sequential[layer1]/Bottleneck[0]/SymmetricQuantizer/symmetric_quantize_2" -> "29 ResNet/Sequential[layer1]/Bottleneck[1]/NNCFConv2d[conv1]/conv2d_0"; +"27 ResNet/Sequential[layer1]/Bottleneck[0]/SymmetricQuantizer/symmetric_quantize_2" -> "41 ResNet/Sequential[layer1]/Bottleneck[1]/__iadd___0"; +"28 ResNet/Sequential[layer1]/Bottleneck[1]/NNCFConv2d[conv1]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "29 ResNet/Sequential[layer1]/Bottleneck[1]/NNCFConv2d[conv1]/conv2d_0"; +"29 ResNet/Sequential[layer1]/Bottleneck[1]/NNCFConv2d[conv1]/conv2d_0" -> "30 ResNet/Sequential[layer1]/Bottleneck[1]/NNCFBatchNorm2d[bn1]/batch_norm_0"; +"30 ResNet/Sequential[layer1]/Bottleneck[1]/NNCFBatchNorm2d[bn1]/batch_norm_0" -> "31 ResNet/Sequential[layer1]/Bottleneck[1]/relu_0"; +"31 ResNet/Sequential[layer1]/Bottleneck[1]/relu_0" -> "32 ResNet/Sequential[layer1]/Bottleneck[1]/SymmetricQuantizer/symmetric_quantize_0"; +"32 ResNet/Sequential[layer1]/Bottleneck[1]/SymmetricQuantizer/symmetric_quantize_0" -> "34 ResNet/Sequential[layer1]/Bottleneck[1]/NNCFConv2d[conv2]/conv2d_0"; +"33 ResNet/Sequential[layer1]/Bottleneck[1]/NNCFConv2d[conv2]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "34 ResNet/Sequential[layer1]/Bottleneck[1]/NNCFConv2d[conv2]/conv2d_0"; +"34 
ResNet/Sequential[layer1]/Bottleneck[1]/NNCFConv2d[conv2]/conv2d_0" -> "35 ResNet/Sequential[layer1]/Bottleneck[1]/NNCFBatchNorm2d[bn2]/batch_norm_0"; +"35 ResNet/Sequential[layer1]/Bottleneck[1]/NNCFBatchNorm2d[bn2]/batch_norm_0" -> "36 ResNet/Sequential[layer1]/Bottleneck[1]/relu_1"; +"36 ResNet/Sequential[layer1]/Bottleneck[1]/relu_1" -> "37 ResNet/Sequential[layer1]/Bottleneck[1]/SymmetricQuantizer/symmetric_quantize_1"; +"37 ResNet/Sequential[layer1]/Bottleneck[1]/SymmetricQuantizer/symmetric_quantize_1" -> "39 ResNet/Sequential[layer1]/Bottleneck[1]/NNCFConv2d[conv3]/conv2d_0"; +"38 ResNet/Sequential[layer1]/Bottleneck[1]/NNCFConv2d[conv3]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "39 ResNet/Sequential[layer1]/Bottleneck[1]/NNCFConv2d[conv3]/conv2d_0"; +"39 ResNet/Sequential[layer1]/Bottleneck[1]/NNCFConv2d[conv3]/conv2d_0" -> "40 ResNet/Sequential[layer1]/Bottleneck[1]/NNCFBatchNorm2d[bn3]/batch_norm_0"; +"40 ResNet/Sequential[layer1]/Bottleneck[1]/NNCFBatchNorm2d[bn3]/batch_norm_0" -> "41 ResNet/Sequential[layer1]/Bottleneck[1]/__iadd___0"; +"41 ResNet/Sequential[layer1]/Bottleneck[1]/__iadd___0" -> "42 ResNet/Sequential[layer1]/Bottleneck[1]/relu_2"; +"42 ResNet/Sequential[layer1]/Bottleneck[1]/relu_2" -> "43 ResNet/Sequential[layer1]/Bottleneck[1]/SymmetricQuantizer/symmetric_quantize_2"; +"43 ResNet/Sequential[layer1]/Bottleneck[1]/SymmetricQuantizer/symmetric_quantize_2" -> "45 ResNet/Sequential[layer1]/Bottleneck[2]/NNCFConv2d[conv1]/conv2d_0"; +"43 ResNet/Sequential[layer1]/Bottleneck[1]/SymmetricQuantizer/symmetric_quantize_2" -> "57 ResNet/Sequential[layer1]/Bottleneck[2]/__iadd___0"; +"44 ResNet/Sequential[layer1]/Bottleneck[2]/NNCFConv2d[conv1]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "45 ResNet/Sequential[layer1]/Bottleneck[2]/NNCFConv2d[conv1]/conv2d_0"; +"45 ResNet/Sequential[layer1]/Bottleneck[2]/NNCFConv2d[conv1]/conv2d_0" -> "46 ResNet/Sequential[layer1]/Bottleneck[2]/NNCFBatchNorm2d[bn1]/batch_norm_0"; +"46 ResNet/Sequential[layer1]/Bottleneck[2]/NNCFBatchNorm2d[bn1]/batch_norm_0" -> "47 ResNet/Sequential[layer1]/Bottleneck[2]/relu_0"; +"47 ResNet/Sequential[layer1]/Bottleneck[2]/relu_0" -> "48 ResNet/Sequential[layer1]/Bottleneck[2]/SymmetricQuantizer/symmetric_quantize_0"; +"48 ResNet/Sequential[layer1]/Bottleneck[2]/SymmetricQuantizer/symmetric_quantize_0" -> "50 ResNet/Sequential[layer1]/Bottleneck[2]/NNCFConv2d[conv2]/conv2d_0"; +"49 ResNet/Sequential[layer1]/Bottleneck[2]/NNCFConv2d[conv2]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "50 ResNet/Sequential[layer1]/Bottleneck[2]/NNCFConv2d[conv2]/conv2d_0"; +"50 ResNet/Sequential[layer1]/Bottleneck[2]/NNCFConv2d[conv2]/conv2d_0" -> "51 ResNet/Sequential[layer1]/Bottleneck[2]/NNCFBatchNorm2d[bn2]/batch_norm_0"; +"51 ResNet/Sequential[layer1]/Bottleneck[2]/NNCFBatchNorm2d[bn2]/batch_norm_0" -> "52 ResNet/Sequential[layer1]/Bottleneck[2]/relu_1"; +"52 ResNet/Sequential[layer1]/Bottleneck[2]/relu_1" -> "53 ResNet/Sequential[layer1]/Bottleneck[2]/SymmetricQuantizer/symmetric_quantize_1"; +"53 ResNet/Sequential[layer1]/Bottleneck[2]/SymmetricQuantizer/symmetric_quantize_1" -> "55 ResNet/Sequential[layer1]/Bottleneck[2]/NNCFConv2d[conv3]/conv2d_0"; +"54 ResNet/Sequential[layer1]/Bottleneck[2]/NNCFConv2d[conv3]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "55 ResNet/Sequential[layer1]/Bottleneck[2]/NNCFConv2d[conv3]/conv2d_0"; +"55 
ResNet/Sequential[layer1]/Bottleneck[2]/NNCFConv2d[conv3]/conv2d_0" -> "56 ResNet/Sequential[layer1]/Bottleneck[2]/NNCFBatchNorm2d[bn3]/batch_norm_0"; +"56 ResNet/Sequential[layer1]/Bottleneck[2]/NNCFBatchNorm2d[bn3]/batch_norm_0" -> "57 ResNet/Sequential[layer1]/Bottleneck[2]/__iadd___0"; +"57 ResNet/Sequential[layer1]/Bottleneck[2]/__iadd___0" -> "58 ResNet/Sequential[layer1]/Bottleneck[2]/relu_2"; +"58 ResNet/Sequential[layer1]/Bottleneck[2]/relu_2" -> "59 ResNet/Sequential[layer1]/Bottleneck[2]/SymmetricQuantizer/symmetric_quantize_2"; +"59 ResNet/Sequential[layer1]/Bottleneck[2]/SymmetricQuantizer/symmetric_quantize_2" -> "61 ResNet/Sequential[layer2]/Bottleneck[0]/NNCFConv2d[conv1]/conv2d_0"; +"59 ResNet/Sequential[layer1]/Bottleneck[2]/SymmetricQuantizer/symmetric_quantize_2" -> "74 ResNet/Sequential[layer2]/Bottleneck[0]/Sequential[shortcut]/NNCFConv2d[0]/conv2d_0"; +"60 ResNet/Sequential[layer2]/Bottleneck[0]/NNCFConv2d[conv1]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "61 ResNet/Sequential[layer2]/Bottleneck[0]/NNCFConv2d[conv1]/conv2d_0"; +"61 ResNet/Sequential[layer2]/Bottleneck[0]/NNCFConv2d[conv1]/conv2d_0" -> "62 ResNet/Sequential[layer2]/Bottleneck[0]/NNCFBatchNorm2d[bn1]/batch_norm_0"; +"62 ResNet/Sequential[layer2]/Bottleneck[0]/NNCFBatchNorm2d[bn1]/batch_norm_0" -> "63 ResNet/Sequential[layer2]/Bottleneck[0]/relu_0"; +"63 ResNet/Sequential[layer2]/Bottleneck[0]/relu_0" -> "64 ResNet/Sequential[layer2]/Bottleneck[0]/SymmetricQuantizer/symmetric_quantize_0"; +"64 ResNet/Sequential[layer2]/Bottleneck[0]/SymmetricQuantizer/symmetric_quantize_0" -> "66 ResNet/Sequential[layer2]/Bottleneck[0]/NNCFConv2d[conv2]/conv2d_0"; +"65 ResNet/Sequential[layer2]/Bottleneck[0]/NNCFConv2d[conv2]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "66 ResNet/Sequential[layer2]/Bottleneck[0]/NNCFConv2d[conv2]/conv2d_0"; +"66 ResNet/Sequential[layer2]/Bottleneck[0]/NNCFConv2d[conv2]/conv2d_0" -> "67 ResNet/Sequential[layer2]/Bottleneck[0]/NNCFBatchNorm2d[bn2]/batch_norm_0"; +"67 ResNet/Sequential[layer2]/Bottleneck[0]/NNCFBatchNorm2d[bn2]/batch_norm_0" -> "68 ResNet/Sequential[layer2]/Bottleneck[0]/relu_1"; +"68 ResNet/Sequential[layer2]/Bottleneck[0]/relu_1" -> "69 ResNet/Sequential[layer2]/Bottleneck[0]/SymmetricQuantizer/symmetric_quantize_1"; +"69 ResNet/Sequential[layer2]/Bottleneck[0]/SymmetricQuantizer/symmetric_quantize_1" -> "71 ResNet/Sequential[layer2]/Bottleneck[0]/NNCFConv2d[conv3]/conv2d_0"; +"70 ResNet/Sequential[layer2]/Bottleneck[0]/NNCFConv2d[conv3]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "71 ResNet/Sequential[layer2]/Bottleneck[0]/NNCFConv2d[conv3]/conv2d_0"; +"71 ResNet/Sequential[layer2]/Bottleneck[0]/NNCFConv2d[conv3]/conv2d_0" -> "72 ResNet/Sequential[layer2]/Bottleneck[0]/NNCFBatchNorm2d[bn3]/batch_norm_0"; +"72 ResNet/Sequential[layer2]/Bottleneck[0]/NNCFBatchNorm2d[bn3]/batch_norm_0" -> "77 ResNet/Sequential[layer2]/Bottleneck[0]/__iadd___0"; +"73 ResNet/Sequential[layer2]/Bottleneck[0]/Sequential[shortcut]/NNCFConv2d[0]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "74 ResNet/Sequential[layer2]/Bottleneck[0]/Sequential[shortcut]/NNCFConv2d[0]/conv2d_0"; +"74 ResNet/Sequential[layer2]/Bottleneck[0]/Sequential[shortcut]/NNCFConv2d[0]/conv2d_0" -> "75 ResNet/Sequential[layer2]/Bottleneck[0]/Sequential[shortcut]/NNCFBatchNorm2d[1]/batch_norm_0"; +"75 
ResNet/Sequential[layer2]/Bottleneck[0]/Sequential[shortcut]/NNCFBatchNorm2d[1]/batch_norm_0" -> "76 ResNet/Sequential[layer2]/Bottleneck[0]/Sequential[shortcut]/NNCFBatchNorm2d[1]/SymmetricQuantizer/symmetric_quantize_0"; +"76 ResNet/Sequential[layer2]/Bottleneck[0]/Sequential[shortcut]/NNCFBatchNorm2d[1]/SymmetricQuantizer/symmetric_quantize_0" -> "77 ResNet/Sequential[layer2]/Bottleneck[0]/__iadd___0"; +"77 ResNet/Sequential[layer2]/Bottleneck[0]/__iadd___0" -> "78 ResNet/Sequential[layer2]/Bottleneck[0]/relu_2"; +"78 ResNet/Sequential[layer2]/Bottleneck[0]/relu_2" -> "79 ResNet/Sequential[layer2]/Bottleneck[0]/SymmetricQuantizer/symmetric_quantize_2"; +"79 ResNet/Sequential[layer2]/Bottleneck[0]/SymmetricQuantizer/symmetric_quantize_2" -> "81 ResNet/Sequential[layer2]/Bottleneck[1]/NNCFConv2d[conv1]/conv2d_0"; +"79 ResNet/Sequential[layer2]/Bottleneck[0]/SymmetricQuantizer/symmetric_quantize_2" -> "93 ResNet/Sequential[layer2]/Bottleneck[1]/__iadd___0"; +"80 ResNet/Sequential[layer2]/Bottleneck[1]/NNCFConv2d[conv1]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "81 ResNet/Sequential[layer2]/Bottleneck[1]/NNCFConv2d[conv1]/conv2d_0"; +"81 ResNet/Sequential[layer2]/Bottleneck[1]/NNCFConv2d[conv1]/conv2d_0" -> "82 ResNet/Sequential[layer2]/Bottleneck[1]/NNCFBatchNorm2d[bn1]/batch_norm_0"; +"82 ResNet/Sequential[layer2]/Bottleneck[1]/NNCFBatchNorm2d[bn1]/batch_norm_0" -> "83 ResNet/Sequential[layer2]/Bottleneck[1]/relu_0"; +"83 ResNet/Sequential[layer2]/Bottleneck[1]/relu_0" -> "84 ResNet/Sequential[layer2]/Bottleneck[1]/SymmetricQuantizer/symmetric_quantize_0"; +"84 ResNet/Sequential[layer2]/Bottleneck[1]/SymmetricQuantizer/symmetric_quantize_0" -> "86 ResNet/Sequential[layer2]/Bottleneck[1]/NNCFConv2d[conv2]/conv2d_0"; +"85 ResNet/Sequential[layer2]/Bottleneck[1]/NNCFConv2d[conv2]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "86 ResNet/Sequential[layer2]/Bottleneck[1]/NNCFConv2d[conv2]/conv2d_0"; +"86 ResNet/Sequential[layer2]/Bottleneck[1]/NNCFConv2d[conv2]/conv2d_0" -> "87 ResNet/Sequential[layer2]/Bottleneck[1]/NNCFBatchNorm2d[bn2]/batch_norm_0"; +"87 ResNet/Sequential[layer2]/Bottleneck[1]/NNCFBatchNorm2d[bn2]/batch_norm_0" -> "88 ResNet/Sequential[layer2]/Bottleneck[1]/relu_1"; +"88 ResNet/Sequential[layer2]/Bottleneck[1]/relu_1" -> "89 ResNet/Sequential[layer2]/Bottleneck[1]/SymmetricQuantizer/symmetric_quantize_1"; +"89 ResNet/Sequential[layer2]/Bottleneck[1]/SymmetricQuantizer/symmetric_quantize_1" -> "91 ResNet/Sequential[layer2]/Bottleneck[1]/NNCFConv2d[conv3]/conv2d_0"; +"90 ResNet/Sequential[layer2]/Bottleneck[1]/NNCFConv2d[conv3]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "91 ResNet/Sequential[layer2]/Bottleneck[1]/NNCFConv2d[conv3]/conv2d_0"; +"91 ResNet/Sequential[layer2]/Bottleneck[1]/NNCFConv2d[conv3]/conv2d_0" -> "92 ResNet/Sequential[layer2]/Bottleneck[1]/NNCFBatchNorm2d[bn3]/batch_norm_0"; +"92 ResNet/Sequential[layer2]/Bottleneck[1]/NNCFBatchNorm2d[bn3]/batch_norm_0" -> "93 ResNet/Sequential[layer2]/Bottleneck[1]/__iadd___0"; +"93 ResNet/Sequential[layer2]/Bottleneck[1]/__iadd___0" -> "94 ResNet/Sequential[layer2]/Bottleneck[1]/relu_2"; +"94 ResNet/Sequential[layer2]/Bottleneck[1]/relu_2" -> "95 ResNet/Sequential[layer2]/Bottleneck[1]/SymmetricQuantizer/symmetric_quantize_2"; +"95 ResNet/Sequential[layer2]/Bottleneck[1]/SymmetricQuantizer/symmetric_quantize_2" -> "97 ResNet/Sequential[layer2]/Bottleneck[2]/NNCFConv2d[conv1]/conv2d_0"; +"95 
ResNet/Sequential[layer2]/Bottleneck[1]/SymmetricQuantizer/symmetric_quantize_2" -> "109 ResNet/Sequential[layer2]/Bottleneck[2]/__iadd___0"; +"96 ResNet/Sequential[layer2]/Bottleneck[2]/NNCFConv2d[conv1]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "97 ResNet/Sequential[layer2]/Bottleneck[2]/NNCFConv2d[conv1]/conv2d_0"; +"97 ResNet/Sequential[layer2]/Bottleneck[2]/NNCFConv2d[conv1]/conv2d_0" -> "98 ResNet/Sequential[layer2]/Bottleneck[2]/NNCFBatchNorm2d[bn1]/batch_norm_0"; +"98 ResNet/Sequential[layer2]/Bottleneck[2]/NNCFBatchNorm2d[bn1]/batch_norm_0" -> "99 ResNet/Sequential[layer2]/Bottleneck[2]/relu_0"; +"99 ResNet/Sequential[layer2]/Bottleneck[2]/relu_0" -> "100 ResNet/Sequential[layer2]/Bottleneck[2]/SymmetricQuantizer/symmetric_quantize_0"; +"100 ResNet/Sequential[layer2]/Bottleneck[2]/SymmetricQuantizer/symmetric_quantize_0" -> "102 ResNet/Sequential[layer2]/Bottleneck[2]/NNCFConv2d[conv2]/conv2d_0"; +"101 ResNet/Sequential[layer2]/Bottleneck[2]/NNCFConv2d[conv2]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "102 ResNet/Sequential[layer2]/Bottleneck[2]/NNCFConv2d[conv2]/conv2d_0"; +"102 ResNet/Sequential[layer2]/Bottleneck[2]/NNCFConv2d[conv2]/conv2d_0" -> "103 ResNet/Sequential[layer2]/Bottleneck[2]/NNCFBatchNorm2d[bn2]/batch_norm_0"; +"103 ResNet/Sequential[layer2]/Bottleneck[2]/NNCFBatchNorm2d[bn2]/batch_norm_0" -> "104 ResNet/Sequential[layer2]/Bottleneck[2]/relu_1"; +"104 ResNet/Sequential[layer2]/Bottleneck[2]/relu_1" -> "105 ResNet/Sequential[layer2]/Bottleneck[2]/SymmetricQuantizer/symmetric_quantize_1"; +"105 ResNet/Sequential[layer2]/Bottleneck[2]/SymmetricQuantizer/symmetric_quantize_1" -> "107 ResNet/Sequential[layer2]/Bottleneck[2]/NNCFConv2d[conv3]/conv2d_0"; +"106 ResNet/Sequential[layer2]/Bottleneck[2]/NNCFConv2d[conv3]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "107 ResNet/Sequential[layer2]/Bottleneck[2]/NNCFConv2d[conv3]/conv2d_0"; +"107 ResNet/Sequential[layer2]/Bottleneck[2]/NNCFConv2d[conv3]/conv2d_0" -> "108 ResNet/Sequential[layer2]/Bottleneck[2]/NNCFBatchNorm2d[bn3]/batch_norm_0"; +"108 ResNet/Sequential[layer2]/Bottleneck[2]/NNCFBatchNorm2d[bn3]/batch_norm_0" -> "109 ResNet/Sequential[layer2]/Bottleneck[2]/__iadd___0"; +"109 ResNet/Sequential[layer2]/Bottleneck[2]/__iadd___0" -> "110 ResNet/Sequential[layer2]/Bottleneck[2]/relu_2"; +"110 ResNet/Sequential[layer2]/Bottleneck[2]/relu_2" -> "111 ResNet/Sequential[layer2]/Bottleneck[2]/SymmetricQuantizer/symmetric_quantize_2"; +"111 ResNet/Sequential[layer2]/Bottleneck[2]/SymmetricQuantizer/symmetric_quantize_2" -> "113 ResNet/Sequential[layer2]/Bottleneck[3]/NNCFConv2d[conv1]/conv2d_0"; +"111 ResNet/Sequential[layer2]/Bottleneck[2]/SymmetricQuantizer/symmetric_quantize_2" -> "125 ResNet/Sequential[layer2]/Bottleneck[3]/__iadd___0"; +"112 ResNet/Sequential[layer2]/Bottleneck[3]/NNCFConv2d[conv1]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "113 ResNet/Sequential[layer2]/Bottleneck[3]/NNCFConv2d[conv1]/conv2d_0"; +"113 ResNet/Sequential[layer2]/Bottleneck[3]/NNCFConv2d[conv1]/conv2d_0" -> "114 ResNet/Sequential[layer2]/Bottleneck[3]/NNCFBatchNorm2d[bn1]/batch_norm_0"; +"114 ResNet/Sequential[layer2]/Bottleneck[3]/NNCFBatchNorm2d[bn1]/batch_norm_0" -> "115 ResNet/Sequential[layer2]/Bottleneck[3]/relu_0"; +"115 ResNet/Sequential[layer2]/Bottleneck[3]/relu_0" -> "116 ResNet/Sequential[layer2]/Bottleneck[3]/SymmetricQuantizer/symmetric_quantize_0"; +"116 
ResNet/Sequential[layer2]/Bottleneck[3]/SymmetricQuantizer/symmetric_quantize_0" -> "118 ResNet/Sequential[layer2]/Bottleneck[3]/NNCFConv2d[conv2]/conv2d_0"; +"117 ResNet/Sequential[layer2]/Bottleneck[3]/NNCFConv2d[conv2]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "118 ResNet/Sequential[layer2]/Bottleneck[3]/NNCFConv2d[conv2]/conv2d_0"; +"118 ResNet/Sequential[layer2]/Bottleneck[3]/NNCFConv2d[conv2]/conv2d_0" -> "119 ResNet/Sequential[layer2]/Bottleneck[3]/NNCFBatchNorm2d[bn2]/batch_norm_0"; +"119 ResNet/Sequential[layer2]/Bottleneck[3]/NNCFBatchNorm2d[bn2]/batch_norm_0" -> "120 ResNet/Sequential[layer2]/Bottleneck[3]/relu_1"; +"120 ResNet/Sequential[layer2]/Bottleneck[3]/relu_1" -> "121 ResNet/Sequential[layer2]/Bottleneck[3]/SymmetricQuantizer/symmetric_quantize_1"; +"121 ResNet/Sequential[layer2]/Bottleneck[3]/SymmetricQuantizer/symmetric_quantize_1" -> "123 ResNet/Sequential[layer2]/Bottleneck[3]/NNCFConv2d[conv3]/conv2d_0"; +"122 ResNet/Sequential[layer2]/Bottleneck[3]/NNCFConv2d[conv3]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "123 ResNet/Sequential[layer2]/Bottleneck[3]/NNCFConv2d[conv3]/conv2d_0"; +"123 ResNet/Sequential[layer2]/Bottleneck[3]/NNCFConv2d[conv3]/conv2d_0" -> "124 ResNet/Sequential[layer2]/Bottleneck[3]/NNCFBatchNorm2d[bn3]/batch_norm_0"; +"124 ResNet/Sequential[layer2]/Bottleneck[3]/NNCFBatchNorm2d[bn3]/batch_norm_0" -> "125 ResNet/Sequential[layer2]/Bottleneck[3]/__iadd___0"; +"125 ResNet/Sequential[layer2]/Bottleneck[3]/__iadd___0" -> "126 ResNet/Sequential[layer2]/Bottleneck[3]/relu_2"; +"126 ResNet/Sequential[layer2]/Bottleneck[3]/relu_2" -> "127 ResNet/Sequential[layer2]/Bottleneck[3]/SymmetricQuantizer/symmetric_quantize_2"; +"127 ResNet/Sequential[layer2]/Bottleneck[3]/SymmetricQuantizer/symmetric_quantize_2" -> "129 ResNet/Sequential[layer3]/Bottleneck[0]/NNCFConv2d[conv1]/conv2d_0"; +"127 ResNet/Sequential[layer2]/Bottleneck[3]/SymmetricQuantizer/symmetric_quantize_2" -> "142 ResNet/Sequential[layer3]/Bottleneck[0]/Sequential[shortcut]/NNCFConv2d[0]/conv2d_0"; +"128 ResNet/Sequential[layer3]/Bottleneck[0]/NNCFConv2d[conv1]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "129 ResNet/Sequential[layer3]/Bottleneck[0]/NNCFConv2d[conv1]/conv2d_0"; +"129 ResNet/Sequential[layer3]/Bottleneck[0]/NNCFConv2d[conv1]/conv2d_0" -> "130 ResNet/Sequential[layer3]/Bottleneck[0]/NNCFBatchNorm2d[bn1]/batch_norm_0"; +"130 ResNet/Sequential[layer3]/Bottleneck[0]/NNCFBatchNorm2d[bn1]/batch_norm_0" -> "131 ResNet/Sequential[layer3]/Bottleneck[0]/relu_0"; +"131 ResNet/Sequential[layer3]/Bottleneck[0]/relu_0" -> "132 ResNet/Sequential[layer3]/Bottleneck[0]/SymmetricQuantizer/symmetric_quantize_0"; +"132 ResNet/Sequential[layer3]/Bottleneck[0]/SymmetricQuantizer/symmetric_quantize_0" -> "134 ResNet/Sequential[layer3]/Bottleneck[0]/NNCFConv2d[conv2]/conv2d_0"; +"133 ResNet/Sequential[layer3]/Bottleneck[0]/NNCFConv2d[conv2]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "134 ResNet/Sequential[layer3]/Bottleneck[0]/NNCFConv2d[conv2]/conv2d_0"; +"134 ResNet/Sequential[layer3]/Bottleneck[0]/NNCFConv2d[conv2]/conv2d_0" -> "135 ResNet/Sequential[layer3]/Bottleneck[0]/NNCFBatchNorm2d[bn2]/batch_norm_0"; +"135 ResNet/Sequential[layer3]/Bottleneck[0]/NNCFBatchNorm2d[bn2]/batch_norm_0" -> "136 ResNet/Sequential[layer3]/Bottleneck[0]/relu_1"; +"136 ResNet/Sequential[layer3]/Bottleneck[0]/relu_1" -> "137 
ResNet/Sequential[layer3]/Bottleneck[0]/SymmetricQuantizer/symmetric_quantize_1"; +"137 ResNet/Sequential[layer3]/Bottleneck[0]/SymmetricQuantizer/symmetric_quantize_1" -> "139 ResNet/Sequential[layer3]/Bottleneck[0]/NNCFConv2d[conv3]/conv2d_0"; +"138 ResNet/Sequential[layer3]/Bottleneck[0]/NNCFConv2d[conv3]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "139 ResNet/Sequential[layer3]/Bottleneck[0]/NNCFConv2d[conv3]/conv2d_0"; +"139 ResNet/Sequential[layer3]/Bottleneck[0]/NNCFConv2d[conv3]/conv2d_0" -> "140 ResNet/Sequential[layer3]/Bottleneck[0]/NNCFBatchNorm2d[bn3]/batch_norm_0"; +"140 ResNet/Sequential[layer3]/Bottleneck[0]/NNCFBatchNorm2d[bn3]/batch_norm_0" -> "145 ResNet/Sequential[layer3]/Bottleneck[0]/__iadd___0"; +"141 ResNet/Sequential[layer3]/Bottleneck[0]/Sequential[shortcut]/NNCFConv2d[0]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "142 ResNet/Sequential[layer3]/Bottleneck[0]/Sequential[shortcut]/NNCFConv2d[0]/conv2d_0"; +"142 ResNet/Sequential[layer3]/Bottleneck[0]/Sequential[shortcut]/NNCFConv2d[0]/conv2d_0" -> "143 ResNet/Sequential[layer3]/Bottleneck[0]/Sequential[shortcut]/NNCFBatchNorm2d[1]/batch_norm_0"; +"143 ResNet/Sequential[layer3]/Bottleneck[0]/Sequential[shortcut]/NNCFBatchNorm2d[1]/batch_norm_0" -> "144 ResNet/Sequential[layer3]/Bottleneck[0]/Sequential[shortcut]/NNCFBatchNorm2d[1]/SymmetricQuantizer/symmetric_quantize_0"; +"144 ResNet/Sequential[layer3]/Bottleneck[0]/Sequential[shortcut]/NNCFBatchNorm2d[1]/SymmetricQuantizer/symmetric_quantize_0" -> "145 ResNet/Sequential[layer3]/Bottleneck[0]/__iadd___0"; +"145 ResNet/Sequential[layer3]/Bottleneck[0]/__iadd___0" -> "146 ResNet/Sequential[layer3]/Bottleneck[0]/relu_2"; +"146 ResNet/Sequential[layer3]/Bottleneck[0]/relu_2" -> "147 ResNet/Sequential[layer3]/Bottleneck[0]/SymmetricQuantizer/symmetric_quantize_2"; +"147 ResNet/Sequential[layer3]/Bottleneck[0]/SymmetricQuantizer/symmetric_quantize_2" -> "149 ResNet/Sequential[layer3]/Bottleneck[1]/NNCFConv2d[conv1]/conv2d_0"; +"147 ResNet/Sequential[layer3]/Bottleneck[0]/SymmetricQuantizer/symmetric_quantize_2" -> "161 ResNet/Sequential[layer3]/Bottleneck[1]/__iadd___0"; +"148 ResNet/Sequential[layer3]/Bottleneck[1]/NNCFConv2d[conv1]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "149 ResNet/Sequential[layer3]/Bottleneck[1]/NNCFConv2d[conv1]/conv2d_0"; +"149 ResNet/Sequential[layer3]/Bottleneck[1]/NNCFConv2d[conv1]/conv2d_0" -> "150 ResNet/Sequential[layer3]/Bottleneck[1]/NNCFBatchNorm2d[bn1]/batch_norm_0"; +"150 ResNet/Sequential[layer3]/Bottleneck[1]/NNCFBatchNorm2d[bn1]/batch_norm_0" -> "151 ResNet/Sequential[layer3]/Bottleneck[1]/relu_0"; +"151 ResNet/Sequential[layer3]/Bottleneck[1]/relu_0" -> "152 ResNet/Sequential[layer3]/Bottleneck[1]/SymmetricQuantizer/symmetric_quantize_0"; +"152 ResNet/Sequential[layer3]/Bottleneck[1]/SymmetricQuantizer/symmetric_quantize_0" -> "154 ResNet/Sequential[layer3]/Bottleneck[1]/NNCFConv2d[conv2]/conv2d_0"; +"153 ResNet/Sequential[layer3]/Bottleneck[1]/NNCFConv2d[conv2]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "154 ResNet/Sequential[layer3]/Bottleneck[1]/NNCFConv2d[conv2]/conv2d_0"; +"154 ResNet/Sequential[layer3]/Bottleneck[1]/NNCFConv2d[conv2]/conv2d_0" -> "155 ResNet/Sequential[layer3]/Bottleneck[1]/NNCFBatchNorm2d[bn2]/batch_norm_0"; +"155 ResNet/Sequential[layer3]/Bottleneck[1]/NNCFBatchNorm2d[bn2]/batch_norm_0" -> "156 ResNet/Sequential[layer3]/Bottleneck[1]/relu_1"; 
+"156 ResNet/Sequential[layer3]/Bottleneck[1]/relu_1" -> "157 ResNet/Sequential[layer3]/Bottleneck[1]/SymmetricQuantizer/symmetric_quantize_1"; +"157 ResNet/Sequential[layer3]/Bottleneck[1]/SymmetricQuantizer/symmetric_quantize_1" -> "159 ResNet/Sequential[layer3]/Bottleneck[1]/NNCFConv2d[conv3]/conv2d_0"; +"158 ResNet/Sequential[layer3]/Bottleneck[1]/NNCFConv2d[conv3]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "159 ResNet/Sequential[layer3]/Bottleneck[1]/NNCFConv2d[conv3]/conv2d_0"; +"159 ResNet/Sequential[layer3]/Bottleneck[1]/NNCFConv2d[conv3]/conv2d_0" -> "160 ResNet/Sequential[layer3]/Bottleneck[1]/NNCFBatchNorm2d[bn3]/batch_norm_0"; +"160 ResNet/Sequential[layer3]/Bottleneck[1]/NNCFBatchNorm2d[bn3]/batch_norm_0" -> "161 ResNet/Sequential[layer3]/Bottleneck[1]/__iadd___0"; +"161 ResNet/Sequential[layer3]/Bottleneck[1]/__iadd___0" -> "162 ResNet/Sequential[layer3]/Bottleneck[1]/relu_2"; +"162 ResNet/Sequential[layer3]/Bottleneck[1]/relu_2" -> "163 ResNet/Sequential[layer3]/Bottleneck[1]/SymmetricQuantizer/symmetric_quantize_2"; +"163 ResNet/Sequential[layer3]/Bottleneck[1]/SymmetricQuantizer/symmetric_quantize_2" -> "165 ResNet/Sequential[layer3]/Bottleneck[2]/NNCFConv2d[conv1]/conv2d_0"; +"163 ResNet/Sequential[layer3]/Bottleneck[1]/SymmetricQuantizer/symmetric_quantize_2" -> "177 ResNet/Sequential[layer3]/Bottleneck[2]/__iadd___0"; +"164 ResNet/Sequential[layer3]/Bottleneck[2]/NNCFConv2d[conv1]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "165 ResNet/Sequential[layer3]/Bottleneck[2]/NNCFConv2d[conv1]/conv2d_0"; +"165 ResNet/Sequential[layer3]/Bottleneck[2]/NNCFConv2d[conv1]/conv2d_0" -> "166 ResNet/Sequential[layer3]/Bottleneck[2]/NNCFBatchNorm2d[bn1]/batch_norm_0"; +"166 ResNet/Sequential[layer3]/Bottleneck[2]/NNCFBatchNorm2d[bn1]/batch_norm_0" -> "167 ResNet/Sequential[layer3]/Bottleneck[2]/relu_0"; +"167 ResNet/Sequential[layer3]/Bottleneck[2]/relu_0" -> "168 ResNet/Sequential[layer3]/Bottleneck[2]/SymmetricQuantizer/symmetric_quantize_0"; +"168 ResNet/Sequential[layer3]/Bottleneck[2]/SymmetricQuantizer/symmetric_quantize_0" -> "170 ResNet/Sequential[layer3]/Bottleneck[2]/NNCFConv2d[conv2]/conv2d_0"; +"169 ResNet/Sequential[layer3]/Bottleneck[2]/NNCFConv2d[conv2]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "170 ResNet/Sequential[layer3]/Bottleneck[2]/NNCFConv2d[conv2]/conv2d_0"; +"170 ResNet/Sequential[layer3]/Bottleneck[2]/NNCFConv2d[conv2]/conv2d_0" -> "171 ResNet/Sequential[layer3]/Bottleneck[2]/NNCFBatchNorm2d[bn2]/batch_norm_0"; +"171 ResNet/Sequential[layer3]/Bottleneck[2]/NNCFBatchNorm2d[bn2]/batch_norm_0" -> "172 ResNet/Sequential[layer3]/Bottleneck[2]/relu_1"; +"172 ResNet/Sequential[layer3]/Bottleneck[2]/relu_1" -> "173 ResNet/Sequential[layer3]/Bottleneck[2]/SymmetricQuantizer/symmetric_quantize_1"; +"173 ResNet/Sequential[layer3]/Bottleneck[2]/SymmetricQuantizer/symmetric_quantize_1" -> "175 ResNet/Sequential[layer3]/Bottleneck[2]/NNCFConv2d[conv3]/conv2d_0"; +"174 ResNet/Sequential[layer3]/Bottleneck[2]/NNCFConv2d[conv3]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "175 ResNet/Sequential[layer3]/Bottleneck[2]/NNCFConv2d[conv3]/conv2d_0"; +"175 ResNet/Sequential[layer3]/Bottleneck[2]/NNCFConv2d[conv3]/conv2d_0" -> "176 ResNet/Sequential[layer3]/Bottleneck[2]/NNCFBatchNorm2d[bn3]/batch_norm_0"; +"176 ResNet/Sequential[layer3]/Bottleneck[2]/NNCFBatchNorm2d[bn3]/batch_norm_0" -> "177 
ResNet/Sequential[layer3]/Bottleneck[2]/__iadd___0"; +"177 ResNet/Sequential[layer3]/Bottleneck[2]/__iadd___0" -> "178 ResNet/Sequential[layer3]/Bottleneck[2]/relu_2"; +"178 ResNet/Sequential[layer3]/Bottleneck[2]/relu_2" -> "179 ResNet/Sequential[layer3]/Bottleneck[2]/SymmetricQuantizer/symmetric_quantize_2"; +"179 ResNet/Sequential[layer3]/Bottleneck[2]/SymmetricQuantizer/symmetric_quantize_2" -> "181 ResNet/Sequential[layer3]/Bottleneck[3]/NNCFConv2d[conv1]/conv2d_0"; +"179 ResNet/Sequential[layer3]/Bottleneck[2]/SymmetricQuantizer/symmetric_quantize_2" -> "193 ResNet/Sequential[layer3]/Bottleneck[3]/__iadd___0"; +"180 ResNet/Sequential[layer3]/Bottleneck[3]/NNCFConv2d[conv1]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "181 ResNet/Sequential[layer3]/Bottleneck[3]/NNCFConv2d[conv1]/conv2d_0"; +"181 ResNet/Sequential[layer3]/Bottleneck[3]/NNCFConv2d[conv1]/conv2d_0" -> "182 ResNet/Sequential[layer3]/Bottleneck[3]/NNCFBatchNorm2d[bn1]/batch_norm_0"; +"182 ResNet/Sequential[layer3]/Bottleneck[3]/NNCFBatchNorm2d[bn1]/batch_norm_0" -> "183 ResNet/Sequential[layer3]/Bottleneck[3]/relu_0"; +"183 ResNet/Sequential[layer3]/Bottleneck[3]/relu_0" -> "184 ResNet/Sequential[layer3]/Bottleneck[3]/SymmetricQuantizer/symmetric_quantize_0"; +"184 ResNet/Sequential[layer3]/Bottleneck[3]/SymmetricQuantizer/symmetric_quantize_0" -> "186 ResNet/Sequential[layer3]/Bottleneck[3]/NNCFConv2d[conv2]/conv2d_0"; +"185 ResNet/Sequential[layer3]/Bottleneck[3]/NNCFConv2d[conv2]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "186 ResNet/Sequential[layer3]/Bottleneck[3]/NNCFConv2d[conv2]/conv2d_0"; +"186 ResNet/Sequential[layer3]/Bottleneck[3]/NNCFConv2d[conv2]/conv2d_0" -> "187 ResNet/Sequential[layer3]/Bottleneck[3]/NNCFBatchNorm2d[bn2]/batch_norm_0"; +"187 ResNet/Sequential[layer3]/Bottleneck[3]/NNCFBatchNorm2d[bn2]/batch_norm_0" -> "188 ResNet/Sequential[layer3]/Bottleneck[3]/relu_1"; +"188 ResNet/Sequential[layer3]/Bottleneck[3]/relu_1" -> "189 ResNet/Sequential[layer3]/Bottleneck[3]/SymmetricQuantizer/symmetric_quantize_1"; +"189 ResNet/Sequential[layer3]/Bottleneck[3]/SymmetricQuantizer/symmetric_quantize_1" -> "191 ResNet/Sequential[layer3]/Bottleneck[3]/NNCFConv2d[conv3]/conv2d_0"; +"190 ResNet/Sequential[layer3]/Bottleneck[3]/NNCFConv2d[conv3]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "191 ResNet/Sequential[layer3]/Bottleneck[3]/NNCFConv2d[conv3]/conv2d_0"; +"191 ResNet/Sequential[layer3]/Bottleneck[3]/NNCFConv2d[conv3]/conv2d_0" -> "192 ResNet/Sequential[layer3]/Bottleneck[3]/NNCFBatchNorm2d[bn3]/batch_norm_0"; +"192 ResNet/Sequential[layer3]/Bottleneck[3]/NNCFBatchNorm2d[bn3]/batch_norm_0" -> "193 ResNet/Sequential[layer3]/Bottleneck[3]/__iadd___0"; +"193 ResNet/Sequential[layer3]/Bottleneck[3]/__iadd___0" -> "194 ResNet/Sequential[layer3]/Bottleneck[3]/relu_2"; +"194 ResNet/Sequential[layer3]/Bottleneck[3]/relu_2" -> "195 ResNet/Sequential[layer3]/Bottleneck[3]/SymmetricQuantizer/symmetric_quantize_2"; +"195 ResNet/Sequential[layer3]/Bottleneck[3]/SymmetricQuantizer/symmetric_quantize_2" -> "197 ResNet/Sequential[layer3]/Bottleneck[4]/NNCFConv2d[conv1]/conv2d_0"; +"195 ResNet/Sequential[layer3]/Bottleneck[3]/SymmetricQuantizer/symmetric_quantize_2" -> "209 ResNet/Sequential[layer3]/Bottleneck[4]/__iadd___0"; +"196 ResNet/Sequential[layer3]/Bottleneck[4]/NNCFConv2d[conv1]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "197 
ResNet/Sequential[layer3]/Bottleneck[4]/NNCFConv2d[conv1]/conv2d_0"; +"197 ResNet/Sequential[layer3]/Bottleneck[4]/NNCFConv2d[conv1]/conv2d_0" -> "198 ResNet/Sequential[layer3]/Bottleneck[4]/NNCFBatchNorm2d[bn1]/batch_norm_0"; +"198 ResNet/Sequential[layer3]/Bottleneck[4]/NNCFBatchNorm2d[bn1]/batch_norm_0" -> "199 ResNet/Sequential[layer3]/Bottleneck[4]/relu_0"; +"199 ResNet/Sequential[layer3]/Bottleneck[4]/relu_0" -> "200 ResNet/Sequential[layer3]/Bottleneck[4]/SymmetricQuantizer/symmetric_quantize_0"; +"200 ResNet/Sequential[layer3]/Bottleneck[4]/SymmetricQuantizer/symmetric_quantize_0" -> "202 ResNet/Sequential[layer3]/Bottleneck[4]/NNCFConv2d[conv2]/conv2d_0"; +"201 ResNet/Sequential[layer3]/Bottleneck[4]/NNCFConv2d[conv2]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "202 ResNet/Sequential[layer3]/Bottleneck[4]/NNCFConv2d[conv2]/conv2d_0"; +"202 ResNet/Sequential[layer3]/Bottleneck[4]/NNCFConv2d[conv2]/conv2d_0" -> "203 ResNet/Sequential[layer3]/Bottleneck[4]/NNCFBatchNorm2d[bn2]/batch_norm_0"; +"203 ResNet/Sequential[layer3]/Bottleneck[4]/NNCFBatchNorm2d[bn2]/batch_norm_0" -> "204 ResNet/Sequential[layer3]/Bottleneck[4]/relu_1"; +"204 ResNet/Sequential[layer3]/Bottleneck[4]/relu_1" -> "205 ResNet/Sequential[layer3]/Bottleneck[4]/SymmetricQuantizer/symmetric_quantize_1"; +"205 ResNet/Sequential[layer3]/Bottleneck[4]/SymmetricQuantizer/symmetric_quantize_1" -> "207 ResNet/Sequential[layer3]/Bottleneck[4]/NNCFConv2d[conv3]/conv2d_0"; +"206 ResNet/Sequential[layer3]/Bottleneck[4]/NNCFConv2d[conv3]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "207 ResNet/Sequential[layer3]/Bottleneck[4]/NNCFConv2d[conv3]/conv2d_0"; +"207 ResNet/Sequential[layer3]/Bottleneck[4]/NNCFConv2d[conv3]/conv2d_0" -> "208 ResNet/Sequential[layer3]/Bottleneck[4]/NNCFBatchNorm2d[bn3]/batch_norm_0"; +"208 ResNet/Sequential[layer3]/Bottleneck[4]/NNCFBatchNorm2d[bn3]/batch_norm_0" -> "209 ResNet/Sequential[layer3]/Bottleneck[4]/__iadd___0"; +"209 ResNet/Sequential[layer3]/Bottleneck[4]/__iadd___0" -> "210 ResNet/Sequential[layer3]/Bottleneck[4]/relu_2"; +"210 ResNet/Sequential[layer3]/Bottleneck[4]/relu_2" -> "211 ResNet/Sequential[layer3]/Bottleneck[4]/SymmetricQuantizer/symmetric_quantize_2"; +"211 ResNet/Sequential[layer3]/Bottleneck[4]/SymmetricQuantizer/symmetric_quantize_2" -> "213 ResNet/Sequential[layer3]/Bottleneck[5]/NNCFConv2d[conv1]/conv2d_0"; +"211 ResNet/Sequential[layer3]/Bottleneck[4]/SymmetricQuantizer/symmetric_quantize_2" -> "225 ResNet/Sequential[layer3]/Bottleneck[5]/__iadd___0"; +"212 ResNet/Sequential[layer3]/Bottleneck[5]/NNCFConv2d[conv1]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "213 ResNet/Sequential[layer3]/Bottleneck[5]/NNCFConv2d[conv1]/conv2d_0"; +"213 ResNet/Sequential[layer3]/Bottleneck[5]/NNCFConv2d[conv1]/conv2d_0" -> "214 ResNet/Sequential[layer3]/Bottleneck[5]/NNCFBatchNorm2d[bn1]/batch_norm_0"; +"214 ResNet/Sequential[layer3]/Bottleneck[5]/NNCFBatchNorm2d[bn1]/batch_norm_0" -> "215 ResNet/Sequential[layer3]/Bottleneck[5]/relu_0"; +"215 ResNet/Sequential[layer3]/Bottleneck[5]/relu_0" -> "216 ResNet/Sequential[layer3]/Bottleneck[5]/SymmetricQuantizer/symmetric_quantize_0"; +"216 ResNet/Sequential[layer3]/Bottleneck[5]/SymmetricQuantizer/symmetric_quantize_0" -> "218 ResNet/Sequential[layer3]/Bottleneck[5]/NNCFConv2d[conv2]/conv2d_0"; +"217 
ResNet/Sequential[layer3]/Bottleneck[5]/NNCFConv2d[conv2]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "218 ResNet/Sequential[layer3]/Bottleneck[5]/NNCFConv2d[conv2]/conv2d_0"; +"218 ResNet/Sequential[layer3]/Bottleneck[5]/NNCFConv2d[conv2]/conv2d_0" -> "219 ResNet/Sequential[layer3]/Bottleneck[5]/NNCFBatchNorm2d[bn2]/batch_norm_0"; +"219 ResNet/Sequential[layer3]/Bottleneck[5]/NNCFBatchNorm2d[bn2]/batch_norm_0" -> "220 ResNet/Sequential[layer3]/Bottleneck[5]/relu_1"; +"220 ResNet/Sequential[layer3]/Bottleneck[5]/relu_1" -> "221 ResNet/Sequential[layer3]/Bottleneck[5]/SymmetricQuantizer/symmetric_quantize_1"; +"221 ResNet/Sequential[layer3]/Bottleneck[5]/SymmetricQuantizer/symmetric_quantize_1" -> "223 ResNet/Sequential[layer3]/Bottleneck[5]/NNCFConv2d[conv3]/conv2d_0"; +"222 ResNet/Sequential[layer3]/Bottleneck[5]/NNCFConv2d[conv3]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "223 ResNet/Sequential[layer3]/Bottleneck[5]/NNCFConv2d[conv3]/conv2d_0"; +"223 ResNet/Sequential[layer3]/Bottleneck[5]/NNCFConv2d[conv3]/conv2d_0" -> "224 ResNet/Sequential[layer3]/Bottleneck[5]/NNCFBatchNorm2d[bn3]/batch_norm_0"; +"224 ResNet/Sequential[layer3]/Bottleneck[5]/NNCFBatchNorm2d[bn3]/batch_norm_0" -> "225 ResNet/Sequential[layer3]/Bottleneck[5]/__iadd___0"; +"225 ResNet/Sequential[layer3]/Bottleneck[5]/__iadd___0" -> "226 ResNet/Sequential[layer3]/Bottleneck[5]/relu_2"; +"226 ResNet/Sequential[layer3]/Bottleneck[5]/relu_2" -> "227 ResNet/Sequential[layer3]/Bottleneck[5]/SymmetricQuantizer/symmetric_quantize_2"; +"227 ResNet/Sequential[layer3]/Bottleneck[5]/SymmetricQuantizer/symmetric_quantize_2" -> "229 ResNet/Sequential[layer4]/Bottleneck[0]/NNCFConv2d[conv1]/conv2d_0"; +"227 ResNet/Sequential[layer3]/Bottleneck[5]/SymmetricQuantizer/symmetric_quantize_2" -> "242 ResNet/Sequential[layer4]/Bottleneck[0]/Sequential[shortcut]/NNCFConv2d[0]/conv2d_0"; +"228 ResNet/Sequential[layer4]/Bottleneck[0]/NNCFConv2d[conv1]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "229 ResNet/Sequential[layer4]/Bottleneck[0]/NNCFConv2d[conv1]/conv2d_0"; +"229 ResNet/Sequential[layer4]/Bottleneck[0]/NNCFConv2d[conv1]/conv2d_0" -> "230 ResNet/Sequential[layer4]/Bottleneck[0]/NNCFBatchNorm2d[bn1]/batch_norm_0"; +"230 ResNet/Sequential[layer4]/Bottleneck[0]/NNCFBatchNorm2d[bn1]/batch_norm_0" -> "231 ResNet/Sequential[layer4]/Bottleneck[0]/relu_0"; +"231 ResNet/Sequential[layer4]/Bottleneck[0]/relu_0" -> "232 ResNet/Sequential[layer4]/Bottleneck[0]/SymmetricQuantizer/symmetric_quantize_0"; +"232 ResNet/Sequential[layer4]/Bottleneck[0]/SymmetricQuantizer/symmetric_quantize_0" -> "234 ResNet/Sequential[layer4]/Bottleneck[0]/NNCFConv2d[conv2]/conv2d_0"; +"233 ResNet/Sequential[layer4]/Bottleneck[0]/NNCFConv2d[conv2]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "234 ResNet/Sequential[layer4]/Bottleneck[0]/NNCFConv2d[conv2]/conv2d_0"; +"234 ResNet/Sequential[layer4]/Bottleneck[0]/NNCFConv2d[conv2]/conv2d_0" -> "235 ResNet/Sequential[layer4]/Bottleneck[0]/NNCFBatchNorm2d[bn2]/batch_norm_0"; +"235 ResNet/Sequential[layer4]/Bottleneck[0]/NNCFBatchNorm2d[bn2]/batch_norm_0" -> "236 ResNet/Sequential[layer4]/Bottleneck[0]/relu_1"; +"236 ResNet/Sequential[layer4]/Bottleneck[0]/relu_1" -> "237 ResNet/Sequential[layer4]/Bottleneck[0]/SymmetricQuantizer/symmetric_quantize_1"; +"237 ResNet/Sequential[layer4]/Bottleneck[0]/SymmetricQuantizer/symmetric_quantize_1" -> "239 
ResNet/Sequential[layer4]/Bottleneck[0]/NNCFConv2d[conv3]/conv2d_0"; +"238 ResNet/Sequential[layer4]/Bottleneck[0]/NNCFConv2d[conv3]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "239 ResNet/Sequential[layer4]/Bottleneck[0]/NNCFConv2d[conv3]/conv2d_0"; +"239 ResNet/Sequential[layer4]/Bottleneck[0]/NNCFConv2d[conv3]/conv2d_0" -> "240 ResNet/Sequential[layer4]/Bottleneck[0]/NNCFBatchNorm2d[bn3]/batch_norm_0"; +"240 ResNet/Sequential[layer4]/Bottleneck[0]/NNCFBatchNorm2d[bn3]/batch_norm_0" -> "245 ResNet/Sequential[layer4]/Bottleneck[0]/__iadd___0"; +"241 ResNet/Sequential[layer4]/Bottleneck[0]/Sequential[shortcut]/NNCFConv2d[0]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "242 ResNet/Sequential[layer4]/Bottleneck[0]/Sequential[shortcut]/NNCFConv2d[0]/conv2d_0"; +"242 ResNet/Sequential[layer4]/Bottleneck[0]/Sequential[shortcut]/NNCFConv2d[0]/conv2d_0" -> "243 ResNet/Sequential[layer4]/Bottleneck[0]/Sequential[shortcut]/NNCFBatchNorm2d[1]/batch_norm_0"; +"243 ResNet/Sequential[layer4]/Bottleneck[0]/Sequential[shortcut]/NNCFBatchNorm2d[1]/batch_norm_0" -> "244 ResNet/Sequential[layer4]/Bottleneck[0]/Sequential[shortcut]/NNCFBatchNorm2d[1]/SymmetricQuantizer/symmetric_quantize_0"; +"244 ResNet/Sequential[layer4]/Bottleneck[0]/Sequential[shortcut]/NNCFBatchNorm2d[1]/SymmetricQuantizer/symmetric_quantize_0" -> "245 ResNet/Sequential[layer4]/Bottleneck[0]/__iadd___0"; +"245 ResNet/Sequential[layer4]/Bottleneck[0]/__iadd___0" -> "246 ResNet/Sequential[layer4]/Bottleneck[0]/relu_2"; +"246 ResNet/Sequential[layer4]/Bottleneck[0]/relu_2" -> "247 ResNet/Sequential[layer4]/Bottleneck[0]/SymmetricQuantizer/symmetric_quantize_2"; +"247 ResNet/Sequential[layer4]/Bottleneck[0]/SymmetricQuantizer/symmetric_quantize_2" -> "249 ResNet/Sequential[layer4]/Bottleneck[1]/NNCFConv2d[conv1]/conv2d_0"; +"247 ResNet/Sequential[layer4]/Bottleneck[0]/SymmetricQuantizer/symmetric_quantize_2" -> "261 ResNet/Sequential[layer4]/Bottleneck[1]/__iadd___0"; +"248 ResNet/Sequential[layer4]/Bottleneck[1]/NNCFConv2d[conv1]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "249 ResNet/Sequential[layer4]/Bottleneck[1]/NNCFConv2d[conv1]/conv2d_0"; +"249 ResNet/Sequential[layer4]/Bottleneck[1]/NNCFConv2d[conv1]/conv2d_0" -> "250 ResNet/Sequential[layer4]/Bottleneck[1]/NNCFBatchNorm2d[bn1]/batch_norm_0"; +"250 ResNet/Sequential[layer4]/Bottleneck[1]/NNCFBatchNorm2d[bn1]/batch_norm_0" -> "251 ResNet/Sequential[layer4]/Bottleneck[1]/relu_0"; +"251 ResNet/Sequential[layer4]/Bottleneck[1]/relu_0" -> "252 ResNet/Sequential[layer4]/Bottleneck[1]/SymmetricQuantizer/symmetric_quantize_0"; +"252 ResNet/Sequential[layer4]/Bottleneck[1]/SymmetricQuantizer/symmetric_quantize_0" -> "254 ResNet/Sequential[layer4]/Bottleneck[1]/NNCFConv2d[conv2]/conv2d_0"; +"253 ResNet/Sequential[layer4]/Bottleneck[1]/NNCFConv2d[conv2]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "254 ResNet/Sequential[layer4]/Bottleneck[1]/NNCFConv2d[conv2]/conv2d_0"; +"254 ResNet/Sequential[layer4]/Bottleneck[1]/NNCFConv2d[conv2]/conv2d_0" -> "255 ResNet/Sequential[layer4]/Bottleneck[1]/NNCFBatchNorm2d[bn2]/batch_norm_0"; +"255 ResNet/Sequential[layer4]/Bottleneck[1]/NNCFBatchNorm2d[bn2]/batch_norm_0" -> "256 ResNet/Sequential[layer4]/Bottleneck[1]/relu_1"; +"256 ResNet/Sequential[layer4]/Bottleneck[1]/relu_1" -> "257 ResNet/Sequential[layer4]/Bottleneck[1]/SymmetricQuantizer/symmetric_quantize_1"; +"257 
ResNet/Sequential[layer4]/Bottleneck[1]/SymmetricQuantizer/symmetric_quantize_1" -> "259 ResNet/Sequential[layer4]/Bottleneck[1]/NNCFConv2d[conv3]/conv2d_0"; +"258 ResNet/Sequential[layer4]/Bottleneck[1]/NNCFConv2d[conv3]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "259 ResNet/Sequential[layer4]/Bottleneck[1]/NNCFConv2d[conv3]/conv2d_0"; +"259 ResNet/Sequential[layer4]/Bottleneck[1]/NNCFConv2d[conv3]/conv2d_0" -> "260 ResNet/Sequential[layer4]/Bottleneck[1]/NNCFBatchNorm2d[bn3]/batch_norm_0"; +"260 ResNet/Sequential[layer4]/Bottleneck[1]/NNCFBatchNorm2d[bn3]/batch_norm_0" -> "261 ResNet/Sequential[layer4]/Bottleneck[1]/__iadd___0"; +"261 ResNet/Sequential[layer4]/Bottleneck[1]/__iadd___0" -> "262 ResNet/Sequential[layer4]/Bottleneck[1]/relu_2"; +"262 ResNet/Sequential[layer4]/Bottleneck[1]/relu_2" -> "263 ResNet/Sequential[layer4]/Bottleneck[1]/SymmetricQuantizer/symmetric_quantize_2"; +"263 ResNet/Sequential[layer4]/Bottleneck[1]/SymmetricQuantizer/symmetric_quantize_2" -> "265 ResNet/Sequential[layer4]/Bottleneck[2]/NNCFConv2d[conv1]/conv2d_0"; +"263 ResNet/Sequential[layer4]/Bottleneck[1]/SymmetricQuantizer/symmetric_quantize_2" -> "277 ResNet/Sequential[layer4]/Bottleneck[2]/__iadd___0"; +"264 ResNet/Sequential[layer4]/Bottleneck[2]/NNCFConv2d[conv1]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "265 ResNet/Sequential[layer4]/Bottleneck[2]/NNCFConv2d[conv1]/conv2d_0"; +"265 ResNet/Sequential[layer4]/Bottleneck[2]/NNCFConv2d[conv1]/conv2d_0" -> "266 ResNet/Sequential[layer4]/Bottleneck[2]/NNCFBatchNorm2d[bn1]/batch_norm_0"; +"266 ResNet/Sequential[layer4]/Bottleneck[2]/NNCFBatchNorm2d[bn1]/batch_norm_0" -> "267 ResNet/Sequential[layer4]/Bottleneck[2]/relu_0"; +"267 ResNet/Sequential[layer4]/Bottleneck[2]/relu_0" -> "268 ResNet/Sequential[layer4]/Bottleneck[2]/SymmetricQuantizer/symmetric_quantize_0"; +"268 ResNet/Sequential[layer4]/Bottleneck[2]/SymmetricQuantizer/symmetric_quantize_0" -> "270 ResNet/Sequential[layer4]/Bottleneck[2]/NNCFConv2d[conv2]/conv2d_0"; +"269 ResNet/Sequential[layer4]/Bottleneck[2]/NNCFConv2d[conv2]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "270 ResNet/Sequential[layer4]/Bottleneck[2]/NNCFConv2d[conv2]/conv2d_0"; +"270 ResNet/Sequential[layer4]/Bottleneck[2]/NNCFConv2d[conv2]/conv2d_0" -> "271 ResNet/Sequential[layer4]/Bottleneck[2]/NNCFBatchNorm2d[bn2]/batch_norm_0"; +"271 ResNet/Sequential[layer4]/Bottleneck[2]/NNCFBatchNorm2d[bn2]/batch_norm_0" -> "272 ResNet/Sequential[layer4]/Bottleneck[2]/relu_1"; +"272 ResNet/Sequential[layer4]/Bottleneck[2]/relu_1" -> "273 ResNet/Sequential[layer4]/Bottleneck[2]/SymmetricQuantizer/symmetric_quantize_1"; +"273 ResNet/Sequential[layer4]/Bottleneck[2]/SymmetricQuantizer/symmetric_quantize_1" -> "275 ResNet/Sequential[layer4]/Bottleneck[2]/NNCFConv2d[conv3]/conv2d_0"; +"274 ResNet/Sequential[layer4]/Bottleneck[2]/NNCFConv2d[conv3]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "275 ResNet/Sequential[layer4]/Bottleneck[2]/NNCFConv2d[conv3]/conv2d_0"; +"275 ResNet/Sequential[layer4]/Bottleneck[2]/NNCFConv2d[conv3]/conv2d_0" -> "276 ResNet/Sequential[layer4]/Bottleneck[2]/NNCFBatchNorm2d[bn3]/batch_norm_0"; +"276 ResNet/Sequential[layer4]/Bottleneck[2]/NNCFBatchNorm2d[bn3]/batch_norm_0" -> "277 ResNet/Sequential[layer4]/Bottleneck[2]/__iadd___0"; +"277 ResNet/Sequential[layer4]/Bottleneck[2]/__iadd___0" -> "278 ResNet/Sequential[layer4]/Bottleneck[2]/relu_2"; +"278 
ResNet/Sequential[layer4]/Bottleneck[2]/relu_2" -> "279 ResNet/Sequential[layer4]/Bottleneck[2]/SymmetricQuantizer/symmetric_quantize_2"; +"279 ResNet/Sequential[layer4]/Bottleneck[2]/SymmetricQuantizer/symmetric_quantize_2" -> "280 ResNet/AdaptiveAvgPool2d[avgpool]/adaptive_avg_pool2d_0"; +"280 ResNet/AdaptiveAvgPool2d[avgpool]/adaptive_avg_pool2d_0" -> "281 ResNet/AdaptiveAvgPool2d[avgpool]/SymmetricQuantizer/symmetric_quantize_0"; +"281 ResNet/AdaptiveAvgPool2d[avgpool]/SymmetricQuantizer/symmetric_quantize_0" -> "282 ResNet/flatten_0"; +"282 ResNet/flatten_0" -> "284 ResNet/NNCFLinear[linear]/linear_0"; +"283 ResNet/NNCFLinear[linear]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "284 ResNet/NNCFLinear[linear]/linear_0"; +"284 ResNet/NNCFLinear[linear]/linear_0" -> "285 /nncf_model_output_0"; +} diff --git a/tests/torch/data/reference_graphs/quantized/symmetric/lstm_cell.dot b/tests/torch/data/reference_graphs/quantized/symmetric/lstm_cell.dot index b3314efd450..63777a9d506 100644 --- a/tests/torch/data/reference_graphs/quantized/symmetric/lstm_cell.dot +++ b/tests/torch/data/reference_graphs/quantized/symmetric/lstm_cell.dot @@ -56,8 +56,8 @@ strict digraph { "21 LSTMCellNNCF/LSTMCellForwardNNCF[cell]/__mul___1" -> "22 LSTMCellNNCF/LSTMCellForwardNNCF[cell]/SymmetricQuantizer/symmetric_quantize_6"; "22 LSTMCellNNCF/LSTMCellForwardNNCF[cell]/SymmetricQuantizer/symmetric_quantize_6" -> "23 LSTMCellNNCF/LSTMCellForwardNNCF[cell]/__add___1"; "23 LSTMCellNNCF/LSTMCellForwardNNCF[cell]/__add___1" -> "24 LSTMCellNNCF/LSTMCellForwardNNCF[cell]/SymmetricQuantizer/symmetric_quantize_7"; +"23 LSTMCellNNCF/LSTMCellForwardNNCF[cell]/__add___1" -> "29 /nncf_model_output_1"; "24 LSTMCellNNCF/LSTMCellForwardNNCF[cell]/SymmetricQuantizer/symmetric_quantize_7" -> "25 LSTMCellNNCF/LSTMCellForwardNNCF[cell]/tanh_1"; -"24 LSTMCellNNCF/LSTMCellForwardNNCF[cell]/SymmetricQuantizer/symmetric_quantize_7" -> "29 /nncf_model_output_1"; "25 LSTMCellNNCF/LSTMCellForwardNNCF[cell]/tanh_1" -> "26 LSTMCellNNCF/LSTMCellForwardNNCF[cell]/SymmetricQuantizer/symmetric_quantize_8"; "26 LSTMCellNNCF/LSTMCellForwardNNCF[cell]/SymmetricQuantizer/symmetric_quantize_8" -> "27 LSTMCellNNCF/LSTMCellForwardNNCF[cell]/__mul___2"; "27 LSTMCellNNCF/LSTMCellForwardNNCF[cell]/__mul___2" -> "28 /nncf_model_output_0"; diff --git a/tests/torch/data/reference_graphs/quantized/synthetic_model/MHA.dot b/tests/torch/data/reference_graphs/quantized/synthetic_model/MHA.dot index b8a5ddd1fb9..17983c63255 100644 --- a/tests/torch/data/reference_graphs/quantized/synthetic_model/MHA.dot +++ b/tests/torch/data/reference_graphs/quantized/synthetic_model/MHA.dot @@ -11,34 +11,29 @@ strict digraph { "9 TestModel/MultiheadAttention[_layer]/SymmetricQuantizer/symmetric_quantize_1" [id=9, type=symmetric_quantize]; "10 TestModel/MultiheadAttention[_layer]/linear_2" [id=10, type=linear]; "11 TestModel/MultiheadAttention[_layer]/SymmetricQuantizer/symmetric_quantize_2" [id=11, type=symmetric_quantize]; -"12 TestModel/MultiheadAttention[_layer]/contiguous_0" [id=12, type=contiguous]; -"13 TestModel/MultiheadAttention[_layer]/view_0" [id=13, type=view]; -"14 TestModel/MultiheadAttention[_layer]/transpose_0" [id=14, type=transpose]; -"15 TestModel/MultiheadAttention[_layer]/contiguous_1" [id=15, type=contiguous]; -"16 TestModel/MultiheadAttention[_layer]/view_1" [id=16, type=view]; -"17 TestModel/MultiheadAttention[_layer]/transpose_1" [id=17, type=transpose]; -"18 TestModel/MultiheadAttention[_layer]/contiguous_2" 
[id=18, type=contiguous]; -"19 TestModel/MultiheadAttention[_layer]/view_2" [id=19, type=view]; -"20 TestModel/MultiheadAttention[_layer]/transpose_2" [id=20, type=transpose]; -"21 TestModel/MultiheadAttention[_layer]/__truediv___0" [id=21, type=__truediv__]; -"22 TestModel/MultiheadAttention[_layer]/SymmetricQuantizer/symmetric_quantize_3" [id=22, type=symmetric_quantize]; -"23 TestModel/MultiheadAttention[_layer]/transpose_3" [id=23, type=transpose]; -"24 TestModel/MultiheadAttention[_layer]/bmm_0" [id=24, type=bmm]; -"25 TestModel/MultiheadAttention[_layer]/softmax_0" [id=25, type=softmax]; -"26 TestModel/MultiheadAttention[_layer]/SymmetricQuantizer/symmetric_quantize_4" [id=26, type=symmetric_quantize]; -"27 TestModel/MultiheadAttention[_layer]/bmm_1" [id=27, type=bmm]; -"28 TestModel/MultiheadAttention[_layer]/SymmetricQuantizer/symmetric_quantize_5" [id=28, type=symmetric_quantize]; -"29 TestModel/MultiheadAttention[_layer]/transpose_4" [id=29, type=transpose]; -"30 TestModel/MultiheadAttention[_layer]/contiguous_3" [id=30, type=contiguous]; -"31 TestModel/MultiheadAttention[_layer]/view_3" [id=31, type=view]; -"32 TestModel/MultiheadAttention[_layer]/linear_3" [id=32, type=linear]; -"33 TestModel/MultiheadAttention[_layer]/view_4" [id=33, type=view]; -"34 TestModel/MultiheadAttention[_layer]/view_5" [id=34, type=view]; -"35 TestModel/MultiheadAttention[_layer]/sum_0" [id=35, type=sum]; -"36 TestModel/MultiheadAttention[_layer]/SymmetricQuantizer/symmetric_quantize_6" [id=36, type=symmetric_quantize]; -"37 TestModel/MultiheadAttention[_layer]/__truediv___1" [id=37, type=__truediv__]; -"38 /nncf_model_output_0" [id=38, type=nncf_model_output]; -"39 /nncf_model_output_1" [id=39, type=nncf_model_output]; +"12 TestModel/MultiheadAttention[_layer]/view_0" [id=12, type=view]; +"13 TestModel/MultiheadAttention[_layer]/transpose_0" [id=13, type=transpose]; +"14 TestModel/MultiheadAttention[_layer]/view_1" [id=14, type=view]; +"15 TestModel/MultiheadAttention[_layer]/transpose_1" [id=15, type=transpose]; +"16 TestModel/MultiheadAttention[_layer]/view_2" [id=16, type=view]; +"17 TestModel/MultiheadAttention[_layer]/transpose_2" [id=17, type=transpose]; +"18 TestModel/MultiheadAttention[_layer]/__truediv___0" [id=18, type=__truediv__]; +"19 TestModel/MultiheadAttention[_layer]/SymmetricQuantizer/symmetric_quantize_3" [id=19, type=symmetric_quantize]; +"20 TestModel/MultiheadAttention[_layer]/transpose_3" [id=20, type=transpose]; +"21 TestModel/MultiheadAttention[_layer]/bmm_0" [id=21, type=bmm]; +"22 TestModel/MultiheadAttention[_layer]/softmax_0" [id=22, type=softmax]; +"23 TestModel/MultiheadAttention[_layer]/SymmetricQuantizer/symmetric_quantize_4" [id=23, type=symmetric_quantize]; +"24 TestModel/MultiheadAttention[_layer]/bmm_1" [id=24, type=bmm]; +"25 TestModel/MultiheadAttention[_layer]/SymmetricQuantizer/symmetric_quantize_5" [id=25, type=symmetric_quantize]; +"26 TestModel/MultiheadAttention[_layer]/transpose_4" [id=26, type=transpose]; +"27 TestModel/MultiheadAttention[_layer]/contiguous_0" [id=27, type=contiguous]; +"28 TestModel/MultiheadAttention[_layer]/view_3" [id=28, type=view]; +"29 TestModel/MultiheadAttention[_layer]/linear_3" [id=29, type=linear]; +"30 TestModel/MultiheadAttention[_layer]/view_4" [id=30, type=view]; +"31 TestModel/MultiheadAttention[_layer]/view_5" [id=31, type=view]; +"32 TestModel/MultiheadAttention[_layer]/mean_0" [id=32, type=mean]; +"33 /nncf_model_output_0" [id=33, type=nncf_model_output]; +"34 /nncf_model_output_1" [id=34, type=nncf_model_output]; "0 
/nncf_model_input_0" -> "1 SymmetricQuantizer/symmetric_quantize_0"; "1 SymmetricQuantizer/symmetric_quantize_0" -> "6 TestModel/MultiheadAttention[_layer]/linear_0"; "2 /nncf_model_input_1" -> "3 SymmetricQuantizer/symmetric_quantize_1"; @@ -46,36 +41,31 @@ strict digraph { "4 /nncf_model_input_2" -> "5 SymmetricQuantizer/symmetric_quantize_2"; "5 SymmetricQuantizer/symmetric_quantize_2" -> "10 TestModel/MultiheadAttention[_layer]/linear_2"; "6 TestModel/MultiheadAttention[_layer]/linear_0" -> "7 TestModel/MultiheadAttention[_layer]/SymmetricQuantizer/symmetric_quantize_0"; -"7 TestModel/MultiheadAttention[_layer]/SymmetricQuantizer/symmetric_quantize_0" -> "12 TestModel/MultiheadAttention[_layer]/contiguous_0"; +"7 TestModel/MultiheadAttention[_layer]/SymmetricQuantizer/symmetric_quantize_0" -> "12 TestModel/MultiheadAttention[_layer]/view_0"; "8 TestModel/MultiheadAttention[_layer]/linear_1" -> "9 TestModel/MultiheadAttention[_layer]/SymmetricQuantizer/symmetric_quantize_1"; -"9 TestModel/MultiheadAttention[_layer]/SymmetricQuantizer/symmetric_quantize_1" -> "15 TestModel/MultiheadAttention[_layer]/contiguous_1"; +"9 TestModel/MultiheadAttention[_layer]/SymmetricQuantizer/symmetric_quantize_1" -> "14 TestModel/MultiheadAttention[_layer]/view_1"; "10 TestModel/MultiheadAttention[_layer]/linear_2" -> "11 TestModel/MultiheadAttention[_layer]/SymmetricQuantizer/symmetric_quantize_2"; -"11 TestModel/MultiheadAttention[_layer]/SymmetricQuantizer/symmetric_quantize_2" -> "18 TestModel/MultiheadAttention[_layer]/contiguous_2"; -"12 TestModel/MultiheadAttention[_layer]/contiguous_0" -> "13 TestModel/MultiheadAttention[_layer]/view_0"; -"13 TestModel/MultiheadAttention[_layer]/view_0" -> "14 TestModel/MultiheadAttention[_layer]/transpose_0"; -"14 TestModel/MultiheadAttention[_layer]/transpose_0" -> "21 TestModel/MultiheadAttention[_layer]/__truediv___0"; -"15 TestModel/MultiheadAttention[_layer]/contiguous_1" -> "16 TestModel/MultiheadAttention[_layer]/view_1"; -"16 TestModel/MultiheadAttention[_layer]/view_1" -> "17 TestModel/MultiheadAttention[_layer]/transpose_1"; -"17 TestModel/MultiheadAttention[_layer]/transpose_1" -> "23 TestModel/MultiheadAttention[_layer]/transpose_3"; -"18 TestModel/MultiheadAttention[_layer]/contiguous_2" -> "19 TestModel/MultiheadAttention[_layer]/view_2"; -"19 TestModel/MultiheadAttention[_layer]/view_2" -> "20 TestModel/MultiheadAttention[_layer]/transpose_2"; -"20 TestModel/MultiheadAttention[_layer]/transpose_2" -> "27 TestModel/MultiheadAttention[_layer]/bmm_1"; -"21 TestModel/MultiheadAttention[_layer]/__truediv___0" -> "22 TestModel/MultiheadAttention[_layer]/SymmetricQuantizer/symmetric_quantize_3"; -"22 TestModel/MultiheadAttention[_layer]/SymmetricQuantizer/symmetric_quantize_3" -> "24 TestModel/MultiheadAttention[_layer]/bmm_0"; -"23 TestModel/MultiheadAttention[_layer]/transpose_3" -> "24 TestModel/MultiheadAttention[_layer]/bmm_0"; -"24 TestModel/MultiheadAttention[_layer]/bmm_0" -> "25 TestModel/MultiheadAttention[_layer]/softmax_0"; -"25 TestModel/MultiheadAttention[_layer]/softmax_0" -> "26 TestModel/MultiheadAttention[_layer]/SymmetricQuantizer/symmetric_quantize_4"; -"25 TestModel/MultiheadAttention[_layer]/softmax_0" -> "34 TestModel/MultiheadAttention[_layer]/view_5"; -"26 TestModel/MultiheadAttention[_layer]/SymmetricQuantizer/symmetric_quantize_4" -> "27 TestModel/MultiheadAttention[_layer]/bmm_1"; -"27 TestModel/MultiheadAttention[_layer]/bmm_1" -> "28 TestModel/MultiheadAttention[_layer]/SymmetricQuantizer/symmetric_quantize_5"; -"28 
TestModel/MultiheadAttention[_layer]/SymmetricQuantizer/symmetric_quantize_5" -> "29 TestModel/MultiheadAttention[_layer]/transpose_4"; -"29 TestModel/MultiheadAttention[_layer]/transpose_4" -> "30 TestModel/MultiheadAttention[_layer]/contiguous_3"; -"30 TestModel/MultiheadAttention[_layer]/contiguous_3" -> "31 TestModel/MultiheadAttention[_layer]/view_3"; -"31 TestModel/MultiheadAttention[_layer]/view_3" -> "32 TestModel/MultiheadAttention[_layer]/linear_3"; -"32 TestModel/MultiheadAttention[_layer]/linear_3" -> "33 TestModel/MultiheadAttention[_layer]/view_4"; -"33 TestModel/MultiheadAttention[_layer]/view_4" -> "38 /nncf_model_output_0"; -"34 TestModel/MultiheadAttention[_layer]/view_5" -> "35 TestModel/MultiheadAttention[_layer]/sum_0"; -"35 TestModel/MultiheadAttention[_layer]/sum_0" -> "36 TestModel/MultiheadAttention[_layer]/SymmetricQuantizer/symmetric_quantize_6"; -"36 TestModel/MultiheadAttention[_layer]/SymmetricQuantizer/symmetric_quantize_6" -> "37 TestModel/MultiheadAttention[_layer]/__truediv___1"; -"37 TestModel/MultiheadAttention[_layer]/__truediv___1" -> "39 /nncf_model_output_1"; +"11 TestModel/MultiheadAttention[_layer]/SymmetricQuantizer/symmetric_quantize_2" -> "16 TestModel/MultiheadAttention[_layer]/view_2"; +"12 TestModel/MultiheadAttention[_layer]/view_0" -> "13 TestModel/MultiheadAttention[_layer]/transpose_0"; +"13 TestModel/MultiheadAttention[_layer]/transpose_0" -> "18 TestModel/MultiheadAttention[_layer]/__truediv___0"; +"14 TestModel/MultiheadAttention[_layer]/view_1" -> "15 TestModel/MultiheadAttention[_layer]/transpose_1"; +"15 TestModel/MultiheadAttention[_layer]/transpose_1" -> "20 TestModel/MultiheadAttention[_layer]/transpose_3"; +"16 TestModel/MultiheadAttention[_layer]/view_2" -> "17 TestModel/MultiheadAttention[_layer]/transpose_2"; +"17 TestModel/MultiheadAttention[_layer]/transpose_2" -> "24 TestModel/MultiheadAttention[_layer]/bmm_1"; +"18 TestModel/MultiheadAttention[_layer]/__truediv___0" -> "19 TestModel/MultiheadAttention[_layer]/SymmetricQuantizer/symmetric_quantize_3"; +"19 TestModel/MultiheadAttention[_layer]/SymmetricQuantizer/symmetric_quantize_3" -> "21 TestModel/MultiheadAttention[_layer]/bmm_0"; +"20 TestModel/MultiheadAttention[_layer]/transpose_3" -> "21 TestModel/MultiheadAttention[_layer]/bmm_0"; +"21 TestModel/MultiheadAttention[_layer]/bmm_0" -> "22 TestModel/MultiheadAttention[_layer]/softmax_0"; +"22 TestModel/MultiheadAttention[_layer]/softmax_0" -> "23 TestModel/MultiheadAttention[_layer]/SymmetricQuantizer/symmetric_quantize_4"; +"23 TestModel/MultiheadAttention[_layer]/SymmetricQuantizer/symmetric_quantize_4" -> "24 TestModel/MultiheadAttention[_layer]/bmm_1"; +"23 TestModel/MultiheadAttention[_layer]/SymmetricQuantizer/symmetric_quantize_4" -> "31 TestModel/MultiheadAttention[_layer]/view_5"; +"24 TestModel/MultiheadAttention[_layer]/bmm_1" -> "25 TestModel/MultiheadAttention[_layer]/SymmetricQuantizer/symmetric_quantize_5"; +"25 TestModel/MultiheadAttention[_layer]/SymmetricQuantizer/symmetric_quantize_5" -> "26 TestModel/MultiheadAttention[_layer]/transpose_4"; +"26 TestModel/MultiheadAttention[_layer]/transpose_4" -> "27 TestModel/MultiheadAttention[_layer]/contiguous_0"; +"27 TestModel/MultiheadAttention[_layer]/contiguous_0" -> "28 TestModel/MultiheadAttention[_layer]/view_3"; +"28 TestModel/MultiheadAttention[_layer]/view_3" -> "29 TestModel/MultiheadAttention[_layer]/linear_3"; +"29 TestModel/MultiheadAttention[_layer]/linear_3" -> "30 TestModel/MultiheadAttention[_layer]/view_4"; +"30 
TestModel/MultiheadAttention[_layer]/view_4" -> "33 /nncf_model_output_0"; +"31 TestModel/MultiheadAttention[_layer]/view_5" -> "32 TestModel/MultiheadAttention[_layer]/mean_0"; +"32 TestModel/MultiheadAttention[_layer]/mean_0" -> "34 /nncf_model_output_1"; } diff --git a/tests/torch/data/reference_graphs/quantized/synthetic_model/MHA_single_input.dot b/tests/torch/data/reference_graphs/quantized/synthetic_model/MHA_single_input.dot index 267a8354b3a..22a0af5d152 100644 --- a/tests/torch/data/reference_graphs/quantized/synthetic_model/MHA_single_input.dot +++ b/tests/torch/data/reference_graphs/quantized/synthetic_model/MHA_single_input.dot @@ -2,68 +2,72 @@ strict digraph { "0 /nncf_model_input_0" [id=0, type=nncf_model_input]; "1 SymmetricQuantizer/symmetric_quantize_0" [id=1, type=symmetric_quantize]; "2 MHA_single_input/MultiheadAttention[mha]/linear_0" [id=2, type=linear]; -"3 MHA_single_input/MultiheadAttention[mha]/SymmetricQuantizer/symmetric_quantize_0" [id=3, type=symmetric_quantize]; -"4 MHA_single_input/MultiheadAttention[mha]/chunk_0" [id=4, type=chunk]; -"5 MHA_single_input/MultiheadAttention[mha]/contiguous_0" [id=5, type=contiguous]; -"6 MHA_single_input/MultiheadAttention[mha]/view_0" [id=6, type=view]; -"7 MHA_single_input/MultiheadAttention[mha]/transpose_0" [id=7, type=transpose]; -"8 MHA_single_input/MultiheadAttention[mha]/contiguous_1" [id=8, type=contiguous]; -"9 MHA_single_input/MultiheadAttention[mha]/view_1" [id=9, type=view]; -"10 MHA_single_input/MultiheadAttention[mha]/transpose_1" [id=10, type=transpose]; -"11 MHA_single_input/MultiheadAttention[mha]/contiguous_2" [id=11, type=contiguous]; -"12 MHA_single_input/MultiheadAttention[mha]/view_2" [id=12, type=view]; -"13 MHA_single_input/MultiheadAttention[mha]/transpose_2" [id=13, type=transpose]; -"14 MHA_single_input/MultiheadAttention[mha]/__truediv___0" [id=14, type=__truediv__]; -"15 MHA_single_input/MultiheadAttention[mha]/SymmetricQuantizer/symmetric_quantize_1" [id=15, type=symmetric_quantize]; -"16 MHA_single_input/MultiheadAttention[mha]/transpose_3" [id=16, type=transpose]; -"17 MHA_single_input/MultiheadAttention[mha]/bmm_0" [id=17, type=bmm]; -"18 MHA_single_input/MultiheadAttention[mha]/softmax_0" [id=18, type=softmax]; -"19 MHA_single_input/MultiheadAttention[mha]/SymmetricQuantizer/symmetric_quantize_2" [id=19, type=symmetric_quantize]; -"20 MHA_single_input/MultiheadAttention[mha]/bmm_1" [id=20, type=bmm]; -"21 MHA_single_input/MultiheadAttention[mha]/SymmetricQuantizer/symmetric_quantize_3" [id=21, type=symmetric_quantize]; -"22 MHA_single_input/MultiheadAttention[mha]/transpose_4" [id=22, type=transpose]; -"23 MHA_single_input/MultiheadAttention[mha]/contiguous_3" [id=23, type=contiguous]; -"24 MHA_single_input/MultiheadAttention[mha]/view_3" [id=24, type=view]; -"25 MHA_single_input/MultiheadAttention[mha]/linear_1" [id=25, type=linear]; -"26 MHA_single_input/MultiheadAttention[mha]/view_4" [id=26, type=view]; -"27 MHA_single_input/MultiheadAttention[mha]/view_5" [id=27, type=view]; -"28 MHA_single_input/MultiheadAttention[mha]/sum_0" [id=28, type=sum]; -"29 MHA_single_input/MultiheadAttention[mha]/SymmetricQuantizer/symmetric_quantize_4" [id=29, type=symmetric_quantize]; -"30 MHA_single_input/MultiheadAttention[mha]/__truediv___1" [id=30, type=__truediv__]; -"31 /nncf_model_output_0" [id=31, type=nncf_model_output]; -"32 /nncf_model_output_1" [id=32, type=nncf_model_output]; +"3 MHA_single_input/MultiheadAttention[mha]/unflatten_0" [id=3, type=unflatten]; +"4 
MHA_single_input/MultiheadAttention[mha]/SymmetricQuantizer/symmetric_quantize_0" [id=4, type=symmetric_quantize]; +"5 MHA_single_input/MultiheadAttention[mha]/unsqueeze_0" [id=5, type=unsqueeze]; +"6 MHA_single_input/MultiheadAttention[mha]/transpose_0" [id=6, type=transpose]; +"7 MHA_single_input/MultiheadAttention[mha]/squeeze_0" [id=7, type=squeeze]; +"8 MHA_single_input/MultiheadAttention[mha]/contiguous_0" [id=8, type=contiguous]; +"9 MHA_single_input/MultiheadAttention[mha]/__getitem___0" [id=9, type=__getitem__]; +"10 MHA_single_input/MultiheadAttention[mha]/__getitem___1" [id=10, type=__getitem__]; +"11 MHA_single_input/MultiheadAttention[mha]/__getitem___2" [id=11, type=__getitem__]; +"12 MHA_single_input/MultiheadAttention[mha]/view_0" [id=12, type=view]; +"13 MHA_single_input/MultiheadAttention[mha]/transpose_1" [id=13, type=transpose]; +"14 MHA_single_input/MultiheadAttention[mha]/view_1" [id=14, type=view]; +"15 MHA_single_input/MultiheadAttention[mha]/transpose_2" [id=15, type=transpose]; +"16 MHA_single_input/MultiheadAttention[mha]/view_2" [id=16, type=view]; +"17 MHA_single_input/MultiheadAttention[mha]/transpose_3" [id=17, type=transpose]; +"18 MHA_single_input/MultiheadAttention[mha]/__truediv___0" [id=18, type=__truediv__]; +"19 MHA_single_input/MultiheadAttention[mha]/SymmetricQuantizer/symmetric_quantize_1" [id=19, type=symmetric_quantize]; +"20 MHA_single_input/MultiheadAttention[mha]/transpose_4" [id=20, type=transpose]; +"21 MHA_single_input/MultiheadAttention[mha]/bmm_0" [id=21, type=bmm]; +"22 MHA_single_input/MultiheadAttention[mha]/softmax_0" [id=22, type=softmax]; +"23 MHA_single_input/MultiheadAttention[mha]/SymmetricQuantizer/symmetric_quantize_2" [id=23, type=symmetric_quantize]; +"24 MHA_single_input/MultiheadAttention[mha]/bmm_1" [id=24, type=bmm]; +"25 MHA_single_input/MultiheadAttention[mha]/SymmetricQuantizer/symmetric_quantize_3" [id=25, type=symmetric_quantize]; +"26 MHA_single_input/MultiheadAttention[mha]/transpose_5" [id=26, type=transpose]; +"27 MHA_single_input/MultiheadAttention[mha]/contiguous_1" [id=27, type=contiguous]; +"28 MHA_single_input/MultiheadAttention[mha]/view_3" [id=28, type=view]; +"29 MHA_single_input/MultiheadAttention[mha]/linear_1" [id=29, type=linear]; +"30 MHA_single_input/MultiheadAttention[mha]/view_4" [id=30, type=view]; +"31 MHA_single_input/MultiheadAttention[mha]/view_5" [id=31, type=view]; +"32 MHA_single_input/MultiheadAttention[mha]/mean_0" [id=32, type=mean]; +"33 /nncf_model_output_0" [id=33, type=nncf_model_output]; +"34 /nncf_model_output_1" [id=34, type=nncf_model_output]; "0 /nncf_model_input_0" -> "1 SymmetricQuantizer/symmetric_quantize_0"; "1 SymmetricQuantizer/symmetric_quantize_0" -> "2 MHA_single_input/MultiheadAttention[mha]/linear_0"; -"2 MHA_single_input/MultiheadAttention[mha]/linear_0" -> "3 MHA_single_input/MultiheadAttention[mha]/SymmetricQuantizer/symmetric_quantize_0"; -"3 MHA_single_input/MultiheadAttention[mha]/SymmetricQuantizer/symmetric_quantize_0" -> "4 MHA_single_input/MultiheadAttention[mha]/chunk_0"; -"4 MHA_single_input/MultiheadAttention[mha]/chunk_0" -> "5 MHA_single_input/MultiheadAttention[mha]/contiguous_0"; -"4 MHA_single_input/MultiheadAttention[mha]/chunk_0" -> "8 MHA_single_input/MultiheadAttention[mha]/contiguous_1"; -"4 MHA_single_input/MultiheadAttention[mha]/chunk_0" -> "11 MHA_single_input/MultiheadAttention[mha]/contiguous_2"; -"5 MHA_single_input/MultiheadAttention[mha]/contiguous_0" -> "6 MHA_single_input/MultiheadAttention[mha]/view_0"; -"6 
MHA_single_input/MultiheadAttention[mha]/view_0" -> "7 MHA_single_input/MultiheadAttention[mha]/transpose_0"; -"7 MHA_single_input/MultiheadAttention[mha]/transpose_0" -> "14 MHA_single_input/MultiheadAttention[mha]/__truediv___0"; -"8 MHA_single_input/MultiheadAttention[mha]/contiguous_1" -> "9 MHA_single_input/MultiheadAttention[mha]/view_1"; -"9 MHA_single_input/MultiheadAttention[mha]/view_1" -> "10 MHA_single_input/MultiheadAttention[mha]/transpose_1"; -"10 MHA_single_input/MultiheadAttention[mha]/transpose_1" -> "16 MHA_single_input/MultiheadAttention[mha]/transpose_3"; -"11 MHA_single_input/MultiheadAttention[mha]/contiguous_2" -> "12 MHA_single_input/MultiheadAttention[mha]/view_2"; -"12 MHA_single_input/MultiheadAttention[mha]/view_2" -> "13 MHA_single_input/MultiheadAttention[mha]/transpose_2"; -"13 MHA_single_input/MultiheadAttention[mha]/transpose_2" -> "20 MHA_single_input/MultiheadAttention[mha]/bmm_1"; -"14 MHA_single_input/MultiheadAttention[mha]/__truediv___0" -> "15 MHA_single_input/MultiheadAttention[mha]/SymmetricQuantizer/symmetric_quantize_1"; -"15 MHA_single_input/MultiheadAttention[mha]/SymmetricQuantizer/symmetric_quantize_1" -> "17 MHA_single_input/MultiheadAttention[mha]/bmm_0"; -"16 MHA_single_input/MultiheadAttention[mha]/transpose_3" -> "17 MHA_single_input/MultiheadAttention[mha]/bmm_0"; -"17 MHA_single_input/MultiheadAttention[mha]/bmm_0" -> "18 MHA_single_input/MultiheadAttention[mha]/softmax_0"; -"18 MHA_single_input/MultiheadAttention[mha]/softmax_0" -> "19 MHA_single_input/MultiheadAttention[mha]/SymmetricQuantizer/symmetric_quantize_2"; -"18 MHA_single_input/MultiheadAttention[mha]/softmax_0" -> "27 MHA_single_input/MultiheadAttention[mha]/view_5"; -"19 MHA_single_input/MultiheadAttention[mha]/SymmetricQuantizer/symmetric_quantize_2" -> "20 MHA_single_input/MultiheadAttention[mha]/bmm_1"; -"20 MHA_single_input/MultiheadAttention[mha]/bmm_1" -> "21 MHA_single_input/MultiheadAttention[mha]/SymmetricQuantizer/symmetric_quantize_3"; -"21 MHA_single_input/MultiheadAttention[mha]/SymmetricQuantizer/symmetric_quantize_3" -> "22 MHA_single_input/MultiheadAttention[mha]/transpose_4"; -"22 MHA_single_input/MultiheadAttention[mha]/transpose_4" -> "23 MHA_single_input/MultiheadAttention[mha]/contiguous_3"; -"23 MHA_single_input/MultiheadAttention[mha]/contiguous_3" -> "24 MHA_single_input/MultiheadAttention[mha]/view_3"; -"24 MHA_single_input/MultiheadAttention[mha]/view_3" -> "25 MHA_single_input/MultiheadAttention[mha]/linear_1"; -"25 MHA_single_input/MultiheadAttention[mha]/linear_1" -> "26 MHA_single_input/MultiheadAttention[mha]/view_4"; -"26 MHA_single_input/MultiheadAttention[mha]/view_4" -> "31 /nncf_model_output_0"; -"27 MHA_single_input/MultiheadAttention[mha]/view_5" -> "28 MHA_single_input/MultiheadAttention[mha]/sum_0"; -"28 MHA_single_input/MultiheadAttention[mha]/sum_0" -> "29 MHA_single_input/MultiheadAttention[mha]/SymmetricQuantizer/symmetric_quantize_4"; -"29 MHA_single_input/MultiheadAttention[mha]/SymmetricQuantizer/symmetric_quantize_4" -> "30 MHA_single_input/MultiheadAttention[mha]/__truediv___1"; -"30 MHA_single_input/MultiheadAttention[mha]/__truediv___1" -> "32 /nncf_model_output_1"; +"2 MHA_single_input/MultiheadAttention[mha]/linear_0" -> "3 MHA_single_input/MultiheadAttention[mha]/unflatten_0"; +"3 MHA_single_input/MultiheadAttention[mha]/unflatten_0" -> "4 MHA_single_input/MultiheadAttention[mha]/SymmetricQuantizer/symmetric_quantize_0"; +"4 MHA_single_input/MultiheadAttention[mha]/SymmetricQuantizer/symmetric_quantize_0" -> "5 
MHA_single_input/MultiheadAttention[mha]/unsqueeze_0"; +"5 MHA_single_input/MultiheadAttention[mha]/unsqueeze_0" -> "6 MHA_single_input/MultiheadAttention[mha]/transpose_0"; +"6 MHA_single_input/MultiheadAttention[mha]/transpose_0" -> "7 MHA_single_input/MultiheadAttention[mha]/squeeze_0"; +"7 MHA_single_input/MultiheadAttention[mha]/squeeze_0" -> "8 MHA_single_input/MultiheadAttention[mha]/contiguous_0"; +"8 MHA_single_input/MultiheadAttention[mha]/contiguous_0" -> "9 MHA_single_input/MultiheadAttention[mha]/__getitem___0"; +"8 MHA_single_input/MultiheadAttention[mha]/contiguous_0" -> "10 MHA_single_input/MultiheadAttention[mha]/__getitem___1"; +"8 MHA_single_input/MultiheadAttention[mha]/contiguous_0" -> "11 MHA_single_input/MultiheadAttention[mha]/__getitem___2"; +"9 MHA_single_input/MultiheadAttention[mha]/__getitem___0" -> "12 MHA_single_input/MultiheadAttention[mha]/view_0"; +"10 MHA_single_input/MultiheadAttention[mha]/__getitem___1" -> "14 MHA_single_input/MultiheadAttention[mha]/view_1"; +"11 MHA_single_input/MultiheadAttention[mha]/__getitem___2" -> "16 MHA_single_input/MultiheadAttention[mha]/view_2"; +"12 MHA_single_input/MultiheadAttention[mha]/view_0" -> "13 MHA_single_input/MultiheadAttention[mha]/transpose_1"; +"13 MHA_single_input/MultiheadAttention[mha]/transpose_1" -> "18 MHA_single_input/MultiheadAttention[mha]/__truediv___0"; +"14 MHA_single_input/MultiheadAttention[mha]/view_1" -> "15 MHA_single_input/MultiheadAttention[mha]/transpose_2"; +"15 MHA_single_input/MultiheadAttention[mha]/transpose_2" -> "20 MHA_single_input/MultiheadAttention[mha]/transpose_4"; +"16 MHA_single_input/MultiheadAttention[mha]/view_2" -> "17 MHA_single_input/MultiheadAttention[mha]/transpose_3"; +"17 MHA_single_input/MultiheadAttention[mha]/transpose_3" -> "24 MHA_single_input/MultiheadAttention[mha]/bmm_1"; +"18 MHA_single_input/MultiheadAttention[mha]/__truediv___0" -> "19 MHA_single_input/MultiheadAttention[mha]/SymmetricQuantizer/symmetric_quantize_1"; +"19 MHA_single_input/MultiheadAttention[mha]/SymmetricQuantizer/symmetric_quantize_1" -> "21 MHA_single_input/MultiheadAttention[mha]/bmm_0"; +"20 MHA_single_input/MultiheadAttention[mha]/transpose_4" -> "21 MHA_single_input/MultiheadAttention[mha]/bmm_0"; +"21 MHA_single_input/MultiheadAttention[mha]/bmm_0" -> "22 MHA_single_input/MultiheadAttention[mha]/softmax_0"; +"22 MHA_single_input/MultiheadAttention[mha]/softmax_0" -> "23 MHA_single_input/MultiheadAttention[mha]/SymmetricQuantizer/symmetric_quantize_2"; +"23 MHA_single_input/MultiheadAttention[mha]/SymmetricQuantizer/symmetric_quantize_2" -> "24 MHA_single_input/MultiheadAttention[mha]/bmm_1"; +"23 MHA_single_input/MultiheadAttention[mha]/SymmetricQuantizer/symmetric_quantize_2" -> "31 MHA_single_input/MultiheadAttention[mha]/view_5"; +"24 MHA_single_input/MultiheadAttention[mha]/bmm_1" -> "25 MHA_single_input/MultiheadAttention[mha]/SymmetricQuantizer/symmetric_quantize_3"; +"25 MHA_single_input/MultiheadAttention[mha]/SymmetricQuantizer/symmetric_quantize_3" -> "26 MHA_single_input/MultiheadAttention[mha]/transpose_5"; +"26 MHA_single_input/MultiheadAttention[mha]/transpose_5" -> "27 MHA_single_input/MultiheadAttention[mha]/contiguous_1"; +"27 MHA_single_input/MultiheadAttention[mha]/contiguous_1" -> "28 MHA_single_input/MultiheadAttention[mha]/view_3"; +"28 MHA_single_input/MultiheadAttention[mha]/view_3" -> "29 MHA_single_input/MultiheadAttention[mha]/linear_1"; +"29 MHA_single_input/MultiheadAttention[mha]/linear_1" -> "30 MHA_single_input/MultiheadAttention[mha]/view_4"; +"30 
MHA_single_input/MultiheadAttention[mha]/view_4" -> "33 /nncf_model_output_0"; +"31 MHA_single_input/MultiheadAttention[mha]/view_5" -> "32 MHA_single_input/MultiheadAttention[mha]/mean_0"; +"32 MHA_single_input/MultiheadAttention[mha]/mean_0" -> "34 /nncf_model_output_1"; } diff --git a/tests/torch/data/reference_graphs/quantized/synthetic_model/MultiOutputSameTensorModel.dot b/tests/torch/data/reference_graphs/quantized/synthetic_model/MultiOutputSameTensorModel.dot index c293f688eee..9917a155038 100644 --- a/tests/torch/data/reference_graphs/quantized/synthetic_model/MultiOutputSameTensorModel.dot +++ b/tests/torch/data/reference_graphs/quantized/synthetic_model/MultiOutputSameTensorModel.dot @@ -1,13 +1,16 @@ strict digraph { "0 /nncf_model_input_0" [id=0, type=nncf_model_input]; -"1 SymmetricQuantizer/symmetric_quantize_0" [id=1, type=symmetric_quantize]; -"2 MultiOutputSameTensorModel/__mul___0" [id=2, type=__mul__]; -"3 /nncf_model_output_0" [id=3, type=nncf_model_output]; -"4 /nncf_model_output_1" [id=4, type=nncf_model_output]; -"5 /nncf_model_output_2" [id=5, type=nncf_model_output]; -"0 /nncf_model_input_0" -> "1 SymmetricQuantizer/symmetric_quantize_0"; -"1 SymmetricQuantizer/symmetric_quantize_0" -> "2 MultiOutputSameTensorModel/__mul___0"; -"1 SymmetricQuantizer/symmetric_quantize_0" -> "3 /nncf_model_output_0"; -"1 SymmetricQuantizer/symmetric_quantize_0" -> "5 /nncf_model_output_2"; -"2 MultiOutputSameTensorModel/__mul___0" -> "4 /nncf_model_output_1"; +"1 MultiOutputSameTensorModel/SymmetricQuantizer/symmetric_quantize_0" [id=1, type=symmetric_quantize]; +"2 MultiOutputSameTensorModel/SymmetricQuantizer/symmetric_quantize_1" [id=2, type=symmetric_quantize]; +"3 MultiOutputSameTensorModel/__mul___0" [id=3, type=__mul__]; +"4 /nncf_model_output_0" [id=4, type=nncf_model_output]; +"5 /nncf_model_output_1" [id=5, type=nncf_model_output]; +"6 /nncf_model_output_2" [id=6, type=nncf_model_output]; +"0 /nncf_model_input_0" -> "1 MultiOutputSameTensorModel/SymmetricQuantizer/symmetric_quantize_0"; +"0 /nncf_model_input_0" -> "2 MultiOutputSameTensorModel/SymmetricQuantizer/symmetric_quantize_1"; +"0 /nncf_model_input_0" -> "4 /nncf_model_output_0"; +"0 /nncf_model_input_0" -> "6 /nncf_model_output_2"; +"1 MultiOutputSameTensorModel/SymmetricQuantizer/symmetric_quantize_0" -> "3 MultiOutputSameTensorModel/__mul___0"; +"2 MultiOutputSameTensorModel/SymmetricQuantizer/symmetric_quantize_1" -> "3 MultiOutputSameTensorModel/__mul___0"; +"3 MultiOutputSameTensorModel/__mul___0" -> "5 /nncf_model_output_1"; } diff --git a/tests/torch/data/reference_graphs/quantized/synthetic_model/OrdinaryModelWithRecurrentInName.dot b/tests/torch/data/reference_graphs/quantized/synthetic_model/OrdinaryModelWithRecurrentInName.dot new file mode 100644 index 00000000000..caa0828d5eb --- /dev/null +++ b/tests/torch/data/reference_graphs/quantized/synthetic_model/OrdinaryModelWithRecurrentInName.dot @@ -0,0 +1,13 @@ +strict digraph { +"0 /nncf_model_input_0" [id=0, type=nncf_model_input]; +"1 SymmetricQuantizer/symmetric_quantize_0" [id=1, type=symmetric_quantize]; +"2 OrdinaryModelWithRecurrentInName/__getitem___0" [id=2, type=__getitem__]; +"3 OrdinaryModelWithRecurrentInName/NNCFConv2d[conv]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" [id=3, type=symmetric_quantize]; +"4 OrdinaryModelWithRecurrentInName/NNCFConv2d[conv]/conv2d_0" [id=4, type=conv2d]; +"5 /nncf_model_output_0" [id=5, type=nncf_model_output]; +"0 /nncf_model_input_0" -> "1 
SymmetricQuantizer/symmetric_quantize_0"; +"1 SymmetricQuantizer/symmetric_quantize_0" -> "2 OrdinaryModelWithRecurrentInName/__getitem___0"; +"2 OrdinaryModelWithRecurrentInName/__getitem___0" -> "4 OrdinaryModelWithRecurrentInName/NNCFConv2d[conv]/conv2d_0"; +"3 OrdinaryModelWithRecurrentInName/NNCFConv2d[conv]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "4 OrdinaryModelWithRecurrentInName/NNCFConv2d[conv]/conv2d_0"; +"4 OrdinaryModelWithRecurrentInName/NNCFConv2d[conv]/conv2d_0" -> "5 /nncf_model_output_0"; +} diff --git a/tests/torch/data/reference_graphs/quantized/synthetic_model/ShiftScale__multi_input_branch.dot b/tests/torch/data/reference_graphs/quantized/synthetic_model/ShiftScale__multi_input_branch.dot new file mode 100644 index 00000000000..a1587e51232 --- /dev/null +++ b/tests/torch/data/reference_graphs/quantized/synthetic_model/ShiftScale__multi_input_branch.dot @@ -0,0 +1,23 @@ +strict digraph { +"0 /nncf_model_input_0" [id=0, type=nncf_model_input]; +"1 SymmetricQuantizer/symmetric_quantize_0" [id=1, type=symmetric_quantize]; +"2 ShiftScaleParametrized/__sub___0" [id=2, type=__sub__]; +"3 ShiftScaleParametrized/__truediv___0" [id=3, type=__truediv__]; +"4 ShiftScaleParametrized/NNCFNetworkInterface[_nncf]/ModuleDict[external_quantizers]/SymmetricQuantizer[ShiftScaleParametrized/__truediv___0|OUTPUT]/symmetric_quantize_0" [id=4, type=symmetric_quantize]; +"5 ShiftScaleParametrized/NNCFConv2d[conv]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" [id=5, type=symmetric_quantize]; +"6 ShiftScaleParametrized/NNCFConv2d[conv]/conv2d_0" [id=6, type=conv2d]; +"7 ShiftScaleParametrized/NNCFConv2d[conv]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_1" [id=7, type=symmetric_quantize]; +"8 ShiftScaleParametrized/NNCFConv2d[conv]/conv2d_1" [id=8, type=conv2d]; +"9 /nncf_model_output_0" [id=9, type=nncf_model_output]; +"10 /nncf_model_output_1" [id=10, type=nncf_model_output]; +"0 /nncf_model_input_0" -> "1 SymmetricQuantizer/symmetric_quantize_0"; +"1 SymmetricQuantizer/symmetric_quantize_0" -> "2 ShiftScaleParametrized/__sub___0"; +"1 SymmetricQuantizer/symmetric_quantize_0" -> "8 ShiftScaleParametrized/NNCFConv2d[conv]/conv2d_1"; +"2 ShiftScaleParametrized/__sub___0" -> "3 ShiftScaleParametrized/__truediv___0"; +"3 ShiftScaleParametrized/__truediv___0" -> "4 ShiftScaleParametrized/NNCFNetworkInterface[_nncf]/ModuleDict[external_quantizers]/SymmetricQuantizer[ShiftScaleParametrized/__truediv___0|OUTPUT]/symmetric_quantize_0"; +"4 ShiftScaleParametrized/NNCFNetworkInterface[_nncf]/ModuleDict[external_quantizers]/SymmetricQuantizer[ShiftScaleParametrized/__truediv___0|OUTPUT]/symmetric_quantize_0" -> "6 ShiftScaleParametrized/NNCFConv2d[conv]/conv2d_0"; +"5 ShiftScaleParametrized/NNCFConv2d[conv]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "6 ShiftScaleParametrized/NNCFConv2d[conv]/conv2d_0"; +"6 ShiftScaleParametrized/NNCFConv2d[conv]/conv2d_0" -> "9 /nncf_model_output_0"; +"7 ShiftScaleParametrized/NNCFConv2d[conv]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_1" -> "8 ShiftScaleParametrized/NNCFConv2d[conv]/conv2d_1"; +"8 ShiftScaleParametrized/NNCFConv2d[conv]/conv2d_1" -> "10 /nncf_model_output_1"; +} diff --git a/tests/torch/data/reference_graphs/quantized/synthetic_model/ShiftScale__normalize__multi_input_branch.dot 
b/tests/torch/data/reference_graphs/quantized/synthetic_model/ShiftScale__normalize__multi_input_branch.dot new file mode 100644 index 00000000000..079b8f9afe8 --- /dev/null +++ b/tests/torch/data/reference_graphs/quantized/synthetic_model/ShiftScale__normalize__multi_input_branch.dot @@ -0,0 +1,27 @@ +strict digraph { +"0 /nncf_model_input_0" [id=0, type=nncf_model_input]; +"1 ShiftScaleParametrized/is_floating_point_0" [id=1, type=is_floating_point]; +"2 ShiftScaleParametrized/clone_0" [id=2, type=clone]; +"3 ShiftScaleParametrized/sub__0" [id=3, type=sub_]; +"4 ShiftScaleParametrized/div__0" [id=4, type=div_]; +"5 ShiftScaleParametrized/NNCFNetworkInterface[_nncf]/ModuleDict[external_quantizers]/SymmetricQuantizer[ShiftScaleParametrized/div__0|OUTPUT]/symmetric_quantize_0" [id=5, type=symmetric_quantize]; +"6 ShiftScaleParametrized/NNCFConv2d[conv]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" [id=6, type=symmetric_quantize]; +"7 ShiftScaleParametrized/NNCFConv2d[conv]/conv2d_0" [id=7, type=conv2d]; +"8 ShiftScaleParametrized/NNCFConv2d[conv]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_1" [id=8, type=symmetric_quantize]; +"9 ShiftScaleParametrized/NNCFConv2d[conv]/SymmetricQuantizer/symmetric_quantize_0" [id=9, type=symmetric_quantize]; +"10 ShiftScaleParametrized/NNCFConv2d[conv]/conv2d_1" [id=10, type=conv2d]; +"11 /nncf_model_output_0" [id=11, type=nncf_model_output]; +"12 /nncf_model_output_1" [id=12, type=nncf_model_output]; +"0 /nncf_model_input_0" -> "1 ShiftScaleParametrized/is_floating_point_0"; +"0 /nncf_model_input_0" -> "2 ShiftScaleParametrized/clone_0"; +"0 /nncf_model_input_0" -> "9 ShiftScaleParametrized/NNCFConv2d[conv]/SymmetricQuantizer/symmetric_quantize_0"; +"2 ShiftScaleParametrized/clone_0" -> "3 ShiftScaleParametrized/sub__0"; +"3 ShiftScaleParametrized/sub__0" -> "4 ShiftScaleParametrized/div__0"; +"4 ShiftScaleParametrized/div__0" -> "5 ShiftScaleParametrized/NNCFNetworkInterface[_nncf]/ModuleDict[external_quantizers]/SymmetricQuantizer[ShiftScaleParametrized/div__0|OUTPUT]/symmetric_quantize_0"; +"5 ShiftScaleParametrized/NNCFNetworkInterface[_nncf]/ModuleDict[external_quantizers]/SymmetricQuantizer[ShiftScaleParametrized/div__0|OUTPUT]/symmetric_quantize_0" -> "7 ShiftScaleParametrized/NNCFConv2d[conv]/conv2d_0"; +"6 ShiftScaleParametrized/NNCFConv2d[conv]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "7 ShiftScaleParametrized/NNCFConv2d[conv]/conv2d_0"; +"7 ShiftScaleParametrized/NNCFConv2d[conv]/conv2d_0" -> "11 /nncf_model_output_0"; +"8 ShiftScaleParametrized/NNCFConv2d[conv]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_1" -> "10 ShiftScaleParametrized/NNCFConv2d[conv]/conv2d_1"; +"9 ShiftScaleParametrized/NNCFConv2d[conv]/SymmetricQuantizer/symmetric_quantize_0" -> "10 ShiftScaleParametrized/NNCFConv2d[conv]/conv2d_1"; +"10 ShiftScaleParametrized/NNCFConv2d[conv]/conv2d_1" -> "12 /nncf_model_output_1"; +} diff --git a/tests/torch/data/reference_graphs/quantized/synthetic_model/ShiftScale__normalize__single_input_branch.dot b/tests/torch/data/reference_graphs/quantized/synthetic_model/ShiftScale__normalize__single_input_branch.dot new file mode 100644 index 00000000000..4d067597486 --- /dev/null +++ b/tests/torch/data/reference_graphs/quantized/synthetic_model/ShiftScale__normalize__single_input_branch.dot @@ -0,0 +1,19 @@ +strict digraph { +"0 /nncf_model_input_0" [id=0, type=nncf_model_input]; +"1 
ShiftScaleParametrized/is_floating_point_0" [id=1, type=is_floating_point]; +"2 ShiftScaleParametrized/clone_0" [id=2, type=clone]; +"3 ShiftScaleParametrized/sub__0" [id=3, type=sub_]; +"4 ShiftScaleParametrized/div__0" [id=4, type=div_]; +"5 ShiftScaleParametrized/NNCFNetworkInterface[_nncf]/ModuleDict[external_quantizers]/SymmetricQuantizer[ShiftScaleParametrized/div__0|OUTPUT]/symmetric_quantize_0" [id=5, type=symmetric_quantize]; +"6 ShiftScaleParametrized/NNCFConv2d[conv]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" [id=6, type=symmetric_quantize]; +"7 ShiftScaleParametrized/NNCFConv2d[conv]/conv2d_0" [id=7, type=conv2d]; +"8 /nncf_model_output_0" [id=8, type=nncf_model_output]; +"0 /nncf_model_input_0" -> "1 ShiftScaleParametrized/is_floating_point_0"; +"0 /nncf_model_input_0" -> "2 ShiftScaleParametrized/clone_0"; +"2 ShiftScaleParametrized/clone_0" -> "3 ShiftScaleParametrized/sub__0"; +"3 ShiftScaleParametrized/sub__0" -> "4 ShiftScaleParametrized/div__0"; +"4 ShiftScaleParametrized/div__0" -> "5 ShiftScaleParametrized/NNCFNetworkInterface[_nncf]/ModuleDict[external_quantizers]/SymmetricQuantizer[ShiftScaleParametrized/div__0|OUTPUT]/symmetric_quantize_0"; +"5 ShiftScaleParametrized/NNCFNetworkInterface[_nncf]/ModuleDict[external_quantizers]/SymmetricQuantizer[ShiftScaleParametrized/div__0|OUTPUT]/symmetric_quantize_0" -> "7 ShiftScaleParametrized/NNCFConv2d[conv]/conv2d_0"; +"6 ShiftScaleParametrized/NNCFConv2d[conv]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "7 ShiftScaleParametrized/NNCFConv2d[conv]/conv2d_0"; +"7 ShiftScaleParametrized/NNCFConv2d[conv]/conv2d_0" -> "8 /nncf_model_output_0"; +} diff --git a/tests/torch/data/reference_graphs/quantized/synthetic_model/ShiftScale__single_input_branch.dot b/tests/torch/data/reference_graphs/quantized/synthetic_model/ShiftScale__single_input_branch.dot new file mode 100644 index 00000000000..0a2f73028fb --- /dev/null +++ b/tests/torch/data/reference_graphs/quantized/synthetic_model/ShiftScale__single_input_branch.dot @@ -0,0 +1,15 @@ +strict digraph { +"0 /nncf_model_input_0" [id=0, type=nncf_model_input]; +"1 ShiftScaleParametrized/__sub___0" [id=1, type=__sub__]; +"2 ShiftScaleParametrized/__truediv___0" [id=2, type=__truediv__]; +"3 ShiftScaleParametrized/NNCFNetworkInterface[_nncf]/ModuleDict[external_quantizers]/SymmetricQuantizer[ShiftScaleParametrized/__truediv___0|OUTPUT]/symmetric_quantize_0" [id=3, type=symmetric_quantize]; +"4 ShiftScaleParametrized/NNCFConv2d[conv]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" [id=4, type=symmetric_quantize]; +"5 ShiftScaleParametrized/NNCFConv2d[conv]/conv2d_0" [id=5, type=conv2d]; +"6 /nncf_model_output_0" [id=6, type=nncf_model_output]; +"0 /nncf_model_input_0" -> "1 ShiftScaleParametrized/__sub___0"; +"1 ShiftScaleParametrized/__sub___0" -> "2 ShiftScaleParametrized/__truediv___0"; +"2 ShiftScaleParametrized/__truediv___0" -> "3 ShiftScaleParametrized/NNCFNetworkInterface[_nncf]/ModuleDict[external_quantizers]/SymmetricQuantizer[ShiftScaleParametrized/__truediv___0|OUTPUT]/symmetric_quantize_0"; +"3 ShiftScaleParametrized/NNCFNetworkInterface[_nncf]/ModuleDict[external_quantizers]/SymmetricQuantizer[ShiftScaleParametrized/__truediv___0|OUTPUT]/symmetric_quantize_0" -> "5 ShiftScaleParametrized/NNCFConv2d[conv]/conv2d_0"; +"4 ShiftScaleParametrized/NNCFConv2d[conv]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "5 
ShiftScaleParametrized/NNCFConv2d[conv]/conv2d_0"; +"5 ShiftScaleParametrized/NNCFConv2d[conv]/conv2d_0" -> "6 /nncf_model_output_0"; +} diff --git a/tests/torch/data/reference_graphs/quantized_rb_sparsity/lstm_cell.dot b/tests/torch/data/reference_graphs/quantized_rb_sparsity/lstm_cell.dot index d7aff5d7d35..713c0421b36 100644 --- a/tests/torch/data/reference_graphs/quantized_rb_sparsity/lstm_cell.dot +++ b/tests/torch/data/reference_graphs/quantized_rb_sparsity/lstm_cell.dot @@ -64,8 +64,8 @@ strict digraph { "25 LSTMCellNNCF/LSTMCellForwardNNCF[cell]/__mul___1" -> "26 LSTMCellNNCF/LSTMCellForwardNNCF[cell]/SymmetricQuantizer/symmetric_quantize_6"; "26 LSTMCellNNCF/LSTMCellForwardNNCF[cell]/SymmetricQuantizer/symmetric_quantize_6" -> "27 LSTMCellNNCF/LSTMCellForwardNNCF[cell]/__add___1"; "27 LSTMCellNNCF/LSTMCellForwardNNCF[cell]/__add___1" -> "28 LSTMCellNNCF/LSTMCellForwardNNCF[cell]/SymmetricQuantizer/symmetric_quantize_7"; +"27 LSTMCellNNCF/LSTMCellForwardNNCF[cell]/__add___1" -> "33 /nncf_model_output_1"; "28 LSTMCellNNCF/LSTMCellForwardNNCF[cell]/SymmetricQuantizer/symmetric_quantize_7" -> "29 LSTMCellNNCF/LSTMCellForwardNNCF[cell]/tanh_1"; -"28 LSTMCellNNCF/LSTMCellForwardNNCF[cell]/SymmetricQuantizer/symmetric_quantize_7" -> "33 /nncf_model_output_1"; "29 LSTMCellNNCF/LSTMCellForwardNNCF[cell]/tanh_1" -> "30 LSTMCellNNCF/LSTMCellForwardNNCF[cell]/SymmetricQuantizer/symmetric_quantize_8"; "30 LSTMCellNNCF/LSTMCellForwardNNCF[cell]/SymmetricQuantizer/symmetric_quantize_8" -> "31 LSTMCellNNCF/LSTMCellForwardNNCF[cell]/__mul___2"; "31 LSTMCellNNCF/LSTMCellForwardNNCF[cell]/__mul___2" -> "32 /nncf_model_output_0"; diff --git a/tests/torch/data/search_building_block/wave2vec_2.0.json b/tests/torch/data/search_building_block/wave2vec_2.0.json index e2778e08c21..b698685f88b 100644 --- a/tests/torch/data/search_building_block/wave2vec_2.0.json +++ b/tests/torch/data/search_building_block/wave2vec_2.0.json @@ -19,6 +19,8 @@ "Wav2Vec2ForSequenceClassification/Wav2Vec2Model[wav2vec2]/Wav2Vec2Encoder[encoder]/ModuleList[layers]/Wav2Vec2EncoderLayer[0]/Wav2Vec2Attention[attention]/contiguous_2", "Wav2Vec2ForSequenceClassification/Wav2Vec2Model[wav2vec2]/Wav2Vec2Encoder[encoder]/ModuleList[layers]/Wav2Vec2EncoderLayer[0]/Wav2Vec2Attention[attention]/dropout_0", "Wav2Vec2ForSequenceClassification/Wav2Vec2Model[wav2vec2]/Wav2Vec2Encoder[encoder]/ModuleList[layers]/Wav2Vec2EncoderLayer[0]/Wav2Vec2Attention[attention]/reshape_0", + "Wav2Vec2ForSequenceClassification/Wav2Vec2Model[wav2vec2]/Wav2Vec2Encoder[encoder]/ModuleList[layers]/Wav2Vec2EncoderLayer[0]/Wav2Vec2Attention[attention]/reshape_1", + "Wav2Vec2ForSequenceClassification/Wav2Vec2Model[wav2vec2]/Wav2Vec2Encoder[encoder]/ModuleList[layers]/Wav2Vec2EncoderLayer[0]/Wav2Vec2Attention[attention]/reshape_2", "Wav2Vec2ForSequenceClassification/Wav2Vec2Model[wav2vec2]/Wav2Vec2Encoder[encoder]/ModuleList[layers]/Wav2Vec2EncoderLayer[0]/Wav2Vec2Attention[attention]/softmax_0", "Wav2Vec2ForSequenceClassification/Wav2Vec2Model[wav2vec2]/Wav2Vec2Encoder[encoder]/ModuleList[layers]/Wav2Vec2EncoderLayer[0]/Wav2Vec2Attention[attention]/transpose_0", "Wav2Vec2ForSequenceClassification/Wav2Vec2Model[wav2vec2]/Wav2Vec2Encoder[encoder]/ModuleList[layers]/Wav2Vec2EncoderLayer[0]/Wav2Vec2Attention[attention]/transpose_1", @@ -30,8 +32,6 @@ "Wav2Vec2ForSequenceClassification/Wav2Vec2Model[wav2vec2]/Wav2Vec2Encoder[encoder]/ModuleList[layers]/Wav2Vec2EncoderLayer[0]/Wav2Vec2Attention[attention]/view_2", 
"Wav2Vec2ForSequenceClassification/Wav2Vec2Model[wav2vec2]/Wav2Vec2Encoder[encoder]/ModuleList[layers]/Wav2Vec2EncoderLayer[0]/Wav2Vec2Attention[attention]/view_3", "Wav2Vec2ForSequenceClassification/Wav2Vec2Model[wav2vec2]/Wav2Vec2Encoder[encoder]/ModuleList[layers]/Wav2Vec2EncoderLayer[0]/Wav2Vec2Attention[attention]/view_4", - "Wav2Vec2ForSequenceClassification/Wav2Vec2Model[wav2vec2]/Wav2Vec2Encoder[encoder]/ModuleList[layers]/Wav2Vec2EncoderLayer[0]/Wav2Vec2Attention[attention]/view_5", - "Wav2Vec2ForSequenceClassification/Wav2Vec2Model[wav2vec2]/Wav2Vec2Encoder[encoder]/ModuleList[layers]/Wav2Vec2EncoderLayer[0]/Wav2Vec2Attention[attention]/view_6", "Wav2Vec2ForSequenceClassification/Wav2Vec2Model[wav2vec2]/Wav2Vec2Encoder[encoder]/ModuleList[layers]/Wav2Vec2EncoderLayer[0]/__add___0" ], "ordinal_ids": [ @@ -78,6 +78,8 @@ "Wav2Vec2ForSequenceClassification/Wav2Vec2Model[wav2vec2]/Wav2Vec2Encoder[encoder]/ModuleList[layers]/Wav2Vec2EncoderLayer[1]/Wav2Vec2Attention[attention]/contiguous_2", "Wav2Vec2ForSequenceClassification/Wav2Vec2Model[wav2vec2]/Wav2Vec2Encoder[encoder]/ModuleList[layers]/Wav2Vec2EncoderLayer[1]/Wav2Vec2Attention[attention]/dropout_0", "Wav2Vec2ForSequenceClassification/Wav2Vec2Model[wav2vec2]/Wav2Vec2Encoder[encoder]/ModuleList[layers]/Wav2Vec2EncoderLayer[1]/Wav2Vec2Attention[attention]/reshape_0", + "Wav2Vec2ForSequenceClassification/Wav2Vec2Model[wav2vec2]/Wav2Vec2Encoder[encoder]/ModuleList[layers]/Wav2Vec2EncoderLayer[1]/Wav2Vec2Attention[attention]/reshape_1", + "Wav2Vec2ForSequenceClassification/Wav2Vec2Model[wav2vec2]/Wav2Vec2Encoder[encoder]/ModuleList[layers]/Wav2Vec2EncoderLayer[1]/Wav2Vec2Attention[attention]/reshape_2", "Wav2Vec2ForSequenceClassification/Wav2Vec2Model[wav2vec2]/Wav2Vec2Encoder[encoder]/ModuleList[layers]/Wav2Vec2EncoderLayer[1]/Wav2Vec2Attention[attention]/softmax_0", "Wav2Vec2ForSequenceClassification/Wav2Vec2Model[wav2vec2]/Wav2Vec2Encoder[encoder]/ModuleList[layers]/Wav2Vec2EncoderLayer[1]/Wav2Vec2Attention[attention]/transpose_0", "Wav2Vec2ForSequenceClassification/Wav2Vec2Model[wav2vec2]/Wav2Vec2Encoder[encoder]/ModuleList[layers]/Wav2Vec2EncoderLayer[1]/Wav2Vec2Attention[attention]/transpose_1", @@ -89,8 +91,6 @@ "Wav2Vec2ForSequenceClassification/Wav2Vec2Model[wav2vec2]/Wav2Vec2Encoder[encoder]/ModuleList[layers]/Wav2Vec2EncoderLayer[1]/Wav2Vec2Attention[attention]/view_2", "Wav2Vec2ForSequenceClassification/Wav2Vec2Model[wav2vec2]/Wav2Vec2Encoder[encoder]/ModuleList[layers]/Wav2Vec2EncoderLayer[1]/Wav2Vec2Attention[attention]/view_3", "Wav2Vec2ForSequenceClassification/Wav2Vec2Model[wav2vec2]/Wav2Vec2Encoder[encoder]/ModuleList[layers]/Wav2Vec2EncoderLayer[1]/Wav2Vec2Attention[attention]/view_4", - "Wav2Vec2ForSequenceClassification/Wav2Vec2Model[wav2vec2]/Wav2Vec2Encoder[encoder]/ModuleList[layers]/Wav2Vec2EncoderLayer[1]/Wav2Vec2Attention[attention]/view_5", - "Wav2Vec2ForSequenceClassification/Wav2Vec2Model[wav2vec2]/Wav2Vec2Encoder[encoder]/ModuleList[layers]/Wav2Vec2EncoderLayer[1]/Wav2Vec2Attention[attention]/view_6", "Wav2Vec2ForSequenceClassification/Wav2Vec2Model[wav2vec2]/Wav2Vec2Encoder[encoder]/ModuleList[layers]/Wav2Vec2EncoderLayer[1]/__add___0" ], "ordinal_ids": [ @@ -137,6 +137,8 @@ "Wav2Vec2ForSequenceClassification/Wav2Vec2Model[wav2vec2]/Wav2Vec2Encoder[encoder]/ModuleList[layers]/Wav2Vec2EncoderLayer[2]/Wav2Vec2Attention[attention]/contiguous_2", 
"Wav2Vec2ForSequenceClassification/Wav2Vec2Model[wav2vec2]/Wav2Vec2Encoder[encoder]/ModuleList[layers]/Wav2Vec2EncoderLayer[2]/Wav2Vec2Attention[attention]/dropout_0", "Wav2Vec2ForSequenceClassification/Wav2Vec2Model[wav2vec2]/Wav2Vec2Encoder[encoder]/ModuleList[layers]/Wav2Vec2EncoderLayer[2]/Wav2Vec2Attention[attention]/reshape_0", + "Wav2Vec2ForSequenceClassification/Wav2Vec2Model[wav2vec2]/Wav2Vec2Encoder[encoder]/ModuleList[layers]/Wav2Vec2EncoderLayer[2]/Wav2Vec2Attention[attention]/reshape_1", + "Wav2Vec2ForSequenceClassification/Wav2Vec2Model[wav2vec2]/Wav2Vec2Encoder[encoder]/ModuleList[layers]/Wav2Vec2EncoderLayer[2]/Wav2Vec2Attention[attention]/reshape_2", "Wav2Vec2ForSequenceClassification/Wav2Vec2Model[wav2vec2]/Wav2Vec2Encoder[encoder]/ModuleList[layers]/Wav2Vec2EncoderLayer[2]/Wav2Vec2Attention[attention]/softmax_0", "Wav2Vec2ForSequenceClassification/Wav2Vec2Model[wav2vec2]/Wav2Vec2Encoder[encoder]/ModuleList[layers]/Wav2Vec2EncoderLayer[2]/Wav2Vec2Attention[attention]/transpose_0", "Wav2Vec2ForSequenceClassification/Wav2Vec2Model[wav2vec2]/Wav2Vec2Encoder[encoder]/ModuleList[layers]/Wav2Vec2EncoderLayer[2]/Wav2Vec2Attention[attention]/transpose_1", @@ -148,8 +150,6 @@ "Wav2Vec2ForSequenceClassification/Wav2Vec2Model[wav2vec2]/Wav2Vec2Encoder[encoder]/ModuleList[layers]/Wav2Vec2EncoderLayer[2]/Wav2Vec2Attention[attention]/view_2", "Wav2Vec2ForSequenceClassification/Wav2Vec2Model[wav2vec2]/Wav2Vec2Encoder[encoder]/ModuleList[layers]/Wav2Vec2EncoderLayer[2]/Wav2Vec2Attention[attention]/view_3", "Wav2Vec2ForSequenceClassification/Wav2Vec2Model[wav2vec2]/Wav2Vec2Encoder[encoder]/ModuleList[layers]/Wav2Vec2EncoderLayer[2]/Wav2Vec2Attention[attention]/view_4", - "Wav2Vec2ForSequenceClassification/Wav2Vec2Model[wav2vec2]/Wav2Vec2Encoder[encoder]/ModuleList[layers]/Wav2Vec2EncoderLayer[2]/Wav2Vec2Attention[attention]/view_5", - "Wav2Vec2ForSequenceClassification/Wav2Vec2Model[wav2vec2]/Wav2Vec2Encoder[encoder]/ModuleList[layers]/Wav2Vec2EncoderLayer[2]/Wav2Vec2Attention[attention]/view_6", "Wav2Vec2ForSequenceClassification/Wav2Vec2Model[wav2vec2]/Wav2Vec2Encoder[encoder]/ModuleList[layers]/Wav2Vec2EncoderLayer[2]/__add___0" ], "ordinal_ids": [ @@ -196,6 +196,8 @@ "Wav2Vec2ForSequenceClassification/Wav2Vec2Model[wav2vec2]/Wav2Vec2Encoder[encoder]/ModuleList[layers]/Wav2Vec2EncoderLayer[3]/Wav2Vec2Attention[attention]/contiguous_2", "Wav2Vec2ForSequenceClassification/Wav2Vec2Model[wav2vec2]/Wav2Vec2Encoder[encoder]/ModuleList[layers]/Wav2Vec2EncoderLayer[3]/Wav2Vec2Attention[attention]/dropout_0", "Wav2Vec2ForSequenceClassification/Wav2Vec2Model[wav2vec2]/Wav2Vec2Encoder[encoder]/ModuleList[layers]/Wav2Vec2EncoderLayer[3]/Wav2Vec2Attention[attention]/reshape_0", + "Wav2Vec2ForSequenceClassification/Wav2Vec2Model[wav2vec2]/Wav2Vec2Encoder[encoder]/ModuleList[layers]/Wav2Vec2EncoderLayer[3]/Wav2Vec2Attention[attention]/reshape_1", + "Wav2Vec2ForSequenceClassification/Wav2Vec2Model[wav2vec2]/Wav2Vec2Encoder[encoder]/ModuleList[layers]/Wav2Vec2EncoderLayer[3]/Wav2Vec2Attention[attention]/reshape_2", "Wav2Vec2ForSequenceClassification/Wav2Vec2Model[wav2vec2]/Wav2Vec2Encoder[encoder]/ModuleList[layers]/Wav2Vec2EncoderLayer[3]/Wav2Vec2Attention[attention]/softmax_0", "Wav2Vec2ForSequenceClassification/Wav2Vec2Model[wav2vec2]/Wav2Vec2Encoder[encoder]/ModuleList[layers]/Wav2Vec2EncoderLayer[3]/Wav2Vec2Attention[attention]/transpose_0", 
"Wav2Vec2ForSequenceClassification/Wav2Vec2Model[wav2vec2]/Wav2Vec2Encoder[encoder]/ModuleList[layers]/Wav2Vec2EncoderLayer[3]/Wav2Vec2Attention[attention]/transpose_1", @@ -207,8 +209,6 @@ "Wav2Vec2ForSequenceClassification/Wav2Vec2Model[wav2vec2]/Wav2Vec2Encoder[encoder]/ModuleList[layers]/Wav2Vec2EncoderLayer[3]/Wav2Vec2Attention[attention]/view_2", "Wav2Vec2ForSequenceClassification/Wav2Vec2Model[wav2vec2]/Wav2Vec2Encoder[encoder]/ModuleList[layers]/Wav2Vec2EncoderLayer[3]/Wav2Vec2Attention[attention]/view_3", "Wav2Vec2ForSequenceClassification/Wav2Vec2Model[wav2vec2]/Wav2Vec2Encoder[encoder]/ModuleList[layers]/Wav2Vec2EncoderLayer[3]/Wav2Vec2Attention[attention]/view_4", - "Wav2Vec2ForSequenceClassification/Wav2Vec2Model[wav2vec2]/Wav2Vec2Encoder[encoder]/ModuleList[layers]/Wav2Vec2EncoderLayer[3]/Wav2Vec2Attention[attention]/view_5", - "Wav2Vec2ForSequenceClassification/Wav2Vec2Model[wav2vec2]/Wav2Vec2Encoder[encoder]/ModuleList[layers]/Wav2Vec2EncoderLayer[3]/Wav2Vec2Attention[attention]/view_6", "Wav2Vec2ForSequenceClassification/Wav2Vec2Model[wav2vec2]/Wav2Vec2Encoder[encoder]/ModuleList[layers]/Wav2Vec2EncoderLayer[3]/__add___0" ], "ordinal_ids": [ @@ -255,6 +255,8 @@ "Wav2Vec2ForSequenceClassification/Wav2Vec2Model[wav2vec2]/Wav2Vec2Encoder[encoder]/ModuleList[layers]/Wav2Vec2EncoderLayer[4]/Wav2Vec2Attention[attention]/contiguous_2", "Wav2Vec2ForSequenceClassification/Wav2Vec2Model[wav2vec2]/Wav2Vec2Encoder[encoder]/ModuleList[layers]/Wav2Vec2EncoderLayer[4]/Wav2Vec2Attention[attention]/dropout_0", "Wav2Vec2ForSequenceClassification/Wav2Vec2Model[wav2vec2]/Wav2Vec2Encoder[encoder]/ModuleList[layers]/Wav2Vec2EncoderLayer[4]/Wav2Vec2Attention[attention]/reshape_0", + "Wav2Vec2ForSequenceClassification/Wav2Vec2Model[wav2vec2]/Wav2Vec2Encoder[encoder]/ModuleList[layers]/Wav2Vec2EncoderLayer[4]/Wav2Vec2Attention[attention]/reshape_1", + "Wav2Vec2ForSequenceClassification/Wav2Vec2Model[wav2vec2]/Wav2Vec2Encoder[encoder]/ModuleList[layers]/Wav2Vec2EncoderLayer[4]/Wav2Vec2Attention[attention]/reshape_2", "Wav2Vec2ForSequenceClassification/Wav2Vec2Model[wav2vec2]/Wav2Vec2Encoder[encoder]/ModuleList[layers]/Wav2Vec2EncoderLayer[4]/Wav2Vec2Attention[attention]/softmax_0", "Wav2Vec2ForSequenceClassification/Wav2Vec2Model[wav2vec2]/Wav2Vec2Encoder[encoder]/ModuleList[layers]/Wav2Vec2EncoderLayer[4]/Wav2Vec2Attention[attention]/transpose_0", "Wav2Vec2ForSequenceClassification/Wav2Vec2Model[wav2vec2]/Wav2Vec2Encoder[encoder]/ModuleList[layers]/Wav2Vec2EncoderLayer[4]/Wav2Vec2Attention[attention]/transpose_1", @@ -266,8 +268,6 @@ "Wav2Vec2ForSequenceClassification/Wav2Vec2Model[wav2vec2]/Wav2Vec2Encoder[encoder]/ModuleList[layers]/Wav2Vec2EncoderLayer[4]/Wav2Vec2Attention[attention]/view_2", "Wav2Vec2ForSequenceClassification/Wav2Vec2Model[wav2vec2]/Wav2Vec2Encoder[encoder]/ModuleList[layers]/Wav2Vec2EncoderLayer[4]/Wav2Vec2Attention[attention]/view_3", "Wav2Vec2ForSequenceClassification/Wav2Vec2Model[wav2vec2]/Wav2Vec2Encoder[encoder]/ModuleList[layers]/Wav2Vec2EncoderLayer[4]/Wav2Vec2Attention[attention]/view_4", - "Wav2Vec2ForSequenceClassification/Wav2Vec2Model[wav2vec2]/Wav2Vec2Encoder[encoder]/ModuleList[layers]/Wav2Vec2EncoderLayer[4]/Wav2Vec2Attention[attention]/view_5", - "Wav2Vec2ForSequenceClassification/Wav2Vec2Model[wav2vec2]/Wav2Vec2Encoder[encoder]/ModuleList[layers]/Wav2Vec2EncoderLayer[4]/Wav2Vec2Attention[attention]/view_6", "Wav2Vec2ForSequenceClassification/Wav2Vec2Model[wav2vec2]/Wav2Vec2Encoder[encoder]/ModuleList[layers]/Wav2Vec2EncoderLayer[4]/__add___0" ], 
"ordinal_ids": [ @@ -314,6 +314,8 @@ "Wav2Vec2ForSequenceClassification/Wav2Vec2Model[wav2vec2]/Wav2Vec2Encoder[encoder]/ModuleList[layers]/Wav2Vec2EncoderLayer[5]/Wav2Vec2Attention[attention]/contiguous_2", "Wav2Vec2ForSequenceClassification/Wav2Vec2Model[wav2vec2]/Wav2Vec2Encoder[encoder]/ModuleList[layers]/Wav2Vec2EncoderLayer[5]/Wav2Vec2Attention[attention]/dropout_0", "Wav2Vec2ForSequenceClassification/Wav2Vec2Model[wav2vec2]/Wav2Vec2Encoder[encoder]/ModuleList[layers]/Wav2Vec2EncoderLayer[5]/Wav2Vec2Attention[attention]/reshape_0", + "Wav2Vec2ForSequenceClassification/Wav2Vec2Model[wav2vec2]/Wav2Vec2Encoder[encoder]/ModuleList[layers]/Wav2Vec2EncoderLayer[5]/Wav2Vec2Attention[attention]/reshape_1", + "Wav2Vec2ForSequenceClassification/Wav2Vec2Model[wav2vec2]/Wav2Vec2Encoder[encoder]/ModuleList[layers]/Wav2Vec2EncoderLayer[5]/Wav2Vec2Attention[attention]/reshape_2", "Wav2Vec2ForSequenceClassification/Wav2Vec2Model[wav2vec2]/Wav2Vec2Encoder[encoder]/ModuleList[layers]/Wav2Vec2EncoderLayer[5]/Wav2Vec2Attention[attention]/softmax_0", "Wav2Vec2ForSequenceClassification/Wav2Vec2Model[wav2vec2]/Wav2Vec2Encoder[encoder]/ModuleList[layers]/Wav2Vec2EncoderLayer[5]/Wav2Vec2Attention[attention]/transpose_0", "Wav2Vec2ForSequenceClassification/Wav2Vec2Model[wav2vec2]/Wav2Vec2Encoder[encoder]/ModuleList[layers]/Wav2Vec2EncoderLayer[5]/Wav2Vec2Attention[attention]/transpose_1", @@ -325,8 +327,6 @@ "Wav2Vec2ForSequenceClassification/Wav2Vec2Model[wav2vec2]/Wav2Vec2Encoder[encoder]/ModuleList[layers]/Wav2Vec2EncoderLayer[5]/Wav2Vec2Attention[attention]/view_2", "Wav2Vec2ForSequenceClassification/Wav2Vec2Model[wav2vec2]/Wav2Vec2Encoder[encoder]/ModuleList[layers]/Wav2Vec2EncoderLayer[5]/Wav2Vec2Attention[attention]/view_3", "Wav2Vec2ForSequenceClassification/Wav2Vec2Model[wav2vec2]/Wav2Vec2Encoder[encoder]/ModuleList[layers]/Wav2Vec2EncoderLayer[5]/Wav2Vec2Attention[attention]/view_4", - "Wav2Vec2ForSequenceClassification/Wav2Vec2Model[wav2vec2]/Wav2Vec2Encoder[encoder]/ModuleList[layers]/Wav2Vec2EncoderLayer[5]/Wav2Vec2Attention[attention]/view_5", - "Wav2Vec2ForSequenceClassification/Wav2Vec2Model[wav2vec2]/Wav2Vec2Encoder[encoder]/ModuleList[layers]/Wav2Vec2EncoderLayer[5]/Wav2Vec2Attention[attention]/view_6", "Wav2Vec2ForSequenceClassification/Wav2Vec2Model[wav2vec2]/Wav2Vec2Encoder[encoder]/ModuleList[layers]/Wav2Vec2EncoderLayer[5]/__add___0" ], "ordinal_ids": [ @@ -373,6 +373,8 @@ "Wav2Vec2ForSequenceClassification/Wav2Vec2Model[wav2vec2]/Wav2Vec2Encoder[encoder]/ModuleList[layers]/Wav2Vec2EncoderLayer[6]/Wav2Vec2Attention[attention]/contiguous_2", "Wav2Vec2ForSequenceClassification/Wav2Vec2Model[wav2vec2]/Wav2Vec2Encoder[encoder]/ModuleList[layers]/Wav2Vec2EncoderLayer[6]/Wav2Vec2Attention[attention]/dropout_0", "Wav2Vec2ForSequenceClassification/Wav2Vec2Model[wav2vec2]/Wav2Vec2Encoder[encoder]/ModuleList[layers]/Wav2Vec2EncoderLayer[6]/Wav2Vec2Attention[attention]/reshape_0", + "Wav2Vec2ForSequenceClassification/Wav2Vec2Model[wav2vec2]/Wav2Vec2Encoder[encoder]/ModuleList[layers]/Wav2Vec2EncoderLayer[6]/Wav2Vec2Attention[attention]/reshape_1", + "Wav2Vec2ForSequenceClassification/Wav2Vec2Model[wav2vec2]/Wav2Vec2Encoder[encoder]/ModuleList[layers]/Wav2Vec2EncoderLayer[6]/Wav2Vec2Attention[attention]/reshape_2", "Wav2Vec2ForSequenceClassification/Wav2Vec2Model[wav2vec2]/Wav2Vec2Encoder[encoder]/ModuleList[layers]/Wav2Vec2EncoderLayer[6]/Wav2Vec2Attention[attention]/softmax_0", 
"Wav2Vec2ForSequenceClassification/Wav2Vec2Model[wav2vec2]/Wav2Vec2Encoder[encoder]/ModuleList[layers]/Wav2Vec2EncoderLayer[6]/Wav2Vec2Attention[attention]/transpose_0", "Wav2Vec2ForSequenceClassification/Wav2Vec2Model[wav2vec2]/Wav2Vec2Encoder[encoder]/ModuleList[layers]/Wav2Vec2EncoderLayer[6]/Wav2Vec2Attention[attention]/transpose_1", @@ -384,8 +386,6 @@ "Wav2Vec2ForSequenceClassification/Wav2Vec2Model[wav2vec2]/Wav2Vec2Encoder[encoder]/ModuleList[layers]/Wav2Vec2EncoderLayer[6]/Wav2Vec2Attention[attention]/view_2", "Wav2Vec2ForSequenceClassification/Wav2Vec2Model[wav2vec2]/Wav2Vec2Encoder[encoder]/ModuleList[layers]/Wav2Vec2EncoderLayer[6]/Wav2Vec2Attention[attention]/view_3", "Wav2Vec2ForSequenceClassification/Wav2Vec2Model[wav2vec2]/Wav2Vec2Encoder[encoder]/ModuleList[layers]/Wav2Vec2EncoderLayer[6]/Wav2Vec2Attention[attention]/view_4", - "Wav2Vec2ForSequenceClassification/Wav2Vec2Model[wav2vec2]/Wav2Vec2Encoder[encoder]/ModuleList[layers]/Wav2Vec2EncoderLayer[6]/Wav2Vec2Attention[attention]/view_5", - "Wav2Vec2ForSequenceClassification/Wav2Vec2Model[wav2vec2]/Wav2Vec2Encoder[encoder]/ModuleList[layers]/Wav2Vec2EncoderLayer[6]/Wav2Vec2Attention[attention]/view_6", "Wav2Vec2ForSequenceClassification/Wav2Vec2Model[wav2vec2]/Wav2Vec2Encoder[encoder]/ModuleList[layers]/Wav2Vec2EncoderLayer[6]/__add___0" ], "ordinal_ids": [ @@ -432,6 +432,8 @@ "Wav2Vec2ForSequenceClassification/Wav2Vec2Model[wav2vec2]/Wav2Vec2Encoder[encoder]/ModuleList[layers]/Wav2Vec2EncoderLayer[7]/Wav2Vec2Attention[attention]/contiguous_2", "Wav2Vec2ForSequenceClassification/Wav2Vec2Model[wav2vec2]/Wav2Vec2Encoder[encoder]/ModuleList[layers]/Wav2Vec2EncoderLayer[7]/Wav2Vec2Attention[attention]/dropout_0", "Wav2Vec2ForSequenceClassification/Wav2Vec2Model[wav2vec2]/Wav2Vec2Encoder[encoder]/ModuleList[layers]/Wav2Vec2EncoderLayer[7]/Wav2Vec2Attention[attention]/reshape_0", + "Wav2Vec2ForSequenceClassification/Wav2Vec2Model[wav2vec2]/Wav2Vec2Encoder[encoder]/ModuleList[layers]/Wav2Vec2EncoderLayer[7]/Wav2Vec2Attention[attention]/reshape_1", + "Wav2Vec2ForSequenceClassification/Wav2Vec2Model[wav2vec2]/Wav2Vec2Encoder[encoder]/ModuleList[layers]/Wav2Vec2EncoderLayer[7]/Wav2Vec2Attention[attention]/reshape_2", "Wav2Vec2ForSequenceClassification/Wav2Vec2Model[wav2vec2]/Wav2Vec2Encoder[encoder]/ModuleList[layers]/Wav2Vec2EncoderLayer[7]/Wav2Vec2Attention[attention]/softmax_0", "Wav2Vec2ForSequenceClassification/Wav2Vec2Model[wav2vec2]/Wav2Vec2Encoder[encoder]/ModuleList[layers]/Wav2Vec2EncoderLayer[7]/Wav2Vec2Attention[attention]/transpose_0", "Wav2Vec2ForSequenceClassification/Wav2Vec2Model[wav2vec2]/Wav2Vec2Encoder[encoder]/ModuleList[layers]/Wav2Vec2EncoderLayer[7]/Wav2Vec2Attention[attention]/transpose_1", @@ -443,8 +445,6 @@ "Wav2Vec2ForSequenceClassification/Wav2Vec2Model[wav2vec2]/Wav2Vec2Encoder[encoder]/ModuleList[layers]/Wav2Vec2EncoderLayer[7]/Wav2Vec2Attention[attention]/view_2", "Wav2Vec2ForSequenceClassification/Wav2Vec2Model[wav2vec2]/Wav2Vec2Encoder[encoder]/ModuleList[layers]/Wav2Vec2EncoderLayer[7]/Wav2Vec2Attention[attention]/view_3", "Wav2Vec2ForSequenceClassification/Wav2Vec2Model[wav2vec2]/Wav2Vec2Encoder[encoder]/ModuleList[layers]/Wav2Vec2EncoderLayer[7]/Wav2Vec2Attention[attention]/view_4", - "Wav2Vec2ForSequenceClassification/Wav2Vec2Model[wav2vec2]/Wav2Vec2Encoder[encoder]/ModuleList[layers]/Wav2Vec2EncoderLayer[7]/Wav2Vec2Attention[attention]/view_5", - 
"Wav2Vec2ForSequenceClassification/Wav2Vec2Model[wav2vec2]/Wav2Vec2Encoder[encoder]/ModuleList[layers]/Wav2Vec2EncoderLayer[7]/Wav2Vec2Attention[attention]/view_6", "Wav2Vec2ForSequenceClassification/Wav2Vec2Model[wav2vec2]/Wav2Vec2Encoder[encoder]/ModuleList[layers]/Wav2Vec2EncoderLayer[7]/__add___0" ], "ordinal_ids": [ @@ -491,6 +491,8 @@ "Wav2Vec2ForSequenceClassification/Wav2Vec2Model[wav2vec2]/Wav2Vec2Encoder[encoder]/ModuleList[layers]/Wav2Vec2EncoderLayer[8]/Wav2Vec2Attention[attention]/contiguous_2", "Wav2Vec2ForSequenceClassification/Wav2Vec2Model[wav2vec2]/Wav2Vec2Encoder[encoder]/ModuleList[layers]/Wav2Vec2EncoderLayer[8]/Wav2Vec2Attention[attention]/dropout_0", "Wav2Vec2ForSequenceClassification/Wav2Vec2Model[wav2vec2]/Wav2Vec2Encoder[encoder]/ModuleList[layers]/Wav2Vec2EncoderLayer[8]/Wav2Vec2Attention[attention]/reshape_0", + "Wav2Vec2ForSequenceClassification/Wav2Vec2Model[wav2vec2]/Wav2Vec2Encoder[encoder]/ModuleList[layers]/Wav2Vec2EncoderLayer[8]/Wav2Vec2Attention[attention]/reshape_1", + "Wav2Vec2ForSequenceClassification/Wav2Vec2Model[wav2vec2]/Wav2Vec2Encoder[encoder]/ModuleList[layers]/Wav2Vec2EncoderLayer[8]/Wav2Vec2Attention[attention]/reshape_2", "Wav2Vec2ForSequenceClassification/Wav2Vec2Model[wav2vec2]/Wav2Vec2Encoder[encoder]/ModuleList[layers]/Wav2Vec2EncoderLayer[8]/Wav2Vec2Attention[attention]/softmax_0", "Wav2Vec2ForSequenceClassification/Wav2Vec2Model[wav2vec2]/Wav2Vec2Encoder[encoder]/ModuleList[layers]/Wav2Vec2EncoderLayer[8]/Wav2Vec2Attention[attention]/transpose_0", "Wav2Vec2ForSequenceClassification/Wav2Vec2Model[wav2vec2]/Wav2Vec2Encoder[encoder]/ModuleList[layers]/Wav2Vec2EncoderLayer[8]/Wav2Vec2Attention[attention]/transpose_1", @@ -502,8 +504,6 @@ "Wav2Vec2ForSequenceClassification/Wav2Vec2Model[wav2vec2]/Wav2Vec2Encoder[encoder]/ModuleList[layers]/Wav2Vec2EncoderLayer[8]/Wav2Vec2Attention[attention]/view_2", "Wav2Vec2ForSequenceClassification/Wav2Vec2Model[wav2vec2]/Wav2Vec2Encoder[encoder]/ModuleList[layers]/Wav2Vec2EncoderLayer[8]/Wav2Vec2Attention[attention]/view_3", "Wav2Vec2ForSequenceClassification/Wav2Vec2Model[wav2vec2]/Wav2Vec2Encoder[encoder]/ModuleList[layers]/Wav2Vec2EncoderLayer[8]/Wav2Vec2Attention[attention]/view_4", - "Wav2Vec2ForSequenceClassification/Wav2Vec2Model[wav2vec2]/Wav2Vec2Encoder[encoder]/ModuleList[layers]/Wav2Vec2EncoderLayer[8]/Wav2Vec2Attention[attention]/view_5", - "Wav2Vec2ForSequenceClassification/Wav2Vec2Model[wav2vec2]/Wav2Vec2Encoder[encoder]/ModuleList[layers]/Wav2Vec2EncoderLayer[8]/Wav2Vec2Attention[attention]/view_6", "Wav2Vec2ForSequenceClassification/Wav2Vec2Model[wav2vec2]/Wav2Vec2Encoder[encoder]/ModuleList[layers]/Wav2Vec2EncoderLayer[8]/__add___0" ], "ordinal_ids": [ @@ -550,6 +550,8 @@ "Wav2Vec2ForSequenceClassification/Wav2Vec2Model[wav2vec2]/Wav2Vec2Encoder[encoder]/ModuleList[layers]/Wav2Vec2EncoderLayer[9]/Wav2Vec2Attention[attention]/contiguous_2", "Wav2Vec2ForSequenceClassification/Wav2Vec2Model[wav2vec2]/Wav2Vec2Encoder[encoder]/ModuleList[layers]/Wav2Vec2EncoderLayer[9]/Wav2Vec2Attention[attention]/dropout_0", "Wav2Vec2ForSequenceClassification/Wav2Vec2Model[wav2vec2]/Wav2Vec2Encoder[encoder]/ModuleList[layers]/Wav2Vec2EncoderLayer[9]/Wav2Vec2Attention[attention]/reshape_0", + "Wav2Vec2ForSequenceClassification/Wav2Vec2Model[wav2vec2]/Wav2Vec2Encoder[encoder]/ModuleList[layers]/Wav2Vec2EncoderLayer[9]/Wav2Vec2Attention[attention]/reshape_1", + 
"Wav2Vec2ForSequenceClassification/Wav2Vec2Model[wav2vec2]/Wav2Vec2Encoder[encoder]/ModuleList[layers]/Wav2Vec2EncoderLayer[9]/Wav2Vec2Attention[attention]/reshape_2", "Wav2Vec2ForSequenceClassification/Wav2Vec2Model[wav2vec2]/Wav2Vec2Encoder[encoder]/ModuleList[layers]/Wav2Vec2EncoderLayer[9]/Wav2Vec2Attention[attention]/softmax_0", "Wav2Vec2ForSequenceClassification/Wav2Vec2Model[wav2vec2]/Wav2Vec2Encoder[encoder]/ModuleList[layers]/Wav2Vec2EncoderLayer[9]/Wav2Vec2Attention[attention]/transpose_0", "Wav2Vec2ForSequenceClassification/Wav2Vec2Model[wav2vec2]/Wav2Vec2Encoder[encoder]/ModuleList[layers]/Wav2Vec2EncoderLayer[9]/Wav2Vec2Attention[attention]/transpose_1", @@ -561,8 +563,6 @@ "Wav2Vec2ForSequenceClassification/Wav2Vec2Model[wav2vec2]/Wav2Vec2Encoder[encoder]/ModuleList[layers]/Wav2Vec2EncoderLayer[9]/Wav2Vec2Attention[attention]/view_2", "Wav2Vec2ForSequenceClassification/Wav2Vec2Model[wav2vec2]/Wav2Vec2Encoder[encoder]/ModuleList[layers]/Wav2Vec2EncoderLayer[9]/Wav2Vec2Attention[attention]/view_3", "Wav2Vec2ForSequenceClassification/Wav2Vec2Model[wav2vec2]/Wav2Vec2Encoder[encoder]/ModuleList[layers]/Wav2Vec2EncoderLayer[9]/Wav2Vec2Attention[attention]/view_4", - "Wav2Vec2ForSequenceClassification/Wav2Vec2Model[wav2vec2]/Wav2Vec2Encoder[encoder]/ModuleList[layers]/Wav2Vec2EncoderLayer[9]/Wav2Vec2Attention[attention]/view_5", - "Wav2Vec2ForSequenceClassification/Wav2Vec2Model[wav2vec2]/Wav2Vec2Encoder[encoder]/ModuleList[layers]/Wav2Vec2EncoderLayer[9]/Wav2Vec2Attention[attention]/view_6", "Wav2Vec2ForSequenceClassification/Wav2Vec2Model[wav2vec2]/Wav2Vec2Encoder[encoder]/ModuleList[layers]/Wav2Vec2EncoderLayer[9]/__add___0" ], "ordinal_ids": [ @@ -609,6 +609,8 @@ "Wav2Vec2ForSequenceClassification/Wav2Vec2Model[wav2vec2]/Wav2Vec2Encoder[encoder]/ModuleList[layers]/Wav2Vec2EncoderLayer[10]/Wav2Vec2Attention[attention]/contiguous_2", "Wav2Vec2ForSequenceClassification/Wav2Vec2Model[wav2vec2]/Wav2Vec2Encoder[encoder]/ModuleList[layers]/Wav2Vec2EncoderLayer[10]/Wav2Vec2Attention[attention]/dropout_0", "Wav2Vec2ForSequenceClassification/Wav2Vec2Model[wav2vec2]/Wav2Vec2Encoder[encoder]/ModuleList[layers]/Wav2Vec2EncoderLayer[10]/Wav2Vec2Attention[attention]/reshape_0", + "Wav2Vec2ForSequenceClassification/Wav2Vec2Model[wav2vec2]/Wav2Vec2Encoder[encoder]/ModuleList[layers]/Wav2Vec2EncoderLayer[10]/Wav2Vec2Attention[attention]/reshape_1", + "Wav2Vec2ForSequenceClassification/Wav2Vec2Model[wav2vec2]/Wav2Vec2Encoder[encoder]/ModuleList[layers]/Wav2Vec2EncoderLayer[10]/Wav2Vec2Attention[attention]/reshape_2", "Wav2Vec2ForSequenceClassification/Wav2Vec2Model[wav2vec2]/Wav2Vec2Encoder[encoder]/ModuleList[layers]/Wav2Vec2EncoderLayer[10]/Wav2Vec2Attention[attention]/softmax_0", "Wav2Vec2ForSequenceClassification/Wav2Vec2Model[wav2vec2]/Wav2Vec2Encoder[encoder]/ModuleList[layers]/Wav2Vec2EncoderLayer[10]/Wav2Vec2Attention[attention]/transpose_0", "Wav2Vec2ForSequenceClassification/Wav2Vec2Model[wav2vec2]/Wav2Vec2Encoder[encoder]/ModuleList[layers]/Wav2Vec2EncoderLayer[10]/Wav2Vec2Attention[attention]/transpose_1", @@ -620,8 +622,6 @@ "Wav2Vec2ForSequenceClassification/Wav2Vec2Model[wav2vec2]/Wav2Vec2Encoder[encoder]/ModuleList[layers]/Wav2Vec2EncoderLayer[10]/Wav2Vec2Attention[attention]/view_2", "Wav2Vec2ForSequenceClassification/Wav2Vec2Model[wav2vec2]/Wav2Vec2Encoder[encoder]/ModuleList[layers]/Wav2Vec2EncoderLayer[10]/Wav2Vec2Attention[attention]/view_3", 
"Wav2Vec2ForSequenceClassification/Wav2Vec2Model[wav2vec2]/Wav2Vec2Encoder[encoder]/ModuleList[layers]/Wav2Vec2EncoderLayer[10]/Wav2Vec2Attention[attention]/view_4", - "Wav2Vec2ForSequenceClassification/Wav2Vec2Model[wav2vec2]/Wav2Vec2Encoder[encoder]/ModuleList[layers]/Wav2Vec2EncoderLayer[10]/Wav2Vec2Attention[attention]/view_5", - "Wav2Vec2ForSequenceClassification/Wav2Vec2Model[wav2vec2]/Wav2Vec2Encoder[encoder]/ModuleList[layers]/Wav2Vec2EncoderLayer[10]/Wav2Vec2Attention[attention]/view_6", "Wav2Vec2ForSequenceClassification/Wav2Vec2Model[wav2vec2]/Wav2Vec2Encoder[encoder]/ModuleList[layers]/Wav2Vec2EncoderLayer[10]/__add___0" ], "ordinal_ids": [ @@ -668,6 +668,8 @@ "Wav2Vec2ForSequenceClassification/Wav2Vec2Model[wav2vec2]/Wav2Vec2Encoder[encoder]/ModuleList[layers]/Wav2Vec2EncoderLayer[11]/Wav2Vec2Attention[attention]/contiguous_2", "Wav2Vec2ForSequenceClassification/Wav2Vec2Model[wav2vec2]/Wav2Vec2Encoder[encoder]/ModuleList[layers]/Wav2Vec2EncoderLayer[11]/Wav2Vec2Attention[attention]/dropout_0", "Wav2Vec2ForSequenceClassification/Wav2Vec2Model[wav2vec2]/Wav2Vec2Encoder[encoder]/ModuleList[layers]/Wav2Vec2EncoderLayer[11]/Wav2Vec2Attention[attention]/reshape_0", + "Wav2Vec2ForSequenceClassification/Wav2Vec2Model[wav2vec2]/Wav2Vec2Encoder[encoder]/ModuleList[layers]/Wav2Vec2EncoderLayer[11]/Wav2Vec2Attention[attention]/reshape_1", + "Wav2Vec2ForSequenceClassification/Wav2Vec2Model[wav2vec2]/Wav2Vec2Encoder[encoder]/ModuleList[layers]/Wav2Vec2EncoderLayer[11]/Wav2Vec2Attention[attention]/reshape_2", "Wav2Vec2ForSequenceClassification/Wav2Vec2Model[wav2vec2]/Wav2Vec2Encoder[encoder]/ModuleList[layers]/Wav2Vec2EncoderLayer[11]/Wav2Vec2Attention[attention]/softmax_0", "Wav2Vec2ForSequenceClassification/Wav2Vec2Model[wav2vec2]/Wav2Vec2Encoder[encoder]/ModuleList[layers]/Wav2Vec2EncoderLayer[11]/Wav2Vec2Attention[attention]/transpose_0", "Wav2Vec2ForSequenceClassification/Wav2Vec2Model[wav2vec2]/Wav2Vec2Encoder[encoder]/ModuleList[layers]/Wav2Vec2EncoderLayer[11]/Wav2Vec2Attention[attention]/transpose_1", @@ -679,8 +681,6 @@ "Wav2Vec2ForSequenceClassification/Wav2Vec2Model[wav2vec2]/Wav2Vec2Encoder[encoder]/ModuleList[layers]/Wav2Vec2EncoderLayer[11]/Wav2Vec2Attention[attention]/view_2", "Wav2Vec2ForSequenceClassification/Wav2Vec2Model[wav2vec2]/Wav2Vec2Encoder[encoder]/ModuleList[layers]/Wav2Vec2EncoderLayer[11]/Wav2Vec2Attention[attention]/view_3", "Wav2Vec2ForSequenceClassification/Wav2Vec2Model[wav2vec2]/Wav2Vec2Encoder[encoder]/ModuleList[layers]/Wav2Vec2EncoderLayer[11]/Wav2Vec2Attention[attention]/view_4", - "Wav2Vec2ForSequenceClassification/Wav2Vec2Model[wav2vec2]/Wav2Vec2Encoder[encoder]/ModuleList[layers]/Wav2Vec2EncoderLayer[11]/Wav2Vec2Attention[attention]/view_5", - "Wav2Vec2ForSequenceClassification/Wav2Vec2Model[wav2vec2]/Wav2Vec2Encoder[encoder]/ModuleList[layers]/Wav2Vec2EncoderLayer[11]/Wav2Vec2Attention[attention]/view_6", "Wav2Vec2ForSequenceClassification/Wav2Vec2Model[wav2vec2]/Wav2Vec2Encoder[encoder]/ModuleList[layers]/Wav2Vec2EncoderLayer[11]/__add___0" ], "ordinal_ids": [ diff --git a/tests/torch/experimental/replace_custom_modules/test_replace_timm_custom_modules.py b/tests/torch/experimental/replace_custom_modules/test_replace_timm_custom_modules.py new file mode 100644 index 00000000000..f0a0132345c --- /dev/null +++ b/tests/torch/experimental/replace_custom_modules/test_replace_timm_custom_modules.py @@ -0,0 +1,96 @@ +# Copyright (c) 2023 Intel Corporation +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file 
except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import pytest +import timm +import torch +from timm.layers import Linear +from timm.layers.norm_act import BatchNormAct2d +from timm.layers.norm_act import GroupNormAct +from timm.layers.norm_act import LayerNormAct +from torch import nn + +from nncf.experimental.torch.replace_custom_modules.timm_custom_modules import convert_timm_custom_modules +from nncf.experimental.torch.replace_custom_modules.timm_custom_modules import ( + replace_timm_custom_modules_with_torch_native, +) + + +def _count_custom_modules(model) -> int: + """ + Get the number of custom timm modules in the model. + :param model: The target model. + :return: Number of custom modules. + """ + custom_types = [ + Linear, + BatchNormAct2d, + GroupNormAct, + LayerNormAct, + ] + return len([m for _, m in model.named_modules() if type(m) in custom_types]) + + +TEST_CUSTOM_MODULES = [ + Linear( + in_features=2, + out_features=2, + ), + BatchNormAct2d( + num_features=2, + act_layer=nn.ReLU, + ), + GroupNormAct( + num_channels=2, + num_groups=2, + act_layer=nn.ReLU, + ), + LayerNormAct( + normalization_shape=(2, 2), + act_layer=nn.ReLU, + ), +] + + +@pytest.mark.parametrize("custom_module", TEST_CUSTOM_MODULES, ids=[m.__class__.__name__ for m in TEST_CUSTOM_MODULES]) +@pytest.mark.skipif(timm is None, reason="timm package is not installed") +def test_replace_custom_timm_module(custom_module): + """ + Test that the replaced native module produces the same output as the custom timm module. + """ + native_module = convert_timm_custom_modules(custom_module) + input_data = torch.rand(1, 2, 2, 2) + out_custom = custom_module(input_data) + out_native = native_module(input_data) + + assert custom_module.__class__ is not native_module.__class__ + assert torch.equal(out_custom, out_native) + + +def test_replace_custom_modules_in_timm_model(): + """ + Test that all custom modules in a timm model are replaced by replace_timm_custom_modules_with_torch_native. + """ + timm_model = timm.create_model( + "mobilenetv3_small_050", num_classes=1000, in_chans=3, pretrained=True, checkpoint_path="" + ) + input_data = torch.rand(1, 3, 224, 224) + out_timm = timm_model(input_data) + + native_model = replace_timm_custom_modules_with_torch_native(timm_model) + out_native = native_model(input_data) + assert torch.equal(out_timm, out_native) + + num_custom_modules_in_timm = _count_custom_modules(timm_model) + num_custom_modules_in_native = _count_custom_modules(native_model) + + assert num_custom_modules_in_native == 0 + assert num_custom_modules_in_timm > 0 diff --git a/tests/torch/experimental/search_building_blocks/test_search_building_blocks.py b/tests/torch/experimental/search_building_blocks/test_search_building_blocks.py index 8006b01abde..49886324452 100644 --- a/tests/torch/experimental/search_building_blocks/test_search_building_blocks.py +++ b/tests/torch/experimental/search_building_blocks/test_search_building_blocks.py @@ -1,15 +1,14 @@ -""" - Copyright (c) 2021-2023 Intel Corporation - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License.
- You may obtain a copy of the License at - http://www.apache.org/licenses/LICENSE-2.0 - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. -""" +# Copyright (c) 2021-2023 Intel Corporation +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + import json import os from functools import partial diff --git a/tests/torch/experimental/search_building_blocks/test_transformer_blocks.py b/tests/torch/experimental/search_building_blocks/test_transformer_blocks.py index b40a676ff3a..728f2d95c44 100644 --- a/tests/torch/experimental/search_building_blocks/test_transformer_blocks.py +++ b/tests/torch/experimental/search_building_blocks/test_transformer_blocks.py @@ -1,15 +1,14 @@ -""" - Copyright (c) 2021-2023 Intel Corporation - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - http://www.apache.org/licenses/LICENSE-2.0 - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. -""" +# Copyright (c) 2021-2023 Intel Corporation +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + import json import os from functools import partial diff --git a/tests/torch/extensions_build_checks.py b/tests/torch/extensions_build_checks.py index 302dc3bbeec..d76dc4f1260 100644 --- a/tests/torch/extensions_build_checks.py +++ b/tests/torch/extensions_build_checks.py @@ -1,3 +1,14 @@ +# Copyright (c) 2023 Intel Corporation +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ import os import sys diff --git a/tests/torch/helpers.py b/tests/torch/helpers.py index 29f1a5d988b..e89a524bfd4 100644 --- a/tests/torch/helpers.py +++ b/tests/torch/helpers.py @@ -129,14 +129,14 @@ def create_transpose_conv(in_channels, out_channels, kernel_size, weight_init, b class BasicConvTestModel(nn.Module): INPUT_SIZE = [1, 1, 4, 4] - def __init__(self, in_channels=1, out_channels=2, kernel_size=2, weight_init=-1, bias_init=-2): + def __init__(self, in_channels=1, out_channels=2, kernel_size=2, weight_init=-1, bias_init=-2, padding=0): super().__init__() self.in_channels = in_channels self.out_channels = out_channels self.kernel_size = kernel_size self.weight_init = weight_init self.bias_init = bias_init - self.conv = create_conv(in_channels, out_channels, kernel_size, weight_init, bias_init) + self.conv = create_conv(in_channels, out_channels, kernel_size, weight_init, bias_init, padding) self.wq_scale_shape_per_channel = (out_channels, 1, 1, 1) self.aq_scale_shape_per_channel = (1, in_channels, 1, 1) diff --git a/tests/torch/modules/seq2seq/attention.py b/tests/torch/modules/seq2seq/attention.py index 957638a8de4..ec9d0085e33 100644 --- a/tests/torch/modules/seq2seq/attention.py +++ b/tests/torch/modules/seq2seq/attention.py @@ -1,3 +1,14 @@ +# Copyright (c) 2023 Intel Corporation +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + import math import torch diff --git a/tests/torch/modules/seq2seq/decoder.py b/tests/torch/modules/seq2seq/decoder.py index c952c1f2ce3..f54274a2ebf 100644 --- a/tests/torch/modules/seq2seq/decoder.py +++ b/tests/torch/modules/seq2seq/decoder.py @@ -1,3 +1,14 @@ +# Copyright (c) 2023 Intel Corporation +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + import itertools import torch diff --git a/tests/torch/modules/seq2seq/encoder.py b/tests/torch/modules/seq2seq/encoder.py index e317763fe69..690609796cc 100644 --- a/tests/torch/modules/seq2seq/encoder.py +++ b/tests/torch/modules/seq2seq/encoder.py @@ -1,3 +1,14 @@ +# Copyright (c) 2023 Intel Corporation +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + from torch import nn from torch.nn.utils.rnn import pack_padded_sequence from torch.nn.utils.rnn import pad_packed_sequence diff --git a/tests/torch/modules/seq2seq/gnmt.py b/tests/torch/modules/seq2seq/gnmt.py index f454bbbb801..9de1ee5691c 100644 --- a/tests/torch/modules/seq2seq/gnmt.py +++ b/tests/torch/modules/seq2seq/gnmt.py @@ -1,3 +1,14 @@ +# Copyright (c) 2023 Intel Corporation +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + from torch import nn from tests.torch.modules.seq2seq.decoder import ResidualRecurrentDecoder diff --git a/tests/torch/modules/seq2seq/seq2seq_base.py b/tests/torch/modules/seq2seq/seq2seq_base.py index 188382cb64c..ebe3e13913b 100644 --- a/tests/torch/modules/seq2seq/seq2seq_base.py +++ b/tests/torch/modules/seq2seq/seq2seq_base.py @@ -1,3 +1,14 @@ +# Copyright (c) 2023 Intel Corporation +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ from torch import nn from torch.nn.functional import log_softmax diff --git a/tests/torch/modules/test_rnn.py b/tests/torch/modules/test_rnn.py index f783c83bc61..a3e6e2016db 100644 --- a/tests/torch/modules/test_rnn.py +++ b/tests/torch/modules/test_rnn.py @@ -195,7 +195,7 @@ def test_forward_lstm_cell(self, sizes, _seed): ref_result = ref_rnn(ref_data.x[i], (ref_data.h0[0], ref_data.c0[0])) test_result = test_rnn(test_data.x[i], (test_data.h0[0], test_data.c0[0])) for ref, test in list(zip(ref_result, test_result)): - torch.testing.assert_allclose(test, ref) + torch.testing.assert_close(test, ref) def test_backward_lstm_cell(self, sizes, _seed): p = sizes @@ -218,7 +218,7 @@ def test_backward_lstm_cell(self, sizes, _seed): test_grads = get_grads([ref_data.h0[0], ref_data.c0[0]]) test_grads += get_grads([test_rnn.weight_ih, test_rnn.weight_hh, test_rnn.bias_ih, test_rnn.bias_hh]) for ref, test in list(zip(test_grads, ref_grads)): - torch.testing.assert_allclose(test, ref) + torch.testing.assert_close(test, ref) def test_export_lstm_cell(tmp_path): @@ -318,16 +318,16 @@ def forward(self, *input_): ref_output, (ref_hn, ref_cn) = ref_rnn(ref_data.x, ref_hidden) test_output, (test_hn, test_cn) = test_rnn(test_data.x, test_hidden) - torch.testing.assert_allclose(test_hn[0], ref_hn[0], rtol=1e-3, atol=1e-4) - torch.testing.assert_allclose(test_cn[0], ref_cn[0], rtol=1e-3, atol=1e-4) + torch.testing.assert_close(test_hn[0], ref_hn[0], rtol=1e-3, atol=1e-4) + torch.testing.assert_close(test_cn[0], ref_cn[0], rtol=1e-3, atol=1e-4) if variable_length: - torch.testing.assert_allclose(test_output.batch_sizes, ref_output.batch_sizes) - torch.testing.assert_allclose(test_output.data, ref_output.data, rtol=1e-2, atol=1e-3) + torch.testing.assert_close(test_output.batch_sizes, ref_output.batch_sizes) + torch.testing.assert_close(test_output.data, ref_output.data, rtol=1e-2, atol=1e-3) if not sorted_: - torch.testing.assert_allclose(test_output.sorted_indices, ref_output.sorted_indices) - torch.testing.assert_allclose(test_output.unsorted_indices, ref_output.unsorted_indices) + torch.testing.assert_close(test_output.sorted_indices, ref_output.sorted_indices) + torch.testing.assert_close(test_output.unsorted_indices, ref_output.unsorted_indices) else: - torch.testing.assert_allclose(test_output, ref_output, rtol=9e-2, atol=15e-4) + torch.testing.assert_close(test_output, ref_output, rtol=9e-2, atol=15e-4) def test_backward_lstm( self, @@ -386,7 +386,7 @@ def test_backward_lstm( ref_grads += get_grads([ref_data.h0[0], ref_data.c0[0]]) test_grads += get_grads([test_hidden[0][0], test_hidden[1][0]]) for ref, test in list(zip(test_grads, ref_grads)): - torch.testing.assert_allclose(test, ref, rtol=1e-1, atol=1e-1) + torch.testing.assert_close(test, ref, rtol=1e-1, atol=1e-1) @classmethod def flatten_nested_lists(cls, nested_list): @@ -528,6 +528,7 @@ def hook(model, input_, counter): for counter in inter_layer_reset_point_post_aq_counters.values(): assert counter.count == 1 + @pytest.mark.skip(reason="Sporadic failures") def test_number_of_calling_fq_for_gnmt(self): if torch.cuda.is_available(): torch.cuda.set_device(0) @@ -606,33 +607,40 @@ def hook(model, input_, counter): dummy_forward_fn(model) assert ( - model.nncf.get_graph().get_nodes_count() == 373 + model.nncf.get_graph().get_nodes_count() == 370 ) # NB: may always fail in debug due to superfluous 'cat' nodes - assert len(counters) == 142 - + assert len(counters) == 136 + ref_call_counts = { + "cell": sequence_size, + "LSTMCellForwardNNCF": 
sequence_size, + # embedding module is shared between the decoder and encoder, + # associated weight quantizer will be called twice + "embedding": 2, + # unified scales for 4 FQ + "NNCF_RNN[0]/StackedRNN[rnn_impl]/StackedRNNResetPoint/cat_0|OUTPUT": 4, + } for name, counter in counters.items(): - if "cell" in name or "LSTMCellForwardNNCF" in name: - assert counter.count == sequence_size, name - elif "embedding" in name: - # embedding module is shared between the decoder and - # encoder, associated weight quantizer will be called - # twice - assert counter.count == 2, name - else: - assert counter.count == 1, name + print(name, counter.count) + for ref_key, ref_count in ref_call_counts.items(): + if ref_key in name: + assert counter.count == ref_count, name + break new_seq_len = int(sequence_size / 2) dummy_forward_fn(model, new_seq_len) - # NB: may always fail in debug due to superfluous 'cat' nodes - assert model.nncf.get_graph().get_nodes_count() == 373 - assert len(counters) == 142 + + ref_call_counts = { + "cell": sequence_size + new_seq_len, + "LSTMCellForwardNNCF": sequence_size + new_seq_len, + "embedding": 4, + "NNCF_RNN[0]/StackedRNN[rnn_impl]/StackedRNNResetPoint/cat_0|OUTPUT": 8, + } + assert model.nncf.get_graph().get_nodes_count() == 370 + assert len(counters) == 136 for name, counter in counters.items(): - if "cell" in name or "LSTMCellForwardNNCF" in name: - assert counter.count == sequence_size + new_seq_len, name - elif "embedding" in name: - # same as above - assert counter.count == 4, name - else: - assert counter.count == 2, name + for ref_key, ref_count in ref_call_counts.items(): + if ref_key in name: + assert counter.count == ref_count, name + break def test_number_of_nodes_for_module_in_loop(self): num_iter = 5 diff --git a/tests/torch/nas/creators.py b/tests/torch/nas/creators.py index 1f3f80cf439..42dd0f363cf 100644 --- a/tests/torch/nas/creators.py +++ b/tests/torch/nas/creators.py @@ -29,8 +29,8 @@ from nncf.torch.dynamic_graph.graph_tracer import create_input_infos from nncf.torch.graph.transformations.layout import PTTransformationLayout from nncf.torch.model_creation import create_nncf_network +from nncf.torch.model_transformer import PTModelTransformer from nncf.torch.nncf_network import NNCFNetwork -from nncf.torch.nncf_network import PTModelTransformer from tests.torch import test_models from tests.torch.helpers import BasicConvTestModel from tests.torch.helpers import get_empty_config @@ -133,9 +133,11 @@ def create_supernet( return ctrl.multi_elasticity_handler, model -def create_single_conv_kernel_supernet(kernel_size=5, out_channels=1) -> Tuple[ElasticKernelHandler, NNCFNetwork]: +def create_single_conv_kernel_supernet( + kernel_size=5, out_channels=1, padding=2 +) -> Tuple[ElasticKernelHandler, NNCFNetwork]: params = {"available_elasticity_dims": [ElasticityDim.KERNEL.value]} - model_creator = partial(BasicConvTestModel, 1, out_channels=out_channels, kernel_size=kernel_size) + model_creator = partial(BasicConvTestModel, 1, out_channels=out_channels, kernel_size=kernel_size, padding=padding) input_sample_sizes = [1, 1, kernel_size, kernel_size] multi_elasticity_handler, supernet = create_supernet(model_creator, input_sample_sizes, params) move_model_to_cuda_if_available(supernet) diff --git a/tests/torch/nas/models/synthetic.py b/tests/torch/nas/models/synthetic.py index 6744d92b03a..c1c03e47ec5 100644 --- a/tests/torch/nas/models/synthetic.py +++ b/tests/torch/nas/models/synthetic.py @@ -53,12 +53,11 @@ class ThreeConvModel(nn.Module): def 
__init__(self): super().__init__() - self.conv1 = create_conv(1, 3, 5, bias=False) + self.conv1 = create_conv(1, 3, 5, bias=False, padding=2) self.conv_to_skip = create_conv(3, 3, 1, bias=False) self.last_conv = create_conv(3, 1, 1) - self.mode = ThreeConvModelMode.ORIGINAL + self.mode = ThreeConvModelMode.SUPERNET self._forward_fn_per_mode = { - ThreeConvModelMode.ORIGINAL: self.original_forward, ThreeConvModelMode.SUPERNET: self.supernet_forward, ThreeConvModelMode.WIDTH_STAGE: self.forward_min_subnet_on_width_stage, ThreeConvModelMode.KERNEL_STAGE: self.forward_min_subnet_on_kernel_stage, @@ -77,14 +76,8 @@ def assert_weights_equal(self, model: "ThreeConvModel"): def assert_transition_matrix_equals(self, matrix_to_cmp: Tensor): assert torch.equal(self._transition_matrix, matrix_to_cmp) - def original_forward(self, x): - o1 = self.conv1(x) - o2 = self.conv_to_skip(o1) - o3 = o1 + o2 - return self.last_conv(o3) - def supernet_forward(self, x): - o1 = do_conv2d(self.conv1, x, padding=2) + o1 = self.conv1(x) o2 = self.conv_to_skip(o1) o3 = o1 + o2 return self.last_conv(o3) diff --git a/tests/torch/nas/test_elastic_depth.py b/tests/torch/nas/test_elastic_depth.py index a9a28921ee9..bf506f05b5b 100644 --- a/tests/torch/nas/test_elastic_depth.py +++ b/tests/torch/nas/test_elastic_depth.py @@ -15,7 +15,7 @@ import onnxruntime as rt import pytest import torch -from pkg_resources import parse_version +from packaging import version from torch import nn from nncf.experimental.torch.nas.bootstrapNAS.elasticity.elasticity_dim import ElasticityDim @@ -63,10 +63,10 @@ def __init__(self, depth=3): super().__init__() self._depth = depth self._skipped_layers = [] - self.conv1 = create_conv(1, 3, 3, weight_init=1, bias_init=1) + self.conv1 = create_conv(1, 3, 3, weight_init=1, bias_init=1, padding=1) self.branch_with_blocks = nn.Sequential() for idx in range(depth): - conv = create_conv(3, 3, 5, weight_init=idx + 1, bias_init=idx + 1) + conv = create_conv(3, 3, 5, weight_init=idx + 1, bias_init=idx + 1, padding=2) self.branch_with_blocks.add_module("conv{}".format(idx), conv) self.last_conv = create_conv(3, 1, 1) @@ -238,7 +238,7 @@ def test_can_export_model_with_one_skipped_block_resnet18(tmp_path): num_not_skipped_nodes = len(onnx_resnet18_without_one_block.graph.node) ref_num_nodes = 65 ref_not_skipped_nodes = 63 - if parse_version(torch.__version__) < parse_version("1.12"): + if version.parse(torch.__version__) < version.parse("1.12"): # different ONNX format for older pytorch version - no Identity nodes ref_num_nodes = 49 ref_not_skipped_nodes = 48 diff --git a/tests/torch/nas/test_elastic_kernel.py b/tests/torch/nas/test_elastic_kernel.py index db8b5b49644..c015104cac7 100644 --- a/tests/torch/nas/test_elastic_kernel.py +++ b/tests/torch/nas/test_elastic_kernel.py @@ -76,6 +76,22 @@ def test_elastic_kernel_with_intermediate_value(): assert torch.equal(actual_output, ref_output) +def test_elastic_kernel_output_shape(): + kernel_handler, supernet = create_single_conv_kernel_supernet(kernel_size=9, padding=2) + + device = next(iter(supernet.parameters())).device + input_ = torch.ones([1, 1, 9, 9]).to(device) + + original_model = supernet.nncf.get_clean_shallow_copy() + ref_output = original_model(input_) + + kernel_size_list = [9, 7, 5] + for kernel_size in kernel_size_list: + kernel_handler.activate_subnet_for_config([kernel_size]) + actual_output = supernet(input_) + assert actual_output.shape == ref_output.shape + + def test_elastic_kernel_with_custom_transition_matrix(): kernel_handler, supernet = 
create_single_conv_kernel_supernet() device = next(iter(supernet.parameters())).device diff --git a/tests/torch/nas/test_flops.py b/tests/torch/nas/test_flops.py index c0c9656277a..4fa2eb8e454 100644 --- a/tests/torch/nas/test_flops.py +++ b/tests/torch/nas/test_flops.py @@ -94,7 +94,7 @@ supernet=ModelStats(175_952_896, 2_202_560), kernel_stage=ModelStats(175_952_896, 2_202_560), depth_stage=ModelStats(151_966_720, 2_180_336), - width_stage=ModelStats(15_401_984, 88_144), + width_stage=ModelStats(14_099_072, 35_728), ), blocks_to_skip=MOBILENET_V2_BLOCKS_TO_SKIP, ), diff --git a/tests/torch/nas/test_ps_controller.py b/tests/torch/nas/test_ps_controller.py index 0d4a98a1777..c2f6cf0756f 100644 --- a/tests/torch/nas/test_ps_controller.py +++ b/tests/torch/nas/test_ps_controller.py @@ -8,17 +8,24 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. +from copy import deepcopy +from functools import partial +from functools import reduce from typing import Any, Dict, List, NamedTuple import pytest +import torch +from torch.optim import SGD from nncf import NNCFConfig from nncf.config.structures import BNAdaptationInitArgs from nncf.experimental.torch.nas.bootstrapNAS import EpochBasedTrainingAlgorithm from nncf.torch.model_creation import create_nncf_network +from nncf.torch.utils import get_model_device from tests.torch.helpers import create_ones_mock_dataloader from tests.torch.nas.helpers import move_model_to_cuda_if_available from tests.torch.nas.models.synthetic import ThreeConvModel +from tests.torch.nas.models.synthetic import ThreeConvModelMode from tests.torch.nas.test_scheduler import fixture_schedule_params # pylint: disable=unused-import @@ -40,7 +47,7 @@ def __str__(self): return name -def prepare_test_model(ps_ctrl_desc, bn_adapt_section_is_called): +def prepare_test_model(ps_ctrl_desc, bn_adapt_section_is_called, knowledge_distillation_loss_is_called: bool = False): config = { "input_info": {"sample_size": ps_ctrl_desc.input_sizes}, "bootstrapNAS": { @@ -51,6 +58,7 @@ def prepare_test_model(ps_ctrl_desc, bn_adapt_section_is_called): } nncf_config = NNCFConfig.from_dict(config) update_train_bn_adapt_section(nncf_config, bn_adapt_section_is_called) + update_train_kd_loss_section(nncf_config, knowledge_distillation_loss_is_called) bn_adapt_args = BNAdaptationInitArgs(data_loader=create_ones_mock_dataloader(nncf_config)) nncf_config.register_extra_structs([bn_adapt_args]) model = ps_ctrl_desc.model_creator() @@ -63,6 +71,41 @@ def update_train_bn_adapt_section(nncf_config, bn_adapt_section_is_called): nncf_config["bootstrapNAS"]["training"]["batchnorm_adaptation"]["num_bn_adaptation_samples"] = 0 +def update_train_kd_loss_section(nncf_config, knowledge_distillation_loss_is_called): + if knowledge_distillation_loss_is_called: + nncf_config["bootstrapNAS"]["training"].update( + {"compression": [{"algorithm": "knowledge_distillation", "type": "mse"}]} + ) + + +def cal_loss_actual(output, input_, training_ctrl): + return training_ctrl.loss() + + +def calc_loss_reference(output, input_, kd_model): + mse = torch.nn.MSELoss().to(get_model_device(kd_model)) + kd_output = kd_model(input_) + return mse(output, kd_output) + + +def run_train(training_ctrl, model, mock_dataloader, calc_loss_fn): + optimizer = SGD(model.parameters(), lr=1e-02, weight_decay=1e-02) + training_ctrl.set_training_lr_scheduler_args(optimizer, len(mock_dataloader)) + 
training_ctrl.scheduler.epoch_step() + training_ctrl.multi_elasticity_handler.activate_minimum_subnet() + model.train() + output_storage = [] + for _, (input_, __) in enumerate(mock_dataloader): + input_ = input_.to(get_model_device(model)) + output = model(input_) + output_storage.append(output) + loss = calc_loss_fn(output, input_) + optimizer.zero_grad() + loss.backward() + optimizer.step() + return output_storage + + # pylint: disable=protected-access class TestProgressiveTrainingController: @pytest.mark.parametrize( @@ -88,3 +131,39 @@ def test_bn_adapt(self, mocker, bn_adapt_section_is_called, schedule_params): bn_adapt_run_patch.assert_called() else: bn_adapt_run_patch.assert_not_called() + + def test_knowledge_distillation_training_process(self): + test_desc = PSControllerTestDesc( + model_creator=ThreeConvModel, + algo_params={"width": {"min_width": 1, "width_step": 1}}, + input_sizes=ThreeConvModel.INPUT_SIZE, + ) + model, _, nncf_config = prepare_test_model(test_desc, False, True) + model = create_nncf_network(model, nncf_config) + + torch.manual_seed(2) + number_of_iters = 2 + batch_size = 1 + + mock_dataloader = create_ones_mock_dataloader( + nncf_config, num_samples=batch_size * number_of_iters, batch_size=batch_size + ) + model.mode = ThreeConvModelMode.SUPERNET + training_algorithm = EpochBasedTrainingAlgorithm.from_config(deepcopy(model), nncf_config) + actual_outputs = run_train( + training_algorithm._training_ctrl, + training_algorithm._model, + mock_dataloader, + partial(cal_loss_actual, training_ctrl=training_algorithm._training_ctrl), + ) + training_algorithm = EpochBasedTrainingAlgorithm.from_config(deepcopy(model), nncf_config) + reference_outputs = run_train( + training_algorithm._training_ctrl, + training_algorithm._model, + mock_dataloader, + partial(calc_loss_reference, kd_model=deepcopy(model)), + ) + assert reduce(lambda a, b: a and torch.allclose(b[0], b[1]), zip(actual_outputs, reference_outputs), True), ( + "Outputs of model with actual KD implementation doesn't match outputs from model with reference " + "Knowledge Distillation implementation" + ) diff --git a/tests/torch/nas/test_scheduler.py b/tests/torch/nas/test_scheduler.py index a97005b8bc2..48c844e6299 100644 --- a/tests/torch/nas/test_scheduler.py +++ b/tests/torch/nas/test_scheduler.py @@ -30,6 +30,7 @@ from nncf.experimental.torch.nas.bootstrapNAS.training.scheduler import NASSchedulerParams from nncf.experimental.torch.nas.bootstrapNAS.training.stage_descriptor import DEFAULT_STAGE_LR_RATE from nncf.experimental.torch.nas.bootstrapNAS.training.stage_descriptor import StageDescriptor +from nncf.torch.algo_selector import ZeroCompressionLoss from nncf.torch.nncf_network import NNCFNetwork from tests.torch.helpers import MockModel @@ -138,6 +139,7 @@ def test_epoch_step(self, schedule_params, mocker): ProgressiveShrinkingBuilder.DEFAULT_PROGRESSIVITY, schedule_params, lr_schedule_config, + ZeroCompressionLoss(next(mock_model.parameters()).device), ) scheduler = training_algo.scheduler lr_scheduler = GlobalLRScheduler(mocker.stub(), mocker.stub(), base_lr=None, num_epochs=None) diff --git a/tests/torch/nas/test_search.py b/tests/torch/nas/test_search.py index 99e43147072..d35b6d71c52 100644 --- a/tests/torch/nas/test_search.py +++ b/tests/torch/nas/test_search.py @@ -116,7 +116,7 @@ def update_search_bn_adapt_section(nncf_config, bn_adapt_section_is_called): 'vgg11_k7': [1, 3, 7, 7, 15, 15, 15, 15, 2, 2, 2, 2, 2, 2, 2, 2, 1], 'unet': [1, 3, 7, 15, 31, 15, 7, 3, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0], - 'squeezenet1_0': [2, 0, 1, 1, 0, 1, 1, 0, 3, 3, 0, 3, 3, 0, 5, 5, 0, 5, 5, 1, 7, 7, 1, 7, 7, 2, 0, + 'squeezenet1_0': [2, 0, 1, 1, 0, 1, 1, 0, 3, 3, 0, 3, 3, 0, 5, 5, 0, 5, 5, 1, 7, 7, 1, 7, 7, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 15], 'resnext29_32x4d': [7, 15, 31, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 7], diff --git a/tests/torch/nas/test_search_space.py b/tests/torch/nas/test_search_space.py index 968b1212c95..99aa8262980 100644 --- a/tests/torch/nas/test_search_space.py +++ b/tests/torch/nas/test_search_space.py @@ -30,7 +30,7 @@ LIST_KERNEL_SS_DESCS = [ ElasticityDesc( ElasticityDim.KERNEL, - model_cls=partial(BasicConvTestModel, 1, 1, kernel_size), + model_cls=partial(BasicConvTestModel, 1, 1, kernel_size, padding=2), name=f"kernel_{kernel_size}_{search_space}", input_size=[1, 1, kernel_size, kernel_size], ref_search_space=[search_space], @@ -93,7 +93,7 @@ def __str__(self): ElasticityDim.DEPTH, model_cls=DepthBasicConvTestModel, params={"min_block_size": 1, "hw_fused_ops": False}, - ref_search_space=[[]], + ref_search_space=[[0], []], ), COMMON_DEPTH_SUPERNET_DESC, ElasticityDesc( diff --git a/tests/torch/nas/test_state.py b/tests/torch/nas/test_state.py index 40a1451f7a1..0141683efe4 100644 --- a/tests/torch/nas/test_state.py +++ b/tests/torch/nas/test_state.py @@ -89,7 +89,7 @@ def ref_kernel_output_fn(model, x): COMMON_KERNEL_DESC = ElasticityDesc( ElasticityDim.KERNEL, - model_cls=partial(BasicConvTestModel, 1, out_channels=1, kernel_size=5), + model_cls=partial(BasicConvTestModel, 1, out_channels=1, kernel_size=5, padding=2), params=BASIC_ELASTIC_KERNEL_PARAMS, ref_output_fn=ref_kernel_output_fn, ref_state={ diff --git a/tests/torch/pruning/experimental/test_nodes_grouping.py b/tests/torch/pruning/experimental/test_nodes_grouping.py index df53dbbf175..75e00c88894 100644 --- a/tests/torch/pruning/experimental/test_nodes_grouping.py +++ b/tests/torch/pruning/experimental/test_nodes_grouping.py @@ -35,8 +35,8 @@ from nncf.experimental.common.pruning.propagation_data import ProducerInfo from nncf.experimental.torch.pruning.operations import PT_EXPERIMENTAL_PRUNING_OPERATOR_METATYPES -# NNCF Torch should be imported before transformers in order to patch all operations before they added to some global vars, -# otherwise test may fail with some error (e.g. IndexError: list index out of range). +# NNCF Torch should be imported before transformers in order to patch all operations before they +# added to some global vars, otherwise test may fail with some error (e.g. IndexError: list index out of range). 
from transformers import AutoModelForAudioClassification from transformers import AutoModelForImageClassification from transformers import AutoModelForQuestionAnswering @@ -140,7 +140,7 @@ class GroupTestDesc: ref_groups: Optional[List[PruningGroup]] = None def __str__(self) -> str: - return self.model_desc.model_name + return self.model_desc.model_name # pylint: disable=no-member SYNTHETIC_DESCS = [ @@ -474,7 +474,7 @@ def test_groups(desc: GroupTestDesc, mocker, tmp_path): pruning_producing_types = ["linear"] get_graph_spy = mocker.spy(BlockHierarchy, "_get_graph_for_visualization") not_filtered_groups = get_pruning_groups( - nncf_network.get_graph(), PT_EXPERIMENTAL_PRUNING_OPERATOR_METATYPES, pruning_producing_types, tmp_path + nncf_network.nncf.get_graph(), PT_EXPERIMENTAL_PRUNING_OPERATOR_METATYPES, pruning_producing_types, tmp_path ) nx_graph = get_graph_spy.spy_return @@ -511,7 +511,7 @@ def test_all_groups_valid(desc: GroupTestDesc): nncf_network = create_nncf_network(model, config) pruning_producing_types = ["linear"] all_groups = get_pruning_groups( - nncf_network.get_graph(), PT_EXPERIMENTAL_PRUNING_OPERATOR_METATYPES, pruning_producing_types + nncf_network.nncf.get_graph(), PT_EXPERIMENTAL_PRUNING_OPERATOR_METATYPES, pruning_producing_types ) for group in all_groups: assert group.consumers diff --git a/tests/torch/pruning/filter_pruning/test_algo.py b/tests/torch/pruning/filter_pruning/test_algo.py index 70e47af4f09..6dd544f86ed 100644 --- a/tests/torch/pruning/filter_pruning/test_algo.py +++ b/tests/torch/pruning/filter_pruning/test_algo.py @@ -402,7 +402,7 @@ def test_valid_masks_for_bn_after_concat(prune_bn): ref_concat_masks = [[0] * 8 + [1] * 8 + [0] * 8 + [1] * 8, [1] * 8 + [0] * 16 + [1] * 8 + [0] * 8 + [1] * 8] graph = pruned_model.nncf.get_original_graph() for i, node in enumerate(graph.get_nodes_by_types(["cat"])): - assert np.allclose(node.data["output_mask"].tensor.numpy(), ref_concat_masks[i]) + assert np.allclose(node.attributes["output_mask"].tensor.numpy(), ref_concat_masks[i]) @pytest.mark.parametrize('model,ref_output_shapes', @@ -742,7 +742,7 @@ def test_flops_calculator(model_module, all_weights, pruning_flops_target, ref_f pruning_groups_next_nodes = shape_pruning_processor.get_next_nodes(graph, pruning_groups) # Check output_shapes are empty in graph for node in graph.get_all_nodes(): - assert node.data["output_shape"] is None + assert node.attributes["output_shape"] is None # Next nodes cluster check assert len(pruning_groups_next_nodes) == len(refs["next_nodes"]) @@ -849,7 +849,7 @@ def test_disconnected_graph(): for name, (shape, mask_sum) in nodes_output_mask_map.items(): node = graph.get_node_by_name(name) if mask_sum is None: - assert node.data["output_mask"] is None + assert node.attributes["output_mask"] is None else: - assert sum(node.data["output_mask"].tensor) == mask_sum + assert sum(node.attributes["output_mask"].tensor) == mask_sum assert collected_shapes[name] == shape diff --git a/tests/torch/pruning/filter_pruning/test_layers.py b/tests/torch/pruning/filter_pruning/test_layers.py index 1d859fa9c80..69a7bf5e211 100644 --- a/tests/torch/pruning/filter_pruning/test_layers.py +++ b/tests/torch/pruning/filter_pruning/test_layers.py @@ -8,11 +8,11 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
+ import pytest import torch from torch import nn -from nncf.torch.dynamic_graph.scope import Scope from nncf.torch.layers import NNCFConv2d from nncf.torch.module_operations import UpdateWeightAndBias from nncf.torch.pruning.filter_pruning.layers import FilterPruningMask diff --git a/tests/torch/pruning/helpers.py b/tests/torch/pruning/helpers.py index f9569d78fff..6361a48b2f7 100644 --- a/tests/torch/pruning/helpers.py +++ b/tests/torch/pruning/helpers.py @@ -342,6 +342,21 @@ def forward(self, x): return x +class PruningTestMeanMetatype(nn.Module): + def __init__(self, mean_dim): + super().__init__() + self.mean_dim = mean_dim + conv2_input_dim = 1 if mean_dim == 1 else 16 + self.conv1 = create_conv(1, 16, 2, 1, -2) + self.last_conv = create_conv(conv2_input_dim, 32, 1, 2, -2) + + def forward(self, x): + x = self.conv1(x) + x = torch.mean(x, self.mean_dim, keepdim=True) + x = self.last_conv(x) + return x + + class BigPruningTestModel(nn.Module): def __init__(self, dim=2): super().__init__() diff --git a/tests/torch/pruning/test_model_pruning_analysis.py b/tests/torch/pruning/test_model_pruning_analysis.py index eee95289812..dc113553a3f 100644 --- a/tests/torch/pruning/test_model_pruning_analysis.py +++ b/tests/torch/pruning/test_model_pruning_analysis.py @@ -8,6 +8,7 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. +# pylint: disable=too-many-lines from collections import Counter from functools import partial @@ -49,6 +50,7 @@ from tests.torch.pruning.helpers import MultipleSplitConcatModel from tests.torch.pruning.helpers import NASnetBlock from tests.torch.pruning.helpers import PruningTestBatchedLinear +from tests.torch.pruning.helpers import PruningTestMeanMetatype from tests.torch.pruning.helpers import PruningTestModelBroadcastedLinearWithConcat from tests.torch.pruning.helpers import PruningTestModelDiffChInPruningCluster from tests.torch.pruning.helpers import PruningTestModelEltwise @@ -382,26 +384,26 @@ def __str__(self): model=partial(MobilenetV3BlockSEReshape, mode='linear_mean'), name='MobilenetV3BlockSEReshape with linear mean', non_pruned_module_nodes= - ['MobilenetV3BlockSEReshape/NNCFConv2d[last_conv]/conv2d_0', - 'MobilenetV3BlockSEReshape/NNCFConv2d[first_conv]/conv2d_0', - 'MobilenetV3BlockSEReshape/InvertedResidual[inverted_residual]/Sequential[conv]/' - 'NNCFConv2d[4]/conv2d_0', + ['MobilenetV3BlockSEReshape/NNCFConv2d[last_conv]/conv2d_0'], + pruned_groups=[ + ['MobilenetV3BlockSEReshape/NNCFConv2d[first_conv]/conv2d_0', 'MobilenetV3BlockSEReshape/InvertedResidual[inverted_residual]/Sequential[conv]/' 'NNCFConv2d[0]/conv2d_0', 'MobilenetV3BlockSEReshape/InvertedResidual[inverted_residual]/Sequential[conv]/' - 'SELayerWithReshapeAndLinearAndMean[3]/Sequential[fc]/NNCFLinear[2]/linear_0'], - pruned_groups=[ + 'SELayerWithReshapeAndLinearAndMean[3]/Sequential[fc]/NNCFLinear[2]/linear_0', + 'MobilenetV3BlockSEReshape/InvertedResidual[inverted_residual]/Sequential[conv]/' + 'NNCFConv2d[4]/conv2d_0'], ['MobilenetV3BlockSEReshape/InvertedResidual[inverted_residual]/Sequential[conv]/' 'SELayerWithReshapeAndLinearAndMean[3]/Sequential[fc]/NNCFLinear[0]/linear_0'] ], - pruned_groups_by_node_id=[[6]], - can_prune_after_analysis={0: True, 1: False, 2: False, 3: False, 4: False, 5: False, 6: True, 7: True, - 8: False, 9: False, 10: False, 11: False, 12: False, 13: True, 14: False, 15: True, + pruned_groups_by_node_id=[[1, 2, 8, 12], [6]], + 
can_prune_after_analysis={0: True, 1: True, 2: True, 3: True, 4: True, 5: True, 6: True, 7: True, + 8: True, 9: True, 10: True, 11: True, 12: True, 13: True, 14: True, 15: True, 16: True}, - final_can_prune={1: PruningAnalysisDecision(False, [PruningAnalysisReason.CLOSING_CONV_MISSING]), + final_can_prune={1: PruningAnalysisDecision(True), 6: PruningAnalysisDecision(True), - 8: PruningAnalysisDecision(False, [PruningAnalysisReason.CLOSING_CONV_MISSING]), - 12: PruningAnalysisDecision(False, [PruningAnalysisReason.CLOSING_CONV_MISSING]), + 8: PruningAnalysisDecision(True), + 12: PruningAnalysisDecision(True), 15: PruningAnalysisDecision(False, [PruningAnalysisReason.LAST_CONV])}, prune_params=(True, True)), @@ -614,6 +616,29 @@ def __str__(self): 5: PruningAnalysisDecision(False, PruningAnalysisReason.CLOSING_CONV_MISSING), 7: PruningAnalysisDecision(False, PruningAnalysisReason.LAST_CONV)}, prune_params=(True, True)), + GroupPruningModulesTestStruct( + model=partial(PruningTestMeanMetatype, mean_dim=1), + name='PruningTestMeanMetatype with mean dimension 1', + non_pruned_module_nodes=['PruningTestMeanMetatype/NNCFConv2d[last_conv]/conv2d_0', + 'PruningTestMeanMetatype/NNCFConv2d[conv1]/conv2d_0'], + pruned_groups=[], + pruned_groups_by_node_id=[], + can_prune_after_analysis={0: True, 1: False, 2: False, 3: True}, + final_can_prune={1: PruningAnalysisDecision(False, [PruningAnalysisReason.CLOSING_CONV_MISSING]), + 3: PruningAnalysisDecision(False, [PruningAnalysisReason.LAST_CONV])}, + prune_params=(True, True) + ), + GroupPruningModulesTestStruct( + model=partial(PruningTestMeanMetatype, mean_dim=2), + name='PruningTestMeanMetatype with mean dimension 2', + non_pruned_module_nodes=['PruningTestMeanMetatype/NNCFConv2d[last_conv]/conv2d_0'], + pruned_groups=[['PruningTestMeanMetatype/NNCFConv2d[conv1]/conv2d_0']], + pruned_groups_by_node_id=[[1]], + can_prune_after_analysis={0: True, 1: True, 2: True, 3: True}, + final_can_prune={1: PruningAnalysisDecision(True), + 3: PruningAnalysisDecision(False, [PruningAnalysisReason.LAST_CONV])}, + prune_params=(True, True) + ), ] # fmt: skip @@ -713,7 +738,7 @@ def test_symbolic_mask_propagation(test_input_info_struct_): final_can_prune = algo.symbolic_mask_propagation(pruning_types, test_input_info_struct_.can_prune_after_analysis) # Check all output masks are deleted for node in graph.get_all_nodes(): - assert node.data["output_mask"] is None + assert node.attributes["output_mask"] is None # Check ref decisions ref_final_can_prune = test_input_info_struct_.final_can_prune @@ -785,8 +810,8 @@ def __init__(self, model: nn.Module, ref_can_prune: dict): ), ModelAnalyserTestStruct( model=partial(MobilenetV3BlockSEReshape, mode='linear_mean'), - ref_can_prune={0: True, 1: False, 2: False, 3: False, 4: False, 5: False, 6: True, 7: True, - 8: True, 9: True, 10: True, 11: False, 12: True, 13: True, 14: False, 15: True, + ref_can_prune={0: True, 1: True, 2: True, 3: True, 4: True, 5: True, 6: True, 7: True, + 8: True, 9: True, 10: True, 11: True, 12: True, 13: True, 14: True, 15: True, 16: True} ) ] # fmt: skip diff --git a/tests/torch/pruning/test_tensor_processor.py b/tests/torch/pruning/test_tensor_processor.py index e86db5f6c51..2b147a30e67 100644 --- a/tests/torch/pruning/test_tensor_processor.py +++ b/tests/torch/pruning/test_tensor_processor.py @@ -1,3 +1,14 @@ +# Copyright (c) 2023 Intel Corporation +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + import pytest import torch diff --git a/tests/torch/ptq/helpers.py b/tests/torch/ptq/helpers.py index b0b48d98a95..e019eb00f02 100644 --- a/tests/torch/ptq/helpers.py +++ b/tests/torch/ptq/helpers.py @@ -9,13 +9,12 @@ # See the License for the specific language governing permissions and # limitations under the License. -from typing import List +from typing import List, Optional import torch from nncf import NNCFConfig from nncf.common.graph.layer_attributes import ConvolutionLayerAttributes -from nncf.common.graph.layer_attributes import GroupNormLayerAttributes from nncf.torch.graph.graph import PTNNCFGraph from nncf.torch.graph.operator_metatypes import PTDepthwiseConv2dSubtype from nncf.torch.graph.operator_metatypes import PTModuleConv2dMetatype @@ -23,9 +22,9 @@ from nncf.torch.graph.operator_metatypes import PTSumMetatype from nncf.torch.model_creation import create_nncf_network from nncf.torch.tensor_statistics.statistics import PTMinMaxTensorStatistic -from tests.post_training.models import NNCFGraphToTest -from tests.post_training.models import NNCFGraphToTestDepthwiseConv -from tests.post_training.models import NNCFGraphToTestSumAggregation +from tests.post_training.test_templates.models import NNCFGraphToTest +from tests.post_training.test_templates.models import NNCFGraphToTestDepthwiseConv +from tests.post_training.test_templates.models import NNCFGraphToTestSumAggregation def get_single_conv_nncf_graph() -> NNCFGraphToTest: @@ -35,6 +34,7 @@ def get_single_conv_nncf_graph() -> NNCFGraphToTest: out_channels=4, kernel_size=(4, 4), stride=1, + dilations=1, groups=1, transpose=False, padding_values=[], @@ -43,11 +43,21 @@ def get_single_conv_nncf_graph() -> NNCFGraphToTest: def get_depthwise_conv_nncf_graph() -> NNCFGraphToTestDepthwiseConv: - conv_layer_attrs = GroupNormLayerAttributes(False, 3, 3) + conv_layer_attrs = ConvolutionLayerAttributes( + weight_requires_grad=False, + in_channels=3, + out_channels=3, + dilations=1, + kernel_size=(1, 1), + stride=(1, 1), + groups=3, + transpose=False, + padding_values=(1, 1), + ) return NNCFGraphToTestDepthwiseConv(PTDepthwiseConv2dSubtype, conv_layer_attrs) -def get_single_no_weigth_matmul_nncf_graph() -> NNCFGraphToTest: +def get_single_no_weight_matmul_nncf_graph() -> NNCFGraphToTest: return NNCFGraphToTest(PTModuleLinearMetatype, None, PTNNCFGraph) @@ -58,6 +68,7 @@ def get_sum_aggregation_nncf_graph() -> NNCFGraphToTestSumAggregation: out_channels=4, kernel_size=(4, 4), stride=1, + dilations=1, groups=1, transpose=False, padding_values=[], @@ -65,7 +76,8 @@ def get_sum_aggregation_nncf_graph() -> NNCFGraphToTestSumAggregation: return NNCFGraphToTestSumAggregation(PTModuleConv2dMetatype, PTSumMetatype, conv_layer_attrs, PTNNCFGraph) -def get_nncf_network(model: torch.nn.Module, input_shape: List[int] = [1, 3, 32, 32]): +def get_nncf_network(model: torch.nn.Module, input_shape: Optional[List[int]] = None): + input_shape = [1, 3, 32, 32] if input_shape is None else input_shape model.eval() nncf_config = NNCFConfig({"input_info": {"sample_size": input_shape.copy()}}) nncf_network = create_nncf_network( @@ -80,7 +92,7 @@ def 
mock_collect_statistics(mocker): "nncf.common.tensor_statistics.aggregator.StatisticsAggregator.collect_statistics", return_value=None ) min_, max_ = 0.0, 1.0 - min_, max_ = map(lambda x: torch.tensor(x), [min_, max_]) + min_, max_ = torch.tensor(min_), torch.tensor(max_) _ = mocker.patch( "nncf.common.tensor_statistics.collectors.TensorStatisticCollectorBase.get_statistics", return_value=PTMinMaxTensorStatistic(min_, max_), diff --git a/tests/torch/ptq/test_calculation_quantizer_params.py b/tests/torch/ptq/test_calculation_quantizer_params.py index d66642a0630..f98e5137d48 100644 --- a/tests/torch/ptq/test_calculation_quantizer_params.py +++ b/tests/torch/ptq/test_calculation_quantizer_params.py @@ -33,11 +33,12 @@ from nncf.torch.model_creation import create_nncf_network from nncf.torch.statistics.aggregator import PTStatisticsAggregator from nncf.torch.tensor_statistics.statistics import PTMinMaxTensorStatistic -from tests.post_training.test_calculate_quantizer_parameters import TemplateTestFQParams +from tests.post_training.test_templates.test_calculate_quantizer_parameters import TemplateTestFQParams from tests.torch.helpers import get_all_inputs_for_graph_node from tests.torch.helpers import get_nodes_by_type # pylint: disable=protected-access +# pylint: disable=too-many-function-args INPUT_SHAPE = (2, 3, 4, 5) @@ -225,8 +226,8 @@ def __init__(self): self.conv2 = nn.Conv2d(3, 1, 1) self.bn2 = nn.BatchNorm2d(1) with torch.no_grad(): - self.conv1.weight.copy_(torch.rand_like(self.conv1.weight)) - 0.5 - self.conv2.weight.copy_(torch.rand_like(self.conv2.weight)) - 0.5 + self.conv1.weight.copy_(torch.rand_like(self.conv1.weight) - 0.5) + self.conv2.weight.copy_(torch.rand_like(self.conv2.weight) - 0.5) def forward(self, x): # input_shape = [1, 3, 32, 32] @@ -283,8 +284,8 @@ def calculate_fq_params(model, input_data): _, relu, bn1, avg_pool = model(input_data) conv1_stats = calculate_statistics(input_data, QuantizationMode.SYMMETRIC, QuantizerGroup.ACTIVATIONS) bn1_stats = calculate_statistics(bn1, QuantizationMode.SYMMETRIC, QuantizerGroup.ACTIVATIONS) - avg_pool_stats = calculate_statistics(avg_pool, QuantizationMode.SYMMETRIC, QuantizerGroup.ACTIVATIONS) - conv2_stats = calculate_statistics(relu, QuantizationMode.SYMMETRIC, QuantizerGroup.ACTIVATIONS) + conv2_stats = calculate_statistics(avg_pool, QuantizationMode.SYMMETRIC, QuantizerGroup.ACTIVATIONS) + avg_pool_stats = calculate_statistics(relu, QuantizationMode.SYMMETRIC, QuantizerGroup.ACTIVATIONS) conv1_w = model.conv1.weight conv1_w_stats = calculate_statistics(conv1_w, QuantizationMode.SYMMETRIC, QuantizerGroup.WEIGHTS, True) @@ -292,9 +293,9 @@ def calculate_fq_params(model, input_data): conv2_w_stats = calculate_statistics(conv2_w, QuantizationMode.SYMMETRIC, QuantizerGroup.WEIGHTS) return { "/FakeQuantize": conv1_stats, - "/relu/FakeQuantize": bn1_stats, + "/bn1/FakeQuantize": bn1_stats, "/avg_pool/FakeQuantize": avg_pool_stats, - "/bn1/FakeQuantize": conv2_stats, + "/conv2/FakeQuantize": conv2_stats, "/conv1/pre_ops.0/op/FakeQuantize": conv1_w_stats, "/conv2/pre_ops.0/op/FakeQuantize": conv2_w_stats, } @@ -314,10 +315,12 @@ def test_quantizer_parameters_export(tmp_path: Path): nncf_config = NNCFConfig({"input_info": {"sample_size": [1, 3, 32, 32]}}) nncf_network = create_nncf_network(model, nncf_config) - statistic_points = min_max_algo.get_statistic_points(nncf_network) + statistic_points = min_max_algo.get_statistic_points(nncf_network, nncf_network.nncf.get_graph()) 
statistics_aggregator.register_statistic_points(statistic_points) - statistics_aggregator.collect_statistics(model) - torch_quantized_model = min_max_algo._apply(model, statistics_aggregator.statistic_points) + statistics_aggregator.collect_statistics(model, nncf_network.nncf.get_graph()) + torch_quantized_model = min_max_algo.apply( + nncf_network, nncf_network.nncf.get_graph(), statistics_aggregator.statistic_points + ) path = str(tmp_path / "torch_ptq_model.onnx") torch.onnx.export( @@ -338,10 +341,10 @@ def test_quantizer_parameters_export(tmp_path: Path): input_low, input_high = fq_input[-2].flatten(), fq_input[-1].flatten() torch_ptq_params[fq_node.name] = {"input_low": input_low, "input_high": input_high} - for name in fq_params: + for name, param in fq_params.items(): assert name in torch_ptq_params - assert np.allclose(fq_params[name]["input_low"], torch_ptq_params[name]["input_low"]) - assert np.allclose(fq_params[name]["input_high"], torch_ptq_params[name]["input_high"]) + assert np.allclose(param["input_low"], torch_ptq_params[name]["input_low"]) + assert np.allclose(param["input_high"], torch_ptq_params[name]["input_high"]) class TestFQParams(TemplateTestFQParams): diff --git a/tests/torch/ptq/test_fast_bias_correction.py b/tests/torch/ptq/test_fast_bias_correction.py new file mode 100644 index 00000000000..b713aeb802c --- /dev/null +++ b/tests/torch/ptq/test_fast_bias_correction.py @@ -0,0 +1,61 @@ +# Copyright (c) 2023 Intel Corporation +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from typing import List + +import torch + +from nncf.common.factory import NNCFGraphFactory +from nncf.quantization.algorithms.fast_bias_correction.torch_backend import PTFastBiasCorrectionAlgoBackend +from nncf.torch.model_analyzer import get_fused_bias_value +from nncf.torch.model_analyzer import is_node_with_fused_bias +from nncf.torch.nncf_network import NNCFNetwork +from tests.post_training.test_templates.test_fast_bias_correction import TemplateTestFBCAlgorithm +from tests.torch.ptq.helpers import get_nncf_network + + +class TestTorchFBCAlgorithm(TemplateTestFBCAlgorithm): + @staticmethod + def list_to_backend_type(data: List) -> torch.Tensor: + return torch.Tensor(data) + + @staticmethod + def get_backend() -> PTFastBiasCorrectionAlgoBackend: + return PTFastBiasCorrectionAlgoBackend + + @staticmethod + def backend_specific_model(model: bool, tmp_dir: str): + return get_nncf_network(model, model.INPUT_SIZE) + + @staticmethod + def fn_to_type(tensor): + return torch.Tensor(tensor) + + @staticmethod + def get_transform_fn(): + def transform_fn(data_item): + tensor, _ = data_item + return tensor + + return transform_fn + + @staticmethod + def check_bias(model: NNCFNetwork, ref_bias: list): + ref_bias = torch.Tensor(ref_bias) + nncf_graph = NNCFGraphFactory.create(model) + for node in nncf_graph.get_all_nodes(): + if not is_node_with_fused_bias(node, nncf_graph): + continue + bias_value = get_fused_bias_value(node, model) + # TODO(AlexanderDokuchaev): return atol=0.0001 after fix 109189 + assert torch.all(torch.isclose(bias_value, ref_bias, atol=0.02)), f"{bias_value} != {ref_bias}" + return + raise ValueError("Not found node with bias") diff --git a/tests/torch/ptq/test_fq_params_calculation.py b/tests/torch/ptq/test_fq_params_calculation.py index 001b2d6b513..974f42588d8 100644 --- a/tests/torch/ptq/test_fq_params_calculation.py +++ b/tests/torch/ptq/test_fq_params_calculation.py @@ -33,6 +33,9 @@ REFERENCE_SCALES_DIR = TEST_ROOT / "torch" / "data" / "reference_scales" +# pylint: disable=protected-access + + def min_max_quantize_model( original_model: torch.nn.Module, quantization_params: Dict[str, Any] = None ) -> torch.nn.Module: @@ -56,7 +59,7 @@ def transform_fn(sample): original_model.eval() nncf_network = create_nncf_network(original_model, config) - quantized_model = post_training_quantization.apply(nncf_network, dataset=dataset) + quantized_model = post_training_quantization.apply(nncf_network, nncf_network.nncf.get_graph(), dataset=dataset) return quantized_model diff --git a/tests/torch/ptq/test_graphs.py b/tests/torch/ptq/test_graphs.py index 53341e5faf5..b688e9cf830 100644 --- a/tests/torch/ptq/test_graphs.py +++ b/tests/torch/ptq/test_graphs.py @@ -14,10 +14,12 @@ import pytest +from nncf.parameters import TargetDevice from nncf.quantization.advanced_parameters import AdvancedQuantizationParameters from nncf.quantization.algorithms.post_training.algorithm import PostTrainingQuantization from nncf.torch.layers import NNCF_RNN from nncf.torch.layers import LSTMCellNNCF +from tests.post_training.test_templates.helpers import EmbeddingModel from tests.torch import test_models from tests.torch.ptq.helpers import get_nncf_network from tests.torch.ptq.helpers import mock_collect_statistics @@ -30,62 +32,74 @@ SKIP_MARK = pytest.mark.skip("Model is not supported yet") -@pytest.fixture(scope="function", params=ALGOS) -def graph_dir(request): +@pytest.fixture(scope="function", params=ALGOS, name="graph_dir") +def fixture_graph_dir(request): quantization_type = request.param - 
graph_dir = Path("quantized") / "ptq" / quantization_type - return graph_dir + graph_dir_ = Path("quantized") / "ptq" / quantization_type + return graph_dir_ -def get_model_name(desc): +def get_model_name(description): + desc = description[0] if isinstance(desc, ModelDesc): return desc.model_name - return desc.values[0].model_name + return desc[0].model_name TEST_MODELS_DESC = [ - ModelDesc("shared_model", SharedLayersModel, [1, 1, 5, 6]), - ModelDesc("alexnet", test_models.AlexNet, [1, 3, 32, 32]), - ModelDesc("lenet", test_models.LeNet, [1, 3, 32, 32]), - ModelDesc("resnet18", test_models.ResNet18, [1, 3, 32, 32]), - ModelDesc("resnet50", test_models.ResNet50, [1, 3, 32, 32]), - ModelDesc("vgg16", partial(test_models.VGG, "VGG16"), [1, 3, 32, 32]), - ModelDesc("inception", test_models.GoogLeNet, [1, 3, 32, 32]), - ModelDesc("densenet121", test_models.DenseNet121, [1, 3, 32, 32]), - ModelDesc("inception_v3", partial(test_models.Inception3, aux_logits=True, transform_input=True), [2, 3, 299, 299]), - ModelDesc("squeezenet1_1", test_models.squeezenet1_1, [1, 3, 32, 32]), - ModelDesc("shufflenetv2", partial(test_models.ShuffleNetV2, net_size=0.5), [1, 3, 32, 32]), - ModelDesc("ssd_vgg", test_models.ssd_vgg300, [2, 3, 300, 300]), - ModelDesc("ssd_mobilenet", test_models.ssd_mobilenet, [2, 3, 300, 300]), - ModelDesc("mobilenet_v2", test_models.mobilenet_v2, [2, 3, 32, 32]), - ModelDesc("mobilenet_v3_small", test_models.mobilenet_v3_small, [2, 3, 32, 32]), - ModelDesc("unet", test_models.UNet, [1, 3, 360, 480]), - pytest.param(ModelDesc("lstm_cell", LSTMCellNNCF, [2, 1]), marks=SKIP_MARK), + (ModelDesc("embedding_model", EmbeddingModel, [1, 10]), {}), + (ModelDesc("shared_model", SharedLayersModel, [1, 1, 5, 6]), {}), + (ModelDesc("alexnet", test_models.AlexNet, [1, 3, 32, 32]), {}), + (ModelDesc("lenet", test_models.LeNet, [1, 3, 32, 32]), {}), + (ModelDesc("resnet18", test_models.ResNet18, [1, 3, 32, 32]), {}), + (ModelDesc("resnet50_cpu_spr", test_models.ResNet50, [1, 3, 32, 32]), {"target_device": TargetDevice.CPU_SPR}), + (ModelDesc("vgg16", partial(test_models.VGG, "VGG16"), [1, 3, 32, 32]), {}), + (ModelDesc("inception", test_models.GoogLeNet, [1, 3, 32, 32]), {}), + (ModelDesc("densenet121", test_models.DenseNet121, [1, 3, 32, 32]), {}), + ( + ModelDesc( + "inception_v3", partial(test_models.Inception3, aux_logits=True, transform_input=True), [2, 3, 299, 299] + ), + {}, + ), + (ModelDesc("squeezenet1_1", test_models.squeezenet1_1, [1, 3, 32, 32]), {}), + (ModelDesc("shufflenetv2", partial(test_models.ShuffleNetV2, net_size=0.5), [1, 3, 32, 32]), {}), + (ModelDesc("ssd_vgg", test_models.ssd_vgg300, [2, 3, 300, 300]), {}), + (ModelDesc("ssd_mobilenet", test_models.ssd_mobilenet, [2, 3, 300, 300]), {}), + (ModelDesc("mobilenet_v2", test_models.mobilenet_v2, [2, 3, 32, 32]), {}), + (ModelDesc("mobilenet_v3_small", test_models.mobilenet_v3_small, [2, 3, 32, 32]), {}), + (ModelDesc("unet", test_models.UNet, [1, 3, 360, 480]), {}), + pytest.param(ModelDesc("lstm_cell", LSTMCellNNCF, [2, 1]), {}, marks=SKIP_MARK), pytest.param( - ModelDesc("lstm_uni_seq", partial(NNCF_RNN, num_layers=1, bidirectional=False), [3, 1, 1]), marks=SKIP_MARK + ModelDesc("lstm_uni_seq", partial(NNCF_RNN, num_layers=1, bidirectional=False), [3, 1, 1]), {}, marks=SKIP_MARK ), pytest.param( - ModelDesc("lstm_uni_stacked", partial(NNCF_RNN, num_layers=2, bidirectional=False), [3, 1, 1]), marks=SKIP_MARK + ModelDesc("lstm_uni_stacked", partial(NNCF_RNN, num_layers=2, bidirectional=False), [3, 1, 1]), + {}, + 
marks=SKIP_MARK, ), pytest.param( - ModelDesc("lstm_bi_seq", partial(NNCF_RNN, num_layers=1, bidirectional=True), [3, 1, 1]), marks=SKIP_MARK + ModelDesc("lstm_bi_seq", partial(NNCF_RNN, num_layers=1, bidirectional=True), [3, 1, 1]), {}, marks=SKIP_MARK ), pytest.param( - ModelDesc("lstm_bi_stacked", partial(NNCF_RNN, num_layers=2, bidirectional=True), [3, 1, 1]), marks=SKIP_MARK + ModelDesc("lstm_bi_stacked", partial(NNCF_RNN, num_layers=2, bidirectional=True), [3, 1, 1]), + {}, + marks=SKIP_MARK, ), ] -@pytest.mark.parametrize("desc", TEST_MODELS_DESC, ids=[get_model_name(m) for m in TEST_MODELS_DESC]) -def test_min_max_classification_quantized_graphs(desc: ModelDesc, graph_dir, mocker): +@pytest.mark.parametrize( + ("desc", "quantization_parameters"), TEST_MODELS_DESC, ids=[get_model_name(m) for m in TEST_MODELS_DESC] +) +def test_min_max_classification_quantized_graphs(desc: ModelDesc, quantization_parameters, graph_dir, mocker): mock_collect_statistics(mocker) model = desc.model_builder() nncf_network = get_nncf_network(model, desc.input_sample_sizes) - quantization_algorithm = PostTrainingQuantization( - advanced_parameters=AdvancedQuantizationParameters(disable_bias_correction=True) - ) + quantization_parameters["advanced_parameters"] = AdvancedQuantizationParameters(disable_bias_correction=True) + quantization_algorithm = PostTrainingQuantization(**quantization_parameters) - quantized_model = quantization_algorithm.apply(nncf_network, dataset=None) + quantized_model = quantization_algorithm.apply(nncf_network, nncf_network.nncf.get_graph(), dataset=None) check_graph(quantized_model.nncf.get_graph(), desc.dot_filename, graph_dir) diff --git a/tests/torch/ptq/test_ptq_params.py b/tests/torch/ptq/test_ptq_params.py index 32d4957014b..c174ec8b322 100644 --- a/tests/torch/ptq/test_ptq_params.py +++ b/tests/torch/ptq/test_ptq_params.py @@ -12,30 +12,39 @@ import pytest from torch import nn +from nncf import NNCFConfig from nncf.common.graph.patterns import GraphPattern from nncf.common.graph.patterns.manager import PatternsManager from nncf.common.graph.transformations.commands import TargetType +from nncf.common.quantization.structs import QuantizationPreset from nncf.common.utils.backend import BackendType +from nncf.parameters import ModelType from nncf.parameters import TargetDevice +from nncf.quantization.advanced_parameters import AdvancedQuantizationParameters +from nncf.quantization.advanced_parameters import OverflowFix +from nncf.quantization.advanced_parameters import QuantizationMode +from nncf.quantization.advanced_parameters import QuantizationParameters from nncf.quantization.algorithms.min_max.torch_backend import PTMinMaxAlgoBackend from nncf.quantization.algorithms.post_training.algorithm import PostTrainingQuantization +from nncf.quantization.range_estimator import RangeEstimatorParametersSet from nncf.scopes import IgnoredScope from nncf.torch.graph.graph import PTTargetPoint from nncf.torch.graph.operator_metatypes import PTModuleConv2dMetatype from nncf.torch.graph.operator_metatypes import PTModuleLinearMetatype from nncf.torch.graph.operator_metatypes import PTSoftmaxMetatype +from nncf.torch.quantization.quantize_model import _create_nncf_config from nncf.torch.tensor_statistics.collectors import PTMeanMinMaxStatisticCollector from nncf.torch.tensor_statistics.collectors import PTMinMaxStatisticCollector from tests.common.quantization.metatypes import Conv2dTestMetatype from tests.common.quantization.metatypes import LinearTestMetatype from 
tests.common.quantization.metatypes import SoftmaxTestMetatype -from tests.post_training.test_ptq_params import TemplateTestPTQParams +from tests.post_training.test_templates.test_ptq_params import TemplateTestPTQParams from tests.torch.helpers import create_bn from tests.torch.helpers import create_conv from tests.torch.helpers import create_depthwise_conv from tests.torch.ptq.helpers import get_nncf_network from tests.torch.ptq.helpers import get_single_conv_nncf_graph -from tests.torch.ptq.helpers import get_single_no_weigth_matmul_nncf_graph +from tests.torch.ptq.helpers import get_single_no_weight_matmul_nncf_graph # pylint: disable=protected-access @@ -121,10 +130,18 @@ def metatypes_mapping(self): @pytest.fixture(scope="session") def test_params(self): + linear_model = LinearTestModel().get_nncf_network() + depthwise_model = OneDepthwiseConvModel().get_nncf_network() + return { - "test_range_estimator_per_tensor": {"model": LinearTestModel().get_nncf_network(), "stat_points_num": 5}, + "test_range_estimator_per_tensor": { + "model": linear_model, + "nncf_graph": linear_model.nncf.get_graph(), + "stat_points_num": 5, + }, "test_range_estimator_per_channel": { - "model": OneDepthwiseConvModel().get_nncf_network(), + "model": depthwise_model, + "nncf_graph": depthwise_model.nncf.get_graph(), "stat_points_num": 2, }, "test_quantize_outputs": { @@ -138,12 +155,106 @@ def test_params(self): "ignored_patterns": get_ignored_patterns(), }, "test_model_type_pass": { - "nncf_graph": get_single_no_weigth_matmul_nncf_graph().nncf_graph, + "nncf_graph": get_single_no_weight_matmul_nncf_graph().nncf_graph, "hw_patterns": get_hw_patterns(), "ignored_patterns": get_ignored_patterns(), }, + "test_validate_scope": { + "nncf_graph": get_single_conv_nncf_graph().nncf_graph, + "ignored_patterns": get_ignored_patterns(), + }, } @pytest.fixture(params=[(IgnoredScope([]), 1, 1), (IgnoredScope(["/Conv_1_0"]), 0, 0)]) def ignored_scopes_data(self, request): return request.param + + +@pytest.mark.parametrize( + "params", + ( + { + "preset": QuantizationPreset.MIXED, + "target_device": TargetDevice.ANY, + "subset_size": 1, + "model_type": ModelType.TRANSFORMER, + "ignored_scope": IgnoredScope(names=["node_1"]), + "advanced_parameters": AdvancedQuantizationParameters( + overflow_fix=OverflowFix.DISABLE, quantize_outputs=True, disable_bias_correction=True + ), + }, + { + "preset": QuantizationPreset.MIXED, + "target_device": TargetDevice.ANY, + "subset_size": 2, + "model_type": None, + "ignored_scope": None, + "advanced_parameters": AdvancedQuantizationParameters( + overflow_fix=OverflowFix.ENABLE, quantize_outputs=False, disable_bias_correction=False + ), + }, + { + "preset": QuantizationPreset.MIXED, + "target_device": TargetDevice.ANY, + "subset_size": 3, + "model_type": None, + "ignored_scope": IgnoredScope(names=["node_1"]), + "advanced_parameters": AdvancedQuantizationParameters( + overflow_fix=OverflowFix.FIRST_LAYER, quantize_outputs=True, disable_bias_correction=False + ), + }, + { + "preset": QuantizationPreset.MIXED, + "target_device": TargetDevice.ANY, + "subset_size": 4, + "model_type": None, + "ignored_scope": IgnoredScope(names=["node_1"]), + "advanced_parameters": AdvancedQuantizationParameters( + overflow_fix=OverflowFix.FIRST_LAYER, + quantize_outputs=True, + disable_bias_correction=False, + activations_quantization_params=QuantizationParameters(num_bits=8, mode=QuantizationMode.SYMMETRIC), + activations_range_estimator_params=RangeEstimatorParametersSet.MEAN_MINMAX, + 
weights_quantization_params=QuantizationParameters(num_bits=8, mode=QuantizationMode.SYMMETRIC), + weights_range_estimator_params=RangeEstimatorParametersSet.MEAN_MINMAX, + ), + }, + ), +) +def test_create_nncf_config(params): + config = _create_nncf_config(**params) + + assert config["compression"]["overflow_fix"] == params["advanced_parameters"].overflow_fix.value + assert config["compression"]["quantize_outputs"] == params["advanced_parameters"].quantize_outputs + + assert config["compression"]["preset"] == params["preset"].value + + range_config = config["compression"]["initializer"]["range"] + if isinstance(range_config, dict): + assert range_config["num_init_samples"] == params["subset_size"] + assert range_config["type"] == "mean_min_max" + else: + for rc in range_config: + assert rc["num_init_samples"] == params["subset_size"] + assert rc["type"] == "mean_min_max" + + num_bn_samples = config["compression"]["initializer"]["batchnorm_adaptation"]["num_bn_adaptation_samples"] + if params["advanced_parameters"].disable_bias_correction is True or params["model_type"] == ModelType.TRANSFORMER: + assert num_bn_samples == 0 + else: + assert num_bn_samples == params["subset_size"] + + ref_scope = params["ignored_scope"].names if params["ignored_scope"] is not None else [] + if params["model_type"] == ModelType.TRANSFORMER: + ref_scope = [ + "{re}.*Embeddings.*", + "{re}.*__add___[0-1]", + "{re}.*layer_norm_0", + "{re}.*matmul_1", + "{re}.*__truediv__*", + ] + ref_scope + assert config["compression"].get("ignored_scopes", []) == ref_scope + + # input_info is required to validate the NNCFConfig + config["input_info"] = {"sample_size": [1, 2, 224, 224]} + NNCFConfig.validate(config) diff --git a/tests/torch/ptq/test_quantizer_config.py b/tests/torch/ptq/test_quantizer_config.py index be9a449b3fc..41cab6438b5 100644 --- a/tests/torch/ptq/test_quantizer_config.py +++ b/tests/torch/ptq/test_quantizer_config.py @@ -13,13 +13,12 @@ from nncf.common.graph.transformations.commands import TargetType from nncf.quantization.algorithms.min_max.torch_backend import PTMinMaxAlgoBackend -from nncf.torch.graph.transformations.commands import PTTargetPoint from nncf.torch.tensor_statistics.collectors import PTMeanMinMaxStatisticCollector from nncf.torch.tensor_statistics.collectors import PTMinMaxStatisticCollector -from tests.post_training.models import NNCFGraphToTest -from tests.post_training.models import NNCFGraphToTestDepthwiseConv -from tests.post_training.models import NNCFGraphToTestSumAggregation -from tests.post_training.test_quantizer_config import TemplateTestQuantizerConfig +from tests.post_training.test_templates.models import NNCFGraphToTest +from tests.post_training.test_templates.models import NNCFGraphToTestDepthwiseConv +from tests.post_training.test_templates.models import NNCFGraphToTestSumAggregation +from tests.post_training.test_templates.test_quantizer_config import TemplateTestQuantizerConfig from tests.torch.ptq.helpers import get_depthwise_conv_nncf_graph from tests.torch.ptq.helpers import get_single_conv_nncf_graph from tests.torch.ptq.helpers import get_sum_aggregation_nncf_graph diff --git a/tests/torch/ptq/test_strip.py b/tests/torch/ptq/test_strip.py new file mode 100644 index 00000000000..dbdd6c6b7da --- /dev/null +++ b/tests/torch/ptq/test_strip.py @@ -0,0 +1,86 @@ +# Copyright (c) 2023 Intel Corporation +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import pytest +from torch.quantization import FakeQuantize + +import nncf +from nncf.data import Dataset +from nncf.experimental.torch.quantization.quantize_model import quantize_impl +from nncf.parameters import TargetDevice +from nncf.quantization import QuantizationPreset +from nncf.torch.nncf_network import NNCFNetwork +from nncf.torch.quantization.layers import BaseQuantizer +from tests.torch.helpers import LeNet +from tests.torch.helpers import RandomDatasetMock + + +# pylint: disable=too-many-branches +def check_fq(model: NNCFNetwork, striped: bool): + if hasattr(model.nncf, "external_quantizers"): + for key in list(model.nncf.external_quantizers.keys()): + op = model.nncf.external_quantizers[key] + if striped: + assert isinstance(op, FakeQuantize) + else: + assert isinstance(op, BaseQuantizer) + + for node in model.nncf.get_original_graph().get_all_nodes(): + if node.node_type in ["nncf_model_input", "nncf_model_output"]: + continue + + nncf_module = model.nncf.get_containing_module(node.node_name) + + if hasattr(nncf_module, "pre_ops"): + for key in list(nncf_module.pre_ops.keys()): + op = nncf_module.get_pre_op(key) + if striped: + assert isinstance(op.op, FakeQuantize) + else: + assert isinstance(op.op, BaseQuantizer) + + if hasattr(nncf_module, "post_ops"): + for key in list(nncf_module.post_ops.keys()): + op = nncf_module.get_post_ops(key) + if striped: + assert isinstance(op.op, FakeQuantize) + else: + assert isinstance(op.op, BaseQuantizer) + + +@pytest.mark.parametrize("strip_type", ("nncf", "torch", "nncf_interfere")) +def test_nncf_strip_api(strip_type): + model = LeNet() + input_size = [1, 1, 32, 32] + + def transform_fn(data_item): + images, _ = data_item + return images + + dataset = Dataset(RandomDatasetMock(input_size), transform_fn) + + quantized_model = quantize_impl( + model=model, + calibration_dataset=dataset, + preset=QuantizationPreset.MIXED, + target_device=TargetDevice.CPU, + subset_size=1, + fast_bias_correction=True, + ) + + if strip_type == "nncf": + strip_model = nncf.strip(quantized_model) + elif strip_type == "torch": + strip_model = nncf.torch.strip(quantized_model) + elif strip_type == "nncf_interfere": + strip_model = quantized_model.nncf.strip() + + check_fq(quantized_model, True if strip_model is None else strip_model) diff --git a/tests/torch/ptq/test_weights_compression.py b/tests/torch/ptq/test_weights_compression.py new file mode 100644 index 00000000000..e71394e9284 --- /dev/null +++ b/tests/torch/ptq/test_weights_compression.py @@ -0,0 +1,72 @@ +# Copyright (c) 2023 Intel Corporation +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import torch + +from nncf.quantization import compress_weights + + +class ShortTransformer(torch.nn.Module): + def __init__(self, in_features, num_embeddings, share_weights=False): + super().__init__() + self.wte = torch.nn.Embedding(num_embeddings, in_features) + self.linear = torch.nn.Linear(in_features, in_features) + self.lm_head = torch.nn.Linear(in_features, num_embeddings) + + if share_weights: + self.lm_head.weight = self.wte.weight + + def forward(self, input_ids): + x = self.wte(input_ids) + x = self.linear(x) + res = self.lm_head(x) + return res + + +def test_compress_weights(): + model = ShortTransformer(5, 10) + + compressed_model = compress_weights(model) + + n_compressed_weights = 0 + n_target_modules = 0 + + for _, module in compressed_model.named_children(): + if isinstance(module, (torch.nn.Linear, torch.nn.Embedding)): + n_target_modules += 1 + if module.weight.dtype in [torch.uint8, torch.int8]: + n_compressed_weights += 1 + + assert n_compressed_weights == n_target_modules + + +def test_compress_shared_weights(): + model = ShortTransformer(5, 10, share_weights=True) + + compressed_model = compress_weights(model) + + n_compressed_weights = 0 + n_target_modules = 0 + + for _, module in compressed_model.named_children(): + if isinstance(module, (torch.nn.Linear, torch.nn.Embedding)): + n_target_modules += 1 + if module.weight.dtype in [torch.uint8, torch.int8]: + n_compressed_weights += 1 + + assert n_compressed_weights == n_target_modules + + assert len(compressed_model.wte.pre_ops) > 0 + + assert len(compressed_model.wte.pre_ops) == len(compressed_model.lm_head.pre_ops) + + for key, val in compressed_model.wte.pre_ops.items(): + assert compressed_model.lm_head.get_pre_op(key) is val diff --git a/tests/torch/pytorch_patch_isolated.py b/tests/torch/pytorch_patch_isolated.py index a3622974fd8..049ddf653f1 100644 --- a/tests/torch/pytorch_patch_isolated.py +++ b/tests/torch/pytorch_patch_isolated.py @@ -1,15 +1,13 @@ -""" - Copyright (c) 2022 Intel Corporation - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - http://www.apache.org/licenses/LICENSE-2.0 - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. -""" +# Copyright (c) 2023 Intel Corporation +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
import inspect import os @@ -50,3 +48,30 @@ def test_jit_if_tracing_script_source_equals(): "torch.jit.script", "script" ) assert torch_source == nncf_source_corrected + + +class DummyModel(torch.nn.Module): + def forward(self, x): + return x + + +@pytest.mark.skipif(ISOLATION_RUN_ENV_VAR not in os.environ, reason="Should be run via isolation proxy") +def test_jit_script_exception_preserves_patching_isolated(): + from nncf import NNCFConfig + from nncf.torch import create_compressed_model + + _, compressed_model = create_compressed_model( + DummyModel(), + NNCFConfig.from_dict( + {"input_info": {"sample_size": [1, 3, 32, 32]}, "compression": {"algorithm": "quantization"}} + ), + ) + + try: + torch.jit.script(compressed_model) # supposed to fail since torch.jit.script does not support NNCF models + except: # pylint:disable=bare-except + pass + + # torch.nn.Module.__call__ is one of the fundamental patched functions, if the code object points to NNCF code, + # then it means patching is still present + assert "nncf" in torch.nn.Module.__call__.__code__.co_filename diff --git a/tests/torch/quantization/extensions/isolated_cases.py b/tests/torch/quantization/extensions/isolated_cases.py index 66e23f6650d..ecad13c7bed 100644 --- a/tests/torch/quantization/extensions/isolated_cases.py +++ b/tests/torch/quantization/extensions/isolated_cases.py @@ -1,16 +1,13 @@ -""" - Copyright (c) 2022 Intel Corporation - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - http://www.apache.org/licenses/LICENSE-2.0 - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. -""" - +# Copyright (c) 2023 Intel Corporation +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. import os diff --git a/tests/torch/quantization/extensions/test_extension_unavailable.py b/tests/torch/quantization/extensions/test_extension_unavailable.py index ed9db3c35d2..b4ebbcdb94f 100644 --- a/tests/torch/quantization/extensions/test_extension_unavailable.py +++ b/tests/torch/quantization/extensions/test_extension_unavailable.py @@ -1,15 +1,14 @@ -""" - Copyright (c) 2022 Intel Corporation - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - http://www.apache.org/licenses/LICENSE-2.0 - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. 
-""" +# Copyright (c) 2023 Intel Corporation +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + import pytest from tests.shared.isolation_runner import run_pytest_case_function_in_separate_process diff --git a/tests/torch/quantization/extensions/test_timeout_extension_loader.py b/tests/torch/quantization/extensions/test_timeout_extension_loader.py new file mode 100644 index 00000000000..f28f226045e --- /dev/null +++ b/tests/torch/quantization/extensions/test_timeout_extension_loader.py @@ -0,0 +1,45 @@ +# Copyright (c) 2023 Intel Corporation +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import os +from pathlib import Path + +import pytest +import torch + +from nncf.torch.extensions import EXTENSION_LOAD_TIMEOUT_ENV_VAR +from nncf.torch.extensions import ExtensionLoaderTimeoutException +from nncf.torch.quantization.extensions import QuantizedFunctionsCPU +from nncf.torch.quantization.extensions import QuantizedFunctionsCUDA +from tests.shared.isolation_runner import ISOLATION_RUN_ENV_VAR +from tests.shared.isolation_runner import run_pytest_case_function_in_separate_process + + +@pytest.mark.skipif(ISOLATION_RUN_ENV_VAR not in os.environ, reason="Should be run via isolation proxy") +def test_timeout_extension_loader_isolated(tmp_path, use_cuda): + if not torch.cuda.is_available() and use_cuda is True: + pytest.skip("Skipping CUDA test cases for CPU only setups") + + quant_func = QuantizedFunctionsCUDA if use_cuda else QuantizedFunctionsCPU + + os.environ[EXTENSION_LOAD_TIMEOUT_ENV_VAR] = "1" + os.environ["TORCH_EXTENSIONS_DIR"] = tmp_path.as_posix() + + # pylint: disable=protected-access + build_dir = Path(quant_func._loader.get_build_dir()) + lock_file = build_dir / "lock" + lock_file.touch() + with pytest.raises(ExtensionLoaderTimeoutException): + quant_func.get("Quantize_forward") + + +def test_timeout_extension_loader(): + run_pytest_case_function_in_separate_process(test_timeout_extension_loader_isolated) diff --git a/tests/torch/quantization/test_adjust_padding.py b/tests/torch/quantization/test_adjust_padding.py index 9edc4b6c071..857332b0398 100644 --- a/tests/torch/quantization/test_adjust_padding.py +++ b/tests/torch/quantization/test_adjust_padding.py @@ -1,15 +1,14 @@ -""" - Copyright (c) 2023 Intel Corporation - Licensed under the Apache License, Version 2.0 (the 'License'); - you may not use this file except in compliance with the License. 
- You may obtain a copy of the License at - http://www.apache.org/licenses/LICENSE-2.0 - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an 'AS IS' BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. -""" +# Copyright (c) 2023 Intel Corporation +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + import os from typing import List diff --git a/tests/torch/quantization/test_algo_quantization.py b/tests/torch/quantization/test_algo_quantization.py index 2617bc1d4ff..7b7e395cad6 100644 --- a/tests/torch/quantization/test_algo_quantization.py +++ b/tests/torch/quantization/test_algo_quantization.py @@ -16,7 +16,7 @@ import torch import torch.nn.functional as F import torch.utils.data -from pkg_resources import parse_version +from torch import autocast from torch import nn from torchvision.models import resnet50 from torchvision.models import squeezenet1_1 @@ -510,6 +510,10 @@ def test_quantize_outputs(): config["compression"]["quantize_outputs"] = True register_bn_adaptation_init_args(config) model, qctrl = create_compressed_model_and_algo_for_test(model, config) + # The quantizers below will not have been set up due to quantizer propagation, + # and no configuration can be determined for them from the HW config. The + # configuration is also missing in this case in the NNCFConfig, so NNCF will + # set up a quantizer with the default config. REF_QUANTIZED_OUTPUT_MODULE_SCOPES = [ "QuantizeOutputsTestModel/NNCFConv2d[conv1]/conv2d_0|OUTPUT", "QuantizeOutputsTestModel/NNCFConv2d[conv2]/conv2d_0|OUTPUT", @@ -560,6 +564,44 @@ def test_quantize_outputs_with_scope_overrides(): assert isinstance(output_quantizers[0], AsymmetricQuantizer) + +class IntermediateOutputModel(nn.Module): + """ + When quantized with "quantize_outputs": False (which is the default behaviour), + the activation quantizer of `conv2` shall not propagate to the output of `conv1`, + but shall stay as a pre-hook to the `conv2`, so as not to impact the + return value of `conv1` which is also an intermediate output of the model.
+ """ + + def __init__(self): + super().__init__() + self.conv1 = nn.Conv2d(in_channels=3, out_channels=3, kernel_size=1) + self.conv2 = nn.Conv2d(in_channels=3, out_channels=3, kernel_size=1) + + def forward(self, x): + x1 = self.conv1(x) + return x1, self.conv2(x1) + + +def test_intermediate_output_model(): + config = get_quantization_config_without_range_init() + config["input_info"] = [ + { + "sample_size": [2, 3, 32, 32], + } + ] + model = IntermediateOutputModel() + config["compression"]["quantize_outputs"] = False + register_bn_adaptation_init_args(config) + model, qctrl = create_compressed_model_and_algo_for_test(model, config) + activation_quantizer_scopes = [str(aq_id) for aq_id in qctrl.non_weight_quantizers] + assert Counter(activation_quantizer_scopes) == Counter( + [ + "/nncf_model_input_0|OUTPUT", # activation quantizer of conv1 + "IntermediateOutputModel/NNCFConv2d[conv2]/conv2d_0|INPUT0", + ] + ) # act. quant. of conv2 + + def test_debug_mode(): config = get_quantization_config_without_range_init() register_bn_adaptation_init_args(config) @@ -694,12 +736,6 @@ def test_quantization_can_be_run_with_no_data_loaders_if_zero_init_samples(): ) -if parse_version(torch.__version__).base_version <= parse_version("1.9.1").base_version: - from torch.cuda.amp import autocast -else: - from torch import autocast - - class TestHalfPrecisionModels: class RegularModel(torch.nn.Module): def __init__(self): @@ -718,7 +754,7 @@ def __init__(self): self.model = TestHalfPrecisionModels.RegularModel() def forward(self, x): - with autocast(): + with autocast(device_type="cuda" if x.is_cuda else "cpu"): y = self.model(x) return y @@ -785,16 +821,19 @@ def test_manual_partial_half_precision_model(self, initializing_config: NNCFConf # Should complete successfully, including init. compressed_model(inputs) - def test_external_autocast(self, initializing_config: NNCFConfig): + @pytest.mark.parametrize("device", ["cpu", "cuda"]) + def test_external_autocast(self, initializing_config: NNCFConfig, device: str): model = TestHalfPrecisionModels.RegularModel() inputs = torch.ones([1, 1, 1, 1]) - if torch.cuda.is_available(): + if device == "cuda": + if not torch.cuda.is_available(): + pytest.skip("CUDA not available") inputs = inputs.cuda() model = model.cuda() compressed_model, _ = create_compressed_model_and_algo_for_test(model, initializing_config) - with autocast(): + with autocast(device_type="cuda" if inputs.is_cuda else "cpu"): # Should complete successfully. 
result = compressed_model(inputs) if torch.is_autocast_enabled(): # For torch <= 1.9.1 and CPU the autocast context won't have effect diff --git a/tests/torch/quantization/test_hawq_precision_init.py b/tests/torch/quantization/test_hawq_precision_init.py index 5cf5a4a6bfc..155cdbee42d 100644 --- a/tests/torch/quantization/test_hawq_precision_init.py +++ b/tests/torch/quantization/test_hawq_precision_init.py @@ -23,7 +23,6 @@ import torch.utils.data from numpy.random import random_sample from torch import nn -from torch.utils import model_zoo from torchvision.models import resnet50 from torchvision.transforms import transforms @@ -455,8 +454,8 @@ def test_hawq_hw_vpu_config_e2e(_seed, dataset_dir, tmp_path): ( HAWQTestParams(200, 13, 100, 1.2741253547860323, 1.274125503581261), HAWQTestParams(2, 13, 100, 1.2646427814393832, 1.2646428162034615), - HAWQTestParams(2, 10, 10, 1.83052726021032, 1.8305243724338203), - HAWQTestParams(2, 10, 5, 1.830527260210321, 1.8305243724338203), + HAWQTestParams(2, 10, 10, 1.8305234709185931, 1.8305243724338203), + HAWQTestParams(2, 10, 5, 1.8305234709185931, 1.8305243724338203), ), ids=("until_threshold", "until_num_iter", "batch_eq_num_data", "batch_larger_num_data"), ) @@ -466,9 +465,10 @@ def test_hawq_on_single_conv_without_quantizers(_seed, dataset_dir, tmp_path, pa tolerance = 4e-4 model = squeezenet1_1(num_classes=10, dropout=0) - from torchvision.models.squeezenet import model_urls - load_state(model, model_zoo.load_url(model_urls["squeezenet1_1"])) + from torchvision.models import SqueezeNet1_1_Weights + + load_state(model, SqueezeNet1_1_Weights.IMAGENET1K_V1.get_state_dict(progress=False)) criterion = nn.CrossEntropyLoss() ref_trace = params.cpu_ref_trace rtol = 1e-5 diff --git a/tests/torch/quantization/test_logarithm_scale.py b/tests/torch/quantization/test_logarithm_scale.py index e249cd7959d..96cb6631ca9 100644 --- a/tests/torch/quantization/test_logarithm_scale.py +++ b/tests/torch/quantization/test_logarithm_scale.py @@ -15,6 +15,7 @@ import nncf from nncf import NNCFConfig +from nncf.torch.initialization import PTInitializingDataLoader from tests.torch.helpers import TwoConvTestModel from tests.torch.helpers import create_compressed_model_and_algo_for_test from tests.torch.helpers import register_bn_adaptation_init_args @@ -52,11 +53,11 @@ def __len__(self): data_loader = torch.utils.data.DataLoader(RandDatasetMock(), batch_size=1, shuffle=False, drop_last=True) - class SquadInitializingDataloader(nncf.torch.initialization.PTInitializingDataLoader): - def get_inputs(self, batch): - return batch, {} + class SquadInitializingDataloader(PTInitializingDataLoader): + def get_inputs(self, dataloader_output): + return dataloader_output, {} - def get_target(self, batch): + def get_target(self, dataloader_output): return None initializing_data_loader = SquadInitializingDataloader(data_loader) diff --git a/tests/torch/quantization/test_overflow_issue_export.py b/tests/torch/quantization/test_overflow_issue_export.py index fd408612c03..9898d55450b 100644 --- a/tests/torch/quantization/test_overflow_issue_export.py +++ b/tests/torch/quantization/test_overflow_issue_export.py @@ -1,3 +1,14 @@ +# Copyright (c) 2023 Intel Corporation +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + import numpy as np import onnx import onnxruntime as rt diff --git a/tests/torch/quantization/test_sanity_sample.py b/tests/torch/quantization/test_sanity_sample.py index dd572ba166d..10909d57c59 100644 --- a/tests/torch/quantization/test_sanity_sample.py +++ b/tests/torch/quantization/test_sanity_sample.py @@ -1,3 +1,14 @@ +# Copyright (c) 2023 Intel Corporation +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + from abc import ABC from pathlib import Path from typing import Dict diff --git a/tests/torch/quantization/test_serialize_to_json.py b/tests/torch/quantization/test_serialize_to_json.py index 98b5d19b87f..29bd4e2a11b 100644 --- a/tests/torch/quantization/test_serialize_to_json.py +++ b/tests/torch/quantization/test_serialize_to_json.py @@ -1,3 +1,14 @@ +# Copyright (c) 2023 Intel Corporation +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + import json import torch diff --git a/tests/torch/quantization/test_solver_quantization_traits.py b/tests/torch/quantization/test_solver_quantization_traits.py index a3df48f3a51..120a4107e4c 100644 --- a/tests/torch/quantization/test_solver_quantization_traits.py +++ b/tests/torch/quantization/test_solver_quantization_traits.py @@ -1,3 +1,14 @@ +# Copyright (c) 2023 Intel Corporation +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ from nncf.common.graph import INPUT_NOOP_METATYPES from nncf.common.graph import OUTPUT_NOOP_METATYPES from nncf.common.quantization.quantizer_propagation.graph import QuantizerPropagationStateGraph as QPSG diff --git a/tests/torch/quantization/test_strip.py b/tests/torch/quantization/test_strip.py index c768adb9d31..0b18f14c207 100644 --- a/tests/torch/quantization/test_strip.py +++ b/tests/torch/quantization/test_strip.py @@ -16,6 +16,7 @@ import torch from torch.quantization.fake_quantize import FakeQuantize +import nncf from nncf.common.quantization.quantizers import calculate_asymmetric_level_ranges from nncf.common.quantization.quantizers import calculate_symmetric_level_ranges from nncf.common.quantization.quantizers import get_num_levels @@ -110,17 +111,16 @@ def range_mode_to_args(range_mode: str) -> Tuple[bool, bool]: @pytest.mark.parametrize("input_size", INPUT_TEST_SCALES, ids=_idfn) -@pytest.mark.parametrize("num_bits", (4, 8), ids=("4-bits", "8-bits")) @pytest.mark.parametrize("range_mode", ["full_range", "half_range", "narrow_range"]) -def test_converting_symmetric_quantizer( - input_size, num_bits, is_per_channel, is_weights, range_mode, is_signed, use_cuda -): +def test_converting_symmetric_quantizer(input_size, is_per_channel, is_weights, range_mode, is_signed, use_cuda): if not torch.cuda.is_available() and use_cuda is True: pytest.skip("Skipping CUDA test cases for CPU only setups") if is_per_channel and input_size[0 if is_weights else 1] == 1: pytest.skip("Same case as for per_tensor case") + num_bits = 8 + is_half_range, narrow_range = range_mode_to_args(range_mode) np.random.seed(42) @@ -194,8 +194,7 @@ def test_converting_symmetric_quantizer( @pytest.mark.parametrize("input_size", INPUT_TEST_SCALES, ids=_idfn) -@pytest.mark.parametrize("num_bits", (4, 8), ids=("4-bits", "8-bits")) -def test_converting_asymmetric_quantizer(input_size, num_bits, is_per_channel, is_weights, is_half_range, use_cuda): +def test_converting_asymmetric_quantizer(input_size, is_per_channel, is_weights, is_half_range, use_cuda): if not torch.cuda.is_available() and use_cuda is True: pytest.skip("Skipping CUDA test cases for CPU only setups") @@ -203,6 +202,7 @@ def test_converting_asymmetric_quantizer(input_size, num_bits, is_per_channel, i pytest.skip("Same case as for per_tensor case") np.random.seed(42) + num_bits = 8 real_num_bits = num_bits - 1 if is_half_range else num_bits input_low, input_range = generate_random_low_and_range_by_input_size(input_size, is_per_channel, is_weights) @@ -277,8 +277,8 @@ def test_converting_asymmetric_quantizer(input_size, num_bits, is_per_channel, i @pytest.mark.parametrize("mode", ("asymmetric", "symmetric")) @pytest.mark.parametrize("overflow_fix", ("disable", "enable"), ids=("overflow_fix_disable", "overflow_fix_enable")) -@pytest.mark.parametrize("num_bits", (4, 8), ids=("4-bits", "8-bits")) -def test_strip_quantization(mode, overflow_fix, num_bits, tmp_path): +def test_strip_quantization(mode, overflow_fix, tmp_path): + num_bits = 8 model = BasicConvTestModel() config = _get_config_for_algo(model.INPUT_SIZE, mode, overflow_fix, bits=num_bits) @@ -294,9 +294,7 @@ def test_strip_quantization(mode, overflow_fix, num_bits, tmp_path): assert torch.all(torch.isclose(x_nncf, x_torch)), f"{x_nncf.view(-1)} != {x_torch.view(-1)}" - if num_bits == 8: - # ONNX export only supports 8 bits - torch.onnx.export(inference_model, input_tensor, f"{tmp_path}/model.onnx") + torch.onnx.export(inference_model, input_tensor, f"{tmp_path}/model.onnx") 
@pytest.mark.parametrize("do_copy", (True, False)) @@ -314,3 +312,21 @@ def test_do_copy(do_copy): assert id(inference_model) == id(compressed_model) assert id(compressed_model) == id(compression_ctrl.model) + + +@pytest.mark.parametrize("strip_type", ("nncf", "torch", "nncf_interfere")) +def test_nncf_strip_api(strip_type): + model = BasicConvTestModel() + config = _get_config_for_algo(model.INPUT_SIZE) + + quantized_model, _ = create_compressed_model_and_algo_for_test(model, config) + + if strip_type == "nncf": + strip_model = nncf.strip(quantized_model) + elif strip_type == "torch": + strip_model = nncf.torch.strip(quantized_model) + elif strip_type == "nncf_interfere": + strip_model = quantized_model.nncf.strip() + + fq = strip_model.conv.get_pre_op("0").op + assert isinstance(fq, FakeQuantize) diff --git a/tests/torch/quantization/test_tracing.py b/tests/torch/quantization/test_tracing.py new file mode 100644 index 00000000000..06068f6b5e4 --- /dev/null +++ b/tests/torch/quantization/test_tracing.py @@ -0,0 +1,95 @@ +# Copyright (c) 2023 Intel Corporation +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +import torch +from torch import nn + +from nncf.common.quantization.structs import QuantizationMode +from nncf.torch.quantization.layers import AsymmetricQuantizer +from nncf.torch.quantization.layers import PTQuantizerSpec +from nncf.torch.quantization.layers import SymmetricQuantizer + + +class TestModel(nn.Module): + def __init__(self, fq) -> None: + super().__init__() + self.fq = fq + + def forward(self, x): + return self.fq(x) + + +def check_fq_op(traced_graph: nn.Module, is_per_channel: bool): + aten_op = "aten::fake_quantize_per_channel_affine" if is_per_channel else "aten::fake_quantize_per_tensor_affine" + is_fq_node = False + for graph_node in traced_graph.inlined_graph.nodes(): + if graph_node.kind() == "prim::PythonOp": + if "Subgraph" in graph_node.attributeNames(): + subgraph = getattr(graph_node, graph_node.kindOf("Subgraph"))("Subgraph") + for subgraph_node in subgraph.nodes(): + if subgraph_node.kind() == aten_op: + is_fq_node = True + break + if is_fq_node: + break + + assert is_fq_node, "FQ operation is not found in the traced graph" + + +def test_trace_asymmetric_quantizer(is_per_channel): + if is_per_channel: + input_low = torch.tensor([-0.1, 0.1]).reshape(1, 2, 1, 1) + input_range = torch.tensor([0.3, 0.4]).reshape(1, 2, 1, 1) + else: + input_low = torch.tensor([-0.1]) + input_range = torch.tensor([1.1]) + + qspec = PTQuantizerSpec( + num_bits=8, + mode=QuantizationMode.ASYMMETRIC, + signedness_to_force=False, + narrow_range=False, + scale_shape=tuple(input_low.shape), + logarithm_scale=False, + half_range=False, + is_quantized_on_export=True, + ) + quantizer = AsymmetricQuantizer(qspec) + quantizer.input_low.data = input_low + quantizer.input_range.data = input_range + + model = TestModel(quantizer) + traced = torch.jit.trace(model, torch.ones(1, 2, 1, 1)) + check_fq_op(traced, is_per_channel) + + +def test_trace_symmetric_quantizer(is_per_channel, is_signed): + if 
is_per_channel: + scale = torch.tensor([0.3, 0.4]).reshape(1, 2, 1, 1) + else: + scale = torch.tensor([1.1]) + + qspec = PTQuantizerSpec( + num_bits=8, + mode=QuantizationMode.SYMMETRIC, + signedness_to_force=False, + narrow_range=False, + scale_shape=tuple(scale.shape), + logarithm_scale=False, + half_range=False, + is_quantized_on_export=True, + ) + quantizer = SymmetricQuantizer(qspec) + quantizer.scale.data = scale + quantizer.signed = is_signed + + model = TestModel(quantizer) + traced = torch.jit.trace(model, torch.ones(1, 2, 1, 1)) + check_fq_op(traced, is_per_channel) diff --git a/tests/torch/quantization/test_unified_scales.py b/tests/torch/quantization/test_unified_scales.py index 5ab36f23602..78f9ede3656 100644 --- a/tests/torch/quantization/test_unified_scales.py +++ b/tests/torch/quantization/test_unified_scales.py @@ -1,14 +1,14 @@ -""" - Copyright (c) Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - http://www.apache.org/licenses/LICENSE-2.0 - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. -""" +# Copyright (c) 2023 Intel Corporation +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + import itertools from collections import Counter from functools import partial diff --git a/tests/torch/requirements.txt b/tests/torch/requirements.txt index e19c77c40c4..aaff3722820 100644 --- a/tests/torch/requirements.txt +++ b/tests/torch/requirements.txt @@ -1,18 +1,19 @@ yattag>=1.14.0 prettytable>=2.0.0 -onnx>=1.8.0 -onnxruntime==1.6.0 +onnx==1.13.1 +onnxruntime==1.14.1 pytest-mock>=3.3.1 +pytest-cov pytest-dependency>=0.5.1 virtualenv # Ticket 69520 pyparsing<3.0 -# Required for search_buidling_blocks tests -transformers==4.23.1 +# Required for search_building_blocks tests +transformers[torch]~=4.30.0 # Required for movement_sparsity tests datasets~=2.12.0 evaluate==0.3.0 -timm==0.6.13 -openvino-dev==2023.0.0 +timm==0.9.2 +openvino-dev==2023.0.1 diff --git a/tests/torch/run_examples_for_test_sota.py b/tests/torch/run_examples_for_test_sota.py index 5c833184fd3..aba0837f05b 100644 --- a/tests/torch/run_examples_for_test_sota.py +++ b/tests/torch/run_examples_for_test_sota.py @@ -1,3 +1,14 @@ +# Copyright (c) 2023 Intel Corporation +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + import sys from examples.torch.classification import main as cls_main diff --git a/tests/torch/sparsity/movement/helpers/utils.py b/tests/torch/sparsity/movement/helpers/utils.py index ba1668053e0..c56dd1f6e67 100644 --- a/tests/torch/sparsity/movement/helpers/utils.py +++ b/tests/torch/sparsity/movement/helpers/utils.py @@ -37,7 +37,12 @@ def mock_linear_nncf_node( ) -> NNCFNode: graph = NNCFGraph() linear = graph.add_nncf_node( - node_name, "linear", Mock(), LinearLayerAttributes(True, in_features, out_features, bias=bias) + node_name, + "linear", + Mock(), + LinearLayerAttributes( + weight_requires_grad=True, in_features=in_features, out_features=out_features, with_bias=bias + ), ) return linear diff --git a/tests/torch/sparsity/movement/test_model_saving.py b/tests/torch/sparsity/movement/test_model_saving.py index 8bca056a3a0..29ce64246ca 100644 --- a/tests/torch/sparsity/movement/test_model_saving.py +++ b/tests/torch/sparsity/movement/test_model_saving.py @@ -25,7 +25,7 @@ from openvino.tools.mo.back.offline_transformations import apply_fused_names_cleanup from openvino.tools.mo.back.offline_transformations import apply_moc_transformations from openvino.tools.mo.back.offline_transformations import apply_user_transformations -from pkg_resources import parse_version +from packaging import version from scipy.special import softmax from transformers.trainer_utils import PREFIX_CHECKPOINT_DIR @@ -78,7 +78,7 @@ def test_no_weight_override_on_export(self, tmp_path): PTTensorListComparator.check_equal(list(state_before.values()), list(state_after.values())) @pytest.mark.skipif( - parse_version(torch.__version__) < parse_version("1.12"), + version.parse(torch.__version__) < version.parse("1.12"), reason=f"torch {torch.__version__} is not compatible with installed transformers package. " f"Some tests may fail with segmentation fault", ) @@ -126,7 +126,7 @@ def test_same_outputs_in_torch_and_exported_onnx(self, tmp_path: Path, recipe: B assert np.allclose(softmax(onnx_outputs, axis=-1), softmax(torch_outputs, axis=-1), atol=1e-6) @pytest.mark.skipif( - parse_version(torch.__version__) < parse_version("1.12"), + version.parse(torch.__version__) < version.parse("1.12"), reason=f"torch {torch.__version__} is not compatible with installed transformers package. 
" f"Some tests may fail with segmentation fault", ) @@ -269,7 +269,7 @@ def test_ngraph_pruning(self, tmp_path: Path, desc: dict): ), f"IR's size ratio: 1 - {pruned_file_bytes}/{not_pruned_file_bytes}" if abs(desc.ov_weight_ratio - desc.nncf_weight_ratio) >= 0.15: pytest.skip("Known issue in the ngraph transformation") - assert abs(file_size_ratio - compression_rate) < 0.15 + assert abs(file_size_ratio - compression_rate) < 0.152 # used to be 0.15 before OV 2023.1.0 def _get_onnx_model_inference_outputs(self, onnx_model_path: str, dataset: Dataset, recipe: BaseMockRunRecipe): sess = onnxruntime.InferenceSession(onnx_model_path) diff --git a/tests/torch/sparsity/movement/test_structured_mask.py b/tests/torch/sparsity/movement/test_structured_mask.py index d2cba30dd6a..00e41e7354e 100644 --- a/tests/torch/sparsity/movement/test_structured_mask.py +++ b/tests/torch/sparsity/movement/test_structured_mask.py @@ -17,7 +17,7 @@ import pandas as pd import pytest import torch -from pkg_resources import parse_version +from packaging import version from nncf.common.logging import nncf_logger from nncf.config import NNCFConfig @@ -169,7 +169,7 @@ def test_structured_mask_setter_with_wrong_shape(self, is_dependent_mask: bool): setattr(ctx, mask_name, torch.ones(2)) @pytest.mark.skipif( - parse_version(torch.__version__) < parse_version("1.12"), + version.parse(torch.__version__) < version.parse("1.12"), reason=f"torch {torch.__version__} may not compatible with installed transformers package. " f"Some tests may fail with error", ) diff --git a/tests/torch/sparsity/movement/test_training.py b/tests/torch/sparsity/movement/test_training.py index 8302c468493..41b12a257c2 100644 --- a/tests/torch/sparsity/movement/test_training.py +++ b/tests/torch/sparsity/movement/test_training.py @@ -17,7 +17,7 @@ import jstyleson as json import pytest import torch.cuda -from pkg_resources import parse_version +from packaging import version from pytest import approx from tests.shared.paths import PROJECT_ROOT @@ -317,7 +317,7 @@ def test_compression_movement_long_train(self, movement_desc_long: MovementTrain self._validate_train_metric(movement_desc_long) @pytest.mark.skipif( - parse_version(torch.__version__) < parse_version("1.12"), + version.parse(torch.__version__) < version.parse("1.12"), reason=f"torch {torch.__version__} may not compatible with installed transformers package. " f"Some tests may fail with error", ) diff --git a/tests/torch/sparsity/movement/test_training_with_third_party.py b/tests/torch/sparsity/movement/test_training_with_third_party.py index df67f5b5251..6286cb6ad07 100644 --- a/tests/torch/sparsity/movement/test_training_with_third_party.py +++ b/tests/torch/sparsity/movement/test_training_with_third_party.py @@ -37,13 +37,13 @@ def setup(self, temp_folder): self.env = TransformersVirtualEnvInstaller(temp_folder["venv"], temp_folder["repo"]) @pytest.mark.dependency(name="install_transformers") - def test_install_transformers_env(self, third_party, pip_cache_dir, torch_with_cuda11): + def test_install_transformers_env(self, third_party, pip_cache_dir): if not third_party: pytest.skip( "Skip tests of movement sparsity with patched transformers package " "since `--third-party-sanity` is False." 
) - self.env.install_env(pip_cache_dir, torch_with_cuda11) + self.env.install_env(pip_cache_dir) @pytest.mark.dependency(depends=["install_transformers"], name="glue_movement_train") def test_movement_glue_train(self): diff --git a/tests/torch/sparsity/movement/training_scripts/run_glue.py b/tests/torch/sparsity/movement/training_scripts/run_glue.py index 3a983550c77..09a6bf6c737 100644 --- a/tests/torch/sparsity/movement/training_scripts/run_glue.py +++ b/tests/torch/sparsity/movement/training_scripts/run_glue.py @@ -16,6 +16,7 @@ import evaluate import jstyleson import numpy as np +from transformers.training_args import ParallelMode # isort: off from nncf import NNCFConfig @@ -122,7 +123,11 @@ def __init__( self._compression_callback = CompressionCallback(compression_ctrl) callbacks = [self._compression_callback] + (callbacks or []) super().__init__(callbacks=callbacks, *args, **kwargs) - if not (self.args.local_rank == -1 or self.args.no_cuda or compression_ctrl is None): + if ( + self.args.parallel_mode == ParallelMode.DISTRIBUTED + and not self.args.no_cuda + and compression_ctrl is not None + ): compression_ctrl.distributed() def compute_loss(self, model, inputs, return_outputs=False): diff --git a/tests/torch/test_algo_common.py b/tests/torch/test_algo_common.py index 9253ac30efb..ba405187237 100644 --- a/tests/torch/test_algo_common.py +++ b/tests/torch/test_algo_common.py @@ -22,6 +22,7 @@ from nncf import NNCFConfig from nncf.api.compression import CompressionStage +from nncf.config.schemata.defaults import VALIDATE_SCOPES from nncf.torch.algo_selector import PT_COMPRESSION_ALGORITHMS from nncf.torch.compression_method_api import DOMAIN_CUSTOM_OPS_NAME from tests.torch.helpers import BasicConvTestModel @@ -394,20 +395,36 @@ def test_compression_loss_gpu_device_compatibility(config): compression_ctrl.loss() -NOT_SUPPORT_SCOPES_ALGO = ["knowledge_distillation"] +NOT_SUPPORT_SCOPES_ALGO = ["knowledge_distillation", "NoCompressionAlgorithm"] @pytest.mark.parametrize("algo_name", PT_COMPRESSION_ALGORITHMS.registry_dict.keys() - NOT_SUPPORT_SCOPES_ALGO) -def test_raise_runtimeerror_for_not_matched_scope_names(algo_name): - if algo_name == "NoCompressionAlgorithm": - pytest.skip() +@pytest.mark.parametrize("validate_scopes", (True, False, None)) +def test_raise_runtimeerror_for_not_matched_scope_names(algo_name, validate_scopes): model = BasicLinearTestModel() config = ConfigCreator().add_algo(algo_name).create() config["compression"][0]["ignored_scopes"] = ["unknown"] - with pytest.raises(RuntimeError) as exc_info: + if algo_name == "movement_sparsity": + config["compression"][0]["params"] = { + "warmup_start_epoch": 1, + "warmup_end_epoch": 3, + "enable_structured_masking": False, + "init_importance_threshold": -0.1, + "final_importance_threshold": 0.0, + "importance_regularization_factor": 0.2, + "power": 3, + "steps_per_epoch": 4, + } + + if validate_scopes is not None: + config["compression"][0]["validate_scopes"] = validate_scopes + + if validate_scopes or (validate_scopes is None and VALIDATE_SCOPES is True): + with pytest.raises(RuntimeError, match="scope definitions"): + create_compressed_model_and_algo_for_test(model, config) + else: create_compressed_model_and_algo_for_test(model, config) - assert "No match has been found among the model" in str(exc_info.value) @pytest.mark.parametrize( diff --git a/tests/torch/test_api_behavior.py b/tests/torch/test_api_behavior.py index 79130dbb201..709d84a5734 100644 --- a/tests/torch/test_api_behavior.py +++ 
b/tests/torch/test_api_behavior.py @@ -15,6 +15,7 @@ from nncf import NNCFConfig from nncf.common.quantization.quantizer_setup import SingleConfigQuantizerSetup +from nncf.torch import create_compressed_model from nncf.torch import register_default_init_args from nncf.torch.tensor_statistics.algo import TensorStatisticsCollectionBuilder from nncf.torch.tensor_statistics.algo import TensorStatisticsCollectionController @@ -22,6 +23,7 @@ from tests.torch.helpers import OnesDatasetMock from tests.torch.helpers import TwoConvTestModel from tests.torch.helpers import create_compressed_model_and_algo_for_test +from tests.torch.test_nncf_network import SimplestModel INPUT_SAMPLE_SIZE = [1, 1, 4, 4] CONFIG_WITH_ALL_INIT_TYPES = { @@ -126,3 +128,11 @@ def test_model_is_inited_with_own_device_by_default(nncf_config_with_default_ini pytest.skip("Skipping for CPU-only setups") model = DeviceCheckingModel(original_device) create_compressed_model_and_algo_for_test(model, nncf_config_with_default_init_args) + + +def test_repeat_compression_fails(): + model = SimplestModel() + nncf_config = NNCFConfig.from_dict({"input_info": {"sample_size": SimplestModel.INPUT_SIZE}}) + _ = create_compressed_model(model, nncf_config) + with pytest.raises(RuntimeError, match="The model object has already been compressed."): + _ = create_compressed_model(model, nncf_config) diff --git a/tests/torch/test_backward_compat.py b/tests/torch/test_backward_compat.py index 4e141d0e6a5..faebe58f4b4 100644 --- a/tests/torch/test_backward_compat.py +++ b/tests/torch/test_backward_compat.py @@ -217,39 +217,39 @@ def forward(self, x): sd_without_nncf_bn_wrapping = { - "nncf_module.conv.weight": torch.empty([9, 3, 3, 3]), - "nncf_module.conv.bias": torch.empty([9]), - "nncf_module.conv.nncf_padding_value": torch.empty([1]), - "nncf_module.conv.pre_ops.0.op._num_bits": torch.empty([1]), - "nncf_module.conv.pre_ops.0.op.signed_tensor": torch.empty([1]), - "nncf_module.conv.pre_ops.0.op.enabled": torch.empty([1]), - "nncf_module.conv.pre_ops.0.op.scale": torch.empty([9, 1, 1, 1]), - "nncf_module.bn.weight": torch.empty([9]), - "nncf_module.bn.bias": torch.empty([9]), - "nncf_module.bn.running_mean": torch.empty([9]), - "nncf_module.bn.running_var": torch.empty([9]), - "nncf_module.bn.num_batches_tracked": torch.empty([]), - "nncf_module.conv1.weight": torch.empty([3, 9, 3, 3]), - "nncf_module.conv1.bias": torch.empty([3]), - "nncf_module.conv1.nncf_padding_value": torch.empty([1]), - "nncf_module.conv1.pre_ops.0.op._num_bits": torch.empty([1]), - "nncf_module.conv1.pre_ops.0.op.signed_tensor": torch.empty([1]), - "nncf_module.conv1.pre_ops.0.op.enabled": torch.empty([1]), - "nncf_module.conv1.pre_ops.0.op.scale": torch.empty([3, 1, 1, 1]), - "nncf_module.bn1.weight": torch.empty([3]), - "nncf_module.bn1.bias": torch.empty([3]), - "nncf_module.bn1.running_mean": torch.empty([3]), - "nncf_module.bn1.running_var": torch.empty([3]), - "nncf_module.bn1.num_batches_tracked": torch.empty([]), - "external_quantizers./nncf_model_input_0|OUTPUT._num_bits": torch.empty([1]), - "external_quantizers./nncf_model_input_0|OUTPUT.signed_tensor": torch.empty([1]), - "external_quantizers./nncf_model_input_0|OUTPUT.enabled": torch.empty([1]), - "external_quantizers./nncf_model_input_0|OUTPUT.scale": torch.empty([1]), + "nncf_module.conv.weight": torch.ones([9, 3, 3, 3]), + "nncf_module.conv.bias": torch.ones([9]), + "nncf_module.conv.nncf_padding_value": torch.ones([1]), + "nncf_module.conv.pre_ops.0.op._num_bits": torch.ones([1]), + 
"nncf_module.conv.pre_ops.0.op.signed_tensor": torch.ones([1]), + "nncf_module.conv.pre_ops.0.op.enabled": torch.ones([1]), + "nncf_module.conv.pre_ops.0.op.scale": torch.ones([9, 1, 1, 1]), + "nncf_module.bn.weight": torch.ones([9]), + "nncf_module.bn.bias": torch.ones([9]), + "nncf_module.bn.running_mean": torch.ones([9]), + "nncf_module.bn.running_var": torch.ones([9]), + "nncf_module.bn.num_batches_tracked": torch.ones([]), + "nncf_module.conv1.weight": torch.ones([3, 9, 3, 3]), + "nncf_module.conv1.bias": torch.ones([3]), + "nncf_module.conv1.nncf_padding_value": torch.ones([1]), + "nncf_module.conv1.pre_ops.0.op._num_bits": torch.ones([1]), + "nncf_module.conv1.pre_ops.0.op.signed_tensor": torch.ones([1]), + "nncf_module.conv1.pre_ops.0.op.enabled": torch.ones([1]), + "nncf_module.conv1.pre_ops.0.op.scale": torch.ones([3, 1, 1, 1]), + "nncf_module.bn1.weight": torch.ones([3]), + "nncf_module.bn1.bias": torch.ones([3]), + "nncf_module.bn1.running_mean": torch.ones([3]), + "nncf_module.bn1.running_var": torch.ones([3]), + "nncf_module.bn1.num_batches_tracked": torch.ones([]), + "external_quantizers./nncf_model_input_0|OUTPUT._num_bits": torch.ones([1]), + "external_quantizers./nncf_model_input_0|OUTPUT.signed_tensor": torch.ones([1]), + "external_quantizers./nncf_model_input_0|OUTPUT.enabled": torch.ones([1]), + "external_quantizers./nncf_model_input_0|OUTPUT.scale": torch.ones([1]), # Old bn layer names: ||||||||||| - "external_quantizers.ConvBNLayer/BatchNorm2d[bn]/batch_norm_0|OUTPUT._num_bits": torch.empty([1]), - "external_quantizers.ConvBNLayer/BatchNorm2d[bn]/batch_norm_0|OUTPUT.signed_tensor": torch.empty([1]), - "external_quantizers.ConvBNLayer/BatchNorm2d[bn]/batch_norm_0|OUTPUT.enabled": torch.empty([1]), - "external_quantizers.ConvBNLayer/BatchNorm2d[bn]/batch_norm_0|OUTPUT.scale": torch.empty([1]), + "external_quantizers.ConvBNLayer/BatchNorm2d[bn]/batch_norm_0|OUTPUT._num_bits": torch.ones([1]), + "external_quantizers.ConvBNLayer/BatchNorm2d[bn]/batch_norm_0|OUTPUT.signed_tensor": torch.ones([1]), + "external_quantizers.ConvBNLayer/BatchNorm2d[bn]/batch_norm_0|OUTPUT.enabled": torch.ones([1]), + "external_quantizers.ConvBNLayer/BatchNorm2d[bn]/batch_norm_0|OUTPUT.scale": torch.ones([1]), } compression_state_without_bn_wrapping = { diff --git a/tests/torch/test_compressed_graph.py b/tests/torch/test_compressed_graph.py index c1498bc5219..d2a1c725af5 100644 --- a/tests/torch/test_compressed_graph.py +++ b/tests/torch/test_compressed_graph.py @@ -9,6 +9,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+import itertools import os from abc import ABC from abc import abstractmethod @@ -66,8 +67,10 @@ from tests.torch.test_models.synthetic import MMDivConv from tests.torch.test_models.synthetic import ModelWithDummyParameter from tests.torch.test_models.synthetic import MultiOutputSameTensorModel +from tests.torch.test_models.synthetic import OrdinaryModelWithRecurrentInName from tests.torch.test_models.synthetic import PoolUnPool from tests.torch.test_models.synthetic import ReshapeModel +from tests.torch.test_models.synthetic import ShiftScaleParametrized from tests.torch.test_models.synthetic import TransposeModel @@ -575,6 +578,21 @@ def forward(self, x): return TestModel(self.tensor_method, **self.model_kwargs) +shift_scale_models = [] +params_combinations = list(itertools.product([True, False], repeat=2)) + + +for pair in params_combinations: + names = ["is_single_input", "use_normalize"] + kwargs = dict(zip(names, pair)) + desc = GeneralModelDesc( + model_name=ShiftScaleParametrized.get_name(**kwargs), + model_builder=partial(ShiftScaleParametrized, **kwargs), + input_sample_sizes=(ShiftScaleParametrized.INPUT_SIZES), + ) + shift_scale_models.append(desc) + + TWO_INT_INPUTS_INFO = [{"sample_size": [1], "type": "long"}, {"sample_size": [1], "type": "long"}] SYNTHETIC_MODEL_DESC_LIST = [ SingleLayerModelDesc(layer=nn.Conv1d(1, 1, 1), input_sample_sizes=[1, 1, 1]), @@ -732,6 +750,12 @@ def forward(self, x): wrap_inputs_fn=partial(n_inputs_fn, nargs=3), ), GeneralModelDesc(model_builder=MHA_single_input, input_sample_sizes=(MHA_single_input.INPUT_SIZES,)), + GeneralModelDesc( + model_name="OrdinaryModelWithRecurrentInName", + model_builder=OrdinaryModelWithRecurrentInName, + input_sample_sizes=([1, 1, 2, 2]), + ), + *shift_scale_models, ] @@ -739,12 +763,15 @@ def forward(self, x): "synthetic_model_desc", SYNTHETIC_MODEL_DESC_LIST, ids=[m.model_name for m in SYNTHETIC_MODEL_DESC_LIST] ) def test_synthetic_model_quantization(synthetic_model_desc: IModelDesc): + model = synthetic_model_desc.get_model() + if isinstance(model, MultiOutputSameTensorModel): + pytest.xfail("The MultiOutputSameTensorModel is skipped, ticket 110944.") + config = get_basic_quantization_config( input_sample_sizes=synthetic_model_desc.get_input_sample_sizes(), input_info=synthetic_model_desc.input_info ) register_bn_adaptation_init_args(config) - model = synthetic_model_desc.get_model() compressed_model, _ = create_compressed_model_and_algo_for_test( model, config, wrap_inputs_fn=synthetic_model_desc.get_wrap_inputs_fn() ) diff --git a/tests/torch/test_compression_training.py b/tests/torch/test_compression_training.py index 35e8e37f25c..a58b2563caa 100644 --- a/tests/torch/test_compression_training.py +++ b/tests/torch/test_compression_training.py @@ -1,15 +1,13 @@ -""" - Copyright (c) 2019-2023 Intel Corporation - Licensed under the Apache License, Version 2.0 (the 'License'); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - http://www.apache.org/licenses/LICENSE-2.0 - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an 'AS IS' BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. -""" +# Copyright (c) 2023 Intel Corporation +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. import json import os @@ -321,7 +319,7 @@ def _get_weight_path(self, weekly_models_path): NASTrainingTestDescriptor() .real_dataset("cifar10") .config_name("mobilenet_v2_nas_SMALL.json") - .expected_accuracy(80.95) + .expected_accuracy(85.1) .subnet_expected_accuracy(88.67) .weights_filename("mobilenet_v2_cifar10_93.91.pth") .absolute_tolerance_train(1.0) @@ -330,7 +328,7 @@ def _get_weight_path(self, weekly_models_path): .real_dataset("cifar10") .config_name("resnet50_nas_SMALL.json") .subnet_expected_accuracy(88.67) - .expected_accuracy(87.25) + .expected_accuracy(85.19) .weights_filename("resnet50_cifar10_93.65.pth") .absolute_tolerance_train(2.0) .absolute_tolerance_eval(2e-2), diff --git a/tests/torch/test_config_schema.py b/tests/torch/test_config_schema.py index cc833aa872b..7b4bf9a5534 100644 --- a/tests/torch/test_config_schema.py +++ b/tests/torch/test_config_schema.py @@ -1,3 +1,14 @@ +# Copyright (c) 2023 Intel Corporation +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + from collections import namedtuple from pathlib import Path from typing import List diff --git a/tests/torch/test_context_independence.py b/tests/torch/test_context_independence.py index cf92f3616de..b7d4a38e1ec 100644 --- a/tests/torch/test_context_independence.py +++ b/tests/torch/test_context_independence.py @@ -1,3 +1,14 @@ +# Copyright (c) 2023 Intel Corporation +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + import os import pytest diff --git a/tests/torch/test_custom_modules.py b/tests/torch/test_custom_modules.py index afe2fe495bf..1fe0e4c7a64 100644 --- a/tests/torch/test_custom_modules.py +++ b/tests/torch/test_custom_modules.py @@ -1,3 +1,14 @@ +# Copyright (c) 2023 Intel Corporation +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + import torch import torch.nn.functional diff --git a/tests/torch/test_distributed_data_parallel_mode.py b/tests/torch/test_distributed_data_parallel_mode.py index a82ff824d13..edd9a7a0eea 100644 --- a/tests/torch/test_distributed_data_parallel_mode.py +++ b/tests/torch/test_distributed_data_parallel_mode.py @@ -1,3 +1,14 @@ +# Copyright (c) 2023 Intel Corporation +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + import time from typing import Tuple diff --git a/tests/torch/test_extensions_build.py b/tests/torch/test_extensions_build.py index d1e97b462e2..149bb5178ea 100644 --- a/tests/torch/test_extensions_build.py +++ b/tests/torch/test_extensions_build.py @@ -1,3 +1,14 @@ +# Copyright (c) 2023 Intel Corporation +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + import os import pathlib import shutil diff --git a/tests/torch/test_frozen_layers.py b/tests/torch/test_frozen_layers.py index aa05eafe9b4..26704dad0a3 100644 --- a/tests/torch/test_frozen_layers.py +++ b/tests/torch/test_frozen_layers.py @@ -1,3 +1,14 @@ +# Copyright (c) 2023 Intel Corporation +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ import pytest from nncf import NNCFConfig diff --git a/tests/torch/test_graph_analysis.py b/tests/torch/test_graph_analysis.py index 275947a97d6..6a3e1615b70 100644 --- a/tests/torch/test_graph_analysis.py +++ b/tests/torch/test_graph_analysis.py @@ -57,6 +57,7 @@ def make_mock_edge( output_port_id=output_port_id, tensor_shape=[1, 1, 1, 1], dtype=Dtype.FLOAT, + parallel_input_port_ids=[], ) def get_node(name: NNCFNodeName): diff --git a/tests/torch/test_graph_building.py b/tests/torch/test_graph_building.py index 44e6fe51442..11fa4a213f3 100644 --- a/tests/torch/test_graph_building.py +++ b/tests/torch/test_graph_building.py @@ -42,6 +42,7 @@ from nncf.torch.graph.operator_metatypes import PTGatherMetatype from nncf.torch.graph.operator_metatypes import PTReshapeMetatype from nncf.torch.graph.operator_metatypes import PTSplitMetatype +from nncf.torch.graph.operator_metatypes import PTSqueezeMetatype from nncf.torch.graph.operator_metatypes import PTTransposeMetatype from tests.torch.helpers import create_compressed_model_and_algo_for_test from tests.torch.helpers import register_bn_adaptation_init_args @@ -308,7 +309,7 @@ def test_reshape_attributes_saved_during_graph_building(input_shape): } for node in graph.get_all_nodes(): - if node.metatype is PTReshapeMetatype: + if node.metatype in [PTReshapeMetatype, PTSqueezeMetatype]: assert node.node_name in reshape_nodes_with_attributes if isinstance(node.layer_attributes, ReshapeLayerAttributes): ref_attrs = reshape_nodes_with_attributes[node.node_name] @@ -354,7 +355,7 @@ def test_permute_attributes_saved_during_graph_building(input_shape): "ModelWithPermute/transpose_1": TransposeLayerAttributes(1, 3), "ModelWithPermute/transpose_2": TransposeLayerAttributes(1, 3), "ModelWithPermute/permute_0": PermuteLayerAttributes((3, 2, 1, 0)), - "ModelWithPermute/permute_1": PermuteLayerAttributes((3, 2, 1, 0)), + "ModelWithPermute/permute_1": PermuteLayerAttributes([3, 2, 1, 0]), } for node in graph.get_all_nodes(): @@ -734,3 +735,23 @@ def test_integer_path_marking(): def test_trace_output_with_no_tensors(): output = None trace_tensors(output, MagicMock()) + + +class ModelWithRepeatInputs(torch.nn.Module): + def forward(self, x): + y = x * 2 + return torch.stack([x, y, x, y]) + + +def test_dynamic_graph_assigns_contiguous_input_ports_for_edges_with_multiplicity(): + input_infos = [ + ModelInputInfo([1, 3, 3, 3]), + ] + tracer = GraphTracer(create_dummy_forward_fn(input_infos, with_input_tracing=True, with_output_tracing=True)) + dynamic_graph = tracer.trace_graph(ModelWithRepeatInputs()) + stack_in_edges = [e for e in dynamic_graph.get_all_edges() if e.to_node_id == 2] # node id 2 == torch.stack + all_input_port_ids = set() + for edge in stack_in_edges: + all_input_port_ids.add(edge.input_port_id) + all_input_port_ids.update(edge.parallel_input_port_ids) + assert all_input_port_ids == {0, 1, 2, 3} diff --git a/tests/torch/test_input_management.py b/tests/torch/test_input_management.py index deaf3e4cea3..bb7847145dc 100644 --- a/tests/torch/test_input_management.py +++ b/tests/torch/test_input_management.py @@ -1,3 +1,14 @@ +# Copyright (c) 2023 Intel Corporation +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + import inspect import pytest diff --git a/tests/torch/test_layer_attributes.py b/tests/torch/test_layer_attributes.py index e3b4d67031c..65c841cc890 100644 --- a/tests/torch/test_layer_attributes.py +++ b/tests/torch/test_layer_attributes.py @@ -98,7 +98,7 @@ def __str__(self): BATCH_NORM_REF_ATTR = GenericWeightedLayerAttributes( - weight_requires_grad=True, weight_shape=Size([1]), filter_dimension_idx=0 + weight_requires_grad=True, weight_shape=Size([1]), filter_dimension_idx=0, with_bias=True ) LIST_TEST_DESCS = [ LayerAttributesTestDesc( @@ -134,9 +134,11 @@ def __str__(self): out_channels=1, kernel_size=(1, 1), stride=(1, 1), + dilations=(1, 1), groups=1, transpose=False, padding_values=(0, 0), + with_bias=True, ), metatype_cls=PTConv2dMetatype, ), @@ -149,9 +151,11 @@ def __str__(self): out_channels=2, kernel_size=(1, 1), stride=(1, 1), + dilations=(1, 1), groups=2, transpose=False, padding_values=(0, 0), + with_bias=True, ), metatype_cls=PTConv2dMetatype, ), @@ -164,9 +168,11 @@ def __str__(self): out_channels=1, kernel_size=(1,), stride=(1,), + dilations=(1,), groups=1, transpose=False, padding_values=(0,), + with_bias=True, ), metatype_cls=PTConv1dMetatype, ), @@ -179,9 +185,11 @@ def __str__(self): out_channels=1, kernel_size=(1, 1, 1), stride=(1, 1, 1), + dilations=(1, 1, 1), groups=1, transpose=False, padding_values=(0, 0, 0), + with_bias=True, ), metatype_cls=PTConv3dMetatype, ), @@ -194,9 +202,11 @@ def __str__(self): out_channels=1, kernel_size=(1,), stride=(1,), + dilations=(1,), groups=1, transpose=True, padding_values=(0,), + with_bias=True, ), metatype_cls=PTConvTranspose1dMetatype, ), @@ -209,9 +219,11 @@ def __str__(self): out_channels=1, kernel_size=(1, 1), stride=(1, 1), + dilations=(1, 1), groups=1, transpose=True, padding_values=(0, 0), + with_bias=True, ), metatype_cls=PTConvTranspose2dMetatype, ), @@ -224,9 +236,11 @@ def __str__(self): out_channels=1, kernel_size=(1, 1, 1), stride=(1, 1, 1), + dilations=(1, 1, 1), groups=1, transpose=True, padding_values=(0, 0, 0), + with_bias=True, ), metatype_cls=PTConvTranspose3dMetatype, ), @@ -239,7 +253,9 @@ def __str__(self): LayerAttributesTestDesc( module=nn.Linear(1, 1, bias=False), model_input_info_list=[ModelInputInfo([1, 1, 1, 1])], - layer_attributes=LinearLayerAttributes(weight_requires_grad=True, in_features=1, out_features=1, bias=False), + layer_attributes=LinearLayerAttributes( + weight_requires_grad=True, in_features=1, out_features=1, with_bias=False + ), metatype_cls=PTLinearMetatype, ), LayerAttributesTestDesc( diff --git a/tests/torch/test_model_transformer.py b/tests/torch/test_model_transformer.py new file mode 100644 index 00000000000..10ac161b588 --- /dev/null +++ b/tests/torch/test_model_transformer.py @@ -0,0 +1,507 @@ +# Copyright (c) 2023 Intel Corporation +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +import itertools +import os +from collections import Counter +from pathlib import Path +from typing import List + +import networkx as nx +import pytest +import torch +import torch.nn.functional as F +from torch import nn + +from nncf.common.graph.definitions import MODEL_INPUT_OP_NAME +from nncf.common.graph.definitions import MODEL_OUTPUT_OP_NAME +from nncf.common.graph.patterns.manager import PatternsManager +from nncf.common.graph.patterns.manager import TargetDevice +from nncf.common.graph.transformations.commands import TargetType +from nncf.common.graph.transformations.commands import TransformationPriority +from nncf.common.insertion_point_graph import InsertionPointGraph +from nncf.common.insertion_point_graph import InsertionPointGraphNodeType +from nncf.common.insertion_point_graph import PostHookInsertionPoint +from nncf.common.insertion_point_graph import PreHookInsertionPoint +from nncf.common.quantization.structs import QuantizationMode +from nncf.common.utils.backend import BackendType +from nncf.common.utils.dot_file_rw import get_graph_without_data +from nncf.common.utils.dot_file_rw import read_dot_graph +from nncf.common.utils.dot_file_rw import write_dot_graph +from nncf.torch.dynamic_graph.context import PreHookId +from nncf.torch.dynamic_graph.graph_tracer import ModelInputInfo +from nncf.torch.dynamic_graph.operation_address import OperationAddress +from nncf.torch.graph.operator_metatypes import PTConv2dMetatype +from nncf.torch.graph.operator_metatypes import PTInputNoopMetatype +from nncf.torch.graph.operator_metatypes import PTModuleConv2dMetatype +from nncf.torch.graph.operator_metatypes import PTOutputNoopMetatype +from nncf.torch.graph.operator_metatypes import PTReshapeMetatype +from nncf.torch.graph.transformations.commands import PTBiasCorrectionCommand +from nncf.torch.graph.transformations.commands import PTInsertionCommand +from nncf.torch.graph.transformations.commands import PTModelExtractionWithFusedBiasCommand +from nncf.torch.graph.transformations.commands import PTTargetPoint +from nncf.torch.graph.transformations.layout import PTTransformationLayout +from nncf.torch.layers import NNCFConv2d +from nncf.torch.model_transformer import PTModelTransformer +from nncf.torch.module_operations import BaseOp +from nncf.torch.nncf_network import NNCFNetwork +from nncf.torch.nncf_network import PTInsertionPoint +from nncf.torch.nncf_network import PTInsertionType +from nncf.torch.quantization.layers import AsymmetricQuantizer +from nncf.torch.quantization.layers import PTQuantizerSpec +from tests.common.quantization.mock_graphs import get_ip_graph_for_test +from tests.common.quantization.mock_graphs import get_mock_model_graph_with_broken_output_edge_pattern +from tests.common.quantization.mock_graphs import get_mock_model_graph_with_mergeable_pattern +from tests.common.quantization.mock_graphs import get_mock_model_graph_with_no_mergeable_pattern +from tests.common.quantization.mock_graphs import get_nncf_graph_from_mock_nx_graph +from tests.common.quantization.mock_graphs import get_two_branch_mock_model_graph +from tests.shared.paths import TEST_ROOT + 
+ +class InsertionPointTestModel(nn.Module): + def __init__(self): + super().__init__() + self.conv1 = nn.Conv2d(1, 1, 1, 1) + self.linear_wts = nn.Parameter(torch.FloatTensor(size=(100, 100))) + self.conv2 = nn.Conv2d(1, 1, 1, 1) + self.relu = nn.ReLU() + + def forward(self, input_): + x = self.conv1(input_) + x = x.flatten() + x = nn.functional.linear(x, self.linear_wts) + x = x.reshape((1, 1, 10, 10)) + x = self.conv2(x) + x = self.relu(x) + return x + + +class TestInsertionCommands: + @pytest.fixture() + def setup(self): + self.compressed_model = NNCFNetwork( + InsertionPointTestModel(), [ModelInputInfo([1, 1, 10, 10])] + ) # type: NNCFNetwork + + conv1_node_name = "InsertionPointTestModel/NNCFConv2d[conv1]/conv2d_0" + point_for_conv1_weights = PTTargetPoint( + target_type=TargetType.OPERATION_WITH_WEIGHTS, target_node_name=conv1_node_name + ) + point_for_conv1_inputs = PTTargetPoint(target_type=TargetType.OPERATOR_PRE_HOOK, target_node_name=conv1_node_name) + point_for_conv1_activations = PTTargetPoint( + target_type=TargetType.POST_LAYER_OPERATION, target_node_name=conv1_node_name + ) + + conv2_node_name = "InsertionPointTestModel/NNCFConv2d[conv2]/conv2d_0" + point_for_conv2_weights = PTTargetPoint( + target_type=TargetType.OPERATION_WITH_WEIGHTS, target_node_name=conv2_node_name + ) + point_for_conv2_inputs = PTTargetPoint(target_type=TargetType.OPERATOR_PRE_HOOK, target_node_name=conv2_node_name) + point_for_conv2_activations = PTTargetPoint( + target_type=TargetType.POST_LAYER_OPERATION, target_node_name=conv2_node_name + ) + + linear_node_name = "InsertionPointTestModel/linear_0" + point_for_linear_weight_input = PTTargetPoint( + target_type=TargetType.OPERATOR_PRE_HOOK, target_node_name=linear_node_name, input_port_id=0 + ) + point_for_linear_activation = PTTargetPoint( + target_type=TargetType.OPERATOR_POST_HOOK, target_node_name=linear_node_name + ) + + relu_node_name = "InsertionPointTestModel/ReLU[relu]/relu_0" + point_for_relu_inputs = PTTargetPoint( + target_type=TargetType.OPERATOR_PRE_HOOK, target_node_name=relu_node_name, input_port_id=0 + ) + point_for_relu_activations = PTTargetPoint( + target_type=TargetType.OPERATOR_POST_HOOK, target_node_name=relu_node_name + ) + + available_points = [ + point_for_conv1_weights, + point_for_conv2_weights, + point_for_conv1_inputs, + point_for_conv2_inputs, + point_for_conv1_activations, + point_for_conv2_activations, + point_for_linear_activation, + point_for_linear_weight_input, + point_for_relu_activations, + point_for_relu_inputs, + ] + + @pytest.mark.parametrize("target_point", available_points) + def test_single_insertions(self, setup, target_point: PTTargetPoint): + insertion_point = PTInsertionPoint( + target_point.target_type, + OperationAddress.from_str(target_point.target_node_name), + target_point.input_port_id, + ) + if insertion_point.insertion_type in [PTInsertionType.OPERATOR_PRE_HOOK, PTInsertionType.OPERATOR_POST_HOOK]: + hook = lambda x: x + else: + hook = BaseOp(lambda x: x) + + self.compressed_model.nncf.insert_at_point(insertion_point, [hook]) + + # pylint:disable=protected-access + if insertion_point.insertion_type == PTInsertionType.OPERATOR_PRE_HOOK: + ctx = self.compressed_model.nncf.get_tracing_context() + pre_hook_id = PreHookId(insertion_point.op_address, input_port_id=insertion_point.input_port_id) + assert ctx._pre_hooks[pre_hook_id][0] is hook + if insertion_point.insertion_type == PTInsertionType.OPERATOR_POST_HOOK: + ctx = self.compressed_model.nncf.get_tracing_context() + assert 
ctx._post_hooks[insertion_point.op_address][0] is hook + if insertion_point.insertion_type == PTInsertionType.NNCF_MODULE_PRE_OP: + module = self.compressed_model.nncf.get_module_by_scope(insertion_point.module_scope) + assert module.pre_ops["0"] is hook + + if insertion_point.insertion_type == PTInsertionType.NNCF_MODULE_POST_OP: + module = self.compressed_model.nncf.get_module_by_scope(insertion_point.module_scope) + assert module.post_ops["0"] is hook + + priority_types = ["same", "different"] + insertion_types = TargetType + priority_test_cases = list(itertools.product(priority_types, insertion_types)) + + @staticmethod + def check_order(iterable1: List, iterable2: List, ordering: List): + for idx, order in enumerate(ordering): + assert iterable1[idx] is iterable2[order] + + # pylint:disable=undefined-variable + @pytest.mark.parametrize("case", priority_test_cases, ids=[x[1].name + "-" + x[0] for x in priority_test_cases]) + def test_priority(self, case, setup): + # pylint:disable=too-many-branches + priority_type = case[0] + insertion_type = case[1] + + if insertion_type == TargetType.OPERATION_WITH_WEIGHTS: + hook1 = BaseOp(lambda x: x) + hook2 = BaseOp(lambda x: 2 * x) + hook3 = BaseOp(lambda x: 3 * x) + elif insertion_type == TargetType.POST_LAYER_OPERATION: + hook1 = BaseOp(lambda m, x: x) + hook2 = BaseOp(lambda m, x: 2 * x) + hook3 = BaseOp(lambda m, x: 3 * x) + else: + hook1 = lambda x: x + hook2 = lambda x: 2 * x + hook3 = lambda x: 3 * x + + if insertion_type == TargetType.OPERATION_WITH_WEIGHTS: + point = self.point_for_conv2_weights + elif insertion_type == TargetType.POST_LAYER_OPERATION: + point = self.point_for_conv1_activations + elif insertion_type == TargetType.OPERATOR_PRE_HOOK: + point = self.point_for_linear_weight_input + elif insertion_type == TargetType.OPERATOR_POST_HOOK: + point = self.point_for_relu_activations + else: + pytest.skip("Insertion type {} currently unsupported in PT".format(insertion_type)) + + if priority_type == "same": + # Same-priority commands will be executed in registration order + command1 = PTInsertionCommand(point, hook1, TransformationPriority.DEFAULT_PRIORITY) + command2 = PTInsertionCommand(point, hook2, TransformationPriority.DEFAULT_PRIORITY) + command3 = PTInsertionCommand(point, hook3, TransformationPriority.DEFAULT_PRIORITY) + else: + # Prioritized commands will be executed in ascending priority order + command1 = PTInsertionCommand(point, hook1, TransformationPriority.SPARSIFICATION_PRIORITY) + command2 = PTInsertionCommand(point, hook2, TransformationPriority.QUANTIZATION_PRIORITY) + command3 = PTInsertionCommand(point, hook3, TransformationPriority.DEFAULT_PRIORITY) + + layout = PTTransformationLayout() + layout.register(command1) + layout.register(command2) + layout.register(command3) + self.compressed_model = PTModelTransformer(self.compressed_model).transform(layout) + + hook_list = [hook1, hook2, hook3] + + if priority_type == "same": + order = [0, 1, 2] + elif priority_type == "different": + order = [2, 0, 1] + + # pylint:disable=protected-access + if insertion_type == TargetType.OPERATOR_PRE_HOOK: + ctx = self.compressed_model.nncf.get_tracing_context() + pre_hook_id = PreHookId( + OperationAddress.from_str(point.target_node_name), input_port_id=point.input_port_id + ) + self.check_order(ctx._pre_hooks[pre_hook_id], hook_list, order) + if insertion_type == TargetType.OPERATOR_POST_HOOK: + ctx = self.compressed_model.nncf.get_tracing_context() + 
self.check_order(ctx._post_hooks[OperationAddress.from_str(point.target_node_name)], hook_list, order) + + if insertion_type == TargetType.OPERATION_WITH_WEIGHTS: + module = self.compressed_model.nncf.get_containing_module(point.target_node_name) + # Works because Pytorch ModuleDict is ordered + self.check_order([x.operand for x in module.pre_ops.values()], hook_list, order) + + if insertion_type == TargetType.POST_LAYER_OPERATION: + module = self.compressed_model.nncf.get_containing_module(point.target_node_name) + # Works because Pytorch ModuleDict is ordered + self.check_order(list(module.post_ops.values()), hook_list, order) + + +MERGE_PATTERN_TEST_CASES = ( + [get_mock_model_graph_with_mergeable_pattern, "basic_pattern"], + [get_mock_model_graph_with_no_mergeable_pattern, "no_pattern"], + [get_mock_model_graph_with_broken_output_edge_pattern, "broken_output_edges_pattern"], +) + + +class TestInsertionPointGraph: + def test_insertion_point_setup(self): + # TODO: Change testing premises when module pre/post-op hooks and input/output nodes + # are correctly handled + mock_graph = get_two_branch_mock_model_graph() + + ip_graph = get_ip_graph_for_test(mock_graph) + + nx_graph = mock_graph.get_nx_graph_copy() + ref_node_len = 3 * len(nx_graph.nodes) # 2 additional nodes per each operator node + ref_edge_len = 3 * len(nx_graph.edges) + + assert len(ip_graph.nodes) == ref_node_len + assert len(ip_graph.edges) == ref_edge_len + + for nncf_node_idx in mock_graph.get_all_node_ids(): + node_key = mock_graph.get_node_key_by_id(nncf_node_idx) + ip_graph_op_node = ip_graph.nodes[node_key] + assert ip_graph_op_node[InsertionPointGraph.NODE_TYPE_NODE_ATTR] == InsertionPointGraphNodeType.OPERATOR + preds = list(ip_graph.predecessors(node_key)) + succs = list(ip_graph.successors(node_key)) + assert len(succs) == 1 + post_hook_ip_node_key = succs[0] + post_hook_ip_node = ip_graph.nodes[succs[0]] + post_hook_ip_node_type = post_hook_ip_node[InsertionPointGraph.NODE_TYPE_NODE_ATTR] + assert post_hook_ip_node_type == InsertionPointGraphNodeType.POST_HOOK + + pre_hook_ip_node_keys = preds + for pre_hook_ip_node_key in pre_hook_ip_node_keys: + pre_hook_ip_node = ip_graph.nodes[pre_hook_ip_node_key] + pre_hook_ip_node_type = pre_hook_ip_node[InsertionPointGraph.NODE_TYPE_NODE_ATTR] + assert pre_hook_ip_node_type == InsertionPointGraphNodeType.PRE_HOOK + + ref_associated_ip_node_keys_set = {*pre_hook_ip_node_keys, post_hook_ip_node_key} + assert ( + ref_associated_ip_node_keys_set + == ip_graph_op_node[InsertionPointGraph.ASSOCIATED_IP_NODE_KEYS_NODE_ATTR] + ) + original_neighbours = nx_graph.neighbors(node_key) + for neighbour in original_neighbours: + # IP node insertion should not disrupt the graph superstructure + ip_graph_paths = list(nx.all_simple_paths(ip_graph, node_key, neighbour)) + for path in ip_graph_paths: + path = path[1:-1] + for path_node_key in path: + node = ip_graph.nodes[path_node_key] + node_type = node[InsertionPointGraph.NODE_TYPE_NODE_ATTR] + assert node_type in [ + InsertionPointGraphNodeType.PRE_HOOK, + InsertionPointGraphNodeType.POST_HOOK, + ] + + for node_key, node in ip_graph.nodes.items(): + preds = list(ip_graph.predecessors(node_key)) + succs = list(ip_graph.successors(node_key)) + assert len(preds) != 0 or len(succs) != 0 + + for from_node_key, to_node_key in ip_graph.edges.keys(): + assert from_node_key in ip_graph.nodes + assert to_node_key in ip_graph.nodes + + def test_insertion_point_data_in_ip_nodes(self): + # TODO: extend for modules + mock_graph = nx.DiGraph() + + 
mock_graph.add_node("bar") + mock_graph.add_node("baz") + mock_graph.add_edge("bar", "baz") + nncf_graph = get_nncf_graph_from_mock_nx_graph(mock_graph) + + ip_graph = get_ip_graph_for_test(nncf_graph) + + for nncf_node in nncf_graph.get_all_nodes(): + node_id = nncf_node.node_id + node_key = nncf_graph.get_node_key_by_id(node_id) + preds = list(ip_graph.predecessors(node_key)) + succs = list(ip_graph.successors(node_key)) + + post_hook_ip_node = ip_graph.nodes[succs[0]] + post_hook_ip = post_hook_ip_node[InsertionPointGraph.INSERTION_POINT_NODE_ATTR] + assert isinstance(post_hook_ip, PostHookInsertionPoint) + assert post_hook_ip.target_node_name == nncf_node.node_name + + for pre_hook_ip_node_key in preds: + pre_hook_ip_node = ip_graph.nodes[pre_hook_ip_node_key] + pre_hook_ip = pre_hook_ip_node[InsertionPointGraph.INSERTION_POINT_NODE_ATTR] + assert isinstance(pre_hook_ip, PreHookInsertionPoint) + assert pre_hook_ip.target_node_name == nncf_node.node_name + + def test_operator_metatype_marking(self): + from nncf.torch.graph.operator_metatypes import PTAddMetatype + from nncf.torch.graph.operator_metatypes import PTAvgPool2dMetatype + from nncf.torch.graph.operator_metatypes import PTBatchNormMetatype + from nncf.torch.graph.operator_metatypes import PTConvTranspose2dMetatype + from nncf.torch.graph.operator_metatypes import PTDepthwiseConv2dSubtype + from nncf.torch.graph.operator_metatypes import PTLinearMetatype + from nncf.torch.graph.operator_metatypes import PTMaxPool2dMetatype + from nncf.torch.graph.operator_metatypes import PTModuleBatchNormMetatype + from nncf.torch.graph.operator_metatypes import PTModuleConvTranspose2dMetatype + from nncf.torch.graph.operator_metatypes import PTModuleLinearMetatype + from nncf.torch.graph.operator_metatypes import PTRELUMetatype + from nncf.torch.graph.operator_metatypes import PTTransposeMetatype + + ref_scope_vs_metatype_dict = { + "/" + MODEL_INPUT_OP_NAME + "_0": PTInputNoopMetatype, + "ModelForMetatypeTesting/NNCFConv2d[conv_regular]/conv2d_0": PTModuleConv2dMetatype, + "ModelForMetatypeTesting/NNCFBatchNorm2d[bn]/batch_norm_0": PTModuleBatchNormMetatype, + "ModelForMetatypeTesting/batch_norm_0": PTBatchNormMetatype, + "ModelForMetatypeTesting/relu_0": PTRELUMetatype, + "ModelForMetatypeTesting/transpose__0": PTTransposeMetatype, + "ModelForMetatypeTesting/MaxPool2d[max_pool2d]/max_pool2d_0": PTMaxPool2dMetatype, + "ModelForMetatypeTesting/NNCFConvTranspose2d[conv_transpose]/conv_transpose2d_0": ( + PTModuleConvTranspose2dMetatype + ), + "ModelForMetatypeTesting/conv_transpose2d_0": PTConvTranspose2dMetatype, + "ModelForMetatypeTesting/__add___0": PTAddMetatype, + "ModelForMetatypeTesting/NNCFConv2d[conv_depthwise]/conv2d_0": PTDepthwiseConv2dSubtype, + "ModelForMetatypeTesting/conv2d_0": PTConv2dMetatype, + "ModelForMetatypeTesting/__iadd___0": PTAddMetatype, + "ModelForMetatypeTesting/AdaptiveAvgPool2d[adaptive_avg_pool]/adaptive_avg_pool2d_0": PTAvgPool2dMetatype, + "ModelForMetatypeTesting/flatten_0": PTReshapeMetatype, + "ModelForMetatypeTesting/NNCFLinear[linear]/linear_0": PTModuleLinearMetatype, + "ModelForMetatypeTesting/linear_0": PTLinearMetatype, + "/" + MODEL_OUTPUT_OP_NAME + "_0": PTOutputNoopMetatype, + } + + class ModelForMetatypeTesting(torch.nn.Module): + def __init__(self): + super().__init__() + self.conv_regular = torch.nn.Conv2d(in_channels=3, out_channels=16, kernel_size=3) + self.bn = torch.nn.BatchNorm2d(num_features=16) + self.max_pool2d = torch.nn.MaxPool2d(kernel_size=2) + self.conv_transpose = 
torch.nn.ConvTranspose2d(in_channels=16, out_channels=8, kernel_size=3) + self.conv_depthwise = torch.nn.Conv2d(in_channels=8, out_channels=8, kernel_size=5, groups=8) + self.adaptive_avg_pool = torch.nn.AdaptiveAvgPool2d(output_size=1) + self.linear = torch.nn.Linear(in_features=8, out_features=8) + + def forward(self, input_): + x = self.conv_regular(input_) + x = self.bn(x) + x = F.batch_norm(x, self.bn.running_mean, self.bn.running_var) + x = F.relu(x) + x.transpose_(2, 3) + x = self.max_pool2d(x) + y = self.conv_transpose(x) + z = F.conv_transpose2d(x, self.conv_transpose.weight) + x = y + z + x = self.conv_depthwise(x) + x = F.conv2d(x, self.conv_depthwise.weight, groups=self.conv_depthwise.groups) + x += torch.ones_like(x) + x = self.adaptive_avg_pool(x) + x = self.linear(x.flatten()) + x = F.linear(x, self.linear.weight) + return x + + model = ModelForMetatypeTesting() + nncf_network = NNCFNetwork(model, [ModelInputInfo([1, 3, 300, 300])]) + nncf_graph = nncf_network.nncf.get_original_graph() + + for nncf_node in nncf_graph.get_all_nodes(): # type: NNCFNode + assert nncf_node.node_name in ref_scope_vs_metatype_dict + ref_metatype = ref_scope_vs_metatype_dict[nncf_node.node_name] + assert nncf_node.metatype == ref_metatype + + @pytest.mark.parametrize( + ("mock_graph_factory", "dot_file_name"), MERGE_PATTERN_TEST_CASES, ids=[x[1] for x in MERGE_PATTERN_TEST_CASES] + ) + def test_get_ip_graph_with_merged_operations(self, mock_graph_factory, dot_file_name): + mock_graph = mock_graph_factory() + ip_graph = get_ip_graph_for_test(mock_graph) + pattern = PatternsManager.get_full_hw_pattern_graph(backend=BackendType.TORCH, device=TargetDevice.ANY) + merged_ip_graph = ip_graph.get_ip_graph_with_merged_hw_optimized_operations(pattern) + + data_dir = TEST_ROOT / "torch/data/reference_graphs/pattern_merging" # type: Path + + path_to_dot_file = data_dir / "{}.dot".format(dot_file_name) + + if os.getenv("NNCF_TEST_REGEN_DOT") is not None: + if not os.path.exists(str(data_dir)): + os.makedirs(str(data_dir)) + graph_without_data = get_graph_without_data(merged_ip_graph) + write_dot_graph(graph_without_data, str(path_to_dot_file)) + + load_graph = read_dot_graph(str(path_to_dot_file)) + + for key in load_graph.nodes.keys(): + key.replace(r"\\n", r"\n") # Somehow pydot mangles the \n characters while writing a .dot file + + sanitized_loaded_keys = [key.replace("\\n", "\n") for key in load_graph.nodes.keys()] + sanitized_loaded_edges = [ + (u.replace("\\n", "\n"), v.replace("\\n", "\n")) for u, v in nx.DiGraph(load_graph).edges + ] + + assert Counter(sanitized_loaded_keys) == Counter(list(merged_ip_graph.nodes.keys())) + assert Counter(sanitized_loaded_edges) == Counter(list(merged_ip_graph.edges)) + + +def test_extraction_with_fused_bias_transformations(): + model = NNCFNetwork(InsertionPointTestModel(), [ModelInputInfo([1, 1, 10, 10])]) + model_transformer = PTModelTransformer(model) + + command = PTModelExtractionWithFusedBiasCommand("InsertionPointTestModel/NNCFConv2d[conv1]/conv2d_0") + transformation_layout = PTTransformationLayout() + transformation_layout.register(command) + extracted_model = model_transformer.transform(transformation_layout) + + assert isinstance(extracted_model, nn.Sequential) + assert len(extracted_model) == 1 + assert isinstance(extracted_model[0], NNCFConv2d) + + +def test_bias_correction_transformations(): + model = NNCFNetwork(InsertionPointTestModel(), [ModelInputInfo([1, 1, 10, 10])]) + model_transformer = PTModelTransformer(model) + + new_bias = torch.Tensor([42]) + 
+ target_point = PTTargetPoint(TargetType.LAYER, "InsertionPointTestModel/NNCFConv2d[conv1]/conv2d_0") + command = PTBiasCorrectionCommand(target_point, new_bias) + + transformation_layout = PTTransformationLayout() + transformation_layout.register(command) + updated_model = model_transformer.transform(transformation_layout) + assert updated_model.conv1.bias.data == new_bias + + +def test_rebuild_graph_after_insert_transformation(): + model = NNCFNetwork(InsertionPointTestModel(), [ModelInputInfo([1, 1, 10, 10])]) + + graph = model.nncf.get_graph() + + command = PTInsertionCommand( + PTTargetPoint( + TargetType.OPERATION_WITH_WEIGHTS, target_node_name="InsertionPointTestModel/NNCFConv2d[conv1]/conv2d_0" + ), + AsymmetricQuantizer(PTQuantizerSpec(8, QuantizationMode.ASYMMETRIC, None, False, False, (1, 1, 1, 1), False)), + TransformationPriority.QUANTIZATION_PRIORITY, + ) + transformation_layout = PTTransformationLayout() + transformation_layout.register(command) + + model_transformer = PTModelTransformer(model) + transformed_model = model_transformer.transform(transformation_layout=transformation_layout) + new_graph = transformed_model.nncf.get_graph() + assert len(new_graph.get_all_nodes()) == len(graph.get_all_nodes()) + 1 diff --git a/tests/torch/test_models/__init__.py b/tests/torch/test_models/__init__.py index a5ebbfc67b4..ad3af2cb2f2 100644 --- a/tests/torch/test_models/__init__.py +++ b/tests/torch/test_models/__init__.py @@ -1,3 +1,14 @@ +# Copyright (c) 2023 Intel Corporation +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + from .alexnet import * from .densenet import * from .dpn import * diff --git a/tests/torch/test_models/swin.py b/tests/torch/test_models/swin.py index 31ac9ee88e7..392c42b5217 100644 --- a/tests/torch/test_models/swin.py +++ b/tests/torch/test_models/swin.py @@ -13,9 +13,9 @@ # and https://github.com/rwightman/pytorch-image-models import torch -from timm.models.layers import DropPath -from timm.models.layers import to_2tuple -from timm.models.layers import trunc_normal_ +from timm.layers import DropPath +from timm.layers import to_2tuple +from timm.layers import trunc_normal_ from torch import nn WindowProcess = None @@ -177,7 +177,7 @@ class SwinTransformerBlock(nn.Module): Args: dim (int): Number of input channels. - input_resolution (tuple[int]): Input resulotion. + input_resolution (tuple[int]): Input resolution. num_heads (int): Number of attention heads. window_size (int): Window size. shift_size (int): Shift size for SW-MSA. 
diff --git a/tests/torch/test_models/synthetic.py b/tests/torch/test_models/synthetic.py index 0c78b74e6c2..7dac0819df9 100644 --- a/tests/torch/test_models/synthetic.py +++ b/tests/torch/test_models/synthetic.py @@ -16,6 +16,7 @@ from torch.nn import BatchNorm2d from torch.nn import Dropout from torch.nn import Parameter +from torchvision.transforms.functional import normalize from nncf.torch import register_module from tests.torch.helpers import create_conv @@ -332,3 +333,43 @@ def __init__(self): def forward(self, x): return self.mha(x, x, x) + + +class OrdinaryModelWithRecurrentInName(torch.nn.Module): + def __init__(self): + super().__init__() + self.conv = create_conv(1, 1, 1) + + def forward(self, x): + quantize_agnostic = x[:2] + return self.conv(quantize_agnostic) + + +class ShiftScaleParametrized(torch.nn.Module): + NUM_CHANNELS = 3 + INPUT_SIZES = [1, NUM_CHANNELS, 2, 2] + + def __init__(self, is_single_input: bool, use_normalize: bool): + super().__init__() + self.conv = create_conv(self.NUM_CHANNELS, 1, 1) + self.is_single_input = is_single_input + self.use_normalize = use_normalize + + @classmethod + def get_name(cls, is_single_input: bool, use_normalize: bool): + suffix_1 = "single" if is_single_input else "multi" + suffix_2 = "__normalize" if use_normalize else "" + return f"ShiftScale{suffix_2}__{suffix_1}_input_branch" + + def forward(self, x): + values = [1] * self.NUM_CHANNELS + if self.use_normalize: + pre_proc = normalize(x, values, values, inplace=False) + else: + vector = torch.Tensor(values).unsqueeze(dim=0).unsqueeze(dim=2).unsqueeze(dim=3) + pre_proc = (x - vector) / vector + + output = self.conv(pre_proc) + if self.is_single_input: + return output + return output, self.conv(x) diff --git a/tests/torch/test_nncf_network.py b/tests/torch/test_nncf_network.py index b523631d057..65585042f38 100644 --- a/tests/torch/test_nncf_network.py +++ b/tests/torch/test_nncf_network.py @@ -8,17 +8,12 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
+import functools import inspect -import itertools -import os from abc import ABCMeta from abc import abstractmethod -from collections import Counter from copy import deepcopy -from pathlib import Path -from typing import List -import networkx as nx import pytest import torch import torch.nn.functional as F @@ -27,52 +22,23 @@ from nncf import nncf_logger from nncf.common.graph import NNCFNode -from nncf.common.graph.definitions import MODEL_INPUT_OP_NAME -from nncf.common.graph.definitions import MODEL_OUTPUT_OP_NAME from nncf.common.graph.operator_metatypes import UnknownMetatype -from nncf.common.graph.patterns.manager import PatternsManager -from nncf.common.graph.patterns.manager import TargetDevice from nncf.common.graph.transformations.commands import TargetType -from nncf.common.graph.transformations.commands import TransformationPriority -from nncf.common.insertion_point_graph import InsertionPointGraph -from nncf.common.insertion_point_graph import InsertionPointGraphNodeType -from nncf.common.insertion_point_graph import PostHookInsertionPoint -from nncf.common.insertion_point_graph import PreHookInsertionPoint -from nncf.common.logging.logger import NNCFDeprecationWarning -from nncf.common.utils.backend import BackendType -from nncf.common.utils.dot_file_rw import get_graph_without_data -from nncf.common.utils.dot_file_rw import read_dot_graph -from nncf.common.utils.dot_file_rw import write_dot_graph from nncf.torch import register_module -from nncf.torch.dynamic_graph.context import PreHookId from nncf.torch.dynamic_graph.graph_tracer import ModelInputInfo from nncf.torch.dynamic_graph.operation_address import OperationAddress from nncf.torch.dynamic_graph.scope import Scope from nncf.torch.graph.graph import PTNNCFGraph from nncf.torch.graph.graph_builder import GraphBuilder from nncf.torch.graph.operator_metatypes import PTConv2dMetatype -from nncf.torch.graph.operator_metatypes import PTInputNoopMetatype from nncf.torch.graph.operator_metatypes import PTModuleConv2dMetatype -from nncf.torch.graph.operator_metatypes import PTOutputNoopMetatype -from nncf.torch.graph.operator_metatypes import PTReshapeMetatype -from nncf.torch.graph.transformations.commands import PTInsertionCommand -from nncf.torch.graph.transformations.commands import PTTargetPoint -from nncf.torch.graph.transformations.layout import PTTransformationLayout from nncf.torch.layer_utils import _NNCFModuleMixin from nncf.torch.layers import NNCFConv2d -from nncf.torch.module_operations import BaseOp +from nncf.torch.nncf_module_replacement import replace_modules_by_nncf_modules from nncf.torch.nncf_network import EXTERNAL_QUANTIZERS_STORAGE_NAME from nncf.torch.nncf_network import NNCFNetwork from nncf.torch.nncf_network import PTInsertionPoint from nncf.torch.nncf_network import PTInsertionType -from nncf.torch.nncf_network import PTModelTransformer -from tests.common.quantization.mock_graphs import get_ip_graph_for_test -from tests.common.quantization.mock_graphs import get_mock_model_graph_with_broken_output_edge_pattern -from tests.common.quantization.mock_graphs import get_mock_model_graph_with_mergeable_pattern -from tests.common.quantization.mock_graphs import get_mock_model_graph_with_no_mergeable_pattern -from tests.common.quantization.mock_graphs import get_nncf_graph_from_mock_nx_graph -from tests.common.quantization.mock_graphs import get_two_branch_mock_model_graph -from tests.shared.paths import TEST_ROOT from tests.torch.composite.test_sparsity_quantization import 
get_basic_sparsity_plus_quantization_config from tests.torch.helpers import BasicConvTestModel from tests.torch.helpers import TwoConvTestModel @@ -81,8 +47,6 @@ from tests.torch.helpers import register_bn_adaptation_init_args from tests.torch.test_models.synthetic import ManyNonEvalModules -# pylint:disable=too-many-lines - @pytest.fixture() def _nncf_caplog(caplog): @@ -306,396 +270,9 @@ def test_nncf_node_attrs_are_consistent(): node_name="dummy", node_type="dummy", layer_name="dummy", node_metatype=UnknownMetatype ) new_node_saved = nncf_graph.get_node_by_id(new_node.node_id) - assert new_node.data is new_node_saved.data + assert new_node.attributes is new_node_saved.attributes nodes_in_scope = nncf_graph.get_op_nodes_in_scope(nncf_graph.get_scope_by_node_name("dummy")) - assert new_node.data is nodes_in_scope[0].data - - -class InsertionPointTestModel(nn.Module): - def __init__(self): - super().__init__() - self.conv1 = nn.Conv2d(1, 1, 1, 1) - self.linear_wts = nn.Parameter(torch.FloatTensor(size=(100, 100))) - self.conv2 = nn.Conv2d(1, 1, 1, 1) - self.relu = nn.ReLU() - - def forward(self, input_): - x = self.conv1(input_) - x = x.flatten() - x = nn.functional.linear(x, self.linear_wts) - x = x.reshape((1, 1, 10, 10)) - x = self.conv2(x) - x = self.relu(x) - return x - - -class TestInsertionCommands: - @pytest.fixture() - def setup(self): - self.compressed_model = NNCFNetwork( - InsertionPointTestModel(), [ModelInputInfo([1, 1, 10, 10])] - ) # type: NNCFNetwork - - conv1_node_name = "InsertionPointTestModel/NNCFConv2d[conv1]/conv2d_0" - point_for_conv1_weights = PTTargetPoint( - target_type=TargetType.OPERATION_WITH_WEIGHTS, target_node_name=conv1_node_name - ) - point_for_conv1_inputs = PTTargetPoint(target_type=TargetType.OPERATOR_PRE_HOOK, target_node_name=conv1_node_name) - point_for_conv1_activations = PTTargetPoint( - target_type=TargetType.POST_LAYER_OPERATION, target_node_name=conv1_node_name - ) - - conv2_node_name = "InsertionPointTestModel/NNCFConv2d[conv2]/conv2d_0" - point_for_conv2_weights = PTTargetPoint( - target_type=TargetType.OPERATION_WITH_WEIGHTS, target_node_name=conv2_node_name - ) - point_for_conv2_inputs = PTTargetPoint(target_type=TargetType.OPERATOR_PRE_HOOK, target_node_name=conv2_node_name) - point_for_conv2_activations = PTTargetPoint( - target_type=TargetType.POST_LAYER_OPERATION, target_node_name=conv2_node_name - ) - - linear_node_name = "InsertionPointTestModel/linear_0" - point_for_linear_weight_input = PTTargetPoint( - target_type=TargetType.OPERATOR_PRE_HOOK, target_node_name=linear_node_name, input_port_id=0 - ) - point_for_linear_activation = PTTargetPoint( - target_type=TargetType.OPERATOR_POST_HOOK, target_node_name=linear_node_name - ) - - relu_node_name = "InsertionPointTestModel/ReLU[relu]/relu_0" - point_for_relu_inputs = PTTargetPoint( - target_type=TargetType.OPERATOR_PRE_HOOK, target_node_name=relu_node_name, input_port_id=0 - ) - point_for_relu_activations = PTTargetPoint( - target_type=TargetType.OPERATOR_POST_HOOK, target_node_name=relu_node_name - ) - - available_points = [ - point_for_conv1_weights, - point_for_conv2_weights, - point_for_conv1_inputs, - point_for_conv2_inputs, - point_for_conv1_activations, - point_for_conv2_activations, - point_for_linear_activation, - point_for_linear_weight_input, - point_for_relu_activations, - point_for_relu_inputs, - ] - - @pytest.mark.parametrize("target_point", available_points) - def test_single_insertions(self, setup, target_point: PTTargetPoint): - insertion_point = PTInsertionPoint( - 
target_point.target_type, - OperationAddress.from_str(target_point.target_node_name), - target_point.input_port_id, - ) - if insertion_point.insertion_type in [PTInsertionType.OPERATOR_PRE_HOOK, PTInsertionType.OPERATOR_POST_HOOK]: - hook = lambda x: x - else: - hook = BaseOp(lambda x: x) - - self.compressed_model.nncf.insert_at_point(insertion_point, [hook]) - - # pylint:disable=protected-access - if insertion_point.insertion_type == PTInsertionType.OPERATOR_PRE_HOOK: - ctx = self.compressed_model.nncf.get_tracing_context() - pre_hook_id = PreHookId(insertion_point.op_address, input_port_id=insertion_point.input_port_id) - assert ctx._pre_hooks[pre_hook_id][0] is hook - if insertion_point.insertion_type == PTInsertionType.OPERATOR_POST_HOOK: - ctx = self.compressed_model.nncf.get_tracing_context() - assert ctx._post_hooks[insertion_point.op_address][0] is hook - if insertion_point.insertion_type == PTInsertionType.NNCF_MODULE_PRE_OP: - module = self.compressed_model.nncf.get_module_by_scope(insertion_point.module_scope) - assert module.pre_ops["0"] is hook - - if insertion_point.insertion_type == PTInsertionType.NNCF_MODULE_POST_OP: - module = self.compressed_model.nncf.get_module_by_scope(insertion_point.module_scope) - assert module.post_ops["0"] is hook - - priority_types = ["same", "different"] - insertion_types = TargetType - priority_test_cases = list(itertools.product(priority_types, insertion_types)) - - @staticmethod - def check_order(iterable1: List, iterable2: List, ordering: List): - for idx, order in enumerate(ordering): - assert iterable1[idx] is iterable2[order] - - # pylint:disable=undefined-variable - @pytest.mark.parametrize("case", priority_test_cases, ids=[x[1].name + "-" + x[0] for x in priority_test_cases]) - def test_priority(self, case, setup): - # pylint:disable=too-many-branches - priority_type = case[0] - insertion_type = case[1] - if insertion_type in [TargetType.OPERATION_WITH_WEIGHTS, TargetType.POST_LAYER_OPERATION]: - hook1 = BaseOp(lambda x: x) - hook2 = BaseOp(lambda x: 2 * x) - hook3 = BaseOp(lambda x: 3 * x) - else: - hook1 = lambda x: x - hook2 = lambda x: 2 * x - hook3 = lambda x: 3 * x - - if insertion_type == TargetType.OPERATION_WITH_WEIGHTS: - point = self.point_for_conv2_weights - elif insertion_type == TargetType.POST_LAYER_OPERATION: - point = self.point_for_conv1_activations - elif insertion_type == TargetType.OPERATOR_PRE_HOOK: - point = self.point_for_linear_weight_input - elif insertion_type == TargetType.OPERATOR_POST_HOOK: - point = self.point_for_relu_activations - else: - pytest.skip("Insertion type {} currently unsupported in PT".format(insertion_type)) - - if priority_type == "same": - # Same-priority commands will be executed in registration order - command1 = PTInsertionCommand(point, hook1, TransformationPriority.DEFAULT_PRIORITY) - command2 = PTInsertionCommand(point, hook2, TransformationPriority.DEFAULT_PRIORITY) - command3 = PTInsertionCommand(point, hook3, TransformationPriority.DEFAULT_PRIORITY) - else: - # Prioritized commands will be executed in ascending priority order - command1 = PTInsertionCommand(point, hook1, TransformationPriority.SPARSIFICATION_PRIORITY) - command2 = PTInsertionCommand(point, hook2, TransformationPriority.QUANTIZATION_PRIORITY) - command3 = PTInsertionCommand(point, hook3, TransformationPriority.DEFAULT_PRIORITY) - - layout = PTTransformationLayout() - layout.register(command1) - layout.register(command2) - layout.register(command3) - self.compressed_model = 
PTModelTransformer(self.compressed_model).transform(layout) - - hook_list = [hook1, hook2, hook3] - - if priority_type == "same": - order = [0, 1, 2] - elif priority_type == "different": - order = [2, 0, 1] - - # pylint:disable=protected-access - if insertion_type == TargetType.OPERATOR_PRE_HOOK: - ctx = self.compressed_model.nncf.get_tracing_context() - pre_hook_id = PreHookId( - OperationAddress.from_str(point.target_node_name), input_port_id=point.input_port_id - ) - self.check_order(ctx._pre_hooks[pre_hook_id], hook_list, order) - if insertion_type == TargetType.OPERATOR_POST_HOOK: - ctx = self.compressed_model.nncf.get_tracing_context() - self.check_order(ctx._post_hooks[OperationAddress.from_str(point.target_node_name)], hook_list, order) - - if insertion_type == TargetType.OPERATION_WITH_WEIGHTS: - module = self.compressed_model.nncf.get_containing_module(point.target_node_name) - # Works because Pytorch ModuleDict is ordered - self.check_order([x.operand for x in module.pre_ops.values()], hook_list, order) - - if insertion_type == TargetType.POST_LAYER_OPERATION: - module = self.compressed_model.nncf.get_containing_module(point.target_node_name) - # Works because Pytorch ModuleDict is ordered - self.check_order(list(module.post_ops.values()), hook_list, order) - - -MERGE_PATTERN_TEST_CASES = ( - [get_mock_model_graph_with_mergeable_pattern, "basic_pattern"], - [get_mock_model_graph_with_no_mergeable_pattern, "no_pattern"], - [get_mock_model_graph_with_broken_output_edge_pattern, "broken_output_edges_pattern"], -) - - -class TestInsertionPointGraph: - def test_insertion_point_setup(self): - # TODO: Change testing premises when module pre/post-op hooks and input/output nodes - # are correctly handled - mock_graph = get_two_branch_mock_model_graph() - - ip_graph = get_ip_graph_for_test(mock_graph) - - nx_graph = mock_graph.get_nx_graph_copy() - ref_node_len = 3 * len(nx_graph.nodes) # 2 additional nodes per each operator node - ref_edge_len = 3 * len(nx_graph.edges) - - assert len(ip_graph.nodes) == ref_node_len - assert len(ip_graph.edges) == ref_edge_len - - for nncf_node_idx in mock_graph.get_all_node_ids(): - node_key = mock_graph.get_node_key_by_id(nncf_node_idx) - ip_graph_op_node = ip_graph.nodes[node_key] - assert ip_graph_op_node[InsertionPointGraph.NODE_TYPE_NODE_ATTR] == InsertionPointGraphNodeType.OPERATOR - preds = list(ip_graph.predecessors(node_key)) - succs = list(ip_graph.successors(node_key)) - assert len(succs) == 1 - post_hook_ip_node_key = succs[0] - post_hook_ip_node = ip_graph.nodes[succs[0]] - post_hook_ip_node_type = post_hook_ip_node[InsertionPointGraph.NODE_TYPE_NODE_ATTR] - assert post_hook_ip_node_type == InsertionPointGraphNodeType.POST_HOOK - - pre_hook_ip_node_keys = preds - for pre_hook_ip_node_key in pre_hook_ip_node_keys: - pre_hook_ip_node = ip_graph.nodes[pre_hook_ip_node_key] - pre_hook_ip_node_type = pre_hook_ip_node[InsertionPointGraph.NODE_TYPE_NODE_ATTR] - assert pre_hook_ip_node_type == InsertionPointGraphNodeType.PRE_HOOK - - ref_associated_ip_node_keys_set = {*pre_hook_ip_node_keys, post_hook_ip_node_key} - assert ( - ref_associated_ip_node_keys_set - == ip_graph_op_node[InsertionPointGraph.ASSOCIATED_IP_NODE_KEYS_NODE_ATTR] - ) - original_neighbours = nx_graph.neighbors(node_key) - for neighbour in original_neighbours: - # IP node insertion should not disrupt the graph superstructure - ip_graph_paths = list(nx.all_simple_paths(ip_graph, node_key, neighbour)) - for path in ip_graph_paths: - path = path[1:-1] - for path_node_key in path: - 
node = ip_graph.nodes[path_node_key] - node_type = node[InsertionPointGraph.NODE_TYPE_NODE_ATTR] - assert node_type in [ - InsertionPointGraphNodeType.PRE_HOOK, - InsertionPointGraphNodeType.POST_HOOK, - ] - - for node_key, node in ip_graph.nodes.items(): - preds = list(ip_graph.predecessors(node_key)) - succs = list(ip_graph.successors(node_key)) - assert len(preds) != 0 or len(succs) != 0 - - for from_node_key, to_node_key in ip_graph.edges.keys(): - assert from_node_key in ip_graph.nodes - assert to_node_key in ip_graph.nodes - - def test_insertion_point_data_in_ip_nodes(self): - # TODO: extend for modules - mock_graph = nx.DiGraph() - - mock_graph.add_node("bar") - mock_graph.add_node("baz") - mock_graph.add_edge("bar", "baz") - nncf_graph = get_nncf_graph_from_mock_nx_graph(mock_graph) - - ip_graph = get_ip_graph_for_test(nncf_graph) - - for nncf_node in nncf_graph.get_all_nodes(): - node_id = nncf_node.node_id - node_key = nncf_graph.get_node_key_by_id(node_id) - preds = list(ip_graph.predecessors(node_key)) - succs = list(ip_graph.successors(node_key)) - - post_hook_ip_node = ip_graph.nodes[succs[0]] - post_hook_ip = post_hook_ip_node[InsertionPointGraph.INSERTION_POINT_NODE_ATTR] - assert isinstance(post_hook_ip, PostHookInsertionPoint) - assert post_hook_ip.target_node_name == nncf_node.node_name - - for pre_hook_ip_node_key in preds: - pre_hook_ip_node = ip_graph.nodes[pre_hook_ip_node_key] - pre_hook_ip = pre_hook_ip_node[InsertionPointGraph.INSERTION_POINT_NODE_ATTR] - assert isinstance(pre_hook_ip, PreHookInsertionPoint) - assert pre_hook_ip.target_node_name == nncf_node.node_name - - def test_operator_metatype_marking(self): - from nncf.torch.graph.operator_metatypes import PTAddMetatype - from nncf.torch.graph.operator_metatypes import PTAvgPool2dMetatype - from nncf.torch.graph.operator_metatypes import PTBatchNormMetatype - from nncf.torch.graph.operator_metatypes import PTConvTranspose2dMetatype - from nncf.torch.graph.operator_metatypes import PTDepthwiseConv2dSubtype - from nncf.torch.graph.operator_metatypes import PTLinearMetatype - from nncf.torch.graph.operator_metatypes import PTMaxPool2dMetatype - from nncf.torch.graph.operator_metatypes import PTModuleBatchNormMetatype - from nncf.torch.graph.operator_metatypes import PTModuleConvTranspose2dMetatype - from nncf.torch.graph.operator_metatypes import PTModuleLinearMetatype - from nncf.torch.graph.operator_metatypes import PTRELUMetatype - from nncf.torch.graph.operator_metatypes import PTTransposeMetatype - - ref_scope_vs_metatype_dict = { - "/" + MODEL_INPUT_OP_NAME + "_0": PTInputNoopMetatype, - "ModelForMetatypeTesting/NNCFConv2d[conv_regular]/conv2d_0": PTModuleConv2dMetatype, - "ModelForMetatypeTesting/NNCFBatchNorm2d[bn]/batch_norm_0": PTModuleBatchNormMetatype, - "ModelForMetatypeTesting/batch_norm_0": PTBatchNormMetatype, - "ModelForMetatypeTesting/relu_0": PTRELUMetatype, - "ModelForMetatypeTesting/transpose__0": PTTransposeMetatype, - "ModelForMetatypeTesting/MaxPool2d[max_pool2d]/max_pool2d_0": PTMaxPool2dMetatype, - "ModelForMetatypeTesting/NNCFConvTranspose2d[conv_transpose]/conv_transpose2d_0": ( - PTModuleConvTranspose2dMetatype - ), - "ModelForMetatypeTesting/conv_transpose2d_0": PTConvTranspose2dMetatype, - "ModelForMetatypeTesting/__add___0": PTAddMetatype, - "ModelForMetatypeTesting/NNCFConv2d[conv_depthwise]/conv2d_0": PTDepthwiseConv2dSubtype, - "ModelForMetatypeTesting/conv2d_0": PTConv2dMetatype, - "ModelForMetatypeTesting/__iadd___0": PTAddMetatype, - 
"ModelForMetatypeTesting/AdaptiveAvgPool2d[adaptive_avg_pool]/adaptive_avg_pool2d_0": PTAvgPool2dMetatype, - "ModelForMetatypeTesting/flatten_0": PTReshapeMetatype, - "ModelForMetatypeTesting/NNCFLinear[linear]/linear_0": PTModuleLinearMetatype, - "ModelForMetatypeTesting/linear_0": PTLinearMetatype, - "/" + MODEL_OUTPUT_OP_NAME + "_0": PTOutputNoopMetatype, - } - - class ModelForMetatypeTesting(torch.nn.Module): - def __init__(self): - super().__init__() - self.conv_regular = torch.nn.Conv2d(in_channels=3, out_channels=16, kernel_size=3) - self.bn = torch.nn.BatchNorm2d(num_features=16) - self.max_pool2d = torch.nn.MaxPool2d(kernel_size=2) - self.conv_transpose = torch.nn.ConvTranspose2d(in_channels=16, out_channels=8, kernel_size=3) - self.conv_depthwise = torch.nn.Conv2d(in_channels=8, out_channels=8, kernel_size=5, groups=8) - self.adaptive_avg_pool = torch.nn.AdaptiveAvgPool2d(output_size=1) - self.linear = torch.nn.Linear(in_features=8, out_features=8) - - def forward(self, input_): - x = self.conv_regular(input_) - x = self.bn(x) - x = F.batch_norm(x, self.bn.running_mean, self.bn.running_var) - x = F.relu(x) - x.transpose_(2, 3) - x = self.max_pool2d(x) - y = self.conv_transpose(x) - z = F.conv_transpose2d(x, self.conv_transpose.weight) - x = y + z - x = self.conv_depthwise(x) - x = F.conv2d(x, self.conv_depthwise.weight, groups=self.conv_depthwise.groups) - x += torch.ones_like(x) - x = self.adaptive_avg_pool(x) - x = self.linear(x.flatten()) - x = F.linear(x, self.linear.weight) - return x - - model = ModelForMetatypeTesting() - nncf_network = NNCFNetwork(model, [ModelInputInfo([1, 3, 300, 300])]) - nncf_graph = nncf_network.nncf.get_original_graph() - - for nncf_node in nncf_graph.get_all_nodes(): # type: NNCFNode - assert nncf_node.node_name in ref_scope_vs_metatype_dict - ref_metatype = ref_scope_vs_metatype_dict[nncf_node.node_name] - assert nncf_node.metatype == ref_metatype - - @pytest.mark.parametrize( - ("mock_graph_factory", "dot_file_name"), MERGE_PATTERN_TEST_CASES, ids=[x[1] for x in MERGE_PATTERN_TEST_CASES] - ) - def test_get_ip_graph_with_merged_operations(self, mock_graph_factory, dot_file_name): - mock_graph = mock_graph_factory() - ip_graph = get_ip_graph_for_test(mock_graph) - pattern = PatternsManager.get_full_hw_pattern_graph(backend=BackendType.TORCH, device=TargetDevice.ANY) - merged_ip_graph = ip_graph.get_ip_graph_with_merged_hw_optimized_operations(pattern) - - data_dir = TEST_ROOT / "torch/data/reference_graphs/pattern_merging" # type: Path - - path_to_dot_file = data_dir / "{}.dot".format(dot_file_name) - - if os.getenv("NNCF_TEST_REGEN_DOT") is not None: - if not os.path.exists(str(data_dir)): - os.makedirs(str(data_dir)) - graph_without_data = get_graph_without_data(merged_ip_graph) - write_dot_graph(graph_without_data, str(path_to_dot_file)) - - load_graph = read_dot_graph(str(path_to_dot_file)) - - for key in load_graph.nodes.keys(): - key.replace(r"\\n", r"\n") # Somehow pydot mangles the \n characters while writing a .dot file - - sanitized_loaded_keys = [key.replace("\\n", "\n") for key in load_graph.nodes.keys()] - sanitized_loaded_edges = [ - (u.replace("\\n", "\n"), v.replace("\\n", "\n")) for u, v in nx.DiGraph(load_graph).edges - ] - - assert Counter(sanitized_loaded_keys) == Counter(list(merged_ip_graph.nodes.keys())) - assert Counter(sanitized_loaded_edges) == Counter(list(merged_ip_graph.edges)) + assert new_node.attributes is nodes_in_scope[0].attributes def test_can_collect_scopes_of_train_only_modules(): @@ -826,6 +403,30 @@ def 
test_replacing_forward_with_another_own_method(_nncf_caplog): assert "set_original_unbound_forward" in _nncf_caplog.text +def test_replacing_forward_of_original_model(): + def decorator(func): + def wrap(*args): + return func(*args) + + return wrap + + model = BasicConvTestModel() + model.forward = decorator(model.forward) + + fn_id = id(model.__dict__["forward"]) + fn_sign = inspect.signature(model.forward) + # type of current + assert isinstance(model.__dict__["forward"], type(decorator)) + + nncf_net = NNCFNetwork(model, [ModelInputInfo(model.INPUT_SIZE)]) + nncf_net.forward(torch.ones(model.INPUT_SIZE)) + + # Check that forward was updated + assert fn_id != id(nncf_net.__dict__["forward"]) + assert fn_sign == inspect.signature(nncf_net.forward) + assert isinstance(nncf_net.forward, functools.partial) + + def test_temporary_clean_view(): model = TwoConvTestModelWithUserModule() config = get_basic_sparsity_plus_quantization_config() @@ -982,13 +583,6 @@ def test_works_when_wrapped_with_dataparallel(simple_net): dp_model(torch.zeros([10, *simple_net.INPUT_SIZE[1:]], device="cuda")) -def test_warns_on_old_style_calls(simple_net): - with pytest.warns(NNCFDeprecationWarning): - simple_net.get_graph() - with pytest.warns(NNCFDeprecationWarning): - simple_net.get_nncf_wrapped_model() - - def test_class_has_same_name_and_module_as_original(simple_net): assert simple_net.__class__.__name__ == SimplestModel.__name__ assert simple_net.__class__.__module__ == SimplestModel.__module__ @@ -1055,10 +649,10 @@ def test_reset_original_unbound_forward(): inp = torch.ones((1,)) assert nncf_network.forward(inp) == inp - nncf_network.set_original_unbound_forward(model.__class__.other_forward) + nncf_network.nncf.set_original_unbound_forward(model.__class__.other_forward) assert nncf_network.forward(inp) == inp * 2 - nncf_network.reset_original_unbound_forward() + nncf_network.nncf.reset_original_unbound_forward() assert nncf_network.forward(inp) == inp @@ -1126,3 +720,35 @@ def test_safety_change_scope_in_get_nncf_modules(): orig_id = id(list(nncf_net.nncf._nncf_replaced_modules.values())[0][0]) return_id = id(list(nncf_net.nncf.get_nncf_modules().values())[0]) assert orig_id != return_id + + +class EmbeddingWithSharedWeights(torch.nn.Embedding): + def forward(self, x, run_as_matmul=False): # pylint: disable=arguments-renamed + if run_as_matmul: + return F.linear(x, self.weight) + return super().forward(x) + + +class ShortTransformer(torch.nn.Module): + def __init__(self, in_features, num_embeddings): + super().__init__() + self.wte = EmbeddingWithSharedWeights(num_embeddings, in_features) + self.linear = torch.nn.Linear(in_features, in_features) + + def forward(self, input_ids): + x = self.wte(input_ids) + x = self.linear(x) + res = self.wte(x, True) + return res + + +def test_proxy_module_for_forward_with_super(mocker): + num_embeddings = 10 + dim = 10 + model = ShortTransformer(dim, num_embeddings) + + register_module(ignored_algorithms=[])(type(model.wte)) + wrapped_model, _ = replace_modules_by_nncf_modules(model) + + input_ids = torch.randint(num_embeddings, (1, 4)) + wrapped_model(input_ids) diff --git a/tests/torch/test_onnx_export.py b/tests/torch/test_onnx_export.py index e52d07bdd10..f5b321e3da3 100644 --- a/tests/torch/test_onnx_export.py +++ b/tests/torch/test_onnx_export.py @@ -1,3 +1,14 @@ +# Copyright (c) 2023 Intel Corporation +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + from typing import Any import onnx diff --git a/tests/torch/test_pattern_manager.py b/tests/torch/test_pattern_manager.py index 59c8848e934..c44ad738080 100644 --- a/tests/torch/test_pattern_manager.py +++ b/tests/torch/test_pattern_manager.py @@ -8,7 +8,9 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. + from nncf.common.graph.patterns import HWFusedPatternNames +from nncf.common.graph.patterns import IgnoredPatternNames from nncf.common.utils.backend import BackendType from tests.shared.patterns import check_hw_patterns from tests.shared.patterns import check_ignored_patterns @@ -16,18 +18,12 @@ IGNORING_HW_PATTERN_REASONS = { HWFusedPatternNames.ADD_SCALE_SHIFT_OUTPUT: "Not relevant for Torch.", HWFusedPatternNames.BATCH_INDEX: "Not relevant for Torch.", - HWFusedPatternNames.EQUAL_LOGICALNOT: "Not relevant for Torch.", - HWFusedPatternNames.FC_BN_HSWISH_ACTIVATION: "Not relevant for Torch.", HWFusedPatternNames.LINEAR_WITH_BIAS: "Not relevant for Torch.", HWFusedPatternNames.MVN_SCALE_SHIFT: "Not relevant for Torch.", HWFusedPatternNames.NORMALIZE_L2_MULTIPLY: "Not relevant for Torch.", HWFusedPatternNames.SCALE_SHIFT: "Not relevant for Torch.", HWFusedPatternNames.SE_BLOCK: "Not relevant for Torch.", HWFusedPatternNames.SOFTMAX_DIV: "Not relevant for Torch.", - HWFusedPatternNames.SOFTMAX_RESHAPE_MATMUL: "Not relevant for Torch.", - HWFusedPatternNames.SOFTMAX_RESHAPE_TRANSPOSE_GATHER_MATMUL: "Not relevant for Torch.", - HWFusedPatternNames.SOFTMAX_RESHAPE_TRANSPOSE_MATMUL: "Not relevant for Torch.", - HWFusedPatternNames.STABLE_DIFFUSION: "Not relevant for Torch.", HWFusedPatternNames.HSWISH_ACTIVATION: "Not relevant for Torch.", HWFusedPatternNames.HSWISH_ACTIVATION_V2: "Not relevant for Torch.", HWFusedPatternNames.HSWISH_ACTIVATION_WITHOUT_DENOMINATOR: "Not relevant for Torch.", @@ -42,7 +38,6 @@ HWFusedPatternNames.INPUT_REVERSE_ADD: "Not relevant for Torch.", HWFusedPatternNames.INPUT_REVERSE_SCALE_SHIFT: "Not relevant for Torch.", HWFusedPatternNames.INPUT_SCALE_SHIFT: "Not relevant for Torch.", - HWFusedPatternNames.INPUT_SHIFT_SCALE: "Not relevant for Torch.", HWFusedPatternNames.INPUT_TRANSPOSE_PROCESSING: "Not relevant for Torch.", HWFusedPatternNames.INPUT_TRANSPOSE_REVERSE_ADD: "Not relevant for Torch.", HWFusedPatternNames.INPUT_TRANSPOSE_SCALE_SHIFT: "Not relevant for Torch.", @@ -53,6 +48,7 @@ HWFusedPatternNames.BATCH_NORM_SCALE_SHIFT_ACTIVATIONS: "Not relevant for Torch.", HWFusedPatternNames.LINEAR_ACTIVATIONS_SCALE_SHIFT: "Not relevant for Torch.", HWFusedPatternNames.LINEAR_ARITHMETIC_ACTIVATIONS: "Not relevant for Torch.", + HWFusedPatternNames.LINEAR_ARITHMETIC_ACTIVATIONS_ARITHMETIC: "Not relevant for Torch.", HWFusedPatternNames.LINEAR_BATCH_NORM_SCALE_SHIFT_ACTIVATIONS: "Not relevant for Torch.", HWFusedPatternNames.LINEAR_SCALE_SHIFT_ACTIVATIONS: "Not relevant for Torch.", HWFusedPatternNames.SCALE_SHIFT_ACTIVATIONS: "Not relevant for Torch.", @@ -67,9 +63,13 @@ HWFusedPatternNames.LINEAR_BIASED_ACTIVATION_ELEMENTWISE: "Not 
relevant for Torch.", HWFusedPatternNames.MVN_SCALE_SHIFT_ACTIVATIONS: "Not relevant for Torch.", HWFusedPatternNames.LINEAR_SQUEEZE_ACTIVATIONS: "Not relevant for Torch.", + HWFusedPatternNames.LINEAR_ACTIVATIONS_UNSQUEEZE_BN_SQUEEZE: "Not relevant for Torch.", } -IGNORING_IGNORED_PATTERN_REASONS = {} +IGNORING_IGNORED_PATTERN_REASONS = { + IgnoredPatternNames.FC_BN_HSWISH_ACTIVATION: "Not relevant for Torch.", + IgnoredPatternNames.EQUAL_LOGICALNOT: "Not relevant for Torch.", +} def test_pattern_manager(): diff --git a/tests/torch/test_pytorch_patch.py b/tests/torch/test_pytorch_patch.py index 9e2d0ed24b1..7564a5e0bf1 100644 --- a/tests/torch/test_pytorch_patch.py +++ b/tests/torch/test_pytorch_patch.py @@ -1,3 +1,14 @@ +# Copyright (c) 2023 Intel Corporation +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + import inspect import torch @@ -14,6 +25,7 @@ from tests.torch.helpers import create_compressed_model_and_algo_for_test from tests.torch.helpers import register_bn_adaptation_init_args from tests.torch.pytorch_patch_isolated import test_jit_if_tracing_script_source_equals +from tests.torch.pytorch_patch_isolated import test_jit_script_exception_preserves_patching_isolated def test_get_all_aliases_is_valid(): @@ -74,6 +86,11 @@ def test_jit_if_tracing_script_source(): run_pytest_case_function_in_separate_process(test_jit_if_tracing_script_source_equals) +def test_jit_script_exception_preserves_patching(): + # Run test case in a separate process to track patching of torch by NNCF + run_pytest_case_function_in_separate_process(test_jit_script_exception_preserves_patching_isolated) + + def test_jit_script_signature(): # Check that torch.jit.script has the same signature as the wrapper was designed for signature = inspect.signature(_ORIG_JIT_SCRIPT) diff --git a/tests/torch/test_sanity_sample.py b/tests/torch/test_sanity_sample.py index e7b07dca91e..e7df2c2f1ab 100644 --- a/tests/torch/test_sanity_sample.py +++ b/tests/torch/test_sanity_sample.py @@ -18,7 +18,7 @@ import pytest import torch import torchvision -from pkg_resources import parse_version +from packaging import version from pytest_dependency import depends from examples.common.sample_config import EVAL_ONLY_ERROR_TEXT @@ -185,7 +185,7 @@ def fixture_case_common_dirs(tmp_path_factory): @pytest.mark.parametrize(" multiprocessing_distributed", (True, False), ids=["distributed", "dataparallel"]) def test_pretrained_model_eval(config, tmp_path, multiprocessing_distributed, case_common_dirs): - if parse_version(torchvision.__version__) < parse_version("0.13") and "voc" in str(config["dataset_path"]): + if version.parse(torchvision.__version__) < version.parse("0.13") and "voc" in str(config["dataset_path"]): pytest.skip( f"Test calls sample that uses `datasets.VOCDetection.parse_voc_xml` function from latest " f"torchvision.\nThe signature of the function is not compatible with the corresponding signature " @@ -269,7 +269,7 @@ def depends_on_pretrained_train(request, test_case_id: str, current_multiprocess @pytest.mark.dependency() 
@pytest.mark.parametrize("multiprocessing_distributed", [True, False], ids=["distributed", "dataparallel"]) def test_trained_model_eval(request, config, tmp_path, multiprocessing_distributed, case_common_dirs): - if parse_version(torchvision.__version__) < parse_version("0.13") and "voc" in str(config["dataset_path"]): + if version.parse(torchvision.__version__) < version.parse("0.13") and "voc" in str(config["dataset_path"]): pytest.skip( f"Test calls sample that uses `datasets.VOCDetection.parse_voc_xml` function from latest " f"torchvision.\nThe signature of the function is not compatible with the corresponding signature " diff --git a/tests/torch/test_sanity_third_party.py b/tests/torch/test_sanity_third_party.py index b829ec539a7..a18a38129bc 100644 --- a/tests/torch/test_sanity_third_party.py +++ b/tests/torch/test_sanity_third_party.py @@ -78,7 +78,7 @@ def __init__(self, venv_path, repo_path): ) ) - def install_env(self, pip_cache_dir, torch_with_cuda11): + def install_env(self, pip_cache_dir): version_string = "{}.{}".format(sys.version_info[0], sys.version_info[1]) subprocess.call("virtualenv -ppython{} {}".format(version_string, self.VENV_PATH), shell=True) pip_runner = CachedPipRunner(self.VENV_ACTIVATE, pip_cache_dir) @@ -87,8 +87,6 @@ def install_env(self, pip_cache_dir, torch_with_cuda11): pip_runner.run_pip("install setuptools") pip_runner.run_pip("install onnx") torch_install_cmd = "install torch=={}".format(BKC_TORCH_VERSION) - if torch_with_cuda11: - pip_runner.run_pip(torch_install_cmd + "+cu116 --extra-index-url https://download.pytorch.org/whl/cu116") pip_runner.run_pip(torch_install_cmd) subprocess.run( "git clone https://github.com/huggingface/transformers {}".format(self.TRANSFORMERS_REPO_PATH), @@ -121,8 +119,8 @@ def setup(self, temp_folder): self.env = TransformersVirtualEnvInstaller(temp_folder["venv"], temp_folder["repo"]) @pytest.mark.dependency(name="install_trans") - def test_install_trans_(self, pip_cache_dir, torch_with_cuda11): - self.env.install_env(pip_cache_dir, torch_with_cuda11) + def test_install_trans_(self, pip_cache_dir): + self.env.install_env(pip_cache_dir) @pytest.mark.dependency(depends=["install_trans"], name="xnli_train") def test_xnli_train(self, temp_folder): diff --git a/tests/torch/test_sota_checkpoints.py b/tests/torch/test_sota_checkpoints.py index 2d29928e77f..d67a3cfd981 100644 --- a/tests/torch/test_sota_checkpoints.py +++ b/tests/torch/test_sota_checkpoints.py @@ -1,3 +1,14 @@ +# Copyright (c) 2023 Intel Corporation +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ import csv import datetime import json diff --git a/tests/torch/test_statistics_aggregator.py b/tests/torch/test_statistics_aggregator.py index f9805f8a3c0..4a4b8f48914 100644 --- a/tests/torch/test_statistics_aggregator.py +++ b/tests/torch/test_statistics_aggregator.py @@ -19,6 +19,7 @@ from nncf import Dataset from nncf.common.graph.transformations.commands import TargetType from nncf.experimental.common.tensor_statistics.collectors import TensorReducerBase +from nncf.quantization.algorithms.fast_bias_correction.torch_backend import PTFastBiasCorrectionAlgoBackend from nncf.quantization.algorithms.min_max.torch_backend import PTMinMaxAlgoBackend from nncf.torch.graph.graph import PTTargetPoint from nncf.torch.statistics.aggregator import PTStatisticsAggregator @@ -49,10 +50,10 @@ def get_min_max_algo_backend_cls(self) -> Type[PTMinMaxAlgoBackend]: return PTMinMaxAlgoBackend def get_bias_correction_algo_backend_cls(self) -> None: - return None + pytest.skip("PTBiasCorrectionAlgoBackend is not implemented") - def get_fast_bias_correction_algo_backend_cls(self) -> None: - return None + def get_fast_bias_correction_algo_backend_cls(self) -> Type[PTFastBiasCorrectionAlgoBackend]: + return PTFastBiasCorrectionAlgoBackend def get_backend_model(self, dataset_samples): sample = dataset_samples[0].reshape(INPUT_SHAPE[1:]) @@ -110,7 +111,7 @@ def inplace_statistics(self, request) -> bool: return request.param @pytest.mark.skip("Merging is not implemented yet") - def test_statistics_merging_simple(self, dataset_samples, inplace_statistics): + def test_statistics_merging_simple(self, dataset_samples, inplace_statistics, statistic_point_params): pass @pytest.mark.skip("Merging is not implemented yet") @@ -120,9 +121,3 @@ def test_statistic_merging(self, dataset_samples, inplace_statistics): @pytest.mark.skip("Merging is not implemented yet") def test_same_collectors_different_attrs_dont_merge(self, statistics_type, test_params, dataset_samples): pass - - @pytest.mark.skip("Bias correction and Fast bias correction is not implemented yet") - def test_statistics_aggregator_bias_correction( - self, dataset_samples, test_params, inplace_statistics, is_stat_in_shape_of_scale - ): - pass diff --git a/tests/torch/test_tensor.py b/tests/torch/test_tensor.py new file mode 100644 index 00000000000..eb4d907022b --- /dev/null +++ b/tests/torch/test_tensor.py @@ -0,0 +1,34 @@ +# Copyright (c) 2023 Intel Corporation +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+import pytest +import torch + +from nncf.experimental.tensor import Tensor +from nncf.experimental.tensor.enums import TensorDeviceType +from tests.shared.test_templates.template_test_nncf_tensor import TemplateTestNNCFTensorOperators + + +class TestPTNNCFTensorOperators(TemplateTestNNCFTensorOperators): + @staticmethod + def to_tensor(x): + return torch.tensor(x) + + +class TestCudaPTNNCFTensorOperators(TemplateTestNNCFTensorOperators): + @staticmethod + def to_tensor(x): + if not torch.cuda.is_available(): + pytest.skip("Skipping for CPU-only setups") + return torch.tensor(x).cuda() + + def test_device(self): + tensor = Tensor(self.to_tensor([1])) + assert tensor.device == TensorDeviceType.GPU diff --git a/tests/torch/test_tracing_context.py b/tests/torch/test_tracing_context.py index 30c45858888..caa6def6252 100644 --- a/tests/torch/test_tracing_context.py +++ b/tests/torch/test_tracing_context.py @@ -10,14 +10,14 @@ # limitations under the License. import pytest import torch -from pkg_resources import parse_version +from packaging import version from nncf.torch.dynamic_graph.context import TracingContext from nncf.torch.dynamic_graph.trace_tensor import TracedTensor @pytest.mark.skipif( - parse_version(torch.__version__) < parse_version("1.11"), + version.parse(torch.__version__) < version.parse("1.11"), reason="__getitem__ works unexpectedly for TracedTensor until fix in torch 1.11.\n" "Fix in pytorch: https://github.com/pytorch/pytorch/pull/67202\n" "Related ticket: 82065", diff --git a/tests/torch/test_transform_fn.py b/tests/torch/test_transform_fn.py new file mode 100644 index 00000000000..d0b44c8dce6 --- /dev/null +++ b/tests/torch/test_transform_fn.py @@ -0,0 +1,66 @@ +# Copyright (c) 2023 Intel Corporation +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import torch +from torch import nn + +import nncf +from tests.torch.test_models.alexnet import AlexNet as ModelWithSingleInput + + +class ModelWithMultipleInputs(nn.Module): + def __init__(self): + super().__init__() + self._conv2d_0 = nn.Conv2d(3, 64, kernel_size=3, stride=2, padding=1) + self._conv2d_1 = nn.Conv2d(3, 64, kernel_size=3, stride=2, padding=1) + + def forward(self, input_0, input_1): + output_0 = self._conv2d_0(input_0) + output_1 = self._conv2d_1(input_1) + return output_0 + output_1 + + +dataset = [ + [ + torch.zeros((3, 32, 32), dtype=torch.float32), + torch.ones((3, 32, 32), dtype=torch.float32), + ] +] + +dataloader = torch.utils.data.DataLoader(dataset, batch_size=1) + + +def single_input_transform_fn(data_item): + return data_item[0] + + +def multiple_inputs_transform_fn(data_item): + return data_item[0], data_item[1] + + +def test_transform_fn_single_input(): + model = ModelWithSingleInput() + + # Check the transformation function + _ = model(single_input_transform_fn(next(iter(dataloader)))) + # Start quantization + calibration_dataset = nncf.Dataset(dataloader, single_input_transform_fn) + _ = nncf.quantize(model, calibration_dataset) + + +def test_transform_fn_multiple_inputs(): + model = ModelWithMultipleInputs() + + # Check the transformation function + _ = model(*multiple_inputs_transform_fn(next(iter(dataloader)))) + # Start quantization + calibration_dataset = nncf.Dataset(dataloader, multiple_inputs_transform_fn) + _ = nncf.quantize(model, calibration_dataset) diff --git a/tests/torch/test_utils.py b/tests/torch/test_utils.py index 34d73faefb0..00d6cecb62a 100644 --- a/tests/torch/test_utils.py +++ b/tests/torch/test_utils.py @@ -1,3 +1,14 @@ +# Copyright (c) 2023 Intel Corporation +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + import pytest import torch from torch import nn diff --git a/third_party_integration/huggingface_transformers/README.md b/third_party_integration/huggingface_transformers/README.md index f7dfcf691e4..80ce12b370b 100644 --- a/third_party_integration/huggingface_transformers/README.md +++ b/third_party_integration/huggingface_transformers/README.md @@ -1,9 +1,11 @@ # Integrating NNCF into Transformers + https://github.com/huggingface/transformers -This folder contains a git patch to enable NNCF-based quantization for XNLI, SQuAD and GLUE training pipelines of the huggingface transformers repository. +This folder contains a git patch to enable NNCF-based quantization for XNLI, SQuAD and GLUE training pipelines of the huggingface transformers repository. Instructions: + 1. Apply the `0001-Modifications-for-NNCF-usage.patch` file to the huggingface transformers repository checked out at commit id: `bd469c40659ce76c81f69c7726759d249b4aef49` 2. Install the `transformers` library and the example scripts from the patched repository as described in the documentation for the huggingface transformers repository. 
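The new `tests/torch/test_transform_fn.py` above exercises the `nncf.Dataset(dataloader, transform_fn)` plus `nncf.quantize(model, calibration_dataset)` calibration flow. Below is a minimal sketch of the same pattern for a dataloader that yields dict-style batches; the `"image"` key and the stand-in convolutional model are hypothetical and only illustrate how a transform function adapts a data item to the model's `forward()` signature:

```python
import torch

import nncf

# Hypothetical dataloader yielding dict-style batches: {"image": ..., "label": ...}
dataset = [{"image": torch.zeros((3, 32, 32), dtype=torch.float32), "label": torch.tensor(0)}]
dataloader = torch.utils.data.DataLoader(dataset, batch_size=1)


def dict_transform_fn(data_item):
    # Return only the tensor(s) that the model's forward() expects
    return data_item["image"]


model = torch.nn.Sequential(torch.nn.Conv2d(3, 8, kernel_size=3), torch.nn.ReLU())  # stand-in model
calibration_dataset = nncf.Dataset(dataloader, dict_transform_fn)
quantized_model = nncf.quantize(model, calibration_dataset)
```

When the model takes several inputs, the transform function can return a tuple instead, as `multiple_inputs_transform_fn` in the test above does.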
@@ -12,13 +14,11 @@ Instructions: The NNCF configs to be used in this way are also provided in the same patch on a per-model, per-compression algorithm basis. Distributed multiprocessing is also supported, simply use the corresponding version of the command line in the huggingface transformers repository with the same additional `--nncf_config` parameter. - - 4. While running with the `--nncf_config` option, the training scripts will output NNCF-wrapped model checkpoints instead of the regular ones. You may evaluate these checkpoints using the same command lines for training above, but with the`--do_train` key omitted. In order to export these checkpoints into ONNX format, further add `--to_onnx ` to your evaluation command line parameters. See exact command lines for each case in the model notes below. -Note that in all cases the training hyperparameters might have to be adjusted to accomodate the hardware you have available. +Note that in all cases the training hyperparameters might have to be adjusted to accommodate the hardware you have available. -## Current best results: +## Current best results All models use as their baselines the checkpoints obtained with the scripts and command line parameters from the corresponding sections in the original repository documentation. While fine-tuning the quantized model, the hyperparameters were left unchanged, i.e. the difference in the training script invocation was limited to adding `--nncf_config` option and specifying the pre-trained baseline model as the starting point for quantization fine-tuning. For RoBERTa-MNLI, no baseline model finetuning was necessary since the `roberta-large-mnli` model pretrained on MNLI was already available for download. @@ -38,7 +38,6 @@ _INT8 model (symmetric weights, asymmetric activations quantization)_ - 77.22% a `python examples/pytorch/text-classification/run_xnli.py --model_name_or_path bert_xnli_int8 --language zh --train_language zh --do_eval --per_gpu_eval_batch_size 1 --max_seq_length 128 --output_dir bert_xnli_int8 --nncf_config nncf_bert_config_xnli.json --to_onnx bert_xnli_int8.onnx` - ### BERT-SQuAD v1.1 _Full-precision FP32 baseline model_ - bert-large-uncased-whole-word-masking model, trained on SQuAD v1.1 - 93.21% F1, 87.2% EM on the dev set, @@ -59,7 +58,6 @@ _INT8 model (symmetric quantization) + Knowledge Distillation_ - 92.89% F1, 86.6 `python examples/pytorch/question-answering/run_qa.py --model_name_or_path bert_squad_int8 --do_eval --dataset_name squad --max_seq_length 384 --doc_stride 128 --output_dir bert_squad_int8 --per_gpu_eval_batch_size=1 --nncf_config nncf_bert_config_squad.json --to_onnx bert_squad_int8.onnx` - ### BERT-CoNLL2003 _Full-precision FP32 baseline model_ - bert-base-cased model, trained on CoNLL2003 - 99.17% acc, 95.03% F1 @@ -70,12 +68,10 @@ _INT8 model (symmetric quantization)_ - 99.18% acc, 95.31% F1 `python examples/pytorch/token-classification/run_ner.py --model_name_or_path *path_to_fp32_finetuned_model* --dataset_name conll2003 --output_dir bert_base_cased_conll_int8 --do_train --do_eval --save_strategy epoch --evaluation_strategy epoch --nncf_config nncf_bert_config_conll.json` - **Fine-tuned INT8 model evaluation and ONNX export command line:** `python examples/pytorch/token-classification/run_ner.py --model_name_or_path bert_base_cased_conll_int8 --dataset_name conll2003 --output_dir bert_base_cased_conll_int8 --do_eval --nncf_config nncf_bert_config_squad.json --to_onnx bert_base_cased_conll_int8.onnx` - ### BERT-MRPC _Full-precision FP32 baseline model_ - 
bert-base-cased-finetuned-mrpc, 84.56% acc @@ -100,12 +96,10 @@ _INT8 model (asymmetrically quantized)_ - 89.25% accuracy (matched), 88.9% accur `python examples/pytorch/text-classification/run_glue.py --model_name_or_path roberta-large-mnli --task_name mnli --do_train --do_eval --per_gpu_train_batch_size 24 --per_gpu_eval_batch_size 1 --learning_rate 2e-5 --num_train_epochs 3.0 --max_seq_length 128 --output_dir roberta_mnli_int8 --save_steps 400 --nncf_config nncf_roberta_config_mnli.json` - **Fine-tuned INT8 model evaluation and ONNX export command line:** `python examples/pytorch/text-classification/run_glue.py --model_name_or_path roberta_mnli_int8 --task_name mnli --do_eval --learning_rate 2e-5 --num_train_epochs 3.0 --max_seq_length 128 --per_gpu_eval_batch_size 1 --output_dir roberta_mnli_int8 --save_steps 400 --nncf_config nncf_roberta_config_mnli.json --to_onnx roberta_mnli_int8.onnx` - ### DistilBERT-SST-2 _Full-precision FP32 baseline model_ - distilbert-base-uncased-finetuned-sst-2-english, pre-trained on SST-2 - 91.1% accuracy @@ -116,12 +110,10 @@ _INT8 model (symmetrically quantized)_ - 90.94% accuracy `python examples/pytorch/text-classification/run_glue.py --model_name_or_path distilbert-base-uncased-finetuned-sst-2-english --task_name sst2 --do_train --do_eval --per_gpu_train_batch_size 16 --per_gpu_eval_batch_size 1 --learning_rate 5e-5 --num_train_epochs 3.0 --max_seq_length 128 --output_dir distilbert_sst2_int8 --save_steps 100000 --nncf_config nncf_distilbert_config_sst2.json` - **Fine-tuned INT8 model evaluation and ONNX export command line:** `python examples/pytorch/text-classification/run_glue.py --model_name_or_path distilbert_sst2_int8 --task_name sst2 --do_eval --per_gpu_eval_batch_size 1 --max_seq_length 128 --output_dir distilbert_sst2_int8 --save_steps 100000 --nncf_config nncf_distilbert_config_sst2.json --to_onnx distilbert_sst2_int8.onnx` - ### MobileBERT-SQuAD v1.1 _Full-precision FP32 baseline model_ - google/mobilebert-uncased, trained on SQuAD v1.1 - 89.98% F1, 82.61% EM on the dev set, @@ -142,7 +134,6 @@ _Full-precision FP32 baseline model_ - 19.73 perplexity on the test set _INT8 model (symmetric quantization)_ - 20.9 perplexity on the test set - **INT8 model quantization-aware training command line (trained on 1x Tesla V100):** `python examples/pytorch/language-modeling/run_clm.py --model_name_or_path --do_train --do_eval --dataset_name wikitext --num_train_epochs 3 --output_dir gpt2_wikitext2_int8 --per_gpu_eval_batch_size=1 --per_gpu_train_batch_size=4 --save_steps=591 --nncf_config nncf_gpt2_config_wikitext_hw_config.json` @@ -150,4 +141,3 @@ _INT8 model (symmetric quantization)_ - 20.9 perplexity on the test set **Fine-tuned INT8 model evaluation and ONNX export command line:** `python examples/pytorch/language-modeling/run_clm.py --model_name_or_path gpt2_wikitext2_int8 --do_eval --dataset_name wikitext --output_dir gpt2_wikitext2_int8 --per_gpu_eval_batch_size=1 --nncf_config nncf_gpt2_config_wikitext_hw_config.json --to_onnx gpt2_wikitext2_int8.onnx` - diff --git a/tools/benchmark.py b/tools/benchmark.py index 5d3bafdff9b..2717fb881df 100644 --- a/tools/benchmark.py +++ b/tools/benchmark.py @@ -1,5 +1,17 @@ +# Copyright (c) 2023 Intel Corporation +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + import math import time +from typing import Dict, List import torch import torch.distributed as dist @@ -15,7 +27,7 @@ def warmup(layer, input_, runs, forward_only=False): new_i[0].sum().backward() -def run_wall(layer, input_size_, device, runs, is_print=True, dtype=torch.float, output=None): +def run_wall(layer, input_size_, device, runs, is_print=True, dtype=torch.float) -> Dict[str, float]: input_ = torch.randn(input_size_, device=torch.device(device), dtype=dtype) # Force CUDA initialization & warm up @@ -35,17 +47,15 @@ def run_wall(layer, input_size_, device, runs, is_print=True, dtype=torch.float, if is_print: print("Forward&Backward: {0:.3f} {1}".format(fbtime, ctime)) - if output is not None: - output.append({"forward + backward": fbtime}) + return {"forward + backward": fbtime} -def run_profile(layer, input_size_, device, runs, forward_only=False, dtype=torch.float, output=None): +def run_profile(layer, input_size_, device, runs, forward_only=False, dtype=torch.float) -> Dict[str, float]: input_ = torch.randn(input_size_, device=torch.device(device), dtype=dtype) # Force CUDA initialization & warm up warmup(layer, input_, 100, forward_only) - start = time.time() forward_min = math.inf forward_time = 0 backward_min = math.inf @@ -82,18 +92,15 @@ def run_profile(layer, input_size_, device, runs, forward_only=False, dtype=torc ) ) - if output is not None: - output.append( - { - "forward_min": forward_min, - "forward_avg": forward_average, - "backward_min": backward_min, - "backward_avg": backward_average, - } - ) + return { + "forward_min": forward_min, + "forward_avg": forward_average, + "backward_min": backward_min, + "backward_avg": backward_average, + } -def run_worker(gpu, world_size, layer, input_size_, runs, dtype=torch.float, output=None): +def run_worker(gpu, world_size, layer, input_size_, runs, dtype=torch.float, output: List[Dict[str, int]] = None): dist.init_process_group(backend="nccl", init_method="tcp://127.0.0.1:8899", world_size=world_size, rank=gpu) device = torch.device("cuda:%d" % gpu) @@ -110,4 +117,6 @@ def run_worker(gpu, world_size, layer, input_size_, runs, dtype=torch.float, out run_model = layer.to(device) run_model = nn.parallel.DistributedDataParallel(run_model, device_ids=[gpu]) - run_wall(run_model, run_size, device, runs, (gpu == 0), dtype, output) + retval = run_wall(run_model, run_size, device, runs, (gpu == 0), dtype) + if output is not None: + output.append(retval) diff --git a/tools/benchmark_quantize_layers.py b/tools/benchmark_quantize_layers.py index fb72b6455ba..ec4f4e8a443 100644 --- a/tools/benchmark_quantize_layers.py +++ b/tools/benchmark_quantize_layers.py @@ -9,22 +9,26 @@ # See the License for the specific language governing permissions and # limitations under the License. 
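The refactored timing helpers in `tools/benchmark.py` above now return their measurements as a `Dict[str, float]` instead of appending to an `output` argument. A minimal usage sketch, assuming a hypothetical layer, input shape, and run count:

```python
import torch
from torch import nn

from tools.benchmark import run_profile, run_wall

device = "cuda" if torch.cuda.is_available() else "cpu"
layer = nn.Conv2d(3, 16, kernel_size=3).to(device)  # hypothetical layer under test
input_size = [8, 3, 64, 64]                         # hypothetical input shape

results = []
# run_wall returns {"forward + backward": <time>}
results.append(run_wall(layer, input_size, device, runs=10))
# run_profile returns the min and average forward/backward timings
results.append(run_profile(layer, input_size, device, runs=10))
```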
-import os +import sys +from dataclasses import asdict +from dataclasses import dataclass +from enum import Enum from itertools import product -from typing import Any, Optional, Tuple +from typing import Any, Dict, List, Optional, Tuple import pandas as pd import torch import torch.multiprocessing as mp -from torch import nn from tqdm import tqdm from nncf.common.quantization.structs import QuantizationMode from nncf.torch.quantization.layers import AsymmetricQuantizer +from nncf.torch.quantization.layers import BaseQuantizer from nncf.torch.quantization.layers import PTQuantizerSpec from nncf.torch.quantization.layers import SymmetricQuantizer from nncf.torch.quantization.layers import get_per_channel_scale_shape -from nncf.torch.utils import sum_like +from nncf.torch.quantization.reference import ReferenceBackendType +from nncf.torch.quantization.reference import ReferenceQuantize from tools.benchmark import run_profile from tools.benchmark import run_wall from tools.benchmark import run_worker @@ -38,56 +42,115 @@ HIGH_BATCH_INPUT_SIZE = [128, 96, 64, 64] -TEST_PLACES = ["weights", "activations"] -TEST_GRANULARITY = ["per_tensor", "per_channel"] -TEST_SYMMETRIC = [True, False] -TEST_DEVICES = [torch.device("cuda"), torch.device("cpu")] -TEST_BATCHES = [ - { - "mode": "low batch", - "input_size": LOW_BATCH_INPUT_SIZE, - "runs": {torch.device("cuda"): GPU_RUNS_LOW_BATCH, torch.device("cpu"): CPU_RUNS}, - }, - { - "mode": "high batch", - "input_size": HIGH_BATCH_INPUT_SIZE, - "runs": {torch.device("cuda"): GPU_RUNS_HIGH_BATCH, torch.device("cpu"): CPU_RUNS}, - }, +class BatchMode(Enum): + LOW = "low" + HIGH = "high" + + +class ExecutionType(Enum): + REGULAR = "regular" + DATA_PARALLEL = "data_parallel" + DISTRIBUTED_DATA_PARALLEL = "distributed_data_parallel" + + +class TimingMode(Enum): + KERNEL = "kernel" + WALL = "wall" + + +@dataclass +class BatchDescriptor: + mode: BatchMode + input_size: List[int] + num_runs: Dict[torch.device, int] + + +class TensorType(Enum): + WEIGHTS = "weights" + ACTIVATIONS = "activations" + + +class GranularityType(Enum): + PER_TENSOR = "per_tensor" + PER_CHANNEL = "per_channel" + + +TEST_TENSOR_TYPES: List[TensorType] = [TensorType.WEIGHTS, TensorType.ACTIVATIONS] +TEST_GRANULARITY: List[GranularityType] = [GranularityType.PER_TENSOR, GranularityType.PER_CHANNEL] +TEST_SYMMETRIC: List[bool] = [True, False] +TEST_DEVICES: List[torch.device] = [torch.device("cuda"), torch.device("cpu")] + +TEST_BATCHES: List[BatchDescriptor] = [ + BatchDescriptor( + mode=BatchMode.LOW, + input_size=LOW_BATCH_INPUT_SIZE, + num_runs={torch.device("cuda"): GPU_RUNS_LOW_BATCH, torch.device("cpu"): CPU_RUNS}, + ), + BatchDescriptor( + mode=BatchMode.HIGH, + input_size=HIGH_BATCH_INPUT_SIZE, + num_runs={torch.device("cuda"): GPU_RUNS_HIGH_BATCH, torch.device("cpu"): CPU_RUNS}, + ), ] -TEST_DTYPES = [torch.float, torch.half] -TEST_DISTR_MODE = ["SYNK", "DATAPARALLEL", "DATADISTRIBUTED"] -TEST_NARROW_RANGE = [False, True] -TEST_TIMING_MODE = ["KERNEL", "WALL"] -TEST_REFERENCE = [True, False] - -TEST_PARAMS_STRUCT = [ - { - "dtype": dtype, - "device": device, - "batch": batch, - "place": place, - "granularity": granularity, - "symmetric": symmetric, - "narrow_range": narrow_range, - "mode": distr_mode, - "timing": timing, - "ref": ref, - } - for dtype, device, distr_mode, place, granularity, symmetric, narrow_range, timing, ref, batch in product( +TEST_DTYPES: List[torch.dtype] = [torch.float, torch.half] +TEST_EXEC_TYPES: List[ExecutionType] = [ + ExecutionType.REGULAR, + 
ExecutionType.DISTRIBUTED_DATA_PARALLEL, + ExecutionType.DATA_PARALLEL, +] +TEST_NARROW_RANGE: List[bool] = [False, True] +TEST_TIMING_MODE: List[TimingMode] = [TimingMode.WALL, TimingMode.KERNEL] +TEST_REFERENCE: List[bool] = [False, True] + + +@dataclass +class ParamStruct: + dtype: torch.dtype + device: torch.device + exec_type: ExecutionType + batch: BatchDescriptor + tensor_type: TensorType + granularity: GranularityType + symmetric: bool + narrow_range: bool + timing_mode: TimingMode + ref: bool + + def to_dict(self) -> Dict: + dct = asdict(self) + dct.pop("batch") + dct["num_runs"] = self.batch.num_runs[self.device] + dct["input_size"] = self.batch.input_size + return dct + + +TEST_PARAM_STRUCTS: List[ParamStruct] = [ + ParamStruct( + dtype=dtype, + device=device, + exec_type=exec_type, + batch=batch, + tensor_type=tensor_type, + granularity=granularity, + symmetric=symmetric, + narrow_range=narrow_range, + timing_mode=timing, + ref=ref, + ) + for ref, timing, narrow_range, dtype, exec_type, batch, device, tensor_type, granularity, symmetric, in product( + TEST_REFERENCE, + TEST_TIMING_MODE, + TEST_NARROW_RANGE, TEST_DTYPES, + TEST_EXEC_TYPES, + TEST_BATCHES, TEST_DEVICES, - TEST_DISTR_MODE, - TEST_PLACES, + TEST_TENSOR_TYPES, TEST_GRANULARITY, TEST_SYMMETRIC, - TEST_NARROW_RANGE, - TEST_TIMING_MODE, - TEST_REFERENCE, - TEST_BATCHES, ) if not (device == torch.device("cpu") and dtype == torch.half) - and not (device == torch.device("cpu") and distr_mode != "SYNK") - and not (device == torch.device("cuda") and distr_mode != "SYNK" and batch["mode"] == "low_batch") + and not (device == torch.device("cpu") and exec_type == ExecutionType.DISTRIBUTED_DATA_PARALLEL) ] @@ -105,125 +168,63 @@ def __init__( super().__init__(num_bits, mode, signedness_to_force, narrow_range, half_range, scale_shape, logarithm_scale) -# reference impl -class ReferenceQuantizeSymmetric(torch.autograd.Function): - # pylint:disable=abstract-method - @staticmethod - def forward(ctx, input_, scale, bits): - level_high = scale.new_tensor([2 ** (bits - 1) - 1]) - level_low = scale.new_tensor([-(level_high + 1)]) - s = level_high / scale - - output = input_ * s - output = output.clamp(min=level_low[0], max=level_high[0]) - output = output.round() - output = output / s - - ctx.save_for_backward(input_, scale, output) - ctx.level_high = level_high - ctx.level_low = level_low - - return output - - @staticmethod - def backward(ctx: Any, *grad_outputs: Any) -> Any: - grad_output = grad_outputs[0] - input_, scale, output = ctx.saved_tensors - level_high = ctx.level_high - level_low = ctx.level_low - - alpha = float(level_low) / float(level_high) - mask_hi = (input_ > scale).type(input_.dtype) - mask_lo = (input_ < scale * alpha).type(input_.dtype) - mask_in = 1 - mask_hi - mask_lo - - val_grad_out = mask_hi + alpha * mask_lo - err = (output - input_) * scale.reciprocal() - grad_scale = grad_output * (err * mask_in + val_grad_out) - grad_scale = sum_like(grad_scale, scale) +RQ = ReferenceQuantize(backend_type=ReferenceBackendType.TORCH) - # calc gradient for input - grad_input = grad_output * mask_in - return grad_input, grad_scale, None +def get_module(params_struct: ParamStruct) -> BaseQuantizer: + input_shape = params_struct.batch.input_size + is_weights = params_struct.tensor_type == TensorType.WEIGHTS + scale_shape = [ + 1, + ] + if params_struct.granularity == GranularityType.PER_CHANNEL: + scale_shape = get_per_channel_scale_shape(input_shape, is_weights=is_weights) + specs = 
DefaultedPTQuantizerSpec(scale_shape=scale_shape, narrow_range=params_struct.narrow_range, num_bits=NBITS) -class ReferenceQuantize(nn.Module): - def __init__(self, num_bits=8, input_shape=None, is_weights=True, per_channel=False): - super().__init__() - self.input_shape = input_shape - self.is_weights = is_weights - scale_shape = [1] - if per_channel: - scale_shape = get_per_channel_scale_shape(self.input_shape, self.is_weights) + module_cls = SymmetricQuantizer if params_struct.symmetric else AsymmetricQuantizer + m = module_cls(specs) + m = m.to(params_struct.device) + if params_struct.dtype == torch.half: + m.half() - self.scale = nn.Parameter(torch.ones(scale_shape)) - self.num_bits = num_bits - self.level_high = 2 ** (self.num_bits - 1) - 1 - self.level_low = -(self.level_high + 1) - self.quantize = ReferenceQuantizeSymmetric.apply - - def get_scale(self): - return self.scale - - def forward(self, input_): - return self.quantize(input_, self.scale, self.num_bits) - - -def get_module(params, per_tensor_scale_shape): - input_shape = params["batch"]["input_size"] - is_weights = params["place"] == "weights" - - if params["ref"]: - module = ReferenceQuantize(NBITS, input_shape, is_weights, per_channel=params["granularity"] == "per_channel") - else: - scale_shape = per_tensor_scale_shape - if params["granularity"] == "per_channel": - scale_shape = get_per_channel_scale_shape(input_shape, is_weights=is_weights) - specs = DefaultedPTQuantizerSpec(scale_shape=scale_shape, narrow_range=params["narrow_range"], num_bits=NBITS) - - module_cls = SymmetricQuantizer if params["symmetric"] else AsymmetricQuantizer - module = module_cls(specs) - - module = module.to(params["device"]) - if params["dtype"] == torch.half: - module.half() - - if params["ref"] and params["mode"] == "DATAPARALLEL": - module = nn.parallel.DataParallel(module, range(torch.cuda.device_count())) - return module + return m if __name__ == "__main__": - file_name = "benchmark_quantize_layers_result.csv" if len(os.argv) == 1 else os.argv[1] + file_name = "benchmark_quantize_layers_result.csv" if len(sys.argv) == 1 else sys.argv[1] print(f"Benchmark results will be saved to file {file_name}") - benchmark_data = [] - per_tensor_scale_shape = (1,) + benchmark_data = [] # type: List[Dict[str, Any]] device_ids = range(torch.cuda.device_count()) ngpus_per_node = len(device_ids) world_size = ngpus_per_node - for params in tqdm(TEST_PARAMS_STRUCT): - print(params) - module = get_module(params, per_tensor_scale_shape) - call_fn = run_wall if params["timing"] == "WALL" else run_profile - runs = params["batch"]["runs"][params["device"]] - - input_size = params["batch"]["input_size"] - if params["mode"] == "DATADISTRIBUTED": - mp.spawn( - run_worker, - nprocs=ngpus_per_node, - args=(world_size, module, input_size, runs, params["dtype"], benchmark_data), - ) + for param_struct in tqdm(TEST_PARAM_STRUCTS): + param_struct: ParamStruct + print(param_struct) + module = get_module(param_struct) + call_fn = run_wall if param_struct.timing_mode == TimingMode.WALL else run_profile + num_runs = param_struct.batch.num_runs[param_struct.device] + + input_size = param_struct.batch.input_size + if param_struct.exec_type == ExecutionType.DISTRIBUTED_DATA_PARALLEL: + output = [] # type: List[Dict[str, float]] + try: + mp.spawn( + run_worker, + nprocs=ngpus_per_node, + args=(world_size, module, input_size, num_runs, param_struct.dtype, output), + ) + run_data = output[0] + except: # pylint:disable=bare-except + run_data = {"time": -1} else: - call_fn(module, 
input_size, params["device"], runs, dtype=params["dtype"], output=benchmark_data) - batch_data = params.pop("batch") - batch_data.update({"runs": batch_data["runs"][params["device"]]}) - params.update(batch_data) - benchmark_data[-1] = {**params, **benchmark_data[-1]} + run_data = call_fn(module, input_size, param_struct.device, num_runs, dtype=param_struct.dtype) + + runtime = next(iter(run_data.values())) + benchmark_data.append({**param_struct.to_dict(), "time_ms": runtime}) - df = pd.DataFrame(benchmark_data) + df = pd.DataFrame(benchmark_data) - df.to_csv(file_name, index=False) + df.to_csv(file_name, index=False) print("Done!") diff --git a/tools/collect_pylint_input_files_for_backend.py b/tools/collect_pylint_input_files_for_backend.py new file mode 100755 index 00000000000..94d10c99d7b --- /dev/null +++ b/tools/collect_pylint_input_files_for_backend.py @@ -0,0 +1,59 @@ +import os +import subprocess +import sys +from collections import defaultdict +from pathlib import Path + + +def is_in_backend_directory(file_path: Path, backend: str): + return backend in file_path.parts + + +def is_file_name_starts_with_backend_name(file_path: Path, backend: str): + return file_path.stem.startswith(backend) + + +PYTHON_FILES_EXT = ".py" +BACKENDS = ["torch", "tensorflow", "openvino", "onnx"] +COMMON_BACKEND_NAME = "common" +REPO_DIR = Path(__file__).parents[1] +IGNORED_FILES = ["docs/api/source/conf.py"] +IGNORED_DIRS = ["tools"] +CHECKS_FILE_PATH_BELONGS_TO_BACKEND = [is_in_backend_directory, is_file_name_starts_with_backend_name] + + +def main(target_backend: str): + if target_backend not in BACKENDS + [COMMON_BACKEND_NAME]: + raise RuntimeError( + f"Wrong backend passed: {target_backend}. Please choose one of available backends: {BACKENDS}" + ) + + cmd_output = subprocess.check_output("git ls-files", shell=True, cwd=REPO_DIR).decode() + file_paths = list(map(Path, cmd_output.split(os.linesep))) + python_file_paths = [file_path for file_path in file_paths if file_path.suffix == PYTHON_FILES_EXT] + # 1) Ignore some dirs + python_file_paths = [ + file_path + for file_path in python_file_paths + if not any(os.path.commonpath([file_path, dir_name]) for dir_name in IGNORED_DIRS) + ] + + # 2) Ignore some files + for ignored_path in IGNORED_FILES: + python_file_paths.remove(Path(ignored_path)) + + # 3) Filter files by backend + backend_files_map = defaultdict(list) + for file_path in python_file_paths: + for backend in BACKENDS: + if any(check(file_path, backend) for check in CHECKS_FILE_PATH_BELONGS_TO_BACKEND): + backend_files_map[backend].append(file_path) + break + else: + backend_files_map[COMMON_BACKEND_NAME].append(file_path) + + print(*backend_files_map[target_backend], sep=os.linesep) + + +if __name__ == "__main__": + main(sys.argv[1]) diff --git a/tools/update_eval_results.py b/tools/update_eval_results.py index 58744599409..1197785e1f5 100644 --- a/tools/update_eval_results.py +++ b/tools/update_eval_results.py @@ -41,7 +41,7 @@ # python tools/update_eval_results.py -f tf -r path/to/metrics.json -i -BASE_CHECKPOINT_URL = "https://storage.openvinotoolkit.org/repositories/nncf/models/develop/" +BASE_CHECKPOINT_URL = "https://storage.openvinotoolkit.org/repositories/nncf/models/v2.6.0/" @dataclass